From 047d46ea5383cf339ca41a22020dde2a09203ed3 Mon Sep 17 00:00:00 2001 From: Keepers Date: Tue, 5 Dec 2023 15:18:49 -0700 Subject: [PATCH] add new funcs and tidy up drive limiter (#4734) adds some new functions to the drive limiter that will be used specifically in the tree-based backup process. Also updates the limiter tests to have separate versions for the tree and non-tree variations. In this PR, the tree variation will definitely fail. The next PR will be focused on backup process corrections needed to ensure that the limit handling is correct according to the existing tests. --- #### Does this PR need a docs update or release note? - [x] :no_entry: No #### Type of change - [x] :sunflower: Feature #### Issue(s) * #4689 #### Test Plan - [x] :zap: Unit test - [x] :green_heart: E2E --- .../m365/collection/drive/collections.go | 12 +- .../m365/collection/drive/collections_test.go | 1470 +++++++++-------- .../m365/collection/drive/collections_tree.go | 148 +- .../collection/drive/collections_tree_test.go | 721 +++++--- .../m365/collection/drive/delta_tree.go | 66 +- .../m365/collection/drive/delta_tree_test.go | 38 +- src/internal/m365/collection/drive/limiter.go | 50 +- .../m365/collection/drive/limiter_test.go | 1120 ++++++------- 8 files changed, 1901 insertions(+), 1724 deletions(-) diff --git a/src/internal/m365/collection/drive/collections.go b/src/internal/m365/collection/drive/collections.go index 1c199025e..8d5f2164c 100644 --- a/src/internal/m365/collection/drive/collections.go +++ b/src/internal/m365/collection/drive/collections.go @@ -29,6 +29,8 @@ import ( "github.com/alcionai/corso/src/pkg/services/m365/api/pagers" ) +var errGetTreeNotImplemented = clues.New("forced error: cannot run tree-based backup: incomplete implementation") + const ( restrictedDirectory = "Site Pages" @@ -292,14 +294,14 @@ func (c *Collections) Get( errs *fault.Bus, ) ([]data.BackupCollection, bool, error) { if c.ctrl.ToggleFeatures.UseDeltaTree { - _, _, err := c.getTree(ctx, 
prevMetadata, ssmb, errs) + colls, canUsePrevBackup, err := c.getTree(ctx, prevMetadata, ssmb, errs) if err != nil { return nil, false, clues.Wrap(err, "processing backup using tree") } - return nil, - false, - clues.New("forced error: cannot run tree-based backup: incomplete implementation") + return colls, + canUsePrevBackup, + errGetTreeNotImplemented } deltasByDriveID, prevPathsByDriveID, canUsePrevBackup, err := deserializeAndValidateMetadata( @@ -856,7 +858,7 @@ func (c *Collections) PopulateDriveCollections( // Don't check for containers we've already seen. if _, ok := c.CollectionMap[driveID][id]; !ok { if id != lastContainerID { - if limiter.atLimit(stats, ignoreMe) { + if limiter.atLimit(stats) { break } diff --git a/src/internal/m365/collection/drive/collections_test.go b/src/internal/m365/collection/drive/collections_test.go index 875d21ba2..30c756eb4 100644 --- a/src/internal/m365/collection/drive/collections_test.go +++ b/src/internal/m365/collection/drive/collections_test.go @@ -172,11 +172,11 @@ func malwareItem( return c } -func driveRootItem(id string) models.DriveItemable { +func driveRootItem() models.DriveItemable { name := rootName item := models.NewDriveItem() item.SetName(&name) - item.SetId(&id) + item.SetId(ptr.To(rootID)) item.SetRoot(models.NewRoot()) item.SetFolder(models.NewFolder()) @@ -243,7 +243,19 @@ func toPath(elems ...string) string { } } -func fullPath(driveID any, elems ...string) string { +func fullPath(elems ...string) string { + return toPath(append( + []string{ + tenant, + path.OneDriveService.String(), + user, + path.FilesCategory.String(), + odConsts.DriveFolderPrefixBuilder(id(drive)).String(), + }, + elems...)...) +} + +func driveFullPath(driveID any, elems ...string) string { return toPath(append( []string{ tenant, @@ -255,7 +267,13 @@ func fullPath(driveID any, elems ...string) string { elems...)...) 
} -func parent(driveID any, elems ...string) string { +func parentDir(elems ...string) string { + return toPath(append( + []string{odConsts.DriveFolderPrefixBuilder(id(drive)).String()}, + elems...)...) +} + +func driveParentDir(driveID any, elems ...string) string { return toPath(append( []string{odConsts.DriveFolderPrefixBuilder(idx(drive, driveID)).String()}, elems...)...) @@ -347,19 +365,19 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "Invalid item", items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(item), name(item), parent(drive), rootID, -1), + driveRootItem(), + driveItem(id(item), name(item), driveParentDir(drive), rootID, -1), }, previousPaths: map[string]string{}, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.Error, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), + rootID: asNotMoved(t, driveFullPath(drive)), }, expectedContainerCount: 1, expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), + rootID: driveFullPath(drive), }, expectedExcludes: map[string]struct{}{}, expectedTopLevelPackages: map[string]struct{}{}, @@ -367,22 +385,22 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "Single File", items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(file), name(file), parent(drive), rootID, isFile), + driveRootItem(), + driveItem(id(file), name(file), driveParentDir(drive), rootID, isFile), }, previousPaths: map[string]string{}, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), + rootID: asNotMoved(t, driveFullPath(drive)), }, expectedItemCount: 1, expectedFileCount: 1, expectedContainerCount: 1, // Root folder is skipped since it's always present. 
expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), + rootID: driveFullPath(drive), }, expectedExcludes: makeExcludeMap(id(file)), expectedTopLevelPackages: map[string]struct{}{}, @@ -390,20 +408,20 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "Single Folder", items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder), }, previousPaths: map[string]string{}, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), - id(folder): asNew(t, fullPath(drive, name(folder))), + rootID: asNotMoved(t, driveFullPath(drive)), + id(folder): asNew(t, driveFullPath(drive, name(folder))), }, expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), - id(folder): fullPath(drive, name(folder)), + rootID: driveFullPath(drive), + id(folder): driveFullPath(drive, name(folder)), }, expectedItemCount: 1, expectedContainerCount: 2, @@ -413,21 +431,21 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "Single Folder created twice", // deleted a created with same name in between a backup items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), - driveItem(idx(folder, 2), name(folder), parent(drive), rootID, isFolder), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder), + driveItem(idx(folder, 2), name(folder), driveParentDir(drive), rootID, isFolder), }, previousPaths: map[string]string{}, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), - idx(folder, 2): asNew(t, fullPath(drive, 
name(folder))), + rootID: asNotMoved(t, driveFullPath(drive)), + idx(folder, 2): asNew(t, driveFullPath(drive, name(folder))), }, expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), - idx(folder, 2): fullPath(drive, name(folder)), + rootID: driveFullPath(drive), + idx(folder, 2): driveFullPath(drive, name(folder)), }, expectedItemCount: 1, expectedContainerCount: 2, @@ -437,90 +455,90 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "Single Package", items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(pkg), name(pkg), parent(drive), rootID, isPackage), + driveRootItem(), + driveItem(id(pkg), name(pkg), driveParentDir(drive), rootID, isPackage), }, previousPaths: map[string]string{}, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), - id(pkg): asNew(t, fullPath(drive, name(pkg))), + rootID: asNotMoved(t, driveFullPath(drive)), + id(pkg): asNew(t, driveFullPath(drive, name(pkg))), }, expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), - id(pkg): fullPath(drive, name(pkg)), + rootID: driveFullPath(drive), + id(pkg): driveFullPath(drive, name(pkg)), }, expectedItemCount: 1, expectedContainerCount: 2, expectedExcludes: map[string]struct{}{}, expectedTopLevelPackages: map[string]struct{}{ - fullPath(drive, name(pkg)): {}, + driveFullPath(drive, name(pkg)): {}, }, expectedCountPackages: 1, }, { name: "Single Package with subfolder", items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(pkg), name(pkg), parent(drive), rootID, isPackage), - driveItem(id(folder), name(folder), parent(drive, name(pkg)), id(pkg), isFolder), - driveItem(id(subfolder), name(subfolder), parent(drive, name(pkg)), id(pkg), isFolder), + driveRootItem(), + driveItem(id(pkg), name(pkg), driveParentDir(drive), rootID, isPackage), + driveItem(id(folder), name(folder), driveParentDir(drive, 
name(pkg)), id(pkg), isFolder), + driveItem(id(subfolder), name(subfolder), driveParentDir(drive, name(pkg)), id(pkg), isFolder), }, previousPaths: map[string]string{}, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), - id(pkg): asNew(t, fullPath(drive, name(pkg))), - id(folder): asNew(t, fullPath(drive, name(pkg), name(folder))), - id(subfolder): asNew(t, fullPath(drive, name(pkg), name(subfolder))), + rootID: asNotMoved(t, driveFullPath(drive)), + id(pkg): asNew(t, driveFullPath(drive, name(pkg))), + id(folder): asNew(t, driveFullPath(drive, name(pkg), name(folder))), + id(subfolder): asNew(t, driveFullPath(drive, name(pkg), name(subfolder))), }, expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), - id(pkg): fullPath(drive, name(pkg)), - id(folder): fullPath(drive, name(pkg), name(folder)), - id(subfolder): fullPath(drive, name(pkg), name(subfolder)), + rootID: driveFullPath(drive), + id(pkg): driveFullPath(drive, name(pkg)), + id(folder): driveFullPath(drive, name(pkg), name(folder)), + id(subfolder): driveFullPath(drive, name(pkg), name(subfolder)), }, expectedItemCount: 3, expectedContainerCount: 4, expectedExcludes: map[string]struct{}{}, expectedTopLevelPackages: map[string]struct{}{ - fullPath(drive, name(pkg)): {}, + driveFullPath(drive, name(pkg)): {}, }, expectedCountPackages: 3, }, { name: "1 root file, 1 folder, 1 package, 2 files, 3 collections", items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(idx(file, "inRoot"), namex(file, "inRoot"), parent(drive), rootID, isFile), - driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), - driveItem(id(pkg), name(pkg), parent(drive), rootID, isPackage), - driveItem(idx(file, "inFolder"), namex(file, "inFolder"), parent(drive, name(folder)), id(folder), isFile), - driveItem(idx(file, "inPackage"), namex(file, "inPackage"), parent(drive, 
name(pkg)), id(pkg), isFile), + driveRootItem(), + driveItem(idx(file, "inRoot"), namex(file, "inRoot"), driveParentDir(drive), rootID, isFile), + driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder), + driveItem(id(pkg), name(pkg), driveParentDir(drive), rootID, isPackage), + driveItem(idx(file, "inFolder"), namex(file, "inFolder"), driveParentDir(drive, name(folder)), id(folder), isFile), + driveItem(idx(file, "inPackage"), namex(file, "inPackage"), driveParentDir(drive, name(pkg)), id(pkg), isFile), }, previousPaths: map[string]string{}, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), - id(folder): asNew(t, fullPath(drive, name(folder))), - id(pkg): asNew(t, fullPath(drive, name(pkg))), + rootID: asNotMoved(t, driveFullPath(drive)), + id(folder): asNew(t, driveFullPath(drive, name(folder))), + id(pkg): asNew(t, driveFullPath(drive, name(pkg))), }, expectedItemCount: 5, expectedFileCount: 3, expectedContainerCount: 3, expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), - id(folder): fullPath(drive, name(folder)), - id(pkg): fullPath(drive, name(pkg)), + rootID: driveFullPath(drive), + id(folder): driveFullPath(drive, name(folder)), + id(pkg): driveFullPath(drive, name(pkg)), }, expectedTopLevelPackages: map[string]struct{}{ - fullPath(drive, name(pkg)): {}, + driveFullPath(drive, name(pkg)): {}, }, expectedCountPackages: 1, expectedExcludes: makeExcludeMap(idx(file, "inRoot"), idx(file, "inFolder"), idx(file, "inPackage")), @@ -528,24 +546,24 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "contains folder selector", items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(idx(file, "inRoot"), namex(file, "inRoot"), parent(drive), rootID, isFile), - driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), - driveItem(id(subfolder), name(subfolder), 
parent(drive, name(folder)), id(folder), isFolder), - driveItem(idx(folder, 2), name(folder), parent(drive, name(folder), name(subfolder)), id(subfolder), isFolder), - driveItem(id(pkg), name(pkg), parent(drive), rootID, isPackage), - driveItem(idx(file, "inFolder"), idx(file, "inFolder"), parent(drive, name(folder)), id(folder), isFile), - driveItem(idx(file, "inFolder2"), namex(file, "inFolder2"), parent(drive, name(folder), name(subfolder), name(folder)), idx(folder, 2), isFile), - driveItem(idx(file, "inFolderPackage"), namex(file, "inPackage"), parent(drive, name(pkg)), id(pkg), isFile), + driveRootItem(), + driveItem(idx(file, "inRoot"), namex(file, "inRoot"), driveParentDir(drive), rootID, isFile), + driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder), + driveItem(id(subfolder), name(subfolder), driveParentDir(drive, name(folder)), id(folder), isFolder), + driveItem(idx(folder, 2), name(folder), driveParentDir(drive, name(folder), name(subfolder)), id(subfolder), isFolder), + driveItem(id(pkg), name(pkg), driveParentDir(drive), rootID, isPackage), + driveItem(idx(file, "inFolder"), idx(file, "inFolder"), driveParentDir(drive, name(folder)), id(folder), isFile), + driveItem(idx(file, "inFolder2"), namex(file, "inFolder2"), driveParentDir(drive, name(folder), name(subfolder), name(folder)), idx(folder, 2), isFile), + driveItem(idx(file, "inFolderPackage"), namex(file, "inPackage"), driveParentDir(drive, name(pkg)), id(pkg), isFile), }, previousPaths: map[string]string{}, scope: (&selectors.OneDriveBackup{}).Folders([]string{name(folder)})[0], topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - id(folder): asNew(t, fullPath(drive, name(folder))), - id(subfolder): asNew(t, fullPath(drive, name(folder), name(subfolder))), - idx(folder, 2): asNew(t, fullPath(drive, name(folder), name(subfolder), name(folder))), + id(folder): asNew(t, driveFullPath(drive, name(folder))), + 
id(subfolder): asNew(t, driveFullPath(drive, name(folder), name(subfolder))), + idx(folder, 2): asNew(t, driveFullPath(drive, name(folder), name(subfolder), name(folder))), }, expectedItemCount: 5, expectedFileCount: 2, @@ -553,9 +571,9 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { // just "folder" isn't added here because the include check is done on the // parent path since we only check later if something is a folder or not. expectedPrevPaths: map[string]string{ - id(folder): fullPath(drive, name(folder)), - id(subfolder): fullPath(drive, name(folder), name(subfolder)), - idx(folder, 2): fullPath(drive, name(folder), name(subfolder), name(folder)), + id(folder): driveFullPath(drive, name(folder)), + id(subfolder): driveFullPath(drive, name(folder), name(subfolder)), + idx(folder, 2): driveFullPath(drive, name(folder), name(subfolder), name(folder)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: makeExcludeMap(idx(file, "inFolder"), idx(file, "inFolder2")), @@ -563,15 +581,15 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "prefix subfolder selector", items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(idx(file, "inRoot"), namex(file, "inRoot"), parent(drive), rootID, isFile), - driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), - driveItem(id(subfolder), name(subfolder), parent(drive, name(folder)), id(folder), isFolder), - driveItem(idx(folder, 2), name(folder), parent(drive, name(folder), name(subfolder)), id(subfolder), isFolder), - driveItem(id(pkg), name(pkg), parent(drive), rootID, isPackage), - driveItem(idx(file, "inFolder"), idx(file, "inFolder"), parent(drive, name(folder)), id(folder), isFile), - driveItem(idx(file, "inFolder2"), namex(file, "inFolder2"), parent(drive, name(folder), name(subfolder), name(folder)), idx(folder, 2), isFile), - driveItem(idx(file, "inFolderPackage"), namex(file, "inPackage"), parent(drive, name(pkg)), 
id(pkg), isFile), + driveRootItem(), + driveItem(idx(file, "inRoot"), namex(file, "inRoot"), driveParentDir(drive), rootID, isFile), + driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder), + driveItem(id(subfolder), name(subfolder), driveParentDir(drive, name(folder)), id(folder), isFolder), + driveItem(idx(folder, 2), name(folder), driveParentDir(drive, name(folder), name(subfolder)), id(subfolder), isFolder), + driveItem(id(pkg), name(pkg), driveParentDir(drive), rootID, isPackage), + driveItem(idx(file, "inFolder"), idx(file, "inFolder"), driveParentDir(drive, name(folder)), id(folder), isFile), + driveItem(idx(file, "inFolder2"), namex(file, "inFolder2"), driveParentDir(drive, name(folder), name(subfolder), name(folder)), idx(folder, 2), isFile), + driveItem(idx(file, "inFolderPackage"), namex(file, "inPackage"), driveParentDir(drive, name(pkg)), id(pkg), isFile), }, previousPaths: map[string]string{}, scope: (&selectors.OneDriveBackup{}).Folders( @@ -580,15 +598,15 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - id(subfolder): asNew(t, fullPath(drive, name(folder), name(subfolder))), - idx(folder, 2): asNew(t, fullPath(drive, name(folder), name(subfolder), name(folder))), + id(subfolder): asNew(t, driveFullPath(drive, name(folder), name(subfolder))), + idx(folder, 2): asNew(t, driveFullPath(drive, name(folder), name(subfolder), name(folder))), }, expectedItemCount: 3, expectedFileCount: 1, expectedContainerCount: 2, expectedPrevPaths: map[string]string{ - id(subfolder): fullPath(drive, name(folder), name(subfolder)), - idx(folder, 2): fullPath(drive, name(folder), name(subfolder), name(folder)), + id(subfolder): driveFullPath(drive, name(folder), name(subfolder)), + idx(folder, 2): driveFullPath(drive, name(folder), name(subfolder), name(folder)), }, expectedTopLevelPackages: map[string]struct{}{}, 
expectedExcludes: makeExcludeMap(idx(file, "inFolder2")), @@ -596,28 +614,28 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "match subfolder selector", items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(file), name(file), parent(drive), rootID, isFile), - driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), - driveItem(id(subfolder), name(subfolder), parent(drive, name(folder)), id(folder), isFolder), - driveItem(id(pkg), name(pkg), parent(drive), rootID, isPackage), - driveItem(idx(file, 1), namex(file, 1), parent(drive, name(folder)), id(folder), isFile), - driveItem(idx(file, "inSubfolder"), namex(file, "inSubfolder"), parent(drive, name(folder), name(subfolder)), id(subfolder), isFile), - driveItem(idx(file, 9), namex(file, 9), parent(drive, name(pkg)), id(pkg), isFile), + driveRootItem(), + driveItem(id(file), name(file), driveParentDir(drive), rootID, isFile), + driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder), + driveItem(id(subfolder), name(subfolder), driveParentDir(drive, name(folder)), id(folder), isFolder), + driveItem(id(pkg), name(pkg), driveParentDir(drive), rootID, isPackage), + driveItem(idx(file, 1), namex(file, 1), driveParentDir(drive, name(folder)), id(folder), isFile), + driveItem(idx(file, "inSubfolder"), namex(file, "inSubfolder"), driveParentDir(drive, name(folder), name(subfolder)), id(subfolder), isFile), + driveItem(idx(file, 9), namex(file, 9), driveParentDir(drive, name(pkg)), id(pkg), isFile), }, previousPaths: map[string]string{}, scope: (&selectors.OneDriveBackup{}).Folders([]string{toPath(name(folder), name(subfolder))})[0], topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - id(subfolder): asNew(t, fullPath(drive, name(folder), name(subfolder))), + id(subfolder): asNew(t, driveFullPath(drive, name(folder), name(subfolder))), }, expectedItemCount: 2, expectedFileCount: 1, 
expectedContainerCount: 1, // No child folders for subfolder so nothing here. expectedPrevPaths: map[string]string{ - id(subfolder): fullPath(drive, name(folder), name(subfolder)), + id(subfolder): driveFullPath(drive, name(folder), name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: makeExcludeMap(idx(file, "inSubfolder")), @@ -625,27 +643,27 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "not moved folder tree", items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder), }, previousPaths: map[string]string{ - id(folder): fullPath(drive, name(folder)), - id(subfolder): fullPath(drive, name(folder), name(subfolder)), + id(folder): driveFullPath(drive, name(folder)), + id(subfolder): driveFullPath(drive, name(folder), name(subfolder)), }, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), - id(folder): asNotMoved(t, fullPath(drive, name(folder))), + rootID: asNotMoved(t, driveFullPath(drive)), + id(folder): asNotMoved(t, driveFullPath(drive, name(folder))), }, expectedItemCount: 1, expectedFileCount: 0, expectedContainerCount: 2, expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), - id(folder): fullPath(drive, name(folder)), - id(subfolder): fullPath(drive, name(folder), name(subfolder)), + rootID: driveFullPath(drive), + id(folder): driveFullPath(drive, name(folder)), + id(subfolder): driveFullPath(drive, name(folder), name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: map[string]struct{}{}, @@ -653,27 +671,27 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "moved folder tree", items: []models.DriveItemable{ - driveRootItem(rootID), 
- driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder), }, previousPaths: map[string]string{ - id(folder): fullPath(drive, namex(folder, "a")), - id(subfolder): fullPath(drive, namex(folder, "a"), name(subfolder)), + id(folder): driveFullPath(drive, namex(folder, "a")), + id(subfolder): driveFullPath(drive, namex(folder, "a"), name(subfolder)), }, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), - id(folder): asMoved(t, fullPath(drive, namex(folder, "a")), fullPath(drive, name(folder))), + rootID: asNotMoved(t, driveFullPath(drive)), + id(folder): asMoved(t, driveFullPath(drive, namex(folder, "a")), driveFullPath(drive, name(folder))), }, expectedItemCount: 1, expectedFileCount: 0, expectedContainerCount: 2, expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), - id(folder): fullPath(drive, name(folder)), - id(subfolder): fullPath(drive, name(folder), name(subfolder)), + rootID: driveFullPath(drive), + id(folder): driveFullPath(drive, name(folder)), + id(subfolder): driveFullPath(drive, name(folder), name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: map[string]struct{}{}, @@ -681,28 +699,28 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "moved folder tree twice within backup", items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(idx(folder, 1), name(folder), parent(drive), rootID, isFolder), - driveItem(idx(folder, 2), name(folder), parent(drive), rootID, isFolder), + driveRootItem(), + driveItem(idx(folder, 1), name(folder), driveParentDir(drive), rootID, isFolder), + driveItem(idx(folder, 2), name(folder), driveParentDir(drive), rootID, isFolder), }, previousPaths: map[string]string{ - idx(folder, 1): fullPath(drive, namex(folder, 
"a")), - id(subfolder): fullPath(drive, namex(folder, "a"), name(subfolder)), + idx(folder, 1): driveFullPath(drive, namex(folder, "a")), + id(subfolder): driveFullPath(drive, namex(folder, "a"), name(subfolder)), }, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), - idx(folder, 2): asNew(t, fullPath(drive, name(folder))), + rootID: asNotMoved(t, driveFullPath(drive)), + idx(folder, 2): asNew(t, driveFullPath(drive, name(folder))), }, expectedItemCount: 1, expectedFileCount: 0, expectedContainerCount: 2, expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), - idx(folder, 2): fullPath(drive, name(folder)), - id(subfolder): fullPath(drive, name(folder), name(subfolder)), + rootID: driveFullPath(drive), + idx(folder, 2): driveFullPath(drive, name(folder)), + id(subfolder): driveFullPath(drive, name(folder), name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: map[string]struct{}{}, @@ -710,28 +728,28 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "deleted folder tree twice within backup", items: []models.DriveItemable{ - driveRootItem(rootID), - delItem(id(folder), parent(drive), rootID, isFolder), - driveItem(id(folder), name(drive), parent(drive), rootID, isFolder), - delItem(id(folder), parent(drive), rootID, isFolder), + driveRootItem(), + delItem(id(folder), driveParentDir(drive), rootID, isFolder), + driveItem(id(folder), name(drive), driveParentDir(drive), rootID, isFolder), + delItem(id(folder), driveParentDir(drive), rootID, isFolder), }, previousPaths: map[string]string{ - id(folder): fullPath(drive), - id(subfolder): fullPath(drive, namex(folder, "a"), name(subfolder)), + id(folder): driveFullPath(drive), + id(subfolder): driveFullPath(drive, namex(folder, "a"), name(subfolder)), }, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: 
assert.NoError, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), - id(folder): asDeleted(t, fullPath(drive, "")), + rootID: asNotMoved(t, driveFullPath(drive)), + id(folder): asDeleted(t, driveFullPath(drive, "")), }, expectedItemCount: 0, expectedFileCount: 0, expectedContainerCount: 1, expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), - id(subfolder): fullPath(drive, namex(folder, "a"), name(subfolder)), + rootID: driveFullPath(drive), + id(subfolder): driveFullPath(drive, namex(folder, "a"), name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: map[string]struct{}{}, @@ -739,29 +757,29 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "moved folder tree twice within backup including delete", items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), - delItem(id(folder), parent(drive), rootID, isFolder), - driveItem(idx(folder, 2), name(folder), parent(drive), rootID, isFolder), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder), + delItem(id(folder), driveParentDir(drive), rootID, isFolder), + driveItem(idx(folder, 2), name(folder), driveParentDir(drive), rootID, isFolder), }, previousPaths: map[string]string{ - id(folder): fullPath(drive, namex(folder, "a")), - id(subfolder): fullPath(drive, namex(folder, "a"), name(subfolder)), + id(folder): driveFullPath(drive, namex(folder, "a")), + id(subfolder): driveFullPath(drive, namex(folder, "a"), name(subfolder)), }, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), - idx(folder, 2): asNew(t, fullPath(drive, name(folder))), + rootID: asNotMoved(t, driveFullPath(drive)), + idx(folder, 2): asNew(t, driveFullPath(drive, name(folder))), }, expectedItemCount: 1, 
expectedFileCount: 0, expectedContainerCount: 2, expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), - idx(folder, 2): fullPath(drive, name(folder)), - id(subfolder): fullPath(drive, name(folder), name(subfolder)), + rootID: driveFullPath(drive), + idx(folder, 2): driveFullPath(drive, name(folder)), + id(subfolder): driveFullPath(drive, name(folder), name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: map[string]struct{}{}, @@ -769,28 +787,28 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "deleted folder tree twice within backup with addition", items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(idx(folder, 1), name(folder), parent(drive), rootID, isFolder), - delItem(idx(folder, 1), parent(drive), rootID, isFolder), - driveItem(idx(folder, 2), name(folder), parent(drive), rootID, isFolder), - delItem(idx(folder, 2), parent(drive), rootID, isFolder), + driveRootItem(), + driveItem(idx(folder, 1), name(folder), driveParentDir(drive), rootID, isFolder), + delItem(idx(folder, 1), driveParentDir(drive), rootID, isFolder), + driveItem(idx(folder, 2), name(folder), driveParentDir(drive), rootID, isFolder), + delItem(idx(folder, 2), driveParentDir(drive), rootID, isFolder), }, previousPaths: map[string]string{ - idx(folder, 1): fullPath(drive, namex(folder, "a")), - id(subfolder): fullPath(drive, namex(folder, "a"), name(subfolder)), + idx(folder, 1): driveFullPath(drive, namex(folder, "a")), + id(subfolder): driveFullPath(drive, namex(folder, "a"), name(subfolder)), }, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), + rootID: asNotMoved(t, driveFullPath(drive)), }, expectedItemCount: 1, expectedFileCount: 0, expectedContainerCount: 2, expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), - id(subfolder): fullPath(drive, name(folder), 
name(subfolder)), + rootID: driveFullPath(drive), + id(subfolder): driveFullPath(drive, name(folder), name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: map[string]struct{}{}, @@ -798,25 +816,25 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "moved folder tree with file no previous", items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), - driveItem(id(file), name(file), parent(drive, name(folder)), id(folder), isFile), - driveItem(id(folder), namex(folder, 2), parent(drive), rootID, isFolder), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(drive, name(folder)), id(folder), isFile), + driveItem(id(folder), namex(folder, 2), driveParentDir(drive), rootID, isFolder), }, previousPaths: map[string]string{}, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), - id(folder): asNew(t, fullPath(drive, namex(folder, 2))), + rootID: asNotMoved(t, driveFullPath(drive)), + id(folder): asNew(t, driveFullPath(drive, namex(folder, 2))), }, expectedItemCount: 2, expectedFileCount: 1, expectedContainerCount: 2, expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), - id(folder): fullPath(drive, namex(folder, 2)), + rootID: driveFullPath(drive), + id(folder): driveFullPath(drive, namex(folder, 2)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: makeExcludeMap(id(file)), @@ -824,24 +842,24 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "moved folder tree with file no previous 1", items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), - driveItem(id(file), name(file), parent(drive, 
name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(drive, name(folder)), id(folder), isFile), }, previousPaths: map[string]string{}, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), - id(folder): asNew(t, fullPath(drive, name(folder))), + rootID: asNotMoved(t, driveFullPath(drive)), + id(folder): asNew(t, driveFullPath(drive, name(folder))), }, expectedItemCount: 2, expectedFileCount: 1, expectedContainerCount: 2, expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), - id(folder): fullPath(drive, name(folder)), + rootID: driveFullPath(drive), + id(folder): driveFullPath(drive, name(folder)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: makeExcludeMap(id(file)), @@ -849,29 +867,29 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "moved folder tree and subfolder 1", items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), - driveItem(id(subfolder), name(subfolder), parent(drive), rootID, isFolder), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder), + driveItem(id(subfolder), name(subfolder), driveParentDir(drive), rootID, isFolder), }, previousPaths: map[string]string{ - id(folder): fullPath(drive, namex(folder, "a")), - id(subfolder): fullPath(drive, namex(folder, "a"), name(subfolder)), + id(folder): driveFullPath(drive, namex(folder, "a")), + id(subfolder): driveFullPath(drive, namex(folder, "a"), name(subfolder)), }, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), - id(folder): asMoved(t, 
fullPath(drive, namex(folder, "a")), fullPath(drive, name(folder))), - id(subfolder): asMoved(t, fullPath(drive, namex(folder, "a"), name(subfolder)), fullPath(drive, name(subfolder))), + rootID: asNotMoved(t, driveFullPath(drive)), + id(folder): asMoved(t, driveFullPath(drive, namex(folder, "a")), driveFullPath(drive, name(folder))), + id(subfolder): asMoved(t, driveFullPath(drive, namex(folder, "a"), name(subfolder)), driveFullPath(drive, name(subfolder))), }, expectedItemCount: 2, expectedFileCount: 0, expectedContainerCount: 3, expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), - id(folder): fullPath(drive, name(folder)), - id(subfolder): fullPath(drive, name(subfolder)), + rootID: driveFullPath(drive), + id(folder): driveFullPath(drive, name(folder)), + id(subfolder): driveFullPath(drive, name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: map[string]struct{}{}, @@ -879,29 +897,29 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "moved folder tree and subfolder 2", items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(subfolder), name(subfolder), parent(drive), rootID, isFolder), - driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), + driveRootItem(), + driveItem(id(subfolder), name(subfolder), driveParentDir(drive), rootID, isFolder), + driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder), }, previousPaths: map[string]string{ - id(folder): fullPath(drive, namex(folder, "a")), - id(subfolder): fullPath(drive, namex(folder, "a"), name(subfolder)), + id(folder): driveFullPath(drive, namex(folder, "a")), + id(subfolder): driveFullPath(drive, namex(folder, "a"), name(subfolder)), }, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), - id(folder): asMoved(t, fullPath(drive, namex(folder, "a")), 
fullPath(drive, name(folder))), - id(subfolder): asMoved(t, fullPath(drive, namex(folder, "a"), name(subfolder)), fullPath(drive, name(subfolder))), + rootID: asNotMoved(t, driveFullPath(drive)), + id(folder): asMoved(t, driveFullPath(drive, namex(folder, "a")), driveFullPath(drive, name(folder))), + id(subfolder): asMoved(t, driveFullPath(drive, namex(folder, "a"), name(subfolder)), driveFullPath(drive, name(subfolder))), }, expectedItemCount: 2, expectedFileCount: 0, expectedContainerCount: 3, expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), - id(folder): fullPath(drive, name(folder)), - id(subfolder): fullPath(drive, name(subfolder)), + rootID: driveFullPath(drive), + id(folder): driveFullPath(drive, name(folder)), + id(subfolder): driveFullPath(drive, name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: map[string]struct{}{}, @@ -909,37 +927,37 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "move subfolder when moving parent", items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(idx(folder, 2), namex(folder, 2), parent(drive), rootID, isFolder), - driveItem(id(item), name(item), parent(drive, namex(folder, 2)), idx(folder, 2), isFile), + driveRootItem(), + driveItem(idx(folder, 2), namex(folder, 2), driveParentDir(drive), rootID, isFolder), + driveItem(id(item), name(item), driveParentDir(drive, namex(folder, 2)), idx(folder, 2), isFile), // Need to see the parent folder first (expected since that's what Graph // consistently returns). 
- driveItem(id(folder), namex(folder, "a"), parent(drive), rootID, isFolder), - driveItem(id(subfolder), name(subfolder), parent(drive, namex(folder, "a")), id(folder), isFolder), - driveItem(idx(item, 2), namex(item, 2), parent(drive, namex(folder, "a"), name(subfolder)), id(subfolder), isFile), - driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), + driveItem(id(folder), namex(folder, "a"), driveParentDir(drive), rootID, isFolder), + driveItem(id(subfolder), name(subfolder), driveParentDir(drive, namex(folder, "a")), id(folder), isFolder), + driveItem(idx(item, 2), namex(item, 2), driveParentDir(drive, namex(folder, "a"), name(subfolder)), id(subfolder), isFile), + driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder), }, previousPaths: map[string]string{ - id(folder): fullPath(drive, namex(folder, "a")), - id(subfolder): fullPath(drive, namex(folder, "a"), name(subfolder)), + id(folder): driveFullPath(drive, namex(folder, "a")), + id(subfolder): driveFullPath(drive, namex(folder, "a"), name(subfolder)), }, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), - idx(folder, 2): asNew(t, fullPath(drive, namex(folder, 2))), - id(folder): asMoved(t, fullPath(drive, namex(folder, "a")), fullPath(drive, name(folder))), - id(subfolder): asMoved(t, fullPath(drive, namex(folder, "a"), name(subfolder)), fullPath(drive, name(folder), name(subfolder))), + rootID: asNotMoved(t, driveFullPath(drive)), + idx(folder, 2): asNew(t, driveFullPath(drive, namex(folder, 2))), + id(folder): asMoved(t, driveFullPath(drive, namex(folder, "a")), driveFullPath(drive, name(folder))), + id(subfolder): asMoved(t, driveFullPath(drive, namex(folder, "a"), name(subfolder)), driveFullPath(drive, name(folder), name(subfolder))), }, expectedItemCount: 5, expectedFileCount: 2, expectedContainerCount: 4, expectedPrevPaths: 
map[string]string{ - rootID: fullPath(drive), - id(folder): fullPath(drive, name(folder)), - idx(folder, 2): fullPath(drive, namex(folder, 2)), - id(subfolder): fullPath(drive, name(folder), name(subfolder)), + rootID: driveFullPath(drive), + id(folder): driveFullPath(drive, name(folder)), + idx(folder, 2): driveFullPath(drive, namex(folder, 2)), + id(subfolder): driveFullPath(drive, name(folder), name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: makeExcludeMap(id(item), idx(item, 2)), @@ -947,29 +965,29 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "moved folder tree multiple times", items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), - driveItem(id(file), name(file), parent(drive, name(folder)), id(folder), isFile), - driveItem(id(folder), namex(folder, 2), parent(drive), rootID, isFolder), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(drive, name(folder)), id(folder), isFile), + driveItem(id(folder), namex(folder, 2), driveParentDir(drive), rootID, isFolder), }, previousPaths: map[string]string{ - id(folder): fullPath(drive, namex(folder, "a")), - id(subfolder): fullPath(drive, namex(folder, "a"), name(subfolder)), + id(folder): driveFullPath(drive, namex(folder, "a")), + id(subfolder): driveFullPath(drive, namex(folder, "a"), name(subfolder)), }, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), - id(folder): asMoved(t, fullPath(drive, namex(folder, "a")), fullPath(drive, namex(folder, 2))), + rootID: asNotMoved(t, driveFullPath(drive)), + id(folder): asMoved(t, driveFullPath(drive, namex(folder, "a")), driveFullPath(drive, namex(folder, 2))), }, expectedItemCount: 2, expectedFileCount: 1, 
expectedContainerCount: 2, expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), - id(folder): fullPath(drive, namex(folder, 2)), - id(subfolder): fullPath(drive, namex(folder, 2), name(subfolder)), + rootID: driveFullPath(drive), + id(folder): driveFullPath(drive, namex(folder, 2)), + id(subfolder): driveFullPath(drive, namex(folder, 2), name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: makeExcludeMap(id(file)), @@ -977,28 +995,28 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "deleted folder and package", items: []models.DriveItemable{ - driveRootItem(rootID), // root is always present, but not necessary here - delItem(id(folder), parent(drive), rootID, isFolder), - delItem(id(pkg), parent(drive), rootID, isPackage), + driveRootItem(), // root is always present, but not necessary here + delItem(id(folder), driveParentDir(drive), rootID, isFolder), + delItem(id(pkg), driveParentDir(drive), rootID, isPackage), }, previousPaths: map[string]string{ - rootID: fullPath(drive), - id(folder): fullPath(drive, name(folder)), - id(pkg): fullPath(drive, name(pkg)), + rootID: driveFullPath(drive), + id(folder): driveFullPath(drive, name(folder)), + id(pkg): driveFullPath(drive, name(pkg)), }, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), - id(folder): asDeleted(t, fullPath(drive, name(folder))), - id(pkg): asDeleted(t, fullPath(drive, name(pkg))), + rootID: asNotMoved(t, driveFullPath(drive)), + id(folder): asDeleted(t, driveFullPath(drive, name(folder))), + id(pkg): asDeleted(t, driveFullPath(drive, name(pkg))), }, expectedItemCount: 0, expectedFileCount: 0, expectedContainerCount: 1, expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), + rootID: driveFullPath(drive), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: 
map[string]struct{}{}, @@ -1006,23 +1024,23 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "delete folder without previous", items: []models.DriveItemable{ - driveRootItem(rootID), - delItem(id(folder), parent(drive), rootID, isFolder), + driveRootItem(), + delItem(id(folder), driveParentDir(drive), rootID, isFolder), }, previousPaths: map[string]string{ - rootID: fullPath(drive), + rootID: driveFullPath(drive), }, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), + rootID: asNotMoved(t, driveFullPath(drive)), }, expectedItemCount: 0, expectedFileCount: 0, expectedContainerCount: 1, expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), + rootID: driveFullPath(drive), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: map[string]struct{}{}, @@ -1030,29 +1048,29 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "delete folder tree move subfolder", items: []models.DriveItemable{ - driveRootItem(rootID), - delItem(id(folder), parent(drive), rootID, isFolder), - driveItem(id(subfolder), name(subfolder), parent(drive), rootID, isFolder), + driveRootItem(), + delItem(id(folder), driveParentDir(drive), rootID, isFolder), + driveItem(id(subfolder), name(subfolder), driveParentDir(drive), rootID, isFolder), }, previousPaths: map[string]string{ - rootID: fullPath(drive), - id(folder): fullPath(drive, name(folder)), - id(subfolder): fullPath(drive, name(folder), name(subfolder)), + rootID: driveFullPath(drive), + id(folder): driveFullPath(drive, name(folder)), + id(subfolder): driveFullPath(drive, name(folder), name(subfolder)), }, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), - id(folder): asDeleted(t, fullPath(drive, name(folder))), - 
id(subfolder): asMoved(t, fullPath(drive, name(folder), name(subfolder)), fullPath(drive, name(subfolder))), + rootID: asNotMoved(t, driveFullPath(drive)), + id(folder): asDeleted(t, driveFullPath(drive, name(folder))), + id(subfolder): asMoved(t, driveFullPath(drive, name(folder), name(subfolder)), driveFullPath(drive, name(subfolder))), }, expectedItemCount: 1, expectedFileCount: 0, expectedContainerCount: 2, expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), - id(subfolder): fullPath(drive, name(subfolder)), + rootID: driveFullPath(drive), + id(subfolder): driveFullPath(drive, name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: map[string]struct{}{}, @@ -1060,23 +1078,23 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "delete file", items: []models.DriveItemable{ - driveRootItem(rootID), - delItem(id(item), parent(drive), rootID, isFile), + driveRootItem(), + delItem(id(item), driveParentDir(drive), rootID, isFile), }, previousPaths: map[string]string{ - rootID: fullPath(drive), + rootID: driveFullPath(drive), }, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), + rootID: asNotMoved(t, driveFullPath(drive)), }, expectedItemCount: 1, expectedFileCount: 1, expectedContainerCount: 1, expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), + rootID: driveFullPath(drive), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: makeExcludeMap(id(item)), @@ -1084,22 +1102,22 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "item before parent errors", items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(file), name(file), parent(drive, name(folder)), id(folder), isFile), - driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), + driveRootItem(), + driveItem(id(file), name(file), 
driveParentDir(drive, name(folder)), id(folder), isFile), + driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder), }, previousPaths: map[string]string{}, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.Error, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), + rootID: asNotMoved(t, driveFullPath(drive)), }, expectedItemCount: 0, expectedFileCount: 0, expectedContainerCount: 1, expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), + rootID: driveFullPath(drive), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: map[string]struct{}{}, @@ -1107,33 +1125,33 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { { name: "1 root file, 1 folder, 1 package, 1 good file, 1 malware", items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(file), id(file), parent(drive), rootID, isFile), - driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), - driveItem(id(pkg), name(pkg), parent(drive), rootID, isPackage), - driveItem(idx(file, "good"), namex(file, "good"), parent(drive, name(folder)), id(folder), isFile), - malwareItem(id(malware), name(malware), parent(drive, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(file), id(file), driveParentDir(drive), rootID, isFile), + driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder), + driveItem(id(pkg), name(pkg), driveParentDir(drive), rootID, isPackage), + driveItem(idx(file, "good"), namex(file, "good"), driveParentDir(drive, name(folder)), id(folder), isFile), + malwareItem(id(malware), name(malware), driveParentDir(drive, name(folder)), id(folder), isFile), }, previousPaths: map[string]string{}, scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - rootID: asNotMoved(t, fullPath(drive)), - id(folder): asNew(t, fullPath(drive, name(folder))), 
- id(pkg): asNew(t, fullPath(drive, name(pkg))), + rootID: asNotMoved(t, driveFullPath(drive)), + id(folder): asNew(t, driveFullPath(drive, name(folder))), + id(pkg): asNew(t, driveFullPath(drive, name(pkg))), }, expectedItemCount: 4, expectedFileCount: 2, expectedContainerCount: 3, expectedSkippedCount: 1, expectedPrevPaths: map[string]string{ - rootID: fullPath(drive), - id(folder): fullPath(drive, name(folder)), - id(pkg): fullPath(drive, name(pkg)), + rootID: driveFullPath(drive), + id(folder): driveFullPath(drive, name(folder)), + id(pkg): driveFullPath(drive, name(pkg)), }, expectedTopLevelPackages: map[string]struct{}{ - fullPath(drive, name(pkg)): {}, + driveFullPath(drive, name(pkg)): {}, }, expectedCountPackages: 1, expectedExcludes: makeExcludeMap(id(file), idx(file, "good")), @@ -1256,7 +1274,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { bupMD.PreviousPathFileName, map[string]map[string]string{ id(drive): { - idx(folder, 1): fullPath(1), + idx(folder, 1): driveFullPath(1), }, }), } @@ -1267,7 +1285,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { }, expectedPaths: map[string]map[string]string{ id(drive): { - idx(folder, 1): fullPath(1), + idx(folder, 1): driveFullPath(1), }, }, canUsePreviousBackup: true, @@ -1298,7 +1316,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { bupMD.PreviousPathFileName, map[string]map[string]string{ id(drive): { - idx(folder, 1): fullPath(1), + idx(folder, 1): driveFullPath(1), }, }), } @@ -1307,7 +1325,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { expectedDeltas: map[string]string{}, expectedPaths: map[string]map[string]string{ id(drive): { - idx(folder, 1): fullPath(1), + idx(folder, 1): driveFullPath(1), }, }, canUsePreviousBackup: true, @@ -1354,7 +1372,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { bupMD.PreviousPathFileName, map[string]map[string]string{ id(drive): { - idx(folder, 1): fullPath(1), + idx(folder, 1): 
driveFullPath(1), }, }), } @@ -1363,7 +1381,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { expectedDeltas: map[string]string{id(drive): ""}, expectedPaths: map[string]map[string]string{ id(drive): { - idx(folder, 1): fullPath(1), + idx(folder, 1): driveFullPath(1), }, }, canUsePreviousBackup: true, @@ -1381,7 +1399,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { bupMD.PreviousPathFileName, map[string]map[string]string{ id(drive): { - idx(folder, 1): fullPath(1), + idx(folder, 1): driveFullPath(1), }, }), } @@ -1395,7 +1413,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { bupMD.PreviousPathFileName, map[string]map[string]string{ idx(drive, 2): { - idx(folder, 2): fullPath(2), + idx(folder, 2): driveFullPath(2), }, }), } @@ -1407,10 +1425,10 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { }, expectedPaths: map[string]map[string]string{ id(drive): { - idx(folder, 1): fullPath(1), + idx(folder, 1): driveFullPath(1), }, idx(drive, 2): { - idx(folder, 2): fullPath(2), + idx(folder, 2): driveFullPath(2), }, }, canUsePreviousBackup: true, @@ -1448,7 +1466,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { bupMD.PreviousPathFileName, map[string]map[string]string{ id(drive): { - idx(folder, 1): fullPath(1), + idx(folder, 1): driveFullPath(1), }, }), graph.NewMetadataEntry( @@ -1462,7 +1480,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { }, expectedPaths: map[string]map[string]string{ id(drive): { - idx(folder, 1): fullPath(1), + idx(folder, 1): driveFullPath(1), }, }, canUsePreviousBackup: true, @@ -1480,7 +1498,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { bupMD.PreviousPathFileName, map[string]map[string]string{ id(drive): { - idx(folder, 1): fullPath(1), + idx(folder, 1): driveFullPath(1), }, }), } @@ -1491,7 +1509,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { bupMD.PreviousPathFileName, 
map[string]map[string]string{ id(drive): { - idx(folder, 2): fullPath(2), + idx(folder, 2): driveFullPath(2), }, }), } @@ -1514,7 +1532,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { bupMD.PreviousPathFileName, map[string]map[string]string{ id(drive): { - idx(folder, 1): fullPath(1), + idx(folder, 1): driveFullPath(1), }, }), } @@ -1544,8 +1562,8 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { bupMD.PreviousPathFileName, map[string]map[string]string{ id(drive): { - idx(folder, 1): fullPath(1), - idx(folder, 2): fullPath(1), + idx(folder, 1): driveFullPath(1), + idx(folder, 2): driveFullPath(1), }, }), } @@ -1556,8 +1574,8 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { }, expectedPaths: map[string]map[string]string{ id(drive): { - idx(folder, 1): fullPath(1), - idx(folder, 2): fullPath(1), + idx(folder, 1): driveFullPath(1), + idx(folder, 2): driveFullPath(1), }, }, expectedAlerts: []string{fault.AlertPreviousPathCollision}, @@ -1578,8 +1596,8 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { bupMD.PreviousPathFileName, map[string]map[string]string{ id(drive): { - idx(folder, 1): fullPath(1), - idx(folder, 2): fullPath(1), + idx(folder, 1): driveFullPath(1), + idx(folder, 2): driveFullPath(1), }, }), } @@ -1593,7 +1611,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { bupMD.PreviousPathFileName, map[string]map[string]string{ idx(drive, 2): { - idx(folder, 1): fullPath(1), + idx(folder, 1): driveFullPath(1), }, }), } @@ -1605,11 +1623,11 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { }, expectedPaths: map[string]map[string]string{ id(drive): { - idx(folder, 1): fullPath(1), - idx(folder, 2): fullPath(1), + idx(folder, 1): driveFullPath(1), + idx(folder, 2): driveFullPath(1), }, idx(drive, 2): { - idx(folder, 1): fullPath(1), + idx(folder, 1): driveFullPath(1), }, }, expectedAlerts: []string{fault.AlertPreviousPathCollision}, @@ -1694,8 +1712,8 @@ func (suite 
*CollectionsUnitSuite) TestGet_treeCannotBeUsedWhileIncomplete() { defer flush() drv := models.NewDrive() - drv.SetId(ptr.To("id")) - drv.SetName(ptr.To("name")) + drv.SetId(ptr.To(id(drive))) + drv.SetName(ptr.To(name(drive))) mbh := mock.DefaultOneDriveBH(user) opts := control.DefaultOptions() @@ -1704,11 +1722,11 @@ func (suite *CollectionsUnitSuite) TestGet_treeCannotBeUsedWhileIncomplete() { mbh.DrivePagerV = pagerForDrives(drv) mbh.DriveItemEnumeration = mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - "id": { + id(drive): { Pages: []mock.NextPage{{ Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - delItem(id(file), parent(1), rootID, isFile), + driveRootItem(), // will be present, not needed + delItem(id(file), parentDir(), rootID, isFile), }, }}, DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, @@ -1720,7 +1738,7 @@ func (suite *CollectionsUnitSuite) TestGet_treeCannotBeUsedWhileIncomplete() { c.ctrl = opts _, _, err := c.Get(ctx, nil, nil, fault.New(true)) - require.ErrorContains(t, err, "not implemented", clues.ToCore(err)) + require.ErrorIs(t, err, errGetTreeNotImplemented, clues.ToCore(err)) } func (suite *CollectionsUnitSuite) TestGet() { @@ -1767,8 +1785,8 @@ func (suite *CollectionsUnitSuite) TestGet() { idx(drive, 1): { Pages: []mock.NextPage{{ Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - delItem(id(file), parent(1), rootID, isFile), + driveRootItem(), // will be present, not needed + delItem(id(file), driveParentDir(1), rootID, isFile), }, }}, DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, @@ -1778,19 +1796,19 @@ func (suite *CollectionsUnitSuite) TestGet() { canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): {rootID: fullPath(1)}, + idx(drive, 1): {rootID: driveFullPath(1)}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): 
{data.NotMovedState: {}}, + driveFullPath(1): {data.NotMovedState: {}}, }, expectedDeltaURLs: map[string]string{ idx(drive, 1): id(delta), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): {rootID: fullPath(1)}, + idx(drive, 1): {rootID: driveFullPath(1)}, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{ - fullPath(1): makeExcludeMap(id(file)), + driveFullPath(1): makeExcludeMap(id(file)), }), }, { @@ -1801,8 +1819,8 @@ func (suite *CollectionsUnitSuite) TestGet() { idx(drive, 1): { Pages: []mock.NextPage{{ Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(file), name(file), parent(1), rootID, isFile), + driveRootItem(), + driveItem(id(file), name(file), driveParentDir(1), rootID, isFile), }, }}, DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, @@ -1812,19 +1830,19 @@ func (suite *CollectionsUnitSuite) TestGet() { canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): {rootID: fullPath(1)}, + idx(drive, 1): {rootID: driveFullPath(1)}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NotMovedState: {id(file)}}, + driveFullPath(1): {data.NotMovedState: {id(file)}}, }, expectedDeltaURLs: map[string]string{ idx(drive, 1): id(delta), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): {rootID: fullPath(1)}, + idx(drive, 1): {rootID: driveFullPath(1)}, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{ - fullPath(1): makeExcludeMap(id(file)), + driveFullPath(1): makeExcludeMap(id(file)), }), }, { @@ -1835,9 +1853,9 @@ func (suite *CollectionsUnitSuite) TestGet() { idx(drive, 1): { Pages: []mock.NextPage{{ Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), 
driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), }, }}, DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, @@ -1848,22 +1866,22 @@ func (suite *CollectionsUnitSuite) TestGet() { errCheck: assert.NoError, previousPaths: map[string]map[string]string{}, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, - fullPath(1, name(folder)): {data.NewState: {id(folder), id(file)}}, + driveFullPath(1): {data.NewState: {}}, + driveFullPath(1, name(folder)): {data.NewState: {id(folder), id(file)}}, }, expectedDeltaURLs: map[string]string{ idx(drive, 1): id(delta), }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): true, - fullPath(1, name(folder)): true, + driveFullPath(1): true, + driveFullPath(1, name(folder)): true, }, }, { @@ -1874,10 +1892,10 @@ func (suite *CollectionsUnitSuite) TestGet() { idx(drive, 1): { Pages: []mock.NextPage{{ Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), - driveItem(id(file), namex(file, 2), parent(1, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), + driveItem(id(file), namex(file, 2), driveParentDir(1, name(folder)), id(folder), isFile), }, }}, DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, @@ -1888,22 +1906,22 @@ func (suite *CollectionsUnitSuite) TestGet() { errCheck: assert.NoError, 
previousPaths: map[string]map[string]string{}, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, - fullPath(1, name(folder)): {data.NewState: {id(folder), id(file)}}, + driveFullPath(1): {data.NewState: {}}, + driveFullPath(1, name(folder)): {data.NewState: {id(folder), id(file)}}, }, expectedDeltaURLs: map[string]string{ idx(drive, 1): id(delta), }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): true, - fullPath(1, name(folder)): true, + driveFullPath(1): true, + driveFullPath(1, name(folder)): true, }, }, { @@ -1913,10 +1931,10 @@ func (suite *CollectionsUnitSuite) TestGet() { DrivePagers: map[string]*mock.DriveItemsDeltaPager{ idx(drive, 1): { Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), - driveItem(id(file), namex(file, 2), parent(1), rootID, isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), + driveItem(id(file), namex(file, 2), driveParentDir(1), rootID, isFile), }}}, DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, @@ -1926,24 +1944,24 @@ func (suite *CollectionsUnitSuite) TestGet() { errCheck: assert.NoError, previousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), + rootID: driveFullPath(1), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NotMovedState: {id(file)}}, - fullPath(1, name(folder)): {data.NewState: {id(folder)}}, + 
driveFullPath(1): {data.NotMovedState: {id(file)}}, + driveFullPath(1, name(folder)): {data.NewState: {id(folder)}}, }, expectedDeltaURLs: map[string]string{ idx(drive, 1): id(delta), }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{ - fullPath(1): makeExcludeMap(id(file)), + driveFullPath(1): makeExcludeMap(id(file)), }), }, { @@ -1953,9 +1971,9 @@ func (suite *CollectionsUnitSuite) TestGet() { DrivePagers: map[string]*mock.DriveItemsDeltaPager{ idx(drive, 1): { Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), }}}, DeltaUpdate: pagers.DeltaUpdate{URL: "", Reset: true}, }, @@ -1967,20 +1985,20 @@ func (suite *CollectionsUnitSuite) TestGet() { idx(drive, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, - fullPath(1, name(folder)): {data.NewState: {id(folder), id(file)}}, + driveFullPath(1): {data.NewState: {}}, + driveFullPath(1, name(folder)): {data.NewState: {id(folder), id(file)}}, }, expectedDeltaURLs: map[string]string{}, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): true, - fullPath(1, name(folder)): true, + driveFullPath(1): 
true, + driveFullPath(1, name(folder)): true, }, }, { @@ -1992,16 +2010,16 @@ func (suite *CollectionsUnitSuite) TestGet() { Pages: []mock.NextPage{ { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), parent(1, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, name(folder)), id(folder), isFile), }, }, }, @@ -2015,22 +2033,22 @@ func (suite *CollectionsUnitSuite) TestGet() { idx(drive, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, - fullPath(1, name(folder)): {data.NewState: {id(folder), id(file), idx(file, 2)}}, + driveFullPath(1): {data.NewState: {}}, + driveFullPath(1, name(folder)): {data.NewState: {id(folder), id(file), idx(file, 2)}}, }, expectedDeltaURLs: map[string]string{ idx(drive, 1): id(delta), }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): true, - fullPath(1, name(folder)): true, + driveFullPath(1): true, + driveFullPath(1, name(folder)): true, }, }, { @@ -2042,10 +2060,10 @@ func (suite *CollectionsUnitSuite) TestGet() { Pages: []mock.NextPage{ { Items: 
[]models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), - driveItem(idx(file, 3), namex(file, 3), parent(1, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), + driveItem(idx(file, 3), namex(file, 3), driveParentDir(1, name(folder)), id(folder), isFile), }, }, { @@ -2054,16 +2072,16 @@ func (suite *CollectionsUnitSuite) TestGet() { }, { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), parent(1, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, name(folder)), id(folder), isFile), }, }, }, @@ -2077,22 +2095,22 @@ func (suite *CollectionsUnitSuite) TestGet() { idx(drive, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, - fullPath(1, name(folder)): {data.NewState: {id(folder), id(file), idx(file, 2)}}, + driveFullPath(1): {data.NewState: {}}, + driveFullPath(1, name(folder)): {data.NewState: {id(folder), id(file), idx(file, 2)}}, }, expectedDeltaURLs: map[string]string{ idx(drive, 1): id(delta), }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 
1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): true, - fullPath(1, name(folder)): true, + driveFullPath(1): true, + driveFullPath(1, name(folder)): true, }, }, { @@ -2104,24 +2122,24 @@ func (suite *CollectionsUnitSuite) TestGet() { Pages: []mock.NextPage{ { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), }, Reset: true, }, { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), parent(1, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, name(folder)), id(folder), isFile), }, }, }, @@ -2135,22 +2153,22 @@ func (suite *CollectionsUnitSuite) TestGet() { idx(drive, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, - fullPath(1, name(folder)): {data.NewState: {id(folder), id(file), idx(file, 2)}}, + driveFullPath(1): 
{data.NewState: {}}, + driveFullPath(1, name(folder)): {data.NewState: {id(folder), id(file), idx(file, 2)}}, }, expectedDeltaURLs: map[string]string{ idx(drive, 1): id(delta), }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): true, - fullPath(1, name(folder)): true, + driveFullPath(1): true, + driveFullPath(1, name(folder)): true, }, }, { @@ -2163,17 +2181,17 @@ func (suite *CollectionsUnitSuite) TestGet() { DrivePagers: map[string]*mock.DriveItemsDeltaPager{ idx(drive, 1): { Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), }}}, DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, }, idx(drive, 2): { Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem(idx("root", 2)), - driveItem(idx(folder, 2), name(folder), parent(2), idx("root", 2), isFolder), - driveItem(idx(file, 2), name(file), parent(2, name(folder)), idx(folder, 2), isFile), + driveRootItem(), + driveItem(idx(folder, 2), name(folder), driveParentDir(2), rootID, isFolder), + driveItem(idx(file, 2), name(file), driveParentDir(2, name(folder)), idx(folder, 2), isFile), }}}, DeltaUpdate: pagers.DeltaUpdate{URL: idx(delta, 2), Reset: true}, }, @@ -2186,10 +2204,10 @@ func (suite *CollectionsUnitSuite) TestGet() { idx(drive, 2): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, - 
fullPath(1, name(folder)): {data.NewState: {id(folder), id(file)}}, - fullPath(2): {data.NewState: {}}, - fullPath(2, name(folder)): {data.NewState: {idx(folder, 2), idx(file, 2)}}, + driveFullPath(1): {data.NewState: {}}, + driveFullPath(1, name(folder)): {data.NewState: {id(folder), id(file)}}, + driveFullPath(2): {data.NewState: {}}, + driveFullPath(2, name(folder)): {data.NewState: {idx(folder, 2), idx(file, 2)}}, }, expectedDeltaURLs: map[string]string{ idx(drive, 1): id(delta), @@ -2197,20 +2215,20 @@ func (suite *CollectionsUnitSuite) TestGet() { }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), }, idx(drive, 2): { - idx("root", 2): fullPath(2), - idx(folder, 2): fullPath(2, name(folder)), + rootID: driveFullPath(2), + idx(folder, 2): driveFullPath(2, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): true, - fullPath(1, name(folder)): true, - fullPath(2): true, - fullPath(2, name(folder)): true, + driveFullPath(1): true, + driveFullPath(1, name(folder)): true, + driveFullPath(2): true, + driveFullPath(2, name(folder)): true, }, }, { @@ -2223,17 +2241,17 @@ func (suite *CollectionsUnitSuite) TestGet() { DrivePagers: map[string]*mock.DriveItemsDeltaPager{ idx(drive, 1): { Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), }}}, DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, }, idx(drive, 2): { Pages: []mock.NextPage{{Items: 
[]models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(2), rootID, isFolder), - driveItem(idx(file, 2), name(file), parent(2, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(2), rootID, isFolder), + driveItem(idx(file, 2), name(file), driveParentDir(2, name(folder)), id(folder), isFile), }}}, DeltaUpdate: pagers.DeltaUpdate{URL: idx(delta, 2), Reset: true}, }, @@ -2246,10 +2264,10 @@ func (suite *CollectionsUnitSuite) TestGet() { idx(drive, 2): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, - fullPath(1, name(folder)): {data.NewState: {id(folder), id(file)}}, - fullPath(2): {data.NewState: {}}, - fullPath(2, name(folder)): {data.NewState: {id(folder), idx(file, 2)}}, + driveFullPath(1): {data.NewState: {}}, + driveFullPath(1, name(folder)): {data.NewState: {id(folder), id(file)}}, + driveFullPath(2): {data.NewState: {}}, + driveFullPath(2, name(folder)): {data.NewState: {id(folder), idx(file, 2)}}, }, expectedDeltaURLs: map[string]string{ idx(drive, 1): id(delta), @@ -2257,20 +2275,20 @@ func (suite *CollectionsUnitSuite) TestGet() { }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), }, idx(drive, 2): { - rootID: fullPath(2), - id(folder): fullPath(2, name(folder)), + rootID: driveFullPath(2), + id(folder): driveFullPath(2, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): true, - fullPath(1, name(folder)): true, - fullPath(2): true, - fullPath(2, name(folder)): true, + driveFullPath(1): true, + driveFullPath(1, name(folder)): true, + driveFullPath(2): true, + driveFullPath(2, name(folder)): true, }, }, { @@ -2308,9 +2326,9 @@ func (suite 
*CollectionsUnitSuite) TestGet() { }, { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(idx(folder, 2), namex(folder, 2), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, namex(folder, 2)), idx(folder, 2), isFile), + driveRootItem(), + driveItem(idx(folder, 2), namex(folder, 2), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, namex(folder, 2)), idx(folder, 2), isFile), }, }, }, @@ -2322,29 +2340,29 @@ func (suite *CollectionsUnitSuite) TestGet() { errCheck: assert.NoError, previousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, - fullPath(1, name(folder)): {data.DeletedState: {}}, - fullPath(1, namex(folder, 2)): {data.NewState: {idx(folder, 2), id(file)}}, + driveFullPath(1): {data.NewState: {}}, + driveFullPath(1, name(folder)): {data.DeletedState: {}}, + driveFullPath(1, namex(folder, 2)): {data.NewState: {idx(folder, 2), id(file)}}, }, expectedDeltaURLs: map[string]string{ idx(drive, 1): id(delta), }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - idx(folder, 2): fullPath(1, namex(folder, 2)), + rootID: driveFullPath(1), + idx(folder, 2): driveFullPath(1, namex(folder, 2)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): true, - fullPath(1, name(folder)): true, - fullPath(1, namex(folder, 2)): true, + driveFullPath(1): true, + driveFullPath(1, name(folder)): true, + driveFullPath(1, namex(folder, 2)): true, }, }, { @@ -2360,9 +2378,9 @@ func (suite *CollectionsUnitSuite) TestGet() { }, { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(idx(folder, 2), namex(folder, 2), parent(1), 
rootID, isFolder), - driveItem(id(file), name(file), parent(1, namex(folder, 2)), idx(folder, 2), isFile), + driveRootItem(), + driveItem(idx(folder, 2), namex(folder, 2), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, namex(folder, 2)), idx(folder, 2), isFile), }, }, }, @@ -2374,29 +2392,29 @@ func (suite *CollectionsUnitSuite) TestGet() { errCheck: assert.NoError, previousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, - fullPath(1, name(folder)): {data.DeletedState: {}}, - fullPath(1, namex(folder, 2)): {data.NewState: {idx(folder, 2), id(file)}}, + driveFullPath(1): {data.NewState: {}}, + driveFullPath(1, name(folder)): {data.DeletedState: {}}, + driveFullPath(1, namex(folder, 2)): {data.NewState: {idx(folder, 2), id(file)}}, }, expectedDeltaURLs: map[string]string{ idx(drive, 1): id(delta), }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - idx(folder, 2): fullPath(1, namex(folder, 2)), + rootID: driveFullPath(1), + idx(folder, 2): driveFullPath(1, namex(folder, 2)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): true, - fullPath(1, name(folder)): true, - fullPath(1, namex(folder, 2)): true, + driveFullPath(1): true, + driveFullPath(1, name(folder)): true, + driveFullPath(1, namex(folder, 2)): true, }, }, { @@ -2410,9 +2428,9 @@ func (suite *CollectionsUnitSuite) TestGet() { // on the first page, if this is the total data, we'd expect both folder and folder2 // since new previousPaths merge with the old previousPaths. 
Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(idx(folder, 2), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), idx(folder, 2), isFile), + driveRootItem(), + driveItem(idx(folder, 2), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), idx(folder, 2), isFile), }, }, { @@ -2423,9 +2441,9 @@ func (suite *CollectionsUnitSuite) TestGet() { // but after a delta reset, we treat this as the total end set of folders, which means // we don't expect folder to exist any longer. Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(idx(folder, 2), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), idx(folder, 2), isFile), + driveRootItem(), + driveItem(idx(folder, 2), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), idx(folder, 2), isFile), }, }, }, @@ -2437,13 +2455,13 @@ func (suite *CollectionsUnitSuite) TestGet() { errCheck: assert.NoError, previousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, - fullPath(1, name(folder)): { + driveFullPath(1): {data.NewState: {}}, + driveFullPath(1, name(folder)): { // Old folder path should be marked as deleted since it should compare // by ID. 
data.DeletedState: {}, @@ -2455,14 +2473,14 @@ func (suite *CollectionsUnitSuite) TestGet() { }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - idx(folder, 2): fullPath(1, name(folder)), + rootID: driveFullPath(1), + idx(folder, 2): driveFullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): true, - fullPath(1, name(folder)): true, + driveFullPath(1): true, + driveFullPath(1, name(folder)): true, }, }, { @@ -2474,9 +2492,9 @@ func (suite *CollectionsUnitSuite) TestGet() { Pages: []mock.NextPage{ { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), }, }, { @@ -2485,9 +2503,9 @@ func (suite *CollectionsUnitSuite) TestGet() { }, { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), }, }, }, @@ -2499,13 +2517,13 @@ func (suite *CollectionsUnitSuite) TestGet() { errCheck: assert.NoError, previousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, - fullPath(1, name(folder)): { + driveFullPath(1): {data.NewState: {}}, + 
driveFullPath(1, name(folder)): { data.NewState: {id(folder), id(file)}, }, }, @@ -2514,14 +2532,14 @@ func (suite *CollectionsUnitSuite) TestGet() { }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): true, - fullPath(1, name(folder)): true, + driveFullPath(1): true, + driveFullPath(1, name(folder)): true, }, }, { @@ -2537,9 +2555,9 @@ func (suite *CollectionsUnitSuite) TestGet() { }, { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(idx(folder, 2), name(folder), parent(1), rootID, isFolder), - driveItem(idx(file, 2), name(file), parent(1, name(folder)), idx(folder, 2), isFile), + driveRootItem(), + driveItem(idx(folder, 2), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(idx(file, 2), name(file), driveParentDir(1, name(folder)), idx(folder, 2), isFile), }, }, }, @@ -2551,13 +2569,13 @@ func (suite *CollectionsUnitSuite) TestGet() { errCheck: assert.NoError, previousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, - fullPath(1, name(folder)): { + driveFullPath(1): {data.NewState: {}}, + driveFullPath(1, name(folder)): { data.DeletedState: {}, data.NewState: {idx(folder, 2), idx(file, 2)}, }, @@ -2567,14 +2585,14 @@ func (suite *CollectionsUnitSuite) TestGet() { }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - idx(folder, 2): fullPath(1, name(folder)), + rootID: driveFullPath(1), + idx(folder, 2): driveFullPath(1, name(folder)), }, }, expectedDelList: 
pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): true, - fullPath(1, name(folder)): true, + driveFullPath(1): true, + driveFullPath(1, name(folder)): true, }, }, { @@ -2590,9 +2608,9 @@ func (suite *CollectionsUnitSuite) TestGet() { }, { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(idx(folder, 2), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), idx(folder, 2), isFile), + driveRootItem(), + driveItem(idx(folder, 2), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), idx(folder, 2), isFile), }, }, }, @@ -2604,13 +2622,13 @@ func (suite *CollectionsUnitSuite) TestGet() { errCheck: assert.NoError, previousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, - fullPath(1, name(folder)): { + driveFullPath(1): {data.NewState: {}}, + driveFullPath(1, name(folder)): { // Old folder path should be marked as deleted since it should compare // by ID. 
data.DeletedState: {}, @@ -2622,14 +2640,14 @@ func (suite *CollectionsUnitSuite) TestGet() { }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - idx(folder, 2): fullPath(1, name(folder)), + rootID: driveFullPath(1), + idx(folder, 2): driveFullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): true, - fullPath(1, name(folder)): true, + driveFullPath(1): true, + driveFullPath(1, name(folder)): true, }, }, { @@ -2641,18 +2659,18 @@ func (suite *CollectionsUnitSuite) TestGet() { Pages: []mock.NextPage{ { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), - malwareItem(id(malware), name(malware), parent(1, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), + malwareItem(id(malware), name(malware), driveParentDir(1, name(folder)), id(folder), isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), parent(1, name(folder)), id(folder), isFile), - malwareItem(idx(malware, 2), namex(malware, 2), parent(1, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, name(folder)), id(folder), isFile), + malwareItem(idx(malware, 2), namex(malware, 2), driveParentDir(1, name(folder)), id(folder), isFile), }, }, }, @@ -2666,22 +2684,22 @@ func (suite *CollectionsUnitSuite) TestGet() { idx(drive, 1): {}, }, expectedCollections: 
map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, - fullPath(1, name(folder)): {data.NewState: {id(folder), id(file), idx(file, 2)}}, + driveFullPath(1): {data.NewState: {}}, + driveFullPath(1, name(folder)): {data.NewState: {id(folder), id(file), idx(file, 2)}}, }, expectedDeltaURLs: map[string]string{ idx(drive, 1): id(delta), }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): true, - fullPath(1, name(folder)): true, + driveFullPath(1): true, + driveFullPath(1, name(folder)): true, }, expectedSkippedCount: 2, }, @@ -2694,11 +2712,11 @@ func (suite *CollectionsUnitSuite) TestGet() { Pages: []mock.NextPage{ { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), - driveItem(idx(folder, 2), namex(folder, 2), parent(1), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), parent(1, namex(folder, 2)), idx(folder, 2), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), + driveItem(idx(folder, 2), namex(folder, 2), driveParentDir(1), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, namex(folder, 2)), idx(folder, 2), isFile), }, }, { @@ -2706,11 +2724,11 @@ func (suite *CollectionsUnitSuite) TestGet() { }, { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), - delItem(idx(folder, 2), 
parent(1), rootID, isFolder), - delItem(namex(file, 2), parent(1), rootID, isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), + delItem(idx(folder, 2), driveParentDir(1), rootID, isFolder), + delItem(namex(file, 2), driveParentDir(1), rootID, isFile), }, }, }, @@ -2722,30 +2740,30 @@ func (suite *CollectionsUnitSuite) TestGet() { errCheck: assert.NoError, previousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), - idx(folder, 2): fullPath(1, namex(folder, 2)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), + idx(folder, 2): driveFullPath(1, namex(folder, 2)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, - fullPath(1, name(folder)): {data.NewState: {id(folder), id(file)}}, - fullPath(1, namex(folder, 2)): {data.DeletedState: {}}, + driveFullPath(1): {data.NewState: {}}, + driveFullPath(1, name(folder)): {data.NewState: {id(folder), id(file)}}, + driveFullPath(1, namex(folder, 2)): {data.DeletedState: {}}, }, expectedDeltaURLs: map[string]string{ idx(drive, 1): idx(delta, 2), }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): true, - fullPath(1, name(folder)): true, - fullPath(1, namex(folder, 2)): true, + driveFullPath(1): true, + driveFullPath(1, name(folder)): true, + driveFullPath(1, namex(folder, 2)): true, }, }, { @@ -2756,8 +2774,8 @@ func (suite *CollectionsUnitSuite) TestGet() { idx(drive, 1): { Pages: []mock.NextPage{{ Items: []models.DriveItemable{ - driveRootItem(rootID), - 
delItem(id(folder), parent(1), rootID, isFolder), + driveRootItem(), + delItem(id(folder), driveParentDir(1), rootID, isFolder), }, Reset: true, }}, @@ -2769,26 +2787,26 @@ func (suite *CollectionsUnitSuite) TestGet() { errCheck: assert.NoError, previousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, - fullPath(1, name(folder)): {data.DeletedState: {}}, + driveFullPath(1): {data.NewState: {}}, + driveFullPath(1, name(folder)): {data.DeletedState: {}}, }, expectedDeltaURLs: map[string]string{ idx(drive, 1): id(delta), }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), + rootID: driveFullPath(1), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): true, - fullPath(1, name(folder)): true, + driveFullPath(1): true, + driveFullPath(1, name(folder)): true, }, }, { @@ -2800,8 +2818,8 @@ func (suite *CollectionsUnitSuite) TestGet() { Pages: []mock.NextPage{ { Items: []models.DriveItemable{ - driveRootItem(rootID), - delItem(id(file), parent(1), rootID, isFile), + driveRootItem(), + delItem(id(file), driveParentDir(1), rootID, isFile), }, Reset: true, }, @@ -2814,23 +2832,23 @@ func (suite *CollectionsUnitSuite) TestGet() { errCheck: assert.NoError, previousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), + rootID: driveFullPath(1), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, + driveFullPath(1): {data.NewState: {}}, }, expectedDeltaURLs: map[string]string{ idx(drive, 1): id(delta), }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), + rootID: driveFullPath(1), }, }, 
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): true, + driveFullPath(1): true, }, }, { @@ -2842,16 +2860,16 @@ func (suite *CollectionsUnitSuite) TestGet() { Pages: []mock.NextPage{ { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem(rootID), - delItem(id(folder), parent(1), rootID, isFolder), - delItem(id(file), parent(1), rootID, isFile), + driveRootItem(), + delItem(id(folder), driveParentDir(1), rootID, isFolder), + delItem(id(file), driveParentDir(1), rootID, isFile), }, }, }, @@ -2865,19 +2883,19 @@ func (suite *CollectionsUnitSuite) TestGet() { idx(drive, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, + driveFullPath(1): {data.NewState: {}}, }, expectedDeltaURLs: map[string]string{ idx(drive, 1): idx(delta, 2), }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), + rootID: driveFullPath(1), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): true, + driveFullPath(1): true, }, }, { @@ -2889,23 +2907,23 @@ func (suite *CollectionsUnitSuite) TestGet() { Pages: []mock.NextPage{ { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, 
name(folder)), id(folder), isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem(rootID), - delItem(id(folder), parent(1), rootID, isFolder), - delItem(id(file), parent(1), rootID, isFile), + driveRootItem(), + delItem(id(folder), driveParentDir(1), rootID, isFolder), + delItem(id(file), driveParentDir(1), rootID, isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(idx(folder, 1), name(folder), parent(1), rootID, isFolder), - driveItem(idx(file, 1), name(file), parent(1, name(folder)), idx(folder, 1), isFile), + driveRootItem(), + driveItem(idx(folder, 1), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(idx(file, 1), name(file), driveParentDir(1, name(folder)), idx(folder, 1), isFile), }, }, }, @@ -2919,22 +2937,22 @@ func (suite *CollectionsUnitSuite) TestGet() { idx(drive, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, - fullPath(1, name(folder)): {data.NewState: {idx(folder, 1), idx(file, 1)}}, + driveFullPath(1): {data.NewState: {}}, + driveFullPath(1, name(folder)): {data.NewState: {idx(folder, 1), idx(file, 1)}}, }, expectedDeltaURLs: map[string]string{ idx(drive, 1): idx(delta, 2), }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - idx(folder, 1): fullPath(1, name(folder)), + rootID: driveFullPath(1), + idx(folder, 1): driveFullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): true, - fullPath(1, name(folder)): true, + driveFullPath(1): true, + driveFullPath(1, name(folder)): true, }, }, { @@ -2946,23 +2964,23 @@ func (suite *CollectionsUnitSuite) TestGet() { Pages: []mock.NextPage{ { Items: []models.DriveItemable{ - driveRootItem(rootID), - delItem(id(folder), parent(1), rootID, isFolder), - delItem(id(file), parent(1, name(folder)), rootID, isFile), + driveRootItem(), + 
delItem(id(folder), driveParentDir(1), rootID, isFolder), + delItem(id(file), driveParentDir(1, name(folder)), rootID, isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem(rootID), - delItem(id(folder), parent(1), rootID, isFolder), - delItem(id(file), parent(1, name(folder)), rootID, isFile), + driveRootItem(), + delItem(id(folder), driveParentDir(1), rootID, isFolder), + delItem(id(file), driveParentDir(1, name(folder)), rootID, isFile), }, }, }, @@ -2974,20 +2992,20 @@ func (suite *CollectionsUnitSuite) TestGet() { errCheck: assert.NoError, previousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NotMovedState: {}}, - fullPath(1, name(folder)): {data.DeletedState: {}}, + driveFullPath(1): {data.NotMovedState: {}}, + driveFullPath(1, name(folder)): {data.DeletedState: {}}, }, expectedDeltaURLs: map[string]string{ idx(drive, 1): idx(delta, 2), }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), + rootID: driveFullPath(1), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), @@ -3002,23 +3020,23 @@ func (suite *CollectionsUnitSuite) TestGet() { Pages: []mock.NextPage{ { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), id(folder), 
isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem(rootID), - delItem(id(folder), parent(1), rootID, isFolder), - delItem(id(file), parent(1), rootID, isFile), + driveRootItem(), + delItem(id(folder), driveParentDir(1), rootID, isFolder), + delItem(id(file), driveParentDir(1), rootID, isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(idx(folder, 1), name(folder), parent(1), rootID, isFolder), - driveItem(idx(file, 1), name(file), parent(1, name(folder)), idx(folder, 1), isFile), + driveRootItem(), + driveItem(idx(folder, 1), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(idx(file, 1), name(file), driveParentDir(1, name(folder)), idx(folder, 1), isFile), }, }, }, @@ -3030,27 +3048,27 @@ func (suite *CollectionsUnitSuite) TestGet() { errCheck: assert.NoError, previousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, - fullPath(1, name(folder)): {data.DeletedState: {}, data.NewState: {idx(folder, 1), idx(file, 1)}}, + driveFullPath(1): {data.NewState: {}}, + driveFullPath(1, name(folder)): {data.DeletedState: {}, data.NewState: {idx(folder, 1), idx(file, 1)}}, }, expectedDeltaURLs: map[string]string{ idx(drive, 1): idx(delta, 2), }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - idx(folder, 1): fullPath(1, name(folder)), + rootID: driveFullPath(1), + idx(folder, 1): driveFullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): false, - 
fullPath(1, name(folder)): true, + driveFullPath(1): false, + driveFullPath(1, name(folder)): true, }, }, { @@ -3062,15 +3080,15 @@ func (suite *CollectionsUnitSuite) TestGet() { Pages: []mock.NextPage{ { Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem(rootID), - delItem(id(file), parent(1), rootID, isFile), + driveRootItem(), + delItem(id(file), driveParentDir(1), rootID, isFile), }, }, }, @@ -3084,22 +3102,22 @@ func (suite *CollectionsUnitSuite) TestGet() { idx(drive, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, - fullPath(1, name(folder)): {data.NewState: {id(folder)}}, + driveFullPath(1): {data.NewState: {}}, + driveFullPath(1, name(folder)): {data.NewState: {id(folder)}}, }, expectedDeltaURLs: map[string]string{ idx(drive, 1): id(delta), }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): true, - fullPath(1, name(folder)): true, + driveFullPath(1): true, + driveFullPath(1, name(folder)): true, }, }, { @@ -3109,8 +3127,8 @@ func (suite *CollectionsUnitSuite) TestGet() { DrivePagers: map[string]*mock.DriveItemsDeltaPager{ idx(drive, 1): { Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem(rootID), - delItem(id(folder), parent(1), rootID, isFolder), + driveRootItem(), + delItem(id(folder), driveParentDir(1), 
rootID, isFolder), }}}, DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, }, @@ -3122,19 +3140,19 @@ func (suite *CollectionsUnitSuite) TestGet() { idx(drive, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, + driveFullPath(1): {data.NewState: {}}, }, expectedDeltaURLs: map[string]string{ idx(drive, 1): id(delta), }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), + rootID: driveFullPath(1), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): true, + driveFullPath(1): true, }, }, { @@ -3144,8 +3162,8 @@ func (suite *CollectionsUnitSuite) TestGet() { DrivePagers: map[string]*mock.DriveItemsDeltaPager{ idx(drive, 1): { Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem(rootID), - delItem(id(file), parent(1), rootID, isFile), + driveRootItem(), + delItem(id(file), driveParentDir(1), rootID, isFile), }}}, DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, }, @@ -3157,19 +3175,19 @@ func (suite *CollectionsUnitSuite) TestGet() { idx(drive, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NewState: {}}, + driveFullPath(1): {data.NewState: {}}, }, expectedDeltaURLs: map[string]string{ idx(drive, 1): id(delta), }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), + rootID: driveFullPath(1), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(1): true, + driveFullPath(1): true, }, }, { @@ -3179,7 +3197,7 @@ func (suite *CollectionsUnitSuite) TestGet() { DrivePagers: map[string]*mock.DriveItemsDeltaPager{ idx(drive, 1): { Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present + driveRootItem(), // will be present }}}, DeltaUpdate: 
pagers.DeltaUpdate{URL: id(delta)}, }, @@ -3188,20 +3206,20 @@ func (suite *CollectionsUnitSuite) TestGet() { canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): {rootID: fullPath(1)}, - idx(drive, 2): {rootID: fullPath(2)}, + idx(drive, 1): {rootID: driveFullPath(1)}, + idx(drive, 2): {rootID: driveFullPath(2)}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): {data.NotMovedState: {}}, - fullPath(2): {data.DeletedState: {}}, + driveFullPath(1): {data.NotMovedState: {}}, + driveFullPath(2): {data.DeletedState: {}}, }, expectedDeltaURLs: map[string]string{idx(drive, 1): id(delta)}, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): {rootID: fullPath(1)}, + idx(drive, 1): {rootID: driveFullPath(1)}, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - fullPath(2): true, + driveFullPath(2): true, }, }, { @@ -3213,11 +3231,11 @@ func (suite *CollectionsUnitSuite) TestGet() { idx(drive, 1): { Pages: []mock.NextPage{{ Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), - driveItem(idx(folder, 2), namex(folder, 2), parent(1), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), parent(1, namex(folder, 2)), idx(folder, 2), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), + driveItem(idx(folder, 2), namex(folder, 2), driveParentDir(1), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, namex(folder, 2)), idx(folder, 2), isFile), }, }}, DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, @@ -3226,11 +3244,11 @@ func (suite *CollectionsUnitSuite) TestGet() { idx(drive, 2): { 
Pages: []mock.NextPage{{ Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(2), rootID, isFolder), - driveItem(id(file), name(file), parent(2, name(folder)), id(folder), isFile), - driveItem(idx(folder, 2), namex(folder, 2), parent(2), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), parent(2, namex(folder, 2)), idx(folder, 2), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(2), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(2, name(folder)), id(folder), isFile), + driveItem(idx(folder, 2), namex(folder, 2), driveParentDir(2), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), driveParentDir(2, namex(folder, 2)), idx(folder, 2), isFile), }, }}, DeltaUpdate: pagers.DeltaUpdate{URL: idx(delta, 2)}, @@ -3241,34 +3259,34 @@ func (suite *CollectionsUnitSuite) TestGet() { errCheck: assert.NoError, previousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, name(folder)), - idx(folder, 2): fullPath(1, name(folder)), - idx(folder, 3): fullPath(1, name(folder)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, name(folder)), + idx(folder, 2): driveFullPath(1, name(folder)), + idx(folder, 3): driveFullPath(1, name(folder)), }, idx(drive, 2): { - rootID: fullPath(2), - id(folder): fullPath(2, name(folder)), - idx(folder, 2): fullPath(2, namex(folder, 2)), + rootID: driveFullPath(2), + id(folder): driveFullPath(2, name(folder)), + idx(folder, 2): driveFullPath(2, namex(folder, 2)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): { + driveFullPath(1): { data.NewState: {id(folder), idx(folder, 2)}, }, - fullPath(1, name(folder)): { + driveFullPath(1, name(folder)): { data.NotMovedState: {id(folder), id(file)}, }, - fullPath(1, namex(folder, 2)): { + driveFullPath(1, namex(folder, 2)): { data.MovedState: {idx(folder, 2), idx(file, 2)}, }, - fullPath(2): { + 
driveFullPath(2): { data.NewState: {id(folder), idx(folder, 2)}, }, - fullPath(2, name(folder)): { + driveFullPath(2, name(folder)): { data.NotMovedState: {id(folder), id(file)}, }, - fullPath(2, namex(folder, 2)): { + driveFullPath(2, namex(folder, 2)): { data.NotMovedState: {idx(folder, 2), idx(file, 2)}, }, }, @@ -3278,20 +3296,20 @@ func (suite *CollectionsUnitSuite) TestGet() { }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(folder): fullPath(1, namex(folder, 2)), // note: this is a bug, but is currently expected - idx(folder, 2): fullPath(1, namex(folder, 2)), - idx(folder, 3): fullPath(1, namex(folder, 2)), + rootID: driveFullPath(1), + id(folder): driveFullPath(1, namex(folder, 2)), // note: this is a bug, but is currently expected + idx(folder, 2): driveFullPath(1, namex(folder, 2)), + idx(folder, 3): driveFullPath(1, namex(folder, 2)), }, idx(drive, 2): { - rootID: fullPath(2), - id(folder): fullPath(2, name(folder)), - idx(folder, 2): fullPath(2, namex(folder, 2)), + rootID: driveFullPath(2), + id(folder): driveFullPath(2, name(folder)), + idx(folder, 2): driveFullPath(2, namex(folder, 2)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{ - fullPath(1): makeExcludeMap(id(file), idx(file, 2)), - fullPath(2): makeExcludeMap(id(file), idx(file, 2)), + driveFullPath(1): makeExcludeMap(id(file), idx(file, 2)), + driveFullPath(2): makeExcludeMap(id(file), idx(file, 2)), }), doNotMergeItems: map[string]bool{}, }, @@ -3303,11 +3321,11 @@ func (suite *CollectionsUnitSuite) TestGet() { idx(drive, 1): { Pages: []mock.NextPage{{ Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(idx(fanny, 2), name(fanny), parent(1), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), parent(1, name(fanny)), idx(fanny, 2), isFile), - driveItem(id(nav), name(nav), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(nav)), id(nav), isFile), + 
driveRootItem(), + driveItem(idx(fanny, 2), name(fanny), driveParentDir(1), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, name(fanny)), idx(fanny, 2), isFile), + driveItem(id(nav), name(nav), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(nav)), id(nav), isFile), }, }}, DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, @@ -3318,18 +3336,18 @@ func (suite *CollectionsUnitSuite) TestGet() { errCheck: assert.NoError, previousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(nav): fullPath(1, name(fanny)), + rootID: driveFullPath(1), + id(nav): driveFullPath(1, name(fanny)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): { + driveFullPath(1): { data.NewState: {idx(fanny, 2)}, }, - fullPath(1, name(nav)): { + driveFullPath(1, name(nav)): { data.MovedState: {id(nav), id(file)}, }, - fullPath(1, name(fanny)): { + driveFullPath(1, name(fanny)): { data.NewState: {idx(fanny, 2), idx(file, 2)}, }, }, @@ -3338,13 +3356,13 @@ func (suite *CollectionsUnitSuite) TestGet() { }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(nav): fullPath(1, name(nav)), - idx(fanny, 2): fullPath(1, name(nav)), // note: this is a bug, but currently expected + rootID: driveFullPath(1), + id(nav): driveFullPath(1, name(nav)), + idx(fanny, 2): driveFullPath(1, name(nav)), // note: this is a bug, but currently expected }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{ - fullPath(1): makeExcludeMap(id(file), idx(file, 2)), + driveFullPath(1): makeExcludeMap(id(file), idx(file, 2)), }), doNotMergeItems: map[string]bool{}, }, @@ -3356,11 +3374,11 @@ func (suite *CollectionsUnitSuite) TestGet() { idx(drive, 1): { Pages: []mock.NextPage{{ Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(idx(fanny, 2), name(fanny), parent(1), rootID, isFolder), - 
driveItem(idx(file, 2), namex(file, 2), parent(1, name(fanny)), idx(fanny, 2), isFile), - driveItem(id(nav), name(nav), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(nav)), id(nav), isFile), + driveRootItem(), + driveItem(idx(fanny, 2), name(fanny), driveParentDir(1), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, name(fanny)), idx(fanny, 2), isFile), + driveItem(id(nav), name(nav), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(nav)), id(nav), isFile), }, }}, DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, @@ -3371,18 +3389,18 @@ func (suite *CollectionsUnitSuite) TestGet() { errCheck: assert.NoError, previousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(nav): fullPath(1, name(fanny)), + rootID: driveFullPath(1), + id(nav): driveFullPath(1, name(fanny)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): { + driveFullPath(1): { data.NewState: {idx(fanny, 2)}, }, - fullPath(1, name(nav)): { + driveFullPath(1, name(nav)): { data.MovedState: {id(nav), id(file)}, }, - fullPath(1, name(fanny)): { + driveFullPath(1, name(fanny)): { data.NewState: {idx(fanny, 2), idx(file, 2)}, }, }, @@ -3391,13 +3409,13 @@ func (suite *CollectionsUnitSuite) TestGet() { }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(nav): fullPath(1, name(nav)), - idx(fanny, 2): fullPath(1, name(nav)), // note: this is a bug, but currently expected + rootID: driveFullPath(1), + id(nav): driveFullPath(1, name(nav)), + idx(fanny, 2): driveFullPath(1, name(nav)), // note: this is a bug, but currently expected }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{ - fullPath(1): makeExcludeMap(id(file), idx(file, 2)), + driveFullPath(1): makeExcludeMap(id(file), idx(file, 2)), }), doNotMergeItems: map[string]bool{}, }, @@ -3409,12 +3427,12 @@ func 
(suite *CollectionsUnitSuite) TestGet() { idx(drive, 1): { Pages: []mock.NextPage{{ Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(idx(file, 1), namex(file, 1), parent(1), rootID, isFile), - driveItem(id(fanny), name(fanny), parent(1), rootID, isFolder), - driveItem(id(nav), name(nav), parent(1), rootID, isFolder), - driveItem(id(foo), name(foo), parent(1, name(fanny)), id(fanny), isFolder), - driveItem(id(bar), name(foo), parent(1, name(nav)), id(nav), isFolder), + driveRootItem(), + driveItem(idx(file, 1), namex(file, 1), driveParentDir(1), rootID, isFile), + driveItem(id(fanny), name(fanny), driveParentDir(1), rootID, isFolder), + driveItem(id(nav), name(nav), driveParentDir(1), rootID, isFolder), + driveItem(id(foo), name(foo), driveParentDir(1, name(fanny)), id(fanny), isFolder), + driveItem(id(bar), name(foo), driveParentDir(1, name(nav)), id(nav), isFolder), }, }}, DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, @@ -3425,27 +3443,27 @@ func (suite *CollectionsUnitSuite) TestGet() { errCheck: assert.NoError, previousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(nav): fullPath(1, name(nav)), - id(fanny): fullPath(1, name(fanny)), - id(foo): fullPath(1, name(nav), name(foo)), - id(bar): fullPath(1, name(fanny), name(foo)), + rootID: driveFullPath(1), + id(nav): driveFullPath(1, name(nav)), + id(fanny): driveFullPath(1, name(fanny)), + id(foo): driveFullPath(1, name(nav), name(foo)), + id(bar): driveFullPath(1, name(fanny), name(foo)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - fullPath(1): { + driveFullPath(1): { data.NotMovedState: {idx(file, 1)}, }, - fullPath(1, name(nav)): { + driveFullPath(1, name(nav)): { data.NotMovedState: {id(nav)}, }, - fullPath(1, name(nav), name(foo)): { + driveFullPath(1, name(nav), name(foo)): { data.MovedState: {id(bar)}, }, - fullPath(1, name(fanny)): { + driveFullPath(1, name(fanny)): { data.NotMovedState: {id(fanny)}, }, - fullPath(1, 
name(fanny), name(foo)): { + driveFullPath(1, name(fanny), name(foo)): { data.MovedState: {id(foo)}, }, }, @@ -3454,15 +3472,15 @@ func (suite *CollectionsUnitSuite) TestGet() { }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { - rootID: fullPath(1), - id(nav): fullPath(1, name(nav)), - id(fanny): fullPath(1, name(fanny)), - id(foo): fullPath(1, name(nav), name(foo)), // note: this is a bug, but currently expected - id(bar): fullPath(1, name(nav), name(foo)), + rootID: driveFullPath(1), + id(nav): driveFullPath(1, name(nav)), + id(fanny): driveFullPath(1, name(fanny)), + id(foo): driveFullPath(1, name(nav), name(foo)), // note: this is a bug, but currently expected + id(bar): driveFullPath(1, name(nav), name(foo)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{ - fullPath(1): makeExcludeMap(idx(file, 1)), + driveFullPath(1): makeExcludeMap(idx(file, 1)), }), doNotMergeItems: map[string]bool{}, }, @@ -3636,17 +3654,17 @@ func (suite *CollectionsUnitSuite) TestAddURLCacheToDriveCollections() { DrivePagers: map[string]*mock.DriveItemsDeltaPager{ idx(drive, 1): { Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(id(folder), name(folder), parent(1), rootID, isFolder), - driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), + driveRootItem(), + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), }}}, DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, }, idx(drive, 2): { Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem(idx(folder, 2), name(folder), parent(2), rootID, isFolder), - driveItem(idx(file, 2), name(file), parent(2, name(folder)), idx(folder, 2), isFile), + driveRootItem(), + driveItem(idx(folder, 2), name(folder), driveParentDir(2), rootID, isFolder), + driveItem(idx(file, 2), 
name(file), driveParentDir(2, name(folder)), idx(folder, 2), isFile), }}}, DeltaUpdate: pagers.DeltaUpdate{URL: idx(delta, 2), Reset: true}, }, diff --git a/src/internal/m365/collection/drive/collections_tree.go b/src/internal/m365/collection/drive/collections_tree.go index bef284a3e..e5342d832 100644 --- a/src/internal/m365/collection/drive/collections_tree.go +++ b/src/internal/m365/collection/drive/collections_tree.go @@ -156,8 +156,6 @@ func (c *Collections) getTree( return collections, canUsePrevBackup, nil } -var errTreeNotImplemented = clues.New("backup tree not implemented") - func (c *Collections) makeDriveCollections( ctx context.Context, drv models.Driveable, @@ -172,10 +170,7 @@ func (c *Collections) makeDriveCollections( return nil, nil, pagers.DeltaUpdate{}, clues.Wrap(err, "generating backup tree prefix") } - var ( - tree = newFolderyMcFolderFace(ppfx) - stats = &driveEnumerationStats{} - ) + tree := newFolderyMcFolderFace(ppfx) counter.Add(count.PrevPaths, int64(len(prevPaths))) @@ -184,10 +179,9 @@ func (c *Collections) makeDriveCollections( du, err := c.populateTree( ctx, tree, - limiter, - stats, drv, prevDeltaLink, + limiter, counter, errs) if err != nil { @@ -260,7 +254,7 @@ func (c *Collections) makeDriveCollections( return nil, nil, du, nil } - return nil, nil, du, errTreeNotImplemented + return nil, nil, du, errGetTreeNotImplemented } // populateTree constructs a new tree and populates it with items @@ -268,10 +262,9 @@ func (c *Collections) makeDriveCollections( func (c *Collections) populateTree( ctx context.Context, tree *folderyMcFolderFace, - limiter *pagerLimiter, - stats *driveEnumerationStats, drv models.Driveable, prevDeltaLink string, + limiter *pagerLimiter, counter *count.Bus, errs *fault.Bus, ) (pagers.DeltaUpdate, error) { @@ -297,23 +290,18 @@ func (c *Collections) populateTree( break } - counter.Inc(count.PagesEnumerated) - if reset { counter.Inc(count.PagerResets) tree.reset() c.resetStats() - - *stats = 
driveEnumerationStats{} } err := c.enumeratePageOfItems( ctx, tree, - limiter, - stats, drv, page, + limiter, counter, errs) if err != nil { @@ -324,17 +312,12 @@ func (c *Collections) populateTree( el.AddRecoverable(ctx, clues.Stack(err)) } - // Stop enumeration early if we've reached the item or page limit. Do this - // at the end of the loop so we don't request another page in the - // background. - // - // We don't want to break on just the container limit here because it's - // possible that there's more items in the current (final) container that - // we're processing. We need to see the next page to determine if we've - // reached the end of the container. Note that this doesn't take into - // account the number of items in the current container, so it's possible it - // will fetch more data when it doesn't really need to. - if limiter.atPageLimit(stats) || limiter.atItemLimit(stats) { + counter.Inc(count.PagesEnumerated) + + // Stop enumeration early if we've reached the page limit. Keep this + // at the end of the loop so we don't request another page (pager.NextPage) + // before seeing we've passed the limit. 
+ if limiter.hitPageLimit(int(counter.Get(count.PagesEnumerated))) { break } } @@ -357,10 +340,9 @@ func (c *Collections) populateTree( func (c *Collections) enumeratePageOfItems( ctx context.Context, tree *folderyMcFolderFace, - limiter *pagerLimiter, - stats *driveEnumerationStats, drv models.Driveable, page []models.DriveItemable, + limiter *pagerLimiter, counter *count.Bus, errs *fault.Bus, ) error { @@ -390,14 +372,9 @@ func (c *Collections) enumeratePageOfItems( switch { case isFolder: - // check limits before adding the next new folder - if !tree.containsFolder(itemID) && limiter.atLimit(stats, len(tree.folderIDToNode)) { - return errHitLimit - } - - skipped, err = c.addFolderToTree(ictx, tree, drv, item, stats, counter) + skipped, err = c.addFolderToTree(ictx, tree, drv, item, limiter, counter) case isFile: - skipped, err = c.addFileToTree(ictx, tree, drv, item, limiter, stats, counter) + skipped, err = c.addFileToTree(ictx, tree, drv, item, limiter, counter) default: err = clues.NewWC(ictx, "item is neither folder nor file"). Label(fault.LabelForceNoBackupCreation, count.UnknownItemType) @@ -408,22 +385,14 @@ func (c *Collections) enumeratePageOfItems( } if err != nil { - el.AddRecoverable(ictx, clues.Wrap(err, "adding item")) - } + if errors.Is(err, errHitLimit) { + return err + } - // Check if we reached the item or size limit while processing this page. - // The check after this loop will get us out of the pager. - // We don't want to check all limits because it's possible we've reached - // the container limit but haven't reached the item limit or really added - // items to the last container we found. 
- // FIXME(keepers): this isn't getting handled properly at the moment - if limiter.atItemLimit(stats) { - return errHitLimit + el.AddRecoverable(ictx, clues.Wrap(err, "adding folder")) } } - stats.numPages++ - return clues.Stack(el.Failure()).OrNil() } @@ -432,7 +401,7 @@ func (c *Collections) addFolderToTree( tree *folderyMcFolderFace, drv models.Driveable, folder models.DriveItemable, - stats *driveEnumerationStats, + limiter *pagerLimiter, counter *count.Bus, ) (*fault.Skipped, error) { var ( @@ -447,6 +416,11 @@ func (c *Collections) addFolderToTree( notSelected bool ) + // check container limits before adding the next new folder + if !tree.containsFolder(folderID) && limiter.hitContainerLimit(tree.countLiveFolders()) { + return nil, errHitLimit + } + if parent != nil { parentID = ptr.Val(parent.GetId()) } @@ -541,18 +515,18 @@ func (c *Collections) addFileToTree( drv models.Driveable, file models.DriveItemable, limiter *pagerLimiter, - stats *driveEnumerationStats, counter *count.Bus, ) (*fault.Skipped, error) { var ( - driveID = ptr.Val(drv.GetId()) - fileID = ptr.Val(file.GetId()) - fileName = ptr.Val(file.GetName()) - fileSize = ptr.Val(file.GetSize()) - isDeleted = file.GetDeleted() != nil - isMalware = file.GetMalware() != nil - parent = file.GetParentReference() - parentID string + driveID = ptr.Val(drv.GetId()) + fileID = ptr.Val(file.GetId()) + fileName = ptr.Val(file.GetName()) + fileSize = ptr.Val(file.GetSize()) + lastModified = ptr.Val(file.GetLastModifiedDateTime()) + isDeleted = file.GetDeleted() != nil + isMalware = file.GetMalware() != nil + parent = file.GetParentReference() + parentID string ) if parent != nil { @@ -583,53 +557,37 @@ func (c *Collections) addFileToTree( return skip, nil } - _, alreadySeen := tree.fileIDToParentID[fileID] - if isDeleted { tree.deleteFile(fileID) + return nil, nil + } - if alreadySeen { - stats.numAddedFiles-- - // FIXME(keepers): this might be faulty, - // since deletes may not include the file size. 
- // it will likely need to be tracked in - // the tree alongside the file modtime. - stats.numBytes -= fileSize - } else { - c.NumItems++ - c.NumFiles++ + _, alreadySeen := tree.fileIDToParentID[fileID] + parentNode, parentNotNil := tree.folderIDToNode[parentID] + + if parentNotNil && !alreadySeen { + countSize := tree.countLiveFilesAndSizes() + + // Don't add new items if the new collection has already reached its limit. + // item moves and updates are generally allowed through. + if limiter.atContainerItemsLimit(len(parentNode.files)) || limiter.hitItemLimit(countSize.numFiles) { + return nil, errHitLimit } - return nil, nil + // Skip large files that don't fit within the size limit. + // unlike the other checks, which see if we're already at the limit, this check + // needs to be forward-facing to ensure we don't go far over the limit. + // Example case: a 1gb limit and a 25gb file. + if limiter.hitTotalBytesLimit(fileSize + countSize.totalBytes) { + return nil, errHitLimit + } } - parentNode, ok := tree.folderIDToNode[parentID] - - // Don't add new items if the new collection is already reached it's limit. - // item moves and updates are generally allowed through. - if ok && !alreadySeen && limiter.atContainerItemsLimit(len(parentNode.files)) { - return nil, nil - } - - // Skip large files that don't fit within the size limit. - if limiter.aboveSizeLimit(fileSize + stats.numBytes) { - return nil, nil - } - - err := tree.addFile(parentID, fileID, ptr.Val(file.GetLastModifiedDateTime())) + err := tree.addFile(parentID, fileID, lastModified, fileSize) if err != nil { return nil, clues.StackWC(ctx, err) } - // Only increment counters for new files - if !alreadySeen { - // todo: remmove c.NumItems/Files in favor of counter and tree counting. 
- c.NumItems++ - c.NumFiles++ - stats.numAddedFiles++ - stats.numBytes += fileSize - } - return nil, nil } diff --git a/src/internal/m365/collection/drive/collections_tree_test.go b/src/internal/m365/collection/drive/collections_tree_test.go index 50af70721..9786b728f 100644 --- a/src/internal/m365/collection/drive/collections_tree_test.go +++ b/src/internal/m365/collection/drive/collections_tree_test.go @@ -44,6 +44,19 @@ func collWithMBH(mbh BackupHandler) *Collections { count.New()) } +func collWithMBHAndOpts( + mbh BackupHandler, + opts control.Options, +) *Collections { + return NewCollections( + mbh, + tenant, + idname.NewProvider(user, user), + func(*support.ControllerOperationStatus) {}, + opts, + count.New()) +} + // func fullOrPrevPath( // t *testing.T, // coll data.BackupCollection, @@ -234,8 +247,8 @@ type collectionAssertions map[string]collectionAssertion // } // } -func rootAnd(items ...models.DriveItemable) []models.DriveItemable { - return append([]models.DriveItemable{driveItem(rootID, rootName, parent(0), "", isFolder)}, items...) +func pageItems(items ...models.DriveItemable) []models.DriveItemable { + return append([]models.DriveItemable{driveRootItem()}, items...) 
} func pagesOf(pages ...[]models.DriveItemable) []mock.NextPage { @@ -403,7 +416,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_GetTree() { enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ id(drive): { - Pages: pagesOf(rootAnd()), + Pages: pagesOf(pageItems()), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, @@ -411,7 +424,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_GetTree() { expect: expected{ canUsePrevBackup: assert.False, collAssertions: collectionAssertions{ - fullPath(1): newCollAssertion( + driveFullPath(1): newCollAssertion( doNotMergeItems, statesToItemIDs{data.NotMovedState: {}}, id(file)), @@ -503,7 +516,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() { enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ id(drive): { - Pages: pagesOf(rootAnd()), + Pages: pagesOf(pageItems()), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, @@ -554,7 +567,10 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { type expected struct { counts countTD.Expected err require.ErrorAssertionFunc - treeSize int + numLiveFiles int + numLiveFolders int + shouldHitLimit bool + sizeBytes int64 treeContainsFolderIDs []string treeContainsTombstoneIDs []string treeContainsFileIDsWithParent map[string]string @@ -582,7 +598,9 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { expect: expected{ counts: countTD.Expected{}, err: require.NoError, - treeSize: 0, + numLiveFiles: 0, + numLiveFolders: 0, + sizeBytes: 0, treeContainsFolderIDs: []string{}, treeContainsTombstoneIDs: []string{}, treeContainsFileIDsWithParent: map[string]string{}, @@ -594,7 +612,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ id(drive): { - Pages: pagesOf(rootAnd()), + Pages: 
pagesOf(pageItems()), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, @@ -606,8 +624,10 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { count.TotalFilesProcessed: 0, count.PagesEnumerated: 1, }, - err: require.NoError, - treeSize: 1, + err: require.NoError, + numLiveFiles: 0, + numLiveFolders: 1, + sizeBytes: 0, treeContainsFolderIDs: []string{ rootID, }, @@ -621,7 +641,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ id(drive): { - Pages: pagesOf(rootAnd(), rootAnd()), + Pages: pagesOf(pageItems(), pageItems()), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, @@ -633,8 +653,10 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { count.TotalFilesProcessed: 0, count.PagesEnumerated: 2, }, - err: require.NoError, - treeSize: 1, + err: require.NoError, + numLiveFiles: 0, + numLiveFolders: 1, + sizeBytes: 0, treeContainsFolderIDs: []string{ rootID, }, @@ -649,11 +671,11 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { DrivePagers: map[string]*mock.DriveItemsDeltaPager{ id(drive): { Pages: pagesOf( - rootAnd(driveItem(id(folder), name(folder), parent(0), rootID, isFolder)), - rootAnd(driveItem(idx(folder, "sib"), namex(folder, "sib"), parent(0), rootID, isFolder)), - rootAnd( - driveItem(id(folder), name(folder), parent(0), rootID, isFolder), - driveItem(idx(folder, "chld"), namex(folder, "chld"), parent(0), id(folder), isFolder))), + pageItems(driveItem(id(folder), name(folder), parentDir(), rootID, isFolder)), + pageItems(driveItem(idx(folder, "sib"), namex(folder, "sib"), parentDir(), rootID, isFolder)), + pageItems( + driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), + driveItem(idx(folder, "chld"), namex(folder, "chld"), parentDir(), id(folder), isFolder))), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, @@ -665,8 +687,10 @@ func 
(suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { count.PagesEnumerated: 3, count.TotalFilesProcessed: 0, }, - err: require.NoError, - treeSize: 4, + err: require.NoError, + numLiveFiles: 0, + numLiveFolders: 4, + sizeBytes: 0, treeContainsFolderIDs: []string{ rootID, id(folder), @@ -684,16 +708,16 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { DrivePagers: map[string]*mock.DriveItemsDeltaPager{ id(drive): { Pages: pagesOf( - rootAnd( - driveItem(id(folder), name(folder), parent(0), rootID, isFolder), - driveItem(id(file), name(file), parent(0, name(folder)), id(folder), isFile)), - rootAnd( - driveItem(idx(folder, "sib"), namex(folder, "sib"), parent(0), rootID, isFolder), - driveItem(idx(file, "sib"), namex(file, "sib"), parent(0, namex(folder, "sib")), idx(folder, "sib"), isFile)), - rootAnd( - driveItem(id(folder), name(folder), parent(0), rootID, isFolder), - driveItem(idx(folder, "chld"), namex(folder, "chld"), parent(0), id(folder), isFolder), - driveItem(idx(file, "chld"), namex(file, "chld"), parent(0, namex(folder, "chld")), idx(folder, "chld"), isFile))), + pageItems( + driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), + driveItem(id(file), name(file), parentDir(name(folder)), id(folder), isFile)), + pageItems( + driveItem(idx(folder, "sib"), namex(folder, "sib"), parentDir(), rootID, isFolder), + driveItem(idx(file, "sib"), namex(file, "sib"), parentDir(namex(folder, "sib")), idx(folder, "sib"), isFile)), + pageItems( + driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), + driveItem(idx(folder, "chld"), namex(folder, "chld"), parentDir(), id(folder), isFolder), + driveItem(idx(file, "chld"), namex(file, "chld"), parentDir(namex(folder, "chld")), idx(folder, "chld"), isFile))), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, @@ -705,8 +729,10 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { count.TotalFilesProcessed: 3, count.PagesEnumerated: 
3, }, - err: require.NoError, - treeSize: 4, + err: require.NoError, + numLiveFiles: 3, + numLiveFolders: 4, + sizeBytes: 3 * defaultItemSize, treeContainsFolderIDs: []string{ rootID, id(folder), @@ -730,10 +756,10 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { DrivePagers: map[string]*mock.DriveItemsDeltaPager{ id(drive): { Pages: pagesOf( - rootAnd( - driveItem(id(folder), name(folder), parent(0), rootID, isFolder), - driveItem(id(file), name(file), parent(0, name(folder)), id(folder), isFile)), - rootAnd(delItem(id(folder), parent(0), rootID, isFolder))), + pageItems( + driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), + driveItem(id(file), name(file), parentDir(name(folder)), id(folder), isFile)), + pageItems(delItem(id(folder), parentDir(), rootID, isFolder))), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, @@ -746,8 +772,10 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { count.TotalDeleteFoldersProcessed: 1, count.PagesEnumerated: 2, }, - err: require.NoError, - treeSize: 2, + err: require.NoError, + numLiveFiles: 0, + numLiveFolders: 1, + sizeBytes: 0, treeContainsFolderIDs: []string{ rootID, }, @@ -766,11 +794,11 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { DrivePagers: map[string]*mock.DriveItemsDeltaPager{ id(drive): { Pages: pagesOf( - rootAnd( - driveItem(idx(folder, "parent"), namex(folder, "parent"), parent(0), rootID, isFolder), - driveItem(id(folder), namex(folder, "moved"), parent(0), idx(folder, "parent"), isFolder), - driveItem(id(file), name(file), parent(0, namex(folder, "parent"), name(folder)), id(folder), isFile)), - rootAnd(delItem(id(folder), parent(0), idx(folder, "parent"), isFolder))), + pageItems( + driveItem(idx(folder, "parent"), namex(folder, "parent"), parentDir(), rootID, isFolder), + driveItem(id(folder), namex(folder, "moved"), parentDir(), idx(folder, "parent"), isFolder), + driveItem(id(file), name(file), 
parentDir(namex(folder, "parent"), name(folder)), id(folder), isFile)), + pageItems(delItem(id(folder), parentDir(), idx(folder, "parent"), isFolder))), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, @@ -783,8 +811,10 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { count.TotalFilesProcessed: 1, count.PagesEnumerated: 2, }, - err: require.NoError, - treeSize: 3, + err: require.NoError, + numLiveFiles: 0, + numLiveFolders: 2, + sizeBytes: 0, treeContainsFolderIDs: []string{ rootID, idx(folder, "parent"), @@ -799,21 +829,21 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { }, { name: "at folder limit before enumeration", - tree: treeWithRoot(), + tree: treeWithFileAtRoot(), enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ id(drive): { Pages: pagesOf( - rootAnd( - driveItem(id(folder), name(folder), parent(0), rootID, isFolder), - driveItem(id(file), name(file), parent(0, name(folder)), id(folder), isFile)), - rootAnd( - driveItem(idx(folder, "sib"), namex(folder, "sib"), parent(0), rootID, isFolder), - driveItem(idx(file, "sib"), namex(file, "sib"), parent(0, namex(folder, "sib")), idx(folder, "sib"), isFile)), - rootAnd( - driveItem(id(folder), name(folder), parent(0), rootID, isFolder), - driveItem(idx(folder, "chld"), namex(folder, "chld"), parent(0), id(folder), isFolder), - driveItem(idx(file, "chld"), namex(file, "chld"), parent(0, namex(folder, "chld")), idx(folder, "chld"), isFile))), + pageItems( + driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), + driveItem(id(file), name(file), parentDir(name(folder)), id(folder), isFile)), + pageItems( + driveItem(idx(folder, "sib"), namex(folder, "sib"), parentDir(), rootID, isFolder), + driveItem(idx(file, "sib"), namex(file, "sib"), parentDir(namex(folder, "sib")), idx(folder, "sib"), isFile)), + pageItems( + driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), + 
driveItem(idx(folder, "chld"), namex(folder, "chld"), parentDir(), id(folder), isFolder), + driveItem(idx(file, "chld"), namex(file, "chld"), parentDir(namex(folder, "chld")), idx(folder, "chld"), isFile))), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, @@ -824,10 +854,13 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { count.TotalDeleteFoldersProcessed: 0, count.TotalFoldersProcessed: 1, count.TotalFilesProcessed: 0, - count.PagesEnumerated: 1, + count.PagesEnumerated: 0, }, - err: require.NoError, - treeSize: 1, + err: require.NoError, + shouldHitLimit: true, + numLiveFiles: 1, + numLiveFolders: 1, + sizeBytes: defaultItemSize, treeContainsFolderIDs: []string{ rootID, }, @@ -842,16 +875,16 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { DrivePagers: map[string]*mock.DriveItemsDeltaPager{ id(drive): { Pages: pagesOf( - rootAnd( - driveItem(id(folder), name(folder), parent(0), rootID, isFolder), - driveItem(id(file), name(file), parent(0, name(folder)), id(folder), isFile)), - rootAnd( - driveItem(idx(folder, "sib"), namex(folder, "sib"), parent(0), rootID, isFolder), - driveItem(idx(file, "sib"), namex(file, "sib"), parent(0, namex(folder, "sib")), idx(folder, "sib"), isFile)), - rootAnd( - driveItem(id(folder), name(folder), parent(0), rootID, isFolder), - driveItem(idx(folder, "chld"), namex(folder, "chld"), parent(0), id(folder), isFolder), - driveItem(idx(file, "chld"), namex(file, "chld"), parent(0, namex(folder, "chld")), idx(folder, "chld"), isFile))), + pageItems( + driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), + driveItem(id(file), name(file), parentDir(name(folder)), id(folder), isFile)), + pageItems( + driveItem(idx(folder, "sib"), namex(folder, "sib"), parentDir(), rootID, isFolder), + driveItem(idx(file, "sib"), namex(file, "sib"), parentDir(namex(folder, "sib")), idx(folder, "sib"), isFile)), + pageItems( + driveItem(id(folder), name(folder), parentDir(), rootID, 
isFolder), + driveItem(idx(folder, "chld"), namex(folder, "chld"), parentDir(), id(folder), isFolder), + driveItem(idx(file, "chld"), namex(file, "chld"), parentDir(namex(folder, "chld")), idx(folder, "chld"), isFile))), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, @@ -862,10 +895,13 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { count.TotalDeleteFoldersProcessed: 0, count.TotalFoldersProcessed: 1, count.TotalFilesProcessed: 0, - count.PagesEnumerated: 1, + count.PagesEnumerated: 0, }, - err: require.NoError, - treeSize: 1, + err: require.NoError, + shouldHitLimit: true, + numLiveFiles: 0, + numLiveFolders: 1, + sizeBytes: 0, treeContainsFolderIDs: []string{ rootID, }, @@ -894,14 +930,31 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { _, err := c.populateTree( ctx, test.tree, - test.limiter, - &driveEnumerationStats{}, drv, id(delta), + test.limiter, counter, fault.New(true)) + test.expect.err(t, err, clues.ToCore(err)) - assert.Equal(t, test.expect.treeSize, test.tree.countFolders(), "count folders in tree") + + assert.Equal( + t, + test.expect.numLiveFolders, + test.tree.countLiveFolders(), + "count folders in tree") + + countSize := test.tree.countLiveFilesAndSizes() + assert.Equal( + t, + test.expect.numLiveFiles, + countSize.numFiles, + "count files in tree") + assert.Equal( + t, + test.expect.sizeBytes, + countSize.totalBytes, + "count total bytes in tree") test.expect.counts.Compare(t, counter) for _, id := range test.expect.treeContainsFolderIDs { @@ -913,8 +966,8 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { } for iID, pID := range test.expect.treeContainsFileIDsWithParent { - assert.Contains(t, test.tree.fileIDToParentID, iID) - assert.Equal(t, pID, test.tree.fileIDToParentID[iID]) + assert.Contains(t, test.tree.fileIDToParentID, iID, "file should exist in tree") + assert.Equal(t, pID, test.tree.fileIDToParentID[iID], "file should reference correct parent") } }) 
} @@ -934,6 +987,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold type expected struct { counts countTD.Expected err require.ErrorAssertionFunc + shouldHitLimit bool treeSize int treeContainsFolderIDs []string treeContainsTombstoneIDs []string @@ -978,8 +1032,8 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold }, { name: "root only", - tree: newFolderyMcFolderFace(nil), - page: rootAnd(), + tree: treeWithRoot(), + page: pageItems(), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -996,10 +1050,10 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold { name: "many folders in a hierarchy", tree: treeWithRoot(), - page: rootAnd( - driveItem(id(folder), name(folder), parent(0), rootID, isFolder), - driveItem(idx(folder, "sib"), namex(folder, "sib"), parent(0), rootID, isFolder), - driveItem(idx(folder, "chld"), namex(folder, "chld"), parent(0, name(folder)), id(folder), isFolder)), + page: pageItems( + driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), + driveItem(idx(folder, "sib"), namex(folder, "sib"), parentDir(), rootID, isFolder), + driveItem(idx(folder, "chld"), namex(folder, "chld"), parentDir(name(folder)), id(folder), isFolder)), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1016,32 +1070,12 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold treeContainsTombstoneIDs: []string{}, }, }, - { - name: "already hit folder limit", - tree: treeWithRoot(), - page: rootAnd( - driveItem(id(folder), name(folder), parent(0), rootID, isFolder), - driveItem(idx(folder, "sib"), namex(folder, "sib"), parent(0), rootID, isFolder), - driveItem(idx(folder, "chld"), namex(folder, "chld"), parent(0, name(folder)), id(folder), isFolder)), - limiter: newPagerLimiter(minimumLimitOpts()), - expect: expected{ - counts: 
countTD.Expected{ - count.TotalFoldersProcessed: 1, - }, - err: require.Error, - treeSize: 1, - treeContainsFolderIDs: []string{ - rootID, - }, - treeContainsTombstoneIDs: []string{}, - }, - }, { name: "create->delete", tree: treeWithRoot(), - page: rootAnd( - driveItem(id(folder), name(folder), parent(0), rootID, isFolder), - delItem(id(folder), parent(0), rootID, isFolder)), + page: pageItems( + driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), + delItem(id(folder), parentDir(), rootID, isFolder)), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1059,10 +1093,10 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold { name: "move->delete", tree: treeWithFolders(), - page: rootAnd( - driveItem(idx(folder, "parent"), namex(folder, "parent"), parent(0), rootID, isFolder), - driveItem(id(folder), namex(folder, "moved"), parent(0, namex(folder, "parent")), idx(folder, "parent"), isFolder), - delItem(id(folder), parent(0), idx(folder, "parent"), isFolder)), + page: pageItems( + driveItem(idx(folder, "parent"), namex(folder, "parent"), parentDir(), rootID, isFolder), + driveItem(id(folder), namex(folder, "moved"), parentDir(namex(folder, "parent")), idx(folder, "parent"), isFolder), + delItem(id(folder), parentDir(), idx(folder, "parent"), isFolder)), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1081,11 +1115,32 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold }, }, { - name: "delete->create", + name: "delete->create with previous path", tree: treeWithRoot(), - page: rootAnd( - delItem(id(folder), parent(0), rootID, isFolder), - driveItem(id(folder), name(folder), parent(0), rootID, isFolder)), + page: pageItems( + delItem(id(folder), parentDir(), rootID, isFolder), + driveItem(id(folder), name(folder), parentDir(), rootID, isFolder)), + limiter: 
newPagerLimiter(control.DefaultOptions()), + expect: expected{ + counts: countTD.Expected{ + count.TotalFoldersProcessed: 2, + count.TotalDeleteFoldersProcessed: 1, + }, + err: require.NoError, + treeSize: 2, + treeContainsFolderIDs: []string{ + rootID, + id(folder), + }, + treeContainsTombstoneIDs: []string{}, + }, + }, + { + name: "delete->create without previous path", + tree: treeWithRoot(), + page: pageItems( + delItem(id(folder), parentDir(), rootID, isFolder), + driveItem(id(folder), name(folder), parentDir(), rootID, isFolder)), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1115,14 +1170,22 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold err := c.enumeratePageOfItems( ctx, test.tree, - test.limiter, - &driveEnumerationStats{}, drv, test.page, + test.limiter, counter, fault.New(true)) + test.expect.err(t, err, clues.ToCore(err)) - assert.Equal(t, test.expect.treeSize, test.tree.countFolders(), "count folders in tree") + if test.expect.shouldHitLimit { + assert.ErrorIs(t, err, errHitLimit, clues.ToCore(err)) + } + + assert.Equal( + t, + test.expect.treeSize, + len(test.tree.tombstones)+test.tree.countLiveFolders(), + "count folders in tree") test.expect.counts.Compare(t, counter) for _, id := range test.expect.treeContainsFolderIDs { @@ -1141,110 +1204,221 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFolderToTree() { drv.SetId(ptr.To(id(drive))) drv.SetName(ptr.To(name(drive))) - fld := driveItem(id(folder), name(folder), parent(0), rootID, isFolder) - subFld := driveItem(id(folder), name(folder), parent(drv, namex(folder, "parent")), idx(folder, "parent"), isFolder) - pack := driveItem(id(pkg), name(pkg), parent(0), rootID, isPackage) - del := delItem(id(folder), parent(0), rootID, isFolder) - mal := malwareItem(idx(folder, "mal"), namex(folder, "mal"), parent(0), rootID, isFolder) + fld := driveItem(id(folder), name(folder), parentDir(), rootID, 
isFolder) + subFld := driveItem(id(folder), name(folder), driveParentDir(drv, namex(folder, "parent")), idx(folder, "parent"), isFolder) + pack := driveItem(id(pkg), name(pkg), parentDir(), rootID, isPackage) + del := delItem(id(folder), parentDir(), rootID, isFolder) + mal := malwareItem(idx(folder, "mal"), namex(folder, "mal"), parentDir(), rootID, isFolder) type expected struct { + countLiveFolders int counts countTD.Expected err require.ErrorAssertionFunc + shouldHitLimit bool treeSize int treeContainsFolder assert.BoolAssertionFunc skipped assert.ValueAssertionFunc } table := []struct { - name string - tree *folderyMcFolderFace - folder models.DriveItemable - expect expected + name string + tree *folderyMcFolderFace + folder models.DriveItemable + limiter *pagerLimiter + expect expected }{ { - name: "add folder", - tree: treeWithRoot(), - folder: fld, + name: "add folder", + tree: treeWithRoot(), + folder: fld, + limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ - err: require.NoError, - counts: countTD.Expected{count.TotalFoldersProcessed: 1}, + countLiveFolders: 2, + err: require.NoError, + counts: countTD.Expected{ + count.TotalMalwareProcessed: 0, + count.TotalPackagesProcessed: 0, + count.TotalFoldersProcessed: 1, + count.TotalDeleteFoldersProcessed: 0, + }, treeSize: 2, treeContainsFolder: assert.True, skipped: assert.Nil, }, }, { - name: "re-add folder that already exists", - tree: treeWithFolders(), - folder: subFld, + name: "re-add folder that already exists", + tree: treeWithFolders(), + folder: subFld, + limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ - err: require.NoError, - counts: countTD.Expected{count.TotalFoldersProcessed: 1}, + countLiveFolders: 3, + err: require.NoError, + counts: countTD.Expected{ + count.TotalMalwareProcessed: 0, + count.TotalPackagesProcessed: 0, + count.TotalFoldersProcessed: 1, + count.TotalDeleteFoldersProcessed: 0, + }, treeSize: 3, treeContainsFolder: assert.True, skipped: 
assert.Nil, }, }, { - name: "add package", - tree: treeWithRoot(), - folder: pack, + name: "add package", + tree: treeWithRoot(), + folder: pack, + limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ - err: require.NoError, - counts: countTD.Expected{count.TotalPackagesProcessed: 1}, + countLiveFolders: 2, + err: require.NoError, + counts: countTD.Expected{ + count.TotalMalwareProcessed: 0, + count.TotalPackagesProcessed: 1, + count.TotalFoldersProcessed: 0, + count.TotalDeleteFoldersProcessed: 0, + }, treeSize: 2, treeContainsFolder: assert.True, skipped: assert.Nil, }, }, { - name: "tombstone a folder in a populated tree", - tree: treeWithFolders(), - folder: del, + name: "tombstone a folder in a populated tree", + tree: treeWithFolders(), + folder: del, + limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ - err: require.NoError, - counts: countTD.Expected{count.TotalDeleteFoldersProcessed: 1}, + countLiveFolders: 2, + err: require.NoError, + counts: countTD.Expected{ + count.TotalMalwareProcessed: 0, + count.TotalPackagesProcessed: 0, + count.TotalFoldersProcessed: 0, + count.TotalDeleteFoldersProcessed: 1, + }, treeSize: 3, treeContainsFolder: assert.True, skipped: assert.Nil, }, }, { - name: "tombstone new folder in unpopulated tree", - tree: newFolderyMcFolderFace(nil), - folder: del, + name: "tombstone new folder in unpopulated tree", + tree: newFolderyMcFolderFace(nil), + folder: del, + limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ - err: require.NoError, - counts: countTD.Expected{count.TotalDeleteFoldersProcessed: 1}, + err: require.NoError, + counts: countTD.Expected{ + count.TotalMalwareProcessed: 0, + count.TotalPackagesProcessed: 0, + count.TotalFoldersProcessed: 0, + count.TotalDeleteFoldersProcessed: 1, + }, treeSize: 1, treeContainsFolder: assert.True, skipped: assert.Nil, }, }, { - name: "re-add tombstone that already exists", - tree: treeWithTombstone(), - folder: del, + name: "re-add 
tombstone that already exists", + tree: treeWithTombstone(), + folder: del, + limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ - err: require.NoError, - counts: countTD.Expected{count.TotalDeleteFoldersProcessed: 1}, + countLiveFolders: 1, + err: require.NoError, + counts: countTD.Expected{ + count.TotalMalwareProcessed: 0, + count.TotalPackagesProcessed: 0, + count.TotalFoldersProcessed: 0, + count.TotalDeleteFoldersProcessed: 1, + }, treeSize: 2, treeContainsFolder: assert.True, skipped: assert.Nil, }, }, { - name: "add malware", - tree: treeWithRoot(), - folder: mal, + name: "add malware", + tree: treeWithRoot(), + folder: mal, + limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ - err: require.NoError, - counts: countTD.Expected{count.TotalMalwareProcessed: 1}, + countLiveFolders: 1, + err: require.NoError, + counts: countTD.Expected{ + count.TotalMalwareProcessed: 1, + count.TotalPackagesProcessed: 0, + count.TotalFoldersProcessed: 0, + count.TotalDeleteFoldersProcessed: 0, + }, treeSize: 1, treeContainsFolder: assert.False, skipped: assert.NotNil, }, }, + { + name: "already over container limit, folder seen twice", + tree: treeWithFolders(), + folder: fld, + limiter: newPagerLimiter(minimumLimitOpts()), + expect: expected{ + countLiveFolders: 3, + err: require.NoError, + counts: countTD.Expected{ + count.TotalMalwareProcessed: 0, + count.TotalPackagesProcessed: 0, + count.TotalFoldersProcessed: 1, + count.TotalDeleteFoldersProcessed: 0, + }, + shouldHitLimit: false, + skipped: assert.Nil, + treeSize: 3, + treeContainsFolder: assert.True, + }, + }, + { + name: "already at container limit", + tree: treeWithRoot(), + folder: fld, + limiter: newPagerLimiter(minimumLimitOpts()), + expect: expected{ + countLiveFolders: 1, + err: require.Error, + counts: countTD.Expected{ + count.TotalMalwareProcessed: 0, + count.TotalPackagesProcessed: 0, + count.TotalFoldersProcessed: 0, + count.TotalDeleteFoldersProcessed: 0, + }, + 
shouldHitLimit: true, + skipped: assert.Nil, + treeSize: 1, + treeContainsFolder: assert.False, + }, + }, + { + name: "process tombstone when over folder limits", + tree: treeWithFolders(), + folder: del, + limiter: newPagerLimiter(minimumLimitOpts()), + expect: expected{ + countLiveFolders: 2, + err: require.NoError, + counts: countTD.Expected{ + count.TotalMalwareProcessed: 0, + count.TotalPackagesProcessed: 0, + count.TotalFoldersProcessed: 0, + count.TotalDeleteFoldersProcessed: 1, + }, + shouldHitLimit: false, + skipped: assert.Nil, + treeSize: 3, + treeContainsFolder: assert.True, + }, + }, } for _, test := range table { suite.Run(test.name, func() { @@ -1255,19 +1429,29 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFolderToTree() { c := collWithMBH(mock.DefaultOneDriveBH(user)) counter := count.New() - des := &driveEnumerationStats{} skipped, err := c.addFolderToTree( ctx, test.tree, drv, test.folder, - des, + test.limiter, counter) + test.expect.err(t, err, clues.ToCore(err)) test.expect.skipped(t, skipped) + + if test.expect.shouldHitLimit { + assert.ErrorIs(t, err, errHitLimit, clues.ToCore(err)) + } + test.expect.counts.Compare(t, counter) - assert.Equal(t, test.expect.treeSize, test.tree.countFolders(), "folders in tree") + assert.Equal(t, test.expect.countLiveFolders, test.tree.countLiveFolders(), "live folders") + assert.Equal( + t, + test.expect.treeSize, + len(test.tree.tombstones)+test.tree.countLiveFolders(), + "folders in tree") test.expect.treeContainsFolder(t, test.tree.containsFolder(ptr.Val(test.folder.GetId()))) }) } @@ -1288,13 +1472,13 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeFolderCollectionPath( }{ { name: "root", - folder: driveRootItem(rootID), + folder: driveRootItem(), expect: basePath.String(), expectErr: require.NoError, }, { name: "folder", - folder: driveItem(id(folder), name(folder), parent(0), rootID, isFolder), + folder: driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), 
expect: folderPath.String(), expectErr: require.NoError, }, @@ -1339,8 +1523,8 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file counts countTD.Expected err require.ErrorAssertionFunc treeContainsFileIDsWithParent map[string]string - statsNumAddedFiles int - statsNumBytes int64 + countLiveFiles int + countTotalBytes int64 } table := []struct { @@ -1352,7 +1536,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file { name: "one file at root", tree: treeWithRoot(), - page: rootAnd(driveItem(id(file), name(file), parent(0, name(folder)), rootID, isFile)), + page: pageItems(driveItem(id(file), name(file), parentDir(name(folder)), rootID, isFile)), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 0, @@ -1363,16 +1547,16 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file treeContainsFileIDsWithParent: map[string]string{ id(file): rootID, }, - statsNumAddedFiles: 1, - statsNumBytes: defaultItemSize, + countLiveFiles: 1, + countTotalBytes: defaultItemSize, }, }, { name: "one file in a folder", tree: newFolderyMcFolderFace(nil), - page: rootAnd( - driveItem(id(folder), name(folder), parent(0), rootID, isFolder), - driveItem(id(file), name(file), parent(0, name(folder)), id(folder), isFile)), + page: pageItems( + driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), + driveItem(id(file), name(file), parentDir(name(folder)), id(folder), isFile)), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 0, @@ -1383,17 +1567,17 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file treeContainsFileIDsWithParent: map[string]string{ id(file): id(folder), }, - statsNumAddedFiles: 1, - statsNumBytes: defaultItemSize, + countLiveFiles: 1, + countTotalBytes: defaultItemSize, }, }, { name: "many files in a hierarchy", tree: treeWithRoot(), - page: rootAnd( - driveItem(id(file), name(file), 
parent(0), rootID, isFile), - driveItem(id(folder), name(folder), parent(0), rootID, isFolder), - driveItem(idx(file, "chld"), namex(file, "chld"), parent(0, name(folder)), id(folder), isFile)), + page: pageItems( + driveItem(id(file), name(file), parentDir(), rootID, isFile), + driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), + driveItem(idx(file, "chld"), namex(file, "chld"), parentDir(name(folder)), id(folder), isFile)), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 0, @@ -1405,17 +1589,17 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file id(file): rootID, idx(file, "chld"): id(folder), }, - statsNumAddedFiles: 2, - statsNumBytes: defaultItemSize * 2, + countLiveFiles: 2, + countTotalBytes: defaultItemSize * 2, }, }, { name: "many updates to the same file", tree: treeWithRoot(), - page: rootAnd( - driveItem(id(file), name(file), parent(0), rootID, isFile), - driveItem(id(file), namex(file, 1), parent(0), rootID, isFile), - driveItem(id(file), namex(file, 2), parent(0), rootID, isFile)), + page: pageItems( + driveItem(id(file), name(file), parentDir(), rootID, isFile), + driveItem(id(file), namex(file, 1), parentDir(), rootID, isFile), + driveItem(id(file), namex(file, 2), parentDir(), rootID, isFile)), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 0, @@ -1426,14 +1610,14 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file treeContainsFileIDsWithParent: map[string]string{ id(file): rootID, }, - statsNumAddedFiles: 1, - statsNumBytes: defaultItemSize, + countLiveFiles: 1, + countTotalBytes: defaultItemSize, }, }, { name: "delete an existing file", tree: treeWithFileAtRoot(), - page: rootAnd(delItem(id(file), parent(0), rootID, isFile)), + page: pageItems(delItem(id(file), parentDir(), rootID, isFile)), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 1, @@ -1442,16 +1626,16 @@ func (suite 
*CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file }, err: require.NoError, treeContainsFileIDsWithParent: map[string]string{}, - statsNumAddedFiles: -1, - statsNumBytes: 0, + countLiveFiles: 0, + countTotalBytes: 0, }, }, { name: "delete the same file twice", tree: treeWithFileAtRoot(), - page: rootAnd( - delItem(id(file), parent(0), rootID, isFile), - delItem(id(file), parent(0), rootID, isFile)), + page: pageItems( + delItem(id(file), parentDir(), rootID, isFile), + delItem(id(file), parentDir(), rootID, isFile)), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 2, @@ -1460,16 +1644,16 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file }, err: require.NoError, treeContainsFileIDsWithParent: map[string]string{}, - statsNumAddedFiles: -1, - statsNumBytes: 0, + countLiveFiles: 0, + countTotalBytes: 0, }, }, { name: "create->delete", tree: treeWithRoot(), - page: rootAnd( - driveItem(id(file), name(file), parent(0), rootID, isFile), - delItem(id(file), parent(0), rootID, isFile)), + page: pageItems( + driveItem(id(file), name(file), parentDir(), rootID, isFile), + delItem(id(file), parentDir(), rootID, isFile)), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 1, @@ -1478,17 +1662,17 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file }, err: require.NoError, treeContainsFileIDsWithParent: map[string]string{}, - statsNumAddedFiles: 0, - statsNumBytes: defaultItemSize, + countLiveFiles: 0, + countTotalBytes: 0, }, }, { name: "move->delete", tree: treeWithFileAtRoot(), - page: rootAnd( - driveItem(id(folder), name(folder), parent(0), rootID, isFolder), - driveItem(id(file), name(file), parent(0, name(folder)), id(folder), isFile), - delItem(id(file), parent(0, name(folder)), id(folder), isFile)), + page: pageItems( + driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), + driveItem(id(file), name(file), 
parentDir(name(folder)), id(folder), isFile), + delItem(id(file), parentDir(name(folder)), id(folder), isFile)), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 1, @@ -1497,16 +1681,16 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file }, err: require.NoError, treeContainsFileIDsWithParent: map[string]string{}, - statsNumAddedFiles: -1, - statsNumBytes: 0, + countLiveFiles: 0, + countTotalBytes: 0, }, }, { name: "delete->create an existing file", tree: treeWithFileAtRoot(), - page: rootAnd( - delItem(id(file), parent(0), rootID, isFile), - driveItem(id(file), name(file), parent(0), rootID, isFile)), + page: pageItems( + delItem(id(file), parentDir(), rootID, isFile), + driveItem(id(file), name(file), parentDir(), rootID, isFile)), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 1, @@ -1517,16 +1701,16 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file treeContainsFileIDsWithParent: map[string]string{ id(file): rootID, }, - statsNumAddedFiles: 0, - statsNumBytes: defaultItemSize, + countLiveFiles: 1, + countTotalBytes: defaultItemSize, }, }, { name: "delete->create a non-existing file", tree: treeWithRoot(), - page: rootAnd( - delItem(id(file), parent(0), rootID, isFile), - driveItem(id(file), name(file), parent(0), rootID, isFile)), + page: pageItems( + delItem(id(file), parentDir(), rootID, isFile), + driveItem(id(file), name(file), parentDir(), rootID, isFile)), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 1, @@ -1537,8 +1721,8 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file treeContainsFileIDsWithParent: map[string]string{ id(file): rootID, }, - statsNumAddedFiles: 1, - statsNumBytes: defaultItemSize, + countLiveFiles: 1, + countTotalBytes: defaultItemSize, }, }, } @@ -1551,20 +1735,20 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file c := 
collWithMBH(mock.DefaultOneDriveBH(user)) counter := count.New() - stats := &driveEnumerationStats{} err := c.enumeratePageOfItems( ctx, test.tree, - newPagerLimiter(control.DefaultOptions()), - stats, drv, test.page, + newPagerLimiter(control.DefaultOptions()), counter, fault.New(true)) test.expect.err(t, err, clues.ToCore(err)) - assert.Equal(t, test.expect.statsNumAddedFiles, stats.numAddedFiles, "num added files") - assert.Equal(t, test.expect.statsNumBytes, stats.numBytes, "num bytes") + + countSize := test.tree.countLiveFilesAndSizes() + assert.Equal(t, test.expect.countLiveFiles, countSize.numFiles, "count of files") + assert.Equal(t, test.expect.countTotalBytes, countSize.totalBytes, "total size in bytes") assert.Equal(t, test.expect.treeContainsFileIDsWithParent, test.tree.fileIDToParentID) test.expect.counts.Compare(t, counter) }) @@ -1579,11 +1763,11 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { type expected struct { counts countTD.Expected err require.ErrorAssertionFunc + shouldHitLimit bool skipped assert.ValueAssertionFunc - treeFileCount int treeContainsFileIDsWithParent map[string]string - statsNumAddedFiles int - statsNumBytes int64 + countLiveFiles int + countTotalBytes int64 } table := []struct { @@ -1596,45 +1780,43 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { { name: "add new file", tree: treeWithRoot(), - file: driveItem(id(file), name(file), parent(0), rootID, isFile), + file: driveItem(id(file), name(file), parentDir(), rootID, isFile), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ count.TotalFilesProcessed: 1, }, - err: require.NoError, - skipped: assert.Nil, - treeFileCount: 1, + err: require.NoError, + skipped: assert.Nil, treeContainsFileIDsWithParent: map[string]string{ id(file): rootID, }, - statsNumAddedFiles: 1, - statsNumBytes: defaultItemSize, + countLiveFiles: 1, + countTotalBytes: defaultItemSize, }, }, { name: 
"duplicate file", tree: treeWithFileAtRoot(), - file: driveItem(id(file), name(file), parent(0), rootID, isFile), + file: driveItem(id(file), name(file), parentDir(), rootID, isFile), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ count.TotalFilesProcessed: 1, }, - err: require.NoError, - skipped: assert.Nil, - treeFileCount: 1, + err: require.NoError, + skipped: assert.Nil, treeContainsFileIDsWithParent: map[string]string{ id(file): rootID, }, - statsNumAddedFiles: 0, - statsNumBytes: 0, + countLiveFiles: 1, + countTotalBytes: defaultItemSize, }, }, { name: "error file seen before parent", tree: treeWithRoot(), - file: driveItem(id(file), name(file), parent(0, name(folder)), id(folder), isFile), + file: driveItem(id(file), name(file), parentDir(name(folder)), id(folder), isFile), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1642,16 +1824,15 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { }, err: require.Error, skipped: assert.Nil, - treeFileCount: 0, treeContainsFileIDsWithParent: map[string]string{}, - statsNumAddedFiles: 0, - statsNumBytes: 0, + countLiveFiles: 0, + countTotalBytes: 0, }, }, { name: "malware file", tree: treeWithRoot(), - file: malwareItem(id(file), name(file), parent(0, name(folder)), rootID, isFile), + file: malwareItem(id(file), name(file), parentDir(name(folder)), rootID, isFile), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1659,16 +1840,15 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { }, err: require.NoError, skipped: assert.NotNil, - treeFileCount: 0, treeContainsFileIDsWithParent: map[string]string{}, - statsNumAddedFiles: 0, - statsNumBytes: 0, + countLiveFiles: 0, + countTotalBytes: 0, }, }, { name: "delete non-existing file", tree: treeWithRoot(), - file: delItem(id(file), parent(0, name(folder)), id(folder), isFile), + 
file: delItem(id(file), parentDir(name(folder)), id(folder), isFile), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1676,16 +1856,15 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { }, err: require.NoError, skipped: assert.Nil, - treeFileCount: 0, treeContainsFileIDsWithParent: map[string]string{}, - statsNumAddedFiles: 0, - statsNumBytes: 0, + countLiveFiles: 0, + countTotalBytes: 0, }, }, { name: "delete existing file", tree: treeWithFileAtRoot(), - file: delItem(id(file), parent(0), rootID, isFile), + file: delItem(id(file), parentDir(), rootID, isFile), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1693,46 +1872,45 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { }, err: require.NoError, skipped: assert.Nil, - treeFileCount: 0, treeContainsFileIDsWithParent: map[string]string{}, - statsNumAddedFiles: -1, - statsNumBytes: 0, + countLiveFiles: 0, + countTotalBytes: 0, }, }, { name: "already at container file limit", tree: treeWithFileAtRoot(), - file: driveItem(id(file), name(file), parent(0), rootID, isFile), + file: driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile), limiter: newPagerLimiter(minimumLimitOpts()), expect: expected{ counts: countTD.Expected{ count.TotalFilesProcessed: 1, }, - err: require.NoError, - skipped: assert.Nil, - treeFileCount: 1, + err: require.Error, + shouldHitLimit: true, + skipped: assert.Nil, treeContainsFileIDsWithParent: map[string]string{ id(file): rootID, }, - statsNumAddedFiles: 0, - statsNumBytes: 0, + countLiveFiles: 1, + countTotalBytes: defaultItemSize, }, }, { name: "goes over total byte limit", tree: treeWithRoot(), - file: driveItem(id(file), name(file), parent(0), rootID, isFile), + file: driveItem(id(file), name(file), parentDir(), rootID, isFile), limiter: newPagerLimiter(minimumLimitOpts()), expect: expected{ counts: countTD.Expected{ 
count.TotalFilesProcessed: 1, }, - err: require.NoError, + err: require.Error, + shouldHitLimit: true, skipped: assert.Nil, - treeFileCount: 0, treeContainsFileIDsWithParent: map[string]string{}, - statsNumAddedFiles: 0, - statsNumBytes: 0, + countLiveFiles: 0, + countTotalBytes: 0, }, }, } @@ -1745,7 +1923,6 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { c := collWithMBH(mock.DefaultOneDriveBH(user)) counter := count.New() - stats := &driveEnumerationStats{} skipped, err := c.addFileToTree( ctx, @@ -1753,15 +1930,21 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { drv, test.file, test.limiter, - stats, counter) + test.expect.err(t, err, clues.ToCore(err)) test.expect.skipped(t, skipped) - assert.Len(t, test.tree.fileIDToParentID, test.expect.treeFileCount, "count of files in tree") + + if test.expect.shouldHitLimit { + require.ErrorIs(t, err, errHitLimit, clues.ToCore(err)) + } + assert.Equal(t, test.expect.treeContainsFileIDsWithParent, test.tree.fileIDToParentID) test.expect.counts.Compare(t, counter) - assert.Equal(t, test.expect.statsNumAddedFiles, stats.numAddedFiles) - assert.Equal(t, test.expect.statsNumBytes, stats.numBytes) + + countSize := test.tree.countLiveFilesAndSizes() + assert.Equal(t, test.expect.countLiveFiles, countSize.numFiles, "count of files") + assert.Equal(t, test.expect.countTotalBytes, countSize.totalBytes, "total size in bytes") }) } } diff --git a/src/internal/m365/collection/drive/delta_tree.go b/src/internal/m365/collection/drive/delta_tree.go index dd95640df..8bfafce44 100644 --- a/src/internal/m365/collection/drive/delta_tree.go +++ b/src/internal/m365/collection/drive/delta_tree.go @@ -80,8 +80,8 @@ type nodeyMcNodeFace struct { prev path.Elements // folderID -> node children map[string]*nodeyMcNodeFace - // file item ID -> last modified time - files map[string]time.Time + // file item ID -> file metadata + files map[string]fileyMcFileFace // for special handling protocols 
around packages isPackage bool } @@ -96,11 +96,16 @@ func newNodeyMcNodeFace( id: id, name: name, children: map[string]*nodeyMcNodeFace{}, - files: map[string]time.Time{}, + files: map[string]fileyMcFileFace{}, isPackage: isPackage, } } +type fileyMcFileFace struct { + lastModified time.Time + contentSize int64 +} + // --------------------------------------------------------------------------- // folder handling // --------------------------------------------------------------------------- @@ -114,10 +119,10 @@ func (face *folderyMcFolderFace) containsFolder(id string) bool { return stillKicking || alreadyBuried } -// CountNodes returns a count that is the sum of live folders and -// tombstones recorded in the tree. -func (face *folderyMcFolderFace) countFolders() int { - return len(face.tombstones) + len(face.folderIDToNode) +// countLiveFolders returns a count of the number of folders held in the tree. +// Tombstones are not included in the count. Only live folders. +func (face *folderyMcFolderFace) countLiveFolders() int { + return len(face.folderIDToNode) } func (face *folderyMcFolderFace) getNode(id string) *nodeyMcNodeFace { @@ -264,12 +269,52 @@ func (face *folderyMcFolderFace) setTombstone( return nil } +type countAndSize struct { + numFiles int + totalBytes int64 +} + +// countLiveFilesAndSizes returns a count of the number of files in the tree +// and the sum of all of their sizes. Only includes files that are not +// children of tombstoned containers. If running an incremental backup, a +// live file may be either a creation or an update. 
+func (face *folderyMcFolderFace) countLiveFilesAndSizes() countAndSize { + return countFilesAndSizes(face.root) +} + +func countFilesAndSizes(nodey *nodeyMcNodeFace) countAndSize { + if nodey == nil { + return countAndSize{} + } + + var ( + fileCount int + sumContentSize int64 + ) + + for _, child := range nodey.children { + countSize := countFilesAndSizes(child) + fileCount += countSize.numFiles + sumContentSize += countSize.totalBytes + } + + for _, file := range nodey.files { + sumContentSize += file.contentSize + } + + return countAndSize{ + numFiles: fileCount + len(nodey.files), + totalBytes: sumContentSize, + } +} + // addFile places the file in the correct parent node. If the // file was already added to the tree and is getting relocated, // this func will update and/or clean up all the old references. func (face *folderyMcFolderFace) addFile( parentID, id string, - lastModifed time.Time, + lastModified time.Time, + contentSize int64, ) error { if len(parentID) == 0 { return clues.New("item added without parent folder ID") @@ -298,7 +343,10 @@ func (face *folderyMcFolderFace) addFile( } face.fileIDToParentID[id] = parentID - parent.files[id] = lastModifed + parent.files[id] = fileyMcFileFace{ + lastModified: lastModified, + contentSize: contentSize, + } delete(face.deletedFileIDs, id) diff --git a/src/internal/m365/collection/drive/delta_tree_test.go b/src/internal/m365/collection/drive/delta_tree_test.go index ec832b00c..161f1e6b4 100644 --- a/src/internal/m365/collection/drive/delta_tree_test.go +++ b/src/internal/m365/collection/drive/delta_tree_test.go @@ -40,6 +40,7 @@ func treeWithFolders() *folderyMcFolderFace { o := newNodeyMcNodeFace(tree.root, idx(folder, "parent"), namex(folder, "parent"), true) tree.folderIDToNode[o.id] = o + tree.root.children[o.id] = o f := newNodeyMcNodeFace(o, id(folder), name(folder), false) tree.folderIDToNode[f.id] = f @@ -49,16 +50,22 @@ func treeWithFolders() *folderyMcFolderFace { } func treeWithFileAtRoot() 
*folderyMcFolderFace { - tree := treeWithFolders() - tree.root.files[id(file)] = time.Now() + tree := treeWithRoot() + tree.root.files[id(file)] = fileyMcFileFace{ + lastModified: time.Now(), + contentSize: 42, + } tree.fileIDToParentID[id(file)] = rootID return tree } func treeWithFileInFolder() *folderyMcFolderFace { - tree := treeWithFileAtRoot() - tree.folderIDToNode[id(folder)].files[id(file)] = time.Now() + tree := treeWithFolders() + tree.folderIDToNode[id(folder)].files[id(file)] = fileyMcFileFace{ + lastModified: time.Now(), + contentSize: 42, + } tree.fileIDToParentID[id(file)] = id(folder) return tree @@ -66,7 +73,10 @@ func treeWithFileInFolder() *folderyMcFolderFace { func treeWithFileInTombstone() *folderyMcFolderFace { tree := treeWithTombstone() - tree.tombstones[id(folder)].files[id(file)] = time.Now() + tree.tombstones[id(folder)].files[id(file)] = fileyMcFileFace{ + lastModified: time.Now(), + contentSize: 42, + } tree.fileIDToParentID[id(file)] = id(folder) return tree @@ -689,6 +699,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() { tree *folderyMcFolderFace oldParentID string parentID string + contentSize int64 expectErr assert.ErrorAssertionFunc expectFiles map[string]string }{ @@ -697,6 +708,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() { tree: treeWithRoot(), oldParentID: "", parentID: rootID, + contentSize: 42, expectErr: assert.NoError, expectFiles: map[string]string{id(file): rootID}, }, @@ -705,6 +717,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() { tree: treeWithFolders(), oldParentID: "", parentID: id(folder), + contentSize: 24, expectErr: assert.NoError, expectFiles: map[string]string{id(file): id(folder)}, }, @@ -713,6 +726,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() { tree: treeWithFileAtRoot(), oldParentID: rootID, parentID: rootID, + contentSize: 84, expectErr: assert.NoError, expectFiles: map[string]string{id(file): rootID}, }, 
@@ -721,6 +735,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() { tree: treeWithFileInFolder(), oldParentID: id(folder), parentID: rootID, + contentSize: 48, expectErr: assert.NoError, expectFiles: map[string]string{id(file): rootID}, }, @@ -729,6 +744,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() { tree: treeWithFileInTombstone(), oldParentID: id(folder), parentID: rootID, + contentSize: 2, expectErr: assert.NoError, expectFiles: map[string]string{id(file): rootID}, }, @@ -737,6 +753,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() { tree: treeWithTombstone(), oldParentID: "", parentID: id(folder), + contentSize: 4, expectErr: assert.Error, expectFiles: map[string]string{}, }, @@ -745,6 +762,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() { tree: treeWithTombstone(), oldParentID: "", parentID: idx(folder, 1), + contentSize: 8, expectErr: assert.Error, expectFiles: map[string]string{}, }, @@ -753,6 +771,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() { tree: treeWithTombstone(), oldParentID: "", parentID: "", + contentSize: 16, expectErr: assert.Error, expectFiles: map[string]string{}, }, @@ -764,7 +783,8 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() { err := test.tree.addFile( test.parentID, id(file), - time.Now()) + time.Now(), + test.contentSize) test.expectErr(t, err, clues.ToCore(err)) assert.Equal(t, test.expectFiles, test.tree.fileIDToParentID) @@ -777,6 +797,10 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() { require.NotNil(t, parent) assert.Contains(t, parent.files, id(file)) + countSize := test.tree.countLiveFilesAndSizes() + assert.Equal(t, 1, countSize.numFiles, "should have one file in the tree") + assert.Equal(t, test.contentSize, countSize.totalBytes, "tree should be sized to test file contents") + if len(test.oldParentID) > 0 && test.oldParentID != test.parentID { old, ok := 
test.tree.folderIDToNode[test.oldParentID] if !ok { @@ -848,7 +872,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_addAndDeleteFile() { assert.Len(t, tree.deletedFileIDs, 1) assert.Contains(t, tree.deletedFileIDs, fID) - err := tree.addFile(rootID, fID, time.Now()) + err := tree.addFile(rootID, fID, time.Now(), defaultItemSize) require.NoError(t, err, clues.ToCore(err)) assert.Len(t, tree.fileIDToParentID, 1) diff --git a/src/internal/m365/collection/drive/limiter.go b/src/internal/m365/collection/drive/limiter.go index 8b64acb60..8c508be90 100644 --- a/src/internal/m365/collection/drive/limiter.go +++ b/src/internal/m365/collection/drive/limiter.go @@ -6,9 +6,6 @@ import ( "github.com/alcionai/corso/src/pkg/control" ) -// used to mark an unused variable while we transition handling. -const ignoreMe = -1 - var errHitLimit = clues.New("hit limiter limits") type driveEnumerationStats struct { @@ -62,10 +59,6 @@ func (l pagerLimiter) sizeLimit() int64 { return l.limits.MaxBytes } -func (l pagerLimiter) aboveSizeLimit(i int64) bool { - return l.limits.Enabled && (i >= l.limits.MaxBytes) -} - // atItemLimit returns true if the limiter is enabled and has reached the limit // for individual items added to collections for this backup. func (l pagerLimiter) atItemLimit(stats *driveEnumerationStats) bool { @@ -81,7 +74,7 @@ func (l pagerLimiter) atContainerItemsLimit(numItems int) bool { return l.enabled() && numItems >= l.limits.MaxItemsPerContainer } -// atContainerPageLimit returns true if the limiter is enabled and the number of +// atPageLimit returns true if the limiter is enabled and the number of // pages processed so far is beyond the limit for this backup. 
func (l pagerLimiter) atPageLimit(stats *driveEnumerationStats) bool { return l.enabled() && stats.numPages >= l.limits.MaxPages @@ -89,17 +82,38 @@ func (l pagerLimiter) atPageLimit(stats *driveEnumerationStats) bool { // atLimit returns true if the limiter is enabled and meets any of the // conditions for max items, containers, etc for this backup. -func (l pagerLimiter) atLimit( - stats *driveEnumerationStats, - containerCount int, -) bool { - nc := stats.numContainers - if nc == 0 && containerCount > 0 { - nc = containerCount - } - +func (l pagerLimiter) atLimit(stats *driveEnumerationStats) bool { return l.enabled() && (l.atItemLimit(stats) || - nc >= l.limits.MaxContainers || + stats.numContainers >= l.limits.MaxContainers || stats.numPages >= l.limits.MaxPages) } + +// --------------------------------------------------------------------------- +// Used by the tree version limit handling +// --------------------------------------------------------------------------- + +// hitPageLimit returns true if the limiter is enabled and the number of +// pages processed so far is beyond the limit for this backup. +func (l pagerLimiter) hitPageLimit(pageCount int) bool { + return l.enabled() && pageCount >= l.limits.MaxPages +} + +// hitContainerLimit returns true if the limiter is enabled and the number of +// unique containers added so far is beyond the limit for this backup. +func (l pagerLimiter) hitContainerLimit(containerCount int) bool { + return l.enabled() && containerCount >= l.limits.MaxContainers +} + +// hitItemLimit returns true if the limiter is enabled and has reached the limit +// for unique items added to collections for this backup. 
+func (l pagerLimiter) hitItemLimit(itemCount int) bool { + return l.enabled() && itemCount >= l.limits.MaxItems +} + +// hitTotalBytesLimit returns true if the limiter is enabled and has reached the limit +// for the accumulated byte size of all items (the file contents, not the item metadata) +// added to collections for this backup. +func (l pagerLimiter) hitTotalBytesLimit(i int64) bool { + return l.enabled() && i >= l.limits.MaxBytes +} diff --git a/src/internal/m365/collection/drive/limiter_test.go b/src/internal/m365/collection/drive/limiter_test.go index 2fa1317e7..920d8fc20 100644 --- a/src/internal/m365/collection/drive/limiter_test.go +++ b/src/internal/m365/collection/drive/limiter_test.go @@ -11,15 +11,12 @@ import ( "github.com/stretchr/testify/suite" "golang.org/x/exp/maps" - "github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/prefixmatcher" "github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/m365/service/onedrive/mock" - "github.com/alcionai/corso/src/internal/m365/support" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/control" - "github.com/alcionai/corso/src/pkg/count" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/path" apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock" @@ -54,36 +51,26 @@ func TestLimiterUnitSuite(t *testing.T) { suite.Run(t, &LimiterUnitSuite{Suite: tester.NewUnitSuite(t)}) } -// TestGet_PreviewLimits checks that the limits set for preview backups in -// control.Options.ItemLimits are respected. These tests run a reduced set of -// checks that don't examine metadata, collection states, etc. They really just -// check the expected items appear. 
-func (suite *LimiterUnitSuite) TestGet_PreviewLimits() { - metadataPath, err := path.BuildMetadata( - tenant, - user, - path.OneDriveService, - path.FilesCategory, - false) - require.NoError(suite.T(), err, "making metadata path", clues.ToCore(err)) +type backupLimitTest struct { + name string + limits control.PreviewItemLimits + drives []models.Driveable + enumerator mock.EnumerateItemsDeltaByDrive + // Collection name -> set of item IDs. We can't check item data because + // that's not mocked out. Metadata is checked separately. + expectedItemIDsInCollection map[string][]string +} +func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) { drive1 := models.NewDrive() - drive1.SetId(ptr.To(idx(drive, 1))) - drive1.SetName(ptr.To(namex(drive, 1))) + drive1.SetId(ptr.To(id(drive))) + drive1.SetName(ptr.To(name(drive))) drive2 := models.NewDrive() drive2.SetId(ptr.To(idx(drive, 2))) drive2.SetName(ptr.To(namex(drive, 2))) - table := []struct { - name string - limits control.PreviewItemLimits - drives []models.Driveable - enumerator mock.EnumerateItemsDeltaByDrive - // Collection name -> set of item IDs. We can't check item data because - // that's not mocked out. Metadata is checked separately. 
- expectedCollections map[string][]string - }{ + tbl := []backupLimitTest{ { name: "OneDrive SinglePage ExcludeItemsOverMaxSize", limits: control.PreviewItemLimits{ @@ -97,21 +84,17 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{{ - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItemWithSize(idx(file, 1), namex(file, 1), parent(1), rootID, 7, isFile), - driveItemWithSize(idx(file, 2), namex(file, 2), parent(1), rootID, 1, isFile), - driveItemWithSize(idx(file, 3), namex(file, 3), parent(1), rootID, 1, isFile), - }, - }}, + id(drive): { + Pages: pagesOf(pageItems( + driveItemWithSize(idx(file, 1), namex(file, 1), parentDir(), rootID, 7, isFile), + driveItemWithSize(idx(file, 2), namex(file, 2), parentDir(), rootID, 1, isFile), + driveItemWithSize(idx(file, 3), namex(file, 3), parentDir(), rootID, 1, isFile))), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, }, - expectedCollections: map[string][]string{ - fullPath(1): {idx(file, 2), idx(file, 3)}, + expectedItemIDsInCollection: map[string][]string{ + fullPath(): {idx(file, 2), idx(file, 3)}, }, }, { @@ -127,21 +110,17 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{{ - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItemWithSize(idx(file, 1), namex(file, 1), parent(1), rootID, 1, isFile), - driveItemWithSize(idx(file, 2), namex(file, 2), parent(1), rootID, 2, isFile), - driveItemWithSize(idx(file, 3), namex(file, 3), parent(1), rootID, 1, isFile), - }, - }}, + id(drive): { + Pages: pagesOf(pageItems( + driveItemWithSize(idx(file, 1), namex(file, 
1), parentDir(), rootID, 1, isFile), + driveItemWithSize(idx(file, 2), namex(file, 2), parentDir(), rootID, 2, isFile), + driveItemWithSize(idx(file, 3), namex(file, 3), parentDir(), rootID, 1, isFile))), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, }, - expectedCollections: map[string][]string{ - fullPath(1): {idx(file, 1), idx(file, 2)}, + expectedItemIDsInCollection: map[string][]string{ + fullPath(): {idx(file, 1), idx(file, 2)}, }, }, { @@ -157,23 +136,19 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{{ - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItemWithSize(idx(file, 1), namex(file, 1), parent(1), rootID, 1, isFile), - driveItemWithSize(idx(folder, 1), namex(folder, 1), parent(1), rootID, 1, isFolder), - driveItemWithSize(idx(file, 2), namex(file, 2), parent(1, namex(folder, 1)), idx(folder, 1), 2, isFile), - driveItemWithSize(idx(file, 3), namex(file, 3), parent(1, namex(folder, 1)), idx(folder, 1), 1, isFile), - }, - }}, + id(drive): { + Pages: pagesOf(pageItems( + driveItemWithSize(idx(file, 1), namex(file, 1), parentDir(), rootID, 1, isFile), + driveItemWithSize(idx(folder, 1), namex(folder, 1), parentDir(), rootID, 1, isFolder), + driveItemWithSize(idx(file, 2), namex(file, 2), parentDir(namex(folder, 1)), idx(folder, 1), 2, isFile), + driveItemWithSize(idx(file, 3), namex(file, 3), parentDir(namex(folder, 1)), idx(folder, 1), 1, isFile))), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, }, - expectedCollections: map[string][]string{ - fullPath(1): {idx(file, 1)}, - fullPath(1, namex(folder, 1)): {idx(folder, 1), idx(file, 2)}, + expectedItemIDsInCollection: map[string][]string{ + fullPath(): {idx(file, 1)}, + fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 2)}, }, }, { @@ -189,24 +164,20 @@ 
func (suite *LimiterUnitSuite) TestGet_PreviewLimits() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{{ - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItem(idx(file, 1), namex(file, 1), parent(1), rootID, isFile), - driveItem(idx(file, 2), namex(file, 2), parent(1), rootID, isFile), - driveItem(idx(file, 3), namex(file, 3), parent(1), rootID, isFile), - driveItem(idx(file, 4), namex(file, 4), parent(1), rootID, isFile), - driveItem(idx(file, 5), namex(file, 5), parent(1), rootID, isFile), - driveItem(idx(file, 6), namex(file, 6), parent(1), rootID, isFile), - }, - }}, + id(drive): { + Pages: pagesOf(pageItems( + driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile), + driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile), + driveItem(idx(file, 3), namex(file, 3), parentDir(), rootID, isFile), + driveItem(idx(file, 4), namex(file, 4), parentDir(), rootID, isFile), + driveItem(idx(file, 5), namex(file, 5), parentDir(), rootID, isFile), + driveItem(idx(file, 6), namex(file, 6), parentDir(), rootID, isFile))), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, }, - expectedCollections: map[string][]string{ - fullPath(1): {idx(file, 1), idx(file, 2), idx(file, 3)}, + expectedItemIDsInCollection: map[string][]string{ + fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)}, }, }, { @@ -222,34 +193,26 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItem(idx(file, 1), namex(file, 1), parent(1), rootID, isFile), - driveItem(idx(file, 2), namex(file, 2), parent(1), rootID, 
isFile), - }, - }, - { - Items: []models.DriveItemable{ - // Repeated items shouldn't count against the limit. - driveItem(idx(file, 1), namex(file, 1), parent(1), rootID, isFile), - driveItem(idx(folder, 1), namex(folder, 1), parent(1), rootID, isFolder), - driveItem(idx(file, 3), namex(file, 3), parent(1, namex(folder, 1)), idx(folder, 1), isFile), - driveItem(idx(file, 4), namex(file, 4), parent(1, namex(folder, 1)), idx(folder, 1), isFile), - driveItem(idx(file, 5), namex(file, 5), parent(1, namex(folder, 1)), idx(folder, 1), isFile), - driveItem(idx(file, 6), namex(file, 6), parent(1, namex(folder, 1)), idx(folder, 1), isFile), - }, - }, - }, + id(drive): { + Pages: pagesOf( + pageItems( + driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile), + driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile)), + pageItems( + // Repeated items shouldn't count against the limit. + driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile), + driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder), + driveItem(idx(file, 3), namex(file, 3), parentDir(namex(folder, 1)), idx(folder, 1), isFile), + driveItem(idx(file, 4), namex(file, 4), parentDir(namex(folder, 1)), idx(folder, 1), isFile), + driveItem(idx(file, 5), namex(file, 5), parentDir(namex(folder, 1)), idx(folder, 1), isFile), + driveItem(idx(file, 6), namex(file, 6), parentDir(namex(folder, 1)), idx(folder, 1), isFile))), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, }, - expectedCollections: map[string][]string{ - fullPath(1): {idx(file, 1), idx(file, 2)}, - fullPath(1, namex(folder, 1)): {idx(folder, 1), idx(file, 3)}, + expectedItemIDsInCollection: map[string][]string{ + fullPath(): {idx(file, 1), idx(file, 2)}, + fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 3)}, }, }, { @@ -265,32 +228,23 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ 
DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItem(idx(file, 1), namex(file, 1), parent(1), rootID, isFile), - driveItem(idx(file, 2), namex(file, 2), parent(1), rootID, isFile), - }, - }, - { - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItem(idx(folder, 1), namex(folder, 1), parent(1), rootID, isFolder), - driveItem(idx(file, 3), namex(file, 3), parent(1, namex(folder, 1)), idx(folder, 1), isFile), - driveItem(idx(file, 4), namex(file, 4), parent(1, namex(folder, 1)), idx(folder, 1), isFile), - driveItem(idx(file, 5), namex(file, 5), parent(1, namex(folder, 1)), idx(folder, 1), isFile), - driveItem(idx(file, 6), namex(file, 6), parent(1, namex(folder, 1)), idx(folder, 1), isFile), - }, - }, - }, + id(drive): { + Pages: pagesOf( + pageItems( + driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile), + driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile)), + pageItems( + driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder), + driveItem(idx(file, 3), namex(file, 3), parentDir(namex(folder, 1)), idx(folder, 1), isFile), + driveItem(idx(file, 4), namex(file, 4), parentDir(namex(folder, 1)), idx(folder, 1), isFile), + driveItem(idx(file, 5), namex(file, 5), parentDir(namex(folder, 1)), idx(folder, 1), isFile), + driveItem(idx(file, 6), namex(file, 6), parentDir(namex(folder, 1)), idx(folder, 1), isFile))), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, }, - expectedCollections: map[string][]string{ - fullPath(1): {idx(file, 1), idx(file, 2)}, + expectedItemIDsInCollection: map[string][]string{ + fullPath(): {idx(file, 1), idx(file, 2)}, }, }, { @@ -306,34 +260,25 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: 
map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItem(idx(file, 1), namex(file, 1), parent(1), rootID, isFile), - driveItem(idx(file, 2), namex(file, 2), parent(1), rootID, isFile), - driveItem(idx(file, 3), namex(file, 3), parent(1), rootID, isFile), - }, - }, - { - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItem(idx(folder, 1), namex(folder, 1), parent(1), rootID, isFolder), - driveItem(idx(file, 4), namex(file, 4), parent(1, namex(folder, 1)), idx(folder, 1), isFile), - driveItem(idx(file, 5), namex(file, 5), parent(1, namex(folder, 1)), idx(folder, 1), isFile), - }, - }, - }, + id(drive): { + Pages: pagesOf( + pageItems( + driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile), + driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile), + driveItem(idx(file, 3), namex(file, 3), parentDir(), rootID, isFile)), + pageItems( + driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder), + driveItem(idx(file, 4), namex(file, 4), parentDir(namex(folder, 1)), idx(folder, 1), isFile), + driveItem(idx(file, 5), namex(file, 5), parentDir(namex(folder, 1)), idx(folder, 1), isFile))), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, }, - expectedCollections: map[string][]string{ + expectedItemIDsInCollection: map[string][]string{ // Root has an additional item. It's hard to fix that in the code // though. 
- fullPath(1): {idx(file, 1), idx(file, 2)}, - fullPath(1, namex(folder, 1)): {idx(folder, 1), idx(file, 4)}, + fullPath(): {idx(file, 1), idx(file, 2)}, + fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4)}, }, }, { @@ -349,34 +294,25 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItem(idx(folder, 0), namex(folder, 0), parent(1), rootID, isFolder), - driveItem(idx(file, 1), namex(file, 1), parent(1, namex(folder, 0)), idx(folder, 0), isFile), - driveItem(idx(file, 2), namex(file, 2), parent(1, namex(folder, 0)), idx(folder, 0), isFile), - }, - }, - { - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItem(idx(folder, 0), namex(folder, 0), parent(1), rootID, isFolder), - // Updated item that shouldn't count against the limit a second time. - driveItem(idx(file, 2), namex(file, 2), parent(1, namex(folder, 0)), idx(folder, 0), isFile), - driveItem(idx(file, 3), namex(file, 3), parent(1, namex(folder, 0)), idx(folder, 0), isFile), - driveItem(idx(file, 4), namex(file, 4), parent(1, namex(folder, 0)), idx(folder, 0), isFile), - }, - }, - }, + id(drive): { + Pages: pagesOf( + pageItems( + driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), + driveItem(idx(file, 1), namex(file, 1), parentDir(name(folder)), id(folder), isFile), + driveItem(idx(file, 2), namex(file, 2), parentDir(name(folder)), id(folder), isFile)), + pageItems( + driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), + // Updated item that shouldn't count against the limit a second time. 
+ driveItem(idx(file, 2), namex(file, 2), parentDir(name(folder)), id(folder), isFile), + driveItem(idx(file, 3), namex(file, 3), parentDir(name(folder)), id(folder), isFile), + driveItem(idx(file, 4), namex(file, 4), parentDir(name(folder)), id(folder), isFile))), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, }, - expectedCollections: map[string][]string{ - fullPath(1): {}, - fullPath(1, namex(folder, 0)): {idx(folder, 0), idx(file, 1), idx(file, 2), idx(file, 3)}, + expectedItemIDsInCollection: map[string][]string{ + fullPath(): {}, + fullPath(name(folder)): {id(folder), idx(file, 1), idx(file, 2), idx(file, 3)}, }, }, { @@ -392,35 +328,26 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItem(idx(file, 1), namex(file, 1), parent(1), rootID, isFile), - driveItem(idx(file, 2), namex(file, 2), parent(1), rootID, isFile), - // Put folder 0 at limit. - driveItem(idx(folder, 0), namex(folder, 0), parent(1), rootID, isFolder), - driveItem(idx(file, 3), namex(file, 3), parent(1, namex(folder, 0)), idx(folder, 0), isFile), - driveItem(idx(file, 4), namex(file, 4), parent(1, namex(folder, 0)), idx(folder, 0), isFile), - }, - }, - { - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItem(idx(folder, 0), namex(folder, 0), parent(1), rootID, isFolder), - // Try to move item from root to folder 0 which is already at the limit. 
- driveItem(idx(file, 1), namex(file, 1), parent(1, namex(folder, 0)), idx(folder, 0), isFile), - }, - }, - }, + id(drive): { + Pages: pagesOf( + pageItems( + driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile), + driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile), + // Put folder 0 at limit. + driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), + driveItem(idx(file, 3), namex(file, 3), parentDir(name(folder)), id(folder), isFile), + driveItem(idx(file, 4), namex(file, 4), parentDir(name(folder)), id(folder), isFile)), + pageItems( + driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), + // Try to move item from root to folder 0 which is already at the limit. + driveItem(idx(file, 1), namex(file, 1), parentDir(name(folder)), id(folder), isFile))), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, }, - expectedCollections: map[string][]string{ - fullPath(1): {idx(file, 1), idx(file, 2)}, - fullPath(1, namex(folder, 0)): {idx(folder, 0), idx(file, 3), idx(file, 4)}, + expectedItemIDsInCollection: map[string][]string{ + fullPath(): {idx(file, 1), idx(file, 2)}, + fullPath(name(folder)): {id(folder), idx(file, 3), idx(file, 4)}, }, }, { @@ -436,38 +363,25 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItem(idx(file, 1), namex(file, 1), parent(1), rootID, isFile), - driveItem(idx(file, 2), namex(file, 2), parent(1), rootID, isFile), - driveItem(idx(file, 3), namex(file, 3), parent(1), rootID, isFile), - }, - }, - { - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItem(idx(folder, 1), namex(folder, 1), parent(1), rootID, isFolder), - driveItem(idx(file, 4), namex(file, 
4), parent(1, namex(folder, 1)), idx(folder, 1), isFile), - }, - }, - { - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItem(idx(folder, 1), namex(folder, 1), parent(1), rootID, isFolder), - driveItem(idx(file, 5), namex(file, 5), parent(1, namex(folder, 1)), idx(folder, 1), isFile), - }, - }, - }, + id(drive): { + Pages: pagesOf( + pageItems( + driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile), + driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile), + driveItem(idx(file, 3), namex(file, 3), parentDir(), rootID, isFile)), + pageItems( + driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder), + driveItem(idx(file, 4), namex(file, 4), parentDir(namex(folder, 1)), idx(folder, 1), isFile)), + pageItems( + driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder), + driveItem(idx(file, 5), namex(file, 5), parentDir(namex(folder, 1)), idx(folder, 1), isFile))), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, }, - expectedCollections: map[string][]string{ - fullPath(1): {idx(file, 1), idx(file, 2), idx(file, 3)}, - fullPath(1, namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)}, + expectedItemIDsInCollection: map[string][]string{ + fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)}, + fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)}, }, }, { @@ -483,37 +397,28 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItem(idx(file, 1), namex(file, 1), parent(1), rootID, isFile), - driveItem(idx(file, 2), namex(file, 2), parent(1), rootID, isFile), - driveItem(idx(file, 3), namex(file, 3), parent(1), rootID, isFile), - }, - }, - { - 
Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItem(idx(folder, 1), namex(folder, 1), parent(1), rootID, isFolder), - driveItem(idx(file, 4), namex(file, 4), parent(1, namex(folder, 1)), idx(folder, 1), isFile), - driveItem(idx(file, 5), namex(file, 5), parent(1, namex(folder, 1)), idx(folder, 1), isFile), - // This container shouldn't be returned. - driveItem(idx(folder, 2), namex(folder, 2), parent(1), rootID, isFolder), - driveItem(idx(file, 7), namex(file, 7), parent(1, namex(folder, 2)), idx(folder, 2), isFile), - driveItem(idx(file, 8), namex(file, 8), parent(1, namex(folder, 2)), idx(folder, 2), isFile), - driveItem(idx(file, 9), namex(file, 9), parent(1, namex(folder, 2)), idx(folder, 2), isFile), - }, - }, - }, + id(drive): { + Pages: pagesOf( + pageItems( + driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile), + driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile), + driveItem(idx(file, 3), namex(file, 3), parentDir(), rootID, isFile)), + pageItems( + driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder), + driveItem(idx(file, 4), namex(file, 4), parentDir(namex(folder, 1)), idx(folder, 1), isFile), + driveItem(idx(file, 5), namex(file, 5), parentDir(namex(folder, 1)), idx(folder, 1), isFile), + // This container shouldn't be returned. 
+ driveItem(idx(folder, 2), namex(folder, 2), parentDir(), rootID, isFolder), + driveItem(idx(file, 7), namex(file, 7), parentDir(namex(folder, 2)), idx(folder, 2), isFile), + driveItem(idx(file, 8), namex(file, 8), parentDir(namex(folder, 2)), idx(folder, 2), isFile), + driveItem(idx(file, 9), namex(file, 9), parentDir(namex(folder, 2)), idx(folder, 2), isFile))), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, }, - expectedCollections: map[string][]string{ - fullPath(1): {idx(file, 1), idx(file, 2), idx(file, 3)}, - fullPath(1, namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)}, + expectedItemIDsInCollection: map[string][]string{ + fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)}, + fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)}, }, }, { @@ -529,42 +434,29 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItem(idx(file, 1), namex(file, 1), parent(1), rootID, isFile), - driveItem(idx(file, 2), namex(file, 2), parent(1), rootID, isFile), - driveItem(idx(file, 3), namex(file, 3), parent(1), rootID, isFile), - }, - }, - { - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItem(idx(folder, 1), namex(folder, 1), parent(1), rootID, isFolder), - driveItem(idx(file, 4), namex(file, 4), parent(1, namex(folder, 1)), idx(folder, 1), isFile), - driveItem(idx(file, 5), namex(file, 5), parent(1, namex(folder, 1)), idx(folder, 1), isFile), - }, - }, - { - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - // This container shouldn't be returned. 
- driveItem(idx(folder, 2), namex(folder, 2), parent(1), rootID, isFolder), - driveItem(idx(file, 7), namex(file, 7), parent(1, namex(folder, 2)), idx(folder, 2), isFile), - driveItem(idx(file, 8), namex(file, 8), parent(1, namex(folder, 2)), idx(folder, 2), isFile), - driveItem(idx(file, 9), namex(file, 9), parent(1, namex(folder, 2)), idx(folder, 2), isFile), - }, - }, - }, + id(drive): { + Pages: pagesOf( + pageItems( + driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile), + driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile), + driveItem(idx(file, 3), namex(file, 3), parentDir(), rootID, isFile)), + pageItems( + driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder), + driveItem(idx(file, 4), namex(file, 4), parentDir(namex(folder, 1)), idx(folder, 1), isFile), + driveItem(idx(file, 5), namex(file, 5), parentDir(namex(folder, 1)), idx(folder, 1), isFile)), + pageItems( + // This container shouldn't be returned. + driveItem(idx(folder, 2), namex(folder, 2), parentDir(), rootID, isFolder), + driveItem(idx(file, 7), namex(file, 7), parentDir(namex(folder, 2)), idx(folder, 2), isFile), + driveItem(idx(file, 8), namex(file, 8), parentDir(namex(folder, 2)), idx(folder, 2), isFile), + driveItem(idx(file, 9), namex(file, 9), parentDir(namex(folder, 2)), idx(folder, 2), isFile))), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, }, - expectedCollections: map[string][]string{ - fullPath(1): {idx(file, 1), idx(file, 2), idx(file, 3)}, - fullPath(1, namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)}, + expectedItemIDsInCollection: map[string][]string{ + fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)}, + fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)}, }, }, { @@ -580,41 +472,29 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits() { drives: []models.Driveable{drive1, drive2}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: 
map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItem(idx(file, 1), namex(file, 1), parent(1), rootID, isFile), - driveItem(idx(file, 2), namex(file, 2), parent(1), rootID, isFile), - driveItem(idx(file, 3), namex(file, 3), parent(1), rootID, isFile), - driveItem(idx(file, 4), namex(file, 4), parent(1), rootID, isFile), - driveItem(idx(file, 5), namex(file, 5), parent(1), rootID, isFile), - }, - }, - }, + id(drive): { + Pages: pagesOf(pageItems( + driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile), + driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile), + driveItem(idx(file, 3), namex(file, 3), parentDir(), rootID, isFile), + driveItem(idx(file, 4), namex(file, 4), parentDir(), rootID, isFile), + driveItem(idx(file, 5), namex(file, 5), parentDir(), rootID, isFile))), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, idx(drive, 2): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItem(idx(file, 1), namex(file, 1), parent(2), rootID, isFile), - driveItem(idx(file, 2), namex(file, 2), parent(2), rootID, isFile), - driveItem(idx(file, 3), namex(file, 3), parent(2), rootID, isFile), - driveItem(idx(file, 4), namex(file, 4), parent(2), rootID, isFile), - driveItem(idx(file, 5), namex(file, 5), parent(2), rootID, isFile), - }, - }, - }, + Pages: pagesOf(pageItems( + driveItem(idx(file, 1), namex(file, 1), driveParentDir(2), rootID, isFile), + driveItem(idx(file, 2), namex(file, 2), driveParentDir(2), rootID, isFile), + driveItem(idx(file, 3), namex(file, 3), driveParentDir(2), rootID, isFile), + driveItem(idx(file, 4), namex(file, 4), driveParentDir(2), rootID, isFile), + driveItem(idx(file, 5), namex(file, 5), driveParentDir(2), rootID, isFile))), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, }, - 
expectedCollections: map[string][]string{ - fullPath(1): {idx(file, 1), idx(file, 2), idx(file, 3)}, - fullPath(2): {idx(file, 1), idx(file, 2), idx(file, 3)}, + expectedItemIDsInCollection: map[string][]string{ + fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)}, + driveFullPath(2): {idx(file, 1), idx(file, 2), idx(file, 3)}, }, }, { @@ -629,152 +509,78 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItem(idx(file, 1), namex(file, 1), parent(1), rootID, isFile), - driveItem(idx(file, 2), namex(file, 2), parent(1), rootID, isFile), - driveItem(idx(file, 3), namex(file, 3), parent(1), rootID, isFile), - }, - }, - { - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItem(idx(folder, 1), namex(folder, 1), parent(1), rootID, isFolder), - driveItem(idx(file, 4), namex(file, 4), parent(1, namex(folder, 1)), idx(folder, 1), isFile), - }, - }, - { - Items: []models.DriveItemable{ - driveRootItem(rootID), // will be present, not needed - driveItem(idx(folder, 1), namex(folder, 1), parent(1), rootID, isFolder), - driveItem(idx(file, 5), namex(file, 5), parent(1, namex(folder, 1)), idx(folder, 1), isFile), - }, - }, - }, + id(drive): { + Pages: pagesOf( + pageItems( + driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile), + driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile), + driveItem(idx(file, 3), namex(file, 3), parentDir(), rootID, isFile)), + pageItems( + driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder), + driveItem(idx(file, 4), namex(file, 4), parentDir(namex(folder, 1)), idx(folder, 1), isFile)), + pageItems( + driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, 
isFolder), + driveItem(idx(file, 5), namex(file, 5), parentDir(namex(folder, 1)), idx(folder, 1), isFile))), DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, }, - expectedCollections: map[string][]string{ - fullPath(1): {idx(file, 1), idx(file, 2), idx(file, 3)}, - fullPath(1, namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)}, + expectedItemIDsInCollection: map[string][]string{ + fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)}, + fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)}, }, }, } - for _, test := range table { + + return drive1, drive2, tbl +} + +// TestGet_PreviewLimits checks that the limits set for preview backups in +// control.Options.ItemLimits are respected. These tests run a reduced set of +// checks that don't examine metadata, collection states, etc. They really just +// check the expected items appear. +func (suite *LimiterUnitSuite) TestGet_PreviewLimits_noTree() { + _, _, tbl := backupLimitTable() + + for _, test := range tbl { suite.Run(test.name, func() { - t := suite.T() - - ctx, flush := tester.NewContext(t) - defer flush() - - mockDrivePager := &apiMock.Pager[models.Driveable]{ - ToReturn: []apiMock.PagerResult[models.Driveable]{ - {Values: test.drives}, - }, - } - - mbh := mock.DefaultOneDriveBH(user) - mbh.DrivePagerV = mockDrivePager - mbh.DriveItemEnumeration = test.enumerator - - opts := control.DefaultOptions() - opts.PreviewLimits = test.limits - - c := NewCollections( - mbh, - tenant, - idname.NewProvider(user, user), - func(*support.ControllerOperationStatus) {}, - opts, - count.New()) - - errs := fault.New(true) - - delList := prefixmatcher.NewStringSetBuilder() - - cols, canUsePreviousBackup, err := c.Get(ctx, nil, delList, errs) - require.NoError(t, err, clues.ToCore(err)) - - assert.True(t, canUsePreviousBackup, "can use previous backup") - assert.Empty(t, errs.Skipped()) - - collPaths := []string{} - - for _, baseCol := range cols { - // There shouldn't be any deleted 
collections. - if !assert.NotEqual( - t, - data.DeletedState, - baseCol.State(), - "collection marked deleted") { - continue - } - - folderPath := baseCol.FullPath().String() - - if folderPath == metadataPath.String() { - continue - } - - collPaths = append(collPaths, folderPath) - - // TODO: We should really be getting items in the collection - // via the Items() channel. The lack of that makes this check a bit more - // bittle since internal details can change. The wiring to support - // mocked GetItems is available. We just haven't plugged it in yet. - col, ok := baseCol.(*Collection) - require.True(t, ok, "getting onedrive.Collection handle") - - itemIDs := make([]string, 0, len(col.driveItems)) - - for id := range col.driveItems { - itemIDs = append(itemIDs, id) - } - - assert.ElementsMatchf( - t, - test.expectedCollections[folderPath], - itemIDs, - "expected elements to match in collection with path %q", - folderPath) - } - - assert.ElementsMatch( - t, - maps.Keys(test.expectedCollections), - collPaths, - "collection paths") + runGetPreviewLimits( + suite.T(), + test, + control.DefaultOptions()) }) } } -// TestGet_PreviewLimits_Defaults checks that default values are used when -// making a preview backup if the user didn't provide some options. -// These tests run a reduced set of checks that really just look for item counts -// and such. Other tests are expected to provide more comprehensive checks. -func (suite *LimiterUnitSuite) TestGet_PreviewLimits_Defaults() { - // Add a check that will fail if we make the default smaller than expected. 
- require.LessOrEqual( - suite.T(), - int64(1024*1024), - defaultPreviewMaxBytes, - "default number of bytes changed; DefaultNumBytes test case may need updating!") - require.Zero( - suite.T(), - defaultPreviewMaxBytes%(1024*1024), - "default number of bytes isn't divisible by 1MB; DefaultNumBytes test case may need updating!") +// TestGet_PreviewLimits checks that the limits set for preview backups in +// control.Options.ItemLimits are respected. These tests run a reduced set of +// checks that don't examine metadata, collection states, etc. They really just +// check the expected items appear. +func (suite *LimiterUnitSuite) TestGet_PreviewLimits_tree() { + suite.T().Skip("TODO: unskip when tree produces collections") - // The number of pages returned can be indirectly tested by checking how many - // containers/items were returned. - type expected struct { - numItems int - numContainers int - numItemsPerContainer int + opts := control.DefaultOptions() + opts.ToggleFeatures.UseDeltaTree = true + + _, _, tbl := backupLimitTable() + + for _, test := range tbl { + suite.Run(test.name, func() { + runGetPreviewLimits( + suite.T(), + test, + opts) + }) } +} + +func runGetPreviewLimits( + t *testing.T, + test backupLimitTest, + opts control.Options, +) { + ctx, flush := tester.NewContext(t) + defer flush() metadataPath, err := path.BuildMetadata( tenant, @@ -782,22 +588,99 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits_Defaults() { path.OneDriveService, path.FilesCategory, false) - require.NoError(suite.T(), err, "making metadata path", clues.ToCore(err)) + require.NoError(t, err, "making metadata path", clues.ToCore(err)) - drive1 := models.NewDrive() - drive1.SetId(ptr.To(idx(drive, 1))) - drive1.SetName(ptr.To(namex(drive, 1))) + opts.PreviewLimits = test.limits - // The number of pages the test generates can be controlled by setting the - // number of containers. The test will put one (non-root) container per page. 
- table := []struct { - name string - numContainers int - numItemsPerContainer int - itemSize int64 - limits control.PreviewItemLimits - expect expected - }{ + var ( + mockDrivePager = &apiMock.Pager[models.Driveable]{ + ToReturn: []apiMock.PagerResult[models.Driveable]{ + {Values: test.drives}, + }, + } + mbh = mock.DefaultDriveBHWith(user, mockDrivePager, test.enumerator) + c = collWithMBHAndOpts(mbh, opts) + errs = fault.New(true) + delList = prefixmatcher.NewStringSetBuilder() + collPaths = []string{} + ) + + cols, canUsePreviousBackup, err := c.Get(ctx, nil, delList, errs) + + if opts.ToggleFeatures.UseDeltaTree { + require.ErrorIs(t, err, errGetTreeNotImplemented, clues.ToCore(err)) + } else { + require.NoError(t, err, clues.ToCore(err)) + } + + assert.True(t, canUsePreviousBackup, "can use previous backup") + assert.Empty(t, errs.Skipped()) + + for _, baseCol := range cols { + // There shouldn't be any deleted collections. + if !assert.NotEqual( + t, + data.DeletedState, + baseCol.State(), + "collection marked deleted") { + continue + } + + folderPath := baseCol.FullPath().String() + + if folderPath == metadataPath.String() { + continue + } + + collPaths = append(collPaths, folderPath) + + // TODO: We should really be getting items in the collection + // via the Items() channel. The lack of that makes this check a bit more + // bittle since internal details can change. The wiring to support + // mocked GetItems is available. We just haven't plugged it in yet. 
+ col, ok := baseCol.(*Collection) + require.True(t, ok, "getting onedrive.Collection handle") + + itemIDs := make([]string, 0, len(col.driveItems)) + + for id := range col.driveItems { + itemIDs = append(itemIDs, id) + } + + assert.ElementsMatchf( + t, + test.expectedItemIDsInCollection[folderPath], + itemIDs, + "item IDs in collection with path %q", + folderPath) + } + + assert.ElementsMatch( + t, + maps.Keys(test.expectedItemIDsInCollection), + collPaths, + "collection paths") +} + +// The number of pages returned can be indirectly tested by checking how many +// containers/items were returned. +type defaultLimitTestExpects struct { + numItems int + numContainers int + numItemsPerContainer int +} + +type defaultLimitTest struct { + name string + numContainers int + numItemsPerContainer int + itemSize int64 + limits control.PreviewItemLimits + expect defaultLimitTestExpects +} + +func defaultLimitsTable() []defaultLimitTest { + return []defaultLimitTest{ { name: "DefaultNumItems", numContainers: 1, @@ -809,7 +692,7 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits_Defaults() { MaxBytes: 99999999, MaxPages: 99999999, }, - expect: expected{ + expect: defaultLimitTestExpects{ numItems: defaultPreviewMaxItems, numContainers: 1, numItemsPerContainer: defaultPreviewMaxItems, @@ -826,7 +709,7 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits_Defaults() { MaxBytes: 99999999, MaxPages: 99999999, }, - expect: expected{ + expect: defaultLimitTestExpects{ // Root is counted as a container in the code but won't be counted or // have items in the test. 
numItems: defaultPreviewMaxContainers - 1, @@ -845,7 +728,7 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits_Defaults() { MaxBytes: 99999999, MaxPages: 99999999, }, - expect: expected{ + expect: defaultLimitTestExpects{ numItems: defaultPreviewMaxItemsPerContainer, numContainers: 1, numItemsPerContainer: defaultPreviewMaxItemsPerContainer, @@ -862,7 +745,7 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits_Defaults() { MaxItemsPerContainer: 99999999, MaxBytes: 99999999, }, - expect: expected{ + expect: defaultLimitTestExpects{ numItems: defaultPreviewMaxPages, numContainers: defaultPreviewMaxPages, numItemsPerContainer: 1, @@ -880,151 +763,198 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits_Defaults() { MaxItemsPerContainer: 99999999, MaxPages: 99999999, }, - expect: expected{ + expect: defaultLimitTestExpects{ numItems: int(defaultPreviewMaxBytes) / 1024 / 1024, numContainers: 1, numItemsPerContainer: int(defaultPreviewMaxBytes) / 1024 / 1024, }, }, } - for _, test := range table { +} + +// TestGet_PreviewLimits_Defaults checks that default values are used when +// making a preview backup if the user didn't provide some options. +// These tests run a reduced set of checks that really just look for item counts +// and such. Other tests are expected to provide more comprehensive checks. 
+func (suite *LimiterUnitSuite) TestGet_PreviewLimits_defaultsNoTree() { + for _, test := range defaultLimitsTable() { suite.Run(test.name, func() { - t := suite.T() - - ctx, flush := tester.NewContext(t) - defer flush() - - mockDrivePager := &apiMock.Pager[models.Driveable]{ - ToReturn: []apiMock.PagerResult[models.Driveable]{ - {Values: []models.Driveable{drive1}}, - }, - } - - mbh := mock.DefaultOneDriveBH(user) - mbh.DrivePagerV = mockDrivePager - - pages := make([]mock.NextPage, 0, test.numContainers) - - for containerIdx := 0; containerIdx < test.numContainers; containerIdx++ { - page := mock.NextPage{ - Items: []models.DriveItemable{ - driveRootItem(rootID), - driveItem( - idx(folder, containerIdx), - namex(folder, containerIdx), - parent(1), - rootID, - isFolder), - }, - } - - for itemIdx := 0; itemIdx < test.numItemsPerContainer; itemIdx++ { - itemSuffix := fmt.Sprintf("%d-%d", containerIdx, itemIdx) - - page.Items = append(page.Items, driveItemWithSize( - idx(file, itemSuffix), - namex(file, itemSuffix), - parent(1, namex(folder, containerIdx)), - idx(folder, containerIdx), - test.itemSize, - isFile)) - } - - pages = append(pages, page) - } - - mbh.DriveItemEnumeration = mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: pages, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - } - - opts := control.DefaultOptions() - opts.PreviewLimits = test.limits - - c := NewCollections( - mbh, - tenant, - idname.NewProvider(user, user), - func(*support.ControllerOperationStatus) {}, - opts, - count.New()) - - errs := fault.New(true) - - delList := prefixmatcher.NewStringSetBuilder() - - cols, canUsePreviousBackup, err := c.Get(ctx, nil, delList, errs) - require.NoError(t, err, clues.ToCore(err)) - - assert.True(t, canUsePreviousBackup, "can use previous backup") - assert.Empty(t, errs.Skipped()) - - var ( - numContainers int - numItems int - ) - - for _, baseCol := range cols { - // There 
shouldn't be any deleted collections. - if !assert.NotEqual( - t, - data.DeletedState, - baseCol.State(), - "collection marked deleted") { - continue - } - - folderPath := baseCol.FullPath().String() - - if folderPath == metadataPath.String() { - continue - } - - // Skip the root container and don't count it because we don't put - // anything in it. - dp, err := path.ToDrivePath(baseCol.FullPath()) - require.NoError(t, err, clues.ToCore(err)) - - if len(dp.Folders) == 0 { - continue - } - - numContainers++ - - // TODO: We should really be getting items in the collection - // via the Items() channel. The lack of that makes this check a bit more - // bittle since internal details can change. The wiring to support - // mocked GetItems is available. We just haven't plugged it in yet. - col, ok := baseCol.(*Collection) - require.True(t, ok, "getting onedrive.Collection handle") - - numItems += len(col.driveItems) - - // Add one to account for the folder permissions item. - assert.Len( - t, - col.driveItems, - test.expect.numItemsPerContainer+1, - "items in container %v", - col.FullPath()) - } - - assert.Equal( - t, - test.expect.numContainers, - numContainers, - "total containers") - - // Each container also gets an item so account for that here. - assert.Equal( - t, - test.expect.numItems+test.expect.numContainers, - numItems, - "total items across all containers") + runGetPreviewLimitsDefaults( + suite.T(), + test, + control.DefaultOptions()) }) } } + +// TestGet_PreviewLimits_Defaults checks that default values are used when +// making a preview backup if the user didn't provide some options. +// These tests run a reduced set of checks that really just look for item counts +// and such. Other tests are expected to provide more comprehensive checks. 
+func (suite *LimiterUnitSuite) TestGet_PreviewLimits_defaultsWithTree() { + suite.T().Skip("TODO: unskip when tree produces collections") + + opts := control.DefaultOptions() + opts.ToggleFeatures.UseDeltaTree = true + + for _, test := range defaultLimitsTable() { + suite.Run(test.name, func() { + runGetPreviewLimitsDefaults( + suite.T(), + test, + opts) + }) + } +} + +func runGetPreviewLimitsDefaults( + t *testing.T, + test defaultLimitTest, + opts control.Options, +) { + // Add a check that will fail if we make the default smaller than expected. + require.LessOrEqual( + t, + int64(1024*1024), + defaultPreviewMaxBytes, + "default number of bytes changed; DefaultNumBytes test case may need updating!") + require.Zero( + t, + defaultPreviewMaxBytes%(1024*1024), + "default number of bytes isn't divisible by 1MB; DefaultNumBytes test case may need updating!") + + ctx, flush := tester.NewContext(t) + defer flush() + + metadataPath, err := path.BuildMetadata( + tenant, + user, + path.OneDriveService, + path.FilesCategory, + false) + require.NoError(t, err, "making metadata path", clues.ToCore(err)) + + drv := models.NewDrive() + drv.SetId(ptr.To(id(drive))) + drv.SetName(ptr.To(name(drive))) + + pages := make([]mock.NextPage, 0, test.numContainers) + + for containerIdx := 0; containerIdx < test.numContainers; containerIdx++ { + page := mock.NextPage{ + Items: []models.DriveItemable{ + driveRootItem(), + driveItem( + idx(folder, containerIdx), + namex(folder, containerIdx), + parentDir(), + rootID, + isFolder), + }, + } + + for itemIdx := 0; itemIdx < test.numItemsPerContainer; itemIdx++ { + itemSuffix := fmt.Sprintf("%d-%d", containerIdx, itemIdx) + + page.Items = append(page.Items, driveItemWithSize( + idx(file, itemSuffix), + namex(file, itemSuffix), + parentDir(namex(folder, containerIdx)), + idx(folder, containerIdx), + test.itemSize, + isFile)) + } + + pages = append(pages, page) + } + + opts.PreviewLimits = test.limits + + var ( + mockDrivePager = 
&apiMock.Pager[models.Driveable]{
+			ToReturn: []apiMock.PagerResult[models.Driveable]{
+				{Values: []models.Driveable{drv}},
+			},
+		}
+		mockEnumerator = mock.EnumerateItemsDeltaByDrive{
+			DrivePagers: map[string]*mock.DriveItemsDeltaPager{
+				id(drive): {
+					Pages:       pages,
+					DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)},
+				},
+			},
+		}
+		mbh           = mock.DefaultDriveBHWith(user, mockDrivePager, mockEnumerator)
+		c             = collWithMBHAndOpts(mbh, opts)
+		errs          = fault.New(true)
+		delList       = prefixmatcher.NewStringSetBuilder()
+		numContainers int
+		numItems      int
+	)
+
+	cols, canUsePreviousBackup, err := c.Get(ctx, nil, delList, errs)
+
+	if opts.ToggleFeatures.UseDeltaTree {
+		require.ErrorIs(t, err, errGetTreeNotImplemented, clues.ToCore(err))
+	} else {
+		require.NoError(t, err, clues.ToCore(err))
+	}
+
+	assert.True(t, canUsePreviousBackup, "can use previous backup")
+	assert.Empty(t, errs.Skipped())
+
+	for _, baseCol := range cols {
+		require.NotEqual(
+			t,
+			data.DeletedState,
+			baseCol.State(),
+			"no collections should be marked deleted")
+
+		folderPath := baseCol.FullPath().String()
+
+		if folderPath == metadataPath.String() {
+			continue
+		}
+
+		// Skip the root container and don't count it because we don't put
+		// anything in it.
+		dp, err := path.ToDrivePath(baseCol.FullPath())
+		require.NoError(t, err, clues.ToCore(err))
+
+		if len(dp.Folders) == 0 {
+			continue
+		}
+
+		numContainers++
+
+		// TODO: We should really be getting items in the collection
+		// via the Items() channel. The lack of that makes this check a bit more
+		// brittle since internal details can change. The wiring to support
+		// mocked GetItems is available. We just haven't plugged it in yet.
+		col, ok := baseCol.(*Collection)
+		require.True(t, ok, "baseCol must be type *Collection")
+
+		numItems += len(col.driveItems)
+
+		// Add one to account for the folder permissions item.
+ assert.Len( + t, + col.driveItems, + test.expect.numItemsPerContainer+1, + "items in container %v", + col.FullPath()) + } + + assert.Equal( + t, + test.expect.numContainers, + numContainers, + "total containers") + + // Each container also gets an item so account for that here. + assert.Equal( + t, + test.expect.numItems+test.expect.numContainers, + numItems, + "total items across all containers") +}