add multi-delta unit tests

Adds unit tests (and some minor tweaks) for multi-delta
enumeration within the collection tree processor.
This commit is contained in:
ryanfkeepers 2023-11-29 16:11:17 -07:00
parent 3ab500a7d8
commit 4edd1c5165
4 changed files with 338 additions and 107 deletions

View File

@ -828,7 +828,7 @@ func (c *Collections) PopulateDriveCollections(
break break
} }
counter.Inc(count.PagesEnumerated) counter.Inc(count.TotalPagesEnumerated)
if reset { if reset {
counter.Inc(count.PagerResets) counter.Inc(count.PagerResets)

View File

@ -286,6 +286,8 @@ func (c *Collections) populateTree(
) )
for !hitLimit && !finished && el.Failure() == nil { for !hitLimit && !finished && el.Failure() == nil {
counter.Inc(count.TotalDeltasProcessed)
var ( var (
pageCount int pageCount int
pageItemCount int pageItemCount int
@ -316,6 +318,8 @@ func (c *Collections) populateTree(
pageCount = 0 pageCount = 0
pageItemCount = 0 pageItemCount = 0
hadReset = true hadReset = true
} else {
counter.Inc(count.TotalPagesEnumerated)
} }
err = c.enumeratePageOfItems( err = c.enumeratePageOfItems(
@ -335,19 +339,17 @@ func (c *Collections) populateTree(
el.AddRecoverable(ctx, clues.Stack(err)) el.AddRecoverable(ctx, clues.Stack(err))
} }
counter.Inc(count.PagesEnumerated) pageCount++
pageItemCount += len(page)
// Stop enumeration early if we've reached the page limit. Keep this // Stop enumeration early if we've reached the page limit. Keep this
// at the end of the loop so we don't request another page (pager.NextPage) // at the end of the loop so we don't request another page (pager.NextPage)
// before seeing we've passed the limit. // before seeing we've passed the limit.
if limiter.hitPageLimit(int(counter.Get(count.PagesEnumerated))) { if limiter.hitPageLimit(pageCount) {
hitLimit = true hitLimit = true
break break
} }
pageCount++
pageItemCount += len(page)
} }
// Always cancel the pager so that even if we exit early from the loop above // Always cancel the pager so that even if we exit early from the loop above

View File

@ -557,14 +557,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() {
} }
} }
// This test focuses on the population of a tree using delta enumeration data, type populateTreeExpected struct {
// and is not concerned with unifying previous paths or post-processing collections.
func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
drv := models.NewDrive()
drv.SetId(ptr.To(id(drive)))
drv.SetName(ptr.To(name(drive)))
type expected struct {
counts countTD.Expected counts countTD.Expected
err require.ErrorAssertionFunc err require.ErrorAssertionFunc
numLiveFiles int numLiveFiles int
@ -574,15 +567,24 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
treeContainsFolderIDs []string treeContainsFolderIDs []string
treeContainsTombstoneIDs []string treeContainsTombstoneIDs []string
treeContainsFileIDsWithParent map[string]string treeContainsFileIDsWithParent map[string]string
} }
table := []struct { type populateTreeTest struct {
name string name string
enumerator mock.EnumerateItemsDeltaByDrive enumerator mock.EnumerateDriveItemsDelta
tree *folderyMcFolderFace tree *folderyMcFolderFace
limiter *pagerLimiter limiter *pagerLimiter
expect expected expect populateTreeExpected
}{ }
// this test focuses on the population of a tree using a single delta's enumeration data.
// It is not concerned with unifying previous paths or post-processing collections.
func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta() {
drv := models.NewDrive()
drv.SetId(ptr.To(id(drive)))
drv.SetName(ptr.To(name(drive)))
table := []populateTreeTest{
{ {
name: "nil page", name: "nil page",
tree: newFolderyMcFolderFace(nil, rootID), tree: newFolderyMcFolderFace(nil, rootID),
@ -595,7 +597,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
}, },
}, },
limiter: newPagerLimiter(control.DefaultOptions()), limiter: newPagerLimiter(control.DefaultOptions()),
expect: expected{ expect: populateTreeExpected{
counts: countTD.Expected{}, counts: countTD.Expected{},
err: require.NoError, err: require.NoError,
numLiveFiles: 0, numLiveFiles: 0,
@ -618,11 +620,11 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
}, },
}, },
limiter: newPagerLimiter(control.DefaultOptions()), limiter: newPagerLimiter(control.DefaultOptions()),
expect: expected{ expect: populateTreeExpected{
counts: countTD.Expected{ counts: countTD.Expected{
count.TotalFoldersProcessed: 1, count.TotalFoldersProcessed: 1,
count.TotalFilesProcessed: 0, count.TotalFilesProcessed: 0,
count.PagesEnumerated: 1, count.TotalPagesEnumerated: 2,
}, },
err: require.NoError, err: require.NoError,
numLiveFiles: 0, numLiveFiles: 0,
@ -647,11 +649,11 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
}, },
}, },
limiter: newPagerLimiter(control.DefaultOptions()), limiter: newPagerLimiter(control.DefaultOptions()),
expect: expected{ expect: populateTreeExpected{
counts: countTD.Expected{ counts: countTD.Expected{
count.TotalFoldersProcessed: 2, count.TotalFoldersProcessed: 2,
count.TotalFilesProcessed: 0, count.TotalFilesProcessed: 0,
count.PagesEnumerated: 2, count.TotalPagesEnumerated: 3,
}, },
err: require.NoError, err: require.NoError,
numLiveFiles: 0, numLiveFiles: 0,
@ -681,10 +683,10 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
}, },
}, },
limiter: newPagerLimiter(control.DefaultOptions()), limiter: newPagerLimiter(control.DefaultOptions()),
expect: expected{ expect: populateTreeExpected{
counts: countTD.Expected{ counts: countTD.Expected{
count.TotalFoldersProcessed: 7, count.TotalFoldersProcessed: 7,
count.PagesEnumerated: 3, count.TotalPagesEnumerated: 4,
count.TotalFilesProcessed: 0, count.TotalFilesProcessed: 0,
}, },
err: require.NoError, err: require.NoError,
@ -723,11 +725,11 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
}, },
}, },
limiter: newPagerLimiter(control.DefaultOptions()), limiter: newPagerLimiter(control.DefaultOptions()),
expect: expected{ expect: populateTreeExpected{
counts: countTD.Expected{ counts: countTD.Expected{
count.TotalFoldersProcessed: 7, count.TotalFoldersProcessed: 7,
count.TotalFilesProcessed: 3, count.TotalFilesProcessed: 3,
count.PagesEnumerated: 3, count.TotalPagesEnumerated: 4,
}, },
err: require.NoError, err: require.NoError,
numLiveFiles: 3, numLiveFiles: 3,
@ -742,8 +744,49 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
treeContainsTombstoneIDs: []string{}, treeContainsTombstoneIDs: []string{},
treeContainsFileIDsWithParent: map[string]string{ treeContainsFileIDsWithParent: map[string]string{
id(file): id(folder), id(file): id(folder),
idx(file, "sib"): idx(folder, "sib"), idx(file, "fsib"): idx(folder, "sib"),
idx(file, "chld"): idx(folder, "chld"), idx(file, "fchld"): idx(folder, "chld"),
},
},
},
{
name: "many folders with files across multiple deltas",
tree: newFolderyMcFolderFace(nil, rootID),
enumerator: mock.DriveEnumerator(
mock.Drive(id(drive)).With(
mock.Delta(id(delta), nil).With(aPage(
folderAtRoot(),
fileAt(folder))),
mock.Delta(id(delta), nil).With(aPage(
folderxAtRoot("sib"),
filexAt("fsib", "sib"))),
mock.Delta(id(delta), nil).With(aPage(
folderAtRoot(),
folderxAt("chld", folder),
filexAt("fchld", "chld"))),
)),
limiter: newPagerLimiter(control.DefaultOptions()),
expect: populateTreeExpected{
counts: countTD.Expected{
count.TotalFoldersProcessed: 7,
count.TotalFilesProcessed: 3,
count.TotalPagesEnumerated: 4,
},
err: require.NoError,
numLiveFiles: 3,
numLiveFolders: 4,
sizeBytes: 3 * 42,
treeContainsFolderIDs: []string{
rootID,
id(folder),
idx(folder, "sib"),
idx(folder, "chld"),
},
treeContainsTombstoneIDs: []string{},
treeContainsFileIDsWithParent: map[string]string{
id(file): id(folder),
idx(file, "fsib"): idx(folder, "sib"),
idx(file, "fchld"): idx(folder, "chld"),
}, },
}, },
}, },
@ -765,12 +808,12 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
}, },
}, },
limiter: newPagerLimiter(control.DefaultOptions()), limiter: newPagerLimiter(control.DefaultOptions()),
expect: expected{ expect: populateTreeExpected{
counts: countTD.Expected{ counts: countTD.Expected{
count.TotalFoldersProcessed: 3, count.TotalFoldersProcessed: 3,
count.TotalFilesProcessed: 1, count.TotalFilesProcessed: 1,
count.TotalDeleteFoldersProcessed: 1, count.TotalDeleteFoldersProcessed: 1,
count.PagesEnumerated: 2, count.TotalPagesEnumerated: 3,
}, },
err: require.NoError, err: require.NoError,
numLiveFiles: 0, numLiveFiles: 0,
@ -804,12 +847,12 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
}, },
}, },
limiter: newPagerLimiter(control.DefaultOptions()), limiter: newPagerLimiter(control.DefaultOptions()),
expect: expected{ expect: populateTreeExpected{
counts: countTD.Expected{ counts: countTD.Expected{
count.TotalFoldersProcessed: 4, count.TotalFoldersProcessed: 4,
count.TotalDeleteFoldersProcessed: 1, count.TotalDeleteFoldersProcessed: 1,
count.TotalFilesProcessed: 1, count.TotalFilesProcessed: 1,
count.PagesEnumerated: 2, count.TotalPagesEnumerated: 3,
}, },
err: require.NoError, err: require.NoError,
numLiveFiles: 0, numLiveFiles: 0,
@ -849,12 +892,12 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
}, },
}, },
limiter: newPagerLimiter(minimumLimitOpts()), limiter: newPagerLimiter(minimumLimitOpts()),
expect: expected{ expect: populateTreeExpected{
counts: countTD.Expected{ counts: countTD.Expected{
count.TotalDeleteFoldersProcessed: 0, count.TotalDeleteFoldersProcessed: 0,
count.TotalFoldersProcessed: 1, count.TotalFoldersProcessed: 1,
count.TotalFilesProcessed: 0, count.TotalFilesProcessed: 0,
count.PagesEnumerated: 0, count.TotalPagesEnumerated: 1,
}, },
err: require.NoError, err: require.NoError,
shouldHitLimit: true, shouldHitLimit: true,
@ -890,12 +933,12 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
}, },
}, },
limiter: newPagerLimiter(minimumLimitOpts()), limiter: newPagerLimiter(minimumLimitOpts()),
expect: expected{ expect: populateTreeExpected{
counts: countTD.Expected{ counts: countTD.Expected{
count.TotalDeleteFoldersProcessed: 0, count.TotalDeleteFoldersProcessed: 0,
count.TotalFoldersProcessed: 1, count.TotalFoldersProcessed: 1,
count.TotalFilesProcessed: 0, count.TotalFilesProcessed: 0,
count.PagesEnumerated: 0, count.TotalPagesEnumerated: 1,
}, },
err: require.NoError, err: require.NoError,
shouldHitLimit: true, shouldHitLimit: true,
@ -912,18 +955,205 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
} }
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() runPopulateTreeTest(suite.T(), drv, test)
})
}
}
// this test focuses on quirks that can only arise from cases that occur across
// multiple delta enumerations.
// It is not concerned with unifying previous paths or post-processing collections.
func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_multiDelta() {
drv := models.NewDrive()
drv.SetId(ptr.To(id(drive)))
drv.SetName(ptr.To(name(drive)))
table := []populateTreeTest{
{
name: "sanity case: normal enumeration split across multiple deltas",
tree: newFolderyMcFolderFace(nil, rootID),
enumerator: mock.DriveEnumerator(
mock.Drive(id(drive)).With(
mock.Delta(id(delta), nil).With(aPage(
folderAtRoot(),
fileAt(folder))),
mock.Delta(id(delta), nil).With(aPage(
folderxAtRoot("sib"),
filexAt("fsib", "sib"))),
mock.Delta(id(delta), nil).With(aPage(
folderAtRoot(),
folderxAt("chld", folder),
filexAt("fchld", "chld"))),
)),
limiter: newPagerLimiter(control.DefaultOptions()),
expect: populateTreeExpected{
counts: countTD.Expected{
count.TotalDeltasProcessed: 4,
count.TotalDeleteFoldersProcessed: 0,
count.TotalDeleteFilesProcessed: 0,
count.TotalFilesProcessed: 3,
count.TotalFoldersProcessed: 7,
count.TotalPagesEnumerated: 4,
},
err: require.NoError,
numLiveFiles: 3,
numLiveFolders: 4,
sizeBytes: 3 * 42,
treeContainsFolderIDs: []string{
rootID,
id(folder),
idx(folder, "sib"),
idx(folder, "chld"),
},
treeContainsTombstoneIDs: []string{},
treeContainsFileIDsWithParent: map[string]string{
id(file): id(folder),
idx(file, "fsib"): idx(folder, "sib"),
idx(file, "fchld"): idx(folder, "chld"),
},
},
},
{
name: "create->delete,create",
tree: newFolderyMcFolderFace(nil, rootID),
enumerator: mock.DriveEnumerator(
mock.Drive(id(drive)).With(
mock.Delta(id(delta), nil).With(aPage(
folderAtRoot(),
fileAt(folder))),
// a (delete,create) pair in the same delta can occur when
// a user deletes and restores an item in-between deltas.
mock.Delta(id(delta), nil).With(
aPage(
delItem(id(folder), rootID, isFolder),
delItem(id(file), id(folder), isFile)),
aPage(
folderAtRoot(),
fileAt(folder))),
)),
limiter: newPagerLimiter(control.DefaultOptions()),
expect: populateTreeExpected{
counts: countTD.Expected{
count.TotalDeltasProcessed: 3,
count.TotalDeleteFoldersProcessed: 1,
count.TotalDeleteFilesProcessed: 1,
count.TotalFilesProcessed: 2,
count.TotalFoldersProcessed: 5,
count.TotalPagesEnumerated: 4,
},
err: require.NoError,
numLiveFiles: 1,
numLiveFolders: 2,
sizeBytes: 42,
treeContainsFolderIDs: []string{
rootID,
id(folder),
},
treeContainsTombstoneIDs: []string{},
treeContainsFileIDsWithParent: map[string]string{},
},
},
{
name: "visit->rename",
tree: newFolderyMcFolderFace(nil, rootID),
enumerator: mock.DriveEnumerator(
mock.Drive(id(drive)).With(
mock.Delta(id(delta), nil).With(aPage(
folderAtRoot(),
fileAt(folder))),
mock.Delta(id(delta), nil).With(aPage(
driveItem(id(folder), namex(folder, "rename"), parentDir(), rootID, isFolder),
driveItem(id(file), namex(file, "rename"), parentDir(namex(folder, "rename")), id(folder), isFile))),
)),
limiter: newPagerLimiter(control.DefaultOptions()),
expect: populateTreeExpected{
counts: countTD.Expected{
count.TotalDeltasProcessed: 3,
count.TotalDeleteFilesProcessed: 0,
count.TotalDeleteFoldersProcessed: 0,
count.TotalFilesProcessed: 2,
count.TotalFoldersProcessed: 4,
count.TotalPagesEnumerated: 3,
},
err: require.NoError,
numLiveFiles: 1,
numLiveFolders: 2,
sizeBytes: 42,
treeContainsFolderIDs: []string{
rootID,
id(folder),
},
treeContainsTombstoneIDs: []string{},
treeContainsFileIDsWithParent: map[string]string{
id(file): id(folder),
},
},
},
{
name: "duplicate folder name from deferred delete marker",
tree: newFolderyMcFolderFace(nil, rootID),
enumerator: mock.DriveEnumerator(
mock.Drive(id(drive)).With(
mock.Delta(id(delta), nil).With(
// first page: create /root/folder and /root/folder/file
aPage(
folderAtRoot(),
fileAt(folder)),
// assume the user makes changes at this point:
// 1. delete /root/folder
// 2. create a new /root/folder
// 3. move /root/folder/file from old to new folder (same file ID)
// in drive deltas, this will show up as another folder creation sharing
// the same dirname, but we won't see the delete until...
aPage(
driveItem(idx(folder, 2), name(folder), parentDir(), rootID, isFolder),
driveItem(id(file), name(file), parentDir(name(folder)), idx(folder, 2), isFile))),
// the next delta, containing the delete marker for the original /root/folder
mock.Delta(id(delta), nil).With(aPage(
delItem(id(folder), rootID, isFolder),
)),
)),
limiter: newPagerLimiter(control.DefaultOptions()),
expect: populateTreeExpected{
counts: countTD.Expected{
count.TotalDeltasProcessed: 3,
count.TotalDeleteFilesProcessed: 0,
count.TotalDeleteFoldersProcessed: 1,
count.TotalFilesProcessed: 2,
count.TotalFoldersProcessed: 5,
count.TotalPagesEnumerated: 4,
},
err: require.NoError,
numLiveFiles: 1,
numLiveFolders: 2,
sizeBytes: 42,
treeContainsFolderIDs: []string{
rootID,
idx(folder, 2),
},
treeContainsTombstoneIDs: []string{},
treeContainsFileIDsWithParent: map[string]string{
id(file): idx(folder, 2),
},
},
},
}
for _, test := range table {
suite.Run(test.name, func() {
runPopulateTreeTest(suite.T(), drv, test)
})
}
}
func runPopulateTreeTest(
t *testing.T,
drv models.Driveable,
test populateTreeTest,
) {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
mockDrivePager := &apiMock.Pager[models.Driveable]{ mbh := mock.DefaultDriveBHWith(user, pagerForDrives(drv), test.enumerator)
ToReturn: []apiMock.PagerResult[models.Driveable]{
{Values: []models.Driveable{drv}},
},
}
mbh := mock.DefaultDriveBHWith(user, mockDrivePager, test.enumerator)
c := collWithMBH(mbh) c := collWithMBH(mbh)
counter := count.New() counter := count.New()
@ -942,18 +1172,18 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
t, t,
test.expect.numLiveFolders, test.expect.numLiveFolders,
test.tree.countLiveFolders(), test.tree.countLiveFolders(),
"count folders in tree") "count live folders in tree")
countSize := test.tree.countLiveFilesAndSizes() cAndS := test.tree.countLiveFilesAndSizes()
assert.Equal( assert.Equal(
t, t,
test.expect.numLiveFiles, test.expect.numLiveFiles,
countSize.numFiles, cAndS.numFiles,
"count files in tree") "count live files in tree")
assert.Equal( assert.Equal(
t, t,
test.expect.sizeBytes, test.expect.sizeBytes,
countSize.totalBytes, cAndS.totalBytes,
"count total bytes in tree") "count total bytes in tree")
test.expect.counts.Compare(t, counter) test.expect.counts.Compare(t, counter)
@ -969,8 +1199,6 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
assert.Contains(t, test.tree.fileIDToParentID, iID, "file should exist in tree") assert.Contains(t, test.tree.fileIDToParentID, iID, "file should exist in tree")
assert.Equal(t, pID, test.tree.fileIDToParentID[iID], "file should reference correct parent") assert.Equal(t, pID, test.tree.fileIDToParentID[iID], "file should reference correct parent")
} }
})
}
} }
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------

View File

@ -50,7 +50,6 @@ const (
NoDeltaQueries Key = "cannot-make-delta-queries" NoDeltaQueries Key = "cannot-make-delta-queries"
Packages Key = "packages" Packages Key = "packages"
PagerResets Key = "pager-resets" PagerResets Key = "pager-resets"
PagesEnumerated Key = "pages-enumerated"
PrevDeltas Key = "previous-deltas" PrevDeltas Key = "previous-deltas"
PrevPaths Key = "previous-paths" PrevPaths Key = "previous-paths"
PreviousPathMetadataCollision Key = "previous-path-metadata-collision" PreviousPathMetadataCollision Key = "previous-path-metadata-collision"
@ -80,10 +79,12 @@ const (
const ( const (
TotalDeleteFilesProcessed Key = "total-delete-files-processed" TotalDeleteFilesProcessed Key = "total-delete-files-processed"
TotalDeleteFoldersProcessed Key = "total-delete-folders-processed" TotalDeleteFoldersProcessed Key = "total-delete-folders-processed"
TotalDeltasProcessed Key = "total-deltas-processed"
TotalFilesProcessed Key = "total-files-processed" TotalFilesProcessed Key = "total-files-processed"
TotalFoldersProcessed Key = "total-folders-processed" TotalFoldersProcessed Key = "total-folders-processed"
TotalMalwareProcessed Key = "total-malware-processed" TotalMalwareProcessed Key = "total-malware-processed"
TotalPackagesProcessed Key = "total-packages-processed" TotalPackagesProcessed Key = "total-packages-processed"
TotalPagesEnumerated Key = "total-pages-enumerated"
) )
// miscellaneous // miscellaneous