Compare commits

...

4 Commits

Author SHA1 Message Date
ryanfkeepers
7ea8c41736 get things working with new base 2023-12-06 14:13:53 -07:00
ryanfkeepers
4edd1c5165 add multi-delta unit tests
adds testing (and some minor tweaks) to multi-delta
enumeration within the collection tree processor.
2023-12-06 13:46:59 -07:00
ryanfkeepers
3ab500a7d8 add multi delta enumeration to collection tree
tests coming in follow-up PR
2023-12-05 16:22:53 -07:00
ryanfkeepers
14225ad616 require rootID on tree construction
Turns out the root folder's name isn't an appropriate match
for establishing the root node. Instead, the backup handler
is now extended with a getRootFolder method and will
pass the expected root folder ID into the tree's constructor
func to ensure we establish the correct root node.
2023-12-05 16:20:56 -07:00
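
In short, the last commit swaps root detection from name matching to ID matching: the handler fetches the drive's root folder once, and the tree is constructed with that folder's ID. A minimal, self-contained sketch of just that rule (toy names throughout; only the GetRootFolder/ID-equality idea comes from the diffs below):

    package main

    import "fmt"

    // folderTree stands in for folderyMcFolderFace: the expected root folder ID
    // is fixed at construction, so the root node is recognized by ID instead of
    // by the "root:" name match used previously.
    type folderTree struct {
        rootID string
    }

    func newFolderTree(rootID string) *folderTree {
        return &folderTree{rootID: rootID}
    }

    // isRoot mirrors the new check in setFolder: a folder is the root
    // iff its ID equals the ID captured at construction.
    func (t *folderTree) isRoot(folderID string) bool {
        return folderID == t.rootID
    }

    func main() {
        // in the real code, the ID comes from the handler's new GetRootFolder call.
        tree := newFolderTree("root-id")
        fmt.Println(tree.isRoot("root-id"))   // true
        fmt.Println(tree.isRoot("folder-id")) // false
    }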
11 changed files with 686 additions and 179 deletions

View File

@@ -828,7 +828,7 @@ func (c *Collections) PopulateDriveCollections(
            break
        }

-       counter.Inc(count.PagesEnumerated)
+       counter.Inc(count.TotalPagesEnumerated)

        if reset {
            counter.Inc(count.PagerResets)

View File

@@ -173,9 +173,8 @@ func malwareItem(
 }

 func driveRootItem() models.DriveItemable {
-   name := rootName
    item := models.NewDriveItem()
-   item.SetName(&name)
+   item.SetName(ptr.To(rootName))
    item.SetId(ptr.To(rootID))
    item.SetRoot(models.NewRoot())
    item.SetFolder(models.NewFolder())

View File

@@ -171,7 +171,12 @@ func (c *Collections) makeDriveCollections(
        return nil, nil, pagers.DeltaUpdate{}, clues.Wrap(err, "generating backup tree prefix")
    }

-   tree := newFolderyMcFolderFace(ppfx)
+   root, err := c.handler.GetRootFolder(ctx, ptr.Val(drv.GetId()))
+   if err != nil {
+       return nil, nil, pagers.DeltaUpdate{}, clues.Wrap(err, "getting root folder")
+   }
+
+   tree := newFolderyMcFolderFace(ppfx, ptr.Val(root.GetId()))

    counter.Add(count.PrevPaths, int64(len(prevPaths)))
@@ -272,65 +277,96 @@ func (c *Collections) populateTree(
    ctx = clues.Add(ctx, "invalid_prev_delta", len(prevDeltaLink) == 0)

    var (
-       driveID = ptr.Val(drv.GetId())
-       el      = errs.Local()
+       currDeltaLink = prevDeltaLink
+       driveID       = ptr.Val(drv.GetId())
+       el            = errs.Local()
+
+       du       pagers.DeltaUpdate
+       finished bool
+       hitLimit bool
    )

-   // TODO(keepers): to end in a correct state, we'll eventually need to run this
-   // query multiple times over, until it ends in an empty change set.
-   pager := c.handler.EnumerateDriveItemsDelta(
-       ctx,
-       driveID,
-       prevDeltaLink,
-       api.CallConfig{
-           Select: api.DefaultDriveItemProps(),
-       })
-
-   for page, reset, done := pager.NextPage(); !done; page, reset, done = pager.NextPage() {
-       if el.Failure() != nil {
-           break
-       }
-
-       if reset {
-           counter.Inc(count.PagerResets)
-           tree.reset()
-           c.resetStats()
-       }
-
-       err := c.enumeratePageOfItems(
-           ctx,
-           tree,
-           drv,
-           page,
-           limiter,
-           counter,
-           errs)
-       if err != nil {
-           if errors.Is(err, errHitLimit) {
-               break
-           }
-
-           el.AddRecoverable(ctx, clues.Stack(err))
-       }
-
-       counter.Inc(count.PagesEnumerated)
-
-       // Stop enumeration early if we've reached the page limit. Keep this
-       // at the end of the loop so we don't request another page (pager.NextPage)
-       // before seeing we've passed the limit.
-       if limiter.hitPageLimit(int(counter.Get(count.PagesEnumerated))) {
-           break
-       }
-   }
-
-   // Always cancel the pager so that even if we exit early from the loop above
-   // we don't deadlock. Cancelling a pager that's already completed is
-   // essentially a noop.
-   pager.Cancel()
-
-   du, err := pager.Results()
-   if err != nil {
-       return du, clues.Stack(err)
-   }
+   for !hitLimit && !finished && el.Failure() == nil {
+       counter.Inc(count.TotalDeltasProcessed)
+
+       var (
+           pageCount     int
+           pageItemCount int
+           hadReset      bool
+           err           error
+       )
+
+       // TODO(keepers): to end in a correct state, we'll eventually need to run this
+       // query multiple times over, until it ends in an empty change set.
+       pager := c.handler.EnumerateDriveItemsDelta(
+           ctx,
+           driveID,
+           currDeltaLink,
+           api.CallConfig{
+               Select: api.DefaultDriveItemProps(),
+           })
+
+       for page, reset, done := pager.NextPage(); !done; page, reset, done = pager.NextPage() {
+           if el.Failure() != nil {
+               return du, el.Failure()
+           }
+
+           if reset {
+               counter.Inc(count.PagerResets)
+               tree.reset()
+               c.resetStats()
+
+               pageCount = 0
+               pageItemCount = 0
+               hadReset = true
+           } else {
+               counter.Inc(count.TotalPagesEnumerated)
+           }
+
+           err = c.enumeratePageOfItems(
+               ctx,
+               tree,
+               drv,
+               page,
+               limiter,
+               counter,
+               errs)
+           if err != nil {
+               if errors.Is(err, errHitLimit) {
+                   hitLimit = true
+                   break
+               }
+
+               el.AddRecoverable(ctx, clues.Stack(err))
+           }
+
+           pageCount++
+           pageItemCount += len(page)
+
+           // Stop enumeration early if we've reached the page limit. Keep this
+           // at the end of the loop so we don't request another page (pager.NextPage)
+           // before seeing we've passed the limit.
+           if limiter.hitPageLimit(pageCount) {
+               hitLimit = true
+               break
+           }
+       }
+
+       // Always cancel the pager so that even if we exit early from the loop above
+       // we don't deadlock. Cancelling a pager that's already completed is
+       // essentially a noop.
+       pager.Cancel()
+
+       du, err = pager.Results()
+       if err != nil {
+           return du, clues.Stack(err)
+       }
+
+       currDeltaLink = du.URL
+
+       // 0 pages is never expected. We should at least have one (empty) page to
+       // consume. But checking pageCount == 1 is brittle in a non-helpful way.
+       finished = pageCount < 2 && pageItemCount == 0 && !hadReset
+   }

    logger.Ctx(ctx).Infow("enumerated collection delta", "stats", counter.Values())
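
Distilled from the hunk above: populateTree now wraps the page loop in an outer per-delta loop that ends once a delta returns effectively nothing. A self-contained toy model of that loop and its termination logic (invented types and data; only the hitLimit/finished checks mirror the diff):

    package main

    import "fmt"

    // enumerate models populateTree's new shape: each delta is a list of pages,
    // each page a list of items. It returns how many deltas were consumed.
    func enumerate(deltas [][][]string, pageLimit int) int {
        deltasRun := 0
        finished, hitLimit := false, false

        for d := 0; !hitLimit && !finished && d < len(deltas); d++ {
            deltasRun++
            pageCount, pageItemCount := 0, 0

            for _, page := range deltas[d] {
                pageCount++
                pageItemCount += len(page)

                // mirrors limiter.hitPageLimit(pageCount)
                if pageCount >= pageLimit {
                    hitLimit = true
                    break
                }
            }

            // mirrors: finished = pageCount < 2 && pageItemCount == 0 && !hadReset
            finished = pageCount < 2 && pageItemCount == 0
        }

        return deltasRun
    }

    func main() {
        deltas := [][][]string{
            {{"folder", "file"}}, // delta 1: one page, two items
            {{"folder2"}},        // delta 2: one page, one item
            {{}},                 // delta 3: a single empty page ends enumeration
        }
        fmt.Println(enumerate(deltas, 100)) // 3
    }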

View File

@@ -557,35 +557,37 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() {
    }
 }

-// This test focuses on the population of a tree using delta enumeration data,
-// and is not concerned with unifying previous paths or post-processing collections.
-func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
+type populateTreeExpected struct {
+   counts                        countTD.Expected
+   err                           require.ErrorAssertionFunc
+   numLiveFiles                  int
+   numLiveFolders                int
+   shouldHitLimit                bool
+   sizeBytes                     int64
+   treeContainsFolderIDs         []string
+   treeContainsTombstoneIDs      []string
+   treeContainsFileIDsWithParent map[string]string
+}
+
+type populateTreeTest struct {
+   name       string
+   enumerator mock.EnumerateItemsDeltaByDrive
+   tree       *folderyMcFolderFace
+   limiter    *pagerLimiter
+   expect     populateTreeExpected
+}
+
+// this test focuses on the population of a tree using a single delta's enumeration data.
+// It is not concerned with unifying previous paths or post-processing collections.
+func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta() {
    drv := models.NewDrive()
    drv.SetId(ptr.To(id(drive)))
    drv.SetName(ptr.To(name(drive)))

-   type expected struct {
-       counts                        countTD.Expected
-       err                           require.ErrorAssertionFunc
-       numLiveFiles                  int
-       numLiveFolders                int
-       shouldHitLimit                bool
-       sizeBytes                     int64
-       treeContainsFolderIDs         []string
-       treeContainsTombstoneIDs      []string
-       treeContainsFileIDsWithParent map[string]string
-   }
-
-   table := []struct {
-       name       string
-       enumerator mock.EnumerateItemsDeltaByDrive
-       tree       *folderyMcFolderFace
-       limiter    *pagerLimiter
-       expect     expected
-   }{
+   table := []populateTreeTest{
        {
            name: "nil page",
-           tree: newFolderyMcFolderFace(nil),
+           tree: newFolderyMcFolderFace(nil, rootID),
            enumerator: mock.EnumerateItemsDeltaByDrive{
                DrivePagers: map[string]*mock.DriveItemsDeltaPager{
                    id(drive): {
@@ -595,7 +597,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
                },
            },
            limiter: newPagerLimiter(control.DefaultOptions()),
-           expect: expected{
+           expect: populateTreeExpected{
                counts:       countTD.Expected{},
                err:          require.NoError,
                numLiveFiles: 0,
@@ -608,7 +610,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
        },
        {
            name: "root only",
-           tree: newFolderyMcFolderFace(nil),
+           tree: newFolderyMcFolderFace(nil, rootID),
            enumerator: mock.EnumerateItemsDeltaByDrive{
                DrivePagers: map[string]*mock.DriveItemsDeltaPager{
                    id(drive): {
@@ -618,11 +620,11 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
                },
            },
            limiter: newPagerLimiter(control.DefaultOptions()),
-           expect: expected{
+           expect: populateTreeExpected{
                counts: countTD.Expected{
                    count.TotalFoldersProcessed: 1,
                    count.TotalFilesProcessed:   0,
-                   count.PagesEnumerated:       1,
+                   count.TotalPagesEnumerated:  2,
                },
                err:          require.NoError,
                numLiveFiles: 0,
@@ -637,7 +639,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
        },
        {
            name: "root only on two pages",
-           tree: newFolderyMcFolderFace(nil),
+           tree: newFolderyMcFolderFace(nil, rootID),
            enumerator: mock.EnumerateItemsDeltaByDrive{
                DrivePagers: map[string]*mock.DriveItemsDeltaPager{
                    id(drive): {
@@ -647,11 +649,11 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
                },
            },
            limiter: newPagerLimiter(control.DefaultOptions()),
-           expect: expected{
+           expect: populateTreeExpected{
                counts: countTD.Expected{
                    count.TotalFoldersProcessed: 2,
                    count.TotalFilesProcessed:   0,
-                   count.PagesEnumerated:       2,
+                   count.TotalPagesEnumerated:  3,
                },
                err:          require.NoError,
                numLiveFiles: 0,
@@ -666,7 +668,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
        },
        {
            name: "many folders in a hierarchy across multiple pages",
-           tree: newFolderyMcFolderFace(nil),
+           tree: newFolderyMcFolderFace(nil, rootID),
            enumerator: mock.EnumerateItemsDeltaByDrive{
                DrivePagers: map[string]*mock.DriveItemsDeltaPager{
                    id(drive): {
@@ -681,10 +683,10 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
                },
            },
            limiter: newPagerLimiter(control.DefaultOptions()),
-           expect: expected{
+           expect: populateTreeExpected{
                counts: countTD.Expected{
                    count.TotalFoldersProcessed: 7,
-                   count.PagesEnumerated:       3,
+                   count.TotalPagesEnumerated:  4,
                    count.TotalFilesProcessed:   0,
                },
                err: require.NoError,
@@ -703,7 +705,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
        },
        {
            name: "many folders with files",
-           tree: newFolderyMcFolderFace(nil),
+           tree: newFolderyMcFolderFace(nil, rootID),
            enumerator: mock.EnumerateItemsDeltaByDrive{
                DrivePagers: map[string]*mock.DriveItemsDeltaPager{
                    id(drive): {
@@ -723,11 +725,11 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
                },
            },
            limiter: newPagerLimiter(control.DefaultOptions()),
-           expect: expected{
+           expect: populateTreeExpected{
                counts: countTD.Expected{
                    count.TotalFoldersProcessed: 7,
                    count.TotalFilesProcessed:   3,
-                   count.PagesEnumerated:       3,
+                   count.TotalPagesEnumerated:  4,
                },
                err:          require.NoError,
                numLiveFiles: 3,
@@ -741,17 +743,59 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
                },
                treeContainsTombstoneIDs: []string{},
                treeContainsFileIDsWithParent: map[string]string{
-                   id(file):          id(folder),
-                   idx(file, "sib"):  idx(folder, "sib"),
-                   idx(file, "chld"): idx(folder, "chld"),
+                   id(file):           id(folder),
+                   idx(file, "fsib"):  idx(folder, "sib"),
+                   idx(file, "fchld"): idx(folder, "chld"),
                },
            },
        },
+       // TODO: restore after mock.DriveEnumerator support lands.
+       // {
+       //  name: "many folders with files across multiple deltas",
+       //  tree: newFolderyMcFolderFace(nil, rootID),
+       //  enumerator: mock.DriveEnumerator(
+       //      mock.Drive(id(drive)).With(
+       //          mock.Delta(id(delta), nil).With(aPage(
+       //              folderAtRoot(),
+       //              fileAt(folder))),
+       //          mock.Delta(id(delta), nil).With(aPage(
+       //              folderxAtRoot("sib"),
+       //              filexAt("fsib", "sib"))),
+       //          mock.Delta(id(delta), nil).With(aPage(
+       //              folderAtRoot(),
+       //              folderxAt("chld", folder),
+       //              filexAt("fchld", "chld"))),
+       //      )),
+       //  limiter: newPagerLimiter(control.DefaultOptions()),
+       //  expect: populateTreeExpected{
+       //      counts: countTD.Expected{
+       //          count.TotalFoldersProcessed: 7,
+       //          count.TotalFilesProcessed:   3,
+       //          count.TotalPagesEnumerated:  4,
+       //      },
+       //      err:            require.NoError,
+       //      numLiveFiles:   3,
+       //      numLiveFolders: 4,
+       //      sizeBytes:      3 * 42,
+       //      treeContainsFolderIDs: []string{
+       //          rootID,
+       //          id(folder),
+       //          idx(folder, "sib"),
+       //          idx(folder, "chld"),
+       //      },
+       //      treeContainsTombstoneIDs: []string{},
+       //      treeContainsFileIDsWithParent: map[string]string{
+       //          id(file):           id(folder),
+       //          idx(file, "fsib"):  idx(folder, "sib"),
+       //          idx(file, "fchld"): idx(folder, "chld"),
+       //      },
+       //  },
+       // },
        {
            // technically you won't see this behavior from graph deltas, since deletes always
            // precede creates/updates. But it's worth checking that we can handle it anyways.
            name: "create, delete on next page",
-           tree: newFolderyMcFolderFace(nil),
+           tree: newFolderyMcFolderFace(nil, rootID),
            enumerator: mock.EnumerateItemsDeltaByDrive{
                DrivePagers: map[string]*mock.DriveItemsDeltaPager{
                    id(drive): {
@@ -765,12 +809,12 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
                },
            },
            limiter: newPagerLimiter(control.DefaultOptions()),
-           expect: expected{
+           expect: populateTreeExpected{
                counts: countTD.Expected{
                    count.TotalFoldersProcessed:       3,
                    count.TotalFilesProcessed:         1,
                    count.TotalDeleteFoldersProcessed: 1,
-                   count.PagesEnumerated:             2,
+                   count.TotalPagesEnumerated:        3,
                },
                err:          require.NoError,
                numLiveFiles: 0,
@@ -804,12 +848,12 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
                },
            },
            limiter: newPagerLimiter(control.DefaultOptions()),
-           expect: expected{
+           expect: populateTreeExpected{
                counts: countTD.Expected{
                    count.TotalFoldersProcessed:       4,
                    count.TotalDeleteFoldersProcessed: 1,
                    count.TotalFilesProcessed:         1,
-                   count.PagesEnumerated:             2,
+                   count.TotalPagesEnumerated:        3,
                },
                err:          require.NoError,
                numLiveFiles: 0,
@@ -849,12 +893,12 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
                },
            },
            limiter: newPagerLimiter(minimumLimitOpts()),
-           expect: expected{
+           expect: populateTreeExpected{
                counts: countTD.Expected{
                    count.TotalDeleteFoldersProcessed: 0,
                    count.TotalFoldersProcessed:       1,
                    count.TotalFilesProcessed:         0,
-                   count.PagesEnumerated:             0,
+                   count.TotalPagesEnumerated:        1,
                },
                err:            require.NoError,
                shouldHitLimit: true,
@@ -870,7 +914,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
        },
        {
            name: "hit folder limit during enumeration",
-           tree: newFolderyMcFolderFace(nil),
+           tree: newFolderyMcFolderFace(nil, rootID),
            enumerator: mock.EnumerateItemsDeltaByDrive{
                DrivePagers: map[string]*mock.DriveItemsDeltaPager{
                    id(drive): {
@@ -890,12 +934,12 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
                },
            },
            limiter: newPagerLimiter(minimumLimitOpts()),
-           expect: expected{
+           expect: populateTreeExpected{
                counts: countTD.Expected{
                    count.TotalDeleteFoldersProcessed: 0,
                    count.TotalFoldersProcessed:       1,
                    count.TotalFilesProcessed:         0,
-                   count.PagesEnumerated:             0,
+                   count.TotalPagesEnumerated:        1,
                },
                err:            require.NoError,
                shouldHitLimit: true,
@@ -912,67 +956,321 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() {
    }

    for _, test := range table {
        suite.Run(test.name, func() {
-           t := suite.T()
-
-           ctx, flush := tester.NewContext(t)
-           defer flush()
-
-           mockDrivePager := &apiMock.Pager[models.Driveable]{
-               ToReturn: []apiMock.PagerResult[models.Driveable]{
-                   {Values: []models.Driveable{drv}},
-               },
-           }
-
-           mbh := mock.DefaultDriveBHWith(user, mockDrivePager, test.enumerator)
-           c := collWithMBH(mbh)
-           counter := count.New()
-
-           _, err := c.populateTree(
-               ctx,
-               test.tree,
-               drv,
-               id(delta),
-               test.limiter,
-               counter,
-               fault.New(true))
-           test.expect.err(t, err, clues.ToCore(err))
-
-           assert.Equal(
-               t,
-               test.expect.numLiveFolders,
-               test.tree.countLiveFolders(),
-               "count folders in tree")
-
-           countSize := test.tree.countLiveFilesAndSizes()
-           assert.Equal(
-               t,
-               test.expect.numLiveFiles,
-               countSize.numFiles,
-               "count files in tree")
-           assert.Equal(
-               t,
-               test.expect.sizeBytes,
-               countSize.totalBytes,
-               "count total bytes in tree")
-           test.expect.counts.Compare(t, counter)
-
-           for _, id := range test.expect.treeContainsFolderIDs {
-               assert.NotNil(t, test.tree.folderIDToNode[id], "node exists")
-           }
-
-           for _, id := range test.expect.treeContainsTombstoneIDs {
-               assert.NotNil(t, test.tree.tombstones[id], "tombstone exists")
-           }
-
-           for iID, pID := range test.expect.treeContainsFileIDsWithParent {
-               assert.Contains(t, test.tree.fileIDToParentID, iID, "file should exist in tree")
-               assert.Equal(t, pID, test.tree.fileIDToParentID[iID], "file should reference correct parent")
-           }
+           runPopulateTreeTest(suite.T(), drv, test)
        })
    }
 }
+
+// TODO: remove when unifying test tree structs
+type populateTreeTestMulti struct {
+   name       string
+   enumerator mock.EnumerateDriveItemsDelta
+   tree       *folderyMcFolderFace
+   limiter    *pagerLimiter
+   expect     populateTreeExpected
+}
+
+// this test focuses on quirks that can only arise from cases that occur across
+// multiple delta enumerations.
+// It is not concerned with unifying previous paths or post-processing collections.
+func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_multiDelta() {
+   drv := models.NewDrive()
+   drv.SetId(ptr.To(id(drive)))
+   drv.SetName(ptr.To(name(drive)))
+
+   table := []populateTreeTestMulti{
+       {
+           name: "sanity case: normal enumeration split across multiple deltas",
+           tree: newFolderyMcFolderFace(nil, rootID),
+           enumerator: mock.DriveEnumerator(
+               mock.Drive(id(drive)).With(
+                   mock.Delta(id(delta), nil).With(pagesOf(pageItems(
+                       driveItem(id(folder), name(folder), parentDir(), rootID, isFolder),
+                       driveItem(id(file), name(file), parentDir(name(folder)), id(folder), isFile)))...),
+                   mock.Delta(id(delta), nil).With(pagesOf(pageItems(
+                       driveItem(idx(folder, "sib"), namex(folder, "sib"), parentDir(), rootID, isFolder),
+                       driveItem(idx(file, "fsib"), namex(file, "fsib"), parentDir(namex(folder, "sib")), idx(folder, "sib"), isFolder)))...),
+                   mock.Delta(id(delta), nil).With(pagesOf(pageItems(
+                       driveItem(id(folder), name(folder), parentDir(), rootID, isFolder),
+                       driveItem(idx(folder, "chld"), namex(folder, "chld"), parentDir(name(folder)), id(folder), isFolder),
+                       driveItem(idx(file, "fchld"), namex(file, "fchld"), parentDir(name(folder), namex(folder, "chld")), idx(folder, "chld"), isFolder)))...),
+               )),
+           limiter: newPagerLimiter(control.DefaultOptions()),
+           expect: populateTreeExpected{
+               counts: countTD.Expected{
+                   count.TotalDeltasProcessed:        4,
+                   count.TotalDeleteFoldersProcessed: 0,
+                   count.TotalDeleteFilesProcessed:   0,
+                   count.TotalFilesProcessed:         3,
+                   count.TotalFoldersProcessed:       7,
+                   count.TotalPagesEnumerated:        4,
+               },
+               err:            require.NoError,
+               numLiveFiles:   3,
+               numLiveFolders: 4,
+               sizeBytes:      3 * 42,
+               treeContainsFolderIDs: []string{
+                   rootID,
+                   id(folder),
+                   idx(folder, "sib"),
+                   idx(folder, "chld"),
+               },
+               treeContainsTombstoneIDs: []string{},
+               treeContainsFileIDsWithParent: map[string]string{
+                   id(file):           id(folder),
+                   idx(file, "fsib"):  idx(folder, "sib"),
+                   idx(file, "fchld"): idx(folder, "chld"),
+               },
+           },
+       },
+       {
+           name: "create->delete,create",
+           tree: newFolderyMcFolderFace(nil, rootID),
+           enumerator: mock.DriveEnumerator(
+               mock.Drive(id(drive)).With(
+                   mock.Delta(id(delta), nil).
+                       With(pagesOf(pageItems(
+                           driveItem(id(folder), name(folder), parentDir(), rootID, isFolder),
+                           driveItem(id(file), name(file), parentDir(name(folder)), id(folder), isFile)))...),
+                   // a (delete,create) pair in the same delta can occur when
+                   // a user deletes and restores an item in-between deltas.
+                   mock.Delta(id(delta), nil).
+                       With(pagesOf(pageItems(
+                           delItem(id(folder), parentDir(), rootID, isFolder),
+                           delItem(id(file), parentDir(), id(folder), isFile)))...).
+                       With(pagesOf(pageItems(
+                           driveItem(id(folder), name(folder), parentDir(), rootID, isFolder),
+                           driveItem(id(file), name(file), parentDir(name(folder)), id(folder), isFile)))...),
+               )),
+           limiter: newPagerLimiter(control.DefaultOptions()),
+           expect: populateTreeExpected{
+               counts: countTD.Expected{
+                   count.TotalDeltasProcessed:        3,
+                   count.TotalDeleteFoldersProcessed: 1,
+                   count.TotalDeleteFilesProcessed:   1,
+                   count.TotalFilesProcessed:         2,
+                   count.TotalFoldersProcessed:       5,
+                   count.TotalPagesEnumerated:        4,
+               },
+               err:            require.NoError,
+               numLiveFiles:   1,
+               numLiveFolders: 2,
+               sizeBytes:      42,
+               treeContainsFolderIDs: []string{
+                   rootID,
+                   id(folder),
+               },
+               treeContainsTombstoneIDs:      []string{},
+               treeContainsFileIDsWithParent: map[string]string{},
+           },
+       },
+       {
+           name: "visit->rename",
+           tree: newFolderyMcFolderFace(nil, rootID),
+           enumerator: mock.DriveEnumerator(
+               mock.Drive(id(drive)).With(
+                   mock.Delta(id(delta), nil).
+                       With(pagesOf(pageItems(
+                           driveItem(id(folder), name(folder), parentDir(), rootID, isFolder),
+                           driveItem(id(file), name(file), parentDir(name(folder)), id(folder), isFile)))...),
+                   mock.Delta(id(delta), nil).
+                       With(pagesOf(pageItems(
+                           driveItem(id(folder), namex(folder, "rename"), parentDir(), rootID, isFolder),
+                           driveItem(id(file), namex(file, "rename"), parentDir(namex(folder, "rename")), id(folder), isFile)))...),
+               )),
+           limiter: newPagerLimiter(control.DefaultOptions()),
+           expect: populateTreeExpected{
+               counts: countTD.Expected{
+                   count.TotalDeltasProcessed:        3,
+                   count.TotalDeleteFilesProcessed:   0,
+                   count.TotalDeleteFoldersProcessed: 0,
+                   count.TotalFilesProcessed:         2,
+                   count.TotalFoldersProcessed:       4,
+                   count.TotalPagesEnumerated:        3,
+               },
+               err:            require.NoError,
+               numLiveFiles:   1,
+               numLiveFolders: 2,
+               sizeBytes:      42,
+               treeContainsFolderIDs: []string{
+                   rootID,
+                   id(folder),
+               },
+               treeContainsTombstoneIDs: []string{},
+               treeContainsFileIDsWithParent: map[string]string{
+                   id(file): id(folder),
+               },
+           },
+       },
+       {
+           name: "duplicate folder name from deferred delete marker",
+           tree: newFolderyMcFolderFace(nil, rootID),
+           enumerator: mock.DriveEnumerator(
+               mock.Drive(id(drive)).With(
+                   mock.Delta(id(delta), nil).With(
+                       // first page: create /root/folder and /root/folder/file
+                       pagesOf(pageItems(
+                           driveItem(id(folder), name(folder), parentDir(), rootID, isFolder),
+                           driveItem(id(file), name(file), parentDir(name(folder)), id(folder), isFile)))...).
+                       // assume the user makes changes at this point:
+                       // 1. delete /root/folder
+                       // 2. create a new /root/folder
+                       // 3. move /root/folder/file from old to new folder (same file ID)
+                       // in drive deltas, this will show up as another folder creation sharing
+                       // the same dirname, but we won't see the delete until...
+                       With(pagesOf(pageItems(
+                           driveItem(idx(folder, 2), name(folder), parentDir(), rootID, isFolder),
+                           driveItem(id(file), name(file), parentDir(name(folder)), idx(folder, 2), isFile)))...),
+                   // the next delta, containing the delete marker for the original /root/folder
+                   mock.Delta(id(delta), nil).
+                       With(pagesOf(pageItems(
+                           delItem(id(folder), parentDir(), rootID, isFolder),
+                       ))...),
+               )),
+           limiter: newPagerLimiter(control.DefaultOptions()),
+           expect: populateTreeExpected{
+               counts: countTD.Expected{
+                   count.TotalDeltasProcessed:        3,
+                   count.TotalDeleteFilesProcessed:   0,
+                   count.TotalDeleteFoldersProcessed: 1,
+                   count.TotalFilesProcessed:         2,
+                   count.TotalFoldersProcessed:       5,
+                   count.TotalPagesEnumerated:        4,
+               },
+               err:            require.NoError,
+               numLiveFiles:   1,
+               numLiveFolders: 2,
+               sizeBytes:      42,
+               treeContainsFolderIDs: []string{
+                   rootID,
+                   idx(folder, 2),
+               },
+               treeContainsTombstoneIDs: []string{},
+               treeContainsFileIDsWithParent: map[string]string{
+                   id(file): idx(folder, 2),
+               },
+           },
+       },
+   }
+
+   for _, test := range table {
+       suite.Run(test.name, func() {
+           runPopulateTreeTestMulti(suite.T(), drv, test)
+       })
+   }
+}
+
+func runPopulateTreeTest(
+   t *testing.T,
+   drv models.Driveable,
+   test populateTreeTest,
+) {
+   ctx, flush := tester.NewContext(t)
+   defer flush()
+
+   mbh := mock.DefaultDriveBHWith(user, pagerForDrives(drv), test.enumerator)
+   c := collWithMBH(mbh)
+   counter := count.New()
+
+   _, err := c.populateTree(
+       ctx,
+       test.tree,
+       drv,
+       id(delta),
+       test.limiter,
+       counter,
+       fault.New(true))
+   test.expect.err(t, err, clues.ToCore(err))
+
+   assert.Equal(
+       t,
+       test.expect.numLiveFolders,
+       test.tree.countLiveFolders(),
+       "count live folders in tree")
+
+   cAndS := test.tree.countLiveFilesAndSizes()
+   assert.Equal(
+       t,
+       test.expect.numLiveFiles,
+       cAndS.numFiles,
+       "count live files in tree")
+   assert.Equal(
+       t,
+       test.expect.sizeBytes,
+       cAndS.totalBytes,
+       "count total bytes in tree")
+   test.expect.counts.Compare(t, counter)
+
+   for _, id := range test.expect.treeContainsFolderIDs {
+       assert.NotNil(t, test.tree.folderIDToNode[id], "node exists")
+   }
+
+   for _, id := range test.expect.treeContainsTombstoneIDs {
+       assert.NotNil(t, test.tree.tombstones[id], "tombstone exists")
+   }
+
+   for iID, pID := range test.expect.treeContainsFileIDsWithParent {
+       assert.Contains(t, test.tree.fileIDToParentID, iID, "file should exist in tree")
+       assert.Equal(t, pID, test.tree.fileIDToParentID[iID], "file should reference correct parent")
+   }
+}
+
+func runPopulateTreeTestMulti(
+   t *testing.T,
+   drv models.Driveable,
+   test populateTreeTestMulti,
+) {
+   ctx, flush := tester.NewContext(t)
+   defer flush()
+
+   mbh := mock.DefaultDriveBHWithMulti(user, pagerForDrives(drv), test.enumerator)
+   c := collWithMBH(mbh)
+   counter := count.New()
+
+   _, err := c.populateTree(
+       ctx,
+       test.tree,
+       drv,
+       id(delta),
+       test.limiter,
+       counter,
+       fault.New(true))
+   test.expect.err(t, err, clues.ToCore(err))
+
+   assert.Equal(
+       t,
+       test.expect.numLiveFolders,
+       test.tree.countLiveFolders(),
+       "count live folders in tree")
+
+   cAndS := test.tree.countLiveFilesAndSizes()
+   assert.Equal(
+       t,
+       test.expect.numLiveFiles,
+       cAndS.numFiles,
+       "count live files in tree")
+   assert.Equal(
+       t,
+       test.expect.sizeBytes,
+       cAndS.totalBytes,
+       "count total bytes in tree")
+   test.expect.counts.Compare(t, counter)
+
+   for _, id := range test.expect.treeContainsFolderIDs {
+       assert.NotNil(t, test.tree.folderIDToNode[id], "node exists")
+   }
+
+   for _, id := range test.expect.treeContainsTombstoneIDs {
+       assert.NotNil(t, test.tree.tombstones[id], "tombstone exists")
+   }
+
+   for iID, pID := range test.expect.treeContainsFileIDsWithParent {
+       assert.Contains(t, test.tree.fileIDToParentID, iID, "file should exist in tree")
+       assert.Equal(t, pID, test.tree.fileIDToParentID[iID], "file should reference correct parent")
+   }
+}

 // ---------------------------------------------------------------------------
 // folder tests
 // ---------------------------------------------------------------------------
@@ -1305,7 +1603,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFolderToTree() {
        },
        {
            name:    "tombstone new folder in unpopulated tree",
-           tree:    newFolderyMcFolderFace(nil),
+           tree:    newFolderyMcFolderFace(nil, rootID),
            folder:  del,
            limiter: newPagerLimiter(control.DefaultOptions()),
            expect: expected{
@@ -1553,7 +1851,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file
        },
        {
            name: "one file in a folder",
-           tree: newFolderyMcFolderFace(nil),
+           tree: newFolderyMcFolderFace(nil, rootID),
            page: pageItems(
                driveItem(id(folder), name(folder), parentDir(), rootID, isFolder),
                driveItem(id(file), name(file), parentDir(name(folder)), id(folder), isFile)),

View File

@@ -6,7 +6,6 @@ import (
    "github.com/alcionai/clues"

-   odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
    "github.com/alcionai/corso/src/pkg/logger"
    "github.com/alcionai/corso/src/pkg/path"
 )
@@ -23,6 +22,10 @@ type folderyMcFolderFace struct {
    // new, moved, and notMoved root
    root *nodeyMcNodeFace

+   // the ID of the actual root folder.
+   // required to ensure correct population of the root node.
+   rootID string
+
    // the majority of operations we perform can be handled with
    // a folder ID lookup instead of re-walking the entire tree.
    // Ex: adding a new file to its parent folder.
@@ -45,9 +48,11 @@ type folderyMcFolderFace struct {
 func newFolderyMcFolderFace(
    prefix path.Path,
+   rootID string,
 ) *folderyMcFolderFace {
    return &folderyMcFolderFace{
        prefix:           prefix,
+       rootID:           rootID,
        folderIDToNode:   map[string]*nodeyMcNodeFace{},
        tombstones:       map[string]*nodeyMcNodeFace{},
        fileIDToParentID: map[string]string{},
@@ -150,17 +155,12 @@ func (face *folderyMcFolderFace) setFolder(
        return clues.NewWC(ctx, "missing folder name")
    }

-   // drive doesn't normally allow the `:` character in folder names.
-   // so `root:` is, by default, the only folder that can match this
-   // name. That makes this check a little bit brittle, but generally
-   // reliable, since we should always see the root first and can rely
-   // on the naming structure.
-   if len(parentID) == 0 && name != odConsts.RootPathDir {
+   if len(parentID) == 0 && id != face.rootID {
        return clues.NewWC(ctx, "non-root folder missing parent id")
    }

    // only set the root node once.
-   if name == odConsts.RootPathDir {
+   if id == face.rootID {
        if face.root == nil {
            root := newNodeyMcNodeFace(nil, id, name, isPackage)
            face.root = root

View File

@@ -20,7 +20,7 @@ import (
 var loc = path.NewElements("root:/foo/bar/baz/qux/fnords/smarf/voi/zumba/bangles/howdyhowdyhowdy")

 func treeWithRoot() *folderyMcFolderFace {
-   tree := newFolderyMcFolderFace(nil)
+   tree := newFolderyMcFolderFace(nil, rootID)
    rootey := newNodeyMcNodeFace(nil, rootID, rootName, false)
    tree.root = rootey
    tree.folderIDToNode[rootID] = rootey
@@ -38,13 +38,13 @@ func treeWithTombstone() *folderyMcFolderFace {
 func treeWithFolders() *folderyMcFolderFace {
    tree := treeWithRoot()

-   o := newNodeyMcNodeFace(tree.root, idx(folder, "parent"), namex(folder, "parent"), true)
-   tree.folderIDToNode[o.id] = o
-   tree.root.children[o.id] = o
+   parent := newNodeyMcNodeFace(tree.root, idx(folder, "parent"), namex(folder, "parent"), true)
+   tree.folderIDToNode[parent.id] = parent
+   tree.root.children[parent.id] = parent

-   f := newNodeyMcNodeFace(o, id(folder), name(folder), false)
+   f := newNodeyMcNodeFace(parent, id(folder), name(folder), false)
    tree.folderIDToNode[f.id] = f
-   o.children[f.id] = f
+   parent.children[f.id] = f

    return tree
 }
@@ -102,7 +102,7 @@ func (suite *DeltaTreeUnitSuite) TestNewFolderyMcFolderFace() {
    require.NoError(t, err, clues.ToCore(err))

-   folderFace := newFolderyMcFolderFace(p)
+   folderFace := newFolderyMcFolderFace(p, rootID)

    assert.Equal(t, p, folderFace.prefix)
    assert.Nil(t, folderFace.root)
    assert.NotNil(t, folderFace.folderIDToNode)
@@ -144,7 +144,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder() {
    }{
        {
            tname:     "add root",
-           tree:      newFolderyMcFolderFace(nil),
+           tree:      newFolderyMcFolderFace(nil, rootID),
            id:        rootID,
            name:      rootName,
            isPackage: true,
@@ -272,7 +272,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddTombstone() {
        {
            name:      "add tombstone",
            id:        id(folder),
-           tree:      newFolderyMcFolderFace(nil),
+           tree:      newFolderyMcFolderFace(nil, rootID),
            expectErr: assert.NoError,
        },
        {
@@ -283,7 +283,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddTombstone() {
        },
        {
            name:      "missing ID",
-           tree:      newFolderyMcFolderFace(nil),
+           tree:      newFolderyMcFolderFace(nil, rootID),
            expectErr: assert.Error,
        },
        {

View File

@@ -39,6 +39,7 @@ type BackupHandler interface {
    api.Getter
    GetItemPermissioner
    GetItemer
+   GetRootFolderer
    NewDrivePagerer
    EnumerateDriveItemsDeltaer

View File

@@ -182,6 +182,13 @@ func (h siteBackupHandler) EnumerateDriveItemsDelta(
    return h.ac.EnumerateDriveItemsDelta(ctx, driveID, prevDeltaLink, cc)
 }

+func (h siteBackupHandler) GetRootFolder(
+   ctx context.Context,
+   driveID string,
+) (models.DriveItemable, error) {
+   return h.ac.Drives().GetRootFolder(ctx, driveID)
+}
+
 // ---------------------------------------------------------------------------
 // Restore
 // ---------------------------------------------------------------------------

View File

@@ -182,6 +182,13 @@ func (h userDriveBackupHandler) EnumerateDriveItemsDelta(
    return h.ac.EnumerateDriveItemsDelta(ctx, driveID, prevDeltaLink, cc)
 }

+func (h userDriveBackupHandler) GetRootFolder(
+   ctx context.Context,
+   driveID string,
+) (models.DriveItemable, error) {
+   return h.ac.Drives().GetRootFolder(ctx, driveID)
+}
+
 // ---------------------------------------------------------------------------
 // Restore
 // ---------------------------------------------------------------------------

View File

@@ -2,6 +2,7 @@ package mock

 import (
    "context"
+   "fmt"
    "net/http"

    "github.com/alcionai/clues"
@@ -9,6 +10,7 @@ import (
    "github.com/microsoftgraph/msgraph-sdk-go/models"

    "github.com/alcionai/corso/src/internal/common/idname"
+   "github.com/alcionai/corso/src/internal/common/ptr"
    odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
    "github.com/alcionai/corso/src/pkg/backup/details"
    "github.com/alcionai/corso/src/pkg/control"
@@ -30,7 +32,8 @@ type BackupHandler[T any] struct {
    // and plug in the selector scope there.
    Sel selectors.Selector

    DriveItemEnumeration      EnumerateItemsDeltaByDrive
+   DriveItemEnumerationMulti EnumerateDriveItemsDelta

    GI  GetsItem
    GIP GetsItemPermission
@@ -57,6 +60,18 @@ type BackupHandler[T any] struct {
    getCall  int
    GetResps []*http.Response
    GetErrs  []error
+
+   RootFolder models.DriveItemable
+}
+
+func stubRootFolder() models.DriveItemable {
+   item := models.NewDriveItem()
+   item.SetName(ptr.To(odConsts.RootPathDir))
+   item.SetId(ptr.To(odConsts.RootID))
+   item.SetRoot(models.NewRoot())
+   item.SetFolder(models.NewFolder())
+
+   return item
 }

 func DefaultOneDriveBH(resourceOwner string) *BackupHandler[models.DriveItemable] {
@@ -81,6 +96,7 @@ func DefaultOneDriveBH(resourceOwner string) *BackupHandler[models.DriveItemable
        LocationIDFn: defaultOneDriveLocationIDer,
        GetResps:     []*http.Response{nil},
        GetErrs:      []error{clues.New("not defined")},
+       RootFolder:   stubRootFolder(),
    }
 }
@@ -105,6 +121,7 @@ func DefaultSharePointBH(resourceOwner string) *BackupHandler[models.DriveItemab
        LocationIDFn: defaultSharePointLocationIDer,
        GetResps:     []*http.Response{nil},
        GetErrs:      []error{clues.New("not defined")},
+       RootFolder:   stubRootFolder(),
    }
 }
@@ -120,6 +137,18 @@ func DefaultDriveBHWith(
    return mbh
 }

+func DefaultDriveBHWithMulti(
+   resource string,
+   drivePager *apiMock.Pager[models.Driveable],
+   enumerator EnumerateDriveItemsDelta,
+) *BackupHandler[models.DriveItemable] {
+   mbh := DefaultOneDriveBH(resource)
+   mbh.DrivePagerV = drivePager
+   mbh.DriveItemEnumerationMulti = enumerator
+
+   return mbh
+}
+
 func (h BackupHandler[T]) PathPrefix(tID, driveID string) (path.Path, error) {
    pp, err := h.PathPrefixFn(tID, h.ProtectedResource.ID(), driveID)
    if err != nil {
@@ -190,6 +219,14 @@ func (h BackupHandler[T]) EnumerateDriveItemsDelta(
    driveID, prevDeltaLink string,
    cc api.CallConfig,
 ) pagers.NextPageResulter[models.DriveItemable] {
+   if h.DriveItemEnumerationMulti.DrivePagers != nil {
+       return h.DriveItemEnumerationMulti.EnumerateDriveItemsDelta(
+           ctx,
+           driveID,
+           prevDeltaLink,
+           cc)
+   }
+
    return h.DriveItemEnumeration.EnumerateDriveItemsDelta(
        ctx,
        driveID,
@@ -287,6 +324,10 @@ func (h BackupHandler[T]) IncludesDir(dir string) bool {
        selectors.OneDriveScope(scope).Matches(selectors.OneDriveFolder, dir)
 }

+func (h BackupHandler[T]) GetRootFolder(context.Context, string) (models.DriveItemable, error) {
+   return h.RootFolder, nil
+}
+
 // ---------------------------------------------------------------------------
 // Get Itemer
 // ---------------------------------------------------------------------------
@@ -304,7 +345,124 @@ func (m GetsItem) GetItem(
 }

 // ---------------------------------------------------------------------------
-// Enumerates Drive Items
+// Drive Items Enumerator
+// ---------------------------------------------------------------------------
+
+type EnumerateDriveItemsDelta struct {
+   DrivePagers map[string]*DriveDeltaEnumerator
+}
+
+func DriveEnumerator(
+   ds ...*DriveDeltaEnumerator,
+) EnumerateDriveItemsDelta {
+   enumerator := EnumerateDriveItemsDelta{
+       DrivePagers: map[string]*DriveDeltaEnumerator{},
+   }
+
+   for _, drive := range ds {
+       enumerator.DrivePagers[drive.DriveID] = drive
+   }
+
+   return enumerator
+}
+
+func (en EnumerateDriveItemsDelta) EnumerateDriveItemsDelta(
+   _ context.Context,
+   driveID, _ string,
+   _ api.CallConfig,
+) pagers.NextPageResulter[models.DriveItemable] {
+   iterator := en.DrivePagers[driveID]
+   return iterator.nextDelta()
+}
+
+type DriveDeltaEnumerator struct {
+   DriveID      string
+   idx          int
+   DeltaQueries []*DeltaQuery
+}
+
+func Drive(driveID string) *DriveDeltaEnumerator {
+   return &DriveDeltaEnumerator{DriveID: driveID}
+}
+
+func (dde *DriveDeltaEnumerator) With(ds ...*DeltaQuery) *DriveDeltaEnumerator {
+   dde.DeltaQueries = ds
+   return dde
+}
+
+func (dde *DriveDeltaEnumerator) nextDelta() *DeltaQuery {
+   if dde.idx == len(dde.DeltaQueries) {
+       // at the end of the enumeration, return an empty page with no items,
+       // not even the root. This is what graph api would do to signify an absence
+       // of changes in the delta.
+       lastDU := dde.DeltaQueries[dde.idx-1].DeltaUpdate
+
+       return &DeltaQuery{
+           DeltaUpdate: lastDU,
+           Pages: []NextPage{{
+               Items: []models.DriveItemable{},
+           }},
+       }
+   }
+
+   if dde.idx > len(dde.DeltaQueries) {
+       // a panic isn't optimal here, but since this mechanism is internal to testing,
+       // it's an acceptable way to have the tests ensure we don't over-enumerate deltas.
+       panic(fmt.Sprintf("delta index %d larger than count of delta iterations in mock", dde.idx))
+   }
+
+   pages := dde.DeltaQueries[dde.idx]
+   dde.idx++
+
+   return pages
+}
+
+var _ pagers.NextPageResulter[models.DriveItemable] = &DeltaQuery{}
+
+type DeltaQuery struct {
+   idx         int
+   Pages       []NextPage
+   DeltaUpdate pagers.DeltaUpdate
+   Err         error
+}
+
+func Delta(
+   resultDeltaID string,
+   err error,
+) *DeltaQuery {
+   return &DeltaQuery{
+       DeltaUpdate: pagers.DeltaUpdate{URL: resultDeltaID},
+       Err:         err,
+   }
+}
+
+func (dq *DeltaQuery) NextPage() ([]models.DriveItemable, bool, bool) {
+   if dq.idx >= len(dq.Pages) {
+       return nil, false, true
+   }
+
+   np := dq.Pages[dq.idx]
+   dq.idx = dq.idx + 1
+
+   return np.Items, np.Reset, false
+}
+
+func (dq *DeltaQuery) With(
+   pages ...NextPage,
+) *DeltaQuery {
+   dq.Pages = append(dq.Pages, pages...)
+   return dq
+}
+
+func (dq *DeltaQuery) Cancel() {}
+
+func (dq *DeltaQuery) Results() (pagers.DeltaUpdate, error) {
+   return dq.DeltaUpdate, dq.Err
+}
+
+// ---------------------------------------------------------------------------
+// old version - Enumerates Drive Items
 // ---------------------------------------------------------------------------

 type NextPage struct {
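
Taken together, the new mock types form a small builder: DriveEnumerator maps drive IDs to DriveDeltaEnumerator values, each of which plays back an ordered list of DeltaQuery results, and nextDelta fabricates a trailing empty page once the scripted deltas run out, mimicking the graph API's no-changes signal. A usage sketch, not code from this PR — the identifiers come from the diff above, while the drive ID, delta URLs, and page contents are made up:

    // page1Items and page2Items are hypothetical []models.DriveItemable slices.
    enumerator := mock.DriveEnumerator(
        mock.Drive("drive-1").With(
            mock.Delta("delta-url-1", nil).With(
                mock.NextPage{Items: page1Items},
                mock.NextPage{Items: page2Items}),
            mock.Delta("delta-url-2", nil).With(
                mock.NextPage{Items: []models.DriveItemable{}})))

    // Each EnumerateDriveItemsDelta call pops the next scripted DeltaQuery for
    // the drive; NextPage then walks its pages, and Results reports the
    // DeltaUpdate (or scripted error) for that delta.
    pager := enumerator.EnumerateDriveItemsDelta(ctx, "drive-1", "", api.CallConfig{})
    for items, reset, done := pager.NextPage(); !done; items, reset, done = pager.NextPage() {
        _, _ = items, reset // consume the page
    }
    du, err := pager.Results()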

View File

@@ -50,7 +50,6 @@ const (
    NoDeltaQueries                Key = "cannot-make-delta-queries"
    Packages                      Key = "packages"
    PagerResets                   Key = "pager-resets"
-   PagesEnumerated               Key = "pages-enumerated"
    PrevDeltas                    Key = "previous-deltas"
    PrevPaths                     Key = "previous-paths"
    PreviousPathMetadataCollision Key = "previous-path-metadata-collision"
@@ -80,10 +79,12 @@ const (
 const (
    TotalDeleteFilesProcessed   Key = "total-delete-files-processed"
    TotalDeleteFoldersProcessed Key = "total-delete-folders-processed"
+   TotalDeltasProcessed        Key = "total-deltas-processed"
    TotalFilesProcessed         Key = "total-files-processed"
    TotalFoldersProcessed       Key = "total-folders-processed"
    TotalMalwareProcessed       Key = "total-malware-processed"
    TotalPackagesProcessed      Key = "total-packages-processed"
+   TotalPagesEnumerated        Key = "total-pages-enumerated"
 )

 // miscellaneous