Revert "Revert "move drive pagers to pager pattern (#4316)" (#4412)" (#4456)

This reverts commit 3d78183651289e2051b8690850069c9b41df6bd0.

Replacement for https://github.com/alcionai/corso/pull/4316 after the revert in https://github.com/alcionai/corso/pull/4412.

---

#### Does this PR need a docs update or release note?

- [x]  No

#### Type of change

- [x] 🌻 Feature
Keepers 2023-10-09 13:46:18 -06:00 committed by GitHub
parent 6f25be4ad2
commit 5215e907b0
30 changed files with 551 additions and 961 deletions
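
For orientation: the substance of the re-landed change is to stop passing raw delta pagers into `collectItems` and instead expose a single `EnumerateDriveItemsDeltaer` behavior on the backup handlers. The sketch below restates that interface as it appears in the handlers diff further down, together with a hypothetical caller; `consumeDelta` is illustrative only and not part of the commit.

```go
package drive

import (
	"context"

	"github.com/microsoftgraph/msgraph-sdk-go/models"

	"github.com/alcionai/corso/src/pkg/logger"
	"github.com/alcionai/corso/src/pkg/services/m365/api"
)

// EnumerateDriveItemsDeltaer is the interface added by this change (see the
// drive handlers diff below). Callers hand it a drive ID and the previous
// delta link, and get back every changed item plus the new delta state.
type EnumerateDriveItemsDeltaer interface {
	EnumerateDriveItemsDelta(
		ctx context.Context,
		driveID, prevDeltaLink string,
	) ([]models.DriveItemable, api.DeltaUpdate, error)
}

// consumeDelta is a hypothetical caller showing the intended shape of use:
// enumerate once, inspect the reset flag, and persist du.URL as the delta
// link for the next incremental backup.
func consumeDelta(
	ctx context.Context,
	edid EnumerateDriveItemsDeltaer,
	driveID, prevDeltaLink string,
) (string, error) {
	items, du, err := edid.EnumerateDriveItemsDelta(ctx, driveID, prevDeltaLink)
	if err != nil {
		return "", err
	}

	// du.Reset reports that the previous delta link was rejected, so the items
	// are a full re-enumeration rather than an incremental set of changes.
	logger.Ctx(ctx).Infow(
		"enumerated drive items",
		"num_items", len(items),
		"delta_reset", du.Reset)

	return du.URL, nil
}
```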

View File

@ -229,7 +229,7 @@ elseif (![string]::IsNullOrEmpty($Site)) {
}
}
else {
Write-Host "User (for OneDrvie) or Site (for Sharpeoint) is required"
Write-Host "User (for OneDrive) or Site (for Sharepoint) is required"
Exit
}

View File

@ -27,7 +27,7 @@ func NewPrefixMap(m map[string]map[string]struct{}) *PrefixMap {
func (pm PrefixMap) AssertEqual(t *testing.T, r prefixmatcher.StringSetReader) {
if pm.Empty() {
require.True(t, r.Empty(), "both prefix maps are empty")
require.True(t, r.Empty(), "result prefixMap should be empty but contains keys: %+v", r.Keys())
return
}

View File

@ -228,16 +228,16 @@ func (c *Collections) Get(
ssmb *prefixmatcher.StringSetMatchBuilder,
errs *fault.Bus,
) ([]data.BackupCollection, bool, error) {
prevDeltas, oldPathsByDriveID, canUsePreviousBackup, err := deserializeMetadata(ctx, prevMetadata)
prevDriveIDToDelta, oldPrevPathsByDriveID, canUsePrevBackup, err := deserializeMetadata(ctx, prevMetadata)
if err != nil {
return nil, false, err
}
ctx = clues.Add(ctx, "can_use_previous_backup", canUsePreviousBackup)
ctx = clues.Add(ctx, "can_use_previous_backup", canUsePrevBackup)
driveTombstones := map[string]struct{}{}
for driveID := range oldPathsByDriveID {
for driveID := range oldPrevPathsByDriveID {
driveTombstones[driveID] = struct{}{}
}
@ -255,76 +255,88 @@ func (c *Collections) Get(
}
var (
// Drive ID -> delta URL for drive
deltaURLs = map[string]string{}
// Drive ID -> folder ID -> folder path
folderPaths = map[string]map[string]string{}
numPrevItems = 0
driveIDToDeltaLink = map[string]string{}
driveIDToPrevPaths = map[string]map[string]string{}
numPrevItems = 0
)
for _, d := range drives {
var (
driveID = ptr.Val(d.GetId())
driveName = ptr.Val(d.GetName())
prevDelta = prevDeltas[driveID]
oldPaths = oldPathsByDriveID[driveID]
numOldDelta = 0
ictx = clues.Add(ctx, "drive_id", driveID, "drive_name", driveName)
driveID = ptr.Val(d.GetId())
driveName = ptr.Val(d.GetName())
ictx = clues.Add(
ctx,
"drive_id", driveID,
"drive_name", clues.Hide(driveName))
excludedItemIDs = map[string]struct{}{}
oldPrevPaths = oldPrevPathsByDriveID[driveID]
prevDeltaLink = prevDriveIDToDelta[driveID]
// itemCollection is used to identify which collection a
// file belongs to. This is useful to delete a file from the
// collection it was previously in, in case it was moved to a
// different collection within the same delta query
// item ID -> item ID
itemCollection = map[string]string{}
)
delete(driveTombstones, driveID)
if _, ok := driveIDToPrevPaths[driveID]; !ok {
driveIDToPrevPaths[driveID] = map[string]string{}
}
if _, ok := c.CollectionMap[driveID]; !ok {
c.CollectionMap[driveID] = map[string]*Collection{}
}
if len(prevDelta) > 0 {
numOldDelta++
}
logger.Ctx(ictx).Infow(
"previous metadata for drive",
"num_paths_entries", len(oldPaths),
"num_deltas_entries", numOldDelta)
"num_paths_entries", len(oldPrevPaths))
delta, paths, excluded, err := collectItems(
items, du, err := c.handler.EnumerateDriveItemsDelta(
ictx,
c.handler.NewItemPager(driveID, "", api.DriveItemSelectDefault()),
driveID,
driveName,
c.UpdateCollections,
oldPaths,
prevDelta,
errs)
prevDeltaLink)
if err != nil {
return nil, false, err
}
// Used for logging below.
numDeltas := 0
// It's alright to have an empty folders map (i.e. no folders found) but not
// an empty delta token. This is because when deserializing the metadata we
// remove entries for which there is no corresponding delta token/folder. If
// we leave empty delta tokens then we may end up setting the State field
// for collections when not actually getting delta results.
if len(delta.URL) > 0 {
deltaURLs[driveID] = delta.URL
numDeltas++
if len(du.URL) > 0 {
driveIDToDeltaLink[driveID] = du.URL
}
newPrevPaths, err := c.UpdateCollections(
ctx,
driveID,
driveName,
items,
oldPrevPaths,
itemCollection,
excludedItemIDs,
du.Reset,
errs)
if err != nil {
return nil, false, clues.Stack(err)
}
// Avoid the edge case where there's no paths but we do have a valid delta
// token. We can accomplish this by adding an empty paths map for this
// drive. If we don't have this then the next backup won't use the delta
// token because it thinks the folder paths weren't persisted.
folderPaths[driveID] = map[string]string{}
maps.Copy(folderPaths[driveID], paths)
driveIDToPrevPaths[driveID] = map[string]string{}
maps.Copy(driveIDToPrevPaths[driveID], newPrevPaths)
logger.Ctx(ictx).Infow(
"persisted metadata for drive",
"num_paths_entries", len(paths),
"num_deltas_entries", numDeltas,
"delta_reset", delta.Reset)
"num_new_paths_entries", len(newPrevPaths),
"delta_reset", du.Reset)
numDriveItems := c.NumItems - numPrevItems
numPrevItems = c.NumItems
@ -336,7 +348,7 @@ func (c *Collections) Get(
err = c.addURLCacheToDriveCollections(
ictx,
driveID,
prevDelta,
prevDeltaLink,
errs)
if err != nil {
return nil, false, err
@ -345,8 +357,8 @@ func (c *Collections) Get(
// For both cases we don't need to do set difference on folder map if the
// delta token was valid because we should see all the changes.
if !delta.Reset {
if len(excluded) == 0 {
if !du.Reset {
if len(excludedItemIDs) == 0 {
continue
}
@ -355,7 +367,7 @@ func (c *Collections) Get(
return nil, false, clues.Wrap(err, "making exclude prefix").WithClues(ictx)
}
ssmb.Add(p.String(), excluded)
ssmb.Add(p.String(), excludedItemIDs)
continue
}
@ -370,13 +382,11 @@ func (c *Collections) Get(
foundFolders[id] = struct{}{}
}
for fldID, p := range oldPaths {
for fldID, p := range oldPrevPaths {
if _, ok := foundFolders[fldID]; ok {
continue
}
delete(paths, fldID)
prevPath, err := path.FromDataLayerPath(p, false)
if err != nil {
err = clues.Wrap(err, "invalid previous path").WithClues(ictx).With("deleted_path", p)
@ -446,14 +456,14 @@ func (c *Collections) Get(
// empty/missing and default to a full backup.
logger.CtxErr(ctx, err).Info("making metadata collection path prefixes")
return collections, canUsePreviousBackup, nil
return collections, canUsePrevBackup, nil
}
md, err := graph.MakeMetadataCollection(
pathPrefix,
[]graph.MetadataCollectionEntry{
graph.NewMetadataEntry(bupMD.PreviousPathFileName, folderPaths),
graph.NewMetadataEntry(bupMD.DeltaURLsFileName, deltaURLs),
graph.NewMetadataEntry(bupMD.PreviousPathFileName, driveIDToPrevPaths),
graph.NewMetadataEntry(bupMD.DeltaURLsFileName, driveIDToDeltaLink),
},
c.statusUpdater)
@ -466,7 +476,7 @@ func (c *Collections) Get(
collections = append(collections, md)
}
return collections, canUsePreviousBackup, nil
return collections, canUsePrevBackup, nil
}
// addURLCacheToDriveCollections adds an URL cache to all collections belonging to
@ -480,7 +490,7 @@ func (c *Collections) addURLCacheToDriveCollections(
driveID,
prevDelta,
urlCacheRefreshInterval,
c.handler.NewItemPager(driveID, "", api.DriveItemSelectURLCache()),
c.handler,
errs)
if err != nil {
return err
@ -536,22 +546,21 @@ func updateCollectionPaths(
func (c *Collections) handleDelete(
itemID, driveID string,
oldPaths, newPaths map[string]string,
oldPrevPaths, currPrevPaths, newPrevPaths map[string]string,
isFolder bool,
excluded map[string]struct{},
itemCollection map[string]map[string]string,
invalidPrevDelta bool,
) error {
if !isFolder {
// Try to remove the item from the Collection if an entry exists for this
// item. This handles cases where an item was created and deleted during the
// same delta query.
if parentID, ok := itemCollection[driveID][itemID]; ok {
if parentID, ok := currPrevPaths[itemID]; ok {
if col := c.CollectionMap[driveID][parentID]; col != nil {
col.Remove(itemID)
}
delete(itemCollection[driveID], itemID)
delete(currPrevPaths, itemID)
}
// Don't need to add to exclude list if the delta is invalid since the
@ -572,7 +581,7 @@ func (c *Collections) handleDelete(
var prevPath path.Path
prevPathStr, ok := oldPaths[itemID]
prevPathStr, ok := oldPrevPaths[itemID]
if ok {
var err error
@ -589,7 +598,7 @@ func (c *Collections) handleDelete(
// Nested folders also return deleted delta results so we don't have to
// worry about doing a prefix search in the map to remove the subtree of
// the deleted folder/package.
delete(newPaths, itemID)
delete(newPrevPaths, itemID)
if prevPath == nil || invalidPrevDelta {
// It is possible that an item was created and deleted between two delta
@ -680,21 +689,29 @@ func (c *Collections) getCollectionPath(
// UpdateCollections initializes and adds the provided drive items to Collections
// A new collection is created for every drive folder (or package).
// oldPaths is the unchanged data that was loaded from the metadata file.
// newPaths starts as a copy of oldPaths and is updated as changes are found in
// the returned results.
// oldPrevPaths is the unchanged data that was loaded from the metadata file.
// This map is not modified during the call.
// currPrevPaths starts as a copy of oldPaths and is updated as changes are found in
// the returned results. Items are added to this collection throughout the call.
// newPrevPaths, ie: the items added during this call, get returned as a map.
func (c *Collections) UpdateCollections(
ctx context.Context,
driveID, driveName string,
items []models.DriveItemable,
oldPaths map[string]string,
newPaths map[string]string,
oldPrevPaths map[string]string,
currPrevPaths map[string]string,
excluded map[string]struct{},
itemCollection map[string]map[string]string,
invalidPrevDelta bool,
errs *fault.Bus,
) error {
el := errs.Local()
) (map[string]string, error) {
var (
el = errs.Local()
newPrevPaths = map[string]string{}
)
if !invalidPrevDelta {
maps.Copy(newPrevPaths, oldPrevPaths)
}
for _, item := range items {
if el.Failure() != nil {
@ -704,8 +721,12 @@ func (c *Collections) UpdateCollections(
var (
itemID = ptr.Val(item.GetId())
itemName = ptr.Val(item.GetName())
ictx = clues.Add(ctx, "item_id", itemID, "item_name", clues.Hide(itemName))
isFolder = item.GetFolder() != nil || item.GetPackageEscaped() != nil
ictx = clues.Add(
ctx,
"item_id", itemID,
"item_name", clues.Hide(itemName),
"item_is_folder", isFolder)
)
if item.GetMalware() != nil {
@ -727,13 +748,13 @@ func (c *Collections) UpdateCollections(
if err := c.handleDelete(
itemID,
driveID,
oldPaths,
newPaths,
oldPrevPaths,
currPrevPaths,
newPrevPaths,
isFolder,
excluded,
itemCollection,
invalidPrevDelta); err != nil {
return clues.Stack(err).WithClues(ictx)
return nil, clues.Stack(err).WithClues(ictx)
}
continue
@ -759,13 +780,13 @@ func (c *Collections) UpdateCollections(
// Deletions are handled above so this is just moves/renames.
var prevPath path.Path
prevPathStr, ok := oldPaths[itemID]
prevPathStr, ok := oldPrevPaths[itemID]
if ok {
prevPath, err = path.FromDataLayerPath(prevPathStr, false)
if err != nil {
el.AddRecoverable(ctx, clues.Wrap(err, "invalid previous path").
WithClues(ictx).
With("path_string", prevPathStr))
With("prev_path_string", path.LoggableDir(prevPathStr)))
}
} else if item.GetRoot() != nil {
// Root doesn't move or get renamed.
@ -775,11 +796,11 @@ func (c *Collections) UpdateCollections(
// Moved folders don't cause delta results for any subfolders nested in
// them. We need to go through and update paths to handle that. We only
// update newPaths so we don't accidentally clobber previous deletes.
updatePath(newPaths, itemID, collectionPath.String())
updatePath(newPrevPaths, itemID, collectionPath.String())
found, err := updateCollectionPaths(driveID, itemID, c.CollectionMap, collectionPath)
if err != nil {
return clues.Stack(err).WithClues(ictx)
return nil, clues.Stack(err).WithClues(ictx)
}
if found {
@ -803,7 +824,7 @@ func (c *Collections) UpdateCollections(
invalidPrevDelta,
nil)
if err != nil {
return clues.Stack(err).WithClues(ictx)
return nil, clues.Stack(err).WithClues(ictx)
}
col.driveName = driveName
@ -825,35 +846,38 @@ func (c *Collections) UpdateCollections(
case item.GetFile() != nil:
// Deletions are handled above so this is just moves/renames.
if len(ptr.Val(item.GetParentReference().GetId())) == 0 {
return clues.New("file without parent ID").WithClues(ictx)
return nil, clues.New("file without parent ID").WithClues(ictx)
}
// Get the collection for this item.
parentID := ptr.Val(item.GetParentReference().GetId())
ictx = clues.Add(ictx, "parent_id", parentID)
collection, found := c.CollectionMap[driveID][parentID]
if !found {
return clues.New("item seen before parent folder").WithClues(ictx)
collection, ok := c.CollectionMap[driveID][parentID]
if !ok {
return nil, clues.New("item seen before parent folder").WithClues(ictx)
}
// Delete the file from previous collection. This will
// only kick in if the file was moved multiple times
// within a single delta query
icID, found := itemCollection[driveID][itemID]
if found {
pcollection, found := c.CollectionMap[driveID][icID]
// This will only kick in if the file was moved multiple times
// within a single delta query. We delete the file from the previous
// collection so that it doesn't appear in two places.
prevParentContainerID, ok := currPrevPaths[itemID]
if ok {
prevColl, found := c.CollectionMap[driveID][prevParentContainerID]
if !found {
return clues.New("previous collection not found").WithClues(ictx)
return nil, clues.New("previous collection not found").
With("prev_parent_container_id", prevParentContainerID).
WithClues(ictx)
}
removed := pcollection.Remove(itemID)
if !removed {
return clues.New("removing from prev collection").WithClues(ictx)
if ok := prevColl.Remove(itemID); !ok {
return nil, clues.New("removing item from prev collection").
With("prev_parent_container_id", prevParentContainerID).
WithClues(ictx)
}
}
itemCollection[driveID][itemID] = parentID
currPrevPaths[itemID] = parentID
if collection.Add(item) {
c.NumItems++
@ -874,11 +898,13 @@ func (c *Collections) UpdateCollections(
}
default:
return clues.New("item type not supported").WithClues(ictx)
el.AddRecoverable(ictx, clues.New("item is neither folder nor file").
WithClues(ictx).
Label(fault.LabelForceNoBackupCreation))
}
}
return el.Failure()
return newPrevPaths, el.Failure()
}
type dirScopeChecker interface {

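The collections.go diff above is the consumer side of that interface: `Collections.Get` now makes one `EnumerateDriveItemsDelta` call per drive, and `UpdateCollections` receives the full item slice and returns the refreshed previous-path map instead of mutating a caller-owned copy. The following is an illustrative condensation of the new per-drive flow, assuming the surrounding `drive` package types; names mirror the diff, while tombstones, the URL cache, exclude-list handling, and logging are omitted.

```go
// processDriveSketch condenses the per-drive flow in the new Collections.Get.
// It is not part of the commit; error wrapping and several steps are omitted.
func (c *Collections) processDriveSketch(
	ctx context.Context,
	driveID, driveName string,
	oldPrevPaths map[string]string,
	prevDeltaLink string,
	driveIDToPrevPaths map[string]map[string]string,
	driveIDToDeltaLink map[string]string,
	errs *fault.Bus,
) error {
	// one call per drive returns every changed item plus the delta state
	items, du, err := c.handler.EnumerateDriveItemsDelta(ctx, driveID, prevDeltaLink)
	if err != nil {
		return err
	}

	if len(du.URL) > 0 {
		driveIDToDeltaLink[driveID] = du.URL
	}

	// UpdateCollections now returns the refreshed previous-path map rather
	// than mutating one passed in by the caller.
	newPrevPaths, err := c.UpdateCollections(
		ctx,
		driveID,
		driveName,
		items,
		oldPrevPaths,
		map[string]string{},   // item ID -> parent collection ID within this delta
		map[string]struct{}{}, // excluded item IDs, fed into the prefix matcher
		du.Reset,
		errs)
	if err != nil {
		return err
	}

	// the real code copies into a fresh map to cover the valid-delta-but-no-paths
	// edge case described in the diff
	driveIDToPrevPaths[driveID] = newPrevPaths

	return nil
}
```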
View File

@ -8,7 +8,6 @@ import (
"github.com/alcionai/clues"
"github.com/google/uuid"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
@ -138,7 +137,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedStatePath := getExpectedStatePathGenerator(suite.T(), bh, tenant, testBaseDrivePath)
tests := []struct {
testCase string
name string
items []models.DriveItemable
inputFolderMap map[string]string
scope selectors.OneDriveScope
@ -148,11 +147,11 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedContainerCount int
expectedFileCount int
expectedSkippedCount int
expectedMetadataPaths map[string]string
expectedPrevPaths map[string]string
expectedExcludes map[string]struct{}
}{
{
testCase: "Invalid item",
name: "Invalid item",
items: []models.DriveItemable{
driveRootItem("root"),
driveItem("item", "item", testBaseDrivePath, "root", false, false, false),
@ -164,13 +163,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
"root": expectedStatePath(data.NotMovedState, ""),
},
expectedContainerCount: 1,
expectedMetadataPaths: map[string]string{
expectedPrevPaths: map[string]string{
"root": expectedPath(""),
},
expectedExcludes: map[string]struct{}{},
},
{
testCase: "Single File",
name: "Single File",
items: []models.DriveItemable{
driveRootItem("root"),
driveItem("file", "file", testBaseDrivePath, "root", true, false, false),
@ -185,13 +184,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedFileCount: 1,
expectedContainerCount: 1,
// Root folder is skipped since it's always present.
expectedMetadataPaths: map[string]string{
expectedPrevPaths: map[string]string{
"root": expectedPath(""),
},
expectedExcludes: getDelList("file"),
},
{
testCase: "Single Folder",
name: "Single Folder",
items: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
@ -203,7 +202,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
"root": expectedStatePath(data.NotMovedState, ""),
"folder": expectedStatePath(data.NewState, folder),
},
expectedMetadataPaths: map[string]string{
expectedPrevPaths: map[string]string{
"root": expectedPath(""),
"folder": expectedPath("/folder"),
},
@ -212,7 +211,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: map[string]struct{}{},
},
{
testCase: "Single Package",
name: "Single Package",
items: []models.DriveItemable{
driveRootItem("root"),
driveItem("package", "package", testBaseDrivePath, "root", false, false, true),
@ -224,7 +223,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
"root": expectedStatePath(data.NotMovedState, ""),
"package": expectedStatePath(data.NewState, pkg),
},
expectedMetadataPaths: map[string]string{
expectedPrevPaths: map[string]string{
"root": expectedPath(""),
"package": expectedPath("/package"),
},
@ -233,7 +232,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: map[string]struct{}{},
},
{
testCase: "1 root file, 1 folder, 1 package, 2 files, 3 collections",
name: "1 root file, 1 folder, 1 package, 2 files, 3 collections",
items: []models.DriveItemable{
driveRootItem("root"),
driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false),
@ -253,7 +252,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 5,
expectedFileCount: 3,
expectedContainerCount: 3,
expectedMetadataPaths: map[string]string{
expectedPrevPaths: map[string]string{
"root": expectedPath(""),
"folder": expectedPath("/folder"),
"package": expectedPath("/package"),
@ -261,7 +260,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: getDelList("fileInRoot", "fileInFolder", "fileInPackage"),
},
{
testCase: "contains folder selector",
name: "contains folder selector",
items: []models.DriveItemable{
driveRootItem("root"),
driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false),
@ -286,7 +285,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedContainerCount: 3,
// just "folder" isn't added here because the include check is done on the
// parent path since we only check later if something is a folder or not.
expectedMetadataPaths: map[string]string{
expectedPrevPaths: map[string]string{
"folder": expectedPath(folder),
"subfolder": expectedPath(folderSub),
"folder2": expectedPath(folderSub + folder),
@ -294,7 +293,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: getDelList("fileInFolder", "fileInFolder2"),
},
{
testCase: "prefix subfolder selector",
name: "prefix subfolder selector",
items: []models.DriveItemable{
driveRootItem("root"),
driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false),
@ -317,14 +316,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 3,
expectedFileCount: 1,
expectedContainerCount: 2,
expectedMetadataPaths: map[string]string{
expectedPrevPaths: map[string]string{
"subfolder": expectedPath(folderSub),
"folder2": expectedPath(folderSub + folder),
},
expectedExcludes: getDelList("fileInFolder2"),
},
{
testCase: "match subfolder selector",
name: "match subfolder selector",
items: []models.DriveItemable{
driveRootItem("root"),
driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false),
@ -345,13 +344,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedFileCount: 1,
expectedContainerCount: 1,
// No child folders for subfolder so nothing here.
expectedMetadataPaths: map[string]string{
expectedPrevPaths: map[string]string{
"subfolder": expectedPath(folderSub),
},
expectedExcludes: getDelList("fileInSubfolder"),
},
{
testCase: "not moved folder tree",
name: "not moved folder tree",
items: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
@ -369,7 +368,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 1,
expectedFileCount: 0,
expectedContainerCount: 2,
expectedMetadataPaths: map[string]string{
expectedPrevPaths: map[string]string{
"root": expectedPath(""),
"folder": expectedPath(folder),
"subfolder": expectedPath(folderSub),
@ -377,7 +376,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: map[string]struct{}{},
},
{
testCase: "moved folder tree",
name: "moved folder tree",
items: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
@ -395,7 +394,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 1,
expectedFileCount: 0,
expectedContainerCount: 2,
expectedMetadataPaths: map[string]string{
expectedPrevPaths: map[string]string{
"root": expectedPath(""),
"folder": expectedPath(folder),
"subfolder": expectedPath(folderSub),
@ -403,7 +402,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: map[string]struct{}{},
},
{
testCase: "moved folder tree with file no previous",
name: "moved folder tree with file no previous",
items: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
@ -420,14 +419,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 2,
expectedFileCount: 1,
expectedContainerCount: 2,
expectedMetadataPaths: map[string]string{
expectedPrevPaths: map[string]string{
"root": expectedPath(""),
"folder": expectedPath("/folder2"),
},
expectedExcludes: getDelList("file"),
},
{
testCase: "moved folder tree with file no previous 1",
name: "moved folder tree with file no previous 1",
items: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
@ -443,14 +442,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 2,
expectedFileCount: 1,
expectedContainerCount: 2,
expectedMetadataPaths: map[string]string{
expectedPrevPaths: map[string]string{
"root": expectedPath(""),
"folder": expectedPath(folder),
},
expectedExcludes: getDelList("file"),
},
{
testCase: "moved folder tree and subfolder 1",
name: "moved folder tree and subfolder 1",
items: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
@ -470,7 +469,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 2,
expectedFileCount: 0,
expectedContainerCount: 3,
expectedMetadataPaths: map[string]string{
expectedPrevPaths: map[string]string{
"root": expectedPath(""),
"folder": expectedPath(folder),
"subfolder": expectedPath("/subfolder"),
@ -478,7 +477,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: map[string]struct{}{},
},
{
testCase: "moved folder tree and subfolder 2",
name: "moved folder tree and subfolder 2",
items: []models.DriveItemable{
driveRootItem("root"),
driveItem("subfolder", "subfolder", testBaseDrivePath, "root", false, true, false),
@ -498,7 +497,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 2,
expectedFileCount: 0,
expectedContainerCount: 3,
expectedMetadataPaths: map[string]string{
expectedPrevPaths: map[string]string{
"root": expectedPath(""),
"folder": expectedPath(folder),
"subfolder": expectedPath("/subfolder"),
@ -506,7 +505,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: map[string]struct{}{},
},
{
testCase: "move subfolder when moving parent",
name: "move subfolder when moving parent",
items: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder2", "folder2", testBaseDrivePath, "root", false, true, false),
@ -540,7 +539,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 5,
expectedFileCount: 2,
expectedContainerCount: 4,
expectedMetadataPaths: map[string]string{
expectedPrevPaths: map[string]string{
"root": expectedPath(""),
"folder": expectedPath("/folder"),
"folder2": expectedPath("/folder2"),
@ -549,7 +548,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: getDelList("itemInSubfolder", "itemInFolder2"),
},
{
testCase: "moved folder tree multiple times",
name: "moved folder tree multiple times",
items: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
@ -569,7 +568,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 2,
expectedFileCount: 1,
expectedContainerCount: 2,
expectedMetadataPaths: map[string]string{
expectedPrevPaths: map[string]string{
"root": expectedPath(""),
"folder": expectedPath("/folder2"),
"subfolder": expectedPath("/folder2/subfolder"),
@ -577,7 +576,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: getDelList("file"),
},
{
testCase: "deleted folder and package",
name: "deleted folder and package",
items: []models.DriveItemable{
driveRootItem("root"), // root is always present, but not necessary here
delItem("folder", testBaseDrivePath, "root", false, true, false),
@ -598,13 +597,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 0,
expectedFileCount: 0,
expectedContainerCount: 1,
expectedMetadataPaths: map[string]string{
expectedPrevPaths: map[string]string{
"root": expectedPath(""),
},
expectedExcludes: map[string]struct{}{},
},
{
testCase: "delete folder without previous",
name: "delete folder without previous",
items: []models.DriveItemable{
driveRootItem("root"),
delItem("folder", testBaseDrivePath, "root", false, true, false),
@ -620,13 +619,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 0,
expectedFileCount: 0,
expectedContainerCount: 1,
expectedMetadataPaths: map[string]string{
expectedPrevPaths: map[string]string{
"root": expectedPath(""),
},
expectedExcludes: map[string]struct{}{},
},
{
testCase: "delete folder tree move subfolder",
name: "delete folder tree move subfolder",
items: []models.DriveItemable{
driveRootItem("root"),
delItem("folder", testBaseDrivePath, "root", false, true, false),
@ -647,14 +646,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 1,
expectedFileCount: 0,
expectedContainerCount: 2,
expectedMetadataPaths: map[string]string{
expectedPrevPaths: map[string]string{
"root": expectedPath(""),
"subfolder": expectedPath("/subfolder"),
},
expectedExcludes: map[string]struct{}{},
},
{
testCase: "delete file",
name: "delete file",
items: []models.DriveItemable{
driveRootItem("root"),
delItem("item", testBaseDrivePath, "root", true, false, false),
@ -670,13 +669,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 1,
expectedFileCount: 1,
expectedContainerCount: 1,
expectedMetadataPaths: map[string]string{
expectedPrevPaths: map[string]string{
"root": expectedPath(""),
},
expectedExcludes: getDelList("item"),
},
{
testCase: "item before parent errors",
name: "item before parent errors",
items: []models.DriveItemable{
driveRootItem("root"),
driveItem("file", "file", testBaseDrivePath+"/folder", "folder", true, false, false),
@ -691,13 +690,11 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 0,
expectedFileCount: 0,
expectedContainerCount: 1,
expectedMetadataPaths: map[string]string{
"root": expectedPath(""),
},
expectedExcludes: map[string]struct{}{},
expectedPrevPaths: nil,
expectedExcludes: map[string]struct{}{},
},
{
testCase: "1 root file, 1 folder, 1 package, 1 good file, 1 malware",
name: "1 root file, 1 folder, 1 package, 1 good file, 1 malware",
items: []models.DriveItemable{
driveRootItem("root"),
driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false),
@ -718,7 +715,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedFileCount: 2,
expectedContainerCount: 3,
expectedSkippedCount: 1,
expectedMetadataPaths: map[string]string{
expectedPrevPaths: map[string]string{
"root": expectedPath(""),
"folder": expectedPath("/folder"),
"package": expectedPath("/package"),
@ -727,26 +724,23 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
},
}
for _, tt := range tests {
suite.Run(tt.testCase, func() {
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var (
excludes = map[string]struct{}{}
outputFolderMap = map[string]string{}
itemCollection = map[string]map[string]string{
driveID: {},
}
errs = fault.New(true)
excludes = map[string]struct{}{}
currPrevPaths = map[string]string{}
errs = fault.New(true)
)
maps.Copy(outputFolderMap, tt.inputFolderMap)
maps.Copy(currPrevPaths, test.inputFolderMap)
c := NewCollections(
&itemBackupHandler{api.Drives{}, user, tt.scope},
&itemBackupHandler{api.Drives{}, user, test.scope},
tenant,
idname.NewProvider(user, user),
nil,
@ -754,25 +748,24 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
c.CollectionMap[driveID] = map[string]*Collection{}
err := c.UpdateCollections(
newPrevPaths, err := c.UpdateCollections(
ctx,
driveID,
"General",
tt.items,
tt.inputFolderMap,
outputFolderMap,
test.items,
test.inputFolderMap,
currPrevPaths,
excludes,
itemCollection,
false,
errs)
tt.expect(t, err, clues.ToCore(err))
assert.Equal(t, len(tt.expectedCollectionIDs), len(c.CollectionMap[driveID]), "total collections")
assert.Equal(t, tt.expectedItemCount, c.NumItems, "item count")
assert.Equal(t, tt.expectedFileCount, c.NumFiles, "file count")
assert.Equal(t, tt.expectedContainerCount, c.NumContainers, "container count")
assert.Equal(t, tt.expectedSkippedCount, len(errs.Skipped()), "skipped items")
test.expect(t, err, clues.ToCore(err))
assert.Equal(t, len(test.expectedCollectionIDs), len(c.CollectionMap[driveID]), "total collections")
assert.Equal(t, test.expectedItemCount, c.NumItems, "item count")
assert.Equal(t, test.expectedFileCount, c.NumFiles, "file count")
assert.Equal(t, test.expectedContainerCount, c.NumContainers, "container count")
assert.Equal(t, test.expectedSkippedCount, len(errs.Skipped()), "skipped items")
for id, sp := range tt.expectedCollectionIDs {
for id, sp := range test.expectedCollectionIDs {
if !assert.Containsf(t, c.CollectionMap[driveID], id, "missing collection with id %s", id) {
// Skip collections we don't find so we don't get an NPE.
continue
@ -783,8 +776,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
assert.Equalf(t, sp.prevPath, c.CollectionMap[driveID][id].PreviousPath(), "prev path for collection %s", id)
}
assert.Equal(t, tt.expectedMetadataPaths, outputFolderMap, "metadata paths")
assert.Equal(t, tt.expectedExcludes, excludes, "exclude list")
assert.Equal(t, test.expectedPrevPaths, newPrevPaths, "metadata paths")
assert.Equal(t, test.expectedExcludes, excludes, "exclude list")
})
}
}
@ -1306,7 +1299,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
},
DeltaLink: &delta,
DeltaLink: &delta,
ResetDelta: true,
},
},
},
@ -1344,7 +1338,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
driveItem("file", "file2", driveBasePath1+"/folder", "folder", true, false, false),
},
DeltaLink: &delta,
DeltaLink: &delta,
ResetDelta: true,
},
},
},
@ -1421,7 +1416,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
},
DeltaLink: &empty, // probably will never happen with graph
DeltaLink: &empty, // probably will never happen with graph
ResetDelta: true,
},
},
},
@ -1458,7 +1454,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
},
NextLink: &next,
NextLink: &next,
ResetDelta: true,
},
{
Values: []models.DriveItemable{
@ -1466,7 +1463,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file2", "file2", driveBasePath1+"/folder", "folder", true, false, false),
},
DeltaLink: &delta,
DeltaLink: &delta,
ResetDelta: true,
},
},
},
@ -1508,7 +1506,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
},
DeltaLink: &delta,
DeltaLink: &delta,
ResetDelta: true,
},
},
driveID2: {
@ -1518,7 +1517,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("folder2", "folder", driveBasePath2, "root2", false, true, false),
driveItem("file2", "file", driveBasePath2+"/folder", "folder2", true, false, false),
},
DeltaLink: &delta2,
DeltaLink: &delta2,
ResetDelta: true,
},
},
},
@ -1570,7 +1570,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
},
DeltaLink: &delta,
DeltaLink: &delta,
ResetDelta: true,
},
},
driveID2: {
@ -1580,7 +1581,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("folder", "folder", driveBasePath2, "root", false, true, false),
driveItem("file2", "file", driveBasePath2+"/folder", "folder", true, false, false),
},
DeltaLink: &delta2,
DeltaLink: &delta2,
ResetDelta: true,
},
},
},
@ -1638,87 +1640,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
expectedFolderPaths: nil,
expectedDelList: nil,
},
{
name: "OneDrive_OneItemPage_DeltaError",
drives: []models.Driveable{drive1},
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
Err: getDeltaError(),
},
{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("file", "file", driveBasePath1, "root", true, false, false),
},
DeltaLink: &delta,
},
},
},
canUsePreviousBackup: true,
errCheck: assert.NoError,
expectedCollections: map[string]map[data.CollectionState][]string{
rootFolderPath1: {data.NotMovedState: {"file"}},
},
expectedDeltaURLs: map[string]string{
driveID1: delta,
},
expectedFolderPaths: map[string]map[string]string{
driveID1: {
"root": rootFolderPath1,
},
},
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
doNotMergeItems: map[string]bool{
rootFolderPath1: true,
},
},
{
name: "OneDrive_TwoItemPage_DeltaError",
drives: []models.Driveable{drive1},
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
Err: getDeltaError(),
},
{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("file", "file", driveBasePath1, "root", true, false, false),
},
NextLink: &next,
},
{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file2", "file", driveBasePath1+"/folder", "folder", true, false, false),
},
DeltaLink: &delta,
},
},
},
canUsePreviousBackup: true,
errCheck: assert.NoError,
expectedCollections: map[string]map[data.CollectionState][]string{
rootFolderPath1: {data.NotMovedState: {"file"}},
expectedPath1("/folder"): {data.NewState: {"folder", "file2"}},
},
expectedDeltaURLs: map[string]string{
driveID1: delta,
},
expectedFolderPaths: map[string]map[string]string{
driveID1: {
"root": rootFolderPath1,
"folder": folderPath1,
},
},
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
doNotMergeItems: map[string]bool{
rootFolderPath1: true,
folderPath1: true,
},
},
{
name: "OneDrive_TwoItemPage_NoDeltaError",
drives: []models.Driveable{drive1},
@ -1771,16 +1692,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
drives: []models.Driveable{drive1},
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
Err: getDeltaError(),
},
{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder2", "folder2", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder2", "folder2", true, false, false),
},
DeltaLink: &delta,
DeltaLink: &delta,
ResetDelta: true,
},
},
},
@ -1818,16 +1737,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
drives: []models.Driveable{drive1},
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
Err: getDeltaError(),
},
{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder2", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder2", true, false, false),
},
DeltaLink: &delta,
DeltaLink: &delta,
ResetDelta: true,
},
},
},
@ -1884,7 +1801,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("file2", "file2", driveBasePath1+"/folder", "folder", true, false, false),
malwareItem("malware2", "malware2", driveBasePath1+"/folder", "folder", true, false, false),
},
DeltaLink: &delta,
DeltaLink: &delta,
ResetDelta: true,
},
},
},
@ -1914,13 +1832,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
expectedSkippedCount: 2,
},
{
name: "One Drive Delta Error Deleted Folder In New Results",
name: "One Drive Deleted Folder In New Results",
drives: []models.Driveable{drive1},
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
Err: getDeltaError(),
},
{
Values: []models.DriveItemable{
driveRootItem("root"),
@ -1937,7 +1852,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
delItem("folder2", driveBasePath1, "root", false, true, false),
delItem("file2", driveBasePath1, "root", true, false, false),
},
DeltaLink: &delta2,
DeltaLink: &delta2,
ResetDelta: true,
},
},
},
@ -1972,19 +1888,17 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
},
},
{
name: "One Drive Delta Error Random Folder Delete",
name: "One Drive Random Folder Delete",
drives: []models.Driveable{drive1},
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
Err: getDeltaError(),
},
{
Values: []models.DriveItemable{
driveRootItem("root"),
delItem("folder", driveBasePath1, "root", false, true, false),
},
DeltaLink: &delta,
DeltaLink: &delta,
ResetDelta: true,
},
},
},
@ -2015,19 +1929,17 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
},
},
{
name: "One Drive Delta Error Random Item Delete",
name: "One Drive Random Item Delete",
drives: []models.Driveable{drive1},
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
Err: getDeltaError(),
},
{
Values: []models.DriveItemable{
driveRootItem("root"),
delItem("file", driveBasePath1, "root", true, false, false),
},
DeltaLink: &delta,
DeltaLink: &delta,
ResetDelta: true,
},
},
},
@ -2073,7 +1985,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
delItem("folder", driveBasePath1, "root", false, true, false),
delItem("file", driveBasePath1, "root", true, false, false),
},
DeltaLink: &delta2,
DeltaLink: &delta2,
ResetDelta: true,
},
},
},
@ -2116,7 +2029,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveRootItem("root"),
delItem("file", driveBasePath1, "root", true, false, false),
},
DeltaLink: &delta,
DeltaLink: &delta,
ResetDelta: true,
},
},
},
@ -2154,7 +2068,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveRootItem("root"),
delItem("folder", driveBasePath1, "root", false, true, false),
},
DeltaLink: &delta,
DeltaLink: &delta,
ResetDelta: true,
},
},
},
@ -2189,7 +2104,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveRootItem("root"),
delItem("file", driveBasePath1, "root", true, false, false),
},
DeltaLink: &delta,
DeltaLink: &delta,
ResetDelta: true,
},
},
},
@ -2271,6 +2187,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
mbh := mock.DefaultOneDriveBH("a-user")
mbh.DrivePagerV = mockDrivePager
mbh.ItemPagerV = itemPagers
mbh.DriveItemEnumeration = mock.PagerResultToEDID(test.items)
c := NewCollections(
mbh,
@ -2501,121 +2418,6 @@ func delItem(
return item
}
func getDeltaError() error {
syncStateNotFound := "SyncStateNotFound"
me := odataerrors.NewMainError()
me.SetCode(&syncStateNotFound)
deltaError := odataerrors.NewODataError()
deltaError.SetErrorEscaped(me)
return deltaError
}
func (suite *OneDriveCollectionsUnitSuite) TestCollectItems() {
next := "next"
delta := "delta"
prevDelta := "prev-delta"
table := []struct {
name string
items []apiMock.PagerResult[models.DriveItemable]
deltaURL string
prevDeltaSuccess bool
prevDelta string
err error
}{
{
name: "delta on first run",
deltaURL: delta,
items: []apiMock.PagerResult[models.DriveItemable]{
{DeltaLink: &delta},
},
prevDeltaSuccess: true,
prevDelta: prevDelta,
},
{
name: "empty prev delta",
deltaURL: delta,
items: []apiMock.PagerResult[models.DriveItemable]{
{DeltaLink: &delta},
},
prevDeltaSuccess: false,
prevDelta: "",
},
{
name: "next then delta",
deltaURL: delta,
items: []apiMock.PagerResult[models.DriveItemable]{
{NextLink: &next},
{DeltaLink: &delta},
},
prevDeltaSuccess: true,
prevDelta: prevDelta,
},
{
name: "invalid prev delta",
deltaURL: delta,
items: []apiMock.PagerResult[models.DriveItemable]{
{Err: getDeltaError()},
{DeltaLink: &delta}, // works on retry
},
prevDelta: prevDelta,
prevDeltaSuccess: false,
},
{
name: "fail a normal delta query",
items: []apiMock.PagerResult[models.DriveItemable]{
{NextLink: &next},
{Err: assert.AnError},
},
prevDelta: prevDelta,
prevDeltaSuccess: true,
err: assert.AnError,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
itemPager := &apiMock.DeltaPager[models.DriveItemable]{
ToReturn: test.items,
}
collectorFunc := func(
ctx context.Context,
driveID, driveName string,
driveItems []models.DriveItemable,
oldPaths map[string]string,
newPaths map[string]string,
excluded map[string]struct{},
itemCollection map[string]map[string]string,
doNotMergeItems bool,
errs *fault.Bus,
) error {
return nil
}
delta, _, _, err := collectItems(
ctx,
itemPager,
"",
"General",
collectorFunc,
map[string]string{},
test.prevDelta,
fault.New(true))
require.ErrorIs(t, err, test.err, "delta fetch err", clues.ToCore(err))
require.Equal(t, test.deltaURL, delta.URL, "delta url")
require.Equal(t, !test.prevDeltaSuccess, delta.Reset, "delta reset")
})
}
}
func (suite *OneDriveCollectionsUnitSuite) TestAddURLCacheToDriveCollections() {
driveID := "test-drive"
collCount := 3

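In the unit tests above, the per-page delta pager mock and the getDeltaError-then-retry pattern are replaced by `mock.PagerResultToEDID(test.items)` plus a `ResetDelta` flag on each page. A hypothetical helper in the new style might look like the following; `driveRootItem`, `driveItem`, `testBaseDrivePath`, and `apiMock.PagerResult` are the existing test helpers and mock types visible in this diff, while the helper function itself is illustrative.

```go
// examplePagerResults builds the page set for one drive in the new test style:
// ResetDelta on the page replaces the old pattern of returning getDeltaError()
// and retrying. The result would be handed to the backup-handler mock via
// mock.PagerResultToEDID, as in the TestGet setup above.
func examplePagerResults(
	driveID, deltaLink string,
) map[string][]apiMock.PagerResult[models.DriveItemable] {
	return map[string][]apiMock.PagerResult[models.DriveItemable]{
		driveID: {
			{
				Values: []models.DriveItemable{
					driveRootItem("root"),
					driveItem("file", "file", testBaseDrivePath, "root", true, false, false),
				},
				DeltaLink:  &deltaLink,
				ResetDelta: true, // previous delta link treated as invalid
			},
		},
	}
}
```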
View File

@ -38,6 +38,7 @@ type BackupHandler interface {
GetItemPermissioner
GetItemer
NewDrivePagerer
EnumerateDriveItemsDeltaer
// PathPrefix constructs the service and category specific path prefix for
// the given values.
@ -52,7 +53,7 @@ type BackupHandler interface {
// ServiceCat returns the service and category used by this implementation.
ServiceCat() (path.ServiceType, path.CategoryType)
NewItemPager(driveID, link string, fields []string) api.DeltaPager[models.DriveItemable]
// FormatDisplayPath creates a human-readable string to represent the
// provided path.
FormatDisplayPath(driveName string, parentPath *path.Builder) string
@ -81,6 +82,17 @@ type GetItemer interface {
) (models.DriveItemable, error)
}
type EnumerateDriveItemsDeltaer interface {
EnumerateDriveItemsDelta(
ctx context.Context,
driveID, prevDeltaLink string,
) (
[]models.DriveItemable,
api.DeltaUpdate,
error,
)
}
// ---------------------------------------------------------------------------
// restore
// ---------------------------------------------------------------------------

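The `EnumerateDriveItemsDeltaer` interface introduced here is intentionally narrow, which makes it easy to fake. A minimal hypothetical stub (the commit's own test double is `mock.PagerResultToEDID`, used in the test diff above) could look like the following, reusing the `context`, `models`, and `api` imports already present in this file.

```go
// stubEnumerator is a hypothetical minimal EnumerateDriveItemsDeltaer for
// focused unit tests: it returns canned items, a canned api.DeltaUpdate, and
// an optional error, ignoring the drive ID and previous delta link.
type stubEnumerator struct {
	items []models.DriveItemable
	du    api.DeltaUpdate
	err   error
}

func (s stubEnumerator) EnumerateDriveItemsDelta(
	_ context.Context,
	_, _ string,
) ([]models.DriveItemable, api.DeltaUpdate, error) {
	return s.items, s.du, s.err
}
```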
View File

@ -1,142 +0,0 @@
package drive
import (
"context"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// DeltaUpdate holds the results of a current delta token. It normally
// gets produced when aggregating the addition and removal of items in
// a delta-queryable folder.
// FIXME: This is same as exchange.api.DeltaUpdate
type DeltaUpdate struct {
// the deltaLink itself
URL string
// true if the old delta was marked as invalid
Reset bool
}
// itemCollector functions collect the items found in a drive
type itemCollector func(
ctx context.Context,
driveID, driveName string,
driveItems []models.DriveItemable,
oldPaths map[string]string,
newPaths map[string]string,
excluded map[string]struct{},
itemCollections map[string]map[string]string,
validPrevDelta bool,
errs *fault.Bus,
) error
// collectItems will enumerate all items in the specified drive and hand them to the
// provided `collector` method
func collectItems(
ctx context.Context,
pager api.DeltaPager[models.DriveItemable],
driveID, driveName string,
collector itemCollector,
oldPaths map[string]string,
prevDelta string,
errs *fault.Bus,
) (
DeltaUpdate,
map[string]string, // newPaths
map[string]struct{}, // excluded
error,
) {
var (
newDeltaURL = ""
newPaths = map[string]string{}
excluded = map[string]struct{}{}
invalidPrevDelta = len(prevDelta) == 0
// itemCollection is used to identify which collection a
// file belongs to. This is useful to delete a file from the
// collection it was previously in, in case it was moved to a
// different collection within the same delta query
// drive ID -> item ID -> item ID
itemCollection = map[string]map[string]string{
driveID: {},
}
)
if !invalidPrevDelta {
maps.Copy(newPaths, oldPaths)
pager.SetNextLink(prevDelta)
}
for {
// assume delta urls here, which allows single-token consumption
page, err := pager.GetPage(graph.ConsumeNTokens(ctx, graph.SingleGetOrDeltaLC))
if graph.IsErrInvalidDelta(err) {
logger.Ctx(ctx).Infow("Invalid previous delta link", "link", prevDelta)
invalidPrevDelta = true
newPaths = map[string]string{}
pager.Reset(ctx)
continue
}
if err != nil {
return DeltaUpdate{}, nil, nil, graph.Wrap(ctx, err, "getting page")
}
vals := page.GetValue()
err = collector(
ctx,
driveID,
driveName,
vals,
oldPaths,
newPaths,
excluded,
itemCollection,
invalidPrevDelta,
errs)
if err != nil {
return DeltaUpdate{}, nil, nil, err
}
nextLink, deltaLink := api.NextAndDeltaLink(page)
if len(deltaLink) > 0 {
newDeltaURL = deltaLink
}
// Check if there are more items
if len(nextLink) == 0 {
break
}
logger.Ctx(ctx).Debugw("Found nextLink", "link", nextLink)
pager.SetNextLink(nextLink)
}
return DeltaUpdate{URL: newDeltaURL, Reset: invalidPrevDelta}, newPaths, excluded, nil
}
// newItem initializes a `models.DriveItemable` that can be used as input to `createItem`
func newItem(name string, folder bool) *models.DriveItem {
itemToCreate := models.NewDriveItem()
itemToCreate.SetName(&name)
if folder {
itemToCreate.SetFolder(models.NewFolder())
} else {
itemToCreate.SetFile(models.NewFile())
}
return itemToCreate
}

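For reference, the page-walking logic that the deleted `collectItems` implemented, and that `EnumerateDriveItemsDelta` now encapsulates, reduces to the accumulate-and-return loop below. This is a condensed restatement of the removed code above (rate-limit token consumption and per-page logging omitted), not the new implementation in pkg/services/m365/api.

```go
// paginateDelta walks a drive's delta pages, following nextLinks until a
// deltaLink appears. If the previous delta token is rejected mid-walk, the
// pager is reset and enumeration restarts from scratch with Reset=true.
func paginateDelta(
	ctx context.Context,
	pager api.DeltaPager[models.DriveItemable],
	prevDelta string,
) ([]models.DriveItemable, DeltaUpdate, error) {
	var (
		items       []models.DriveItemable
		newDeltaURL string
		reset       = len(prevDelta) == 0
	)

	if !reset {
		pager.SetNextLink(prevDelta)
	}

	for {
		page, err := pager.GetPage(ctx)
		if graph.IsErrInvalidDelta(err) {
			// the previous delta token is no longer valid: drop what was
			// gathered so far and re-enumerate the whole drive
			reset = true
			items = nil

			pager.Reset(ctx)

			continue
		}

		if err != nil {
			return nil, DeltaUpdate{}, graph.Wrap(ctx, err, "getting page")
		}

		items = append(items, page.GetValue()...)

		nextLink, deltaLink := api.NextAndDeltaLink(page)
		if len(deltaLink) > 0 {
			newDeltaURL = deltaLink
		}

		if len(nextLink) == 0 {
			break
		}

		pager.SetNextLink(nextLink)
	}

	return items, DeltaUpdate{URL: newDeltaURL, Reset: reset}, nil
}
```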
View File

@ -88,13 +88,6 @@ func (h itemBackupHandler) NewDrivePager(
return h.ac.NewUserDrivePager(resourceOwner, fields)
}
func (h itemBackupHandler) NewItemPager(
driveID, link string,
fields []string,
) api.DeltaPager[models.DriveItemable] {
return h.ac.NewDriveItemDeltaPager(driveID, link, fields)
}
func (h itemBackupHandler) AugmentItemInfo(
dii details.ItemInfo,
resource idname.Provider,
@ -141,6 +134,13 @@ func (h itemBackupHandler) IncludesDir(dir string) bool {
return h.scope.Matches(selectors.OneDriveFolder, dir)
}
func (h itemBackupHandler) EnumerateDriveItemsDelta(
ctx context.Context,
driveID, prevDeltaLink string,
) ([]models.DriveItemable, api.DeltaUpdate, error) {
return h.ac.EnumerateDriveItemsDelta(ctx, driveID, prevDeltaLink)
}
// ---------------------------------------------------------------------------
// Restore
// ---------------------------------------------------------------------------

View File

@ -16,14 +16,11 @@ import (
"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
@ -64,125 +61,6 @@ func (suite *ItemIntegrationSuite) SetupSuite() {
suite.userDriveID = ptr.Val(odDrives[0].GetId())
}
func getOneDriveItem(
ctx context.Context,
t *testing.T,
ac api.Client,
driveID string,
) models.DriveItemable {
var driveItem models.DriveItemable
// file to test the reader function
itemCollector := func(
_ context.Context,
_, _ string,
items []models.DriveItemable,
_ map[string]string,
_ map[string]string,
_ map[string]struct{},
_ map[string]map[string]string,
_ bool,
_ *fault.Bus,
) error {
if driveItem != nil {
return nil
}
for _, item := range items {
if item.GetFile() != nil && ptr.Val(item.GetSize()) > 0 {
driveItem = item
break
}
}
return nil
}
ip := ac.
Drives().
NewDriveItemDeltaPager(driveID, "", api.DriveItemSelectDefault())
_, _, _, err := collectItems(
ctx,
ip,
driveID,
"General",
itemCollector,
map[string]string{},
"",
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
return driveItem
}
// TestItemReader is an integration test that makes a few assumptions
// about the test environment
// 1) It assumes the test user has a drive
// 2) It assumes the drive has a file it can use to test `driveItemReader`
// The test checks these in below
func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
driveItem := getOneDriveItem(ctx, t, suite.service.ac, suite.userDriveID)
// Test Requirement 2: Need a file
require.NotEmpty(
t,
driveItem,
"no file item found for user %s drive %s",
suite.user,
suite.userDriveID)
bh := itemBackupHandler{
suite.service.ac.Drives(),
suite.user,
(&selectors.OneDriveBackup{}).Folders(selectors.Any())[0],
}
// Read data for the file
itemData, err := downloadItem(ctx, bh, driveItem)
require.NoError(t, err, clues.ToCore(err))
size, err := io.Copy(io.Discard, itemData)
require.NoError(t, err, clues.ToCore(err))
require.NotZero(t, size)
}
// In prod we consider any errors in isURLExpired as non-fatal and carry on
// with the download. This is a regression test to make sure we keep track
// of any graph changes to the download url scheme, including how graph
// embeds the jwt token.
func (suite *ItemIntegrationSuite) TestIsURLExpired() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
driveItem := getOneDriveItem(ctx, t, suite.service.ac, suite.userDriveID)
require.NotEmpty(
t,
driveItem,
"no file item found for user %s drive %s",
suite.user,
suite.userDriveID)
var url string
for _, key := range downloadURLKeys {
if v, err := str.AnyValueToString(key, driveItem.GetAdditionalData()); err == nil {
url = v
break
}
}
expired, err := isURLExpired(ctx, url)
require.NoError(t, err, clues.ToCore(err))
require.False(t, expired)
}
// TestItemWriter is an integration test for uploading data to OneDrive
// It creates a new folder with a new item and writes data to it
func (suite *ItemIntegrationSuite) TestItemWriter() {
@ -217,7 +95,7 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
ctx,
test.driveID,
ptr.Val(root.GetId()),
newItem(newFolderName, true),
api.NewDriveItem(newFolderName, true),
control.Copy)
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, newFolder.GetId())
@ -229,7 +107,7 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
ctx,
test.driveID,
ptr.Val(newFolder.GetId()),
newItem(newItemName, false),
api.NewDriveItem(newItemName, false),
control.Copy)
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, newItem.GetId())
@ -363,7 +241,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
{
name: "success",
itemFunc: func() models.DriveItemable {
di := newItem("test", false)
di := api.NewDriveItem("test", false)
di.SetAdditionalData(map[string]any{
"@microsoft.graph.downloadUrl": url,
})
@ -382,7 +260,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
{
name: "success, content url set instead of download url",
itemFunc: func() models.DriveItemable {
di := newItem("test", false)
di := api.NewDriveItem("test", false)
di.SetAdditionalData(map[string]any{
"@content.downloadUrl": url,
})
@ -401,7 +279,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
{
name: "api getter returns error",
itemFunc: func() models.DriveItemable {
di := newItem("test", false)
di := api.NewDriveItem("test", false)
di.SetAdditionalData(map[string]any{
"@microsoft.graph.downloadUrl": url,
})
@ -417,7 +295,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
{
name: "download url is empty",
itemFunc: func() models.DriveItemable {
di := newItem("test", false)
di := api.NewDriveItem("test", false)
return di
},
GetFunc: func(ctx context.Context, url string) (*http.Response, error) {
@ -432,7 +310,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
{
name: "malware",
itemFunc: func() models.DriveItemable {
di := newItem("test", false)
di := api.NewDriveItem("test", false)
di.SetAdditionalData(map[string]any{
"@microsoft.graph.downloadUrl": url,
})
@ -454,7 +332,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
{
name: "non-2xx http response",
itemFunc: func() models.DriveItemable {
di := newItem("test", false)
di := api.NewDriveItem("test", false)
di.SetAdditionalData(map[string]any{
"@microsoft.graph.downloadUrl": url,
})
@ -503,7 +381,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem_ConnectionResetErrorOnFirstRead
url = "https://example.com"
itemFunc = func() models.DriveItemable {
di := newItem("test", false)
di := api.NewDriveItem("test", false)
di.SetAdditionalData(map[string]any{
"@microsoft.graph.downloadUrl": url,
})

View File

@ -91,13 +91,6 @@ func (h libraryBackupHandler) NewDrivePager(
return h.ac.NewSiteDrivePager(resourceOwner, fields)
}
func (h libraryBackupHandler) NewItemPager(
driveID, link string,
fields []string,
) api.DeltaPager[models.DriveItemable] {
return h.ac.NewDriveItemDeltaPager(driveID, link, fields)
}
func (h libraryBackupHandler) AugmentItemInfo(
dii details.ItemInfo,
resource idname.Provider,
@ -144,6 +137,13 @@ func (h libraryBackupHandler) IncludesDir(dir string) bool {
return h.scope.Matches(selectors.SharePointLibraryFolder, dir)
}
func (h libraryBackupHandler) EnumerateDriveItemsDelta(
ctx context.Context,
driveID, prevDeltaLink string,
) ([]models.DriveItemable, api.DeltaUpdate, error) {
return h.ac.EnumerateDriveItemsDelta(ctx, driveID, prevDeltaLink)
}
// ---------------------------------------------------------------------------
// Restore
// ---------------------------------------------------------------------------

View File

@ -671,7 +671,7 @@ func createFolder(
ctx,
driveID,
parentFolderID,
newItem(folderName, true),
api.NewDriveItem(folderName, true),
control.Replace)
// ErrItemAlreadyExistsConflict can only occur for folders if the
@ -692,7 +692,7 @@ func createFolder(
ctx,
driveID,
parentFolderID,
newItem(folderName, true),
api.NewDriveItem(folderName, true),
control.Copy)
if err != nil {
return nil, clues.Wrap(err, "creating folder")
@ -733,7 +733,7 @@ func restoreFile(
}
var (
item = newItem(name, false)
item = api.NewDriveItem(name, false)
collisionKey = api.DriveItemCollisionKey(item)
collision api.DriveItemIDType
shouldDeleteOriginal bool

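The `newItem` helper deleted with pager.go is replaced at these call sites by `api.NewDriveItem`. Assuming it kept the removed helper's behavior (the api-side definition is not shown in this diff), it amounts to:

```go
// NewDriveItem mirrors the removed drive.newItem helper shown earlier in this
// diff: build a DriveItem with the given name and either a folder or a file
// facet, ready for createItem/PostItemInContainer calls. The real
// api.NewDriveItem may differ in detail.
func NewDriveItem(name string, folder bool) *models.DriveItem {
	itemToCreate := models.NewDriveItem()
	itemToCreate.SetName(&name)

	if folder {
		itemToCreate.SetFolder(models.NewFolder())
	} else {
		itemToCreate.SetFile(models.NewFile())
	}

	return itemToCreate
}
```

As the restore diff above shows, `restoreFile` then derives a collision key from the constructed item via `api.DriveItemCollisionKey` before deciding whether to replace or copy the original.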
View File

@ -12,7 +12,6 @@ import (
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
const (
@ -47,7 +46,7 @@ type urlCache struct {
refreshMu sync.Mutex
deltaQueryCount int
itemPager api.DeltaPager[models.DriveItemable]
edid EnumerateDriveItemsDeltaer
errs *fault.Bus
}
@ -56,13 +55,10 @@ type urlCache struct {
func newURLCache(
driveID, prevDelta string,
refreshInterval time.Duration,
itemPager api.DeltaPager[models.DriveItemable],
edid EnumerateDriveItemsDeltaer,
errs *fault.Bus,
) (*urlCache, error) {
err := validateCacheParams(
driveID,
refreshInterval,
itemPager)
err := validateCacheParams(driveID, refreshInterval, edid)
if err != nil {
return nil, clues.Wrap(err, "cache params")
}
@ -71,9 +67,9 @@ func newURLCache(
idToProps: make(map[string]itemProps),
lastRefreshTime: time.Time{},
driveID: driveID,
edid: edid,
prevDelta: prevDelta,
refreshInterval: refreshInterval,
itemPager: itemPager,
errs: errs,
},
nil
@ -83,7 +79,7 @@ func newURLCache(
func validateCacheParams(
driveID string,
refreshInterval time.Duration,
itemPager api.DeltaPager[models.DriveItemable],
edid EnumerateDriveItemsDeltaer,
) error {
if len(driveID) == 0 {
return clues.New("drive id is empty")
@ -93,8 +89,8 @@ func validateCacheParams(
return clues.New("invalid refresh interval")
}
if itemPager == nil {
return clues.New("nil item pager")
if edid == nil {
return clues.New("nil item enumerator")
}
return nil
@ -160,44 +156,23 @@ func (uc *urlCache) refreshCache(
// Issue a delta query to graph
logger.Ctx(ctx).Info("refreshing url cache")
err := uc.deltaQuery(ctx)
items, du, err := uc.edid.EnumerateDriveItemsDelta(ctx, uc.driveID, uc.prevDelta)
if err != nil {
// clear cache
uc.idToProps = make(map[string]itemProps)
return clues.Stack(err)
}
return err
uc.deltaQueryCount++
if err := uc.updateCache(ctx, items, uc.errs); err != nil {
return clues.Stack(err)
}
logger.Ctx(ctx).Info("url cache refreshed")
// Update last refresh time
uc.lastRefreshTime = time.Now()
return nil
}
// deltaQuery performs a delta query on the drive and updates the cache
func (uc *urlCache) deltaQuery(
ctx context.Context,
) error {
logger.Ctx(ctx).Debug("starting delta query")
// Reset item pager to remove any previous state
uc.itemPager.Reset(ctx)
_, _, _, err := collectItems(
ctx,
uc.itemPager,
uc.driveID,
"",
uc.updateCache,
map[string]string{},
uc.prevDelta,
uc.errs)
if err != nil {
return clues.Wrap(err, "delta query")
}
uc.deltaQueryCount++
uc.prevDelta = du.URL
return nil
}
@ -224,13 +199,7 @@ func (uc *urlCache) readCache(
// It assumes that cacheMu is held by caller in write mode
func (uc *urlCache) updateCache(
ctx context.Context,
_, _ string,
items []models.DriveItemable,
_ map[string]string,
_ map[string]string,
_ map[string]struct{},
_ map[string]map[string]string,
_ bool,
errs *fault.Bus,
) error {
el := errs.Local()
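The cache now depends on a single enumeration call rather than holding a `DeltaPager`. Here is a sketch of that interface shape, reconstructed from the `uc.edid.EnumerateDriveItemsDelta(...)` call in `refreshCache`; the authoritative declaration lives elsewhere in the package and may differ.

```go
package sketch

import (
	"context"

	"github.com/microsoftgraph/msgraph-sdk-go/models"

	"github.com/alcionai/corso/src/pkg/services/m365/api"
)

// EnumerateDriveItemsDeltaer mirrors the contract the url cache relies on:
// one call returns the changed items plus the next delta token. Reconstructed
// from the call sites above; not the canonical definition.
type EnumerateDriveItemsDeltaer interface {
	EnumerateDriveItemsDelta(
		ctx context.Context,
		driveID, prevDeltaLink string,
	) ([]models.DriveItemable, api.DeltaUpdate, error)
}
```

Both `api.Client.Drives()` and the test mock shown later satisfy this shape, which is what lets `newURLCache` drop the pager parameter.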

View File

@ -1,7 +1,6 @@
package drive
import (
"context"
"errors"
"io"
"math/rand"
@ -18,15 +17,19 @@ import (
"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
)
// ---------------------------------------------------------------------------
// integration
// ---------------------------------------------------------------------------
type URLCacheIntegrationSuite struct {
tester.Suite
ac api.Client
@ -68,11 +71,10 @@ func (suite *URLCacheIntegrationSuite) SetupSuite() {
// url cache
func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
var (
t = suite.T()
ac = suite.ac.Drives()
driveID = suite.driveID
newFolderName = testdata.DefaultRestoreConfig("folder").Location
driveItemPager = suite.ac.Drives().NewDriveItemDeltaPager(driveID, "", api.DriveItemSelectDefault())
t = suite.T()
ac = suite.ac.Drives()
driveID = suite.driveID
newFolderName = testdata.DefaultRestoreConfig("folder").Location
)
ctx, flush := tester.NewContext(t)
@ -82,11 +84,11 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
root, err := ac.GetRootFolder(ctx, driveID)
require.NoError(t, err, clues.ToCore(err))
newFolder, err := ac.Drives().PostItemInContainer(
newFolder, err := ac.PostItemInContainer(
ctx,
driveID,
ptr.Val(root.GetId()),
newItem(newFolderName, true),
api.NewDriveItem(newFolderName, true),
control.Copy)
require.NoError(t, err, clues.ToCore(err))
@ -94,33 +96,10 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
nfid := ptr.Val(newFolder.GetId())
collectorFunc := func(
context.Context,
string,
string,
[]models.DriveItemable,
map[string]string,
map[string]string,
map[string]struct{},
map[string]map[string]string,
bool,
*fault.Bus,
) error {
return nil
}
// Get the previous delta to feed into url cache
prevDelta, _, _, err := collectItems(
ctx,
suite.ac.Drives().NewDriveItemDeltaPager(driveID, "", api.DriveItemSelectURLCache()),
suite.driveID,
"drive-name",
collectorFunc,
map[string]string{},
"",
fault.New(true))
_, du, err := ac.EnumerateDriveItemsDelta(ctx, suite.driveID, "")
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, prevDelta.URL)
require.NotEmpty(t, du.URL)
// Create a bunch of files in the new folder
var items []models.DriveItemable
@ -128,11 +107,11 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
for i := 0; i < 5; i++ {
newItemName := "test_url_cache_basic_" + dttm.FormatNow(dttm.SafeForTesting)
item, err := ac.Drives().PostItemInContainer(
item, err := ac.PostItemInContainer(
ctx,
driveID,
nfid,
newItem(newItemName, false),
api.NewDriveItem(newItemName, false),
control.Copy)
require.NoError(t, err, clues.ToCore(err))
@ -142,9 +121,9 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
// Create a new URL cache with a long TTL
uc, err := newURLCache(
suite.driveID,
prevDelta.URL,
du.URL,
1*time.Hour,
driveItemPager,
suite.ac.Drives(),
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
@ -195,6 +174,10 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
require.Equal(t, 1, uc.deltaQueryCount)
}
// ---------------------------------------------------------------------------
// unit
// ---------------------------------------------------------------------------
type URLCacheUnitSuite struct {
tester.Suite
}
@ -205,27 +188,20 @@ func TestURLCacheUnitSuite(t *testing.T) {
func (suite *URLCacheUnitSuite) TestGetItemProperties() {
deltaString := "delta"
next := "next"
driveID := "drive1"
table := []struct {
name string
pagerResult map[string][]apiMock.PagerResult[models.DriveItemable]
pagerItems map[string][]models.DriveItemable
pagerErr map[string]error
expectedItemProps map[string]itemProps
expectedErr require.ErrorAssertionFunc
cacheAssert func(*urlCache, time.Time)
}{
{
name: "single item in cache",
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID: {
{
Values: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
},
DeltaLink: &deltaString,
},
},
pagerItems: map[string][]models.DriveItemable{
driveID: {fileItem("1", "file1", "root", "root", "https://dummy1.com", false)},
},
expectedItemProps: map[string]itemProps{
"1": {
@ -242,18 +218,13 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
},
{
name: "multiple items in cache",
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
pagerItems: map[string][]models.DriveItemable{
driveID: {
{
Values: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
fileItem("4", "file4", "root", "root", "https://dummy4.com", false),
fileItem("5", "file5", "root", "root", "https://dummy5.com", false),
},
DeltaLink: &deltaString,
},
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
fileItem("4", "file4", "root", "root", "https://dummy4.com", false),
fileItem("5", "file5", "root", "root", "https://dummy5.com", false),
},
},
expectedItemProps: map[string]itemProps{
@ -287,18 +258,13 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
},
{
name: "duplicate items with potentially new urls",
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
pagerItems: map[string][]models.DriveItemable{
driveID: {
{
Values: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
fileItem("1", "file1", "root", "root", "https://test1.com", false),
fileItem("2", "file2", "root", "root", "https://test2.com", false),
},
DeltaLink: &deltaString,
},
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
fileItem("1", "file1", "root", "root", "https://test1.com", false),
fileItem("2", "file2", "root", "root", "https://test2.com", false),
},
},
expectedItemProps: map[string]itemProps{
@ -324,16 +290,11 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
},
{
name: "deleted items",
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
pagerItems: map[string][]models.DriveItemable{
driveID: {
{
Values: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
fileItem("1", "file1", "root", "root", "https://dummy1.com", true),
},
DeltaLink: &deltaString,
},
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
fileItem("1", "file1", "root", "root", "https://dummy1.com", true),
},
},
expectedItemProps: map[string]itemProps{
@ -355,15 +316,8 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
},
{
name: "item not found in cache",
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID: {
{
Values: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
},
DeltaLink: &deltaString,
},
},
pagerItems: map[string][]models.DriveItemable{
driveID: {fileItem("1", "file1", "root", "root", "https://dummy1.com", false)},
},
expectedItemProps: map[string]itemProps{
"2": {},
@ -376,23 +330,10 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
},
},
{
name: "multi-page delta query error",
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID: {
{
Values: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
},
NextLink: &next,
},
{
Values: []models.DriveItemable{
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
},
DeltaLink: &deltaString,
Err: errors.New("delta query error"),
},
},
name: "delta query error",
pagerItems: map[string][]models.DriveItemable{},
pagerErr: map[string]error{
driveID: errors.New("delta query error"),
},
expectedItemProps: map[string]itemProps{
"1": {},
@ -408,15 +349,10 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
{
name: "folder item",
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
pagerItems: map[string][]models.DriveItemable{
driveID: {
{
Values: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
driveItem("2", "folder2", "root", "root", false, true, false),
},
DeltaLink: &deltaString,
},
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
driveItem("2", "folder2", "root", "root", false, true, false),
},
},
expectedItemProps: map[string]itemProps{
@ -437,15 +373,17 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
ctx, flush := tester.NewContext(t)
defer flush()
itemPager := &apiMock.DeltaPager[models.DriveItemable]{
ToReturn: test.pagerResult[driveID],
medi := mock.EnumeratesDriveItemsDelta{
Items: test.pagerItems,
Err: test.pagerErr,
DeltaUpdate: map[string]api.DeltaUpdate{driveID: {URL: deltaString}},
}
cache, err := newURLCache(
driveID,
"",
1*time.Hour,
itemPager,
&medi,
fault.New(true))
require.NoError(suite.T(), err, clues.ToCore(err))
@ -480,15 +418,17 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
// Test needsRefresh
func (suite *URLCacheUnitSuite) TestNeedsRefresh() {
driveID := "drive1"
t := suite.T()
refreshInterval := 1 * time.Second
var (
t = suite.T()
driveID = "drive1"
refreshInterval = 1 * time.Second
)
cache, err := newURLCache(
driveID,
"",
refreshInterval,
&apiMock.DeltaPager[models.DriveItemable]{},
&mock.EnumeratesDriveItemsDelta{},
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
@ -510,14 +450,12 @@ func (suite *URLCacheUnitSuite) TestNeedsRefresh() {
require.False(t, cache.needsRefresh())
}
// Test newURLCache
func (suite *URLCacheUnitSuite) TestNewURLCache() {
// table driven tests
table := []struct {
name string
driveID string
refreshInt time.Duration
itemPager api.DeltaPager[models.DriveItemable]
itemPager EnumerateDriveItemsDeltaer
errors *fault.Bus
expectedErr require.ErrorAssertionFunc
}{
@ -525,7 +463,7 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
name: "invalid driveID",
driveID: "",
refreshInt: 1 * time.Hour,
itemPager: &apiMock.DeltaPager[models.DriveItemable]{},
itemPager: &mock.EnumeratesDriveItemsDelta{},
errors: fault.New(true),
expectedErr: require.Error,
},
@ -533,12 +471,12 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
name: "invalid refresh interval",
driveID: "drive1",
refreshInt: 100 * time.Millisecond,
itemPager: &apiMock.DeltaPager[models.DriveItemable]{},
itemPager: &mock.EnumeratesDriveItemsDelta{},
errors: fault.New(true),
expectedErr: require.Error,
},
{
name: "invalid itemPager",
name: "invalid item enumerator",
driveID: "drive1",
refreshInt: 1 * time.Hour,
itemPager: nil,
@ -549,7 +487,7 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
name: "valid",
driveID: "drive1",
refreshInt: 1 * time.Hour,
itemPager: &apiMock.DeltaPager[models.DriveItemable]{},
itemPager: &mock.EnumeratesDriveItemsDelta{},
errors: fault.New(true),
expectedErr: require.NoError,
},

View File

@ -2,7 +2,6 @@ package groups
import (
"context"
"fmt"
"testing"
"time"
@ -527,8 +526,6 @@ func (suite *BackupIntgSuite) TestCreateCollections() {
require.NotEmpty(t, c.FullPath().Folder(false))
fmt.Printf("\n-----\nfolder %+v\n-----\n", c.FullPath().Folder(false))
// TODO(ashmrtn): Remove when LocationPath is made part of BackupCollection
// interface.
if !assert.Implements(t, (*data.LocationPather)(nil), c) {
@ -537,8 +534,6 @@ func (suite *BackupIntgSuite) TestCreateCollections() {
loc := c.(data.LocationPather).LocationPath().String()
fmt.Printf("\n-----\nloc %+v\n-----\n", c.(data.LocationPather).LocationPath().String())
require.NotEmpty(t, loc)
delete(test.channelNames, loc)

View File

@ -9,11 +9,13 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
)
// ---------------------------------------------------------------------------
@ -23,6 +25,8 @@ import (
type BackupHandler struct {
ItemInfo details.ItemInfo
DriveItemEnumeration EnumeratesDriveItemsDelta
GI GetsItem
GIP GetsItemPermission
@ -56,6 +60,7 @@ func DefaultOneDriveBH(resourceOwner string) *BackupHandler {
OneDrive: &details.OneDriveInfo{},
Extension: &details.ExtensionData{},
},
DriveItemEnumeration: EnumeratesDriveItemsDelta{},
GI: GetsItem{Err: clues.New("not defined")},
GIP: GetsItemPermission{Err: clues.New("not defined")},
PathPrefixFn: defaultOneDrivePathPrefixer,
@ -125,10 +130,6 @@ func (h BackupHandler) NewDrivePager(string, []string) api.Pager[models.Driveabl
return h.DrivePagerV
}
func (h BackupHandler) NewItemPager(driveID string, _ string, _ []string) api.DeltaPager[models.DriveItemable] {
return h.ItemPagerV[driveID]
}
func (h BackupHandler) FormatDisplayPath(_ string, pb *path.Builder) string {
return "/" + pb.String()
}
@ -159,6 +160,13 @@ func (h *BackupHandler) Get(context.Context, string, map[string]string) (*http.R
return h.GetResps[c], h.GetErrs[c]
}
func (h BackupHandler) EnumerateDriveItemsDelta(
ctx context.Context,
driveID, prevDeltaLink string,
) ([]models.DriveItemable, api.DeltaUpdate, error) {
return h.DriveItemEnumeration.EnumerateDriveItemsDelta(ctx, driveID, prevDeltaLink)
}
func (h BackupHandler) GetItem(ctx context.Context, _, _ string) (models.DriveItemable, error) {
return h.GI.GetItem(ctx, "", "")
}
@ -261,6 +269,65 @@ func (m GetsItem) GetItem(
return m.Item, m.Err
}
// ---------------------------------------------------------------------------
// Enumerates Drive Items
// ---------------------------------------------------------------------------
type EnumeratesDriveItemsDelta struct {
Items map[string][]models.DriveItemable
DeltaUpdate map[string]api.DeltaUpdate
Err map[string]error
}
func (edi EnumeratesDriveItemsDelta) EnumerateDriveItemsDelta(
_ context.Context,
driveID, _ string,
) (
[]models.DriveItemable,
api.DeltaUpdate,
error,
) {
return edi.Items[driveID], edi.DeltaUpdate[driveID], edi.Err[driveID]
}
func PagerResultToEDID(
m map[string][]apiMock.PagerResult[models.DriveItemable],
) EnumeratesDriveItemsDelta {
edi := EnumeratesDriveItemsDelta{
Items: map[string][]models.DriveItemable{},
DeltaUpdate: map[string]api.DeltaUpdate{},
Err: map[string]error{},
}
for driveID, results := range m {
var (
err error
items = []models.DriveItemable{}
deltaUpdate api.DeltaUpdate
)
for _, pr := range results {
items = append(items, pr.Values...)
if pr.DeltaLink != nil {
deltaUpdate = api.DeltaUpdate{URL: ptr.Val(pr.DeltaLink)}
}
if pr.Err != nil {
err = pr.Err
}
deltaUpdate.Reset = deltaUpdate.Reset || pr.ResetDelta
}
edi.Items[driveID] = items
edi.Err[driveID] = err
edi.DeltaUpdate[driveID] = deltaUpdate
}
return edi
}
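For orientation, a sketch of seeding the mock in a unit test, either by filling the maps directly or by converting legacy page-shaped fixtures through `PagerResultToEDID`. The helper name, drive ID, and token values are illustrative.

```go
package sketch

import (
	"github.com/microsoftgraph/msgraph-sdk-go/models"

	"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
	"github.com/alcionai/corso/src/pkg/services/m365/api"
	apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
)

// seedEnumerator is a hypothetical test helper showing both ways to seed the
// mock: populating the maps directly, or converting legacy page-shaped
// fixtures through PagerResultToEDID.
func seedEnumerator(
	driveID string,
	items []models.DriveItemable,
	usePages bool,
) mock.EnumeratesDriveItemsDelta {
	if usePages {
		delta := "delta-token"

		return mock.PagerResultToEDID(map[string][]apiMock.PagerResult[models.DriveItemable]{
			driveID: {{Values: items, DeltaLink: &delta}},
		})
	}

	return mock.EnumeratesDriveItemsDelta{
		Items:       map[string][]models.DriveItemable{driveID: items},
		DeltaUpdate: map[string]api.DeltaUpdate{driveID: {URL: "delta-token"}},
		Err:         map[string]error{driveID: nil},
	}
}
```

Either form can then be handed to `newURLCache` or set as a `BackupHandler`'s `DriveItemEnumeration`, as the tests above do.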
// ---------------------------------------------------------------------------
// Get Item Permissioner
// ---------------------------------------------------------------------------

View File

@ -91,12 +91,9 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
var (
paths = map[string]string{}
newPaths = map[string]string{}
currPaths = map[string]string{}
excluded = map[string]struct{}{}
itemColls = map[string]map[string]string{
driveID: {},
}
collMap = map[string]map[string]*drive.Collection{
collMap = map[string]map[string]*drive.Collection{
driveID: {},
}
)
@ -110,15 +107,14 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
c.CollectionMap = collMap
err := c.UpdateCollections(
_, err := c.UpdateCollections(
ctx,
driveID,
"General",
test.items,
paths,
newPaths,
currPaths,
excluded,
itemColls,
true,
fault.New(true))

View File

@ -384,20 +384,20 @@ func (pec printableErrCore) Values() []string {
// funcs, and the function that spawned the local bus should always
// return `local.Failure()` to ensure that hard failures are propagated
// back upstream.
func (e *Bus) Local() *localBus {
return &localBus{
func (e *Bus) Local() *LocalBus {
return &LocalBus{
mu: &sync.Mutex{},
bus: e,
}
}
type localBus struct {
type LocalBus struct {
mu *sync.Mutex
bus *Bus
current error
}
func (e *localBus) AddRecoverable(ctx context.Context, err error) {
func (e *LocalBus) AddRecoverable(ctx context.Context, err error) {
if err == nil {
return
}
@ -422,7 +422,7 @@ func (e *localBus) AddRecoverable(ctx context.Context, err error) {
// 2. Skipping avoids a permanent and consistent failure. If
// the underlying reason is transient or otherwise recoverable,
// the item should not be skipped.
func (e *localBus) AddSkip(ctx context.Context, s *Skipped) {
func (e *LocalBus) AddSkip(ctx context.Context, s *Skipped) {
if s == nil {
return
}
@ -437,7 +437,7 @@ func (e *localBus) AddSkip(ctx context.Context, s *Skipped) {
// It does not return the underlying bus.Failure(), only the failure
// that was recorded within the local bus instance. This error should
// get returned by any func which created a local bus.
func (e *localBus) Failure() error {
func (e *LocalBus) Failure() error {
return e.current
}
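The comments above prescribe a specific pattern: create the local bus at the top of a func, record recoverable errors as you go, and return `Failure()` from that same func. A minimal sketch of that pattern; `processItems` and `handle` are hypothetical stand-ins.

```go
package sketch

import (
	"context"

	"github.com/alcionai/corso/src/pkg/fault"
)

// processItems demonstrates the local-bus usage described above.
func processItems(ctx context.Context, items []string, errs *fault.Bus) error {
	el := errs.Local()

	for _, it := range items {
		if err := handle(ctx, it); err != nil {
			// recoverable: record it and keep going; a hard failure gets
			// captured inside the local bus
			el.AddRecoverable(ctx, err)
		}
	}

	// return only the local failure so hard failures recorded during this
	// func are propagated back upstream
	return el.Failure()
}

func handle(context.Context, string) error { return nil }
```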

View File

@ -697,7 +697,7 @@ func (s ExchangeScope) IncludesCategory(cat exchangeCategory) bool {
// returns true if the category is included in the scope's data type,
// and the value is set to Any().
func (s ExchangeScope) IsAny(cat exchangeCategory) bool {
return isAnyTarget(s, cat)
return IsAnyTarget(s, cat)
}
// Get returns the data category in the scope. If the scope

View File

@ -699,7 +699,7 @@ func (s GroupsScope) IncludesCategory(cat groupsCategory) bool {
// returns true if the category is included in the scope's data type,
// and the value is set to Any().
func (s GroupsScope) IsAny(cat groupsCategory) bool {
return isAnyTarget(s, cat)
return IsAnyTarget(s, cat)
}
// Get returns the data category in the scope. If the scope

View File

@ -484,7 +484,7 @@ func (s OneDriveScope) Matches(cat oneDriveCategory, target string) bool {
// returns true if the category is included in the scope's data type,
// and the value is set to Any().
func (s OneDriveScope) IsAny(cat oneDriveCategory) bool {
return isAnyTarget(s, cat)
return IsAnyTarget(s, cat)
}
// Get returns the data category in the scope. If the scope

View File

@ -694,7 +694,7 @@ func matchesPathValues[T scopeT, C categoryT](
return false
}
if isAnyTarget(sc, cc) {
if IsAnyTarget(sc, cc) {
// continue, not return: all path keys must match the entry to succeed
continue
}
@ -795,7 +795,7 @@ func isNoneTarget[T scopeT, C categoryT](s T, cat C) bool {
// returns true if the category is included in the scope's category type,
// and the value is set to Any().
func isAnyTarget[T scopeT, C categoryT](s T, cat C) bool {
func IsAnyTarget[T scopeT, C categoryT](s T, cat C) bool {
if !typeAndCategoryMatches(cat, s.categorizer()) {
return false
}

View File

@ -125,14 +125,14 @@ func (suite *SelectorScopesSuite) TestGetCatValue() {
func (suite *SelectorScopesSuite) TestIsAnyTarget() {
t := suite.T()
stub := stubScope("")
assert.True(t, isAnyTarget(stub, rootCatStub))
assert.True(t, isAnyTarget(stub, leafCatStub))
assert.False(t, isAnyTarget(stub, mockCategorizer("smarf")))
assert.True(t, IsAnyTarget(stub, rootCatStub))
assert.True(t, IsAnyTarget(stub, leafCatStub))
assert.False(t, IsAnyTarget(stub, mockCategorizer("smarf")))
stub = stubScope("none")
assert.False(t, isAnyTarget(stub, rootCatStub))
assert.False(t, isAnyTarget(stub, leafCatStub))
assert.False(t, isAnyTarget(stub, mockCategorizer("smarf")))
assert.False(t, IsAnyTarget(stub, rootCatStub))
assert.False(t, IsAnyTarget(stub, leafCatStub))
assert.False(t, IsAnyTarget(stub, mockCategorizer("smarf")))
}
var reduceTestTable = []struct {

View File

@ -625,7 +625,7 @@ func (s SharePointScope) IncludesCategory(cat sharePointCategory) bool {
// returns true if the category is included in the scope's data type,
// and the value is set to Any().
func (s SharePointScope) IsAny(cat sharePointCategory) bool {
return isAnyTarget(s, cat)
return IsAnyTarget(s, cat)
}
// Get returns the data category in the scope. If the scope

View File

@ -101,7 +101,7 @@ func idAnd(ss ...string) []string {
// exported
// ---------------------------------------------------------------------------
func DriveItemSelectDefault() []string {
func DefaultDriveItemProps() []string {
return idAnd(
"content.downloadUrl",
"createdBy",

View File

@ -1,11 +0,0 @@
package api
// DeltaUpdate holds the results of a current delta token. It normally
// gets produced when aggregating the addition and removal of items in
// a delta-queryable folder.
type DeltaUpdate struct {
// the deltaLink itself
URL string
// true if the old delta was marked as invalid
Reset bool
}

View File

@ -351,6 +351,10 @@ func (c Drives) PostItemLinkShareUpdate(
return itm, nil
}
// ---------------------------------------------------------------------------
// helper funcs
// ---------------------------------------------------------------------------
// DriveItemCollisionKey constructs a key from the item name.
// Collision keys are used to identify duplicate item conflicts when handling advanced restore configuration.
func DriveItemCollisionKey(item models.DriveItemable) string {
@ -360,3 +364,17 @@ func DriveItemCollisionKey(item models.DriveItemable) string {
return ptr.Val(item.GetName())
}
// NewDriveItem initializes a `models.DriveItemable` with either a folder or file entry.
func NewDriveItem(name string, folder bool) *models.DriveItem {
itemToCreate := models.NewDriveItem()
itemToCreate.SetName(&name)
if folder {
itemToCreate.SetFolder(models.NewFolder())
} else {
itemToCreate.SetFile(models.NewFile())
}
return itemToCreate
}
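A small sketch pairing the two helpers above the way `restoreFile` does: build the item first, then derive its collision key from the name. `demoCollisionKeys` is illustrative only.

```go
package sketch

import (
	"fmt"

	"github.com/alcionai/corso/src/pkg/services/m365/api"
)

// demoCollisionKeys builds a file and a folder item, then prints the
// collision key for each (the item name).
func demoCollisionKeys() {
	file := api.NewDriveItem("report.txt", false) // false => file entry
	folder := api.NewDriveItem("reports", true)   // true  => folder entry

	fmt.Println(api.DriveItemCollisionKey(file))   // "report.txt"
	fmt.Println(api.DriveItemCollisionKey(folder)) // "reports"
}
```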

View File

@ -15,6 +15,11 @@ import (
"github.com/alcionai/corso/src/pkg/logger"
)
type DriveItemIDType struct {
ItemID string
IsFolder bool
}
// ---------------------------------------------------------------------------
// non-delta item pager
// ---------------------------------------------------------------------------
@ -65,11 +70,6 @@ func (p *driveItemPageCtrl) ValidModTimes() bool {
return true
}
type DriveItemIDType struct {
ItemID string
IsFolder bool
}
func (c Drives) GetItemsInContainerByCollisionKey(
ctx context.Context,
driveID, containerID string,
@ -131,9 +131,9 @@ type DriveItemDeltaPageCtrl struct {
options *drives.ItemItemsItemDeltaRequestBuilderGetRequestConfiguration
}
func (c Drives) NewDriveItemDeltaPager(
driveID, link string,
selectFields []string,
func (c Drives) newDriveItemDeltaPager(
driveID, prevDeltaLink string,
selectProps ...string,
) *DriveItemDeltaPageCtrl {
preferHeaderItems := []string{
"deltashowremovedasdeleted",
@ -142,28 +142,32 @@ func (c Drives) NewDriveItemDeltaPager(
"hierarchicalsharing",
}
requestConfig := &drives.ItemItemsItemDeltaRequestBuilderGetRequestConfiguration{
Headers: newPreferHeaders(preferHeaderItems...),
QueryParameters: &drives.ItemItemsItemDeltaRequestBuilderGetQueryParameters{
Select: selectFields,
},
options := &drives.ItemItemsItemDeltaRequestBuilderGetRequestConfiguration{
Headers: newPreferHeaders(preferHeaderItems...),
QueryParameters: &drives.ItemItemsItemDeltaRequestBuilderGetQueryParameters{},
}
if len(selectProps) > 0 {
options.QueryParameters.Select = selectProps
}
builder := c.Stable.
Client().
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId(onedrive.RootID).
Delta()
if len(prevDeltaLink) > 0 {
builder = drives.NewItemItemsItemDeltaRequestBuilder(prevDeltaLink, c.Stable.Adapter())
}
res := &DriveItemDeltaPageCtrl{
gs: c.Stable,
driveID: driveID,
options: requestConfig,
builder: c.Stable.
Client().
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId(onedrive.RootID).
Delta(),
}
if len(link) > 0 {
res.builder = drives.NewItemItemsItemDeltaRequestBuilder(link, c.Stable.Adapter())
options: options,
builder: builder,
}
return res
@ -193,6 +197,27 @@ func (p *DriveItemDeltaPageCtrl) ValidModTimes() bool {
return true
}
// EnumerateDriveItemsDelta enumerates all items in the specified drive via a
// delta query and returns them along with the resulting delta update.
func (c Drives) EnumerateDriveItemsDelta(
ctx context.Context,
driveID string,
prevDeltaLink string,
) (
[]models.DriveItemable,
DeltaUpdate,
error,
) {
pager := c.newDriveItemDeltaPager(driveID, prevDeltaLink, DefaultDriveItemProps()...)
items, du, err := deltaEnumerateItems[models.DriveItemable](ctx, pager, prevDeltaLink)
if err != nil {
return nil, du, clues.Stack(err)
}
return items, du, nil
}
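A sketch of the calling convention this exposes to handlers: one call per drive, returning the changed items and the delta token to persist for the next incremental pass. `enumerateOnce` is a hypothetical caller.

```go
package sketch

import (
	"context"

	"github.com/alcionai/clues"
	"github.com/microsoftgraph/msgraph-sdk-go/models"

	"github.com/alcionai/corso/src/pkg/services/m365/api"
)

// enumerateOnce shows the one-shot enumeration that replaces per-page pager
// plumbing. ac, driveID, and prevDeltaLink are assumed inputs.
func enumerateOnce(
	ctx context.Context,
	ac api.Client,
	driveID, prevDeltaLink string,
) ([]models.DriveItemable, string, error) {
	items, du, err := ac.Drives().EnumerateDriveItemsDelta(ctx, driveID, prevDeltaLink)
	if err != nil {
		return nil, "", clues.Stack(err)
	}

	// du.URL is the delta token to feed back in as prevDeltaLink on the next
	// incremental run; du.Reset signals that the old token was invalidated.
	return items, du.URL, nil
}
```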
// ---------------------------------------------------------------------------
// user's drives pager
// ---------------------------------------------------------------------------

View File

@ -178,3 +178,18 @@ func (suite *DrivePagerIntgSuite) TestDrives_GetItemIDsInContainer() {
})
}
}
func (suite *DrivePagerIntgSuite) TestEnumerateDriveItems() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
items, du, err := suite.its.
ac.
Drives().
EnumerateDriveItemsDelta(ctx, suite.its.user.driveID, "")
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, items, "no items found in user's drive")
assert.NotEmpty(t, du.URL, "should have a delta link")
}

View File

@ -17,6 +17,7 @@ import (
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type DriveAPIIntgSuite struct {
@ -50,20 +51,6 @@ func (suite *DriveAPIIntgSuite) TestDrives_CreatePagerAndGetPage() {
assert.NotNil(t, a)
}
// newItem initializes a `models.DriveItemable` that can be used as input to `createItem`
func newItem(name string, folder bool) *models.DriveItem {
itemToCreate := models.NewDriveItem()
itemToCreate.SetName(&name)
if folder {
itemToCreate.SetFolder(models.NewFolder())
} else {
itemToCreate.SetFile(models.NewFile())
}
return itemToCreate
}
func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer() {
t := suite.T()
@ -78,12 +65,12 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer() {
ctx,
suite.its.user.driveID,
suite.its.user.driveRootFolderID,
newItem(rc.Location, true),
api.NewDriveItem(rc.Location, true),
control.Replace)
require.NoError(t, err, clues.ToCore(err))
// generate a folder to use for collision testing
folder := newItem("collision", true)
folder := api.NewDriveItem("collision", true)
origFolder, err := acd.PostItemInContainer(
ctx,
suite.its.user.driveID,
@ -93,7 +80,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer() {
require.NoError(t, err, clues.ToCore(err))
// generate an item to use for collision testing
file := newItem("collision.txt", false)
file := api.NewDriveItem("collision.txt", false)
origFile, err := acd.PostItemInContainer(
ctx,
suite.its.user.driveID,
@ -241,7 +228,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer_replaceFolderRegr
ctx,
suite.its.user.driveID,
suite.its.user.driveRootFolderID,
newItem(rc.Location, true),
api.NewDriveItem(rc.Location, true),
// skip instead of replace here to get
// an ErrItemAlreadyExistsConflict, just in case.
control.Skip)
@ -249,7 +236,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer_replaceFolderRegr
// generate items within that folder
for i := 0; i < 5; i++ {
file := newItem(fmt.Sprintf("collision_%d.txt", i), false)
file := api.NewDriveItem(fmt.Sprintf("collision_%d.txt", i), false)
f, err := acd.PostItemInContainer(
ctx,
suite.its.user.driveID,
@ -265,7 +252,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer_replaceFolderRegr
ctx,
suite.its.user.driveID,
ptr.Val(folder.GetParentReference().GetId()),
newItem(rc.Location, true),
api.NewDriveItem(rc.Location, true),
control.Replace)
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, ptr.Val(resultFolder.GetId()))

View File

@ -13,6 +13,20 @@ import (
"github.com/alcionai/corso/src/pkg/logger"
)
// ---------------------------------------------------------------------------
// common structs
// ---------------------------------------------------------------------------
// DeltaUpdate holds the results of a current delta token. It normally
// gets produced when aggregating the addition and removal of items in
// a delta-queryable folder.
type DeltaUpdate struct {
// the deltaLink itself
URL string
// true if the old delta was marked as invalid
Reset bool
}
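A short illustrative sketch of how a consumer typically reacts to the two fields: carry `URL` forward as the next `prevDeltaLink`, and treat `Reset` as a signal that previously tracked state can't back an incremental pass. The helper and its map parameter are assumptions, not code from this package.

```go
package sketch

import (
	"github.com/alcionai/corso/src/pkg/services/m365/api"
)

// applyDeltaUpdate shows one common reaction to a DeltaUpdate; it is
// illustrative, not part of the api package.
func applyDeltaUpdate(du api.DeltaUpdate, priorPaths map[string]string) (string, map[string]string) {
	if du.Reset {
		// the previous delta token was invalidated, so prior state can't be
		// trusted for an incremental pass
		priorPaths = map[string]string{}
	}

	// persist du.URL and hand it back as prevDeltaLink on the next run
	return du.URL, priorPaths
}
```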
// ---------------------------------------------------------------------------
// common interfaces
// ---------------------------------------------------------------------------

View File

@ -32,10 +32,11 @@ func (dnl *DeltaNextLinkValues[T]) GetOdataDeltaLink() *string {
}
type PagerResult[T any] struct {
Values []T
NextLink *string
DeltaLink *string
Err error
Values []T
NextLink *string
DeltaLink *string
ResetDelta bool
Err error
}
// ---------------------------------------------------------------------------