This reverts commit c3f94fd7f76f377e4728c715abbb8c7846e9fb25. The specified commit is working fine for CI and development, but contains performance degradation (solved in a follow-up pr) that we want to avoid for the next release. This revert is temporary, and the changes will be re-instated after release. --- #### Does this PR need a docs update or release note? - [x] ⛔ No #### Type of change - [x] 🤖 Supportability/Tests #### Test Plan - [x] ⚡ Unit test - [x] 💚 E2E
This commit is contained in:
parent
8bdef88a8b
commit
3d78183651
@ -229,7 +229,7 @@ elseif (![string]::IsNullOrEmpty($Site)) {
|
||||
}
|
||||
}
|
||||
else {
|
||||
Write-Host "User (for OneDrive) or Site (for Sharepoint) is required"
|
||||
Write-Host "User (for OneDrvie) or Site (for Sharpeoint) is required"
|
||||
Exit
|
||||
}
|
||||
|
||||
|
||||
@ -27,7 +27,7 @@ func NewPrefixMap(m map[string]map[string]struct{}) *PrefixMap {
|
||||
|
||||
func (pm PrefixMap) AssertEqual(t *testing.T, r prefixmatcher.StringSetReader) {
|
||||
if pm.Empty() {
|
||||
require.True(t, r.Empty(), "result prefixMap should be empty but contains keys: %+v", r.Keys())
|
||||
require.True(t, r.Empty(), "both prefix maps are empty")
|
||||
return
|
||||
}
|
||||
|
||||
|
||||
@ -227,16 +227,16 @@ func (c *Collections) Get(
|
||||
ssmb *prefixmatcher.StringSetMatchBuilder,
|
||||
errs *fault.Bus,
|
||||
) ([]data.BackupCollection, bool, error) {
|
||||
prevDriveIDToDelta, oldPrevPathsByDriveID, canUsePrevBackup, err := deserializeMetadata(ctx, prevMetadata)
|
||||
prevDeltas, oldPathsByDriveID, canUsePreviousBackup, err := deserializeMetadata(ctx, prevMetadata)
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
}
|
||||
|
||||
ctx = clues.Add(ctx, "can_use_previous_backup", canUsePrevBackup)
|
||||
ctx = clues.Add(ctx, "can_use_previous_backup", canUsePreviousBackup)
|
||||
|
||||
driveTombstones := map[string]struct{}{}
|
||||
|
||||
for driveID := range oldPrevPathsByDriveID {
|
||||
for driveID := range oldPathsByDriveID {
|
||||
driveTombstones[driveID] = struct{}{}
|
||||
}
|
||||
|
||||
@ -254,8 +254,10 @@ func (c *Collections) Get(
|
||||
}
|
||||
|
||||
var (
|
||||
driveIDToDeltaLink = map[string]string{}
|
||||
driveIDToPrevPaths = map[string]map[string]string{}
|
||||
// Drive ID -> delta URL for drive
|
||||
deltaURLs = map[string]string{}
|
||||
// Drive ID -> folder ID -> folder path
|
||||
folderPaths = map[string]map[string]string{}
|
||||
numPrevItems = 0
|
||||
)
|
||||
|
||||
@ -263,79 +265,65 @@ func (c *Collections) Get(
|
||||
var (
|
||||
driveID = ptr.Val(d.GetId())
|
||||
driveName = ptr.Val(d.GetName())
|
||||
ictx = clues.Add(
|
||||
ctx,
|
||||
"drive_id", driveID,
|
||||
"drive_name", clues.Hide(driveName))
|
||||
|
||||
excludedItemIDs = map[string]struct{}{}
|
||||
oldPrevPaths = oldPrevPathsByDriveID[driveID]
|
||||
prevDeltaLink = prevDriveIDToDelta[driveID]
|
||||
|
||||
// itemCollection is used to identify which collection a
|
||||
// file belongs to. This is useful to delete a file from the
|
||||
// collection it was previously in, in case it was moved to a
|
||||
// different collection within the same delta query
|
||||
// item ID -> item ID
|
||||
itemCollection = map[string]string{}
|
||||
prevDelta = prevDeltas[driveID]
|
||||
oldPaths = oldPathsByDriveID[driveID]
|
||||
numOldDelta = 0
|
||||
ictx = clues.Add(ctx, "drive_id", driveID, "drive_name", driveName)
|
||||
)
|
||||
|
||||
delete(driveTombstones, driveID)
|
||||
|
||||
if _, ok := driveIDToPrevPaths[driveID]; !ok {
|
||||
driveIDToPrevPaths[driveID] = map[string]string{}
|
||||
}
|
||||
|
||||
if _, ok := c.CollectionMap[driveID]; !ok {
|
||||
c.CollectionMap[driveID] = map[string]*Collection{}
|
||||
}
|
||||
|
||||
if len(prevDelta) > 0 {
|
||||
numOldDelta++
|
||||
}
|
||||
|
||||
logger.Ctx(ictx).Infow(
|
||||
"previous metadata for drive",
|
||||
"num_paths_entries", len(oldPrevPaths))
|
||||
"num_paths_entries", len(oldPaths),
|
||||
"num_deltas_entries", numOldDelta)
|
||||
|
||||
items, du, err := c.handler.EnumerateDriveItemsDelta(
|
||||
delta, paths, excluded, err := collectItems(
|
||||
ictx,
|
||||
c.handler.NewItemPager(driveID, "", api.DriveItemSelectDefault()),
|
||||
driveID,
|
||||
prevDeltaLink)
|
||||
driveName,
|
||||
c.UpdateCollections,
|
||||
oldPaths,
|
||||
prevDelta,
|
||||
errs)
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
}
|
||||
|
||||
// Used for logging below.
|
||||
numDeltas := 0
|
||||
|
||||
// It's alright to have an empty folders map (i.e. no folders found) but not
|
||||
// an empty delta token. This is because when deserializing the metadata we
|
||||
// remove entries for which there is no corresponding delta token/folder. If
|
||||
// we leave empty delta tokens then we may end up setting the State field
|
||||
// for collections when not actually getting delta results.
|
||||
if len(du.URL) > 0 {
|
||||
driveIDToDeltaLink[driveID] = du.URL
|
||||
}
|
||||
|
||||
newPrevPaths, err := c.UpdateCollections(
|
||||
ctx,
|
||||
driveID,
|
||||
driveName,
|
||||
items,
|
||||
oldPrevPaths,
|
||||
itemCollection,
|
||||
excludedItemIDs,
|
||||
du.Reset,
|
||||
errs)
|
||||
if err != nil {
|
||||
return nil, false, clues.Stack(err)
|
||||
if len(delta.URL) > 0 {
|
||||
deltaURLs[driveID] = delta.URL
|
||||
numDeltas++
|
||||
}
|
||||
|
||||
// Avoid the edge case where there's no paths but we do have a valid delta
|
||||
// token. We can accomplish this by adding an empty paths map for this
|
||||
// drive. If we don't have this then the next backup won't use the delta
|
||||
// token because it thinks the folder paths weren't persisted.
|
||||
driveIDToPrevPaths[driveID] = map[string]string{}
|
||||
maps.Copy(driveIDToPrevPaths[driveID], newPrevPaths)
|
||||
folderPaths[driveID] = map[string]string{}
|
||||
maps.Copy(folderPaths[driveID], paths)
|
||||
|
||||
logger.Ctx(ictx).Infow(
|
||||
"persisted metadata for drive",
|
||||
"num_new_paths_entries", len(newPrevPaths),
|
||||
"delta_reset", du.Reset)
|
||||
"num_paths_entries", len(paths),
|
||||
"num_deltas_entries", numDeltas,
|
||||
"delta_reset", delta.Reset)
|
||||
|
||||
numDriveItems := c.NumItems - numPrevItems
|
||||
numPrevItems = c.NumItems
|
||||
@ -347,7 +335,7 @@ func (c *Collections) Get(
|
||||
err = c.addURLCacheToDriveCollections(
|
||||
ictx,
|
||||
driveID,
|
||||
prevDeltaLink,
|
||||
prevDelta,
|
||||
errs)
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
@ -356,8 +344,8 @@ func (c *Collections) Get(
|
||||
|
||||
// For both cases we don't need to do set difference on folder map if the
|
||||
// delta token was valid because we should see all the changes.
|
||||
if !du.Reset {
|
||||
if len(excludedItemIDs) == 0 {
|
||||
if !delta.Reset {
|
||||
if len(excluded) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
@ -366,7 +354,7 @@ func (c *Collections) Get(
|
||||
return nil, false, clues.Wrap(err, "making exclude prefix").WithClues(ictx)
|
||||
}
|
||||
|
||||
ssmb.Add(p.String(), excludedItemIDs)
|
||||
ssmb.Add(p.String(), excluded)
|
||||
|
||||
continue
|
||||
}
|
||||
@ -381,11 +369,13 @@ func (c *Collections) Get(
|
||||
foundFolders[id] = struct{}{}
|
||||
}
|
||||
|
||||
for fldID, p := range oldPrevPaths {
|
||||
for fldID, p := range oldPaths {
|
||||
if _, ok := foundFolders[fldID]; ok {
|
||||
continue
|
||||
}
|
||||
|
||||
delete(paths, fldID)
|
||||
|
||||
prevPath, err := path.FromDataLayerPath(p, false)
|
||||
if err != nil {
|
||||
err = clues.Wrap(err, "invalid previous path").WithClues(ictx).With("deleted_path", p)
|
||||
@ -453,14 +443,14 @@ func (c *Collections) Get(
|
||||
// empty/missing and default to a full backup.
|
||||
logger.CtxErr(ctx, err).Info("making metadata collection path prefixes")
|
||||
|
||||
return collections, canUsePrevBackup, nil
|
||||
return collections, canUsePreviousBackup, nil
|
||||
}
|
||||
|
||||
md, err := graph.MakeMetadataCollection(
|
||||
pathPrefix,
|
||||
[]graph.MetadataCollectionEntry{
|
||||
graph.NewMetadataEntry(bupMD.PreviousPathFileName, driveIDToPrevPaths),
|
||||
graph.NewMetadataEntry(bupMD.DeltaURLsFileName, driveIDToDeltaLink),
|
||||
graph.NewMetadataEntry(bupMD.PreviousPathFileName, folderPaths),
|
||||
graph.NewMetadataEntry(bupMD.DeltaURLsFileName, deltaURLs),
|
||||
},
|
||||
c.statusUpdater)
|
||||
|
||||
@ -473,7 +463,7 @@ func (c *Collections) Get(
|
||||
collections = append(collections, md)
|
||||
}
|
||||
|
||||
return collections, canUsePrevBackup, nil
|
||||
return collections, canUsePreviousBackup, nil
|
||||
}
|
||||
|
||||
// addURLCacheToDriveCollections adds an URL cache to all collections belonging to
|
||||
@ -487,7 +477,7 @@ func (c *Collections) addURLCacheToDriveCollections(
|
||||
driveID,
|
||||
prevDelta,
|
||||
urlCacheRefreshInterval,
|
||||
c.handler,
|
||||
c.handler.NewItemPager(driveID, "", api.DriveItemSelectURLCache()),
|
||||
errs)
|
||||
if err != nil {
|
||||
return err
|
||||
@ -543,21 +533,22 @@ func updateCollectionPaths(
|
||||
|
||||
func (c *Collections) handleDelete(
|
||||
itemID, driveID string,
|
||||
oldPrevPaths, currPrevPaths, newPrevPaths map[string]string,
|
||||
oldPaths, newPaths map[string]string,
|
||||
isFolder bool,
|
||||
excluded map[string]struct{},
|
||||
itemCollection map[string]map[string]string,
|
||||
invalidPrevDelta bool,
|
||||
) error {
|
||||
if !isFolder {
|
||||
// Try to remove the item from the Collection if an entry exists for this
|
||||
// item. This handles cases where an item was created and deleted during the
|
||||
// same delta query.
|
||||
if parentID, ok := currPrevPaths[itemID]; ok {
|
||||
if parentID, ok := itemCollection[driveID][itemID]; ok {
|
||||
if col := c.CollectionMap[driveID][parentID]; col != nil {
|
||||
col.Remove(itemID)
|
||||
}
|
||||
|
||||
delete(currPrevPaths, itemID)
|
||||
delete(itemCollection[driveID], itemID)
|
||||
}
|
||||
|
||||
// Don't need to add to exclude list if the delta is invalid since the
|
||||
@ -578,7 +569,7 @@ func (c *Collections) handleDelete(
|
||||
|
||||
var prevPath path.Path
|
||||
|
||||
prevPathStr, ok := oldPrevPaths[itemID]
|
||||
prevPathStr, ok := oldPaths[itemID]
|
||||
if ok {
|
||||
var err error
|
||||
|
||||
@ -595,7 +586,7 @@ func (c *Collections) handleDelete(
|
||||
// Nested folders also return deleted delta results so we don't have to
|
||||
// worry about doing a prefix search in the map to remove the subtree of
|
||||
// the deleted folder/package.
|
||||
delete(newPrevPaths, itemID)
|
||||
delete(newPaths, itemID)
|
||||
|
||||
if prevPath == nil || invalidPrevDelta {
|
||||
// It is possible that an item was created and deleted between two delta
|
||||
@ -685,29 +676,21 @@ func (c *Collections) getCollectionPath(
|
||||
|
||||
// UpdateCollections initializes and adds the provided drive items to Collections
|
||||
// A new collection is created for every drive folder (or package).
|
||||
// oldPrevPaths is the unchanged data that was loaded from the metadata file.
|
||||
// This map is not modified during the call.
|
||||
// currPrevPaths starts as a copy of oldPaths and is updated as changes are found in
|
||||
// the returned results. Items are added to this collection throughout the call.
|
||||
// newPrevPaths, ie: the items added during this call, get returned as a map.
|
||||
// oldPaths is the unchanged data that was loaded from the metadata file.
|
||||
// newPaths starts as a copy of oldPaths and is updated as changes are found in
|
||||
// the returned results.
|
||||
func (c *Collections) UpdateCollections(
|
||||
ctx context.Context,
|
||||
driveID, driveName string,
|
||||
items []models.DriveItemable,
|
||||
oldPrevPaths map[string]string,
|
||||
currPrevPaths map[string]string,
|
||||
oldPaths map[string]string,
|
||||
newPaths map[string]string,
|
||||
excluded map[string]struct{},
|
||||
itemCollection map[string]map[string]string,
|
||||
invalidPrevDelta bool,
|
||||
errs *fault.Bus,
|
||||
) (map[string]string, error) {
|
||||
var (
|
||||
el = errs.Local()
|
||||
newPrevPaths = map[string]string{}
|
||||
)
|
||||
|
||||
if !invalidPrevDelta {
|
||||
maps.Copy(newPrevPaths, oldPrevPaths)
|
||||
}
|
||||
) error {
|
||||
el := errs.Local()
|
||||
|
||||
for _, item := range items {
|
||||
if el.Failure() != nil {
|
||||
@ -717,12 +700,8 @@ func (c *Collections) UpdateCollections(
|
||||
var (
|
||||
itemID = ptr.Val(item.GetId())
|
||||
itemName = ptr.Val(item.GetName())
|
||||
ictx = clues.Add(ctx, "item_id", itemID, "item_name", clues.Hide(itemName))
|
||||
isFolder = item.GetFolder() != nil || item.GetPackageEscaped() != nil
|
||||
ictx = clues.Add(
|
||||
ctx,
|
||||
"item_id", itemID,
|
||||
"item_name", clues.Hide(itemName),
|
||||
"item_is_folder", isFolder)
|
||||
)
|
||||
|
||||
if item.GetMalware() != nil {
|
||||
@ -744,13 +723,13 @@ func (c *Collections) UpdateCollections(
|
||||
if err := c.handleDelete(
|
||||
itemID,
|
||||
driveID,
|
||||
oldPrevPaths,
|
||||
currPrevPaths,
|
||||
newPrevPaths,
|
||||
oldPaths,
|
||||
newPaths,
|
||||
isFolder,
|
||||
excluded,
|
||||
itemCollection,
|
||||
invalidPrevDelta); err != nil {
|
||||
return nil, clues.Stack(err).WithClues(ictx)
|
||||
return clues.Stack(err).WithClues(ictx)
|
||||
}
|
||||
|
||||
continue
|
||||
@ -776,13 +755,13 @@ func (c *Collections) UpdateCollections(
|
||||
// Deletions are handled above so this is just moves/renames.
|
||||
var prevPath path.Path
|
||||
|
||||
prevPathStr, ok := oldPrevPaths[itemID]
|
||||
prevPathStr, ok := oldPaths[itemID]
|
||||
if ok {
|
||||
prevPath, err = path.FromDataLayerPath(prevPathStr, false)
|
||||
if err != nil {
|
||||
el.AddRecoverable(ctx, clues.Wrap(err, "invalid previous path").
|
||||
WithClues(ictx).
|
||||
With("prev_path_string", path.LoggableDir(prevPathStr)))
|
||||
With("path_string", prevPathStr))
|
||||
}
|
||||
} else if item.GetRoot() != nil {
|
||||
// Root doesn't move or get renamed.
|
||||
@ -792,11 +771,11 @@ func (c *Collections) UpdateCollections(
|
||||
// Moved folders don't cause delta results for any subfolders nested in
|
||||
// them. We need to go through and update paths to handle that. We only
|
||||
// update newPaths so we don't accidentally clobber previous deletes.
|
||||
updatePath(newPrevPaths, itemID, collectionPath.String())
|
||||
updatePath(newPaths, itemID, collectionPath.String())
|
||||
|
||||
found, err := updateCollectionPaths(driveID, itemID, c.CollectionMap, collectionPath)
|
||||
if err != nil {
|
||||
return nil, clues.Stack(err).WithClues(ictx)
|
||||
return clues.Stack(err).WithClues(ictx)
|
||||
}
|
||||
|
||||
if found {
|
||||
@ -819,7 +798,7 @@ func (c *Collections) UpdateCollections(
|
||||
invalidPrevDelta,
|
||||
nil)
|
||||
if err != nil {
|
||||
return nil, clues.Stack(err).WithClues(ictx)
|
||||
return clues.Stack(err).WithClues(ictx)
|
||||
}
|
||||
|
||||
col.driveName = driveName
|
||||
@ -841,38 +820,35 @@ func (c *Collections) UpdateCollections(
|
||||
case item.GetFile() != nil:
|
||||
// Deletions are handled above so this is just moves/renames.
|
||||
if len(ptr.Val(item.GetParentReference().GetId())) == 0 {
|
||||
return nil, clues.New("file without parent ID").WithClues(ictx)
|
||||
return clues.New("file without parent ID").WithClues(ictx)
|
||||
}
|
||||
|
||||
// Get the collection for this item.
|
||||
parentID := ptr.Val(item.GetParentReference().GetId())
|
||||
ictx = clues.Add(ictx, "parent_id", parentID)
|
||||
|
||||
collection, ok := c.CollectionMap[driveID][parentID]
|
||||
if !ok {
|
||||
return nil, clues.New("item seen before parent folder").WithClues(ictx)
|
||||
}
|
||||
|
||||
// This will only kick in if the file was moved multiple times
|
||||
// within a single delta query. We delete the file from the previous
|
||||
// collection so that it doesn't appear in two places.
|
||||
prevParentContainerID, ok := currPrevPaths[itemID]
|
||||
if ok {
|
||||
prevColl, found := c.CollectionMap[driveID][prevParentContainerID]
|
||||
collection, found := c.CollectionMap[driveID][parentID]
|
||||
if !found {
|
||||
return nil, clues.New("previous collection not found").
|
||||
With("prev_parent_container_id", prevParentContainerID).
|
||||
WithClues(ictx)
|
||||
return clues.New("item seen before parent folder").WithClues(ictx)
|
||||
}
|
||||
|
||||
if ok := prevColl.Remove(itemID); !ok {
|
||||
return nil, clues.New("removing item from prev collection").
|
||||
With("prev_parent_container_id", prevParentContainerID).
|
||||
WithClues(ictx)
|
||||
// Delete the file from previous collection. This will
|
||||
// only kick in if the file was moved multiple times
|
||||
// within a single delta query
|
||||
icID, found := itemCollection[driveID][itemID]
|
||||
if found {
|
||||
pcollection, found := c.CollectionMap[driveID][icID]
|
||||
if !found {
|
||||
return clues.New("previous collection not found").WithClues(ictx)
|
||||
}
|
||||
|
||||
removed := pcollection.Remove(itemID)
|
||||
if !removed {
|
||||
return clues.New("removing from prev collection").WithClues(ictx)
|
||||
}
|
||||
}
|
||||
|
||||
currPrevPaths[itemID] = parentID
|
||||
itemCollection[driveID][itemID] = parentID
|
||||
|
||||
if collection.Add(item) {
|
||||
c.NumItems++
|
||||
@ -893,13 +869,11 @@ func (c *Collections) UpdateCollections(
|
||||
}
|
||||
|
||||
default:
|
||||
el.AddRecoverable(ictx, clues.New("item is neither folder nor file").
|
||||
WithClues(ictx).
|
||||
Label(fault.LabelForceNoBackupCreation))
|
||||
return clues.New("item type not supported").WithClues(ictx)
|
||||
}
|
||||
}
|
||||
|
||||
return newPrevPaths, el.Failure()
|
||||
return el.Failure()
|
||||
}
|
||||
|
||||
type dirScopeChecker interface {
|
||||
|
||||
@ -8,6 +8,7 @@ import (
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/google/uuid"
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/models"
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
@ -136,7 +137,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedStatePath := getExpectedStatePathGenerator(suite.T(), bh, tenant, testBaseDrivePath)
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
testCase string
|
||||
items []models.DriveItemable
|
||||
inputFolderMap map[string]string
|
||||
scope selectors.OneDriveScope
|
||||
@ -146,11 +147,11 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedContainerCount int
|
||||
expectedFileCount int
|
||||
expectedSkippedCount int
|
||||
expectedPrevPaths map[string]string
|
||||
expectedMetadataPaths map[string]string
|
||||
expectedExcludes map[string]struct{}
|
||||
}{
|
||||
{
|
||||
name: "Invalid item",
|
||||
testCase: "Invalid item",
|
||||
items: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
driveItem("item", "item", testBaseDrivePath, "root", false, false, false),
|
||||
@ -162,13 +163,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
"root": expectedStatePath(data.NotMovedState, ""),
|
||||
},
|
||||
expectedContainerCount: 1,
|
||||
expectedPrevPaths: map[string]string{
|
||||
expectedMetadataPaths: map[string]string{
|
||||
"root": expectedPath(""),
|
||||
},
|
||||
expectedExcludes: map[string]struct{}{},
|
||||
},
|
||||
{
|
||||
name: "Single File",
|
||||
testCase: "Single File",
|
||||
items: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
driveItem("file", "file", testBaseDrivePath, "root", true, false, false),
|
||||
@ -183,13 +184,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedFileCount: 1,
|
||||
expectedContainerCount: 1,
|
||||
// Root folder is skipped since it's always present.
|
||||
expectedPrevPaths: map[string]string{
|
||||
expectedMetadataPaths: map[string]string{
|
||||
"root": expectedPath(""),
|
||||
},
|
||||
expectedExcludes: getDelList("file"),
|
||||
},
|
||||
{
|
||||
name: "Single Folder",
|
||||
testCase: "Single Folder",
|
||||
items: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
|
||||
@ -201,7 +202,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
"root": expectedStatePath(data.NotMovedState, ""),
|
||||
"folder": expectedStatePath(data.NewState, folder),
|
||||
},
|
||||
expectedPrevPaths: map[string]string{
|
||||
expectedMetadataPaths: map[string]string{
|
||||
"root": expectedPath(""),
|
||||
"folder": expectedPath("/folder"),
|
||||
},
|
||||
@ -210,7 +211,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedExcludes: map[string]struct{}{},
|
||||
},
|
||||
{
|
||||
name: "Single Package",
|
||||
testCase: "Single Package",
|
||||
items: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
driveItem("package", "package", testBaseDrivePath, "root", false, false, true),
|
||||
@ -222,7 +223,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
"root": expectedStatePath(data.NotMovedState, ""),
|
||||
"package": expectedStatePath(data.NewState, pkg),
|
||||
},
|
||||
expectedPrevPaths: map[string]string{
|
||||
expectedMetadataPaths: map[string]string{
|
||||
"root": expectedPath(""),
|
||||
"package": expectedPath("/package"),
|
||||
},
|
||||
@ -231,7 +232,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedExcludes: map[string]struct{}{},
|
||||
},
|
||||
{
|
||||
name: "1 root file, 1 folder, 1 package, 2 files, 3 collections",
|
||||
testCase: "1 root file, 1 folder, 1 package, 2 files, 3 collections",
|
||||
items: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false),
|
||||
@ -251,7 +252,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedItemCount: 5,
|
||||
expectedFileCount: 3,
|
||||
expectedContainerCount: 3,
|
||||
expectedPrevPaths: map[string]string{
|
||||
expectedMetadataPaths: map[string]string{
|
||||
"root": expectedPath(""),
|
||||
"folder": expectedPath("/folder"),
|
||||
"package": expectedPath("/package"),
|
||||
@ -259,7 +260,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedExcludes: getDelList("fileInRoot", "fileInFolder", "fileInPackage"),
|
||||
},
|
||||
{
|
||||
name: "contains folder selector",
|
||||
testCase: "contains folder selector",
|
||||
items: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false),
|
||||
@ -284,7 +285,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedContainerCount: 3,
|
||||
// just "folder" isn't added here because the include check is done on the
|
||||
// parent path since we only check later if something is a folder or not.
|
||||
expectedPrevPaths: map[string]string{
|
||||
expectedMetadataPaths: map[string]string{
|
||||
"folder": expectedPath(folder),
|
||||
"subfolder": expectedPath(folderSub),
|
||||
"folder2": expectedPath(folderSub + folder),
|
||||
@ -292,7 +293,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedExcludes: getDelList("fileInFolder", "fileInFolder2"),
|
||||
},
|
||||
{
|
||||
name: "prefix subfolder selector",
|
||||
testCase: "prefix subfolder selector",
|
||||
items: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false),
|
||||
@ -315,14 +316,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedItemCount: 3,
|
||||
expectedFileCount: 1,
|
||||
expectedContainerCount: 2,
|
||||
expectedPrevPaths: map[string]string{
|
||||
expectedMetadataPaths: map[string]string{
|
||||
"subfolder": expectedPath(folderSub),
|
||||
"folder2": expectedPath(folderSub + folder),
|
||||
},
|
||||
expectedExcludes: getDelList("fileInFolder2"),
|
||||
},
|
||||
{
|
||||
name: "match subfolder selector",
|
||||
testCase: "match subfolder selector",
|
||||
items: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false),
|
||||
@ -343,13 +344,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedFileCount: 1,
|
||||
expectedContainerCount: 1,
|
||||
// No child folders for subfolder so nothing here.
|
||||
expectedPrevPaths: map[string]string{
|
||||
expectedMetadataPaths: map[string]string{
|
||||
"subfolder": expectedPath(folderSub),
|
||||
},
|
||||
expectedExcludes: getDelList("fileInSubfolder"),
|
||||
},
|
||||
{
|
||||
name: "not moved folder tree",
|
||||
testCase: "not moved folder tree",
|
||||
items: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
|
||||
@ -367,7 +368,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedItemCount: 1,
|
||||
expectedFileCount: 0,
|
||||
expectedContainerCount: 2,
|
||||
expectedPrevPaths: map[string]string{
|
||||
expectedMetadataPaths: map[string]string{
|
||||
"root": expectedPath(""),
|
||||
"folder": expectedPath(folder),
|
||||
"subfolder": expectedPath(folderSub),
|
||||
@ -375,7 +376,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedExcludes: map[string]struct{}{},
|
||||
},
|
||||
{
|
||||
name: "moved folder tree",
|
||||
testCase: "moved folder tree",
|
||||
items: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
|
||||
@ -393,7 +394,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedItemCount: 1,
|
||||
expectedFileCount: 0,
|
||||
expectedContainerCount: 2,
|
||||
expectedPrevPaths: map[string]string{
|
||||
expectedMetadataPaths: map[string]string{
|
||||
"root": expectedPath(""),
|
||||
"folder": expectedPath(folder),
|
||||
"subfolder": expectedPath(folderSub),
|
||||
@ -401,7 +402,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedExcludes: map[string]struct{}{},
|
||||
},
|
||||
{
|
||||
name: "moved folder tree with file no previous",
|
||||
testCase: "moved folder tree with file no previous",
|
||||
items: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
|
||||
@ -418,14 +419,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedItemCount: 2,
|
||||
expectedFileCount: 1,
|
||||
expectedContainerCount: 2,
|
||||
expectedPrevPaths: map[string]string{
|
||||
expectedMetadataPaths: map[string]string{
|
||||
"root": expectedPath(""),
|
||||
"folder": expectedPath("/folder2"),
|
||||
},
|
||||
expectedExcludes: getDelList("file"),
|
||||
},
|
||||
{
|
||||
name: "moved folder tree with file no previous 1",
|
||||
testCase: "moved folder tree with file no previous 1",
|
||||
items: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
|
||||
@ -441,14 +442,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedItemCount: 2,
|
||||
expectedFileCount: 1,
|
||||
expectedContainerCount: 2,
|
||||
expectedPrevPaths: map[string]string{
|
||||
expectedMetadataPaths: map[string]string{
|
||||
"root": expectedPath(""),
|
||||
"folder": expectedPath(folder),
|
||||
},
|
||||
expectedExcludes: getDelList("file"),
|
||||
},
|
||||
{
|
||||
name: "moved folder tree and subfolder 1",
|
||||
testCase: "moved folder tree and subfolder 1",
|
||||
items: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
|
||||
@ -468,7 +469,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedItemCount: 2,
|
||||
expectedFileCount: 0,
|
||||
expectedContainerCount: 3,
|
||||
expectedPrevPaths: map[string]string{
|
||||
expectedMetadataPaths: map[string]string{
|
||||
"root": expectedPath(""),
|
||||
"folder": expectedPath(folder),
|
||||
"subfolder": expectedPath("/subfolder"),
|
||||
@ -476,7 +477,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedExcludes: map[string]struct{}{},
|
||||
},
|
||||
{
|
||||
name: "moved folder tree and subfolder 2",
|
||||
testCase: "moved folder tree and subfolder 2",
|
||||
items: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
driveItem("subfolder", "subfolder", testBaseDrivePath, "root", false, true, false),
|
||||
@ -496,7 +497,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedItemCount: 2,
|
||||
expectedFileCount: 0,
|
||||
expectedContainerCount: 3,
|
||||
expectedPrevPaths: map[string]string{
|
||||
expectedMetadataPaths: map[string]string{
|
||||
"root": expectedPath(""),
|
||||
"folder": expectedPath(folder),
|
||||
"subfolder": expectedPath("/subfolder"),
|
||||
@ -504,7 +505,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedExcludes: map[string]struct{}{},
|
||||
},
|
||||
{
|
||||
name: "move subfolder when moving parent",
|
||||
testCase: "move subfolder when moving parent",
|
||||
items: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
driveItem("folder2", "folder2", testBaseDrivePath, "root", false, true, false),
|
||||
@ -538,7 +539,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedItemCount: 5,
|
||||
expectedFileCount: 2,
|
||||
expectedContainerCount: 4,
|
||||
expectedPrevPaths: map[string]string{
|
||||
expectedMetadataPaths: map[string]string{
|
||||
"root": expectedPath(""),
|
||||
"folder": expectedPath("/folder"),
|
||||
"folder2": expectedPath("/folder2"),
|
||||
@ -547,7 +548,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedExcludes: getDelList("itemInSubfolder", "itemInFolder2"),
|
||||
},
|
||||
{
|
||||
name: "moved folder tree multiple times",
|
||||
testCase: "moved folder tree multiple times",
|
||||
items: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
|
||||
@ -567,7 +568,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedItemCount: 2,
|
||||
expectedFileCount: 1,
|
||||
expectedContainerCount: 2,
|
||||
expectedPrevPaths: map[string]string{
|
||||
expectedMetadataPaths: map[string]string{
|
||||
"root": expectedPath(""),
|
||||
"folder": expectedPath("/folder2"),
|
||||
"subfolder": expectedPath("/folder2/subfolder"),
|
||||
@ -575,7 +576,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedExcludes: getDelList("file"),
|
||||
},
|
||||
{
|
||||
name: "deleted folder and package",
|
||||
testCase: "deleted folder and package",
|
||||
items: []models.DriveItemable{
|
||||
driveRootItem("root"), // root is always present, but not necessary here
|
||||
delItem("folder", testBaseDrivePath, "root", false, true, false),
|
||||
@ -596,13 +597,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedItemCount: 0,
|
||||
expectedFileCount: 0,
|
||||
expectedContainerCount: 1,
|
||||
expectedPrevPaths: map[string]string{
|
||||
expectedMetadataPaths: map[string]string{
|
||||
"root": expectedPath(""),
|
||||
},
|
||||
expectedExcludes: map[string]struct{}{},
|
||||
},
|
||||
{
|
||||
name: "delete folder without previous",
|
||||
testCase: "delete folder without previous",
|
||||
items: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
delItem("folder", testBaseDrivePath, "root", false, true, false),
|
||||
@ -618,13 +619,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedItemCount: 0,
|
||||
expectedFileCount: 0,
|
||||
expectedContainerCount: 1,
|
||||
expectedPrevPaths: map[string]string{
|
||||
expectedMetadataPaths: map[string]string{
|
||||
"root": expectedPath(""),
|
||||
},
|
||||
expectedExcludes: map[string]struct{}{},
|
||||
},
|
||||
{
|
||||
name: "delete folder tree move subfolder",
|
||||
testCase: "delete folder tree move subfolder",
|
||||
items: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
delItem("folder", testBaseDrivePath, "root", false, true, false),
|
||||
@ -645,14 +646,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedItemCount: 1,
|
||||
expectedFileCount: 0,
|
||||
expectedContainerCount: 2,
|
||||
expectedPrevPaths: map[string]string{
|
||||
expectedMetadataPaths: map[string]string{
|
||||
"root": expectedPath(""),
|
||||
"subfolder": expectedPath("/subfolder"),
|
||||
},
|
||||
expectedExcludes: map[string]struct{}{},
|
||||
},
|
||||
{
|
||||
name: "delete file",
|
||||
testCase: "delete file",
|
||||
items: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
delItem("item", testBaseDrivePath, "root", true, false, false),
|
||||
@ -668,13 +669,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedItemCount: 1,
|
||||
expectedFileCount: 1,
|
||||
expectedContainerCount: 1,
|
||||
expectedPrevPaths: map[string]string{
|
||||
expectedMetadataPaths: map[string]string{
|
||||
"root": expectedPath(""),
|
||||
},
|
||||
expectedExcludes: getDelList("item"),
|
||||
},
|
||||
{
|
||||
name: "item before parent errors",
|
||||
testCase: "item before parent errors",
|
||||
items: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
driveItem("file", "file", testBaseDrivePath+"/folder", "folder", true, false, false),
|
||||
@ -689,11 +690,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedItemCount: 0,
|
||||
expectedFileCount: 0,
|
||||
expectedContainerCount: 1,
|
||||
expectedPrevPaths: nil,
|
||||
expectedMetadataPaths: map[string]string{
|
||||
"root": expectedPath(""),
|
||||
},
|
||||
expectedExcludes: map[string]struct{}{},
|
||||
},
|
||||
{
|
||||
name: "1 root file, 1 folder, 1 package, 1 good file, 1 malware",
|
||||
testCase: "1 root file, 1 folder, 1 package, 1 good file, 1 malware",
|
||||
items: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false),
|
||||
@ -714,7 +717,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
expectedFileCount: 2,
|
||||
expectedContainerCount: 3,
|
||||
expectedSkippedCount: 1,
|
||||
expectedPrevPaths: map[string]string{
|
||||
expectedMetadataPaths: map[string]string{
|
||||
"root": expectedPath(""),
|
||||
"folder": expectedPath("/folder"),
|
||||
"package": expectedPath("/package"),
|
||||
@ -723,8 +726,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
suite.Run(test.name, func() {
|
||||
for _, tt := range tests {
|
||||
suite.Run(tt.testCase, func() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
@ -732,14 +735,17 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
|
||||
var (
|
||||
excludes = map[string]struct{}{}
|
||||
currPrevPaths = map[string]string{}
|
||||
outputFolderMap = map[string]string{}
|
||||
itemCollection = map[string]map[string]string{
|
||||
driveID: {},
|
||||
}
|
||||
errs = fault.New(true)
|
||||
)
|
||||
|
||||
maps.Copy(currPrevPaths, test.inputFolderMap)
|
||||
maps.Copy(outputFolderMap, tt.inputFolderMap)
|
||||
|
||||
c := NewCollections(
|
||||
&itemBackupHandler{api.Drives{}, user, test.scope},
|
||||
&itemBackupHandler{api.Drives{}, user, tt.scope},
|
||||
tenant,
|
||||
user,
|
||||
nil,
|
||||
@ -747,24 +753,25 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
|
||||
c.CollectionMap[driveID] = map[string]*Collection{}
|
||||
|
||||
newPrevPaths, err := c.UpdateCollections(
|
||||
err := c.UpdateCollections(
|
||||
ctx,
|
||||
driveID,
|
||||
"General",
|
||||
test.items,
|
||||
test.inputFolderMap,
|
||||
currPrevPaths,
|
||||
tt.items,
|
||||
tt.inputFolderMap,
|
||||
outputFolderMap,
|
||||
excludes,
|
||||
itemCollection,
|
||||
false,
|
||||
errs)
|
||||
test.expect(t, err, clues.ToCore(err))
|
||||
assert.Equal(t, len(test.expectedCollectionIDs), len(c.CollectionMap[driveID]), "total collections")
|
||||
assert.Equal(t, test.expectedItemCount, c.NumItems, "item count")
|
||||
assert.Equal(t, test.expectedFileCount, c.NumFiles, "file count")
|
||||
assert.Equal(t, test.expectedContainerCount, c.NumContainers, "container count")
|
||||
assert.Equal(t, test.expectedSkippedCount, len(errs.Skipped()), "skipped items")
|
||||
tt.expect(t, err, clues.ToCore(err))
|
||||
assert.Equal(t, len(tt.expectedCollectionIDs), len(c.CollectionMap[driveID]), "total collections")
|
||||
assert.Equal(t, tt.expectedItemCount, c.NumItems, "item count")
|
||||
assert.Equal(t, tt.expectedFileCount, c.NumFiles, "file count")
|
||||
assert.Equal(t, tt.expectedContainerCount, c.NumContainers, "container count")
|
||||
assert.Equal(t, tt.expectedSkippedCount, len(errs.Skipped()), "skipped items")
|
||||
|
||||
for id, sp := range test.expectedCollectionIDs {
|
||||
for id, sp := range tt.expectedCollectionIDs {
|
||||
if !assert.Containsf(t, c.CollectionMap[driveID], id, "missing collection with id %s", id) {
|
||||
// Skip collections we don't find so we don't get an NPE.
|
||||
continue
|
||||
@ -775,8 +782,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
|
||||
assert.Equalf(t, sp.prevPath, c.CollectionMap[driveID][id].PreviousPath(), "prev path for collection %s", id)
|
||||
}
|
||||
|
||||
assert.Equal(t, test.expectedPrevPaths, newPrevPaths, "metadata paths")
|
||||
assert.Equal(t, test.expectedExcludes, excludes, "exclude list")
|
||||
assert.Equal(t, tt.expectedMetadataPaths, outputFolderMap, "metadata paths")
|
||||
assert.Equal(t, tt.expectedExcludes, excludes, "exclude list")
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -1299,7 +1306,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
|
||||
},
|
||||
DeltaLink: &delta,
|
||||
ResetDelta: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -1338,7 +1344,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
driveItem("file", "file2", driveBasePath1+"/folder", "folder", true, false, false),
|
||||
},
|
||||
DeltaLink: &delta,
|
||||
ResetDelta: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -1416,7 +1421,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
|
||||
},
|
||||
DeltaLink: &empty, // probably will never happen with graph
|
||||
ResetDelta: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -1454,7 +1458,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
|
||||
},
|
||||
NextLink: &next,
|
||||
ResetDelta: true,
|
||||
},
|
||||
{
|
||||
Values: []models.DriveItemable{
|
||||
@ -1463,7 +1466,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
driveItem("file2", "file2", driveBasePath1+"/folder", "folder", true, false, false),
|
||||
},
|
||||
DeltaLink: &delta,
|
||||
ResetDelta: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -1506,7 +1508,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
|
||||
},
|
||||
DeltaLink: &delta,
|
||||
ResetDelta: true,
|
||||
},
|
||||
},
|
||||
driveID2: {
|
||||
@ -1517,7 +1518,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
driveItem("file2", "file", driveBasePath2+"/folder", "folder2", true, false, false),
|
||||
},
|
||||
DeltaLink: &delta2,
|
||||
ResetDelta: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -1570,7 +1570,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
|
||||
},
|
||||
DeltaLink: &delta,
|
||||
ResetDelta: true,
|
||||
},
|
||||
},
|
||||
driveID2: {
|
||||
@ -1581,7 +1580,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
driveItem("file2", "file", driveBasePath2+"/folder", "folder", true, false, false),
|
||||
},
|
||||
DeltaLink: &delta2,
|
||||
ResetDelta: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -1639,6 +1637,87 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
expectedFolderPaths: nil,
|
||||
expectedDelList: nil,
|
||||
},
|
||||
{
|
||||
name: "OneDrive_OneItemPage_DeltaError",
|
||||
drives: []models.Driveable{drive1},
|
||||
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
|
||||
driveID1: {
|
||||
{
|
||||
Err: getDeltaError(),
|
||||
},
|
||||
{
|
||||
Values: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
driveItem("file", "file", driveBasePath1, "root", true, false, false),
|
||||
},
|
||||
DeltaLink: &delta,
|
||||
},
|
||||
},
|
||||
},
|
||||
canUsePreviousBackup: true,
|
||||
errCheck: assert.NoError,
|
||||
expectedCollections: map[string]map[data.CollectionState][]string{
|
||||
rootFolderPath1: {data.NotMovedState: {"file"}},
|
||||
},
|
||||
expectedDeltaURLs: map[string]string{
|
||||
driveID1: delta,
|
||||
},
|
||||
expectedFolderPaths: map[string]map[string]string{
|
||||
driveID1: {
|
||||
"root": rootFolderPath1,
|
||||
},
|
||||
},
|
||||
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
|
||||
doNotMergeItems: map[string]bool{
|
||||
rootFolderPath1: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "OneDrive_TwoItemPage_DeltaError",
|
||||
drives: []models.Driveable{drive1},
|
||||
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
|
||||
driveID1: {
|
||||
{
|
||||
Err: getDeltaError(),
|
||||
},
|
||||
{
|
||||
Values: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
driveItem("file", "file", driveBasePath1, "root", true, false, false),
|
||||
},
|
||||
NextLink: &next,
|
||||
},
|
||||
{
|
||||
Values: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
|
||||
driveItem("file2", "file", driveBasePath1+"/folder", "folder", true, false, false),
|
||||
},
|
||||
DeltaLink: &delta,
|
||||
},
|
||||
},
|
||||
},
|
||||
canUsePreviousBackup: true,
|
||||
errCheck: assert.NoError,
|
||||
expectedCollections: map[string]map[data.CollectionState][]string{
|
||||
rootFolderPath1: {data.NotMovedState: {"file"}},
|
||||
expectedPath1("/folder"): {data.NewState: {"folder", "file2"}},
|
||||
},
|
||||
expectedDeltaURLs: map[string]string{
|
||||
driveID1: delta,
|
||||
},
|
||||
expectedFolderPaths: map[string]map[string]string{
|
||||
driveID1: {
|
||||
"root": rootFolderPath1,
|
||||
"folder": folderPath1,
|
||||
},
|
||||
},
|
||||
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
|
||||
doNotMergeItems: map[string]bool{
|
||||
rootFolderPath1: true,
|
||||
folderPath1: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "OneDrive_TwoItemPage_NoDeltaError",
|
||||
drives: []models.Driveable{drive1},
|
||||
@ -1691,6 +1770,9 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
drives: []models.Driveable{drive1},
|
||||
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
|
||||
driveID1: {
|
||||
{
|
||||
Err: getDeltaError(),
|
||||
},
|
||||
{
|
||||
Values: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
@ -1698,7 +1780,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
driveItem("file", "file", driveBasePath1+"/folder2", "folder2", true, false, false),
|
||||
},
|
||||
DeltaLink: &delta,
|
||||
ResetDelta: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -1736,6 +1817,9 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
drives: []models.Driveable{drive1},
|
||||
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
|
||||
driveID1: {
|
||||
{
|
||||
Err: getDeltaError(),
|
||||
},
|
||||
{
|
||||
Values: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
@ -1743,7 +1827,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
driveItem("file", "file", driveBasePath1+"/folder", "folder2", true, false, false),
|
||||
},
|
||||
DeltaLink: &delta,
|
||||
ResetDelta: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -1801,7 +1884,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
malwareItem("malware2", "malware2", driveBasePath1+"/folder", "folder", true, false, false),
|
||||
},
|
||||
DeltaLink: &delta,
|
||||
ResetDelta: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -1831,10 +1913,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
expectedSkippedCount: 2,
|
||||
},
|
||||
{
|
||||
name: "One Drive Deleted Folder In New Results",
|
||||
name: "One Drive Delta Error Deleted Folder In New Results",
|
||||
drives: []models.Driveable{drive1},
|
||||
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
|
||||
driveID1: {
|
||||
{
|
||||
Err: getDeltaError(),
|
||||
},
|
||||
{
|
||||
Values: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
@ -1852,7 +1937,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
delItem("file2", driveBasePath1, "root", true, false, false),
|
||||
},
|
||||
DeltaLink: &delta2,
|
||||
ResetDelta: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -1887,17 +1971,19 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "One Drive Random Folder Delete",
|
||||
name: "One Drive Delta Error Random Folder Delete",
|
||||
drives: []models.Driveable{drive1},
|
||||
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
|
||||
driveID1: {
|
||||
{
|
||||
Err: getDeltaError(),
|
||||
},
|
||||
{
|
||||
Values: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
delItem("folder", driveBasePath1, "root", false, true, false),
|
||||
},
|
||||
DeltaLink: &delta,
|
||||
ResetDelta: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -1928,17 +2014,19 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "One Drive Random Item Delete",
|
||||
name: "One Drive Delta Error Random Item Delete",
|
||||
drives: []models.Driveable{drive1},
|
||||
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
|
||||
driveID1: {
|
||||
{
|
||||
Err: getDeltaError(),
|
||||
},
|
||||
{
|
||||
Values: []models.DriveItemable{
|
||||
driveRootItem("root"),
|
||||
delItem("file", driveBasePath1, "root", true, false, false),
|
||||
},
|
||||
DeltaLink: &delta,
|
||||
ResetDelta: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -1985,7 +2073,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
delItem("file", driveBasePath1, "root", true, false, false),
|
||||
},
|
||||
DeltaLink: &delta2,
|
||||
ResetDelta: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -2029,7 +2116,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
delItem("file", driveBasePath1, "root", true, false, false),
|
||||
},
|
||||
DeltaLink: &delta,
|
||||
ResetDelta: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -2068,7 +2154,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
delItem("folder", driveBasePath1, "root", false, true, false),
|
||||
},
|
||||
DeltaLink: &delta,
|
||||
ResetDelta: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -2104,7 +2189,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
delItem("file", driveBasePath1, "root", true, false, false),
|
||||
},
|
||||
DeltaLink: &delta,
|
||||
ResetDelta: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -2186,7 +2270,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
mbh := mock.DefaultOneDriveBH("a-user")
|
||||
mbh.DrivePagerV = mockDrivePager
|
||||
mbh.ItemPagerV = itemPagers
|
||||
mbh.DriveItemEnumeration = mock.PagerResultToEDID(test.items)
|
||||
|
||||
c := NewCollections(
|
||||
mbh,
|
||||
@ -2417,6 +2500,121 @@ func delItem(
|
||||
return item
|
||||
}
|
||||
|
||||
func getDeltaError() error {
|
||||
syncStateNotFound := "SyncStateNotFound"
|
||||
me := odataerrors.NewMainError()
|
||||
me.SetCode(&syncStateNotFound)
|
||||
|
||||
deltaError := odataerrors.NewODataError()
|
||||
deltaError.SetErrorEscaped(me)
|
||||
|
||||
return deltaError
|
||||
}
|
||||
|
||||
func (suite *OneDriveCollectionsUnitSuite) TestCollectItems() {
|
||||
next := "next"
|
||||
delta := "delta"
|
||||
prevDelta := "prev-delta"
|
||||
|
||||
table := []struct {
|
||||
name string
|
||||
items []apiMock.PagerResult[models.DriveItemable]
|
||||
deltaURL string
|
||||
prevDeltaSuccess bool
|
||||
prevDelta string
|
||||
err error
|
||||
}{
|
||||
{
|
||||
name: "delta on first run",
|
||||
deltaURL: delta,
|
||||
items: []apiMock.PagerResult[models.DriveItemable]{
|
||||
{DeltaLink: &delta},
|
||||
},
|
||||
prevDeltaSuccess: true,
|
||||
prevDelta: prevDelta,
|
||||
},
|
||||
{
|
||||
name: "empty prev delta",
|
||||
deltaURL: delta,
|
||||
items: []apiMock.PagerResult[models.DriveItemable]{
|
||||
{DeltaLink: &delta},
|
||||
},
|
||||
prevDeltaSuccess: false,
|
||||
prevDelta: "",
|
||||
},
|
||||
{
|
||||
name: "next then delta",
|
||||
deltaURL: delta,
|
||||
items: []apiMock.PagerResult[models.DriveItemable]{
|
||||
{NextLink: &next},
|
||||
{DeltaLink: &delta},
|
||||
},
|
||||
prevDeltaSuccess: true,
|
||||
prevDelta: prevDelta,
|
||||
},
|
||||
{
|
||||
name: "invalid prev delta",
|
||||
deltaURL: delta,
|
||||
items: []apiMock.PagerResult[models.DriveItemable]{
|
||||
{Err: getDeltaError()},
|
||||
{DeltaLink: &delta}, // works on retry
|
||||
},
|
||||
prevDelta: prevDelta,
|
||||
prevDeltaSuccess: false,
|
||||
},
|
||||
{
|
||||
name: "fail a normal delta query",
|
||||
items: []apiMock.PagerResult[models.DriveItemable]{
|
||||
{NextLink: &next},
|
||||
{Err: assert.AnError},
|
||||
},
|
||||
prevDelta: prevDelta,
|
||||
prevDeltaSuccess: true,
|
||||
err: assert.AnError,
|
||||
},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
itemPager := &apiMock.DeltaPager[models.DriveItemable]{
|
||||
ToReturn: test.items,
|
||||
}
|
||||
|
||||
collectorFunc := func(
|
||||
ctx context.Context,
|
||||
driveID, driveName string,
|
||||
driveItems []models.DriveItemable,
|
||||
oldPaths map[string]string,
|
||||
newPaths map[string]string,
|
||||
excluded map[string]struct{},
|
||||
itemCollection map[string]map[string]string,
|
||||
doNotMergeItems bool,
|
||||
errs *fault.Bus,
|
||||
) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
delta, _, _, err := collectItems(
|
||||
ctx,
|
||||
itemPager,
|
||||
"",
|
||||
"General",
|
||||
collectorFunc,
|
||||
map[string]string{},
|
||||
test.prevDelta,
|
||||
fault.New(true))
|
||||
|
||||
require.ErrorIs(t, err, test.err, "delta fetch err", clues.ToCore(err))
|
||||
require.Equal(t, test.deltaURL, delta.URL, "delta url")
|
||||
require.Equal(t, !test.prevDeltaSuccess, delta.Reset, "delta reset")
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *OneDriveCollectionsUnitSuite) TestAddURLCacheToDriveCollections() {
|
||||
driveID := "test-drive"
|
||||
collCount := 3
|
||||
|
||||
@ -36,7 +36,6 @@ type BackupHandler interface {
|
||||
GetItemPermissioner
|
||||
GetItemer
|
||||
NewDrivePagerer
|
||||
EnumerateDriveItemsDeltaer
|
||||
|
||||
// PathPrefix constructs the service and category specific path prefix for
|
||||
// the given values.
|
||||
@ -51,7 +50,7 @@ type BackupHandler interface {
|
||||
|
||||
// ServiceCat returns the service and category used by this implementation.
|
||||
ServiceCat() (path.ServiceType, path.CategoryType)
|
||||
|
||||
NewItemPager(driveID, link string, fields []string) api.DeltaPager[models.DriveItemable]
|
||||
// FormatDisplayPath creates a human-readable string to represent the
|
||||
// provided path.
|
||||
FormatDisplayPath(driveName string, parentPath *path.Builder) string
|
||||
@ -80,17 +79,6 @@ type GetItemer interface {
|
||||
) (models.DriveItemable, error)
|
||||
}
|
||||
|
||||
type EnumerateDriveItemsDeltaer interface {
|
||||
EnumerateDriveItemsDelta(
|
||||
ctx context.Context,
|
||||
driveID, prevDeltaLink string,
|
||||
) (
|
||||
[]models.DriveItemable,
|
||||
api.DeltaUpdate,
|
||||
error,
|
||||
)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// restore
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
142
src/internal/m365/collection/drive/item_collector.go
Normal file
142
src/internal/m365/collection/drive/item_collector.go
Normal file
@ -0,0 +1,142 @@
|
||||
package drive
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/models"
|
||||
"golang.org/x/exp/maps"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/m365/graph"
|
||||
"github.com/alcionai/corso/src/pkg/fault"
|
||||
"github.com/alcionai/corso/src/pkg/logger"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
)
|
||||
|
||||
// DeltaUpdate holds the results of a current delta token. It normally
|
||||
// gets produced when aggregating the addition and removal of items in
|
||||
// a delta-queryable folder.
|
||||
// FIXME: This is same as exchange.api.DeltaUpdate
|
||||
type DeltaUpdate struct {
|
||||
// the deltaLink itself
|
||||
URL string
|
||||
// true if the old delta was marked as invalid
|
||||
Reset bool
|
||||
}
|
||||
|
||||
// itemCollector functions collect the items found in a drive
|
||||
type itemCollector func(
|
||||
ctx context.Context,
|
||||
driveID, driveName string,
|
||||
driveItems []models.DriveItemable,
|
||||
oldPaths map[string]string,
|
||||
newPaths map[string]string,
|
||||
excluded map[string]struct{},
|
||||
itemCollections map[string]map[string]string,
|
||||
validPrevDelta bool,
|
||||
errs *fault.Bus,
|
||||
) error
|
||||
|
||||
// collectItems will enumerate all items in the specified drive and hand them to the
|
||||
// provided `collector` method
|
||||
func collectItems(
|
||||
ctx context.Context,
|
||||
pager api.DeltaPager[models.DriveItemable],
|
||||
driveID, driveName string,
|
||||
collector itemCollector,
|
||||
oldPaths map[string]string,
|
||||
prevDelta string,
|
||||
errs *fault.Bus,
|
||||
) (
|
||||
DeltaUpdate,
|
||||
map[string]string, // newPaths
|
||||
map[string]struct{}, // excluded
|
||||
error,
|
||||
) {
|
||||
var (
|
||||
newDeltaURL = ""
|
||||
newPaths = map[string]string{}
|
||||
excluded = map[string]struct{}{}
|
||||
invalidPrevDelta = len(prevDelta) == 0
|
||||
|
||||
// itemCollection is used to identify which collection a
|
||||
// file belongs to. This is useful to delete a file from the
|
||||
// collection it was previously in, in case it was moved to a
|
||||
// different collection within the same delta query
|
||||
// drive ID -> item ID -> item ID
|
||||
itemCollection = map[string]map[string]string{
|
||||
driveID: {},
|
||||
}
|
||||
)
|
||||
|
||||
if !invalidPrevDelta {
|
||||
maps.Copy(newPaths, oldPaths)
|
||||
pager.SetNextLink(prevDelta)
|
||||
}
|
||||
|
||||
for {
|
||||
// assume delta urls here, which allows single-token consumption
|
||||
page, err := pager.GetPage(graph.ConsumeNTokens(ctx, graph.SingleGetOrDeltaLC))
|
||||
|
||||
if graph.IsErrInvalidDelta(err) {
|
||||
logger.Ctx(ctx).Infow("Invalid previous delta link", "link", prevDelta)
|
||||
|
||||
invalidPrevDelta = true
|
||||
newPaths = map[string]string{}
|
||||
|
||||
pager.Reset(ctx)
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return DeltaUpdate{}, nil, nil, graph.Wrap(ctx, err, "getting page")
|
||||
}
|
||||
|
||||
vals := page.GetValue()
|
||||
|
||||
err = collector(
|
||||
ctx,
|
||||
driveID,
|
||||
driveName,
|
||||
vals,
|
||||
oldPaths,
|
||||
newPaths,
|
||||
excluded,
|
||||
itemCollection,
|
||||
invalidPrevDelta,
|
||||
errs)
|
||||
if err != nil {
|
||||
return DeltaUpdate{}, nil, nil, err
|
||||
}
|
||||
|
||||
nextLink, deltaLink := api.NextAndDeltaLink(page)
|
||||
|
||||
if len(deltaLink) > 0 {
|
||||
newDeltaURL = deltaLink
|
||||
}
|
||||
|
||||
// Check if there are more items
|
||||
if len(nextLink) == 0 {
|
||||
break
|
||||
}
|
||||
|
||||
logger.Ctx(ctx).Debugw("Found nextLink", "link", nextLink)
|
||||
pager.SetNextLink(nextLink)
|
||||
}
|
||||
|
||||
return DeltaUpdate{URL: newDeltaURL, Reset: invalidPrevDelta}, newPaths, excluded, nil
|
||||
}
|
||||
|
||||
// newItem initializes a `models.DriveItemable` that can be used as input to `createItem`
|
||||
func newItem(name string, folder bool) *models.DriveItem {
|
||||
itemToCreate := models.NewDriveItem()
|
||||
itemToCreate.SetName(&name)
|
||||
|
||||
if folder {
|
||||
itemToCreate.SetFolder(models.NewFolder())
|
||||
} else {
|
||||
itemToCreate.SetFile(models.NewFile())
|
||||
}
|
||||
|
||||
return itemToCreate
|
||||
}
|
||||
@ -87,6 +87,13 @@ func (h itemBackupHandler) NewDrivePager(
|
||||
return h.ac.NewUserDrivePager(resourceOwner, fields)
|
||||
}
|
||||
|
||||
func (h itemBackupHandler) NewItemPager(
|
||||
driveID, link string,
|
||||
fields []string,
|
||||
) api.DeltaPager[models.DriveItemable] {
|
||||
return h.ac.NewDriveItemDeltaPager(driveID, link, fields)
|
||||
}
|
||||
|
||||
func (h itemBackupHandler) AugmentItemInfo(
|
||||
dii details.ItemInfo,
|
||||
item models.DriveItemable,
|
||||
@ -132,13 +139,6 @@ func (h itemBackupHandler) IncludesDir(dir string) bool {
|
||||
return h.scope.Matches(selectors.OneDriveFolder, dir)
|
||||
}
|
||||
|
||||
func (h itemBackupHandler) EnumerateDriveItemsDelta(
|
||||
ctx context.Context,
|
||||
driveID, prevDeltaLink string,
|
||||
) ([]models.DriveItemable, api.DeltaUpdate, error) {
|
||||
return h.ac.EnumerateDriveItemsDelta(ctx, driveID, prevDeltaLink)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Restore
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
@ -20,6 +20,8 @@ import (
|
||||
"github.com/alcionai/corso/src/internal/tester/tconfig"
|
||||
"github.com/alcionai/corso/src/pkg/control"
|
||||
"github.com/alcionai/corso/src/pkg/control/testdata"
|
||||
"github.com/alcionai/corso/src/pkg/fault"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
)
|
||||
|
||||
@ -58,6 +60,83 @@ func (suite *ItemIntegrationSuite) SetupSuite() {
|
||||
suite.userDriveID = ptr.Val(odDrives[0].GetId())
|
||||
}
|
||||
|
||||
// TestItemReader is an integration test that makes a few assumptions
|
||||
// about the test environment
|
||||
// 1) It assumes the test user has a drive
|
||||
// 2) It assumes the drive has a file it can use to test `driveItemReader`
|
||||
// The test checks these in below
|
||||
func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
var driveItem models.DriveItemable
|
||||
// This item collector tries to find "a" drive item that is a non-empty
|
||||
// file to test the reader function
|
||||
itemCollector := func(
|
||||
_ context.Context,
|
||||
_, _ string,
|
||||
items []models.DriveItemable,
|
||||
_ map[string]string,
|
||||
_ map[string]string,
|
||||
_ map[string]struct{},
|
||||
_ map[string]map[string]string,
|
||||
_ bool,
|
||||
_ *fault.Bus,
|
||||
) error {
|
||||
if driveItem != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
for _, item := range items {
|
||||
if item.GetFile() != nil && ptr.Val(item.GetSize()) > 0 {
|
||||
driveItem = item
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
ip := suite.service.ac.
|
||||
Drives().
|
||||
NewDriveItemDeltaPager(suite.userDriveID, "", api.DriveItemSelectDefault())
|
||||
|
||||
_, _, _, err := collectItems(
|
||||
ctx,
|
||||
ip,
|
||||
suite.userDriveID,
|
||||
"General",
|
||||
itemCollector,
|
||||
map[string]string{},
|
||||
"",
|
||||
fault.New(true))
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
// Test Requirement 2: Need a file
|
||||
require.NotEmpty(
|
||||
t,
|
||||
driveItem,
|
||||
"no file item found for user %s drive %s",
|
||||
suite.user,
|
||||
suite.userDriveID)
|
||||
|
||||
bh := itemBackupHandler{
|
||||
suite.service.ac.Drives(),
|
||||
suite.user,
|
||||
(&selectors.OneDriveBackup{}).Folders(selectors.Any())[0],
|
||||
}
|
||||
|
||||
// Read data for the file
|
||||
itemData, err := downloadItem(ctx, bh, driveItem)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
size, err := io.Copy(io.Discard, itemData)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
require.NotZero(t, size)
|
||||
}
|
||||
|
||||
// TestItemWriter is an integration test for uploading data to OneDrive
|
||||
// It creates a new folder with a new item and writes data to it
|
||||
func (suite *ItemIntegrationSuite) TestItemWriter() {
|
||||
@ -92,7 +171,7 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
|
||||
ctx,
|
||||
test.driveID,
|
||||
ptr.Val(root.GetId()),
|
||||
api.NewDriveItem(newFolderName, true),
|
||||
newItem(newFolderName, true),
|
||||
control.Copy)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
require.NotNil(t, newFolder.GetId())
|
||||
@ -104,7 +183,7 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
|
||||
ctx,
|
||||
test.driveID,
|
||||
ptr.Val(newFolder.GetId()),
|
||||
api.NewDriveItem(newItemName, false),
|
||||
newItem(newItemName, false),
|
||||
control.Copy)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
require.NotNil(t, newItem.GetId())
|
||||
@ -238,7 +317,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
|
||||
{
|
||||
name: "success",
|
||||
itemFunc: func() models.DriveItemable {
|
||||
di := api.NewDriveItem("test", false)
|
||||
di := newItem("test", false)
|
||||
di.SetAdditionalData(map[string]any{
|
||||
"@microsoft.graph.downloadUrl": url,
|
||||
})
|
||||
@ -257,7 +336,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
|
||||
{
|
||||
name: "success, content url set instead of download url",
|
||||
itemFunc: func() models.DriveItemable {
|
||||
di := api.NewDriveItem("test", false)
|
||||
di := newItem("test", false)
|
||||
di.SetAdditionalData(map[string]any{
|
||||
"@content.downloadUrl": url,
|
||||
})
|
||||
@ -276,7 +355,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
|
||||
{
|
||||
name: "api getter returns error",
|
||||
itemFunc: func() models.DriveItemable {
|
||||
di := api.NewDriveItem("test", false)
|
||||
di := newItem("test", false)
|
||||
di.SetAdditionalData(map[string]any{
|
||||
"@microsoft.graph.downloadUrl": url,
|
||||
})
|
||||
@ -292,7 +371,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
|
||||
{
|
||||
name: "download url is empty",
|
||||
itemFunc: func() models.DriveItemable {
|
||||
di := api.NewDriveItem("test", false)
|
||||
di := newItem("test", false)
|
||||
return di
|
||||
},
|
||||
GetFunc: func(ctx context.Context, url string) (*http.Response, error) {
|
||||
@ -307,7 +386,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
|
||||
{
|
||||
name: "malware",
|
||||
itemFunc: func() models.DriveItemable {
|
||||
di := api.NewDriveItem("test", false)
|
||||
di := newItem("test", false)
|
||||
di.SetAdditionalData(map[string]any{
|
||||
"@microsoft.graph.downloadUrl": url,
|
||||
})
|
||||
@ -329,7 +408,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
|
||||
{
|
||||
name: "non-2xx http response",
|
||||
itemFunc: func() models.DriveItemable {
|
||||
di := api.NewDriveItem("test", false)
|
||||
di := newItem("test", false)
|
||||
di.SetAdditionalData(map[string]any{
|
||||
"@microsoft.graph.downloadUrl": url,
|
||||
})
|
||||
@ -378,7 +457,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem_ConnectionResetErrorOnFirstRead
|
||||
url = "https://example.com"
|
||||
|
||||
itemFunc = func() models.DriveItemable {
|
||||
di := api.NewDriveItem("test", false)
|
||||
di := newItem("test", false)
|
||||
di.SetAdditionalData(map[string]any{
|
||||
"@microsoft.graph.downloadUrl": url,
|
||||
})
|
||||
|
||||
@ -92,6 +92,13 @@ func (h libraryBackupHandler) NewDrivePager(
|
||||
return h.ac.NewSiteDrivePager(resourceOwner, fields)
|
||||
}
|
||||
|
||||
func (h libraryBackupHandler) NewItemPager(
|
||||
driveID, link string,
|
||||
fields []string,
|
||||
) api.DeltaPager[models.DriveItemable] {
|
||||
return h.ac.NewDriveItemDeltaPager(driveID, link, fields)
|
||||
}
|
||||
|
||||
func (h libraryBackupHandler) AugmentItemInfo(
|
||||
dii details.ItemInfo,
|
||||
item models.DriveItemable,
|
||||
@ -170,13 +177,6 @@ func (h libraryBackupHandler) IncludesDir(dir string) bool {
|
||||
return h.scope.Matches(selectors.SharePointLibraryFolder, dir)
|
||||
}
|
||||
|
||||
func (h libraryBackupHandler) EnumerateDriveItemsDelta(
|
||||
ctx context.Context,
|
||||
driveID, prevDeltaLink string,
|
||||
) ([]models.DriveItemable, api.DeltaUpdate, error) {
|
||||
return h.ac.EnumerateDriveItemsDelta(ctx, driveID, prevDeltaLink)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Restore
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
@ -671,7 +671,7 @@ func createFolder(
|
||||
ctx,
|
||||
driveID,
|
||||
parentFolderID,
|
||||
api.NewDriveItem(folderName, true),
|
||||
newItem(folderName, true),
|
||||
control.Replace)
|
||||
|
||||
// ErrItemAlreadyExistsConflict can only occur for folders if the
|
||||
@ -692,7 +692,7 @@ func createFolder(
|
||||
ctx,
|
||||
driveID,
|
||||
parentFolderID,
|
||||
api.NewDriveItem(folderName, true),
|
||||
newItem(folderName, true),
|
||||
control.Copy)
|
||||
if err != nil {
|
||||
return nil, clues.Wrap(err, "creating folder")
|
||||
@ -733,7 +733,7 @@ func restoreFile(
|
||||
}
|
||||
|
||||
var (
|
||||
item = api.NewDriveItem(name, false)
|
||||
item = newItem(name, false)
|
||||
collisionKey = api.DriveItemCollisionKey(item)
|
||||
collision api.DriveItemIDType
|
||||
shouldDeleteOriginal bool
|
||||
|
||||
@ -12,6 +12,7 @@ import (
|
||||
"github.com/alcionai/corso/src/internal/common/str"
|
||||
"github.com/alcionai/corso/src/pkg/fault"
|
||||
"github.com/alcionai/corso/src/pkg/logger"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
)
|
||||
|
||||
const (
|
||||
@ -46,7 +47,7 @@ type urlCache struct {
|
||||
refreshMu sync.Mutex
|
||||
deltaQueryCount int
|
||||
|
||||
edid EnumerateDriveItemsDeltaer
|
||||
itemPager api.DeltaPager[models.DriveItemable]
|
||||
|
||||
errs *fault.Bus
|
||||
}
|
||||
@ -55,10 +56,13 @@ type urlCache struct {
|
||||
func newURLCache(
|
||||
driveID, prevDelta string,
|
||||
refreshInterval time.Duration,
|
||||
edid EnumerateDriveItemsDeltaer,
|
||||
itemPager api.DeltaPager[models.DriveItemable],
|
||||
errs *fault.Bus,
|
||||
) (*urlCache, error) {
|
||||
err := validateCacheParams(driveID, refreshInterval, edid)
|
||||
err := validateCacheParams(
|
||||
driveID,
|
||||
refreshInterval,
|
||||
itemPager)
|
||||
if err != nil {
|
||||
return nil, clues.Wrap(err, "cache params")
|
||||
}
|
||||
@ -67,9 +71,9 @@ func newURLCache(
|
||||
idToProps: make(map[string]itemProps),
|
||||
lastRefreshTime: time.Time{},
|
||||
driveID: driveID,
|
||||
edid: edid,
|
||||
prevDelta: prevDelta,
|
||||
refreshInterval: refreshInterval,
|
||||
itemPager: itemPager,
|
||||
errs: errs,
|
||||
},
|
||||
nil
|
||||
@ -79,7 +83,7 @@ func newURLCache(
|
||||
func validateCacheParams(
|
||||
driveID string,
|
||||
refreshInterval time.Duration,
|
||||
edid EnumerateDriveItemsDeltaer,
|
||||
itemPager api.DeltaPager[models.DriveItemable],
|
||||
) error {
|
||||
if len(driveID) == 0 {
|
||||
return clues.New("drive id is empty")
|
||||
@ -89,8 +93,8 @@ func validateCacheParams(
|
||||
return clues.New("invalid refresh interval")
|
||||
}
|
||||
|
||||
if edid == nil {
|
||||
return clues.New("nil item enumerator")
|
||||
if itemPager == nil {
|
||||
return clues.New("nil item pager")
|
||||
}
|
||||
|
||||
return nil
|
||||
@ -156,23 +160,44 @@ func (uc *urlCache) refreshCache(
|
||||
// Issue a delta query to graph
|
||||
logger.Ctx(ctx).Info("refreshing url cache")
|
||||
|
||||
items, du, err := uc.edid.EnumerateDriveItemsDelta(ctx, uc.driveID, uc.prevDelta)
|
||||
err := uc.deltaQuery(ctx)
|
||||
if err != nil {
|
||||
// clear cache
|
||||
uc.idToProps = make(map[string]itemProps)
|
||||
return clues.Stack(err)
|
||||
}
|
||||
|
||||
uc.deltaQueryCount++
|
||||
|
||||
if err := uc.updateCache(ctx, items, uc.errs); err != nil {
|
||||
return clues.Stack(err)
|
||||
return err
|
||||
}
|
||||
|
||||
logger.Ctx(ctx).Info("url cache refreshed")
|
||||
|
||||
// Update last refresh time
|
||||
uc.lastRefreshTime = time.Now()
|
||||
uc.prevDelta = du.URL
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// deltaQuery performs a delta query on the drive and update the cache
|
||||
func (uc *urlCache) deltaQuery(
|
||||
ctx context.Context,
|
||||
) error {
|
||||
logger.Ctx(ctx).Debug("starting delta query")
|
||||
// Reset item pager to remove any previous state
|
||||
uc.itemPager.Reset(ctx)
|
||||
|
||||
_, _, _, err := collectItems(
|
||||
ctx,
|
||||
uc.itemPager,
|
||||
uc.driveID,
|
||||
"",
|
||||
uc.updateCache,
|
||||
map[string]string{},
|
||||
uc.prevDelta,
|
||||
uc.errs)
|
||||
if err != nil {
|
||||
return clues.Wrap(err, "delta query")
|
||||
}
|
||||
|
||||
uc.deltaQueryCount++
|
||||
|
||||
return nil
|
||||
}
|
||||
@ -199,7 +224,13 @@ func (uc *urlCache) readCache(
|
||||
// It assumes that cacheMu is held by caller in write mode
|
||||
func (uc *urlCache) updateCache(
|
||||
ctx context.Context,
|
||||
_, _ string,
|
||||
items []models.DriveItemable,
|
||||
_ map[string]string,
|
||||
_ map[string]string,
|
||||
_ map[string]struct{},
|
||||
_ map[string]map[string]string,
|
||||
_ bool,
|
||||
errs *fault.Bus,
|
||||
) error {
|
||||
el := errs.Local()
|
||||
|
||||
@ -1,6 +1,7 @@
|
||||
package drive
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"io"
|
||||
"math/rand"
|
||||
@ -17,19 +18,15 @@ import (
|
||||
"github.com/alcionai/corso/src/internal/common/dttm"
|
||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||
"github.com/alcionai/corso/src/internal/m365/graph"
|
||||
"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/internal/tester/tconfig"
|
||||
"github.com/alcionai/corso/src/pkg/control"
|
||||
"github.com/alcionai/corso/src/pkg/control/testdata"
|
||||
"github.com/alcionai/corso/src/pkg/fault"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
|
||||
)
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// integration
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
type URLCacheIntegrationSuite struct {
|
||||
tester.Suite
|
||||
ac api.Client
|
||||
@ -75,6 +72,7 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
|
||||
ac = suite.ac.Drives()
|
||||
driveID = suite.driveID
|
||||
newFolderName = testdata.DefaultRestoreConfig("folder").Location
|
||||
driveItemPager = suite.ac.Drives().NewDriveItemDeltaPager(driveID, "", api.DriveItemSelectDefault())
|
||||
)
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
@ -84,11 +82,11 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
|
||||
root, err := ac.GetRootFolder(ctx, driveID)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
newFolder, err := ac.PostItemInContainer(
|
||||
newFolder, err := ac.Drives().PostItemInContainer(
|
||||
ctx,
|
||||
driveID,
|
||||
ptr.Val(root.GetId()),
|
||||
api.NewDriveItem(newFolderName, true),
|
||||
newItem(newFolderName, true),
|
||||
control.Copy)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
@ -96,10 +94,33 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
|
||||
|
||||
nfid := ptr.Val(newFolder.GetId())
|
||||
|
||||
collectorFunc := func(
|
||||
context.Context,
|
||||
string,
|
||||
string,
|
||||
[]models.DriveItemable,
|
||||
map[string]string,
|
||||
map[string]string,
|
||||
map[string]struct{},
|
||||
map[string]map[string]string,
|
||||
bool,
|
||||
*fault.Bus,
|
||||
) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Get the previous delta to feed into url cache
|
||||
_, du, err := ac.EnumerateDriveItemsDelta(ctx, suite.driveID, "")
|
||||
prevDelta, _, _, err := collectItems(
|
||||
ctx,
|
||||
suite.ac.Drives().NewDriveItemDeltaPager(driveID, "", api.DriveItemSelectURLCache()),
|
||||
suite.driveID,
|
||||
"drive-name",
|
||||
collectorFunc,
|
||||
map[string]string{},
|
||||
"",
|
||||
fault.New(true))
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
require.NotEmpty(t, du.URL)
|
||||
require.NotNil(t, prevDelta.URL)
|
||||
|
||||
// Create a bunch of files in the new folder
|
||||
var items []models.DriveItemable
|
||||
@ -107,11 +128,11 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
|
||||
for i := 0; i < 5; i++ {
|
||||
newItemName := "test_url_cache_basic_" + dttm.FormatNow(dttm.SafeForTesting)
|
||||
|
||||
item, err := ac.PostItemInContainer(
|
||||
item, err := ac.Drives().PostItemInContainer(
|
||||
ctx,
|
||||
driveID,
|
||||
nfid,
|
||||
api.NewDriveItem(newItemName, false),
|
||||
newItem(newItemName, false),
|
||||
control.Copy)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
@ -121,9 +142,9 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
|
||||
// Create a new URL cache with a long TTL
|
||||
uc, err := newURLCache(
|
||||
suite.driveID,
|
||||
du.URL,
|
||||
prevDelta.URL,
|
||||
1*time.Hour,
|
||||
suite.ac.Drives(),
|
||||
driveItemPager,
|
||||
fault.New(true))
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
@ -174,10 +195,6 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
|
||||
require.Equal(t, 1, uc.deltaQueryCount)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// unit
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
type URLCacheUnitSuite struct {
|
||||
tester.Suite
|
||||
}
|
||||
@ -188,20 +205,27 @@ func TestURLCacheUnitSuite(t *testing.T) {
|
||||
|
||||
func (suite *URLCacheUnitSuite) TestGetItemProperties() {
|
||||
deltaString := "delta"
|
||||
next := "next"
|
||||
driveID := "drive1"
|
||||
|
||||
table := []struct {
|
||||
name string
|
||||
pagerItems map[string][]models.DriveItemable
|
||||
pagerErr map[string]error
|
||||
pagerResult map[string][]apiMock.PagerResult[models.DriveItemable]
|
||||
expectedItemProps map[string]itemProps
|
||||
expectedErr require.ErrorAssertionFunc
|
||||
cacheAssert func(*urlCache, time.Time)
|
||||
}{
|
||||
{
|
||||
name: "single item in cache",
|
||||
pagerItems: map[string][]models.DriveItemable{
|
||||
driveID: {fileItem("1", "file1", "root", "root", "https://dummy1.com", false)},
|
||||
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
|
||||
driveID: {
|
||||
{
|
||||
Values: []models.DriveItemable{
|
||||
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
|
||||
},
|
||||
DeltaLink: &deltaString,
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedItemProps: map[string]itemProps{
|
||||
"1": {
|
||||
@ -218,14 +242,19 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
|
||||
},
|
||||
{
|
||||
name: "multiple items in cache",
|
||||
pagerItems: map[string][]models.DriveItemable{
|
||||
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
|
||||
driveID: {
|
||||
{
|
||||
Values: []models.DriveItemable{
|
||||
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
|
||||
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
|
||||
fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
|
||||
fileItem("4", "file4", "root", "root", "https://dummy4.com", false),
|
||||
fileItem("5", "file5", "root", "root", "https://dummy5.com", false),
|
||||
},
|
||||
DeltaLink: &deltaString,
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedItemProps: map[string]itemProps{
|
||||
"1": {
|
||||
@ -258,14 +287,19 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
|
||||
},
|
||||
{
|
||||
name: "duplicate items with potentially new urls",
|
||||
pagerItems: map[string][]models.DriveItemable{
|
||||
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
|
||||
driveID: {
|
||||
{
|
||||
Values: []models.DriveItemable{
|
||||
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
|
||||
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
|
||||
fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
|
||||
fileItem("1", "file1", "root", "root", "https://test1.com", false),
|
||||
fileItem("2", "file2", "root", "root", "https://test2.com", false),
|
||||
},
|
||||
DeltaLink: &deltaString,
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedItemProps: map[string]itemProps{
|
||||
"1": {
|
||||
@ -290,12 +324,17 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
|
||||
},
|
||||
{
|
||||
name: "deleted items",
|
||||
pagerItems: map[string][]models.DriveItemable{
|
||||
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
|
||||
driveID: {
|
||||
{
|
||||
Values: []models.DriveItemable{
|
||||
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
|
||||
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
|
||||
fileItem("1", "file1", "root", "root", "https://dummy1.com", true),
|
||||
},
|
||||
DeltaLink: &deltaString,
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedItemProps: map[string]itemProps{
|
||||
"1": {
|
||||
@ -316,8 +355,15 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
|
||||
},
|
||||
{
|
||||
name: "item not found in cache",
|
||||
pagerItems: map[string][]models.DriveItemable{
|
||||
driveID: {fileItem("1", "file1", "root", "root", "https://dummy1.com", false)},
|
||||
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
|
||||
driveID: {
|
||||
{
|
||||
Values: []models.DriveItemable{
|
||||
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
|
||||
},
|
||||
DeltaLink: &deltaString,
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedItemProps: map[string]itemProps{
|
||||
"2": {},
|
||||
@ -330,10 +376,23 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "delta query error",
|
||||
pagerItems: map[string][]models.DriveItemable{},
|
||||
pagerErr: map[string]error{
|
||||
driveID: errors.New("delta query error"),
|
||||
name: "multi-page delta query error",
|
||||
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
|
||||
driveID: {
|
||||
{
|
||||
Values: []models.DriveItemable{
|
||||
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
|
||||
},
|
||||
NextLink: &next,
|
||||
},
|
||||
{
|
||||
Values: []models.DriveItemable{
|
||||
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
|
||||
},
|
||||
DeltaLink: &deltaString,
|
||||
Err: errors.New("delta query error"),
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedItemProps: map[string]itemProps{
|
||||
"1": {},
|
||||
@ -349,11 +408,16 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
|
||||
|
||||
{
|
||||
name: "folder item",
|
||||
pagerItems: map[string][]models.DriveItemable{
|
||||
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
|
||||
driveID: {
|
||||
{
|
||||
Values: []models.DriveItemable{
|
||||
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
|
||||
driveItem("2", "folder2", "root", "root", false, true, false),
|
||||
},
|
||||
DeltaLink: &deltaString,
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedItemProps: map[string]itemProps{
|
||||
"2": {},
|
||||
@ -373,17 +437,15 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
medi := mock.EnumeratesDriveItemsDelta{
|
||||
Items: test.pagerItems,
|
||||
Err: test.pagerErr,
|
||||
DeltaUpdate: map[string]api.DeltaUpdate{driveID: {URL: deltaString}},
|
||||
itemPager := &apiMock.DeltaPager[models.DriveItemable]{
|
||||
ToReturn: test.pagerResult[driveID],
|
||||
}
|
||||
|
||||
cache, err := newURLCache(
|
||||
driveID,
|
||||
"",
|
||||
1*time.Hour,
|
||||
&medi,
|
||||
itemPager,
|
||||
fault.New(true))
|
||||
|
||||
require.NoError(suite.T(), err, clues.ToCore(err))
|
||||
@ -418,17 +480,15 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
|
||||
|
||||
// Test needsRefresh
|
||||
func (suite *URLCacheUnitSuite) TestNeedsRefresh() {
|
||||
var (
|
||||
t = suite.T()
|
||||
driveID = "drive1"
|
||||
refreshInterval = 1 * time.Second
|
||||
)
|
||||
driveID := "drive1"
|
||||
t := suite.T()
|
||||
refreshInterval := 1 * time.Second
|
||||
|
||||
cache, err := newURLCache(
|
||||
driveID,
|
||||
"",
|
||||
refreshInterval,
|
||||
&mock.EnumeratesDriveItemsDelta{},
|
||||
&apiMock.DeltaPager[models.DriveItemable]{},
|
||||
fault.New(true))
|
||||
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
@ -450,12 +510,14 @@ func (suite *URLCacheUnitSuite) TestNeedsRefresh() {
|
||||
require.False(t, cache.needsRefresh())
|
||||
}
|
||||
|
||||
// Test newURLCache
|
||||
func (suite *URLCacheUnitSuite) TestNewURLCache() {
|
||||
// table driven tests
|
||||
table := []struct {
|
||||
name string
|
||||
driveID string
|
||||
refreshInt time.Duration
|
||||
itemPager EnumerateDriveItemsDeltaer
|
||||
itemPager api.DeltaPager[models.DriveItemable]
|
||||
errors *fault.Bus
|
||||
expectedErr require.ErrorAssertionFunc
|
||||
}{
|
||||
@ -463,7 +525,7 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
|
||||
name: "invalid driveID",
|
||||
driveID: "",
|
||||
refreshInt: 1 * time.Hour,
|
||||
itemPager: &mock.EnumeratesDriveItemsDelta{},
|
||||
itemPager: &apiMock.DeltaPager[models.DriveItemable]{},
|
||||
errors: fault.New(true),
|
||||
expectedErr: require.Error,
|
||||
},
|
||||
@ -471,12 +533,12 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
|
||||
name: "invalid refresh interval",
|
||||
driveID: "drive1",
|
||||
refreshInt: 100 * time.Millisecond,
|
||||
itemPager: &mock.EnumeratesDriveItemsDelta{},
|
||||
itemPager: &apiMock.DeltaPager[models.DriveItemable]{},
|
||||
errors: fault.New(true),
|
||||
expectedErr: require.Error,
|
||||
},
|
||||
{
|
||||
name: "invalid item enumerator",
|
||||
name: "invalid itemPager",
|
||||
driveID: "drive1",
|
||||
refreshInt: 1 * time.Hour,
|
||||
itemPager: nil,
|
||||
@ -487,7 +549,7 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
|
||||
name: "valid",
|
||||
driveID: "drive1",
|
||||
refreshInt: 1 * time.Hour,
|
||||
itemPager: &mock.EnumeratesDriveItemsDelta{},
|
||||
itemPager: &apiMock.DeltaPager[models.DriveItemable]{},
|
||||
errors: fault.New(true),
|
||||
expectedErr: require.NoError,
|
||||
},
|
||||
|
||||
@ -2,6 +2,7 @@ package groups
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
@ -526,6 +527,8 @@ func (suite *BackupIntgSuite) TestCreateCollections() {
|
||||
|
||||
require.NotEmpty(t, c.FullPath().Folder(false))
|
||||
|
||||
fmt.Printf("\n-----\nfolder %+v\n-----\n", c.FullPath().Folder(false))
|
||||
|
||||
// TODO(ashmrtn): Remove when LocationPath is made part of BackupCollection
|
||||
// interface.
|
||||
if !assert.Implements(t, (*data.LocationPather)(nil), c) {
|
||||
@ -534,6 +537,8 @@ func (suite *BackupIntgSuite) TestCreateCollections() {
|
||||
|
||||
loc := c.(data.LocationPather).LocationPath().String()
|
||||
|
||||
fmt.Printf("\n-----\nloc %+v\n-----\n", c.(data.LocationPather).LocationPath().String())
|
||||
|
||||
require.NotEmpty(t, loc)
|
||||
|
||||
delete(test.channelNames, loc)
|
||||
|
||||
@ -8,13 +8,11 @@ import (
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/drives"
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/models"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
|
||||
"github.com/alcionai/corso/src/pkg/backup/details"
|
||||
"github.com/alcionai/corso/src/pkg/control"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
|
||||
)
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
@ -24,8 +22,6 @@ import (
|
||||
type BackupHandler struct {
|
||||
ItemInfo details.ItemInfo
|
||||
|
||||
DriveItemEnumeration EnumeratesDriveItemsDelta
|
||||
|
||||
GI GetsItem
|
||||
GIP GetsItemPermission
|
||||
|
||||
@ -59,7 +55,6 @@ func DefaultOneDriveBH(resourceOwner string) *BackupHandler {
|
||||
OneDrive: &details.OneDriveInfo{},
|
||||
Extension: &details.ExtensionData{},
|
||||
},
|
||||
DriveItemEnumeration: EnumeratesDriveItemsDelta{},
|
||||
GI: GetsItem{Err: clues.New("not defined")},
|
||||
GIP: GetsItemPermission{Err: clues.New("not defined")},
|
||||
PathPrefixFn: defaultOneDrivePathPrefixer,
|
||||
@ -129,6 +124,10 @@ func (h BackupHandler) NewDrivePager(string, []string) api.Pager[models.Driveabl
|
||||
return h.DrivePagerV
|
||||
}
|
||||
|
||||
func (h BackupHandler) NewItemPager(driveID string, _ string, _ []string) api.DeltaPager[models.DriveItemable] {
|
||||
return h.ItemPagerV[driveID]
|
||||
}
|
||||
|
||||
func (h BackupHandler) FormatDisplayPath(_ string, pb *path.Builder) string {
|
||||
return "/" + pb.String()
|
||||
}
|
||||
@ -153,13 +152,6 @@ func (h *BackupHandler) Get(context.Context, string, map[string]string) (*http.R
|
||||
return h.GetResps[c], h.GetErrs[c]
|
||||
}
|
||||
|
||||
func (h BackupHandler) EnumerateDriveItemsDelta(
|
||||
ctx context.Context,
|
||||
driveID, prevDeltaLink string,
|
||||
) ([]models.DriveItemable, api.DeltaUpdate, error) {
|
||||
return h.DriveItemEnumeration.EnumerateDriveItemsDelta(ctx, driveID, prevDeltaLink)
|
||||
}
|
||||
|
||||
func (h BackupHandler) GetItem(ctx context.Context, _, _ string) (models.DriveItemable, error) {
|
||||
return h.GI.GetItem(ctx, "", "")
|
||||
}
|
||||
@ -262,65 +254,6 @@ func (m GetsItem) GetItem(
|
||||
return m.Item, m.Err
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Enumerates Drive Items
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
type EnumeratesDriveItemsDelta struct {
|
||||
Items map[string][]models.DriveItemable
|
||||
DeltaUpdate map[string]api.DeltaUpdate
|
||||
Err map[string]error
|
||||
}
|
||||
|
||||
func (edi EnumeratesDriveItemsDelta) EnumerateDriveItemsDelta(
|
||||
_ context.Context,
|
||||
driveID, _ string,
|
||||
) (
|
||||
[]models.DriveItemable,
|
||||
api.DeltaUpdate,
|
||||
error,
|
||||
) {
|
||||
return edi.Items[driveID], edi.DeltaUpdate[driveID], edi.Err[driveID]
|
||||
}
|
||||
|
||||
func PagerResultToEDID(
|
||||
m map[string][]apiMock.PagerResult[models.DriveItemable],
|
||||
) EnumeratesDriveItemsDelta {
|
||||
edi := EnumeratesDriveItemsDelta{
|
||||
Items: map[string][]models.DriveItemable{},
|
||||
DeltaUpdate: map[string]api.DeltaUpdate{},
|
||||
Err: map[string]error{},
|
||||
}
|
||||
|
||||
for driveID, results := range m {
|
||||
var (
|
||||
err error
|
||||
items = []models.DriveItemable{}
|
||||
deltaUpdate api.DeltaUpdate
|
||||
)
|
||||
|
||||
for _, pr := range results {
|
||||
items = append(items, pr.Values...)
|
||||
|
||||
if pr.DeltaLink != nil {
|
||||
deltaUpdate = api.DeltaUpdate{URL: ptr.Val(pr.DeltaLink)}
|
||||
}
|
||||
|
||||
if pr.Err != nil {
|
||||
err = pr.Err
|
||||
}
|
||||
|
||||
deltaUpdate.Reset = deltaUpdate.Reset || pr.ResetDelta
|
||||
}
|
||||
|
||||
edi.Items[driveID] = items
|
||||
edi.Err[driveID] = err
|
||||
edi.DeltaUpdate[driveID] = deltaUpdate
|
||||
}
|
||||
|
||||
return edi
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Get Item Permissioner
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
@ -90,8 +90,11 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
|
||||
|
||||
var (
|
||||
paths = map[string]string{}
|
||||
currPaths = map[string]string{}
|
||||
newPaths = map[string]string{}
|
||||
excluded = map[string]struct{}{}
|
||||
itemColls = map[string]map[string]string{
|
||||
driveID: {},
|
||||
}
|
||||
collMap = map[string]map[string]*drive.Collection{
|
||||
driveID: {},
|
||||
}
|
||||
@ -106,14 +109,15 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
|
||||
|
||||
c.CollectionMap = collMap
|
||||
|
||||
_, err := c.UpdateCollections(
|
||||
err := c.UpdateCollections(
|
||||
ctx,
|
||||
driveID,
|
||||
"General",
|
||||
test.items,
|
||||
paths,
|
||||
currPaths,
|
||||
newPaths,
|
||||
excluded,
|
||||
itemColls,
|
||||
true,
|
||||
fault.New(true))
|
||||
|
||||
|
||||
@ -384,20 +384,20 @@ func (pec printableErrCore) Values() []string {
|
||||
// funcs, and the function that spawned the local bus should always
|
||||
// return `local.Failure()` to ensure that hard failures are propagated
|
||||
// back upstream.
|
||||
func (e *Bus) Local() *LocalBus {
|
||||
return &LocalBus{
|
||||
func (e *Bus) Local() *localBus {
|
||||
return &localBus{
|
||||
mu: &sync.Mutex{},
|
||||
bus: e,
|
||||
}
|
||||
}
|
||||
|
||||
type LocalBus struct {
|
||||
type localBus struct {
|
||||
mu *sync.Mutex
|
||||
bus *Bus
|
||||
current error
|
||||
}
|
||||
|
||||
func (e *LocalBus) AddRecoverable(ctx context.Context, err error) {
|
||||
func (e *localBus) AddRecoverable(ctx context.Context, err error) {
|
||||
if err == nil {
|
||||
return
|
||||
}
|
||||
@ -422,7 +422,7 @@ func (e *LocalBus) AddRecoverable(ctx context.Context, err error) {
|
||||
// 2. Skipping avoids a permanent and consistent failure. If
|
||||
// the underlying reason is transient or otherwise recoverable,
|
||||
// the item should not be skipped.
|
||||
func (e *LocalBus) AddSkip(ctx context.Context, s *Skipped) {
|
||||
func (e *localBus) AddSkip(ctx context.Context, s *Skipped) {
|
||||
if s == nil {
|
||||
return
|
||||
}
|
||||
@ -437,7 +437,7 @@ func (e *LocalBus) AddSkip(ctx context.Context, s *Skipped) {
|
||||
// It does not return the underlying bus.Failure(), only the failure
|
||||
// that was recorded within the local bus instance. This error should
|
||||
// get returned by any func which created a local bus.
|
||||
func (e *LocalBus) Failure() error {
|
||||
func (e *localBus) Failure() error {
|
||||
return e.current
|
||||
}
|
||||
|
||||
|
||||
@ -697,7 +697,7 @@ func (s ExchangeScope) IncludesCategory(cat exchangeCategory) bool {
|
||||
// returns true if the category is included in the scope's data type,
|
||||
// and the value is set to Any().
|
||||
func (s ExchangeScope) IsAny(cat exchangeCategory) bool {
|
||||
return IsAnyTarget(s, cat)
|
||||
return isAnyTarget(s, cat)
|
||||
}
|
||||
|
||||
// Get returns the data category in the scope. If the scope
|
||||
|
||||
@ -699,7 +699,7 @@ func (s GroupsScope) IncludesCategory(cat groupsCategory) bool {
|
||||
// returns true if the category is included in the scope's data type,
|
||||
// and the value is set to Any().
|
||||
func (s GroupsScope) IsAny(cat groupsCategory) bool {
|
||||
return IsAnyTarget(s, cat)
|
||||
return isAnyTarget(s, cat)
|
||||
}
|
||||
|
||||
// Get returns the data category in the scope. If the scope
|
||||
|
||||
@ -484,7 +484,7 @@ func (s OneDriveScope) Matches(cat oneDriveCategory, target string) bool {
|
||||
// returns true if the category is included in the scope's data type,
|
||||
// and the value is set to Any().
|
||||
func (s OneDriveScope) IsAny(cat oneDriveCategory) bool {
|
||||
return IsAnyTarget(s, cat)
|
||||
return isAnyTarget(s, cat)
|
||||
}
|
||||
|
||||
// Get returns the data category in the scope. If the scope
|
||||
|
||||
@ -694,7 +694,7 @@ func matchesPathValues[T scopeT, C categoryT](
|
||||
return false
|
||||
}
|
||||
|
||||
if IsAnyTarget(sc, cc) {
|
||||
if isAnyTarget(sc, cc) {
|
||||
// continue, not return: all path keys must match the entry to succeed
|
||||
continue
|
||||
}
|
||||
@ -795,7 +795,7 @@ func isNoneTarget[T scopeT, C categoryT](s T, cat C) bool {
|
||||
|
||||
// returns true if the category is included in the scope's category type,
|
||||
// and the value is set to Any().
|
||||
func IsAnyTarget[T scopeT, C categoryT](s T, cat C) bool {
|
||||
func isAnyTarget[T scopeT, C categoryT](s T, cat C) bool {
|
||||
if !typeAndCategoryMatches(cat, s.categorizer()) {
|
||||
return false
|
||||
}
|
||||
|
||||
@ -125,14 +125,14 @@ func (suite *SelectorScopesSuite) TestGetCatValue() {
|
||||
func (suite *SelectorScopesSuite) TestIsAnyTarget() {
|
||||
t := suite.T()
|
||||
stub := stubScope("")
|
||||
assert.True(t, IsAnyTarget(stub, rootCatStub))
|
||||
assert.True(t, IsAnyTarget(stub, leafCatStub))
|
||||
assert.False(t, IsAnyTarget(stub, mockCategorizer("smarf")))
|
||||
assert.True(t, isAnyTarget(stub, rootCatStub))
|
||||
assert.True(t, isAnyTarget(stub, leafCatStub))
|
||||
assert.False(t, isAnyTarget(stub, mockCategorizer("smarf")))
|
||||
|
||||
stub = stubScope("none")
|
||||
assert.False(t, IsAnyTarget(stub, rootCatStub))
|
||||
assert.False(t, IsAnyTarget(stub, leafCatStub))
|
||||
assert.False(t, IsAnyTarget(stub, mockCategorizer("smarf")))
|
||||
assert.False(t, isAnyTarget(stub, rootCatStub))
|
||||
assert.False(t, isAnyTarget(stub, leafCatStub))
|
||||
assert.False(t, isAnyTarget(stub, mockCategorizer("smarf")))
|
||||
}
|
||||
|
||||
var reduceTestTable = []struct {
|
||||
|
||||
@ -625,7 +625,7 @@ func (s SharePointScope) IncludesCategory(cat sharePointCategory) bool {
|
||||
// returns true if the category is included in the scope's data type,
|
||||
// and the value is set to Any().
|
||||
func (s SharePointScope) IsAny(cat sharePointCategory) bool {
|
||||
return IsAnyTarget(s, cat)
|
||||
return isAnyTarget(s, cat)
|
||||
}
|
||||
|
||||
// Get returns the data category in the scope. If the scope
|
||||
|
||||
@ -101,7 +101,7 @@ func idAnd(ss ...string) []string {
|
||||
// exported
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
func DefaultDriveItemProps() []string {
|
||||
func DriveItemSelectDefault() []string {
|
||||
return idAnd(
|
||||
"content.downloadUrl",
|
||||
"createdBy",
|
||||
|
||||
11
src/pkg/services/m365/api/delta.go
Normal file
11
src/pkg/services/m365/api/delta.go
Normal file
@ -0,0 +1,11 @@
|
||||
package api
|
||||
|
||||
// DeltaUpdate holds the results of a current delta token. It normally
|
||||
// gets produced when aggregating the addition and removal of items in
|
||||
// a delta-queryable folder.
|
||||
type DeltaUpdate struct {
|
||||
// the deltaLink itself
|
||||
URL string
|
||||
// true if the old delta was marked as invalid
|
||||
Reset bool
|
||||
}
|
||||
@ -351,10 +351,6 @@ func (c Drives) PostItemLinkShareUpdate(
|
||||
return itm, nil
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// helper funcs
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// DriveItemCollisionKeyy constructs a key from the item name.
|
||||
// collision keys are used to identify duplicate item conflicts for handling advanced restoration config.
|
||||
func DriveItemCollisionKey(item models.DriveItemable) string {
|
||||
@ -364,17 +360,3 @@ func DriveItemCollisionKey(item models.DriveItemable) string {
|
||||
|
||||
return ptr.Val(item.GetName())
|
||||
}
|
||||
|
||||
// NewDriveItem initializes a `models.DriveItemable` with either a folder or file entry.
|
||||
func NewDriveItem(name string, folder bool) *models.DriveItem {
|
||||
itemToCreate := models.NewDriveItem()
|
||||
itemToCreate.SetName(&name)
|
||||
|
||||
if folder {
|
||||
itemToCreate.SetFolder(models.NewFolder())
|
||||
} else {
|
||||
itemToCreate.SetFile(models.NewFile())
|
||||
}
|
||||
|
||||
return itemToCreate
|
||||
}
|
||||
|
||||
@ -15,11 +15,6 @@ import (
|
||||
"github.com/alcionai/corso/src/pkg/logger"
|
||||
)
|
||||
|
||||
type DriveItemIDType struct {
|
||||
ItemID string
|
||||
IsFolder bool
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// non-delta item pager
|
||||
// ---------------------------------------------------------------------------
|
||||
@ -70,6 +65,11 @@ func (p *driveItemPageCtrl) ValidModTimes() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
type DriveItemIDType struct {
|
||||
ItemID string
|
||||
IsFolder bool
|
||||
}
|
||||
|
||||
func (c Drives) GetItemsInContainerByCollisionKey(
|
||||
ctx context.Context,
|
||||
driveID, containerID string,
|
||||
@ -131,9 +131,9 @@ type DriveItemDeltaPageCtrl struct {
|
||||
options *drives.ItemItemsItemDeltaRequestBuilderGetRequestConfiguration
|
||||
}
|
||||
|
||||
func (c Drives) newDriveItemDeltaPager(
|
||||
driveID, prevDeltaLink string,
|
||||
selectProps ...string,
|
||||
func (c Drives) NewDriveItemDeltaPager(
|
||||
driveID, link string,
|
||||
selectFields []string,
|
||||
) *DriveItemDeltaPageCtrl {
|
||||
preferHeaderItems := []string{
|
||||
"deltashowremovedasdeleted",
|
||||
@ -142,32 +142,28 @@ func (c Drives) newDriveItemDeltaPager(
|
||||
"hierarchicalsharing",
|
||||
}
|
||||
|
||||
options := &drives.ItemItemsItemDeltaRequestBuilderGetRequestConfiguration{
|
||||
requestConfig := &drives.ItemItemsItemDeltaRequestBuilderGetRequestConfiguration{
|
||||
Headers: newPreferHeaders(preferHeaderItems...),
|
||||
QueryParameters: &drives.ItemItemsItemDeltaRequestBuilderGetQueryParameters{},
|
||||
}
|
||||
|
||||
if len(selectProps) > 0 {
|
||||
options.QueryParameters.Select = selectProps
|
||||
}
|
||||
|
||||
builder := c.Stable.
|
||||
Client().
|
||||
Drives().
|
||||
ByDriveId(driveID).
|
||||
Items().
|
||||
ByDriveItemId(onedrive.RootID).
|
||||
Delta()
|
||||
|
||||
if len(prevDeltaLink) > 0 {
|
||||
builder = drives.NewItemItemsItemDeltaRequestBuilder(prevDeltaLink, c.Stable.Adapter())
|
||||
QueryParameters: &drives.ItemItemsItemDeltaRequestBuilderGetQueryParameters{
|
||||
Select: selectFields,
|
||||
},
|
||||
}
|
||||
|
||||
res := &DriveItemDeltaPageCtrl{
|
||||
gs: c.Stable,
|
||||
driveID: driveID,
|
||||
options: options,
|
||||
builder: builder,
|
||||
options: requestConfig,
|
||||
builder: c.Stable.
|
||||
Client().
|
||||
Drives().
|
||||
ByDriveId(driveID).
|
||||
Items().
|
||||
ByDriveItemId(onedrive.RootID).
|
||||
Delta(),
|
||||
}
|
||||
|
||||
if len(link) > 0 {
|
||||
res.builder = drives.NewItemItemsItemDeltaRequestBuilder(link, c.Stable.Adapter())
|
||||
}
|
||||
|
||||
return res
|
||||
@ -197,27 +193,6 @@ func (p *DriveItemDeltaPageCtrl) ValidModTimes() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
// EnumerateDriveItems will enumerate all items in the specified drive and hand them to the
|
||||
// provided `collector` method
|
||||
func (c Drives) EnumerateDriveItemsDelta(
|
||||
ctx context.Context,
|
||||
driveID string,
|
||||
prevDeltaLink string,
|
||||
) (
|
||||
[]models.DriveItemable,
|
||||
DeltaUpdate,
|
||||
error,
|
||||
) {
|
||||
pager := c.newDriveItemDeltaPager(driveID, prevDeltaLink, DefaultDriveItemProps()...)
|
||||
|
||||
items, du, err := deltaEnumerateItems[models.DriveItemable](ctx, pager, prevDeltaLink)
|
||||
if err != nil {
|
||||
return nil, du, clues.Stack(err)
|
||||
}
|
||||
|
||||
return items, du, nil
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// user's drives pager
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
@ -178,18 +178,3 @@ func (suite *DrivePagerIntgSuite) TestDrives_GetItemIDsInContainer() {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *DrivePagerIntgSuite) TestEnumerateDriveItems() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
items, du, err := suite.its.
|
||||
ac.
|
||||
Drives().
|
||||
EnumerateDriveItemsDelta(ctx, suite.its.user.driveID, "")
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
require.NotEmpty(t, items, "no items found in user's drive")
|
||||
assert.NotEmpty(t, du.URL, "should have a delta link")
|
||||
}
|
||||
|
||||
@ -17,7 +17,6 @@ import (
|
||||
"github.com/alcionai/corso/src/internal/tester/tconfig"
|
||||
"github.com/alcionai/corso/src/pkg/control"
|
||||
"github.com/alcionai/corso/src/pkg/control/testdata"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
)
|
||||
|
||||
type DriveAPIIntgSuite struct {
|
||||
@ -51,6 +50,20 @@ func (suite *DriveAPIIntgSuite) TestDrives_CreatePagerAndGetPage() {
|
||||
assert.NotNil(t, a)
|
||||
}
|
||||
|
||||
// newItem initializes a `models.DriveItemable` that can be used as input to `createItem`
|
||||
func newItem(name string, folder bool) *models.DriveItem {
|
||||
itemToCreate := models.NewDriveItem()
|
||||
itemToCreate.SetName(&name)
|
||||
|
||||
if folder {
|
||||
itemToCreate.SetFolder(models.NewFolder())
|
||||
} else {
|
||||
itemToCreate.SetFile(models.NewFile())
|
||||
}
|
||||
|
||||
return itemToCreate
|
||||
}
|
||||
|
||||
func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer() {
|
||||
t := suite.T()
|
||||
|
||||
@ -65,12 +78,12 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer() {
|
||||
ctx,
|
||||
suite.its.user.driveID,
|
||||
suite.its.user.driveRootFolderID,
|
||||
api.NewDriveItem(rc.Location, true),
|
||||
newItem(rc.Location, true),
|
||||
control.Replace)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
// generate a folder to use for collision testing
|
||||
folder := api.NewDriveItem("collision", true)
|
||||
folder := newItem("collision", true)
|
||||
origFolder, err := acd.PostItemInContainer(
|
||||
ctx,
|
||||
suite.its.user.driveID,
|
||||
@ -80,7 +93,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer() {
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
// generate an item to use for collision testing
|
||||
file := api.NewDriveItem("collision.txt", false)
|
||||
file := newItem("collision.txt", false)
|
||||
origFile, err := acd.PostItemInContainer(
|
||||
ctx,
|
||||
suite.its.user.driveID,
|
||||
@ -228,7 +241,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer_replaceFolderRegr
|
||||
ctx,
|
||||
suite.its.user.driveID,
|
||||
suite.its.user.driveRootFolderID,
|
||||
api.NewDriveItem(rc.Location, true),
|
||||
newItem(rc.Location, true),
|
||||
// skip instead of replace here to get
|
||||
// an ErrItemAlreadyExistsConflict, just in case.
|
||||
control.Skip)
|
||||
@ -236,7 +249,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer_replaceFolderRegr
|
||||
|
||||
// generate items within that folder
|
||||
for i := 0; i < 5; i++ {
|
||||
file := api.NewDriveItem(fmt.Sprintf("collision_%d.txt", i), false)
|
||||
file := newItem(fmt.Sprintf("collision_%d.txt", i), false)
|
||||
f, err := acd.PostItemInContainer(
|
||||
ctx,
|
||||
suite.its.user.driveID,
|
||||
@ -252,7 +265,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer_replaceFolderRegr
|
||||
ctx,
|
||||
suite.its.user.driveID,
|
||||
ptr.Val(folder.GetParentReference().GetId()),
|
||||
api.NewDriveItem(rc.Location, true),
|
||||
newItem(rc.Location, true),
|
||||
control.Replace)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
require.NotEmpty(t, ptr.Val(resultFolder.GetId()))
|
||||
|
||||
@ -13,20 +13,6 @@ import (
|
||||
"github.com/alcionai/corso/src/pkg/logger"
|
||||
)
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// common structs
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// DeltaUpdate holds the results of a current delta token. It normally
|
||||
// gets produced when aggregating the addition and removal of items in
|
||||
// a delta-queryable folder.
|
||||
type DeltaUpdate struct {
|
||||
// the deltaLink itself
|
||||
URL string
|
||||
// true if the old delta was marked as invalid
|
||||
Reset bool
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// common interfaces
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
@ -35,7 +35,6 @@ type PagerResult[T any] struct {
|
||||
Values []T
|
||||
NextLink *string
|
||||
DeltaLink *string
|
||||
ResetDelta bool
|
||||
Err error
|
||||
}
|
||||
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user