require rootID on tree construction (#4746)
Turns out the root folder's name isn't an appropriate match for establishing the root node. Instead, the backup handler is now extended with a GetRootFolder method and passes the expected root folder ID into the tree's constructor func to ensure we establish the correct root node.

---

#### Does this PR need a docs update or release note?

- [x] ⛔ No

#### Type of change

- [x] 🐛 Bugfix

#### Issue(s)

* #4689

#### Test Plan

- [x] ⚡ Unit test
- [x] 💚 E2E
This commit is contained in:
  parent c6306942f7
  commit 54ba241fbe
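A minimal sketch of the new construction flow, using the names from the diff below (error handling abbreviated):

```go
// Resolve the drive's actual root folder up front, then seed the tree with
// its ID. The tree no longer guesses the root by matching on folder name.
root, err := c.handler.GetRootFolder(ctx, ptr.Val(drv.GetId()))
if err != nil {
	return clues.Wrap(err, "getting root folder")
}

tree := newFolderyMcFolderFace(ppfx, ptr.Val(root.GetId()))
```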
@@ -8,6 +8,7 @@ import (
 	"github.com/alcionai/clues"
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
+	"github.com/pkg/errors"
 	"golang.org/x/exp/maps"
 
 	"github.com/alcionai/corso/src/internal/common/idname"
@@ -296,7 +297,7 @@ func (c *Collections) Get(
 ) ([]data.BackupCollection, bool, error) {
 	if c.ctrl.ToggleFeatures.UseDeltaTree {
 		colls, canUsePrevBackup, err := c.getTree(ctx, prevMetadata, ssmb, errs)
-		if err != nil {
+		if err != nil && !errors.Is(err, errGetTreeNotImplemented) {
 			return nil, false, clues.Wrap(err, "processing backup using tree")
 		}
 
@@ -828,7 +829,7 @@ func (c *Collections) PopulateDriveCollections(
 			break
 		}
 
-		counter.Inc(count.PagesEnumerated)
+		counter.Inc(count.TotalPagesEnumerated)
 
 		if reset {
 			counter.Inc(count.PagerResets)
File diff suppressed because it is too large
@@ -154,7 +154,12 @@ func (c *Collections) getTree(
 
 	logger.Ctx(ctx).Infow("produced collections", "count_collections", len(collections))
 
-	return collections, canUsePrevBackup, nil
+	// hack to satisfy the linter since we're returning an error
+	if ctx == nil {
+		return nil, false, nil
+	}
+
+	return collections, canUsePrevBackup, errGetTreeNotImplemented
 }
 
 func (c *Collections) makeDriveCollections(
@@ -171,7 +176,12 @@ func (c *Collections) makeDriveCollections(
 		return nil, nil, pagers.DeltaUpdate{}, clues.Wrap(err, "generating backup tree prefix")
 	}
 
-	tree := newFolderyMcFolderFace(ppfx)
+	root, err := c.handler.GetRootFolder(ctx, ptr.Val(drv.GetId()))
+	if err != nil {
+		return nil, nil, pagers.DeltaUpdate{}, clues.Wrap(err, "getting root folder")
+	}
+
+	tree := newFolderyMcFolderFace(ppfx, ptr.Val(root.GetId()))
 
 	counter.Add(count.PrevPaths, int64(len(prevPaths)))
 
@@ -272,65 +282,105 @@ func (c *Collections) populateTree(
 	ctx = clues.Add(ctx, "invalid_prev_delta", len(prevDeltaLink) == 0)
 
 	var (
-		driveID = ptr.Val(drv.GetId())
-		el      = errs.Local()
+		currDeltaLink = prevDeltaLink
+		driveID       = ptr.Val(drv.GetId())
+		el            = errs.Local()
+		du            pagers.DeltaUpdate
+		finished      bool
+		hitLimit      bool
+		// TODO: plug this into the limiter
+		maxDeltas   = 100
+		countDeltas = 0
 	)
 
-	// TODO(keepers): to end in a correct state, we'll eventually need to run this
-	// query multiple times over, until it ends in an empty change set.
-	pager := c.handler.EnumerateDriveItemsDelta(
-		ctx,
-		driveID,
-		prevDeltaLink,
-		api.CallConfig{
-			Select: api.DefaultDriveItemProps(),
-		})
-
-	for page, reset, done := pager.NextPage(); !done; page, reset, done = pager.NextPage() {
-		if el.Failure() != nil {
-			break
-		}
-
-		if reset {
-			counter.Inc(count.PagerResets)
-			tree.reset()
-			c.resetStats()
-		}
-
-		err := c.enumeratePageOfItems(
-			ctx,
-			tree,
-			drv,
-			page,
-			limiter,
-			counter,
-			errs)
-		if err != nil {
-			if errors.Is(err, errHitLimit) {
-				break
-			}
-
-			el.AddRecoverable(ctx, clues.Stack(err))
-		}
-
-		counter.Inc(count.PagesEnumerated)
-
-		// Stop enumeration early if we've reached the page limit. Keep this
-		// at the end of the loop so we don't request another page (pager.NextPage)
-		// before seeing we've passed the limit.
-		if limiter.hitPageLimit(int(counter.Get(count.PagesEnumerated))) {
-			break
-		}
-	}
-
-	// Always cancel the pager so that even if we exit early from the loop above
-	// we don't deadlock. Cancelling a pager that's already completed is
-	// essentially a noop.
-	pager.Cancel()
-
-	du, err := pager.Results()
-	if err != nil {
-		return du, clues.Stack(err)
-	}
+	// enumerate through multiple deltas until we either:
+	// 1. hit a consistent state (ie: no changes since last delta enum)
+	// 2. hit the limit
+	for !hitLimit && !finished && el.Failure() == nil {
+		counter.Inc(count.TotalDeltasProcessed)
+
+		var (
+			pageCount     int
+			pageItemCount int
+			err           error
+		)
+
+		countDeltas++
+
+		pager := c.handler.EnumerateDriveItemsDelta(
+			ctx,
+			driveID,
+			currDeltaLink,
+			api.CallConfig{
+				Select: api.DefaultDriveItemProps(),
+			})
+
+		for page, reset, done := pager.NextPage(); !done; page, reset, done = pager.NextPage() {
+			if el.Failure() != nil {
+				return du, el.Failure()
+			}
+
+			if reset {
+				counter.Inc(count.PagerResets)
+				tree.reset()
+				c.resetStats()
+
+				pageCount = 0
+				pageItemCount = 0
+				countDeltas = 0
+			} else {
+				counter.Inc(count.TotalPagesEnumerated)
+			}
+
+			err = c.enumeratePageOfItems(
+				ctx,
+				tree,
+				drv,
+				page,
+				limiter,
+				counter,
+				errs)
+			if err != nil {
+				if errors.Is(err, errHitLimit) {
+					hitLimit = true
+					break
+				}
+
+				el.AddRecoverable(ctx, clues.Stack(err))
+			}
+
+			pageCount++
+
+			pageItemCount += len(page)
+
+			// Stop enumeration early if we've reached the page limit. Keep this
+			// at the end of the loop so we don't request another page (pager.NextPage)
+			// before seeing we've passed the limit.
+			if limiter.hitPageLimit(pageCount) {
+				hitLimit = true
+				break
+			}
+		}
+
+		// Always cancel the pager so that even if we exit early from the loop above
+		// we don't deadlock. Cancelling a pager that's already completed is
+		// essentially a noop.
+		pager.Cancel()
+
+		du, err = pager.Results()
+		if err != nil {
+			return du, clues.Stack(err)
+		}
+
+		currDeltaLink = du.URL
+
+		// 0 pages is never expected. We should at least have one (empty) page to
+		// consume. But checking pageCount == 1 is brittle in a non-helpful way.
+		finished = pageCount < 2 && pageItemCount == 0
+
+		if countDeltas >= maxDeltas {
+			return pagers.DeltaUpdate{}, clues.New("unable to produce consistent delta after 100 queries")
+		}
+	}
 
 	logger.Ctx(ctx).Infow("enumerated collection delta", "stats", counter.Values())
File diff suppressed because it is too large
@@ -6,7 +6,6 @@ import (
 
 	"github.com/alcionai/clues"
 
-	odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
 	"github.com/alcionai/corso/src/pkg/logger"
 	"github.com/alcionai/corso/src/pkg/path"
 )
@@ -23,6 +22,10 @@ type folderyMcFolderFace struct {
 	// new, moved, and notMoved root
 	root *nodeyMcNodeFace
 
+	// the ID of the actual root folder.
+	// required to ensure correct population of the root node.
+	rootID string
+
 	// the majority of operations we perform can be handled with
 	// a folder ID lookup instead of re-walking the entire tree.
 	// Ex: adding a new file to its parent folder.
@@ -45,9 +48,11 @@ type folderyMcFolderFace struct {
 
 func newFolderyMcFolderFace(
 	prefix path.Path,
+	rootID string,
 ) *folderyMcFolderFace {
 	return &folderyMcFolderFace{
 		prefix:           prefix,
+		rootID:           rootID,
 		folderIDToNode:   map[string]*nodeyMcNodeFace{},
 		tombstones:       map[string]*nodeyMcNodeFace{},
 		fileIDToParentID: map[string]string{},
@@ -150,17 +155,12 @@ func (face *folderyMcFolderFace) setFolder(
 		return clues.NewWC(ctx, "missing folder name")
 	}
 
-	// drive doesn't normally allow the `:` character in folder names.
-	// so `root:` is, by default, the only folder that can match this
-	// name. That makes this check a little bit brittle, but generally
-	// reliable, since we should always see the root first and can rely
-	// on the naming structure.
-	if len(parentID) == 0 && name != odConsts.RootPathDir {
+	if len(parentID) == 0 && id != face.rootID {
 		return clues.NewWC(ctx, "non-root folder missing parent id")
 	}
 
 	// only set the root node once.
-	if name == odConsts.RootPathDir {
+	if id == face.rootID {
 		if face.root == nil {
 			root := newNodeyMcNodeFace(nil, id, name, isPackage)
 			face.root = root
@@ -13,75 +13,6 @@ import (
 	"github.com/alcionai/corso/src/pkg/path"
 )
 
-// ---------------------------------------------------------------------------
-// helpers
-// ---------------------------------------------------------------------------
-
-var loc = path.NewElements("root:/foo/bar/baz/qux/fnords/smarf/voi/zumba/bangles/howdyhowdyhowdy")
-
-func treeWithRoot() *folderyMcFolderFace {
-	tree := newFolderyMcFolderFace(nil)
-	rootey := newNodeyMcNodeFace(nil, rootID, rootName, false)
-	tree.root = rootey
-	tree.folderIDToNode[rootID] = rootey
-
-	return tree
-}
-
-func treeWithTombstone() *folderyMcFolderFace {
-	tree := treeWithRoot()
-	tree.tombstones[id(folder)] = newNodeyMcNodeFace(nil, id(folder), "", false)
-
-	return tree
-}
-
-func treeWithFolders() *folderyMcFolderFace {
-	tree := treeWithRoot()
-
-	o := newNodeyMcNodeFace(tree.root, idx(folder, "parent"), namex(folder, "parent"), true)
-	tree.folderIDToNode[o.id] = o
-	tree.root.children[o.id] = o
-
-	f := newNodeyMcNodeFace(o, id(folder), name(folder), false)
-	tree.folderIDToNode[f.id] = f
-	o.children[f.id] = f
-
-	return tree
-}
-
-func treeWithFileAtRoot() *folderyMcFolderFace {
-	tree := treeWithRoot()
-	tree.root.files[id(file)] = fileyMcFileFace{
-		lastModified: time.Now(),
-		contentSize:  42,
-	}
-	tree.fileIDToParentID[id(file)] = rootID
-
-	return tree
-}
-
-func treeWithFileInFolder() *folderyMcFolderFace {
-	tree := treeWithFolders()
-	tree.folderIDToNode[id(folder)].files[id(file)] = fileyMcFileFace{
-		lastModified: time.Now(),
-		contentSize:  42,
-	}
-	tree.fileIDToParentID[id(file)] = id(folder)
-
-	return tree
-}
-
-func treeWithFileInTombstone() *folderyMcFolderFace {
-	tree := treeWithTombstone()
-	tree.tombstones[id(folder)].files[id(file)] = fileyMcFileFace{
-		lastModified: time.Now(),
-		contentSize:  42,
-	}
-	tree.fileIDToParentID[id(file)] = id(folder)
-
-	return tree
-}
-
 // ---------------------------------------------------------------------------
 // tests
 // ---------------------------------------------------------------------------
@@ -102,7 +33,7 @@ func (suite *DeltaTreeUnitSuite) TestNewFolderyMcFolderFace() {
 
 	require.NoError(t, err, clues.ToCore(err))
 
-	folderFace := newFolderyMcFolderFace(p)
+	folderFace := newFolderyMcFolderFace(p, rootID)
 	assert.Equal(t, p, folderFace.prefix)
 	assert.Nil(t, folderFace.root)
 	assert.NotNil(t, folderFace.folderIDToNode)
@@ -144,7 +75,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder() {
 	}{
 		{
 			tname:     "add root",
-			tree:      newFolderyMcFolderFace(nil),
+			tree:      newFolderyMcFolderFace(nil, rootID),
 			id:        rootID,
 			name:      rootName,
 			isPackage: true,
@@ -272,7 +203,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddTombstone() {
 		{
 			name:      "add tombstone",
 			id:        id(folder),
-			tree:      newFolderyMcFolderFace(nil),
+			tree:      newFolderyMcFolderFace(nil, rootID),
 			expectErr: assert.NoError,
 		},
 		{
@@ -283,7 +214,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddTombstone() {
 		},
 		{
 			name:      "missing ID",
-			tree:      newFolderyMcFolderFace(nil),
+			tree:      newFolderyMcFolderFace(nil, rootID),
 			expectErr: assert.Error,
 		},
 		{
@@ -39,6 +39,7 @@ type BackupHandler interface {
 	api.Getter
 	GetItemPermissioner
 	GetItemer
+	GetRootFolderer
 	NewDrivePagerer
 	EnumerateDriveItemsDeltaer
 
@@ -1,17 +1,35 @@
 package drive
 
 import (
+	"context"
+	"fmt"
 	"testing"
+	"time"
 
 	"github.com/alcionai/clues"
+	"github.com/microsoftgraph/msgraph-sdk-go/models"
+	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 
+	"github.com/alcionai/corso/src/internal/common/idname"
+	"github.com/alcionai/corso/src/internal/common/ptr"
+	"github.com/alcionai/corso/src/internal/data"
+	dataMock "github.com/alcionai/corso/src/internal/data/mock"
+	"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
+	odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
+	"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
 	"github.com/alcionai/corso/src/internal/m365/support"
 	"github.com/alcionai/corso/src/internal/tester/tconfig"
 	"github.com/alcionai/corso/src/pkg/account"
+	bupMD "github.com/alcionai/corso/src/pkg/backup/metadata"
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/count"
+	"github.com/alcionai/corso/src/pkg/fault"
+	"github.com/alcionai/corso/src/pkg/path"
+	"github.com/alcionai/corso/src/pkg/selectors"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
+	"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
+	apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
 )
 
 const defaultItemSize int64 = 42
@@ -59,3 +77,768 @@ func loadTestService(t *testing.T) *oneDriveService {
 
 	return service
 }
+
+// ---------------------------------------------------------------------------
+// collections
+// ---------------------------------------------------------------------------
+
+type statePath struct {
+	state    data.CollectionState
+	currPath path.Path
+	prevPath path.Path
+}
+
+func toODPath(t *testing.T, s string) path.Path {
+	spl := path.Split(s)
+	p, err := path.Builder{}.
+		Append(spl[4:]...).
+		ToDataLayerPath(
+			spl[0],
+			spl[2],
+			path.OneDriveService,
+			path.FilesCategory,
+			false)
+	require.NoError(t, err, clues.ToCore(err))
+
+	return p
+}
+
+func asDeleted(t *testing.T, prev string) statePath {
+	return statePath{
+		state:    data.DeletedState,
+		prevPath: toODPath(t, prev),
+	}
+}
+
+func asMoved(t *testing.T, prev, curr string) statePath {
+	return statePath{
+		state:    data.MovedState,
+		prevPath: toODPath(t, prev),
+		currPath: toODPath(t, curr),
+	}
+}
+
+func asNew(t *testing.T, curr string) statePath {
+	return statePath{
+		state:    data.NewState,
+		currPath: toODPath(t, curr),
+	}
+}
+
+func asNotMoved(t *testing.T, p string) statePath {
+	return statePath{
+		state:    data.NotMovedState,
+		prevPath: toODPath(t, p),
+		currPath: toODPath(t, p),
+	}
+}
+
+// ---------------------------------------------------------------------------
+// stub drive items
+// ---------------------------------------------------------------------------
+
+type itemType int
+
+const (
+	isFile    itemType = 1
+	isFolder  itemType = 2
+	isPackage itemType = 3
+)
+
+func coreItem(
+	id, name, parentPath, parentID string,
+	it itemType,
+) *models.DriveItem {
+	item := models.NewDriveItem()
+	item.SetName(&name)
+	item.SetId(&id)
+
+	parentReference := models.NewItemReference()
+	parentReference.SetPath(&parentPath)
+	parentReference.SetId(&parentID)
+	item.SetParentReference(parentReference)
+
+	switch it {
+	case isFile:
+		item.SetSize(ptr.To[int64](42))
+		item.SetFile(models.NewFile())
+	case isFolder:
+		item.SetFolder(models.NewFolder())
+	case isPackage:
+		item.SetPackageEscaped(models.NewPackageEscaped())
+	}
+
+	return item
+}
+
+func driveItem(
+	id, name, parentPath, parentID string,
+	it itemType,
+) models.DriveItemable {
+	return coreItem(id, name, parentPath, parentID, it)
+}
+
+func fileAtRoot() models.DriveItemable {
+	return driveItem(id(file), name(file), parentDir(), rootID, isFile)
+}
+
+func fileAt(
+	parentX any,
+) models.DriveItemable {
+	pd := parentDir(namex(folder, parentX))
+	pid := idx(folder, parentX)
+
+	if parentX == folder {
+		pd = parentDir(name(folder))
+		pid = id(folder)
+	}
+
+	return driveItem(
+		id(file),
+		name(file),
+		pd,
+		pid,
+		isFile)
+}
+
+func fileAtDeep(
+	parentDir, parentID string,
+) models.DriveItemable {
+	return driveItem(
+		id(file),
+		name(file),
+		parentDir,
+		parentID,
+		isFile)
+}
+
+func filexAtRoot(
+	x any,
+) models.DriveItemable {
+	return driveItem(
+		idx(file, x),
+		namex(file, x),
+		parentDir(),
+		rootID,
+		isFile)
+}
+
+func filexAt(
+	x, parentX any,
+) models.DriveItemable {
+	pd := parentDir(namex(folder, parentX))
+	pid := idx(folder, parentX)
+
+	if parentX == folder {
+		pd = parentDir(name(folder))
+		pid = id(folder)
+	}
+
+	return driveItem(
+		idx(file, x),
+		namex(file, x),
+		pd,
+		pid,
+		isFile)
+}
+
+func filexWSizeAtRoot(
+	x any,
+	size int64,
+) models.DriveItemable {
+	return driveItemWithSize(
+		idx(file, x),
+		namex(file, x),
+		parentDir(),
+		rootID,
+		size,
+		isFile)
+}
+
+func filexWSizeAt(
+	x, parentX any,
+	size int64,
+) models.DriveItemable {
+	pd := parentDir(namex(folder, parentX))
+	pid := idx(folder, parentX)
+
+	if parentX == folder {
+		pd = parentDir(name(folder))
+		pid = id(folder)
+	}
+
+	return driveItemWithSize(
+		idx(file, x),
+		namex(file, x),
+		pd,
+		pid,
+		size,
+		isFile)
+}
+
+func folderAtRoot() models.DriveItemable {
+	return driveItem(id(folder), name(folder), parentDir(), rootID, isFolder)
+}
+
+func folderAtDeep(
+	parentDir, parentID string,
+) models.DriveItemable {
+	return driveItem(
+		id(folder),
+		name(folder),
+		parentDir,
+		parentID,
+		isFolder)
+}
+
+func folderxAt(
+	x, parentX any,
+) models.DriveItemable {
+	pd := parentDir(namex(folder, parentX))
+	pid := idx(folder, parentX)
+
+	if parentX == folder {
+		pd = parentDir(name(folder))
+		pid = id(folder)
+	}
+
+	return driveItem(
+		idx(folder, x),
+		namex(folder, x),
+		pd,
+		pid,
+		isFolder)
+}
+
+func folderxAtRoot(
+	x any,
+) models.DriveItemable {
+	return driveItem(
+		idx(folder, x),
+		namex(folder, x),
+		parentDir(),
+		rootID,
+		isFolder)
+}
+
+func driveItemWithSize(
+	id, name, parentPath, parentID string,
+	size int64,
+	it itemType,
+) models.DriveItemable {
+	res := coreItem(id, name, parentPath, parentID, it)
+	res.SetSize(ptr.To(size))
+
+	return res
+}
+
+func fileItem(
+	id, name, parentPath, parentID, url string,
+	deleted bool,
+) models.DriveItemable {
+	di := driveItem(id, name, parentPath, parentID, isFile)
+	di.SetAdditionalData(map[string]any{
+		"@microsoft.graph.downloadUrl": url,
+	})
+
+	if deleted {
+		di.SetDeleted(models.NewDeleted())
+	}
+
+	return di
+}
+
+func malwareItem(
+	id, name, parentPath, parentID string,
+	it itemType,
+) models.DriveItemable {
+	c := coreItem(id, name, parentPath, parentID, it)
+
+	mal := models.NewMalware()
+	malStr := "test malware"
+	mal.SetDescription(&malStr)
+
+	c.SetMalware(mal)
+
+	return c
+}
+
+func driveRootItem() models.DriveItemable {
+	item := models.NewDriveItem()
+	item.SetName(ptr.To(rootName))
+	item.SetId(ptr.To(rootID))
+	item.SetRoot(models.NewRoot())
+	item.SetFolder(models.NewFolder())
+
+	return item
+}
+
+// delItem creates a DriveItemable that is marked as deleted. path must be set
+// to the base drive path.
+func delItem(
+	id string,
+	parentID string,
+	it itemType,
+) models.DriveItemable {
+	item := models.NewDriveItem()
+	item.SetId(&id)
+	item.SetDeleted(models.NewDeleted())
+
+	parentReference := models.NewItemReference()
+	parentReference.SetId(&parentID)
+	item.SetParentReference(parentReference)
+
+	switch it {
+	case isFile:
+		item.SetFile(models.NewFile())
+	case isFolder:
+		item.SetFolder(models.NewFolder())
+	case isPackage:
+		item.SetPackageEscaped(models.NewPackageEscaped())
+	}
+
+	return item
+}
+
+func id(v string) string {
+	return fmt.Sprintf("id_%s_0", v)
+}
+
+func idx(v string, sfx any) string {
+	return fmt.Sprintf("id_%s_%v", v, sfx)
+}
+
+func name(v string) string {
+	return fmt.Sprintf("n_%s_0", v)
+}
+
+func namex(v string, sfx any) string {
+	return fmt.Sprintf("n_%s_%v", v, sfx)
+}
+
+func toPath(elems ...string) string {
+	es := []string{}
+	for _, elem := range elems {
+		es = append(es, path.Split(elem)...)
+	}
+
+	switch len(es) {
+	case 0:
+		return ""
+	case 1:
+		return es[0]
+	default:
+		return path.Builder{}.Append(es...).String()
+	}
+}
+
+func fullPath(elems ...string) string {
+	return toPath(append(
+		[]string{
+			tenant,
+			path.OneDriveService.String(),
+			user,
+			path.FilesCategory.String(),
+			odConsts.DriveFolderPrefixBuilder(id(drive)).String(),
+		},
+		elems...)...)
+}
+
+func driveFullPath(driveID any, elems ...string) string {
+	return toPath(append(
+		[]string{
+			tenant,
+			path.OneDriveService.String(),
+			user,
+			path.FilesCategory.String(),
+			odConsts.DriveFolderPrefixBuilder(idx(drive, driveID)).String(),
+		},
+		elems...)...)
+}
+
+func parentDir(elems ...string) string {
+	return toPath(append(
+		[]string{odConsts.DriveFolderPrefixBuilder(id(drive)).String()},
+		elems...)...)
+}
+
+func driveParentDir(driveID any, elems ...string) string {
+	return toPath(append(
+		[]string{odConsts.DriveFolderPrefixBuilder(idx(drive, driveID)).String()},
+		elems...)...)
+}
+
+// just for readability
+const (
+	doMergeItems    = true
+	doNotMergeItems = false
+)
+
+// common item names
+const (
+	bar       = "bar"
+	delta     = "delta_url"
+	drive     = "drive"
+	fanny     = "fanny"
+	file      = "file"
+	folder    = "folder"
+	foo       = "foo"
+	item      = "item"
+	malware   = "malware"
+	nav       = "nav"
+	pkg       = "package"
+	rootID    = odConsts.RootID
+	rootName  = odConsts.RootPathDir
+	subfolder = "subfolder"
+	tenant    = "t"
+	user      = "u"
+)
+
+var anyFolderScope = (&selectors.OneDriveBackup{}).Folders(selectors.Any())[0]
+
+type failingColl struct{}
+
+func (f failingColl) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item {
+	ic := make(chan data.Item)
+	defer close(ic)
+
+	errs.AddRecoverable(ctx, assert.AnError)
+
+	return ic
+}
+func (f failingColl) FullPath() path.Path                                        { return nil }
+func (f failingColl) FetchItemByName(context.Context, string) (data.Item, error) { return nil, nil }
+
+func makeExcludeMap(files ...string) map[string]struct{} {
+	delList := map[string]struct{}{}
+	for _, file := range files {
+		delList[file+metadata.DataFileSuffix] = struct{}{}
+		delList[file+metadata.MetaFileSuffix] = struct{}{}
+	}
+
+	return delList
+}
+
+// ---------------------------------------------------------------------------
+// limiter
+// ---------------------------------------------------------------------------
+
+func minimumLimitOpts() control.Options {
+	minLimitOpts := control.DefaultOptions()
+	minLimitOpts.PreviewLimits.Enabled = true
+	minLimitOpts.PreviewLimits.MaxBytes = 1
+	minLimitOpts.PreviewLimits.MaxContainers = 1
+	minLimitOpts.PreviewLimits.MaxItems = 1
+	minLimitOpts.PreviewLimits.MaxItemsPerContainer = 1
+	minLimitOpts.PreviewLimits.MaxPages = 1
+
+	return minLimitOpts
+}
+
+// ---------------------------------------------------------------------------
+// enumerators
+// ---------------------------------------------------------------------------
+
+func collWithMBH(mbh BackupHandler) *Collections {
+	return NewCollections(
+		mbh,
+		tenant,
+		idname.NewProvider(user, user),
+		func(*support.ControllerOperationStatus) {},
+		control.Options{ToggleFeatures: control.Toggles{
+			UseDeltaTree: true,
+		}},
+		count.New())
+}
+
+func collWithMBHAndOpts(
+	mbh BackupHandler,
+	opts control.Options,
+) *Collections {
+	return NewCollections(
+		mbh,
+		tenant,
+		idname.NewProvider(user, user),
+		func(*support.ControllerOperationStatus) {},
+		opts,
+		count.New())
+}
+
+// func fullOrPrevPath(
+// 	t *testing.T,
+// 	coll data.BackupCollection,
+// ) path.Path {
+// 	var collPath path.Path
+
+// 	if coll.State() != data.DeletedState {
+// 		collPath = coll.FullPath()
+// 	} else {
+// 		collPath = coll.PreviousPath()
+// 	}
+
+// 	require.False(
+// 		t,
+// 		len(collPath.Elements()) < 4,
+// 		"malformed or missing collection path")
+
+// 	return collPath
+// }
+
+func pagerForDrives(drives ...models.Driveable) *apiMock.Pager[models.Driveable] {
+	return &apiMock.Pager[models.Driveable]{
+		ToReturn: []apiMock.PagerResult[models.Driveable]{
+			{Values: drives},
+		},
+	}
+}
+
+func makePrevMetadataColls(
+	t *testing.T,
+	mbh BackupHandler,
+	previousPaths map[string]map[string]string,
+) []data.RestoreCollection {
+	pathPrefix, err := mbh.MetadataPathPrefix(tenant)
+	require.NoError(t, err, clues.ToCore(err))
+
+	prevDeltas := map[string]string{}
+
+	for driveID := range previousPaths {
+		prevDeltas[driveID] = idx(delta, "prev")
+	}
+
+	mdColl, err := graph.MakeMetadataCollection(
+		pathPrefix,
+		[]graph.MetadataCollectionEntry{
+			graph.NewMetadataEntry(bupMD.DeltaURLsFileName, prevDeltas),
+			graph.NewMetadataEntry(bupMD.PreviousPathFileName, previousPaths),
+		},
+		func(*support.ControllerOperationStatus) {},
+		count.New())
+	require.NoError(t, err, "creating metadata collection", clues.ToCore(err))
+
+	return []data.RestoreCollection{
+		dataMock.NewUnversionedRestoreCollection(t, data.NoFetchRestoreCollection{Collection: mdColl}),
+	}
+}
+
+// func compareMetadata(
+// 	t *testing.T,
+// 	mdColl data.Collection,
+// 	expectDeltas map[string]string,
+// 	expectPrevPaths map[string]map[string]string,
+// ) {
+// 	ctx, flush := tester.NewContext(t)
+// 	defer flush()
+
+// 	colls := []data.RestoreCollection{
+// 		dataMock.NewUnversionedRestoreCollection(t, data.NoFetchRestoreCollection{Collection: mdColl}),
+// 	}
+
+// 	deltas, prevs, _, err := deserializeAndValidateMetadata(
+// 		ctx,
+// 		colls,
+// 		count.New(),
+// 		fault.New(true))
+// 	require.NoError(t, err, "deserializing metadata", clues.ToCore(err))
+// 	assert.Equal(t, expectDeltas, deltas, "delta urls")
+// 	assert.Equal(t, expectPrevPaths, prevs, "previous paths")
+// }
+
+// for comparisons done by collection state
+type stateAssertion struct {
+	itemIDs []string
+	// should never get set by the user.
+	// this flag gets flipped when calling assertions.compare.
+	// any unseen collection will error on requireNoUnseenCollections
+	// sawCollection bool
+}
+
+// for comparisons done by a given collection path
+type collectionAssertion struct {
+	doNotMerge    assert.BoolAssertionFunc
+	states        map[data.CollectionState]*stateAssertion
+	excludedItems map[string]struct{}
+}
+
+type statesToItemIDs map[data.CollectionState][]string
+
+// TODO(keepers): move excludeItems to a more global position.
+func newCollAssertion(
+	doNotMerge bool,
+	itemsByState statesToItemIDs,
+	excludeItems ...string,
+) collectionAssertion {
+	states := map[data.CollectionState]*stateAssertion{}
+
+	for state, itemIDs := range itemsByState {
+		states[state] = &stateAssertion{
+			itemIDs: itemIDs,
+		}
+	}
+
+	dnm := assert.False
+	if doNotMerge {
+		dnm = assert.True
+	}
+
+	return collectionAssertion{
+		doNotMerge:    dnm,
+		states:        states,
+		excludedItems: makeExcludeMap(excludeItems...),
+	}
+}
+
+// to aggregate all collection-related expectations in the backup
+// map collection path -> collection state -> assertion
+type collectionAssertions map[string]collectionAssertion
+
+// ensure the provided collection matches expectations as set by the test.
+// func (cas collectionAssertions) compare(
+// 	t *testing.T,
+// 	coll data.BackupCollection,
+// 	excludes *prefixmatcher.StringSetMatchBuilder,
+// ) {
+// 	ctx, flush := tester.NewContext(t)
+// 	defer flush()
+
+// 	var (
+// 		itemCh  = coll.Items(ctx, fault.New(true))
+// 		itemIDs = []string{}
+// 	)
+
+// 	p := fullOrPrevPath(t, coll)
+
+// 	for itm := range itemCh {
+// 		itemIDs = append(itemIDs, itm.ID())
+// 	}
+
+// 	expect := cas[p.String()]
+// 	expectState := expect.states[coll.State()]
+// 	expectState.sawCollection = true
+
+// 	assert.ElementsMatchf(
+// 		t,
+// 		expectState.itemIDs,
+// 		itemIDs,
+// 		"expected all items to match in collection with:\nstate %q\npath %q",
+// 		coll.State(),
+// 		p)
+
+// 	expect.doNotMerge(
+// 		t,
+// 		coll.DoNotMergeItems(),
+// 		"expected collection to have the appropariate doNotMerge flag")
+
+// 	if result, ok := excludes.Get(p.String()); ok {
+// 		assert.Equal(
+// 			t,
+// 			expect.excludedItems,
+// 			result,
+// 			"excluded items")
+// 	}
+// }
+
+// ensure that no collections in the expected set are still flagged
+// as sawCollection == false.
+// func (cas collectionAssertions) requireNoUnseenCollections(
+// 	t *testing.T,
+// ) {
+// 	for p, withPath := range cas {
+// 		for _, state := range withPath.states {
+// 			require.True(
+// 				t,
+// 				state.sawCollection,
+// 				"results should have contained collection:\n\t%q\t\n%q",
+// 				state, p)
+// 		}
+// 	}
+// }
+
+func aPage(items ...models.DriveItemable) mock.NextPage {
+	return mock.NextPage{
+		Items: append([]models.DriveItemable{driveRootItem()}, items...),
+	}
+}
+
+func aPageWReset(items ...models.DriveItemable) mock.NextPage {
+	return mock.NextPage{
+		Items: append([]models.DriveItemable{driveRootItem()}, items...),
+		Reset: true,
+	}
+}
+
+func aReset(items ...models.DriveItemable) mock.NextPage {
+	return mock.NextPage{
+		Items: []models.DriveItemable{},
+		Reset: true,
+	}
+}
+
+// ---------------------------------------------------------------------------
+// delta trees
+// ---------------------------------------------------------------------------
+
+var loc = path.NewElements("root:/foo/bar/baz/qux/fnords/smarf/voi/zumba/bangles/howdyhowdyhowdy")
+
+func treeWithRoot() *folderyMcFolderFace {
+	tree := newFolderyMcFolderFace(nil, rootID)
+	rootey := newNodeyMcNodeFace(nil, rootID, rootName, false)
+	tree.root = rootey
+	tree.folderIDToNode[rootID] = rootey
+
+	return tree
+}
+
+func treeWithTombstone() *folderyMcFolderFace {
+	tree := treeWithRoot()
+	tree.tombstones[id(folder)] = newNodeyMcNodeFace(nil, id(folder), "", false)
+
+	return tree
+}
+
+func treeWithFolders() *folderyMcFolderFace {
+	tree := treeWithRoot()
+
+	parent := newNodeyMcNodeFace(tree.root, idx(folder, "parent"), namex(folder, "parent"), true)
+	tree.folderIDToNode[parent.id] = parent
+	tree.root.children[parent.id] = parent
+
+	f := newNodeyMcNodeFace(parent, id(folder), name(folder), false)
+	tree.folderIDToNode[f.id] = f
+	parent.children[f.id] = f
+
+	return tree
+}
+
+func treeWithFileAtRoot() *folderyMcFolderFace {
+	tree := treeWithRoot()
+	tree.root.files[id(file)] = fileyMcFileFace{
+		lastModified: time.Now(),
+		contentSize:  42,
+	}
+	tree.fileIDToParentID[id(file)] = rootID
+
+	return tree
+}
+
+func treeWithFileInFolder() *folderyMcFolderFace {
+	tree := treeWithFolders()
+	tree.folderIDToNode[id(folder)].files[id(file)] = fileyMcFileFace{
+		lastModified: time.Now(),
+		contentSize:  42,
+	}
+	tree.fileIDToParentID[id(file)] = id(folder)
+
+	return tree
+}
+
+func treeWithFileInTombstone() *folderyMcFolderFace {
+	tree := treeWithTombstone()
+	tree.tombstones[id(folder)].files[id(file)] = fileyMcFileFace{
+		lastModified: time.Now(),
+		contentSize:  42,
+	}
+	tree.fileIDToParentID[id(file)] = id(folder)
+
+	return tree
+}
@@ -20,29 +20,8 @@ import (
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
 	apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
-	"github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
 )
 
-// ---------------------------------------------------------------------------
-// helpers
-// ---------------------------------------------------------------------------
-
-func minimumLimitOpts() control.Options {
-	minLimitOpts := control.DefaultOptions()
-	minLimitOpts.PreviewLimits.Enabled = true
-	minLimitOpts.PreviewLimits.MaxBytes = 1
-	minLimitOpts.PreviewLimits.MaxContainers = 1
-	minLimitOpts.PreviewLimits.MaxItems = 1
-	minLimitOpts.PreviewLimits.MaxItemsPerContainer = 1
-	minLimitOpts.PreviewLimits.MaxPages = 1
-
-	return minLimitOpts
-}
-
-// ---------------------------------------------------------------------------
-// tests
-// ---------------------------------------------------------------------------
-
 type LimiterUnitSuite struct {
 	tester.Suite
 }
@@ -55,7 +34,7 @@ type backupLimitTest struct {
 	name       string
 	limits     control.PreviewItemLimits
 	drives     []models.Driveable
-	enumerator mock.EnumerateItemsDeltaByDrive
+	enumerator mock.EnumerateDriveItemsDelta
 	// Collection name -> set of item IDs. We can't check item data because
 	// that's not mocked out. Metadata is checked separately.
 	expectedItemIDsInCollection map[string][]string
@@ -82,17 +61,12 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) {
 				MaxPages: 999,
 			},
 			drives: []models.Driveable{drive1},
-			enumerator: mock.EnumerateItemsDeltaByDrive{
-				DrivePagers: map[string]*mock.DriveItemsDeltaPager{
-					id(drive): {
-						Pages: pagesOf(pageItems(
-							driveItemWithSize(idx(file, 1), namex(file, 1), parentDir(), rootID, 7, isFile),
-							driveItemWithSize(idx(file, 2), namex(file, 2), parentDir(), rootID, 1, isFile),
-							driveItemWithSize(idx(file, 3), namex(file, 3), parentDir(), rootID, 1, isFile))),
-						DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)},
-					},
-				},
-			},
+			enumerator: mock.DriveEnumerator(
+				mock.Drive(id(drive)).With(
+					mock.Delta(id(delta), nil).With(aPage(
+						filexWSizeAtRoot(1, 7),
+						filexWSizeAtRoot(2, 1),
+						filexWSizeAtRoot(3, 1))))),
 			expectedItemIDsInCollection: map[string][]string{
 				fullPath(): {idx(file, 2), idx(file, 3)},
 			},
@@ -108,17 +82,12 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) {
 				MaxPages: 999,
 			},
 			drives: []models.Driveable{drive1},
-			enumerator: mock.EnumerateItemsDeltaByDrive{
-				DrivePagers: map[string]*mock.DriveItemsDeltaPager{
-					id(drive): {
-						Pages: pagesOf(pageItems(
-							driveItemWithSize(idx(file, 1), namex(file, 1), parentDir(), rootID, 1, isFile),
-							driveItemWithSize(idx(file, 2), namex(file, 2), parentDir(), rootID, 2, isFile),
-							driveItemWithSize(idx(file, 3), namex(file, 3), parentDir(), rootID, 1, isFile))),
-						DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)},
-					},
-				},
-			},
+			enumerator: mock.DriveEnumerator(
+				mock.Drive(id(drive)).With(
+					mock.Delta(id(delta), nil).With(aPage(
+						filexWSizeAtRoot(1, 1),
+						filexWSizeAtRoot(2, 2),
+						filexWSizeAtRoot(3, 1))))),
 			expectedItemIDsInCollection: map[string][]string{
 				fullPath(): {idx(file, 1), idx(file, 2)},
 			},
@@ -134,18 +103,13 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) {
 				MaxPages: 999,
 			},
 			drives: []models.Driveable{drive1},
-			enumerator: mock.EnumerateItemsDeltaByDrive{
-				DrivePagers: map[string]*mock.DriveItemsDeltaPager{
-					id(drive): {
-						Pages: pagesOf(pageItems(
-							driveItemWithSize(idx(file, 1), namex(file, 1), parentDir(), rootID, 1, isFile),
-							driveItemWithSize(idx(folder, 1), namex(folder, 1), parentDir(), rootID, 1, isFolder),
-							driveItemWithSize(idx(file, 2), namex(file, 2), parentDir(namex(folder, 1)), idx(folder, 1), 2, isFile),
-							driveItemWithSize(idx(file, 3), namex(file, 3), parentDir(namex(folder, 1)), idx(folder, 1), 1, isFile))),
-						DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)},
-					},
-				},
-			},
+			enumerator: mock.DriveEnumerator(
+				mock.Drive(id(drive)).With(
+					mock.Delta(id(delta), nil).With(aPage(
+						filexWSizeAtRoot(1, 1),
+						folderxAtRoot(1),
+						filexWSizeAt(2, 1, 2),
+						filexWSizeAt(3, 1, 1))))),
 			expectedItemIDsInCollection: map[string][]string{
 				fullPath():                 {idx(file, 1)},
 				fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 2)},
@@ -162,20 +126,15 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) {
 				MaxPages: 999,
 			},
 			drives: []models.Driveable{drive1},
-			enumerator: mock.EnumerateItemsDeltaByDrive{
-				DrivePagers: map[string]*mock.DriveItemsDeltaPager{
-					id(drive): {
-						Pages: pagesOf(pageItems(
-							driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile),
-							driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile),
-							driveItem(idx(file, 3), namex(file, 3), parentDir(), rootID, isFile),
-							driveItem(idx(file, 4), namex(file, 4), parentDir(), rootID, isFile),
-							driveItem(idx(file, 5), namex(file, 5), parentDir(), rootID, isFile),
-							driveItem(idx(file, 6), namex(file, 6), parentDir(), rootID, isFile))),
-						DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)},
-					},
-				},
-			},
+			enumerator: mock.DriveEnumerator(
+				mock.Drive(id(drive)).With(
+					mock.Delta(id(delta), nil).With(aPage(
+						filexAtRoot(1),
+						filexAtRoot(2),
+						filexAtRoot(3),
+						filexAtRoot(4),
+						filexAtRoot(5),
+						filexAtRoot(6))))),
 			expectedItemIDsInCollection: map[string][]string{
 				fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)},
 			},
@@ -191,25 +150,20 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) {
 				MaxPages: 999,
 			},
 			drives: []models.Driveable{drive1},
-			enumerator: mock.EnumerateItemsDeltaByDrive{
-				DrivePagers: map[string]*mock.DriveItemsDeltaPager{
-					id(drive): {
-						Pages: pagesOf(
-							pageItems(
-								driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile),
-								driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile)),
-							pageItems(
-								// Repeated items shouldn't count against the limit.
-								driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile),
-								driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder),
-								driveItem(idx(file, 3), namex(file, 3), parentDir(namex(folder, 1)), idx(folder, 1), isFile),
-								driveItem(idx(file, 4), namex(file, 4), parentDir(namex(folder, 1)), idx(folder, 1), isFile),
-								driveItem(idx(file, 5), namex(file, 5), parentDir(namex(folder, 1)), idx(folder, 1), isFile),
-								driveItem(idx(file, 6), namex(file, 6), parentDir(namex(folder, 1)), idx(folder, 1), isFile))),
-						DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)},
-					},
-				},
-			},
+			enumerator: mock.DriveEnumerator(
+				mock.Drive(id(drive)).With(
+					mock.Delta(id(delta), nil).With(
+						aPage(
+							filexAtRoot(1),
+							filexAtRoot(2)),
+						aPage(
+							// Repeated items shouldn't count against the limit.
+							filexAtRoot(1),
+							folderxAtRoot(1),
+							filexAt(3, 1),
+							filexAt(4, 1),
+							filexAt(5, 1),
+							filexAt(6, 1))))),
 			expectedItemIDsInCollection: map[string][]string{
 				fullPath():                 {idx(file, 1), idx(file, 2)},
 				fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 3)},
@@ -226,23 +180,18 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) {
 				MaxPages: 1,
 			},
 			drives: []models.Driveable{drive1},
-			enumerator: mock.EnumerateItemsDeltaByDrive{
-				DrivePagers: map[string]*mock.DriveItemsDeltaPager{
-					id(drive): {
-						Pages: pagesOf(
-							pageItems(
-								driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile),
-								driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile)),
-							pageItems(
-								driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder),
-								driveItem(idx(file, 3), namex(file, 3), parentDir(namex(folder, 1)), idx(folder, 1), isFile),
-								driveItem(idx(file, 4), namex(file, 4), parentDir(namex(folder, 1)), idx(folder, 1), isFile),
-								driveItem(idx(file, 5), namex(file, 5), parentDir(namex(folder, 1)), idx(folder, 1), isFile),
-								driveItem(idx(file, 6), namex(file, 6), parentDir(namex(folder, 1)), idx(folder, 1), isFile))),
-						DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)},
-					},
-				},
-			},
+			enumerator: mock.DriveEnumerator(
+				mock.Drive(id(drive)).With(
+					mock.Delta(id(delta), nil).With(
+						aPage(
+							filexAtRoot(1),
+							filexAtRoot(2)),
+						aPage(
+							folderxAtRoot(1),
+							filexAt(3, 1),
+							filexAt(4, 1),
+							filexAt(5, 1),
+							filexAt(6, 1))))),
 			expectedItemIDsInCollection: map[string][]string{
 				fullPath(): {idx(file, 1), idx(file, 2)},
 			},
@@ -258,22 +207,17 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) {
 				MaxPages: 999,
 			},
 			drives: []models.Driveable{drive1},
-			enumerator: mock.EnumerateItemsDeltaByDrive{
-				DrivePagers: map[string]*mock.DriveItemsDeltaPager{
-					id(drive): {
-						Pages: pagesOf(
-							pageItems(
-								driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile),
-								driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile),
-								driveItem(idx(file, 3), namex(file, 3), parentDir(), rootID, isFile)),
-							pageItems(
-								driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder),
-								driveItem(idx(file, 4), namex(file, 4), parentDir(namex(folder, 1)), idx(folder, 1), isFile),
-								driveItem(idx(file, 5), namex(file, 5), parentDir(namex(folder, 1)), idx(folder, 1), isFile))),
-						DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)},
-					},
-				},
-			},
+			enumerator: mock.DriveEnumerator(
+				mock.Drive(id(drive)).With(
+					mock.Delta(id(delta), nil).With(
+						aPage(
+							filexAtRoot(1),
+							filexAtRoot(2),
+							filexAtRoot(3)),
+						aPage(
+							folderxAtRoot(1),
+							filexAt(4, 1),
+							filexAt(5, 1))))),
 			expectedItemIDsInCollection: map[string][]string{
 				// Root has an additional item. It's hard to fix that in the code
 				// though.
@@ -292,24 +236,19 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) {
 				MaxPages: 999,
 			},
 			drives: []models.Driveable{drive1},
-			enumerator: mock.EnumerateItemsDeltaByDrive{
-				DrivePagers: map[string]*mock.DriveItemsDeltaPager{
-					id(drive): {
-						Pages: pagesOf(
-							pageItems(
-								driveItem(id(folder), name(folder), parentDir(), rootID, isFolder),
-								driveItem(idx(file, 1), namex(file, 1), parentDir(name(folder)), id(folder), isFile),
-								driveItem(idx(file, 2), namex(file, 2), parentDir(name(folder)), id(folder), isFile)),
-							pageItems(
-								driveItem(id(folder), name(folder), parentDir(), rootID, isFolder),
-								// Updated item that shouldn't count against the limit a second time.
-								driveItem(idx(file, 2), namex(file, 2), parentDir(name(folder)), id(folder), isFile),
-								driveItem(idx(file, 3), namex(file, 3), parentDir(name(folder)), id(folder), isFile),
-								driveItem(idx(file, 4), namex(file, 4), parentDir(name(folder)), id(folder), isFile))),
-						DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)},
-					},
-				},
-			},
+			enumerator: mock.DriveEnumerator(
+				mock.Drive(id(drive)).With(
+					mock.Delta(id(delta), nil).With(
+						aPage(
+							folderAtRoot(),
+							filexAt(1, folder),
+							filexAt(2, folder)),
+						aPage(
+							folderAtRoot(),
+							// Updated item that shouldn't count against the limit a second time.
+							filexAt(2, folder),
+							filexAt(3, folder),
+							filexAt(4, folder))))),
 			expectedItemIDsInCollection: map[string][]string{
 				fullPath():             {},
 				fullPath(name(folder)): {id(folder), idx(file, 1), idx(file, 2), idx(file, 3)},
@@ -326,25 +265,20 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
       MaxPages: 999,
     },
     drives: []models.Driveable{drive1},
-    enumerator: mock.EnumerateItemsDeltaByDrive{
-      DrivePagers: map[string]*mock.DriveItemsDeltaPager{
-        id(drive): {
-          Pages: pagesOf(
-            pageItems(
-              driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile),
-              driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile),
-              // Put folder 0 at limit.
-              driveItem(id(folder), name(folder), parentDir(), rootID, isFolder),
-              driveItem(idx(file, 3), namex(file, 3), parentDir(name(folder)), id(folder), isFile),
-              driveItem(idx(file, 4), namex(file, 4), parentDir(name(folder)), id(folder), isFile)),
-            pageItems(
-              driveItem(id(folder), name(folder), parentDir(), rootID, isFolder),
-              // Try to move item from root to folder 0 which is already at the limit.
-              driveItem(idx(file, 1), namex(file, 1), parentDir(name(folder)), id(folder), isFile))),
-          DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)},
-        },
-      },
-    },
+    enumerator: mock.DriveEnumerator(
+      mock.Drive(id(drive)).With(
+        mock.Delta(id(delta), nil).With(
+          aPage(
+            filexAtRoot(1),
+            filexAtRoot(2),
+            // Put folder 0 at limit.
+            folderAtRoot(),
+            filexAt(3, folder),
+            filexAt(4, folder)),
+          aPage(
+            folderAtRoot(),
+            // Try to move item from root to folder 0 which is already at the limit.
+            filexAt(1, folder))))),
     expectedItemIDsInCollection: map[string][]string{
       fullPath(): {idx(file, 1), idx(file, 2)},
       fullPath(name(folder)): {id(folder), idx(file, 3), idx(file, 4)},
@@ -361,24 +295,19 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
       MaxPages: 999,
     },
     drives: []models.Driveable{drive1},
-    enumerator: mock.EnumerateItemsDeltaByDrive{
-      DrivePagers: map[string]*mock.DriveItemsDeltaPager{
-        id(drive): {
-          Pages: pagesOf(
-            pageItems(
-              driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile),
-              driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile),
-              driveItem(idx(file, 3), namex(file, 3), parentDir(), rootID, isFile)),
-            pageItems(
-              driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder),
-              driveItem(idx(file, 4), namex(file, 4), parentDir(namex(folder, 1)), idx(folder, 1), isFile)),
-            pageItems(
-              driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder),
-              driveItem(idx(file, 5), namex(file, 5), parentDir(namex(folder, 1)), idx(folder, 1), isFile))),
-          DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)},
-        },
-      },
-    },
+    enumerator: mock.DriveEnumerator(
+      mock.Drive(id(drive)).With(
+        mock.Delta(id(delta), nil).With(
+          aPage(
+            filexAtRoot(1),
+            filexAtRoot(2),
+            filexAtRoot(3)),
+          aPage(
+            folderxAtRoot(1),
+            filexAt(4, 1)),
+          aPage(
+            folderxAtRoot(1),
+            filexAt(5, 1))))),
     expectedItemIDsInCollection: map[string][]string{
       fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)},
       fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)},
@@ -395,27 +324,22 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
       MaxPages: 999,
     },
     drives: []models.Driveable{drive1},
-    enumerator: mock.EnumerateItemsDeltaByDrive{
-      DrivePagers: map[string]*mock.DriveItemsDeltaPager{
-        id(drive): {
-          Pages: pagesOf(
-            pageItems(
-              driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile),
-              driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile),
-              driveItem(idx(file, 3), namex(file, 3), parentDir(), rootID, isFile)),
-            pageItems(
-              driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder),
-              driveItem(idx(file, 4), namex(file, 4), parentDir(namex(folder, 1)), idx(folder, 1), isFile),
-              driveItem(idx(file, 5), namex(file, 5), parentDir(namex(folder, 1)), idx(folder, 1), isFile),
-              // This container shouldn't be returned.
-              driveItem(idx(folder, 2), namex(folder, 2), parentDir(), rootID, isFolder),
-              driveItem(idx(file, 7), namex(file, 7), parentDir(namex(folder, 2)), idx(folder, 2), isFile),
-              driveItem(idx(file, 8), namex(file, 8), parentDir(namex(folder, 2)), idx(folder, 2), isFile),
-              driveItem(idx(file, 9), namex(file, 9), parentDir(namex(folder, 2)), idx(folder, 2), isFile))),
-          DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)},
-        },
-      },
-    },
+    enumerator: mock.DriveEnumerator(
+      mock.Drive(id(drive)).With(
+        mock.Delta(id(delta), nil).With(
+          aPage(
+            filexAtRoot(1),
+            filexAtRoot(2),
+            filexAtRoot(3)),
+          aPage(
+            folderxAtRoot(1),
+            filexAt(4, 1),
+            filexAt(5, 1),
+            // This container shouldn't be returned.
+            folderxAtRoot(2),
+            filexAt(7, 2),
+            filexAt(8, 2),
+            filexAt(9, 2))))),
     expectedItemIDsInCollection: map[string][]string{
       fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)},
       fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)},
@@ -432,28 +356,23 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
       MaxPages: 999,
     },
     drives: []models.Driveable{drive1},
-    enumerator: mock.EnumerateItemsDeltaByDrive{
-      DrivePagers: map[string]*mock.DriveItemsDeltaPager{
-        id(drive): {
-          Pages: pagesOf(
-            pageItems(
-              driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile),
-              driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile),
-              driveItem(idx(file, 3), namex(file, 3), parentDir(), rootID, isFile)),
-            pageItems(
-              driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder),
-              driveItem(idx(file, 4), namex(file, 4), parentDir(namex(folder, 1)), idx(folder, 1), isFile),
-              driveItem(idx(file, 5), namex(file, 5), parentDir(namex(folder, 1)), idx(folder, 1), isFile)),
-            pageItems(
-              // This container shouldn't be returned.
-              driveItem(idx(folder, 2), namex(folder, 2), parentDir(), rootID, isFolder),
-              driveItem(idx(file, 7), namex(file, 7), parentDir(namex(folder, 2)), idx(folder, 2), isFile),
-              driveItem(idx(file, 8), namex(file, 8), parentDir(namex(folder, 2)), idx(folder, 2), isFile),
-              driveItem(idx(file, 9), namex(file, 9), parentDir(namex(folder, 2)), idx(folder, 2), isFile))),
-          DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)},
-        },
-      },
-    },
+    enumerator: mock.DriveEnumerator(
+      mock.Drive(id(drive)).With(
+        mock.Delta(id(delta), nil).With(
+          aPage(
+            filexAtRoot(1),
+            filexAtRoot(2),
+            filexAtRoot(3)),
+          aPage(
+            folderxAtRoot(1),
+            filexAt(4, 1),
+            filexAt(5, 1)),
+          aPage(
+            // This container shouldn't be returned.
+            folderxAtRoot(2),
+            filexAt(7, 2),
+            filexAt(8, 2),
+            filexAt(9, 2))))),
     expectedItemIDsInCollection: map[string][]string{
       fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)},
       fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)},
@@ -470,28 +389,21 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
       MaxPages: 999,
     },
     drives: []models.Driveable{drive1, drive2},
-    enumerator: mock.EnumerateItemsDeltaByDrive{
-      DrivePagers: map[string]*mock.DriveItemsDeltaPager{
-        id(drive): {
-          Pages: pagesOf(pageItems(
-            driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile),
-            driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile),
-            driveItem(idx(file, 3), namex(file, 3), parentDir(), rootID, isFile),
-            driveItem(idx(file, 4), namex(file, 4), parentDir(), rootID, isFile),
-            driveItem(idx(file, 5), namex(file, 5), parentDir(), rootID, isFile))),
-          DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)},
-        },
-        idx(drive, 2): {
-          Pages: pagesOf(pageItems(
-            driveItem(idx(file, 1), namex(file, 1), driveParentDir(2), rootID, isFile),
-            driveItem(idx(file, 2), namex(file, 2), driveParentDir(2), rootID, isFile),
-            driveItem(idx(file, 3), namex(file, 3), driveParentDir(2), rootID, isFile),
-            driveItem(idx(file, 4), namex(file, 4), driveParentDir(2), rootID, isFile),
-            driveItem(idx(file, 5), namex(file, 5), driveParentDir(2), rootID, isFile))),
-          DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)},
-        },
-      },
-    },
+    enumerator: mock.DriveEnumerator(
+      mock.Drive(id(drive)).With(
+        mock.Delta(id(delta), nil).With(aPage(
+          filexAtRoot(1),
+          filexAtRoot(2),
+          filexAtRoot(3),
+          filexAtRoot(4),
+          filexAtRoot(5)))),
+      mock.Drive(idx(drive, 2)).With(
+        mock.Delta(id(delta), nil).With(aPage(
+          filexAtRoot(1),
+          filexAtRoot(2),
+          filexAtRoot(3),
+          filexAtRoot(4),
+          filexAtRoot(5))))),
     expectedItemIDsInCollection: map[string][]string{
       fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)},
       driveFullPath(2): {idx(file, 1), idx(file, 2), idx(file, 3)},
@@ -507,24 +419,19 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
       MaxPages: 1,
     },
    drives: []models.Driveable{drive1},
-    enumerator: mock.EnumerateItemsDeltaByDrive{
-      DrivePagers: map[string]*mock.DriveItemsDeltaPager{
-        id(drive): {
-          Pages: pagesOf(
-            pageItems(
-              driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile),
-              driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile),
-              driveItem(idx(file, 3), namex(file, 3), parentDir(), rootID, isFile)),
-            pageItems(
-              driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder),
-              driveItem(idx(file, 4), namex(file, 4), parentDir(namex(folder, 1)), idx(folder, 1), isFile)),
-            pageItems(
-              driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder),
-              driveItem(idx(file, 5), namex(file, 5), parentDir(namex(folder, 1)), idx(folder, 1), isFile))),
-          DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)},
-        },
-      },
-    },
+    enumerator: mock.DriveEnumerator(
+      mock.Drive(id(drive)).With(
+        mock.Delta(id(delta), nil).With(
+          aPage(
+            filexAtRoot(1),
+            filexAtRoot(2),
+            filexAtRoot(3)),
+          aPage(
+            folderxAtRoot(1),
+            filexAt(4, 1)),
+          aPage(
+            folderxAtRoot(1),
+            filexAt(5, 1))))),
     expectedItemIDsInCollection: map[string][]string{
       fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)},
       fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)},
@@ -876,14 +783,9 @@ func runGetPreviewLimitsDefaults(
       {Values: []models.Driveable{drv}},
     },
   }
-  mockEnumerator = mock.EnumerateItemsDeltaByDrive{
-    DrivePagers: map[string]*mock.DriveItemsDeltaPager{
-      id(drive): {
-        Pages: pages,
-        DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)},
-      },
-    },
-  }
+  mockEnumerator = mock.DriveEnumerator(
+    mock.Drive(id(drive)).With(
+      mock.Delta(id(delta), nil).With(pages...)))
   mbh = mock.DefaultDriveBHWith(user, mockDrivePager, mockEnumerator)
   c = collWithMBHAndOpts(mbh, opts)
   errs = fault.New(true)
@@ -182,6 +182,13 @@ func (h siteBackupHandler) EnumerateDriveItemsDelta(
   return h.ac.EnumerateDriveItemsDelta(ctx, driveID, prevDeltaLink, cc)
 }
 
+func (h siteBackupHandler) GetRootFolder(
+  ctx context.Context,
+  driveID string,
+) (models.DriveItemable, error) {
+  return h.ac.Drives().GetRootFolder(ctx, driveID)
+}
+
 // ---------------------------------------------------------------------------
 // Restore
 // ---------------------------------------------------------------------------
@@ -27,7 +27,6 @@ import (
   "github.com/alcionai/corso/src/pkg/fault"
   "github.com/alcionai/corso/src/pkg/services/m365/api"
   "github.com/alcionai/corso/src/pkg/services/m365/api/graph"
-  "github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
 )
 
 // ---------------------------------------------------------------------------
@@ -533,7 +532,6 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
         assert.Equal(t, 0, len(uc.idToProps))
       },
     },
-
     {
       name: "folder item",
       pages: []mock.NextPage{
@@ -564,21 +562,17 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
       ctx, flush := tester.NewContext(t)
       defer flush()
 
-      medi := mock.EnumerateItemsDeltaByDrive{
-        DrivePagers: map[string]*mock.DriveItemsDeltaPager{
-          driveID: {
-            Pages: test.pages,
-            Err: test.pagerErr,
-            DeltaUpdate: pagers.DeltaUpdate{URL: deltaString},
-          },
-        },
-      }
+      driveEnumer := mock.DriveEnumerator(
+        mock.Drive(driveID).
+          WithErr(test.pagerErr).
+          With(mock.Delta(deltaString, test.pagerErr).
+            With(test.pages...)))
 
       cache, err := newURLCache(
         driveID,
         "",
         1*time.Hour,
-        &medi,
+        driveEnumer,
         count.New(),
         fault.New(true))
       require.NoError(t, err, clues.ToCore(err))
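Note the deliberate double wiring of `test.pagerErr` above: `WithErr` stores the error on the drive enumerator, where the mock re-surfaces it on the trailing delta, while the same error passed to `mock.Delta` is returned from that query's `Results`. A sketch of the equivalent construction, with `driveID`, `deltaString`, `pagerErr`, and `pages` standing in for the test's values:

```go
driveEnumer := mock.DriveEnumerator(
  mock.Drive(driveID).
    WithErr(pagerErr). // re-surfaced on the trailing empty delta
    With(mock.Delta(deltaString, pagerErr). // Results() -> (DeltaUpdate{URL: deltaString}, pagerErr)
      With(pages...)))
```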
@@ -623,7 +617,7 @@ func (suite *URLCacheUnitSuite) TestNeedsRefresh() {
     driveID,
     "",
     refreshInterval,
-    &mock.EnumerateItemsDeltaByDrive{},
+    &mock.EnumerateDriveItemsDelta{},
     count.New(),
     fault.New(true))
 
@@ -659,7 +653,7 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
       name: "invalid driveID",
       driveID: "",
       refreshInt: 1 * time.Hour,
-      itemPager: &mock.EnumerateItemsDeltaByDrive{},
+      itemPager: &mock.EnumerateDriveItemsDelta{},
       errors: fault.New(true),
       expectErr: require.Error,
     },
@@ -667,7 +661,7 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
       name: "invalid refresh interval",
      driveID: "drive1",
       refreshInt: 100 * time.Millisecond,
-      itemPager: &mock.EnumerateItemsDeltaByDrive{},
+      itemPager: &mock.EnumerateDriveItemsDelta{},
       errors: fault.New(true),
       expectErr: require.Error,
     },
@@ -683,7 +677,7 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
       name: "valid",
       driveID: "drive1",
       refreshInt: 1 * time.Hour,
-      itemPager: &mock.EnumerateItemsDeltaByDrive{},
+      itemPager: &mock.EnumerateDriveItemsDelta{},
       errors: fault.New(true),
       expectErr: require.NoError,
     },
@@ -182,6 +182,13 @@ func (h userDriveBackupHandler) EnumerateDriveItemsDelta(
   return h.ac.EnumerateDriveItemsDelta(ctx, driveID, prevDeltaLink, cc)
 }
 
+func (h userDriveBackupHandler) GetRootFolder(
+  ctx context.Context,
+  driveID string,
+) (models.DriveItemable, error) {
+  return h.ac.Drives().GetRootFolder(ctx, driveID)
+}
+
 // ---------------------------------------------------------------------------
 // Restore
 // ---------------------------------------------------------------------------
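With this hunk, `siteBackupHandler` and `userDriveBackupHandler` expose identical `GetRootFolder` methods backed by the same `h.ac.Drives()` call. A sketch of a compile-time guard that would keep the two in sync; the interface name is hypothetical and not part of this PR, only the method signature comes from the diff:

```go
// rootFolderGetter is a hypothetical capability interface; both backup
// handlers in this PR implement the method with this exact signature.
type rootFolderGetter interface {
  GetRootFolder(ctx context.Context, driveID string) (models.DriveItemable, error)
}

var (
  _ rootFolderGetter = siteBackupHandler{}
  _ rootFolderGetter = userDriveBackupHandler{}
)
```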
@@ -2,6 +2,7 @@ package mock
 
 import (
   "context"
+  "fmt"
   "net/http"
 
   "github.com/alcionai/clues"
@@ -9,6 +10,7 @@ import (
   "github.com/microsoftgraph/msgraph-sdk-go/models"
 
   "github.com/alcionai/corso/src/internal/common/idname"
+  "github.com/alcionai/corso/src/internal/common/ptr"
   odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
   "github.com/alcionai/corso/src/pkg/backup/details"
   "github.com/alcionai/corso/src/pkg/control"
@@ -30,7 +32,7 @@ type BackupHandler[T any] struct {
   // and plug in the selector scope there.
   Sel selectors.Selector
 
-  DriveItemEnumeration EnumerateItemsDeltaByDrive
+  DriveItemEnumeration EnumerateDriveItemsDelta
 
   GI GetsItem
   GIP GetsItemPermission
@@ -57,6 +59,18 @@ type BackupHandler[T any] struct {
   getCall int
   GetResps []*http.Response
   GetErrs []error
+
+  RootFolder models.DriveItemable
+}
+
+func stubRootFolder() models.DriveItemable {
+  item := models.NewDriveItem()
+  item.SetName(ptr.To(odConsts.RootPathDir))
+  item.SetId(ptr.To(odConsts.RootID))
+  item.SetRoot(models.NewRoot())
+  item.SetFolder(models.NewFolder())
+
+  return item
 }
 
 func DefaultOneDriveBH(resourceOwner string) *BackupHandler[models.DriveItemable] {
@@ -69,7 +83,7 @@ func DefaultOneDriveBH(resourceOwner string) *BackupHandler[models.DriveItemable
       Extension: &details.ExtensionData{},
     },
     Sel: sel.Selector,
-    DriveItemEnumeration: EnumerateItemsDeltaByDrive{},
+    DriveItemEnumeration: EnumerateDriveItemsDelta{},
     GI: GetsItem{Err: clues.New("not defined")},
     GIP: GetsItemPermission{Err: clues.New("not defined")},
     PathPrefixFn: defaultOneDrivePathPrefixer,
@@ -81,6 +95,7 @@ func DefaultOneDriveBH(resourceOwner string) *BackupHandler[models.DriveItemable
     LocationIDFn: defaultOneDriveLocationIDer,
     GetResps: []*http.Response{nil},
     GetErrs: []error{clues.New("not defined")},
+    RootFolder: stubRootFolder(),
   }
 }
 
@@ -105,13 +120,14 @@ func DefaultSharePointBH(resourceOwner string) *BackupHandler[models.DriveItemab
     LocationIDFn: defaultSharePointLocationIDer,
     GetResps: []*http.Response{nil},
     GetErrs: []error{clues.New("not defined")},
+    RootFolder: stubRootFolder(),
   }
 }
 
 func DefaultDriveBHWith(
   resource string,
   drivePager *apiMock.Pager[models.Driveable],
-  enumerator EnumerateItemsDeltaByDrive,
+  enumerator EnumerateDriveItemsDelta,
 ) *BackupHandler[models.DriveItemable] {
   mbh := DefaultOneDriveBH(resource)
   mbh.DrivePagerV = drivePager
@@ -287,6 +303,10 @@ func (h BackupHandler[T]) IncludesDir(dir string) bool {
     selectors.OneDriveScope(scope).Matches(selectors.OneDriveFolder, dir)
 }
 
+func (h BackupHandler[T]) GetRootFolder(context.Context, string) (models.DriveItemable, error) {
+  return h.RootFolder, nil
+}
+
 // ---------------------------------------------------------------------------
 // Get Itemer
 // ---------------------------------------------------------------------------
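Combined with the `stubRootFolder` default above, the mock now answers root-folder lookups without any extra setup. A usage sketch under those assumptions (`ctx` and `t` come from the enclosing test; `ptr`, `odConsts`, `require`, and `assert` are the packages already used in these files):

```go
mbh := mock.DefaultOneDriveBH("resource-owner")

root, err := mbh.GetRootFolder(ctx, "any-drive-id")
require.NoError(t, err) // the mock never errors here

// the stub carries the canonical root ID and name.
assert.Equal(t, odConsts.RootID, ptr.Val(root.GetId()))
assert.Equal(t, odConsts.RootPathDir, ptr.Val(root.GetName()))
```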
@@ -304,7 +324,7 @@ func (m GetsItem) GetItem(
 }
 
 // ---------------------------------------------------------------------------
-// Enumerates Drive Items
+// Drive Items Enumerator
 // ---------------------------------------------------------------------------
 
 type NextPage struct {
@@ -312,43 +332,138 @@ type NextPage struct {
   Reset bool
 }
 
-type EnumerateItemsDeltaByDrive struct {
-  DrivePagers map[string]*DriveItemsDeltaPager
+type EnumerateDriveItemsDelta struct {
+  DrivePagers map[string]*DriveDeltaEnumerator
 }
 
-var _ pagers.NextPageResulter[models.DriveItemable] = &DriveItemsDeltaPager{}
-
-type DriveItemsDeltaPager struct {
-  Idx int
+func DriveEnumerator(
+  ds ...*DriveDeltaEnumerator,
+) EnumerateDriveItemsDelta {
+  enumerator := EnumerateDriveItemsDelta{
+    DrivePagers: map[string]*DriveDeltaEnumerator{},
+  }
+
+  for _, drive := range ds {
+    enumerator.DrivePagers[drive.DriveID] = drive
+  }
+
+  return enumerator
+}
+
+func (en EnumerateDriveItemsDelta) EnumerateDriveItemsDelta(
+  _ context.Context,
+  driveID, _ string,
+  _ api.CallConfig,
+) pagers.NextPageResulter[models.DriveItemable] {
+  iterator := en.DrivePagers[driveID]
+  return iterator.nextDelta()
+}
+
+type DriveDeltaEnumerator struct {
+  DriveID string
+  idx int
+  DeltaQueries []*DeltaQuery
+  Err error
+}
+
+func Drive(driveID string) *DriveDeltaEnumerator {
+  return &DriveDeltaEnumerator{DriveID: driveID}
+}
+
+func (dde *DriveDeltaEnumerator) With(ds ...*DeltaQuery) *DriveDeltaEnumerator {
+  dde.DeltaQueries = ds
+  return dde
+}
+
+// WithErr adds an error that is always returned in the last delta index.
+func (dde *DriveDeltaEnumerator) WithErr(err error) *DriveDeltaEnumerator {
+  dde.Err = err
+  return dde
+}
+
+func (dde *DriveDeltaEnumerator) nextDelta() *DeltaQuery {
+  if dde.idx == len(dde.DeltaQueries) {
+    // at the end of the enumeration, return an empty page with no items,
+    // not even the root. This is what graph api would do to signify an absence
+    // of changes in the delta.
+    lastDU := dde.DeltaQueries[dde.idx-1].DeltaUpdate
+
+    return &DeltaQuery{
+      DeltaUpdate: lastDU,
+      Pages: []NextPage{{
+        Items: []models.DriveItemable{},
+      }},
+      Err: dde.Err,
+    }
+  }
+
+  if dde.idx > len(dde.DeltaQueries) {
+    // a panic isn't optimal here, but since this mechanism is internal to testing,
+    // it's an acceptable way to have the tests ensure we don't over-enumerate deltas.
+    panic(fmt.Sprintf("delta index %d larger than count of delta iterations in mock", dde.idx))
+  }
+
+  pages := dde.DeltaQueries[dde.idx]
+
+  dde.idx++
+
+  return pages
+}
+
+var _ pagers.NextPageResulter[models.DriveItemable] = &DeltaQuery{}
+
+type DeltaQuery struct {
+  idx int
   Pages []NextPage
   DeltaUpdate pagers.DeltaUpdate
   Err error
 }
 
-func (edibd EnumerateItemsDeltaByDrive) EnumerateDriveItemsDelta(
-  _ context.Context,
-  driveID, _ string,
-  _ api.CallConfig,
-) pagers.NextPageResulter[models.DriveItemable] {
-  didp := edibd.DrivePagers[driveID]
-  return didp
+func Delta(
+  resultDeltaID string,
+  err error,
+) *DeltaQuery {
+  return &DeltaQuery{
+    DeltaUpdate: pagers.DeltaUpdate{URL: resultDeltaID},
+    Err: err,
+  }
 }
 
-func (edi *DriveItemsDeltaPager) NextPage() ([]models.DriveItemable, bool, bool) {
-  if edi.Idx >= len(edi.Pages) {
+func DeltaWReset(
+  resultDeltaID string,
+  err error,
+) *DeltaQuery {
+  return &DeltaQuery{
+    DeltaUpdate: pagers.DeltaUpdate{
+      URL: resultDeltaID,
+      Reset: true,
+    },
+    Err: err,
+  }
+}
+
+func (dq *DeltaQuery) With(
+  pages ...NextPage,
+) *DeltaQuery {
+  dq.Pages = pages
+  return dq
+}
+
+func (dq *DeltaQuery) NextPage() ([]models.DriveItemable, bool, bool) {
+  if dq.idx >= len(dq.Pages) {
     return nil, false, true
   }
 
-  np := edi.Pages[edi.Idx]
-  edi.Idx = edi.Idx + 1
+  np := dq.Pages[dq.idx]
+  dq.idx = dq.idx + 1
 
   return np.Items, np.Reset, false
 }
 
-func (edi *DriveItemsDeltaPager) Cancel() {}
+func (dq *DeltaQuery) Cancel() {}
 
-func (edi *DriveItemsDeltaPager) Results() (pagers.DeltaUpdate, error) {
-  return edi.DeltaUpdate, edi.Err
+func (dq *DeltaQuery) Results() (pagers.DeltaUpdate, error) {
+  return dq.DeltaUpdate, dq.Err
 }
 
 // ---------------------------------------------------------------------------
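End to end, the rewritten mock behaves like a small script of delta queries per drive. A consumer-side sketch using only the types and methods defined above (the IDs and `ctx` are illustrative):

```go
enumerator := mock.DriveEnumerator(
  mock.Drive("drive-1").With(
    mock.Delta("delta-url", nil).With(
      mock.NextPage{Items: []models.DriveItemable{}},
      mock.NextPage{Items: []models.DriveItemable{}, Reset: true})))

pager := enumerator.EnumerateDriveItemsDelta(ctx, "drive-1", "", api.CallConfig{})

for {
  items, reset, done := pager.NextPage()
  if done {
    break
  }

  _, _ = items, reset // consume the page; reset mirrors NextPage.Reset
}

du, err := pager.Results() // pagers.DeltaUpdate{URL: "delta-url"}, nil
_, _ = du, err
```

Once the scripted queries are exhausted, further `EnumerateDriveItemsDelta` calls return the synthetic empty delta built in `nextDelta`, mimicking Graph's no-changes response; the panic branch is a defensive guard against the index ever skipping past that point.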
@@ -20,7 +20,6 @@ import (
   "github.com/alcionai/corso/src/pkg/path"
   "github.com/alcionai/corso/src/pkg/selectors"
   "github.com/alcionai/corso/src/pkg/services/m365/api"
-  "github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
 )
 
 // ---------------------------------------------------------------------------
@@ -93,11 +92,7 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
       defer flush()
 
       var (
         mbh = mock.DefaultSharePointBH(siteID)
-        du = pagers.DeltaUpdate{
-          URL: "notempty",
-          Reset: false,
-        }
         paths = map[string]string{}
         excluded = map[string]struct{}{}
         collMap = map[string]map[string]*drive.Collection{
@@ -106,14 +101,9 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
         topLevelPackages = map[string]struct{}{}
       )
 
-      mbh.DriveItemEnumeration = mock.EnumerateItemsDeltaByDrive{
-        DrivePagers: map[string]*mock.DriveItemsDeltaPager{
-          driveID: {
-            Pages: []mock.NextPage{{Items: test.items}},
-            DeltaUpdate: du,
-          },
-        },
-      }
+      mbh.DriveItemEnumeration = mock.DriveEnumerator(
+        mock.Drive(driveID).With(
+          mock.Delta("notempty", nil).With(mock.NextPage{Items: test.items})))
 
       c := drive.NewCollections(
         mbh,
@@ -50,7 +50,6 @@ const (
   NoDeltaQueries Key = "cannot-make-delta-queries"
   Packages Key = "packages"
   PagerResets Key = "pager-resets"
-  PagesEnumerated Key = "pages-enumerated"
   PrevDeltas Key = "previous-deltas"
   PrevPaths Key = "previous-paths"
   PreviousPathMetadataCollision Key = "previous-path-metadata-collision"
@@ -80,10 +79,12 @@ const (
 const (
   TotalDeleteFilesProcessed Key = "total-delete-files-processed"
   TotalDeleteFoldersProcessed Key = "total-delete-folders-processed"
+  TotalDeltasProcessed Key = "total-deltas-processed"
   TotalFilesProcessed Key = "total-files-processed"
   TotalFoldersProcessed Key = "total-folders-processed"
   TotalMalwareProcessed Key = "total-malware-processed"
   TotalPackagesProcessed Key = "total-packages-processed"
+  TotalPagesEnumerated Key = "total-pages-enumerated"
 )
 
 // miscellaneous
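The deleted `PagesEnumerated` key reappears under the totals block as `TotalPagesEnumerated`, alongside the new `TotalDeltasProcessed`. A minimal sketch of how such keys are consumed (assuming the `count` package's `New` constructor seen in the tests above and its `Inc` increment method used elsewhere in this PR):

```go
counter := count.New()
counter.Inc(count.TotalPagesEnumerated)
counter.Inc(count.TotalDeltasProcessed)
```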
|