use tree version in nightly tests (#4885)

#### Does this PR need a docs update or release note?

- [x]  No

#### Type of change

- [x] 🤖 Supportability/Tests

#### Issue(s)

* #4689

#### Test Plan

- [x] 💚 E2E
Keepers committed 2023-12-21 11:44:09 -07:00 (via GitHub)
parent 1944c070cf
commit 753ed1a075
15 changed files with 1846 additions and 1451 deletions

View File

@@ -181,12 +181,7 @@ func (c *Collections) makeDriveCollections(
         return nil, nil, pagers.DeltaUpdate{}, clues.Wrap(err, "generating backup tree prefix")
     }

-    root, err := c.handler.GetRootFolder(ctx, driveID)
-    if err != nil {
-        return nil, nil, pagers.DeltaUpdate{}, clues.Wrap(err, "getting root folder")
-    }
-
-    tree := newFolderyMcFolderFace(ppfx, ptr.Val(root.GetId()))
+    tree := newFolderyMcFolderFace(ppfx)

     counter.Add(count.PrevPaths, int64(len(prevPaths)))

@@ -232,7 +227,7 @@ func (c *Collections) makeDriveCollections(
     // only populate the global excluded items if no delta reset occurred.
     // if a reset did occur, the collections should already be marked as
     // "do not merge", therefore everything will get processed as a new addition.
-    if !tree.hadReset {
+    if !tree.hadReset && len(prevDeltaLink) > 0 {
         p, err := c.handler.CanonicalPath(odConsts.DriveFolderPrefixBuilder(driveID), c.tenantID)
         if err != nil {
             err = clues.WrapWC(ctx, err, "making canonical path for item exclusions")

@@ -543,10 +538,9 @@ func (c *Collections) addFolderToTree(
     notSelected = shouldSkip(ctx, collectionPath, c.handler, ptr.Val(drv.GetName()))
     if notSelected {
         logger.Ctx(ctx).Debugw("path not selected", "skipped_path", collectionPath.String())
-        return nil, nil
     }

-    err = tree.setFolder(ctx, folder)
+    err = tree.setFolder(ctx, folder, notSelected)
     return nil, clues.Stack(err).OrNil()
 }

@@ -665,12 +659,9 @@ func (c *Collections) addFileToTree(
         }
     }

-    err := tree.addFile(file)
-    if err != nil {
-        return nil, clues.StackWC(ctx, err)
-    }
-
-    return nil, nil
+    err := tree.addFile(ctx, file)
+    return nil, clues.Stack(err).OrNil()
 }

 // quality-of-life wrapper that transforms each tombstone in the map
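The last two hunks lean on the `clues.Stack(err).OrNil()` idiom: `OrNil` yields nil when no error was stacked, so the explicit `if err != nil` branch collapses into a single return. A minimal, hedged sketch of the idiom, using only the `clues` package this PR already imports (the function name is illustrative):

package main

import (
    "fmt"

    "github.com/alcionai/clues"
)

// addItem mimics the post-change shape of addFileToTree: stack the
// (possibly nil) error and let OrNil collapse the nil case.
func addItem(err error) error {
    return clues.Stack(err).OrNil()
}

func main() {
    fmt.Println(addItem(nil))                              // <nil>
    fmt.Println(addItem(clues.New("item before parent"))) // stacked error
}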

View File

@@ -165,10 +165,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_GetTree() {
         enumerator: driveEnumerator(
             d1.newEnumer().with(delta(nil)),
             d2.newEnumer().with(delta(nil))),
-        metadata: multiDriveMetadata(
-            t,
-            d1.newPrevPaths(t),
-            d2.newPrevPaths(t)),
+        metadata: multiDriveMetadata(t),
         expect: expected{
             canUsePrevBackup: assert.True,
             collections: expectCollections(

@@ -197,10 +194,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_GetTree() {
                 d2.fileAt(root, "r"),
                 d2.folderAt(root),
                 d2.fileAt(folder, "f"))))),
-        metadata: multiDriveMetadata(
-            t,
-            d1.newPrevPaths(t),
-            d2.newPrevPaths(t)),
+        metadata: multiDriveMetadata(t),
         expect: expected{
             canUsePrevBackup: assert.True,
             collections: expectCollections(

@@ -387,8 +381,13 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_GetTree() {
     expect, _ := test.expect.globalExcludedFileIDs.Get(d)
     result, rok := globalExcludes.Get(d)

+    if len(test.metadata) > 0 {
         require.True(t, rok, "drive results have a global excludes entry")
         assert.Equal(t, expect, result, "global excluded file IDs")
+    } else {
+        require.False(t, rok, "drive results have no global excludes entry")
+        assert.Empty(t, result, "global excluded file IDs")
+    }
 })
 }
 })

View File

@@ -27,10 +27,6 @@ type folderyMcFolderFace struct {
     // new, moved, and notMoved root
     root *nodeyMcNodeFace

-    // the ID of the actual root folder.
-    // required to ensure correct population of the root node.
-    rootID string
-
     // the majority of operations we perform can be handled with
     // a folder ID lookup instead of re-walking the entire tree.
     // Ex: adding a new file to its parent folder.

@@ -53,11 +49,9 @@ type folderyMcFolderFace struct {
 func newFolderyMcFolderFace(
     prefix path.Path,
-    rootID string,
 ) *folderyMcFolderFace {
     return &folderyMcFolderFace{
         prefix: prefix,
-        rootID: rootID,
         folderIDToNode: map[string]*nodeyMcNodeFace{},
         tombstones: map[string]*nodeyMcNodeFace{},
         fileIDToParentID: map[string]string{},

@@ -93,15 +87,23 @@ type nodeyMcNodeFace struct {
     children map[string]*nodeyMcNodeFace
     // file item ID -> file metadata
     files map[string]*custom.DriveItem
+    // when true, this flag means the folder appeared in enumeration,
+    // but was not included in the backup selection. We include
+    // unselected folders in the tree so we don't have to hold stateful
+    // decisions (such as folder selection) as part of delta processing;
+    // we only need to evaluate them during post-processing.
+    isNotSelected bool
 }

 func newNodeyMcNodeFace(
     parent *nodeyMcNodeFace,
     folder *custom.DriveItem,
+    isNotSelected bool,
 ) *nodeyMcNodeFace {
     return &nodeyMcNodeFace{
         parent: parent,
         folder: folder,
+        isNotSelected: isNotSelected,
         children: map[string]*nodeyMcNodeFace{},
         files: map[string]*custom.DriveItem{},
     }

@@ -134,6 +136,7 @@ func (face *folderyMcFolderFace) getNode(id string) *nodeyMcNodeFace {
 func (face *folderyMcFolderFace) setFolder(
     ctx context.Context,
     folder *custom.DriveItem,
+    isNotSelected bool,
 ) error {
     var (
         id = ptr.Val(folder.GetId())

@@ -151,14 +154,13 @@ func (face *folderyMcFolderFace) setFolder(
     }

     if (parentFolder == nil || len(ptr.Val(parentFolder.GetId())) == 0) &&
-        id != face.rootID {
+        folder.GetRoot() == nil {
         return clues.NewWC(ctx, "non-root folder missing parent id")
     }

-    // only set the root node once.
-    if id == face.rootID {
+    if folder.GetRoot() != nil {
         if face.root == nil {
-            root := newNodeyMcNodeFace(nil, folder)
+            root := newNodeyMcNodeFace(nil, folder, isNotSelected)
             face.root = root
             face.folderIDToNode[id] = root
         } else {

@@ -169,6 +171,11 @@ func (face *folderyMcFolderFace) setFolder(
         return nil
     }

+    ctx = clues.Add(
+        ctx,
+        "parent_id", ptr.Val(parentFolder.GetId()),
+        "parent_dir_path", path.LoggableDir(ptr.Val(parentFolder.GetPath())))
+
     // There are four possible changes that can happen at this point.
     // 1. new folder addition.
     // 2. duplicate folder addition.

@@ -221,7 +228,7 @@ func (face *folderyMcFolderFace) setFolder(
         nodey.folder = folder
     } else {
         // change type 1: new addition
-        nodey = newNodeyMcNodeFace(parentNode, folder)
+        nodey = newNodeyMcNodeFace(parentNode, folder, isNotSelected)
     }

     // ensure the parent points to this node, and that the node is registered

@@ -264,7 +271,7 @@ func (face *folderyMcFolderFace) setTombstone(
     }

     if _, alreadyBuried := face.tombstones[id]; !alreadyBuried {
-        face.tombstones[id] = newNodeyMcNodeFace(nil, folder)
+        face.tombstones[id] = newNodeyMcNodeFace(nil, folder, false)
     }

     return nil

@@ -308,7 +315,7 @@ func (face *folderyMcFolderFace) setPreviousPath(
         return nil
     }

-    zombey := newNodeyMcNodeFace(nil, custom.NewDriveItem(folderID, ""))
+    zombey := newNodeyMcNodeFace(nil, custom.NewDriveItem(folderID, ""), false)
     zombey.prev = prev
     face.tombstones[folderID] = zombey

@@ -328,6 +335,7 @@ func (face *folderyMcFolderFace) hasFile(id string) bool {
 // file was already added to the tree and is getting relocated,
 // this func will update and/or clean up all the old references.
 func (face *folderyMcFolderFace) addFile(
+    ctx context.Context,
     file *custom.DriveItem,
 ) error {
     var (

@@ -336,32 +344,33 @@ func (face *folderyMcFolderFace) addFile(
         parentID string
     )

+    if len(id) == 0 {
+        return clues.NewWC(ctx, "item added without ID")
+    }
+
     if parentFolder == nil || len(ptr.Val(parentFolder.GetId())) == 0 {
-        return clues.New("item added without parent folder ID")
+        return clues.NewWC(ctx, "item added without parent folder ID")
     }

     parentID = ptr.Val(parentFolder.GetId())

-    if len(id) == 0 {
-        return clues.New("item added without ID")
-    }
+    ctx = clues.Add(
+        ctx,
+        "parent_id", ptr.Val(parentFolder.GetId()),
+        "parent_dir_path", path.LoggableDir(ptr.Val(parentFolder.GetPath())))

     // in case of file movement, clean up any references
     // to the file in the old parent
     oldParentID, ok := face.fileIDToParentID[id]
     if ok && oldParentID != parentID {
-        if nodey, ok := face.folderIDToNode[oldParentID]; ok {
+        if nodey := face.getNode(oldParentID); nodey != nil {
             delete(nodey.files, id)
         }
-
-        if zombey, ok := face.tombstones[oldParentID]; ok {
-            delete(zombey.files, id)
-        }
     }

     parent, ok := face.folderIDToNode[parentID]
     if !ok {
-        return clues.New("item added before parent")
+        return clues.NewWC(ctx, "file added before parent")
     }

     face.fileIDToParentID[id] = parentID
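The reordering above also threads ctx into addFile so that validation failures carry the parent folder's ID and path. A hedged sketch of the pattern, using only clues.Add and clues.NewWC as they appear in this diff (the function and values below are illustrative):

package main

import (
    "context"
    "fmt"

    "github.com/alcionai/clues"
)

// validate mirrors the addFile flow: annotate ctx with identifying
// values first, so any error minted with NewWC carries them along.
func validate(ctx context.Context, id, parentID string) error {
    ctx = clues.Add(ctx, "parent_id", parentID)

    if len(id) == 0 {
        return clues.NewWC(ctx, "item added without ID")
    }

    return nil
}

func main() {
    err := validate(context.Background(), "", "parent!")
    fmt.Println(clues.ToCore(err)) // core error plus the attached values
}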
@@ -432,7 +441,11 @@ func (face *folderyMcFolderFace) walkTreeAndBuildCollections(
     isChildOfPackage bool,
     result map[string]collectable,
 ) error {
-    if node == nil {
+    // all non-root folders get skipped when not selected.
+    // the root folder stays in- because it's required to build
+    // the tree of selected folders- but if it's not selected
+    // then we won't include any of its files.
+    if node == nil || (node != face.root && node.isNotSelected) {
         return nil
     }

@@ -476,6 +489,12 @@ func (face *folderyMcFolderFace) walkTreeAndBuildCollections(
         files[id] = node.folder
     }

+    // should only occur if the root is not selected, since we should
+    // have backed out on all other non-selected folders by this point.
+    if node.isNotSelected {
+        files = map[string]*custom.DriveItem{}
+    }
+
     cbl := collectable{
         currPath: collectionPath,
         files: files,
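The isNotSelected flag defers selection decisions to post-processing: unselected folders stay in the tree so their children can still attach, and the walk drops them when building collections. A standalone sketch of that rule with hypothetical, trimmed-down types (the real nodeyMcNodeFace carries far more state):

package main

import "fmt"

// node is a hypothetical stand-in for nodeyMcNodeFace.
type node struct {
    id            string
    isNotSelected bool
    files         []string
    children      []*node
}

// walk mirrors the rule in walkTreeAndBuildCollections: skip unselected
// non-root folders entirely; keep an unselected root for structure, but
// contribute none of its files.
func walk(n, root *node, out map[string][]string) {
    if n == nil || (n != root && n.isNotSelected) {
        return
    }

    files := n.files
    if n.isNotSelected {
        files = nil // only reachable when n is the root
    }

    out[n.id] = files

    for _, c := range n.children {
        walk(c, root, out)
    }
}

func main() {
    root := &node{
        id:            "root",
        isNotSelected: true,
        files:         []string{"r"},
        children: []*node{
            {id: "selected", files: []string{"f"}},
            {id: "unselected", isNotSelected: true, files: []string{"n"}},
        },
    }

    out := map[string][]string{}
    walk(root, root, out)
    fmt.Println(out) // map[root:[] selected:[f]] - the unselected child is dropped
}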

View File

@@ -36,7 +36,7 @@ func (suite *DeltaTreeUnitSuite) TestNewFolderyMcFolderFace() {
     require.NoError(t, err, clues.ToCore(err))

-    folderFace := newFolderyMcFolderFace(p, rootID)
+    folderFace := newFolderyMcFolderFace(p)

     assert.Equal(t, p, folderFace.prefix)
     assert.Nil(t, folderFace.root)
     assert.NotNil(t, folderFace.folderIDToNode)

@@ -52,7 +52,7 @@ func (suite *DeltaTreeUnitSuite) TestNewNodeyMcNodeFace() {
         fld = custom.ToCustomDriveItem(d.folderAt(root))
     )

-    nodeFace := newNodeyMcNodeFace(parent, fld)
+    nodeFace := newNodeyMcNodeFace(parent, fld, false)

     assert.Equal(t, parent, nodeFace.parent)
     assert.Equal(t, folderID(), ptr.Val(nodeFace.folder.GetId()))
     assert.Equal(t, folderName(), ptr.Val(nodeFace.folder.GetName()))

@@ -177,7 +177,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder() {
     tree := test.tree(t, drive())
     folder := test.folder()

-    err := tree.setFolder(ctx, folder)
+    err := tree.setFolder(ctx, folder, false)
     test.expectErr(t, err, clues.ToCore(err))

     if err != nil {

@@ -497,7 +497,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder_correctTree()
     tree := treeWithRoot(t, d)

     set := func(folder *custom.DriveItem) {
-        err := tree.setFolder(ctx, folder)
+        err := tree.setFolder(ctx, folder, false)
         require.NoError(t, err, clues.ToCore(err))
     }

@@ -600,7 +600,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder_correctTombst
     tree := treeWithRoot(t, d)

     set := func(folder *custom.DriveItem) {
-        err := tree.setFolder(ctx, folder)
+        err := tree.setFolder(ctx, folder, false)
         require.NoError(t, err, clues.ToCore(err))
     }

@@ -884,7 +884,10 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() {
         df = custom.ToCustomDriveItem(d.fileWSizeAt(test.contentSize, test.parent))
     )

-    err := tree.addFile(df)
+    ctx, flush := tester.NewContext(t)
+    defer flush()
+
+    err := tree.addFile(ctx, df)
     test.expectErr(t, err, clues.ToCore(err))

     assert.Equal(t, test.expectFiles, tree.fileIDToParentID)

@@ -968,6 +971,9 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_addAndDeleteFile() {
         fID = fileID()
     )

+    ctx, flush := tester.NewContext(t)
+    defer flush()
+
     require.Len(t, tree.fileIDToParentID, 0)
     require.Len(t, tree.deletedFileIDs, 0)

@@ -978,7 +984,9 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_addAndDeleteFile() {
     assert.Len(t, tree.deletedFileIDs, 1)
     assert.Contains(t, tree.deletedFileIDs, fID)

-    err := tree.addFile(custom.ToCustomDriveItem(d.fileAt(root)))
+    err := tree.addFile(
+        ctx,
+        custom.ToCustomDriveItem(d.fileAt(root)))
     require.NoError(t, err, clues.ToCore(err))

     assert.Len(t, tree.fileIDToParentID, 1)

@@ -1127,10 +1135,10 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables()
     defer flush()

     tree := treeWithRoot(t, d)

-    err := tree.setFolder(ctx, custom.ToCustomDriveItem(d.packageAtRoot()))
+    err := tree.setFolder(ctx, custom.ToCustomDriveItem(d.packageAtRoot()), false)
     require.NoError(t, err, clues.ToCore(err))

-    err = tree.setFolder(ctx, custom.ToCustomDriveItem(d.folderAt(pkg)))
+    err = tree.setFolder(ctx, custom.ToCustomDriveItem(d.folderAt(pkg)), false)
     require.NoError(t, err, clues.ToCore(err))

     return tree

@@ -1199,6 +1207,58 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables()
             },
         },
     },
+    {
+        name: "folder hierarchy with unselected root and child and no previous paths",
+        tree: treeWithUnselectedRootAndFolder,
+        expectErr: require.NoError,
+        prevPaths: map[string]string{},
+        expect: map[string]collectable{
+            rootID: {
+                currPath: d.fullPath(t),
+                files: map[string]*custom.DriveItem{},
+                folderID: rootID,
+                isPackageOrChildOfPackage: false,
+            },
+            folderID(): {
+                currPath: d.fullPath(t, folderName()),
+                files: map[string]*custom.DriveItem{
+                    folderID(): custom.ToCustomDriveItem(d.folderAt(root)),
+                    fileID(): custom.ToCustomDriveItem(d.fileAt(folder)),
+                },
+                folderID: folderID(),
+                isPackageOrChildOfPackage: false,
+            },
+        },
+    },
+    {
+        name: "folder hierarchy with unselected root and child with previous paths",
+        tree: treeWithUnselectedRootAndFolder,
+        expectErr: require.NoError,
+        prevPaths: map[string]string{
+            rootID: d.strPath(t),
+            folderID(): d.strPath(t, folderName()),
+            folderID("nope"): d.strPath(t, folderName("nope")),
+        },
+        expect: map[string]collectable{
+            rootID: {
+                currPath: d.fullPath(t),
+                prevPath: d.fullPath(t),
+                files: map[string]*custom.DriveItem{},
+                folderID: rootID,
+                isPackageOrChildOfPackage: false,
+            },
+            folderID(): {
+                currPath: d.fullPath(t, folderName()),
+                prevPath: d.fullPath(t, folderName()),
+                files: map[string]*custom.DriveItem{
+                    folderID(): custom.ToCustomDriveItem(d.folderAt(root)),
+                    fileID(): custom.ToCustomDriveItem(d.fileAt(folder)),
+                },
+                folderID: folderID(),
+                isPackageOrChildOfPackage: false,
+            },
+        },
+    },
     {
         name: "root and tombstones",
         tree: treeWithFileInTombstone,

@@ -1241,7 +1301,13 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables()
     results, err := tree.generateCollectables()
     test.expectErr(t, err, clues.ToCore(err))

-    assert.Len(t, results, len(test.expect))
+    assert.Len(
+        t,
+        results,
+        len(test.expect),
+        "count of collections\n\tWanted: %+v\n\tGot: %+v",
+        maps.Keys(test.expect),
+        maps.Keys(results))

     for id, expect := range test.expect {
         require.Contains(t, results, id)
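The final hunk upgrades a bare assert.Len with a diagnostic built from maps.Keys, so a count mismatch reports which collections were expected versus produced rather than just two numbers. A small sketch of the pattern, assuming testify and golang.org/x/exp/maps (both already used in this PR):

package example

import (
    "testing"

    "github.com/stretchr/testify/assert"
    "golang.org/x/exp/maps"
)

func assertSameKeys(t *testing.T, want, got map[string]int) {
    // on failure, the message names the keys on both sides instead of
    // only reporting a length mismatch.
    assert.Len(
        t,
        got,
        len(want),
        "count of entries\n\tWanted: %+v\n\tGot: %+v",
        maps.Keys(want),
        maps.Keys(got))
}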

View File

@@ -490,22 +490,22 @@ func defaultLoc() path.Elements {
 }

 func newTree(t *testing.T, d *deltaDrive) *folderyMcFolderFace {
-    return newFolderyMcFolderFace(defaultTreePfx(t, d), rootID)
+    return newFolderyMcFolderFace(defaultTreePfx(t, d))
 }

 func treeWithRoot(t *testing.T, d *deltaDrive) *folderyMcFolderFace {
-    tree := newFolderyMcFolderFace(defaultTreePfx(t, d), rootID)
+    tree := newFolderyMcFolderFace(defaultTreePfx(t, d))
     root := custom.ToCustomDriveItem(rootFolder())

     //nolint:forbidigo
-    err := tree.setFolder(context.Background(), root)
+    err := tree.setFolder(context.Background(), root, false)
     require.NoError(t, err, clues.ToCore(err))

     return tree
 }

 func treeAfterReset(t *testing.T, d *deltaDrive) *folderyMcFolderFace {
-    tree := newFolderyMcFolderFace(defaultTreePfx(t, d), rootID)
+    tree := newFolderyMcFolderFace(defaultTreePfx(t, d))
     tree.reset()

     return tree

@@ -535,21 +535,24 @@ func treeWithFolders(t *testing.T, d *deltaDrive) *folderyMcFolderFace {
     folder := custom.ToCustomDriveItem(d.folderAt("parent"))

     //nolint:forbidigo
-    err := tree.setFolder(context.Background(), parent)
+    err := tree.setFolder(context.Background(), parent, false)
     require.NoError(t, err, clues.ToCore(err))

     //nolint:forbidigo
-    err = tree.setFolder(context.Background(), folder)
+    err = tree.setFolder(context.Background(), folder, false)
     require.NoError(t, err, clues.ToCore(err))

     return tree
 }

 func treeWithFileAtRoot(t *testing.T, d *deltaDrive) *folderyMcFolderFace {
+    ctx, flush := tester.NewContext(t)
+    defer flush()
+
     tree := treeWithRoot(t, d)
     f := custom.ToCustomDriveItem(d.fileAt(root))

-    err := tree.addFile(f)
+    err := tree.addFile(ctx, f)
     require.NoError(t, err, clues.ToCore(err))

     return tree

@@ -563,10 +566,13 @@ func treeWithDeletedFile(t *testing.T, d *deltaDrive) *folderyMcFolderFace {
 }

 func treeWithFileInFolder(t *testing.T, d *deltaDrive) *folderyMcFolderFace {
+    ctx, flush := tester.NewContext(t)
+    defer flush()
+
     tree := treeWithFolders(t, d)
     f := custom.ToCustomDriveItem(d.fileAt(folder))

-    err := tree.addFile(f)
+    err := tree.addFile(ctx, f)
     require.NoError(t, err, clues.ToCore(err))

     return tree

@@ -583,6 +589,31 @@ func treeWithFileInTombstone(t *testing.T, d *deltaDrive) *folderyMcFolderFace {
     return tree
 }

+func treeWithUnselectedRootAndFolder(t *testing.T, d *deltaDrive) *folderyMcFolderFace {
+    ctx, flush := tester.NewContext(t)
+    defer flush()
+
+    tree := treeWithRoot(t, d)
+    tree.root.isNotSelected = true
+
+    err := tree.addFile(ctx, custom.ToCustomDriveItem(d.fileAt(root, "r")))
+    require.NoError(t, err, clues.ToCore(err))
+
+    err = tree.setFolder(ctx, custom.ToCustomDriveItem(d.folderAt(root)), false)
+    require.NoError(t, err, clues.ToCore(err))
+
+    err = tree.addFile(ctx, custom.ToCustomDriveItem(d.fileAt(folder)))
+    require.NoError(t, err, clues.ToCore(err))
+
+    err = tree.setFolder(ctx, custom.ToCustomDriveItem(d.folderAt(root, "nope")), true)
+    require.NoError(t, err, clues.ToCore(err))
+
+    err = tree.addFile(ctx, custom.ToCustomDriveItem(d.fileAt("nope", "n")))
+    require.NoError(t, err, clues.ToCore(err))
+
+    return tree
+}
+
 // root -> idx(folder, parent) -> folderID()
 // one item at each dir
 // one tombstone: idx(folder, tombstone)

@@ -603,39 +634,39 @@ func fullTreeWithNames(
     // file "r" in root
     df := custom.ToCustomDriveItem(d.fileAt(root, "r"))

-    err := tree.addFile(df)
+    err := tree.addFile(ctx, df)
     require.NoError(t, err, clues.ToCore(err))

     // root -> folderID(parentX)
     parent := custom.ToCustomDriveItem(d.folderAt(root, parentFolderSuffix))

-    err = tree.setFolder(ctx, parent)
+    err = tree.setFolder(ctx, parent, false)
     require.NoError(t, err, clues.ToCore(err))

     // file "p" in folderID(parentX)
     df = custom.ToCustomDriveItem(d.fileAt(parentFolderSuffix, "p"))

-    err = tree.addFile(df)
+    err = tree.addFile(ctx, df)
     require.NoError(t, err, clues.ToCore(err))

     // folderID(parentX) -> folderID()
     fld := custom.ToCustomDriveItem(d.folderAt(parentFolderSuffix))

-    err = tree.setFolder(ctx, fld)
+    err = tree.setFolder(ctx, fld, false)
     require.NoError(t, err, clues.ToCore(err))

     // file "f" in folderID()
     df = custom.ToCustomDriveItem(d.fileAt(folder, "f"))

-    err = tree.addFile(df)
+    err = tree.addFile(ctx, df)
     require.NoError(t, err, clues.ToCore(err))

     // tombstone - have to set a non-tombstone folder first,
     // then add the item,
     // then tombstone the folder
     tomb := custom.ToCustomDriveItem(d.folderAt(root, tombstoneSuffix))

-    err = tree.setFolder(ctx, tomb)
+    err = tree.setFolder(ctx, tomb, false)
     require.NoError(t, err, clues.ToCore(err))

     // file "t" in tombstone
     df = custom.ToCustomDriveItem(d.fileAt(tombstoneSuffix, "t"))

-    err = tree.addFile(df)
+    err = tree.addFile(ctx, df)
     require.NoError(t, err, clues.ToCore(err))

     err = tree.setTombstone(ctx, tomb)

View File

@@ -647,7 +647,11 @@ func runBackupAndCompare(
     require.NoError(t, err, clues.ToCore(err))
     assert.True(t, canUsePreviousBackup, "can use previous backup")

     // No excludes yet because this isn't an incremental backup.
-    assert.True(t, excludes.Empty())
+    assert.True(
+        t,
+        excludes.Empty(),
+        "global excludes should have no entries, got:\n\t%+v",
+        excludes.Keys())

     t.Logf("Backup enumeration complete in %v\n", time.Since(start))

View File

@@ -509,11 +509,14 @@ func testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(
     restoreCfg.OnCollision = control.Replace
     restoreCfg.IncludePermissions = true

+    opts := control.DefaultOptions()
+    opts.ToggleFeatures.UseDeltaTree = true
+
     cfg := m365Stub.ConfigInfo{
         Tenant: suite.Tenant(),
         ResourceOwners: []string{suite.ResourceOwner()},
         Service: testData.service,
-        Opts: control.DefaultOptions(),
+        Opts: opts,
         RestoreCfg: restoreCfg,
     }

@@ -759,11 +762,14 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) {
     restoreCfg.OnCollision = control.Replace
     restoreCfg.IncludePermissions = true

+    opts := control.DefaultOptions()
+    opts.ToggleFeatures.UseDeltaTree = true
+
     cfg := m365Stub.ConfigInfo{
         Tenant: suite.Tenant(),
         ResourceOwners: []string{suite.ResourceOwner()},
         Service: testData.service,
-        Opts: control.DefaultOptions(),
+        Opts: opts,
         RestoreCfg: restoreCfg,
     }

@@ -850,11 +856,14 @@ func testRestoreNoPermissionsAndBackup(suite oneDriveSuite, startVersion int) {
     restoreCfg.OnCollision = control.Replace
     restoreCfg.IncludePermissions = false

+    opts := control.DefaultOptions()
+    opts.ToggleFeatures.UseDeltaTree = true
+
     cfg := m365Stub.ConfigInfo{
         Tenant: suite.Tenant(),
         ResourceOwners: []string{suite.ResourceOwner()},
         Service: testData.service,
-        Opts: control.DefaultOptions(),
+        Opts: opts,
         RestoreCfg: restoreCfg,
     }

@@ -1056,11 +1065,14 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio
     restoreCfg.OnCollision = control.Replace
     restoreCfg.IncludePermissions = true

+    opts := control.DefaultOptions()
+    opts.ToggleFeatures.UseDeltaTree = true
+
     cfg := m365Stub.ConfigInfo{
         Tenant: suite.Tenant(),
         ResourceOwners: []string{suite.ResourceOwner()},
         Service: testData.service,
-        Opts: control.DefaultOptions(),
+        Opts: opts,
         RestoreCfg: restoreCfg,
     }

@@ -1252,11 +1264,14 @@ func testLinkSharesInheritanceRestoreAndBackup(suite oneDriveSuite, startVersion
     restoreCfg.OnCollision = control.Replace
     restoreCfg.IncludePermissions = true

+    opts := control.DefaultOptions()
+    opts.ToggleFeatures.UseDeltaTree = true
+
     cfg := m365Stub.ConfigInfo{
         Tenant: suite.Tenant(),
         ResourceOwners: []string{suite.ResourceOwner()},
         Service: testData.service,
-        Opts: control.DefaultOptions(),
+        Opts: opts,
         RestoreCfg: restoreCfg,
     }

@@ -1369,11 +1384,14 @@ func testRestoreFolderNamedFolderRegression(
     restoreCfg := control.DefaultRestoreConfig(dttm.SafeForTesting)
     restoreCfg.IncludePermissions = true

+    opts := control.DefaultOptions()
+    opts.ToggleFeatures.UseDeltaTree = true
+
     cfg := m365Stub.ConfigInfo{
         Tenant: suite.Tenant(),
         ResourceOwners: []string{suite.ResourceOwner()},
         Service: testData.service,
-        Opts: control.DefaultOptions(),
+        Opts: opts,
         RestoreCfg: restoreCfg,
     }
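Each of the six hunks above repeats the same three lines to opt into the delta tree. A hedged convenience sketch, not part of this PR, that would keep those call sites in sync:

package example

import "github.com/alcionai/corso/src/pkg/control"

// deltaTreeOptions is a hypothetical helper; the PR inlines these
// lines at each call site instead.
func deltaTreeOptions() control.Options {
    opts := control.DefaultOptions()
    opts.ToggleFeatures.UseDeltaTree = true

    return opts
}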

View File

@@ -0,0 +1,398 @@
package m365
import (
"context"
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/internal/common/idname"
strTD "github.com/alcionai/corso/src/internal/common/str/testdata"
"github.com/alcionai/corso/src/internal/events"
evmock "github.com/alcionai/corso/src/internal/events/mock"
"github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/m365"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/streamstore"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/account"
deeTD "github.com/alcionai/corso/src/pkg/backup/details/testdata"
"github.com/alcionai/corso/src/pkg/backup/identity"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/repository"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/storage"
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
"github.com/alcionai/corso/src/pkg/store"
)
type BackupOpDependencies struct {
Acct account.Account
Ctrl *m365.Controller
KMS *kopia.ModelStore
KW *kopia.Wrapper
Sel selectors.Selector
SSS streamstore.Streamer
St storage.Storage
SW store.BackupStorer
closer func()
}
func (bod *BackupOpDependencies) Close(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
) {
bod.closer()
if bod.KW != nil {
err := bod.KW.Close(ctx)
assert.NoErrorf(t, err, "kw close: %+v", clues.ToCore(err))
}
if bod.KMS != nil {
err := bod.KMS.Close(ctx)
assert.NoErrorf(t, err, "kms close: %+v", clues.ToCore(err))
}
}
// PrepNewTestBackupOp generates all clients required to run a backup operation,
// returning both a backup operation created with those clients and the
// clients themselves.
func PrepNewTestBackupOp(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
bus events.Eventer,
sel selectors.Selector,
opts control.Options,
backupVersion int,
counter *count.Bus,
) (
operations.BackupOperation,
*BackupOpDependencies,
) {
bod := &BackupOpDependencies{
Acct: tconfig.NewM365Account(t),
St: storeTD.NewPrefixedS3Storage(t),
}
repoNameHash := strTD.NewHashForRepoConfigName()
k := kopia.NewConn(bod.St)
err := k.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
require.NoError(t, err, clues.ToCore(err))
defer func() {
if err != nil {
bod.Close(t, ctx)
t.FailNow()
}
}()
// kopiaRef comes with a count of 1, and Wrapper bumps it again,
// so we're safe to close here.
bod.closer = func() {
err := k.Close(ctx)
assert.NoErrorf(t, err, "k close: %+v", clues.ToCore(err))
}
bod.KW, err = kopia.NewWrapper(k)
if !assert.NoError(t, err, clues.ToCore(err)) {
return operations.BackupOperation{}, nil
}
bod.KMS, err = kopia.NewModelStore(k)
if !assert.NoError(t, err, clues.ToCore(err)) {
return operations.BackupOperation{}, nil
}
bod.SW = store.NewWrapper(bod.KMS)
bod.Ctrl, bod.Sel = ControllerWithSelector(
t,
ctx,
bod.Acct,
sel,
nil,
bod.Close,
counter)
bo := NewTestBackupOp(
t,
ctx,
bod,
bus,
opts,
counter)
bo.BackupVersion = backupVersion
bod.SSS = streamstore.NewStreamer(
bod.KW,
bod.Acct.ID(),
bod.Sel.PathService())
return bo, bod
}
// NewTestBackupOp accepts the clients required to compose a backup operation, plus
// any other metadata, and uses them to generate a new backup operation. This
// allows backup chains to utilize the same temp directory and configuration
// details.
func NewTestBackupOp(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
bod *BackupOpDependencies,
bus events.Eventer,
opts control.Options,
counter *count.Bus,
) operations.BackupOperation {
bod.Ctrl.IDNameLookup = idname.NewCache(map[string]string{bod.Sel.ID(): bod.Sel.Name()})
bo, err := operations.NewBackupOperation(
ctx,
opts,
bod.KW,
bod.SW,
bod.Ctrl,
bod.Acct,
bod.Sel,
bod.Sel,
bus,
counter)
if !assert.NoError(t, err, clues.ToCore(err)) {
bod.Close(t, ctx)
t.FailNow()
}
return bo
}
func RunAndCheckBackup(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
bo *operations.BackupOperation,
mb *evmock.Bus,
acceptNoData bool,
) {
err := bo.Run(ctx)
if !assert.NoError(t, err, clues.ToCore(err)) {
for i, err := range bo.Errors.Recovered() {
t.Logf("recoverable err %d, %+v", i, err)
}
assert.Fail(t, "not allowed to error")
}
require.NotEmpty(t, bo.Results, "the backup had non-zero results")
require.NotEmpty(t, bo.Results.BackupID, "the backup generated an ID")
expectStatus := []operations.OpStatus{operations.Completed}
if acceptNoData {
expectStatus = append(expectStatus, operations.NoData)
}
require.Contains(
t,
expectStatus,
bo.Status,
"backup doesn't match expectation, wanted any of %v, got %s",
expectStatus,
bo.Status)
require.NotZero(t, bo.Results.ItemsWritten)
assert.NotZero(t, bo.Results.ItemsRead, "count of items read")
assert.NotZero(t, bo.Results.BytesRead, "bytes read")
assert.NotZero(t, bo.Results.BytesUploaded, "bytes uploaded")
assert.Equal(t, 1, bo.Results.ResourceOwners, "count of resource owners")
assert.NoError(t, bo.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(bo.Errors.Failure()))
assert.Empty(t, bo.Errors.Recovered(), "incremental recoverable/iteration errors")
assert.Equal(t, 1, mb.TimesCalled[events.BackupEnd], "backup-end events")
}
func CheckBackupIsInManifests(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
kw *kopia.Wrapper,
sw store.BackupStorer,
bo *operations.BackupOperation,
sel selectors.Selector,
resourceOwner string,
categories ...path.CategoryType,
) {
for _, category := range categories {
t.Run(category.String(), func(t *testing.T) {
var (
r = identity.NewReason("", resourceOwner, sel.PathService(), category)
tags = map[string]string{kopia.TagBackupCategory: ""}
found bool
)
bf, err := kw.NewBaseFinder(sw)
require.NoError(t, err, clues.ToCore(err))
mans := bf.FindBases(ctx, []identity.Reasoner{r}, tags)
for _, man := range mans.MergeBases() {
bID, ok := man.GetSnapshotTag(kopia.TagBackupID)
if !assert.Truef(t, ok, "snapshot manifest %s missing backup ID tag", man.ItemDataSnapshot.ID) {
continue
}
if bID == string(bo.Results.BackupID) {
found = true
break
}
}
assert.True(t, found, "backup retrieved by previous snapshot manifest")
})
}
}
func RunMergeBaseGroupsUpdate(
suite tester.Suite,
sel selectors.Selector,
expectCached bool,
) {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var (
mb = evmock.NewBus()
opts = control.DefaultOptions()
whatSet = deeTD.CategoryFromRepoRef
)
opts.ToggleFeatures.UseDeltaTree = true
// Needs to live outside the inner test cases so that bod lasts for the entire test.
bo, bod := PrepNewTestBackupOp(
t,
ctx,
mb,
sel,
opts,
version.All8MigrateUserPNToID,
count.New())
defer bod.Close(t, ctx)
suite.Run("makeMergeBackup", func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
RunAndCheckBackup(t, ctx, &bo, mb, false)
reasons, err := bod.Sel.Reasons(bod.Acct.ID(), false)
require.NoError(t, err, clues.ToCore(err))
for _, reason := range reasons {
CheckBackupIsInManifests(
t,
ctx,
bod.KW,
bod.SW,
&bo,
bod.Sel,
bod.Sel.ID(),
reason.Category())
}
_, expectDeets := deeTD.GetDeetsInBackup(
t,
ctx,
bo.Results.BackupID,
bod.Acct.ID(),
bod.Sel.ID(),
bod.Sel.PathService(),
whatSet,
bod.KMS,
bod.SSS)
deeTD.CheckBackupDetails(
t,
ctx,
bo.Results.BackupID,
whatSet,
bod.KMS,
bod.SSS,
expectDeets,
false)
})
suite.Run("makeIncrementalBackup", func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var (
mb = evmock.NewBus()
opts = control.DefaultOptions()
)
forcedFull := NewTestBackupOp(
t,
ctx,
bod,
mb,
opts,
count.New())
forcedFull.BackupVersion = version.Groups9Update
RunAndCheckBackup(t, ctx, &forcedFull, mb, false)
reasons, err := bod.Sel.Reasons(bod.Acct.ID(), false)
require.NoError(t, err, clues.ToCore(err))
for _, reason := range reasons {
CheckBackupIsInManifests(
t,
ctx,
bod.KW,
bod.SW,
&forcedFull,
bod.Sel,
bod.Sel.ID(),
reason.Category())
}
_, expectDeets := deeTD.GetDeetsInBackup(
t,
ctx,
forcedFull.Results.BackupID,
bod.Acct.ID(),
bod.Sel.ID(),
bod.Sel.PathService(),
whatSet,
bod.KMS,
bod.SSS)
deeTD.CheckBackupDetails(
t,
ctx,
forcedFull.Results.BackupID,
whatSet,
bod.KMS,
bod.SSS,
expectDeets,
false)
check := assert.Zero
if expectCached {
check = assert.NotZero
}
check(
t,
forcedFull.Results.Counts[string(count.PersistedCachedFiles)],
"cached items")
})
}
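These helpers are consumed by the service-specific suites; a minimal usage sketch based on the calls in the exchange tests below (it assumes the helpers are dot-imported or otherwise in scope, and userID stands in for a real test user):

package example

import (
    "testing"

    evmock "github.com/alcionai/corso/src/internal/events/mock"
    "github.com/alcionai/corso/src/internal/tester"
    "github.com/alcionai/corso/src/internal/version"
    "github.com/alcionai/corso/src/pkg/control"
    "github.com/alcionai/corso/src/pkg/count"
    "github.com/alcionai/corso/src/pkg/selectors"
)

// runOneBackup shows the intended call order: prep, defer Close, run.
func runOneBackup(t *testing.T, userID string) {
    ctx, flush := tester.NewContext(t)
    defer flush()

    var (
        mb   = evmock.NewBus()
        opts = control.DefaultOptions()
        sel  = selectors.NewExchangeBackup([]string{userID}).Selector
    )

    bo, bod := PrepNewTestBackupOp(t, ctx, mb, sel, opts, version.Backup, count.New())
    defer bod.Close(t, ctx)

    RunAndCheckBackup(t, ctx, &bo, mb, false)
}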

View File

@@ -1,4 +1,4 @@
-package test_test
+package exchange_test

 import (
     "context"

@@ -19,6 +19,7 @@ import (
     evmock "github.com/alcionai/corso/src/internal/events/mock"
     exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
     exchTD "github.com/alcionai/corso/src/internal/m365/service/exchange/testdata"
+    . "github.com/alcionai/corso/src/internal/operations/test/m365"
     "github.com/alcionai/corso/src/internal/tester"
     "github.com/alcionai/corso/src/internal/tester/tconfig"
     "github.com/alcionai/corso/src/internal/version"

@@ -40,7 +41,7 @@ import (
 type ExchangeBackupIntgSuite struct {
     tester.Suite
-    its intgTesterSetup
+    its IntgTesterSetup
 }

 func TestExchangeBackupIntgSuite(t *testing.T) {

@@ -52,7 +53,7 @@ func TestExchangeBackupIntgSuite(t *testing.T) {
 }

 func (suite *ExchangeBackupIntgSuite) SetupSuite() {
-    suite.its = newIntegrationTesterSetup(suite.T())
+    suite.its = NewIntegrationTesterSetup(suite.T())
 }

 // MetadataFileNames produces the category-specific set of filenames used to

@@ -79,9 +80,9 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
     {
         name: "Mail",
         selector: func() *selectors.ExchangeBackup {
-            sel := selectors.NewExchangeBackup([]string{suite.its.user.ID})
+            sel := selectors.NewExchangeBackup([]string{suite.its.User.ID})
             sel.Include(sel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch()))
-            sel.DiscreteOwner = suite.its.user.ID
+            sel.DiscreteOwner = suite.its.User.ID

             return sel
         },

@@ -91,7 +92,7 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
     {
         name: "Contacts",
         selector: func() *selectors.ExchangeBackup {
-            sel := selectors.NewExchangeBackup([]string{suite.its.user.ID})
+            sel := selectors.NewExchangeBackup([]string{suite.its.User.ID})
             sel.Include(sel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()))

             return sel
         },

@@ -101,7 +102,7 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
     {
         name: "Calendar Events",
         selector: func() *selectors.ExchangeBackup {
-            sel := selectors.NewExchangeBackup([]string{suite.its.user.ID})
+            sel := selectors.NewExchangeBackup([]string{suite.its.User.ID})
             sel.Include(sel.EventCalendars([]string{api.DefaultCalendar}, selectors.PrefixMatch()))

             return sel
         },

@@ -124,33 +125,33 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
         whatSet = deeTD.CategoryFromRepoRef
     )

-    bo, bod := prepNewTestBackupOp(t, ctx, mb, sel, opts, version.Backup, counter)
-    defer bod.close(t, ctx)
+    bo, bod := PrepNewTestBackupOp(t, ctx, mb, sel, opts, version.Backup, counter)
+    defer bod.Close(t, ctx)

-    sel = bod.sel
+    sel = bod.Sel
     userID := sel.ID()

-    m365, err := bod.acct.M365Config()
+    m365, err := bod.Acct.M365Config()
     require.NoError(t, err, clues.ToCore(err))

     // run the tests
-    runAndCheckBackup(t, ctx, &bo, mb, false)
-    checkBackupIsInManifests(
+    RunAndCheckBackup(t, ctx, &bo, mb, false)
+    CheckBackupIsInManifests(
         t,
         ctx,
-        bod.kw,
-        bod.sw,
+        bod.KW,
+        bod.SW,
         &bo,
         sel,
         userID,
         test.category)
-    checkMetadataFilesExist(
+    CheckMetadataFilesExist(
         t,
         ctx,
         bo.Results.BackupID,
-        bod.kw,
-        bod.kms,
+        bod.KW,
+        bod.KMS,
         m365.AzureTenantID,
         userID,
         path.ExchangeService,

@@ -160,19 +161,19 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
         t,
         ctx,
         bo.Results.BackupID,
-        bod.acct.ID(),
+        bod.Acct.ID(),
         userID,
         path.ExchangeService,
         whatSet,
-        bod.kms,
-        bod.sss)
+        bod.KMS,
+        bod.SSS)
     deeTD.CheckBackupDetails(
         t,
         ctx,
         bo.Results.BackupID,
         whatSet,
-        bod.kms,
-        bod.sss,
+        bod.KMS,
+        bod.SSS,
         expectDeets,
         false)

@@ -181,7 +182,7 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
     // produces fewer results than the last backup.
     var (
         incMB = evmock.NewBus()
-        incBO = newTestBackupOp(
+        incBO = NewTestBackupOp(
             t,
             ctx,
             bod,

@@ -190,22 +191,22 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
             counter)
     )

-    runAndCheckBackup(t, ctx, &incBO, incMB, true)
-    checkBackupIsInManifests(
+    RunAndCheckBackup(t, ctx, &incBO, incMB, true)
+    CheckBackupIsInManifests(
         t,
         ctx,
-        bod.kw,
-        bod.sw,
+        bod.KW,
+        bod.SW,
         &incBO,
         sel,
         userID,
         test.category)
-    checkMetadataFilesExist(
+    CheckMetadataFilesExist(
         t,
         ctx,
         incBO.Results.BackupID,
-        bod.kw,
-        bod.kms,
+        bod.KW,
+        bod.KMS,
         m365.AzureTenantID,
         userID,
         path.ExchangeService,

@@ -215,8 +216,8 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
         ctx,
         incBO.Results.BackupID,
         whatSet,
-        bod.kms,
-        bod.sss,
+        bod.KMS,
+        bod.SSS,
         expectDeets,
         false)

@@ -260,16 +261,16 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
         path.ContactsCategory: MetadataFileNames(path.ContactsCategory),
         // path.EventsCategory: exchange.MetadataFileNames(path.EventsCategory),
     }

-    container1 = fmt.Sprintf("%s%d_%s", incrementalsDestContainerPrefix, 1, now)
-    container2 = fmt.Sprintf("%s%d_%s", incrementalsDestContainerPrefix, 2, now)
-    container3 = fmt.Sprintf("%s%d_%s", incrementalsDestContainerPrefix, 3, now)
-    containerRename = fmt.Sprintf("%s%d_%s", incrementalsDestContainerPrefix, 4, now)
+    container1 = fmt.Sprintf("%s%d_%s", IncrementalsDestContainerPrefix, 1, now)
+    container2 = fmt.Sprintf("%s%d_%s", IncrementalsDestContainerPrefix, 2, now)
+    container3 = fmt.Sprintf("%s%d_%s", IncrementalsDestContainerPrefix, 3, now)
+    containerRename = fmt.Sprintf("%s%d_%s", IncrementalsDestContainerPrefix, 4, now)

     // container3 and containerRename don't exist yet. Those will get created
     // later on during the tests. Putting their identifiers into the selector
     // at this point is harmless.
     containers = []string{container1, container2, container3, containerRename}
-    sel = selectors.NewExchangeBackup([]string{suite.its.user.ID})
+    sel = selectors.NewExchangeBackup([]string{suite.its.User.ID})

     whatSet = deeTD.CategoryFromRepoRef
     opts = control.DefaultOptions()
 )

@@ -309,7 +310,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
     mailDBF := func(id, timeStamp, subject, body string) []byte {
         return exchMock.MessageWith(
-            suite.its.user.ID, suite.its.user.ID, suite.its.user.ID,
+            suite.its.User.ID, suite.its.User.ID, suite.its.User.ID,
             subject, body, body,
             now, now, now, now)
     }

@@ -326,7 +327,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
     eventDBF := func(id, timeStamp, subject, body string) []byte {
         return exchMock.EventWith(
-            suite.its.user.ID, subject, body, body,
+            suite.its.User.ID, subject, body, body,
             exchMock.NoOriginalStartDate, now, now,
             exchMock.NoRecurrence, exchMock.NoAttendees,
             exchMock.NoAttachments, exchMock.NoCancelledOccurrences,

@@ -335,7 +336,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
     // test data set
     dataset := map[path.CategoryType]struct {
-        dbf dataBuilderFunc
+        dbf DataBuilderFunc
         dests map[string]contDeets
     }{
         path.EmailCategory: {

@@ -447,7 +448,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
     // populate initial test data
     for category, gen := range dataset {
         for destName := range gen.dests {
-            generateContainerOfItems(
+            GenerateContainerOfItems(
                 t,
                 ctx,
                 ctrl,

@@ -477,11 +478,11 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
         }
     }

-    bo, bod := prepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter)
-    defer bod.close(t, ctx)
+    bo, bod := PrepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter)
+    defer bod.Close(t, ctx)

     // run the initial backup
-    runAndCheckBackup(t, ctx, &bo, mb, false)
+    RunAndCheckBackup(t, ctx, &bo, mb, false)

     // precheck to ensure the expectedDeets are correct.
     // if we fail here, the expectedDeets were populated incorrectly.

@@ -490,8 +491,8 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
         ctx,
         bo.Results.BackupID,
         whatSet,
-        bod.kms,
-        bod.sss,
+        bod.KMS,
+        bod.SSS,
         expectDeets,
         true)

@@ -589,14 +590,14 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
     name: "add a new folder",
     updateUserData: func(t *testing.T, ctx context.Context) {
         for category, gen := range dataset {
-            generateContainerOfItems(
+            GenerateContainerOfItems(
                 t,
                 ctx,
                 ctrl,
                 service,
                 category,
                 selectors.NewExchangeRestore([]string{uidn.ID()}).Selector,
-                creds.AzureTenantID, suite.its.user.ID, "", "", container3,
+                creds.AzureTenantID, suite.its.User.ID, "", "", container3,
                 2,
                 version.Backup,
                 gen.dbf)

@@ -672,7 +673,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
     switch category {
     case path.EmailCategory:
-        _, itemData := generateItemData(t, category, uidn.ID(), mailDBF)
+        _, itemData := GenerateItemData(t, category, uidn.ID(), mailDBF)
         body, err := api.BytesToMessageable(itemData)
         require.NoErrorf(t, err, "transforming mail bytes to messageable: %+v", clues.ToCore(err))

@@ -685,7 +686,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
             ptr.Val(itm.GetId()))
     case path.ContactsCategory:
-        _, itemData := generateItemData(t, category, uidn.ID(), contactDBF)
+        _, itemData := GenerateItemData(t, category, uidn.ID(), contactDBF)
         body, err := api.BytesToContactable(itemData)
         require.NoErrorf(t, err, "transforming contact bytes to contactable: %+v", clues.ToCore(err))

@@ -698,7 +699,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
             ptr.Val(itm.GetId()))
     case path.EventsCategory:
-        _, itemData := generateItemData(t, category, uidn.ID(), eventDBF)
+        _, itemData := GenerateItemData(t, category, uidn.ID(), eventDBF)
         body, err := api.BytesToEventable(itemData)
         require.NoErrorf(t, err, "transforming event bytes to eventable: %+v", clues.ToCore(err))

@@ -819,7 +820,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
     ctx, flush := tester.WithContext(t, ctx)
     defer flush()

-    incBO := newTestBackupOp(t, ctx, bod, incMB, opts, counter)
+    incBO := NewTestBackupOp(t, ctx, bod, incMB, opts, counter)

     suite.Run("PreTestSetup", func() {
         t := suite.T()

@@ -835,21 +836,21 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
     bupID := incBO.Results.BackupID

-    checkBackupIsInManifests(
+    CheckBackupIsInManifests(
         t,
         ctx,
-        bod.kw,
-        bod.sw,
+        bod.KW,
+        bod.SW,
         &incBO,
         sels,
         uidn.ID(),
         maps.Keys(categories)...)
-    checkMetadataFilesExist(
+    CheckMetadataFilesExist(
         t,
         ctx,
         bupID,
-        bod.kw,
-        bod.kms,
+        bod.KW,
+        bod.KMS,
         atid,
         uidn.ID(),
         service,

@@ -859,8 +860,8 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
         ctx,
         bupID,
         whatSet,
-        bod.kms,
-        bod.sss,
+        bod.KMS,
+        bod.SSS,
         expectDeets,
         true)

@@ -889,7 +890,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
 type ExchangeBackupNightlyIntgSuite struct {
     tester.Suite
-    its intgTesterSetup
+    its IntgTesterSetup
 }

 func TestExchangeBackupNightlyIntgSuite(t *testing.T) {

@@ -901,22 +902,22 @@ func TestExchangeBackupNightlyIntgSuite(t *testing.T) {
 }

 func (suite *ExchangeBackupNightlyIntgSuite) SetupSuite() {
-    suite.its = newIntegrationTesterSetup(suite.T())
+    suite.its = NewIntegrationTesterSetup(suite.T())
 }

 func (suite *ExchangeBackupNightlyIntgSuite) TestBackup_Run_exchangeVersion9MergeBase() {
-    sel := selectors.NewExchangeBackup([]string{suite.its.user.ID})
+    sel := selectors.NewExchangeBackup([]string{suite.its.User.ID})
     sel.Include(
         sel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()),
         // sel.EventCalendars([]string{api.DefaultCalendar}, selectors.PrefixMatch()),
         sel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch()))

-    runMergeBaseGroupsUpdate(suite, sel.Selector, true)
+    RunMergeBaseGroupsUpdate(suite, sel.Selector, true)
 }

 type ExchangeRestoreNightlyIntgSuite struct {
     tester.Suite
-    its intgTesterSetup
+    its IntgTesterSetup
 }

 func TestExchangeRestoreIntgSuite(t *testing.T) {

@@ -928,7 +929,7 @@ func TestExchangeRestoreIntgSuite(t *testing.T) {
 }

 func (suite *ExchangeRestoreNightlyIntgSuite) SetupSuite() {
-    suite.its = newIntegrationTesterSetup(suite.T())
+    suite.its = NewIntegrationTesterSetup(suite.T())
 }

 type clientItemPager interface {

@@ -959,7 +960,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
     // a backup is required to run restores

-    baseSel := selectors.NewExchangeBackup([]string{suite.its.user.ID})
+    baseSel := selectors.NewExchangeBackup([]string{suite.its.User.ID})
     baseSel.Include(
         // events cannot be run, for the same reason as incremental backups: the user needs
         // to have their account recycled.

@@ -967,7 +968,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
         baseSel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()),
         baseSel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch()))

-    baseSel.DiscreteOwner = suite.its.user.ID
+    baseSel.DiscreteOwner = suite.its.User.ID

     var (
         mb = evmock.NewBus()

@@ -975,10 +976,10 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
         opts = control.DefaultOptions()
     )

-    bo, bod := prepNewTestBackupOp(t, ctx, mb, baseSel.Selector, opts, version.Backup, counter)
-    defer bod.close(t, ctx)
+    bo, bod := PrepNewTestBackupOp(t, ctx, mb, baseSel.Selector, opts, version.Backup, counter)
+    defer bod.Close(t, ctx)

-    runAndCheckBackup(t, ctx, &bo, mb, false)
+    RunAndCheckBackup(t, ctx, &bo, mb, false)

     rsel, err := baseSel.ToExchangeRestore()
     require.NoError(t, err, clues.ToCore(err))

@@ -1002,8 +1003,8 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
     }

     testCategories = map[path.CategoryType]clientItemPager{
-        path.ContactsCategory: suite.its.ac.Contacts(),
-        path.EmailCategory: suite.its.ac.Mail(),
+        path.ContactsCategory: suite.its.AC.Contacts(),
+        path.EmailCategory: suite.its.AC.Mail(),
         // path.EventsCategory: suite.its.ac.Events(),
     }
 )

@@ -1021,10 +1022,10 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
     restoreCfg.OnCollision = control.Copy

-    ro, _ := prepNewTestRestoreOp(
+    ro, _ := PrepNewTestRestoreOp(
         t,
         ctx,
-        bod.st,
+        bod.St,
         bo.Results.BackupID,
         mb,
         ctr1,

@@ -1032,7 +1033,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
         opts,
         restoreCfg)

-    runAndCheckRestore(t, ctx, &ro, mb, false)
+    RunAndCheckRestore(t, ctx, &ro, mb, false)

     // get all files in folder, use these as the base
     // set of files to compare against.

@@ -1058,7 +1059,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
         })
     }

-    checkRestoreCounts(t, ctr1, 0, 0, countItemsInRestore)
+    CheckRestoreCounts(t, ctr1, 0, 0, countItemsInRestore)
 })

 // Exit the test if the baseline failed as it'll just cause more failures

@@ -1080,10 +1081,10 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
     restoreCfg.OnCollision = control.Skip

-    ro, _ := prepNewTestRestoreOp(
+    ro, _ := PrepNewTestRestoreOp(
         t,
         ctx,
-        bod.st,
+        bod.St,
         bo.Results.BackupID,
         mb,
         ctr2,

@@ -1091,14 +1092,14 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
         opts,
         restoreCfg)

-    deets := runAndCheckRestore(t, ctx, &ro, mb, false)
+    deets := RunAndCheckRestore(t, ctx, &ro, mb, false)

     assert.Zero(
         t,
         len(deets.Entries),
         "no items should have been restored")

-    checkRestoreCounts(t, ctr2, countItemsInRestore, 0, 0)
+    CheckRestoreCounts(t, ctr2, countItemsInRestore, 0, 0)
result := map[string]string{} result := map[string]string{}
@ -1109,7 +1110,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
m := filterCollisionKeyResults( m := FilterCollisionKeyResults(
t, t,
ctx, ctx,
userID, userID,
@ -1141,10 +1142,10 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
restoreCfg.OnCollision = control.Replace restoreCfg.OnCollision = control.Replace
ro, _ := prepNewTestRestoreOp( ro, _ := PrepNewTestRestoreOp(
t, t,
ctx, ctx,
bod.st, bod.St,
bo.Results.BackupID, bo.Results.BackupID,
mb, mb,
ctr3, ctr3,
@ -1152,7 +1153,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
opts, opts,
restoreCfg) restoreCfg)
deets := runAndCheckRestore(t, ctx, &ro, mb, false) deets := RunAndCheckRestore(t, ctx, &ro, mb, false)
filtEnts := []details.Entry{} filtEnts := []details.Entry{}
for _, e := range deets.Entries { for _, e := range deets.Entries {
@ -1163,7 +1164,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
assert.Len(t, filtEnts, countItemsInRestore, "every item should have been replaced") assert.Len(t, filtEnts, countItemsInRestore, "every item should have been replaced")
checkRestoreCounts(t, ctr3, 0, countItemsInRestore, 0) CheckRestoreCounts(t, ctr3, 0, countItemsInRestore, 0)
result := map[string]string{} result := map[string]string{}
@ -1174,7 +1175,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
m := filterCollisionKeyResults( m := FilterCollisionKeyResults(
t, t,
ctx, ctx,
userID, userID,
@ -1211,10 +1212,10 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
restoreCfg.OnCollision = control.Copy restoreCfg.OnCollision = control.Copy
ro, _ := prepNewTestRestoreOp( ro, _ := PrepNewTestRestoreOp(
t, t,
ctx, ctx,
bod.st, bod.St,
bo.Results.BackupID, bo.Results.BackupID,
mb, mb,
ctr4, ctr4,
@ -1222,7 +1223,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
opts, opts,
restoreCfg) restoreCfg)
deets := runAndCheckRestore(t, ctx, &ro, mb, false) deets := RunAndCheckRestore(t, ctx, &ro, mb, false)
filtEnts := []details.Entry{} filtEnts := []details.Entry{}
for _, e := range deets.Entries { for _, e := range deets.Entries {
@ -1233,7 +1234,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
assert.Len(t, filtEnts, countItemsInRestore, "every item should have been copied") assert.Len(t, filtEnts, countItemsInRestore, "every item should have been copied")
checkRestoreCounts(t, ctr4, 0, 0, countItemsInRestore) CheckRestoreCounts(t, ctr4, 0, 0, countItemsInRestore)
result := map[string]string{} result := map[string]string{}
@ -1244,7 +1245,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
m := filterCollisionKeyResults( m := FilterCollisionKeyResults(
t, t,
ctx, ctx,
userID, userID,
@ -1276,7 +1277,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeAlternateP
// a backup is required to run restores // a backup is required to run restores
baseSel := selectors.NewExchangeBackup([]string{suite.its.user.ID}) baseSel := selectors.NewExchangeBackup([]string{suite.its.User.ID})
baseSel.Include( baseSel.Include(
// events cannot be run, for the same reason as incremental backups: the user needs // events cannot be run, for the same reason as incremental backups: the user needs
// to have their account recycled. // to have their account recycled.
@ -1284,7 +1285,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeAlternateP
baseSel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()), baseSel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()),
baseSel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch())) baseSel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch()))
baseSel.DiscreteOwner = suite.its.user.ID baseSel.DiscreteOwner = suite.its.User.ID
var ( var (
mb = evmock.NewBus() mb = evmock.NewBus()
@ -1292,10 +1293,10 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeAlternateP
opts = control.DefaultOptions() opts = control.DefaultOptions()
) )
bo, bod := prepNewTestBackupOp(t, ctx, mb, baseSel.Selector, opts, version.Backup, counter) bo, bod := PrepNewTestBackupOp(t, ctx, mb, baseSel.Selector, opts, version.Backup, counter)
defer bod.close(t, ctx) defer bod.Close(t, ctx)
runAndCheckBackup(t, ctx, &bo, mb, false) RunAndCheckBackup(t, ctx, &bo, mb, false)
rsel, err := baseSel.ToExchangeRestore() rsel, err := baseSel.ToExchangeRestore()
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -1303,11 +1304,11 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeAlternateP
var ( var (
restoreCfg = ctrlTD.DefaultRestoreConfig("exchange_restore_to_user") restoreCfg = ctrlTD.DefaultRestoreConfig("exchange_restore_to_user")
sel = rsel.Selector sel = rsel.Selector
userID = suite.its.user.ID userID = suite.its.User.ID
secondaryUserID = suite.its.secondaryUser.ID secondaryUserID = suite.its.SecondaryUser.ID
uid = userID uid = userID
acCont = suite.its.ac.Contacts() acCont = suite.its.AC.Contacts()
acMail = suite.its.ac.Mail() acMail = suite.its.AC.Mail()
// acEvts = suite.its.ac.Events() // acEvts = suite.its.ac.Events()
firstCtr = count.New() firstCtr = count.New()
) )
@ -1317,10 +1318,10 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeAlternateP
// first restore to the current user // first restore to the current user
ro1, _ := prepNewTestRestoreOp( ro1, _ := PrepNewTestRestoreOp(
t, t,
ctx, ctx,
bod.st, bod.St,
bo.Results.BackupID, bo.Results.BackupID,
mb, mb,
firstCtr, firstCtr,
@ -1328,7 +1329,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeAlternateP
opts, opts,
restoreCfg) restoreCfg)
runAndCheckRestore(t, ctx, &ro1, mb, false) RunAndCheckRestore(t, ctx, &ro1, mb, false)
// get all files in folder, use these as the base // get all files in folder, use these as the base
// set of files to compare against. // set of files to compare against.
@ -1376,10 +1377,10 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeAlternateP
secondCtr := count.New() secondCtr := count.New()
restoreCfg.ProtectedResource = uid restoreCfg.ProtectedResource = uid
ro2, _ := prepNewTestRestoreOp( ro2, _ := PrepNewTestRestoreOp(
t, t,
ctx, ctx,
bod.st, bod.St,
bo.Results.BackupID, bo.Results.BackupID,
mb, mb,
secondCtr, secondCtr,
@ -1387,7 +1388,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeAlternateP
opts, opts,
restoreCfg) restoreCfg)
runAndCheckRestore(t, ctx, &ro2, mb, false) RunAndCheckRestore(t, ctx, &ro2, mb, false)
var ( var (
secondaryItemIDs = map[path.CategoryType]map[string]struct{}{} secondaryItemIDs = map[path.CategoryType]map[string]struct{}{}

View File

@ -1,4 +1,4 @@
package test_test package groups_test
import ( import (
"context" "context"
@ -9,6 +9,7 @@ import (
evmock "github.com/alcionai/corso/src/internal/events/mock" evmock "github.com/alcionai/corso/src/internal/events/mock"
"github.com/alcionai/corso/src/internal/m365/collection/drive" "github.com/alcionai/corso/src/internal/m365/collection/drive"
. "github.com/alcionai/corso/src/internal/operations/test/m365"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig" "github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/internal/version"
@ -24,7 +25,7 @@ import (
type GroupsBackupIntgSuite struct { type GroupsBackupIntgSuite struct {
tester.Suite tester.Suite
its intgTesterSetup its IntgTesterSetup
} }
func TestGroupsBackupIntgSuite(t *testing.T) { func TestGroupsBackupIntgSuite(t *testing.T) {
@ -36,13 +37,117 @@ func TestGroupsBackupIntgSuite(t *testing.T) {
} }
func (suite *GroupsBackupIntgSuite) SetupSuite() { func (suite *GroupsBackupIntgSuite) SetupSuite() {
suite.its = newIntegrationTesterSetup(suite.T()) suite.its = NewIntegrationTesterSetup(suite.T())
} }
// TODO(v0 export): Channels export func (suite *GroupsBackupIntgSuite) TestBackup_Run_groups() {
var (
resourceID = suite.its.Group.ID
sel = selectors.NewGroupsBackup([]string{resourceID})
)
sel.Include(selTD.GroupsBackupLibraryFolderScope(sel))
RunBasicDriveishBackupTests(
suite,
path.GroupsService,
control.DefaultOptions(),
sel.Selector)
}
func (suite *GroupsBackupIntgSuite) TestBackup_Run_incrementalGroups() { func (suite *GroupsBackupIntgSuite) TestBackup_Run_incrementalGroups() {
sel := selectors.NewGroupsRestore([]string{suite.its.group.ID}) runGroupsIncrementalBackupTests(suite, suite.its, control.DefaultOptions())
}
func (suite *GroupsBackupIntgSuite) TestBackup_Run_extensionsGroups() {
var (
resourceID = suite.its.Group.ID
sel = selectors.NewGroupsBackup([]string{resourceID})
)
sel.Include(selTD.GroupsBackupLibraryFolderScope(sel))
RunDriveishBackupWithExtensionsTests(
suite,
path.GroupsService,
control.DefaultOptions(),
sel.Selector)
}
// ---------------------------------------------------------------------------
// test version using the tree-based drive item processor
// ---------------------------------------------------------------------------
type GroupsBackupTreeIntgSuite struct {
tester.Suite
its IntgTesterSetup
}
func TestGroupsBackupTreeIntgSuite(t *testing.T) {
suite.Run(t, &GroupsBackupTreeIntgSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tconfig.M365AcctCredEnvs, storeTD.AWSStorageCredEnvs}),
})
}
func (suite *GroupsBackupTreeIntgSuite) SetupSuite() {
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *GroupsBackupTreeIntgSuite) TestBackup_Run_treeGroups() {
var (
resourceID = suite.its.Group.ID
sel = selectors.NewGroupsBackup([]string{resourceID})
opts = control.DefaultOptions()
)
sel.Include(selTD.GroupsBackupLibraryFolderScope(sel))
opts.ToggleFeatures.UseDeltaTree = true
RunBasicDriveishBackupTests(
suite,
path.GroupsService,
opts,
sel.Selector)
}
func (suite *GroupsBackupTreeIntgSuite) TestBackup_Run_treeIncrementalGroups() {
opts := control.DefaultOptions()
opts.ToggleFeatures.UseDeltaTree = true
runGroupsIncrementalBackupTests(suite, suite.its, opts)
}
func (suite *GroupsBackupTreeIntgSuite) TestBackup_Run_treeExtensionsGroups() {
var (
resourceID = suite.its.Group.ID
sel = selectors.NewGroupsBackup([]string{resourceID})
opts = control.DefaultOptions()
)
sel.Include(selTD.GroupsBackupLibraryFolderScope(sel))
opts.ToggleFeatures.UseDeltaTree = true
RunDriveishBackupWithExtensionsTests(
suite,
path.GroupsService,
opts,
sel.Selector)
}
// ---------------------------------------------------------------------------
// common backup test wrappers
// ---------------------------------------------------------------------------
func runGroupsIncrementalBackupTests(
suite tester.Suite,
its IntgTesterSetup,
opts control.Options,
) {
sel := selectors.NewGroupsRestore([]string{its.Group.ID})
ic := func(cs []string) selectors.Selector { ic := func(cs []string) selectors.Selector {
sel.Include(sel.LibraryFolders(cs, selectors.PrefixMatch())) sel.Include(sel.LibraryFolders(cs, selectors.PrefixMatch()))
@ -53,24 +158,25 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_incrementalGroups() {
t *testing.T, t *testing.T,
ctx context.Context, ctx context.Context,
) string { ) string {
return suite.its.group.RootSite.DriveID return its.Group.RootSite.DriveID
} }
gtsi := func( gtsi := func(
t *testing.T, t *testing.T,
ctx context.Context, ctx context.Context,
) string { ) string {
return suite.its.group.RootSite.ID return its.Group.RootSite.ID
} }
grh := func(ac api.Client) drive.RestoreHandler { grh := func(ac api.Client) drive.RestoreHandler {
return drive.NewSiteRestoreHandler(ac, path.GroupsService) return drive.NewSiteRestoreHandler(ac, path.GroupsService)
} }
runDriveIncrementalTest( RunIncrementalDriveishBackupTest(
suite, suite,
suite.its.group.ID, opts,
suite.its.user.ID, its.Group.ID,
its.User.ID,
path.GroupsService, path.GroupsService,
path.LibrariesCategory, path.LibrariesCategory,
ic, ic,
@ -88,7 +194,7 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groups9VersionBumpBackup() {
var ( var (
mb = evmock.NewBus() mb = evmock.NewBus()
sel = selectors.NewGroupsBackup([]string{suite.its.group.ID}) sel = selectors.NewGroupsBackup([]string{suite.its.Group.ID})
opts = control.DefaultOptions() opts = control.DefaultOptions()
whatSet = deeTD.CategoryFromRepoRef whatSet = deeTD.CategoryFromRepoRef
) )
@ -98,7 +204,7 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groups9VersionBumpBackup() {
selTD.GroupsBackupChannelScope(sel), selTD.GroupsBackupChannelScope(sel),
sel.Conversation(selectors.Any())) sel.Conversation(selectors.Any()))
bo, bod := prepNewTestBackupOp( bo, bod := PrepNewTestBackupOp(
t, t,
ctx, ctx,
mb, mb,
@ -106,41 +212,41 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groups9VersionBumpBackup() {
opts, opts,
version.All8MigrateUserPNToID, version.All8MigrateUserPNToID,
count.New()) count.New())
defer bod.close(t, ctx) defer bod.Close(t, ctx)
runAndCheckBackup(t, ctx, &bo, mb, false) RunAndCheckBackup(t, ctx, &bo, mb, false)
checkBackupIsInManifests( CheckBackupIsInManifests(
t, t,
ctx, ctx,
bod.kw, bod.KW,
bod.sw, bod.SW,
&bo, &bo,
bod.sel, bod.Sel,
bod.sel.ID(), bod.Sel.ID(),
path.ChannelMessagesCategory) path.ChannelMessagesCategory)
_, expectDeets := deeTD.GetDeetsInBackup( _, expectDeets := deeTD.GetDeetsInBackup(
t, t,
ctx, ctx,
bo.Results.BackupID, bo.Results.BackupID,
bod.acct.ID(), bod.Acct.ID(),
bod.sel.ID(), bod.Sel.ID(),
path.GroupsService, path.GroupsService,
whatSet, whatSet,
bod.kms, bod.KMS,
bod.sss) bod.SSS)
deeTD.CheckBackupDetails( deeTD.CheckBackupDetails(
t, t,
ctx, ctx,
bo.Results.BackupID, bo.Results.BackupID,
whatSet, whatSet,
bod.kms, bod.KMS,
bod.sss, bod.SSS,
expectDeets, expectDeets,
false) false)
mb = evmock.NewBus() mb = evmock.NewBus()
forcedFull := newTestBackupOp( forcedFull := NewTestBackupOp(
t, t,
ctx, ctx,
bod, bod,
@ -149,34 +255,34 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groups9VersionBumpBackup() {
count.New()) count.New())
forcedFull.BackupVersion = version.Groups9Update forcedFull.BackupVersion = version.Groups9Update
runAndCheckBackup(t, ctx, &forcedFull, mb, false) RunAndCheckBackup(t, ctx, &forcedFull, mb, false)
checkBackupIsInManifests( CheckBackupIsInManifests(
t, t,
ctx, ctx,
bod.kw, bod.KW,
bod.sw, bod.SW,
&forcedFull, &forcedFull,
bod.sel, bod.Sel,
bod.sel.ID(), bod.Sel.ID(),
path.ChannelMessagesCategory) path.ChannelMessagesCategory)
_, expectDeets = deeTD.GetDeetsInBackup( _, expectDeets = deeTD.GetDeetsInBackup(
t, t,
ctx, ctx,
forcedFull.Results.BackupID, forcedFull.Results.BackupID,
bod.acct.ID(), bod.Acct.ID(),
bod.sel.ID(), bod.Sel.ID(),
path.GroupsService, path.GroupsService,
whatSet, whatSet,
bod.kms, bod.KMS,
bod.sss) bod.SSS)
deeTD.CheckBackupDetails( deeTD.CheckBackupDetails(
t, t,
ctx, ctx,
forcedFull.Results.BackupID, forcedFull.Results.BackupID,
whatSet, whatSet,
bod.kms, bod.KMS,
bod.sss, bod.SSS,
expectDeets, expectDeets,
false) false)
@ -198,7 +304,7 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsBasic() {
var ( var (
mb = evmock.NewBus() mb = evmock.NewBus()
counter = count.New() counter = count.New()
sel = selectors.NewGroupsBackup([]string{suite.its.group.ID}) sel = selectors.NewGroupsBackup([]string{suite.its.Group.ID})
opts = control.DefaultOptions() opts = control.DefaultOptions()
whatSet = deeTD.CategoryFromRepoRef whatSet = deeTD.CategoryFromRepoRef
) )
@ -208,37 +314,37 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsBasic() {
selTD.GroupsBackupChannelScope(sel), selTD.GroupsBackupChannelScope(sel),
sel.Conversation(selectors.Any())) sel.Conversation(selectors.Any()))
bo, bod := prepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter) bo, bod := PrepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter)
defer bod.close(t, ctx) defer bod.Close(t, ctx)
runAndCheckBackup(t, ctx, &bo, mb, false) RunAndCheckBackup(t, ctx, &bo, mb, false)
checkBackupIsInManifests( CheckBackupIsInManifests(
t, t,
ctx, ctx,
bod.kw, bod.KW,
bod.sw, bod.SW,
&bo, &bo,
bod.sel, bod.Sel,
bod.sel.ID(), bod.Sel.ID(),
path.ChannelMessagesCategory) path.ChannelMessagesCategory)
_, expectDeets := deeTD.GetDeetsInBackup( _, expectDeets := deeTD.GetDeetsInBackup(
t, t,
ctx, ctx,
bo.Results.BackupID, bo.Results.BackupID,
bod.acct.ID(), bod.Acct.ID(),
bod.sel.ID(), bod.Sel.ID(),
path.GroupsService, path.GroupsService,
whatSet, whatSet,
bod.kms, bod.KMS,
bod.sss) bod.SSS)
deeTD.CheckBackupDetails( deeTD.CheckBackupDetails(
t, t,
ctx, ctx,
bo.Results.BackupID, bo.Results.BackupID,
whatSet, whatSet,
bod.kms, bod.KMS,
bod.sss, bod.SSS,
expectDeets, expectDeets,
false) false)
} }
@ -252,30 +358,30 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsExtensions() {
var ( var (
mb = evmock.NewBus() mb = evmock.NewBus()
counter = count.New() counter = count.New()
sel = selectors.NewGroupsBackup([]string{suite.its.group.ID}) sel = selectors.NewGroupsBackup([]string{suite.its.Group.ID})
opts = control.DefaultOptions() opts = control.DefaultOptions()
tenID = tconfig.M365TenantID(t) tenID = tconfig.M365TenantID(t)
svc = path.GroupsService svc = path.GroupsService
ws = deeTD.DriveIDFromRepoRef ws = deeTD.DriveIDFromRepoRef
) )
opts.ItemExtensionFactory = getTestExtensionFactories() opts.ItemExtensionFactory = GetTestExtensionFactories()
// does not apply to channel messages // does not apply to channel messages
sel.Include(selTD.GroupsBackupLibraryFolderScope(sel)) sel.Include(selTD.GroupsBackupLibraryFolderScope(sel))
bo, bod := prepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter) bo, bod := PrepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter)
defer bod.close(t, ctx) defer bod.Close(t, ctx)
runAndCheckBackup(t, ctx, &bo, mb, false) RunAndCheckBackup(t, ctx, &bo, mb, false)
checkBackupIsInManifests( CheckBackupIsInManifests(
t, t,
ctx, ctx,
bod.kw, bod.KW,
bod.sw, bod.SW,
&bo, &bo,
bod.sel, bod.Sel,
bod.sel.ID(), bod.Sel.ID(),
path.LibrariesCategory) path.LibrariesCategory)
bID := bo.Results.BackupID bID := bo.Results.BackupID
@ -285,32 +391,32 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsExtensions() {
ctx, ctx,
bID, bID,
tenID, tenID,
bod.sel.ID(), bod.Sel.ID(),
svc, svc,
ws, ws,
bod.kms, bod.KMS,
bod.sss) bod.SSS)
deeTD.CheckBackupDetails( deeTD.CheckBackupDetails(
t, t,
ctx, ctx,
bID, bID,
ws, ws,
bod.kms, bod.KMS,
bod.sss, bod.SSS,
expectDeets, expectDeets,
false) false)
// Check that the extensions are in the backup // Check that the extensions are in the backup
for _, ent := range deets.Entries { for _, ent := range deets.Entries {
if ent.Folder == nil { if ent.Folder == nil {
verifyExtensionData(t, ent.ItemInfo, path.GroupsService) VerifyExtensionData(t, ent.ItemInfo, path.GroupsService)
} }
} }
} }
type GroupsBackupNightlyIntgSuite struct { type GroupsBackupNightlyIntgSuite struct {
tester.Suite tester.Suite
its intgTesterSetup its IntgTesterSetup
} }
func TestGroupsBackupNightlyIntgSuite(t *testing.T) { func TestGroupsBackupNightlyIntgSuite(t *testing.T) {
@ -322,30 +428,30 @@ func TestGroupsBackupNightlyIntgSuite(t *testing.T) {
} }
func (suite *GroupsBackupNightlyIntgSuite) SetupSuite() { func (suite *GroupsBackupNightlyIntgSuite) SetupSuite() {
suite.its = newIntegrationTesterSetup(suite.T()) suite.its = NewIntegrationTesterSetup(suite.T())
} }
func (suite *GroupsBackupNightlyIntgSuite) TestBackup_Run_groupsVersion9MergeBase() { func (suite *GroupsBackupNightlyIntgSuite) TestBackup_Run_groupsVersion9MergeBase() {
sel := selectors.NewGroupsBackup([]string{suite.its.group.ID}) sel := selectors.NewGroupsBackup([]string{suite.its.Group.ID})
sel.Include( sel.Include(
selTD.GroupsBackupLibraryFolderScope(sel), selTD.GroupsBackupLibraryFolderScope(sel),
selTD.GroupsBackupChannelScope(sel)) selTD.GroupsBackupChannelScope(sel))
runMergeBaseGroupsUpdate(suite, sel.Selector, false) RunMergeBaseGroupsUpdate(suite, sel.Selector, false)
} }
func (suite *GroupsBackupNightlyIntgSuite) TestBackup_Run_groupsVersion9AssistBases() { func (suite *GroupsBackupNightlyIntgSuite) TestBackup_Run_groupsVersion9AssistBases() {
sel := selectors.NewGroupsBackup([]string{suite.its.group.ID}) sel := selectors.NewGroupsBackup([]string{suite.its.Group.ID})
sel.Include( sel.Include(
selTD.GroupsBackupLibraryFolderScope(sel), selTD.GroupsBackupLibraryFolderScope(sel),
selTD.GroupsBackupChannelScope(sel)) selTD.GroupsBackupChannelScope(sel))
runDriveAssistBaseGroupsUpdate(suite, sel.Selector, false) RunDriveAssistBaseGroupsUpdate(suite, sel.Selector, false)
} }
type GroupsRestoreNightlyIntgSuite struct { type GroupsRestoreNightlyIntgSuite struct {
tester.Suite tester.Suite
its intgTesterSetup its IntgTesterSetup
} }
func TestGroupsRestoreIntgSuite(t *testing.T) { func TestGroupsRestoreIntgSuite(t *testing.T) {
@ -357,36 +463,20 @@ func TestGroupsRestoreIntgSuite(t *testing.T) {
} }
func (suite *GroupsRestoreNightlyIntgSuite) SetupSuite() { func (suite *GroupsRestoreNightlyIntgSuite) SetupSuite() {
suite.its = newIntegrationTesterSetup(suite.T()) suite.its = NewIntegrationTesterSetup(suite.T())
} }
func (suite *GroupsRestoreNightlyIntgSuite) TestRestore_Run_groupsWithAdvancedOptions() { func (suite *GroupsRestoreNightlyIntgSuite) TestRestore_Run_groupsWithAdvancedOptions() {
sel := selectors.NewGroupsBackup([]string{suite.its.group.ID}) sel := selectors.NewGroupsBackup([]string{suite.its.Group.ID})
sel.Include(selTD.GroupsBackupLibraryFolderScope(sel)) sel.Include(selTD.GroupsBackupLibraryFolderScope(sel))
sel.Filter(sel.Library("documents")) sel.Filter(sel.Library("documents"))
sel.DiscreteOwner = suite.its.group.ID sel.DiscreteOwner = suite.its.Group.ID
runDriveRestoreWithAdvancedOptions( RunDriveRestoreWithAdvancedOptions(
suite.T(), suite.T(),
suite, suite,
suite.its.ac, suite.its.AC,
sel.Selector, sel.Selector,
suite.its.group.RootSite.DriveID, suite.its.Group.RootSite.DriveID,
suite.its.group.RootSite.DriveRootFolderID) suite.its.Group.RootSite.DriveRootFolderID)
} }
// func (suite *GroupsRestoreNightlyIntgSuite) TestRestore_Run_groupsAlternateProtectedResource() {
// sel := selectors.NewGroupsBackup([]string{suite.its.group.ID})
// sel.Include(selTD.GroupsBackupLibraryFolderScope(sel))
// sel.Filter(sel.Library("documents"))
// sel.DiscreteOwner = suite.its.group.ID
// runDriveRestoreToAlternateProtectedResource(
// suite.T(),
// suite,
// suite.its.ac,
// sel.Selector,
// suite.its.group.RootSite,
// suite.its.secondaryGroup.RootSite,
// suite.its.secondaryGroup.ID)
// }

View File

@ -1,4 +1,4 @@
package test_test package m365
import ( import (
"context" "context"
@ -13,28 +13,19 @@ import (
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
strTD "github.com/alcionai/corso/src/internal/common/str/testdata"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/events"
evmock "github.com/alcionai/corso/src/internal/events/mock"
"github.com/alcionai/corso/src/internal/kopia" "github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/m365" "github.com/alcionai/corso/src/internal/m365"
exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock" exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts" odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/internal/model" "github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/operations/inject" "github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/streamstore"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig" "github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup" "github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
deeTD "github.com/alcionai/corso/src/pkg/backup/details/testdata"
"github.com/alcionai/corso/src/pkg/backup/identity"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/repository"
"github.com/alcionai/corso/src/pkg/count" "github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/dttm" "github.com/alcionai/corso/src/pkg/dttm"
"github.com/alcionai/corso/src/pkg/extensions" "github.com/alcionai/corso/src/pkg/extensions"
@ -44,9 +35,6 @@ import (
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph" "github.com/alcionai/corso/src/pkg/services/m365/api/graph"
gmock "github.com/alcionai/corso/src/pkg/services/m365/api/graph/mock" gmock "github.com/alcionai/corso/src/pkg/services/m365/api/graph/mock"
"github.com/alcionai/corso/src/pkg/storage"
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
"github.com/alcionai/corso/src/pkg/store"
) )
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -55,7 +43,7 @@ import (
// GockClient produces a new exchange api client that can be // GockClient produces a new exchange api client that can be
// mocked using gock. // mocked using gock.
func gockClient(creds account.M365Config, counter *count.Bus) (api.Client, error) { func GockClient(creds account.M365Config, counter *count.Bus) (api.Client, error) {
s, err := gmock.NewService(creds, counter) s, err := gmock.NewService(creds, counter)
if err != nil { if err != nil {
return api.Client{}, err return api.Client{}, err
@ -76,231 +64,9 @@ func gockClient(creds account.M365Config, counter *count.Bus) (api.Client, error
// Does not use the tester.DefaultTestRestoreDestination syntax as some of these // Does not use the tester.DefaultTestRestoreDestination syntax as some of these
// items are created directly, not as a result of restoration, and we want to ensure // items are created directly, not as a result of restoration, and we want to ensure
// they get clearly selected without accidental overlap. // they get clearly selected without accidental overlap.
const incrementalsDestContainerPrefix = "incrementals_ci_" const IncrementalsDestContainerPrefix = "incrementals_ci_"
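As a hedged illustration only: the prefix is meant to be joined with a per-run uniquifier when a destination container is created directly (the dttm.Now() suffix here is an assumption, not part of this change):

	// destFldr names a container created directly for incremental tests;
	// dttm.Now() is assumed to return a string timestamp usable as a suffix.
	destFldr := IncrementalsDestContainerPrefix + dttm.Now()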
type backupOpDependencies struct { func CheckMetadataFilesExist(
acct account.Account
ctrl *m365.Controller
kms *kopia.ModelStore
kw *kopia.Wrapper
sel selectors.Selector
sss streamstore.Streamer
st storage.Storage
sw store.BackupStorer
closer func()
}
func (bod *backupOpDependencies) close(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
) {
bod.closer()
if bod.kw != nil {
err := bod.kw.Close(ctx)
assert.NoErrorf(t, err, "kw close: %+v", clues.ToCore(err))
}
if bod.kms != nil {
err := bod.kms.Close(ctx)
assert.NoErrorf(t, err, "kms close: %+v", clues.ToCore(err))
}
}
// prepNewTestBackupOp generates all clients required to run a backup operation,
// returning both a backup operation created with those clients, as well as
// the clients themselves.
func prepNewTestBackupOp(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
bus events.Eventer,
sel selectors.Selector,
opts control.Options,
backupVersion int,
counter *count.Bus,
) (
operations.BackupOperation,
*backupOpDependencies,
) {
bod := &backupOpDependencies{
acct: tconfig.NewM365Account(t),
st: storeTD.NewPrefixedS3Storage(t),
}
repoNameHash := strTD.NewHashForRepoConfigName()
k := kopia.NewConn(bod.st)
err := k.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
require.NoError(t, err, clues.ToCore(err))
defer func() {
if err != nil {
bod.close(t, ctx)
t.FailNow()
}
}()
// kopiaRef comes with a count of 1 and Wrapper bumps it again
// so we're safe to close here.
bod.closer = func() {
err := k.Close(ctx)
assert.NoErrorf(t, err, "k close: %+v", clues.ToCore(err))
}
bod.kw, err = kopia.NewWrapper(k)
if !assert.NoError(t, err, clues.ToCore(err)) {
return operations.BackupOperation{}, nil
}
bod.kms, err = kopia.NewModelStore(k)
if !assert.NoError(t, err, clues.ToCore(err)) {
return operations.BackupOperation{}, nil
}
bod.sw = store.NewWrapper(bod.kms)
bod.ctrl, bod.sel = ControllerWithSelector(
t,
ctx,
bod.acct,
sel,
nil,
bod.close,
counter)
bo := newTestBackupOp(
t,
ctx,
bod,
bus,
opts,
counter)
bo.BackupVersion = backupVersion
bod.sss = streamstore.NewStreamer(
bod.kw,
bod.acct.ID(),
bod.sel.PathService())
return bo, bod
}
// newTestBackupOp accepts the clients required to compose a backup operation, plus
// any other metadata, and uses them to generate a new backup operation. This
// allows backup chains to utilize the same temp directory and configuration
// details.
func newTestBackupOp(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
bod *backupOpDependencies,
bus events.Eventer,
opts control.Options,
counter *count.Bus,
) operations.BackupOperation {
bod.ctrl.IDNameLookup = idname.NewCache(map[string]string{bod.sel.ID(): bod.sel.Name()})
bo, err := operations.NewBackupOperation(
ctx,
opts,
bod.kw,
bod.sw,
bod.ctrl,
bod.acct,
bod.sel,
bod.sel,
bus,
counter)
if !assert.NoError(t, err, clues.ToCore(err)) {
bod.close(t, ctx)
t.FailNow()
}
return bo
}
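A minimal sketch of the chaining idiom these two helpers enable, using the exported m365 names and assuming mb, sel, and opts are built as in the surrounding tests:

	// the first op constructs the shared dependencies; bod owns the repo handles.
	bo, bod := PrepNewTestBackupOp(t, ctx, mb, sel, opts, version.Backup, count.New())
	defer bod.Close(t, ctx)

	RunAndCheckBackup(t, ctx, &bo, mb, false)

	// a follow-up op reuses bod, so both backups share one kopia repo,
	// model store, and controller.
	incMB := evmock.NewBus()
	incBO := NewTestBackupOp(t, ctx, bod, incMB, opts, count.New())
	RunAndCheckBackup(t, ctx, &incBO, incMB, true)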
func runAndCheckBackup(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
bo *operations.BackupOperation,
mb *evmock.Bus,
acceptNoData bool,
) {
err := bo.Run(ctx)
if !assert.NoError(t, err, clues.ToCore(err)) {
for i, err := range bo.Errors.Recovered() {
t.Logf("recoverable err %d, %+v", i, err)
}
assert.Fail(t, "not allowed to error")
}
require.NotEmpty(t, bo.Results, "the backup had non-zero results")
require.NotEmpty(t, bo.Results.BackupID, "the backup generated an ID")
expectStatus := []operations.OpStatus{operations.Completed}
if acceptNoData {
expectStatus = append(expectStatus, operations.NoData)
}
require.Contains(
t,
expectStatus,
bo.Status,
"backup doesn't match expectation, wanted any of %v, got %s",
expectStatus,
bo.Status)
require.NotZero(t, bo.Results.ItemsWritten)
assert.NotZero(t, bo.Results.ItemsRead, "count of items read")
assert.NotZero(t, bo.Results.BytesRead, "bytes read")
assert.NotZero(t, bo.Results.BytesUploaded, "bytes uploaded")
assert.Equal(t, 1, bo.Results.ResourceOwners, "count of resource owners")
assert.NoError(t, bo.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(bo.Errors.Failure()))
assert.Empty(t, bo.Errors.Recovered(), "incremental recoverable/iteration errors")
assert.Equal(t, 1, mb.TimesCalled[events.BackupEnd], "backup-end events")
}
func checkBackupIsInManifests(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
kw *kopia.Wrapper,
sw store.BackupStorer,
bo *operations.BackupOperation,
sel selectors.Selector,
resourceOwner string,
categories ...path.CategoryType,
) {
for _, category := range categories {
t.Run(category.String(), func(t *testing.T) {
var (
r = identity.NewReason("", resourceOwner, sel.PathService(), category)
tags = map[string]string{kopia.TagBackupCategory: ""}
found bool
)
bf, err := kw.NewBaseFinder(sw)
require.NoError(t, err, clues.ToCore(err))
mans := bf.FindBases(ctx, []identity.Reasoner{r}, tags)
for _, man := range mans.MergeBases() {
bID, ok := man.GetSnapshotTag(kopia.TagBackupID)
if !assert.Truef(t, ok, "snapshot manifest %s missing backup ID tag", man.ItemDataSnapshot.ID) {
continue
}
if bID == string(bo.Results.BackupID) {
found = true
break
}
}
assert.True(t, found, "backup retrieved by previous snapshot manifest")
})
}
}
func checkMetadataFilesExist(
t *testing.T, t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument ctx context.Context, //revive:disable-line:context-as-argument
backupID model.StableID, backupID model.StableID,
@ -376,147 +142,6 @@ func checkMetadataFilesExist(
} }
} }
func runMergeBaseGroupsUpdate(
suite tester.Suite,
sel selectors.Selector,
expectCached bool,
) {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var (
mb = evmock.NewBus()
opts = control.DefaultOptions()
whatSet = deeTD.CategoryFromRepoRef
)
// Need outside the inner test case so bod lasts for the entire test.
bo, bod := prepNewTestBackupOp(
t,
ctx,
mb,
sel,
opts,
version.All8MigrateUserPNToID,
count.New())
defer bod.close(t, ctx)
suite.Run("makeMergeBackup", func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
runAndCheckBackup(t, ctx, &bo, mb, false)
reasons, err := bod.sel.Reasons(bod.acct.ID(), false)
require.NoError(t, err, clues.ToCore(err))
for _, reason := range reasons {
checkBackupIsInManifests(
t,
ctx,
bod.kw,
bod.sw,
&bo,
bod.sel,
bod.sel.ID(),
reason.Category())
}
_, expectDeets := deeTD.GetDeetsInBackup(
t,
ctx,
bo.Results.BackupID,
bod.acct.ID(),
bod.sel.ID(),
bod.sel.PathService(),
whatSet,
bod.kms,
bod.sss)
deeTD.CheckBackupDetails(
t,
ctx,
bo.Results.BackupID,
whatSet,
bod.kms,
bod.sss,
expectDeets,
false)
})
suite.Run("makeIncrementalBackup", func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var (
mb = evmock.NewBus()
opts = control.DefaultOptions()
)
forcedFull := newTestBackupOp(
t,
ctx,
bod,
mb,
opts,
count.New())
forcedFull.BackupVersion = version.Groups9Update
runAndCheckBackup(t, ctx, &forcedFull, mb, false)
reasons, err := bod.sel.Reasons(bod.acct.ID(), false)
require.NoError(t, err, clues.ToCore(err))
for _, reason := range reasons {
checkBackupIsInManifests(
t,
ctx,
bod.kw,
bod.sw,
&forcedFull,
bod.sel,
bod.sel.ID(),
reason.Category())
}
_, expectDeets := deeTD.GetDeetsInBackup(
t,
ctx,
forcedFull.Results.BackupID,
bod.acct.ID(),
bod.sel.ID(),
bod.sel.PathService(),
whatSet,
bod.kms,
bod.sss)
deeTD.CheckBackupDetails(
t,
ctx,
forcedFull.Results.BackupID,
whatSet,
bod.kms,
bod.sss,
expectDeets,
false)
check := assert.Zero
if expectCached {
check = assert.NotZero
}
check(
t,
forcedFull.Results.Counts[string(count.PersistedCachedFiles)],
"cached items")
})
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Incremental Item Generators // Incremental Item Generators
// TODO: this is ripped from factory.go, which is ripped from other tests. // TODO: this is ripped from factory.go, which is ripped from other tests.
@ -527,9 +152,9 @@ func runMergeBaseGroupsUpdate(
// the params here are what generateContainerOfItems passes into the func. // the params here are what generateContainerOfItems passes into the func.
// the callback provider can use them, or not, as wanted. // the callback provider can use them, or not, as wanted.
type dataBuilderFunc func(id, timeStamp, subject, body string) []byte type DataBuilderFunc func(id, timeStamp, subject, body string) []byte
func generateContainerOfItems( func GenerateContainerOfItems(
t *testing.T, t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument ctx context.Context, //revive:disable-line:context-as-argument
ctrl *m365.Controller, ctrl *m365.Controller,
@ -539,16 +164,16 @@ func generateContainerOfItems(
tenantID, resourceOwner, siteID, driveID, destFldr string, tenantID, resourceOwner, siteID, driveID, destFldr string,
howManyItems int, howManyItems int,
backupVersion int, backupVersion int,
dbf dataBuilderFunc, dbf DataBuilderFunc,
) *details.Details { ) *details.Details {
t.Helper() t.Helper()
items := make([]incrementalItem, 0, howManyItems) items := make([]IncrementalItem, 0, howManyItems)
for i := 0; i < howManyItems; i++ { for i := 0; i < howManyItems; i++ {
id, d := generateItemData(t, cat, resourceOwner, dbf) id, d := GenerateItemData(t, cat, resourceOwner, dbf)
items = append(items, incrementalItem{ items = append(items, IncrementalItem{
name: id, name: id,
data: d, data: d,
}) })
@ -563,7 +188,7 @@ func generateContainerOfItems(
pathFolders = []string{odConsts.SitesPathDir, siteID, odConsts.DrivesPathDir, driveID, odConsts.RootPathDir, destFldr} pathFolders = []string{odConsts.SitesPathDir, siteID, odConsts.DrivesPathDir, driveID, odConsts.RootPathDir, destFldr}
} }
collections := []incrementalCollection{{ collections := []IncrementalCollection{{
pathFolders: pathFolders, pathFolders: pathFolders,
category: cat, category: cat,
items: items, items: items,
@ -573,7 +198,7 @@ func generateContainerOfItems(
restoreCfg.Location = destFldr restoreCfg.Location = destFldr
restoreCfg.IncludePermissions = true restoreCfg.IncludePermissions = true
dataColls := buildCollections( dataColls := BuildCollections(
t, t,
service, service,
tenantID, resourceOwner, tenantID, resourceOwner,
@ -604,11 +229,11 @@ func generateContainerOfItems(
return deets return deets
} }
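GenerateContainerOfItems leaves the item payload entirely to the injected DataBuilderFunc; a minimal sketch of one such builder follows (the pipe-delimited payload is a placeholder, real callers use service-specific mock factories such as exchMock):

	// assumes "fmt" is imported; emits a stand-in payload that simply
	// round-trips the generated identifiers for later inspection.
	dbf := func(id, timeStamp, subject, body string) []byte {
		return []byte(fmt.Sprintf("%s|%s|%s|%s", id, timeStamp, subject, body))
	}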
func generateItemData( func GenerateItemData(
t *testing.T, t *testing.T,
category path.CategoryType, category path.CategoryType,
resourceOwner string, resourceOwner string,
dbf dataBuilderFunc, dbf DataBuilderFunc,
) (string, []byte) { ) (string, []byte) {
var ( var (
now = dttm.Now() now = dttm.Now()
@ -621,30 +246,30 @@ func generateItemData(
return id, dbf(id, nowLegacy, subject, body) return id, dbf(id, nowLegacy, subject, body)
} }
type incrementalItem struct { type IncrementalItem struct {
name string name string
data []byte data []byte
} }
type incrementalCollection struct { type IncrementalCollection struct {
pathFolders []string pathFolders []string
category path.CategoryType category path.CategoryType
items []incrementalItem items []IncrementalItem
} }
func buildCollections( func BuildCollections(
t *testing.T, t *testing.T,
service path.ServiceType, service path.ServiceType,
tenant, user string, tenant, user string,
restoreCfg control.RestoreConfig, restoreCfg control.RestoreConfig,
colls []incrementalCollection, colls []IncrementalCollection,
) []data.RestoreCollection { ) []data.RestoreCollection {
t.Helper() t.Helper()
collections := make([]data.RestoreCollection, 0, len(colls)) collections := make([]data.RestoreCollection, 0, len(colls))
for _, c := range colls { for _, c := range colls {
pth := toDataLayerPath( pth := ToDataLayerPath(
t, t,
service, service,
tenant, tenant,
@ -666,7 +291,7 @@ func buildCollections(
return collections return collections
} }
func toDataLayerPath( func ToDataLayerPath(
t *testing.T, t *testing.T,
service path.ServiceType, service path.ServiceType,
tenant, resourceOwner string, tenant, resourceOwner string,
@ -746,30 +371,30 @@ func ControllerWithSelector(
// Suite Setup // Suite Setup
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
type ids struct { type IDs struct {
ID string ID string
DriveID string DriveID string
DriveRootFolderID string DriveRootFolderID string
} }
type gids struct { type GIDs struct {
ID string ID string
RootSite ids RootSite IDs
} }
type intgTesterSetup struct { type IntgTesterSetup struct {
ac api.Client AC api.Client
gockAC api.Client GockAC api.Client
user ids User IDs
secondaryUser ids SecondaryUser IDs
site ids Site IDs
secondarySite ids SecondarySite IDs
group gids Group GIDs
secondaryGroup gids SecondaryGroup GIDs
} }
func newIntegrationTesterSetup(t *testing.T) intgTesterSetup { func NewIntegrationTesterSetup(t *testing.T) IntgTesterSetup {
its := intgTesterSetup{} its := IntgTesterSetup{}
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
@ -782,32 +407,32 @@ func newIntegrationTesterSetup(t *testing.T) intgTesterSetup {
counter := count.New() counter := count.New()
its.ac, err = api.NewClient( its.AC, err = api.NewClient(
creds, creds,
control.DefaultOptions(), control.DefaultOptions(),
counter) counter)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
its.gockAC, err = gockClient(creds, counter) its.GockAC, err = GockClient(creds, counter)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
its.user = userIDs(t, tconfig.M365UserID(t), its.ac) its.User = userIDs(t, tconfig.M365UserID(t), its.AC)
its.secondaryUser = userIDs(t, tconfig.SecondaryM365UserID(t), its.ac) its.SecondaryUser = userIDs(t, tconfig.SecondaryM365UserID(t), its.AC)
its.site = siteIDs(t, tconfig.M365SiteID(t), its.ac) its.Site = siteIDs(t, tconfig.M365SiteID(t), its.AC)
its.secondarySite = siteIDs(t, tconfig.SecondaryM365SiteID(t), its.ac) its.SecondarySite = siteIDs(t, tconfig.SecondaryM365SiteID(t), its.AC)
// teamID is used here intentionally. We want the group // teamID is used here intentionally. We want the group
// to have access to teams data // to have access to teams data
its.group = groupIDs(t, tconfig.M365TeamID(t), its.ac) its.Group = groupIDs(t, tconfig.M365TeamID(t), its.AC)
its.secondaryGroup = groupIDs(t, tconfig.SecondaryM365TeamID(t), its.ac) its.SecondaryGroup = groupIDs(t, tconfig.SecondaryM365TeamID(t), its.AC)
return its return its
} }
func userIDs(t *testing.T, id string, ac api.Client) ids { func userIDs(t *testing.T, id string, ac api.Client) IDs {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
r := ids{ID: id} r := IDs{ID: id}
drive, err := ac.Users().GetDefaultDrive(ctx, id) drive, err := ac.Users().GetDefaultDrive(ctx, id)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -822,11 +447,11 @@ func userIDs(t *testing.T, id string, ac api.Client) ids {
return r return r
} }
func siteIDs(t *testing.T, id string, ac api.Client) ids { func siteIDs(t *testing.T, id string, ac api.Client) IDs {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
r := ids{ID: id} r := IDs{ID: id}
drive, err := ac.Sites().GetDefaultDrive(ctx, id) drive, err := ac.Sites().GetDefaultDrive(ctx, id)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -841,11 +466,11 @@ func siteIDs(t *testing.T, id string, ac api.Client) ids {
return r return r
} }
func groupIDs(t *testing.T, id string, ac api.Client) gids { func groupIDs(t *testing.T, id string, ac api.Client) GIDs {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
r := gids{ID: id} r := GIDs{ID: id}
site, err := ac.Groups().GetRootSite(ctx, id) site, err := ac.Groups().GetRootSite(ctx, id)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -865,13 +490,13 @@ func groupIDs(t *testing.T, id string, ac api.Client) gids {
return r return r
} }
func getTestExtensionFactories() []extensions.CreateItemExtensioner { func GetTestExtensionFactories() []extensions.CreateItemExtensioner {
return []extensions.CreateItemExtensioner{ return []extensions.CreateItemExtensioner{
&extensions.MockItemExtensionFactory{}, &extensions.MockItemExtensionFactory{},
} }
} }
func verifyExtensionData( func VerifyExtensionData(
t *testing.T, t *testing.T,
itemInfo details.ItemInfo, itemInfo details.ItemInfo,
p path.ServiceType, p path.ServiceType,

View File

@ -0,0 +1,416 @@
package onedrive_test
import (
"context"
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/events"
evmock "github.com/alcionai/corso/src/internal/events/mock"
"github.com/alcionai/corso/src/internal/m365"
"github.com/alcionai/corso/src/internal/m365/collection/drive"
"github.com/alcionai/corso/src/internal/model"
. "github.com/alcionai/corso/src/internal/operations/test/m365"
"github.com/alcionai/corso/src/internal/streamstore"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details"
bupMD "github.com/alcionai/corso/src/pkg/backup/metadata"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
)
type OneDriveBackupIntgSuite struct {
tester.Suite
its IntgTesterSetup
}
func TestOneDriveBackupIntgSuite(t *testing.T) {
suite.Run(t, &OneDriveBackupIntgSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tconfig.M365AcctCredEnvs, storeTD.AWSStorageCredEnvs}),
})
}
func (suite *OneDriveBackupIntgSuite) SetupSuite() {
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *OneDriveBackupIntgSuite) TestBackup_Run_oneDrive() {
var (
resourceID = suite.its.SecondaryUser.ID
sel = selectors.NewOneDriveBackup([]string{resourceID})
)
sel.Include(selTD.OneDriveBackupFolderScope(sel))
RunBasicDriveishBackupTests(
suite,
path.OneDriveService,
control.DefaultOptions(),
sel.Selector)
}
func (suite *OneDriveBackupIntgSuite) TestBackup_Run_incrementalOneDrive() {
runOneDriveIncrementalBackupTests(suite, suite.its, control.DefaultOptions())
}
func (suite *OneDriveBackupIntgSuite) TestBackup_Run_extensionsOneDrive() {
var (
resourceID = suite.its.SecondaryUser.ID
sel = selectors.NewOneDriveBackup([]string{resourceID})
)
sel.Include(selTD.OneDriveBackupFolderScope(sel))
RunDriveishBackupWithExtensionsTests(
suite,
path.OneDriveService,
control.DefaultOptions(),
sel.Selector)
}
// ---------------------------------------------------------------------------
// test version using the tree-based drive item processor
// ---------------------------------------------------------------------------
type OneDriveBackupTreeIntgSuite struct {
tester.Suite
its IntgTesterSetup
}
func TestOneDriveBackupTreeIntgSuite(t *testing.T) {
suite.Run(t, &OneDriveBackupTreeIntgSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tconfig.M365AcctCredEnvs, storeTD.AWSStorageCredEnvs}),
})
}
func (suite *OneDriveBackupTreeIntgSuite) SetupSuite() {
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *OneDriveBackupTreeIntgSuite) TestBackup_Run_treeOneDrive() {
var (
resourceID = suite.its.SecondaryUser.ID
sel = selectors.NewOneDriveBackup([]string{resourceID})
opts = control.DefaultOptions()
)
sel.Include(selTD.OneDriveBackupFolderScope(sel))
opts.ToggleFeatures.UseDeltaTree = true
RunBasicDriveishBackupTests(
suite,
path.OneDriveService,
opts,
sel.Selector)
}
func (suite *OneDriveBackupTreeIntgSuite) TestBackup_Run_treeIncrementalOneDrive() {
opts := control.DefaultOptions()
opts.ToggleFeatures.UseDeltaTree = true
runOneDriveIncrementalBackupTests(suite, suite.its, opts)
}
func (suite *OneDriveBackupTreeIntgSuite) TestBackup_Run_treeExtensionsOneDrive() {
var (
resourceID = suite.its.SecondaryUser.ID
sel = selectors.NewOneDriveBackup([]string{resourceID})
opts = control.DefaultOptions()
)
sel.Include(selTD.OneDriveBackupFolderScope(sel))
opts.ToggleFeatures.UseDeltaTree = true
RunDriveishBackupWithExtensionsTests(
suite,
path.OneDriveService,
opts,
sel.Selector)
}
// ---------------------------------------------------------------------------
// common backup test wrappers
// ---------------------------------------------------------------------------
func runOneDriveIncrementalBackupTests(
suite tester.Suite,
its IntgTesterSetup,
opts control.Options,
) {
sel := selectors.NewOneDriveRestore([]string{its.User.ID})
ic := func(cs []string) selectors.Selector {
sel.Include(sel.Folders(cs, selectors.PrefixMatch()))
return sel.Selector
}
gtdi := func(
t *testing.T,
ctx context.Context,
) string {
d, err := its.AC.Users().GetDefaultDrive(ctx, its.User.ID)
if err != nil {
err = graph.Wrap(ctx, err, "retrieving default user drive").
With("user", its.User.ID)
}
require.NoError(t, err, clues.ToCore(err))
id := ptr.Val(d.GetId())
require.NotEmpty(t, id, "drive ID")
return id
}
grh := func(ac api.Client) drive.RestoreHandler {
return drive.NewUserDriveRestoreHandler(ac)
}
RunIncrementalDriveishBackupTest(
suite,
opts,
its.User.ID,
its.User.ID,
path.OneDriveService,
path.FilesCategory,
ic,
gtdi,
nil,
grh,
false)
}
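This wrapper and runGroupsIncrementalBackupTests feed the same RunIncrementalDriveishBackupTest and differ only in the injected closures. A hedged sketch of what a third service would supply; the SharePoint selector constructor is an assumption, not part of this change:

	// selector builder: scopes the restore selector to the given containers.
	ic := func(cs []string) selectors.Selector {
		sel := selectors.NewSharePointRestore([]string{siteID}) // assumed constructor
		sel.Include(sel.LibraryFolders(cs, selectors.PrefixMatch()))
		return sel.Selector
	}

	// restore handler builder: selects the drive implementation for the service.
	grh := func(ac api.Client) drive.RestoreHandler {
		return drive.NewSiteRestoreHandler(ac, path.SharePointService)
	}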
// ---------------------------------------------------------------------------
// other drive tests
// ---------------------------------------------------------------------------
func (suite *OneDriveBackupIntgSuite) TestBackup_Run_oneDriveOwnerMigration() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var (
acct = tconfig.NewM365Account(t)
opts = control.DefaultOptions()
mb = evmock.NewBus()
counter = count.New()
categories = map[path.CategoryType][][]string{
path.FilesCategory: {{bupMD.DeltaURLsFileName}, {bupMD.PreviousPathFileName}},
}
)
creds, err := acct.M365Config()
require.NoError(t, err, clues.ToCore(err))
ctrl, err := m365.NewController(
ctx,
acct,
path.OneDriveService,
control.DefaultOptions(),
counter)
require.NoError(t, err, clues.ToCore(err))
userable, err := ctrl.AC.Users().GetByID(
ctx,
suite.its.User.ID,
api.CallConfig{})
require.NoError(t, err, clues.ToCore(err))
uid := ptr.Val(userable.GetId())
uname := ptr.Val(userable.GetUserPrincipalName())
oldsel := selectors.NewOneDriveBackup([]string{uname})
oldsel.Include(selTD.OneDriveBackupFolderScope(oldsel))
bo, bod := PrepNewTestBackupOp(t, ctx, mb, oldsel.Selector, opts, 0, counter)
defer bod.Close(t, ctx)
sel := bod.Sel
// ensure the initial owner uses the name in both cases
bo.ResourceOwner = sel.SetDiscreteOwnerIDName(uname, uname)
// required, otherwise we don't run the migration
bo.BackupVersion = version.All8MigrateUserPNToID - 1
require.Equalf(
t,
bo.ResourceOwner.Name(),
bo.ResourceOwner.ID(),
"historical representation of user id [%s] should match pn [%s]",
bo.ResourceOwner.ID(),
bo.ResourceOwner.Name())
// run the initial backup
RunAndCheckBackup(t, ctx, &bo, mb, false)
newsel := selectors.NewOneDriveBackup([]string{uid})
newsel.Include(selTD.OneDriveBackupFolderScope(newsel))
sel = newsel.SetDiscreteOwnerIDName(uid, uname)
var (
incMB = evmock.NewBus()
// the incremental backup op should use the proper user ID as its id.
incBO = NewTestBackupOp(t, ctx, bod, incMB, opts, counter)
)
require.NotEqualf(
t,
incBO.ResourceOwner.Name(),
incBO.ResourceOwner.ID(),
"current representation of user: id [%s] should differ from PN [%s]",
incBO.ResourceOwner.ID(),
incBO.ResourceOwner.Name())
err = incBO.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
CheckBackupIsInManifests(
t,
ctx,
bod.KW,
bod.SW,
&incBO,
sel,
uid,
maps.Keys(categories)...)
CheckMetadataFilesExist(
t,
ctx,
incBO.Results.BackupID,
bod.KW,
bod.KMS,
creds.AzureTenantID,
uid,
path.OneDriveService,
categories)
// 2 on reads/writes to account for the two metadata files: 1 delta url and 1 previous path.
assert.LessOrEqual(t, 2, incBO.Results.ItemsWritten, "items written")
assert.LessOrEqual(t, 1, incBO.Results.NonMetaItemsWritten, "non meta items written")
assert.LessOrEqual(t, 2, incBO.Results.ItemsRead, "items read")
assert.NoError(t, incBO.Errors.Failure(), "non-recoverable error", clues.ToCore(incBO.Errors.Failure()))
assert.Empty(t, incBO.Errors.Recovered(), "recoverable/iteration errors")
assert.Equal(t, 1, incMB.TimesCalled[events.BackupEnd], "backup-end events")
bid := incBO.Results.BackupID
bup := &backup.Backup{}
err = bod.KMS.Get(ctx, model.BackupSchema, bid, bup)
require.NoError(t, err, clues.ToCore(err))
var (
ssid = bup.StreamStoreID
deets details.Details
ss = streamstore.NewStreamer(bod.KW, creds.AzureTenantID, path.OneDriveService)
)
err = ss.Read(ctx, ssid, streamstore.DetailsReader(details.UnmarshalTo(&deets)), fault.New(true))
require.NoError(t, err, clues.ToCore(err))
for _, ent := range deets.Entries {
// 46 is the 36-char tenant uuid + "onedrive" + two slashes
if len(ent.RepoRef) > 46 {
assert.Contains(t, ent.RepoRef, uid)
}
}
}
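The 46-character floor in the loop above is plain prefix arithmetic; a quick worked check (the zeroed tenant ID is illustrative only):

	// 36-char tenant uuid + "/" + "onedrive" + "/" == 46 characters
	prefix := "00000000-0000-0000-0000-000000000000" + "/onedrive/"
	fmt.Println(len(prefix)) // prints 46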
type OneDriveBackupNightlyIntgSuite struct {
tester.Suite
its IntgTesterSetup
}
func TestOneDriveBackupNightlyIntgSuite(t *testing.T) {
suite.Run(t, &OneDriveBackupNightlyIntgSuite{
Suite: tester.NewNightlySuite(
t,
[][]string{tconfig.M365AcctCredEnvs, storeTD.AWSStorageCredEnvs}),
})
}
func (suite *OneDriveBackupNightlyIntgSuite) SetupSuite() {
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *OneDriveBackupNightlyIntgSuite) TestBackup_Run_oneDriveVersion9MergeBase() {
sel := selectors.NewOneDriveBackup([]string{suite.its.User.ID})
sel.Include(selTD.OneDriveBackupFolderScope(sel))
RunMergeBaseGroupsUpdate(suite, sel.Selector, true)
}
//func (suite *OneDriveBackupNightlyIntgSuite) TestBackup_Run_oneDriveVersion9AssistBases() {
// sel := selectors.NewOneDriveBackup([]string{tconfig.SecondaryM365UserID(suite.T())})
// sel.Include(selTD.OneDriveBackupFolderScope(sel))
//
// runDriveAssistBaseGroupsUpdate(suite, sel.Selector, true)
//}
type OneDriveRestoreNightlyIntgSuite struct {
tester.Suite
its IntgTesterSetup
}
func TestOneDriveRestoreIntgSuite(t *testing.T) {
suite.Run(t, &OneDriveRestoreNightlyIntgSuite{
Suite: tester.NewNightlySuite(
t,
[][]string{tconfig.M365AcctCredEnvs, storeTD.AWSStorageCredEnvs}),
})
}
func (suite *OneDriveRestoreNightlyIntgSuite) SetupSuite() {
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *OneDriveRestoreNightlyIntgSuite) TestRestore_Run_onedriveWithAdvancedOptions() {
sel := selectors.NewOneDriveBackup([]string{suite.its.User.ID})
sel.Include(selTD.OneDriveBackupFolderScope(sel))
sel.DiscreteOwner = suite.its.User.ID
RunDriveRestoreWithAdvancedOptions(
suite.T(),
suite,
suite.its.AC,
sel.Selector,
suite.its.User.DriveID,
suite.its.User.DriveRootFolderID)
}
func (suite *OneDriveRestoreNightlyIntgSuite) TestRestore_Run_onedriveAlternateProtectedResource() {
sel := selectors.NewOneDriveBackup([]string{suite.its.User.ID})
sel.Include(selTD.OneDriveBackupFolderScope(sel))
sel.DiscreteOwner = suite.its.User.ID
RunDriveRestoreToAlternateProtectedResource(
suite.T(),
suite,
suite.its.AC,
sel.Selector,
suite.its.User,
suite.its.SecondaryUser,
suite.its.SecondaryUser.ID)
}

View File

@@ -1,4 +1,4 @@
-package test_test
+package m365

import (
"context"
@@ -28,7 +28,7 @@ import (
"github.com/alcionai/corso/src/pkg/store"
)

-type restoreOpDependencies struct {
+type RestoreOpDependencies struct {
acct account.Account
ctrl *m365.Controller
kms *kopia.ModelStore
@@ -41,7 +41,7 @@ type restoreOpDependencies struct {
closer func()
}

-func (rod *restoreOpDependencies) close(
+func (rod *RestoreOpDependencies) Close(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
) {
@@ -60,10 +60,10 @@ func (rod *restoreOpDependencies) close(
}
}

-// prepNewTestRestoreOp generates all clients required to run a restore operation,
+// PrepNewTestRestoreOp generates all clients required to run a restore operation,
// returning both a restore operation created with those clients, as well as
// the clients themselves.
-func prepNewTestRestoreOp(
+func PrepNewTestRestoreOp(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
backupStore storage.Storage,
@@ -75,10 +75,10 @@ func prepNewTestRestoreOp(
restoreCfg control.RestoreConfig,
) (
operations.RestoreOperation,
-*restoreOpDependencies,
+*RestoreOpDependencies,
) {
var (
-rod = &restoreOpDependencies{
+rod = &RestoreOpDependencies{
acct: tconfig.NewM365Account(t),
st: backupStore,
}
@@ -114,10 +114,10 @@ func prepNewTestRestoreOp(
rod.acct,
sel,
nil,
-rod.close,
+rod.Close,
counter)

-ro := newTestRestoreOp(
+ro := NewTestRestoreOp(
t,
ctx,
rod,
@@ -135,14 +135,14 @@ func prepNewTestRestoreOp(
return ro, rod
}

-// newTestRestoreOp accepts the clients required to compose a restore operation, plus
+// NewTestRestoreOp accepts the clients required to compose a restore operation, plus
// any other metadata, and uses them to generate a new restore operation. This
// allows restore chains to utilize the same temp directory and configuration
// details.
-func newTestRestoreOp(
+func NewTestRestoreOp(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
-rod *restoreOpDependencies,
+rod *RestoreOpDependencies,
backupID model.StableID,
bus events.Eventer,
counter *count.Bus,
@@ -167,14 +167,14 @@ func newTestRestoreOp(
bus,
counter)

if !assert.NoError(t, err, clues.ToCore(err)) {
-rod.close(t, ctx)
+rod.Close(t, ctx)
t.FailNow()
}

return ro
}

-func runAndCheckRestore(
+func RunAndCheckRestore(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
ro *operations.RestoreOperation,
@@ -223,7 +223,7 @@ type GetItemsInContainerByCollisionKeyer[T any] interface {
) (map[string]T, error)
}

-func filterCollisionKeyResults[T any](
+func FilterCollisionKeyResults[T any](
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
protectedResourceID, containerID string,
@@ -243,7 +243,7 @@ func filterCollisionKeyResults[T any](
return m
}

-func checkRestoreCounts(
+func CheckRestoreCounts(
t *testing.T,
ctr *count.Bus,
expectSkips, expectReplaces, expectNew int,

View File

@@ -1,4 +1,4 @@
-package test_test
+package sharepoint_test

import (
"context"
@@ -14,10 +14,10 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
evmock "github.com/alcionai/corso/src/internal/events/mock"
"github.com/alcionai/corso/src/internal/m365/collection/drive"
+. "github.com/alcionai/corso/src/internal/operations/test/m365"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
-deeTD "github.com/alcionai/corso/src/pkg/backup/details/testdata"
"github.com/alcionai/corso/src/pkg/control"
ctrlTD "github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/count"
@@ -31,7 +31,7 @@ import (
type SharePointBackupIntgSuite struct {
tester.Suite
-its intgTesterSetup
+its IntgTesterSetup
}

func TestSharePointBackupIntgSuite(t *testing.T) {
@@ -43,11 +43,117 @@ func TestSharePointBackupIntgSuite(t *testing.T) {
}

func (suite *SharePointBackupIntgSuite) SetupSuite() {
-suite.its = newIntegrationTesterSetup(suite.T())
+suite.its = NewIntegrationTesterSetup(suite.T())
}

+func (suite *SharePointBackupIntgSuite) TestBackup_Run_sharePoint() {
+var (
+resourceID = suite.its.Site.ID
+sel = selectors.NewSharePointBackup([]string{resourceID})
+)
+
+sel.Include(selTD.SharePointBackupFolderScope(sel))
+
+RunBasicDriveishBackupTests(
+suite,
+path.SharePointService,
+control.DefaultOptions(),
+sel.Selector)
+}
+
-func (suite *SharePointBackupIntgSuite) TestBackup_Run_incrementalSharePoint() {
-sel := selectors.NewSharePointRestore([]string{suite.its.site.ID})
+func (suite *SharePointBackupIntgSuite) TestBackup_Run_incrementalSharePoint() {
+runSharePointIncrementalBackupTests(suite, suite.its, control.DefaultOptions())
+}
+
+func (suite *SharePointBackupIntgSuite) TestBackup_Run_extensionsSharePoint() {
+var (
+resourceID = suite.its.Site.ID
+sel = selectors.NewSharePointBackup([]string{resourceID})
+)
+
+sel.Include(selTD.SharePointBackupFolderScope(sel))
+
+RunDriveishBackupWithExtensionsTests(
+suite,
+path.SharePointService,
+control.DefaultOptions(),
+sel.Selector)
+}
+
+// ---------------------------------------------------------------------------
+// test version using the tree-based drive item processor
+// ---------------------------------------------------------------------------
+
+type SharePointBackupTreeIntgSuite struct {
+tester.Suite
+its IntgTesterSetup
+}
+
+func TestSharePointBackupTreeIntgSuite(t *testing.T) {
+suite.Run(t, &SharePointBackupTreeIntgSuite{
+Suite: tester.NewIntegrationSuite(
+t,
+[][]string{tconfig.M365AcctCredEnvs, storeTD.AWSStorageCredEnvs}),
+})
+}
+
+func (suite *SharePointBackupTreeIntgSuite) SetupSuite() {
+suite.its = NewIntegrationTesterSetup(suite.T())
+}
+
+func (suite *SharePointBackupTreeIntgSuite) TestBackup_Run_treeSharePoint() {
+var (
+resourceID = suite.its.Site.ID
+sel = selectors.NewSharePointBackup([]string{resourceID})
+opts = control.DefaultOptions()
+)
+
+sel.Include(selTD.SharePointBackupFolderScope(sel))
+opts.ToggleFeatures.UseDeltaTree = true
+
+RunBasicDriveishBackupTests(
+suite,
+path.SharePointService,
+opts,
+sel.Selector)
+}
+
+func (suite *SharePointBackupTreeIntgSuite) TestBackup_Run_treeIncrementalSharePoint() {
+opts := control.DefaultOptions()
+opts.ToggleFeatures.UseDeltaTree = true
+
+runSharePointIncrementalBackupTests(suite, suite.its, opts)
+}
+
+func (suite *SharePointBackupTreeIntgSuite) TestBackup_Run_treeExtensionsSharePoint() {
+var (
+resourceID = suite.its.Site.ID
+sel = selectors.NewSharePointBackup([]string{resourceID})
+opts = control.DefaultOptions()
+)
+
+sel.Include(selTD.SharePointBackupFolderScope(sel))
+opts.ToggleFeatures.UseDeltaTree = true
+
+RunDriveishBackupWithExtensionsTests(
+suite,
+path.SharePointService,
+opts,
+sel.Selector)
+}
+
+// ---------------------------------------------------------------------------
+// common backup test wrappers
+// ---------------------------------------------------------------------------
+
+func runSharePointIncrementalBackupTests(
+suite tester.Suite,
+its IntgTesterSetup,
+opts control.Options,
+) {
+sel := selectors.NewSharePointRestore([]string{its.Site.ID})

ic := func(cs []string) selectors.Selector {
sel.Include(sel.LibraryFolders(cs, selectors.PrefixMatch()))
@@ -58,10 +164,10 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_incrementalSharePoint() {
t *testing.T,
ctx context.Context,
) string {
-d, err := suite.its.ac.Sites().GetDefaultDrive(ctx, suite.its.site.ID)
+d, err := its.AC.Sites().GetDefaultDrive(ctx, its.Site.ID)
if err != nil {
err = graph.Wrap(ctx, err, "retrieving default site drive").
-With("site", suite.its.site.ID)
+With("site", its.Site.ID)
}

require.NoError(t, err, clues.ToCore(err))
@@ -76,10 +182,11 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_incrementalSharePoint() {
return drive.NewSiteRestoreHandler(ac, path.SharePointService)
}

-runDriveIncrementalTest(
+RunIncrementalDriveishBackupTest(
suite,
-suite.its.site.ID,
-suite.its.user.ID,
+opts,
+its.Site.ID,
+its.User.ID,
path.SharePointService,
path.LibrariesCategory,
ic,
@@ -89,103 +196,9 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_incrementalSharePoint() {
true)
}

-func (suite *SharePointBackupIntgSuite) TestBackup_Run_sharePointBasic() {
-t := suite.T()
-
-ctx, flush := tester.NewContext(t)
-defer flush()
-
-var (
-mb = evmock.NewBus()
-counter = count.New()
-sel = selectors.NewSharePointBackup([]string{suite.its.site.ID})
-opts = control.DefaultOptions()
-)
-
-sel.Include(selTD.SharePointBackupFolderScope(sel))
-
-bo, bod := prepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter)
-defer bod.close(t, ctx)
-
-runAndCheckBackup(t, ctx, &bo, mb, false)
-checkBackupIsInManifests(
-t,
-ctx,
-bod.kw,
-bod.sw,
-&bo,
-bod.sel,
-bod.sel.ID(),
-path.LibrariesCategory)
-}
-
-func (suite *SharePointBackupIntgSuite) TestBackup_Run_sharePointExtensions() {
-t := suite.T()
-
-ctx, flush := tester.NewContext(t)
-defer flush()
-
-var (
-mb = evmock.NewBus()
-counter = count.New()
-sel = selectors.NewSharePointBackup([]string{suite.its.site.ID})
-opts = control.DefaultOptions()
-tenID = tconfig.M365TenantID(t)
-svc = path.SharePointService
-ws = deeTD.DriveIDFromRepoRef
-)
-
-opts.ItemExtensionFactory = getTestExtensionFactories()
-
-sel.Include(selTD.SharePointBackupFolderScope(sel))
-
-bo, bod := prepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter)
-defer bod.close(t, ctx)
-
-runAndCheckBackup(t, ctx, &bo, mb, false)
-checkBackupIsInManifests(
-t,
-ctx,
-bod.kw,
-bod.sw,
-&bo,
-bod.sel,
-bod.sel.ID(),
-path.LibrariesCategory)
-
-bID := bo.Results.BackupID
-
-deets, expectDeets := deeTD.GetDeetsInBackup(
-t,
-ctx,
-bID,
-tenID,
-bod.sel.ID(),
-svc,
-ws,
-bod.kms,
-bod.sss)
-deeTD.CheckBackupDetails(
-t,
-ctx,
-bID,
-ws,
-bod.kms,
-bod.sss,
-expectDeets,
-false)
-
-// Check that the extensions are in the backup
-for _, ent := range deets.Entries {
-if ent.Folder == nil {
-verifyExtensionData(t, ent.ItemInfo, path.SharePointService)
-}
-}
-}
-
type SharePointBackupNightlyIntgSuite struct {
tester.Suite
-its intgTesterSetup
+its IntgTesterSetup
}

func TestSharePointBackupNightlyIntgSuite(t *testing.T) {
@@ -197,26 +210,26 @@ func TestSharePointBackupNightlyIntgSuite(t *testing.T) {
}

func (suite *SharePointBackupNightlyIntgSuite) SetupSuite() {
-suite.its = newIntegrationTesterSetup(suite.T())
+suite.its = NewIntegrationTesterSetup(suite.T())
}

func (suite *SharePointBackupNightlyIntgSuite) TestBackup_Run_sharePointVersion9MergeBase() {
-sel := selectors.NewSharePointBackup([]string{suite.its.site.ID})
+sel := selectors.NewSharePointBackup([]string{suite.its.Site.ID})
sel.Include(selTD.SharePointBackupFolderScope(sel))

-runMergeBaseGroupsUpdate(suite, sel.Selector, true)
+RunMergeBaseGroupsUpdate(suite, sel.Selector, true)
}

func (suite *SharePointBackupNightlyIntgSuite) TestBackup_Run_sharePointVersion9AssistBases() {
-sel := selectors.NewSharePointBackup([]string{suite.its.site.ID})
+sel := selectors.NewSharePointBackup([]string{suite.its.Site.ID})
sel.Include(selTD.SharePointBackupFolderScope(sel))

-runDriveAssistBaseGroupsUpdate(suite, sel.Selector, true)
+RunDriveAssistBaseGroupsUpdate(suite, sel.Selector, true)
}

type SharePointRestoreNightlyIntgSuite struct {
tester.Suite
-its intgTesterSetup
+its IntgTesterSetup
}

func TestSharePointRestoreIntgSuite(t *testing.T) {
@@ -228,38 +241,38 @@ func TestSharePointRestoreIntgSuite(t *testing.T) {
}

func (suite *SharePointRestoreNightlyIntgSuite) SetupSuite() {
-suite.its = newIntegrationTesterSetup(suite.T())
+suite.its = NewIntegrationTesterSetup(suite.T())
}

func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointWithAdvancedOptions() {
-sel := selectors.NewSharePointBackup([]string{suite.its.site.ID})
+sel := selectors.NewSharePointBackup([]string{suite.its.Site.ID})
sel.Include(selTD.SharePointBackupFolderScope(sel))
sel.Filter(sel.Library("documents"))
-sel.DiscreteOwner = suite.its.site.ID
+sel.DiscreteOwner = suite.its.Site.ID

-runDriveRestoreWithAdvancedOptions(
+RunDriveRestoreWithAdvancedOptions(
suite.T(),
suite,
-suite.its.ac,
+suite.its.AC,
sel.Selector,
-suite.its.site.DriveID,
-suite.its.site.DriveRootFolderID)
+suite.its.Site.DriveID,
+suite.its.Site.DriveRootFolderID)
}

func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointAlternateProtectedResource() {
-sel := selectors.NewSharePointBackup([]string{suite.its.site.ID})
+sel := selectors.NewSharePointBackup([]string{suite.its.Site.ID})
sel.Include(selTD.SharePointBackupFolderScope(sel))
sel.Filter(sel.Library("documents"))
-sel.DiscreteOwner = suite.its.site.ID
+sel.DiscreteOwner = suite.its.Site.ID

-runDriveRestoreToAlternateProtectedResource(
+RunDriveRestoreToAlternateProtectedResource(
suite.T(),
suite,
-suite.its.ac,
+suite.its.AC,
sel.Selector,
-suite.its.site,
-suite.its.secondarySite,
-suite.its.secondarySite.ID)
+suite.its.Site,
+suite.its.SecondarySite,
+suite.its.SecondarySite.ID)
}

func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointDeletedDrives() {
@@ -276,13 +289,13 @@ func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointDeletedDrives() {
rc.OnCollision = control.Copy

// create a new drive
-md, err := suite.its.ac.Lists().PostDrive(ctx, suite.its.site.ID, rc.Location)
+md, err := suite.its.AC.Lists().PostDrive(ctx, suite.its.Site.ID, rc.Location)
require.NoError(t, err, clues.ToCore(err))

driveID := ptr.Val(md.GetId())

// get the root folder
-mdi, err := suite.its.ac.Drives().GetRootFolder(ctx, driveID)
+mdi, err := suite.its.AC.Drives().GetRootFolder(ctx, driveID)
require.NoError(t, err, clues.ToCore(err))

rootFolderID := ptr.Val(mdi.GetId())
@@ -296,7 +309,7 @@ func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointDeletedDrives() {
file := models.NewFile()
item.SetFile(file)

-_, err = suite.its.ac.Drives().PostItemInContainer(
+_, err = suite.its.AC.Drives().PostItemInContainer(
ctx,
driveID,
rootFolderID,
@@ -309,18 +322,18 @@ func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointDeletedDrives() {
mb = evmock.NewBus()
counter = count.New()
opts = control.DefaultOptions()
-graphClient = suite.its.ac.Stable.Client()
+graphClient = suite.its.AC.Stable.Client()
)

-bsel := selectors.NewSharePointBackup([]string{suite.its.site.ID})
+bsel := selectors.NewSharePointBackup([]string{suite.its.Site.ID})
bsel.Include(selTD.SharePointBackupFolderScope(bsel))
bsel.Filter(bsel.Library(rc.Location))
-bsel.DiscreteOwner = suite.its.site.ID
+bsel.DiscreteOwner = suite.its.Site.ID

-bo, bod := prepNewTestBackupOp(t, ctx, mb, bsel.Selector, opts, version.Backup, counter)
-defer bod.close(t, ctx)
+bo, bod := PrepNewTestBackupOp(t, ctx, mb, bsel.Selector, opts, version.Backup, counter)
+defer bod.Close(t, ctx)

-runAndCheckBackup(t, ctx, &bo, mb, false)
+RunAndCheckBackup(t, ctx, &bo, mb, false)

// test cases:
@@ -348,18 +361,18 @@ func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointDeletedDrives() {
ctr = count.New()
)

-ro, _ := prepNewTestRestoreOp(
+ro, _ := PrepNewTestRestoreOp(
t,
ctx,
-bod.st,
+bod.St,
bo.Results.BackupID,
mb,
ctr,
-bod.sel,
+bod.Sel,
opts,
rc)

-runAndCheckRestore(t, ctx, &ro, mb, false)
+RunAndCheckRestore(t, ctx, &ro, mb, false)
assert.Equal(t, 1, ctr.Get(count.NewItemCreated), "restored an item")

resp, err := graphClient.
@@ -399,23 +412,23 @@ func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointDeletedDrives() {
ctr = count.New()
)

-ro, _ := prepNewTestRestoreOp(
+ro, _ := PrepNewTestRestoreOp(
t,
ctx,
-bod.st,
+bod.St,
bo.Results.BackupID,
mb,
ctr,
-bod.sel,
+bod.Sel,
opts,
rc)

-runAndCheckRestore(t, ctx, &ro, mb, false)
+RunAndCheckRestore(t, ctx, &ro, mb, false)
assert.Equal(t, 1, ctr.Get(count.NewItemCreated), "restored an item")

-pgr := suite.its.ac.
+pgr := suite.its.AC.
Drives().
-NewSiteDrivePager(suite.its.site.ID, []string{"id", "name"})
+NewSiteDrivePager(suite.its.Site.ID, []string{"id", "name"})

drives, err := api.GetAllDrives(ctx, pgr)
require.NoError(t, err, clues.ToCore(err))
@@ -434,7 +447,7 @@ func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointDeletedDrives() {
md = created
driveID = ptr.Val(md.GetId())

-mdi, err := suite.its.ac.Drives().GetRootFolder(ctx, driveID)
+mdi, err := suite.its.AC.Drives().GetRootFolder(ctx, driveID)
require.NoError(t, err, clues.ToCore(err))

rootFolderID = ptr.Val(mdi.GetId())
@@ -467,18 +480,18 @@ func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointDeletedDrives() {
ctr = count.New()
)

-ro, _ := prepNewTestRestoreOp(
+ro, _ := PrepNewTestRestoreOp(
t,
ctx,
-bod.st,
+bod.St,
bo.Results.BackupID,
mb,
ctr,
-bod.sel,
+bod.Sel,
opts,
rc)

-runAndCheckRestore(t, ctx, &ro, mb, false)
+RunAndCheckRestore(t, ctx, &ro, mb, false)
assert.Equal(t, 1, ctr.Get(count.NewItemCreated), "restored an item") assert.Equal(t, 1, ctr.Get(count.NewItemCreated), "restored an item")