use tree version in nightly tests (#4885)

#### Does this PR need a docs update or release note?

- [x]  No

#### Type of change

- [x] 🤖 Supportability/Tests

#### Issue(s)

* #4689

#### Test Plan

- [x] 💚 E2E
This commit is contained in:
Keepers 2023-12-21 11:44:09 -07:00 committed by GitHub
parent 1944c070cf
commit 753ed1a075
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
15 changed files with 1846 additions and 1451 deletions

View File

@ -181,12 +181,7 @@ func (c *Collections) makeDriveCollections(
return nil, nil, pagers.DeltaUpdate{}, clues.Wrap(err, "generating backup tree prefix")
}
root, err := c.handler.GetRootFolder(ctx, driveID)
if err != nil {
return nil, nil, pagers.DeltaUpdate{}, clues.Wrap(err, "getting root folder")
}
tree := newFolderyMcFolderFace(ppfx, ptr.Val(root.GetId()))
tree := newFolderyMcFolderFace(ppfx)
counter.Add(count.PrevPaths, int64(len(prevPaths)))
@ -232,7 +227,7 @@ func (c *Collections) makeDriveCollections(
// only populate the global excluded items if no delta reset occurred.
// if a reset did occur, the collections should already be marked as
// "do not merge", therefore everything will get processed as a new addition.
if !tree.hadReset {
if !tree.hadReset && len(prevDeltaLink) > 0 {
p, err := c.handler.CanonicalPath(odConsts.DriveFolderPrefixBuilder(driveID), c.tenantID)
if err != nil {
err = clues.WrapWC(ctx, err, "making canonical path for item exclusions")
@ -543,10 +538,9 @@ func (c *Collections) addFolderToTree(
notSelected = shouldSkip(ctx, collectionPath, c.handler, ptr.Val(drv.GetName()))
if notSelected {
logger.Ctx(ctx).Debugw("path not selected", "skipped_path", collectionPath.String())
return nil, nil
}
err = tree.setFolder(ctx, folder)
err = tree.setFolder(ctx, folder, notSelected)
return nil, clues.Stack(err).OrNil()
}
@ -665,12 +659,9 @@ func (c *Collections) addFileToTree(
}
}
err := tree.addFile(file)
if err != nil {
return nil, clues.StackWC(ctx, err)
}
err := tree.addFile(ctx, file)
return nil, nil
return nil, clues.Stack(err).OrNil()
}
// quality-of-life wrapper that transforms each tombstone in the map

View File

@ -165,10 +165,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_GetTree() {
enumerator: driveEnumerator(
d1.newEnumer().with(delta(nil)),
d2.newEnumer().with(delta(nil))),
metadata: multiDriveMetadata(
t,
d1.newPrevPaths(t),
d2.newPrevPaths(t)),
metadata: multiDriveMetadata(t),
expect: expected{
canUsePrevBackup: assert.True,
collections: expectCollections(
@ -197,10 +194,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_GetTree() {
d2.fileAt(root, "r"),
d2.folderAt(root),
d2.fileAt(folder, "f"))))),
metadata: multiDriveMetadata(
t,
d1.newPrevPaths(t),
d2.newPrevPaths(t)),
metadata: multiDriveMetadata(t),
expect: expected{
canUsePrevBackup: assert.True,
collections: expectCollections(
@ -387,8 +381,13 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_GetTree() {
expect, _ := test.expect.globalExcludedFileIDs.Get(d)
result, rok := globalExcludes.Get(d)
if len(test.metadata) > 0 {
require.True(t, rok, "drive results have a global excludes entry")
assert.Equal(t, expect, result, "global excluded file IDs")
} else {
require.False(t, rok, "drive results have no global excludes entry")
assert.Empty(t, result, "global excluded file IDs")
}
})
}
})

View File

@ -27,10 +27,6 @@ type folderyMcFolderFace struct {
// new, moved, and notMoved root
root *nodeyMcNodeFace
// the ID of the actual root folder.
// required to ensure correct population of the root node.
rootID string
// the majority of operations we perform can be handled with
// a folder ID lookup instead of re-walking the entire tree.
// Ex: adding a new file to its parent folder.
@ -53,11 +49,9 @@ type folderyMcFolderFace struct {
func newFolderyMcFolderFace(
prefix path.Path,
rootID string,
) *folderyMcFolderFace {
return &folderyMcFolderFace{
prefix: prefix,
rootID: rootID,
folderIDToNode: map[string]*nodeyMcNodeFace{},
tombstones: map[string]*nodeyMcNodeFace{},
fileIDToParentID: map[string]string{},
@ -93,15 +87,23 @@ type nodeyMcNodeFace struct {
children map[string]*nodeyMcNodeFace
// file item ID -> file metadata
files map[string]*custom.DriveItem
// when true, this flag means the folder appeared in enumeration,
// but was not included in the backup selection. We include
// unselected folders in the tree so we don't have to hold stateful
// decisions (such as folder selection) as part of delta processing;
// we only need to evaluate them during post-processing.
isNotSelected bool
}
func newNodeyMcNodeFace(
parent *nodeyMcNodeFace,
folder *custom.DriveItem,
isNotSelected bool,
) *nodeyMcNodeFace {
return &nodeyMcNodeFace{
parent: parent,
folder: folder,
isNotSelected: isNotSelected,
children: map[string]*nodeyMcNodeFace{},
files: map[string]*custom.DriveItem{},
}
@ -134,6 +136,7 @@ func (face *folderyMcFolderFace) getNode(id string) *nodeyMcNodeFace {
func (face *folderyMcFolderFace) setFolder(
ctx context.Context,
folder *custom.DriveItem,
isNotSelected bool,
) error {
var (
id = ptr.Val(folder.GetId())
@ -151,14 +154,13 @@ func (face *folderyMcFolderFace) setFolder(
}
if (parentFolder == nil || len(ptr.Val(parentFolder.GetId())) == 0) &&
id != face.rootID {
folder.GetRoot() == nil {
return clues.NewWC(ctx, "non-root folder missing parent id")
}
// only set the root node once.
if id == face.rootID {
if folder.GetRoot() != nil {
if face.root == nil {
root := newNodeyMcNodeFace(nil, folder)
root := newNodeyMcNodeFace(nil, folder, isNotSelected)
face.root = root
face.folderIDToNode[id] = root
} else {
@ -169,6 +171,11 @@ func (face *folderyMcFolderFace) setFolder(
return nil
}
ctx = clues.Add(
ctx,
"parent_id", ptr.Val(parentFolder.GetId()),
"parent_dir_path", path.LoggableDir(ptr.Val(parentFolder.GetPath())))
// There are four possible changes that can happen at this point.
// 1. new folder addition.
// 2. duplicate folder addition.
@ -221,7 +228,7 @@ func (face *folderyMcFolderFace) setFolder(
nodey.folder = folder
} else {
// change type 1: new addition
nodey = newNodeyMcNodeFace(parentNode, folder)
nodey = newNodeyMcNodeFace(parentNode, folder, isNotSelected)
}
// ensure the parent points to this node, and that the node is registered
@ -264,7 +271,7 @@ func (face *folderyMcFolderFace) setTombstone(
}
if _, alreadyBuried := face.tombstones[id]; !alreadyBuried {
face.tombstones[id] = newNodeyMcNodeFace(nil, folder)
face.tombstones[id] = newNodeyMcNodeFace(nil, folder, false)
}
return nil
@ -308,7 +315,7 @@ func (face *folderyMcFolderFace) setPreviousPath(
return nil
}
zombey := newNodeyMcNodeFace(nil, custom.NewDriveItem(folderID, ""))
zombey := newNodeyMcNodeFace(nil, custom.NewDriveItem(folderID, ""), false)
zombey.prev = prev
face.tombstones[folderID] = zombey
@ -328,6 +335,7 @@ func (face *folderyMcFolderFace) hasFile(id string) bool {
// file was already added to the tree and is getting relocated,
// this func will update and/or clean up all the old references.
func (face *folderyMcFolderFace) addFile(
ctx context.Context,
file *custom.DriveItem,
) error {
var (
@ -336,32 +344,33 @@ func (face *folderyMcFolderFace) addFile(
parentID string
)
if len(id) == 0 {
return clues.NewWC(ctx, "item added without ID")
}
if parentFolder == nil || len(ptr.Val(parentFolder.GetId())) == 0 {
return clues.New("item added without parent folder ID")
return clues.NewWC(ctx, "item added without parent folder ID")
}
parentID = ptr.Val(parentFolder.GetId())
if len(id) == 0 {
return clues.New("item added without ID")
}
ctx = clues.Add(
ctx,
"parent_id", ptr.Val(parentFolder.GetId()),
"parent_dir_path", path.LoggableDir(ptr.Val(parentFolder.GetPath())))
// in case of file movement, clean up any references
// to the file in the old parent
oldParentID, ok := face.fileIDToParentID[id]
if ok && oldParentID != parentID {
if nodey, ok := face.folderIDToNode[oldParentID]; ok {
if nodey := face.getNode(oldParentID); nodey != nil {
delete(nodey.files, id)
}
if zombey, ok := face.tombstones[oldParentID]; ok {
delete(zombey.files, id)
}
}
parent, ok := face.folderIDToNode[parentID]
if !ok {
return clues.New("item added before parent")
return clues.NewWC(ctx, "file added before parent")
}
face.fileIDToParentID[id] = parentID
@ -432,7 +441,11 @@ func (face *folderyMcFolderFace) walkTreeAndBuildCollections(
isChildOfPackage bool,
result map[string]collectable,
) error {
if node == nil {
// all non-root folders get skipped when not selected.
// the root folder stays in- because it's required to build
// the tree of selected folders- but if it's not selected
// then we won't include any of its files.
if node == nil || (node != face.root && node.isNotSelected) {
return nil
}
@ -476,6 +489,12 @@ func (face *folderyMcFolderFace) walkTreeAndBuildCollections(
files[id] = node.folder
}
// should only occur if the root is not selected, since we should
// have backed out on all other non-selected folders by this point.
if node.isNotSelected {
files = map[string]*custom.DriveItem{}
}
cbl := collectable{
currPath: collectionPath,
files: files,

View File

@ -36,7 +36,7 @@ func (suite *DeltaTreeUnitSuite) TestNewFolderyMcFolderFace() {
require.NoError(t, err, clues.ToCore(err))
folderFace := newFolderyMcFolderFace(p, rootID)
folderFace := newFolderyMcFolderFace(p)
assert.Equal(t, p, folderFace.prefix)
assert.Nil(t, folderFace.root)
assert.NotNil(t, folderFace.folderIDToNode)
@ -52,7 +52,7 @@ func (suite *DeltaTreeUnitSuite) TestNewNodeyMcNodeFace() {
fld = custom.ToCustomDriveItem(d.folderAt(root))
)
nodeFace := newNodeyMcNodeFace(parent, fld)
nodeFace := newNodeyMcNodeFace(parent, fld, false)
assert.Equal(t, parent, nodeFace.parent)
assert.Equal(t, folderID(), ptr.Val(nodeFace.folder.GetId()))
assert.Equal(t, folderName(), ptr.Val(nodeFace.folder.GetName()))
@ -177,7 +177,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder() {
tree := test.tree(t, drive())
folder := test.folder()
err := tree.setFolder(ctx, folder)
err := tree.setFolder(ctx, folder, false)
test.expectErr(t, err, clues.ToCore(err))
if err != nil {
@ -497,7 +497,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder_correctTree()
tree := treeWithRoot(t, d)
set := func(folder *custom.DriveItem) {
err := tree.setFolder(ctx, folder)
err := tree.setFolder(ctx, folder, false)
require.NoError(t, err, clues.ToCore(err))
}
@ -600,7 +600,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder_correctTombst
tree := treeWithRoot(t, d)
set := func(folder *custom.DriveItem) {
err := tree.setFolder(ctx, folder)
err := tree.setFolder(ctx, folder, false)
require.NoError(t, err, clues.ToCore(err))
}
@ -884,7 +884,10 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() {
df = custom.ToCustomDriveItem(d.fileWSizeAt(test.contentSize, test.parent))
)
err := tree.addFile(df)
ctx, flush := tester.NewContext(t)
defer flush()
err := tree.addFile(ctx, df)
test.expectErr(t, err, clues.ToCore(err))
assert.Equal(t, test.expectFiles, tree.fileIDToParentID)
@ -968,6 +971,9 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_addAndDeleteFile() {
fID = fileID()
)
ctx, flush := tester.NewContext(t)
defer flush()
require.Len(t, tree.fileIDToParentID, 0)
require.Len(t, tree.deletedFileIDs, 0)
@ -978,7 +984,9 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_addAndDeleteFile() {
assert.Len(t, tree.deletedFileIDs, 1)
assert.Contains(t, tree.deletedFileIDs, fID)
err := tree.addFile(custom.ToCustomDriveItem(d.fileAt(root)))
err := tree.addFile(
ctx,
custom.ToCustomDriveItem(d.fileAt(root)))
require.NoError(t, err, clues.ToCore(err))
assert.Len(t, tree.fileIDToParentID, 1)
@ -1127,10 +1135,10 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables()
defer flush()
tree := treeWithRoot(t, d)
err := tree.setFolder(ctx, custom.ToCustomDriveItem(d.packageAtRoot()))
err := tree.setFolder(ctx, custom.ToCustomDriveItem(d.packageAtRoot()), false)
require.NoError(t, err, clues.ToCore(err))
err = tree.setFolder(ctx, custom.ToCustomDriveItem(d.folderAt(pkg)))
err = tree.setFolder(ctx, custom.ToCustomDriveItem(d.folderAt(pkg)), false)
require.NoError(t, err, clues.ToCore(err))
return tree
@ -1199,6 +1207,58 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables()
},
},
},
{
name: "folder hierarchy with unselected root and child and no previous paths",
tree: treeWithUnselectedRootAndFolder,
expectErr: require.NoError,
prevPaths: map[string]string{},
expect: map[string]collectable{
rootID: {
currPath: d.fullPath(t),
files: map[string]*custom.DriveItem{},
folderID: rootID,
isPackageOrChildOfPackage: false,
},
folderID(): {
currPath: d.fullPath(t, folderName()),
files: map[string]*custom.DriveItem{
folderID(): custom.ToCustomDriveItem(d.folderAt(root)),
fileID(): custom.ToCustomDriveItem(d.fileAt(folder)),
},
folderID: folderID(),
isPackageOrChildOfPackage: false,
},
},
},
{
name: "folder hierarchy with unselected root and child with previous paths",
tree: treeWithUnselectedRootAndFolder,
expectErr: require.NoError,
prevPaths: map[string]string{
rootID: d.strPath(t),
folderID(): d.strPath(t, folderName()),
folderID("nope"): d.strPath(t, folderName("nope")),
},
expect: map[string]collectable{
rootID: {
currPath: d.fullPath(t),
prevPath: d.fullPath(t),
files: map[string]*custom.DriveItem{},
folderID: rootID,
isPackageOrChildOfPackage: false,
},
folderID(): {
currPath: d.fullPath(t, folderName()),
prevPath: d.fullPath(t, folderName()),
files: map[string]*custom.DriveItem{
folderID(): custom.ToCustomDriveItem(d.folderAt(root)),
fileID(): custom.ToCustomDriveItem(d.fileAt(folder)),
},
folderID: folderID(),
isPackageOrChildOfPackage: false,
},
},
},
{
name: "root and tombstones",
tree: treeWithFileInTombstone,
@ -1241,7 +1301,13 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables()
results, err := tree.generateCollectables()
test.expectErr(t, err, clues.ToCore(err))
assert.Len(t, results, len(test.expect))
assert.Len(
t,
results,
len(test.expect),
"count of collections\n\tWanted: %+v\n\tGot: %+v",
maps.Keys(test.expect),
maps.Keys(results))
for id, expect := range test.expect {
require.Contains(t, results, id)

View File

@ -490,22 +490,22 @@ func defaultLoc() path.Elements {
}
func newTree(t *testing.T, d *deltaDrive) *folderyMcFolderFace {
return newFolderyMcFolderFace(defaultTreePfx(t, d), rootID)
return newFolderyMcFolderFace(defaultTreePfx(t, d))
}
func treeWithRoot(t *testing.T, d *deltaDrive) *folderyMcFolderFace {
tree := newFolderyMcFolderFace(defaultTreePfx(t, d), rootID)
tree := newFolderyMcFolderFace(defaultTreePfx(t, d))
root := custom.ToCustomDriveItem(rootFolder())
//nolint:forbidigo
err := tree.setFolder(context.Background(), root)
err := tree.setFolder(context.Background(), root, false)
require.NoError(t, err, clues.ToCore(err))
return tree
}
func treeAfterReset(t *testing.T, d *deltaDrive) *folderyMcFolderFace {
tree := newFolderyMcFolderFace(defaultTreePfx(t, d), rootID)
tree := newFolderyMcFolderFace(defaultTreePfx(t, d))
tree.reset()
return tree
@ -535,21 +535,24 @@ func treeWithFolders(t *testing.T, d *deltaDrive) *folderyMcFolderFace {
folder := custom.ToCustomDriveItem(d.folderAt("parent"))
//nolint:forbidigo
err := tree.setFolder(context.Background(), parent)
err := tree.setFolder(context.Background(), parent, false)
require.NoError(t, err, clues.ToCore(err))
//nolint:forbidigo
err = tree.setFolder(context.Background(), folder)
err = tree.setFolder(context.Background(), folder, false)
require.NoError(t, err, clues.ToCore(err))
return tree
}
func treeWithFileAtRoot(t *testing.T, d *deltaDrive) *folderyMcFolderFace {
ctx, flush := tester.NewContext(t)
defer flush()
tree := treeWithRoot(t, d)
f := custom.ToCustomDriveItem(d.fileAt(root))
err := tree.addFile(f)
err := tree.addFile(ctx, f)
require.NoError(t, err, clues.ToCore(err))
return tree
@ -563,10 +566,13 @@ func treeWithDeletedFile(t *testing.T, d *deltaDrive) *folderyMcFolderFace {
}
func treeWithFileInFolder(t *testing.T, d *deltaDrive) *folderyMcFolderFace {
ctx, flush := tester.NewContext(t)
defer flush()
tree := treeWithFolders(t, d)
f := custom.ToCustomDriveItem(d.fileAt(folder))
err := tree.addFile(f)
err := tree.addFile(ctx, f)
require.NoError(t, err, clues.ToCore(err))
return tree
@ -583,6 +589,31 @@ func treeWithFileInTombstone(t *testing.T, d *deltaDrive) *folderyMcFolderFace {
return tree
}
func treeWithUnselectedRootAndFolder(t *testing.T, d *deltaDrive) *folderyMcFolderFace {
ctx, flush := tester.NewContext(t)
defer flush()
tree := treeWithRoot(t, d)
tree.root.isNotSelected = true
err := tree.addFile(ctx, custom.ToCustomDriveItem(d.fileAt(root, "r")))
require.NoError(t, err, clues.ToCore(err))
err = tree.setFolder(ctx, custom.ToCustomDriveItem(d.folderAt(root)), false)
require.NoError(t, err, clues.ToCore(err))
err = tree.addFile(ctx, custom.ToCustomDriveItem(d.fileAt(folder)))
require.NoError(t, err, clues.ToCore(err))
err = tree.setFolder(ctx, custom.ToCustomDriveItem(d.folderAt(root, "nope")), true)
require.NoError(t, err, clues.ToCore(err))
err = tree.addFile(ctx, custom.ToCustomDriveItem(d.fileAt("nope", "n")))
require.NoError(t, err, clues.ToCore(err))
return tree
}
// root -> idx(folder, parent) -> folderID()
// one item at each dir
// one tombstone: idx(folder, tombstone)
@ -603,39 +634,39 @@ func fullTreeWithNames(
// file "r" in root
df := custom.ToCustomDriveItem(d.fileAt(root, "r"))
err := tree.addFile(df)
err := tree.addFile(ctx, df)
require.NoError(t, err, clues.ToCore(err))
// root -> folderID(parentX)
parent := custom.ToCustomDriveItem(d.folderAt(root, parentFolderSuffix))
err = tree.setFolder(ctx, parent)
err = tree.setFolder(ctx, parent, false)
require.NoError(t, err, clues.ToCore(err))
// file "p" in folderID(parentX)
df = custom.ToCustomDriveItem(d.fileAt(parentFolderSuffix, "p"))
err = tree.addFile(df)
err = tree.addFile(ctx, df)
require.NoError(t, err, clues.ToCore(err))
// folderID(parentX) -> folderID()
fld := custom.ToCustomDriveItem(d.folderAt(parentFolderSuffix))
err = tree.setFolder(ctx, fld)
err = tree.setFolder(ctx, fld, false)
require.NoError(t, err, clues.ToCore(err))
// file "f" in folderID()
df = custom.ToCustomDriveItem(d.fileAt(folder, "f"))
err = tree.addFile(df)
err = tree.addFile(ctx, df)
require.NoError(t, err, clues.ToCore(err))
// tombstone - have to set a non-tombstone folder first,
// then add the item,
// then tombstone the folder
tomb := custom.ToCustomDriveItem(d.folderAt(root, tombstoneSuffix))
err = tree.setFolder(ctx, tomb)
err = tree.setFolder(ctx, tomb, false)
require.NoError(t, err, clues.ToCore(err))
// file "t" in tombstone
df = custom.ToCustomDriveItem(d.fileAt(tombstoneSuffix, "t"))
err = tree.addFile(df)
err = tree.addFile(ctx, df)
require.NoError(t, err, clues.ToCore(err))
err = tree.setTombstone(ctx, tomb)

View File

@ -647,7 +647,11 @@ func runBackupAndCompare(
require.NoError(t, err, clues.ToCore(err))
assert.True(t, canUsePreviousBackup, "can use previous backup")
// No excludes yet because this isn't an incremental backup.
assert.True(t, excludes.Empty())
assert.True(
t,
excludes.Empty(),
"global excludes should have no entries, got:\n\t%+v",
excludes.Keys())
t.Logf("Backup enumeration complete in %v\n", time.Since(start))

View File

@ -509,11 +509,14 @@ func testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(
restoreCfg.OnCollision = control.Replace
restoreCfg.IncludePermissions = true
opts := control.DefaultOptions()
opts.ToggleFeatures.UseDeltaTree = true
cfg := m365Stub.ConfigInfo{
Tenant: suite.Tenant(),
ResourceOwners: []string{suite.ResourceOwner()},
Service: testData.service,
Opts: control.DefaultOptions(),
Opts: opts,
RestoreCfg: restoreCfg,
}
@ -759,11 +762,14 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) {
restoreCfg.OnCollision = control.Replace
restoreCfg.IncludePermissions = true
opts := control.DefaultOptions()
opts.ToggleFeatures.UseDeltaTree = true
cfg := m365Stub.ConfigInfo{
Tenant: suite.Tenant(),
ResourceOwners: []string{suite.ResourceOwner()},
Service: testData.service,
Opts: control.DefaultOptions(),
Opts: opts,
RestoreCfg: restoreCfg,
}
@ -850,11 +856,14 @@ func testRestoreNoPermissionsAndBackup(suite oneDriveSuite, startVersion int) {
restoreCfg.OnCollision = control.Replace
restoreCfg.IncludePermissions = false
opts := control.DefaultOptions()
opts.ToggleFeatures.UseDeltaTree = true
cfg := m365Stub.ConfigInfo{
Tenant: suite.Tenant(),
ResourceOwners: []string{suite.ResourceOwner()},
Service: testData.service,
Opts: control.DefaultOptions(),
Opts: opts,
RestoreCfg: restoreCfg,
}
@ -1056,11 +1065,14 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio
restoreCfg.OnCollision = control.Replace
restoreCfg.IncludePermissions = true
opts := control.DefaultOptions()
opts.ToggleFeatures.UseDeltaTree = true
cfg := m365Stub.ConfigInfo{
Tenant: suite.Tenant(),
ResourceOwners: []string{suite.ResourceOwner()},
Service: testData.service,
Opts: control.DefaultOptions(),
Opts: opts,
RestoreCfg: restoreCfg,
}
@ -1252,11 +1264,14 @@ func testLinkSharesInheritanceRestoreAndBackup(suite oneDriveSuite, startVersion
restoreCfg.OnCollision = control.Replace
restoreCfg.IncludePermissions = true
opts := control.DefaultOptions()
opts.ToggleFeatures.UseDeltaTree = true
cfg := m365Stub.ConfigInfo{
Tenant: suite.Tenant(),
ResourceOwners: []string{suite.ResourceOwner()},
Service: testData.service,
Opts: control.DefaultOptions(),
Opts: opts,
RestoreCfg: restoreCfg,
}
@ -1369,11 +1384,14 @@ func testRestoreFolderNamedFolderRegression(
restoreCfg := control.DefaultRestoreConfig(dttm.SafeForTesting)
restoreCfg.IncludePermissions = true
opts := control.DefaultOptions()
opts.ToggleFeatures.UseDeltaTree = true
cfg := m365Stub.ConfigInfo{
Tenant: suite.Tenant(),
ResourceOwners: []string{suite.ResourceOwner()},
Service: testData.service,
Opts: control.DefaultOptions(),
Opts: opts,
RestoreCfg: restoreCfg,
}

View File

@ -0,0 +1,398 @@
package m365
import (
"context"
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/internal/common/idname"
strTD "github.com/alcionai/corso/src/internal/common/str/testdata"
"github.com/alcionai/corso/src/internal/events"
evmock "github.com/alcionai/corso/src/internal/events/mock"
"github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/m365"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/streamstore"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/account"
deeTD "github.com/alcionai/corso/src/pkg/backup/details/testdata"
"github.com/alcionai/corso/src/pkg/backup/identity"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/repository"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/storage"
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
"github.com/alcionai/corso/src/pkg/store"
)
// BackupOpDependencies bundles every client needed to run a backup
// operation in integration tests, plus a closer for releasing them.
// Populated by PrepNewTestBackupOp.
type BackupOpDependencies struct {
	Acct account.Account      // m365 account configuration for the test
	Ctrl *m365.Controller     // service controller built from Acct
	KMS  *kopia.ModelStore    // kopia model store layered on the repo conn
	KW   *kopia.Wrapper       // kopia repo wrapper layered on the repo conn
	Sel  selectors.Selector   // resolved backup scope selector
	SSS  streamstore.Streamer // stream-store reader/writer for backup details
	St   storage.Storage      // backing storage (prefixed S3) config
	SW   store.BackupStorer   // backup-model storer wrapping KMS

	// closer releases the underlying kopia connection; assigned during
	// PrepNewTestBackupOp after the connection is initialized.
	closer func()
}
// Close releases all dependencies held by bod: the kopia connection
// (via closer), the repo wrapper, and the model store. Each close error
// is reported through t but does not halt the remaining cleanup steps.
func (bod *BackupOpDependencies) Close(
	t *testing.T,
	ctx context.Context, //revive:disable-line:context-as-argument
) {
	// closer is only assigned partway through PrepNewTestBackupOp;
	// guard against cleanup running before that assignment.
	if bod.closer != nil {
		bod.closer()
	}

	if bod.KW != nil {
		err := bod.KW.Close(ctx)
		assert.NoErrorf(t, err, "kw close: %+v", clues.ToCore(err))
	}

	if bod.KMS != nil {
		// bug fix: previously this branch called bod.KW.Close a second
		// time, leaving the model store open and leaking its handle.
		err := bod.KMS.Close(ctx)
		assert.NoErrorf(t, err, "kms close: %+v", clues.ToCore(err))
	}
}
// PrepNewTestBackupOp generates all clients required to run a backup operation,
// returning both a backup operation created with those clients, as well as
// the clients themselves.
//
// Setup is order-sensitive: the kopia connection is initialized first, the
// wrapper and model store are layered on top of it, and the controller,
// backup operation, and stream-store are built from those.
//
// NOTE(review): the two assert.NoError early returns below hand back a nil
// *BackupOpDependencies — confirm callers don't unconditionally defer
// bod.Close on the returned value.
func PrepNewTestBackupOp(
	t *testing.T,
	ctx context.Context, //revive:disable-line:context-as-argument
	bus events.Eventer,
	sel selectors.Selector,
	opts control.Options,
	backupVersion int,
	counter *count.Bus,
) (
	operations.BackupOperation,
	*BackupOpDependencies,
) {
	bod := &BackupOpDependencies{
		Acct: tconfig.NewM365Account(t),
		St:   storeTD.NewPrefixedS3Storage(t),
	}

	repoNameHash := strTD.NewHashForRepoConfigName()

	k := kopia.NewConn(bod.St)

	err := k.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
	require.NoError(t, err, clues.ToCore(err))

	// release everything constructed so far if any later setup step
	// leaves err non-nil.
	defer func() {
		if err != nil {
			bod.Close(t, ctx)
			t.FailNow()
		}
	}()

	// kopiaRef comes with a count of 1 and Wrapper bumps it again,
	// so we're safe to close here.
	bod.closer = func() {
		err := k.Close(ctx)
		assert.NoErrorf(t, err, "k close: %+v", clues.ToCore(err))
	}

	bod.KW, err = kopia.NewWrapper(k)
	if !assert.NoError(t, err, clues.ToCore(err)) {
		return operations.BackupOperation{}, nil
	}

	bod.KMS, err = kopia.NewModelStore(k)
	if !assert.NoError(t, err, clues.ToCore(err)) {
		return operations.BackupOperation{}, nil
	}

	bod.SW = store.NewWrapper(bod.KMS)

	// bod.Close is handed over as the on-error cleanup callback.
	bod.Ctrl, bod.Sel = ControllerWithSelector(
		t,
		ctx,
		bod.Acct,
		sel,
		nil,
		bod.Close,
		counter)

	bo := NewTestBackupOp(
		t,
		ctx,
		bod,
		bus,
		opts,
		counter)
	bo.BackupVersion = backupVersion

	bod.SSS = streamstore.NewStreamer(
		bod.KW,
		bod.Acct.ID(),
		bod.Sel.PathService())

	return bo, bod
}
// NewTestBackupOp accepts the clients required to compose a backup operation, plus
// any other metadata, and uses them to generate a new backup operation. This
// allows backup chains to utilize the same temp directory and configuration
// details.
//
// On construction failure the dependencies are closed and the test is
// immediately failed via t.FailNow.
func NewTestBackupOp(
	t *testing.T,
	ctx context.Context, //revive:disable-line:context-as-argument
	bod *BackupOpDependencies,
	bus events.Eventer,
	opts control.Options,
	counter *count.Bus,
) operations.BackupOperation {
	// let the controller resolve the selector's ID to its display name.
	bod.Ctrl.IDNameLookup = idname.NewCache(map[string]string{bod.Sel.ID(): bod.Sel.Name()})

	bo, err := operations.NewBackupOperation(
		ctx,
		opts,
		bod.KW,
		bod.SW,
		bod.Ctrl,
		bod.Acct,
		bod.Sel,
		bod.Sel,
		bus,
		counter)
	if !assert.NoError(t, err, clues.ToCore(err)) {
		bod.Close(t, ctx)
		t.FailNow()
	}

	return bo
}
// RunAndCheckBackup executes bo and asserts a healthy run: no top-level
// or recovered errors, non-empty results and backup ID, an acceptable
// completion status, non-zero read/write counts, exactly one resource
// owner, and exactly one backup-end event on the mock bus.
// acceptNoData additionally permits the NoData status, for runs against
// resources with nothing to back up.
func RunAndCheckBackup(
	t *testing.T,
	ctx context.Context, //revive:disable-line:context-as-argument
	bo *operations.BackupOperation,
	mb *evmock.Bus,
	acceptNoData bool,
) {
	err := bo.Run(ctx)
	if !assert.NoError(t, err, clues.ToCore(err)) {
		// surface each recoverable error before failing, for debuggability.
		for i, err := range bo.Errors.Recovered() {
			t.Logf("recoverable err %d, %+v", i, err)
		}

		assert.Fail(t, "not allowed to error")
	}

	require.NotEmpty(t, bo.Results, "the backup had non-zero results")
	require.NotEmpty(t, bo.Results.BackupID, "the backup generated an ID")

	expectStatus := []operations.OpStatus{operations.Completed}
	if acceptNoData {
		expectStatus = append(expectStatus, operations.NoData)
	}

	require.Contains(
		t,
		expectStatus,
		bo.Status,
		"backup doesn't match expectation, wanted any of %v, got %s",
		expectStatus,
		bo.Status)

	require.NotZero(t, bo.Results.ItemsWritten)
	assert.NotZero(t, bo.Results.ItemsRead, "count of items read")
	assert.NotZero(t, bo.Results.BytesRead, "bytes read")
	assert.NotZero(t, bo.Results.BytesUploaded, "bytes uploaded")
	assert.Equal(t, 1, bo.Results.ResourceOwners, "count of resource owners")
	assert.NoError(t, bo.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(bo.Errors.Failure()))
	assert.Empty(t, bo.Errors.Recovered(), "incremental recoverable/iteration errors")
	assert.Equal(t, 1, mb.TimesCalled[events.BackupEnd], "backup-end events")
}
// CheckBackupIsInManifests asserts, once per category, that the backup
// run by bo appears among the merge-base snapshot manifests returned by
// kopia's base finder. Manifests are matched by comparing their
// backup-ID snapshot tag against bo's resulting BackupID.
func CheckBackupIsInManifests(
	t *testing.T,
	ctx context.Context, //revive:disable-line:context-as-argument
	kw *kopia.Wrapper,
	sw store.BackupStorer,
	bo *operations.BackupOperation,
	sel selectors.Selector,
	resourceOwner string,
	categories ...path.CategoryType,
) {
	for _, category := range categories {
		t.Run(category.String(), func(t *testing.T) {
			var (
				r     = identity.NewReason("", resourceOwner, sel.PathService(), category)
				tags  = map[string]string{kopia.TagBackupCategory: ""}
				found bool
			)

			bf, err := kw.NewBaseFinder(sw)
			require.NoError(t, err, clues.ToCore(err))

			mans := bf.FindBases(ctx, []identity.Reasoner{r}, tags)
			for _, man := range mans.MergeBases() {
				bID, ok := man.GetSnapshotTag(kopia.TagBackupID)
				// a manifest without the tag can't be matched; report and keep looking.
				if !assert.Truef(t, ok, "snapshot manifest %s missing backup ID tag", man.ItemDataSnapshot.ID) {
					continue
				}

				if bID == string(bo.Results.BackupID) {
					found = true
					break
				}
			}

			assert.True(t, found, "backup retrieved by previous snapshot manifest")
		})
	}
}
// RunMergeBaseGroupsUpdate runs a delta-tree backup to create a merge
// base, then runs a second backup forced to version.Groups9Update, and
// asserts whether the second run reused cached files: expectCached=true
// requires a non-zero PersistedCachedFiles count, false requires zero.
func RunMergeBaseGroupsUpdate(
	suite tester.Suite,
	sel selectors.Selector,
	expectCached bool,
) {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	var (
		mb      = evmock.NewBus()
		opts    = control.DefaultOptions()
		whatSet = deeTD.CategoryFromRepoRef
	)

	opts.ToggleFeatures.UseDeltaTree = true

	// Need outside the inner test case so bod lasts for the entire test.
	bo, bod := PrepNewTestBackupOp(
		t,
		ctx,
		mb,
		sel,
		opts,
		version.All8MigrateUserPNToID,
		count.New())
	defer bod.Close(t, ctx)

	suite.Run("makeMergeBackup", func() {
		t := suite.T()

		ctx, flush := tester.NewContext(t)
		defer flush()

		RunAndCheckBackup(t, ctx, &bo, mb, false)

		reasons, err := bod.Sel.Reasons(bod.Acct.ID(), false)
		require.NoError(t, err, clues.ToCore(err))

		// verify the base backup is discoverable per category.
		for _, reason := range reasons {
			CheckBackupIsInManifests(
				t,
				ctx,
				bod.KW,
				bod.SW,
				&bo,
				bod.Sel,
				bod.Sel.ID(),
				reason.Category())
		}

		_, expectDeets := deeTD.GetDeetsInBackup(
			t,
			ctx,
			bo.Results.BackupID,
			bod.Acct.ID(),
			bod.Sel.ID(),
			bod.Sel.PathService(),
			whatSet,
			bod.KMS,
			bod.SSS)
		deeTD.CheckBackupDetails(
			t,
			ctx,
			bo.Results.BackupID,
			whatSet,
			bod.KMS,
			bod.SSS,
			expectDeets,
			false)
	})

	suite.Run("makeIncrementalBackup", func() {
		t := suite.T()

		ctx, flush := tester.NewContext(t)
		defer flush()

		// NOTE(review): this inner opts shadows the outer opts and does
		// NOT set UseDeltaTree — confirm the forced-full run is
		// intentionally non-delta-tree.
		var (
			mb   = evmock.NewBus()
			opts = control.DefaultOptions()
		)

		forcedFull := NewTestBackupOp(
			t,
			ctx,
			bod,
			mb,
			opts,
			count.New())
		// forcing the version bump triggers the merge-base update path.
		forcedFull.BackupVersion = version.Groups9Update

		RunAndCheckBackup(t, ctx, &forcedFull, mb, false)

		reasons, err := bod.Sel.Reasons(bod.Acct.ID(), false)
		require.NoError(t, err, clues.ToCore(err))

		for _, reason := range reasons {
			CheckBackupIsInManifests(
				t,
				ctx,
				bod.KW,
				bod.SW,
				&forcedFull,
				bod.Sel,
				bod.Sel.ID(),
				reason.Category())
		}

		_, expectDeets := deeTD.GetDeetsInBackup(
			t,
			ctx,
			forcedFull.Results.BackupID,
			bod.Acct.ID(),
			bod.Sel.ID(),
			bod.Sel.PathService(),
			whatSet,
			bod.KMS,
			bod.SSS)
		deeTD.CheckBackupDetails(
			t,
			ctx,
			forcedFull.Results.BackupID,
			whatSet,
			bod.KMS,
			bod.SSS,
			expectDeets,
			false)

		// cached-file count flips expectation based on expectCached.
		check := assert.Zero
		if expectCached {
			check = assert.NotZero
		}

		check(
			t,
			forcedFull.Results.Counts[string(count.PersistedCachedFiles)],
			"cached items")
	})
}

View File

@ -1,4 +1,4 @@
package test_test
package exchange_test
import (
"context"
@ -19,6 +19,7 @@ import (
evmock "github.com/alcionai/corso/src/internal/events/mock"
exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
exchTD "github.com/alcionai/corso/src/internal/m365/service/exchange/testdata"
. "github.com/alcionai/corso/src/internal/operations/test/m365"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
@ -40,7 +41,7 @@ import (
type ExchangeBackupIntgSuite struct {
tester.Suite
its intgTesterSetup
its IntgTesterSetup
}
func TestExchangeBackupIntgSuite(t *testing.T) {
@ -52,7 +53,7 @@ func TestExchangeBackupIntgSuite(t *testing.T) {
}
func (suite *ExchangeBackupIntgSuite) SetupSuite() {
suite.its = newIntegrationTesterSetup(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
// MetadataFileNames produces the category-specific set of filenames used to
@ -79,9 +80,9 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
{
name: "Mail",
selector: func() *selectors.ExchangeBackup {
sel := selectors.NewExchangeBackup([]string{suite.its.user.ID})
sel := selectors.NewExchangeBackup([]string{suite.its.User.ID})
sel.Include(sel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch()))
sel.DiscreteOwner = suite.its.user.ID
sel.DiscreteOwner = suite.its.User.ID
return sel
},
@ -91,7 +92,7 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
{
name: "Contacts",
selector: func() *selectors.ExchangeBackup {
sel := selectors.NewExchangeBackup([]string{suite.its.user.ID})
sel := selectors.NewExchangeBackup([]string{suite.its.User.ID})
sel.Include(sel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()))
return sel
},
@ -101,7 +102,7 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
{
name: "Calendar Events",
selector: func() *selectors.ExchangeBackup {
sel := selectors.NewExchangeBackup([]string{suite.its.user.ID})
sel := selectors.NewExchangeBackup([]string{suite.its.User.ID})
sel.Include(sel.EventCalendars([]string{api.DefaultCalendar}, selectors.PrefixMatch()))
return sel
},
@ -124,33 +125,33 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
whatSet = deeTD.CategoryFromRepoRef
)
bo, bod := prepNewTestBackupOp(t, ctx, mb, sel, opts, version.Backup, counter)
defer bod.close(t, ctx)
bo, bod := PrepNewTestBackupOp(t, ctx, mb, sel, opts, version.Backup, counter)
defer bod.Close(t, ctx)
sel = bod.sel
sel = bod.Sel
userID := sel.ID()
m365, err := bod.acct.M365Config()
m365, err := bod.Acct.M365Config()
require.NoError(t, err, clues.ToCore(err))
// run the tests
runAndCheckBackup(t, ctx, &bo, mb, false)
checkBackupIsInManifests(
RunAndCheckBackup(t, ctx, &bo, mb, false)
CheckBackupIsInManifests(
t,
ctx,
bod.kw,
bod.sw,
bod.KW,
bod.SW,
&bo,
sel,
userID,
test.category)
checkMetadataFilesExist(
CheckMetadataFilesExist(
t,
ctx,
bo.Results.BackupID,
bod.kw,
bod.kms,
bod.KW,
bod.KMS,
m365.AzureTenantID,
userID,
path.ExchangeService,
@ -160,19 +161,19 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
t,
ctx,
bo.Results.BackupID,
bod.acct.ID(),
bod.Acct.ID(),
userID,
path.ExchangeService,
whatSet,
bod.kms,
bod.sss)
bod.KMS,
bod.SSS)
deeTD.CheckBackupDetails(
t,
ctx,
bo.Results.BackupID,
whatSet,
bod.kms,
bod.sss,
bod.KMS,
bod.SSS,
expectDeets,
false)
@ -181,7 +182,7 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
// produces fewer results than the last backup.
var (
incMB = evmock.NewBus()
incBO = newTestBackupOp(
incBO = NewTestBackupOp(
t,
ctx,
bod,
@ -190,22 +191,22 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
counter)
)
runAndCheckBackup(t, ctx, &incBO, incMB, true)
checkBackupIsInManifests(
RunAndCheckBackup(t, ctx, &incBO, incMB, true)
CheckBackupIsInManifests(
t,
ctx,
bod.kw,
bod.sw,
bod.KW,
bod.SW,
&incBO,
sel,
userID,
test.category)
checkMetadataFilesExist(
CheckMetadataFilesExist(
t,
ctx,
incBO.Results.BackupID,
bod.kw,
bod.kms,
bod.KW,
bod.KMS,
m365.AzureTenantID,
userID,
path.ExchangeService,
@ -215,8 +216,8 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
ctx,
incBO.Results.BackupID,
whatSet,
bod.kms,
bod.sss,
bod.KMS,
bod.SSS,
expectDeets,
false)
@ -260,16 +261,16 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
path.ContactsCategory: MetadataFileNames(path.ContactsCategory),
// path.EventsCategory: exchange.MetadataFileNames(path.EventsCategory),
}
container1 = fmt.Sprintf("%s%d_%s", incrementalsDestContainerPrefix, 1, now)
container2 = fmt.Sprintf("%s%d_%s", incrementalsDestContainerPrefix, 2, now)
container3 = fmt.Sprintf("%s%d_%s", incrementalsDestContainerPrefix, 3, now)
containerRename = fmt.Sprintf("%s%d_%s", incrementalsDestContainerPrefix, 4, now)
container1 = fmt.Sprintf("%s%d_%s", IncrementalsDestContainerPrefix, 1, now)
container2 = fmt.Sprintf("%s%d_%s", IncrementalsDestContainerPrefix, 2, now)
container3 = fmt.Sprintf("%s%d_%s", IncrementalsDestContainerPrefix, 3, now)
containerRename = fmt.Sprintf("%s%d_%s", IncrementalsDestContainerPrefix, 4, now)
// container3 and containerRename don't exist yet. Those will get created
// later on during the tests. Putting their identifiers into the selector
// at this point is harmless.
containers = []string{container1, container2, container3, containerRename}
sel = selectors.NewExchangeBackup([]string{suite.its.user.ID})
sel = selectors.NewExchangeBackup([]string{suite.its.User.ID})
whatSet = deeTD.CategoryFromRepoRef
opts = control.DefaultOptions()
)
@ -309,7 +310,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
mailDBF := func(id, timeStamp, subject, body string) []byte {
return exchMock.MessageWith(
suite.its.user.ID, suite.its.user.ID, suite.its.user.ID,
suite.its.User.ID, suite.its.User.ID, suite.its.User.ID,
subject, body, body,
now, now, now, now)
}
@ -326,7 +327,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
eventDBF := func(id, timeStamp, subject, body string) []byte {
return exchMock.EventWith(
suite.its.user.ID, subject, body, body,
suite.its.User.ID, subject, body, body,
exchMock.NoOriginalStartDate, now, now,
exchMock.NoRecurrence, exchMock.NoAttendees,
exchMock.NoAttachments, exchMock.NoCancelledOccurrences,
@ -335,7 +336,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
// test data set
dataset := map[path.CategoryType]struct {
dbf dataBuilderFunc
dbf DataBuilderFunc
dests map[string]contDeets
}{
path.EmailCategory: {
@ -447,7 +448,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
// populate initial test data
for category, gen := range dataset {
for destName := range gen.dests {
generateContainerOfItems(
GenerateContainerOfItems(
t,
ctx,
ctrl,
@ -477,11 +478,11 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
}
}
bo, bod := prepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter)
defer bod.close(t, ctx)
bo, bod := PrepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter)
defer bod.Close(t, ctx)
// run the initial backup
runAndCheckBackup(t, ctx, &bo, mb, false)
RunAndCheckBackup(t, ctx, &bo, mb, false)
// precheck to ensure the expectedDeets are correct.
// if we fail here, the expectedDeets were populated incorrectly.
@ -490,8 +491,8 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
ctx,
bo.Results.BackupID,
whatSet,
bod.kms,
bod.sss,
bod.KMS,
bod.SSS,
expectDeets,
true)
@ -589,14 +590,14 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
name: "add a new folder",
updateUserData: func(t *testing.T, ctx context.Context) {
for category, gen := range dataset {
generateContainerOfItems(
GenerateContainerOfItems(
t,
ctx,
ctrl,
service,
category,
selectors.NewExchangeRestore([]string{uidn.ID()}).Selector,
creds.AzureTenantID, suite.its.user.ID, "", "", container3,
creds.AzureTenantID, suite.its.User.ID, "", "", container3,
2,
version.Backup,
gen.dbf)
@ -672,7 +673,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
switch category {
case path.EmailCategory:
_, itemData := generateItemData(t, category, uidn.ID(), mailDBF)
_, itemData := GenerateItemData(t, category, uidn.ID(), mailDBF)
body, err := api.BytesToMessageable(itemData)
require.NoErrorf(t, err, "transforming mail bytes to messageable: %+v", clues.ToCore(err))
@ -685,7 +686,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
ptr.Val(itm.GetId()))
case path.ContactsCategory:
_, itemData := generateItemData(t, category, uidn.ID(), contactDBF)
_, itemData := GenerateItemData(t, category, uidn.ID(), contactDBF)
body, err := api.BytesToContactable(itemData)
require.NoErrorf(t, err, "transforming contact bytes to contactable: %+v", clues.ToCore(err))
@ -698,7 +699,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
ptr.Val(itm.GetId()))
case path.EventsCategory:
_, itemData := generateItemData(t, category, uidn.ID(), eventDBF)
_, itemData := GenerateItemData(t, category, uidn.ID(), eventDBF)
body, err := api.BytesToEventable(itemData)
require.NoErrorf(t, err, "transforming event bytes to eventable: %+v", clues.ToCore(err))
@ -819,7 +820,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
ctx, flush := tester.WithContext(t, ctx)
defer flush()
incBO := newTestBackupOp(t, ctx, bod, incMB, opts, counter)
incBO := NewTestBackupOp(t, ctx, bod, incMB, opts, counter)
suite.Run("PreTestSetup", func() {
t := suite.T()
@ -835,21 +836,21 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
bupID := incBO.Results.BackupID
checkBackupIsInManifests(
CheckBackupIsInManifests(
t,
ctx,
bod.kw,
bod.sw,
bod.KW,
bod.SW,
&incBO,
sels,
uidn.ID(),
maps.Keys(categories)...)
checkMetadataFilesExist(
CheckMetadataFilesExist(
t,
ctx,
bupID,
bod.kw,
bod.kms,
bod.KW,
bod.KMS,
atid,
uidn.ID(),
service,
@ -859,8 +860,8 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
ctx,
bupID,
whatSet,
bod.kms,
bod.sss,
bod.KMS,
bod.SSS,
expectDeets,
true)
@ -889,7 +890,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
type ExchangeBackupNightlyIntgSuite struct {
tester.Suite
its intgTesterSetup
its IntgTesterSetup
}
func TestExchangeBackupNightlyIntgSuite(t *testing.T) {
@ -901,22 +902,22 @@ func TestExchangeBackupNightlyIntgSuite(t *testing.T) {
}
func (suite *ExchangeBackupNightlyIntgSuite) SetupSuite() {
suite.its = newIntegrationTesterSetup(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *ExchangeBackupNightlyIntgSuite) TestBackup_Run_exchangeVersion9MergeBase() {
sel := selectors.NewExchangeBackup([]string{suite.its.user.ID})
sel := selectors.NewExchangeBackup([]string{suite.its.User.ID})
sel.Include(
sel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()),
// sel.EventCalendars([]string{api.DefaultCalendar}, selectors.PrefixMatch()),
sel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch()))
runMergeBaseGroupsUpdate(suite, sel.Selector, true)
RunMergeBaseGroupsUpdate(suite, sel.Selector, true)
}
type ExchangeRestoreNightlyIntgSuite struct {
tester.Suite
its intgTesterSetup
its IntgTesterSetup
}
func TestExchangeRestoreIntgSuite(t *testing.T) {
@ -928,7 +929,7 @@ func TestExchangeRestoreIntgSuite(t *testing.T) {
}
func (suite *ExchangeRestoreNightlyIntgSuite) SetupSuite() {
suite.its = newIntegrationTesterSetup(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
type clientItemPager interface {
@ -959,7 +960,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
// a backup is required to run restores
baseSel := selectors.NewExchangeBackup([]string{suite.its.user.ID})
baseSel := selectors.NewExchangeBackup([]string{suite.its.User.ID})
baseSel.Include(
// events cannot be run, for the same reason as incremental backups: the user needs
// to have their account recycled.
@ -967,7 +968,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
baseSel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()),
baseSel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch()))
baseSel.DiscreteOwner = suite.its.user.ID
baseSel.DiscreteOwner = suite.its.User.ID
var (
mb = evmock.NewBus()
@ -975,10 +976,10 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
opts = control.DefaultOptions()
)
bo, bod := prepNewTestBackupOp(t, ctx, mb, baseSel.Selector, opts, version.Backup, counter)
defer bod.close(t, ctx)
bo, bod := PrepNewTestBackupOp(t, ctx, mb, baseSel.Selector, opts, version.Backup, counter)
defer bod.Close(t, ctx)
runAndCheckBackup(t, ctx, &bo, mb, false)
RunAndCheckBackup(t, ctx, &bo, mb, false)
rsel, err := baseSel.ToExchangeRestore()
require.NoError(t, err, clues.ToCore(err))
@ -1002,8 +1003,8 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
}
testCategories = map[path.CategoryType]clientItemPager{
path.ContactsCategory: suite.its.ac.Contacts(),
path.EmailCategory: suite.its.ac.Mail(),
path.ContactsCategory: suite.its.AC.Contacts(),
path.EmailCategory: suite.its.AC.Mail(),
// path.EventsCategory: suite.its.ac.Events(),
}
)
@ -1021,10 +1022,10 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
restoreCfg.OnCollision = control.Copy
ro, _ := prepNewTestRestoreOp(
ro, _ := PrepNewTestRestoreOp(
t,
ctx,
bod.st,
bod.St,
bo.Results.BackupID,
mb,
ctr1,
@ -1032,7 +1033,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
opts,
restoreCfg)
runAndCheckRestore(t, ctx, &ro, mb, false)
RunAndCheckRestore(t, ctx, &ro, mb, false)
// get all files in folder, use these as the base
// set of files to compare against.
@ -1058,7 +1059,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
})
}
checkRestoreCounts(t, ctr1, 0, 0, countItemsInRestore)
CheckRestoreCounts(t, ctr1, 0, 0, countItemsInRestore)
})
// Exit the test if the baseline failed as it'll just cause more failures
@ -1080,10 +1081,10 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
restoreCfg.OnCollision = control.Skip
ro, _ := prepNewTestRestoreOp(
ro, _ := PrepNewTestRestoreOp(
t,
ctx,
bod.st,
bod.St,
bo.Results.BackupID,
mb,
ctr2,
@ -1091,14 +1092,14 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
opts,
restoreCfg)
deets := runAndCheckRestore(t, ctx, &ro, mb, false)
deets := RunAndCheckRestore(t, ctx, &ro, mb, false)
assert.Zero(
t,
len(deets.Entries),
"no items should have been restored")
checkRestoreCounts(t, ctr2, countItemsInRestore, 0, 0)
CheckRestoreCounts(t, ctr2, countItemsInRestore, 0, 0)
result := map[string]string{}
@ -1109,7 +1110,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
ctx, flush := tester.NewContext(t)
defer flush()
m := filterCollisionKeyResults(
m := FilterCollisionKeyResults(
t,
ctx,
userID,
@ -1141,10 +1142,10 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
restoreCfg.OnCollision = control.Replace
ro, _ := prepNewTestRestoreOp(
ro, _ := PrepNewTestRestoreOp(
t,
ctx,
bod.st,
bod.St,
bo.Results.BackupID,
mb,
ctr3,
@ -1152,7 +1153,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
opts,
restoreCfg)
deets := runAndCheckRestore(t, ctx, &ro, mb, false)
deets := RunAndCheckRestore(t, ctx, &ro, mb, false)
filtEnts := []details.Entry{}
for _, e := range deets.Entries {
@ -1163,7 +1164,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
assert.Len(t, filtEnts, countItemsInRestore, "every item should have been replaced")
checkRestoreCounts(t, ctr3, 0, countItemsInRestore, 0)
CheckRestoreCounts(t, ctr3, 0, countItemsInRestore, 0)
result := map[string]string{}
@ -1174,7 +1175,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
ctx, flush := tester.NewContext(t)
defer flush()
m := filterCollisionKeyResults(
m := FilterCollisionKeyResults(
t,
ctx,
userID,
@ -1211,10 +1212,10 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
restoreCfg.OnCollision = control.Copy
ro, _ := prepNewTestRestoreOp(
ro, _ := PrepNewTestRestoreOp(
t,
ctx,
bod.st,
bod.St,
bo.Results.BackupID,
mb,
ctr4,
@ -1222,7 +1223,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
opts,
restoreCfg)
deets := runAndCheckRestore(t, ctx, &ro, mb, false)
deets := RunAndCheckRestore(t, ctx, &ro, mb, false)
filtEnts := []details.Entry{}
for _, e := range deets.Entries {
@ -1233,7 +1234,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
assert.Len(t, filtEnts, countItemsInRestore, "every item should have been copied")
checkRestoreCounts(t, ctr4, 0, 0, countItemsInRestore)
CheckRestoreCounts(t, ctr4, 0, 0, countItemsInRestore)
result := map[string]string{}
@ -1244,7 +1245,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
ctx, flush := tester.NewContext(t)
defer flush()
m := filterCollisionKeyResults(
m := FilterCollisionKeyResults(
t,
ctx,
userID,
@ -1276,7 +1277,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeAlternateP
// a backup is required to run restores
baseSel := selectors.NewExchangeBackup([]string{suite.its.user.ID})
baseSel := selectors.NewExchangeBackup([]string{suite.its.User.ID})
baseSel.Include(
// events cannot be run, for the same reason as incremental backups: the user needs
// to have their account recycled.
@ -1284,7 +1285,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeAlternateP
baseSel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()),
baseSel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch()))
baseSel.DiscreteOwner = suite.its.user.ID
baseSel.DiscreteOwner = suite.its.User.ID
var (
mb = evmock.NewBus()
@ -1292,10 +1293,10 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeAlternateP
opts = control.DefaultOptions()
)
bo, bod := prepNewTestBackupOp(t, ctx, mb, baseSel.Selector, opts, version.Backup, counter)
defer bod.close(t, ctx)
bo, bod := PrepNewTestBackupOp(t, ctx, mb, baseSel.Selector, opts, version.Backup, counter)
defer bod.Close(t, ctx)
runAndCheckBackup(t, ctx, &bo, mb, false)
RunAndCheckBackup(t, ctx, &bo, mb, false)
rsel, err := baseSel.ToExchangeRestore()
require.NoError(t, err, clues.ToCore(err))
@ -1303,11 +1304,11 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeAlternateP
var (
restoreCfg = ctrlTD.DefaultRestoreConfig("exchange_restore_to_user")
sel = rsel.Selector
userID = suite.its.user.ID
secondaryUserID = suite.its.secondaryUser.ID
userID = suite.its.User.ID
secondaryUserID = suite.its.SecondaryUser.ID
uid = userID
acCont = suite.its.ac.Contacts()
acMail = suite.its.ac.Mail()
acCont = suite.its.AC.Contacts()
acMail = suite.its.AC.Mail()
// acEvts = suite.its.ac.Events()
firstCtr = count.New()
)
@ -1317,10 +1318,10 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeAlternateP
// first restore to the current user
ro1, _ := prepNewTestRestoreOp(
ro1, _ := PrepNewTestRestoreOp(
t,
ctx,
bod.st,
bod.St,
bo.Results.BackupID,
mb,
firstCtr,
@ -1328,7 +1329,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeAlternateP
opts,
restoreCfg)
runAndCheckRestore(t, ctx, &ro1, mb, false)
RunAndCheckRestore(t, ctx, &ro1, mb, false)
// get all files in folder, use these as the base
// set of files to compare against.
@ -1376,10 +1377,10 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeAlternateP
secondCtr := count.New()
restoreCfg.ProtectedResource = uid
ro2, _ := prepNewTestRestoreOp(
ro2, _ := PrepNewTestRestoreOp(
t,
ctx,
bod.st,
bod.St,
bo.Results.BackupID,
mb,
secondCtr,
@ -1387,7 +1388,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeAlternateP
opts,
restoreCfg)
runAndCheckRestore(t, ctx, &ro2, mb, false)
RunAndCheckRestore(t, ctx, &ro2, mb, false)
var (
secondaryItemIDs = map[path.CategoryType]map[string]struct{}{}

View File

@ -1,4 +1,4 @@
package test_test
package groups_test
import (
"context"
@ -9,6 +9,7 @@ import (
evmock "github.com/alcionai/corso/src/internal/events/mock"
"github.com/alcionai/corso/src/internal/m365/collection/drive"
. "github.com/alcionai/corso/src/internal/operations/test/m365"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
@ -24,7 +25,7 @@ import (
type GroupsBackupIntgSuite struct {
tester.Suite
its intgTesterSetup
its IntgTesterSetup
}
func TestGroupsBackupIntgSuite(t *testing.T) {
@ -36,13 +37,117 @@ func TestGroupsBackupIntgSuite(t *testing.T) {
}
func (suite *GroupsBackupIntgSuite) SetupSuite() {
suite.its = newIntegrationTesterSetup(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
// TODO(v0 export): Channels export
func (suite *GroupsBackupIntgSuite) TestBackup_Run_groups() {
var (
resourceID = suite.its.Group.ID
sel = selectors.NewGroupsBackup([]string{resourceID})
)
sel.Include(selTD.GroupsBackupLibraryFolderScope(sel))
RunBasicDriveishBackupTests(
suite,
path.GroupsService,
control.DefaultOptions(),
sel.Selector)
}
func (suite *GroupsBackupIntgSuite) TestBackup_Run_incrementalGroups() {
sel := selectors.NewGroupsRestore([]string{suite.its.group.ID})
runGroupsIncrementalBackupTests(suite, suite.its, control.DefaultOptions())
}
func (suite *GroupsBackupIntgSuite) TestBackup_Run_extensionsGroups() {
var (
resourceID = suite.its.Group.ID
sel = selectors.NewGroupsBackup([]string{resourceID})
)
sel.Include(selTD.GroupsBackupLibraryFolderScope(sel))
RunDriveishBackupWithExtensionsTests(
suite,
path.GroupsService,
control.DefaultOptions(),
sel.Selector)
}
// ---------------------------------------------------------------------------
// test version using the tree-based drive item processor
// ---------------------------------------------------------------------------
type GroupsBackupTreeIntgSuite struct {
tester.Suite
its IntgTesterSetup
}
func TestGroupsBackupTreeIntgSuite(t *testing.T) {
suite.Run(t, &GroupsBackupTreeIntgSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tconfig.M365AcctCredEnvs, storeTD.AWSStorageCredEnvs}),
})
}
func (suite *GroupsBackupTreeIntgSuite) SetupSuite() {
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *GroupsBackupTreeIntgSuite) TestBackup_Run_treeGroups() {
var (
resourceID = suite.its.Group.ID
sel = selectors.NewGroupsBackup([]string{resourceID})
opts = control.DefaultOptions()
)
sel.Include(selTD.GroupsBackupLibraryFolderScope(sel))
opts.ToggleFeatures.UseDeltaTree = true
RunBasicDriveishBackupTests(
suite,
path.GroupsService,
opts,
sel.Selector)
}
func (suite *GroupsBackupTreeIntgSuite) TestBackup_Run_treeIncrementalGroups() {
opts := control.DefaultOptions()
opts.ToggleFeatures.UseDeltaTree = true
runGroupsIncrementalBackupTests(suite, suite.its, opts)
}
func (suite *GroupsBackupTreeIntgSuite) TestBackup_Run_treeExtensionsGroups() {
var (
resourceID = suite.its.Group.ID
sel = selectors.NewGroupsBackup([]string{resourceID})
opts = control.DefaultOptions()
)
sel.Include(selTD.GroupsBackupLibraryFolderScope(sel))
opts.ToggleFeatures.UseDeltaTree = true
RunDriveishBackupWithExtensionsTests(
suite,
path.GroupsService,
opts,
sel.Selector)
}
// ---------------------------------------------------------------------------
// common backup test wrappers
// ---------------------------------------------------------------------------
func runGroupsIncrementalBackupTests(
suite tester.Suite,
its IntgTesterSetup,
opts control.Options,
) {
sel := selectors.NewGroupsRestore([]string{its.Group.ID})
ic := func(cs []string) selectors.Selector {
sel.Include(sel.LibraryFolders(cs, selectors.PrefixMatch()))
@ -53,24 +158,25 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_incrementalGroups() {
t *testing.T,
ctx context.Context,
) string {
return suite.its.group.RootSite.DriveID
return its.Group.RootSite.DriveID
}
gtsi := func(
t *testing.T,
ctx context.Context,
) string {
return suite.its.group.RootSite.ID
return its.Group.RootSite.ID
}
grh := func(ac api.Client) drive.RestoreHandler {
return drive.NewSiteRestoreHandler(ac, path.GroupsService)
}
runDriveIncrementalTest(
RunIncrementalDriveishBackupTest(
suite,
suite.its.group.ID,
suite.its.user.ID,
opts,
its.Group.ID,
its.User.ID,
path.GroupsService,
path.LibrariesCategory,
ic,
@ -88,7 +194,7 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groups9VersionBumpBackup() {
var (
mb = evmock.NewBus()
sel = selectors.NewGroupsBackup([]string{suite.its.group.ID})
sel = selectors.NewGroupsBackup([]string{suite.its.Group.ID})
opts = control.DefaultOptions()
whatSet = deeTD.CategoryFromRepoRef
)
@ -98,7 +204,7 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groups9VersionBumpBackup() {
selTD.GroupsBackupChannelScope(sel),
sel.Conversation(selectors.Any()))
bo, bod := prepNewTestBackupOp(
bo, bod := PrepNewTestBackupOp(
t,
ctx,
mb,
@ -106,41 +212,41 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groups9VersionBumpBackup() {
opts,
version.All8MigrateUserPNToID,
count.New())
defer bod.close(t, ctx)
defer bod.Close(t, ctx)
runAndCheckBackup(t, ctx, &bo, mb, false)
checkBackupIsInManifests(
RunAndCheckBackup(t, ctx, &bo, mb, false)
CheckBackupIsInManifests(
t,
ctx,
bod.kw,
bod.sw,
bod.KW,
bod.SW,
&bo,
bod.sel,
bod.sel.ID(),
bod.Sel,
bod.Sel.ID(),
path.ChannelMessagesCategory)
_, expectDeets := deeTD.GetDeetsInBackup(
t,
ctx,
bo.Results.BackupID,
bod.acct.ID(),
bod.sel.ID(),
bod.Acct.ID(),
bod.Sel.ID(),
path.GroupsService,
whatSet,
bod.kms,
bod.sss)
bod.KMS,
bod.SSS)
deeTD.CheckBackupDetails(
t,
ctx,
bo.Results.BackupID,
whatSet,
bod.kms,
bod.sss,
bod.KMS,
bod.SSS,
expectDeets,
false)
mb = evmock.NewBus()
forcedFull := newTestBackupOp(
forcedFull := NewTestBackupOp(
t,
ctx,
bod,
@ -149,34 +255,34 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groups9VersionBumpBackup() {
count.New())
forcedFull.BackupVersion = version.Groups9Update
runAndCheckBackup(t, ctx, &forcedFull, mb, false)
checkBackupIsInManifests(
RunAndCheckBackup(t, ctx, &forcedFull, mb, false)
CheckBackupIsInManifests(
t,
ctx,
bod.kw,
bod.sw,
bod.KW,
bod.SW,
&forcedFull,
bod.sel,
bod.sel.ID(),
bod.Sel,
bod.Sel.ID(),
path.ChannelMessagesCategory)
_, expectDeets = deeTD.GetDeetsInBackup(
t,
ctx,
forcedFull.Results.BackupID,
bod.acct.ID(),
bod.sel.ID(),
bod.Acct.ID(),
bod.Sel.ID(),
path.GroupsService,
whatSet,
bod.kms,
bod.sss)
bod.KMS,
bod.SSS)
deeTD.CheckBackupDetails(
t,
ctx,
forcedFull.Results.BackupID,
whatSet,
bod.kms,
bod.sss,
bod.KMS,
bod.SSS,
expectDeets,
false)
@ -198,7 +304,7 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsBasic() {
var (
mb = evmock.NewBus()
counter = count.New()
sel = selectors.NewGroupsBackup([]string{suite.its.group.ID})
sel = selectors.NewGroupsBackup([]string{suite.its.Group.ID})
opts = control.DefaultOptions()
whatSet = deeTD.CategoryFromRepoRef
)
@ -208,37 +314,37 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsBasic() {
selTD.GroupsBackupChannelScope(sel),
sel.Conversation(selectors.Any()))
bo, bod := prepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter)
defer bod.close(t, ctx)
bo, bod := PrepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter)
defer bod.Close(t, ctx)
runAndCheckBackup(t, ctx, &bo, mb, false)
checkBackupIsInManifests(
RunAndCheckBackup(t, ctx, &bo, mb, false)
CheckBackupIsInManifests(
t,
ctx,
bod.kw,
bod.sw,
bod.KW,
bod.SW,
&bo,
bod.sel,
bod.sel.ID(),
bod.Sel,
bod.Sel.ID(),
path.ChannelMessagesCategory)
_, expectDeets := deeTD.GetDeetsInBackup(
t,
ctx,
bo.Results.BackupID,
bod.acct.ID(),
bod.sel.ID(),
bod.Acct.ID(),
bod.Sel.ID(),
path.GroupsService,
whatSet,
bod.kms,
bod.sss)
bod.KMS,
bod.SSS)
deeTD.CheckBackupDetails(
t,
ctx,
bo.Results.BackupID,
whatSet,
bod.kms,
bod.sss,
bod.KMS,
bod.SSS,
expectDeets,
false)
}
@ -252,30 +358,30 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsExtensions() {
var (
mb = evmock.NewBus()
counter = count.New()
sel = selectors.NewGroupsBackup([]string{suite.its.group.ID})
sel = selectors.NewGroupsBackup([]string{suite.its.Group.ID})
opts = control.DefaultOptions()
tenID = tconfig.M365TenantID(t)
svc = path.GroupsService
ws = deeTD.DriveIDFromRepoRef
)
opts.ItemExtensionFactory = getTestExtensionFactories()
opts.ItemExtensionFactory = GetTestExtensionFactories()
// does not apply to channel messages
sel.Include(selTD.GroupsBackupLibraryFolderScope(sel))
bo, bod := prepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter)
defer bod.close(t, ctx)
bo, bod := PrepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter)
defer bod.Close(t, ctx)
runAndCheckBackup(t, ctx, &bo, mb, false)
checkBackupIsInManifests(
RunAndCheckBackup(t, ctx, &bo, mb, false)
CheckBackupIsInManifests(
t,
ctx,
bod.kw,
bod.sw,
bod.KW,
bod.SW,
&bo,
bod.sel,
bod.sel.ID(),
bod.Sel,
bod.Sel.ID(),
path.LibrariesCategory)
bID := bo.Results.BackupID
@ -285,32 +391,32 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsExtensions() {
ctx,
bID,
tenID,
bod.sel.ID(),
bod.Sel.ID(),
svc,
ws,
bod.kms,
bod.sss)
bod.KMS,
bod.SSS)
deeTD.CheckBackupDetails(
t,
ctx,
bID,
ws,
bod.kms,
bod.sss,
bod.KMS,
bod.SSS,
expectDeets,
false)
// Check that the extensions are in the backup
for _, ent := range deets.Entries {
if ent.Folder == nil {
verifyExtensionData(t, ent.ItemInfo, path.GroupsService)
VerifyExtensionData(t, ent.ItemInfo, path.GroupsService)
}
}
}
type GroupsBackupNightlyIntgSuite struct {
tester.Suite
its intgTesterSetup
its IntgTesterSetup
}
func TestGroupsBackupNightlyIntgSuite(t *testing.T) {
@ -322,30 +428,30 @@ func TestGroupsBackupNightlyIntgSuite(t *testing.T) {
}
func (suite *GroupsBackupNightlyIntgSuite) SetupSuite() {
suite.its = newIntegrationTesterSetup(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *GroupsBackupNightlyIntgSuite) TestBackup_Run_groupsVersion9MergeBase() {
sel := selectors.NewGroupsBackup([]string{suite.its.group.ID})
sel := selectors.NewGroupsBackup([]string{suite.its.Group.ID})
sel.Include(
selTD.GroupsBackupLibraryFolderScope(sel),
selTD.GroupsBackupChannelScope(sel))
runMergeBaseGroupsUpdate(suite, sel.Selector, false)
RunMergeBaseGroupsUpdate(suite, sel.Selector, false)
}
func (suite *GroupsBackupNightlyIntgSuite) TestBackup_Run_groupsVersion9AssistBases() {
sel := selectors.NewGroupsBackup([]string{suite.its.group.ID})
sel := selectors.NewGroupsBackup([]string{suite.its.Group.ID})
sel.Include(
selTD.GroupsBackupLibraryFolderScope(sel),
selTD.GroupsBackupChannelScope(sel))
runDriveAssistBaseGroupsUpdate(suite, sel.Selector, false)
RunDriveAssistBaseGroupsUpdate(suite, sel.Selector, false)
}
type GroupsRestoreNightlyIntgSuite struct {
tester.Suite
its intgTesterSetup
its IntgTesterSetup
}
func TestGroupsRestoreIntgSuite(t *testing.T) {
@ -357,36 +463,20 @@ func TestGroupsRestoreIntgSuite(t *testing.T) {
}
func (suite *GroupsRestoreNightlyIntgSuite) SetupSuite() {
suite.its = newIntegrationTesterSetup(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *GroupsRestoreNightlyIntgSuite) TestRestore_Run_groupsWithAdvancedOptions() {
sel := selectors.NewGroupsBackup([]string{suite.its.group.ID})
sel := selectors.NewGroupsBackup([]string{suite.its.Group.ID})
sel.Include(selTD.GroupsBackupLibraryFolderScope(sel))
sel.Filter(sel.Library("documents"))
sel.DiscreteOwner = suite.its.group.ID
sel.DiscreteOwner = suite.its.Group.ID
runDriveRestoreWithAdvancedOptions(
RunDriveRestoreWithAdvancedOptions(
suite.T(),
suite,
suite.its.ac,
suite.its.AC,
sel.Selector,
suite.its.group.RootSite.DriveID,
suite.its.group.RootSite.DriveRootFolderID)
suite.its.Group.RootSite.DriveID,
suite.its.Group.RootSite.DriveRootFolderID)
}
// func (suite *GroupsRestoreNightlyIntgSuite) TestRestore_Run_groupsAlternateProtectedResource() {
// sel := selectors.NewGroupsBackup([]string{suite.its.group.ID})
// sel.Include(selTD.GroupsBackupLibraryFolderScope(sel))
// sel.Filter(sel.Library("documents"))
// sel.DiscreteOwner = suite.its.group.ID
// runDriveRestoreToAlternateProtectedResource(
// suite.T(),
// suite,
// suite.its.ac,
// sel.Selector,
// suite.its.group.RootSite,
// suite.its.secondaryGroup.RootSite,
// suite.its.secondaryGroup.ID)
// }

View File

@ -1,4 +1,4 @@
package test_test
package m365
import (
"context"
@ -13,28 +13,19 @@ import (
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr"
strTD "github.com/alcionai/corso/src/internal/common/str/testdata"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/events"
evmock "github.com/alcionai/corso/src/internal/events/mock"
"github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/m365"
exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/streamstore"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details"
deeTD "github.com/alcionai/corso/src/pkg/backup/details/testdata"
"github.com/alcionai/corso/src/pkg/backup/identity"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/repository"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/dttm"
"github.com/alcionai/corso/src/pkg/extensions"
@ -44,9 +35,6 @@ import (
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
gmock "github.com/alcionai/corso/src/pkg/services/m365/api/graph/mock"
"github.com/alcionai/corso/src/pkg/storage"
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
"github.com/alcionai/corso/src/pkg/store"
)
// ---------------------------------------------------------------------------
@ -55,7 +43,7 @@ import (
// GockClient produces a new exchange api client that can be
// mocked using gock.
func gockClient(creds account.M365Config, counter *count.Bus) (api.Client, error) {
func GockClient(creds account.M365Config, counter *count.Bus) (api.Client, error) {
s, err := gmock.NewService(creds, counter)
if err != nil {
return api.Client{}, err
@ -76,231 +64,9 @@ func gockClient(creds account.M365Config, counter *count.Bus) (api.Client, error
// Does not use the tester.DefaultTestRestoreDestination syntax as some of these
// items are created directly, not as a result of restoration, and we want to ensure
// they get clearly selected without accidental overlap.
const incrementalsDestContainerPrefix = "incrementals_ci_"
const IncrementalsDestContainerPrefix = "incrementals_ci_"
// backupOpDependencies bundles every client and storage handle required to
// run a backup operation in tests, along with a closer that releases the
// underlying kopia connection.
type backupOpDependencies struct {
	acct account.Account      // m365 account credentials under test
	ctrl *m365.Controller     // service controller used to produce collections
	kms  *kopia.ModelStore    // model store holding backup/detail models
	kw   *kopia.Wrapper       // kopia wrapper holding item data snapshots
	sel  selectors.Selector   // selector resolved for the operation's owner
	sss  streamstore.Streamer // streamstore for backup details/fault blobs
	st   storage.Storage      // repo storage configuration (prefixed s3)
	sw   store.BackupStorer   // backup-model storer wrapping kms

	closer func() // releases the shared kopia connection ref
}
// close tears down the backup op dependencies: first the base closer (which
// drops the shared kopia connection ref), then the kopia wrapper and model
// store, asserting (not failing fast) so all closers get a chance to run.
func (bod *backupOpDependencies) close(
	t *testing.T,
	ctx context.Context, //revive:disable-line:context-as-argument
) {
	bod.closer()

	if bod.kw != nil {
		err := bod.kw.Close(ctx)
		assert.NoErrorf(t, err, "kw close: %+v", clues.ToCore(err))
	}

	if bod.kms != nil {
		// fix: this branch previously closed bod.kw a second time, leaving
		// the model store open and double-closing the wrapper.
		err := bod.kms.Close(ctx)
		assert.NoErrorf(t, err, "kms close: %+v", clues.ToCore(err))
	}
}
// prepNewTestBackupOp generates all clients required to run a backup operation,
// returning both a backup operation created with those clients, as well as
// the clients themselves.
// prepNewTestBackupOp generates all clients required to run a backup operation,
// returning both a backup operation created with those clients, as well as
// the clients themselves.  On any setup failure the already-created
// dependencies are closed and the test is aborted.
func prepNewTestBackupOp(
	t *testing.T,
	ctx context.Context, //revive:disable-line:context-as-argument
	bus events.Eventer,
	sel selectors.Selector,
	opts control.Options,
	backupVersion int,
	counter *count.Bus,
) (
	operations.BackupOperation,
	*backupOpDependencies,
) {
	bod := &backupOpDependencies{
		acct: tconfig.NewM365Account(t),
		st:   storeTD.NewPrefixedS3Storage(t),
	}

	repoNameHash := strTD.NewHashForRepoConfigName()

	k := kopia.NewConn(bod.st)

	err := k.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
	require.NoError(t, err, clues.ToCore(err))

	// this deferred check watches the shared err variable: any later setup
	// failure that assigns err triggers cleanup and aborts the test.
	defer func() {
		if err != nil {
			bod.close(t, ctx)
			t.FailNow()
		}
	}()

	// kopiaRef comes with a count of 1 and Wrapper bumps it again,
	// so we're safe to close here.
	bod.closer = func() {
		err := k.Close(ctx)
		assert.NoErrorf(t, err, "k close: %+v", clues.ToCore(err))
	}

	bod.kw, err = kopia.NewWrapper(k)
	if !assert.NoError(t, err, clues.ToCore(err)) {
		return operations.BackupOperation{}, nil
	}

	bod.kms, err = kopia.NewModelStore(k)
	if !assert.NoError(t, err, clues.ToCore(err)) {
		return operations.BackupOperation{}, nil
	}

	bod.sw = store.NewWrapper(bod.kms)

	bod.ctrl, bod.sel = ControllerWithSelector(
		t,
		ctx,
		bod.acct,
		sel,
		nil,
		bod.close,
		counter)

	bo := newTestBackupOp(
		t,
		ctx,
		bod,
		bus,
		opts,
		counter)
	bo.BackupVersion = backupVersion

	bod.sss = streamstore.NewStreamer(
		bod.kw,
		bod.acct.ID(),
		bod.sel.PathService())

	return bo, bod
}
// newTestBackupOp accepts the clients required to compose a backup operation, plus
// any other metadata, and uses them to generate a new backup operation. This
// allows backup chains to utilize the same temp directory and configuration
// details.
// newTestBackupOp accepts the clients required to compose a backup operation, plus
// any other metadata, and uses them to generate a new backup operation. This
// allows backup chains to utilize the same temp directory and configuration
// details.  Setup failure closes the dependencies and aborts the test.
func newTestBackupOp(
	t *testing.T,
	ctx context.Context, //revive:disable-line:context-as-argument
	bod *backupOpDependencies,
	bus events.Eventer,
	opts control.Options,
	counter *count.Bus,
) operations.BackupOperation {
	// the controller resolves owner names through this cache; seed it with
	// the selector's id->name mapping so lookups succeed offline.
	bod.ctrl.IDNameLookup = idname.NewCache(map[string]string{bod.sel.ID(): bod.sel.Name()})

	bo, err := operations.NewBackupOperation(
		ctx,
		opts,
		bod.kw,
		bod.sw,
		bod.ctrl,
		bod.acct,
		bod.sel,
		bod.sel,
		bus,
		counter)
	if !assert.NoError(t, err, clues.ToCore(err)) {
		bod.close(t, ctx)
		t.FailNow()
	}

	return bo
}
// runAndCheckBackup executes the backup operation and asserts a healthy
// result: completed (or NoData when acceptNoData), non-zero reads/writes,
// exactly one resource owner, no errors, and one backup-end event.
func runAndCheckBackup(
	t *testing.T,
	ctx context.Context, //revive:disable-line:context-as-argument
	bo *operations.BackupOperation,
	mb *evmock.Bus,
	acceptNoData bool,
) {
	err := bo.Run(ctx)
	if !assert.NoError(t, err, clues.ToCore(err)) {
		// surface each recoverable error before failing, for debuggability.
		for i, err := range bo.Errors.Recovered() {
			t.Logf("recoverable err %d, %+v", i, err)
		}

		assert.Fail(t, "not allowed to error")
	}

	require.NotEmpty(t, bo.Results, "the backup had non-zero results")
	require.NotEmpty(t, bo.Results.BackupID, "the backup generated an ID")

	// NoData is tolerated only when the caller expects an empty source.
	expectStatus := []operations.OpStatus{operations.Completed}
	if acceptNoData {
		expectStatus = append(expectStatus, operations.NoData)
	}

	require.Contains(
		t,
		expectStatus,
		bo.Status,
		"backup doesn't match expectation, wanted any of %v, got %s",
		expectStatus,
		bo.Status)

	require.NotZero(t, bo.Results.ItemsWritten)
	assert.NotZero(t, bo.Results.ItemsRead, "count of items read")
	assert.NotZero(t, bo.Results.BytesRead, "bytes read")
	assert.NotZero(t, bo.Results.BytesUploaded, "bytes uploaded")
	assert.Equal(t, 1, bo.Results.ResourceOwners, "count of resource owners")
	assert.NoError(t, bo.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(bo.Errors.Failure()))
	assert.Empty(t, bo.Errors.Recovered(), "incremental recoverable/iteration errors")
	assert.Equal(t, 1, mb.TimesCalled[events.BackupEnd], "backup-end events")
}
// checkBackupIsInManifests verifies, for each category, that the backup's ID
// appears among the merge-base snapshot manifests found for that
// owner/service/category reason.
func checkBackupIsInManifests(
	t *testing.T,
	ctx context.Context, //revive:disable-line:context-as-argument
	kw *kopia.Wrapper,
	sw store.BackupStorer,
	bo *operations.BackupOperation,
	sel selectors.Selector,
	resourceOwner string,
	categories ...path.CategoryType,
) {
	for _, category := range categories {
		t.Run(category.String(), func(t *testing.T) {
			var (
				r     = identity.NewReason("", resourceOwner, sel.PathService(), category)
				tags  = map[string]string{kopia.TagBackupCategory: ""}
				found bool
			)

			bf, err := kw.NewBaseFinder(sw)
			require.NoError(t, err, clues.ToCore(err))

			mans := bf.FindBases(ctx, []identity.Reasoner{r}, tags)
			// scan the merge bases for a snapshot tagged with this backup's ID.
			for _, man := range mans.MergeBases() {
				bID, ok := man.GetSnapshotTag(kopia.TagBackupID)
				if !assert.Truef(t, ok, "snapshot manifest %s missing backup ID tag", man.ItemDataSnapshot.ID) {
					continue
				}

				if bID == string(bo.Results.BackupID) {
					found = true
					break
				}
			}

			assert.True(t, found, "backup retrieved by previous snapshot manifest")
		})
	}
}
func checkMetadataFilesExist(
func CheckMetadataFilesExist(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
backupID model.StableID,
@ -376,147 +142,6 @@ func checkMetadataFilesExist(
}
}
// runMergeBaseGroupsUpdate runs a pre-Groups9Update backup, then forces a
// full backup at version.Groups9Update on the same dependencies, checking
// whether items were served from cache (expectCached) across the version
// bump.
func runMergeBaseGroupsUpdate(
	suite tester.Suite,
	sel selectors.Selector,
	expectCached bool,
) {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	var (
		mb      = evmock.NewBus()
		opts    = control.DefaultOptions()
		whatSet = deeTD.CategoryFromRepoRef
	)

	// Need outside the inner test case so bod lasts for the entire test.
	bo, bod := prepNewTestBackupOp(
		t,
		ctx,
		mb,
		sel,
		opts,
		version.All8MigrateUserPNToID,
		count.New())
	defer bod.close(t, ctx)

	// first pass: produce the merge base at the older backup version.
	suite.Run("makeMergeBackup", func() {
		t := suite.T()

		ctx, flush := tester.NewContext(t)
		defer flush()

		runAndCheckBackup(t, ctx, &bo, mb, false)

		reasons, err := bod.sel.Reasons(bod.acct.ID(), false)
		require.NoError(t, err, clues.ToCore(err))

		for _, reason := range reasons {
			checkBackupIsInManifests(
				t,
				ctx,
				bod.kw,
				bod.sw,
				&bo,
				bod.sel,
				bod.sel.ID(),
				reason.Category())
		}

		_, expectDeets := deeTD.GetDeetsInBackup(
			t,
			ctx,
			bo.Results.BackupID,
			bod.acct.ID(),
			bod.sel.ID(),
			bod.sel.PathService(),
			whatSet,
			bod.kms,
			bod.sss)
		deeTD.CheckBackupDetails(
			t,
			ctx,
			bo.Results.BackupID,
			whatSet,
			bod.kms,
			bod.sss,
			expectDeets,
			false)
	})

	// second pass: the Groups9Update version forces a full backup; verify
	// the merge base still supplies (or doesn't supply) cached items.
	suite.Run("makeIncrementalBackup", func() {
		t := suite.T()

		ctx, flush := tester.NewContext(t)
		defer flush()

		var (
			mb   = evmock.NewBus()
			opts = control.DefaultOptions()
		)

		forcedFull := newTestBackupOp(
			t,
			ctx,
			bod,
			mb,
			opts,
			count.New())
		forcedFull.BackupVersion = version.Groups9Update

		runAndCheckBackup(t, ctx, &forcedFull, mb, false)

		reasons, err := bod.sel.Reasons(bod.acct.ID(), false)
		require.NoError(t, err, clues.ToCore(err))

		for _, reason := range reasons {
			checkBackupIsInManifests(
				t,
				ctx,
				bod.kw,
				bod.sw,
				&forcedFull,
				bod.sel,
				bod.sel.ID(),
				reason.Category())
		}

		_, expectDeets := deeTD.GetDeetsInBackup(
			t,
			ctx,
			forcedFull.Results.BackupID,
			bod.acct.ID(),
			bod.sel.ID(),
			bod.sel.PathService(),
			whatSet,
			bod.kms,
			bod.sss)
		deeTD.CheckBackupDetails(
			t,
			ctx,
			forcedFull.Results.BackupID,
			whatSet,
			bod.kms,
			bod.sss,
			expectDeets,
			false)

		// cached-file count flips meaning based on the caller's expectation.
		check := assert.Zero
		if expectCached {
			check = assert.NotZero
		}

		check(
			t,
			forcedFull.Results.Counts[string(count.PersistedCachedFiles)],
			"cached items")
	})
}
// ---------------------------------------------------------------------------
// Incremental Item Generators
// TODO: this is ripped from factory.go, which is ripped from other tests.
@ -527,9 +152,9 @@ func runMergeBaseGroupsUpdate(
// the params here are what generateContainerOfItems passes into the func.
// the callback provider can use them, or not, as wanted.
type dataBuilderFunc func(id, timeStamp, subject, body string) []byte
type DataBuilderFunc func(id, timeStamp, subject, body string) []byte
func generateContainerOfItems(
func GenerateContainerOfItems(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
ctrl *m365.Controller,
@ -539,16 +164,16 @@ func generateContainerOfItems(
tenantID, resourceOwner, siteID, driveID, destFldr string,
howManyItems int,
backupVersion int,
dbf dataBuilderFunc,
dbf DataBuilderFunc,
) *details.Details {
t.Helper()
items := make([]incrementalItem, 0, howManyItems)
items := make([]IncrementalItem, 0, howManyItems)
for i := 0; i < howManyItems; i++ {
id, d := generateItemData(t, cat, resourceOwner, dbf)
id, d := GenerateItemData(t, cat, resourceOwner, dbf)
items = append(items, incrementalItem{
items = append(items, IncrementalItem{
name: id,
data: d,
})
@ -563,7 +188,7 @@ func generateContainerOfItems(
pathFolders = []string{odConsts.SitesPathDir, siteID, odConsts.DrivesPathDir, driveID, odConsts.RootPathDir, destFldr}
}
collections := []incrementalCollection{{
collections := []IncrementalCollection{{
pathFolders: pathFolders,
category: cat,
items: items,
@ -573,7 +198,7 @@ func generateContainerOfItems(
restoreCfg.Location = destFldr
restoreCfg.IncludePermissions = true
dataColls := buildCollections(
dataColls := BuildCollections(
t,
service,
tenantID, resourceOwner,
@ -604,11 +229,11 @@ func generateContainerOfItems(
return deets
}
func generateItemData(
func GenerateItemData(
t *testing.T,
category path.CategoryType,
resourceOwner string,
dbf dataBuilderFunc,
dbf DataBuilderFunc,
) (string, []byte) {
var (
now = dttm.Now()
@ -621,30 +246,30 @@ func generateItemData(
return id, dbf(id, nowLegacy, subject, body)
}
type incrementalItem struct {
type IncrementalItem struct {
name string
data []byte
}
type incrementalCollection struct {
type IncrementalCollection struct {
pathFolders []string
category path.CategoryType
items []incrementalItem
items []IncrementalItem
}
func buildCollections(
func BuildCollections(
t *testing.T,
service path.ServiceType,
tenant, user string,
restoreCfg control.RestoreConfig,
colls []incrementalCollection,
colls []IncrementalCollection,
) []data.RestoreCollection {
t.Helper()
collections := make([]data.RestoreCollection, 0, len(colls))
for _, c := range colls {
pth := toDataLayerPath(
pth := ToDataLayerPath(
t,
service,
tenant,
@ -666,7 +291,7 @@ func buildCollections(
return collections
}
func toDataLayerPath(
func ToDataLayerPath(
t *testing.T,
service path.ServiceType,
tenant, resourceOwner string,
@ -746,30 +371,30 @@ func ControllerWithSelector(
// Suite Setup
// ---------------------------------------------------------------------------
type ids struct {
type IDs struct {
ID string
DriveID string
DriveRootFolderID string
}
type gids struct {
type GIDs struct {
ID string
RootSite ids
RootSite IDs
}
type intgTesterSetup struct {
ac api.Client
gockAC api.Client
user ids
secondaryUser ids
site ids
secondarySite ids
group gids
secondaryGroup gids
type IntgTesterSetup struct {
AC api.Client
GockAC api.Client
User IDs
SecondaryUser IDs
Site IDs
SecondarySite IDs
Group GIDs
SecondaryGroup GIDs
}
func newIntegrationTesterSetup(t *testing.T) intgTesterSetup {
its := intgTesterSetup{}
func NewIntegrationTesterSetup(t *testing.T) IntgTesterSetup {
its := IntgTesterSetup{}
ctx, flush := tester.NewContext(t)
defer flush()
@ -782,32 +407,32 @@ func newIntegrationTesterSetup(t *testing.T) intgTesterSetup {
counter := count.New()
its.ac, err = api.NewClient(
its.AC, err = api.NewClient(
creds,
control.DefaultOptions(),
counter)
require.NoError(t, err, clues.ToCore(err))
its.gockAC, err = gockClient(creds, counter)
its.GockAC, err = GockClient(creds, counter)
require.NoError(t, err, clues.ToCore(err))
its.user = userIDs(t, tconfig.M365UserID(t), its.ac)
its.secondaryUser = userIDs(t, tconfig.SecondaryM365UserID(t), its.ac)
its.site = siteIDs(t, tconfig.M365SiteID(t), its.ac)
its.secondarySite = siteIDs(t, tconfig.SecondaryM365SiteID(t), its.ac)
its.User = userIDs(t, tconfig.M365UserID(t), its.AC)
its.SecondaryUser = userIDs(t, tconfig.SecondaryM365UserID(t), its.AC)
its.Site = siteIDs(t, tconfig.M365SiteID(t), its.AC)
its.SecondarySite = siteIDs(t, tconfig.SecondaryM365SiteID(t), its.AC)
// teamID is used here intentionally. We want the group
// to have access to teams data
its.group = groupIDs(t, tconfig.M365TeamID(t), its.ac)
its.secondaryGroup = groupIDs(t, tconfig.SecondaryM365TeamID(t), its.ac)
its.Group = groupIDs(t, tconfig.M365TeamID(t), its.AC)
its.SecondaryGroup = groupIDs(t, tconfig.SecondaryM365TeamID(t), its.AC)
return its
}
func userIDs(t *testing.T, id string, ac api.Client) ids {
func userIDs(t *testing.T, id string, ac api.Client) IDs {
ctx, flush := tester.NewContext(t)
defer flush()
r := ids{ID: id}
r := IDs{ID: id}
drive, err := ac.Users().GetDefaultDrive(ctx, id)
require.NoError(t, err, clues.ToCore(err))
@ -822,11 +447,11 @@ func userIDs(t *testing.T, id string, ac api.Client) ids {
return r
}
func siteIDs(t *testing.T, id string, ac api.Client) ids {
func siteIDs(t *testing.T, id string, ac api.Client) IDs {
ctx, flush := tester.NewContext(t)
defer flush()
r := ids{ID: id}
r := IDs{ID: id}
drive, err := ac.Sites().GetDefaultDrive(ctx, id)
require.NoError(t, err, clues.ToCore(err))
@ -841,11 +466,11 @@ func siteIDs(t *testing.T, id string, ac api.Client) ids {
return r
}
func groupIDs(t *testing.T, id string, ac api.Client) gids {
func groupIDs(t *testing.T, id string, ac api.Client) GIDs {
ctx, flush := tester.NewContext(t)
defer flush()
r := gids{ID: id}
r := GIDs{ID: id}
site, err := ac.Groups().GetRootSite(ctx, id)
require.NoError(t, err, clues.ToCore(err))
@ -865,13 +490,13 @@ func groupIDs(t *testing.T, id string, ac api.Client) gids {
return r
}
func getTestExtensionFactories() []extensions.CreateItemExtensioner {
func GetTestExtensionFactories() []extensions.CreateItemExtensioner {
return []extensions.CreateItemExtensioner{
&extensions.MockItemExtensionFactory{},
}
}
func verifyExtensionData(
func VerifyExtensionData(
t *testing.T,
itemInfo details.ItemInfo,
p path.ServiceType,

View File

@ -0,0 +1,416 @@
package onedrive_test
import (
"context"
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/events"
evmock "github.com/alcionai/corso/src/internal/events/mock"
"github.com/alcionai/corso/src/internal/m365"
"github.com/alcionai/corso/src/internal/m365/collection/drive"
"github.com/alcionai/corso/src/internal/model"
. "github.com/alcionai/corso/src/internal/operations/test/m365"
"github.com/alcionai/corso/src/internal/streamstore"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details"
bupMD "github.com/alcionai/corso/src/pkg/backup/metadata"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
)
// OneDriveBackupIntgSuite runs OneDrive backup integration tests against a
// live M365 tenant using the shared tester setup.
type OneDriveBackupIntgSuite struct {
	tester.Suite
	its IntgTesterSetup // shared clients and resource IDs for the tenant
}
func TestOneDriveBackupIntgSuite(t *testing.T) {
suite.Run(t, &OneDriveBackupIntgSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tconfig.M365AcctCredEnvs, storeTD.AWSStorageCredEnvs}),
})
}
// SetupSuite initializes the shared M365 clients and resource IDs once for
// the whole suite.
func (suite *OneDriveBackupIntgSuite) SetupSuite() {
	suite.its = NewIntegrationTesterSetup(suite.T())
}
// TestBackup_Run_oneDrive exercises the basic drive-ish backup checks
// against the secondary test user's OneDrive.
func (suite *OneDriveBackupIntgSuite) TestBackup_Run_oneDrive() {
	userID := suite.its.SecondaryUser.ID

	bsel := selectors.NewOneDriveBackup([]string{userID})
	bsel.Include(selTD.OneDriveBackupFolderScope(bsel))

	RunBasicDriveishBackupTests(
		suite,
		path.OneDriveService,
		control.DefaultOptions(),
		bsel.Selector)
}
// TestBackup_Run_incrementalOneDrive runs the shared incremental backup
// scenario with default (non-tree) options.
func (suite *OneDriveBackupIntgSuite) TestBackup_Run_incrementalOneDrive() {
	runOneDriveIncrementalBackupTests(suite, suite.its, control.DefaultOptions())
}
// TestBackup_Run_extensionsOneDrive exercises the drive-ish backup checks
// with item extensions enabled, against the secondary test user's OneDrive.
func (suite *OneDriveBackupIntgSuite) TestBackup_Run_extensionsOneDrive() {
	userID := suite.its.SecondaryUser.ID

	bsel := selectors.NewOneDriveBackup([]string{userID})
	bsel.Include(selTD.OneDriveBackupFolderScope(bsel))

	RunDriveishBackupWithExtensionsTests(
		suite,
		path.OneDriveService,
		control.DefaultOptions(),
		bsel.Selector)
}
// ---------------------------------------------------------------------------
// test version using the tree-based drive item processor
// ---------------------------------------------------------------------------
// OneDriveBackupTreeIntgSuite mirrors OneDriveBackupIntgSuite but enables
// the tree-based drive item processor (UseDeltaTree) in each test.
type OneDriveBackupTreeIntgSuite struct {
	tester.Suite
	its IntgTesterSetup // shared clients and resource IDs for the tenant
}
func TestOneDriveBackupTreeIntgSuite(t *testing.T) {
suite.Run(t, &OneDriveBackupTreeIntgSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tconfig.M365AcctCredEnvs, storeTD.AWSStorageCredEnvs}),
})
}
// SetupSuite initializes the shared M365 clients and resource IDs once for
// the whole suite.
func (suite *OneDriveBackupTreeIntgSuite) SetupSuite() {
	suite.its = NewIntegrationTesterSetup(suite.T())
}
// TestBackup_Run_treeOneDrive runs the basic drive-ish backup checks with
// the delta-tree processor enabled.
func (suite *OneDriveBackupTreeIntgSuite) TestBackup_Run_treeOneDrive() {
	userID := suite.its.SecondaryUser.ID

	bsel := selectors.NewOneDriveBackup([]string{userID})
	bsel.Include(selTD.OneDriveBackupFolderScope(bsel))

	treeOpts := control.DefaultOptions()
	treeOpts.ToggleFeatures.UseDeltaTree = true

	RunBasicDriveishBackupTests(
		suite,
		path.OneDriveService,
		treeOpts,
		bsel.Selector)
}
// TestBackup_Run_treeIncrementalOneDrive runs the shared incremental backup
// scenario with the delta-tree processor enabled.
func (suite *OneDriveBackupTreeIntgSuite) TestBackup_Run_treeIncrementalOneDrive() {
	treeOpts := control.DefaultOptions()
	treeOpts.ToggleFeatures.UseDeltaTree = true

	runOneDriveIncrementalBackupTests(suite, suite.its, treeOpts)
}
// TestBackup_Run_treeExtensionsOneDrive runs the extensions backup checks
// with the delta-tree processor enabled.
func (suite *OneDriveBackupTreeIntgSuite) TestBackup_Run_treeExtensionsOneDrive() {
	userID := suite.its.SecondaryUser.ID

	bsel := selectors.NewOneDriveBackup([]string{userID})
	bsel.Include(selTD.OneDriveBackupFolderScope(bsel))

	treeOpts := control.DefaultOptions()
	treeOpts.ToggleFeatures.UseDeltaTree = true

	RunDriveishBackupWithExtensionsTests(
		suite,
		path.OneDriveService,
		treeOpts,
		bsel.Selector)
}
// ---------------------------------------------------------------------------
// common backup test wrappers
// ---------------------------------------------------------------------------
// runOneDriveIncrementalBackupTests adapts the shared incremental drive-ish
// backup harness to OneDrive by supplying user-scoped selector, drive-id,
// and restore-handler callbacks.
func runOneDriveIncrementalBackupTests(
	suite tester.Suite,
	its IntgTesterSetup,
	opts control.Options,
) {
	sel := selectors.NewOneDriveRestore([]string{its.User.ID})

	// ic builds an include-scoped selector for the given container names.
	// note: it mutates the shared sel on each call.
	ic := func(cs []string) selectors.Selector {
		sel.Include(sel.Folders(cs, selectors.PrefixMatch()))
		return sel.Selector
	}

	// gtdi fetches the test user's default drive ID, failing the test if
	// the lookup errors or produces an empty ID.
	gtdi := func(
		t *testing.T,
		ctx context.Context,
	) string {
		d, err := its.AC.Users().GetDefaultDrive(ctx, its.User.ID)
		if err != nil {
			err = graph.Wrap(ctx, err, "retrieving default user drive").
				With("user", its.User.ID)
		}

		require.NoError(t, err, clues.ToCore(err))

		id := ptr.Val(d.GetId())
		require.NotEmpty(t, id, "drive ID")

		return id
	}

	// grh produces the OneDrive (user-drive) restore handler.
	grh := func(ac api.Client) drive.RestoreHandler {
		return drive.NewUserDriveRestoreHandler(ac)
	}

	RunIncrementalDriveishBackupTest(
		suite,
		opts,
		its.User.ID,
		its.User.ID,
		path.OneDriveService,
		path.FilesCategory,
		ic,
		gtdi,
		nil,
		grh,
		false)
}
// ---------------------------------------------------------------------------
// other drive tests
// ---------------------------------------------------------------------------
// TestBackup_Run_oneDriveOwnerMigration verifies that a backup whose owner
// was historically identified by user principal name migrates to id-based
// ownership: an initial pre-migration backup keyed by PN, then an
// incremental keyed by ID, whose details entries must reference the ID.
func (suite *OneDriveBackupIntgSuite) TestBackup_Run_oneDriveOwnerMigration() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	var (
		acct       = tconfig.NewM365Account(t)
		opts       = control.DefaultOptions()
		mb         = evmock.NewBus()
		counter    = count.New()
		categories = map[path.CategoryType][][]string{
			path.FilesCategory: {{bupMD.DeltaURLsFileName}, {bupMD.PreviousPathFileName}},
		}
	)

	creds, err := acct.M365Config()
	require.NoError(t, err, clues.ToCore(err))

	ctrl, err := m365.NewController(
		ctx,
		acct,
		path.OneDriveService,
		control.DefaultOptions(),
		counter)
	require.NoError(t, err, clues.ToCore(err))

	userable, err := ctrl.AC.Users().GetByID(
		ctx,
		suite.its.User.ID,
		api.CallConfig{})
	require.NoError(t, err, clues.ToCore(err))

	uid := ptr.Val(userable.GetId())
	uname := ptr.Val(userable.GetUserPrincipalName())

	// the "old" selector identifies the owner by principal name.
	oldsel := selectors.NewOneDriveBackup([]string{uname})
	oldsel.Include(selTD.OneDriveBackupFolderScope(oldsel))

	bo, bod := PrepNewTestBackupOp(t, ctx, mb, oldsel.Selector, opts, 0, counter)
	defer bod.Close(t, ctx)

	sel := bod.Sel

	// ensure the initial owner uses name in both cases
	bo.ResourceOwner = sel.SetDiscreteOwnerIDName(uname, uname)
	// required, otherwise we don't run the migration
	bo.BackupVersion = version.All8MigrateUserPNToID - 1

	require.Equalf(
		t,
		bo.ResourceOwner.Name(),
		bo.ResourceOwner.ID(),
		"historical representation of user id [%s] should match pn [%s]",
		bo.ResourceOwner.ID(),
		bo.ResourceOwner.Name())

	// run the initial backup
	RunAndCheckBackup(t, ctx, &bo, mb, false)

	// the "new" selector identifies the owner by id, mapping id->name.
	newsel := selectors.NewOneDriveBackup([]string{uid})
	newsel.Include(selTD.OneDriveBackupFolderScope(newsel))
	sel = newsel.SetDiscreteOwnerIDName(uid, uname)

	var (
		incMB = evmock.NewBus()
		// the incremental backup op should have a proper user ID for the id.
		incBO = NewTestBackupOp(t, ctx, bod, incMB, opts, counter)
	)

	require.NotEqualf(
		t,
		incBO.ResourceOwner.Name(),
		incBO.ResourceOwner.ID(),
		"current representation of user: id [%s] should differ from PN [%s]",
		incBO.ResourceOwner.ID(),
		incBO.ResourceOwner.Name())

	err = incBO.Run(ctx)
	require.NoError(t, err, clues.ToCore(err))

	CheckBackupIsInManifests(
		t,
		ctx,
		bod.KW,
		bod.SW,
		&incBO,
		sel,
		uid,
		maps.Keys(categories)...)
	CheckMetadataFilesExist(
		t,
		ctx,
		incBO.Results.BackupID,
		bod.KW,
		bod.KMS,
		creds.AzureTenantID,
		uid,
		path.OneDriveService,
		categories)

	// 2 on read/writes to account for metadata: 1 delta and 1 path.
	assert.LessOrEqual(t, 2, incBO.Results.ItemsWritten, "items written")
	assert.LessOrEqual(t, 1, incBO.Results.NonMetaItemsWritten, "non meta items written")
	assert.LessOrEqual(t, 2, incBO.Results.ItemsRead, "items read")
	assert.NoError(t, incBO.Errors.Failure(), "non-recoverable error", clues.ToCore(incBO.Errors.Failure()))
	assert.Empty(t, incBO.Errors.Recovered(), "recoverable/iteration errors")
	assert.Equal(t, 1, incMB.TimesCalled[events.BackupEnd], "backup-end events")

	bid := incBO.Results.BackupID
	bup := &backup.Backup{}

	err = bod.KMS.Get(ctx, model.BackupSchema, bid, bup)
	require.NoError(t, err, clues.ToCore(err))

	var (
		ssid  = bup.StreamStoreID
		deets details.Details
		ss    = streamstore.NewStreamer(bod.KW, creds.AzureTenantID, path.OneDriveService)
	)

	// pull the backup details out of the streamstore to inspect repo refs.
	err = ss.Read(ctx, ssid, streamstore.DetailsReader(details.UnmarshalTo(&deets)), fault.New(true))
	require.NoError(t, err, clues.ToCore(err))

	for _, ent := range deets.Entries {
		// 46 is the tenant uuid + "onedrive" + two slashes
		if len(ent.RepoRef) > 46 {
			assert.Contains(t, ent.RepoRef, uid)
		}
	}
}
// OneDriveBackupNightlyIntgSuite hosts the slower OneDrive backup tests run
// on the nightly schedule.
type OneDriveBackupNightlyIntgSuite struct {
	tester.Suite
	its IntgTesterSetup // shared clients and resource IDs for the tenant
}
func TestOneDriveBackupNightlyIntgSuite(t *testing.T) {
suite.Run(t, &OneDriveBackupNightlyIntgSuite{
Suite: tester.NewNightlySuite(
t,
[][]string{tconfig.M365AcctCredEnvs, storeTD.AWSStorageCredEnvs}),
})
}
// SetupSuite initializes the shared M365 clients and resource IDs once for
// the whole suite.
func (suite *OneDriveBackupNightlyIntgSuite) SetupSuite() {
	suite.its = NewIntegrationTesterSetup(suite.T())
}
// TestBackup_Run_oneDriveVersion9MergeBase checks merge-base behavior across
// the Groups9Update version bump for the primary test user's OneDrive,
// expecting cached items on the forced full backup.
func (suite *OneDriveBackupNightlyIntgSuite) TestBackup_Run_oneDriveVersion9MergeBase() {
	bsel := selectors.NewOneDriveBackup([]string{suite.its.User.ID})
	bsel.Include(selTD.OneDriveBackupFolderScope(bsel))

	RunMergeBaseGroupsUpdate(suite, bsel.Selector, true)
}
//func (suite *OneDriveBackupNightlyIntgSuite) TestBackup_Run_oneDriveVersion9AssistBases() {
// sel := selectors.NewOneDriveBackup([]string{tconfig.SecondaryM365UserID(suite.T())})
// sel.Include(selTD.OneDriveBackupFolderScope(sel))
//
// runDriveAssistBaseGroupsUpdate(suite, sel.Selector, true)
//}
// OneDriveRestoreNightlyIntgSuite hosts the slower OneDrive restore tests
// run on the nightly schedule.
type OneDriveRestoreNightlyIntgSuite struct {
	tester.Suite
	its IntgTesterSetup // shared clients and resource IDs for the tenant
}
func TestOneDriveRestoreIntgSuite(t *testing.T) {
suite.Run(t, &OneDriveRestoreNightlyIntgSuite{
Suite: tester.NewNightlySuite(
t,
[][]string{tconfig.M365AcctCredEnvs, storeTD.AWSStorageCredEnvs}),
})
}
// SetupSuite initializes the shared M365 clients and resource IDs once for
// the whole suite.
func (suite *OneDriveRestoreNightlyIntgSuite) SetupSuite() {
	suite.its = NewIntegrationTesterSetup(suite.T())
}
// TestRestore_Run_onedriveWithAdvancedOptions runs the shared advanced-
// options restore scenario against the primary test user's OneDrive.
func (suite *OneDriveRestoreNightlyIntgSuite) TestRestore_Run_onedriveWithAdvancedOptions() {
	user := suite.its.User

	bsel := selectors.NewOneDriveBackup([]string{user.ID})
	bsel.Include(selTD.OneDriveBackupFolderScope(bsel))
	bsel.DiscreteOwner = user.ID

	RunDriveRestoreWithAdvancedOptions(
		suite.T(),
		suite,
		suite.its.AC,
		bsel.Selector,
		user.DriveID,
		user.DriveRootFolderID)
}
// TestRestore_Run_onedriveAlternateProtectedResource runs the shared
// cross-resource restore scenario: backup from the primary user, restore
// into the secondary user's OneDrive.
func (suite *OneDriveRestoreNightlyIntgSuite) TestRestore_Run_onedriveAlternateProtectedResource() {
	source := suite.its.User
	target := suite.its.SecondaryUser

	bsel := selectors.NewOneDriveBackup([]string{source.ID})
	bsel.Include(selTD.OneDriveBackupFolderScope(bsel))
	bsel.DiscreteOwner = source.ID

	RunDriveRestoreToAlternateProtectedResource(
		suite.T(),
		suite,
		suite.its.AC,
		bsel.Selector,
		source,
		target,
		target.ID)
}

View File

@ -1,4 +1,4 @@
package test_test
package m365
import (
"context"
@ -28,7 +28,7 @@ import (
"github.com/alcionai/corso/src/pkg/store"
)
type restoreOpDependencies struct {
type RestoreOpDependencies struct {
acct account.Account
ctrl *m365.Controller
kms *kopia.ModelStore
@ -41,7 +41,7 @@ type restoreOpDependencies struct {
closer func()
}
func (rod *restoreOpDependencies) close(
func (rod *RestoreOpDependencies) Close(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
) {
@ -60,10 +60,10 @@ func (rod *restoreOpDependencies) close(
}
}
// prepNewTestRestoreOp generates all clients required to run a restore operation,
// PrepNewTestRestoreOp generates all clients required to run a restore operation,
// returning both a restore operation created with those clients, as well as
// the clients themselves.
func prepNewTestRestoreOp(
func PrepNewTestRestoreOp(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
backupStore storage.Storage,
@ -75,10 +75,10 @@ func prepNewTestRestoreOp(
restoreCfg control.RestoreConfig,
) (
operations.RestoreOperation,
*restoreOpDependencies,
*RestoreOpDependencies,
) {
var (
rod = &restoreOpDependencies{
rod = &RestoreOpDependencies{
acct: tconfig.NewM365Account(t),
st: backupStore,
}
@ -114,10 +114,10 @@ func prepNewTestRestoreOp(
rod.acct,
sel,
nil,
rod.close,
rod.Close,
counter)
ro := newTestRestoreOp(
ro := NewTestRestoreOp(
t,
ctx,
rod,
@ -135,14 +135,14 @@ func prepNewTestRestoreOp(
return ro, rod
}
// newTestRestoreOp accepts the clients required to compose a restore operation, plus
// NewTestRestoreOp accepts the clients required to compose a restore operation, plus
// any other metadata, and uses them to generate a new restore operation. This
// allows restore chains to utilize the same temp directory and configuration
// details.
func newTestRestoreOp(
func NewTestRestoreOp(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
rod *restoreOpDependencies,
rod *RestoreOpDependencies,
backupID model.StableID,
bus events.Eventer,
counter *count.Bus,
@ -167,14 +167,14 @@ func newTestRestoreOp(
bus,
counter)
if !assert.NoError(t, err, clues.ToCore(err)) {
rod.close(t, ctx)
rod.Close(t, ctx)
t.FailNow()
}
return ro
}
func runAndCheckRestore(
func RunAndCheckRestore(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
ro *operations.RestoreOperation,
@ -223,7 +223,7 @@ type GetItemsInContainerByCollisionKeyer[T any] interface {
) (map[string]T, error)
}
func filterCollisionKeyResults[T any](
func FilterCollisionKeyResults[T any](
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
protectedResourceID, containerID string,
@ -243,7 +243,7 @@ func filterCollisionKeyResults[T any](
return m
}
func checkRestoreCounts(
func CheckRestoreCounts(
t *testing.T,
ctr *count.Bus,
expectSkips, expectReplaces, expectNew int,

View File

@ -1,4 +1,4 @@
package test_test
package sharepoint_test
import (
"context"
@ -14,10 +14,10 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
evmock "github.com/alcionai/corso/src/internal/events/mock"
"github.com/alcionai/corso/src/internal/m365/collection/drive"
. "github.com/alcionai/corso/src/internal/operations/test/m365"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
deeTD "github.com/alcionai/corso/src/pkg/backup/details/testdata"
"github.com/alcionai/corso/src/pkg/control"
ctrlTD "github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/count"
@ -31,7 +31,7 @@ import (
type SharePointBackupIntgSuite struct {
tester.Suite
its intgTesterSetup
its IntgTesterSetup
}
func TestSharePointBackupIntgSuite(t *testing.T) {
@ -43,11 +43,117 @@ func TestSharePointBackupIntgSuite(t *testing.T) {
}
func (suite *SharePointBackupIntgSuite) SetupSuite() {
suite.its = newIntegrationTesterSetup(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
// TestBackup_Run_sharePoint exercises the basic drive-ish backup checks
// against the primary test site's document library.
func (suite *SharePointBackupIntgSuite) TestBackup_Run_sharePoint() {
	siteID := suite.its.Site.ID

	bsel := selectors.NewSharePointBackup([]string{siteID})
	bsel.Include(selTD.SharePointBackupFolderScope(bsel))

	RunBasicDriveishBackupTests(
		suite,
		path.SharePointService,
		control.DefaultOptions(),
		bsel.Selector)
}
func (suite *SharePointBackupIntgSuite) TestBackup_Run_incrementalSharePoint() {
sel := selectors.NewSharePointRestore([]string{suite.its.site.ID})
runSharePointIncrementalBackupTests(suite, suite.its, control.DefaultOptions())
}
// TestBackup_Run_extensionsSharePoint runs the shared backup-with-extensions
// test battery against the test site's sharepoint libraries using default
// options.
func (suite *SharePointBackupIntgSuite) TestBackup_Run_extensionsSharePoint() {
	siteID := suite.its.Site.ID

	bsel := selectors.NewSharePointBackup([]string{siteID})
	bsel.Include(selTD.SharePointBackupFolderScope(bsel))

	RunDriveishBackupWithExtensionsTests(
		suite,
		path.SharePointService,
		control.DefaultOptions(),
		bsel.Selector)
}
// ---------------------------------------------------------------------------
// test version using the tree-based drive item processor
// ---------------------------------------------------------------------------
// SharePointBackupTreeIntgSuite mirrors the sharepoint backup integration
// tests, but runs them with the tree-based drive item processor enabled
// (opts.ToggleFeatures.UseDeltaTree is set to true in each test).
type SharePointBackupTreeIntgSuite struct {
	tester.Suite
	// its holds the shared m365 integration test setup populated in SetupSuite.
	its IntgTesterSetup
}
// TestSharePointBackupTreeIntgSuite is the go-test entry point for the
// tree-processor sharepoint backup suite.  It only runs as an integration
// suite, gated on m365 account and AWS storage credential env vars.
func TestSharePointBackupTreeIntgSuite(t *testing.T) {
	suite.Run(t, &SharePointBackupTreeIntgSuite{
		Suite: tester.NewIntegrationSuite(
			t,
			[][]string{tconfig.M365AcctCredEnvs, storeTD.AWSStorageCredEnvs}),
	})
}
// SetupSuite initializes the shared integration tester setup (clients and
// resource IDs) once for all tests in the suite.
func (suite *SharePointBackupTreeIntgSuite) SetupSuite() {
	suite.its = NewIntegrationTesterSetup(suite.T())
}
// TestBackup_Run_treeSharePoint runs the shared basic drive-ish backup test
// battery against the test site's sharepoint libraries with the delta-tree
// item processor toggled on.
func (suite *SharePointBackupTreeIntgSuite) TestBackup_Run_treeSharePoint() {
	bsel := selectors.NewSharePointBackup([]string{suite.its.Site.ID})
	bsel.Include(selTD.SharePointBackupFolderScope(bsel))

	treeOpts := control.DefaultOptions()
	treeOpts.ToggleFeatures.UseDeltaTree = true

	RunBasicDriveishBackupTests(
		suite,
		path.SharePointService,
		treeOpts,
		bsel.Selector)
}
// TestBackup_Run_treeIncrementalSharePoint runs the shared incremental
// backup test battery with the delta-tree item processor toggled on.
func (suite *SharePointBackupTreeIntgSuite) TestBackup_Run_treeIncrementalSharePoint() {
	treeOpts := control.DefaultOptions()
	treeOpts.ToggleFeatures.UseDeltaTree = true

	runSharePointIncrementalBackupTests(suite, suite.its, treeOpts)
}
// TestBackup_Run_treeExtensionsSharePoint runs the shared
// backup-with-extensions test battery against the test site's sharepoint
// libraries with the delta-tree item processor toggled on.
func (suite *SharePointBackupTreeIntgSuite) TestBackup_Run_treeExtensionsSharePoint() {
	bsel := selectors.NewSharePointBackup([]string{suite.its.Site.ID})
	bsel.Include(selTD.SharePointBackupFolderScope(bsel))

	treeOpts := control.DefaultOptions()
	treeOpts.ToggleFeatures.UseDeltaTree = true

	RunDriveishBackupWithExtensionsTests(
		suite,
		path.SharePointService,
		treeOpts,
		bsel.Selector)
}
// ---------------------------------------------------------------------------
// common backup test wrappers
// ---------------------------------------------------------------------------
func runSharePointIncrementalBackupTests(
suite tester.Suite,
its IntgTesterSetup,
opts control.Options,
) {
sel := selectors.NewSharePointRestore([]string{its.Site.ID})
ic := func(cs []string) selectors.Selector {
sel.Include(sel.LibraryFolders(cs, selectors.PrefixMatch()))
@ -58,10 +164,10 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_incrementalSharePoint() {
t *testing.T,
ctx context.Context,
) string {
d, err := suite.its.ac.Sites().GetDefaultDrive(ctx, suite.its.site.ID)
d, err := its.AC.Sites().GetDefaultDrive(ctx, its.Site.ID)
if err != nil {
err = graph.Wrap(ctx, err, "retrieving default site drive").
With("site", suite.its.site.ID)
With("site", its.Site.ID)
}
require.NoError(t, err, clues.ToCore(err))
@ -76,10 +182,11 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_incrementalSharePoint() {
return drive.NewSiteRestoreHandler(ac, path.SharePointService)
}
runDriveIncrementalTest(
RunIncrementalDriveishBackupTest(
suite,
suite.its.site.ID,
suite.its.user.ID,
opts,
its.Site.ID,
its.User.ID,
path.SharePointService,
path.LibrariesCategory,
ic,
@ -89,103 +196,9 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_incrementalSharePoint() {
true)
}
func (suite *SharePointBackupIntgSuite) TestBackup_Run_sharePointBasic() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var (
mb = evmock.NewBus()
counter = count.New()
sel = selectors.NewSharePointBackup([]string{suite.its.site.ID})
opts = control.DefaultOptions()
)
sel.Include(selTD.SharePointBackupFolderScope(sel))
bo, bod := prepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter)
defer bod.close(t, ctx)
runAndCheckBackup(t, ctx, &bo, mb, false)
checkBackupIsInManifests(
t,
ctx,
bod.kw,
bod.sw,
&bo,
bod.sel,
bod.sel.ID(),
path.LibrariesCategory)
}
func (suite *SharePointBackupIntgSuite) TestBackup_Run_sharePointExtensions() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var (
mb = evmock.NewBus()
counter = count.New()
sel = selectors.NewSharePointBackup([]string{suite.its.site.ID})
opts = control.DefaultOptions()
tenID = tconfig.M365TenantID(t)
svc = path.SharePointService
ws = deeTD.DriveIDFromRepoRef
)
opts.ItemExtensionFactory = getTestExtensionFactories()
sel.Include(selTD.SharePointBackupFolderScope(sel))
bo, bod := prepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter)
defer bod.close(t, ctx)
runAndCheckBackup(t, ctx, &bo, mb, false)
checkBackupIsInManifests(
t,
ctx,
bod.kw,
bod.sw,
&bo,
bod.sel,
bod.sel.ID(),
path.LibrariesCategory)
bID := bo.Results.BackupID
deets, expectDeets := deeTD.GetDeetsInBackup(
t,
ctx,
bID,
tenID,
bod.sel.ID(),
svc,
ws,
bod.kms,
bod.sss)
deeTD.CheckBackupDetails(
t,
ctx,
bID,
ws,
bod.kms,
bod.sss,
expectDeets,
false)
// Check that the extensions are in the backup
for _, ent := range deets.Entries {
if ent.Folder == nil {
verifyExtensionData(t, ent.ItemInfo, path.SharePointService)
}
}
}
type SharePointBackupNightlyIntgSuite struct {
tester.Suite
its intgTesterSetup
its IntgTesterSetup
}
func TestSharePointBackupNightlyIntgSuite(t *testing.T) {
@ -197,26 +210,26 @@ func TestSharePointBackupNightlyIntgSuite(t *testing.T) {
}
func (suite *SharePointBackupNightlyIntgSuite) SetupSuite() {
suite.its = newIntegrationTesterSetup(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *SharePointBackupNightlyIntgSuite) TestBackup_Run_sharePointVersion9MergeBase() {
sel := selectors.NewSharePointBackup([]string{suite.its.site.ID})
sel := selectors.NewSharePointBackup([]string{suite.its.Site.ID})
sel.Include(selTD.SharePointBackupFolderScope(sel))
runMergeBaseGroupsUpdate(suite, sel.Selector, true)
RunMergeBaseGroupsUpdate(suite, sel.Selector, true)
}
func (suite *SharePointBackupNightlyIntgSuite) TestBackup_Run_sharePointVersion9AssistBases() {
sel := selectors.NewSharePointBackup([]string{suite.its.site.ID})
sel := selectors.NewSharePointBackup([]string{suite.its.Site.ID})
sel.Include(selTD.SharePointBackupFolderScope(sel))
runDriveAssistBaseGroupsUpdate(suite, sel.Selector, true)
RunDriveAssistBaseGroupsUpdate(suite, sel.Selector, true)
}
type SharePointRestoreNightlyIntgSuite struct {
tester.Suite
its intgTesterSetup
its IntgTesterSetup
}
func TestSharePointRestoreIntgSuite(t *testing.T) {
@ -228,38 +241,38 @@ func TestSharePointRestoreIntgSuite(t *testing.T) {
}
func (suite *SharePointRestoreNightlyIntgSuite) SetupSuite() {
suite.its = newIntegrationTesterSetup(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointWithAdvancedOptions() {
sel := selectors.NewSharePointBackup([]string{suite.its.site.ID})
sel := selectors.NewSharePointBackup([]string{suite.its.Site.ID})
sel.Include(selTD.SharePointBackupFolderScope(sel))
sel.Filter(sel.Library("documents"))
sel.DiscreteOwner = suite.its.site.ID
sel.DiscreteOwner = suite.its.Site.ID
runDriveRestoreWithAdvancedOptions(
RunDriveRestoreWithAdvancedOptions(
suite.T(),
suite,
suite.its.ac,
suite.its.AC,
sel.Selector,
suite.its.site.DriveID,
suite.its.site.DriveRootFolderID)
suite.its.Site.DriveID,
suite.its.Site.DriveRootFolderID)
}
func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointAlternateProtectedResource() {
sel := selectors.NewSharePointBackup([]string{suite.its.site.ID})
sel := selectors.NewSharePointBackup([]string{suite.its.Site.ID})
sel.Include(selTD.SharePointBackupFolderScope(sel))
sel.Filter(sel.Library("documents"))
sel.DiscreteOwner = suite.its.site.ID
sel.DiscreteOwner = suite.its.Site.ID
runDriveRestoreToAlternateProtectedResource(
RunDriveRestoreToAlternateProtectedResource(
suite.T(),
suite,
suite.its.ac,
suite.its.AC,
sel.Selector,
suite.its.site,
suite.its.secondarySite,
suite.its.secondarySite.ID)
suite.its.Site,
suite.its.SecondarySite,
suite.its.SecondarySite.ID)
}
func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointDeletedDrives() {
@ -276,13 +289,13 @@ func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointDelete
rc.OnCollision = control.Copy
// create a new drive
md, err := suite.its.ac.Lists().PostDrive(ctx, suite.its.site.ID, rc.Location)
md, err := suite.its.AC.Lists().PostDrive(ctx, suite.its.Site.ID, rc.Location)
require.NoError(t, err, clues.ToCore(err))
driveID := ptr.Val(md.GetId())
// get the root folder
mdi, err := suite.its.ac.Drives().GetRootFolder(ctx, driveID)
mdi, err := suite.its.AC.Drives().GetRootFolder(ctx, driveID)
require.NoError(t, err, clues.ToCore(err))
rootFolderID := ptr.Val(mdi.GetId())
@ -296,7 +309,7 @@ func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointDelete
file := models.NewFile()
item.SetFile(file)
_, err = suite.its.ac.Drives().PostItemInContainer(
_, err = suite.its.AC.Drives().PostItemInContainer(
ctx,
driveID,
rootFolderID,
@ -309,18 +322,18 @@ func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointDelete
mb = evmock.NewBus()
counter = count.New()
opts = control.DefaultOptions()
graphClient = suite.its.ac.Stable.Client()
graphClient = suite.its.AC.Stable.Client()
)
bsel := selectors.NewSharePointBackup([]string{suite.its.site.ID})
bsel := selectors.NewSharePointBackup([]string{suite.its.Site.ID})
bsel.Include(selTD.SharePointBackupFolderScope(bsel))
bsel.Filter(bsel.Library(rc.Location))
bsel.DiscreteOwner = suite.its.site.ID
bsel.DiscreteOwner = suite.its.Site.ID
bo, bod := prepNewTestBackupOp(t, ctx, mb, bsel.Selector, opts, version.Backup, counter)
defer bod.close(t, ctx)
bo, bod := PrepNewTestBackupOp(t, ctx, mb, bsel.Selector, opts, version.Backup, counter)
defer bod.Close(t, ctx)
runAndCheckBackup(t, ctx, &bo, mb, false)
RunAndCheckBackup(t, ctx, &bo, mb, false)
// test cases:
@ -348,18 +361,18 @@ func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointDelete
ctr = count.New()
)
ro, _ := prepNewTestRestoreOp(
ro, _ := PrepNewTestRestoreOp(
t,
ctx,
bod.st,
bod.St,
bo.Results.BackupID,
mb,
ctr,
bod.sel,
bod.Sel,
opts,
rc)
runAndCheckRestore(t, ctx, &ro, mb, false)
RunAndCheckRestore(t, ctx, &ro, mb, false)
assert.Equal(t, 1, ctr.Get(count.NewItemCreated), "restored an item")
resp, err := graphClient.
@ -399,23 +412,23 @@ func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointDelete
ctr = count.New()
)
ro, _ := prepNewTestRestoreOp(
ro, _ := PrepNewTestRestoreOp(
t,
ctx,
bod.st,
bod.St,
bo.Results.BackupID,
mb,
ctr,
bod.sel,
bod.Sel,
opts,
rc)
runAndCheckRestore(t, ctx, &ro, mb, false)
RunAndCheckRestore(t, ctx, &ro, mb, false)
assert.Equal(t, 1, ctr.Get(count.NewItemCreated), "restored an item")
pgr := suite.its.ac.
pgr := suite.its.AC.
Drives().
NewSiteDrivePager(suite.its.site.ID, []string{"id", "name"})
NewSiteDrivePager(suite.its.Site.ID, []string{"id", "name"})
drives, err := api.GetAllDrives(ctx, pgr)
require.NoError(t, err, clues.ToCore(err))
@ -434,7 +447,7 @@ func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointDelete
md = created
driveID = ptr.Val(md.GetId())
mdi, err := suite.its.ac.Drives().GetRootFolder(ctx, driveID)
mdi, err := suite.its.AC.Drives().GetRootFolder(ctx, driveID)
require.NoError(t, err, clues.ToCore(err))
rootFolderID = ptr.Val(mdi.GetId())
@ -467,18 +480,18 @@ func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointDelete
ctr = count.New()
)
ro, _ := prepNewTestRestoreOp(
ro, _ := PrepNewTestRestoreOp(
t,
ctx,
bod.st,
bod.St,
bo.Results.BackupID,
mb,
ctr,
bod.sel,
bod.Sel,
opts,
rc)
runAndCheckRestore(t, ctx, &ro, mb, false)
RunAndCheckRestore(t, ctx, &ro, mb, false)
assert.Equal(t, 1, ctr.Get(count.NewItemCreated), "restored an item")