Add incremental backup test for groups (#4248)
---

#### Does this PR need a docs update or release note?

- [ ] ✅ Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x] ⛔ No

#### Type of change

- [ ] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [x] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [ ] 🧹 Tech Debt/Cleanup

#### Issue(s)

* closes https://github.com/alcionai/corso/issues/3990

#### Test Plan

- [ ] 💪 Manual
- [ ] ⚡ Unit test
- [x] 💚 E2E
parent f7042129f4 · commit 4a7847936e
@@ -15,13 +15,13 @@ import (
 // MetadataFileNames produces the category-specific set of filenames used to
 // store graph metadata such as delta tokens and folderID->path references.
-func MetadataFileNames(cat path.CategoryType) []string {
+func MetadataFileNames(cat path.CategoryType) [][]string {
 	switch cat {
 	// TODO: should this include events?
 	case path.EmailCategory, path.ContactsCategory:
-		return []string{metadata.DeltaURLsFileName, metadata.PreviousPathFileName}
+		return [][]string{{metadata.DeltaURLsFileName}, {metadata.PreviousPathFileName}}
 	default:
-		return []string{metadata.PreviousPathFileName}
+		return [][]string{{metadata.PreviousPathFileName}}
 	}
 }
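The return type changes from `[]string` to `[][]string`: each metadata file is now named by a slice of path segments rather than a single filename, so the nested groups library metadata file (which sits under a `sites/` subfolder, per later hunks in this diff) can be expressed alongside flat filenames. A minimal standalone sketch of the shape, using literal strings as stand-ins for `metadata.DeltaURLsFileName`, `metadata.PreviousPathFileName`, and `odConsts.SitesPathDir` (the literals are assumptions, not taken from the PR):

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Flat, single-segment names (Exchange email/contacts after this change).
	exchangeFiles := [][]string{{"delta"}, {"previouspath"}}

	// Nested, multi-segment name (groups libraries, introduced later in this diff).
	groupsLibraryFiles := [][]string{{"sites", "previouspath"}}

	for _, segs := range append(exchangeFiles, groupsLibraryFiles...) {
		// The real code hands segs to path.BuildMetadata(..., segs...) instead
		// of joining them by hand; the join here is only for display.
		fmt.Println(strings.Join(segs, "/"))
	}
}
```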
@@ -61,7 +61,7 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
 		name          string
 		selector      func() *selectors.ExchangeBackup
 		category      path.CategoryType
-		metadataFiles []string
+		metadataFiles [][]string
 	}{
 		// {
 		// 	name: "Mail",
@@ -140,7 +140,7 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
 				m365.AzureTenantID,
 				userID,
 				path.ExchangeService,
-				map[path.CategoryType][]string{test.category: test.metadataFiles})
+				map[path.CategoryType][][]string{test.category: test.metadataFiles})

 			_, expectDeets := deeTD.GetDeetsInBackup(
 				t,
@@ -194,7 +194,7 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
 				m365.AzureTenantID,
 				userID,
 				path.ExchangeService,
-				map[path.CategoryType][]string{test.category: test.metadataFiles})
+				map[path.CategoryType][][]string{test.category: test.metadataFiles})
 			deeTD.CheckBackupDetails(
 				t,
 				ctx,
@@ -243,7 +243,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
 		mb      = evmock.NewBus()
 		now     = dttm.Now()
 		service = path.ExchangeService
-		categories = map[path.CategoryType][]string{
+		categories = map[path.CategoryType][][]string{
 			path.EmailCategory:    exchange.MetadataFileNames(path.EmailCategory),
 			path.ContactsCategory: exchange.MetadataFileNames(path.ContactsCategory),
 			// path.EventsCategory: exchange.MetadataFileNames(path.EventsCategory),
@@ -439,6 +439,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
 			creds.AzureTenantID,
 			uidn.ID(),
 			"",
+			"",
 			destName,
 			2,
 			version.Backup,
@@ -577,7 +578,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
 			service,
 			category,
 			selectors.NewExchangeRestore([]string{uidn.ID()}).Selector,
-			creds.AzureTenantID, suite.its.user.ID, "", container3,
+			creds.AzureTenantID, suite.its.user.ID, "", "", container3,
 			2,
 			version.Backup,
 			gen.dbf)
@@ -1,11 +1,13 @@
 package test_test

 import (
+	"context"
 	"testing"

 	"github.com/stretchr/testify/suite"

 	evmock "github.com/alcionai/corso/src/internal/events/mock"
+	"github.com/alcionai/corso/src/internal/m365/collection/drive"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/internal/tester/tconfig"
 	"github.com/alcionai/corso/src/internal/version"
@@ -14,6 +16,7 @@ import (
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/selectors"
 	selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
+	"github.com/alcionai/corso/src/pkg/services/m365/api"
 	storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
 )
@@ -34,10 +37,48 @@ func (suite *GroupsBackupIntgSuite) SetupSuite() {
 	suite.its = newIntegrationTesterSetup(suite.T())
 }

-// TODO(v1 backup): Incremental backup
 // TODO(v0,v1 restore): Library restore
 // TODO(v0 export): Channels export

+func (suite *GroupsBackupIntgSuite) TestBackup_Run_incrementalGroups() {
+	sel := selectors.NewGroupsRestore([]string{suite.its.group.ID})
+
+	ic := func(cs []string) selectors.Selector {
+		sel.Include(sel.LibraryFolders(cs, selectors.PrefixMatch()))
+		return sel.Selector
+	}
+
+	gtdi := func(
+		t *testing.T,
+		ctx context.Context,
+	) string {
+		return suite.its.group.RootSite.DriveID
+	}
+
+	gtsi := func(
+		t *testing.T,
+		ctx context.Context,
+	) string {
+		return suite.its.group.RootSite.ID
+	}
+
+	grh := func(ac api.Client) drive.RestoreHandler {
+		return drive.NewLibraryRestoreHandler(ac, path.GroupsService)
+	}
+
+	runDriveIncrementalTest(
+		suite,
+		suite.its.group.ID,
+		suite.its.user.ID,
+		path.GroupsService,
+		path.LibrariesCategory,
+		ic,
+		gtdi,
+		gtsi,
+		grh,
+		true)
+}
+
 func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsBasic() {
 	t := suite.T()
@@ -52,8 +93,7 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsBasic() {
 	)

 	sel.Include(
-		// TODO(abin): ensure implementation succeeds
-		// selTD.GroupsBackupLibraryFolderScope(sel),
+		selTD.GroupsBackupLibraryFolderScope(sel),
 		selTD.GroupsBackupChannelScope(sel))

 	bo, bod := prepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup)
@@ -278,7 +278,7 @@ func checkMetadataFilesExist(
 	ms *kopia.ModelStore,
 	tenant, resourceOwner string,
 	service path.ServiceType,
-	filesByCat map[path.CategoryType][]string,
+	filesByCat map[path.CategoryType][][]string,
 ) {
 	for category, files := range filesByCat {
 		t.Run(category.String(), func(t *testing.T) {
@@ -293,7 +293,7 @@ func checkMetadataFilesExist(
 			pathsByRef := map[string][]string{}

 			for _, fName := range files {
-				p, err := path.BuildMetadata(tenant, resourceOwner, service, category, true, fName)
+				p, err := path.BuildMetadata(tenant, resourceOwner, service, category, true, fName...)
 				if !assert.NoError(t, err, "bad metadata path", clues.ToCore(err)) {
 					continue
 				}
@@ -306,7 +306,7 @@ func checkMetadataFilesExist(
 				paths = append(
 					paths,
 					path.RestorePaths{StoragePath: p, RestorePath: dir})
-				pathsByRef[dir.ShortRef()] = append(pathsByRef[dir.ShortRef()], fName)
+				pathsByRef[dir.ShortRef()] = append(pathsByRef[dir.ShortRef()], fName[len(fName)-1])
 			}

 			cols, err := kw.ProduceRestoreCollections(
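With multi-segment names, `checkMetadataFilesExist` builds the storage path from the full slice but records only the leaf segment as the expected item name under its parent directory, which is why the appended value becomes `fName[len(fName)-1]`. A small standalone illustration with placeholder segments (this is a sketch, not the PR's code):

```go
package main

import "fmt"

func main() {
	// A multi-segment metadata name, e.g. the groups/libraries entry.
	fName := []string{"sites", "previouspath"}

	parentSegs := fName[:len(fName)-1] // folders nested under the category's metadata prefix
	leaf := fName[len(fName)-1]        // the item name expected inside that folder

	fmt.Println(parentSegs, leaf) // [sites] previouspath
}
```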
@@ -365,7 +365,7 @@ func generateContainerOfItems(
 	service path.ServiceType,
 	cat path.CategoryType,
 	sel selectors.Selector,
-	tenantID, resourceOwner, driveID, destFldr string,
+	tenantID, resourceOwner, siteID, driveID, destFldr string,
 	howManyItems int,
 	backupVersion int,
 	dbf dataBuilderFunc,
@@ -388,6 +388,8 @@ func generateContainerOfItems(
 	switch service {
 	case path.OneDriveService, path.SharePointService:
 		pathFolders = []string{odConsts.DrivesPathDir, driveID, odConsts.RootPathDir, destFldr}
+	case path.GroupsService:
+		pathFolders = []string{odConsts.SitesPathDir, siteID, odConsts.DrivesPathDir, driveID, odConsts.RootPathDir, destFldr}
 	}

 	collections := []incrementalCollection{{
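For groups, generated test containers now live one level deeper, under the owning site. A sketch of the two folder layouts, using literal strings where the PR uses `odConsts.DrivesPathDir`, `odConsts.SitesPathDir`, and `odConsts.RootPathDir` (assumed here to be "drives", "sites", and "root:"), with placeholder IDs:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	driveID, siteID, destFldr := "<driveID>", "<siteID>", "<destFldr>"

	// OneDrive / SharePoint layout (unchanged by this PR).
	oneDrive := []string{"drives", driveID, "root:", destFldr}

	// Groups layout (new case): the drive is addressed through its site.
	groups := []string{"sites", siteID, "drives", driveID, "root:", destFldr}

	fmt.Println(strings.Join(oneDrive, "/"))
	fmt.Println(strings.Join(groups, "/"))
}
```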
@@ -23,6 +23,7 @@ import (
 	"github.com/alcionai/corso/src/internal/m365/collection/drive"
 	"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
 	"github.com/alcionai/corso/src/internal/m365/graph"
+	odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
 	"github.com/alcionai/corso/src/internal/model"
 	"github.com/alcionai/corso/src/internal/streamstore"
 	"github.com/alcionai/corso/src/internal/tester"
@@ -144,6 +145,7 @@ func (suite *OneDriveBackupIntgSuite) TestBackup_Run_incrementalOneDrive() {
 		path.FilesCategory,
 		ic,
 		gtdi,
+		nil,
 		grh,
 		false)
 }
@@ -155,6 +157,7 @@ func runDriveIncrementalTest(
 	category path.CategoryType,
 	includeContainers func([]string) selectors.Selector,
 	getTestDriveID func(*testing.T, context.Context) string,
+	getTestSiteID func(*testing.T, context.Context) string,
 	getRestoreHandler func(api.Client) drive.RestoreHandler,
 	skipPermissionsTests bool,
 ) {
@@ -173,9 +176,7 @@ func runDriveIncrementalTest(
 		// some drives cannot have `:` in file/folder names
 		now = dttm.FormatNow(dttm.SafeForTesting)

-		categories = map[path.CategoryType][]string{
-			category: {bupMD.DeltaURLsFileName, bupMD.PreviousPathFileName},
-		}
+		categories = map[path.CategoryType][][]string{}
 		container1 = fmt.Sprintf("%s%d_%s", incrementalsDestContainerPrefix, 1, now)
 		container2 = fmt.Sprintf("%s%d_%s", incrementalsDestContainerPrefix, 2, now)
 		container3 = fmt.Sprintf("%s%d_%s", incrementalsDestContainerPrefix, 3, now)
@@ -188,6 +189,12 @@ func runDriveIncrementalTest(
 		containers = []string{container1, container2, container3}
 	)

+	if service == path.GroupsService && category == path.LibrariesCategory {
+		categories[category] = [][]string{{odConsts.SitesPathDir, bupMD.PreviousPathFileName}}
+	} else {
+		categories[category] = [][]string{{bupMD.DeltaURLsFileName}, {bupMD.PreviousPathFileName}}
+	}
+
 	sel := includeContainers(containers)

 	creds, err := acct.M365Config()
@@ -202,6 +209,7 @@ func runDriveIncrementalTest(
 	var (
 		atid    = creds.AzureTenantID
 		driveID = getTestDriveID(t, ctx)
+		siteID  = ""
 		fileDBF = func(id, timeStamp, subject, body string) []byte {
 			return []byte(id + subject)
 		}
@@ -211,6 +219,11 @@ func runDriveIncrementalTest(
 		}
 	)

+	// Will only be available for groups
+	if getTestSiteID != nil {
+		siteID = getTestSiteID(t, ctx)
+	}
+
 	rrPfx, err := path.BuildPrefix(atid, roidn.ID(), service, category)
 	require.NoError(t, err, clues.ToCore(err))
@@ -293,7 +306,7 @@ func runDriveIncrementalTest(
 		service,
 		category,
 		sel,
-		atid, roidn.ID(), driveID, destName,
+		atid, roidn.ID(), siteID, driveID, destName,
 		2,
 		// Use an old backup version so we don't need metadata files.
 		0,
@@ -667,7 +680,7 @@ func runDriveIncrementalTest(
 		service,
 		category,
 		sel,
-		atid, roidn.ID(), driveID, container3,
+		atid, roidn.ID(), siteID, driveID, container3,
 		2,
 		0,
 		fileDBF)
@@ -757,6 +770,13 @@ func runDriveIncrementalTest(
 		assertReadWrite = assert.Equal
 	)

+	if service == path.GroupsService && category == path.LibrariesCategory {
+		// Groups SharePoint have an extra metadata file at
+		// /libraries/sites/previouspath
+		expectWrites++
+		expectReads++
+	}
+
 	// Sharepoint can produce a superset of permissions by nature of
 	// its drive type. Since this counter comparison is a bit hacky
 	// to begin with, it's easiest to assert a <= comparison instead
@@ -791,8 +811,8 @@ func (suite *OneDriveBackupIntgSuite) TestBackup_Run_oneDriveOwnerMigration() {
 		opts = control.DefaultOptions()
 		mb   = evmock.NewBus()

-		categories = map[path.CategoryType][]string{
-			path.FilesCategory: {bupMD.DeltaURLsFileName, bupMD.PreviousPathFileName},
+		categories = map[path.CategoryType][][]string{
+			path.FilesCategory: {{bupMD.DeltaURLsFileName}, {bupMD.PreviousPathFileName}},
 		}
 	)
@@ -84,6 +84,7 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_incrementalSharePoint() {
 		path.LibrariesCategory,
 		ic,
 		gtdi,
+		nil,
 		grh,
 		true)
 }
@@ -23,7 +23,7 @@ func ToDrivePath(p Path) (*DrivePath, error) {
 	folders := p.Folders()

 	// Must be at least `drives/<driveID>/root:`
-	if len(folders) < 3 {
+	if len(folders) < 3 || (p.Service() == GroupsService && len(folders) < 5) {
 		return nil, clues.
 			New("folder path doesn't match expected format for Drive items").
 			With("path_folders", p.Folder(false))
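The extra clause raises the minimum folder count for groups drive paths, which carry a `sites/<siteID>/` prefix before `drives/<driveID>/root:`. A tiny standalone sketch of the validation rule in isolation (placeholder logic, not the package's code):

```go
package main

import "fmt"

// validDriveFolderCount mirrors the check above: plain drive paths need at
// least drives/<driveID>/root: (3 segments), while groups drive paths need at
// least sites/<siteID>/drives/<driveID>/root: (5 segments).
func validDriveFolderCount(isGroups bool, folderCount int) bool {
	if isGroups {
		return folderCount >= 5
	}

	return folderCount >= 3
}

func main() {
	fmt.Println(validDriveFolderCount(false, 3)) // true
	fmt.Println(validDriveFolderCount(true, 3))  // false: missing sites/<siteID> prefix
	fmt.Println(validDriveFolderCount(true, 5))  // true
}
```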