Add incremental backup test for groups (#4248)
<!-- PR description-->

---

#### Does this PR need a docs update or release note?

- [ ] ✅ Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x] ⛔ No

#### Type of change

<!--- Please check the type of change your PR introduces: --->
- [ ] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [x] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [ ] 🧹 Tech Debt/Cleanup

#### Issue(s)

<!-- Can reference multiple issues. Use one of the following "magic words" - "closes, fixes" to auto-close the Github issue. -->
* closes https://github.com/alcionai/corso/issues/3990

#### Test Plan

<!-- How will this be tested prior to merging.-->
- [ ] 💪 Manual
- [ ] ⚡ Unit test
- [x] 💚 E2E
This commit is contained in: parent f7042129f4, commit 4a7847936e
@@ -15,13 +15,13 @@ import (
 // MetadataFileNames produces the category-specific set of filenames used to
 // store graph metadata such as delta tokens and folderID->path references.
-func MetadataFileNames(cat path.CategoryType) []string {
+func MetadataFileNames(cat path.CategoryType) [][]string {
 	switch cat {
 	// TODO: should this include events?
 	case path.EmailCategory, path.ContactsCategory:
-		return []string{metadata.DeltaURLsFileName, metadata.PreviousPathFileName}
+		return [][]string{{metadata.DeltaURLsFileName}, {metadata.PreviousPathFileName}}
 	default:
-		return []string{metadata.PreviousPathFileName}
+		return [][]string{{metadata.PreviousPathFileName}}
 	}
 }
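For context on the signature change above: each per-category entry now holds a slice of path elements rather than a single file name, so a metadata file can be addressed under a nested folder prefix (the Groups libraries case later in this diff stores a previous-path file under a `sites` subfolder). Below is a minimal standalone sketch of the old and new shapes; the file-name constants are illustrative stand-ins for `metadata.DeltaURLsFileName` and `metadata.PreviousPathFileName`, not the package's guaranteed values.

```go
package main

import "fmt"

// Illustrative stand-ins for metadata.DeltaURLsFileName and
// metadata.PreviousPathFileName; the real constants live in corso's
// metadata packages.
const (
	deltaURLsFileName    = "delta"
	previousPathFileName = "previouspath"
)

func main() {
	// Old shape: a flat list of file names per category.
	old := []string{deltaURLsFileName, previousPathFileName}

	// New shape: each entry is a slice of path elements, so an entry can
	// carry a folder prefix in addition to the file name.
	flat := [][]string{{deltaURLsFileName}, {previousPathFileName}}
	nested := [][]string{{"sites", previousPathFileName}} // e.g. the Groups libraries entry

	fmt.Println(old, flat, nested)
}
```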
@@ -61,7 +61,7 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
 		name          string
 		selector      func() *selectors.ExchangeBackup
 		category      path.CategoryType
-		metadataFiles []string
+		metadataFiles [][]string
 	}{
 		// {
 		// name: "Mail",
@@ -140,7 +140,7 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
 				m365.AzureTenantID,
 				userID,
 				path.ExchangeService,
-				map[path.CategoryType][]string{test.category: test.metadataFiles})
+				map[path.CategoryType][][]string{test.category: test.metadataFiles})

 			_, expectDeets := deeTD.GetDeetsInBackup(
 				t,
@@ -194,7 +194,7 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
 				m365.AzureTenantID,
 				userID,
 				path.ExchangeService,
-				map[path.CategoryType][]string{test.category: test.metadataFiles})
+				map[path.CategoryType][][]string{test.category: test.metadataFiles})
 			deeTD.CheckBackupDetails(
 				t,
 				ctx,
@@ -243,7 +243,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
 		mb         = evmock.NewBus()
 		now        = dttm.Now()
 		service    = path.ExchangeService
-		categories = map[path.CategoryType][]string{
+		categories = map[path.CategoryType][][]string{
 			path.EmailCategory:    exchange.MetadataFileNames(path.EmailCategory),
 			path.ContactsCategory: exchange.MetadataFileNames(path.ContactsCategory),
 			// path.EventsCategory: exchange.MetadataFileNames(path.EventsCategory),
@@ -439,6 +439,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
 			creds.AzureTenantID,
 			uidn.ID(),
 			"",
+			"",
 			destName,
 			2,
 			version.Backup,
@@ -577,7 +578,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
 			service,
 			category,
 			selectors.NewExchangeRestore([]string{uidn.ID()}).Selector,
-			creds.AzureTenantID, suite.its.user.ID, "", container3,
+			creds.AzureTenantID, suite.its.user.ID, "", "", container3,
 			2,
 			version.Backup,
 			gen.dbf)
@@ -1,11 +1,13 @@
 package test_test

 import (
+	"context"
 	"testing"

 	"github.com/stretchr/testify/suite"

 	evmock "github.com/alcionai/corso/src/internal/events/mock"
+	"github.com/alcionai/corso/src/internal/m365/collection/drive"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/internal/tester/tconfig"
 	"github.com/alcionai/corso/src/internal/version"
@@ -14,6 +16,7 @@ import (
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/selectors"
 	selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
+	"github.com/alcionai/corso/src/pkg/services/m365/api"
 	storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
 )

@@ -34,10 +37,48 @@ func (suite *GroupsBackupIntgSuite) SetupSuite() {
 	suite.its = newIntegrationTesterSetup(suite.T())
 }

 // TODO(v1 backup): Incremental backup
 // TODO(v0,v1 restore): Library restore
 // TODO(v0 export): Channels export

+func (suite *GroupsBackupIntgSuite) TestBackup_Run_incrementalGroups() {
+	sel := selectors.NewGroupsRestore([]string{suite.its.group.ID})
+
+	ic := func(cs []string) selectors.Selector {
+		sel.Include(sel.LibraryFolders(cs, selectors.PrefixMatch()))
+		return sel.Selector
+	}
+
+	gtdi := func(
+		t *testing.T,
+		ctx context.Context,
+	) string {
+		return suite.its.group.RootSite.DriveID
+	}
+
+	gtsi := func(
+		t *testing.T,
+		ctx context.Context,
+	) string {
+		return suite.its.group.RootSite.ID
+	}
+
+	grh := func(ac api.Client) drive.RestoreHandler {
+		return drive.NewLibraryRestoreHandler(ac, path.GroupsService)
+	}
+
+	runDriveIncrementalTest(
+		suite,
+		suite.its.group.ID,
+		suite.its.user.ID,
+		path.GroupsService,
+		path.LibrariesCategory,
+		ic,
+		gtdi,
+		gtsi,
+		grh,
+		true)
+}
+
 func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsBasic() {
 	t := suite.T()

@@ -52,8 +93,7 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsBasic() {
 	)

 	sel.Include(
-		// TODO(abin): ensure implementation succeeds
-		// selTD.GroupsBackupLibraryFolderScope(sel),
+		selTD.GroupsBackupLibraryFolderScope(sel),
 		selTD.GroupsBackupChannelScope(sel))

 	bo, bod := prepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup)
@@ -278,7 +278,7 @@ func checkMetadataFilesExist(
 	ms *kopia.ModelStore,
 	tenant, resourceOwner string,
 	service path.ServiceType,
-	filesByCat map[path.CategoryType][]string,
+	filesByCat map[path.CategoryType][][]string,
 ) {
 	for category, files := range filesByCat {
 		t.Run(category.String(), func(t *testing.T) {
@@ -293,7 +293,7 @@ func checkMetadataFilesExist(
 			pathsByRef := map[string][]string{}

 			for _, fName := range files {
-				p, err := path.BuildMetadata(tenant, resourceOwner, service, category, true, fName)
+				p, err := path.BuildMetadata(tenant, resourceOwner, service, category, true, fName...)
 				if !assert.NoError(t, err, "bad metadata path", clues.ToCore(err)) {
 					continue
 				}
@@ -306,7 +306,7 @@ func checkMetadataFilesExist(
 				paths = append(
 					paths,
 					path.RestorePaths{StoragePath: p, RestorePath: dir})
-				pathsByRef[dir.ShortRef()] = append(pathsByRef[dir.ShortRef()], fName)
+				pathsByRef[dir.ShortRef()] = append(pathsByRef[dir.ShortRef()], fName[len(fName)-1])
 			}

 			cols, err := kw.ProduceRestoreCollections(
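The loop above now expands each entry into `path.BuildMetadata` through the variadic tail and records only the final element (the file name proper) in `pathsByRef`. A rough standalone sketch of that consumption pattern, where `buildMetadata` is a simplified stand-in for corso's `path.BuildMetadata`:

```go
package main

import (
	"fmt"
	gopath "path"
)

// buildMetadata is a simplified stand-in for corso's path.BuildMetadata:
// the variadic tail lets each metadata entry contribute one or more path
// elements (an optional folder prefix plus the file name).
func buildMetadata(tenant, owner, service, category string, elems ...string) string {
	parts := append([]string{tenant, owner, service, category}, elems...)
	return gopath.Join(parts...)
}

func main() {
	files := [][]string{
		{"delta"},                 // flat entry: just a file name
		{"sites", "previouspath"}, // nested entry: prefix + file name
	}

	for _, fName := range files {
		p := buildMetadata("tenant", "owner", "groups", "libraries", fName...)

		// Only the last element is the file name; that is what the test
		// helper above tracks per directory ShortRef.
		leaf := fName[len(fName)-1]

		fmt.Println(p, leaf)
	}
}
```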
@@ -365,7 +365,7 @@ func generateContainerOfItems(
 	service path.ServiceType,
 	cat path.CategoryType,
 	sel selectors.Selector,
-	tenantID, resourceOwner, driveID, destFldr string,
+	tenantID, resourceOwner, siteID, driveID, destFldr string,
 	howManyItems int,
 	backupVersion int,
 	dbf dataBuilderFunc,
@@ -388,6 +388,8 @@ func generateContainerOfItems(
 	switch service {
 	case path.OneDriveService, path.SharePointService:
 		pathFolders = []string{odConsts.DrivesPathDir, driveID, odConsts.RootPathDir, destFldr}
+	case path.GroupsService:
+		pathFolders = []string{odConsts.SitesPathDir, siteID, odConsts.DrivesPathDir, driveID, odConsts.RootPathDir, destFldr}
 	}

 	collections := []incrementalCollection{{
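The new `case path.GroupsService` above nests the drive under its owning site. A small sketch of the two folder layouts follows; the `"sites"`, `"drives"`, and `"root:"` literals are assumed stand-ins for `odConsts.SitesPathDir`, `odConsts.DrivesPathDir`, and `odConsts.RootPathDir`, not their guaranteed values:

```go
package main

import "fmt"

// pathFoldersFor mirrors the switch added above: OneDrive and SharePoint
// drive items sit under drives/<driveID>/root:, while Groups libraries nest
// the drive under its owning site.
func pathFoldersFor(service, siteID, driveID, destFldr string) []string {
	switch service {
	case "onedrive", "sharepoint":
		return []string{"drives", driveID, "root:", destFldr}
	case "groups":
		return []string{"sites", siteID, "drives", driveID, "root:", destFldr}
	}

	return nil
}

func main() {
	fmt.Println(pathFoldersFor("sharepoint", "", "drive-id", "dest"))
	// [drives drive-id root: dest]

	fmt.Println(pathFoldersFor("groups", "site-id", "drive-id", "dest"))
	// [sites site-id drives drive-id root: dest]
}
```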
@@ -23,6 +23,7 @@ import (
 	"github.com/alcionai/corso/src/internal/m365/collection/drive"
 	"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
 	"github.com/alcionai/corso/src/internal/m365/graph"
+	odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
 	"github.com/alcionai/corso/src/internal/model"
 	"github.com/alcionai/corso/src/internal/streamstore"
 	"github.com/alcionai/corso/src/internal/tester"
@@ -144,6 +145,7 @@ func (suite *OneDriveBackupIntgSuite) TestBackup_Run_incrementalOneDrive() {
 		path.FilesCategory,
 		ic,
 		gtdi,
+		nil,
 		grh,
 		false)
 }
@@ -155,6 +157,7 @@ func runDriveIncrementalTest(
 	category path.CategoryType,
 	includeContainers func([]string) selectors.Selector,
 	getTestDriveID func(*testing.T, context.Context) string,
+	getTestSiteID func(*testing.T, context.Context) string,
 	getRestoreHandler func(api.Client) drive.RestoreHandler,
 	skipPermissionsTests bool,
 ) {
@@ -173,9 +176,7 @@
 		// some drives cannot have `:` in file/folder names
 		now = dttm.FormatNow(dttm.SafeForTesting)

-		categories = map[path.CategoryType][]string{
-			category: {bupMD.DeltaURLsFileName, bupMD.PreviousPathFileName},
-		}
+		categories = map[path.CategoryType][][]string{}
 		container1 = fmt.Sprintf("%s%d_%s", incrementalsDestContainerPrefix, 1, now)
 		container2 = fmt.Sprintf("%s%d_%s", incrementalsDestContainerPrefix, 2, now)
 		container3 = fmt.Sprintf("%s%d_%s", incrementalsDestContainerPrefix, 3, now)
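The new `getTestSiteID` parameter only matters for Groups: the OneDrive and SharePoint callers pass `nil`, and the site ID stays empty unless a provider is supplied (see the nil-guard added further down in this diff). A hedged sketch of that wiring:

```go
package main

import "fmt"

// resolveSiteID mirrors the nil-guard added later in runDriveIncrementalTest:
// the site ID remains empty unless a provider was passed in.
func resolveSiteID(getTestSiteID func() string) string {
	siteID := ""

	// Will only be available for groups.
	if getTestSiteID != nil {
		siteID = getTestSiteID()
	}

	return siteID
}

func main() {
	fmt.Printf("onedrive/sharepoint: %q\n", resolveSiteID(nil))
	fmt.Printf("groups: %q\n", resolveSiteID(func() string { return "root-site-id" }))
}
```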
@@ -188,6 +189,12 @@
 		containers = []string{container1, container2, container3}
 	)

+	if service == path.GroupsService && category == path.LibrariesCategory {
+		categories[category] = [][]string{{odConsts.SitesPathDir, bupMD.PreviousPathFileName}}
+	} else {
+		categories[category] = [][]string{{bupMD.DeltaURLsFileName}, {bupMD.PreviousPathFileName}}
+	}
+
 	sel := includeContainers(containers)

 	creds, err := acct.M365Config()
@@ -202,6 +209,7 @@
 	var (
 		atid    = creds.AzureTenantID
 		driveID = getTestDriveID(t, ctx)
+		siteID  = ""
 		fileDBF = func(id, timeStamp, subject, body string) []byte {
 			return []byte(id + subject)
 		}
@@ -211,6 +219,11 @@
 		}
 	)

+	// Will only be available for groups
+	if getTestSiteID != nil {
+		siteID = getTestSiteID(t, ctx)
+	}
+
 	rrPfx, err := path.BuildPrefix(atid, roidn.ID(), service, category)
 	require.NoError(t, err, clues.ToCore(err))

@@ -293,7 +306,7 @@
 		service,
 		category,
 		sel,
-		atid, roidn.ID(), driveID, destName,
+		atid, roidn.ID(), siteID, driveID, destName,
 		2,
 		// Use an old backup version so we don't need metadata files.
 		0,
@@ -667,7 +680,7 @@
 			service,
 			category,
 			sel,
-			atid, roidn.ID(), driveID, container3,
+			atid, roidn.ID(), siteID, driveID, container3,
 			2,
 			0,
 			fileDBF)
@@ -757,6 +770,13 @@
 		assertReadWrite = assert.Equal
 	)

+	if service == path.GroupsService && category == path.LibrariesCategory {
+		// Groups SharePoint have an extra metadata file at
+		// /libraries/sites/previouspath
+		expectWrites++
+		expectReads++
+	}
+
 	// Sharepoint can produce a superset of permissions by nature of
 	// its drive type. Since this counter comparison is a bit hacky
 	// to begin with, it's easiest to assert a <= comparison instead
@@ -791,8 +811,8 @@ func (suite *OneDriveBackupIntgSuite) TestBackup_Run_oneDriveOwnerMigration() {
 		opts = control.DefaultOptions()
 		mb   = evmock.NewBus()

-		categories = map[path.CategoryType][]string{
-			path.FilesCategory: {bupMD.DeltaURLsFileName, bupMD.PreviousPathFileName},
+		categories = map[path.CategoryType][][]string{
+			path.FilesCategory: {{bupMD.DeltaURLsFileName}, {bupMD.PreviousPathFileName}},
 		}
 	)
@@ -84,6 +84,7 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_incrementalSharePoint() {
 		path.LibrariesCategory,
 		ic,
 		gtdi,
+		nil,
 		grh,
 		true)
 }
@@ -23,7 +23,7 @@ func ToDrivePath(p Path) (*DrivePath, error) {
 	folders := p.Folders()

 	// Must be at least `drives/<driveID>/root:`
-	if len(folders) < 3 {
+	if len(folders) < 3 || (p.Service() == GroupsService && len(folders) < 5) {
 		return nil, clues.
 			New("folder path doesn't match expected format for Drive items").
 			With("path_folders", p.Folder(false))
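The tightened check above reflects the minimum drive prefix per service: a plain drive path needs at least `drives/<driveID>/root:` (three folders), while a Groups drive path is prefixed with `sites/<siteID>` and so needs at least five. A minimal sketch of the same validation, using plain strings for the service names purely for illustration:

```go
package main

import (
	"fmt"
	"strings"
)

// validDrivePath mirrors the folder-count check in ToDrivePath: Groups drive
// paths carry a sites/<siteID> prefix ahead of drives/<driveID>/root:.
func validDrivePath(service string, folders []string) bool {
	if len(folders) < 3 || (service == "groups" && len(folders) < 5) {
		return false
	}

	return true
}

func main() {
	od := strings.Split("drives/drive-id/root:", "/")
	gr := strings.Split("sites/site-id/drives/drive-id/root:", "/")

	fmt.Println(validDrivePath("onedrive", od)) // true
	fmt.Println(validDrivePath("groups", od))   // false: missing the sites/<siteID> prefix
	fmt.Println(validDrivePath("groups", gr))   // true
}
```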