diff --git a/src/internal/m365/collection/exchange/metadata.go b/src/internal/m365/collection/exchange/metadata.go
index 73a01b1d3..569573a6e 100644
--- a/src/internal/m365/collection/exchange/metadata.go
+++ b/src/internal/m365/collection/exchange/metadata.go
@@ -15,13 +15,13 @@ import (
 
 // MetadataFileNames produces the category-specific set of filenames used to
 // store graph metadata such as delta tokens and folderID->path references.
-func MetadataFileNames(cat path.CategoryType) []string {
+func MetadataFileNames(cat path.CategoryType) [][]string {
 	switch cat {
 	// TODO: should this include events?
 	case path.EmailCategory, path.ContactsCategory:
-		return []string{metadata.DeltaURLsFileName, metadata.PreviousPathFileName}
+		return [][]string{{metadata.DeltaURLsFileName}, {metadata.PreviousPathFileName}}
 	default:
-		return []string{metadata.PreviousPathFileName}
+		return [][]string{{metadata.PreviousPathFileName}}
 	}
 }
 
diff --git a/src/internal/operations/test/exchange_test.go b/src/internal/operations/test/exchange_test.go
index 91e185232..070bd33ae 100644
--- a/src/internal/operations/test/exchange_test.go
+++ b/src/internal/operations/test/exchange_test.go
@@ -61,7 +61,7 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
 		name          string
 		selector      func() *selectors.ExchangeBackup
 		category      path.CategoryType
-		metadataFiles []string
+		metadataFiles [][]string
 	}{
 		// {
 		//   name: "Mail",
@@ -140,7 +140,7 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
 				m365.AzureTenantID,
 				userID,
 				path.ExchangeService,
-				map[path.CategoryType][]string{test.category: test.metadataFiles})
+				map[path.CategoryType][][]string{test.category: test.metadataFiles})
 
 			_, expectDeets := deeTD.GetDeetsInBackup(
 				t,
@@ -194,7 +194,7 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
 				m365.AzureTenantID,
 				userID,
 				path.ExchangeService,
-				map[path.CategoryType][]string{test.category: test.metadataFiles})
+				map[path.CategoryType][][]string{test.category: test.metadataFiles})
 			deeTD.CheckBackupDetails(
 				t,
 				ctx,
@@ -243,7 +243,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
 		mb         = evmock.NewBus()
 		now        = dttm.Now()
 		service    = path.ExchangeService
-		categories = map[path.CategoryType][]string{
+		categories = map[path.CategoryType][][]string{
 			path.EmailCategory:    exchange.MetadataFileNames(path.EmailCategory),
 			path.ContactsCategory: exchange.MetadataFileNames(path.ContactsCategory),
 			// path.EventsCategory:  exchange.MetadataFileNames(path.EventsCategory),
@@ -439,6 +439,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
 		creds.AzureTenantID,
 		uidn.ID(),
 		"",
+		"",
 		destName,
 		2,
 		version.Backup,
@@ -577,7 +578,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
 			service,
 			category,
 			selectors.NewExchangeRestore([]string{uidn.ID()}).Selector,
-			creds.AzureTenantID, suite.its.user.ID, "", container3,
+			creds.AzureTenantID, suite.its.user.ID, "", "", container3,
 			2,
 			version.Backup,
 			gen.dbf)
diff --git a/src/internal/operations/test/group_test.go b/src/internal/operations/test/group_test.go
index 23d530eed..fc3b1db1c 100644
--- a/src/internal/operations/test/group_test.go
+++ b/src/internal/operations/test/group_test.go
@@ -1,11 +1,13 @@
 package test_test
 
 import (
+	"context"
 	"testing"
 
 	"github.com/stretchr/testify/suite"
 
 	evmock "github.com/alcionai/corso/src/internal/events/mock"
+	"github.com/alcionai/corso/src/internal/m365/collection/drive"
 	"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig" "github.com/alcionai/corso/src/internal/version" @@ -14,6 +16,7 @@ import ( "github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/selectors" selTD "github.com/alcionai/corso/src/pkg/selectors/testdata" + "github.com/alcionai/corso/src/pkg/services/m365/api" storeTD "github.com/alcionai/corso/src/pkg/storage/testdata" ) @@ -34,10 +37,48 @@ func (suite *GroupsBackupIntgSuite) SetupSuite() { suite.its = newIntegrationTesterSetup(suite.T()) } -// TODO(v1 backup): Incremental backup // TODO(v0,v1 restore): Library restore // TODO(v0 export): Channels export +func (suite *GroupsBackupIntgSuite) TestBackup_Run_incrementalGroups() { + sel := selectors.NewGroupsRestore([]string{suite.its.group.ID}) + + ic := func(cs []string) selectors.Selector { + sel.Include(sel.LibraryFolders(cs, selectors.PrefixMatch())) + return sel.Selector + } + + gtdi := func( + t *testing.T, + ctx context.Context, + ) string { + return suite.its.group.RootSite.DriveID + } + + gtsi := func( + t *testing.T, + ctx context.Context, + ) string { + return suite.its.group.RootSite.ID + } + + grh := func(ac api.Client) drive.RestoreHandler { + return drive.NewLibraryRestoreHandler(ac, path.GroupsService) + } + + runDriveIncrementalTest( + suite, + suite.its.group.ID, + suite.its.user.ID, + path.GroupsService, + path.LibrariesCategory, + ic, + gtdi, + gtsi, + grh, + true) +} + func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsBasic() { t := suite.T() @@ -52,8 +93,7 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsBasic() { ) sel.Include( - // TODO(abin): ensure implementation succeeds - // selTD.GroupsBackupLibraryFolderScope(sel), + selTD.GroupsBackupLibraryFolderScope(sel), selTD.GroupsBackupChannelScope(sel)) bo, bod := prepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup) diff --git a/src/internal/operations/test/helper_test.go b/src/internal/operations/test/helper_test.go index 4ae9fad6f..421420a2d 100644 --- a/src/internal/operations/test/helper_test.go +++ b/src/internal/operations/test/helper_test.go @@ -278,7 +278,7 @@ func checkMetadataFilesExist( ms *kopia.ModelStore, tenant, resourceOwner string, service path.ServiceType, - filesByCat map[path.CategoryType][]string, + filesByCat map[path.CategoryType][][]string, ) { for category, files := range filesByCat { t.Run(category.String(), func(t *testing.T) { @@ -293,7 +293,7 @@ func checkMetadataFilesExist( pathsByRef := map[string][]string{} for _, fName := range files { - p, err := path.BuildMetadata(tenant, resourceOwner, service, category, true, fName) + p, err := path.BuildMetadata(tenant, resourceOwner, service, category, true, fName...) 
 				if !assert.NoError(t, err, "bad metadata path", clues.ToCore(err)) {
 					continue
 				}
@@ -306,7 +306,7 @@ func checkMetadataFilesExist(
 				paths = append(
 					paths,
 					path.RestorePaths{StoragePath: p, RestorePath: dir})
-				pathsByRef[dir.ShortRef()] = append(pathsByRef[dir.ShortRef()], fName)
+				pathsByRef[dir.ShortRef()] = append(pathsByRef[dir.ShortRef()], fName[len(fName)-1])
 			}
 
 			cols, err := kw.ProduceRestoreCollections(
@@ -365,7 +365,7 @@ func generateContainerOfItems(
 	service path.ServiceType,
 	cat path.CategoryType,
 	sel selectors.Selector,
-	tenantID, resourceOwner, driveID, destFldr string,
+	tenantID, resourceOwner, siteID, driveID, destFldr string,
 	howManyItems int,
 	backupVersion int,
 	dbf dataBuilderFunc,
@@ -388,6 +388,8 @@ func generateContainerOfItems(
 	switch service {
 	case path.OneDriveService, path.SharePointService:
 		pathFolders = []string{odConsts.DrivesPathDir, driveID, odConsts.RootPathDir, destFldr}
+	case path.GroupsService:
+		pathFolders = []string{odConsts.SitesPathDir, siteID, odConsts.DrivesPathDir, driveID, odConsts.RootPathDir, destFldr}
 	}
 
 	collections := []incrementalCollection{{
diff --git a/src/internal/operations/test/onedrive_test.go b/src/internal/operations/test/onedrive_test.go
index db058f5af..384000d19 100644
--- a/src/internal/operations/test/onedrive_test.go
+++ b/src/internal/operations/test/onedrive_test.go
@@ -23,6 +23,7 @@ import (
 	"github.com/alcionai/corso/src/internal/m365/collection/drive"
 	"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
 	"github.com/alcionai/corso/src/internal/m365/graph"
+	odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
 	"github.com/alcionai/corso/src/internal/model"
 	"github.com/alcionai/corso/src/internal/streamstore"
 	"github.com/alcionai/corso/src/internal/tester"
@@ -144,6 +145,7 @@ func (suite *OneDriveBackupIntgSuite) TestBackup_Run_incrementalOneDrive() {
 		path.FilesCategory,
 		ic,
 		gtdi,
+		nil,
 		grh,
 		false)
 }
@@ -155,6 +157,7 @@ func runDriveIncrementalTest(
 	category path.CategoryType,
 	includeContainers func([]string) selectors.Selector,
 	getTestDriveID func(*testing.T, context.Context) string,
+	getTestSiteID func(*testing.T, context.Context) string,
 	getRestoreHandler func(api.Client) drive.RestoreHandler,
 	skipPermissionsTests bool,
 ) {
@@ -173,9 +176,7 @@ func runDriveIncrementalTest(
 		// some drives cannot have `:` in file/folder names
 		now = dttm.FormatNow(dttm.SafeForTesting)
 
-		categories = map[path.CategoryType][]string{
-			category: {bupMD.DeltaURLsFileName, bupMD.PreviousPathFileName},
-		}
+		categories = map[path.CategoryType][][]string{}
 		container1 = fmt.Sprintf("%s%d_%s", incrementalsDestContainerPrefix, 1, now)
 		container2 = fmt.Sprintf("%s%d_%s", incrementalsDestContainerPrefix, 2, now)
 		container3 = fmt.Sprintf("%s%d_%s", incrementalsDestContainerPrefix, 3, now)
@@ -188,6 +189,12 @@ func runDriveIncrementalTest(
 		containers = []string{container1, container2, container3}
 	)
 
+	if service == path.GroupsService && category == path.LibrariesCategory {
+		categories[category] = [][]string{{odConsts.SitesPathDir, bupMD.PreviousPathFileName}}
+	} else {
+		categories[category] = [][]string{{bupMD.DeltaURLsFileName}, {bupMD.PreviousPathFileName}}
+	}
+
 	sel := includeContainers(containers)
 
 	creds, err := acct.M365Config()
@@ -202,6 +209,7 @@ func runDriveIncrementalTest(
 	var (
 		atid    = creds.AzureTenantID
 		driveID = getTestDriveID(t, ctx)
+		siteID  = ""
 		fileDBF = func(id, timeStamp, subject, body string) []byte {
 			return []byte(id + subject)
 		}
 	)
 
+	// The site ID is only available for groups.
+	if getTestSiteID != nil {
+		siteID = getTestSiteID(t, ctx)
+	}
+
 	rrPfx, err := path.BuildPrefix(atid, roidn.ID(), service, category)
 	require.NoError(t, err, clues.ToCore(err))
@@ -293,7 +306,7 @@ func runDriveIncrementalTest(
 		service,
 		category,
 		sel,
-		atid, roidn.ID(), driveID, destName,
+		atid, roidn.ID(), siteID, driveID, destName,
 		2,
 		// Use an old backup version so we don't need metadata files.
 		0,
@@ -667,7 +680,7 @@ func runDriveIncrementalTest(
 		service,
 		category,
 		sel,
-		atid, roidn.ID(), driveID, container3,
+		atid, roidn.ID(), siteID, driveID, container3,
 		2,
 		0,
 		fileDBF)
@@ -757,6 +770,13 @@ func runDriveIncrementalTest(
 		assertReadWrite = assert.Equal
 	)
 
+	if service == path.GroupsService && category == path.LibrariesCategory {
+		// Groups SharePoint backups have an extra metadata file at
+		// /libraries/sites/previouspath.
+		expectWrites++
+		expectReads++
+	}
+
 	// Sharepoint can produce a superset of permissions by nature of
 	// its drive type. Since this counter comparison is a bit hacky
 	// to begin with, it's easiest to assert a <= comparison instead
@@ -791,8 +811,8 @@ func (suite *OneDriveBackupIntgSuite) TestBackup_Run_oneDriveOwnerMigration() {
 		opts = control.DefaultOptions()
 		mb   = evmock.NewBus()
 
-		categories = map[path.CategoryType][]string{
-			path.FilesCategory: {bupMD.DeltaURLsFileName, bupMD.PreviousPathFileName},
+		categories = map[path.CategoryType][][]string{
+			path.FilesCategory: {{bupMD.DeltaURLsFileName}, {bupMD.PreviousPathFileName}},
 		}
 	)
diff --git a/src/internal/operations/test/sharepoint_test.go b/src/internal/operations/test/sharepoint_test.go
index 055a86b2c..aa1558d07 100644
--- a/src/internal/operations/test/sharepoint_test.go
+++ b/src/internal/operations/test/sharepoint_test.go
@@ -84,6 +84,7 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_incrementalSharePoint() {
 		path.LibrariesCategory,
 		ic,
 		gtdi,
+		nil,
 		grh,
 		true)
 }
diff --git a/src/pkg/path/drive.go b/src/pkg/path/drive.go
index 666dade42..a654138a6 100644
--- a/src/pkg/path/drive.go
+++ b/src/pkg/path/drive.go
@@ -23,7 +23,7 @@ func ToDrivePath(p Path) (*DrivePath, error) {
 	folders := p.Folders()
 
 	// Must be at least `drives/<driveID>/root:`
-	if len(folders) < 3 {
+	if len(folders) < 3 || (p.Service() == GroupsService && len(folders) < 5) {
 		return nil, clues.
 			New("folder path doesn't match expected format for Drive items").
 			With("path_folders", p.Folder(false))
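Consumption sketch (not part of the change set): MetadataFileNames now returns [][]string, so each entry is a slice of path elements rather than a bare file name, which is how a Groups library can carry a sites/<siteID> prefix in front of its previouspath file. The snippet below only uses calls visible in this diff (exchange.MetadataFileNames, path.BuildMetadata with variadic elements); the tenant and owner IDs are hypothetical placeholders, and the boolean flag is passed the same way checkMetadataFilesExist passes it.

package main

import (
	"fmt"

	"github.com/alcionai/corso/src/internal/m365/collection/exchange"
	"github.com/alcionai/corso/src/pkg/path"
)

func main() {
	// Each entry is a slice of path elements, not a bare file name.
	for _, elems := range exchange.MetadataFileNames(path.EmailCategory) {
		// Mirrors the helper_test.go change: expand the elements variadically.
		p, err := path.BuildMetadata(
			"tenant-id", // hypothetical tenant ID
			"owner-id",  // hypothetical resource owner ID
			path.ExchangeService,
			path.EmailCategory,
			true, // same flag as in the checkMetadataFilesExist call above
			elems...)
		if err != nil {
			fmt.Println("bad metadata path:", err)
			continue
		}

		fmt.Println(p)
	}
}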