From 23338c2aa371d277b8ab9d5ed55c9bd6a1f4f136 Mon Sep 17 00:00:00 2001 From: ryanfkeepers Date: Wed, 19 Jul 2023 18:35:18 -0600 Subject: [PATCH] add restore to alternate resource adds support for restoring to a resource that differs from the one whose data appears in the backup. --- src/internal/common/idname/idname.go | 4 +- src/internal/common/idname/idname_test.go | 60 +++++ src/internal/m365/controller.go | 13 +- src/internal/m365/exchange/restore.go | 5 +- src/internal/m365/onedrive/restore.go | 104 +------- src/internal/m365/onedrive/restore_caches.go | 116 +++++++++ src/internal/operations/restore.go | 2 +- src/internal/operations/test/exchange_test.go | 233 +++++++++++++++++- src/internal/operations/test/helper_test.go | 89 ++++--- src/internal/operations/test/onedrive_test.go | 190 +++++++++++++- .../operations/test/sharepoint_test.go | 49 ++-- src/internal/tester/tconfig/config.go | 14 +- .../tester/tconfig/protected_resources.go | 11 + 13 files changed, 697 insertions(+), 193 deletions(-) create mode 100644 src/internal/common/idname/idname_test.go create mode 100644 src/internal/m365/onedrive/restore_caches.go diff --git a/src/internal/common/idname/idname.go b/src/internal/common/idname/idname.go index ebc40842c..e2a48fca3 100644 --- a/src/internal/common/idname/idname.go +++ b/src/internal/common/idname/idname.go @@ -77,8 +77,8 @@ func NewCache(idToName map[string]string) *cache { } func (c *cache) Add(id, name string) { - c.idToName[id] = name - c.nameToID[name] = id + c.idToName[strings.ToLower(id)] = name + c.nameToID[strings.ToLower(name)] = id } // IDOf returns the id associated with the given name. diff --git a/src/internal/common/idname/idname_test.go b/src/internal/common/idname/idname_test.go new file mode 100644 index 000000000..229177d61 --- /dev/null +++ b/src/internal/common/idname/idname_test.go @@ -0,0 +1,60 @@ +package idname + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" + + "github.com/alcionai/corso/src/internal/tester" +) + +type IDNameUnitSuite struct { + tester.Suite +} + +func TestIDNameUnitSuite(t *testing.T) { + suite.Run(t, &IDNameUnitSuite{Suite: tester.NewUnitSuite(t)}) +} + +func (suite *IDNameUnitSuite) TestAdd() { + table := []struct { + name string + inID string + inName string + searchID string + searchName string + }{ + { + name: "basic", + inID: "foo", + inName: "bar", + searchID: "foo", + searchName: "bar", + }, + { + name: "change casing", + inID: "FNORDS", + inName: "SMARF", + searchID: "fnords", + searchName: "smarf", + }, + } + for _, test := range table { + suite.Run(test.name, func() { + t := suite.T() + + cache := NewCache(nil) + + cache.Add(test.inID, test.inName) + + id, found := cache.IDOf(test.searchName) + assert.True(t, found) + assert.Equal(t, test.inID, id) + + name, found := cache.NameOf(test.searchID) + assert.True(t, found) + assert.Equal(t, test.inName, name) + }) + } +} diff --git a/src/internal/m365/controller.go b/src/internal/m365/controller.go index e3d3d6a80..d8be22886 100644 --- a/src/internal/m365/controller.go +++ b/src/internal/m365/controller.go @@ -83,10 +83,11 @@ func NewController( AC: ac, IDNameLookup: idname.NewCache(nil), - credentials: creds, - ownerLookup: rCli, - tenant: acct.ID(), - wg: &sync.WaitGroup{}, + credentials: creds, + ownerLookup: rCli, + tenant: acct.ID(), + wg: &sync.WaitGroup{}, + backupDriveIDNames: idname.NewCache(nil), } return &ctrl, nil @@ -149,10 +150,6 @@ func (ctrl *Controller) incrementAwaitingMessages() { } func (ctrl 
*Controller) CacheItemInfo(dii details.ItemInfo) { - if ctrl.backupDriveIDNames == nil { - ctrl.backupDriveIDNames = idname.NewCache(map[string]string{}) - } - if dii.SharePoint != nil { ctrl.backupDriveIDNames.Add(dii.SharePoint.DriveID, dii.SharePoint.DriveName) } diff --git a/src/internal/m365/exchange/restore.go b/src/internal/m365/exchange/restore.go index c95cb53f6..94e3b946a 100644 --- a/src/internal/m365/exchange/restore.go +++ b/src/internal/m365/exchange/restore.go @@ -40,16 +40,13 @@ func ConsumeRestoreCollections( } var ( - userID = dcs[0].FullPath().ResourceOwner() + userID = rcc.ProtectedResource.ID() directoryCache = make(map[path.CategoryType]graph.ContainerResolver) handlers = restoreHandlers(ac) metrics support.CollectionMetrics el = errs.Local() ) - // FIXME: should be user name - ctx = clues.Add(ctx, "resource_owner", clues.Hide(userID)) - for _, dc := range dcs { if el.Failure() != nil { break diff --git a/src/internal/m365/onedrive/restore.go b/src/internal/m365/onedrive/restore.go index 14e4dfa19..5dde04a03 100644 --- a/src/internal/m365/onedrive/restore.go +++ b/src/internal/m365/onedrive/restore.go @@ -39,107 +39,6 @@ const ( maxUploadRetries = 3 ) -type driveInfo struct { - id string - name string - rootFolderID string -} - -type restoreCaches struct { - BackupDriveIDName idname.Cacher - collisionKeyToItemID map[string]api.DriveItemIDType - DriveIDToDriveInfo map[string]driveInfo - DriveNameToDriveInfo map[string]driveInfo - Folders *folderCache - OldLinkShareIDToNewID map[string]string - OldPermIDToNewID map[string]string - ParentDirToMeta map[string]metadata.Metadata - - pool sync.Pool -} - -func (rc *restoreCaches) AddDrive( - ctx context.Context, - md models.Driveable, - grf GetRootFolderer, -) error { - di := driveInfo{ - id: ptr.Val(md.GetId()), - name: ptr.Val(md.GetName()), - } - - ctx = clues.Add(ctx, "drive_info", di) - - root, err := grf.GetRootFolder(ctx, di.id) - if err != nil { - return clues.Wrap(err, "getting drive root id") - } - - di.rootFolderID = ptr.Val(root.GetId()) - - rc.DriveIDToDriveInfo[di.id] = di - rc.DriveNameToDriveInfo[di.name] = di - - return nil -} - -// Populate looks up drive items available to the protectedResource -// and adds their info to the caches. 
-func (rc *restoreCaches) Populate( - ctx context.Context, - gdparf GetDrivePagerAndRootFolderer, - protectedResourceID string, -) error { - drives, err := api.GetAllDrives( - ctx, - gdparf.NewDrivePager(protectedResourceID, nil), - true, - maxDrivesRetries) - if err != nil { - return clues.Wrap(err, "getting drives") - } - - for _, md := range drives { - if err := rc.AddDrive(ctx, md, gdparf); err != nil { - return clues.Wrap(err, "caching drive") - } - } - - return nil -} - -type GetDrivePagerAndRootFolderer interface { - GetRootFolderer - NewDrivePagerer -} - -func NewRestoreCaches( - backupDriveIDNames idname.Cacher, -) *restoreCaches { - // avoid nil panics - if backupDriveIDNames == nil { - backupDriveIDNames = idname.NewCache(nil) - } - - return &restoreCaches{ - BackupDriveIDName: backupDriveIDNames, - collisionKeyToItemID: map[string]api.DriveItemIDType{}, - DriveIDToDriveInfo: map[string]driveInfo{}, - DriveNameToDriveInfo: map[string]driveInfo{}, - Folders: NewFolderCache(), - OldLinkShareIDToNewID: map[string]string{}, - OldPermIDToNewID: map[string]string{}, - ParentDirToMeta: map[string]metadata.Metadata{}, - // Buffer pool for uploads - pool: sync.Pool{ - New: func() any { - b := make([]byte, graph.CopyBufferSize) - return &b - }, - }, - } -} - // ConsumeRestoreCollections will restore the specified data collections into OneDrive func ConsumeRestoreCollections( ctx context.Context, @@ -155,7 +54,7 @@ func ConsumeRestoreCollections( restoreMetrics support.CollectionMetrics el = errs.Local() caches = NewRestoreCaches(backupDriveIDNames) - protectedResourceID = dcs[0].FullPath().ResourceOwner() + protectedResourceID = rcc.ProtectedResource.ID() fallbackDriveName = "" // onedrive cannot create drives ) @@ -182,7 +81,6 @@ func ConsumeRestoreCollections( ictx = clues.Add( ctx, "category", dc.FullPath().Category(), - "resource_owner", clues.Hide(protectedResourceID), "full_path", dc.FullPath()) ) diff --git a/src/internal/m365/onedrive/restore_caches.go b/src/internal/m365/onedrive/restore_caches.go new file mode 100644 index 000000000..6951a8bfe --- /dev/null +++ b/src/internal/m365/onedrive/restore_caches.go @@ -0,0 +1,116 @@ +package onedrive + +import ( + "context" + "sync" + + "github.com/alcionai/clues" + "github.com/microsoftgraph/msgraph-sdk-go/models" + + "github.com/alcionai/corso/src/internal/common/idname" + "github.com/alcionai/corso/src/internal/common/ptr" + "github.com/alcionai/corso/src/internal/m365/graph" + "github.com/alcionai/corso/src/internal/m365/onedrive/metadata" + "github.com/alcionai/corso/src/pkg/services/m365/api" +) + +type driveInfo struct { + id string + name string + rootFolderID string +} + +type restoreCaches struct { + BackupDriveIDName idname.Cacher + collisionKeyToItemID map[string]api.DriveItemIDType + DriveIDToDriveInfo map[string]driveInfo + DriveNameToDriveInfo map[string]driveInfo + Folders *folderCache + OldLinkShareIDToNewID map[string]string + OldPermIDToNewID map[string]string + ParentDirToMeta map[string]metadata.Metadata + + pool sync.Pool +} + +func (rc *restoreCaches) AddDrive( + ctx context.Context, + md models.Driveable, + grf GetRootFolderer, +) error { + di := driveInfo{ + id: ptr.Val(md.GetId()), + name: ptr.Val(md.GetName()), + } + + ctx = clues.Add(ctx, "drive_info", di) + + root, err := grf.GetRootFolder(ctx, di.id) + if err != nil { + return clues.Wrap(err, "getting drive root id") + } + + di.rootFolderID = ptr.Val(root.GetId()) + + rc.DriveIDToDriveInfo[di.id] = di + rc.DriveNameToDriveInfo[di.name] = di + + return nil 
+} + +// Populate looks up drive items available to the protectedResource +// and adds their info to the caches. +func (rc *restoreCaches) Populate( + ctx context.Context, + gdparf GetDrivePagerAndRootFolderer, + protectedResourceID string, +) error { + drives, err := api.GetAllDrives( + ctx, + gdparf.NewDrivePager(protectedResourceID, nil), + true, + maxDrivesRetries) + if err != nil { + return clues.Wrap(err, "getting drives") + } + + for _, md := range drives { + if err := rc.AddDrive(ctx, md, gdparf); err != nil { + return clues.Wrap(err, "caching drive") + } + } + + return nil +} + +type GetDrivePagerAndRootFolderer interface { + GetRootFolderer + NewDrivePagerer +} + +func NewRestoreCaches( + backupDriveIDNames idname.Cacher, +) *restoreCaches { + // avoid nil panics + if backupDriveIDNames == nil { + backupDriveIDNames = idname.NewCache(nil) + } + + return &restoreCaches{ + BackupDriveIDName: backupDriveIDNames, + collisionKeyToItemID: map[string]api.DriveItemIDType{}, + DriveIDToDriveInfo: map[string]driveInfo{}, + DriveNameToDriveInfo: map[string]driveInfo{}, + Folders: NewFolderCache(), + OldLinkShareIDToNewID: map[string]string{}, + OldPermIDToNewID: map[string]string{}, + ParentDirToMeta: map[string]metadata.Metadata{}, + // Buffer pool for uploads + pool: sync.Pool{ + New: func() any { + b := make([]byte, graph.CopyBufferSize) + return &b + }, + }, + } +} diff --git a/src/internal/operations/restore.go b/src/internal/operations/restore.go index 63cc936ec..a43c4f615 100644 --- a/src/internal/operations/restore.go +++ b/src/internal/operations/restore.go @@ -173,7 +173,7 @@ func (op *RestoreOperation) Run(ctx context.Context) (restoreDetails *details.De logger.CtxErr(ctx, err).Error("running restore") if errors.Is(err, kopia.ErrNoRestorePath) { - op.Errors.Fail(clues.New("empty backup or unknown path provided")) + op.Errors.Fail(clues.Wrap(err, "empty backup or unknown path provided")) } op.Errors.Fail(clues.Wrap(err, "running restore")) diff --git a/src/internal/operations/test/exchange_test.go b/src/internal/operations/test/exchange_test.go index a08001d37..55cc16a31 100644 --- a/src/internal/operations/test/exchange_test.go +++ b/src/internal/operations/test/exchange_test.go @@ -67,9 +67,9 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() { { name: "Mail", selector: func() *selectors.ExchangeBackup { - sel := selectors.NewExchangeBackup([]string{suite.its.userID}) + sel := selectors.NewExchangeBackup([]string{suite.its.user.ID}) sel.Include(sel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch())) - sel.DiscreteOwner = suite.its.userID + sel.DiscreteOwner = suite.its.user.ID return sel }, @@ -79,7 +79,7 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() { { name: "Contacts", selector: func() *selectors.ExchangeBackup { - sel := selectors.NewExchangeBackup([]string{suite.its.userID}) + sel := selectors.NewExchangeBackup([]string{suite.its.user.ID}) sel.Include(sel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch())) return sel }, @@ -89,7 +89,7 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() { { name: "Calendar Events", selector: func() *selectors.ExchangeBackup { - sel := selectors.NewExchangeBackup([]string{suite.its.userID}) + sel := selectors.NewExchangeBackup([]string{suite.its.user.ID}) sel.Include(sel.EventCalendars([]string{api.DefaultCalendar}, selectors.PrefixMatch())) return sel }, @@ -258,7 +258,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr // 
later on during the tests. Putting their identifiers into the selector // at this point is harmless. containers = []string{container1, container2, container3, containerRename} - sel = selectors.NewExchangeBackup([]string{suite.its.userID}) + sel = selectors.NewExchangeBackup([]string{suite.its.user.ID}) whatSet = deeTD.CategoryFromRepoRef opts = control.DefaultOptions() ) @@ -295,7 +295,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr mailDBF := func(id, timeStamp, subject, body string) []byte { return exchMock.MessageWith( - suite.its.userID, suite.its.userID, suite.its.userID, + suite.its.user.ID, suite.its.user.ID, suite.its.user.ID, subject, body, body, now, now, now, now) } @@ -312,7 +312,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr eventDBF := func(id, timeStamp, subject, body string) []byte { return exchMock.EventWith( - suite.its.userID, subject, body, body, + suite.its.user.ID, subject, body, body, exchMock.NoOriginalStartDate, now, now, exchMock.NoRecurrence, exchMock.NoAttendees, exchMock.NoAttachments, exchMock.NoCancelledOccurrences, @@ -578,7 +578,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr service, category, selectors.NewExchangeRestore([]string{uidn.ID()}).Selector, - creds.AzureTenantID, suite.its.userID, "", container3, + creds.AzureTenantID, suite.its.user.ID, "", container3, 2, version.Backup, gen.dbf) @@ -897,7 +897,7 @@ func (suite *ExchangeRestoreIntgSuite) TestRestore_Run_exchangeWithAdvancedOptio // a backup is required to run restores - baseSel := selectors.NewExchangeBackup([]string{suite.its.userID}) + baseSel := selectors.NewExchangeBackup([]string{suite.its.user.ID}) baseSel.Include( // events cannot be run, for the same reason as incremental backups: the user needs // to have their account recycled. @@ -905,7 +905,7 @@ func (suite *ExchangeRestoreIntgSuite) TestRestore_Run_exchangeWithAdvancedOptio baseSel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()), baseSel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch())) - baseSel.DiscreteOwner = suite.its.userID + baseSel.DiscreteOwner = suite.its.user.ID var ( mb = evmock.NewBus() @@ -1272,3 +1272,216 @@ func (suite *ExchangeRestoreIntgSuite) TestRestore_Run_exchangeWithAdvancedOptio assert.Len(t, result, 0, "no items should have been added as copies") }) } + +func (suite *ExchangeRestoreIntgSuite) TestRestore_Run_exchangeAlternateProtectedResource() { + t := suite.T() + + ctx, flush := tester.NewContext(t) + defer flush() + + // a backup is required to run restores + + baseSel := selectors.NewExchangeBackup([]string{suite.its.user.ID}) + baseSel.Include( + // events cannot be run, for the same reason as incremental backups: the user needs + // to have their account recycled. 
+ // base_sel.EventCalendars([]string{api.DefaultCalendar}, selectors.PrefixMatch()), + baseSel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()), + baseSel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch())) + + baseSel.DiscreteOwner = suite.its.user.ID + + var ( + mb = evmock.NewBus() + opts = control.DefaultOptions() + ) + + bo, bod := prepNewTestBackupOp(t, ctx, mb, baseSel.Selector, opts, version.Backup) + defer bod.close(t, ctx) + + runAndCheckBackup(t, ctx, &bo, mb, false) + + rsel, err := baseSel.ToExchangeRestore() + require.NoError(t, err, clues.ToCore(err)) + + var ( + restoreCfg = ctrlTD.DefaultRestoreConfig("exchange_restore_to_user") + sel = rsel.Selector + userID = suite.its.user.ID + secondaryUserID = suite.its.secondaryUser.ID + uid = userID + acCont = suite.its.ac.Contacts() + acMail = suite.its.ac.Mail() + // acEvts = suite.its.ac.Events() + firstCtr = count.New() + ) + + restoreCfg.OnCollision = control.Copy + mb = evmock.NewBus() + + // first restore to the current user + + ro1, _ := prepNewTestRestoreOp( + t, + ctx, + bod.st, + bo.Results.BackupID, + mb, + firstCtr, + sel, + opts, + restoreCfg) + + runAndCheckRestore(t, ctx, &ro1, mb, false) + + // get all files in folder, use these as the base + // set of files to compare against. + + var ( + userItemIDs = map[path.CategoryType]map[string]struct{}{} + userCollisionKeys = map[path.CategoryType]map[string]string{} + ) + + // --- contacts + cat := path.ContactsCategory + userItemIDs[cat], userCollisionKeys[cat] = getCollKeysAndItemIDs( + t, + ctx, + acCont, + uid, + "", + restoreCfg.Location) + + // --- events + // cat = path.EventsCategory + // userItemIDs[cat], userCollisionKeys[cat] = getCollKeysAndItemIDs( + // t, + // ctx, + // acEvts, + // uid, + // "", + // restoreCfg.Location) + + // --- mail + cat = path.EmailCategory + userItemIDs[cat], userCollisionKeys[cat] = getCollKeysAndItemIDs( + t, + ctx, + acMail, + uid, + "", + restoreCfg.Location, + api.MailInbox) + + // then restore to the secondary user + + uid = secondaryUserID + mb = evmock.NewBus() + secondCtr := count.New() + restoreCfg.ProtectedResource = uid + + ro2, _ := prepNewTestRestoreOp( + t, + ctx, + bod.st, + bo.Results.BackupID, + mb, + secondCtr, + sel, + opts, + restoreCfg) + + runAndCheckRestore(t, ctx, &ro2, mb, false) + + var ( + secondaryItemIDs = map[path.CategoryType]map[string]struct{}{} + secondaryCollisionKeys = map[path.CategoryType]map[string]string{} + ) + + // --- contacts + cat = path.ContactsCategory + secondaryItemIDs[cat], secondaryCollisionKeys[cat] = getCollKeysAndItemIDs( + t, + ctx, + acCont, + uid, + "", + restoreCfg.Location) + + // --- events + // cat = path.EventsCategory + // secondaryItemIDs[cat], secondaryCollisionKeys[cat] = getCollKeysAndItemIDs( + // t, + // ctx, + // acEvts, + // uid, + // "", + // restoreCfg.Location) + + // --- mail + cat = path.EmailCategory + secondaryItemIDs[cat], secondaryCollisionKeys[cat] = getCollKeysAndItemIDs( + t, + ctx, + acMail, + uid, + "", + restoreCfg.Location, + api.MailInbox) + + // compare restore results + for _, cat := range []path.CategoryType{path.ContactsCategory, path.EmailCategory, path.EventsCategory} { + assert.Equal(t, len(userItemIDs[cat]), len(secondaryItemIDs[cat])) + assert.ElementsMatch(t, maps.Keys(userCollisionKeys[cat]), maps.Keys(secondaryCollisionKeys[cat])) + } +} + +type GetItemsKeysAndContainerByNameer interface { + GetItemIDsInContainer( + ctx context.Context, + userID, containerID string, + ) (map[string]struct{}, error) + 
GetContainerByName( + ctx context.Context, + userID, parentContainerID, containerName string, + ) (graph.Container, error) + GetItemsInContainerByCollisionKey( + ctx context.Context, + userID, containerID string, + ) (map[string]string, error) +} + +func getCollKeysAndItemIDs( + t *testing.T, + ctx context.Context, //revive:disable-line:context-as-argument + gikacbn GetItemsKeysAndContainerByNameer, + userID, parentContainerID string, + containerNames ...string, +) (map[string]struct{}, map[string]string) { + var ( + c graph.Container + err error + cID string + ) + + for _, cn := range containerNames { + pcid := parentContainerID + + if len(cID) != 0 { + pcid = cID + } + + c, err = gikacbn.GetContainerByName(ctx, userID, pcid, cn) + require.NoError(t, err, clues.ToCore(err)) + + cID = ptr.Val(c.GetId()) + } + + itemIDs, err := gikacbn.GetItemIDsInContainer(ctx, userID, cID) + require.NoError(t, err, clues.ToCore(err)) + + collisionKeys, err := gikacbn.GetItemsInContainerByCollisionKey(ctx, userID, cID) + require.NoError(t, err, clues.ToCore(err)) + + return itemIDs, collisionKeys +} diff --git a/src/internal/operations/test/helper_test.go b/src/internal/operations/test/helper_test.go index 7329b65ba..c3b9ceb81 100644 --- a/src/internal/operations/test/helper_test.go +++ b/src/internal/operations/test/helper_test.go @@ -574,15 +574,19 @@ func ControllerWithSelector( // Suite Setup // --------------------------------------------------------------------------- +type ids struct { + ID string + DriveID string + DriveRootFolderID string +} + type intgTesterSetup struct { - ac api.Client - gockAC api.Client - userID string - userDriveID string - userDriveRootFolderID string - siteID string - siteDriveID string - siteDriveRootFolderID string + ac api.Client + gockAC api.Client + user ids + secondaryUser ids + site ids + secondarySite ids } func newIntegrationTesterSetup(t *testing.T) intgTesterSetup { @@ -603,37 +607,52 @@ func newIntegrationTesterSetup(t *testing.T) intgTesterSetup { its.gockAC, err = mock.NewClient(creds) require.NoError(t, err, clues.ToCore(err)) - // user drive - - its.userID = tconfig.M365UserID(t) - - userDrive, err := its.ac.Users().GetDefaultDrive(ctx, its.userID) - require.NoError(t, err, clues.ToCore(err)) - - its.userDriveID = ptr.Val(userDrive.GetId()) - - userDriveRootFolder, err := its.ac.Drives().GetRootFolder(ctx, its.userDriveID) - require.NoError(t, err, clues.ToCore(err)) - - its.userDriveRootFolderID = ptr.Val(userDriveRootFolder.GetId()) - - its.siteID = tconfig.M365SiteID(t) - - // site - - siteDrive, err := its.ac.Sites().GetDefaultDrive(ctx, its.siteID) - require.NoError(t, err, clues.ToCore(err)) - - its.siteDriveID = ptr.Val(siteDrive.GetId()) - - siteDriveRootFolder, err := its.ac.Drives().GetRootFolder(ctx, its.siteDriveID) - require.NoError(t, err, clues.ToCore(err)) - - its.siteDriveRootFolderID = ptr.Val(siteDriveRootFolder.GetId()) + its.user = userIDs(t, tconfig.M365UserID(t), its.ac) + its.secondaryUser = userIDs(t, tconfig.SecondaryM365UserID(t), its.ac) + its.site = siteIDs(t, tconfig.M365SiteID(t), its.ac) + its.secondarySite = siteIDs(t, tconfig.SecondaryM365SiteID(t), its.ac) return its } +func userIDs(t *testing.T, id string, ac api.Client) ids { + ctx, flush := tester.NewContext(t) + defer flush() + + r := ids{ID: id} + + drive, err := ac.Users().GetDefaultDrive(ctx, id) + require.NoError(t, err, clues.ToCore(err)) + + r.DriveID = ptr.Val(drive.GetId()) + + driveRootFolder, err := ac.Drives().GetRootFolder(ctx, r.DriveID) + 
require.NoError(t, err, clues.ToCore(err)) + + r.DriveRootFolderID = ptr.Val(driveRootFolder.GetId()) + + return r +} + +func siteIDs(t *testing.T, id string, ac api.Client) ids { + ctx, flush := tester.NewContext(t) + defer flush() + + r := ids{ID: id} + + drive, err := ac.Sites().GetDefaultDrive(ctx, id) + require.NoError(t, err, clues.ToCore(err)) + + r.DriveID = ptr.Val(drive.GetId()) + + driveRootFolder, err := ac.Drives().GetRootFolder(ctx, r.DriveID) + require.NoError(t, err, clues.ToCore(err)) + + r.DriveRootFolderID = ptr.Val(driveRootFolder.GetId()) + + return r +} + func getTestExtensionFactories() []extensions.CreateItemExtensioner { return []extensions.CreateItemExtensioner{ &extensions.MockItemExtensionFactory{}, diff --git a/src/internal/operations/test/onedrive_test.go b/src/internal/operations/test/onedrive_test.go index ee5289e02..b5057be31 100644 --- a/src/internal/operations/test/onedrive_test.go +++ b/src/internal/operations/test/onedrive_test.go @@ -106,7 +106,7 @@ func (suite *OneDriveBackupIntgSuite) TestBackup_Run_oneDrive() { } func (suite *OneDriveBackupIntgSuite) TestBackup_Run_incrementalOneDrive() { - sel := selectors.NewOneDriveRestore([]string{suite.its.userID}) + sel := selectors.NewOneDriveRestore([]string{suite.its.user.ID}) ic := func(cs []string) selectors.Selector { sel.Include(sel.Folders(cs, selectors.PrefixMatch())) @@ -117,10 +117,10 @@ func (suite *OneDriveBackupIntgSuite) TestBackup_Run_incrementalOneDrive() { t *testing.T, ctx context.Context, ) string { - d, err := suite.its.ac.Users().GetDefaultDrive(ctx, suite.its.userID) + d, err := suite.its.ac.Users().GetDefaultDrive(ctx, suite.its.user.ID) if err != nil { err = graph.Wrap(ctx, err, "retrieving default user drive"). - With("user", suite.its.userID) + With("user", suite.its.user.ID) } require.NoError(t, err, clues.ToCore(err)) @@ -137,8 +137,8 @@ func (suite *OneDriveBackupIntgSuite) TestBackup_Run_incrementalOneDrive() { runDriveIncrementalTest( suite, - suite.its.userID, - suite.its.userID, + suite.its.user.ID, + suite.its.user.ID, resource.Users, path.OneDriveService, path.FilesCategory, @@ -804,7 +804,7 @@ func (suite *OneDriveBackupIntgSuite) TestBackup_Run_oneDriveOwnerMigration() { control.DefaultOptions()) require.NoError(t, err, clues.ToCore(err)) - userable, err := ctrl.AC.Users().GetByID(ctx, suite.its.userID) + userable, err := ctrl.AC.Users().GetByID(ctx, suite.its.user.ID) require.NoError(t, err, clues.ToCore(err)) uid := ptr.Val(userable.GetId()) @@ -982,17 +982,17 @@ func (suite *OneDriveRestoreIntgSuite) SetupSuite() { } func (suite *OneDriveRestoreIntgSuite) TestRestore_Run_onedriveWithAdvancedOptions() { - sel := selectors.NewOneDriveBackup([]string{suite.its.userID}) + sel := selectors.NewOneDriveBackup([]string{suite.its.user.ID}) sel.Include(selTD.OneDriveBackupFolderScope(sel)) - sel.DiscreteOwner = suite.its.userID + sel.DiscreteOwner = suite.its.user.ID runDriveRestoreWithAdvancedOptions( suite.T(), suite, suite.its.ac, sel.Selector, - suite.its.userDriveID, - suite.its.userDriveRootFolderID) + suite.its.user.DriveID, + suite.its.user.DriveRootFolderID) } func runDriveRestoreWithAdvancedOptions( @@ -1250,3 +1250,173 @@ func runDriveRestoreWithAdvancedOptions( assert.Subset(t, maps.Keys(currentFileIDs), maps.Keys(fileIDs), "original item should exist after copy") }) } + +func (suite *OneDriveRestoreIntgSuite) TestRestore_Run_onedriveAlternateProtectedResource() { + sel := selectors.NewOneDriveBackup([]string{suite.its.user.ID}) + 
sel.Include(selTD.OneDriveBackupFolderScope(sel)) + sel.DiscreteOwner = suite.its.user.ID + + runDriveRestoreToAlternateProtectedResource( + suite.T(), + suite, + suite.its.ac, + sel.Selector, + suite.its.user, + suite.its.secondaryUser) +} + +func runDriveRestoreToAlternateProtectedResource( + t *testing.T, + suite tester.Suite, + ac api.Client, + sel selectors.Selector, // owner should match 'from', both Restore and Backup types work. + from, to ids, +) { + ctx, flush := tester.NewContext(t) + defer flush() + + // a backup is required to run restores + + var ( + mb = evmock.NewBus() + opts = control.DefaultOptions() + ) + + bo, bod := prepNewTestBackupOp(t, ctx, mb, sel, opts, version.Backup) + defer bod.close(t, ctx) + + runAndCheckBackup(t, ctx, &bo, mb, false) + + var ( + restoreCfg = ctrlTD.DefaultRestoreConfig("drive_restore_to_resource") + fromCollisionKeys map[string]api.DriveItemIDType + fromItemIDs map[string]api.DriveItemIDType + acd = ac.Drives() + ) + + // first restore to the 'from' resource + + suite.Run("restore original resource", func() { + mb = evmock.NewBus() + fromCtr := count.New() + driveID := from.DriveID + rootFolderID := from.DriveRootFolderID + restoreCfg.OnCollision = control.Copy + + ro, _ := prepNewTestRestoreOp( + t, + ctx, + bod.st, + bo.Results.BackupID, + mb, + fromCtr, + sel, + opts, + restoreCfg) + + runAndCheckRestore(t, ctx, &ro, mb, false) + + // get all files in folder, use these as the base + // set of files to compare against. + fromItemIDs, fromCollisionKeys = getDriveCollKeysAndItemIDs( + t, + ctx, + acd, + driveID, + rootFolderID, + restoreCfg.Location, + selTD.TestFolderName) + }) + + // then restore to the 'to' resource + var ( + toCollisionKeys map[string]api.DriveItemIDType + toItemIDs map[string]api.DriveItemIDType + ) + + suite.Run("restore to alternate resource", func() { + mb = evmock.NewBus() + toCtr := count.New() + driveID := to.DriveID + rootFolderID := to.DriveRootFolderID + restoreCfg.ProtectedResource = to.ID + + ro, _ := prepNewTestRestoreOp( + t, + ctx, + bod.st, + bo.Results.BackupID, + mb, + toCtr, + sel, + opts, + restoreCfg) + + runAndCheckRestore(t, ctx, &ro, mb, false) + + // get all files in folder, use these as the base + // set of files to compare against. 
+ toItemIDs, toCollisionKeys = getDriveCollKeysAndItemIDs( + t, + ctx, + acd, + driveID, + rootFolderID, + restoreCfg.Location, + selTD.TestFolderName) + }) + + // compare restore results + assert.Equal(t, len(fromItemIDs), len(toItemIDs)) + assert.ElementsMatch(t, maps.Keys(fromCollisionKeys), maps.Keys(toCollisionKeys)) +} + +type GetItemsKeysAndFolderByNameer interface { + GetItemIDsInContainer( + ctx context.Context, + driveID, containerID string, + ) (map[string]api.DriveItemIDType, error) + GetFolderByName( + ctx context.Context, + driveID, parentFolderID, folderName string, + ) (models.DriveItemable, error) + GetItemsInContainerByCollisionKey( + ctx context.Context, + driveID, containerID string, + ) (map[string]api.DriveItemIDType, error) +} + +func getDriveCollKeysAndItemIDs( + t *testing.T, + ctx context.Context, //revive:disable-line:context-as-argument + gikafbn GetItemsKeysAndFolderByNameer, + driveID, parentContainerID string, + containerNames ...string, +) (map[string]api.DriveItemIDType, map[string]api.DriveItemIDType) { + var ( + c models.DriveItemable + err error + cID string + ) + + for _, cn := range containerNames { + pcid := parentContainerID + + if len(cID) != 0 { + pcid = cID + } + + c, err = gikafbn.GetFolderByName(ctx, driveID, pcid, cn) + require.NoError(t, err, clues.ToCore(err)) + + cID = ptr.Val(c.GetId()) + } + + itemIDs, err := gikafbn.GetItemIDsInContainer(ctx, driveID, cID) + require.NoError(t, err, clues.ToCore(err)) + + collisionKeys, err := gikafbn.GetItemsInContainerByCollisionKey(ctx, driveID, cID) + require.NoError(t, err, clues.ToCore(err)) + + return itemIDs, collisionKeys +} diff --git a/src/internal/operations/test/sharepoint_test.go b/src/internal/operations/test/sharepoint_test.go index d31b4e9b7..ad7c609d0 100644 --- a/src/internal/operations/test/sharepoint_test.go +++ b/src/internal/operations/test/sharepoint_test.go @@ -49,7 +49,7 @@ func (suite *SharePointBackupIntgSuite) SetupSuite() { } func (suite *SharePointBackupIntgSuite) TestBackup_Run_incrementalSharePoint() { - sel := selectors.NewSharePointRestore([]string{suite.its.siteID}) + sel := selectors.NewSharePointRestore([]string{suite.its.site.ID}) ic := func(cs []string) selectors.Selector { sel.Include(sel.LibraryFolders(cs, selectors.PrefixMatch())) @@ -60,10 +60,10 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_incrementalSharePoint() { t *testing.T, ctx context.Context, ) string { - d, err := suite.its.ac.Sites().GetDefaultDrive(ctx, suite.its.siteID) + d, err := suite.its.ac.Sites().GetDefaultDrive(ctx, suite.its.site.ID) if err != nil { err = graph.Wrap(ctx, err, "retrieving default site drive"). 
- With("site", suite.its.siteID) + With("site", suite.its.site.ID) } require.NoError(t, err, clues.ToCore(err)) @@ -80,8 +80,8 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_incrementalSharePoint() { runDriveIncrementalTest( suite, - suite.its.siteID, - suite.its.userID, + suite.its.site.ID, + suite.its.user.ID, resource.Sites, path.SharePointService, path.LibrariesCategory, @@ -99,7 +99,7 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_sharePoint() { var ( mb = evmock.NewBus() - sel = selectors.NewSharePointBackup([]string{suite.its.siteID}) + sel = selectors.NewSharePointBackup([]string{suite.its.site.ID}) opts = control.DefaultOptions() ) @@ -116,7 +116,7 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_sharePoint() { bod.sw, &bo, bod.sel, - suite.its.siteID, + suite.its.site.ID, path.LibrariesCategory) } @@ -128,7 +128,7 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_sharePointExtensions() { var ( mb = evmock.NewBus() - sel = selectors.NewSharePointBackup([]string{suite.its.siteID}) + sel = selectors.NewSharePointBackup([]string{suite.its.site.ID}) opts = control.DefaultOptions() tenID = tconfig.M365TenantID(t) svc = path.SharePointService @@ -150,7 +150,7 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_sharePointExtensions() { bod.sw, &bo, bod.sel, - suite.its.siteID, + suite.its.site.ID, path.LibrariesCategory) bID := bo.Results.BackupID @@ -201,18 +201,33 @@ func (suite *SharePointRestoreIntgSuite) SetupSuite() { } func (suite *SharePointRestoreIntgSuite) TestRestore_Run_sharepointWithAdvancedOptions() { - sel := selectors.NewSharePointBackup([]string{suite.its.siteID}) + sel := selectors.NewSharePointBackup([]string{suite.its.site.ID}) sel.Include(selTD.SharePointBackupFolderScope(sel)) sel.Filter(sel.Library("documents")) - sel.DiscreteOwner = suite.its.siteID + sel.DiscreteOwner = suite.its.site.ID runDriveRestoreWithAdvancedOptions( suite.T(), suite, suite.its.ac, sel.Selector, - suite.its.siteDriveID, - suite.its.siteDriveRootFolderID) + suite.its.site.DriveID, + suite.its.site.DriveRootFolderID) +} + +func (suite *SharePointRestoreIntgSuite) TestRestore_Run_sharepointAlternateProtectedResource() { + sel := selectors.NewSharePointBackup([]string{suite.its.site.ID}) + sel.Include(selTD.SharePointBackupFolderScope(sel)) + sel.Filter(sel.Library("documents")) + sel.DiscreteOwner = suite.its.site.ID + + runDriveRestoreToAlternateProtectedResource( + suite.T(), + suite, + suite.its.ac, + sel.Selector, + suite.its.site, + suite.its.secondarySite) } func (suite *SharePointRestoreIntgSuite) TestRestore_Run_sharepointDeletedDrives() { @@ -229,7 +244,7 @@ func (suite *SharePointRestoreIntgSuite) TestRestore_Run_sharepointDeletedDrives rc.OnCollision = control.Copy // create a new drive - md, err := suite.its.ac.Lists().PostDrive(ctx, suite.its.siteID, rc.Location) + md, err := suite.its.ac.Lists().PostDrive(ctx, suite.its.site.ID, rc.Location) require.NoError(t, err, clues.ToCore(err)) driveID := ptr.Val(md.GetId()) @@ -264,10 +279,10 @@ func (suite *SharePointRestoreIntgSuite) TestRestore_Run_sharepointDeletedDrives graphClient = suite.its.ac.Stable.Client() ) - bsel := selectors.NewSharePointBackup([]string{suite.its.siteID}) + bsel := selectors.NewSharePointBackup([]string{suite.its.site.ID}) bsel.Include(selTD.SharePointBackupFolderScope(bsel)) bsel.Filter(bsel.Library(rc.Location)) - bsel.DiscreteOwner = suite.its.siteID + bsel.DiscreteOwner = suite.its.site.ID bo, bod := prepNewTestBackupOp(t, ctx, mb, bsel.Selector, opts, 
version.Backup) defer bod.close(t, ctx) @@ -367,7 +382,7 @@ func (suite *SharePointRestoreIntgSuite) TestRestore_Run_sharepointDeletedDrives pgr := suite.its.ac. Drives(). - NewSiteDrivePager(suite.its.siteID, []string{"id", "name"}) + NewSiteDrivePager(suite.its.site.ID, []string{"id", "name"}) drives, err := api.GetAllDrives(ctx, pgr, false, -1) require.NoError(t, err, clues.ToCore(err)) diff --git a/src/internal/tester/tconfig/config.go b/src/internal/tester/tconfig/config.go index c6bcd6e4b..ad047973a 100644 --- a/src/internal/tester/tconfig/config.go +++ b/src/internal/tester/tconfig/config.go @@ -23,6 +23,7 @@ const ( // M365 config TestCfgAzureTenantID = "azure_tenantid" + TestCfgSecondarySiteID = "secondarym365siteid" TestCfgSiteID = "m365siteid" TestCfgSiteURL = "m365siteurl" TestCfgUserID = "m365userid" @@ -36,13 +37,14 @@ const ( // test specific env vars const ( + EnvCorsoM365LoadTestUserID = "CORSO_M365_LOAD_TEST_USER_ID" + EnvCorsoM365LoadTestOrgUsers = "CORSO_M365_LOAD_TEST_ORG_USERS" EnvCorsoM365TestSiteID = "CORSO_M365_TEST_SITE_ID" EnvCorsoM365TestSiteURL = "CORSO_M365_TEST_SITE_URL" EnvCorsoM365TestUserID = "CORSO_M365_TEST_USER_ID" + EnvCorsoSecondaryM365TestSiteID = "CORSO_SECONDARY_M365_TEST_SITE_ID" EnvCorsoSecondaryM365TestUserID = "CORSO_SECONDARY_M365_TEST_USER_ID" EnvCorsoTertiaryM365TestUserID = "CORSO_TERTIARY_M365_TEST_USER_ID" - EnvCorsoM365LoadTestUserID = "CORSO_M365_LOAD_TEST_USER_ID" - EnvCorsoM365LoadTestOrgUsers = "CORSO_M365_LOAD_TEST_ORG_USERS" EnvCorsoTestConfigFilePath = "CORSO_TEST_CONFIG_FILE" EnvCorsoUnlicensedM365TestUserID = "CORSO_M365_TEST_UNLICENSED_USER" ) @@ -147,13 +149,19 @@ func ReadTestConfig() (map[string]string, error) { TestCfgSiteID, os.Getenv(EnvCorsoM365TestSiteID), vpr.GetString(TestCfgSiteID), - "10rqc2.sharepoint.com,4892edf5-2ebf-46be-a6e5-a40b2cbf1c1a,38ab6d06-fc82-4417-af93-22d8733c22be") + "4892edf5-2ebf-46be-a6e5-a40b2cbf1c1a,38ab6d06-fc82-4417-af93-22d8733c22be") fallbackTo( testEnv, TestCfgSiteURL, os.Getenv(EnvCorsoM365TestSiteURL), vpr.GetString(TestCfgSiteURL), "https://10rqc2.sharepoint.com/sites/CorsoCI") + fallbackTo( + testEnv, + TestCfgSecondarySiteID, + os.Getenv(EnvCorsoSecondaryM365TestSiteID), + vpr.GetString(TestCfgSecondarySiteID), + "053684d8-ca6c-4376-a03e-2567816bb091,9b3e9abe-6a5e-4084-8b44-ea5a356fe02c") fallbackTo( testEnv, TestCfgUnlicensedUserID, diff --git a/src/internal/tester/tconfig/protected_resources.go b/src/internal/tester/tconfig/protected_resources.go index b9e31ce06..bd2fded46 100644 --- a/src/internal/tester/tconfig/protected_resources.go +++ b/src/internal/tester/tconfig/protected_resources.go @@ -198,6 +198,17 @@ func GetM365SiteID(ctx context.Context) string { return strings.ToLower(cfg[TestCfgSiteID]) } +// SecondaryM365SiteID returns a siteID string representing the secondarym365SiteID described +// by either the env var CORSO_SECONDARY_M365_TEST_SITE_ID, the corso_test.toml config +// file or the default value (in that order of priority). The default is a +// last-attempt fallback that will only work on alcion's testing org. 
+func SecondaryM365SiteID(t *testing.T) string { + cfg, err := ReadTestConfig() + require.NoError(t, err, "retrieving secondary m365 site id from test configuration: %+v", clues.ToCore(err)) + + return strings.ToLower(cfg[TestCfgSecondarySiteID]) +} + // UnlicensedM365UserID returns an userID string representing the m365UserID // described by either the env var CORSO_M365_TEST_UNLICENSED_USER, the // corso_test.toml config file or the default value (in that order of priority).
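
Usage, in brief: restoring into a different protected resource follows the same
flow as the new integration tests above; take a normal backup, then point the
restore config at the alternate owner. A minimal sketch, reusing the test
scaffolding from this patch (prepNewTestRestoreOp, runAndCheckRestore,
ctrlTD.DefaultRestoreConfig, and the suite fixtures); only the
ProtectedResource field is the new piece, everything else is unchanged wiring.

    // restore the backup taken for suite.its.user into the secondary user
    mb := evmock.NewBus()
    restoreCfg := ctrlTD.DefaultRestoreConfig("restore_to_alternate_resource")
    restoreCfg.OnCollision = control.Copy
    restoreCfg.ProtectedResource = suite.its.secondaryUser.ID // the new field

    ro, _ := prepNewTestRestoreOp(
        t, ctx,
        bod.st,
        bo.Results.BackupID,
        mb,
        count.New(),
        sel,
        control.DefaultOptions(),
        restoreCfg)

    runAndCheckRestore(t, ctx, &ro, mb, false)

Running these tests outside the default org requires the new secondary-resource
settings: CORSO_SECONDARY_M365_TEST_USER_ID and CORSO_SECONDARY_M365_TEST_SITE_ID
(or secondarym365siteid in corso_test.toml); the hard-coded fallbacks only
resolve inside the Alcion test tenant.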
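
For the drive-backed services, the alternate resource owns different drive IDs
than the ones recorded in the backup, which is why the restore caches carry
both BackupDriveIDName (fed by Controller.CacheItemInfo while backup details
are enumerated) and DriveNameToDriveInfo (filled by Populate/AddDrive against
the restore target). The lookup that bridges the two is not part of this diff;
the sketch below is an assumption about how the caches compose, not a quote of
the restore code.

    // assumed path: backup drive ID -> backup drive name -> drive on the target
    driveName, ok := caches.BackupDriveIDName.NameOf(backupDriveID)
    if !ok {
        driveName = fallbackDriveName // "" for onedrive, which cannot create drives
    }

    if di, found := caches.DriveNameToDriveInfo[driveName]; found {
        // di.id and di.rootFolderID are what the item-level restore calls need
        targetDriveID, targetRootFolderID := di.id, di.rootFolderID
        _, _ = targetDriveID, targetRootFolderID
    }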
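
One small prerequisite for that name-based matching is the idname change at the
top of the patch: cache.Add now lowercases both keys before storing, so entries
added with any casing can be found with lowercased lookups, while the stored
values keep their original casing. Roughly, per the new unit test:

    c := idname.NewCache(nil)
    c.Add("FNORDS", "SMARF")

    id, found := c.IDOf("smarf")        // id == "FNORDS", found == true
    name, found2 := c.NameOf("fnords")  // name == "SMARF", found2 == true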