add restore to alternate resource

adds support for restoring to a resource that
differs from the one whose data appears in the backup.
ryanfkeepers 2023-07-19 18:35:18 -06:00
parent 683fb248e3
commit 23338c2aa3
13 changed files with 697 additions and 193 deletions
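In practice, a caller opts in by pointing the restore config at the target resource before building the restore operation. A minimal sketch, mirroring the integration tests added below (prepNewTestRestoreOp, runAndCheckRestore, and the surrounding backup/selector values are test helpers assumed to be in scope):

// Restore the backed-up data into a resource other than the original owner.
var (
	mb         = evmock.NewBus()
	opts       = control.DefaultOptions()
	restoreCfg = ctrlTD.DefaultRestoreConfig("restore_to_alternate_resource")
)

restoreCfg.OnCollision = control.Copy
// The only new wiring: name the alternate protected resource explicitly.
restoreCfg.ProtectedResource = secondaryUserID // hypothetical target resource

ro, _ := prepNewTestRestoreOp(
	t, ctx,
	bod.st,
	bo.Results.BackupID,
	mb,
	count.New(),
	sel, // selector whose DiscreteOwner is still the original owner
	opts,
	restoreCfg)

runAndCheckRestore(t, ctx, &ro, mb, false)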

View File

@ -77,8 +77,8 @@ func NewCache(idToName map[string]string) *cache {
}
func (c *cache) Add(id, name string) {
-c.idToName[id] = name
-c.nameToID[name] = id
+c.idToName[strings.ToLower(id)] = name
+c.nameToID[strings.ToLower(name)] = id
}
// IDOf returns the id associated with the given name.

View File

@ -0,0 +1,60 @@
package idname
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
)
type IDNameUnitSuite struct {
tester.Suite
}
func TestIDNameUnitSuite(t *testing.T) {
suite.Run(t, &IDNameUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *IDNameUnitSuite) TestAdd() {
table := []struct {
name string
inID string
inName string
searchID string
searchName string
}{
{
name: "basic",
inID: "foo",
inName: "bar",
searchID: "foo",
searchName: "bar",
},
{
name: "change casing",
inID: "FNORDS",
inName: "SMARF",
searchID: "fnords",
searchName: "smarf",
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
cache := NewCache(nil)
cache.Add(test.inID, test.inName)
id, found := cache.IDOf(test.searchName)
assert.True(t, found)
assert.Equal(t, test.inID, id)
name, found := cache.NameOf(test.searchID)
assert.True(t, found)
assert.Equal(t, test.inName, name)
})
}
}

View File

@ -83,10 +83,11 @@ func NewController(
AC: ac,
IDNameLookup: idname.NewCache(nil),
credentials: creds,
ownerLookup: rCli,
tenant: acct.ID(),
wg: &sync.WaitGroup{},
+backupDriveIDNames: idname.NewCache(nil),
}
return &ctrl, nil
@ -149,10 +150,6 @@ func (ctrl *Controller) incrementAwaitingMessages() {
}
func (ctrl *Controller) CacheItemInfo(dii details.ItemInfo) {
-if ctrl.backupDriveIDNames == nil {
-ctrl.backupDriveIDNames = idname.NewCache(map[string]string{})
-}
if dii.SharePoint != nil {
ctrl.backupDriveIDNames.Add(dii.SharePoint.DriveID, dii.SharePoint.DriveName)
}
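backupDriveIDNames is now initialized in NewController, so CacheItemInfo no longer needs a nil guard before recording drive names. A rough sketch of the intended flow (the details.SharePointInfo type name and the literal values are assumptions; the diff only shows the DriveID and DriveName fields being read):

// While cataloging backup details, the controller records each SharePoint
// drive's id -> name pair; restore later uses these names to locate or
// recreate drives under the target resource.
ctrl.CacheItemInfo(details.ItemInfo{
	SharePoint: &details.SharePointInfo{
		DriveID:   "b!hypothetical-drive-id",
		DriveName: "Documents",
	},
})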

View File

@ -40,16 +40,13 @@ func ConsumeRestoreCollections(
}
var (
-userID = dcs[0].FullPath().ResourceOwner()
+userID = rcc.ProtectedResource.ID()
directoryCache = make(map[path.CategoryType]graph.ContainerResolver)
handlers = restoreHandlers(ac)
metrics support.CollectionMetrics
el = errs.Local()
)
-// FIXME: should be user name
-ctx = clues.Add(ctx, "resource_owner", clues.Hide(userID))
for _, dc := range dcs {
if el.Failure() != nil {
break

View File

@ -39,107 +39,6 @@ const (
maxUploadRetries = 3
)
type driveInfo struct {
id string
name string
rootFolderID string
}
type restoreCaches struct {
BackupDriveIDName idname.Cacher
collisionKeyToItemID map[string]api.DriveItemIDType
DriveIDToDriveInfo map[string]driveInfo
DriveNameToDriveInfo map[string]driveInfo
Folders *folderCache
OldLinkShareIDToNewID map[string]string
OldPermIDToNewID map[string]string
ParentDirToMeta map[string]metadata.Metadata
pool sync.Pool
}
func (rc *restoreCaches) AddDrive(
ctx context.Context,
md models.Driveable,
grf GetRootFolderer,
) error {
di := driveInfo{
id: ptr.Val(md.GetId()),
name: ptr.Val(md.GetName()),
}
ctx = clues.Add(ctx, "drive_info", di)
root, err := grf.GetRootFolder(ctx, di.id)
if err != nil {
return clues.Wrap(err, "getting drive root id")
}
di.rootFolderID = ptr.Val(root.GetId())
rc.DriveIDToDriveInfo[di.id] = di
rc.DriveNameToDriveInfo[di.name] = di
return nil
}
// Populate looks up drive items available to the protectedResource
// and adds their info to the caches.
func (rc *restoreCaches) Populate(
ctx context.Context,
gdparf GetDrivePagerAndRootFolderer,
protectedResourceID string,
) error {
drives, err := api.GetAllDrives(
ctx,
gdparf.NewDrivePager(protectedResourceID, nil),
true,
maxDrivesRetries)
if err != nil {
return clues.Wrap(err, "getting drives")
}
for _, md := range drives {
if err := rc.AddDrive(ctx, md, gdparf); err != nil {
return clues.Wrap(err, "caching drive")
}
}
return nil
}
type GetDrivePagerAndRootFolderer interface {
GetRootFolderer
NewDrivePagerer
}
func NewRestoreCaches(
backupDriveIDNames idname.Cacher,
) *restoreCaches {
// avoid nil panics
if backupDriveIDNames == nil {
backupDriveIDNames = idname.NewCache(nil)
}
return &restoreCaches{
BackupDriveIDName: backupDriveIDNames,
collisionKeyToItemID: map[string]api.DriveItemIDType{},
DriveIDToDriveInfo: map[string]driveInfo{},
DriveNameToDriveInfo: map[string]driveInfo{},
Folders: NewFolderCache(),
OldLinkShareIDToNewID: map[string]string{},
OldPermIDToNewID: map[string]string{},
ParentDirToMeta: map[string]metadata.Metadata{},
// Buffer pool for uploads
pool: sync.Pool{
New: func() any {
b := make([]byte, graph.CopyBufferSize)
return &b
},
},
}
}
// ConsumeRestoreCollections will restore the specified data collections into OneDrive
func ConsumeRestoreCollections(
ctx context.Context,
@ -155,7 +54,7 @@ func ConsumeRestoreCollections(
restoreMetrics support.CollectionMetrics
el = errs.Local()
caches = NewRestoreCaches(backupDriveIDNames)
-protectedResourceID = dcs[0].FullPath().ResourceOwner()
+protectedResourceID = rcc.ProtectedResource.ID()
fallbackDriveName = "" // onedrive cannot create drives
)
@ -182,7 +81,6 @@ func ConsumeRestoreCollections(
ictx = clues.Add(
ctx,
"category", dc.FullPath().Category(),
-"resource_owner", clues.Hide(protectedResourceID),
"full_path", dc.FullPath())
)

View File

@ -0,0 +1,116 @@
package onedrive
import (
"context"
"sync"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type driveInfo struct {
id string
name string
rootFolderID string
}
type restoreCaches struct {
BackupDriveIDName idname.Cacher
collisionKeyToItemID map[string]api.DriveItemIDType
DriveIDToDriveInfo map[string]driveInfo
DriveNameToDriveInfo map[string]driveInfo
Folders *folderCache
OldLinkShareIDToNewID map[string]string
OldPermIDToNewID map[string]string
ParentDirToMeta map[string]metadata.Metadata
pool sync.Pool
}
func (rc *restoreCaches) AddDrive(
ctx context.Context,
md models.Driveable,
grf GetRootFolderer,
) error {
di := driveInfo{
id: ptr.Val(md.GetId()),
name: ptr.Val(md.GetName()),
}
ctx = clues.Add(ctx, "drive_info", di)
root, err := grf.GetRootFolder(ctx, di.id)
if err != nil {
return clues.Wrap(err, "getting drive root id")
}
di.rootFolderID = ptr.Val(root.GetId())
rc.DriveIDToDriveInfo[di.id] = di
rc.DriveNameToDriveInfo[di.name] = di
return nil
}
// Populate looks up drive items available to the protectedResource
// and adds their info to the caches.
func (rc *restoreCaches) Populate(
ctx context.Context,
gdparf GetDrivePagerAndRootFolderer,
protectedResourceID string,
) error {
drives, err := api.GetAllDrives(
ctx,
gdparf.NewDrivePager(protectedResourceID, nil),
true,
maxDrivesRetries)
if err != nil {
return clues.Wrap(err, "getting drives")
}
for _, md := range drives {
if err := rc.AddDrive(ctx, md, gdparf); err != nil {
return clues.Wrap(err, "caching drive")
}
}
return nil
}
type GetDrivePagerAndRootFolderer interface {
GetRootFolderer
NewDrivePagerer
}
func NewRestoreCaches(
backupDriveIDNames idname.Cacher,
) *restoreCaches {
// avoid nil panics
if backupDriveIDNames == nil {
backupDriveIDNames = idname.NewCache(nil)
}
return &restoreCaches{
BackupDriveIDName: backupDriveIDNames,
collisionKeyToItemID: map[string]api.DriveItemIDType{},
DriveIDToDriveInfo: map[string]driveInfo{},
DriveNameToDriveInfo: map[string]driveInfo{},
Folders: NewFolderCache(),
OldLinkShareIDToNewID: map[string]string{},
OldPermIDToNewID: map[string]string{},
ParentDirToMeta: map[string]metadata.Metadata{},
// Buffer pool for uploads
pool: sync.Pool{
New: func() any {
b := make([]byte, graph.CopyBufferSize)
return &b
},
},
}
}
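A rough sketch of how these caches are wired during a restore; the pager argument is an assumption, since this file only defines the GetDrivePagerAndRootFolderer interface it must satisfy:

// Seed the caches with the drive id -> name pairs captured at backup time,
// then enumerate the drives actually available to the resource being
// restored into (which may differ from the backup's original owner).
caches := NewRestoreCaches(backupDriveIDNames)

if err := caches.Populate(ctx, pager, protectedResourceID); err != nil {
	return clues.Wrap(err, "populating restore caches")
}

// DriveNameToDriveInfo can now map a backed-up drive's name onto a drive
// that exists under the target resource.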

View File

@ -173,7 +173,7 @@ func (op *RestoreOperation) Run(ctx context.Context) (restoreDetails *details.De
logger.CtxErr(ctx, err).Error("running restore")
if errors.Is(err, kopia.ErrNoRestorePath) {
-op.Errors.Fail(clues.New("empty backup or unknown path provided"))
+op.Errors.Fail(clues.Wrap(err, "empty backup or unknown path provided"))
}
op.Errors.Fail(clues.Wrap(err, "running restore"))

View File

@ -67,9 +67,9 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
{
name: "Mail",
selector: func() *selectors.ExchangeBackup {
-sel := selectors.NewExchangeBackup([]string{suite.its.userID})
+sel := selectors.NewExchangeBackup([]string{suite.its.user.ID})
sel.Include(sel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch()))
-sel.DiscreteOwner = suite.its.userID
+sel.DiscreteOwner = suite.its.user.ID
return sel
},
@ -79,7 +79,7 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
{
name: "Contacts",
selector: func() *selectors.ExchangeBackup {
-sel := selectors.NewExchangeBackup([]string{suite.its.userID})
+sel := selectors.NewExchangeBackup([]string{suite.its.user.ID})
sel.Include(sel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()))
return sel
},
@ -89,7 +89,7 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
{
name: "Calendar Events",
selector: func() *selectors.ExchangeBackup {
-sel := selectors.NewExchangeBackup([]string{suite.its.userID})
+sel := selectors.NewExchangeBackup([]string{suite.its.user.ID})
sel.Include(sel.EventCalendars([]string{api.DefaultCalendar}, selectors.PrefixMatch()))
return sel
},
@ -258,7 +258,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
// later on during the tests. Putting their identifiers into the selector
// at this point is harmless.
containers = []string{container1, container2, container3, containerRename}
-sel = selectors.NewExchangeBackup([]string{suite.its.userID})
+sel = selectors.NewExchangeBackup([]string{suite.its.user.ID})
whatSet = deeTD.CategoryFromRepoRef
opts = control.DefaultOptions()
)
@ -295,7 +295,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
mailDBF := func(id, timeStamp, subject, body string) []byte {
return exchMock.MessageWith(
-suite.its.userID, suite.its.userID, suite.its.userID,
+suite.its.user.ID, suite.its.user.ID, suite.its.user.ID,
subject, body, body,
now, now, now, now)
}
@ -312,7 +312,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
eventDBF := func(id, timeStamp, subject, body string) []byte {
return exchMock.EventWith(
-suite.its.userID, subject, body, body,
+suite.its.user.ID, subject, body, body,
exchMock.NoOriginalStartDate, now, now,
exchMock.NoRecurrence, exchMock.NoAttendees,
exchMock.NoAttachments, exchMock.NoCancelledOccurrences,
@ -578,7 +578,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
service,
category,
selectors.NewExchangeRestore([]string{uidn.ID()}).Selector,
-creds.AzureTenantID, suite.its.userID, "", container3,
+creds.AzureTenantID, suite.its.user.ID, "", container3,
2,
version.Backup,
gen.dbf)
@ -897,7 +897,7 @@ func (suite *ExchangeRestoreIntgSuite) TestRestore_Run_exchangeWithAdvancedOptio
// a backup is required to run restores
-baseSel := selectors.NewExchangeBackup([]string{suite.its.userID})
+baseSel := selectors.NewExchangeBackup([]string{suite.its.user.ID})
baseSel.Include(
// events cannot be run, for the same reason as incremental backups: the user needs
// to have their account recycled.
@ -905,7 +905,7 @@ func (suite *ExchangeRestoreIntgSuite) TestRestore_Run_exchangeWithAdvancedOptio
baseSel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()),
baseSel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch()))
-baseSel.DiscreteOwner = suite.its.userID
+baseSel.DiscreteOwner = suite.its.user.ID
var (
mb = evmock.NewBus()
@ -1272,3 +1272,216 @@ func (suite *ExchangeRestoreIntgSuite) TestRestore_Run_exchangeWithAdvancedOptio
assert.Len(t, result, 0, "no items should have been added as copies")
})
}
func (suite *ExchangeRestoreIntgSuite) TestRestore_Run_exchangeAlternateProtectedResource() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
// a backup is required to run restores
baseSel := selectors.NewExchangeBackup([]string{suite.its.user.ID})
baseSel.Include(
// events cannot be run, for the same reason as incremental backups: the user needs
// to have their account recycled.
// base_sel.EventCalendars([]string{api.DefaultCalendar}, selectors.PrefixMatch()),
baseSel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()),
baseSel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch()))
baseSel.DiscreteOwner = suite.its.user.ID
var (
mb = evmock.NewBus()
opts = control.DefaultOptions()
)
bo, bod := prepNewTestBackupOp(t, ctx, mb, baseSel.Selector, opts, version.Backup)
defer bod.close(t, ctx)
runAndCheckBackup(t, ctx, &bo, mb, false)
rsel, err := baseSel.ToExchangeRestore()
require.NoError(t, err, clues.ToCore(err))
var (
restoreCfg = ctrlTD.DefaultRestoreConfig("exchange_restore_to_user")
sel = rsel.Selector
userID = suite.its.user.ID
secondaryUserID = suite.its.secondaryUser.ID
uid = userID
acCont = suite.its.ac.Contacts()
acMail = suite.its.ac.Mail()
// acEvts = suite.its.ac.Events()
firstCtr = count.New()
)
restoreCfg.OnCollision = control.Copy
mb = evmock.NewBus()
// first restore to the current user
ro1, _ := prepNewTestRestoreOp(
t,
ctx,
bod.st,
bo.Results.BackupID,
mb,
firstCtr,
sel,
opts,
restoreCfg)
runAndCheckRestore(t, ctx, &ro1, mb, false)
// get all files in folder, use these as the base
// set of files to compare against.
var (
userItemIDs = map[path.CategoryType]map[string]struct{}{}
userCollisionKeys = map[path.CategoryType]map[string]string{}
)
// --- contacts
cat := path.ContactsCategory
userItemIDs[cat], userCollisionKeys[cat] = getCollKeysAndItemIDs(
t,
ctx,
acCont,
uid,
"",
restoreCfg.Location)
// --- events
// cat = path.EventsCategory
// userItemIDs[cat], userCollisionKeys[cat] = getCollKeysAndItemIDs(
// t,
// ctx,
// acEvts,
// uid,
// "",
// restoreCfg.Location)
// --- mail
cat = path.EmailCategory
userItemIDs[cat], userCollisionKeys[cat] = getCollKeysAndItemIDs(
t,
ctx,
acMail,
uid,
"",
restoreCfg.Location,
api.MailInbox)
// then restore to the secondary user
uid = secondaryUserID
mb = evmock.NewBus()
secondCtr := count.New()
restoreCfg.ProtectedResource = uid
ro2, _ := prepNewTestRestoreOp(
t,
ctx,
bod.st,
bo.Results.BackupID,
mb,
secondCtr,
sel,
opts,
restoreCfg)
runAndCheckRestore(t, ctx, &ro2, mb, false)
var (
secondaryItemIDs = map[path.CategoryType]map[string]struct{}{}
secondaryCollisionKeys = map[path.CategoryType]map[string]string{}
)
// --- contacts
cat = path.ContactsCategory
secondaryItemIDs[cat], secondaryCollisionKeys[cat] = getCollKeysAndItemIDs(
t,
ctx,
acCont,
uid,
"",
restoreCfg.Location)
// --- events
// cat = path.EventsCategory
// secondaryItemIDs[cat], secondaryCollisionKeys[cat] = getCollKeysAndItemIDs(
// t,
// ctx,
// acEvts,
// uid,
// "",
// restoreCfg.Location)
// --- mail
cat = path.EmailCategory
secondaryItemIDs[cat], secondaryCollisionKeys[cat] = getCollKeysAndItemIDs(
t,
ctx,
acMail,
uid,
"",
restoreCfg.Location,
api.MailInbox)
// compare restore results
for _, cat := range []path.CategoryType{path.ContactsCategory, path.EmailCategory, path.EventsCategory} {
assert.Equal(t, len(userItemIDs[cat]), len(secondaryItemIDs[cat]))
assert.ElementsMatch(t, maps.Keys(userCollisionKeys[cat]), maps.Keys(secondaryCollisionKeys[cat]))
}
}
type GetItemsKeysAndContainerByNameer interface {
GetItemIDsInContainer(
ctx context.Context,
userID, containerID string,
) (map[string]struct{}, error)
GetContainerByName(
ctx context.Context,
userID, parentContainerID, containerName string,
) (graph.Container, error)
GetItemsInContainerByCollisionKey(
ctx context.Context,
userID, containerID string,
) (map[string]string, error)
}
func getCollKeysAndItemIDs(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
gikacbn GetItemsKeysAndContainerByNameer,
userID, parentContainerID string,
containerNames ...string,
) (map[string]struct{}, map[string]string) {
var (
c graph.Container
err error
cID string
)
for _, cn := range containerNames {
pcid := parentContainerID
if len(cID) != 0 {
pcid = cID
}
c, err = gikacbn.GetContainerByName(ctx, userID, pcid, cn)
require.NoError(t, err, clues.ToCore(err))
cID = ptr.Val(c.GetId())
}
itemIDs, err := gikacbn.GetItemIDsInContainer(ctx, userID, cID)
require.NoError(t, err, clues.ToCore(err))
collisionKeys, err := gikacbn.GetItemsInContainerByCollisionKey(ctx, userID, cID)
require.NoError(t, err, clues.ToCore(err))
return itemIDs, collisionKeys
}

View File

@ -574,15 +574,19 @@ func ControllerWithSelector(
// Suite Setup
// ---------------------------------------------------------------------------
+type ids struct {
+ID string
+DriveID string
+DriveRootFolderID string
+}
type intgTesterSetup struct {
ac api.Client
gockAC api.Client
-userID string
-userDriveID string
-userDriveRootFolderID string
-siteID string
-siteDriveID string
-siteDriveRootFolderID string
+user ids
+secondaryUser ids
+site ids
+secondarySite ids
}
func newIntegrationTesterSetup(t *testing.T) intgTesterSetup {
@ -603,37 +607,52 @@ func newIntegrationTesterSetup(t *testing.T) intgTesterSetup {
its.gockAC, err = mock.NewClient(creds)
require.NoError(t, err, clues.ToCore(err))
-// user drive
-its.userID = tconfig.M365UserID(t)
-userDrive, err := its.ac.Users().GetDefaultDrive(ctx, its.userID)
-require.NoError(t, err, clues.ToCore(err))
-its.userDriveID = ptr.Val(userDrive.GetId())
-userDriveRootFolder, err := its.ac.Drives().GetRootFolder(ctx, its.userDriveID)
-require.NoError(t, err, clues.ToCore(err))
-its.userDriveRootFolderID = ptr.Val(userDriveRootFolder.GetId())
-its.siteID = tconfig.M365SiteID(t)
-// site
-siteDrive, err := its.ac.Sites().GetDefaultDrive(ctx, its.siteID)
-require.NoError(t, err, clues.ToCore(err))
-its.siteDriveID = ptr.Val(siteDrive.GetId())
-siteDriveRootFolder, err := its.ac.Drives().GetRootFolder(ctx, its.siteDriveID)
-require.NoError(t, err, clues.ToCore(err))
-its.siteDriveRootFolderID = ptr.Val(siteDriveRootFolder.GetId())
+its.user = userIDs(t, tconfig.M365UserID(t), its.ac)
+its.secondaryUser = userIDs(t, tconfig.SecondaryM365UserID(t), its.ac)
+its.site = siteIDs(t, tconfig.M365SiteID(t), its.ac)
+its.secondarySite = siteIDs(t, tconfig.SecondaryM365SiteID(t), its.ac)
return its
}
func userIDs(t *testing.T, id string, ac api.Client) ids {
ctx, flush := tester.NewContext(t)
defer flush()
r := ids{ID: id}
drive, err := ac.Users().GetDefaultDrive(ctx, id)
require.NoError(t, err, clues.ToCore(err))
r.DriveID = ptr.Val(drive.GetId())
driveRootFolder, err := ac.Drives().GetRootFolder(ctx, r.DriveID)
require.NoError(t, err, clues.ToCore(err))
r.DriveRootFolderID = ptr.Val(driveRootFolder.GetId())
return r
}
func siteIDs(t *testing.T, id string, ac api.Client) ids {
ctx, flush := tester.NewContext(t)
defer flush()
r := ids{ID: id}
drive, err := ac.Sites().GetDefaultDrive(ctx, id)
require.NoError(t, err, clues.ToCore(err))
r.DriveID = ptr.Val(drive.GetId())
driveRootFolder, err := ac.Drives().GetRootFolder(ctx, r.DriveID)
require.NoError(t, err, clues.ToCore(err))
r.DriveRootFolderID = ptr.Val(driveRootFolder.GetId())
return r
}
func getTestExtensionFactories() []extensions.CreateItemExtensioner {
return []extensions.CreateItemExtensioner{
&extensions.MockItemExtensionFactory{},

View File

@ -106,7 +106,7 @@ func (suite *OneDriveBackupIntgSuite) TestBackup_Run_oneDrive() {
}
func (suite *OneDriveBackupIntgSuite) TestBackup_Run_incrementalOneDrive() {
-sel := selectors.NewOneDriveRestore([]string{suite.its.userID})
+sel := selectors.NewOneDriveRestore([]string{suite.its.user.ID})
ic := func(cs []string) selectors.Selector {
sel.Include(sel.Folders(cs, selectors.PrefixMatch()))
@ -117,10 +117,10 @@ func (suite *OneDriveBackupIntgSuite) TestBackup_Run_incrementalOneDrive() {
t *testing.T,
ctx context.Context,
) string {
-d, err := suite.its.ac.Users().GetDefaultDrive(ctx, suite.its.userID)
+d, err := suite.its.ac.Users().GetDefaultDrive(ctx, suite.its.user.ID)
if err != nil {
err = graph.Wrap(ctx, err, "retrieving default user drive").
-With("user", suite.its.userID)
+With("user", suite.its.user.ID)
}
require.NoError(t, err, clues.ToCore(err))
@ -137,8 +137,8 @@ func (suite *OneDriveBackupIntgSuite) TestBackup_Run_incrementalOneDrive() {
runDriveIncrementalTest(
suite,
-suite.its.userID,
-suite.its.userID,
+suite.its.user.ID,
+suite.its.user.ID,
resource.Users,
path.OneDriveService,
path.FilesCategory,
@ -804,7 +804,7 @@ func (suite *OneDriveBackupIntgSuite) TestBackup_Run_oneDriveOwnerMigration() {
control.DefaultOptions())
require.NoError(t, err, clues.ToCore(err))
-userable, err := ctrl.AC.Users().GetByID(ctx, suite.its.userID)
+userable, err := ctrl.AC.Users().GetByID(ctx, suite.its.user.ID)
require.NoError(t, err, clues.ToCore(err))
uid := ptr.Val(userable.GetId())
@ -982,17 +982,17 @@ func (suite *OneDriveRestoreIntgSuite) SetupSuite() {
}
func (suite *OneDriveRestoreIntgSuite) TestRestore_Run_onedriveWithAdvancedOptions() {
-sel := selectors.NewOneDriveBackup([]string{suite.its.userID})
+sel := selectors.NewOneDriveBackup([]string{suite.its.user.ID})
sel.Include(selTD.OneDriveBackupFolderScope(sel))
-sel.DiscreteOwner = suite.its.userID
+sel.DiscreteOwner = suite.its.user.ID
runDriveRestoreWithAdvancedOptions(
suite.T(),
suite,
suite.its.ac,
sel.Selector,
-suite.its.userDriveID,
-suite.its.userDriveRootFolderID)
+suite.its.user.DriveID,
+suite.its.user.DriveRootFolderID)
}
func runDriveRestoreWithAdvancedOptions(
@ -1250,3 +1250,173 @@ func runDriveRestoreWithAdvancedOptions(
assert.Subset(t, maps.Keys(currentFileIDs), maps.Keys(fileIDs), "original item should exist after copy")
})
}
func (suite *OneDriveRestoreIntgSuite) TestRestore_Run_onedriveAlternateProtectedResource() {
sel := selectors.NewOneDriveBackup([]string{suite.its.user.ID})
sel.Include(selTD.OneDriveBackupFolderScope(sel))
sel.DiscreteOwner = suite.its.user.ID
runDriveRestoreToAlternateProtectedResource(
suite.T(),
suite,
suite.its.ac,
sel.Selector,
suite.its.user,
suite.its.secondaryUser)
}
func runDriveRestoreToAlternateProtectedResource(
t *testing.T,
suite tester.Suite,
ac api.Client,
sel selectors.Selector, // owner should match 'from', both Restore and Backup types work.
from, to ids,
) {
ctx, flush := tester.NewContext(t)
defer flush()
// a backup is required to run restores
var (
mb = evmock.NewBus()
opts = control.DefaultOptions()
)
bo, bod := prepNewTestBackupOp(t, ctx, mb, sel, opts, version.Backup)
defer bod.close(t, ctx)
runAndCheckBackup(t, ctx, &bo, mb, false)
var (
restoreCfg = ctrlTD.DefaultRestoreConfig("drive_restore_to_resource")
fromCollisionKeys map[string]api.DriveItemIDType
fromItemIDs map[string]api.DriveItemIDType
acd = ac.Drives()
)
// first restore to the 'from' resource
suite.Run("restore original resource", func() {
mb = evmock.NewBus()
fromCtr := count.New()
driveID := from.DriveID
rootFolderID := from.DriveRootFolderID
restoreCfg.OnCollision = control.Copy
ro, _ := prepNewTestRestoreOp(
t,
ctx,
bod.st,
bo.Results.BackupID,
mb,
fromCtr,
sel,
opts,
restoreCfg)
runAndCheckRestore(t, ctx, &ro, mb, false)
// get all files in folder, use these as the base
// set of files to compare against.
fromItemIDs, fromCollisionKeys = getDriveCollKeysAndItemIDs(
t,
ctx,
acd,
driveID,
rootFolderID,
restoreCfg.Location,
selTD.TestFolderName)
})
// then restore to the 'to' resource
var (
toCollisionKeys map[string]api.DriveItemIDType
toItemIDs map[string]api.DriveItemIDType
)
suite.Run("restore to alternate resource", func() {
mb = evmock.NewBus()
toCtr := count.New()
driveID := to.DriveID
rootFolderID := to.DriveRootFolderID
restoreCfg.ProtectedResource = to.ID
ro, _ := prepNewTestRestoreOp(
t,
ctx,
bod.st,
bo.Results.BackupID,
mb,
toCtr,
sel,
opts,
restoreCfg)
runAndCheckRestore(t, ctx, &ro, mb, false)
// get all files in folder, use these as the base
// set of files to compare against.
toItemIDs, toCollisionKeys = getDriveCollKeysAndItemIDs(
t,
ctx,
acd,
driveID,
rootFolderID,
restoreCfg.Location,
selTD.TestFolderName)
})
// compare restore results
assert.Equal(t, len(fromItemIDs), len(toItemIDs))
assert.ElementsMatch(t, maps.Keys(fromCollisionKeys), maps.Keys(toCollisionKeys))
}
type GetItemsKeysAndFolderByNameer interface {
GetItemIDsInContainer(
ctx context.Context,
driveID, containerID string,
) (map[string]api.DriveItemIDType, error)
GetFolderByName(
ctx context.Context,
driveID, parentFolderID, folderName string,
) (models.DriveItemable, error)
GetItemsInContainerByCollisionKey(
ctx context.Context,
driveID, containerID string,
) (map[string]api.DriveItemIDType, error)
}
func getDriveCollKeysAndItemIDs(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
gikafbn GetItemsKeysAndFolderByNameer,
driveID, parentContainerID string,
containerNames ...string,
) (map[string]api.DriveItemIDType, map[string]api.DriveItemIDType) {
var (
c models.DriveItemable
err error
cID string
)
for _, cn := range containerNames {
pcid := parentContainerID
if len(cID) != 0 {
pcid = cID
}
c, err = gikafbn.GetFolderByName(ctx, driveID, pcid, cn)
require.NoError(t, err, clues.ToCore(err))
cID = ptr.Val(c.GetId())
}
itemIDs, err := gikafbn.GetItemIDsInContainer(ctx, driveID, cID)
require.NoError(t, err, clues.ToCore(err))
collisionKeys, err := gikafbn.GetItemsInContainerByCollisionKey(ctx, driveID, cID)
require.NoError(t, err, clues.ToCore(err))
return itemIDs, collisionKeys
}

View File

@ -49,7 +49,7 @@ func (suite *SharePointBackupIntgSuite) SetupSuite() {
}
func (suite *SharePointBackupIntgSuite) TestBackup_Run_incrementalSharePoint() {
-sel := selectors.NewSharePointRestore([]string{suite.its.siteID})
+sel := selectors.NewSharePointRestore([]string{suite.its.site.ID})
ic := func(cs []string) selectors.Selector {
sel.Include(sel.LibraryFolders(cs, selectors.PrefixMatch()))
@ -60,10 +60,10 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_incrementalSharePoint() {
t *testing.T,
ctx context.Context,
) string {
-d, err := suite.its.ac.Sites().GetDefaultDrive(ctx, suite.its.siteID)
+d, err := suite.its.ac.Sites().GetDefaultDrive(ctx, suite.its.site.ID)
if err != nil {
err = graph.Wrap(ctx, err, "retrieving default site drive").
-With("site", suite.its.siteID)
+With("site", suite.its.site.ID)
}
require.NoError(t, err, clues.ToCore(err))
@ -80,8 +80,8 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_incrementalSharePoint() {
runDriveIncrementalTest(
suite,
-suite.its.siteID,
-suite.its.userID,
+suite.its.site.ID,
+suite.its.user.ID,
resource.Sites,
path.SharePointService,
path.LibrariesCategory,
@ -99,7 +99,7 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_sharePoint() {
var (
mb = evmock.NewBus()
-sel = selectors.NewSharePointBackup([]string{suite.its.siteID})
+sel = selectors.NewSharePointBackup([]string{suite.its.site.ID})
opts = control.DefaultOptions()
)
@ -116,7 +116,7 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_sharePoint() {
bod.sw,
&bo,
bod.sel,
-suite.its.siteID,
+suite.its.site.ID,
path.LibrariesCategory)
}
@ -128,7 +128,7 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_sharePointExtensions() {
var (
mb = evmock.NewBus()
-sel = selectors.NewSharePointBackup([]string{suite.its.siteID})
+sel = selectors.NewSharePointBackup([]string{suite.its.site.ID})
opts = control.DefaultOptions()
tenID = tconfig.M365TenantID(t)
svc = path.SharePointService
@ -150,7 +150,7 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_sharePointExtensions() {
bod.sw,
&bo,
bod.sel,
-suite.its.siteID,
+suite.its.site.ID,
path.LibrariesCategory)
bID := bo.Results.BackupID
@ -201,18 +201,33 @@ func (suite *SharePointRestoreIntgSuite) SetupSuite() {
}
func (suite *SharePointRestoreIntgSuite) TestRestore_Run_sharepointWithAdvancedOptions() {
-sel := selectors.NewSharePointBackup([]string{suite.its.siteID})
+sel := selectors.NewSharePointBackup([]string{suite.its.site.ID})
sel.Include(selTD.SharePointBackupFolderScope(sel))
sel.Filter(sel.Library("documents"))
-sel.DiscreteOwner = suite.its.siteID
+sel.DiscreteOwner = suite.its.site.ID
runDriveRestoreWithAdvancedOptions(
suite.T(),
suite,
suite.its.ac,
sel.Selector,
-suite.its.siteDriveID,
-suite.its.siteDriveRootFolderID)
+suite.its.site.DriveID,
+suite.its.site.DriveRootFolderID)
}
func (suite *SharePointRestoreIntgSuite) TestRestore_Run_sharepointAlternateProtectedResource() {
sel := selectors.NewSharePointBackup([]string{suite.its.site.ID})
sel.Include(selTD.SharePointBackupFolderScope(sel))
sel.Filter(sel.Library("documents"))
sel.DiscreteOwner = suite.its.site.ID
runDriveRestoreToAlternateProtectedResource(
suite.T(),
suite,
suite.its.ac,
sel.Selector,
suite.its.site,
suite.its.secondarySite)
} }
func (suite *SharePointRestoreIntgSuite) TestRestore_Run_sharepointDeletedDrives() {
@ -229,7 +244,7 @@ func (suite *SharePointRestoreIntgSuite) TestRestore_Run_sharepointDeletedDrives
rc.OnCollision = control.Copy
// create a new drive
-md, err := suite.its.ac.Lists().PostDrive(ctx, suite.its.siteID, rc.Location)
+md, err := suite.its.ac.Lists().PostDrive(ctx, suite.its.site.ID, rc.Location)
require.NoError(t, err, clues.ToCore(err))
driveID := ptr.Val(md.GetId())
@ -264,10 +279,10 @@ func (suite *SharePointRestoreIntgSuite) TestRestore_Run_sharepointDeletedDrives
graphClient = suite.its.ac.Stable.Client()
)
-bsel := selectors.NewSharePointBackup([]string{suite.its.siteID})
+bsel := selectors.NewSharePointBackup([]string{suite.its.site.ID})
bsel.Include(selTD.SharePointBackupFolderScope(bsel))
bsel.Filter(bsel.Library(rc.Location))
-bsel.DiscreteOwner = suite.its.siteID
+bsel.DiscreteOwner = suite.its.site.ID
bo, bod := prepNewTestBackupOp(t, ctx, mb, bsel.Selector, opts, version.Backup)
defer bod.close(t, ctx)
@ -367,7 +382,7 @@ func (suite *SharePointRestoreIntgSuite) TestRestore_Run_sharepointDeletedDrives
pgr := suite.its.ac.
Drives().
-NewSiteDrivePager(suite.its.siteID, []string{"id", "name"})
+NewSiteDrivePager(suite.its.site.ID, []string{"id", "name"})
drives, err := api.GetAllDrives(ctx, pgr, false, -1)
require.NoError(t, err, clues.ToCore(err))

View File

@ -23,6 +23,7 @@ const (
// M365 config
TestCfgAzureTenantID = "azure_tenantid"
+TestCfgSecondarySiteID = "secondarym365siteid"
TestCfgSiteID = "m365siteid"
TestCfgSiteURL = "m365siteurl"
TestCfgUserID = "m365userid"
@ -36,13 +37,14 @@ const (
// test specific env vars
const (
EnvCorsoM365LoadTestUserID = "CORSO_M365_LOAD_TEST_USER_ID"
EnvCorsoM365LoadTestOrgUsers = "CORSO_M365_LOAD_TEST_ORG_USERS"
EnvCorsoM365TestSiteID = "CORSO_M365_TEST_SITE_ID"
EnvCorsoM365TestSiteURL = "CORSO_M365_TEST_SITE_URL"
EnvCorsoM365TestUserID = "CORSO_M365_TEST_USER_ID"
+EnvCorsoSecondaryM365TestSiteID = "CORSO_SECONDARY_M365_TEST_SITE_ID"
EnvCorsoSecondaryM365TestUserID = "CORSO_SECONDARY_M365_TEST_USER_ID"
EnvCorsoTertiaryM365TestUserID = "CORSO_TERTIARY_M365_TEST_USER_ID"
EnvCorsoM365LoadTestUserID = "CORSO_M365_LOAD_TEST_USER_ID"
EnvCorsoM365LoadTestOrgUsers = "CORSO_M365_LOAD_TEST_ORG_USERS"
EnvCorsoTestConfigFilePath = "CORSO_TEST_CONFIG_FILE"
EnvCorsoUnlicensedM365TestUserID = "CORSO_M365_TEST_UNLICENSED_USER"
)
@ -147,13 +149,19 @@ func ReadTestConfig() (map[string]string, error) {
TestCfgSiteID,
os.Getenv(EnvCorsoM365TestSiteID),
vpr.GetString(TestCfgSiteID),
-"10rqc2.sharepoint.com,4892edf5-2ebf-46be-a6e5-a40b2cbf1c1a,38ab6d06-fc82-4417-af93-22d8733c22be")
+"4892edf5-2ebf-46be-a6e5-a40b2cbf1c1a,38ab6d06-fc82-4417-af93-22d8733c22be")
fallbackTo(
testEnv,
TestCfgSiteURL,
os.Getenv(EnvCorsoM365TestSiteURL),
vpr.GetString(TestCfgSiteURL),
"https://10rqc2.sharepoint.com/sites/CorsoCI")
+fallbackTo(
+testEnv,
+TestCfgSecondarySiteID,
+os.Getenv(EnvCorsoSecondaryM365TestSiteID),
+vpr.GetString(TestCfgSecondarySiteID),
+"053684d8-ca6c-4376-a03e-2567816bb091,9b3e9abe-6a5e-4084-8b44-ea5a356fe02c")
fallbackTo(
testEnv,
TestCfgUnlicensedUserID,

View File

@ -198,6 +198,17 @@ func GetM365SiteID(ctx context.Context) string {
return strings.ToLower(cfg[TestCfgSiteID])
}
// SecondaryM365SiteID returns a siteID string representing the secondarym365SiteID described
// by either the env var CORSO_SECONDARY_M365_TEST_SITE_ID, the corso_test.toml config
// file or the default value (in that order of priority). The default is a
// last-attempt fallback that will only work on alcion's testing org.
func SecondaryM365SiteID(t *testing.T) string {
cfg, err := ReadTestConfig()
require.NoError(t, err, "retrieving secondary m365 site id from test configuration: %+v", clues.ToCore(err))
return strings.ToLower(cfg[TestCfgSecondarySiteID])
}
// UnlicensedM365UserID returns an userID string representing the m365UserID
// described by either the env var CORSO_M365_TEST_UNLICENSED_USER, the
// corso_test.toml config file or the default value (in that order of priority).
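A short sketch of how the integration setup consumes the new secondary-site configuration (siteIDs is the helper added in helper_test.go above; its is the tester setup value assumed to be in scope):

// Resolve the secondary site's ID, default drive, and drive root folder,
// mirroring the primary site lookup.
secondarySiteID := tconfig.SecondaryM365SiteID(t)
its.secondarySite = siteIDs(t, secondarySiteID, its.ac)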