errant cleanups before incremental support (#3201)

A handful of errant cleanups to help the PR that adds incremental support to SharePoint.

---

#### Does this PR need a docs update or release note?

- [x]  No

#### Type of change

- [x] 🧹 Tech Debt/Cleanup

#### Issue(s)

* #3136

#### Test Plan

- [x]  Unit test
- [x] 💚 E2E
This commit is contained in:
Keepers 2023-04-25 15:09:41 -06:00 committed by GitHub
parent 0ae1a04097
commit edef23bfc4
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
15 changed files with 302 additions and 232 deletions

View File

@ -19,6 +19,7 @@ import (
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/filters"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
@ -69,12 +70,17 @@ func (gc *GraphConnector) ProduceBackupCollections(
return []data.BackupCollection{}, nil, nil return []data.BackupCollection{}, nil, nil
} }
var (
colls []data.BackupCollection
excludes map[string]map[string]struct{}
)
switch sels.Service { switch sels.Service {
case selectors.ServiceExchange: case selectors.ServiceExchange:
colls, excludes, err := exchange.DataCollections( colls, excludes, err = exchange.DataCollections(
ctx, ctx,
sels, sels,
sels, owner,
metadata, metadata,
gc.credentials, gc.credentials,
gc.UpdateStatus, gc.UpdateStatus,
@ -84,25 +90,11 @@ func (gc *GraphConnector) ProduceBackupCollections(
return nil, nil, err return nil, nil, err
} }
for _, c := range colls {
// kopia doesn't stream Items() from deleted collections,
// and so they never end up calling the UpdateStatus closer.
// This is a brittle workaround, since changes in consumer
// behavior (such as calling Items()) could inadvertently
// break the process state, putting us into deadlock or
// panics.
if c.State() != data.DeletedState {
gc.incrementAwaitingMessages()
}
}
return colls, excludes, nil
case selectors.ServiceOneDrive: case selectors.ServiceOneDrive:
colls, excludes, err := onedrive.DataCollections( colls, excludes, err = onedrive.DataCollections(
ctx, ctx,
sels, sels,
sels, owner,
metadata, metadata,
lastBackupVersion, lastBackupVersion,
gc.credentials.AzureTenantID, gc.credentials.AzureTenantID,
@ -115,20 +107,13 @@ func (gc *GraphConnector) ProduceBackupCollections(
return nil, nil, err return nil, nil, err
} }
for _, c := range colls {
// kopia doesn't stream Items() from deleted collections.
if c.State() != data.DeletedState {
gc.incrementAwaitingMessages()
}
}
return colls, excludes, nil
case selectors.ServiceSharePoint: case selectors.ServiceSharePoint:
colls, excludes, err := sharepoint.DataCollections( colls, excludes, err = sharepoint.DataCollections(
ctx, ctx,
gc.itemClient, gc.itemClient,
sels, sels,
owner,
metadata,
gc.credentials, gc.credentials,
gc.Service, gc.Service,
gc, gc,
@ -138,13 +123,23 @@ func (gc *GraphConnector) ProduceBackupCollections(
return nil, nil, err return nil, nil, err
} }
gc.incrementMessagesBy(len(colls))
return colls, excludes, nil
default: default:
return nil, nil, clues.Wrap(clues.New(sels.Service.String()), "service not supported").WithClues(ctx) return nil, nil, clues.Wrap(clues.New(sels.Service.String()), "service not supported").WithClues(ctx)
} }
for _, c := range colls {
// kopia doesn't stream Items() from deleted collections,
// and so they never end up calling the UpdateStatus closer.
// This is a brittle workaround, since changes in consumer
// behavior (such as calling Items()) could inadvertently
// break the process state, putting us into deadlock or
// panics.
if c.State() != data.DeletedState {
gc.incrementAwaitingMessages()
}
}
return colls, excludes, nil
} }
func verifyBackupInputs(sels selectors.Selector, siteIDs []string) error { func verifyBackupInputs(sels selectors.Selector, siteIDs []string) error {
@ -161,16 +156,7 @@ func verifyBackupInputs(sels selectors.Selector, siteIDs []string) error {
resourceOwner := strings.ToLower(sels.DiscreteOwner) resourceOwner := strings.ToLower(sels.DiscreteOwner)
var found bool if !filters.Equal(ids).Compare(resourceOwner) {
for _, id := range ids {
if strings.ToLower(id) == resourceOwner {
found = true
break
}
}
if !found {
return clues.Stack(graph.ErrResourceOwnerNotFound).With("missing_resource_owner", sels.DiscreteOwner) return clues.Stack(graph.ErrResourceOwnerNotFound).With("missing_resource_owner", sels.DiscreteOwner)
} }

View File

@ -263,6 +263,8 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
ctx, ctx,
graph.NewNoTimeoutHTTPWrapper(), graph.NewNoTimeoutHTTPWrapper(),
sel, sel,
sel,
nil,
connector.credentials, connector.credentials,
connector.Service, connector.Service,
connector, connector,

View File

@ -163,8 +163,8 @@ func parseMetadataCollections(
// Add iota to this call -> mail, contacts, calendar, etc. // Add iota to this call -> mail, contacts, calendar, etc.
func DataCollections( func DataCollections(
ctx context.Context, ctx context.Context,
user idname.Provider,
selector selectors.Selector, selector selectors.Selector,
user idname.Provider,
metadata []data.RestoreCollection, metadata []data.RestoreCollection,
acct account.M365Config, acct account.M365Config,
su support.StatusUpdater, su support.StatusUpdater,

View File

@ -10,6 +10,8 @@ func IsMetadataFile(p path.Path) bool {
case path.OneDriveService: case path.OneDriveService:
return metadata.HasMetaSuffix(p.Item()) return metadata.HasMetaSuffix(p.Item())
case path.SharePointService:
return p.Category() == path.LibrariesCategory && metadata.HasMetaSuffix(p.Item())
default: default:
return false return false
} }

View File

@ -61,7 +61,7 @@ var (
{ {
service: path.SharePointService, service: path.SharePointService,
category: path.LibrariesCategory, category: path.LibrariesCategory,
expected: assert.Falsef, expected: assert.Truef,
}, },
{ {
service: path.SharePointService, service: path.SharePointService,

View File

@ -167,10 +167,6 @@ func (gc *GraphConnector) incrementAwaitingMessages() {
gc.wg.Add(1) gc.wg.Add(1)
} }
func (gc *GraphConnector) incrementMessagesBy(num int) {
gc.wg.Add(num)
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Resource Lookup Handling // Resource Lookup Handling
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -279,7 +275,6 @@ func (gc *GraphConnector) PopulateOwnerIDAndNamesFrom(
owner string, // input value, can be either id or name owner string, // input value, can be either id or name
ins idname.Cacher, ins idname.Cacher,
) (string, string, error) { ) (string, string, error) {
// move this to GC method
id, name, err := gc.ownerLookup.getOwnerIDAndNameFrom(ctx, gc.Discovery, owner, ins) id, name, err := gc.ownerLookup.getOwnerIDAndNameFrom(ctx, gc.Discovery, owner, ins)
if err != nil { if err != nil {
return "", "", clues.Wrap(err, "identifying resource owner") return "", "", clues.Wrap(err, "identifying resource owner")

View File

@ -194,6 +194,7 @@ func newColl(
) *Collection { ) *Collection {
c := &Collection{ c := &Collection{
itemClient: gr, itemClient: gr,
itemGetter: api.GetDriveItem,
folderPath: folderPath, folderPath: folderPath,
prevPath: prevPath, prevPath: prevPath,
driveItems: map[string]models.DriveItemable{}, driveItems: map[string]models.DriveItemable{},
@ -211,11 +212,9 @@ func newColl(
// Allows tests to set a mock populator // Allows tests to set a mock populator
switch source { switch source {
case SharePointSource: case SharePointSource:
c.itemGetter = api.GetDriveItem
c.itemReader = sharePointItemReader c.itemReader = sharePointItemReader
c.itemMetaReader = sharePointItemMetaReader c.itemMetaReader = sharePointItemMetaReader
default: default:
c.itemGetter = api.GetDriveItem
c.itemReader = oneDriveItemReader c.itemReader = oneDriveItemReader
c.itemMetaReader = oneDriveItemMetaReader c.itemMetaReader = oneDriveItemMetaReader
} }

View File

@ -299,7 +299,7 @@ func sharePointItemInfo(di models.DriveItemable, itemSize int64) *details.ShareP
} }
return &details.SharePointInfo{ return &details.SharePointInfo{
ItemType: details.OneDriveItem, ItemType: details.SharePointLibrary,
ItemName: ptr.Val(di.GetName()), ItemName: ptr.Val(di.GetName()),
Created: ptr.Val(di.GetCreatedDateTime()), Created: ptr.Val(di.GetCreatedDateTime()),
Modified: ptr.Val(di.GetLastModifiedDateTime()), Modified: ptr.Val(di.GetLastModifiedDateTime()),

View File

@ -5,6 +5,7 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive" "github.com/alcionai/corso/src/internal/connector/onedrive"
"github.com/alcionai/corso/src/internal/connector/sharepoint/api" "github.com/alcionai/corso/src/internal/connector/sharepoint/api"
@ -30,6 +31,8 @@ func DataCollections(
ctx context.Context, ctx context.Context,
itemClient graph.Requester, itemClient graph.Requester,
selector selectors.Selector, selector selectors.Selector,
site idname.Provider,
metadata []data.RestoreCollection,
creds account.M365Config, creds account.M365Config,
serv graph.Servicer, serv graph.Servicer,
su statusUpdater, su statusUpdater,
@ -41,9 +44,13 @@ func DataCollections(
return nil, nil, clues.Wrap(err, "sharePointDataCollection: parsing selector") return nil, nil, clues.Wrap(err, "sharePointDataCollection: parsing selector")
} }
ctx = clues.Add(
ctx,
"site_id", clues.Hide(site.ID()),
"site_url", clues.Hide(site.Name()))
var ( var (
el = errs.Local() el = errs.Local()
site = b.DiscreteOwner
collections = []data.BackupCollection{} collections = []data.BackupCollection{}
categories = map[path.CategoryType]struct{}{} categories = map[path.CategoryType]struct{}{}
) )
@ -83,6 +90,7 @@ func DataCollections(
serv, serv,
creds.AzureTenantID, creds.AzureTenantID,
site, site,
metadata,
scope, scope,
su, su,
ctrlOpts, ctrlOpts,
@ -118,7 +126,7 @@ func DataCollections(
ctx, ctx,
collections, collections,
creds.AzureTenantID, creds.AzureTenantID,
site, site.ID(),
path.SharePointService, path.SharePointService,
categories, categories,
su.UpdateStatus, su.UpdateStatus,
@ -136,19 +144,20 @@ func DataCollections(
func collectLists( func collectLists(
ctx context.Context, ctx context.Context,
serv graph.Servicer, serv graph.Servicer,
tenantID, siteID string, tenantID string,
site idname.Provider,
updater statusUpdater, updater statusUpdater,
ctrlOpts control.Options, ctrlOpts control.Options,
errs *fault.Bus, errs *fault.Bus,
) ([]data.BackupCollection, error) { ) ([]data.BackupCollection, error) {
logger.Ctx(ctx).With("site", siteID).Debug("Creating SharePoint List Collections") logger.Ctx(ctx).Debug("Creating SharePoint List Collections")
var ( var (
el = errs.Local() el = errs.Local()
spcs = make([]data.BackupCollection, 0) spcs = make([]data.BackupCollection, 0)
) )
lists, err := preFetchLists(ctx, serv, siteID) lists, err := preFetchLists(ctx, serv, site.ID())
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -160,7 +169,7 @@ func collectLists(
dir, err := path.Build( dir, err := path.Build(
tenantID, tenantID,
siteID, site.ID(),
path.SharePointService, path.SharePointService,
path.ListsCategory, path.ListsCategory,
false, false,
@ -184,7 +193,9 @@ func collectLibraries(
ctx context.Context, ctx context.Context,
itemClient graph.Requester, itemClient graph.Requester,
serv graph.Servicer, serv graph.Servicer,
tenantID, siteID string, tenantID string,
site idname.Provider,
metadata []data.RestoreCollection,
scope selectors.SharePointScope, scope selectors.SharePointScope,
updater statusUpdater, updater statusUpdater,
ctrlOpts control.Options, ctrlOpts control.Options,
@ -197,7 +208,7 @@ func collectLibraries(
colls = onedrive.NewCollections( colls = onedrive.NewCollections(
itemClient, itemClient,
tenantID, tenantID,
siteID, site.ID(),
onedrive.SharePointSource, onedrive.SharePointSource,
folderMatcher{scope}, folderMatcher{scope},
serv, serv,
@ -207,7 +218,7 @@ func collectLibraries(
// TODO(ashmrtn): Pass previous backup metadata when SharePoint supports delta // TODO(ashmrtn): Pass previous backup metadata when SharePoint supports delta
// token-based incrementals. // token-based incrementals.
odcs, excludes, err := colls.Get(ctx, nil, errs) odcs, excludes, err := colls.Get(ctx, metadata, errs)
if err != nil { if err != nil {
return nil, nil, graph.Wrap(ctx, err, "getting library") return nil, nil, graph.Wrap(ctx, err, "getting library")
} }
@ -221,7 +232,7 @@ func collectPages(
ctx context.Context, ctx context.Context,
creds account.M365Config, creds account.M365Config,
serv graph.Servicer, serv graph.Servicer,
siteID string, site idname.Provider,
updater statusUpdater, updater statusUpdater,
ctrlOpts control.Options, ctrlOpts control.Options,
errs *fault.Bus, errs *fault.Bus,
@ -245,7 +256,7 @@ func collectPages(
betaService := m365api.NewBetaService(adpt) betaService := m365api.NewBetaService(adpt)
tuples, err := api.FetchPages(ctx, betaService, siteID) tuples, err := api.FetchPages(ctx, betaService, site.ID())
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -257,7 +268,7 @@ func collectPages(
dir, err := path.Build( dir, err := path.Build(
creds.AzureTenantID, creds.AzureTenantID,
siteID, site.ID(),
path.SharePointService, path.SharePointService,
path.PagesCategory, path.PagesCategory,
false, false,

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/idname/mock"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive" "github.com/alcionai/corso/src/internal/connector/onedrive"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -194,9 +195,11 @@ func (suite *SharePointPagesSuite) TestCollectPages() {
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
t := suite.T() var (
siteID := tester.M365SiteID(t) t = suite.T()
a := tester.NewM365Account(t) siteID = tester.M365SiteID(t)
a = tester.NewM365Account(t)
)
account, err := a.M365Config() account, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -205,7 +208,7 @@ func (suite *SharePointPagesSuite) TestCollectPages() {
ctx, ctx,
account, account,
nil, nil,
siteID, mock.NewProvider(siteID, siteID),
&MockGraphService{}, &MockGraphService{},
control.Defaults(), control.Defaults(),
fault.New(true)) fault.New(true))

View File

@ -97,7 +97,7 @@ func RestoreCollections(
deets, deets,
errs) errs)
default: default:
return nil, clues.Wrap(clues.New(category.String()), "category not supported") return nil, clues.Wrap(clues.New(category.String()), "category not supported").With("category", category)
} }
restoreMetrics = support.CombineMetrics(restoreMetrics, metrics) restoreMetrics = support.CombineMetrics(restoreMetrics, metrics)

View File

@ -332,14 +332,7 @@ func makeFallbackReasons(sel selectors.Selector) []kopia.Reason {
// checker to see if conditions are correct for incremental backup behavior such as // checker to see if conditions are correct for incremental backup behavior such as
// retrieving metadata like delta tokens and previous paths. // retrieving metadata like delta tokens and previous paths.
func useIncrementalBackup(sel selectors.Selector, opts control.Options) bool { func useIncrementalBackup(sel selectors.Selector, opts control.Options) bool {
enabled := !opts.ToggleFeatures.DisableIncrementals return !opts.ToggleFeatures.DisableIncrementals
if sel.Service == selectors.ServiceExchange ||
sel.Service == selectors.ServiceOneDrive {
return enabled
}
return false
} }
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -761,12 +754,14 @@ func mergeDetails(
"base_item_count_added", manifestAddedEntries) "base_item_count_added", manifestAddedEntries)
} }
if addedEntries != dataFromBackup.ItemsToMerge() { checkCount := dataFromBackup.ItemsToMerge()
if addedEntries != checkCount {
return clues.New("incomplete migration of backup details"). return clues.New("incomplete migration of backup details").
WithClues(ctx). WithClues(ctx).
With( With(
"item_count", addedEntries, "item_count", addedEntries,
"expected_item_count", dataFromBackup.ItemsToMerge()) "expected_item_count", checkCount)
} }
return nil return nil

View File

@ -17,6 +17,7 @@ import (
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/common/idname"
inMock "github.com/alcionai/corso/src/internal/common/idname/mock" inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector" "github.com/alcionai/corso/src/internal/connector"
@ -43,7 +44,7 @@ import (
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
selTD "github.com/alcionai/corso/src/pkg/selectors/testdata" "github.com/alcionai/corso/src/pkg/selectors/testdata"
"github.com/alcionai/corso/src/pkg/store" "github.com/alcionai/corso/src/pkg/store"
) )
@ -71,6 +72,7 @@ func prepNewTestBackupOp(
*kopia.Wrapper, *kopia.Wrapper,
*kopia.ModelStore, *kopia.ModelStore,
*connector.GraphConnector, *connector.GraphConnector,
selectors.Selector,
func(), func(),
) { ) {
//revive:enable:context-as-argument //revive:enable:context-as-argument
@ -116,10 +118,10 @@ func prepNewTestBackupOp(
connectorResource = connector.Sites connectorResource = connector.Sites
} }
gc := GCWithSelector(t, ctx, acct, connectorResource, sel, nil, closer) gc, sel := GCWithSelector(t, ctx, acct, connectorResource, sel, nil, closer)
bo := newTestBackupOp(t, ctx, kw, ms, gc, acct, sel, bus, featureToggles, closer) bo := newTestBackupOp(t, ctx, kw, ms, gc, acct, sel, bus, featureToggles, closer)
return bo, acct, kw, ms, gc, closer return bo, acct, kw, ms, gc, sel, closer
} }
// newTestBackupOp accepts the clients required to compose a backup operation, plus // newTestBackupOp accepts the clients required to compose a backup operation, plus
@ -147,6 +149,7 @@ func newTestBackupOp(
) )
opts.ToggleFeatures = featureToggles opts.ToggleFeatures = featureToggles
gc.IDNameLookup = idname.NewCache(map[string]string{sel.ID(): sel.Name()})
bo, err := NewBackupOperation(ctx, opts, kw, sw, gc, acct, sel, sel, bus) bo, err := NewBackupOperation(ctx, opts, kw, sw, gc, acct, sel, sel, bus)
if !assert.NoError(t, err, clues.ToCore(err)) { if !assert.NoError(t, err, clues.ToCore(err)) {
@ -250,7 +253,7 @@ func checkMetadataFilesExist(
backupID model.StableID, backupID model.StableID,
kw *kopia.Wrapper, kw *kopia.Wrapper,
ms *kopia.ModelStore, ms *kopia.ModelStore,
tenant, user string, tenant, resourceOwner string,
service path.ServiceType, service path.ServiceType,
filesByCat map[path.CategoryType][]string, filesByCat map[path.CategoryType][]string,
) { ) {
@ -270,7 +273,7 @@ func checkMetadataFilesExist(
for _, fName := range files { for _, fName := range files {
p, err := path.Builder{}. p, err := path.Builder{}.
Append(fName). Append(fName).
ToServiceCategoryMetadataPath(tenant, user, service, category, true) ToServiceCategoryMetadataPath(tenant, resourceOwner, service, category, true)
if !assert.NoError(t, err, "bad metadata path", clues.ToCore(err)) { if !assert.NoError(t, err, "bad metadata path", clues.ToCore(err)) {
continue continue
} }
@ -339,7 +342,7 @@ func generateContainerOfItems(
acct account.Account, acct account.Account,
cat path.CategoryType, cat path.CategoryType,
sel selectors.Selector, sel selectors.Selector,
tenantID, userID, driveID, destFldr string, tenantID, resourceOwner, driveID, destFldr string,
howManyItems int, howManyItems int,
backupVersion int, backupVersion int,
dbf dataBuilderFunc, dbf dataBuilderFunc,
@ -350,7 +353,7 @@ func generateContainerOfItems(
items := make([]incrementalItem, 0, howManyItems) items := make([]incrementalItem, 0, howManyItems)
for i := 0; i < howManyItems; i++ { for i := 0; i < howManyItems; i++ {
id, d := generateItemData(t, cat, userID, dbf) id, d := generateItemData(t, cat, resourceOwner, dbf)
items = append(items, incrementalItem{ items = append(items, incrementalItem{
name: id, name: id,
@ -359,7 +362,9 @@ func generateContainerOfItems(
} }
pathFolders := []string{destFldr} pathFolders := []string{destFldr}
if service == path.OneDriveService {
switch service {
case path.OneDriveService, path.SharePointService:
pathFolders = []string{"drives", driveID, "root:", destFldr} pathFolders = []string{"drives", driveID, "root:", destFldr}
} }
@ -375,7 +380,7 @@ func generateContainerOfItems(
dataColls := buildCollections( dataColls := buildCollections(
t, t,
service, service,
tenantID, userID, tenantID, resourceOwner,
dest, dest,
collections) collections)
@ -462,7 +467,7 @@ func buildCollections(
func toDataLayerPath( func toDataLayerPath(
t *testing.T, t *testing.T,
service path.ServiceType, service path.ServiceType,
tenant, user string, tenant, resourceOwner string,
category path.CategoryType, category path.CategoryType,
elements []string, elements []string,
isItem bool, isItem bool,
@ -477,9 +482,11 @@ func toDataLayerPath(
switch service { switch service {
case path.ExchangeService: case path.ExchangeService:
p, err = pb.ToDataLayerExchangePathForCategory(tenant, user, category, isItem) p, err = pb.ToDataLayerExchangePathForCategory(tenant, resourceOwner, category, isItem)
case path.OneDriveService: case path.OneDriveService:
p, err = pb.ToDataLayerOneDrivePath(tenant, user, isItem) p, err = pb.ToDataLayerOneDrivePath(tenant, resourceOwner, isItem)
case path.SharePointService:
p, err = pb.ToDataLayerSharePointPath(tenant, resourceOwner, category, isItem)
default: default:
err = clues.New(fmt.Sprintf("unknown service: %s", service)) err = clues.New(fmt.Sprintf("unknown service: %s", service))
} }
@ -489,29 +496,6 @@ func toDataLayerPath(
return p return p
} }
func mustGetDefaultDriveID(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
service graph.Servicer,
userID string,
) string {
d, err := service.Client().UsersById(userID).Drive().Get(ctx, nil)
if err != nil {
err = graph.Wrap(
ctx,
err,
"retrieving default user drive").
With("user", userID)
}
require.Nil(t, clues.ToCore(err))
id := ptr.Val(d.GetId())
require.NotEmpty(t, id, "drive ID not set")
return id
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// integration tests // integration tests
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -589,12 +573,9 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchange() {
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
owners := []string{suite.user}
tests := []struct { tests := []struct {
name string name string
selector func() *selectors.ExchangeBackup selector func() *selectors.ExchangeBackup
resourceOwner string
category path.CategoryType category path.CategoryType
metadataFiles []string metadataFiles []string
runIncremental bool runIncremental bool
@ -602,13 +583,12 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchange() {
{ {
name: "Mail", name: "Mail",
selector: func() *selectors.ExchangeBackup { selector: func() *selectors.ExchangeBackup {
sel := selectors.NewExchangeBackup(owners) sel := selectors.NewExchangeBackup([]string{suite.user})
sel.Include(sel.MailFolders([]string{exchange.DefaultMailFolder}, selectors.PrefixMatch())) sel.Include(sel.MailFolders([]string{exchange.DefaultMailFolder}, selectors.PrefixMatch()))
sel.DiscreteOwner = suite.user sel.DiscreteOwner = suite.user
return sel return sel
}, },
resourceOwner: suite.user,
category: path.EmailCategory, category: path.EmailCategory,
metadataFiles: exchange.MetadataFileNames(path.EmailCategory), metadataFiles: exchange.MetadataFileNames(path.EmailCategory),
runIncremental: true, runIncremental: true,
@ -616,11 +596,10 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchange() {
{ {
name: "Contacts", name: "Contacts",
selector: func() *selectors.ExchangeBackup { selector: func() *selectors.ExchangeBackup {
sel := selectors.NewExchangeBackup(owners) sel := selectors.NewExchangeBackup([]string{suite.user})
sel.Include(sel.ContactFolders([]string{exchange.DefaultContactFolder}, selectors.PrefixMatch())) sel.Include(sel.ContactFolders([]string{exchange.DefaultContactFolder}, selectors.PrefixMatch()))
return sel return sel
}, },
resourceOwner: suite.user,
category: path.ContactsCategory, category: path.ContactsCategory,
metadataFiles: exchange.MetadataFileNames(path.ContactsCategory), metadataFiles: exchange.MetadataFileNames(path.ContactsCategory),
runIncremental: true, runIncremental: true,
@ -628,11 +607,10 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchange() {
{ {
name: "Calendar Events", name: "Calendar Events",
selector: func() *selectors.ExchangeBackup { selector: func() *selectors.ExchangeBackup {
sel := selectors.NewExchangeBackup(owners) sel := selectors.NewExchangeBackup([]string{suite.user})
sel.Include(sel.EventCalendars([]string{exchange.DefaultCalendar}, selectors.PrefixMatch())) sel.Include(sel.EventCalendars([]string{exchange.DefaultCalendar}, selectors.PrefixMatch()))
return sel return sel
}, },
resourceOwner: suite.user,
category: path.EventsCategory, category: path.EventsCategory,
metadataFiles: exchange.MetadataFileNames(path.EventsCategory), metadataFiles: exchange.MetadataFileNames(path.EventsCategory),
}, },
@ -646,15 +624,17 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchange() {
ffs = control.Toggles{} ffs = control.Toggles{}
) )
bo, acct, kw, ms, gc, closer := prepNewTestBackupOp(t, ctx, mb, sel, ffs, version.Backup) bo, acct, kw, ms, gc, sel, closer := prepNewTestBackupOp(t, ctx, mb, sel, ffs, version.Backup)
defer closer() defer closer()
userID := sel.ID()
m365, err := acct.M365Config() m365, err := acct.M365Config()
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
// run the tests // run the tests
runAndCheckBackup(t, ctx, &bo, mb, false) runAndCheckBackup(t, ctx, &bo, mb, false)
checkBackupIsInManifests(t, ctx, kw, &bo, sel, test.resourceOwner, test.category) checkBackupIsInManifests(t, ctx, kw, &bo, sel, userID, test.category)
checkMetadataFilesExist( checkMetadataFilesExist(
t, t,
ctx, ctx,
@ -662,7 +642,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchange() {
kw, kw,
ms, ms,
m365.AzureTenantID, m365.AzureTenantID,
test.resourceOwner, userID,
path.ExchangeService, path.ExchangeService,
map[path.CategoryType][]string{test.category: test.metadataFiles}) map[path.CategoryType][]string{test.category: test.metadataFiles})
@ -679,7 +659,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchange() {
) )
runAndCheckBackup(t, ctx, &incBO, incMB, true) runAndCheckBackup(t, ctx, &incBO, incMB, true)
checkBackupIsInManifests(t, ctx, kw, &incBO, sel, test.resourceOwner, test.category) checkBackupIsInManifests(t, ctx, kw, &incBO, sel, userID, test.category)
checkMetadataFilesExist( checkMetadataFilesExist(
t, t,
ctx, ctx,
@ -687,7 +667,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchange() {
kw, kw,
ms, ms,
m365.AzureTenantID, m365.AzureTenantID,
test.resourceOwner, userID,
path.ExchangeService, path.ExchangeService,
map[path.CategoryType][]string{test.category: test.metadataFiles}) map[path.CategoryType][]string{test.category: test.metadataFiles})
@ -722,7 +702,6 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
ffs = control.Toggles{} ffs = control.Toggles{}
mb = evmock.NewBus() mb = evmock.NewBus()
now = common.Now() now = common.Now()
owners = []string{suite.user}
categories = map[path.CategoryType][]string{ categories = map[path.CategoryType][]string{
path.EmailCategory: exchange.MetadataFileNames(path.EmailCategory), path.EmailCategory: exchange.MetadataFileNames(path.EmailCategory),
path.ContactsCategory: exchange.MetadataFileNames(path.ContactsCategory), path.ContactsCategory: exchange.MetadataFileNames(path.ContactsCategory),
@ -738,14 +717,18 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
// later on during the tests. Putting their identifiers into the selector // later on during the tests. Putting their identifiers into the selector
// at this point is harmless. // at this point is harmless.
containers = []string{container1, container2, container3, containerRename} containers = []string{container1, container2, container3, containerRename}
sel = selectors.NewExchangeBackup(owners) sel = selectors.NewExchangeBackup([]string{suite.user})
gc = GCWithSelector(t, ctx, acct, connector.Users, sel.Selector, nil, nil)
) )
gc, sels := GCWithSelector(t, ctx, acct, connector.Users, sel.Selector, nil, nil)
sel, err := sels.ToExchangeBackup()
require.NoError(t, err, clues.ToCore(err))
uidn := inMock.NewProvider(sels.ID(), sels.Name())
sel.Include( sel.Include(
sel.MailFolders(containers, selectors.PrefixMatch()), sel.MailFolders(containers, selectors.PrefixMatch()),
sel.ContactFolders(containers, selectors.PrefixMatch()), sel.ContactFolders(containers, selectors.PrefixMatch()))
)
m365, err := acct.M365Config() m365, err := acct.M365Config()
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -778,8 +761,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
given+" "+sur, given+" "+sur,
sur+", "+given, sur+", "+given,
given, mid, sur, given, mid, sur,
"123-456-7890", "123-456-7890")
)
} }
eventDBF := func(id, timeStamp, subject, body string) []byte { eventDBF := func(id, timeStamp, subject, body string) []byte {
@ -827,8 +809,8 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
path.ExchangeService, path.ExchangeService,
acct, acct,
category, category,
selectors.NewExchangeRestore(owners).Selector, selectors.NewExchangeRestore([]string{uidn.ID()}).Selector,
m365.AzureTenantID, suite.user, "", destName, m365.AzureTenantID, uidn.ID(), "", destName,
2, 2,
version.Backup, version.Backup,
gen.dbf) gen.dbf)
@ -841,7 +823,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
for category, gen := range dataset { for category, gen := range dataset {
qp := graph.QueryParams{ qp := graph.QueryParams{
Category: category, Category: category,
ResourceOwner: inMock.NewProvider(suite.user, suite.user), ResourceOwner: uidn,
Credentials: m365, Credentials: m365,
} }
cr, err := exchange.PopulateExchangeContainerResolver(ctx, qp, fault.New(true)) cr, err := exchange.PopulateExchangeContainerResolver(ctx, qp, fault.New(true))
@ -860,9 +842,12 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
} }
} }
bo, _, kw, ms, gc, closer := prepNewTestBackupOp(t, ctx, mb, sel.Selector, ffs, version.Backup) bo, _, kw, ms, gc, sels, closer := prepNewTestBackupOp(t, ctx, mb, sel.Selector, ffs, version.Backup)
defer closer() defer closer()
sel, err = sels.ToExchangeBackup()
require.NoError(t, err, clues.ToCore(err))
// run the initial backup // run the initial backup
runAndCheckBackup(t, ctx, &bo, mb, false) runAndCheckBackup(t, ctx, &bo, mb, false)
@ -895,7 +880,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
_, err := gc.Service. _, err := gc.Service.
Client(). Client().
UsersById(suite.user). UsersById(uidn.ID()).
MailFoldersById(fromContainer). MailFoldersById(fromContainer).
Move(). Move().
Post(ctx, body, nil) Post(ctx, body, nil)
@ -912,13 +897,13 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
switch category { switch category {
case path.EmailCategory: case path.EmailCategory:
err := ac.Mail().DeleteContainer(ctx, suite.user, containerID) err := ac.Mail().DeleteContainer(ctx, uidn.ID(), containerID)
require.NoError(t, err, "deleting an email folder", clues.ToCore(err)) require.NoError(t, err, "deleting an email folder", clues.ToCore(err))
case path.ContactsCategory: case path.ContactsCategory:
err := ac.Contacts().DeleteContainer(ctx, suite.user, containerID) err := ac.Contacts().DeleteContainer(ctx, uidn.ID(), containerID)
require.NoError(t, err, "deleting a contacts folder", clues.ToCore(err)) require.NoError(t, err, "deleting a contacts folder", clues.ToCore(err))
case path.EventsCategory: case path.EventsCategory:
err := ac.Events().DeleteContainer(ctx, suite.user, containerID) err := ac.Events().DeleteContainer(ctx, uidn.ID(), containerID)
require.NoError(t, err, "deleting a calendar", clues.ToCore(err)) require.NoError(t, err, "deleting a calendar", clues.ToCore(err))
} }
} }
@ -937,7 +922,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
path.ExchangeService, path.ExchangeService,
acct, acct,
category, category,
selectors.NewExchangeRestore(owners).Selector, selectors.NewExchangeRestore([]string{uidn.ID()}).Selector,
m365.AzureTenantID, suite.user, "", container3, m365.AzureTenantID, suite.user, "", container3,
2, 2,
version.Backup, version.Backup,
@ -945,9 +930,10 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
qp := graph.QueryParams{ qp := graph.QueryParams{
Category: category, Category: category,
ResourceOwner: inMock.NewProvider(suite.user, suite.user), ResourceOwner: uidn,
Credentials: m365, Credentials: m365,
} }
cr, err := exchange.PopulateExchangeContainerResolver(ctx, qp, fault.New(true)) cr, err := exchange.PopulateExchangeContainerResolver(ctx, qp, fault.New(true))
require.NoError(t, err, "populating container resolver", category, clues.ToCore(err)) require.NoError(t, err, "populating container resolver", category, clues.ToCore(err))
@ -968,7 +954,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
updateUserData: func(t *testing.T) { updateUserData: func(t *testing.T) {
for category, d := range dataset { for category, d := range dataset {
containerID := d.dests[container3].containerID containerID := d.dests[container3].containerID
cli := gc.Service.Client().UsersById(suite.user) cli := gc.Service.Client().UsersById(uidn.ID())
// copy the container info, since both names should // copy the container info, since both names should
// reference the same container by id. Though the // reference the same container by id. Though the
@ -1019,11 +1005,11 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
updateUserData: func(t *testing.T) { updateUserData: func(t *testing.T) {
for category, d := range dataset { for category, d := range dataset {
containerID := d.dests[container1].containerID containerID := d.dests[container1].containerID
cli := gc.Service.Client().UsersById(suite.user) cli := gc.Service.Client().UsersById(uidn.ID())
switch category { switch category {
case path.EmailCategory: case path.EmailCategory:
_, itemData := generateItemData(t, category, suite.user, mailDBF) _, itemData := generateItemData(t, category, uidn.ID(), mailDBF)
body, err := support.CreateMessageFromBytes(itemData) body, err := support.CreateMessageFromBytes(itemData)
require.NoError(t, err, "transforming mail bytes to messageable", clues.ToCore(err)) require.NoError(t, err, "transforming mail bytes to messageable", clues.ToCore(err))
@ -1031,7 +1017,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
require.NoError(t, err, "posting email item", clues.ToCore(err)) require.NoError(t, err, "posting email item", clues.ToCore(err))
case path.ContactsCategory: case path.ContactsCategory:
_, itemData := generateItemData(t, category, suite.user, contactDBF) _, itemData := generateItemData(t, category, uidn.ID(), contactDBF)
body, err := support.CreateContactFromBytes(itemData) body, err := support.CreateContactFromBytes(itemData)
require.NoError(t, err, "transforming contact bytes to contactable", clues.ToCore(err)) require.NoError(t, err, "transforming contact bytes to contactable", clues.ToCore(err))
@ -1039,7 +1025,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
require.NoError(t, err, "posting contact item", clues.ToCore(err)) require.NoError(t, err, "posting contact item", clues.ToCore(err))
case path.EventsCategory: case path.EventsCategory:
_, itemData := generateItemData(t, category, suite.user, eventDBF) _, itemData := generateItemData(t, category, uidn.ID(), eventDBF)
body, err := support.CreateEventFromBytes(itemData) body, err := support.CreateEventFromBytes(itemData)
require.NoError(t, err, "transforming event bytes to eventable", clues.ToCore(err)) require.NoError(t, err, "transforming event bytes to eventable", clues.ToCore(err))
@ -1056,11 +1042,11 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
updateUserData: func(t *testing.T) { updateUserData: func(t *testing.T) {
for category, d := range dataset { for category, d := range dataset {
containerID := d.dests[container1].containerID containerID := d.dests[container1].containerID
cli := gc.Service.Client().UsersById(suite.user) cli := gc.Service.Client().UsersById(uidn.ID())
switch category { switch category {
case path.EmailCategory: case path.EmailCategory:
ids, _, _, err := ac.Mail().GetAddedAndRemovedItemIDs(ctx, suite.user, containerID, "", false) ids, _, _, err := ac.Mail().GetAddedAndRemovedItemIDs(ctx, uidn.ID(), containerID, "", false)
require.NoError(t, err, "getting message ids", clues.ToCore(err)) require.NoError(t, err, "getting message ids", clues.ToCore(err))
require.NotEmpty(t, ids, "message ids in folder") require.NotEmpty(t, ids, "message ids in folder")
@ -1068,7 +1054,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
require.NoError(t, err, "deleting email item", clues.ToCore(err)) require.NoError(t, err, "deleting email item", clues.ToCore(err))
case path.ContactsCategory: case path.ContactsCategory:
ids, _, _, err := ac.Contacts().GetAddedAndRemovedItemIDs(ctx, suite.user, containerID, "", false) ids, _, _, err := ac.Contacts().GetAddedAndRemovedItemIDs(ctx, uidn.ID(), containerID, "", false)
require.NoError(t, err, "getting contact ids", clues.ToCore(err)) require.NoError(t, err, "getting contact ids", clues.ToCore(err))
require.NotEmpty(t, ids, "contact ids in folder") require.NotEmpty(t, ids, "contact ids in folder")
@ -1076,7 +1062,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
require.NoError(t, err, "deleting contact item", clues.ToCore(err)) require.NoError(t, err, "deleting contact item", clues.ToCore(err))
case path.EventsCategory: case path.EventsCategory:
ids, _, _, err := ac.Events().GetAddedAndRemovedItemIDs(ctx, suite.user, containerID, "", false) ids, _, _, err := ac.Events().GetAddedAndRemovedItemIDs(ctx, uidn.ID(), containerID, "", false)
require.NoError(t, err, "getting event ids", clues.ToCore(err)) require.NoError(t, err, "getting event ids", clues.ToCore(err))
require.NotEmpty(t, ids, "event ids in folder") require.NotEmpty(t, ids, "event ids in folder")
@ -1101,7 +1087,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
err := incBO.Run(ctx) err := incBO.Run(ctx)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
checkBackupIsInManifests(t, ctx, kw, &incBO, sel.Selector, suite.user, maps.Keys(categories)...) checkBackupIsInManifests(t, ctx, kw, &incBO, sel.Selector, uidn.ID(), maps.Keys(categories)...)
checkMetadataFilesExist( checkMetadataFilesExist(
t, t,
ctx, ctx,
@ -1109,7 +1095,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
kw, kw,
ms, ms,
m365.AzureTenantID, m365.AzureTenantID,
suite.user, uidn.ID(),
path.ExchangeService, path.ExchangeService,
categories) categories)
@ -1140,19 +1126,100 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDrive() {
t = suite.T() t = suite.T()
mb = evmock.NewBus() mb = evmock.NewBus()
m365UserID = tester.SecondaryM365UserID(t) m365UserID = tester.SecondaryM365UserID(t)
sel = selectors.NewOneDriveBackup([]string{m365UserID}) osel = selectors.NewOneDriveBackup([]string{m365UserID})
) )
sel.Include(sel.AllData()) osel.Include(osel.AllData())
bo, _, _, _, _, closer := prepNewTestBackupOp(t, ctx, mb, sel.Selector, control.Toggles{}, version.Backup) bo, _, _, _, _, _, closer := prepNewTestBackupOp(t, ctx, mb, osel.Selector, control.Toggles{}, version.Backup)
defer closer() defer closer()
runAndCheckBackup(t, ctx, &bo, mb, false) runAndCheckBackup(t, ctx, &bo, mb, false)
} }
// TestBackup_Run ensures that Integration Testing works for OneDrive
func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() { func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
sel := selectors.NewOneDriveRestore([]string{suite.user})
ic := func(cs []string) selectors.Selector {
sel.Include(sel.Folders(cs, selectors.PrefixMatch()))
return sel.Selector
}
gtdi := func(
t *testing.T,
ctx context.Context,
svc graph.Servicer,
) string {
d, err := svc.Client().UsersById(suite.user).Drive().Get(ctx, nil)
if err != nil {
err = graph.Wrap(ctx, err, "retrieving default user drive").
With("user", suite.user)
}
require.NoError(t, err, clues.ToCore(err))
id := ptr.Val(d.GetId())
require.NotEmpty(t, id, "drive ID")
return id
}
runDriveIncrementalTest(
suite,
suite.user,
connector.Users,
path.OneDriveService,
path.FilesCategory,
ic,
gtdi)
}
func (suite *BackupOpIntegrationSuite) TestBackup_Run_sharePointIncrementals() {
sel := selectors.NewSharePointRestore([]string{suite.site})
ic := func(cs []string) selectors.Selector {
sel.Include(sel.LibraryFolders(cs, selectors.PrefixMatch()))
return sel.Selector
}
gtdi := func(
t *testing.T,
ctx context.Context,
svc graph.Servicer,
) string {
d, err := svc.Client().SitesById(suite.site).Drive().Get(ctx, nil)
if err != nil {
err = graph.Wrap(ctx, err, "retrieving default site drive").
With("site", suite.site)
}
require.NoError(t, err, clues.ToCore(err))
id := ptr.Val(d.GetId())
require.NotEmpty(t, id, "drive ID")
return id
}
runDriveIncrementalTest(
suite,
suite.site,
connector.Sites,
path.SharePointService,
path.LibrariesCategory,
ic,
gtdi)
}
func runDriveIncrementalTest(
suite *BackupOpIntegrationSuite,
owner string,
resource connector.Resource,
service path.ServiceType,
category path.CategoryType,
includeContainers func([]string) selectors.Selector,
getTestDriveID func(*testing.T, context.Context, graph.Servicer) string,
) {
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
@ -1162,14 +1229,12 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
ffs = control.Toggles{} ffs = control.Toggles{}
mb = evmock.NewBus() mb = evmock.NewBus()
owners = []string{suite.user} // `now` has to be formatted with SimpleDateTimeTesting as
// some drives cannot have `:` in file/folder names
// `now` has to be formatted with SimpleDateTimeOneDrive as
// some onedrive cannot have `:` in file/folder names
now = common.FormatNow(common.SimpleTimeTesting) now = common.FormatNow(common.SimpleTimeTesting)
categories = map[path.CategoryType][]string{ categories = map[path.CategoryType][]string{
path.FilesCategory: {graph.DeltaURLsFileName, graph.PreviousPathFileName}, category: {graph.DeltaURLsFileName, graph.PreviousPathFileName},
} }
container1 = fmt.Sprintf("%s%d_%s", incrementalsDestContainerPrefix, 1, now) container1 = fmt.Sprintf("%s%d_%s", incrementalsDestContainerPrefix, 1, now)
container2 = fmt.Sprintf("%s%d_%s", incrementalsDestContainerPrefix, 2, now) container2 = fmt.Sprintf("%s%d_%s", incrementalsDestContainerPrefix, 2, now)
@ -1180,17 +1245,17 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
// container3 does not exist yet. It will get created later on // container3 does not exist yet. It will get created later on
// during the tests. // during the tests.
containers = []string{container1, container2, container3} containers = []string{container1, container2, container3}
sel = selectors.NewOneDriveBackup(owners)
) )
sel.Include(sel.Folders(containers, selectors.PrefixMatch())) sel := includeContainers(containers)
creds, err := acct.M365Config() creds, err := acct.M365Config()
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
gc, sel := GCWithSelector(t, ctx, acct, resource, sel, nil, nil)
var ( var (
gc = GCWithSelector(t, ctx, acct, connector.Users, sel.Selector, nil, nil) driveID = getTestDriveID(t, ctx, gc.Service)
driveID = mustGetDefaultDriveID(t, ctx, gc.Service, suite.user)
fileDBF = func(id, timeStamp, subject, body string) []byte { fileDBF = func(id, timeStamp, subject, body string) []byte {
return []byte(id + subject) return []byte(id + subject)
} }
@ -1207,11 +1272,11 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
t, t,
ctx, ctx,
gc, gc,
path.OneDriveService, service,
acct, acct,
path.FilesCategory, category,
selectors.NewOneDriveRestore(owners).Selector, sel,
creds.AzureTenantID, suite.user, driveID, destName, creds.AzureTenantID, owner, driveID, destName,
2, 2,
// Use an old backup version so we don't need metadata files. // Use an old backup version so we don't need metadata files.
0, 0,
@ -1224,18 +1289,16 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
for _, destName := range genDests { for _, destName := range genDests {
// Use path-based indexing to get the folder's ID. This is sourced from the // Use path-based indexing to get the folder's ID. This is sourced from the
// onedrive package `getFolder` function. // onedrive package `getFolder` function.
itemURL := fmt.Sprintf( itemURL := fmt.Sprintf("https://graph.microsoft.com/v1.0/drives/%s/root:/%s", driveID, destName)
"https://graph.microsoft.com/v1.0/drives/%s/root:/%s", resp, err := drive.
driveID, NewItemsDriveItemItemRequestBuilder(itemURL, gc.Service.Adapter()).
destName)
resp, err := drive.NewItemsDriveItemItemRequestBuilder(itemURL, gc.Service.Adapter()).
Get(ctx, nil) Get(ctx, nil)
require.NoError(t, err, "getting drive folder ID", "folder name", destName, clues.ToCore(err)) require.NoError(t, err, "getting drive folder ID", "folder name", destName, clues.ToCore(err))
containerIDs[destName] = ptr.Val(resp.GetId()) containerIDs[destName] = ptr.Val(resp.GetId())
} }
bo, _, kw, ms, gc, closer := prepNewTestBackupOp(t, ctx, mb, sel.Selector, ffs, version.Backup) bo, _, kw, ms, gc, _, closer := prepNewTestBackupOp(t, ctx, mb, sel, ffs, version.Backup)
defer closer() defer closer()
// run the initial backup // run the initial backup
@ -1249,7 +1312,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
writePerm = onedrive.UserPermission{ writePerm = onedrive.UserPermission{
ID: "perm-id", ID: "perm-id",
Roles: []string{"write"}, Roles: []string{"write"},
EntityID: suite.user, EntityID: owner,
} }
) )
@ -1260,19 +1323,20 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
table := []struct { table := []struct {
name string name string
// performs the incremental update required for the test. // performs the incremental update required for the test.
updateUserData func(t *testing.T) updateFiles func(t *testing.T)
itemsRead int itemsRead int
itemsWritten int itemsWritten int
skip bool
}{ }{
{ {
name: "clean incremental, no changes", name: "clean incremental, no changes",
updateUserData: func(t *testing.T) {}, updateFiles: func(t *testing.T) {},
itemsRead: 0, itemsRead: 0,
itemsWritten: 0, itemsWritten: 0,
}, },
{ {
name: "create a new file", name: "create a new file",
updateUserData: func(t *testing.T) { updateFiles: func(t *testing.T) {
targetContainer := containerIDs[container1] targetContainer := containerIDs[container1]
driveItem := models.NewDriveItem() driveItem := models.NewDriveItem()
driveItem.SetName(&newFileName) driveItem.SetName(&newFileName)
@ -1290,7 +1354,8 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
}, },
{ {
name: "add permission to new file", name: "add permission to new file",
updateUserData: func(t *testing.T) { skip: service == path.SharePointService,
updateFiles: func(t *testing.T) {
driveItem := models.NewDriveItem() driveItem := models.NewDriveItem()
driveItem.SetName(&newFileName) driveItem.SetName(&newFileName)
driveItem.SetFile(models.NewFile()) driveItem.SetFile(models.NewFile())
@ -1311,7 +1376,8 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
}, },
{ {
name: "remove permission from new file", name: "remove permission from new file",
updateUserData: func(t *testing.T) { skip: service == path.SharePointService,
updateFiles: func(t *testing.T) {
driveItem := models.NewDriveItem() driveItem := models.NewDriveItem()
driveItem.SetName(&newFileName) driveItem.SetName(&newFileName)
driveItem.SetFile(models.NewFile()) driveItem.SetFile(models.NewFile())
@ -1332,7 +1398,8 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
}, },
{ {
name: "add permission to container", name: "add permission to container",
updateUserData: func(t *testing.T) { skip: service == path.SharePointService,
updateFiles: func(t *testing.T) {
targetContainer := containerIDs[container1] targetContainer := containerIDs[container1]
driveItem := models.NewDriveItem() driveItem := models.NewDriveItem()
driveItem.SetName(&newFileName) driveItem.SetName(&newFileName)
@ -1354,7 +1421,8 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
}, },
{ {
name: "remove permission from container", name: "remove permission from container",
updateUserData: func(t *testing.T) { skip: service == path.SharePointService,
updateFiles: func(t *testing.T) {
targetContainer := containerIDs[container1] targetContainer := containerIDs[container1]
driveItem := models.NewDriveItem() driveItem := models.NewDriveItem()
driveItem.SetName(&newFileName) driveItem.SetName(&newFileName)
@ -1376,7 +1444,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
}, },
{ {
name: "update contents of a file", name: "update contents of a file",
updateUserData: func(t *testing.T) { updateFiles: func(t *testing.T) {
err := gc.Service. err := gc.Service.
Client(). Client().
DrivesById(driveID). DrivesById(driveID).
@ -1390,7 +1458,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
}, },
{ {
name: "rename a file", name: "rename a file",
updateUserData: func(t *testing.T) { updateFiles: func(t *testing.T) {
container := containerIDs[container1] container := containerIDs[container1]
driveItem := models.NewDriveItem() driveItem := models.NewDriveItem()
@ -1412,7 +1480,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
}, },
{ {
name: "move a file between folders", name: "move a file between folders",
updateUserData: func(t *testing.T) { updateFiles: func(t *testing.T) {
dest := containerIDs[container1] dest := containerIDs[container1]
driveItem := models.NewDriveItem() driveItem := models.NewDriveItem()
@ -1433,7 +1501,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
}, },
{ {
name: "delete file", name: "delete file",
updateUserData: func(t *testing.T) { updateFiles: func(t *testing.T) {
// deletes require unique http clients // deletes require unique http clients
// https://github.com/alcionai/corso/issues/2707 // https://github.com/alcionai/corso/issues/2707
err = newDeleteServicer(t). err = newDeleteServicer(t).
@ -1448,7 +1516,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
}, },
{ {
name: "move a folder to a subfolder", name: "move a folder to a subfolder",
updateUserData: func(t *testing.T) { updateFiles: func(t *testing.T) {
dest := containerIDs[container1] dest := containerIDs[container1]
source := containerIDs[container2] source := containerIDs[container2]
@ -1470,7 +1538,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
}, },
{ {
name: "rename a folder", name: "rename a folder",
updateUserData: func(t *testing.T) { updateFiles: func(t *testing.T) {
parent := containerIDs[container1] parent := containerIDs[container1]
child := containerIDs[container2] child := containerIDs[container2]
@ -1493,7 +1561,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
}, },
{ {
name: "delete a folder", name: "delete a folder",
updateUserData: func(t *testing.T) { updateFiles: func(t *testing.T) {
container := containerIDs[container2] container := containerIDs[container2]
// deletes require unique http clients // deletes require unique http clients
// https://github.com/alcionai/corso/issues/2707 // https://github.com/alcionai/corso/issues/2707
@ -1509,16 +1577,16 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
}, },
{ {
name: "add a new folder", name: "add a new folder",
updateUserData: func(t *testing.T) { updateFiles: func(t *testing.T) {
generateContainerOfItems( generateContainerOfItems(
t, t,
ctx, ctx,
gc, gc,
path.OneDriveService, service,
acct, acct,
path.FilesCategory, category,
selectors.NewOneDriveRestore(owners).Selector, sel,
creds.AzureTenantID, suite.user, driveID, container3, creds.AzureTenantID, owner, driveID, container3,
2, 2,
0, 0,
fileDBF) fileDBF)
@ -1540,19 +1608,28 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
} }
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
// TODO(rkeepers): remove when sharepoint supports permission.
if test.skip {
return
}
cleanGC, err := connector.NewGraphConnector(ctx, acct, resource)
require.NoError(t, err, clues.ToCore(err))
var ( var (
t = suite.T() t = suite.T()
incMB = evmock.NewBus() incMB = evmock.NewBus()
incBO = newTestBackupOp(t, ctx, kw, ms, gc, acct, sel.Selector, incMB, ffs, closer) incBO = newTestBackupOp(t, ctx, kw, ms, cleanGC, acct, sel, incMB, ffs, closer)
) )
tester.LogTimeOfTest(suite.T()) tester.LogTimeOfTest(suite.T())
test.updateUserData(t) test.updateFiles(t)
err := incBO.Run(ctx) err = incBO.Run(ctx)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
checkBackupIsInManifests(t, ctx, kw, &incBO, sel.Selector, suite.user, maps.Keys(categories)...)
checkBackupIsInManifests(t, ctx, kw, &incBO, sel, sel.ID(), maps.Keys(categories)...)
checkMetadataFilesExist( checkMetadataFilesExist(
t, t,
ctx, ctx,
@ -1560,8 +1637,8 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
kw, kw,
ms, ms,
creds.AzureTenantID, creds.AzureTenantID,
suite.user, sel.ID(),
path.OneDriveService, service,
categories) categories)
// do some additional checks to ensure the incremental dealt with fewer items. // do some additional checks to ensure the incremental dealt with fewer items.
@ -1612,11 +1689,11 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveOwnerMigration() {
oldsel := selectors.NewOneDriveBackup([]string{uname}) oldsel := selectors.NewOneDriveBackup([]string{uname})
oldsel.Include(oldsel.Folders([]string{"test"}, selectors.ExactMatch())) oldsel.Include(oldsel.Folders([]string{"test"}, selectors.ExactMatch()))
bo, _, kw, ms, gc, closer := prepNewTestBackupOp(t, ctx, mb, oldsel.Selector, ffs, 0) bo, _, kw, ms, gc, sel, closer := prepNewTestBackupOp(t, ctx, mb, oldsel.Selector, ffs, 0)
defer closer() defer closer()
// ensure the initial owner uses name in both cases // ensure the initial owner uses name in both cases
bo.ResourceOwner = oldsel.SetDiscreteOwnerIDName(uname, uname) bo.ResourceOwner = sel.SetDiscreteOwnerIDName(uname, uname)
// required, otherwise we don't run the migration // required, otherwise we don't run the migration
bo.backupVersion = version.All8MigrateUserPNToID - 1 bo.backupVersion = version.All8MigrateUserPNToID - 1
@ -1633,7 +1710,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveOwnerMigration() {
newsel := selectors.NewOneDriveBackup([]string{uid}) newsel := selectors.NewOneDriveBackup([]string{uid})
newsel.Include(newsel.Folders([]string{"test"}, selectors.ExactMatch())) newsel.Include(newsel.Folders([]string{"test"}, selectors.ExactMatch()))
sel := newsel.SetDiscreteOwnerIDName(uid, uname) sel = newsel.SetDiscreteOwnerIDName(uid, uname)
var ( var (
incMB = evmock.NewBus() incMB = evmock.NewBus()
@ -1711,13 +1788,13 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_sharePoint() {
sel = selectors.NewSharePointBackup([]string{suite.site}) sel = selectors.NewSharePointBackup([]string{suite.site})
) )
sel.Include(selTD.SharePointBackupFolderScope(sel)) sel.Include(testdata.SharePointBackupFolderScope(sel))
bo, _, kw, _, _, closer := prepNewTestBackupOp(t, ctx, mb, sel.Selector, control.Toggles{}, version.Backup) bo, _, kw, _, _, sels, closer := prepNewTestBackupOp(t, ctx, mb, sel.Selector, control.Toggles{}, version.Backup)
defer closer() defer closer()
runAndCheckBackup(t, ctx, &bo, mb, false) runAndCheckBackup(t, ctx, &bo, mb, false)
checkBackupIsInManifests(t, ctx, kw, &bo, sel.Selector, suite.site, path.LibrariesCategory) checkBackupIsInManifests(t, ctx, kw, &bo, sels, suite.site, path.LibrariesCategory)
} }
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------

View File

@ -24,7 +24,7 @@ func GCWithSelector(
sel selectors.Selector, sel selectors.Selector,
ins idname.Cacher, ins idname.Cacher,
onFail func(), onFail func(),
) *connector.GraphConnector { ) (*connector.GraphConnector, selectors.Selector) {
gc, err := connector.NewGraphConnector(ctx, acct, cr) gc, err := connector.NewGraphConnector(ctx, acct, cr)
if !assert.NoError(t, err, clues.ToCore(err)) { if !assert.NoError(t, err, clues.ToCore(err)) {
if onFail != nil { if onFail != nil {
@ -43,7 +43,7 @@ func GCWithSelector(
t.FailNow() t.FailNow()
} }
sel.SetDiscreteOwnerIDName(id, name) sel = sel.SetDiscreteOwnerIDName(id, name)
return gc return gc, sel
} }

View File

@ -270,16 +270,16 @@ func setupExchangeBackup(
var ( var (
users = []string{owner} users = []string{owner}
sel = selectors.NewExchangeBackup(users) esel = selectors.NewExchangeBackup(users)
) )
sel.DiscreteOwner = owner esel.DiscreteOwner = owner
sel.Include( esel.Include(
sel.MailFolders([]string{exchange.DefaultMailFolder}, selectors.PrefixMatch()), esel.MailFolders([]string{exchange.DefaultMailFolder}, selectors.PrefixMatch()),
sel.ContactFolders([]string{exchange.DefaultContactFolder}, selectors.PrefixMatch()), esel.ContactFolders([]string{exchange.DefaultContactFolder}, selectors.PrefixMatch()),
sel.EventCalendars([]string{exchange.DefaultCalendar}, selectors.PrefixMatch())) esel.EventCalendars([]string{exchange.DefaultCalendar}, selectors.PrefixMatch()))
gc := GCWithSelector(t, ctx, acct, connector.Users, sel.Selector, nil, nil) gc, sel := GCWithSelector(t, ctx, acct, connector.Users, esel.Selector, nil, nil)
bo, err := NewBackupOperation( bo, err := NewBackupOperation(
ctx, ctx,
@ -288,7 +288,7 @@ func setupExchangeBackup(
sw, sw,
gc, gc,
acct, acct,
sel.Selector, sel,
inMock.NewProvider(owner, owner), inMock.NewProvider(owner, owner),
evmock.NewBus()) evmock.NewBus())
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -320,17 +320,17 @@ func setupSharePointBackup(
var ( var (
sites = []string{owner} sites = []string{owner}
sel = selectors.NewSharePointBackup(sites) ssel = selectors.NewSharePointBackup(sites)
) )
// assume a folder name "test" exists in the drive. // assume a folder name "test" exists in the drive.
// this is brittle, and requires us to backfill anytime // this is brittle, and requires us to backfill anytime
// the site under test changes, but also prevents explosive // the site under test changes, but also prevents explosive
// growth from re-backup/restore of restored files. // growth from re-backup/restore of restored files.
sel.Include(sel.LibraryFolders([]string{"test"}, selectors.PrefixMatch())) ssel.Include(ssel.LibraryFolders([]string{"test"}, selectors.PrefixMatch()))
sel.DiscreteOwner = owner ssel.DiscreteOwner = owner
gc := GCWithSelector(t, ctx, acct, connector.Sites, sel.Selector, nil, nil) gc, sel := GCWithSelector(t, ctx, acct, connector.Sites, ssel.Selector, nil, nil)
bo, err := NewBackupOperation( bo, err := NewBackupOperation(
ctx, ctx,
@ -339,7 +339,7 @@ func setupSharePointBackup(
sw, sw,
gc, gc,
acct, acct,
sel.Selector, sel,
inMock.NewProvider(owner, owner), inMock.NewProvider(owner, owner),
evmock.NewBus()) evmock.NewBus())
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))