rename connector -> m365 (#3600)

Renames /internal/connector to /internal/m365. There are no logic changes in this PR — only the directory rename, the corresponding import-path renames, and one linter-prompted shadowing rename.

---

#### Does this PR need a docs update or release note?

- [x]  No

#### Type of change

- [x] 🧹 Tech Debt/Cleanup

#### Issue(s)

* #1996

#### Test Plan

- [x]  Unit test
- [x] 💚 E2E
This commit is contained in:
Keepers 2023-06-13 12:35:39 -06:00 committed by GitHub
parent c74539338e
commit 2f6d731993
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
275 changed files with 1999 additions and 2108 deletions

View File

@ -118,7 +118,7 @@ issues:
linters:
- forbidigo
text: "context.(Background|TODO)"
- path: internal/connector/graph/betasdk
- path: internal/m365/graph/betasdk
linters:
- wsl
- revive

View File

@ -12,8 +12,8 @@ import (
. "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"

View File

@ -19,7 +19,7 @@ import (
"github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/connector/exchange"
"github.com/alcionai/corso/src/internal/m365/exchange"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"

View File

@ -14,7 +14,7 @@ import (
"github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/connector/exchange"
"github.com/alcionai/corso/src/internal/m365/exchange"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"

View File

@ -15,9 +15,10 @@ import (
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/connector"
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365"
exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/m365/resource"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/account"
@ -50,7 +51,7 @@ type dataBuilderFunc func(id, now, subject, body string) []byte
func generateAndRestoreItems(
ctx context.Context,
gc *connector.GraphConnector,
ctrl *m365.Controller,
service path.ServiceType,
cat path.CategoryType,
sel selectors.Selector,
@ -98,19 +99,19 @@ func generateAndRestoreItems(
print.Infof(ctx, "Generating %d %s items in %s\n", howMany, cat, Destination)
return gc.ConsumeRestoreCollections(ctx, version.Backup, sel, dest, opts, dataColls, errs)
return ctrl.ConsumeRestoreCollections(ctx, version.Backup, sel, dest, opts, dataColls, errs)
}
// ------------------------------------------------------------------------------------------
// Common Helpers
// ------------------------------------------------------------------------------------------
func getGCAndVerifyResourceOwner(
func getControllerAndVerifyResourceOwner(
ctx context.Context,
resource connector.Resource,
rc resource.Category,
resourceOwner string,
) (
*connector.GraphConnector,
*m365.Controller,
account.Account,
idname.Provider,
error,
@ -132,17 +133,17 @@ func getGCAndVerifyResourceOwner(
return nil, account.Account{}, nil, clues.Wrap(err, "finding m365 account details")
}
gc, err := connector.NewGraphConnector(ctx, acct, resource)
ctrl, err := m365.NewController(ctx, acct, rc)
if err != nil {
return nil, account.Account{}, nil, clues.Wrap(err, "connecting to graph api")
}
id, _, err := gc.PopulateOwnerIDAndNamesFrom(ctx, resourceOwner, nil)
id, _, err := ctrl.PopulateOwnerIDAndNamesFrom(ctx, resourceOwner, nil)
if err != nil {
return nil, account.Account{}, nil, clues.Wrap(err, "verifying user")
}
return gc, acct, gc.IDNameLookup.ProviderForID(id), nil
return ctrl, acct, ctrl.IDNameLookup.ProviderForID(id), nil
}
type item struct {
@ -208,7 +209,7 @@ var (
)
func generateAndRestoreDriveItems(
gc *connector.GraphConnector,
ctrl *m365.Controller,
resourceOwner, secondaryUserID, secondaryUserName string,
acct account.Account,
service path.ServiceType,
@ -232,14 +233,14 @@ func generateAndRestoreDriveItems(
switch service {
case path.SharePointService:
d, err := gc.AC.Stable.Client().Sites().BySiteId(resourceOwner).Drive().Get(ctx, nil)
d, err := ctrl.AC.Stable.Client().Sites().BySiteId(resourceOwner).Drive().Get(ctx, nil)
if err != nil {
return nil, clues.Wrap(err, "getting site's default drive")
}
driveID = ptr.Val(d.GetId())
default:
d, err := gc.AC.Stable.Client().Users().ByUserId(resourceOwner).Drive().Get(ctx, nil)
d, err := ctrl.AC.Stable.Client().Users().ByUserId(resourceOwner).Drive().Get(ctx, nil)
if err != nil {
return nil, clues.Wrap(err, "getting user's default drive")
}
@ -248,7 +249,7 @@ func generateAndRestoreDriveItems(
}
var (
cols []connector.OnedriveColInfo
cols []m365.OnedriveColInfo
rootPath = []string{"drives", driveID, "root:"}
folderAPath = []string{"drives", driveID, "root:", folderAName}
@ -262,15 +263,15 @@ func generateAndRestoreDriveItems(
)
for i := 0; i < count; i++ {
col := []connector.OnedriveColInfo{
col := []m365.OnedriveColInfo{
// basic folder and file creation
{
PathElements: rootPath,
Files: []connector.ItemData{
Files: []m365.ItemData{
{
Name: fmt.Sprintf("file-1st-count-%d-at-%s", i, currentTime),
Data: fileAData,
Perms: connector.PermData{
Perms: m365.PermData{
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: writePerm,
@ -281,13 +282,13 @@ func generateAndRestoreDriveItems(
Data: fileBData,
},
},
Folders: []connector.ItemData{
Folders: []m365.ItemData{
{
Name: folderBName,
},
{
Name: folderAName,
Perms: connector.PermData{
Perms: m365.PermData{
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: readPerm,
@ -295,7 +296,7 @@ func generateAndRestoreDriveItems(
},
{
Name: folderCName,
Perms: connector.PermData{
Perms: m365.PermData{
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: readPerm,
@ -307,18 +308,18 @@ func generateAndRestoreDriveItems(
// a folder that has permissions with an item in the folder with
// the different permissions.
PathElements: folderAPath,
Files: []connector.ItemData{
Files: []m365.ItemData{
{
Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
Data: fileEData,
Perms: connector.PermData{
Perms: m365.PermData{
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: writePerm,
},
},
},
Perms: connector.PermData{
Perms: m365.PermData{
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: readPerm,
@ -328,13 +329,13 @@ func generateAndRestoreDriveItems(
// a folder that has permissions with an item in the folder with
// no permissions.
PathElements: folderCPath,
Files: []connector.ItemData{
Files: []m365.ItemData{
{
Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
Data: fileAData,
},
},
Perms: connector.PermData{
Perms: m365.PermData{
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: readPerm,
@ -342,23 +343,23 @@ func generateAndRestoreDriveItems(
},
{
PathElements: folderBPath,
Files: []connector.ItemData{
Files: []m365.ItemData{
{
// restoring a file in a non-root folder that doesn't inherit
// permissions.
Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
Data: fileBData,
Perms: connector.PermData{
Perms: m365.PermData{
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: writePerm,
},
},
},
Folders: []connector.ItemData{
Folders: []m365.ItemData{
{
Name: folderAName,
Perms: connector.PermData{
Perms: m365.PermData{
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: readPerm,
@ -371,7 +372,7 @@ func generateAndRestoreDriveItems(
cols = append(cols, col...)
}
input, err := connector.DataForInfo(service, cols, version.Backup)
input, err := m365.DataForInfo(service, cols, version.Backup)
if err != nil {
return nil, err
}
@ -388,16 +389,16 @@ func generateAndRestoreDriveItems(
ToggleFeatures: control.Toggles{},
}
config := connector.ConfigInfo{
config := m365.ConfigInfo{
Opts: opts,
Resource: connector.Users,
Resource: resource.Users,
Service: service,
Tenant: tenantID,
ResourceOwners: []string{resourceOwner},
RestoreCfg: tester.DefaultTestRestoreConfig(""),
}
_, _, collections, _, err := connector.GetCollectionsAndExpected(
_, _, collections, _, err := m365.GetCollectionsAndExpected(
config,
input,
version.Backup)
@ -405,5 +406,5 @@ func generateAndRestoreDriveItems(
return nil, err
}
return gc.ConsumeRestoreCollections(ctx, version.Backup, sel, dest, opts, collections, errs)
return ctrl.ConsumeRestoreCollections(ctx, version.Backup, sel, dest, opts, collections, errs)
}

View File

@ -5,8 +5,8 @@ import (
. "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/connector"
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/m365/resource"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
@ -52,14 +52,14 @@ func handleExchangeEmailFactory(cmd *cobra.Command, args []string) error {
return nil
}
gc, _, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
ctrl, _, _, err := getControllerAndVerifyResourceOwner(ctx, resource.Users, User)
if err != nil {
return Only(ctx, err)
}
deets, err := generateAndRestoreItems(
ctx,
gc,
ctrl,
service,
category,
selectors.NewExchangeRestore([]string{User}).Selector,
@ -98,14 +98,14 @@ func handleExchangeCalendarEventFactory(cmd *cobra.Command, args []string) error
return nil
}
gc, _, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
ctrl, _, _, err := getControllerAndVerifyResourceOwner(ctx, resource.Users, User)
if err != nil {
return Only(ctx, err)
}
deets, err := generateAndRestoreItems(
ctx,
gc,
ctrl,
service,
category,
selectors.NewExchangeRestore([]string{User}).Selector,
@ -143,14 +143,14 @@ func handleExchangeContactFactory(cmd *cobra.Command, args []string) error {
return nil
}
gc, _, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
ctrl, _, _, err := getControllerAndVerifyResourceOwner(ctx, resource.Users, User)
if err != nil {
return Only(ctx, err)
}
deets, err := generateAndRestoreItems(
ctx,
gc,
ctrl,
service,
category,
selectors.NewExchangeRestore([]string{User}).Selector,

View File

@ -7,7 +7,7 @@ import (
. "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/connector"
"github.com/alcionai/corso/src/internal/m365/resource"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
@ -36,7 +36,7 @@ func handleOneDriveFileFactory(cmd *cobra.Command, args []string) error {
return nil
}
gc, acct, inp, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
ctrl, acct, inp, err := getControllerAndVerifyResourceOwner(ctx, resource.Users, User)
if err != nil {
return Only(ctx, err)
}
@ -45,7 +45,7 @@ func handleOneDriveFileFactory(cmd *cobra.Command, args []string) error {
sel.SetDiscreteOwnerIDName(inp.ID(), inp.Name())
deets, err := generateAndRestoreDriveItems(
gc,
ctrl,
inp.ID(),
SecondaryUser,
strings.ToLower(SecondaryUser),

View File

@ -7,7 +7,7 @@ import (
. "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/connector"
"github.com/alcionai/corso/src/internal/m365/resource"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
@ -36,7 +36,7 @@ func handleSharePointLibraryFileFactory(cmd *cobra.Command, args []string) error
return nil
}
gc, acct, inp, err := getGCAndVerifyResourceOwner(ctx, connector.Sites, Site)
ctrl, acct, inp, err := getControllerAndVerifyResourceOwner(ctx, resource.Sites, Site)
if err != nil {
return Only(ctx, err)
}
@ -45,7 +45,7 @@ func handleSharePointLibraryFileFactory(cmd *cobra.Command, args []string) error
sel.SetDiscreteOwnerIDName(inp.ID(), inp.Name())
deets, err := generateAndRestoreDriveItems(
gc,
ctrl,
inp.ID(),
SecondaryUser,
strings.ToLower(SecondaryUser),

View File

@ -21,7 +21,7 @@ import (
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/credentials"
"github.com/alcionai/corso/src/pkg/services/m365/api"

View File

@ -17,7 +17,7 @@ import (
"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/filters"
"github.com/alcionai/corso/src/pkg/logger"

View File

@ -1,317 +0,0 @@
package exchange
import (
"context"
"encoding/json"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// MetadataFileNames produces the category-specific set of filenames used to
// store graph metadata such as delta tokens and folderID->path references.
// Email and Contacts track delta tokens in addition to previous paths; all
// other categories only track previous paths.
func MetadataFileNames(cat path.CategoryType) []string {
    if cat == path.EmailCategory || cat == path.ContactsCategory {
        return []string{graph.DeltaURLsFileName, graph.PreviousPathFileName}
    }
    return []string{graph.PreviousPathFileName}
}
// CatDeltaPaths groups the per-key delta/path metadata by backup category.
type CatDeltaPaths map[path.CategoryType]DeltaPaths

// DeltaPaths maps lookup keys (folderID->path references, per the metadata
// docs above) to their DeltaPath entries.
type DeltaPaths map[string]DeltaPath

// AddDelta records delta token d under key k, creating the entry if it
// does not yet exist.
func (dps DeltaPaths) AddDelta(k, d string) {
    dp, ok := dps[k]
    if !ok {
        dp = DeltaPath{}
    }
    dp.Delta = d
    dps[k] = dp
}

// AddPath records previous path p under key k, creating the entry if it
// does not yet exist.
func (dps DeltaPaths) AddPath(k, p string) {
    dp, ok := dps[k]
    if !ok {
        dp = DeltaPath{}
    }
    dp.Path = p
    dps[k] = dp
}

// DeltaPath pairs a delta token with the path recorded for the same key
// in the previous backup.
type DeltaPath struct {
    Delta string
    Path  string
}
// parseMetadataCollections produces a map of structs holding delta
// and path lookup maps.
//
// The bool return reports whether the parsed metadata is complete and
// trustworthy enough for the caller to base an incremental backup on it;
// false means the next backup should be a full one.  Item read errors do
// not fail the call: they return empty maps and false instead.
func parseMetadataCollections(
    ctx context.Context,
    colls []data.RestoreCollection,
) (CatDeltaPaths, bool, error) {
    // cdp stores metadata
    cdp := CatDeltaPaths{
        path.ContactsCategory: {},
        path.EmailCategory:    {},
        path.EventsCategory:   {},
    }
    // found tracks the metadata we've loaded, to make sure we don't
    // fetch overlapping copies.
    found := map[path.CategoryType]map[string]struct{}{
        path.ContactsCategory: {},
        path.EmailCategory:    {},
        path.EventsCategory:   {},
    }
    // errors from metadata items should not stop the backup,
    // but it should prevent us from using previous backups
    errs := fault.New(true)
    for _, coll := range colls {
        var (
            breakLoop bool
            items     = coll.Items(ctx, errs)
            category  = coll.FullPath().Category()
        )
        for {
            select {
            case <-ctx.Done():
                return nil, false, clues.Wrap(ctx.Err(), "parsing collection metadata").WithClues(ctx)
            case item, ok := <-items:
                if !ok || errs.Failure() != nil {
                    // channel drained, or an item read failed; stop
                    // consuming this collection.
                    breakLoop = true
                    break
                }
                var (
                    m    = map[string]string{}
                    cdps = cdp[category]
                )
                err := json.NewDecoder(item.ToReader()).Decode(&m)
                if err != nil {
                    return nil, false, clues.New("decoding metadata json").WithClues(ctx)
                }
                // each metadata file may appear at most once per category;
                // duplicates indicate corrupt or conflicting metadata.
                switch item.UUID() {
                case graph.PreviousPathFileName:
                    if _, ok := found[category]["path"]; ok {
                        return nil, false, clues.Wrap(clues.New(category.String()), "multiple versions of path metadata").WithClues(ctx)
                    }
                    for k, p := range m {
                        cdps.AddPath(k, p)
                    }
                    found[category]["path"] = struct{}{}
                case graph.DeltaURLsFileName:
                    if _, ok := found[category]["delta"]; ok {
                        return nil, false, clues.Wrap(clues.New(category.String()), "multiple versions of delta metadata").WithClues(ctx)
                    }
                    for k, d := range m {
                        cdps.AddDelta(k, d)
                    }
                    found[category]["delta"] = struct{}{}
                }
                cdp[category] = cdps
            }
            if breakLoop {
                break
            }
        }
    }
    if errs.Failure() != nil {
        logger.CtxErr(ctx, errs.Failure()).Info("reading metadata collection items")
        // on read failure, hand back empty-but-valid maps and flag the
        // previous backup as unusable rather than erroring out.
        return CatDeltaPaths{
            path.ContactsCategory: {},
            path.EmailCategory:    {},
            path.EventsCategory:   {},
        }, false, nil
    }
    // Remove any entries that contain a path or a delta, but not both.
    // That metadata is considered incomplete, and needs to incur a
    // complete backup on the next run.
    for _, dps := range cdp {
        for k, dp := range dps {
            if len(dp.Path) == 0 {
                delete(dps, k)
            }
        }
    }
    return cdp, true, nil
}
// DataCollections returns a DataCollection which the caller can
// use to read mailbox data out for the specified user.
//
// metadata holds the previous backup's delta/path collections (may be
// empty for a full backup).  The returned bool reports whether that
// metadata was usable for incremental work.  The *prefixmatcher return
// is always nil for exchange.
func DataCollections(
    ctx context.Context,
    ac api.Client,
    selector selectors.Selector,
    tenantID string,
    user idname.Provider,
    metadata []data.RestoreCollection,
    su support.StatusUpdater,
    ctrlOpts control.Options,
    errs *fault.Bus,
) ([]data.BackupCollection, *prefixmatcher.StringSetMatcher, bool, error) {
    eb, err := selector.ToExchangeBackup()
    if err != nil {
        return nil, nil, false, clues.Wrap(err, "exchange dataCollection selector").WithClues(ctx)
    }
    var (
        collections = []data.BackupCollection{}
        el          = errs.Local()
        categories  = map[path.CategoryType]struct{}{}
        handlers    = BackupHandlers(ac)
    )
    // Turn on concurrency limiter middleware for exchange backups
    // unless explicitly disabled through DisableConcurrencyLimiterFN cli flag
    if !ctrlOpts.ToggleFeatures.DisableConcurrencyLimiter {
        graph.InitializeConcurrencyLimiter(ctrlOpts.Parallelism.ItemFetch)
    }
    cdps, canUsePreviousBackup, err := parseMetadataCollections(ctx, metadata)
    if err != nil {
        return nil, nil, false, err
    }
    // build one set of collections per selector scope; scope failures are
    // recoverable and do not abort the remaining scopes.
    for _, scope := range eb.Scopes() {
        if el.Failure() != nil {
            break
        }
        dcs, err := createCollections(
            ctx,
            handlers,
            tenantID,
            user,
            scope,
            cdps[scope.Category().PathType()],
            ctrlOpts,
            su,
            errs)
        if err != nil {
            el.AddRecoverable(err)
            continue
        }
        categories[scope.Category().PathType()] = struct{}{}
        collections = append(collections, dcs...)
    }
    // only add base (metadata) collections when some data collections
    // were actually produced.
    if len(collections) > 0 {
        baseCols, err := graph.BaseCollections(
            ctx,
            collections,
            tenantID,
            user.ID(),
            path.ExchangeService,
            categories,
            su,
            errs)
        if err != nil {
            return nil, nil, false, err
        }
        collections = append(collections, baseCols...)
    }
    return collections, nil, canUsePreviousBackup, el.Failure()
}
// createCollections - utility function that retrieves M365
// IDs through Microsoft Graph API. The selectors.ExchangeScope
// determines the type of collections that are retrieved.
//
// dps supplies the previous backup's delta/path state for this scope's
// category; an empty DeltaPaths results in a full enumeration.
func createCollections(
    ctx context.Context,
    handlers map[path.CategoryType]backupHandler,
    tenantID string,
    user idname.Provider,
    scope selectors.ExchangeScope,
    dps DeltaPaths,
    ctrlOpts control.Options,
    su support.StatusUpdater,
    errs *fault.Bus,
) ([]data.BackupCollection, error) {
    ctx = clues.Add(ctx, "category", scope.Category().PathType())
    var (
        allCollections = make([]data.BackupCollection, 0)
        category       = scope.Category().PathType()
        qp             = graph.QueryParams{
            Category:      category,
            ResourceOwner: user,
            TenantID:      tenantID,
        }
    )
    // each category (mail, contacts, events) is served by its own handler.
    handler, ok := handlers[category]
    if !ok {
        return nil, clues.New("unsupported backup category type").WithClues(ctx)
    }
    foldersComplete := observe.MessageWithCompletion(
        ctx,
        observe.Bulletf("%s", qp.Category))
    defer close(foldersComplete)
    // warm the container (folder) cache before filtering and filling.
    rootFolder, cc := handler.NewContainerCache(user.ID())
    if err := cc.Populate(ctx, errs, rootFolder); err != nil {
        return nil, clues.Wrap(err, "populating container cache")
    }
    collections, err := filterContainersAndFillCollections(
        ctx,
        qp,
        handler,
        su,
        cc,
        scope,
        dps,
        ctrlOpts,
        errs)
    if err != nil {
        return nil, clues.Wrap(err, "filling collections")
    }
    // signal the progress observer that folder enumeration completed.
    foldersComplete <- struct{}{}
    for _, coll := range collections {
        allCollections = append(allCollections, coll)
    }
    return allCollections, nil
}

View File

@ -1,761 +0,0 @@
package exchange
import (
"bytes"
"context"
"sync"
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// ---------------------------------------------------------------------------
// Unit tests
// ---------------------------------------------------------------------------
// DataCollectionsUnitSuite hosts unit tests that require no live
// m365 connection.
type DataCollectionsUnitSuite struct {
    tester.Suite
}
func TestDataCollectionsUnitSuite(t *testing.T) {
suite.Run(t, &DataCollectionsUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestParseMetadataCollections feeds parseMetadataCollections various
// combinations of delta-token and previous-path metadata files and checks
// the parsed results, error expectations, and whether the metadata is
// deemed usable for incremental backups.
func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
    // fileValues pairs a metadata file name with the single value stored
    // under the "key" entry of that file.
    type fileValues struct {
        fileName string
        value    string
    }
    table := []struct {
        name                 string
        data                 []fileValues
        expect               map[string]DeltaPath
        canUsePreviousBackup bool
        expectError          assert.ErrorAssertionFunc
    }{
        {
            // a delta without a matching path is incomplete and is dropped.
            name: "delta urls only",
            data: []fileValues{
                {graph.DeltaURLsFileName, "delta-link"},
            },
            expect:               map[string]DeltaPath{},
            canUsePreviousBackup: true,
            expectError:          assert.NoError,
        },
        {
            // duplicate delta files are treated as corrupt metadata.
            name: "multiple delta urls",
            data: []fileValues{
                {graph.DeltaURLsFileName, "delta-link"},
                {graph.DeltaURLsFileName, "delta-link-2"},
            },
            canUsePreviousBackup: false,
            expectError:          assert.Error,
        },
        {
            name: "previous path only",
            data: []fileValues{
                {graph.PreviousPathFileName, "prev-path"},
            },
            expect: map[string]DeltaPath{
                "key": {
                    Delta: "delta-link",
                    Path:  "prev-path",
                },
            },
            canUsePreviousBackup: true,
            expectError:          assert.NoError,
        },
        {
            // duplicate path files are treated as corrupt metadata.
            name: "multiple previous paths",
            data: []fileValues{
                {graph.PreviousPathFileName, "prev-path"},
                {graph.PreviousPathFileName, "prev-path-2"},
            },
            canUsePreviousBackup: false,
            expectError:          assert.Error,
        },
        {
            name: "delta urls and previous paths",
            data: []fileValues{
                {graph.DeltaURLsFileName, "delta-link"},
                {graph.PreviousPathFileName, "prev-path"},
            },
            expect: map[string]DeltaPath{
                "key": {
                    Delta: "delta-link",
                    Path:  "prev-path",
                },
            },
            canUsePreviousBackup: true,
            expectError:          assert.NoError,
        },
        {
            // an empty path invalidates the pair, dropping the entry.
            name: "delta urls and empty previous paths",
            data: []fileValues{
                {graph.DeltaURLsFileName, "delta-link"},
                {graph.PreviousPathFileName, ""},
            },
            expect:               map[string]DeltaPath{},
            canUsePreviousBackup: true,
            expectError:          assert.NoError,
        },
        {
            name: "empty delta urls and previous paths",
            data: []fileValues{
                {graph.DeltaURLsFileName, ""},
                {graph.PreviousPathFileName, "prev-path"},
            },
            expect: map[string]DeltaPath{
                "key": {
                    Delta: "delta-link",
                    Path:  "prev-path",
                },
            },
            canUsePreviousBackup: true,
            expectError:          assert.NoError,
        },
        {
            // deltas must survive json round-tripping of special characters.
            name: "delta urls with special chars",
            data: []fileValues{
                {graph.DeltaURLsFileName, "`!@#$%^&*()_[]{}/\"\\"},
                {graph.PreviousPathFileName, "prev-path"},
            },
            expect: map[string]DeltaPath{
                "key": {
                    Delta: "`!@#$%^&*()_[]{}/\"\\",
                    Path:  "prev-path",
                },
            },
            canUsePreviousBackup: true,
            expectError:          assert.NoError,
        },
        {
            name: "delta urls with escaped chars",
            data: []fileValues{
                {graph.DeltaURLsFileName, `\n\r\t\b\f\v\0\\`},
                {graph.PreviousPathFileName, "prev-path"},
            },
            expect: map[string]DeltaPath{
                "key": {
                    Delta: "\\n\\r\\t\\b\\f\\v\\0\\\\",
                    Path:  "prev-path",
                },
            },
            canUsePreviousBackup: true,
            expectError:          assert.NoError,
        },
        {
            name: "delta urls with newline char runes",
            data: []fileValues{
                // rune(92) = \, rune(110) = n. Ensuring it's not possible to
                // error in serializing/deserializing and produce a single newline
                // character from those two runes.
                {graph.DeltaURLsFileName, string([]rune{rune(92), rune(110)})},
                {graph.PreviousPathFileName, "prev-path"},
            },
            expect: map[string]DeltaPath{
                "key": {
                    Delta: "\\n",
                    Path:  "prev-path",
                },
            },
            canUsePreviousBackup: true,
            expectError:          assert.NoError,
        },
    }
    for _, test := range table {
        suite.Run(test.name, func() {
            t := suite.T()
            ctx, flush := tester.NewContext(t)
            defer flush()
            // serialize the fixture values into a real metadata collection.
            entries := []graph.MetadataCollectionEntry{}
            for _, d := range test.data {
                entries = append(
                    entries,
                    graph.NewMetadataEntry(d.fileName, map[string]string{"key": d.value}))
            }
            coll, err := graph.MakeMetadataCollection(
                "t", "u",
                path.ExchangeService,
                path.EmailCategory,
                entries,
                func(cos *support.ConnectorOperationStatus) {},
            )
            require.NoError(t, err, clues.ToCore(err))
            cdps, canUsePreviousBackup, err := parseMetadataCollections(ctx, []data.RestoreCollection{
                data.NoFetchRestoreCollection{Collection: coll},
            })
            test.expectError(t, err, clues.ToCore(err))
            assert.Equal(t, test.canUsePreviousBackup, canUsePreviousBackup, "can use previous backup")
            emails := cdps[path.EmailCategory]
            assert.Len(t, emails, len(test.expect))
            // NOTE(review): this loop compares emails against itself, so the
            // Delta/Path assertions can never fail; it likely intended to
            // compare against test.expect[k] — confirm before relying on it.
            for k, v := range emails {
                assert.Equal(t, v.Delta, emails[k].Delta, "delta")
                assert.Equal(t, v.Path, emails[k].Path, "path")
            }
        })
    }
}
// failingColl is a data.RestoreCollection test double whose Items call
// always records a recoverable error and yields no items.
type failingColl struct {
    t *testing.T
}

// Items registers assert.AnError on errs and returns an already-closed
// channel, simulating a collection whose items cannot be read.
func (f failingColl) Items(ctx context.Context, errs *fault.Bus) <-chan data.Stream {
    ic := make(chan data.Stream)
    defer close(ic)
    errs.AddRecoverable(assert.AnError)
    return ic
}

// FullPath returns a fixed exchange email inbox path for the fake collection.
func (f failingColl) FullPath() path.Path {
    tmp, err := path.Build(
        "tenant",
        "user",
        path.ExchangeService,
        path.EmailCategory,
        false,
        "inbox")
    require.NoError(f.t, err, clues.ToCore(err))
    return tmp
}

// FetchItemByName is a stub; the code under test makes no fetch calls.
func (f failingColl) FetchItemByName(context.Context, string) (data.Stream, error) {
    // no fetch calls will be made
    return nil, nil
}
// TestParseMetadataCollections_ReadFailure ensures that item read errors
// do not fail the call outright, but do force canUsePreviousBackup to
// false so the next backup runs in full.
func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections_ReadFailure() {
    t := suite.T()
    ctx, flush := tester.NewContext(t)
    defer flush()
    colls := []data.RestoreCollection{failingColl{t}}
    _, canUsePreviousBackup, err := parseMetadataCollections(ctx, colls)
    require.NoError(t, err)
    require.False(t, canUsePreviousBackup)
}
// ---------------------------------------------------------------------------
// Integration tests
// ---------------------------------------------------------------------------
// newStatusUpdater returns a status callback that marks one unit of work
// complete on wg each time it is invoked; the status payload is ignored.
func newStatusUpdater(t *testing.T, wg *sync.WaitGroup) func(status *support.ConnectorOperationStatus) {
    return func(status *support.ConnectorOperationStatus) {
        defer wg.Done()
    }
}
// DataCollectionsIntegrationSuite hosts tests that exercise collection
// creation against a live m365 tenant.
type DataCollectionsIntegrationSuite struct {
    tester.Suite
    // user and site identify the m365 resources under test.
    user string
    site string
    // tenantID is the Azure tenant resolved from the account credentials.
    tenantID string
    // ac is the graph api client shared by all tests in the suite.
    ac api.Client
}
// TestDataCollectionsIntegrationSuite runs the integration suite; it is
// gated on the m365 account credential env vars being populated.
func TestDataCollectionsIntegrationSuite(t *testing.T) {
    suite.Run(t, &DataCollectionsIntegrationSuite{
        Suite: tester.NewIntegrationSuite(
            t,
            [][]string{tester.M365AcctCredEnvs},
        ),
    })
}
// SetupSuite resolves the test user, site, tenant, and api client once
// for all tests in the suite.
func (suite *DataCollectionsIntegrationSuite) SetupSuite() {
    suite.user = tester.M365UserID(suite.T())
    suite.site = tester.M365SiteID(suite.T())
    acct := tester.NewM365Account(suite.T())
    creds, err := acct.M365Config()
    require.NoError(suite.T(), err, clues.ToCore(err))
    suite.ac, err = api.NewClient(creds)
    require.NoError(suite.T(), err, clues.ToCore(err))
    suite.tenantID = creds.AzureTenantID
    tester.LogTimeOfTest(suite.T())
}
// TestMailFetch runs createCollections against the default mail folder,
// with delta queries both enabled and disabled, and verifies that every
// expected folder shows up in the resulting collections.
func (suite *DataCollectionsIntegrationSuite) TestMailFetch() {
    var (
        userID   = tester.M365UserID(suite.T())
        users    = []string{userID}
        handlers = BackupHandlers(suite.ac)
    )
    tests := []struct {
        name  string
        scope selectors.ExchangeScope
        // folderNames lists the folders expected in the results; found
        // folders are deleted from it, so it must end up empty.
        folderNames         map[string]struct{}
        canMakeDeltaQueries bool
    }{
        {
            name: "Folder Iterative Check Mail",
            scope: selectors.NewExchangeBackup(users).MailFolders(
                []string{DefaultMailFolder},
                selectors.PrefixMatch(),
            )[0],
            folderNames: map[string]struct{}{
                DefaultMailFolder: {},
            },
            canMakeDeltaQueries: true,
        },
        {
            name: "Folder Iterative Check Mail Non-Delta",
            scope: selectors.NewExchangeBackup(users).MailFolders(
                []string{DefaultMailFolder},
                selectors.PrefixMatch(),
            )[0],
            folderNames: map[string]struct{}{
                DefaultMailFolder: {},
            },
            canMakeDeltaQueries: false,
        },
    }
    for _, test := range tests {
        suite.Run(test.name, func() {
            t := suite.T()
            ctx, flush := tester.NewContext(t)
            defer flush()
            ctrlOpts := control.Defaults()
            ctrlOpts.ToggleFeatures.DisableDelta = !test.canMakeDeltaQueries
            collections, err := createCollections(
                ctx,
                handlers,
                suite.tenantID,
                inMock.NewProvider(userID, userID),
                test.scope,
                DeltaPaths{},
                ctrlOpts,
                func(status *support.ConnectorOperationStatus) {},
                fault.New(true))
            require.NoError(t, err, clues.ToCore(err))
            for _, c := range collections {
                // metadata collections carry no user-visible folders.
                if c.FullPath().Service() == path.ExchangeMetadataService {
                    continue
                }
                require.NotEmpty(t, c.FullPath().Folder(false))
                // TODO(ashmrtn): Remove when LocationPath is made part of BackupCollection
                // interface.
                if !assert.Implements(t, (*data.LocationPather)(nil), c) {
                    continue
                }
                loc := c.(data.LocationPather).LocationPath().String()
                require.NotEmpty(t, loc)
                // cross found folders off the expected set.
                delete(test.folderNames, loc)
            }
            assert.Empty(t, test.folderNames)
        })
    }
}
// TestDelta performs a full backup for each category, parses the metadata
// it produced, then runs a second, delta-based backup using that metadata
// as a sanity check that incremental collection creation doesn't break.
func (suite *DataCollectionsIntegrationSuite) TestDelta() {
    var (
        userID   = tester.M365UserID(suite.T())
        users    = []string{userID}
        handlers = BackupHandlers(suite.ac)
    )
    tests := []struct {
        name  string
        scope selectors.ExchangeScope
    }{
        {
            name: "Mail",
            scope: selectors.NewExchangeBackup(users).MailFolders(
                []string{DefaultMailFolder},
                selectors.PrefixMatch(),
            )[0],
        },
        {
            name: "Contacts",
            scope: selectors.NewExchangeBackup(users).ContactFolders(
                []string{DefaultContactFolder},
                selectors.PrefixMatch(),
            )[0],
        },
        {
            name: "Events",
            scope: selectors.NewExchangeBackup(users).EventCalendars(
                []string{DefaultCalendar},
                selectors.PrefixMatch(),
            )[0],
        },
    }
    for _, test := range tests {
        suite.Run(test.name, func() {
            t := suite.T()
            ctx, flush := tester.NewContext(t)
            defer flush()
            // get collections without providing any delta history (ie: full backup)
            collections, err := createCollections(
                ctx,
                handlers,
                suite.tenantID,
                inMock.NewProvider(userID, userID),
                test.scope,
                DeltaPaths{},
                control.Defaults(),
                func(status *support.ConnectorOperationStatus) {},
                fault.New(true))
            require.NoError(t, err, clues.ToCore(err))
            assert.Less(t, 1, len(collections), "retrieved metadata and data collections")
            // locate the metadata collection produced by the full backup.
            var metadata data.BackupCollection
            for _, coll := range collections {
                if coll.FullPath().Service() == path.ExchangeMetadataService {
                    metadata = coll
                }
            }
            require.NotNil(t, metadata, "collections contains a metadata collection")
            cdps, canUsePreviousBackup, err := parseMetadataCollections(ctx, []data.RestoreCollection{
                data.NoFetchRestoreCollection{Collection: metadata},
            })
            require.NoError(t, err, clues.ToCore(err))
            assert.True(t, canUsePreviousBackup, "can use previous backup")
            dps := cdps[test.scope.Category().PathType()]
            // now do another backup with the previous delta tokens,
            // which should only contain the difference.
            collections, err = createCollections(
                ctx,
                handlers,
                suite.tenantID,
                inMock.NewProvider(userID, userID),
                test.scope,
                dps,
                control.Defaults(),
                func(status *support.ConnectorOperationStatus) {},
                fault.New(true))
            require.NoError(t, err, clues.ToCore(err))
            // TODO(keepers): this isn't a very useful test at the moment. It needs to
            // investigate the items in the original and delta collections to at least
            // assert some minimum assumptions, such as "deltas should retrieve fewer items".
            // Delta usage is commented out at the moment, anyway. So this is currently
            // a sanity check that the minimum behavior won't break.
            for _, coll := range collections {
                if coll.FullPath().Service() != path.ExchangeMetadataService {
                    ec, ok := coll.(*Collection)
                    require.True(t, ok, "collection is *Collection")
                    assert.NotNil(t, ec)
                }
            }
        })
    }
}
// TestMailSerializationRegression verifies that every mail item stored in the
// test account can be downloaded into raw bytes and deserialized back into an
// M365 message object.
func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	var (
		wg       sync.WaitGroup
		users    = []string{suite.user}
		handlers = BackupHandlers(suite.ac)
	)

	sel := selectors.NewExchangeBackup(users)
	sel.Include(sel.MailFolders([]string{DefaultMailFolder}, selectors.PrefixMatch()))

	// full backup: no delta history is provided.
	collections, err := createCollections(
		ctx,
		handlers,
		suite.tenantID,
		inMock.NewProvider(suite.user, suite.user),
		sel.Scopes()[0],
		DeltaPaths{},
		control.Defaults(),
		newStatusUpdater(t, &wg),
		fault.New(true))
	require.NoError(t, err, clues.ToCore(err))

	wg.Add(len(collections))

	for _, edc := range collections {
		suite.Run(edc.FullPath().String(), func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			// metadata payloads (delta tokens, folder paths) are not mail
			// items and are excluded from the deserialization check.
			skipDeserialize := edc.FullPath().Service() == path.ExchangeMetadataService

			// every item must yield a non-empty byte payload; non-metadata
			// payloads must additionally round-trip into a Messageable.
			for item := range edc.Items(ctx, fault.New(true)) {
				buf := new(bytes.Buffer)

				n, err := buf.ReadFrom(item.ToReader())
				assert.NoError(t, err, clues.ToCore(err))
				assert.NotZero(t, n)

				if skipDeserialize {
					continue
				}

				msg, err := api.BytesToMessageable(buf.Bytes())
				assert.NotNil(t, msg)
				assert.NoError(t, err, clues.ToCore(err))
			}
		})
	}

	wg.Wait()
}
// TestContactSerializationRegression verifies ability to query contact items
// and to store contacts within a Collection. Downloaded contacts are run
// through a regression test to ensure that downloaded items can be uploaded.
func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression() {
	var (
		users    = []string{suite.user}
		handlers = BackupHandlers(suite.ac)
	)

	tests := []struct {
		name  string
		scope selectors.ExchangeScope
	}{
		{
			name: "Default Contact Folder",
			scope: selectors.NewExchangeBackup(users).ContactFolders(
				[]string{DefaultContactFolder},
				selectors.PrefixMatch())[0],
		},
	}

	for _, test := range tests {
		suite.Run(test.name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			var wg sync.WaitGroup

			// full backup: no delta history is provided.
			edcs, err := createCollections(
				ctx,
				handlers,
				suite.tenantID,
				inMock.NewProvider(suite.user, suite.user),
				test.scope,
				DeltaPaths{},
				control.Defaults(),
				newStatusUpdater(t, &wg),
				fault.New(true))
			require.NoError(t, err, clues.ToCore(err))

			wg.Add(len(edcs))

			// expect one data collection, plus an optional metadata collection.
			require.GreaterOrEqual(t, len(edcs), 1, "expected 1 <= num collections <= 2")
			require.GreaterOrEqual(t, 2, len(edcs), "expected 1 <= num collections <= 2")

			for _, edc := range edcs {
				isMetadata := edc.FullPath().Service() == path.ExchangeMetadataService
				count := 0

				for stream := range edc.Items(ctx, fault.New(true)) {
					buf := &bytes.Buffer{}

					read, err := buf.ReadFrom(stream.ToReader())
					assert.NoError(t, err, clues.ToCore(err))
					assert.NotZero(t, read)

					// metadata payloads are not contacts; skip deserialization.
					if isMetadata {
						continue
					}

					contact, err := api.BytesToContactable(buf.Bytes())
					assert.NotNil(t, contact)
					assert.NoError(t, err, "converting contact bytes: "+buf.String(), clues.ToCore(err))
					count++
				}

				if isMetadata {
					continue
				}

				// TODO(ashmrtn): Remove when LocationPath is made part of
				// BackupCollection interface.
				if !assert.Implements(t, (*data.LocationPather)(nil), edc) {
					continue
				}

				// fix: testify convention is (t, expected, actual); the
				// arguments were previously swapped, which produces misleading
				// "expected/actual" labels on failure.
				assert.Equal(
					t,
					DefaultContactFolder,
					edc.(data.LocationPather).LocationPath().String())
				assert.NotZero(t, count)
			}

			wg.Wait()
		})
	}
}
// TestEventsSerializationRegression ensures functionality of createCollections
// to be able to successfully query, download and restore event objects
func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	var (
		users    = []string{suite.user}
		handlers = BackupHandlers(suite.ac)
		calID    string
		bdayID   string
	)

	// resolve container IDs for the default and birthday calendars so the
	// expected folder values below use real IDs, not display names.
	fn := func(gcf graph.CachedContainer) error {
		if ptr.Val(gcf.GetDisplayName()) == DefaultCalendar {
			calID = ptr.Val(gcf.GetId())
		}

		if ptr.Val(gcf.GetDisplayName()) == "Birthdays" {
			bdayID = ptr.Val(gcf.GetId())
		}

		return nil
	}

	err := suite.ac.Events().EnumerateContainers(ctx, suite.user, DefaultCalendar, fn, fault.New(true))
	require.NoError(t, err, clues.ToCore(err))

	tests := []struct {
		name, expected string
		scope          selectors.ExchangeScope
	}{
		{
			name:     "Default Event Calendar",
			expected: calID,
			scope: selectors.NewExchangeBackup(users).EventCalendars(
				[]string{DefaultCalendar},
				selectors.PrefixMatch(),
			)[0],
		},
		{
			name:     "Birthday Calendar",
			expected: bdayID,
			scope: selectors.NewExchangeBackup(users).EventCalendars(
				[]string{"Birthdays"},
				selectors.PrefixMatch(),
			)[0],
		},
	}

	for _, test := range tests {
		suite.Run(test.name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			var wg sync.WaitGroup

			// full backup: no delta history is provided.
			collections, err := createCollections(
				ctx,
				handlers,
				suite.tenantID,
				inMock.NewProvider(suite.user, suite.user),
				test.scope,
				DeltaPaths{},
				control.Defaults(),
				newStatusUpdater(t, &wg),
				fault.New(true))
			require.NoError(t, err, clues.ToCore(err))
			require.Len(t, collections, 2)

			wg.Add(len(collections))

			for _, edc := range collections {
				// fix: isMetadata was previously set to true for the
				// NON-metadata collection, which inverted the skip below —
				// real event payloads were never deserialized, and metadata
				// payloads were parsed as events. Match the sibling tests:
				// metadata means the Exchange metadata service.
				isMetadata := edc.FullPath().Service() == path.ExchangeMetadataService
				if isMetadata {
					assert.Equal(t, "", edc.FullPath().Folder(false))
				} else {
					assert.Equal(t, test.expected, edc.FullPath().Folder(false))
				}

				for item := range edc.Items(ctx, fault.New(true)) {
					buf := &bytes.Buffer{}

					read, err := buf.ReadFrom(item.ToReader())
					assert.NoError(t, err, clues.ToCore(err))
					assert.NotZero(t, read)

					// metadata payloads are not events; skip deserialization.
					if isMetadata {
						continue
					}

					event, err := api.BytesToEventable(buf.Bytes())
					assert.NotNil(t, event)
					assert.NoError(t, err, "creating event from bytes: "+buf.String(), clues.ToCore(err))
				}
			}

			wg.Wait()
		})
	}
}

View File

@ -1,129 +0,0 @@
package exchange
import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type CacheResolverSuite struct {
tester.Suite
credentials account.M365Config
}
func TestCacheResolverIntegrationSuite(t *testing.T) {
suite.Run(t, &CacheResolverSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tester.M365AcctCredEnvs},
),
})
}
func (suite *CacheResolverSuite) SetupSuite() {
t := suite.T()
a := tester.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.credentials = m365
}
func (suite *CacheResolverSuite) TestPopulate() {
ac, err := api.NewClient(suite.credentials)
require.NoError(suite.T(), err, clues.ToCore(err))
eventFunc := func(t *testing.T) graph.ContainerResolver {
return &eventCalendarCache{
userID: tester.M365UserID(t),
enumer: ac.Events(),
getter: ac.Events(),
}
}
contactFunc := func(t *testing.T) graph.ContainerResolver {
return &contactFolderCache{
userID: tester.M365UserID(t),
enumer: ac.Contacts(),
getter: ac.Contacts(),
}
}
tests := []struct {
name, folderInCache, root, basePath string
resolverFunc func(t *testing.T) graph.ContainerResolver
canFind assert.BoolAssertionFunc
}{
{
name: "Default Event Cache",
// Fine as long as this isn't running against a migrated Exchange server.
folderInCache: DefaultCalendar,
root: DefaultCalendar,
basePath: DefaultCalendar,
resolverFunc: eventFunc,
canFind: assert.True,
},
{
name: "Default Event Folder Hidden",
folderInCache: DefaultContactFolder,
root: DefaultCalendar,
canFind: assert.False,
resolverFunc: eventFunc,
},
{
name: "Name Not in Cache",
folderInCache: "testFooBarWhoBar",
root: DefaultCalendar,
canFind: assert.False,
resolverFunc: eventFunc,
},
{
name: "Default Contact Cache",
folderInCache: DefaultContactFolder,
root: DefaultContactFolder,
basePath: DefaultContactFolder,
canFind: assert.True,
resolverFunc: contactFunc,
},
{
name: "Default Contact Hidden",
folderInCache: DefaultContactFolder,
root: DefaultContactFolder,
canFind: assert.False,
resolverFunc: contactFunc,
},
{
name: "Name Not in Cache",
folderInCache: "testFooBarWhoBar",
root: DefaultContactFolder,
canFind: assert.False,
resolverFunc: contactFunc,
},
}
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
resolver := test.resolverFunc(t)
err := resolver.Populate(ctx, fault.New(true), test.root, test.basePath)
require.NoError(t, err, clues.ToCore(err))
_, isFound := resolver.LocationInCache(test.folderInCache)
test.canFind(t, isFound, "folder path", test.folderInCache)
})
}
}

View File

@ -1,47 +0,0 @@
package exchange
import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type ExchangeIteratorSuite struct {
tester.Suite
}
func TestExchangeIteratorSuite(t *testing.T) {
suite.Run(t, &ExchangeIteratorSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *ExchangeIteratorSuite) TestDisplayable() {
t := suite.T()
bytes := exchMock.ContactBytes("Displayable")
contact, err := api.BytesToContactable(bytes)
require.NoError(t, err, clues.ToCore(err))
aDisplayable, ok := contact.(graph.Displayable)
assert.True(t, ok)
assert.NotNil(t, aDisplayable.GetId())
assert.NotNil(t, aDisplayable.GetDisplayName())
}
func (suite *ExchangeIteratorSuite) TestDescendable() {
t := suite.T()
bytes := exchMock.MessageBytes("Descendable")
message, err := api.BytesToMessageable(bytes)
require.NoError(t, err, clues.ToCore(err))
aDescendable, ok := message.(graph.Descendable)
assert.True(t, ok)
assert.NotNil(t, aDescendable.GetId())
assert.NotNil(t, aDescendable.GetParentFolderId())
}

View File

@ -13,8 +13,8 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
"github.com/alcionai/corso/src/internal/data"
exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"

View File

@ -12,8 +12,8 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/exchange/mock"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"

View File

@ -22,10 +22,10 @@ import (
"github.com/kopia/kopia/snapshot/snapshotfs"
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/graph/metadata"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/graph/metadata"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"

View File

@ -20,8 +20,8 @@ import (
"github.com/stretchr/testify/suite"
pmMock "github.com/alcionai/corso/src/internal/common/prefixmatcher/mock"
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
"github.com/alcionai/corso/src/internal/data"
exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault"

View File

@ -19,10 +19,10 @@ import (
"golang.org/x/exp/maps"
pmMock "github.com/alcionai/corso/src/internal/common/prefixmatcher/mock"
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/data/mock"
exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control/repository"

View File

@ -1,4 +1,4 @@
package connector
package m365
import (
"context"
@ -8,15 +8,13 @@ import (
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/connector/discovery"
"github.com/alcionai/corso/src/internal/connector/exchange"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive"
"github.com/alcionai/corso/src/internal/connector/sharepoint"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/internal/m365/discovery"
"github.com/alcionai/corso/src/internal/m365/exchange"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/onedrive"
"github.com/alcionai/corso/src/internal/m365/sharepoint"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/filters"
@ -34,7 +32,7 @@ import (
// The metadata field can include things like delta tokens or the previous backup's
// folder hierarchy. The absence of metadata causes the collection creation to ignore
// prior history (ie, incrementals) and run a full backup.
func (gc *GraphConnector) ProduceBackupCollections(
func (ctrl *Controller) ProduceBackupCollections(
ctx context.Context,
owner idname.Provider,
sels selectors.Selector,
@ -45,7 +43,7 @@ func (gc *GraphConnector) ProduceBackupCollections(
) ([]data.BackupCollection, prefixmatcher.StringSetReader, bool, error) {
ctx, end := diagnostics.Span(
ctx,
"gc:produceBackupCollections",
"m365:produceBackupCollections",
diagnostics.Index("service", sels.Service.String()))
defer end()
@ -55,14 +53,14 @@ func (gc *GraphConnector) ProduceBackupCollections(
ctrlOpts.Parallelism.ItemFetch = graph.Parallelism(sels.PathService()).
ItemOverride(ctx, ctrlOpts.Parallelism.ItemFetch)
err := verifyBackupInputs(sels, gc.IDNameLookup.IDs())
err := verifyBackupInputs(sels, ctrl.IDNameLookup.IDs())
if err != nil {
return nil, nil, false, clues.Stack(err).WithClues(ctx)
}
serviceEnabled, canMakeDeltaQueries, err := checkServiceEnabled(
ctx,
gc.AC.Users(),
ctrl.AC.Users(),
path.ServiceType(sels.Service),
sels.DiscreteOwner)
if err != nil {
@ -87,14 +85,14 @@ func (gc *GraphConnector) ProduceBackupCollections(
switch sels.Service {
case selectors.ServiceExchange:
colls, ssmb, canUsePreviousBackup, err = exchange.DataCollections(
colls, ssmb, canUsePreviousBackup, err = exchange.ProduceBackupCollections(
ctx,
gc.AC,
ctrl.AC,
sels,
gc.credentials.AzureTenantID,
ctrl.credentials.AzureTenantID,
owner,
metadata,
gc.UpdateStatus,
ctrl.UpdateStatus,
ctrlOpts,
errs)
if err != nil {
@ -102,15 +100,15 @@ func (gc *GraphConnector) ProduceBackupCollections(
}
case selectors.ServiceOneDrive:
colls, ssmb, canUsePreviousBackup, err = onedrive.DataCollections(
colls, ssmb, canUsePreviousBackup, err = onedrive.ProduceBackupCollections(
ctx,
gc.AC,
ctrl.AC,
sels,
owner,
metadata,
lastBackupVersion,
gc.credentials.AzureTenantID,
gc.UpdateStatus,
ctrl.credentials.AzureTenantID,
ctrl.UpdateStatus,
ctrlOpts,
errs)
if err != nil {
@ -118,14 +116,14 @@ func (gc *GraphConnector) ProduceBackupCollections(
}
case selectors.ServiceSharePoint:
colls, ssmb, canUsePreviousBackup, err = sharepoint.DataCollections(
colls, ssmb, canUsePreviousBackup, err = sharepoint.ProduceBackupCollections(
ctx,
gc.AC,
ctrl.AC,
sels,
owner,
metadata,
gc.credentials,
gc,
ctrl.credentials,
ctrl,
ctrlOpts,
errs)
if err != nil {
@ -144,7 +142,7 @@ func (gc *GraphConnector) ProduceBackupCollections(
// break the process state, putting us into deadlock or
// panics.
if c.State() != data.DeletedState {
gc.incrementAwaitingMessages()
ctrl.incrementAwaitingMessages()
}
}
@ -154,7 +152,7 @@ func (gc *GraphConnector) ProduceBackupCollections(
// IsBackupRunnable verifies that the users provided has the services enabled and
// data can be backed up. The canMakeDeltaQueries provides info if the mailbox is
// full and delta queries can be made on it.
func (gc *GraphConnector) IsBackupRunnable(
func (ctrl *Controller) IsBackupRunnable(
ctx context.Context,
service path.ServiceType,
resourceOwner string,
@ -164,7 +162,7 @@ func (gc *GraphConnector) IsBackupRunnable(
return true, nil
}
info, err := gc.AC.Users().GetInfo(ctx, resourceOwner)
info, err := ctrl.AC.Users().GetInfo(ctx, resourceOwner)
if err != nil {
return false, err
}
@ -225,59 +223,3 @@ func checkServiceEnabled(
return true, canMakeDeltaQueries, nil
}
// ConsumeRestoreCollections restores data from the specified collections
// into M365 using the GraphAPI.
// SideEffect: gc.status is updated at the completion of operation
func (gc *GraphConnector) ConsumeRestoreCollections(
ctx context.Context,
backupVersion int,
sels selectors.Selector,
restoreCfg control.RestoreConfig,
opts control.Options,
dcs []data.RestoreCollection,
errs *fault.Bus,
) (*details.Details, error) {
ctx, end := diagnostics.Span(ctx, "connector:restore")
defer end()
ctx = graph.BindRateLimiterConfig(ctx, graph.LimiterCfg{Service: sels.PathService()})
var (
status *support.ConnectorOperationStatus
deets = &details.Builder{}
err error
)
switch sels.Service {
case selectors.ServiceExchange:
status, err = exchange.RestoreCollections(ctx, gc.AC, restoreCfg, dcs, deets, errs)
case selectors.ServiceOneDrive:
status, err = onedrive.RestoreCollections(
ctx,
onedrive.NewRestoreHandler(gc.AC),
backupVersion,
restoreCfg,
opts,
dcs,
deets,
errs)
case selectors.ServiceSharePoint:
status, err = sharepoint.RestoreCollections(
ctx,
backupVersion,
gc.AC,
restoreCfg,
opts,
dcs,
deets,
errs)
default:
err = clues.Wrap(clues.New(sels.Service.String()), "service not supported")
}
gc.incrementAwaitingMessages()
gc.UpdateStatus(status)
return deets.Details(), err
}

View File

@ -1,4 +1,4 @@
package connector
package m365
import (
"bytes"
@ -11,8 +11,9 @@ import (
"github.com/stretchr/testify/suite"
inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
"github.com/alcionai/corso/src/internal/connector/exchange"
"github.com/alcionai/corso/src/internal/connector/sharepoint"
"github.com/alcionai/corso/src/internal/m365/exchange"
"github.com/alcionai/corso/src/internal/m365/resource"
"github.com/alcionai/corso/src/internal/m365/sharepoint"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/control"
@ -59,19 +60,13 @@ func (suite *DataCollectionIntgSuite) SetupSuite() {
require.NoError(t, err, clues.ToCore(err))
}
// TestExchangeDataCollection verifies interface between operation and
// GraphConnector remains stable to receive a non-zero amount of Collections
// for the Exchange Package. Enabled exchange applications:
// - mail
// - contacts
// - events
func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
ctx, flush := tester.NewContext(suite.T())
defer flush()
selUsers := []string{suite.user}
connector := loadConnector(ctx, suite.T(), Users)
ctrl := loadController(ctx, suite.T(), resource.Users)
tests := []struct {
name string
getSelector func(t *testing.T) selectors.Selector
@ -127,14 +122,14 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
ctrlOpts := control.Defaults()
ctrlOpts.ToggleFeatures.DisableDelta = !canMakeDeltaQueries
collections, excludes, canUsePreviousBackup, err := exchange.DataCollections(
collections, excludes, canUsePreviousBackup, err := exchange.ProduceBackupCollections(
ctx,
suite.ac,
sel,
suite.tenantID,
uidn,
nil,
connector.UpdateStatus,
ctrl.UpdateStatus,
ctrlOpts,
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
@ -142,7 +137,7 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
assert.True(t, excludes.Empty())
for range collections {
connector.incrementAwaitingMessages()
ctrl.incrementAwaitingMessages()
}
// Categories with delta endpoints will produce a collection for metadata
@ -158,7 +153,7 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
}
}
status := connector.Wait()
status := ctrl.Wait()
assert.NotZero(t, status.Successes)
t.Log(status.String())
})
@ -172,8 +167,7 @@ func (suite *DataCollectionIntgSuite) TestDataCollections_invalidResourceOwner()
defer flush()
owners := []string{"snuffleupagus"}
connector := loadConnector(ctx, suite.T(), Users)
ctrl := loadController(ctx, suite.T(), resource.Users)
tests := []struct {
name string
getSelector func(t *testing.T) selectors.Selector
@ -238,7 +232,7 @@ func (suite *DataCollectionIntgSuite) TestDataCollections_invalidResourceOwner()
ctx, flush := tester.NewContext(t)
defer flush()
collections, excludes, canUsePreviousBackup, err := connector.ProduceBackupCollections(
collections, excludes, canUsePreviousBackup, err := ctrl.ProduceBackupCollections(
ctx,
test.getSelector(t),
test.getSelector(t),
@ -254,16 +248,12 @@ func (suite *DataCollectionIntgSuite) TestDataCollections_invalidResourceOwner()
}
}
// TestSharePointDataCollection verifies interface between operation and
// GraphConnector remains stable to receive a non-zero amount of Collections
// for the SharePoint Package.
func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
ctx, flush := tester.NewContext(suite.T())
defer flush()
selSites := []string{suite.site}
connector := loadConnector(ctx, suite.T(), Sites)
ctrl := loadController(ctx, suite.T(), resource.Sites)
tests := []struct {
name string
expected int
@ -297,14 +287,14 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
sel := test.getSelector()
collections, excludes, canUsePreviousBackup, err := sharepoint.DataCollections(
collections, excludes, canUsePreviousBackup, err := sharepoint.ProduceBackupCollections(
ctx,
suite.ac,
sel,
sel,
nil,
connector.credentials,
connector,
ctrl.credentials,
ctrl,
control.Defaults(),
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
@ -313,7 +303,7 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
assert.True(t, excludes.Empty())
for range collections {
connector.incrementAwaitingMessages()
ctrl.incrementAwaitingMessages()
}
// we don't know an exact count of drives this will produce,
@ -328,7 +318,7 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
}
}
status := connector.Wait()
status := ctrl.Wait()
assert.NotZero(t, status.Successes)
t.Log(status.String())
})
@ -341,7 +331,7 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
type SPCollectionIntgSuite struct {
tester.Suite
connector *GraphConnector
connector *Controller
user string
}
@ -358,7 +348,7 @@ func (suite *SPCollectionIntgSuite) SetupSuite() {
ctx, flush := tester.NewContext(suite.T())
defer flush()
suite.connector = loadConnector(ctx, suite.T(), Sites)
suite.connector = loadController(ctx, suite.T(), resource.Sites)
suite.user = tester.M365UserID(suite.T())
tester.LogTimeOfTest(suite.T())
@ -372,11 +362,11 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() {
var (
siteID = tester.M365SiteID(t)
gc = loadConnector(ctx, t, Sites)
ctrl = loadController(ctx, t, resource.Sites)
siteIDs = []string{siteID}
)
id, name, err := gc.PopulateOwnerIDAndNamesFrom(ctx, siteID, nil)
id, name, err := ctrl.PopulateOwnerIDAndNamesFrom(ctx, siteID, nil)
require.NoError(t, err, clues.ToCore(err))
sel := selectors.NewSharePointBackup(siteIDs)
@ -384,7 +374,7 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() {
sel.SetDiscreteOwnerIDName(id, name)
cols, excludes, canUsePreviousBackup, err := gc.ProduceBackupCollections(
cols, excludes, canUsePreviousBackup, err := ctrl.ProduceBackupCollections(
ctx,
inMock.NewProvider(id, name),
sel.Selector,
@ -419,11 +409,11 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Lists() {
var (
siteID = tester.M365SiteID(t)
gc = loadConnector(ctx, t, Sites)
ctrl = loadController(ctx, t, resource.Sites)
siteIDs = []string{siteID}
)
id, name, err := gc.PopulateOwnerIDAndNamesFrom(ctx, siteID, nil)
id, name, err := ctrl.PopulateOwnerIDAndNamesFrom(ctx, siteID, nil)
require.NoError(t, err, clues.ToCore(err))
sel := selectors.NewSharePointBackup(siteIDs)
@ -431,7 +421,7 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Lists() {
sel.SetDiscreteOwnerIDName(id, name)
cols, excludes, canUsePreviousBackup, err := gc.ProduceBackupCollections(
cols, excludes, canUsePreviousBackup, err := ctrl.ProduceBackupCollections(
ctx,
inMock.NewProvider(id, name),
sel.Selector,

View File

@ -1,6 +1,4 @@
// Package connector uploads and retrieves data from M365 through
// the msgraph-go-sdk.
package connector
package m365
import (
"context"
@ -10,28 +8,25 @@ import (
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/resource"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// ---------------------------------------------------------------------------
// Graph Connector
// ---------------------------------------------------------------------------
// must comply with BackupProducer and RestoreConsumer
var (
_ inject.BackupProducer = &GraphConnector{}
_ inject.RestoreConsumer = &GraphConnector{}
_ inject.BackupProducer = &Controller{}
_ inject.RestoreConsumer = &Controller{}
)
// GraphConnector is a struct used to wrap the GraphServiceClient and
// Controller is a struct used to wrap the GraphServiceClient and
// GraphRequestAdapter from the msgraph-sdk-go. Additional fields are for
// bookkeeping and interfacing with other component.
type GraphConnector struct {
type Controller struct {
AC api.Client
tenant string
@ -43,20 +38,20 @@ type GraphConnector struct {
// reference for processes that choose to populate the values.
IDNameLookup idname.Cacher
// wg is used to track completion of GC tasks
// wg is used to track completion of tasks
wg *sync.WaitGroup
region *trace.Region
// mutex used to synchronize updates to `status`
mu sync.Mutex
status support.ConnectorOperationStatus // contains the status of the last run status
status support.ControllerOperationStatus // contains the status of the last run status
}
func NewGraphConnector(
func NewController(
ctx context.Context,
acct account.Account,
r Resource,
) (*GraphConnector, error) {
rc resource.Category,
) (*Controller, error) {
creds, err := acct.M365Config()
if err != nil {
return nil, clues.Wrap(err, "retrieving m365 account configuration").WithClues(ctx)
@ -67,106 +62,97 @@ func NewGraphConnector(
return nil, clues.Wrap(err, "creating api client").WithClues(ctx)
}
rc, err := r.resourceClient(ac)
rCli, err := getResourceClient(rc, ac)
if err != nil {
return nil, clues.Wrap(err, "creating resource client").WithClues(ctx)
}
gc := GraphConnector{
ctrl := Controller{
AC: ac,
IDNameLookup: idname.NewCache(nil),
credentials: creds,
ownerLookup: rc,
ownerLookup: rCli,
tenant: acct.ID(),
wg: &sync.WaitGroup{},
}
return &gc, nil
return &ctrl, nil
}
// ---------------------------------------------------------------------------
// Processing Status
// ---------------------------------------------------------------------------
// AwaitStatus waits for all gc tasks to complete and then returns status
func (gc *GraphConnector) Wait() *data.CollectionStats {
// AwaitStatus waits for all tasks to complete and then returns status
func (ctrl *Controller) Wait() *data.CollectionStats {
defer func() {
if gc.region != nil {
gc.region.End()
gc.region = nil
if ctrl.region != nil {
ctrl.region.End()
ctrl.region = nil
}
}()
gc.wg.Wait()
ctrl.wg.Wait()
// clean up and reset statefulness
dcs := data.CollectionStats{
Folders: gc.status.Folders,
Objects: gc.status.Metrics.Objects,
Successes: gc.status.Metrics.Successes,
Bytes: gc.status.Metrics.Bytes,
Details: gc.status.String(),
Folders: ctrl.status.Folders,
Objects: ctrl.status.Metrics.Objects,
Successes: ctrl.status.Metrics.Successes,
Bytes: ctrl.status.Metrics.Bytes,
Details: ctrl.status.String(),
}
gc.wg = &sync.WaitGroup{}
gc.status = support.ConnectorOperationStatus{}
ctrl.wg = &sync.WaitGroup{}
ctrl.status = support.ControllerOperationStatus{}
return &dcs
}
// UpdateStatus is used by gc initiated tasks to indicate completion
func (gc *GraphConnector) UpdateStatus(status *support.ConnectorOperationStatus) {
defer gc.wg.Done()
// UpdateStatus is used by initiated tasks to indicate completion
func (ctrl *Controller) UpdateStatus(status *support.ControllerOperationStatus) {
defer ctrl.wg.Done()
if status == nil {
return
}
gc.mu.Lock()
defer gc.mu.Unlock()
gc.status = support.MergeStatus(gc.status, *status)
ctrl.mu.Lock()
defer ctrl.mu.Unlock()
ctrl.status = support.MergeStatus(ctrl.status, *status)
}
// Status returns the current status of the graphConnector operation.
func (gc *GraphConnector) Status() support.ConnectorOperationStatus {
return gc.status
// Status returns the current status of the controller process.
func (ctrl *Controller) Status() support.ControllerOperationStatus {
return ctrl.status
}
// PrintableStatus returns a string formatted version of the GC status.
func (gc *GraphConnector) PrintableStatus() string {
return gc.status.String()
// PrintableStatus returns a string formatted version of the status.
func (ctrl *Controller) PrintableStatus() string {
return ctrl.status.String()
}
func (gc *GraphConnector) incrementAwaitingMessages() {
gc.wg.Add(1)
func (ctrl *Controller) incrementAwaitingMessages() {
ctrl.wg.Add(1)
}
// ---------------------------------------------------------------------------
// Resource Lookup Handling
// ---------------------------------------------------------------------------
type Resource int
const (
UnknownResource Resource = iota
AllResources // unused
Users
Sites
)
func (r Resource) resourceClient(ac api.Client) (*resourceClient, error) {
switch r {
case Users:
return &resourceClient{enum: r, getter: ac.Users()}, nil
case Sites:
return &resourceClient{enum: r, getter: ac.Sites()}, nil
func getResourceClient(rc resource.Category, ac api.Client) (*resourceClient, error) {
switch rc {
case resource.Users:
return &resourceClient{enum: rc, getter: ac.Users()}, nil
case resource.Sites:
return &resourceClient{enum: rc, getter: ac.Sites()}, nil
default:
return nil, clues.New("unrecognized owner resource enum").With("resource_enum", r)
return nil, clues.New("unrecognized owner resource enum").With("resource_enum", rc)
}
}
type resourceClient struct {
enum Resource
enum resource.Category
getter getIDAndNamer
}
@ -243,18 +229,18 @@ func (r resourceClient) getOwnerIDAndNameFrom(
// The id-name swapper is optional. Some processes will look up all owners in
// the tenant before reaching this step. In that case, the data gets handed
// down for this func to consume instead of performing further queries. The
// data gets stored inside the gc instance for later re-use.
func (gc *GraphConnector) PopulateOwnerIDAndNamesFrom(
// data gets stored inside the controller instance for later re-use.
func (ctrl *Controller) PopulateOwnerIDAndNamesFrom(
ctx context.Context,
owner string, // input value, can be either id or name
ins idname.Cacher,
) (string, string, error) {
id, name, err := gc.ownerLookup.getOwnerIDAndNameFrom(ctx, gc.AC, owner, ins)
id, name, err := ctrl.ownerLookup.getOwnerIDAndNameFrom(ctx, ctrl.AC, owner, ins)
if err != nil {
return "", "", clues.Wrap(err, "identifying resource owner")
}
gc.IDNameLookup = idname.NewCache(map[string]string{id: name})
ctrl.IDNameLookup = idname.NewCache(map[string]string{id: name})
return id, name, nil
}

View File

@ -1,4 +1,4 @@
package connector
package m365
import (
"context"
@ -13,10 +13,11 @@ import (
"github.com/stretchr/testify/suite"
inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
"github.com/alcionai/corso/src/internal/connector/mock"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/m365/mock"
"github.com/alcionai/corso/src/internal/m365/resource"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/control"
@ -29,15 +30,15 @@ import (
// Unit tests
// ---------------------------------------------------------------------------
type GraphConnectorUnitSuite struct {
type ControllerUnitSuite struct {
tester.Suite
}
func TestGraphConnectorUnitSuite(t *testing.T) {
suite.Run(t, &GraphConnectorUnitSuite{Suite: tester.NewUnitSuite(t)})
func TestControllerUnitSuite(t *testing.T) {
suite.Run(t, &ControllerUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
func (suite *ControllerUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
const (
id = "owner-id"
name = "owner-name"
@ -47,10 +48,10 @@ func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
itn = map[string]string{id: name}
nti = map[string]string{name: id}
lookup = &resourceClient{
enum: Users,
enum: resource.Users,
getter: &mock.IDNameGetter{ID: id, Name: name},
}
noLookup = &resourceClient{enum: Users, getter: &mock.IDNameGetter{}}
noLookup = &resourceClient{enum: resource.Users, getter: &mock.IDNameGetter{}}
)
table := []struct {
@ -211,9 +212,9 @@ func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
ctx, flush := tester.NewContext(t)
defer flush()
gc := &GraphConnector{ownerLookup: test.rc}
ctrl := &Controller{ownerLookup: test.rc}
rID, rName, err := gc.PopulateOwnerIDAndNamesFrom(ctx, test.owner, test.ins)
rID, rName, err := ctrl.PopulateOwnerIDAndNamesFrom(ctx, test.owner, test.ins)
test.expectErr(t, err, clues.ToCore(err))
assert.Equal(t, test.expectID, rID, "id")
assert.Equal(t, test.expectName, rName, "name")
@ -221,14 +222,14 @@ func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
}
}
func (suite *GraphConnectorUnitSuite) TestGraphConnector_Wait() {
func (suite *ControllerUnitSuite) TestController_Wait() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var (
gc = &GraphConnector{
ctrl = &Controller{
wg: &sync.WaitGroup{},
region: &trace.Region{},
}
@ -240,13 +241,13 @@ func (suite *GraphConnectorUnitSuite) TestGraphConnector_Wait() {
status = support.CreateStatus(ctx, support.Backup, 1, metrics, "details")
)
gc.wg.Add(1)
gc.UpdateStatus(status)
ctrl.wg.Add(1)
ctrl.UpdateStatus(status)
result := gc.Wait()
result := ctrl.Wait()
require.NotNil(t, result)
assert.Nil(t, gc.region, "region")
assert.Empty(t, gc.status, "status")
assert.Nil(t, ctrl.region, "region")
assert.Empty(t, ctrl.status, "status")
assert.Equal(t, 1, result.Folders)
assert.Equal(t, 2, result.Objects)
assert.Equal(t, 3, result.Successes)
@ -257,15 +258,15 @@ func (suite *GraphConnectorUnitSuite) TestGraphConnector_Wait() {
// Integration tests
// ---------------------------------------------------------------------------
type GraphConnectorIntegrationSuite struct {
type ControllerIntegrationSuite struct {
tester.Suite
connector *GraphConnector
ctrl *Controller
user string
secondaryUser string
}
func TestGraphConnectorIntegrationSuite(t *testing.T) {
suite.Run(t, &GraphConnectorIntegrationSuite{
func TestControllerIntegrationSuite(t *testing.T) {
suite.Run(t, &ControllerIntegrationSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tester.M365AcctCredEnvs},
@ -273,20 +274,20 @@ func TestGraphConnectorIntegrationSuite(t *testing.T) {
})
}
func (suite *GraphConnectorIntegrationSuite) SetupSuite() {
func (suite *ControllerIntegrationSuite) SetupSuite() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.connector = loadConnector(ctx, t, Users)
suite.ctrl = loadController(ctx, t, resource.Users)
suite.user = tester.M365UserID(t)
suite.secondaryUser = tester.SecondaryM365UserID(t)
tester.LogTimeOfTest(t)
}
func (suite *GraphConnectorIntegrationSuite) TestRestoreFailsBadService() {
func (suite *ControllerIntegrationSuite) TestRestoreFailsBadService() {
t := suite.T()
ctx, flush := tester.NewContext(t)
@ -299,7 +300,7 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreFailsBadService() {
}
)
deets, err := suite.connector.ConsumeRestoreCollections(
deets, err := suite.ctrl.ConsumeRestoreCollections(
ctx,
version.Backup,
sel,
@ -313,13 +314,13 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreFailsBadService() {
assert.Error(t, err, clues.ToCore(err))
assert.NotNil(t, deets)
status := suite.connector.Wait()
status := suite.ctrl.Wait()
assert.Equal(t, 0, status.Objects)
assert.Equal(t, 0, status.Folders)
assert.Equal(t, 0, status.Successes)
}
func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
func (suite *ControllerIntegrationSuite) TestEmptyCollections() {
restoreCfg := tester.DefaultTestRestoreConfig("")
table := []struct {
name string
@ -377,7 +378,7 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
ctx, flush := tester.NewContext(t)
defer flush()
deets, err := suite.connector.ConsumeRestoreCollections(
deets, err := suite.ctrl.ConsumeRestoreCollections(
ctx,
version.Backup,
test.sel,
@ -391,7 +392,7 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
require.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, deets)
stats := suite.connector.Wait()
stats := suite.ctrl.Wait()
assert.Zero(t, stats.Objects)
assert.Zero(t, stats.Folders)
assert.Zero(t, stats.Successes)
@ -418,9 +419,9 @@ func runRestore(
start := time.Now()
restoreGC := loadConnector(ctx, t, config.Resource)
restoreCtrl := loadController(ctx, t, config.Resource)
restoreSel := getSelectorWith(t, config.Service, config.ResourceOwners, true)
deets, err := restoreGC.ConsumeRestoreCollections(
deets, err := restoreCtrl.ConsumeRestoreCollections(
ctx,
backupVersion,
restoreSel,
@ -431,7 +432,7 @@ func runRestore(
require.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, deets)
status := restoreGC.Wait()
status := restoreCtrl.Wait()
runTime := time.Since(start)
assert.Equal(t, numRestoreItems, status.Objects, "restored status.Objects")
@ -480,14 +481,14 @@ func runBackupAndCompare(
nameToID[ro] = ro
}
backupGC := loadConnector(ctx, t, config.Resource)
backupGC.IDNameLookup = inMock.NewCache(idToName, nameToID)
backupCtrl := loadController(ctx, t, config.Resource)
backupCtrl.IDNameLookup = inMock.NewCache(idToName, nameToID)
backupSel := backupSelectorForExpected(t, config.Service, expectedDests)
t.Logf("Selective backup of %s\n", backupSel)
start := time.Now()
dcs, excludes, canUsePreviousBackup, err := backupGC.ProduceBackupCollections(
dcs, excludes, canUsePreviousBackup, err := backupCtrl.ProduceBackupCollections(
ctx,
backupSel,
backupSel,
@ -512,7 +513,7 @@ func runBackupAndCompare(
dcs,
config)
status := backupGC.Wait()
status := backupCtrl.Wait()
assert.Equalf(t, totalItems+skipped, status.Objects,
"backup status.Objects; wanted %d items + %d skipped", totalItems, skipped)
@ -532,7 +533,7 @@ func runRestoreBackupTest(
config := ConfigInfo{
Opts: opts,
Resource: test.resource,
Resource: test.resourceCat,
Service: test.service,
Tenant: tenant,
ResourceOwners: resourceOwners,
@ -652,15 +653,15 @@ func runRestoreBackupTestVersions(
test.collectionsLatest)
}
func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
func (suite *ControllerIntegrationSuite) TestRestoreAndBackup() {
bodyText := "This email has some text. However, all the text is on the same line."
subjectText := "Test message for restore"
table := []restoreBackupInfo{
{
name: "EmailsWithAttachments",
service: path.ExchangeService,
resource: Users,
name: "EmailsWithAttachments",
service: path.ExchangeService,
resourceCat: resource.Users,
collections: []ColInfo{
{
PathElements: []string{"Inbox"},
@ -685,9 +686,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
},
},
{
name: "MultipleEmailsMultipleFolders",
service: path.ExchangeService,
resource: Users,
name: "MultipleEmailsMultipleFolders",
service: path.ExchangeService,
resourceCat: resource.Users,
collections: []ColInfo{
{
PathElements: []string{"Inbox"},
@ -761,9 +762,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
},
},
{
name: "MultipleContactsSingleFolder",
service: path.ExchangeService,
resource: Users,
name: "MultipleContactsSingleFolder",
service: path.ExchangeService,
resourceCat: resource.Users,
collections: []ColInfo{
{
PathElements: []string{"Contacts"},
@ -789,9 +790,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
},
},
{
name: "MultipleContactsMultipleFolders",
service: path.ExchangeService,
resource: Users,
name: "MultipleContactsMultipleFolders",
service: path.ExchangeService,
resourceCat: resource.Users,
collections: []ColInfo{
{
PathElements: []string{"Work"},
@ -909,7 +910,7 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
runRestoreBackupTest(
suite.T(),
test,
suite.connector.tenant,
suite.ctrl.tenant,
[]string{suite.user},
control.Options{
RestorePermissions: true,
@ -919,12 +920,12 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
}
}
func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames() {
func (suite *ControllerIntegrationSuite) TestMultiFolderBackupDifferentNames() {
table := []restoreBackupInfo{
{
name: "Contacts",
service: path.ExchangeService,
resource: Users,
name: "Contacts",
service: path.ExchangeService,
resourceCat: resource.Users,
collections: []ColInfo{
{
PathElements: []string{"Work"},
@ -1005,7 +1006,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
totalItems, _, collections, expectedData, err := collectionsForInfo(
test.service,
suite.connector.tenant,
suite.ctrl.tenant,
suite.user,
restoreCfg,
[]ColInfo{collection},
@ -1026,8 +1027,8 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
restoreCfg.Location,
)
restoreGC := loadConnector(ctx, t, test.resource)
deets, err := restoreGC.ConsumeRestoreCollections(
restoreCtrl := loadController(ctx, t, test.resourceCat)
deets, err := restoreCtrl.ConsumeRestoreCollections(
ctx,
version.Backup,
restoreSel,
@ -1041,7 +1042,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, deets)
status := restoreGC.Wait()
status := restoreCtrl.Wait()
// Always just 1 because it's just 1 collection.
assert.Equal(t, totalItems, status.Objects, "status.Objects")
assert.Equal(t, totalItems, status.Successes, "status.Successes")
@ -1056,11 +1057,11 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
// Run a backup and compare its output with what we put in.
backupGC := loadConnector(ctx, t, test.resource)
backupCtrl := loadController(ctx, t, test.resourceCat)
backupSel := backupSelectorForExpected(t, test.service, expectedDests)
t.Log("Selective backup of", backupSel)
dcs, excludes, canUsePreviousBackup, err := backupGC.ProduceBackupCollections(
dcs, excludes, canUsePreviousBackup, err := backupCtrl.ProduceBackupCollections(
ctx,
backupSel,
backupSel,
@ -1088,7 +1089,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
// deadlock.
skipped := checkCollections(t, ctx, allItems, allExpectedData, dcs, ci)
status := backupGC.Wait()
status := backupCtrl.Wait()
assert.Equal(t, allItems+skipped, status.Objects, "status.Objects")
assert.Equal(t, allItems+skipped, status.Successes, "status.Successes")
})
@ -1097,13 +1098,13 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
// TODO: this should only be run during smoke tests, not part of the standard CI.
// That's why it's set aside instead of being included in the other test set.
func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup_largeMailAttachment() {
func (suite *ControllerIntegrationSuite) TestRestoreAndBackup_largeMailAttachment() {
subjectText := "Test message for restore with large attachment"
test := restoreBackupInfo{
name: "EmailsWithLargeAttachments",
service: path.ExchangeService,
resource: Users,
name: "EmailsWithLargeAttachments",
service: path.ExchangeService,
resourceCat: resource.Users,
collections: []ColInfo{
{
PathElements: []string{"Inbox"},
@ -1122,7 +1123,7 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup_largeMailAttac
runRestoreBackupTest(
suite.T(),
test,
suite.connector.tenant,
suite.ctrl.tenant,
[]string{suite.user},
control.Options{
RestorePermissions: true,
@ -1131,17 +1132,17 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup_largeMailAttac
)
}
func (suite *GraphConnectorIntegrationSuite) TestBackup_CreatesPrefixCollections() {
func (suite *ControllerIntegrationSuite) TestBackup_CreatesPrefixCollections() {
table := []struct {
name string
resource Resource
resourceCat resource.Category
selectorFunc func(t *testing.T) selectors.Selector
service path.ServiceType
categories []string
}{
{
name: "Exchange",
resource: Users,
name: "Exchange",
resourceCat: resource.Users,
selectorFunc: func(t *testing.T) selectors.Selector {
sel := selectors.NewExchangeBackup([]string{suite.user})
sel.Include(
@ -1160,8 +1161,8 @@ func (suite *GraphConnectorIntegrationSuite) TestBackup_CreatesPrefixCollections
},
},
{
name: "OneDrive",
resource: Users,
name: "OneDrive",
resourceCat: resource.Users,
selectorFunc: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup([]string{suite.user})
sel.Include(sel.Folders([]string{selectors.NoneTgt}))
@ -1174,8 +1175,8 @@ func (suite *GraphConnectorIntegrationSuite) TestBackup_CreatesPrefixCollections
},
},
{
name: "SharePoint",
resource: Sites,
name: "SharePoint",
resourceCat: resource.Sites,
selectorFunc: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup([]string{tester.M365SiteID(t)})
sel.Include(
@ -1205,18 +1206,18 @@ func (suite *GraphConnectorIntegrationSuite) TestBackup_CreatesPrefixCollections
defer flush()
var (
backupGC = loadConnector(ctx, t, test.resource)
backupSel = test.selectorFunc(t)
errs = fault.New(true)
start = time.Now()
backupCtrl = loadController(ctx, t, test.resourceCat)
backupSel = test.selectorFunc(t)
errs = fault.New(true)
start = time.Now()
)
id, name, err := backupGC.PopulateOwnerIDAndNamesFrom(ctx, backupSel.DiscreteOwner, nil)
id, name, err := backupCtrl.PopulateOwnerIDAndNamesFrom(ctx, backupSel.DiscreteOwner, nil)
require.NoError(t, err, clues.ToCore(err))
backupSel.SetDiscreteOwnerIDName(id, name)
dcs, excludes, canUsePreviousBackup, err := backupGC.ProduceBackupCollections(
dcs, excludes, canUsePreviousBackup, err := backupCtrl.ProduceBackupCollections(
ctx,
inMock.NewProvider(id, name),
backupSel,
@ -1263,7 +1264,7 @@ func (suite *GraphConnectorIntegrationSuite) TestBackup_CreatesPrefixCollections
assert.ElementsMatch(t, test.categories, foundCategories)
backupGC.Wait()
backupCtrl.Wait()
assert.NoError(t, errs.Failure())
})

View File

@ -6,7 +6,7 @@ import (
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"

View File

@ -9,7 +9,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/discovery"
"github.com/alcionai/corso/src/internal/m365/discovery"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/credentials"

View File

@ -2,14 +2,18 @@ package exchange
import (
"context"
"encoding/json"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/pii"
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
@ -18,7 +22,303 @@ import (
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// filterContainersAndFillCollections is a utility function
// MetadataFileNames returns the category-specific set of filenames used to
// store graph metadata such as delta tokens and folderID->path references.
// Email and contacts track both a delta-token file and a previous-path file;
// every other category tracks only the previous-path file.
func MetadataFileNames(cat path.CategoryType) []string {
	if cat == path.EmailCategory || cat == path.ContactsCategory {
		return []string{graph.DeltaURLsFileName, graph.PreviousPathFileName}
	}

	return []string{graph.PreviousPathFileName}
}
// CatDeltaPaths maps each data category to its per-container DeltaPaths.
type CatDeltaPaths map[path.CategoryType]DeltaPaths
// DeltaPaths maps container IDs to their stored delta token and previous path.
type DeltaPaths map[string]DeltaPath

// AddDelta records the delta token d for container k, preserving any
// previously stored path for that container.
func (dps DeltaPaths) AddDelta(k, d string) {
	dp := dps[k] // zero-value DeltaPath when k is absent
	dp.Delta = d
	dps[k] = dp
}

// AddPath records the previous path p for container k, preserving any
// previously stored delta token for that container.
func (dps DeltaPaths) AddPath(k, p string) {
	dp := dps[k] // zero-value DeltaPath when k is absent
	dp.Path = p
	dps[k] = dp
}

// DeltaPath couples a container's delta token with its previous path.
type DeltaPath struct {
	Delta string
	Path  string
}
// parseMetadataCollections produces a map of structs holding delta
// and path lookup maps, read from the previous backup's metadata items.
//
// The returned bool reports whether the metadata was read cleanly.  If any
// item errored while streaming, the partial metadata is discarded and false
// is returned (without an error) so the caller falls back to a full backup.
func parseMetadataCollections(
	ctx context.Context,
	colls []data.RestoreCollection,
) (CatDeltaPaths, bool, error) {
	// cdp stores metadata
	cdp := CatDeltaPaths{
		path.ContactsCategory: {},
		path.EmailCategory:    {},
		path.EventsCategory:   {},
	}
	// found tracks the metadata we've loaded, to make sure we don't
	// fetch overlapping copies.
	found := map[path.CategoryType]map[string]struct{}{
		path.ContactsCategory: {},
		path.EmailCategory:    {},
		path.EventsCategory:   {},
	}
	// errors from metadata items should not stop the backup,
	// but it should prevent us from using previous backups
	errs := fault.New(true)
	for _, coll := range colls {
		var (
			breakLoop bool
			items     = coll.Items(ctx, errs)
			category  = coll.FullPath().Category()
		)
		for {
			select {
			case <-ctx.Done():
				return nil, false, clues.Wrap(ctx.Err(), "parsing collection metadata").WithClues(ctx)
			case item, ok := <-items:
				// Channel closed or a read error occurred: stop consuming
				// this collection.  errs.Failure() is re-checked below.
				if !ok || errs.Failure() != nil {
					breakLoop = true
					break
				}
				var (
					m    = map[string]string{}
					cdps = cdp[category]
				)
				err := json.NewDecoder(item.ToReader()).Decode(&m)
				if err != nil {
					// Wrap the decode error instead of dropping it so the
					// root cause survives in the returned error chain.
					return nil, false, clues.Wrap(err, "decoding metadata json").WithClues(ctx)
				}
				switch item.UUID() {
				case graph.PreviousPathFileName:
					if _, ok := found[category]["path"]; ok {
						return nil, false, clues.Wrap(clues.New(category.String()), "multiple versions of path metadata").WithClues(ctx)
					}
					for k, p := range m {
						cdps.AddPath(k, p)
					}
					found[category]["path"] = struct{}{}
				case graph.DeltaURLsFileName:
					if _, ok := found[category]["delta"]; ok {
						return nil, false, clues.Wrap(clues.New(category.String()), "multiple versions of delta metadata").WithClues(ctx)
					}
					for k, d := range m {
						cdps.AddDelta(k, d)
					}
					found[category]["delta"] = struct{}{}
				}
				cdp[category] = cdps
			}
			if breakLoop {
				break
			}
		}
	}
	if errs.Failure() != nil {
		logger.CtxErr(ctx, errs.Failure()).Info("reading metadata collection items")
		// Discard partially-read metadata; the caller must do a full backup.
		return CatDeltaPaths{
			path.ContactsCategory: {},
			path.EmailCategory:    {},
			path.EventsCategory:   {},
		}, false, nil
	}
	// Drop any entry that lacks a previous path: such metadata is considered
	// incomplete and forces a full backup of that container on the next run.
	// (An entry with a path but no delta token is kept as-is.)
	for _, dps := range cdp {
		for k, dp := range dps {
			if len(dp.Path) == 0 {
				delete(dps, k)
			}
		}
	}
	return cdp, true, nil
}
// ProduceBackupCollections returns the set of BackupCollections which the
// caller can use to read mailbox data out for the specified user, one batch
// of collections per scope in the selector.
//
// The returned bool reports whether the previous backup's metadata (delta
// tokens and folder paths) was usable for an incremental backup; false
// forces full enumeration.  The *prefixmatcher.StringSetMatcher return is
// always nil for exchange.
func ProduceBackupCollections(
	ctx context.Context,
	ac api.Client,
	selector selectors.Selector,
	tenantID string,
	user idname.Provider,
	metadata []data.RestoreCollection,
	su support.StatusUpdater,
	ctrlOpts control.Options,
	errs *fault.Bus,
) ([]data.BackupCollection, *prefixmatcher.StringSetMatcher, bool, error) {
	eb, err := selector.ToExchangeBackup()
	if err != nil {
		return nil, nil, false, clues.Wrap(err, "exchange dataCollection selector").WithClues(ctx)
	}
	var (
		collections = []data.BackupCollection{}
		el          = errs.Local()
		categories  = map[path.CategoryType]struct{}{}
		handlers    = BackupHandlers(ac)
	)
	// Turn on concurrency limiter middleware for exchange backups
	// unless explicitly disabled through DisableConcurrencyLimiterFN cli flag
	if !ctrlOpts.ToggleFeatures.DisableConcurrencyLimiter {
		graph.InitializeConcurrencyLimiter(ctrlOpts.Parallelism.ItemFetch)
	}
	cdps, canUsePreviousBackup, err := parseMetadataCollections(ctx, metadata)
	if err != nil {
		return nil, nil, false, err
	}
	// Build one set of collections per selector scope.  A scope-level failure
	// is recorded as recoverable so the remaining scopes still get processed;
	// a hard failure on the local bus stops the loop.
	for _, scope := range eb.Scopes() {
		if el.Failure() != nil {
			break
		}
		dcs, err := createCollections(
			ctx,
			handlers,
			tenantID,
			user,
			scope,
			cdps[scope.Category().PathType()],
			ctrlOpts,
			su,
			errs)
		if err != nil {
			el.AddRecoverable(err)
			continue
		}
		categories[scope.Category().PathType()] = struct{}{}
		collections = append(collections, dcs...)
	}
	// Append service/category base collections for every category that
	// produced data (see graph.BaseCollections for their exact semantics).
	if len(collections) > 0 {
		baseCols, err := graph.BaseCollections(
			ctx,
			collections,
			tenantID,
			user.ID(),
			path.ExchangeService,
			categories,
			su,
			errs)
		if err != nil {
			return nil, nil, false, err
		}
		collections = append(collections, baseCols...)
	}
	return collections, nil, canUsePreviousBackup, el.Failure()
}
// createCollections - utility function that retrieves M365
// IDs through Microsoft Graph API. The selectors.ExchangeScope
// determines the type (category) of collections that are retrieved.
//
// The dps argument carries the previous backup's delta tokens and paths for
// this category, enabling incremental enumeration where available.
func createCollections(
	ctx context.Context,
	handlers map[path.CategoryType]backupHandler,
	tenantID string,
	user idname.Provider,
	scope selectors.ExchangeScope,
	dps DeltaPaths,
	ctrlOpts control.Options,
	su support.StatusUpdater,
	errs *fault.Bus,
) ([]data.BackupCollection, error) {
	ctx = clues.Add(ctx, "category", scope.Category().PathType())
	var (
		allCollections = make([]data.BackupCollection, 0)
		category       = scope.Category().PathType()
		qp             = graph.QueryParams{
			Category:      category,
			ResourceOwner: user,
			TenantID:      tenantID,
		}
	)
	// Every supported category must have a registered handler; a missing one
	// is a hard error rather than a recoverable condition.
	handler, ok := handlers[category]
	if !ok {
		return nil, clues.New("unsupported backup category type").WithClues(ctx)
	}
	// Progress indicator for the folder enumeration.  Signaled once the
	// collections are populated; always closed on return via the defer.
	foldersComplete := observe.MessageWithCompletion(
		ctx,
		observe.Bulletf("%s", qp.Category))
	defer close(foldersComplete)
	rootFolder, cc := handler.NewContainerCache(user.ID())
	if err := cc.Populate(ctx, errs, rootFolder); err != nil {
		return nil, clues.Wrap(err, "populating container cache")
	}
	collections, err := populateCollections(
		ctx,
		qp,
		handler,
		su,
		cc,
		scope,
		dps,
		ctrlOpts,
		errs)
	if err != nil {
		return nil, clues.Wrap(err, "filling collections")
	}
	foldersComplete <- struct{}{}
	// Flatten the returned lookup into a plain slice (populateCollections
	// returns an ID-keyed structure for test convenience — see its TODO).
	for _, coll := range collections {
		allCollections = append(allCollections, coll)
	}
	return allCollections, nil
}
// populateCollections is a utility function
// that places the M365 object ids belonging to specific directories
// into a BackupCollection. Messages outside of those directories are omitted.
// @param collection is filled with during this function.
@ -27,7 +327,7 @@ import (
// TODO(ashmrtn): This should really return []data.BackupCollection but
// unfortunately some of our tests rely on being able to lookup returned
// collections by ID and it would be non-trivial to change them.
func filterContainersAndFillCollections(
func populateCollections(
ctx context.Context,
qp graph.QueryParams,
bh backupHandler,

View File

@ -1,7 +1,9 @@
package exchange
import (
"bytes"
"context"
"sync"
"testing"
"github.com/alcionai/clues"
@ -11,9 +13,9 @@ import (
inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control"
@ -107,12 +109,12 @@ func (m mockResolver) Items() []graph.CachedContainer {
return m.items
}
func (m mockResolver) AddToCache(ctx context.Context, gc graph.Container) error {
func (m mockResolver) AddToCache(ctx context.Context, ctrl graph.Container) error {
if len(m.added) == 0 {
m.added = map[string]string{}
}
m.added[ptr.Val(gc.GetDisplayName())] = ptr.Val(gc.GetId())
m.added[ptr.Val(ctrl.GetDisplayName())] = ptr.Val(ctrl.GetId())
return nil
}
@ -125,33 +127,765 @@ func (m mockResolver) LocationInCache(string) (string, bool)
func (m mockResolver) Populate(context.Context, *fault.Bus, string, ...string) error { return nil }
// ---------------------------------------------------------------------------
// tests
// Unit tests
// ---------------------------------------------------------------------------
type ServiceIteratorsSuite struct {
// DataCollectionsUnitSuite hosts unit tests for exchange backup metadata
// parsing; no live graph connection is required.
type DataCollectionsUnitSuite struct {
	tester.Suite
}
func TestDataCollectionsUnitSuite(t *testing.T) {
	suite.Run(t, &DataCollectionsUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestParseMetadataCollections verifies that parseMetadataCollections merges
// delta-token and previous-path metadata files into DeltaPath entries,
// rejects duplicate metadata files, and drops entries that lack a path.
//
// NOTE(review): the original verification loop ranged over `emails` and
// compared each entry with itself (v vs emails[k]) — a tautology that could
// never fail.  It masked two table entries that expected Delta "delta-link"
// where the parser actually stores "".  Both are fixed below.
func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
	type fileValues struct {
		fileName string
		value    string
	}
	table := []struct {
		name                 string
		data                 []fileValues
		expect               map[string]DeltaPath
		canUsePreviousBackup bool
		expectError          assert.ErrorAssertionFunc
	}{
		{
			name: "delta urls only",
			data: []fileValues{
				{graph.DeltaURLsFileName, "delta-link"},
			},
			// no path -> entry is dropped as incomplete
			expect:               map[string]DeltaPath{},
			canUsePreviousBackup: true,
			expectError:          assert.NoError,
		},
		{
			name: "multiple delta urls",
			data: []fileValues{
				{graph.DeltaURLsFileName, "delta-link"},
				{graph.DeltaURLsFileName, "delta-link-2"},
			},
			canUsePreviousBackup: false,
			expectError:          assert.Error,
		},
		{
			name: "previous path only",
			data: []fileValues{
				{graph.PreviousPathFileName, "prev-path"},
			},
			expect: map[string]DeltaPath{
				"key": {
					// no delta file was provided, so Delta stays empty
					Delta: "",
					Path:  "prev-path",
				},
			},
			canUsePreviousBackup: true,
			expectError:          assert.NoError,
		},
		{
			name: "multiple previous paths",
			data: []fileValues{
				{graph.PreviousPathFileName, "prev-path"},
				{graph.PreviousPathFileName, "prev-path-2"},
			},
			canUsePreviousBackup: false,
			expectError:          assert.Error,
		},
		{
			name: "delta urls and previous paths",
			data: []fileValues{
				{graph.DeltaURLsFileName, "delta-link"},
				{graph.PreviousPathFileName, "prev-path"},
			},
			expect: map[string]DeltaPath{
				"key": {
					Delta: "delta-link",
					Path:  "prev-path",
				},
			},
			canUsePreviousBackup: true,
			expectError:          assert.NoError,
		},
		{
			name: "delta urls and empty previous paths",
			data: []fileValues{
				{graph.DeltaURLsFileName, "delta-link"},
				{graph.PreviousPathFileName, ""},
			},
			// empty path -> entry is dropped as incomplete
			expect:               map[string]DeltaPath{},
			canUsePreviousBackup: true,
			expectError:          assert.NoError,
		},
		{
			name: "empty delta urls and previous paths",
			data: []fileValues{
				{graph.DeltaURLsFileName, ""},
				{graph.PreviousPathFileName, "prev-path"},
			},
			expect: map[string]DeltaPath{
				"key": {
					// the delta file held an empty token, so Delta is empty
					Delta: "",
					Path:  "prev-path",
				},
			},
			canUsePreviousBackup: true,
			expectError:          assert.NoError,
		},
		{
			name: "delta urls with special chars",
			data: []fileValues{
				{graph.DeltaURLsFileName, "`!@#$%^&*()_[]{}/\"\\"},
				{graph.PreviousPathFileName, "prev-path"},
			},
			expect: map[string]DeltaPath{
				"key": {
					Delta: "`!@#$%^&*()_[]{}/\"\\",
					Path:  "prev-path",
				},
			},
			canUsePreviousBackup: true,
			expectError:          assert.NoError,
		},
		{
			name: "delta urls with escaped chars",
			data: []fileValues{
				{graph.DeltaURLsFileName, `\n\r\t\b\f\v\0\\`},
				{graph.PreviousPathFileName, "prev-path"},
			},
			expect: map[string]DeltaPath{
				"key": {
					Delta: "\\n\\r\\t\\b\\f\\v\\0\\\\",
					Path:  "prev-path",
				},
			},
			canUsePreviousBackup: true,
			expectError:          assert.NoError,
		},
		{
			name: "delta urls with newline char runes",
			data: []fileValues{
				// rune(92) = \, rune(110) = n. Ensuring it's not possible to
				// error in serializing/deserializing and produce a single newline
				// character from those two runes.
				{graph.DeltaURLsFileName, string([]rune{rune(92), rune(110)})},
				{graph.PreviousPathFileName, "prev-path"},
			},
			expect: map[string]DeltaPath{
				"key": {
					Delta: "\\n",
					Path:  "prev-path",
				},
			},
			canUsePreviousBackup: true,
			expectError:          assert.NoError,
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()
			ctx, flush := tester.NewContext(t)
			defer flush()
			entries := []graph.MetadataCollectionEntry{}
			for _, d := range test.data {
				entries = append(
					entries,
					graph.NewMetadataEntry(d.fileName, map[string]string{"key": d.value}))
			}
			coll, err := graph.MakeMetadataCollection(
				"t", "u",
				path.ExchangeService,
				path.EmailCategory,
				entries,
				func(cos *support.ControllerOperationStatus) {},
			)
			require.NoError(t, err, clues.ToCore(err))
			cdps, canUsePreviousBackup, err := parseMetadataCollections(ctx, []data.RestoreCollection{
				data.NoFetchRestoreCollection{Collection: coll},
			})
			test.expectError(t, err, clues.ToCore(err))
			assert.Equal(t, test.canUsePreviousBackup, canUsePreviousBackup, "can use previous backup")
			emails := cdps[path.EmailCategory]
			assert.Len(t, emails, len(test.expect))
			// Range over the expectation, not the result, so missing or
			// mismatched entries actually fail the test.
			for k, v := range test.expect {
				assert.Equal(t, v.Delta, emails[k].Delta, "delta")
				assert.Equal(t, v.Path, emails[k].Path, "path")
			}
		})
	}
}
// failingColl is a data.RestoreCollection stub whose item stream always
// records a recoverable error and yields nothing.
type failingColl struct {
	t *testing.T
}

// Items records a recoverable error on errs and returns an already-closed,
// empty item channel.
func (f failingColl) Items(ctx context.Context, errs *fault.Bus) <-chan data.Stream {
	errs.AddRecoverable(assert.AnError)

	ic := make(chan data.Stream)
	close(ic)

	return ic
}

// FullPath returns a fixed exchange email path for use in tests.
func (f failingColl) FullPath() path.Path {
	p, err := path.Build(
		"tenant",
		"user",
		path.ExchangeService,
		path.EmailCategory,
		false,
		"inbox")
	require.NoError(f.t, err, clues.ToCore(err))

	return p
}

// FetchItemByName satisfies the interface; no fetch calls will be made.
func (f failingColl) FetchItemByName(context.Context, string) (data.Stream, error) {
	return nil, nil
}
// TestParseMetadataCollections_ReadFailure ensures that item-read errors in
// the metadata collections do not error out the call, but do force
// canUsePreviousBackup to false.
func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections_ReadFailure() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	colls := []data.RestoreCollection{failingColl{t}}

	_, canUsePreviousBackup, err := parseMetadataCollections(ctx, colls)
	require.NoError(t, err)
	require.False(t, canUsePreviousBackup)
}
// ---------------------------------------------------------------------------
// Integration tests
// ---------------------------------------------------------------------------
func newStatusUpdater(t *testing.T, wg *sync.WaitGroup) func(status *support.ControllerOperationStatus) {
updater := func(status *support.ControllerOperationStatus) {
defer wg.Done()
}
return updater
}
type DataCollectionsIntegrationSuite struct {
tester.Suite
user string
site string
tenantID string
ac api.Client
}
func TestDataCollectionsIntegrationSuite(t *testing.T) {
suite.Run(t, &DataCollectionsIntegrationSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tester.M365AcctCredEnvs},
),
})
}
func (suite *DataCollectionsIntegrationSuite) SetupSuite() {
suite.user = tester.M365UserID(suite.T())
suite.site = tester.M365SiteID(suite.T())
acct := tester.NewM365Account(suite.T())
creds, err := acct.M365Config()
require.NoError(suite.T(), err, clues.ToCore(err))
suite.ac, err = api.NewClient(creds)
require.NoError(suite.T(), err, clues.ToCore(err))
suite.tenantID = creds.AzureTenantID
tester.LogTimeOfTest(suite.T())
}
func (suite *DataCollectionsIntegrationSuite) TestMailFetch() {
var (
userID = tester.M365UserID(suite.T())
users = []string{userID}
handlers = BackupHandlers(suite.ac)
)
tests := []struct {
name string
scope selectors.ExchangeScope
folderNames map[string]struct{}
canMakeDeltaQueries bool
}{
{
name: "Folder Iterative Check Mail",
scope: selectors.NewExchangeBackup(users).MailFolders(
[]string{DefaultMailFolder},
selectors.PrefixMatch(),
)[0],
folderNames: map[string]struct{}{
DefaultMailFolder: {},
},
canMakeDeltaQueries: true,
},
{
name: "Folder Iterative Check Mail Non-Delta",
scope: selectors.NewExchangeBackup(users).MailFolders(
[]string{DefaultMailFolder},
selectors.PrefixMatch(),
)[0],
folderNames: map[string]struct{}{
DefaultMailFolder: {},
},
canMakeDeltaQueries: false,
},
}
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
ctrlOpts := control.Defaults()
ctrlOpts.ToggleFeatures.DisableDelta = !test.canMakeDeltaQueries
collections, err := createCollections(
ctx,
handlers,
suite.tenantID,
inMock.NewProvider(userID, userID),
test.scope,
DeltaPaths{},
ctrlOpts,
func(status *support.ControllerOperationStatus) {},
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
for _, c := range collections {
if c.FullPath().Service() == path.ExchangeMetadataService {
continue
}
require.NotEmpty(t, c.FullPath().Folder(false))
// TODO(ashmrtn): Remove when LocationPath is made part of BackupCollection
// interface.
if !assert.Implements(t, (*data.LocationPather)(nil), c) {
continue
}
loc := c.(data.LocationPather).LocationPath().String()
require.NotEmpty(t, loc)
delete(test.folderNames, loc)
}
assert.Empty(t, test.folderNames)
})
}
}
func (suite *DataCollectionsIntegrationSuite) TestDelta() {
var (
userID = tester.M365UserID(suite.T())
users = []string{userID}
handlers = BackupHandlers(suite.ac)
)
tests := []struct {
name string
scope selectors.ExchangeScope
}{
{
name: "Mail",
scope: selectors.NewExchangeBackup(users).MailFolders(
[]string{DefaultMailFolder},
selectors.PrefixMatch(),
)[0],
},
{
name: "Contacts",
scope: selectors.NewExchangeBackup(users).ContactFolders(
[]string{DefaultContactFolder},
selectors.PrefixMatch(),
)[0],
},
{
name: "Events",
scope: selectors.NewExchangeBackup(users).EventCalendars(
[]string{DefaultCalendar},
selectors.PrefixMatch(),
)[0],
},
}
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
// get collections without providing any delta history (ie: full backup)
collections, err := createCollections(
ctx,
handlers,
suite.tenantID,
inMock.NewProvider(userID, userID),
test.scope,
DeltaPaths{},
control.Defaults(),
func(status *support.ControllerOperationStatus) {},
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
assert.Less(t, 1, len(collections), "retrieved metadata and data collections")
var metadata data.BackupCollection
for _, coll := range collections {
if coll.FullPath().Service() == path.ExchangeMetadataService {
metadata = coll
}
}
require.NotNil(t, metadata, "collections contains a metadata collection")
cdps, canUsePreviousBackup, err := parseMetadataCollections(ctx, []data.RestoreCollection{
data.NoFetchRestoreCollection{Collection: metadata},
})
require.NoError(t, err, clues.ToCore(err))
assert.True(t, canUsePreviousBackup, "can use previous backup")
dps := cdps[test.scope.Category().PathType()]
// now do another backup with the previous delta tokens,
// which should only contain the difference.
collections, err = createCollections(
ctx,
handlers,
suite.tenantID,
inMock.NewProvider(userID, userID),
test.scope,
dps,
control.Defaults(),
func(status *support.ControllerOperationStatus) {},
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
// TODO(keepers): this isn't a very useful test at the moment. It needs to
// investigate the items in the original and delta collections to at least
// assert some minimum assumptions, such as "deltas should retrieve fewer items".
// Delta usage is commented out at the moment, anyway. So this is currently
// a sanity check that the minimum behavior won't break.
for _, coll := range collections {
if coll.FullPath().Service() != path.ExchangeMetadataService {
ec, ok := coll.(*Collection)
require.True(t, ok, "collection is *Collection")
assert.NotNil(t, ec)
}
}
})
}
}
// TestMailSerializationRegression verifies that all mail data stored in the
// test account can be successfully downloaded into bytes and restored into
// M365 mail objects
func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var (
wg sync.WaitGroup
users = []string{suite.user}
handlers = BackupHandlers(suite.ac)
)
sel := selectors.NewExchangeBackup(users)
sel.Include(sel.MailFolders([]string{DefaultMailFolder}, selectors.PrefixMatch()))
collections, err := createCollections(
ctx,
handlers,
suite.tenantID,
inMock.NewProvider(suite.user, suite.user),
sel.Scopes()[0],
DeltaPaths{},
control.Defaults(),
newStatusUpdater(t, &wg),
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
wg.Add(len(collections))
for _, edc := range collections {
suite.Run(edc.FullPath().String(), func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
isMetadata := edc.FullPath().Service() == path.ExchangeMetadataService
streamChannel := edc.Items(ctx, fault.New(true))
// Verify that each message can be restored
for stream := range streamChannel {
buf := &bytes.Buffer{}
read, err := buf.ReadFrom(stream.ToReader())
assert.NoError(t, err, clues.ToCore(err))
assert.NotZero(t, read)
if isMetadata {
continue
}
message, err := api.BytesToMessageable(buf.Bytes())
assert.NotNil(t, message)
assert.NoError(t, err, clues.ToCore(err))
}
})
}
wg.Wait()
}
// TestContactSerializationRegression verifies ability to query contact items
// and to store contact within Collection. Downloaded contacts are run through
// a regression test to ensure that downloaded items can be uploaded.
func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression() {
var (
users = []string{suite.user}
handlers = BackupHandlers(suite.ac)
)
tests := []struct {
name string
scope selectors.ExchangeScope
}{
{
name: "Default Contact Folder",
scope: selectors.NewExchangeBackup(users).ContactFolders(
[]string{DefaultContactFolder},
selectors.PrefixMatch())[0],
},
}
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var wg sync.WaitGroup
edcs, err := createCollections(
ctx,
handlers,
suite.tenantID,
inMock.NewProvider(suite.user, suite.user),
test.scope,
DeltaPaths{},
control.Defaults(),
newStatusUpdater(t, &wg),
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
wg.Add(len(edcs))
require.GreaterOrEqual(t, len(edcs), 1, "expected 1 <= num collections <= 2")
require.GreaterOrEqual(t, 2, len(edcs), "expected 1 <= num collections <= 2")
for _, edc := range edcs {
isMetadata := edc.FullPath().Service() == path.ExchangeMetadataService
count := 0
for stream := range edc.Items(ctx, fault.New(true)) {
buf := &bytes.Buffer{}
read, err := buf.ReadFrom(stream.ToReader())
assert.NoError(t, err, clues.ToCore(err))
assert.NotZero(t, read)
if isMetadata {
continue
}
contact, err := api.BytesToContactable(buf.Bytes())
assert.NotNil(t, contact)
assert.NoError(t, err, "converting contact bytes: "+buf.String(), clues.ToCore(err))
count++
}
if isMetadata {
continue
}
// TODO(ashmrtn): Remove when LocationPath is made part of BackupCollection
// interface.
if !assert.Implements(t, (*data.LocationPather)(nil), edc) {
continue
}
assert.Equal(
t,
edc.(data.LocationPather).LocationPath().String(),
DefaultContactFolder)
assert.NotZero(t, count)
}
wg.Wait()
})
}
}
// TestEventsSerializationRegression ensures functionality of createCollections
// to be able to successfully query, download and restore event objects
func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var (
users = []string{suite.user}
handlers = BackupHandlers(suite.ac)
calID string
bdayID string
)
fn := func(gcc graph.CachedContainer) error {
if ptr.Val(gcc.GetDisplayName()) == DefaultCalendar {
calID = ptr.Val(gcc.GetId())
}
if ptr.Val(gcc.GetDisplayName()) == "Birthdays" {
bdayID = ptr.Val(gcc.GetId())
}
return nil
}
err := suite.ac.Events().EnumerateContainers(ctx, suite.user, DefaultCalendar, fn, fault.New(true))
require.NoError(t, err, clues.ToCore(err))
tests := []struct {
name, expected string
scope selectors.ExchangeScope
}{
{
name: "Default Event Calendar",
expected: calID,
scope: selectors.NewExchangeBackup(users).EventCalendars(
[]string{DefaultCalendar},
selectors.PrefixMatch(),
)[0],
},
{
name: "Birthday Calendar",
expected: bdayID,
scope: selectors.NewExchangeBackup(users).EventCalendars(
[]string{"Birthdays"},
selectors.PrefixMatch(),
)[0],
},
}
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var wg sync.WaitGroup
collections, err := createCollections(
ctx,
handlers,
suite.tenantID,
inMock.NewProvider(suite.user, suite.user),
test.scope,
DeltaPaths{},
control.Defaults(),
newStatusUpdater(t, &wg),
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
require.Len(t, collections, 2)
wg.Add(len(collections))
for _, edc := range collections {
var isMetadata bool
if edc.FullPath().Service() != path.ExchangeMetadataService {
isMetadata = true
assert.Equal(t, test.expected, edc.FullPath().Folder(false))
} else {
assert.Equal(t, "", edc.FullPath().Folder(false))
}
for item := range edc.Items(ctx, fault.New(true)) {
buf := &bytes.Buffer{}
read, err := buf.ReadFrom(item.ToReader())
assert.NoError(t, err, clues.ToCore(err))
assert.NotZero(t, read)
if isMetadata {
continue
}
event, err := api.BytesToEventable(buf.Bytes())
assert.NotNil(t, event)
assert.NoError(t, err, "creating event from bytes: "+buf.String(), clues.ToCore(err))
}
}
wg.Wait()
})
}
}
type CollectionPopulationSuite struct {
tester.Suite
creds account.M365Config
}
func TestServiceIteratorsUnitSuite(t *testing.T) {
suite.Run(t, &ServiceIteratorsSuite{Suite: tester.NewUnitSuite(t)})
suite.Run(t, &CollectionPopulationSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *ServiceIteratorsSuite) SetupSuite() {
func (suite *CollectionPopulationSuite) SetupSuite() {
a := tester.NewMockM365Account(suite.T())
m365, err := a.M365Config()
require.NoError(suite.T(), err, clues.ToCore(err))
suite.creds = m365
}
func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
func (suite *CollectionPopulationSuite) TestPopulateCollections() {
var (
qp = graph.QueryParams{
Category: path.EmailCategory, // doesn't matter which one we use.
ResourceOwner: inMock.NewProvider("user_id", "user_name"),
TenantID: suite.creds.AzureTenantID,
}
statusUpdater = func(*support.ConnectorOperationStatus) {}
statusUpdater = func(*support.ControllerOperationStatus) {}
allScope = selectors.NewExchangeBackup(nil).MailFolders(selectors.Any())[0]
dps = DeltaPaths{} // incrementals are tested separately
commonResult = mockGetterResults{
@ -349,7 +1083,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
category: qp.Category,
}
collections, err := filterContainersAndFillCollections(
collections, err := populateCollections(
ctx,
qp,
mbh,
@ -435,7 +1169,7 @@ func checkMetadata(
assert.Equal(t, expect, catPaths[cat])
}
func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_DuplicateFolders() {
func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_DuplicateFolders() {
type scopeCat struct {
scope selectors.ExchangeScope
cat path.CategoryType
@ -447,7 +1181,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_Dupli
TenantID: suite.creds.AzureTenantID,
}
statusUpdater = func(*support.ConnectorOperationStatus) {}
statusUpdater = func(*support.ControllerOperationStatus) {}
dataTypes = []scopeCat{
{
@ -687,7 +1421,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_Dupli
category: qp.Category,
}
collections, err := filterContainersAndFillCollections(
collections, err := populateCollections(
ctx,
qp,
mbh,
@ -754,7 +1488,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_Dupli
}
}
func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_repeatedItems() {
func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_repeatedItems() {
newDelta := api.DeltaUpdate{URL: "delta_url"}
table := []struct {
@ -832,7 +1566,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_repea
ResourceOwner: inMock.NewProvider("user_id", "user_name"),
TenantID: suite.creds.AzureTenantID,
}
statusUpdater = func(*support.ConnectorOperationStatus) {}
statusUpdater = func(*support.ControllerOperationStatus) {}
allScope = selectors.NewExchangeBackup(nil).MailFolders(selectors.Any())[0]
dps = DeltaPaths{} // incrementals are tested separately
container1 = mockContainer{
@ -851,7 +1585,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_repea
require.Equal(t, "user_id", qp.ResourceOwner.ID(), qp.ResourceOwner)
require.Equal(t, "user_name", qp.ResourceOwner.Name(), qp.ResourceOwner)
collections, err := filterContainersAndFillCollections(
collections, err := populateCollections(
ctx,
qp,
mbh,
@ -907,7 +1641,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_repea
}
}
func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_incrementals_nondelta() {
func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_incrementals_nondelta() {
var (
userID = "user_id"
tenantID = suite.creds.AzureTenantID
@ -917,7 +1651,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_incre
ResourceOwner: inMock.NewProvider("user_id", "user_name"),
TenantID: suite.creds.AzureTenantID,
}
statusUpdater = func(*support.ConnectorOperationStatus) {}
statusUpdater = func(*support.ControllerOperationStatus) {}
allScope = selectors.NewExchangeBackup(nil).MailFolders(selectors.Any())[0]
commonResults = mockGetterResults{
added: []string{"added"},
@ -1270,7 +2004,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_incre
}
}
collections, err := filterContainersAndFillCollections(
collections, err := populateCollections(
ctx,
qp,
mbh,

View File

@ -4,7 +4,7 @@ import (
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/m365/graph"
)
// checkIDAndName is a helper function to ensure that

View File

@ -13,9 +13,9 @@ import (
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
@ -125,7 +125,7 @@ func (col *Collection) LocationPath() *path.Builder {
return col.locationPath
}
// TODO(ashmrtn): Fill in with previous path once GraphConnector compares old
// TODO(ashmrtn): Fill in with previous path once the Controller compares old
// and new folder hierarchies.
func (col Collection) PreviousPath() path.Path {
return col.prevPath

View File

@ -11,8 +11,8 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
@ -46,20 +46,19 @@ func (mi *mockItemer) Serialize(
return nil, mi.serializeErr
}
type ExchangeDataCollectionSuite struct {
type CollectionSuite struct {
tester.Suite
}
func TestExchangeDataCollectionSuite(t *testing.T) {
suite.Run(t, &ExchangeDataCollectionSuite{Suite: tester.NewUnitSuite(t)})
func TestCollectionSuite(t *testing.T) {
suite.Run(t, &CollectionSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *ExchangeDataCollectionSuite) TestExchangeDataReader_Valid() {
func (suite *CollectionSuite) TestReader_Valid() {
m := []byte("test message")
description := "aFile"
ed := &Stream{id: description, message: m}
// Read the message using the `ExchangeData` reader and validate it matches what we set
buf := &bytes.Buffer{}
_, err := buf.ReadFrom(ed.ToReader())
assert.NoError(suite.T(), err, clues.ToCore(err))
@ -67,7 +66,7 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataReader_Valid() {
assert.Equal(suite.T(), description, ed.UUID())
}
func (suite *ExchangeDataCollectionSuite) TestExchangeDataReader_Empty() {
func (suite *CollectionSuite) TestReader_Empty() {
var (
empty []byte
expected int64
@ -82,7 +81,7 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataReader_Empty() {
assert.NoError(t, err, clues.ToCore(err))
}
func (suite *ExchangeDataCollectionSuite) TestExchangeData_FullPath() {
func (suite *CollectionSuite) TestColleciton_FullPath() {
t := suite.T()
tenant := "a-tenant"
user := "a-user"
@ -105,7 +104,7 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeData_FullPath() {
assert.Equal(t, fullPath, edc.FullPath())
}
func (suite *ExchangeDataCollectionSuite) TestExchangeDataCollection_NewExchangeDataCollection() {
func (suite *CollectionSuite) TestCollection_NewCollection() {
t := suite.T()
tenant := "a-tenant"
user := "a-user"
@ -129,7 +128,7 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataCollection_NewExchange
assert.Equal(t, fullPath, edc.FullPath())
}
func (suite *ExchangeDataCollectionSuite) TestNewCollection_state() {
func (suite *CollectionSuite) TestNewCollection_state() {
fooP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "foo")
require.NoError(suite.T(), err, clues.ToCore(err))
barP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "bar")
@ -189,7 +188,7 @@ func (suite *ExchangeDataCollectionSuite) TestNewCollection_state() {
}
}
func (suite *ExchangeDataCollectionSuite) TestGetItemWithRetries() {
func (suite *CollectionSuite) TestGetItemWithRetries() {
table := []struct {
name string
items *mockItemer

View File

@ -6,13 +6,13 @@ import (
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
)
var (
_ graph.ContainerResolver = &contactFolderCache{}
_ graph.ContainerResolver = &contactContainerCache{}
_ containerRefresher = &contactRefresher{}
)
@ -35,14 +35,14 @@ func (r *contactRefresher) refreshContainer(
return &f, nil
}
type contactFolderCache struct {
type contactContainerCache struct {
*containerResolver
enumer containersEnumerator
getter containerGetter
userID string
}
func (cfc *contactFolderCache) populateContactRoot(
func (cfc *contactContainerCache) populateContactRoot(
ctx context.Context,
directoryID string,
baseContainerPath []string,
@ -67,7 +67,7 @@ func (cfc *contactFolderCache) populateContactRoot(
// objects into the Contact Folder Cache
// Function does NOT use Delta Queries as it is not supported
// as of (Oct-07-2022)
func (cfc *contactFolderCache) Populate(
func (cfc *contactContainerCache) Populate(
ctx context.Context,
errs *fault.Bus,
baseID string,
@ -89,7 +89,7 @@ func (cfc *contactFolderCache) Populate(
return nil
}
func (cfc *contactFolderCache) init(
func (cfc *contactContainerCache) init(
ctx context.Context,
baseNode string,
baseContainerPath []string,

View File

@ -1,7 +1,7 @@
package exchange
import (
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
@ -32,7 +32,7 @@ func (h contactBackupHandler) itemHandler() itemGetterSerializer {
func (h contactBackupHandler) NewContainerCache(
userID string,
) (string, graph.ContainerResolver) {
return DefaultContactFolder, &contactFolderCache{
return DefaultContactFolder, &contactContainerCache{
userID: userID,
enumer: h.ac,
getter: h.ac,

View File

@ -7,7 +7,7 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
@ -31,7 +31,7 @@ func newContactRestoreHandler(
}
func (h contactRestoreHandler) newContainerCache(userID string) graph.ContainerResolver {
return &contactFolderCache{
return &contactContainerCache{
userID: userID,
enumer: h.ac,
getter: h.ac,

View File

@ -6,7 +6,7 @@ import (
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"

View File

@ -13,10 +13,12 @@ import (
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// ---------------------------------------------------------------------------
@ -671,6 +673,118 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestAddToCache() {
assert.Equal(t, m.expectedLocation, l.String(), "location path")
}
type ContainerResolverSuite struct {
tester.Suite
credentials account.M365Config
}
func TestContainerResolverIntegrationSuite(t *testing.T) {
suite.Run(t, &ContainerResolverSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tester.M365AcctCredEnvs}),
})
}
func (suite *ContainerResolverSuite) SetupSuite() {
t := suite.T()
a := tester.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.credentials = m365
}
func (suite *ContainerResolverSuite) TestPopulate() {
ac, err := api.NewClient(suite.credentials)
require.NoError(suite.T(), err, clues.ToCore(err))
eventFunc := func(t *testing.T) graph.ContainerResolver {
return &eventContainerCache{
userID: tester.M365UserID(t),
enumer: ac.Events(),
getter: ac.Events(),
}
}
contactFunc := func(t *testing.T) graph.ContainerResolver {
return &contactContainerCache{
userID: tester.M365UserID(t),
enumer: ac.Contacts(),
getter: ac.Contacts(),
}
}
tests := []struct {
name, folderInCache, root, basePath string
resolverFunc func(t *testing.T) graph.ContainerResolver
canFind assert.BoolAssertionFunc
}{
{
name: "Default Event Cache",
// Fine as long as this isn't running against a migrated Exchange server.
folderInCache: DefaultCalendar,
root: DefaultCalendar,
basePath: DefaultCalendar,
resolverFunc: eventFunc,
canFind: assert.True,
},
{
name: "Default Event Folder Hidden",
folderInCache: DefaultContactFolder,
root: DefaultCalendar,
canFind: assert.False,
resolverFunc: eventFunc,
},
{
name: "Name Not in Cache",
folderInCache: "testFooBarWhoBar",
root: DefaultCalendar,
canFind: assert.False,
resolverFunc: eventFunc,
},
{
name: "Default Contact Cache",
folderInCache: DefaultContactFolder,
root: DefaultContactFolder,
basePath: DefaultContactFolder,
canFind: assert.True,
resolverFunc: contactFunc,
},
{
name: "Default Contact Hidden",
folderInCache: DefaultContactFolder,
root: DefaultContactFolder,
canFind: assert.False,
resolverFunc: contactFunc,
},
{
name: "Name Not in Cache",
folderInCache: "testFooBarWhoBar",
root: DefaultContactFolder,
canFind: assert.False,
resolverFunc: contactFunc,
},
}
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
resolver := test.resolverFunc(t)
err := resolver.Populate(ctx, fault.New(true), test.root, test.basePath)
require.NoError(t, err, clues.ToCore(err))
_, isFound := resolver.LocationInCache(test.folderInCache)
test.canFind(t, isFound, "folder path", test.folderInCache)
})
}
}
// ---------------------------------------------------------------------------
// integration suite
// ---------------------------------------------------------------------------
@ -688,7 +802,7 @@ func runCreateDestinationTest(
var (
svc = path.ExchangeService
gcr = handler.newContainerCache(userID)
gcc = handler.newContainerCache(userID)
)
path1, err := path.Build(
@ -700,17 +814,17 @@ func runCreateDestinationTest(
containerNames1...)
require.NoError(t, err, clues.ToCore(err))
containerID, gcr, err := createDestination(
containerID, gcc, err := createDestination(
ctx,
handler,
handler.formatRestoreDestination(destinationName, path1),
userID,
gcr,
gcc,
true,
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
_, _, err = gcr.IDToPath(ctx, containerID)
_, _, err = gcc.IDToPath(ctx, containerID)
assert.NoError(t, err, clues.ToCore(err))
path2, err := path.Build(
@ -722,22 +836,22 @@ func runCreateDestinationTest(
containerNames2...)
require.NoError(t, err, clues.ToCore(err))
containerID, gcr, err = createDestination(
containerID, gcc, err = createDestination(
ctx,
handler,
handler.formatRestoreDestination(destinationName, path2),
userID,
gcr,
gcc,
false,
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
p, l, err := gcr.IDToPath(ctx, containerID)
p, l, err := gcc.IDToPath(ctx, containerID)
require.NoError(t, err, clues.ToCore(err))
_, ok := gcr.LocationInCache(l.String())
_, ok := gcc.LocationInCache(l.String())
require.True(t, ok, "looking for location in cache: %s", l)
_, ok = gcr.PathInCache(p.String())
_, ok = gcc.PathInCache(p.String())
require.True(t, ok, "looking for path in cache: %s", p)
}

View File

@ -6,14 +6,14 @@ import (
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
)
var _ graph.ContainerResolver = &eventCalendarCache{}
var _ graph.ContainerResolver = &eventContainerCache{}
type eventCalendarCache struct {
type eventContainerCache struct {
*containerResolver
enumer containersEnumerator
getter containerGetter
@ -23,7 +23,7 @@ type eventCalendarCache struct {
// init ensures that the structure's fields are initialized.
// Fields Initialized when cache == nil:
// [mc.cache]
func (ecc *eventCalendarCache) init(
func (ecc *eventContainerCache) init(
ctx context.Context,
) error {
if ecc.containerResolver == nil {
@ -37,7 +37,7 @@ func (ecc *eventCalendarCache) init(
// DefaultCalendar is the traditional "Calendar".
// Action ensures that cache will stop at appropriate level.
// @error iff the struct is not properly instantiated
func (ecc *eventCalendarCache) populateEventRoot(ctx context.Context) error {
func (ecc *eventContainerCache) populateEventRoot(ctx context.Context) error {
container := DefaultCalendar
f, err := ecc.getter.GetContainerByID(ctx, ecc.userID, container)
@ -59,7 +59,7 @@ func (ecc *eventCalendarCache) populateEventRoot(ctx context.Context) error {
// Populate utility function for populating eventCalendarCache.
// Executes 1 additional Graph Query
// @param baseID: ignored. Present to conform to interface
func (ecc *eventCalendarCache) Populate(
func (ecc *eventContainerCache) Populate(
ctx context.Context,
errs *fault.Bus,
baseID string,
@ -88,7 +88,7 @@ func (ecc *eventCalendarCache) Populate(
// AddToCache adds container to map in field 'cache'
// @returns error iff the required values are not accessible.
func (ecc *eventCalendarCache) AddToCache(ctx context.Context, f graph.Container) error {
func (ecc *eventContainerCache) AddToCache(ctx context.Context, f graph.Container) error {
if err := checkIDAndName(f); err != nil {
return clues.Wrap(err, "validating container").WithClues(ctx)
}

View File

@ -1,7 +1,7 @@
package exchange
import (
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
@ -32,7 +32,7 @@ func (h eventBackupHandler) itemHandler() itemGetterSerializer {
func (h eventBackupHandler) NewContainerCache(
userID string,
) (string, graph.ContainerResolver) {
return DefaultCalendar, &eventCalendarCache{
return DefaultCalendar, &eventContainerCache{
userID: userID,
enumer: h.ac,
getter: h.ac,

View File

@ -7,7 +7,7 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
@ -33,7 +33,7 @@ func newEventRestoreHandler(
}
func (h eventRestoreHandler) newContainerCache(userID string) graph.ContainerResolver {
return &eventCalendarCache{
return &eventContainerCache{
userID: userID,
enumer: h.ac,
getter: h.ac,

View File

@ -5,7 +5,7 @@ import (
"github.com/microsoft/kiota-abstractions-go/serialization"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"

View File

@ -1,7 +1,7 @@
package exchange
import (
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
@ -32,7 +32,7 @@ func (h mailBackupHandler) itemHandler() itemGetterSerializer {
func (h mailBackupHandler) NewContainerCache(
userID string,
) (string, graph.ContainerResolver) {
return rootFolderAlias, &mailFolderCache{
return rootFolderAlias, &mailContainerCache{
userID: userID,
enumer: h.ac,
getter: h.ac,

View File

@ -5,13 +5,13 @@ import (
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
)
var (
_ graph.ContainerResolver = &mailFolderCache{}
_ graph.ContainerResolver = &mailContainerCache{}
_ containerRefresher = &mailRefresher{}
)
@ -34,10 +34,10 @@ func (r *mailRefresher) refreshContainer(
return &f, nil
}
// mailFolderCache struct used to improve lookup of directories within exchange.Mail
// mailContainerCache struct used to improve lookup of directories within exchange.Mail
// cache map of cachedContainers where the key = M365ID
// nameLookup map: Key: DisplayName Value: ID
type mailFolderCache struct {
type mailContainerCache struct {
*containerResolver
enumer containersEnumerator
getter containerGetter
@ -47,7 +47,7 @@ type mailFolderCache struct {
// init ensures that the structure's fields are initialized.
// Fields Initialized when cache == nil:
// [mc.cache]
func (mc *mailFolderCache) init(
func (mc *mailContainerCache) init(
ctx context.Context,
) error {
if mc.containerResolver == nil {
@ -64,7 +64,7 @@ func (mc *mailFolderCache) init(
// rootFolderAlias is the top-level directory for exchange.Mail.
// Action ensures that cache will stop at appropriate level.
// @error iff the struct is not properly instantiated
func (mc *mailFolderCache) populateMailRoot(ctx context.Context) error {
func (mc *mailContainerCache) populateMailRoot(ctx context.Context) error {
f, err := mc.getter.GetContainerByID(ctx, mc.userID, rootFolderAlias)
if err != nil {
return clues.Wrap(err, "fetching root folder")
@ -89,7 +89,7 @@ func (mc *mailFolderCache) populateMailRoot(ctx context.Context) error {
// @param baseID: M365ID of the base of the exchange.Mail.Folder
// @param baseContainerPath: the set of folder elements that make up the path
// for the base container in the cache.
func (mc *mailFolderCache) Populate(
func (mc *mailContainerCache) Populate(
ctx context.Context,
errs *fault.Bus,
baseID string,

View File

@ -87,7 +87,7 @@ func (suite *MailFolderCacheIntegrationSuite) TestDeltaFetch() {
acm := ac.Mail()
mfc := mailFolderCache{
mfc := mailContainerCache{
userID: userID,
enumer: acm,
getter: acm,

View File

@ -8,7 +8,7 @@ import (
"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
@ -34,7 +34,7 @@ func newMailRestoreHandler(
}
func (h mailRestoreHandler) newContainerCache(userID string) graph.ContainerResolver {
return &mailFolderCache{
return &mailContainerCache{
userID: userID,
enumer: h.ac,
getter: h.ac,

View File

@ -9,10 +9,10 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
@ -22,16 +22,16 @@ import (
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// RestoreCollections restores M365 objects in data.RestoreCollection to MSFT
// ConsumeRestoreCollections restores M365 objects in data.RestoreCollection to MSFT
// store through GraphAPI.
func RestoreCollections(
func ConsumeRestoreCollections(
ctx context.Context,
ac api.Client,
restoreCfg control.RestoreConfig,
dcs []data.RestoreCollection,
deets *details.Builder,
errs *fault.Bus,
) (*support.ConnectorOperationStatus, error) {
) (*support.ControllerOperationStatus, error) {
if len(dcs) == 0 {
return support.CreateStatus(ctx, support.Restore, 0, support.CollectionMetrics{}, ""), nil
}
@ -73,7 +73,7 @@ func RestoreCollections(
isNewCache = true
}
containerID, gcr, err := createDestination(
containerID, gcc, err := createDestination(
ictx,
handler,
handler.formatRestoreDestination(restoreCfg.Location, dc.FullPath()),
@ -86,7 +86,7 @@ func RestoreCollections(
continue
}
directoryCache[category] = gcr
directoryCache[category] = gcc
ictx = clues.Add(ictx, "restore_destination_id", containerID)
@ -131,7 +131,7 @@ func restoreCollection(
deets *details.Builder,
errs *fault.Bus,
) (support.CollectionMetrics, error) {
ctx, end := diagnostics.Span(ctx, "gc:exchange:restoreCollection", diagnostics.Label("path", dc.FullPath()))
ctx, end := diagnostics.Span(ctx, "m365:exchange:restoreCollection", diagnostics.Label("path", dc.FullPath()))
defer end()
var (
@ -159,7 +159,7 @@ func restoreCollection(
}
ictx := clues.Add(ctx, "item_id", itemData.UUID())
trace.Log(ictx, "gc:exchange:restoreCollection:item", itemData.UUID())
trace.Log(ictx, "m365:exchange:restoreCollection:item", itemData.UUID())
metrics.Objects++
buf := &bytes.Buffer{}

View File

@ -10,7 +10,7 @@ import (
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/ptr"
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault"

View File

@ -7,8 +7,8 @@ import (
"github.com/alcionai/clues"
"github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/internal/connector/exchange"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/m365/exchange"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"

View File

@ -10,7 +10,7 @@ import (
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/ptr"
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)

View File

@ -8,7 +8,7 @@ import (
ktext "github.com/microsoft/kiota-serialization-text-go"
msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
i1a3c1a5501c5e41b7fd169f2d4c768dce9b096ac28fb5431bf02afcc57295411 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/sites"
i1a3c1a5501c5e41b7fd169f2d4c768dce9b096ac28fb5431bf02afcc57295411 "github.com/alcionai/corso/src/internal/m365/graph/betasdk/sites"
)
// BetaClient the main entry point of the SDK, exposes the configuration and the fluent API.

View File

@ -8,7 +8,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
)

View File

@ -2,7 +2,7 @@
"lockFileVersion": "1.0.0",
"kiotaVersion": "0.10.0.0",
"clientClassName": "BetaClient",
"clientNamespaceName": "github.com/alcionai/corso/src/internal/connector/graph/betasdk",
"clientNamespaceName": "github.com/alcionai/corso/src/internal/m365/graph/betasdk",
"language": "Go",
"betaVersion": "0.53.0",
"usesBackingStore": false,

Some files were not shown because too many files have changed in this diff Show More