merge main

neha-Gupta1 2023-06-19 11:14:21 +05:30
commit 0c69289b41
328 changed files with 7502 additions and 5922 deletions

View File

@@ -1,3 +1,4 @@
+# See https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#common-dependabot-automations
 name: auto-merge
 on:
@@ -5,11 +6,24 @@ on:
     paths-ignore:
      - "src/**" # prevent auto-merge for go dependencies
+permissions:
+  pull-requests: write
 jobs:
-  auto-merge:
+  auto-approve-label:
    runs-on: ubuntu-latest
+    if: ${{ github.actor == 'dependabot[bot]' }}
    steps:
-      - uses: actions/checkout@v3
-      - uses: ahmadnassri/action-dependabot-auto-merge@v2 # https://github.com/marketplace/actions/dependabot-auto-merge
+      - name: Dependabot metadata
+        id: metadata
+        uses: dependabot/fetch-metadata@v1
        with:
-          github-token: ${{ secrets.DEPENDABOT_TOKEN }}
+          github-token: "${{ secrets.GITHUB_TOKEN }}"
+      - name: Enable auto-merge for Dependabot PRs
+        if: ${{steps.metadata.outputs.update-type == 'version-update:semver-minor'}}
+        run: |
+          gh pr edit "$PR_URL" --add-label "mergequeue"
+          gh pr review --approve "$PR_URL"
+        env:
+          PR_URL: ${{github.event.pull_request.html_url}}
+          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}

View File

@@ -18,6 +18,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Fix Exchange folder cache population error when parent folder isn't found.
 - Fix Exchange backup issue caused by incorrect json serialization
 - Fix issues with details model containing duplicate entry for api consumers
+- Handle OLE conversion errors when trying to fetch attachments
 ### Changed
 - Do not display all the items that we restored at the end if there are more than 15. You can override this with `--verbose`.

View File

@@ -118,7 +118,7 @@ issues:
       linters:
         - forbidigo
       text: "context.(Background|TODO)"
-    - path: internal/connector/graph/betasdk
+    - path: internal/m365/graph/betasdk
       linters:
         - wsl
         - revive

View File

@@ -12,8 +12,8 @@ import (
 	. "github.com/alcionai/corso/src/cli/print"
 	"github.com/alcionai/corso/src/cli/utils"
 	"github.com/alcionai/corso/src/internal/common/idname"
-	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/data"
+	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/pkg/backup"
 	"github.com/alcionai/corso/src/pkg/logger"
 	"github.com/alcionai/corso/src/pkg/path"
@@ -279,7 +279,7 @@ func genericDeleteCommand(cmd *cobra.Command, bID, designation string, args []st
 	ctx := clues.Add(cmd.Context(), "delete_backup_id", bID)

-	r, _, err := utils.GetAccountAndConnect(ctx)
+	r, _, _, err := utils.GetAccountAndConnect(ctx)
 	if err != nil {
 		return Only(ctx, err)
 	}
@@ -300,7 +300,7 @@ func genericListCommand(cmd *cobra.Command, bID string, service path.ServiceType, args []st
 func genericListCommand(cmd *cobra.Command, bID string, service path.ServiceType, args []string) error {
 	ctx := cmd.Context()

-	r, _, err := utils.GetAccountAndConnect(ctx)
+	r, _, _, err := utils.GetAccountAndConnect(ctx)
 	if err != nil {
 		return Only(ctx, err)
 	}

View File

@@ -153,7 +153,7 @@ func createExchangeCmd(cmd *cobra.Command, args []string) error {
 		return err
 	}

-	r, acct, err := utils.GetAccountAndConnect(ctx)
+	r, acct, err := utils.AccountConnectAndWriteRepoConfig(ctx)
 	if err != nil {
 		return Only(ctx, err)
 	}
@@ -262,7 +262,7 @@ func detailsExchangeCmd(cmd *cobra.Command, args []string) error {
 	ctx := cmd.Context()
 	opts := utils.MakeExchangeOpts(cmd)

-	r, _, err := utils.GetAccountAndConnect(ctx)
+	r, _, _, err := utils.GetAccountAndConnect(ctx)
 	if err != nil {
 		return Only(ctx, err)
 	}

View File

@@ -19,7 +19,7 @@ import (
 	"github.com/alcionai/corso/src/cli/print"
 	"github.com/alcionai/corso/src/cli/utils"
 	"github.com/alcionai/corso/src/internal/common/idname"
-	"github.com/alcionai/corso/src/internal/connector/exchange"
+	"github.com/alcionai/corso/src/internal/m365/exchange"
 	"github.com/alcionai/corso/src/internal/operations"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/account"

View File

@@ -134,7 +134,7 @@ func createOneDriveCmd(cmd *cobra.Command, args []string) error {
 		return err
 	}

-	r, acct, err := utils.GetAccountAndConnect(ctx)
+	r, acct, err := utils.AccountConnectAndWriteRepoConfig(ctx)
 	if err != nil {
 		return Only(ctx, err)
 	}
@@ -220,7 +220,7 @@ func detailsOneDriveCmd(cmd *cobra.Command, args []string) error {
 	ctx := cmd.Context()
 	opts := utils.MakeOneDriveOpts(cmd)

-	r, _, err := utils.GetAccountAndConnect(ctx)
+	r, _, _, err := utils.GetAccountAndConnect(ctx)
 	if err != nil {
 		return Only(ctx, err)
 	}

View File

@@ -150,7 +150,7 @@ func createSharePointCmd(cmd *cobra.Command, args []string) error {
 		return err
 	}

-	r, acct, err := utils.GetAccountAndConnect(ctx)
+	r, acct, err := utils.AccountConnectAndWriteRepoConfig(ctx)
 	if err != nil {
 		return Only(ctx, err)
 	}
@@ -312,7 +312,7 @@ func detailsSharePointCmd(cmd *cobra.Command, args []string) error {
 	ctx := cmd.Context()
 	opts := utils.MakeSharePointOpts(cmd)

-	r, _, err := utils.GetAccountAndConnect(ctx)
+	r, _, _, err := utils.GetAccountAndConnect(ctx)
 	if err != nil {
 		return Only(ctx, err)
 	}

View File

@@ -121,7 +121,7 @@ func handleMaintenanceCmd(cmd *cobra.Command, args []string) error {
 		return err
 	}

-	r, _, err := utils.GetAccountAndConnect(ctx)
+	r, _, _, err := utils.GetAccountAndConnect(ctx)
 	if err != nil {
 		return print.Only(ctx, err)
 	}

View File

@@ -13,6 +13,7 @@ import (
 	"github.com/alcionai/corso/src/cli/options"
 	. "github.com/alcionai/corso/src/cli/print"
 	"github.com/alcionai/corso/src/cli/utils"
+	"github.com/alcionai/corso/src/internal/events"
 	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/repository"
 	"github.com/alcionai/corso/src/pkg/storage"
@@ -193,7 +194,7 @@ func connectS3Cmd(cmd *cobra.Command, args []string) error {
 	repoID := cfg.RepoID
 	if len(repoID) == 0 {
-		repoID = "not_found"
+		repoID = events.RepoIDNotFound
 	}

 	s3Cfg, err := cfg.Storage.S3Config()

View File

@@ -89,20 +89,20 @@ func restoreExchangeCmd(cmd *cobra.Command, args []string) error {
 		return err
 	}

-	r, _, err := utils.GetAccountAndConnect(ctx)
+	r, _, _, err := utils.GetAccountAndConnect(ctx)
 	if err != nil {
 		return Only(ctx, err)
 	}

 	defer utils.CloseRepo(ctx, r)

-	dest := control.DefaultRestoreDestination(dttm.HumanReadable)
-	Infof(ctx, "Restoring to folder %s", dest.ContainerName)
+	restoreCfg := control.DefaultRestoreConfig(dttm.HumanReadable)
+	Infof(ctx, "Restoring to folder %s", restoreCfg.Location)

 	sel := utils.IncludeExchangeRestoreDataSelectors(opts)
 	utils.FilterExchangeRestoreInfoSelectors(sel, opts)

-	ro, err := r.NewRestore(ctx, utils.BackupIDFV, sel.Selector, dest)
+	ro, err := r.NewRestore(ctx, utils.BackupIDFV, sel.Selector, restoreCfg)
 	if err != nil {
 		return Only(ctx, clues.Wrap(err, "Failed to initialize Exchange restore"))
 	}
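
The RestoreDestination-to-RestoreConfig swap above repeats in the OneDrive and SharePoint restore commands below. A minimal standalone sketch of the new call shape; only DefaultRestoreConfig, dttm.HumanReadable, and the Location field are taken from this diff, and the main wrapper is illustrative:

    package main

    import (
        "fmt"

        "github.com/alcionai/corso/src/internal/common/dttm"
        "github.com/alcionai/corso/src/pkg/control"
    )

    func main() {
        // DefaultRestoreConfig seeds Location with a timestamped folder name;
        // Location replaces the old RestoreDestination.ContainerName field.
        restoreCfg := control.DefaultRestoreConfig(dttm.HumanReadable)
        fmt.Println("restoring to folder", restoreCfg.Location)
    }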

View File

@@ -14,7 +14,7 @@ import (
 	"github.com/alcionai/corso/src/cli/config"
 	"github.com/alcionai/corso/src/cli/utils"
 	"github.com/alcionai/corso/src/internal/common/idname"
-	"github.com/alcionai/corso/src/internal/connector/exchange"
+	"github.com/alcionai/corso/src/internal/m365/exchange"
 	"github.com/alcionai/corso/src/internal/operations"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/account"

View File

@@ -90,20 +90,20 @@ func restoreOneDriveCmd(cmd *cobra.Command, args []string) error {
 		return err
 	}

-	r, _, err := utils.GetAccountAndConnect(ctx)
+	r, _, _, err := utils.GetAccountAndConnect(ctx)
 	if err != nil {
 		return Only(ctx, err)
 	}

 	defer utils.CloseRepo(ctx, r)

-	dest := control.DefaultRestoreDestination(dttm.HumanReadableDriveItem)
-	Infof(ctx, "Restoring to folder %s", dest.ContainerName)
+	restoreCfg := control.DefaultRestoreConfig(dttm.HumanReadableDriveItem)
+	Infof(ctx, "Restoring to folder %s", restoreCfg.Location)

 	sel := utils.IncludeOneDriveRestoreDataSelectors(opts)
 	utils.FilterOneDriveRestoreInfoSelectors(sel, opts)

-	ro, err := r.NewRestore(ctx, utils.BackupIDFV, sel.Selector, dest)
+	ro, err := r.NewRestore(ctx, utils.BackupIDFV, sel.Selector, restoreCfg)
 	if err != nil {
 		return Only(ctx, clues.Wrap(err, "Failed to initialize OneDrive restore"))
 	}

View File

@@ -95,20 +95,20 @@ func restoreSharePointCmd(cmd *cobra.Command, args []string) error {
 		return err
 	}

-	r, _, err := utils.GetAccountAndConnect(ctx)
+	r, _, _, err := utils.GetAccountAndConnect(ctx)
 	if err != nil {
 		return Only(ctx, err)
 	}

 	defer utils.CloseRepo(ctx, r)

-	dest := control.DefaultRestoreDestination(dttm.HumanReadableDriveItem)
-	Infof(ctx, "Restoring to folder %s", dest.ContainerName)
+	restoreCfg := control.DefaultRestoreConfig(dttm.HumanReadableDriveItem)
+	Infof(ctx, "Restoring to folder %s", restoreCfg.Location)

 	sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts)
 	utils.FilterSharePointRestoreInfoSelectors(sel, opts)

-	ro, err := r.NewRestore(ctx, utils.BackupIDFV, sel.Selector, dest)
+	ro, err := r.NewRestore(ctx, utils.BackupIDFV, sel.Selector, restoreCfg)
 	if err != nil {
 		return Only(ctx, clues.Wrap(err, "Failed to initialize SharePoint restore"))
 	}

View File

@@ -24,23 +24,53 @@ const (
 	Wildcard = "*"
 )

-func GetAccountAndConnect(ctx context.Context) (repository.Repository, *account.Account, error) {
+func GetAccountAndConnect(ctx context.Context) (repository.Repository, *storage.Storage, *account.Account, error) {
 	cfg, err := config.GetConfigRepoDetails(ctx, true, nil)
 	if err != nil {
-		return nil, nil, err
+		return nil, nil, nil, err
 	}

 	repoID := cfg.RepoID
 	if len(repoID) == 0 {
-		repoID = "not_found"
+		repoID = events.RepoIDNotFound
 	}

 	r, err := repository.Connect(ctx, cfg.Account, cfg.Storage, repoID, options.Control())
 	if err != nil {
-		return nil, nil, clues.Wrap(err, "Failed to connect to the "+cfg.Storage.Provider.String()+" repository")
+		return nil, nil, nil, clues.Wrap(err, "connecting to the "+cfg.Storage.Provider.String()+" repository")
 	}

-	return r, &cfg.Account, nil
+	return r, &cfg.Storage, &cfg.Account, nil
+}
+
+func AccountConnectAndWriteRepoConfig(ctx context.Context) (repository.Repository, *account.Account, error) {
+	r, stg, acc, err := GetAccountAndConnect(ctx)
+	if err != nil {
+		logger.CtxErr(ctx, err).Info("getting and connecting account")
+		return nil, nil, err
+	}
+
+	s3Config, err := stg.S3Config()
+	if err != nil {
+		logger.CtxErr(ctx, err).Info("getting storage configuration")
+		return nil, nil, err
+	}
+
+	m365Config, err := acc.M365Config()
+	if err != nil {
+		logger.CtxErr(ctx, err).Info("getting m365 configuration")
+		return nil, nil, err
+	}
+
+	// Repo config is already set during repo connect and init; this is just to
+	// confirm correct values, so we won't fail if the write fails.
+	err = config.WriteRepoConfig(ctx, s3Config, m365Config, r.GetID())
+	if err != nil {
+		logger.CtxErr(ctx, err).Info("writing to repository configuration")
+		return nil, nil, err
+	}
+
+	return r, acc, nil
 }

 // CloseRepo handles closing a repo.
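
For reference, a sketch of how callers adapt to the widened GetAccountAndConnect signature above. The wrapper names connectOnly and createStyle are illustrative; the utils calls and return shapes come from this commit's hunks:

    package main

    import (
        "context"

        "github.com/alcionai/corso/src/cli/utils"
    )

    // connectOnly: most commands discard the new *storage.Storage return
    // value with an extra blank identifier.
    func connectOnly(ctx context.Context) error {
        r, _, _, err := utils.GetAccountAndConnect(ctx)
        if err != nil {
            return err
        }
        defer utils.CloseRepo(ctx, r)
        return nil
    }

    // createStyle: creation-style commands switch to the wrapper, which
    // also confirms the on-disk repo config after connecting.
    func createStyle(ctx context.Context) error {
        r, _, err := utils.AccountConnectAndWriteRepoConfig(ctx)
        if err != nil {
            return err
        }
        defer utils.CloseRepo(ctx, r)
        return nil
    }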

View File

@@ -15,14 +15,18 @@ import (
 	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/common/str"
-	"github.com/alcionai/corso/src/internal/connector"
-	exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
 	"github.com/alcionai/corso/src/internal/data"
+	"github.com/alcionai/corso/src/internal/m365"
+	exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
+	odStub "github.com/alcionai/corso/src/internal/m365/onedrive/stub"
+	"github.com/alcionai/corso/src/internal/m365/resource"
+	m365Stub "github.com/alcionai/corso/src/internal/m365/stub"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/internal/version"
 	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/control"
+	"github.com/alcionai/corso/src/pkg/control/testdata"
 	"github.com/alcionai/corso/src/pkg/credentials"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
@@ -50,7 +54,7 @@ type dataBuilderFunc func(id, now, subject, body string) []byte

 func generateAndRestoreItems(
 	ctx context.Context,
-	gc *connector.GraphConnector,
+	ctrl *m365.Controller,
 	service path.ServiceType,
 	cat path.CategoryType,
 	sel selectors.Selector,
@@ -83,14 +87,14 @@ func generateAndRestoreItems(
 		items: items,
 	}}

-	dest := control.DefaultRestoreDestination(dttm.SafeForTesting)
-	dest.ContainerName = destFldr
-	print.Infof(ctx, "Restoring to folder %s", dest.ContainerName)
+	restoreCfg := control.DefaultRestoreConfig(dttm.SafeForTesting)
+	restoreCfg.Location = destFldr
+	print.Infof(ctx, "Restoring to folder %s", restoreCfg.Location)

 	dataColls, err := buildCollections(
 		service,
 		tenantID, userID,
-		dest,
+		restoreCfg,
 		collections)
 	if err != nil {
 		return nil, err
@@ -98,19 +102,19 @@ func generateAndRestoreItems(
 	print.Infof(ctx, "Generating %d %s items in %s\n", howMany, cat, Destination)

-	return gc.ConsumeRestoreCollections(ctx, version.Backup, sel, dest, opts, dataColls, errs)
+	return ctrl.ConsumeRestoreCollections(ctx, version.Backup, sel, restoreCfg, opts, dataColls, errs)
 }

 // ------------------------------------------------------------------------------------------
 // Common Helpers
 // ------------------------------------------------------------------------------------------

-func getGCAndVerifyResourceOwner(
+func getControllerAndVerifyResourceOwner(
 	ctx context.Context,
-	resource connector.Resource,
+	resourceCat resource.Category,
 	resourceOwner string,
 ) (
-	*connector.GraphConnector,
+	*m365.Controller,
 	account.Account,
 	idname.Provider,
 	error,
@@ -132,17 +136,17 @@ func getGCAndVerifyResourceOwner(
 		return nil, account.Account{}, nil, clues.Wrap(err, "finding m365 account details")
 	}

-	gc, err := connector.NewGraphConnector(ctx, acct, resource)
+	ctrl, err := m365.NewController(ctx, acct, resourceCat)
 	if err != nil {
 		return nil, account.Account{}, nil, clues.Wrap(err, "connecting to graph api")
 	}

-	id, _, err := gc.PopulateOwnerIDAndNamesFrom(ctx, resourceOwner, nil)
+	id, _, err := ctrl.PopulateOwnerIDAndNamesFrom(ctx, resourceOwner, nil)
 	if err != nil {
 		return nil, account.Account{}, nil, clues.Wrap(err, "verifying user")
 	}

-	return gc, acct, gc.IDNameLookup.ProviderForID(id), nil
+	return ctrl, acct, ctrl.IDNameLookup.ProviderForID(id), nil
 }

 type item struct {
@@ -163,7 +167,7 @@ type collection struct {
 func buildCollections(
 	service path.ServiceType,
 	tenant, user string,
-	dest control.RestoreDestination,
+	restoreCfg control.RestoreConfig,
 	colls []collection,
 ) ([]data.RestoreCollection, error) {
 	collections := make([]data.RestoreCollection, 0, len(colls))
@@ -208,7 +212,7 @@ var (
 )

 func generateAndRestoreDriveItems(
-	gc *connector.GraphConnector,
+	ctrl *m365.Controller,
 	resourceOwner, secondaryUserID, secondaryUserName string,
 	acct account.Account,
 	service path.ServiceType,
@@ -224,22 +228,22 @@ func generateAndRestoreDriveItems(
 	ctx, flush := tester.NewContext(nil)
 	defer flush()

-	dest := control.DefaultRestoreDestination(dttm.SafeForTesting)
-	dest.ContainerName = destFldr
-	print.Infof(ctx, "Restoring to folder %s", dest.ContainerName)
+	restoreCfg := control.DefaultRestoreConfig(dttm.SafeForTesting)
+	restoreCfg.Location = destFldr
+	print.Infof(ctx, "Restoring to folder %s", restoreCfg.Location)

 	var driveID string

 	switch service {
 	case path.SharePointService:
-		d, err := gc.AC.Stable.Client().Sites().BySiteId(resourceOwner).Drive().Get(ctx, nil)
+		d, err := ctrl.AC.Stable.Client().Sites().BySiteId(resourceOwner).Drive().Get(ctx, nil)
 		if err != nil {
 			return nil, clues.Wrap(err, "getting site's default drive")
 		}

 		driveID = ptr.Val(d.GetId())
 	default:
-		d, err := gc.AC.Stable.Client().Users().ByUserId(resourceOwner).Drive().Get(ctx, nil)
+		d, err := ctrl.AC.Stable.Client().Users().ByUserId(resourceOwner).Drive().Get(ctx, nil)
 		if err != nil {
 			return nil, clues.Wrap(err, "getting user's default drive")
 		}
@@ -248,7 +252,7 @@ func generateAndRestoreDriveItems(
 	}

 	var (
-		cols []connector.OnedriveColInfo
+		cols []odStub.ColInfo

 		rootPath    = []string{"drives", driveID, "root:"}
 		folderAPath = []string{"drives", driveID, "root:", folderAName}
@@ -262,15 +266,15 @@ func generateAndRestoreDriveItems(
 	)

 	for i := 0; i < count; i++ {
-		col := []connector.OnedriveColInfo{
+		col := []odStub.ColInfo{
 			// basic folder and file creation
 			{
 				PathElements: rootPath,
-				Files: []connector.ItemData{
+				Files: []odStub.ItemData{
 					{
 						Name: fmt.Sprintf("file-1st-count-%d-at-%s", i, currentTime),
 						Data: fileAData,
-						Perms: connector.PermData{
+						Perms: odStub.PermData{
 							User:     secondaryUserName,
 							EntityID: secondaryUserID,
 							Roles:    writePerm,
@@ -281,13 +285,13 @@ func generateAndRestoreDriveItems(
 						Data: fileBData,
 					},
 				},
-				Folders: []connector.ItemData{
+				Folders: []odStub.ItemData{
 					{
 						Name: folderBName,
 					},
 					{
 						Name: folderAName,
-						Perms: connector.PermData{
+						Perms: odStub.PermData{
 							User:     secondaryUserName,
 							EntityID: secondaryUserID,
 							Roles:    readPerm,
@@ -295,7 +299,7 @@ func generateAndRestoreDriveItems(
 					},
 					{
 						Name: folderCName,
-						Perms: connector.PermData{
+						Perms: odStub.PermData{
 							User:     secondaryUserName,
 							EntityID: secondaryUserID,
 							Roles:    readPerm,
@@ -307,18 +311,18 @@ func generateAndRestoreDriveItems(
 				// a folder that has permissions with an item in the folder with
 				// the different permissions.
 				PathElements: folderAPath,
-				Files: []connector.ItemData{
+				Files: []odStub.ItemData{
 					{
 						Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
 						Data: fileEData,
-						Perms: connector.PermData{
+						Perms: odStub.PermData{
 							User:     secondaryUserName,
 							EntityID: secondaryUserID,
 							Roles:    writePerm,
 						},
 					},
 				},
-				Perms: connector.PermData{
+				Perms: odStub.PermData{
 					User:     secondaryUserName,
 					EntityID: secondaryUserID,
 					Roles:    readPerm,
@@ -328,13 +332,13 @@ func generateAndRestoreDriveItems(
 				// a folder that has permissions with an item in the folder with
 				// no permissions.
 				PathElements: folderCPath,
-				Files: []connector.ItemData{
+				Files: []odStub.ItemData{
 					{
 						Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
 						Data: fileAData,
 					},
 				},
-				Perms: connector.PermData{
+				Perms: odStub.PermData{
 					User:     secondaryUserName,
 					EntityID: secondaryUserID,
 					Roles:    readPerm,
@@ -342,23 +346,23 @@ func generateAndRestoreDriveItems(
 			},
 			{
 				PathElements: folderBPath,
-				Files: []connector.ItemData{
+				Files: []odStub.ItemData{
 					{
 						// restoring a file in a non-root folder that doesn't inherit
 						// permissions.
 						Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
 						Data: fileBData,
-						Perms: connector.PermData{
+						Perms: odStub.PermData{
 							User:     secondaryUserName,
 							EntityID: secondaryUserID,
 							Roles:    writePerm,
 						},
 					},
 				},
-				Folders: []connector.ItemData{
+				Folders: []odStub.ItemData{
 					{
 						Name: folderAName,
-						Perms: connector.PermData{
+						Perms: odStub.PermData{
 							User:     secondaryUserName,
 							EntityID: secondaryUserID,
 							Roles:    readPerm,
@@ -371,7 +375,7 @@ func generateAndRestoreDriveItems(
 		cols = append(cols, col...)
 	}

-	input, err := connector.DataForInfo(service, cols, version.Backup)
+	input, err := odStub.DataForInfo(service, cols, version.Backup)
 	if err != nil {
 		return nil, err
 	}
@@ -388,16 +392,16 @@ func generateAndRestoreDriveItems(
 		ToggleFeatures: control.Toggles{},
 	}

-	config := connector.ConfigInfo{
+	config := m365Stub.ConfigInfo{
 		Opts:           opts,
-		Resource:       connector.Users,
+		Resource:       resource.Users,
 		Service:        service,
 		Tenant:         tenantID,
 		ResourceOwners: []string{resourceOwner},
-		Dest:           tester.DefaultTestRestoreDestination(""),
+		RestoreCfg:     testdata.DefaultRestoreConfig(""),
 	}

-	_, _, collections, _, err := connector.GetCollectionsAndExpected(
+	_, _, collections, _, err := m365Stub.GetCollectionsAndExpected(
 		config,
 		input,
 		version.Backup)
@@ -405,5 +409,5 @@ func generateAndRestoreDriveItems(
 		return nil, err
 	}

-	return gc.ConsumeRestoreCollections(ctx, version.Backup, sel, dest, opts, collections, errs)
+	return ctrl.ConsumeRestoreCollections(ctx, version.Backup, sel, restoreCfg, opts, collections, errs)
 }

View File

@@ -5,8 +5,8 @@ import (
 	. "github.com/alcionai/corso/src/cli/print"
 	"github.com/alcionai/corso/src/cli/utils"
-	"github.com/alcionai/corso/src/internal/connector"
-	exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
+	exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
+	"github.com/alcionai/corso/src/internal/m365/resource"
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/logger"
@@ -52,14 +52,14 @@ func handleExchangeEmailFactory(cmd *cobra.Command, args []string) error {
 		return nil
 	}

-	gc, _, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
+	ctrl, _, _, err := getControllerAndVerifyResourceOwner(ctx, resource.Users, User)
 	if err != nil {
 		return Only(ctx, err)
 	}

 	deets, err := generateAndRestoreItems(
 		ctx,
-		gc,
+		ctrl,
 		service,
 		category,
 		selectors.NewExchangeRestore([]string{User}).Selector,
@@ -98,14 +98,14 @@ func handleExchangeCalendarEventFactory(cmd *cobra.Command, args []string) error
 		return nil
 	}

-	gc, _, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
+	ctrl, _, _, err := getControllerAndVerifyResourceOwner(ctx, resource.Users, User)
 	if err != nil {
 		return Only(ctx, err)
 	}

 	deets, err := generateAndRestoreItems(
 		ctx,
-		gc,
+		ctrl,
 		service,
 		category,
 		selectors.NewExchangeRestore([]string{User}).Selector,
@@ -143,14 +143,14 @@ func handleExchangeContactFactory(cmd *cobra.Command, args []string) error {
 		return nil
 	}

-	gc, _, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
+	ctrl, _, _, err := getControllerAndVerifyResourceOwner(ctx, resource.Users, User)
 	if err != nil {
 		return Only(ctx, err)
 	}

 	deets, err := generateAndRestoreItems(
 		ctx,
-		gc,
+		ctrl,
 		service,
 		category,
 		selectors.NewExchangeRestore([]string{User}).Selector,

View File

@@ -7,7 +7,7 @@ import (
 	. "github.com/alcionai/corso/src/cli/print"
 	"github.com/alcionai/corso/src/cli/utils"
-	"github.com/alcionai/corso/src/internal/connector"
+	"github.com/alcionai/corso/src/internal/m365/resource"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/logger"
 	"github.com/alcionai/corso/src/pkg/path"
@@ -36,7 +36,7 @@ func handleOneDriveFileFactory(cmd *cobra.Command, args []string) error {
 		return nil
 	}

-	gc, acct, inp, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
+	ctrl, acct, inp, err := getControllerAndVerifyResourceOwner(ctx, resource.Users, User)
 	if err != nil {
 		return Only(ctx, err)
 	}
@@ -45,7 +45,7 @@ func handleOneDriveFileFactory(cmd *cobra.Command, args []string) error {
 	sel.SetDiscreteOwnerIDName(inp.ID(), inp.Name())

 	deets, err := generateAndRestoreDriveItems(
-		gc,
+		ctrl,
 		inp.ID(),
 		SecondaryUser,
 		strings.ToLower(SecondaryUser),

View File

@@ -7,7 +7,7 @@ import (
 	. "github.com/alcionai/corso/src/cli/print"
 	"github.com/alcionai/corso/src/cli/utils"
-	"github.com/alcionai/corso/src/internal/connector"
+	"github.com/alcionai/corso/src/internal/m365/resource"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/logger"
 	"github.com/alcionai/corso/src/pkg/path"
@@ -36,7 +36,7 @@ func handleSharePointLibraryFileFactory(cmd *cobra.Command, args []string) error
 		return nil
 	}

-	gc, acct, inp, err := getGCAndVerifyResourceOwner(ctx, connector.Sites, Site)
+	ctrl, acct, inp, err := getControllerAndVerifyResourceOwner(ctx, resource.Sites, Site)
 	if err != nil {
 		return Only(ctx, err)
 	}
@@ -45,7 +45,7 @@ func handleSharePointLibraryFileFactory(cmd *cobra.Command, args []string) error
 	sel.SetDiscreteOwnerIDName(inp.ID(), inp.Name())

 	deets, err := generateAndRestoreDriveItems(
-		gc,
+		ctrl,
 		inp.ID(),
 		SecondaryUser,
 		strings.ToLower(SecondaryUser),

View File

@@ -21,7 +21,7 @@ import (
 	"github.com/alcionai/corso/src/cli/utils"
 	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/common/str"
-	"github.com/alcionai/corso/src/internal/connector/graph"
+	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/credentials"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"

View File

@@ -17,7 +17,7 @@ import (
 	"github.com/alcionai/corso/src/internal/common/dttm"
 	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/internal/connector/graph"
+	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/filters"
 	"github.com/alcionai/corso/src/pkg/logger"

View File

@@ -1,14 +1,14 @@
 module github.com/alcionai/corso/src

-go 1.19
+go 1.20

-replace github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20230502235504-2509b1d72a79
+replace github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20230616023302-6c5412bbf417

 require (
 	github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0
-	github.com/alcionai/clues v0.0.0-20230406223931-f48777f4773c
+	github.com/alcionai/clues v0.0.0-20230613181047-258ea4f19225
 	github.com/armon/go-metrics v0.4.1
-	github.com/aws/aws-sdk-go v1.44.275
+	github.com/aws/aws-sdk-go v1.44.283
 	github.com/aws/aws-xray-sdk-go v1.8.1
 	github.com/cenkalti/backoff/v4 v4.2.1
 	github.com/google/uuid v1.3.0
@@ -18,7 +18,7 @@ require (
 	github.com/microsoft/kiota-authentication-azure-go v1.0.0
 	github.com/microsoft/kiota-http-go v1.0.0
 	github.com/microsoft/kiota-serialization-form-go v1.0.0
-	github.com/microsoft/kiota-serialization-json-go v1.0.1
+	github.com/microsoft/kiota-serialization-json-go v1.0.2
 	github.com/microsoftgraph/msgraph-sdk-go v1.4.0
 	github.com/microsoftgraph/msgraph-sdk-go-core v1.0.0
 	github.com/pkg/errors v0.9.1
@@ -34,7 +34,7 @@ require (
 	go.uber.org/zap v1.24.0
 	golang.org/x/exp v0.0.0-20230213192124-5e25df0256eb
 	golang.org/x/time v0.3.0
-	golang.org/x/tools v0.9.3
+	golang.org/x/tools v0.10.0
 )

 require (
@@ -115,12 +115,12 @@ require (
 	go.opentelemetry.io/otel/trace v1.15.1 // indirect
 	go.uber.org/atomic v1.10.0 // indirect
 	go.uber.org/multierr v1.11.0 // indirect
-	golang.org/x/crypto v0.9.0 // indirect
-	golang.org/x/mod v0.10.0 // indirect
-	golang.org/x/net v0.10.0
-	golang.org/x/sync v0.2.0 // indirect
-	golang.org/x/sys v0.8.0 // indirect
-	golang.org/x/text v0.9.0 // indirect
+	golang.org/x/crypto v0.10.0 // indirect
+	golang.org/x/mod v0.11.0 // indirect
+	golang.org/x/net v0.11.0
+	golang.org/x/sync v0.3.0 // indirect
+	golang.org/x/sys v0.9.0 // indirect
+	golang.org/x/text v0.10.0 // indirect
 	google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect
 	google.golang.org/grpc v1.55.0 // indirect
 	google.golang.org/protobuf v1.30.0 // indirect

View File

@@ -53,10 +53,10 @@ github.com/VividCortex/ewma v1.2.0 h1:f58SaIzcDXrSy3kWaHNvuJgJ3Nmz59Zji6XoJR/q1o
 github.com/VividCortex/ewma v1.2.0/go.mod h1:nz4BbCtbLyFDeC9SUHbtcT5644juEuWfUAUnGx7j5l4=
 github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8=
 github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo=
-github.com/alcionai/clues v0.0.0-20230406223931-f48777f4773c h1:Njdw/Nnq2DN3f8QMaHuZZHdVHTUSxFqPMMxDIInDWB4=
-github.com/alcionai/clues v0.0.0-20230406223931-f48777f4773c/go.mod h1:DeaMbAwDvYM6ZfPMR/GUl3hceqI5C8jIQ1lstjB2IW8=
-github.com/alcionai/kopia v0.12.2-0.20230502235504-2509b1d72a79 h1:Wrl99Y7jftZMnNDiOIcRJrjstZO3IEj3+Q/sip27vmI=
-github.com/alcionai/kopia v0.12.2-0.20230502235504-2509b1d72a79/go.mod h1:Iic7CcKhsq+A7MLR9hh6VJfgpcJhLx3Kn+BgjY+azvI=
+github.com/alcionai/clues v0.0.0-20230613181047-258ea4f19225 h1:mjUjCCGvJpmnLh3fuVzpfOSFC9lp9TOIOfjj51L5Rs0=
+github.com/alcionai/clues v0.0.0-20230613181047-258ea4f19225/go.mod h1:DeaMbAwDvYM6ZfPMR/GUl3hceqI5C8jIQ1lstjB2IW8=
+github.com/alcionai/kopia v0.12.2-0.20230616023302-6c5412bbf417 h1:s0B7Be1qqZW+XDRStDYvyXZ7ovQAMkg0N1F/ji4TJyc=
+github.com/alcionai/kopia v0.12.2-0.20230616023302-6c5412bbf417/go.mod h1:Iic7CcKhsq+A7MLR9hh6VJfgpcJhLx3Kn+BgjY+azvI=
 github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
 github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
 github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
@@ -66,8 +66,8 @@ github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY
 github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
 github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA=
 github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4=
-github.com/aws/aws-sdk-go v1.44.275 h1:VqRULgqrigvQLll4e4hXuc568EQAtZQ6jmBzLlQHzSI=
-github.com/aws/aws-sdk-go v1.44.275/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
+github.com/aws/aws-sdk-go v1.44.283 h1:ObMaIvdhHJM2sIrbcljd7muHBaFb+Kp/QsX6iflGDg4=
+github.com/aws/aws-sdk-go v1.44.283/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
 github.com/aws/aws-xray-sdk-go v1.8.1 h1:O4pXV+hnCskaamGsZnFpzHyAmgPGusBMN6i7nnsy0Fo=
 github.com/aws/aws-xray-sdk-go v1.8.1/go.mod h1:wMmVYzej3sykAttNBkXQHK/+clAPWTOrPiajEk7Cp3A=
 github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8=
@@ -281,8 +281,8 @@ github.com/microsoft/kiota-http-go v1.0.0 h1:F1hd6gMlLeEgH2CkRB7z13ow7LxMKMWEmms
 github.com/microsoft/kiota-http-go v1.0.0/go.mod h1:eujxJliqodotsYepIc6ihhK+vXMMt5Q8YiSNL7+7M7U=
 github.com/microsoft/kiota-serialization-form-go v1.0.0 h1:UNdrkMnLFqUCccQZerKjblsyVgifS11b3WCx+eFEsAI=
 github.com/microsoft/kiota-serialization-form-go v1.0.0/go.mod h1:h4mQOO6KVTNciMF6azi1J9QB19ujSw3ULKcSNyXXOMA=
-github.com/microsoft/kiota-serialization-json-go v1.0.1 h1:nI3pLpqep7L6BLJPT7teCqkYFRmgyuA2G0zx6ZrwgFE=
-github.com/microsoft/kiota-serialization-json-go v1.0.1/go.mod h1:KS+eFtwtJGsosXRQr/Qilep7ZD1MRF+VtO7LnL7Oyuw=
+github.com/microsoft/kiota-serialization-json-go v1.0.2 h1:RXan8v7yWBD88XxVZ2W38BBcqu2UqWtgS54nCbOS5ow=
+github.com/microsoft/kiota-serialization-json-go v1.0.2/go.mod h1:AUItT9exyxmjZQE8IeFD9ygP77q9GKVb+AQE2V5Ikho=
 github.com/microsoft/kiota-serialization-text-go v1.0.0 h1:XOaRhAXy+g8ZVpcq7x7a0jlETWnWrEum0RhmbYrTFnA=
 github.com/microsoft/kiota-serialization-text-go v1.0.0/go.mod h1:sM1/C6ecnQ7IquQOGUrUldaO5wj+9+v7G2W3sQ3fy6M=
 github.com/microsoftgraph/msgraph-sdk-go v1.4.0 h1:ibNwMDEZ6HikA9BVXu+TljCzCiE+yFsD6wLpJbTc1tc=
@@ -452,8 +452,8 @@ golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm
 golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
 golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
 golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
-golang.org/x/crypto v0.9.0 h1:LF6fAI+IutBocDJ2OT0Q1g8plpYljMZ4+lty+dsqw3g=
-golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0=
+golang.org/x/crypto v0.10.0 h1:LKqV2xt9+kDzSTfOhx4FrkEBcMrAgHSYgzywV9zcGmM=
+golang.org/x/crypto v0.10.0/go.mod h1:o4eNf7Ede1fv+hwOwZsTHl9EsPFO6q6ZvYR8vYfY45I=
 golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -490,8 +490,8 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
-golang.org/x/mod v0.10.0 h1:lFO9qtOdlre5W1jxS3r/4szv2/6iXxScdzjoBMXNhYk=
-golang.org/x/mod v0.10.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
+golang.org/x/mod v0.11.0 h1:bUO06HqtnRcc/7l71XBe4WcqTZ+3AH1J59zWDDwLKgU=
+golang.org/x/mod v0.11.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
 golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -529,8 +529,8 @@ golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qx
 golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
 golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
 golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco=
-golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M=
-golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
+golang.org/x/net v0.11.0 h1:Gi2tvZIJyBtO9SDr1q9h5hEQCp/4L2RQ+ar0qjx2oNU=
+golang.org/x/net v0.11.0/go.mod h1:2L/ixqYpgIVXmeoSA/4Lu7BzTG4KIyPIryS4IsOd1oQ=
 golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
 golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
 golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@@ -551,8 +551,8 @@ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJ
 golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.2.0 h1:PUR+T4wwASmuSTYdKjYHI5TD22Wy5ogLU5qZCOLxBrI=
-golang.org/x/sync v0.2.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E=
+golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
 golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -603,8 +603,8 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc
 golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU=
-golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.9.0 h1:KS/R3tvhPqvJvwcKfnBHJwwthS11LRhmM5D59eEXa0s=
+golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
 golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
@@ -617,8 +617,8 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
 golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
-golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE=
-golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
+golang.org/x/text v0.10.0 h1:UpjohKhiEgNc0CSauXmwYftY1+LlaC75SJwh0SgCX58=
+golang.org/x/text v0.10.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
 golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -672,8 +672,8 @@ golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4f
 golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
 golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
 golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
-golang.org/x/tools v0.9.3 h1:Gn1I8+64MsuTb/HpH+LmQtNas23LhUVr3rYZ0eKuaMM=
-golang.org/x/tools v0.9.3/go.mod h1:owI94Op576fPu3cIGQeHs3joujW/2Oc6MtlxbF5dfNc=
+golang.org/x/tools v0.10.0 h1:tvDr/iQoUqNdohiYm0LmmKcBk+q86lb9EprIUFhHHGg=
+golang.org/x/tools v0.10.0/go.mod h1:UJwyiVBsOA2uwvK/e5OY3GTpDUJriEd+/YlqAwLPmyM=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=

View File

@@ -41,7 +41,7 @@ const (
 	// M365dateTimeTimeZoneTimeFormat is the format used by M365 for datetimetimezone resource
 	// https://learn.microsoft.com/en-us/graph/api/resources/datetimetimezone?view=graph-rest-1.0
-	M365DateTimeTimeZone TimeFormat = "2006-01-02T15:04:05.000000"
+	M365DateTimeTimeZone TimeFormat = "2006-01-02T15:04:05.0000000"
 )

 // these regexes are used to extract time formats from strings. Their primary purpose is to
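
The added zero matters because Go reference-time layouts written with explicit zeros must match the input's fractional-second digit count exactly, and Graph's datetimetimezone values carry seven digits. A standalone check (the sample timestamp is invented):

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // New layout: seven fractional digits, matching M365 output.
        t, err := time.Parse("2006-01-02T15:04:05.0000000", "2023-06-19T11:14:21.1234567")
        fmt.Println(t, err) // parses cleanly

        // The old six-digit layout rejects the same value.
        _, err = time.Parse("2006-01-02T15:04:05.000000", "2023-06-19T11:14:21.1234567")
        fmt.Println(err != nil) // true
    }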

View File

@@ -1,317 +0,0 @@
package exchange
import (
"context"
"encoding/json"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// MetadataFileNames produces the category-specific set of filenames used to
// store graph metadata such as delta tokens and folderID->path references.
func MetadataFileNames(cat path.CategoryType) []string {
switch cat {
case path.EmailCategory, path.ContactsCategory:
return []string{graph.DeltaURLsFileName, graph.PreviousPathFileName}
default:
return []string{graph.PreviousPathFileName}
}
}
type CatDeltaPaths map[path.CategoryType]DeltaPaths
type DeltaPaths map[string]DeltaPath
func (dps DeltaPaths) AddDelta(k, d string) {
dp, ok := dps[k]
if !ok {
dp = DeltaPath{}
}
dp.Delta = d
dps[k] = dp
}
func (dps DeltaPaths) AddPath(k, p string) {
dp, ok := dps[k]
if !ok {
dp = DeltaPath{}
}
dp.Path = p
dps[k] = dp
}
type DeltaPath struct {
Delta string
Path string
}
// parseMetadataCollections produces a map of structs holding delta
// and path lookup maps.
func parseMetadataCollections(
ctx context.Context,
colls []data.RestoreCollection,
) (CatDeltaPaths, bool, error) {
// cdp stores metadata
cdp := CatDeltaPaths{
path.ContactsCategory: {},
path.EmailCategory: {},
path.EventsCategory: {},
}
// found tracks the metadata we've loaded, to make sure we don't
// fetch overlapping copies.
found := map[path.CategoryType]map[string]struct{}{
path.ContactsCategory: {},
path.EmailCategory: {},
path.EventsCategory: {},
}
// errors from metadata items should not stop the backup,
// but they should prevent us from using previous backups
errs := fault.New(true)
for _, coll := range colls {
var (
breakLoop bool
items = coll.Items(ctx, errs)
category = coll.FullPath().Category()
)
for {
select {
case <-ctx.Done():
return nil, false, clues.Wrap(ctx.Err(), "parsing collection metadata").WithClues(ctx)
case item, ok := <-items:
if !ok || errs.Failure() != nil {
breakLoop = true
break
}
var (
m = map[string]string{}
cdps = cdp[category]
)
err := json.NewDecoder(item.ToReader()).Decode(&m)
if err != nil {
return nil, false, clues.Wrap(err, "decoding metadata json").WithClues(ctx)
}
switch item.UUID() {
case graph.PreviousPathFileName:
if _, ok := found[category]["path"]; ok {
return nil, false, clues.Wrap(clues.New(category.String()), "multiple versions of path metadata").WithClues(ctx)
}
for k, p := range m {
cdps.AddPath(k, p)
}
found[category]["path"] = struct{}{}
case graph.DeltaURLsFileName:
if _, ok := found[category]["delta"]; ok {
return nil, false, clues.Wrap(clues.New(category.String()), "multiple versions of delta metadata").WithClues(ctx)
}
for k, d := range m {
cdps.AddDelta(k, d)
}
found[category]["delta"] = struct{}{}
}
cdp[category] = cdps
}
if breakLoop {
break
}
}
}
if errs.Failure() != nil {
logger.CtxErr(ctx, errs.Failure()).Info("reading metadata collection items")
return CatDeltaPaths{
path.ContactsCategory: {},
path.EmailCategory: {},
path.EventsCategory: {},
}, false, nil
}
// Remove any entries that have a delta token but no previous path.
// That metadata is considered incomplete, and needs to incur a
// complete backup on the next run.
for _, dps := range cdp {
for k, dp := range dps {
if len(dp.Path) == 0 {
delete(dps, k)
}
}
}
return cdp, true, nil
}
// DataCollections returns a set of BackupCollections which the caller can
// use to read mailbox data for the specified user.
func DataCollections(
ctx context.Context,
ac api.Client,
selector selectors.Selector,
tenantID string,
user idname.Provider,
metadata []data.RestoreCollection,
su support.StatusUpdater,
ctrlOpts control.Options,
errs *fault.Bus,
) ([]data.BackupCollection, *prefixmatcher.StringSetMatcher, bool, error) {
eb, err := selector.ToExchangeBackup()
if err != nil {
return nil, nil, false, clues.Wrap(err, "exchange dataCollection selector").WithClues(ctx)
}
var (
collections = []data.BackupCollection{}
el = errs.Local()
categories = map[path.CategoryType]struct{}{}
handlers = BackupHandlers(ac)
)
// Turn on concurrency limiter middleware for exchange backups
// unless explicitly disabled through DisableConcurrencyLimiterFN cli flag
if !ctrlOpts.ToggleFeatures.DisableConcurrencyLimiter {
graph.InitializeConcurrencyLimiter(ctrlOpts.Parallelism.ItemFetch)
}
cdps, canUsePreviousBackup, err := parseMetadataCollections(ctx, metadata)
if err != nil {
return nil, nil, false, err
}
for _, scope := range eb.Scopes() {
if el.Failure() != nil {
break
}
dcs, err := createCollections(
ctx,
handlers,
tenantID,
user,
scope,
cdps[scope.Category().PathType()],
ctrlOpts,
su,
errs)
if err != nil {
el.AddRecoverable(err)
continue
}
categories[scope.Category().PathType()] = struct{}{}
collections = append(collections, dcs...)
}
if len(collections) > 0 {
baseCols, err := graph.BaseCollections(
ctx,
collections,
tenantID,
user.ID(),
path.ExchangeService,
categories,
su,
errs)
if err != nil {
return nil, nil, false, err
}
collections = append(collections, baseCols...)
}
return collections, nil, canUsePreviousBackup, el.Failure()
}
// createCollections is a utility function that retrieves M365
// IDs through the Microsoft Graph API. The selectors.ExchangeScope
// determines the type of collections that are retrieved.
func createCollections(
ctx context.Context,
handlers map[path.CategoryType]backupHandler,
tenantID string,
user idname.Provider,
scope selectors.ExchangeScope,
dps DeltaPaths,
ctrlOpts control.Options,
su support.StatusUpdater,
errs *fault.Bus,
) ([]data.BackupCollection, error) {
ctx = clues.Add(ctx, "category", scope.Category().PathType())
var (
allCollections = make([]data.BackupCollection, 0)
category = scope.Category().PathType()
qp = graph.QueryParams{
Category: category,
ResourceOwner: user,
TenantID: tenantID,
}
)
handler, ok := handlers[category]
if !ok {
return nil, clues.New("unsupported backup category type").WithClues(ctx)
}
foldersComplete := observe.MessageWithCompletion(
ctx,
observe.Bulletf("%s", qp.Category))
defer close(foldersComplete)
rootFolder, cc := handler.NewContainerCache(user.ID())
if err := cc.Populate(ctx, errs, rootFolder); err != nil {
return nil, clues.Wrap(err, "populating container cache")
}
collections, err := filterContainersAndFillCollections(
ctx,
qp,
handler,
su,
cc,
scope,
dps,
ctrlOpts,
errs)
if err != nil {
return nil, clues.Wrap(err, "filling collections")
}
foldersComplete <- struct{}{}
for _, coll := range collections {
allCollections = append(allCollections, coll)
}
return allCollections, nil
}
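// Hypothetical call site for DataCollections above; ac, sel, user, metaColls,
// and statusUpdater are placeholders rather than values defined in this file.
//
//	colls, _, canUsePrev, err := DataCollections(
//		ctx, ac, sel, tenantID, user, metaColls,
//		statusUpdater, control.Defaults(), fault.New(true))
//	if err != nil {
//		// handle the error
//	}
//	// canUsePrev == false forces a full, non-incremental backup.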

View File

@ -1,761 +0,0 @@
package exchange
import (
"bytes"
"context"
"sync"
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// ---------------------------------------------------------------------------
// Unit tests
// ---------------------------------------------------------------------------
type DataCollectionsUnitSuite struct {
tester.Suite
}
func TestDataCollectionsUnitSuite(t *testing.T) {
suite.Run(t, &DataCollectionsUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
type fileValues struct {
fileName string
value string
}
table := []struct {
name string
data []fileValues
expect map[string]DeltaPath
canUsePreviousBackup bool
expectError assert.ErrorAssertionFunc
}{
{
name: "delta urls only",
data: []fileValues{
{graph.DeltaURLsFileName, "delta-link"},
},
expect: map[string]DeltaPath{},
canUsePreviousBackup: true,
expectError: assert.NoError,
},
{
name: "multiple delta urls",
data: []fileValues{
{graph.DeltaURLsFileName, "delta-link"},
{graph.DeltaURLsFileName, "delta-link-2"},
},
canUsePreviousBackup: false,
expectError: assert.Error,
},
{
name: "previous path only",
data: []fileValues{
{graph.PreviousPathFileName, "prev-path"},
},
expect: map[string]DeltaPath{
"key": {
Delta: "delta-link",
Path: "prev-path",
},
},
canUsePreviousBackup: true,
expectError: assert.NoError,
},
{
name: "multiple previous paths",
data: []fileValues{
{graph.PreviousPathFileName, "prev-path"},
{graph.PreviousPathFileName, "prev-path-2"},
},
canUsePreviousBackup: false,
expectError: assert.Error,
},
{
name: "delta urls and previous paths",
data: []fileValues{
{graph.DeltaURLsFileName, "delta-link"},
{graph.PreviousPathFileName, "prev-path"},
},
expect: map[string]DeltaPath{
"key": {
Delta: "delta-link",
Path: "prev-path",
},
},
canUsePreviousBackup: true,
expectError: assert.NoError,
},
{
name: "delta urls and empty previous paths",
data: []fileValues{
{graph.DeltaURLsFileName, "delta-link"},
{graph.PreviousPathFileName, ""},
},
expect: map[string]DeltaPath{},
canUsePreviousBackup: true,
expectError: assert.NoError,
},
{
name: "empty delta urls and previous paths",
data: []fileValues{
{graph.DeltaURLsFileName, ""},
{graph.PreviousPathFileName, "prev-path"},
},
expect: map[string]DeltaPath{
"key": {
Delta: "delta-link",
Path: "prev-path",
},
},
canUsePreviousBackup: true,
expectError: assert.NoError,
},
{
name: "delta urls with special chars",
data: []fileValues{
{graph.DeltaURLsFileName, "`!@#$%^&*()_[]{}/\"\\"},
{graph.PreviousPathFileName, "prev-path"},
},
expect: map[string]DeltaPath{
"key": {
Delta: "`!@#$%^&*()_[]{}/\"\\",
Path: "prev-path",
},
},
canUsePreviousBackup: true,
expectError: assert.NoError,
},
{
name: "delta urls with escaped chars",
data: []fileValues{
{graph.DeltaURLsFileName, `\n\r\t\b\f\v\0\\`},
{graph.PreviousPathFileName, "prev-path"},
},
expect: map[string]DeltaPath{
"key": {
Delta: "\\n\\r\\t\\b\\f\\v\\0\\\\",
Path: "prev-path",
},
},
canUsePreviousBackup: true,
expectError: assert.NoError,
},
{
name: "delta urls with newline char runes",
data: []fileValues{
// rune(92) = \, rune(110) = n. Ensures that serializing and
// deserializing cannot collapse those two runes into a single
// newline character.
{graph.DeltaURLsFileName, string([]rune{rune(92), rune(110)})},
{graph.PreviousPathFileName, "prev-path"},
},
expect: map[string]DeltaPath{
"key": {
Delta: "\\n",
Path: "prev-path",
},
},
canUsePreviousBackup: true,
expectError: assert.NoError,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
entries := []graph.MetadataCollectionEntry{}
for _, d := range test.data {
entries = append(
entries,
graph.NewMetadataEntry(d.fileName, map[string]string{"key": d.value}))
}
coll, err := graph.MakeMetadataCollection(
"t", "u",
path.ExchangeService,
path.EmailCategory,
entries,
func(cos *support.ConnectorOperationStatus) {},
)
require.NoError(t, err, clues.ToCore(err))
cdps, canUsePreviousBackup, err := parseMetadataCollections(ctx, []data.RestoreCollection{
data.NoFetchRestoreCollection{Collection: coll},
})
test.expectError(t, err, clues.ToCore(err))
assert.Equal(t, test.canUsePreviousBackup, canUsePreviousBackup, "can use previous backup")
emails := cdps[path.EmailCategory]
assert.Len(t, emails, len(test.expect))
for k, v := range test.expect {
assert.Equal(t, v.Delta, emails[k].Delta, "delta")
assert.Equal(t, v.Path, emails[k].Path, "path")
}
})
}
}
type failingColl struct {
t *testing.T
}
func (f failingColl) Items(ctx context.Context, errs *fault.Bus) <-chan data.Stream {
ic := make(chan data.Stream)
defer close(ic)
errs.AddRecoverable(assert.AnError)
return ic
}
func (f failingColl) FullPath() path.Path {
tmp, err := path.Build(
"tenant",
"user",
path.ExchangeService,
path.EmailCategory,
false,
"inbox")
require.NoError(f.t, err, clues.ToCore(err))
return tmp
}
func (f failingColl) FetchItemByName(context.Context, string) (data.Stream, error) {
// no fetch calls will be made
return nil, nil
}
// This check ensures that read errors don't cause a hard failure, but
// still force canUsePreviousBackup to false.
func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections_ReadFailure() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
fc := failingColl{t}
_, canUsePreviousBackup, err := parseMetadataCollections(ctx, []data.RestoreCollection{fc})
require.NoError(t, err)
require.False(t, canUsePreviousBackup)
}
// ---------------------------------------------------------------------------
// Integration tests
// ---------------------------------------------------------------------------
func newStatusUpdater(t *testing.T, wg *sync.WaitGroup) func(status *support.ConnectorOperationStatus) {
updater := func(status *support.ConnectorOperationStatus) {
defer wg.Done()
}
return updater
}
type DataCollectionsIntegrationSuite struct {
tester.Suite
user string
site string
tenantID string
ac api.Client
}
func TestDataCollectionsIntegrationSuite(t *testing.T) {
suite.Run(t, &DataCollectionsIntegrationSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tester.M365AcctCredEnvs},
),
})
}
func (suite *DataCollectionsIntegrationSuite) SetupSuite() {
suite.user = tester.M365UserID(suite.T())
suite.site = tester.M365SiteID(suite.T())
acct := tester.NewM365Account(suite.T())
creds, err := acct.M365Config()
require.NoError(suite.T(), err, clues.ToCore(err))
suite.ac, err = api.NewClient(creds)
require.NoError(suite.T(), err, clues.ToCore(err))
suite.tenantID = creds.AzureTenantID
tester.LogTimeOfTest(suite.T())
}
func (suite *DataCollectionsIntegrationSuite) TestMailFetch() {
var (
userID = tester.M365UserID(suite.T())
users = []string{userID}
handlers = BackupHandlers(suite.ac)
)
tests := []struct {
name string
scope selectors.ExchangeScope
folderNames map[string]struct{}
canMakeDeltaQueries bool
}{
{
name: "Folder Iterative Check Mail",
scope: selectors.NewExchangeBackup(users).MailFolders(
[]string{DefaultMailFolder},
selectors.PrefixMatch(),
)[0],
folderNames: map[string]struct{}{
DefaultMailFolder: {},
},
canMakeDeltaQueries: true,
},
{
name: "Folder Iterative Check Mail Non-Delta",
scope: selectors.NewExchangeBackup(users).MailFolders(
[]string{DefaultMailFolder},
selectors.PrefixMatch(),
)[0],
folderNames: map[string]struct{}{
DefaultMailFolder: {},
},
canMakeDeltaQueries: false,
},
}
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
ctrlOpts := control.Defaults()
ctrlOpts.ToggleFeatures.DisableDelta = !test.canMakeDeltaQueries
collections, err := createCollections(
ctx,
handlers,
suite.tenantID,
inMock.NewProvider(userID, userID),
test.scope,
DeltaPaths{},
ctrlOpts,
func(status *support.ConnectorOperationStatus) {},
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
for _, c := range collections {
if c.FullPath().Service() == path.ExchangeMetadataService {
continue
}
require.NotEmpty(t, c.FullPath().Folder(false))
// TODO(ashmrtn): Remove when LocationPath is made part of BackupCollection
// interface.
if !assert.Implements(t, (*data.LocationPather)(nil), c) {
continue
}
loc := c.(data.LocationPather).LocationPath().String()
require.NotEmpty(t, loc)
delete(test.folderNames, loc)
}
assert.Empty(t, test.folderNames)
})
}
}
func (suite *DataCollectionsIntegrationSuite) TestDelta() {
var (
userID = tester.M365UserID(suite.T())
users = []string{userID}
handlers = BackupHandlers(suite.ac)
)
tests := []struct {
name string
scope selectors.ExchangeScope
}{
{
name: "Mail",
scope: selectors.NewExchangeBackup(users).MailFolders(
[]string{DefaultMailFolder},
selectors.PrefixMatch(),
)[0],
},
{
name: "Contacts",
scope: selectors.NewExchangeBackup(users).ContactFolders(
[]string{DefaultContactFolder},
selectors.PrefixMatch(),
)[0],
},
{
name: "Events",
scope: selectors.NewExchangeBackup(users).EventCalendars(
[]string{DefaultCalendar},
selectors.PrefixMatch(),
)[0],
},
}
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
// get collections without providing any delta history (ie: full backup)
collections, err := createCollections(
ctx,
handlers,
suite.tenantID,
inMock.NewProvider(userID, userID),
test.scope,
DeltaPaths{},
control.Defaults(),
func(status *support.ConnectorOperationStatus) {},
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
assert.Less(t, 1, len(collections), "retrieved metadata and data collections")
var metadata data.BackupCollection
for _, coll := range collections {
if coll.FullPath().Service() == path.ExchangeMetadataService {
metadata = coll
}
}
require.NotNil(t, metadata, "collections contains a metadata collection")
cdps, canUsePreviousBackup, err := parseMetadataCollections(ctx, []data.RestoreCollection{
data.NoFetchRestoreCollection{Collection: metadata},
})
require.NoError(t, err, clues.ToCore(err))
assert.True(t, canUsePreviousBackup, "can use previous backup")
dps := cdps[test.scope.Category().PathType()]
// now do another backup with the previous delta tokens,
// which should only contain the difference.
collections, err = createCollections(
ctx,
handlers,
suite.tenantID,
inMock.NewProvider(userID, userID),
test.scope,
dps,
control.Defaults(),
func(status *support.ConnectorOperationStatus) {},
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
// TODO(keepers): this isn't a very useful test at the moment. It needs to
// investigate the items in the original and delta collections to at least
// assert some minimum assumptions, such as "deltas should retrieve fewer items".
// Delta usage is commented out at the moment, anyway. So this is currently
// a sanity check that the minimum behavior won't break.
for _, coll := range collections {
if coll.FullPath().Service() != path.ExchangeMetadataService {
ec, ok := coll.(*Collection)
require.True(t, ok, "collection is *Collection")
assert.NotNil(t, ec)
}
}
})
}
}
// TestMailSerializationRegression verifies that all mail data stored in the
// test account can be successfully downloaded into bytes and restored into
// M365 mail objects
func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var (
wg sync.WaitGroup
users = []string{suite.user}
handlers = BackupHandlers(suite.ac)
)
sel := selectors.NewExchangeBackup(users)
sel.Include(sel.MailFolders([]string{DefaultMailFolder}, selectors.PrefixMatch()))
collections, err := createCollections(
ctx,
handlers,
suite.tenantID,
inMock.NewProvider(suite.user, suite.user),
sel.Scopes()[0],
DeltaPaths{},
control.Defaults(),
newStatusUpdater(t, &wg),
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
wg.Add(len(collections))
for _, edc := range collections {
suite.Run(edc.FullPath().String(), func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
isMetadata := edc.FullPath().Service() == path.ExchangeMetadataService
streamChannel := edc.Items(ctx, fault.New(true))
// Verify that each message can be restored
for stream := range streamChannel {
buf := &bytes.Buffer{}
read, err := buf.ReadFrom(stream.ToReader())
assert.NoError(t, err, clues.ToCore(err))
assert.NotZero(t, read)
if isMetadata {
continue
}
message, err := api.BytesToMessageable(buf.Bytes())
assert.NotNil(t, message)
assert.NoError(t, err, clues.ToCore(err))
}
})
}
wg.Wait()
}
// TestContactSerializationRegression verifies the ability to query contact
// items and store contacts within a Collection. Downloaded contacts are run
// through a regression test to ensure that downloaded items can be uploaded.
func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression() {
var (
users = []string{suite.user}
handlers = BackupHandlers(suite.ac)
)
tests := []struct {
name string
scope selectors.ExchangeScope
}{
{
name: "Default Contact Folder",
scope: selectors.NewExchangeBackup(users).ContactFolders(
[]string{DefaultContactFolder},
selectors.PrefixMatch())[0],
},
}
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var wg sync.WaitGroup
edcs, err := createCollections(
ctx,
handlers,
suite.tenantID,
inMock.NewProvider(suite.user, suite.user),
test.scope,
DeltaPaths{},
control.Defaults(),
newStatusUpdater(t, &wg),
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
wg.Add(len(edcs))
require.GreaterOrEqual(t, len(edcs), 1, "expected 1 <= num collections <= 2")
require.GreaterOrEqual(t, 2, len(edcs), "expected 1 <= num collections <= 2")
for _, edc := range edcs {
isMetadata := edc.FullPath().Service() == path.ExchangeMetadataService
count := 0
for stream := range edc.Items(ctx, fault.New(true)) {
buf := &bytes.Buffer{}
read, err := buf.ReadFrom(stream.ToReader())
assert.NoError(t, err, clues.ToCore(err))
assert.NotZero(t, read)
if isMetadata {
continue
}
contact, err := api.BytesToContactable(buf.Bytes())
assert.NotNil(t, contact)
assert.NoError(t, err, "converting contact bytes: "+buf.String(), clues.ToCore(err))
count++
}
if isMetadata {
continue
}
// TODO(ashmrtn): Remove when LocationPath is made part of BackupCollection
// interface.
if !assert.Implements(t, (*data.LocationPather)(nil), edc) {
continue
}
assert.Equal(
t,
edc.(data.LocationPather).LocationPath().String(),
DefaultContactFolder)
assert.NotZero(t, count)
}
wg.Wait()
})
}
}
// TestEventsSerializationRegression verifies that createCollections can
// successfully query, download, and restore event objects.
func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var (
users = []string{suite.user}
handlers = BackupHandlers(suite.ac)
calID string
bdayID string
)
fn := func(gcf graph.CachedContainer) error {
if ptr.Val(gcf.GetDisplayName()) == DefaultCalendar {
calID = ptr.Val(gcf.GetId())
}
if ptr.Val(gcf.GetDisplayName()) == "Birthdays" {
bdayID = ptr.Val(gcf.GetId())
}
return nil
}
err := suite.ac.Events().EnumerateContainers(ctx, suite.user, DefaultCalendar, fn, fault.New(true))
require.NoError(t, err, clues.ToCore(err))
tests := []struct {
name, expected string
scope selectors.ExchangeScope
}{
{
name: "Default Event Calendar",
expected: calID,
scope: selectors.NewExchangeBackup(users).EventCalendars(
[]string{DefaultCalendar},
selectors.PrefixMatch(),
)[0],
},
{
name: "Birthday Calendar",
expected: bdayID,
scope: selectors.NewExchangeBackup(users).EventCalendars(
[]string{"Birthdays"},
selectors.PrefixMatch(),
)[0],
},
}
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var wg sync.WaitGroup
collections, err := createCollections(
ctx,
handlers,
suite.tenantID,
inMock.NewProvider(suite.user, suite.user),
test.scope,
DeltaPaths{},
control.Defaults(),
newStatusUpdater(t, &wg),
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
require.Len(t, collections, 2)
wg.Add(len(collections))
for _, edc := range collections {
var isMetadata bool
if edc.FullPath().Service() != path.ExchangeMetadataService {
assert.Equal(t, test.expected, edc.FullPath().Folder(false))
} else {
isMetadata = true
assert.Equal(t, "", edc.FullPath().Folder(false))
}
for item := range edc.Items(ctx, fault.New(true)) {
buf := &bytes.Buffer{}
read, err := buf.ReadFrom(item.ToReader())
assert.NoError(t, err, clues.ToCore(err))
assert.NotZero(t, read)
if isMetadata {
continue
}
event, err := api.BytesToEventable(buf.Bytes())
assert.NotNil(t, event)
assert.NoError(t, err, "creating event from bytes: "+buf.String(), clues.ToCore(err))
}
}
wg.Wait()
})
}
}

View File

@ -1,129 +0,0 @@
package exchange
import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type CacheResolverSuite struct {
tester.Suite
credentials account.M365Config
}
func TestCacheResolverIntegrationSuite(t *testing.T) {
suite.Run(t, &CacheResolverSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tester.M365AcctCredEnvs},
),
})
}
func (suite *CacheResolverSuite) SetupSuite() {
t := suite.T()
a := tester.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.credentials = m365
}
func (suite *CacheResolverSuite) TestPopulate() {
ac, err := api.NewClient(suite.credentials)
require.NoError(suite.T(), err, clues.ToCore(err))
eventFunc := func(t *testing.T) graph.ContainerResolver {
return &eventCalendarCache{
userID: tester.M365UserID(t),
enumer: ac.Events(),
getter: ac.Events(),
}
}
contactFunc := func(t *testing.T) graph.ContainerResolver {
return &contactFolderCache{
userID: tester.M365UserID(t),
enumer: ac.Contacts(),
getter: ac.Contacts(),
}
}
tests := []struct {
name, folderInCache, root, basePath string
resolverFunc func(t *testing.T) graph.ContainerResolver
canFind assert.BoolAssertionFunc
}{
{
name: "Default Event Cache",
// Fine as long as this isn't running against a migrated Exchange server.
folderInCache: DefaultCalendar,
root: DefaultCalendar,
basePath: DefaultCalendar,
resolverFunc: eventFunc,
canFind: assert.True,
},
{
name: "Default Event Folder Hidden",
folderInCache: DefaultContactFolder,
root: DefaultCalendar,
canFind: assert.False,
resolverFunc: eventFunc,
},
{
name: "Name Not in Cache",
folderInCache: "testFooBarWhoBar",
root: DefaultCalendar,
canFind: assert.False,
resolverFunc: eventFunc,
},
{
name: "Default Contact Cache",
folderInCache: DefaultContactFolder,
root: DefaultContactFolder,
basePath: DefaultContactFolder,
canFind: assert.True,
resolverFunc: contactFunc,
},
{
name: "Default Contact Hidden",
folderInCache: DefaultContactFolder,
root: DefaultContactFolder,
canFind: assert.False,
resolverFunc: contactFunc,
},
{
name: "Name Not in Cache",
folderInCache: "testFooBarWhoBar",
root: DefaultContactFolder,
canFind: assert.False,
resolverFunc: contactFunc,
},
}
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
resolver := test.resolverFunc(t)
err := resolver.Populate(ctx, fault.New(true), test.root, test.basePath)
require.NoError(t, err, clues.ToCore(err))
_, isFound := resolver.LocationInCache(test.folderInCache)
test.canFind(t, isFound, "folder path", test.folderInCache)
})
}
}

View File

@ -1,47 +0,0 @@
package exchange
import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type ExchangeIteratorSuite struct {
tester.Suite
}
func TestExchangeIteratorSuite(t *testing.T) {
suite.Run(t, &ExchangeIteratorSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *ExchangeIteratorSuite) TestDisplayable() {
t := suite.T()
bytes := exchMock.ContactBytes("Displayable")
contact, err := api.BytesToContactable(bytes)
require.NoError(t, err, clues.ToCore(err))
aDisplayable, ok := contact.(graph.Displayable)
assert.True(t, ok)
assert.NotNil(t, aDisplayable.GetId())
assert.NotNil(t, aDisplayable.GetDisplayName())
}
func (suite *ExchangeIteratorSuite) TestDescendable() {
t := suite.T()
bytes := exchMock.MessageBytes("Descendable")
message, err := api.BytesToMessageable(bytes)
require.NoError(t, err, clues.ToCore(err))
aDescendable, ok := message.(graph.Descendable)
assert.True(t, ok)
assert.NotNil(t, aDescendable.GetId())
assert.NotNil(t, aDescendable.GetParentFolderId())
}

View File

@ -1,181 +0,0 @@
package connector
import (
"sync"
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/selectors"
selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
)
// ---------------------------------------------------------------
// Disconnected Test Section
// ---------------------------------------------------------------
type DisconnectedGraphConnectorSuite struct {
tester.Suite
}
func TestDisconnectedGraphSuite(t *testing.T) {
s := &DisconnectedGraphConnectorSuite{
Suite: tester.NewUnitSuite(t),
}
suite.Run(t, s)
}
func statusTestTask(
t *testing.T,
gc *GraphConnector,
objects, success, folder int,
) {
ctx, flush := tester.NewContext(t)
defer flush()
status := support.CreateStatus(
ctx,
support.Restore, folder,
support.CollectionMetrics{
Objects: objects,
Successes: success,
Bytes: 0,
},
"statusTestTask")
gc.UpdateStatus(status)
}
func (suite *DisconnectedGraphConnectorSuite) TestGraphConnector_Status() {
t := suite.T()
gc := GraphConnector{wg: &sync.WaitGroup{}}
// Two tasks
gc.incrementAwaitingMessages()
gc.incrementAwaitingMessages()
// Each helper task processes 4 objects, 1 success, 3 errors, 1 folder
go statusTestTask(t, &gc, 4, 1, 1)
go statusTestTask(t, &gc, 4, 1, 1)
stats := gc.Wait()
assert.NotEmpty(t, gc.PrintableStatus())
// Expect 8 objects
assert.Equal(t, 8, stats.Objects)
// Expect 2 success
assert.Equal(t, 2, stats.Successes)
// Expect 2 folders
assert.Equal(t, 2, stats.Folders)
}
func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs_allServices() {
sites := []string{"abc.site.foo", "bar.site.baz"}
tests := []struct {
name string
excludes func(t *testing.T) selectors.Selector
filters func(t *testing.T) selectors.Selector
includes func(t *testing.T) selectors.Selector
checkError assert.ErrorAssertionFunc
}{
{
name: "Valid User",
checkError: assert.NoError,
excludes: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup([]string{"elliotReid@someHospital.org", "foo@SomeCompany.org"})
sel.Exclude(selTD.OneDriveBackupFolderScope(sel))
sel.DiscreteOwner = "elliotReid@someHospital.org"
return sel.Selector
},
filters: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup([]string{"elliotReid@someHospital.org", "foo@SomeCompany.org"})
sel.Filter(selTD.OneDriveBackupFolderScope(sel))
sel.DiscreteOwner = "elliotReid@someHospital.org"
return sel.Selector
},
includes: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup([]string{"elliotReid@someHospital.org", "foo@SomeCompany.org"})
sel.Include(selTD.OneDriveBackupFolderScope(sel))
sel.DiscreteOwner = "elliotReid@someHospital.org"
return sel.Selector
},
},
{
name: "Invalid User",
checkError: assert.NoError,
excludes: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup([]string{"foo@SomeCompany.org"})
sel.Exclude(selTD.OneDriveBackupFolderScope(sel))
return sel.Selector
},
filters: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup([]string{"foo@SomeCompany.org"})
sel.Filter(selTD.OneDriveBackupFolderScope(sel))
return sel.Selector
},
includes: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup([]string{"foo@SomeCompany.org"})
sel.Include(selTD.OneDriveBackupFolderScope(sel))
return sel.Selector
},
},
{
name: "valid sites",
checkError: assert.NoError,
excludes: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup([]string{"abc.site.foo", "bar.site.baz"})
sel.DiscreteOwner = "abc.site.foo"
sel.Exclude(sel.AllData())
return sel.Selector
},
filters: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup([]string{"abc.site.foo", "bar.site.baz"})
sel.DiscreteOwner = "abc.site.foo"
sel.Filter(sel.AllData())
return sel.Selector
},
includes: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup([]string{"abc.site.foo", "bar.site.baz"})
sel.DiscreteOwner = "abc.site.foo"
sel.Include(sel.AllData())
return sel.Selector
},
},
{
name: "invalid sites",
checkError: assert.Error,
excludes: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup([]string{"fnords.smarfs.brawnhilda"})
sel.Exclude(sel.AllData())
return sel.Selector
},
filters: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup([]string{"fnords.smarfs.brawnhilda"})
sel.Filter(sel.AllData())
return sel.Selector
},
includes: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup([]string{"fnords.smarfs.brawnhilda"})
sel.Include(sel.AllData())
return sel.Selector
},
},
}
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
err := verifyBackupInputs(test.excludes(t), sites)
test.checkError(t, err, clues.ToCore(err))
err = verifyBackupInputs(test.filters(t), sites)
test.checkError(t, err, clues.ToCore(err))
err = verifyBackupInputs(test.includes(t), sites)
test.checkError(t, err, clues.ToCore(err))
})
}
}

View File

@ -1,154 +0,0 @@
package onedrive
import (
"net/http"
"sync"
"testing"
"time"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type URLCacheIntegrationSuite struct {
tester.Suite
ac api.Client
user string
driveID string
}
func TestURLCacheIntegrationSuite(t *testing.T) {
suite.Run(t, &URLCacheIntegrationSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tester.M365AcctCredEnvs}),
})
}
func (suite *URLCacheIntegrationSuite) SetupSuite() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.user = tester.SecondaryM365UserID(t)
acct := tester.NewM365Account(t)
creds, err := acct.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.ac, err = api.NewClient(creds)
require.NoError(t, err, clues.ToCore(err))
drive, err := suite.ac.Users().GetDefaultDrive(ctx, suite.user)
require.NoError(t, err, clues.ToCore(err))
suite.driveID = ptr.Val(drive.GetId())
}
// Basic test for urlCache. Create some files in OneDrive, then access them
// via the URL cache.
func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
var (
t = suite.T()
ac = suite.ac.Drives()
driveID = suite.driveID
newFolderName = tester.DefaultTestRestoreDestination("folder").ContainerName
driveItemPager = suite.ac.Drives().NewItemPager(driveID, "", api.DriveItemSelectDefault())
)
ctx, flush := tester.NewContext(t)
defer flush()
// Create a new test folder
root, err := ac.GetRootFolder(ctx, driveID)
require.NoError(t, err, clues.ToCore(err))
newFolder, err := ac.PostItemInContainer(
ctx,
driveID,
ptr.Val(root.GetId()),
newItem(newFolderName, true))
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, newFolder.GetId())
nfid := ptr.Val(newFolder.GetId())
// Create a bunch of files in the new folder
var items []models.DriveItemable
for i := 0; i < 10; i++ {
newItemName := "test_url_cache_basic_" + dttm.FormatNow(dttm.SafeForTesting)
item, err := ac.PostItemInContainer(
ctx,
driveID,
nfid,
newItem(newItemName, false))
if err != nil {
// Something bad happened, skip this item
continue
}
items = append(items, item)
}
// Create a new URL cache with a long TTL
cache, err := newURLCache(
suite.driveID,
1*time.Hour,
driveItemPager,
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
err = cache.refreshCache(ctx)
require.NoError(t, err, clues.ToCore(err))
// Launch parallel requests to the cache, one per item
var wg sync.WaitGroup
for i := 0; i < len(items); i++ {
wg.Add(1)
go func(i int) {
defer wg.Done()
// Read item from URL cache
props, err := cache.getItemProperties(
ctx,
ptr.Val(items[i].GetId()))
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, props)
require.NotEmpty(t, props.downloadURL)
require.Equal(t, false, props.isDeleted)
// Validate download URL
c := graph.NewNoTimeoutHTTPWrapper()
resp, err := c.Request(
ctx,
http.MethodGet,
props.downloadURL,
nil,
nil)
require.NoError(t, err, clues.ToCore(err))
require.Equal(t, http.StatusOK, resp.StatusCode)
}(i)
}
wg.Wait()
// Validate that at most one delta query was made
require.LessOrEqual(t, cache.deltaQueryCount, 1)
}

View File

@ -1,28 +0,0 @@
package sharepoint
import (
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/pkg/backup/details"
)
// sharePointListInfo translates models.Listable metadata into searchable content
// List Details: https://learn.microsoft.com/en-us/graph/api/resources/list?view=graph-rest-1.0
func sharePointListInfo(lst models.Listable, size int64) *details.SharePointInfo {
var (
name = ptr.Val(lst.GetDisplayName())
webURL = ptr.Val(lst.GetWebUrl())
created = ptr.Val(lst.GetCreatedDateTime())
modified = ptr.Val(lst.GetLastModifiedDateTime())
)
return &details.SharePointInfo{
ItemType: details.SharePointList,
ItemName: name,
Created: created,
Modified: modified,
WebURL: webURL,
Size: size,
}
}
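// Usage sketch mirroring the unit test below ("Announcements" is a made-up
// display name):
//
//	name := "Announcements"
//	lst := models.NewList()
//	lst.SetDisplayName(&name)
//	info := sharePointListInfo(lst, 1024)
//	// info.ItemType == details.SharePointList; info.ItemName == "Announcements"; info.Size == 1024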

View File

@ -1,59 +0,0 @@
package sharepoint
import (
"testing"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details"
)
type SharePointInfoSuite struct {
tester.Suite
}
func TestSharePointInfoSuite(t *testing.T) {
suite.Run(t, &SharePointInfoSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *SharePointInfoSuite) TestSharePointInfo() {
tests := []struct {
name string
listAndDeets func() (models.Listable, *details.SharePointInfo)
}{
{
name: "Empty List",
listAndDeets: func() (models.Listable, *details.SharePointInfo) {
i := &details.SharePointInfo{ItemType: details.SharePointList}
return models.NewList(), i
},
}, {
name: "Only Name",
listAndDeets: func() (models.Listable, *details.SharePointInfo) {
aTitle := "Whole List"
listing := models.NewList()
listing.SetDisplayName(&aTitle)
i := &details.SharePointInfo{
ItemType: details.SharePointList,
ItemName: aTitle,
}
return listing, i
},
},
}
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
list, expected := test.listAndDeets()
info := sharePointListInfo(list, 10)
assert.Equal(t, expected.ItemType, info.ItemType)
assert.Equal(t, expected.ItemName, info.ItemName)
assert.Equal(t, expected.WebURL, info.WebURL)
})
}
}

View File

@ -52,7 +52,9 @@ const (
Service = "service" Service = "service"
StartTime = "start_time" StartTime = "start_time"
Status = "status" Status = "status"
RepoID = "not_found"
// default values for keys
RepoIDNotFound = "not_found"
) )
const ( const (

View File

@ -0,0 +1,387 @@
package kopia
import (
"context"
"github.com/alcionai/clues"
"github.com/kopia/kopia/repo/manifest"
"golang.org/x/exp/slices"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/logger"
)
// TODO(ashmrtn): Move this into some inject package. Here to avoid import
// cycles.
type BackupBases interface {
RemoveMergeBaseByManifestID(manifestID manifest.ID)
Backups() []BackupEntry
MinBackupVersion() int
MergeBases() []ManifestEntry
ClearMergeBases()
AssistBases() []ManifestEntry
ClearAssistBases()
MergeBackupBases(
ctx context.Context,
other BackupBases,
reasonToKey func(Reason) string,
) BackupBases
}
type backupBases struct {
// backups and mergeBases should be modified together as they relate similar
// data.
backups []BackupEntry
mergeBases []ManifestEntry
assistBases []ManifestEntry
}
func (bb *backupBases) RemoveMergeBaseByManifestID(manifestID manifest.ID) {
idx := slices.IndexFunc(
bb.mergeBases,
func(man ManifestEntry) bool {
return man.ID == manifestID
})
if idx >= 0 {
bb.mergeBases = slices.Delete(bb.mergeBases, idx, idx+1)
}
// TODO(ashmrtn): This may not be strictly necessary but is at least easier to
// reason about.
idx = slices.IndexFunc(
bb.assistBases,
func(man ManifestEntry) bool {
return man.ID == manifestID
})
if idx >= 0 {
bb.assistBases = slices.Delete(bb.assistBases, idx, idx+1)
}
idx = slices.IndexFunc(
bb.backups,
func(bup BackupEntry) bool {
return bup.SnapshotID == string(manifestID)
})
if idx >= 0 {
bb.backups = slices.Delete(bb.backups, idx, idx+1)
}
}
func (bb backupBases) Backups() []BackupEntry {
return slices.Clone(bb.backups)
}
func (bb *backupBases) MinBackupVersion() int {
min := version.NoBackup
if bb == nil {
return min
}
for _, bup := range bb.backups {
if min == version.NoBackup || bup.Version < min {
min = bup.Version
}
}
return min
}
func (bb backupBases) MergeBases() []ManifestEntry {
return slices.Clone(bb.mergeBases)
}
func (bb *backupBases) ClearMergeBases() {
bb.mergeBases = nil
bb.backups = nil
}
func (bb backupBases) AssistBases() []ManifestEntry {
return slices.Clone(bb.assistBases)
}
func (bb *backupBases) ClearAssistBases() {
bb.assistBases = nil
}
// MergeBackupBases reduces the two BackupBases into a single BackupBases.
// Assumes the passed in BackupBases represents a prior backup version (across
// some migration that disrupts lookup), and that the BackupBases used to call
// this function contains the current version.
//
// reasonToKey should be a function that, given a Reason, will produce some
// string that represents Reason in the context of the merge operation. For
// example, to merge BackupBases across a ResourceOwner migration, the Reason's
// service and category can be used as the key.
//
// Selection priority, for each reason key generated by reasonToKey, follows
// these rules:
// 1. If the receiver BackupBases has an entry for a given reason, ignore the
// other BackupBases matching that reason.
// 2. If the receiver BackupBases has only AssistBases, look for a matching
// MergeBase manifest in the passed in BackupBases.
// 3. If the receiver BackupBases has no entry for a reason, look for both
// AssistBases and MergeBases in the passed in BackupBases.
func (bb *backupBases) MergeBackupBases(
ctx context.Context,
other BackupBases,
reasonToKey func(reason Reason) string,
) BackupBases {
if other == nil || (len(other.MergeBases()) == 0 && len(other.AssistBases()) == 0) {
return bb
}
if bb == nil || (len(bb.MergeBases()) == 0 && len(bb.AssistBases()) == 0) {
return other
}
toMerge := map[string]struct{}{}
assist := map[string]struct{}{}
// Track the bases in bb.
for _, m := range bb.mergeBases {
for _, r := range m.Reasons {
k := reasonToKey(r)
toMerge[k] = struct{}{}
assist[k] = struct{}{}
}
}
for _, m := range bb.assistBases {
for _, r := range m.Reasons {
k := reasonToKey(r)
assist[k] = struct{}{}
}
}
var toAdd []ManifestEntry
// Calculate the set of mergeBases to pull from other into this one.
for _, m := range other.MergeBases() {
useReasons := []Reason{}
for _, r := range m.Reasons {
k := reasonToKey(r)
if _, ok := toMerge[k]; ok {
// Assume other contains prior manifest versions.
// We don't want to stack a prior-version incomplete snapshot onto
// a current version's complete snapshot.
continue
}
useReasons = append(useReasons, r)
}
if len(useReasons) > 0 {
m.Reasons = useReasons
toAdd = append(toAdd, m)
}
}
res := &backupBases{
backups: bb.Backups(),
mergeBases: bb.MergeBases(),
assistBases: bb.AssistBases(),
}
// Add new mergeBases and backups.
for _, man := range toAdd {
// GetTag returns an empty string if the tag is missing; that's fine,
// since the backup lookup below will reject it.
bID, _ := man.GetTag(TagBackupID)
bup, ok := getBackupByID(other.Backups(), bID)
if !ok {
logger.Ctx(ctx).Infow(
"not unioning snapshot missing backup",
"other_manifest_id", man.ID,
"other_backup_id", bID)
continue
}
bup.Reasons = man.Reasons
res.backups = append(res.backups, bup)
res.mergeBases = append(res.mergeBases, man)
res.assistBases = append(res.assistBases, man)
}
// Add assistBases from other to this one as needed.
for _, m := range other.AssistBases() {
useReasons := []Reason{}
// Assume that all complete manifests in assist overlap with MergeBases.
if len(m.IncompleteReason) == 0 {
continue
}
for _, r := range m.Reasons {
k := reasonToKey(r)
if _, ok := assist[k]; ok {
// This reason is already covered by either:
// * complete manifest in bb
// * incomplete manifest in bb
//
// If it was already in the assist set then it must be the case that
// it's newer than any complete manifests in other for the same reason.
continue
}
useReasons = append(useReasons, r)
}
if len(useReasons) > 0 {
m.Reasons = useReasons
res.assistBases = append(res.assistBases, m)
}
}
return res
}
func findNonUniqueManifests(
ctx context.Context,
manifests []ManifestEntry,
) map[manifest.ID]struct{} {
// ReasonKey -> manifests with that reason.
reasons := map[string][]ManifestEntry{}
toDrop := map[manifest.ID]struct{}{}
for _, man := range manifests {
// Incomplete snapshots are used only for kopia-assisted incrementals. The
// fact that we need this check here makes it seem like this should live in
// the kopia code. However, keeping it here allows for better debugging as
// the kopia code only has access to a path builder which means it cannot
// remove the resource owner from the error/log output. That is also below
// the point where we decide if we should do a full backup or an incremental.
if len(man.IncompleteReason) > 0 {
logger.Ctx(ctx).Infow(
"dropping incomplete manifest",
"manifest_id", man.ID)
toDrop[man.ID] = struct{}{}
continue
}
for _, reason := range man.Reasons {
reasonKey := reason.ResourceOwner + reason.Service.String() + reason.Category.String()
reasons[reasonKey] = append(reasons[reasonKey], man)
}
}
for reason, mans := range reasons {
ictx := clues.Add(ctx, "reason", reason)
if len(mans) == 0 {
// Not sure how this would happen but just in case...
continue
} else if len(mans) > 1 {
mIDs := make([]manifest.ID, 0, len(mans))
for _, m := range mans {
toDrop[m.ID] = struct{}{}
mIDs = append(mIDs, m.ID)
}
// TODO(ashmrtn): We should actually just remove this reason from the
// manifests and then if they have no reasons remaining drop them from the
// set.
logger.Ctx(ictx).Infow(
"dropping manifests with duplicate reason",
"manifest_ids", mIDs)
continue
}
}
return toDrop
}
func getBackupByID(backups []BackupEntry, bID string) (BackupEntry, bool) {
if len(bID) == 0 {
return BackupEntry{}, false
}
idx := slices.IndexFunc(backups, func(b BackupEntry) bool {
return string(b.ID) == bID
})
if idx < 0 || idx >= len(backups) {
return BackupEntry{}, false
}
return backups[idx], true
}
// fixupAndVerify goes through the set of backups and snapshots used for merging
// and ensures:
// - the reasons for selecting merge snapshots are distinct
// - all bases used for merging have a backup model with an item snapshot ID
// and a details (stream store) ID
//
// Backups that have overlapping reasons or that are not complete are removed
// from the set. Dropping these is safe because it only affects how much data we
// pull. On the other hand, *not* dropping them is unsafe, as it would corrupt
// merging when we write items to kopia (possibly producing multiple entries
// for the same item, etc.).
func (bb *backupBases) fixupAndVerify(ctx context.Context) {
toDrop := findNonUniqueManifests(ctx, bb.mergeBases)
var (
backupsToKeep []BackupEntry
mergeToKeep []ManifestEntry
)
for _, man := range bb.mergeBases {
if _, ok := toDrop[man.ID]; ok {
continue
}
bID, _ := man.GetTag(TagBackupID)
bup, ok := getBackupByID(bb.backups, bID)
if !ok {
toDrop[man.ID] = struct{}{}
logger.Ctx(ctx).Infow(
"dropping manifest due to missing backup",
"manifest_id", man.ID)
continue
}
deetsID := bup.StreamStoreID
if len(deetsID) == 0 {
deetsID = bup.DetailsID
}
if len(bup.SnapshotID) == 0 || len(deetsID) == 0 {
toDrop[man.ID] = struct{}{}
logger.Ctx(ctx).Infow(
"dropping manifest due to invalid backup",
"manifest_id", man.ID)
continue
}
backupsToKeep = append(backupsToKeep, bup)
mergeToKeep = append(mergeToKeep, man)
}
var assistToKeep []ManifestEntry
for _, man := range bb.assistBases {
if _, ok := toDrop[man.ID]; ok {
continue
}
assistToKeep = append(assistToKeep, man)
}
bb.backups = backupsToKeep
bb.mergeBases = mergeToKeep
bb.assistBases = assistToKeep
}
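// Minimal sketch of MergeBackupBases across a resource-owner migration,
// mirroring the unit test below; current and prior are placeholder BackupBases
// values. Keying on service+category deliberately omits the resource owner
// from the reason key.
//
//	merged := current.MergeBackupBases(
//		ctx,
//		prior,
//		func(r Reason) string {
//			return r.Service.String() + r.Category.String()
//		})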

View File

@ -0,0 +1,705 @@
package kopia
import (
"fmt"
"testing"
"github.com/kopia/kopia/repo/manifest"
"github.com/kopia/kopia/snapshot"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/path"
)
func makeManifest(id, incmpl, bID string, reasons ...Reason) ManifestEntry {
bIDKey, _ := makeTagKV(TagBackupID)
return ManifestEntry{
Manifest: &snapshot.Manifest{
ID: manifest.ID(id),
IncompleteReason: incmpl,
Tags: map[string]string{bIDKey: bID},
},
Reasons: reasons,
}
}
type BackupBasesUnitSuite struct {
tester.Suite
}
func TestBackupBasesUnitSuite(t *testing.T) {
suite.Run(t, &BackupBasesUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *BackupBasesUnitSuite) TestMinBackupVersion() {
table := []struct {
name string
bb *backupBases
expectedVersion int
}{
{
name: "Nil BackupBase",
expectedVersion: version.NoBackup,
},
{
name: "No Backups",
bb: &backupBases{},
expectedVersion: version.NoBackup,
},
{
name: "Unsorted Backups",
bb: &backupBases{
backups: []BackupEntry{
{
Backup: &backup.Backup{
Version: 4,
},
},
{
Backup: &backup.Backup{
Version: 0,
},
},
{
Backup: &backup.Backup{
Version: 2,
},
},
},
},
expectedVersion: 0,
},
}
for _, test := range table {
suite.Run(test.name, func() {
assert.Equal(suite.T(), test.expectedVersion, test.bb.MinBackupVersion())
})
}
}
func (suite *BackupBasesUnitSuite) TestRemoveMergeBaseByManifestID() {
backups := []BackupEntry{
{Backup: &backup.Backup{SnapshotID: "1"}},
{Backup: &backup.Backup{SnapshotID: "2"}},
{Backup: &backup.Backup{SnapshotID: "3"}},
}
merges := []ManifestEntry{
makeManifest("1", "", ""),
makeManifest("2", "", ""),
makeManifest("3", "", ""),
}
expected := &backupBases{
backups: []BackupEntry{backups[0], backups[1]},
mergeBases: []ManifestEntry{merges[0], merges[1]},
assistBases: []ManifestEntry{merges[0], merges[1]},
}
delID := manifest.ID("3")
table := []struct {
name string
// Below indices specify which items to add from the defined sets above.
backup []int
merge []int
assist []int
}{
{
name: "Not In Bases",
backup: []int{0, 1},
merge: []int{0, 1},
assist: []int{0, 1},
},
{
name: "Different Indexes",
backup: []int{2, 0, 1},
merge: []int{0, 2, 1},
assist: []int{0, 1, 2},
},
{
name: "First Item",
backup: []int{2, 0, 1},
merge: []int{2, 0, 1},
assist: []int{2, 0, 1},
},
{
name: "Middle Item",
backup: []int{0, 2, 1},
merge: []int{0, 2, 1},
assist: []int{0, 2, 1},
},
{
name: "Final Item",
backup: []int{0, 1, 2},
merge: []int{0, 1, 2},
assist: []int{0, 1, 2},
},
{
name: "Only In Backups",
backup: []int{0, 1, 2},
merge: []int{0, 1},
assist: []int{0, 1},
},
{
name: "Only In Merges",
backup: []int{0, 1},
merge: []int{0, 1, 2},
assist: []int{0, 1},
},
{
name: "Only In Assists",
backup: []int{0, 1},
merge: []int{0, 1},
assist: []int{0, 1, 2},
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
bb := &backupBases{}
for _, i := range test.backup {
bb.backups = append(bb.backups, backups[i])
}
for _, i := range test.merge {
bb.mergeBases = append(bb.mergeBases, merges[i])
}
for _, i := range test.assist {
bb.assistBases = append(bb.assistBases, merges[i])
}
bb.RemoveMergeBaseByManifestID(delID)
AssertBackupBasesEqual(t, expected, bb)
})
}
}
func (suite *BackupBasesUnitSuite) TestClearMergeBases() {
bb := &backupBases{
backups: make([]BackupEntry, 2),
mergeBases: make([]ManifestEntry, 2),
}
bb.ClearMergeBases()
assert.Empty(suite.T(), bb.Backups())
assert.Empty(suite.T(), bb.MergeBases())
}
func (suite *BackupBasesUnitSuite) TestClearAssistBases() {
bb := &backupBases{assistBases: make([]ManifestEntry, 2)}
bb.ClearAssistBases()
assert.Empty(suite.T(), bb.AssistBases())
}
func (suite *BackupBasesUnitSuite) TestMergeBackupBases() {
ro := "resource_owner"
type testInput struct {
id int
incomplete bool
cat []path.CategoryType
}
// Make a function so tests can modify things without messing with each other.
makeBackupBases := func(ti []testInput) *backupBases {
res := &backupBases{}
for _, i := range ti {
baseID := fmt.Sprintf("id%d", i.id)
ir := ""
if i.incomplete {
ir = "checkpoint"
}
reasons := make([]Reason, 0, len(i.cat))
for _, c := range i.cat {
reasons = append(reasons, Reason{
ResourceOwner: ro,
Service: path.ExchangeService,
Category: c,
})
}
m := makeManifest(baseID, ir, "b"+baseID, reasons...)
res.assistBases = append(res.assistBases, m)
if i.incomplete {
continue
}
b := BackupEntry{
Backup: &backup.Backup{
BaseModel: model.BaseModel{ID: model.StableID("b" + baseID)},
SnapshotID: baseID,
StreamStoreID: "ss" + baseID,
},
Reasons: reasons,
}
res.backups = append(res.backups, b)
res.mergeBases = append(res.mergeBases, m)
}
return res
}
table := []struct {
name string
bb []testInput
other []testInput
expect []testInput
}{
{
name: "Other Empty",
bb: []testInput{
{cat: []path.CategoryType{path.EmailCategory}},
},
expect: []testInput{
{cat: []path.CategoryType{path.EmailCategory}},
},
},
{
name: "BB Empty",
other: []testInput{
{cat: []path.CategoryType{path.EmailCategory}},
},
expect: []testInput{
{cat: []path.CategoryType{path.EmailCategory}},
},
},
{
name: "Other overlaps Complete And Incomplete",
bb: []testInput{
{cat: []path.CategoryType{path.EmailCategory}},
{
id: 1,
cat: []path.CategoryType{path.EmailCategory},
incomplete: true,
},
},
other: []testInput{
{
id: 2,
cat: []path.CategoryType{path.EmailCategory},
},
{
id: 3,
cat: []path.CategoryType{path.EmailCategory},
incomplete: true,
},
},
expect: []testInput{
{cat: []path.CategoryType{path.EmailCategory}},
{
id: 1,
cat: []path.CategoryType{path.EmailCategory},
incomplete: true,
},
},
},
{
name: "Other Overlaps Complete",
bb: []testInput{
{cat: []path.CategoryType{path.EmailCategory}},
},
other: []testInput{
{
id: 2,
cat: []path.CategoryType{path.EmailCategory},
},
},
expect: []testInput{
{cat: []path.CategoryType{path.EmailCategory}},
},
},
{
name: "Other Overlaps Incomplete",
bb: []testInput{
{
id: 1,
cat: []path.CategoryType{path.EmailCategory},
incomplete: true,
},
},
other: []testInput{
{
id: 2,
cat: []path.CategoryType{path.EmailCategory},
},
{
id: 3,
cat: []path.CategoryType{path.EmailCategory},
incomplete: true,
},
},
expect: []testInput{
{
id: 1,
cat: []path.CategoryType{path.EmailCategory},
incomplete: true,
},
{
id: 2,
cat: []path.CategoryType{path.EmailCategory},
},
},
},
{
name: "Other Disjoint",
bb: []testInput{
{cat: []path.CategoryType{path.EmailCategory}},
{
id: 1,
cat: []path.CategoryType{path.EmailCategory},
incomplete: true,
},
},
other: []testInput{
{
id: 2,
cat: []path.CategoryType{path.ContactsCategory},
},
{
id: 3,
cat: []path.CategoryType{path.ContactsCategory},
incomplete: true,
},
},
expect: []testInput{
{cat: []path.CategoryType{path.EmailCategory}},
{
id: 1,
cat: []path.CategoryType{path.EmailCategory},
incomplete: true,
},
{
id: 2,
cat: []path.CategoryType{path.ContactsCategory},
},
{
id: 3,
cat: []path.CategoryType{path.ContactsCategory},
incomplete: true,
},
},
},
{
name: "Other Reduced Reasons",
bb: []testInput{
{cat: []path.CategoryType{path.EmailCategory}},
{
id: 1,
cat: []path.CategoryType{path.EmailCategory},
incomplete: true,
},
},
other: []testInput{
{
id: 2,
cat: []path.CategoryType{
path.EmailCategory,
path.ContactsCategory,
},
},
{
id: 3,
cat: []path.CategoryType{
path.EmailCategory,
path.ContactsCategory,
},
incomplete: true,
},
},
expect: []testInput{
{cat: []path.CategoryType{path.EmailCategory}},
{
id: 1,
cat: []path.CategoryType{path.EmailCategory},
incomplete: true,
},
{
id: 2,
cat: []path.CategoryType{path.ContactsCategory},
},
{
id: 3,
cat: []path.CategoryType{path.ContactsCategory},
incomplete: true,
},
},
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
bb := makeBackupBases(test.bb)
other := makeBackupBases(test.other)
expect := makeBackupBases(test.expect)
ctx, flush := tester.NewContext(t)
defer flush()
got := bb.MergeBackupBases(
ctx,
other,
func(reason Reason) string {
return reason.Service.String() + reason.Category.String()
})
AssertBackupBasesEqual(t, expect, got)
})
}
}
func (suite *BackupBasesUnitSuite) TestFixupAndVerify() {
ro := "resource_owner"
makeMan := func(pct path.CategoryType, id, incmpl, bID string) ManifestEntry {
reason := Reason{
ResourceOwner: ro,
Service: path.ExchangeService,
Category: pct,
}
return makeManifest(id, incmpl, bID, reason)
}
// Make a function so tests can modify things without messing with each other.
validMail1 := func() *backupBases {
return &backupBases{
backups: []BackupEntry{
{
Backup: &backup.Backup{
BaseModel: model.BaseModel{
ID: "bid1",
},
SnapshotID: "id1",
StreamStoreID: "ssid1",
},
},
},
mergeBases: []ManifestEntry{
makeMan(path.EmailCategory, "id1", "", "bid1"),
},
assistBases: []ManifestEntry{
makeMan(path.EmailCategory, "id1", "", "bid1"),
},
}
}
table := []struct {
name string
bb *backupBases
expect BackupBases
}{
{
name: "empty BaseBackups",
bb: &backupBases{},
},
{
name: "Merge Base Without Backup",
bb: func() *backupBases {
res := validMail1()
res.backups = nil
return res
}(),
},
{
name: "Backup Missing Snapshot ID",
bb: func() *backupBases {
res := validMail1()
res.backups[0].SnapshotID = ""
return res
}(),
},
{
name: "Backup Missing Deets ID",
bb: func() *backupBases {
res := validMail1()
res.backups[0].StreamStoreID = ""
return res
}(),
},
{
name: "Incomplete Snapshot",
bb: func() *backupBases {
res := validMail1()
res.mergeBases[0].IncompleteReason = "ir"
res.assistBases[0].IncompleteReason = "ir"
return res
}(),
},
{
name: "Duplicate Reason",
bb: func() *backupBases {
res := validMail1()
res.mergeBases[0].Reasons = append(
res.mergeBases[0].Reasons,
res.mergeBases[0].Reasons[0])
res.assistBases = res.mergeBases
return res
}(),
},
{
name: "Single Valid Entry",
bb: validMail1(),
expect: validMail1(),
},
{
name: "Single Valid Entry With Incomplete Assist With Same Reason",
bb: func() *backupBases {
res := validMail1()
res.assistBases = append(
res.assistBases,
makeMan(path.EmailCategory, "id2", "checkpoint", "bid2"))
return res
}(),
expect: func() *backupBases {
res := validMail1()
res.assistBases = append(
res.assistBases,
makeMan(path.EmailCategory, "id2", "checkpoint", "bid2"))
return res
}(),
},
{
name: "Single Valid Entry With Backup With Old Deets ID",
bb: func() *backupBases {
res := validMail1()
res.backups[0].DetailsID = res.backups[0].StreamStoreID
res.backups[0].StreamStoreID = ""
return res
}(),
expect: func() *backupBases {
res := validMail1()
res.backups[0].DetailsID = res.backups[0].StreamStoreID
res.backups[0].StreamStoreID = ""
return res
}(),
},
{
name: "Single Valid Entry With Multiple Reasons",
bb: func() *backupBases {
res := validMail1()
res.mergeBases[0].Reasons = append(
res.mergeBases[0].Reasons,
Reason{
ResourceOwner: ro,
Service: path.ExchangeService,
Category: path.ContactsCategory,
})
res.assistBases = res.mergeBases
return res
}(),
expect: func() *backupBases {
res := validMail1()
res.mergeBases[0].Reasons = append(
res.mergeBases[0].Reasons,
Reason{
ResourceOwner: ro,
Service: path.ExchangeService,
Category: path.ContactsCategory,
})
res.assistBases = res.mergeBases
return res
}(),
},
{
name: "Two Entries Overlapping Reasons",
bb: func() *backupBases {
res := validMail1()
res.mergeBases = append(
res.mergeBases,
makeMan(path.EmailCategory, "id2", "", "bid2"))
res.assistBases = res.mergeBases
return res
}(),
},
{
name: "Three Entries One Invalid",
bb: func() *backupBases {
res := validMail1()
res.backups = append(
res.backups,
BackupEntry{
Backup: &backup.Backup{
BaseModel: model.BaseModel{
ID: "bid2",
},
},
},
BackupEntry{
Backup: &backup.Backup{
BaseModel: model.BaseModel{
ID: "bid3",
},
SnapshotID: "id3",
StreamStoreID: "ssid3",
},
})
res.mergeBases = append(
res.mergeBases,
makeMan(path.ContactsCategory, "id2", "checkpoint", "bid2"),
makeMan(path.EventsCategory, "id3", "", "bid3"))
res.assistBases = res.mergeBases
return res
}(),
expect: func() *backupBases {
res := validMail1()
res.backups = append(
res.backups,
BackupEntry{
Backup: &backup.Backup{
BaseModel: model.BaseModel{
ID: "bid3",
},
SnapshotID: "id3",
StreamStoreID: "ssid3",
},
})
res.mergeBases = append(
res.mergeBases,
makeMan(path.EventsCategory, "id3", "", "bid3"))
res.assistBases = res.mergeBases
return res
}(),
},
}
for _, test := range table {
suite.Run(test.name, func() {
ctx, flush := tester.NewContext(suite.T())
defer flush()
test.bb.fixupAndVerify(ctx)
AssertBackupBasesEqual(suite.T(), test.expect, test.bb)
})
}
}
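// The table above encodes the invariants fixupAndVerify enforces. As an
// informal predicate, inferred from the cases rather than copied from the
// implementation, a merge base survives only when
//
//	bup.Backup != nil && // the manifest has a matching backup model
//		len(bup.SnapshotID) > 0 && // the backup references an item data snapshot
//		(len(bup.StreamStoreID) > 0 || len(bup.DetailsID) > 0) && // details exist, via the new or legacy field
//		len(man.IncompleteReason) == 0 // the snapshot completed
//
// plus a uniqueness rule: no Reason may appear twice, whether duplicated on a
// single entry or spread across two entries.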

View File

@@ -47,12 +47,6 @@ func (r Reason) Key() string {
 	return r.ResourceOwner + r.Service.String() + r.Category.String()
 }
 
-type backupBases struct {
-	backups     []BackupEntry
-	mergeBases  []ManifestEntry
-	assistBases []ManifestEntry
-}
-
 type BackupEntry struct {
 	*backup.Backup
 	Reasons []Reason
@@ -198,6 +192,8 @@ func (b *baseFinder) findBasesInSet(
 					Manifest: man,
 					Reasons:  []Reason{reason},
 				})
+
+				logger.Ctx(ictx).Info("found incomplete backup")
 			}
 
 			continue
@@ -209,6 +205,18 @@ func (b *baseFinder) findBasesInSet(
 			// Safe to continue here as we'll just end up attempting to use an older
 			// backup as the base.
 			logger.CtxErr(ictx, err).Debug("searching for base backup")
+
+			if !foundIncomplete {
+				foundIncomplete = true
+
+				kopiaAssistSnaps = append(kopiaAssistSnaps, ManifestEntry{
+					Manifest: man,
+					Reasons:  []Reason{reason},
+				})
+
+				logger.Ctx(ictx).Info("found incomplete backup")
+			}
 
 			continue
 		}
@@ -222,12 +230,27 @@ func (b *baseFinder) findBasesInSet(
 				"empty backup stream store ID",
 				"search_backup_id", bup.ID)
+
+			if !foundIncomplete {
+				foundIncomplete = true
+
+				kopiaAssistSnaps = append(kopiaAssistSnaps, ManifestEntry{
+					Manifest: man,
+					Reasons:  []Reason{reason},
+				})
+
+				logger.Ctx(ictx).Infow(
+					"found incomplete backup",
+					"search_backup_id", bup.ID)
+			}
 
 			continue
 		}
 
 		// If we've made it to this point then we're considering the backup
 		// complete as it has both an item data snapshot and a backup details
 		// snapshot.
+		logger.Ctx(ictx).Infow("found complete backup", "base_backup_id", bup.ID)
+
 		me := ManifestEntry{
 			Manifest: man,
 			Reasons:  []Reason{reason},
@@ -272,11 +295,11 @@ func (b *baseFinder) getBase(
 	return b.findBasesInSet(ctx, reason, metas)
 }
 
-func (b *baseFinder) findBases(
+func (b *baseFinder) FindBases(
 	ctx context.Context,
 	reasons []Reason,
 	tags map[string]string,
-) (backupBases, error) {
+) BackupBases {
 	var (
 		// All maps go from ID -> entry. We need to track by ID so we can coalesce
 		// the reason for selecting something. Kopia assisted snapshots also use
@@ -340,24 +363,13 @@ func (b *baseFinder) findBases(
 		}
 	}
 
-	return backupBases{
+	res := &backupBases{
 		backups:     maps.Values(baseBups),
 		mergeBases:  maps.Values(baseSnaps),
 		assistBases: maps.Values(kopiaAssistSnaps),
-	}, nil
-}
-
-func (b *baseFinder) FindBases(
-	ctx context.Context,
-	reasons []Reason,
-	tags map[string]string,
-) ([]ManifestEntry, error) {
-	bb, err := b.findBases(ctx, reasons, tags)
-	if err != nil {
-		return nil, clues.Stack(err)
-	}
+	}
 
-	// assistBases contains all snapshots so we can return it while maintaining
-	// almost all compatibility.
-	return bb.assistBases, nil
+	res.fixupAndVerify(ctx)
+
+	return res
 }
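With this change FindBases no longer returns an error: empty results are represented by an empty BackupBases, and validation happens inside fixupAndVerify. A minimal caller sketch; the accessor names come from the tests elsewhere in this commit, everything else is illustrative:

	bb := bf.FindBases(ctx, reasons, tags)

	// merge bases: complete snapshots backed by a usable backup model
	for _, m := range bb.MergeBases() {
		_ = m
	}

	// assist bases: also include incomplete checkpoints kept for kopia-assisted dedupe
	for _, a := range bb.AssistBases() {
		_ = a
	}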

View File

@@ -5,11 +5,9 @@ import (
 	"testing"
 	"time"
 
-	"github.com/alcionai/clues"
 	"github.com/kopia/kopia/repo/manifest"
 	"github.com/kopia/kopia/snapshot"
 	"github.com/stretchr/testify/assert"
-	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"
 
 	"github.com/alcionai/corso/src/internal/data"
@@ -111,13 +109,6 @@ func (sm mockEmptySnapshotManager) FindManifests(
 	return nil, nil
 }
 
-func (sm mockEmptySnapshotManager) LoadSnapshots(
-	context.Context,
-	[]manifest.ID,
-) ([]*snapshot.Manifest, error) {
-	return nil, clues.New("not implemented")
-}
-
 func (sm mockEmptySnapshotManager) LoadSnapshot(
 	context.Context,
 	manifest.ID,
@@ -145,7 +136,7 @@ type manifestInfo struct {
 	err error
 }
 
-func newManifestInfo2(
+func newManifestInfo(
 	id manifest.ID,
 	modTime time.Time,
 	incomplete bool,
@@ -189,12 +180,12 @@ func newManifestInfo2(
 	return res
 }
 
-type mockSnapshotManager2 struct {
+type mockSnapshotManager struct {
 	data    []manifestInfo
 	findErr error
 }
 
-func matchesTags2(mi manifestInfo, tags map[string]string) bool {
+func matchesTags(mi manifestInfo, tags map[string]string) bool {
 	for k := range tags {
 		if _, ok := mi.tags[k]; !ok {
 			return false
@@ -204,7 +195,7 @@ func matchesTags2(mi manifestInfo, tags map[string]string) bool {
 	return true
 }
 
-func (msm *mockSnapshotManager2) FindManifests(
+func (msm *mockSnapshotManager) FindManifests(
 	ctx context.Context,
 	tags map[string]string,
 ) ([]*manifest.EntryMetadata, error) {
@@ -219,7 +210,7 @@ func (msm *mockSnapshotManager2) FindManifests(
 	res := []*manifest.EntryMetadata{}
 
 	for _, mi := range msm.data {
-		if matchesTags2(mi, tags) {
+		if matchesTags(mi, tags) {
 			res = append(res, mi.metadata)
 		}
 	}
@@ -227,14 +218,7 @@ func (msm *mockSnapshotManager2) FindManifests(
 	return res, nil
 }
 
-func (msm *mockSnapshotManager2) LoadSnapshots(
-	ctx context.Context,
-	ids []manifest.ID,
-) ([]*snapshot.Manifest, error) {
-	return nil, clues.New("not implemented")
-}
-
-func (msm *mockSnapshotManager2) LoadSnapshot(
+func (msm *mockSnapshotManager) LoadSnapshot(
 	ctx context.Context,
 	id manifest.ID,
 ) (*snapshot.Manifest, error) {
@@ -244,6 +228,10 @@ func (msm *mockSnapshotManager2) LoadSnapshot(
 	for _, mi := range msm.data {
 		if mi.man.ID == id {
+			if mi.err != nil {
+				return nil, mi.err
+			}
+
 			return mi.man, nil
 		}
 	}
@@ -273,11 +261,13 @@ func newBackupModel(
 		err: err,
 	}
 
+	if hasDetailsSnap {
 		if !oldDetailsID {
 			res.b.StreamStoreID = "ssid"
 		} else {
 			res.b.DetailsID = "ssid"
 		}
+	}
 
 	return res
 }
@@ -340,10 +330,9 @@ func (suite *BaseFinderUnitSuite) TestNoResult_NoBackupsOrSnapshots() {
 		},
 	}
 
-	bb, err := bf.findBases(ctx, reasons, nil)
-	assert.NoError(t, err, "getting bases: %v", clues.ToCore(err))
-	assert.Empty(t, bb.mergeBases)
-	assert.Empty(t, bb.assistBases)
+	bb := bf.FindBases(ctx, reasons, nil)
+	assert.Empty(t, bb.MergeBases())
+	assert.Empty(t, bb.AssistBases())
 }
 
 func (suite *BaseFinderUnitSuite) TestNoResult_ErrorListingSnapshots() {
@@ -353,7 +342,7 @@ func (suite *BaseFinderUnitSuite) TestNoResult_ErrorListingSnapshots() {
 	defer flush()
 
 	bf := baseFinder{
-		sm: &mockSnapshotManager2{findErr: assert.AnError},
+		sm: &mockSnapshotManager{findErr: assert.AnError},
 		bg: mockEmptyModelGetter{},
 	}
 	reasons := []Reason{
@@ -364,10 +353,9 @@ func (suite *BaseFinderUnitSuite) TestNoResult_ErrorListingSnapshots() {
 		},
 	}
 
-	bb, err := bf.findBases(ctx, reasons, nil)
-	assert.NoError(t, err, "getting bases: %v", clues.ToCore(err))
-	assert.Empty(t, bb.mergeBases)
-	assert.Empty(t, bb.assistBases)
+	bb := bf.FindBases(ctx, reasons, nil)
+	assert.Empty(t, bb.MergeBases())
+	assert.Empty(t, bb.AssistBases())
 }
 
 func (suite *BaseFinderUnitSuite) TestGetBases() {
@@ -387,7 +375,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
 			name:  "Return Older Base If Fail To Get Manifest",
 			input: testUser1Mail,
 			manifestData: []manifestInfo{
-				newManifestInfo2(
+				newManifestInfo(
 					testID2,
 					testT2,
 					testCompleteMan,
@@ -396,7 +384,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
 					testMail,
 					testUser1,
 				),
-				newManifestInfo2(
+				newManifestInfo(
 					testID1,
 					testT1,
 					testCompleteMan,
@@ -407,21 +395,21 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
 				),
 			},
 			expectedBaseReasons: map[int][]Reason{
-				0: testUser1Mail,
+				1: testUser1Mail,
 			},
 			expectedAssistManifestReasons: map[int][]Reason{
-				0: testUser1Mail,
+				1: testUser1Mail,
 			},
 			backupData: []backupInfo{
 				newBackupModel(testBackup2, true, true, false, nil),
-				newBackupModel(testBackup1, false, false, false, assert.AnError),
+				newBackupModel(testBackup1, true, true, false, nil),
 			},
 		},
 		{
 			name:  "Return Older Base If Fail To Get Backup",
 			input: testUser1Mail,
 			manifestData: []manifestInfo{
-				newManifestInfo2(
+				newManifestInfo(
 					testID2,
 					testT2,
 					testCompleteMan,
@@ -430,7 +418,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
 					testMail,
 					testUser1,
 				),
-				newManifestInfo2(
+				newManifestInfo(
 					testID1,
 					testT1,
 					testCompleteMan,
@@ -441,21 +429,22 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
 				),
 			},
 			expectedBaseReasons: map[int][]Reason{
-				0: testUser1Mail,
+				1: testUser1Mail,
 			},
 			expectedAssistManifestReasons: map[int][]Reason{
 				0: testUser1Mail,
+				1: testUser1Mail,
 			},
 			backupData: []backupInfo{
-				newBackupModel(testBackup2, true, true, false, nil),
-				newBackupModel(testBackup1, false, false, false, assert.AnError),
+				newBackupModel(testBackup2, false, false, false, assert.AnError),
+				newBackupModel(testBackup1, true, true, false, nil),
 			},
 		},
 		{
 			name:  "Return Older Base If Missing Details",
 			input: testUser1Mail,
 			manifestData: []manifestInfo{
-				newManifestInfo2(
+				newManifestInfo(
 					testID2,
 					testT2,
 					testCompleteMan,
@@ -464,7 +453,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
 					testMail,
 					testUser1,
 				),
-				newManifestInfo2(
+				newManifestInfo(
 					testID1,
 					testT1,
 					testCompleteMan,
@@ -475,21 +464,22 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
 				),
 			},
 			expectedBaseReasons: map[int][]Reason{
-				0: testUser1Mail,
+				1: testUser1Mail,
 			},
 			expectedAssistManifestReasons: map[int][]Reason{
 				0: testUser1Mail,
+				1: testUser1Mail,
 			},
 			backupData: []backupInfo{
-				newBackupModel(testBackup2, true, true, false, nil),
-				newBackupModel(testBackup1, true, false, false, nil),
+				newBackupModel(testBackup2, true, false, false, nil),
+				newBackupModel(testBackup1, true, true, false, nil),
 			},
 		},
 		{
 			name:  "Old Backup Details Pointer",
 			input: testUser1Mail,
 			manifestData: []manifestInfo{
-				newManifestInfo2(
+				newManifestInfo(
 					testID1,
 					testT1,
 					testCompleteMan,
@@ -516,7 +506,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
 			name:  "All One Snapshot",
 			input: testAllUsersAllCats,
 			manifestData: []manifestInfo{
-				newManifestInfo2(
+				newManifestInfo(
 					testID1,
 					testT1,
 					testCompleteMan,
@@ -543,7 +533,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
 			name:  "Multiple Bases Some Overlapping Reasons",
 			input: testAllUsersAllCats,
 			manifestData: []manifestInfo{
-				newManifestInfo2(
+				newManifestInfo(
 					testID1,
 					testT1,
 					testCompleteMan,
@@ -555,7 +545,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
 					testUser2,
 					testUser3,
 				),
-				newManifestInfo2(
+				newManifestInfo(
 					testID2,
 					testT2,
 					testCompleteMan,
@@ -648,7 +638,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
 			name:  "Newer Incomplete Assist Snapshot",
 			input: testUser1Mail,
 			manifestData: []manifestInfo{
-				newManifestInfo2(
+				newManifestInfo(
 					testID1,
 					testT1,
 					testCompleteMan,
@@ -657,7 +647,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
 					testMail,
 					testUser1,
 				),
-				newManifestInfo2(
+				newManifestInfo(
 					testID2,
 					testT2,
 					testIncompleteMan,
@@ -684,7 +674,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
 			name:  "Incomplete Older Than Complete",
 			input: testUser1Mail,
 			manifestData: []manifestInfo{
-				newManifestInfo2(
+				newManifestInfo(
 					testID1,
 					testT1,
 					testIncompleteMan,
@@ -693,7 +683,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
 					testMail,
 					testUser1,
 				),
-				newManifestInfo2(
+				newManifestInfo(
 					testID2,
 					testT2,
 					testCompleteMan,
@@ -719,7 +709,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
 			name:  "Newest Incomplete Only Incomplete",
 			input: testUser1Mail,
 			manifestData: []manifestInfo{
-				newManifestInfo2(
+				newManifestInfo(
 					testID1,
 					testT1,
 					testIncompleteMan,
@@ -728,7 +718,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
 					testMail,
 					testUser1,
 				),
-				newManifestInfo2(
+				newManifestInfo(
 					testID2,
 					testT2,
 					testIncompleteMan,
@@ -752,7 +742,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
 			name:  "Some Bases Not Found",
 			input: testAllUsersMail,
 			manifestData: []manifestInfo{
-				newManifestInfo2(
+				newManifestInfo(
 					testID1,
 					testT1,
 					testCompleteMan,
@@ -778,7 +768,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
 			// Manifests are currently returned in the order they're defined by the
 			// mock.
 			manifestData: []manifestInfo{
-				newManifestInfo2(
+				newManifestInfo(
 					testID2,
 					testT2,
 					testCompleteMan,
@@ -787,7 +777,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
 					testMail,
 					testUser1,
 				),
-				newManifestInfo2(
+				newManifestInfo(
 					testID1,
 					testT1,
 					testCompleteMan,
@@ -819,38 +809,37 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
 			defer flush()
 
 			bf := baseFinder{
-				sm: &mockSnapshotManager2{data: test.manifestData},
+				sm: &mockSnapshotManager{data: test.manifestData},
 				bg: &mockModelGetter{data: test.backupData},
 			}
 
-			bb, err := bf.findBases(
+			bb := bf.FindBases(
 				ctx,
 				test.input,
 				nil)
-			require.NoError(t, err, "getting bases: %v", clues.ToCore(err))
 
 			checkBackupEntriesMatch(
 				t,
-				bb.backups,
+				bb.Backups(),
 				test.backupData,
 				test.expectedBaseReasons)
 
 			checkManifestEntriesMatch(
 				t,
-				bb.mergeBases,
+				bb.MergeBases(),
 				test.manifestData,
 				test.expectedBaseReasons)
 
 			checkManifestEntriesMatch(
 				t,
-				bb.assistBases,
+				bb.AssistBases(),
 				test.manifestData,
 				test.expectedAssistManifestReasons)
 		})
 	}
 }
 
-func (suite *BaseFinderUnitSuite) TestFetchPrevSnapshots_CustomTags() {
+func (suite *BaseFinderUnitSuite) TestFindBases_CustomTags() {
 	manifestData := []manifestInfo{
-		newManifestInfo2(
+		newManifestInfo(
 			testID1,
 			testT1,
 			testCompleteMan,
@@ -914,19 +903,18 @@ func (suite *BaseFinderUnitSuite) TestFetchPrevSnapshots_CustomTags() {
 			defer flush()
 
 			bf := baseFinder{
-				sm: &mockSnapshotManager2{data: manifestData},
+				sm: &mockSnapshotManager{data: manifestData},
 				bg: &mockModelGetter{data: backupData},
 			}
 
-			bb, err := bf.findBases(
+			bb := bf.FindBases(
 				ctx,
 				testAllUsersAllCats,
 				test.tags)
-			require.NoError(t, err, "getting bases: %v", clues.ToCore(err))
 
 			checkManifestEntriesMatch(
 				t,
-				bb.mergeBases,
+				bb.MergeBases(),
 				manifestData,
 				test.expectedIdxs)
 		})

View File

@@ -42,7 +42,7 @@ func (kdc *kopiaDataCollection) Items(
 	for _, item := range kdc.items {
 		s, err := kdc.FetchItemByName(ctx, item)
 		if err != nil {
-			el.AddRecoverable(clues.Wrap(err, "fetching item").
+			el.AddRecoverable(ctx, clues.Wrap(err, "fetching item").
 				WithClues(ctx).
 				Label(fault.LabelForceNoBackupCreation))
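This is the first of many call sites in the commit that now pass a context into AddRecoverable. The fault.Bus signature change itself isn't shown in this diff, but from the call sites it presumably reads roughly as:

	// assumed shape, inferred from the updated call sites; not part of this diff
	func (e *Bus) AddRecoverable(ctx context.Context, err error) {
		// log through ctx, then record err as a recoverable failure
	}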

View File

@@ -13,8 +13,8 @@ import (
 	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"
 
-	exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
 	"github.com/alcionai/corso/src/internal/data"
+	exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"

View File

@@ -39,6 +39,6 @@ type (
 		ctx context.Context,
 		reasons []kopia.Reason,
 		tags map[string]string,
-	) ([]kopia.ManifestEntry, error)
+	) kopia.BackupBases
 )
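Assembled from the hunk above, the consumer-side finder interface would now read roughly as follows (the surrounding declaration lines are assumed, not shown in the diff):

	// sketch of the interface shape implied by the change above
	BaseFinder interface {
		FindBases(
			ctx context.Context,
			reasons []kopia.Reason,
			tags map[string]string,
		) kopia.BackupBases
	}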

View File

@ -12,8 +12,8 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/exchange/mock"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"

View File

@ -0,0 +1,63 @@
package kopia
import (
"testing"
"github.com/stretchr/testify/assert"
)
func AssertBackupBasesEqual(t *testing.T, expect, got BackupBases) {
if expect == nil && got == nil {
return
}
if expect == nil {
assert.Empty(t, got.Backups(), "backups")
assert.Empty(t, got.MergeBases(), "merge bases")
assert.Empty(t, got.AssistBases(), "assist bases")
return
}
if got == nil {
if len(expect.Backups()) > 0 || len(expect.MergeBases()) > 0 || len(expect.AssistBases()) > 0 {
assert.Failf(t, "got was nil but expected non-nil result", "%v", expect)
}
return
}
assert.ElementsMatch(t, expect.Backups(), got.Backups(), "backups")
assert.ElementsMatch(t, expect.MergeBases(), got.MergeBases(), "merge bases")
assert.ElementsMatch(t, expect.AssistBases(), got.AssistBases(), "assist bases")
}
func NewMockBackupBases() *MockBackupBases {
return &MockBackupBases{backupBases: &backupBases{}}
}
type MockBackupBases struct {
*backupBases
}
func (bb *MockBackupBases) WithBackups(b ...BackupEntry) *MockBackupBases {
bb.backupBases.backups = append(bb.Backups(), b...)
return bb
}
func (bb *MockBackupBases) WithMergeBases(m ...ManifestEntry) *MockBackupBases {
bb.backupBases.mergeBases = append(bb.MergeBases(), m...)
bb.backupBases.assistBases = append(bb.AssistBases(), m...)
return bb
}
func (bb *MockBackupBases) WithAssistBases(m ...ManifestEntry) *MockBackupBases {
bb.backupBases.assistBases = append(bb.AssistBases(), m...)
return bb
}
func (bb *MockBackupBases) ClearMockAssistBases() *MockBackupBases {
bb.backupBases.ClearAssistBases()
return bb
}
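The builder-style helpers keep test setup terse; note that WithMergeBases also appends to the assist bases, mirroring how real merge bases are always assist-eligible. A typical (hypothetical) setup:

	// hypothetical test setup using the helpers defined above
	expect := NewMockBackupBases().
		WithBackups(BackupEntry{ /* placeholder backup */ }).
		WithMergeBases(ManifestEntry{ /* placeholder manifest */ })

	AssertBackupBasesEqual(t, expect, got)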

View File

@@ -35,6 +35,7 @@ func s3BlobStorage(ctx context.Context, s storage.Storage) (blob.Storage, error)
 		SessionName:         s.SessionName,
 		RoleARN:             s.Role,
 		RoleDuration:        s.SessionDuration,
+		TLSHandshakeTimeout: 60,
 	}
 
 	store, err := s3.New(ctx, &opts, false)

View File

@@ -22,10 +22,10 @@ import (
 	"github.com/kopia/kopia/snapshot/snapshotfs"
 
 	"github.com/alcionai/corso/src/internal/common/prefixmatcher"
-	"github.com/alcionai/corso/src/internal/connector/graph"
-	"github.com/alcionai/corso/src/internal/connector/graph/metadata"
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/diagnostics"
+	"github.com/alcionai/corso/src/internal/m365/graph"
+	"github.com/alcionai/corso/src/internal/m365/graph/metadata"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/logger"
@@ -133,6 +133,12 @@ type itemDetails struct {
 }
 
 type corsoProgress struct {
+	// this is an unwanted hack. We can't extend the kopia interface
+	// funcs to pass through a context. This is the second best way to
+	// get an at least partially formed context into funcs that need it
+	// for logging and other purposes.
+	ctx context.Context
+
 	snapshotfs.UploadProgress
 	pending map[string]*itemDetails
 	deets   *details.Builder
@@ -183,11 +189,10 @@ func (cp *corsoProgress) FinishedFile(relativePath string, err error) {
 		// never had to materialize their details in-memory.
 		if d.info == nil {
 			if d.prevPath == nil {
-				cp.errs.AddRecoverable(clues.New("item sourced from previous backup with no previous path").
+				cp.errs.AddRecoverable(cp.ctx, clues.New("item sourced from previous backup with no previous path").
 					With(
 						"service", d.repoPath.Service().String(),
-						"category", d.repoPath.Category().String(),
-					).
+						"category", d.repoPath.Category().String()).
 					Label(fault.LabelForceNoBackupCreation))
 
 				return
@@ -198,11 +203,10 @@ func (cp *corsoProgress) FinishedFile(relativePath string, err error) {
 			err := cp.toMerge.addRepoRef(d.prevPath.ToBuilder(), d.repoPath, d.locationPath)
 			if err != nil {
-				cp.errs.AddRecoverable(clues.Wrap(err, "adding item to merge list").
+				cp.errs.AddRecoverable(cp.ctx, clues.Wrap(err, "adding item to merge list").
 					With(
 						"service", d.repoPath.Service().String(),
-						"category", d.repoPath.Category().String(),
-					).
+						"category", d.repoPath.Category().String()).
 					Label(fault.LabelForceNoBackupCreation))
 			}
@@ -215,11 +219,10 @@ func (cp *corsoProgress) FinishedFile(relativePath string, err error) {
 			!d.cached,
 			*d.info)
 		if err != nil {
-			cp.errs.AddRecoverable(clues.New("adding item to details").
+			cp.errs.AddRecoverable(cp.ctx, clues.New("adding item to details").
 				With(
 					"service", d.repoPath.Service().String(),
-					"category", d.repoPath.Category().String(),
-				).
+					"category", d.repoPath.Category().String()).
 				Label(fault.LabelForceNoBackupCreation))
 
 			return
@@ -278,7 +281,7 @@ func (cp *corsoProgress) Error(relpath string, err error, isIgnored bool) {
 	defer cp.UploadProgress.Error(relpath, err, isIgnored)
 
-	cp.errs.AddRecoverable(clues.Wrap(err, "kopia reported error").
+	cp.errs.AddRecoverable(cp.ctx, clues.Wrap(err, "kopia reported error").
 		With("is_ignored", isIgnored, "relative_path", relpath).
 		Label(fault.LabelForceNoBackupCreation))
 }
@@ -350,7 +353,7 @@ func collectionEntries(
 			itemPath, err := streamedEnts.FullPath().AppendItem(e.UUID())
 			if err != nil {
 				err = clues.Wrap(err, "getting full item path")
-				progress.errs.AddRecoverable(err)
+				progress.errs.AddRecoverable(ctx, err)
 
 				logger.CtxErr(ctx, err).Error("getting full item path")
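The new ctx field is the workaround the comment describes: kopia's UploadProgress callbacks carry no context, so one is captured when the progress struct is built. Reduced to a standalone sketch with hypothetical names:

	// hypothetical sketch of the capture-a-context pattern used above
	type progressReporter struct {
		// captured at construction; the callback below has no ctx parameter
		ctx context.Context
	}

	// matches a context-less callback signature, e.g. snapshotfs.UploadProgress
	func (p *progressReporter) FinishedFile(relativePath string, err error) {
		if err != nil {
			logger.CtxErr(p.ctx, err).Error("finishing file")
		}
	}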

View File

@@ -20,8 +20,8 @@ import (
 	"github.com/stretchr/testify/suite"
 
 	pmMock "github.com/alcionai/corso/src/internal/common/prefixmatcher/mock"
-	exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
 	"github.com/alcionai/corso/src/internal/data"
+	exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/fault"
@@ -472,8 +472,12 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFile() {
 		suite.Run(test.name, func() {
 			t := suite.T()
 
+			ctx, flush := tester.NewContext(t)
+			defer flush()
+
 			bd := &details.Builder{}
 			cp := corsoProgress{
+				ctx:            ctx,
 				UploadProgress: &snapshotfs.NullUploadProgress{},
 				deets:          bd,
 				pending:        map[string]*itemDetails{},
@@ -526,6 +530,10 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFile() {
 func (suite *CorsoProgressUnitSuite) TestFinishedFileCachedNoPrevPathErrors() {
 	t := suite.T()
 
+	ctx, flush := tester.NewContext(t)
+	defer flush()
+
 	bd := &details.Builder{}
 	cachedItems := map[string]testInfo{
 		suite.targetFileName: {
@@ -535,6 +543,7 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFileCachedNoPrevPathErrors() {
 		},
 	}
 	cp := corsoProgress{
+		ctx:            ctx,
 		UploadProgress: &snapshotfs.NullUploadProgress{},
 		deets:          bd,
 		pending:        map[string]*itemDetails{},
@@ -565,6 +574,9 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFileBaseItemDoesntBuildHierarch
 	t := suite.T()
 
+	ctx, flush := tester.NewContext(t)
+	defer flush()
+
 	prevPath := makePath(
 		suite.T(),
 		[]string{testTenant, service, testUser, category, testInboxDir, testFileName2},
@@ -582,6 +594,7 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFileBaseItemDoesntBuildHierarch
 	// Setup stuff.
 	db := &details.Builder{}
 	cp := corsoProgress{
+		ctx:            ctx,
 		UploadProgress: &snapshotfs.NullUploadProgress{},
 		deets:          db,
 		pending:        map[string]*itemDetails{},
@@ -617,8 +630,12 @@ func (suite *CorsoProgressUnitSuite) TestFinishedHashingFile() {
 		suite.Run(test.name, func() {
 			t := suite.T()
 
+			ctx, flush := tester.NewContext(t)
+			defer flush()
+
 			bd := &details.Builder{}
 			cp := corsoProgress{
+				ctx:            ctx,
 				UploadProgress: &snapshotfs.NullUploadProgress{},
 				deets:          bd,
 				pending:        map[string]*itemDetails{},
@@ -682,6 +699,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree() {
 	}
 
 	progress := &corsoProgress{
+		ctx:     ctx,
 		pending: map[string]*itemDetails{},
 		toMerge: newMergeDetails(),
 		errs:    fault.New(true),
@@ -801,6 +819,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_MixedDirectory()
 	defer flush()
 
 	progress := &corsoProgress{
+		ctx:     ctx,
 		pending: map[string]*itemDetails{},
 		toMerge: newMergeDetails(),
 		errs:    fault.New(true),
@@ -908,6 +927,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_Fails() {
 	defer flush()
 
 	progress := &corsoProgress{
+		ctx:     ctx,
 		toMerge: newMergeDetails(),
 		errs:    fault.New(true),
 	}
@@ -1004,6 +1024,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeErrors() {
 	defer flush()
 
 	progress := &corsoProgress{
+		ctx:     ctx,
 		pending: map[string]*itemDetails{},
 		toMerge: newMergeDetails(),
 		errs:    fault.New(true),
@@ -1298,6 +1319,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
 	defer flush()
 
 	progress := &corsoProgress{
+		ctx:     ctx,
 		pending: map[string]*itemDetails{},
 		toMerge: newMergeDetails(),
 		errs:    fault.New(true),
@@ -2221,6 +2243,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
 	defer flush()
 
 	progress := &corsoProgress{
+		ctx:     ctx,
 		pending: map[string]*itemDetails{},
 		toMerge: newMergeDetails(),
 		errs:    fault.New(true),
@@ -2375,6 +2398,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSkipsDeletedSubtre
 	)
 
 	progress := &corsoProgress{
+		ctx:     ctx,
 		pending: map[string]*itemDetails{},
 		toMerge: newMergeDetails(),
 		errs:    fault.New(true),
@@ -2477,6 +2501,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_HandleEmptyBase()
 	)
 
 	progress := &corsoProgress{
+		ctx:     ctx,
 		pending: map[string]*itemDetails{},
 		toMerge: newMergeDetails(),
 		errs:    fault.New(true),
@@ -2733,6 +2758,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsCorrectSubt
 	)
 
 	progress := &corsoProgress{
+		ctx:     ctx,
 		pending: map[string]*itemDetails{},
 		toMerge: newMergeDetails(),
 		errs:    fault.New(true),
@@ -2901,6 +2927,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsMigrateSubt
 	)
 
 	progress := &corsoProgress{
+		ctx:     ctx,
 		pending: map[string]*itemDetails{},
 		toMerge: newMergeDetails(),
 		errs:    fault.New(true),

View File

@@ -160,6 +160,7 @@ func (w Wrapper) ConsumeBackupCollections(
 	}
 
 	progress := &corsoProgress{
+		ctx:     ctx,
 		pending: map[string]*itemDetails{},
 		deets:   &details.Builder{},
 		toMerge: newMergeDetails(),
@@ -415,7 +416,7 @@ func loadDirsAndItems(
 			dir, err := getDir(ictx, dirItems.dir, snapshotRoot)
 			if err != nil {
-				el.AddRecoverable(clues.Wrap(err, "loading storage directory").
+				el.AddRecoverable(ctx, clues.Wrap(err, "loading storage directory").
 					WithClues(ictx).
 					Label(fault.LabelForceNoBackupCreation))
@@ -431,7 +432,7 @@ func loadDirsAndItems(
 			}
 
 			if err := mergeCol.addCollection(dirItems.dir.String(), dc); err != nil {
-				el.AddRecoverable(clues.Wrap(err, "adding collection to merge collection").
+				el.AddRecoverable(ctx, clues.Wrap(err, "adding collection to merge collection").
 					WithClues(ctx).
 					Label(fault.LabelForceNoBackupCreation))
@@ -493,7 +494,7 @@ func (w Wrapper) ProduceRestoreCollections(
 		parentStoragePath, err := itemPaths.StoragePath.Dir()
 		if err != nil {
-			el.AddRecoverable(clues.Wrap(err, "getting storage directory path").
+			el.AddRecoverable(ictx, clues.Wrap(err, "getting storage directory path").
 				WithClues(ictx).
 				Label(fault.LabelForceNoBackupCreation))

View File

@@ -19,10 +19,10 @@ import (
 	"golang.org/x/exp/maps"
 
 	pmMock "github.com/alcionai/corso/src/internal/common/prefixmatcher/mock"
-	exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
-	"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/data/mock"
+	exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
+	"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/control/repository"

View File

@@ -1,4 +1,4 @@
-package connector
+package m365
 
 import (
 	"context"
@@ -8,15 +8,13 @@ import (
 	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/common/prefixmatcher"
-	"github.com/alcionai/corso/src/internal/connector/discovery"
-	"github.com/alcionai/corso/src/internal/connector/exchange"
-	"github.com/alcionai/corso/src/internal/connector/graph"
-	"github.com/alcionai/corso/src/internal/connector/onedrive"
-	"github.com/alcionai/corso/src/internal/connector/sharepoint"
-	"github.com/alcionai/corso/src/internal/connector/support"
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/diagnostics"
-	"github.com/alcionai/corso/src/pkg/backup/details"
+	"github.com/alcionai/corso/src/internal/m365/discovery"
+	"github.com/alcionai/corso/src/internal/m365/exchange"
+	"github.com/alcionai/corso/src/internal/m365/graph"
+	"github.com/alcionai/corso/src/internal/m365/onedrive"
+	"github.com/alcionai/corso/src/internal/m365/sharepoint"
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/filters"
@@ -34,7 +32,7 @@ import (
 // The metadata field can include things like delta tokens or the previous backup's
 // folder hierarchy. The absence of metadata causes the collection creation to ignore
 // prior history (ie, incrementals) and run a full backup.
-func (gc *GraphConnector) ProduceBackupCollections(
+func (ctrl *Controller) ProduceBackupCollections(
 	ctx context.Context,
 	owner idname.Provider,
 	sels selectors.Selector,
@@ -45,7 +43,7 @@ func (gc *GraphConnector) ProduceBackupCollections(
 ) ([]data.BackupCollection, prefixmatcher.StringSetReader, bool, error) {
 	ctx, end := diagnostics.Span(
 		ctx,
-		"gc:produceBackupCollections",
+		"m365:produceBackupCollections",
 		diagnostics.Index("service", sels.Service.String()))
 	defer end()
@@ -55,14 +53,14 @@ func (gc *GraphConnector) ProduceBackupCollections(
 	ctrlOpts.Parallelism.ItemFetch = graph.Parallelism(sels.PathService()).
 		ItemOverride(ctx, ctrlOpts.Parallelism.ItemFetch)
 
-	err := verifyBackupInputs(sels, gc.IDNameLookup.IDs())
+	err := verifyBackupInputs(sels, ctrl.IDNameLookup.IDs())
 	if err != nil {
 		return nil, nil, false, clues.Stack(err).WithClues(ctx)
 	}
 
 	serviceEnabled, canMakeDeltaQueries, err := checkServiceEnabled(
 		ctx,
-		gc.AC.Users(),
+		ctrl.AC.Users(),
 		path.ServiceType(sels.Service),
 		sels.DiscreteOwner)
 	if err != nil {
@@ -87,14 +85,14 @@ func (gc *GraphConnector) ProduceBackupCollections(
 	switch sels.Service {
 	case selectors.ServiceExchange:
-		colls, ssmb, canUsePreviousBackup, err = exchange.DataCollections(
+		colls, ssmb, canUsePreviousBackup, err = exchange.ProduceBackupCollections(
 			ctx,
-			gc.AC,
+			ctrl.AC,
 			sels,
-			gc.credentials.AzureTenantID,
+			ctrl.credentials.AzureTenantID,
 			owner,
 			metadata,
-			gc.UpdateStatus,
+			ctrl.UpdateStatus,
 			ctrlOpts,
 			errs)
 		if err != nil {
@@ -102,15 +100,15 @@ func (gc *GraphConnector) ProduceBackupCollections(
 		}
 
 	case selectors.ServiceOneDrive:
-		colls, ssmb, canUsePreviousBackup, err = onedrive.DataCollections(
+		colls, ssmb, canUsePreviousBackup, err = onedrive.ProduceBackupCollections(
 			ctx,
-			gc.AC,
+			ctrl.AC,
 			sels,
 			owner,
 			metadata,
 			lastBackupVersion,
-			gc.credentials.AzureTenantID,
-			gc.UpdateStatus,
+			ctrl.credentials.AzureTenantID,
+			ctrl.UpdateStatus,
 			ctrlOpts,
 			errs)
 		if err != nil {
@@ -118,14 +116,14 @@ func (gc *GraphConnector) ProduceBackupCollections(
 		}
 
 	case selectors.ServiceSharePoint:
-		colls, ssmb, canUsePreviousBackup, err = sharepoint.DataCollections(
+		colls, ssmb, canUsePreviousBackup, err = sharepoint.ProduceBackupCollections(
 			ctx,
-			gc.AC,
+			ctrl.AC,
 			sels,
 			owner,
 			metadata,
-			gc.credentials,
-			gc,
+			ctrl.credentials,
+			ctrl,
 			ctrlOpts,
 			errs)
 		if err != nil {
@@ -144,7 +142,7 @@ func (gc *GraphConnector) ProduceBackupCollections(
 		// break the process state, putting us into deadlock or
 		// panics.
 		if c.State() != data.DeletedState {
-			gc.incrementAwaitingMessages()
+			ctrl.incrementAwaitingMessages()
 		}
 	}
@@ -154,7 +152,7 @@
 // IsBackupRunnable verifies that the users provided has the services enabled and
 // data can be backed up. The canMakeDeltaQueries provides info if the mailbox is
 // full and delta queries can be made on it.
-func (gc *GraphConnector) IsBackupRunnable(
+func (ctrl *Controller) IsBackupRunnable(
 	ctx context.Context,
 	service path.ServiceType,
 	resourceOwner string,
@@ -164,7 +162,7 @@ func (gc *GraphConnector) IsBackupRunnable(
 		return true, nil
 	}
 
-	info, err := gc.AC.Users().GetInfo(ctx, resourceOwner)
+	info, err := ctrl.AC.Users().GetInfo(ctx, resourceOwner)
 	if err != nil {
 		return false, err
 	}
@@ -225,59 +223,3 @@ func checkServiceEnabled(
 	return true, canMakeDeltaQueries, nil
 }
 
-// ConsumeRestoreCollections restores data from the specified collections
-// into M365 using the GraphAPI.
-// SideEffect: gc.status is updated at the completion of operation
-func (gc *GraphConnector) ConsumeRestoreCollections(
-	ctx context.Context,
-	backupVersion int,
-	sels selectors.Selector,
-	dest control.RestoreDestination,
-	opts control.Options,
-	dcs []data.RestoreCollection,
-	errs *fault.Bus,
-) (*details.Details, error) {
-	ctx, end := diagnostics.Span(ctx, "connector:restore")
-	defer end()
-
-	ctx = graph.BindRateLimiterConfig(ctx, graph.LimiterCfg{Service: sels.PathService()})
-
-	var (
-		status *support.ConnectorOperationStatus
-		deets  = &details.Builder{}
-		err    error
-	)
-
-	switch sels.Service {
-	case selectors.ServiceExchange:
-		status, err = exchange.RestoreCollections(ctx, gc.AC, dest, dcs, deets, errs)
-	case selectors.ServiceOneDrive:
-		status, err = onedrive.RestoreCollections(
-			ctx,
-			onedrive.NewRestoreHandler(gc.AC),
-			backupVersion,
-			dest,
-			opts,
-			dcs,
-			deets,
-			errs)
-	case selectors.ServiceSharePoint:
-		status, err = sharepoint.RestoreCollections(
-			ctx,
-			backupVersion,
-			gc.AC,
-			dest,
-			opts,
-			dcs,
-			deets,
-			errs)
-	default:
-		err = clues.Wrap(clues.New(sels.Service.String()), "service not supported")
-	}
-
-	gc.incrementAwaitingMessages()
-	gc.UpdateStatus(status)
-
-	return deets.Details(), err
-}

View File

@@ -1,4 +1,4 @@
-package connector
+package m365
 
 import (
 	"bytes"
@@ -11,8 +11,9 @@ import (
 	"github.com/stretchr/testify/suite"
 
 	inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
-	"github.com/alcionai/corso/src/internal/connector/exchange"
-	"github.com/alcionai/corso/src/internal/connector/sharepoint"
+	"github.com/alcionai/corso/src/internal/m365/exchange"
+	"github.com/alcionai/corso/src/internal/m365/resource"
+	"github.com/alcionai/corso/src/internal/m365/sharepoint"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/internal/version"
 	"github.com/alcionai/corso/src/pkg/control"
@@ -59,19 +60,13 @@ func (suite *DataCollectionIntgSuite) SetupSuite() {
 	require.NoError(t, err, clues.ToCore(err))
 }
 
-// TestExchangeDataCollection verifies interface between operation and
-// GraphConnector remains stable to receive a non-zero amount of Collections
-// for the Exchange Package. Enabled exchange applications:
-// - mail
-// - contacts
-// - events
 func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
 	ctx, flush := tester.NewContext(suite.T())
 	defer flush()
 
 	selUsers := []string{suite.user}
 
-	connector := loadConnector(ctx, suite.T(), Users)
+	ctrl := newController(ctx, suite.T(), resource.Users)
 	tests := []struct {
 		name        string
 		getSelector func(t *testing.T) selectors.Selector
@@ -127,14 +122,14 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
 			ctrlOpts := control.Defaults()
 			ctrlOpts.ToggleFeatures.DisableDelta = !canMakeDeltaQueries
 
-			collections, excludes, canUsePreviousBackup, err := exchange.DataCollections(
+			collections, excludes, canUsePreviousBackup, err := exchange.ProduceBackupCollections(
 				ctx,
 				suite.ac,
 				sel,
 				suite.tenantID,
 				uidn,
 				nil,
-				connector.UpdateStatus,
+				ctrl.UpdateStatus,
 				ctrlOpts,
 				fault.New(true))
 			require.NoError(t, err, clues.ToCore(err))
@@ -142,7 +137,7 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
 			assert.True(t, excludes.Empty())
 
 			for range collections {
-				connector.incrementAwaitingMessages()
+				ctrl.incrementAwaitingMessages()
 			}
 
 			// Categories with delta endpoints will produce a collection for metadata
@@ -158,7 +153,7 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
 				}
 			}
 
-			status := connector.Wait()
+			status := ctrl.Wait()
 			assert.NotZero(t, status.Successes)
 			t.Log(status.String())
 		})
@@ -172,8 +167,7 @@ func (suite *DataCollectionIntgSuite) TestDataCollections_invalidResourceOwner()
 	defer flush()
 
 	owners := []string{"snuffleupagus"}
-
-	connector := loadConnector(ctx, suite.T(), Users)
+	ctrl := newController(ctx, suite.T(), resource.Users)
 	tests := []struct {
 		name        string
 		getSelector func(t *testing.T) selectors.Selector
@@ -238,7 +232,7 @@ func (suite *DataCollectionIntgSuite) TestDataCollections_invalidResourceOwner()
 			ctx, flush := tester.NewContext(t)
 			defer flush()
 
-			collections, excludes, canUsePreviousBackup, err := connector.ProduceBackupCollections(
+			collections, excludes, canUsePreviousBackup, err := ctrl.ProduceBackupCollections(
 				ctx,
 				test.getSelector(t),
 				test.getSelector(t),
@@ -254,16 +248,12 @@ func (suite *DataCollectionIntgSuite) TestDataCollections_invalidResourceOwner()
 	}
 }
 
-// TestSharePointDataCollection verifies interface between operation and
-// GraphConnector remains stable to receive a non-zero amount of Collections
-// for the SharePoint Package.
 func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
 	ctx, flush := tester.NewContext(suite.T())
 	defer flush()
 
 	selSites := []string{suite.site}
-
-	connector := loadConnector(ctx, suite.T(), Sites)
+	ctrl := newController(ctx, suite.T(), resource.Sites)
 	tests := []struct {
 		name     string
 		expected int
@@ -297,14 +287,14 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
 			sel := test.getSelector()
 
-			collections, excludes, canUsePreviousBackup, err := sharepoint.DataCollections(
+			collections, excludes, canUsePreviousBackup, err := sharepoint.ProduceBackupCollections(
 				ctx,
 				suite.ac,
 				sel,
 				sel,
 				nil,
-				connector.credentials,
-				connector,
+				ctrl.credentials,
+				ctrl,
 				control.Defaults(),
 				fault.New(true))
 			require.NoError(t, err, clues.ToCore(err))
@@ -313,7 +303,7 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
 			assert.True(t, excludes.Empty())
 
 			for range collections {
-				connector.incrementAwaitingMessages()
+				ctrl.incrementAwaitingMessages()
 			}
 
 			// we don't know an exact count of drives this will produce,
@@ -328,7 +318,7 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
 				}
 			}
 
-			status := connector.Wait()
+			status := ctrl.Wait()
 			assert.NotZero(t, status.Successes)
 			t.Log(status.String())
 		})
@@ -341,7 +331,7 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
 type SPCollectionIntgSuite struct {
 	tester.Suite
-	connector *GraphConnector
+	connector *Controller
 	user      string
 }
@@ -358,7 +348,7 @@ func (suite *SPCollectionIntgSuite) SetupSuite() {
 	ctx, flush := tester.NewContext(suite.T())
 	defer flush()
 
-	suite.connector = loadConnector(ctx, suite.T(), Sites)
+	suite.connector = newController(ctx, suite.T(), resource.Sites)
 	suite.user = tester.M365UserID(suite.T())
 
 	tester.LogTimeOfTest(suite.T())
@@ -372,11 +362,11 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() {
 	var (
 		siteID  = tester.M365SiteID(t)
-		gc      = loadConnector(ctx, t, Sites)
+		ctrl    = newController(ctx, t, resource.Sites)
 		siteIDs = []string{siteID}
 	)
 
-	id, name, err := gc.PopulateOwnerIDAndNamesFrom(ctx, siteID, nil)
+	id, name, err := ctrl.PopulateOwnerIDAndNamesFrom(ctx, siteID, nil)
 	require.NoError(t, err, clues.ToCore(err))
 
 	sel := selectors.NewSharePointBackup(siteIDs)
@@ -384,7 +374,7 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() {
 	sel.SetDiscreteOwnerIDName(id, name)
 
-	cols, excludes, canUsePreviousBackup, err := gc.ProduceBackupCollections(
+	cols, excludes, canUsePreviousBackup, err := ctrl.ProduceBackupCollections(
 		ctx,
 		inMock.NewProvider(id, name),
 		sel.Selector,
@@ -419,11 +409,11 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Lists() {
 	var (
 		siteID  = tester.M365SiteID(t)
-		gc      = loadConnector(ctx, t, Sites)
+		ctrl    = newController(ctx, t, resource.Sites)
 		siteIDs = []string{siteID}
 	)
 
-	id, name, err := gc.PopulateOwnerIDAndNamesFrom(ctx, siteID, nil)
+	id, name, err := ctrl.PopulateOwnerIDAndNamesFrom(ctx, siteID, nil)
 	require.NoError(t, err, clues.ToCore(err))
 
 	sel := selectors.NewSharePointBackup(siteIDs)
@@ -431,7 +421,7 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Lists() {
 	sel.SetDiscreteOwnerIDName(id, name)
 
-	cols, excludes, canUsePreviousBackup, err := gc.ProduceBackupCollections(
+	cols, excludes, canUsePreviousBackup, err := ctrl.ProduceBackupCollections(
 		ctx,
 		inMock.NewProvider(id, name),
 		sel.Selector,

View File

@ -1,6 +1,4 @@
// Package connector uploads and retrieves data from M365 through package m365
// the msgraph-go-sdk.
package connector
import ( import (
"context" "context"
@ -10,28 +8,25 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/resource"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject" "github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
) )
// ---------------------------------------------------------------------------
// Graph Connector
// ---------------------------------------------------------------------------
// must comply with BackupProducer and RestoreConsumer // must comply with BackupProducer and RestoreConsumer
var ( var (
_ inject.BackupProducer = &GraphConnector{} _ inject.BackupProducer = &Controller{}
_ inject.RestoreConsumer = &GraphConnector{} _ inject.RestoreConsumer = &Controller{}
) )
// GraphConnector is a struct used to wrap the GraphServiceClient and // Controller is a struct used to wrap the GraphServiceClient and
// GraphRequestAdapter from the msgraph-sdk-go. Additional fields are for // GraphRequestAdapter from the msgraph-sdk-go. Additional fields are for
// bookkeeping and interfacing with other components. // bookkeeping and interfacing with other components.
type GraphConnector struct { type Controller struct {
AC api.Client AC api.Client
tenant string tenant string
@ -43,20 +38,20 @@ type GraphConnector struct {
// reference for processes that choose to populate the values. // reference for processes that choose to populate the values.
IDNameLookup idname.Cacher IDNameLookup idname.Cacher
// wg is used to track completion of GC tasks // wg is used to track completion of tasks
wg *sync.WaitGroup wg *sync.WaitGroup
region *trace.Region region *trace.Region
// mutex used to synchronize updates to `status` // mutex used to synchronize updates to `status`
mu sync.Mutex mu sync.Mutex
status support.ConnectorOperationStatus // contains the status of the last run status status support.ControllerOperationStatus // contains the status of the last run status
} }
func NewGraphConnector( func NewController(
ctx context.Context, ctx context.Context,
acct account.Account, acct account.Account,
r Resource, rc resource.Category,
) (*GraphConnector, error) { ) (*Controller, error) {
creds, err := acct.M365Config() creds, err := acct.M365Config()
if err != nil { if err != nil {
return nil, clues.Wrap(err, "retrieving m365 account configuration").WithClues(ctx) return nil, clues.Wrap(err, "retrieving m365 account configuration").WithClues(ctx)
@ -67,106 +62,97 @@ func NewGraphConnector(
return nil, clues.Wrap(err, "creating api client").WithClues(ctx) return nil, clues.Wrap(err, "creating api client").WithClues(ctx)
} }
rc, err := r.resourceClient(ac) rCli, err := getResourceClient(rc, ac)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "creating resource client").WithClues(ctx) return nil, clues.Wrap(err, "creating resource client").WithClues(ctx)
} }
gc := GraphConnector{ ctrl := Controller{
AC: ac, AC: ac,
IDNameLookup: idname.NewCache(nil), IDNameLookup: idname.NewCache(nil),
credentials: creds, credentials: creds,
ownerLookup: rc, ownerLookup: rCli,
tenant: acct.ID(), tenant: acct.ID(),
wg: &sync.WaitGroup{}, wg: &sync.WaitGroup{},
} }
return &gc, nil return &ctrl, nil
} }
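A minimal construction sketch, assuming an account.Account has already been configured elsewhere; the makeController wrapper and the import path for this package are assumptions, and only NewController's signature is taken from the code above.

import (
	"context"

	"github.com/alcionai/corso/src/internal/m365"
	"github.com/alcionai/corso/src/internal/m365/resource"
	"github.com/alcionai/corso/src/pkg/account"
)

// makeController is a hypothetical helper wrapping NewController.
func makeController(ctx context.Context, acct account.Account) (*m365.Controller, error) {
	// resource.Users targets user-owned data; resource.Sites would
	// target SharePoint sites instead.
	return m365.NewController(ctx, acct, resource.Users)
}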
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Processing Status // Processing Status
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// AwaitStatus waits for all gc tasks to complete and then returns status // Wait waits for all tasks to complete and then returns the status
func (gc *GraphConnector) Wait() *data.CollectionStats { func (ctrl *Controller) Wait() *data.CollectionStats {
defer func() { defer func() {
if gc.region != nil { if ctrl.region != nil {
gc.region.End() ctrl.region.End()
gc.region = nil ctrl.region = nil
} }
}() }()
gc.wg.Wait() ctrl.wg.Wait()
// clean up and reset statefulness // clean up and reset statefulness
dcs := data.CollectionStats{ dcs := data.CollectionStats{
Folders: gc.status.Folders, Folders: ctrl.status.Folders,
Objects: gc.status.Metrics.Objects, Objects: ctrl.status.Metrics.Objects,
Successes: gc.status.Metrics.Successes, Successes: ctrl.status.Metrics.Successes,
Bytes: gc.status.Metrics.Bytes, Bytes: ctrl.status.Metrics.Bytes,
Details: gc.status.String(), Details: ctrl.status.String(),
} }
gc.wg = &sync.WaitGroup{} ctrl.wg = &sync.WaitGroup{}
gc.status = support.ConnectorOperationStatus{} ctrl.status = support.ControllerOperationStatus{}
return &dcs return &dcs
} }
// UpdateStatus is used by gc initiated tasks to indicate completion // UpdateStatus is used by controller-initiated tasks to indicate completion
func (gc *GraphConnector) UpdateStatus(status *support.ConnectorOperationStatus) { func (ctrl *Controller) UpdateStatus(status *support.ControllerOperationStatus) {
defer gc.wg.Done() defer ctrl.wg.Done()
if status == nil { if status == nil {
return return
} }
gc.mu.Lock() ctrl.mu.Lock()
defer gc.mu.Unlock() defer ctrl.mu.Unlock()
gc.status = support.MergeStatus(gc.status, *status) ctrl.status = support.MergeStatus(ctrl.status, *status)
} }
// Status returns the current status of the graphConnector operation. // Status returns the current status of the controller process.
func (gc *GraphConnector) Status() support.ConnectorOperationStatus { func (ctrl *Controller) Status() support.ControllerOperationStatus {
return gc.status return ctrl.status
} }
// PrintableStatus returns a string formatted version of the GC status. // PrintableStatus returns a string formatted version of the status.
func (gc *GraphConnector) PrintableStatus() string { func (ctrl *Controller) PrintableStatus() string {
return gc.status.String() return ctrl.status.String()
} }
func (gc *GraphConnector) incrementAwaitingMessages() { func (ctrl *Controller) incrementAwaitingMessages() {
gc.wg.Add(1) ctrl.wg.Add(1)
} }
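Taken together, these methods form a small lifecycle contract for tasks inside this package: register a task before spawning it, report exactly once through UpdateStatus (which performs the wg.Done), then aggregate with Wait. A sketch of that contract; doWork is hypothetical.

func runOneTask(ctrl *Controller) *data.CollectionStats {
	ctrl.incrementAwaitingMessages() // one registration per spawned task
	go func() {
		// doWork is assumed to return a *support.ControllerOperationStatus.
		ctrl.UpdateStatus(doWork()) // merges metrics and signals wg.Done
	}()
	// Wait blocks until every registered task has reported, then resets
	// the controller's status for the next operation.
	return ctrl.Wait()
}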
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Resource Lookup Handling // Resource Lookup Handling
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
type Resource int func getResourceClient(rc resource.Category, ac api.Client) (*resourceClient, error) {
switch rc {
const ( case resource.Users:
UnknownResource Resource = iota return &resourceClient{enum: rc, getter: ac.Users()}, nil
AllResources // unused case resource.Sites:
Users return &resourceClient{enum: rc, getter: ac.Sites()}, nil
Sites
)
func (r Resource) resourceClient(ac api.Client) (*resourceClient, error) {
switch r {
case Users:
return &resourceClient{enum: r, getter: ac.Users()}, nil
case Sites:
return &resourceClient{enum: r, getter: ac.Sites()}, nil
default: default:
return nil, clues.New("unrecognized owner resource enum").With("resource_enum", r) return nil, clues.New("unrecognized owner resource enum").With("resource_enum", rc)
} }
} }
type resourceClient struct { type resourceClient struct {
enum Resource enum resource.Category
getter getIDAndNamer getter getIDAndNamer
} }
@ -243,18 +229,18 @@ func (r resourceClient) getOwnerIDAndNameFrom(
// The id-name swapper is optional. Some processes will look up all owners in // The id-name swapper is optional. Some processes will look up all owners in
// the tenant before reaching this step. In that case, the data gets handed // the tenant before reaching this step. In that case, the data gets handed
// down for this func to consume instead of performing further queries. The // down for this func to consume instead of performing further queries. The
// data gets stored inside the gc instance for later re-use. // data gets stored inside the controller instance for later re-use.
func (gc *GraphConnector) PopulateOwnerIDAndNamesFrom( func (ctrl *Controller) PopulateOwnerIDAndNamesFrom(
ctx context.Context, ctx context.Context,
owner string, // input value, can be either id or name owner string, // input value, can be either id or name
ins idname.Cacher, ins idname.Cacher,
) (string, string, error) { ) (string, string, error) {
id, name, err := gc.ownerLookup.getOwnerIDAndNameFrom(ctx, gc.AC, owner, ins) id, name, err := ctrl.ownerLookup.getOwnerIDAndNameFrom(ctx, ctrl.AC, owner, ins)
if err != nil { if err != nil {
return "", "", clues.Wrap(err, "identifying resource owner") return "", "", clues.Wrap(err, "identifying resource owner")
} }
gc.IDNameLookup = idname.NewCache(map[string]string{id: name}) ctrl.IDNameLookup = idname.NewCache(map[string]string{id: name})
return id, name, nil return id, name, nil
} }
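The intended call order, mirrored throughout the tests below, resolves the owner first and then stamps the resolved pair onto the selector. A sketch, where siteID is any id-or-name input:

id, name, err := ctrl.PopulateOwnerIDAndNamesFrom(ctx, siteID, nil) // nil: no pre-fetched cache
if err != nil {
	return err
}
sel := selectors.NewSharePointBackup([]string{id})
sel.SetDiscreteOwnerIDName(id, name)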

View File

@ -1,4 +1,4 @@
package connector package m365
import ( import (
"context" "context"
@ -13,31 +13,35 @@ import (
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
inMock "github.com/alcionai/corso/src/internal/common/idname/mock" inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
"github.com/alcionai/corso/src/internal/connector/mock"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/m365/mock"
"github.com/alcionai/corso/src/internal/m365/resource"
"github.com/alcionai/corso/src/internal/m365/stub"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
) )
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Unit tests // Unit tests
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
type GraphConnectorUnitSuite struct { type ControllerUnitSuite struct {
tester.Suite tester.Suite
} }
func TestGraphConnectorUnitSuite(t *testing.T) { func TestControllerUnitSuite(t *testing.T) {
suite.Run(t, &GraphConnectorUnitSuite{Suite: tester.NewUnitSuite(t)}) suite.Run(t, &ControllerUnitSuite{Suite: tester.NewUnitSuite(t)})
} }
func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() { func (suite *ControllerUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
const ( const (
id = "owner-id" id = "owner-id"
name = "owner-name" name = "owner-name"
@ -47,10 +51,10 @@ func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
itn = map[string]string{id: name} itn = map[string]string{id: name}
nti = map[string]string{name: id} nti = map[string]string{name: id}
lookup = &resourceClient{ lookup = &resourceClient{
enum: Users, enum: resource.Users,
getter: &mock.IDNameGetter{ID: id, Name: name}, getter: &mock.IDNameGetter{ID: id, Name: name},
} }
noLookup = &resourceClient{enum: Users, getter: &mock.IDNameGetter{}} noLookup = &resourceClient{enum: resource.Users, getter: &mock.IDNameGetter{}}
) )
table := []struct { table := []struct {
@ -211,9 +215,9 @@ func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
gc := &GraphConnector{ownerLookup: test.rc} ctrl := &Controller{ownerLookup: test.rc}
rID, rName, err := gc.PopulateOwnerIDAndNamesFrom(ctx, test.owner, test.ins) rID, rName, err := ctrl.PopulateOwnerIDAndNamesFrom(ctx, test.owner, test.ins)
test.expectErr(t, err, clues.ToCore(err)) test.expectErr(t, err, clues.ToCore(err))
assert.Equal(t, test.expectID, rID, "id") assert.Equal(t, test.expectID, rID, "id")
assert.Equal(t, test.expectName, rName, "name") assert.Equal(t, test.expectName, rName, "name")
@ -221,14 +225,14 @@ func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
} }
} }
func (suite *GraphConnectorUnitSuite) TestGraphConnector_Wait() { func (suite *ControllerUnitSuite) TestController_Wait() {
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
var ( var (
gc = &GraphConnector{ ctrl = &Controller{
wg: &sync.WaitGroup{}, wg: &sync.WaitGroup{},
region: &trace.Region{}, region: &trace.Region{},
} }
@ -240,13 +244,13 @@ func (suite *GraphConnectorUnitSuite) TestGraphConnector_Wait() {
status = support.CreateStatus(ctx, support.Backup, 1, metrics, "details") status = support.CreateStatus(ctx, support.Backup, 1, metrics, "details")
) )
gc.wg.Add(1) ctrl.wg.Add(1)
gc.UpdateStatus(status) ctrl.UpdateStatus(status)
result := gc.Wait() result := ctrl.Wait()
require.NotNil(t, result) require.NotNil(t, result)
assert.Nil(t, gc.region, "region") assert.Nil(t, ctrl.region, "region")
assert.Empty(t, gc.status, "status") assert.Empty(t, ctrl.status, "status")
assert.Equal(t, 1, result.Folders) assert.Equal(t, 1, result.Folders)
assert.Equal(t, 2, result.Objects) assert.Equal(t, 2, result.Objects)
assert.Equal(t, 3, result.Successes) assert.Equal(t, 3, result.Successes)
@ -257,15 +261,15 @@ func (suite *GraphConnectorUnitSuite) TestGraphConnector_Wait() {
// Integration tests // Integration tests
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
type GraphConnectorIntegrationSuite struct { type ControllerIntegrationSuite struct {
tester.Suite tester.Suite
connector *GraphConnector ctrl *Controller
user string user string
secondaryUser string secondaryUser string
} }
func TestGraphConnectorIntegrationSuite(t *testing.T) { func TestControllerIntegrationSuite(t *testing.T) {
suite.Run(t, &GraphConnectorIntegrationSuite{ suite.Run(t, &ControllerIntegrationSuite{
Suite: tester.NewIntegrationSuite( Suite: tester.NewIntegrationSuite(
t, t,
[][]string{tester.M365AcctCredEnvs}, [][]string{tester.M365AcctCredEnvs},
@ -273,37 +277,37 @@ func TestGraphConnectorIntegrationSuite(t *testing.T) {
}) })
} }
func (suite *GraphConnectorIntegrationSuite) SetupSuite() { func (suite *ControllerIntegrationSuite) SetupSuite() {
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
suite.connector = loadConnector(ctx, t, Users) suite.ctrl = newController(ctx, t, resource.Users)
suite.user = tester.M365UserID(t) suite.user = tester.M365UserID(t)
suite.secondaryUser = tester.SecondaryM365UserID(t) suite.secondaryUser = tester.SecondaryM365UserID(t)
tester.LogTimeOfTest(t) tester.LogTimeOfTest(t)
} }
func (suite *GraphConnectorIntegrationSuite) TestRestoreFailsBadService() { func (suite *ControllerIntegrationSuite) TestRestoreFailsBadService() {
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
var ( var (
dest = tester.DefaultTestRestoreDestination("") restoreCfg = testdata.DefaultRestoreConfig("")
sel = selectors.Selector{ sel = selectors.Selector{
Service: selectors.ServiceUnknown, Service: selectors.ServiceUnknown,
} }
) )
deets, err := suite.connector.ConsumeRestoreCollections( deets, err := suite.ctrl.ConsumeRestoreCollections(
ctx, ctx,
version.Backup, version.Backup,
sel, sel,
dest, restoreCfg,
control.Options{ control.Options{
RestorePermissions: true, RestorePermissions: true,
ToggleFeatures: control.Toggles{}, ToggleFeatures: control.Toggles{},
@ -313,14 +317,14 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreFailsBadService() {
assert.Error(t, err, clues.ToCore(err)) assert.Error(t, err, clues.ToCore(err))
assert.NotNil(t, deets) assert.NotNil(t, deets)
status := suite.connector.Wait() status := suite.ctrl.Wait()
assert.Equal(t, 0, status.Objects) assert.Equal(t, 0, status.Objects)
assert.Equal(t, 0, status.Folders) assert.Equal(t, 0, status.Folders)
assert.Equal(t, 0, status.Successes) assert.Equal(t, 0, status.Successes)
} }
func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() { func (suite *ControllerIntegrationSuite) TestEmptyCollections() {
dest := tester.DefaultTestRestoreDestination("") restoreCfg := testdata.DefaultRestoreConfig("")
table := []struct { table := []struct {
name string name string
col []data.RestoreCollection col []data.RestoreCollection
@ -377,11 +381,11 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
deets, err := suite.connector.ConsumeRestoreCollections( deets, err := suite.ctrl.ConsumeRestoreCollections(
ctx, ctx,
version.Backup, version.Backup,
test.sel, test.sel,
dest, restoreCfg,
control.Options{ control.Options{
RestorePermissions: true, RestorePermissions: true,
ToggleFeatures: control.Toggles{}, ToggleFeatures: control.Toggles{},
@ -391,7 +395,7 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, deets) assert.NotNil(t, deets)
stats := suite.connector.Wait() stats := suite.ctrl.Wait()
assert.Zero(t, stats.Objects) assert.Zero(t, stats.Objects)
assert.Zero(t, stats.Folders) assert.Zero(t, stats.Folders)
assert.Zero(t, stats.Successes) assert.Zero(t, stats.Successes)
@ -406,32 +410,32 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
func runRestore( func runRestore(
t *testing.T, t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument ctx context.Context, //revive:disable-line:context-as-argument
config ConfigInfo, config stub.ConfigInfo,
backupVersion int, backupVersion int,
collections []data.RestoreCollection, collections []data.RestoreCollection,
numRestoreItems int, numRestoreItems int,
) { ) {
t.Logf( t.Logf(
"Restoring collections to %s for resourceOwners(s) %v\n", "Restoring collections to %s for resourceOwners(s) %v\n",
config.Dest.ContainerName, config.RestoreCfg.Location,
config.ResourceOwners) config.ResourceOwners)
start := time.Now() start := time.Now()
restoreGC := loadConnector(ctx, t, config.Resource) restoreCtrl := newController(ctx, t, config.Resource)
restoreSel := getSelectorWith(t, config.Service, config.ResourceOwners, true) restoreSel := getSelectorWith(t, config.Service, config.ResourceOwners, true)
deets, err := restoreGC.ConsumeRestoreCollections( deets, err := restoreCtrl.ConsumeRestoreCollections(
ctx, ctx,
backupVersion, backupVersion,
restoreSel, restoreSel,
config.Dest, config.RestoreCfg,
config.Opts, config.Opts,
collections, collections,
fault.New(true)) fault.New(true))
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, deets) assert.NotNil(t, deets)
status := restoreGC.Wait() status := restoreCtrl.Wait()
runTime := time.Since(start) runTime := time.Since(start)
assert.Equal(t, numRestoreItems, status.Objects, "restored status.Objects") assert.Equal(t, numRestoreItems, status.Objects, "restored status.Objects")
@ -449,11 +453,11 @@ func runRestore(
func runBackupAndCompare( func runBackupAndCompare(
t *testing.T, t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument ctx context.Context, //revive:disable-line:context-as-argument
config ConfigInfo, config stub.ConfigInfo,
expectedData map[string]map[string][]byte, expectedData map[string]map[string][]byte,
totalItems int, totalItems int,
totalKopiaItems int, totalKopiaItems int,
inputCollections []ColInfo, inputCollections []stub.ColInfo,
) { ) {
t.Helper() t.Helper()
@ -472,7 +476,7 @@ func runBackupAndCompare(
for _, ro := range config.ResourceOwners { for _, ro := range config.ResourceOwners {
expectedDests = append(expectedDests, destAndCats{ expectedDests = append(expectedDests, destAndCats{
resourceOwner: ro, resourceOwner: ro,
dest: config.Dest.ContainerName, dest: config.RestoreCfg.Location,
cats: cats, cats: cats,
}) })
@ -480,14 +484,14 @@ func runBackupAndCompare(
nameToID[ro] = ro nameToID[ro] = ro
} }
backupGC := loadConnector(ctx, t, config.Resource) backupCtrl := newController(ctx, t, config.Resource)
backupGC.IDNameLookup = inMock.NewCache(idToName, nameToID) backupCtrl.IDNameLookup = inMock.NewCache(idToName, nameToID)
backupSel := backupSelectorForExpected(t, config.Service, expectedDests) backupSel := backupSelectorForExpected(t, config.Service, expectedDests)
t.Logf("Selective backup of %s\n", backupSel) t.Logf("Selective backup of %s\n", backupSel)
start := time.Now() start := time.Now()
dcs, excludes, canUsePreviousBackup, err := backupGC.ProduceBackupCollections( dcs, excludes, canUsePreviousBackup, err := backupCtrl.ProduceBackupCollections(
ctx, ctx,
backupSel, backupSel,
backupSel, backupSel,
@ -512,7 +516,7 @@ func runBackupAndCompare(
dcs, dcs,
config) config)
status := backupGC.Wait() status := backupCtrl.Wait()
assert.Equalf(t, totalItems+skipped, status.Objects, assert.Equalf(t, totalItems+skipped, status.Objects,
"backup status.Objects; wanted %d items + %d skipped", totalItems, skipped) "backup status.Objects; wanted %d items + %d skipped", totalItems, skipped)
@ -530,16 +534,16 @@ func runRestoreBackupTest(
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
config := ConfigInfo{ config := stub.ConfigInfo{
Opts: opts, Opts: opts,
Resource: test.resource, Resource: test.resourceCat,
Service: test.service, Service: test.service,
Tenant: tenant, Tenant: tenant,
ResourceOwners: resourceOwners, ResourceOwners: resourceOwners,
Dest: tester.DefaultTestRestoreDestination(""), RestoreCfg: testdata.DefaultRestoreConfig(""),
} }
totalItems, totalKopiaItems, collections, expectedData, err := GetCollectionsAndExpected( totalItems, totalKopiaItems, collections, expectedData, err := stub.GetCollectionsAndExpected(
config, config,
test.collections, test.collections,
version.Backup) version.Backup)
@ -575,16 +579,16 @@ func runRestoreTestWithVersion(
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
config := ConfigInfo{ config := stub.ConfigInfo{
Opts: opts, Opts: opts,
Resource: test.resource, Resource: test.resourceCat,
Service: test.service, Service: test.service,
Tenant: tenant, Tenant: tenant,
ResourceOwners: resourceOwners, ResourceOwners: resourceOwners,
Dest: tester.DefaultTestRestoreDestination(""), RestoreCfg: testdata.DefaultRestoreConfig(""),
} }
totalItems, _, collections, _, err := GetCollectionsAndExpected( totalItems, _, collections, _, err := stub.GetCollectionsAndExpected(
config, config,
test.collectionsPrevious, test.collectionsPrevious,
test.backupVersion) test.backupVersion)
@ -612,16 +616,16 @@ func runRestoreBackupTestVersions(
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
config := ConfigInfo{ config := stub.ConfigInfo{
Opts: opts, Opts: opts,
Resource: test.resource, Resource: test.resourceCat,
Service: test.service, Service: test.service,
Tenant: tenant, Tenant: tenant,
ResourceOwners: resourceOwners, ResourceOwners: resourceOwners,
Dest: tester.DefaultTestRestoreDestination(""), RestoreCfg: testdata.DefaultRestoreConfig(""),
} }
totalItems, _, collections, _, err := GetCollectionsAndExpected( totalItems, _, collections, _, err := stub.GetCollectionsAndExpected(
config, config,
test.collectionsPrevious, test.collectionsPrevious,
test.backupVersion) test.backupVersion)
@ -636,7 +640,7 @@ func runRestoreBackupTestVersions(
totalItems) totalItems)
// Get expected output for new version. // Get expected output for new version.
totalItems, totalKopiaItems, _, expectedData, err := GetCollectionsAndExpected( totalItems, totalKopiaItems, _, expectedData, err := stub.GetCollectionsAndExpected(
config, config,
test.collectionsLatest, test.collectionsLatest,
version.Backup) version.Backup)
@ -652,7 +656,7 @@ func runRestoreBackupTestVersions(
test.collectionsLatest) test.collectionsLatest)
} }
func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() { func (suite *ControllerIntegrationSuite) TestRestoreAndBackup() {
bodyText := "This email has some text. However, all the text is on the same line." bodyText := "This email has some text. However, all the text is on the same line."
subjectText := "Test message for restore" subjectText := "Test message for restore"
@ -660,25 +664,25 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
{ {
name: "EmailsWithAttachments", name: "EmailsWithAttachments",
service: path.ExchangeService, service: path.ExchangeService,
resource: Users, resourceCat: resource.Users,
collections: []ColInfo{ collections: []stub.ColInfo{
{ {
PathElements: []string{"Inbox"}, PathElements: []string{"Inbox"},
Category: path.EmailCategory, Category: path.EmailCategory,
Items: []ItemInfo{ Items: []stub.ItemInfo{
{ {
name: "someencodeditemID", Name: "someencodeditemID",
data: exchMock.MessageWithDirectAttachment( Data: exchMock.MessageWithDirectAttachment(
subjectText + "-1", subjectText + "-1",
), ),
lookupKey: subjectText + "-1", LookupKey: subjectText + "-1",
}, },
{ {
name: "someencodeditemID2", Name: "someencodeditemID2",
data: exchMock.MessageWithTwoAttachments( Data: exchMock.MessageWithTwoAttachments(
subjectText + "-2", subjectText + "-2",
), ),
lookupKey: subjectText + "-2", LookupKey: subjectText + "-2",
}, },
}, },
}, },
@ -687,74 +691,74 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
{ {
name: "MultipleEmailsMultipleFolders", name: "MultipleEmailsMultipleFolders",
service: path.ExchangeService, service: path.ExchangeService,
resource: Users, resourceCat: resource.Users,
collections: []ColInfo{ collections: []stub.ColInfo{
{ {
PathElements: []string{"Inbox"}, PathElements: []string{"Inbox"},
Category: path.EmailCategory, Category: path.EmailCategory,
Items: []ItemInfo{ Items: []stub.ItemInfo{
{ {
name: "someencodeditemID", Name: "someencodeditemID",
data: exchMock.MessageWithBodyBytes( Data: exchMock.MessageWithBodyBytes(
subjectText+"-1", subjectText+"-1",
bodyText+" 1.", bodyText+" 1.",
bodyText+" 1.", bodyText+" 1.",
), ),
lookupKey: subjectText + "-1", LookupKey: subjectText + "-1",
}, },
}, },
}, },
{ {
PathElements: []string{"Work"}, PathElements: []string{"Work"},
Category: path.EmailCategory, Category: path.EmailCategory,
Items: []ItemInfo{ Items: []stub.ItemInfo{
{ {
name: "someencodeditemID2", Name: "someencodeditemID2",
data: exchMock.MessageWithBodyBytes( Data: exchMock.MessageWithBodyBytes(
subjectText+"-2", subjectText+"-2",
bodyText+" 2.", bodyText+" 2.",
bodyText+" 2.", bodyText+" 2.",
), ),
lookupKey: subjectText + "-2", LookupKey: subjectText + "-2",
}, },
{ {
name: "someencodeditemID3", Name: "someencodeditemID3",
data: exchMock.MessageWithBodyBytes( Data: exchMock.MessageWithBodyBytes(
subjectText+"-3", subjectText+"-3",
bodyText+" 3.", bodyText+" 3.",
bodyText+" 3.", bodyText+" 3.",
), ),
lookupKey: subjectText + "-3", LookupKey: subjectText + "-3",
}, },
}, },
}, },
{ {
PathElements: []string{"Work", "Inbox"}, PathElements: []string{"Work", "Inbox"},
Category: path.EmailCategory, Category: path.EmailCategory,
Items: []ItemInfo{ Items: []stub.ItemInfo{
{ {
name: "someencodeditemID4", Name: "someencodeditemID4",
data: exchMock.MessageWithBodyBytes( Data: exchMock.MessageWithBodyBytes(
subjectText+"-4", subjectText+"-4",
bodyText+" 4.", bodyText+" 4.",
bodyText+" 4.", bodyText+" 4.",
), ),
lookupKey: subjectText + "-4", LookupKey: subjectText + "-4",
}, },
}, },
}, },
{ {
PathElements: []string{"Work", "Inbox", "Work"}, PathElements: []string{"Work", "Inbox", "Work"},
Category: path.EmailCategory, Category: path.EmailCategory,
Items: []ItemInfo{ Items: []stub.ItemInfo{
{ {
name: "someencodeditemID5", Name: "someencodeditemID5",
data: exchMock.MessageWithBodyBytes( Data: exchMock.MessageWithBodyBytes(
subjectText+"-5", subjectText+"-5",
bodyText+" 5.", bodyText+" 5.",
bodyText+" 5.", bodyText+" 5.",
), ),
lookupKey: subjectText + "-5", LookupKey: subjectText + "-5",
}, },
}, },
}, },
@ -763,26 +767,26 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
{ {
name: "MultipleContactsSingleFolder", name: "MultipleContactsSingleFolder",
service: path.ExchangeService, service: path.ExchangeService,
resource: Users, resourceCat: resource.Users,
collections: []ColInfo{ collections: []stub.ColInfo{
{ {
PathElements: []string{"Contacts"}, PathElements: []string{"Contacts"},
Category: path.ContactsCategory, Category: path.ContactsCategory,
Items: []ItemInfo{ Items: []stub.ItemInfo{
{ {
name: "someencodeditemID", Name: "someencodeditemID",
data: exchMock.ContactBytes("Ghimley"), Data: exchMock.ContactBytes("Ghimley"),
lookupKey: "Ghimley", LookupKey: "Ghimley",
}, },
{ {
name: "someencodeditemID2", Name: "someencodeditemID2",
data: exchMock.ContactBytes("Irgot"), Data: exchMock.ContactBytes("Irgot"),
lookupKey: "Irgot", LookupKey: "Irgot",
}, },
{ {
name: "someencodeditemID3", Name: "someencodeditemID3",
data: exchMock.ContactBytes("Jannes"), Data: exchMock.ContactBytes("Jannes"),
lookupKey: "Jannes", LookupKey: "Jannes",
}, },
}, },
}, },
@ -791,42 +795,42 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
{ {
name: "MultipleContactsMultipleFolders", name: "MultipleContactsMultipleFolders",
service: path.ExchangeService, service: path.ExchangeService,
resource: Users, resourceCat: resource.Users,
collections: []ColInfo{ collections: []stub.ColInfo{
{ {
PathElements: []string{"Work"}, PathElements: []string{"Work"},
Category: path.ContactsCategory, Category: path.ContactsCategory,
Items: []ItemInfo{ Items: []stub.ItemInfo{
{ {
name: "someencodeditemID", Name: "someencodeditemID",
data: exchMock.ContactBytes("Ghimley"), Data: exchMock.ContactBytes("Ghimley"),
lookupKey: "Ghimley", LookupKey: "Ghimley",
}, },
{ {
name: "someencodeditemID2", Name: "someencodeditemID2",
data: exchMock.ContactBytes("Irgot"), Data: exchMock.ContactBytes("Irgot"),
lookupKey: "Irgot", LookupKey: "Irgot",
}, },
{ {
name: "someencodeditemID3", Name: "someencodeditemID3",
data: exchMock.ContactBytes("Jannes"), Data: exchMock.ContactBytes("Jannes"),
lookupKey: "Jannes", LookupKey: "Jannes",
}, },
}, },
}, },
{ {
PathElements: []string{"Personal"}, PathElements: []string{"Personal"},
Category: path.ContactsCategory, Category: path.ContactsCategory,
Items: []ItemInfo{ Items: []stub.ItemInfo{
{ {
name: "someencodeditemID4", Name: "someencodeditemID4",
data: exchMock.ContactBytes("Argon"), Data: exchMock.ContactBytes("Argon"),
lookupKey: "Argon", LookupKey: "Argon",
}, },
{ {
name: "someencodeditemID5", Name: "someencodeditemID5",
data: exchMock.ContactBytes("Bernard"), Data: exchMock.ContactBytes("Bernard"),
lookupKey: "Bernard", LookupKey: "Bernard",
}, },
}, },
}, },
@ -909,7 +913,7 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
runRestoreBackupTest( runRestoreBackupTest(
suite.T(), suite.T(),
test, test,
suite.connector.tenant, suite.ctrl.tenant,
[]string{suite.user}, []string{suite.user},
control.Options{ control.Options{
RestorePermissions: true, RestorePermissions: true,
@ -919,32 +923,32 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
} }
} }
func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames() { func (suite *ControllerIntegrationSuite) TestMultiFolderBackupDifferentNames() {
table := []restoreBackupInfo{ table := []restoreBackupInfo{
{ {
name: "Contacts", name: "Contacts",
service: path.ExchangeService, service: path.ExchangeService,
resource: Users, resourceCat: resource.Users,
collections: []ColInfo{ collections: []stub.ColInfo{
{ {
PathElements: []string{"Work"}, PathElements: []string{"Work"},
Category: path.ContactsCategory, Category: path.ContactsCategory,
Items: []ItemInfo{ Items: []stub.ItemInfo{
{ {
name: "someencodeditemID", Name: "someencodeditemID",
data: exchMock.ContactBytes("Ghimley"), Data: exchMock.ContactBytes("Ghimley"),
lookupKey: "Ghimley", LookupKey: "Ghimley",
}, },
}, },
}, },
{ {
PathElements: []string{"Personal"}, PathElements: []string{"Personal"},
Category: path.ContactsCategory, Category: path.ContactsCategory,
Items: []ItemInfo{ Items: []stub.ItemInfo{
{ {
name: "someencodeditemID2", Name: "someencodeditemID2",
data: exchMock.ContactBytes("Irgot"), Data: exchMock.ContactBytes("Irgot"),
lookupKey: "Irgot", LookupKey: "Irgot",
}, },
}, },
}, },
@ -993,22 +997,22 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
allExpectedData := map[string]map[string][]byte{} allExpectedData := map[string]map[string][]byte{}
for i, collection := range test.collections { for i, collection := range test.collections {
// Get a dest per collection so they're independent. // Get a restoreCfg per collection so they're independent.
dest := tester.DefaultTestRestoreDestination("") restoreCfg := testdata.DefaultRestoreConfig("")
expectedDests = append(expectedDests, destAndCats{ expectedDests = append(expectedDests, destAndCats{
resourceOwner: suite.user, resourceOwner: suite.user,
dest: dest.ContainerName, dest: restoreCfg.Location,
cats: map[path.CategoryType]struct{}{ cats: map[path.CategoryType]struct{}{
collection.Category: {}, collection.Category: {},
}, },
}) })
totalItems, _, collections, expectedData, err := collectionsForInfo( totalItems, _, collections, expectedData, err := stub.CollectionsForInfo(
test.service, test.service,
suite.connector.tenant, suite.ctrl.tenant,
suite.user, suite.user,
dest, restoreCfg,
[]ColInfo{collection}, []stub.ColInfo{collection},
version.Backup, version.Backup,
) )
require.NoError(t, err) require.NoError(t, err)
@ -1023,15 +1027,15 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
"Restoring %v/%v collections to %s\n", "Restoring %v/%v collections to %s\n",
i+1, i+1,
len(test.collections), len(test.collections),
dest.ContainerName, restoreCfg.Location,
) )
restoreGC := loadConnector(ctx, t, test.resource) restoreCtrl := newController(ctx, t, test.resourceCat)
deets, err := restoreGC.ConsumeRestoreCollections( deets, err := restoreCtrl.ConsumeRestoreCollections(
ctx, ctx,
version.Backup, version.Backup,
restoreSel, restoreSel,
dest, restoreCfg,
control.Options{ control.Options{
RestorePermissions: true, RestorePermissions: true,
ToggleFeatures: control.Toggles{}, ToggleFeatures: control.Toggles{},
@ -1041,7 +1045,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, deets) require.NotNil(t, deets)
status := restoreGC.Wait() status := restoreCtrl.Wait()
// Always just 1 because it's just 1 collection. // Always just 1 because it's just 1 collection.
assert.Equal(t, totalItems, status.Objects, "status.Objects") assert.Equal(t, totalItems, status.Objects, "status.Objects")
assert.Equal(t, totalItems, status.Successes, "status.Successes") assert.Equal(t, totalItems, status.Successes, "status.Successes")
@ -1056,11 +1060,11 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
// Run a backup and compare its output with what we put in. // Run a backup and compare its output with what we put in.
backupGC := loadConnector(ctx, t, test.resource) backupCtrl := newController(ctx, t, test.resourceCat)
backupSel := backupSelectorForExpected(t, test.service, expectedDests) backupSel := backupSelectorForExpected(t, test.service, expectedDests)
t.Log("Selective backup of", backupSel) t.Log("Selective backup of", backupSel)
dcs, excludes, canUsePreviousBackup, err := backupGC.ProduceBackupCollections( dcs, excludes, canUsePreviousBackup, err := backupCtrl.ProduceBackupCollections(
ctx, ctx,
backupSel, backupSel,
backupSel, backupSel,
@ -1078,17 +1082,17 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
t.Log("Backup enumeration complete") t.Log("Backup enumeration complete")
ci := ConfigInfo{ ci := stub.ConfigInfo{
Opts: control.Options{RestorePermissions: true}, Opts: control.Options{RestorePermissions: true},
// Alright to be empty, needed for OneDrive. // Alright to be empty, needed for OneDrive.
Dest: control.RestoreDestination{}, RestoreCfg: control.RestoreConfig{},
} }
// Pull the data prior to waiting for the status as otherwise it will // Pull the data prior to waiting for the status as otherwise it will
// deadlock. // deadlock.
skipped := checkCollections(t, ctx, allItems, allExpectedData, dcs, ci) skipped := checkCollections(t, ctx, allItems, allExpectedData, dcs, ci)
status := backupGC.Wait() status := backupCtrl.Wait()
assert.Equal(t, allItems+skipped, status.Objects, "status.Objects") assert.Equal(t, allItems+skipped, status.Objects, "status.Objects")
assert.Equal(t, allItems+skipped, status.Successes, "status.Successes") assert.Equal(t, allItems+skipped, status.Successes, "status.Successes")
}) })
@ -1097,22 +1101,22 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
// TODO: this should only be run during smoke tests, not part of the standard CI. // TODO: this should only be run during smoke tests, not part of the standard CI.
// That's why it's set aside instead of being included in the other test set. // That's why it's set aside instead of being included in the other test set.
func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup_largeMailAttachment() { func (suite *ControllerIntegrationSuite) TestRestoreAndBackup_largeMailAttachment() {
subjectText := "Test message for restore with large attachment" subjectText := "Test message for restore with large attachment"
test := restoreBackupInfo{ test := restoreBackupInfo{
name: "EmailsWithLargeAttachments", name: "EmailsWithLargeAttachments",
service: path.ExchangeService, service: path.ExchangeService,
resource: Users, resourceCat: resource.Users,
collections: []ColInfo{ collections: []stub.ColInfo{
{ {
PathElements: []string{"Inbox"}, PathElements: []string{"Inbox"},
Category: path.EmailCategory, Category: path.EmailCategory,
Items: []ItemInfo{ Items: []stub.ItemInfo{
{ {
name: "35mbAttachment", Name: "35mbAttachment",
data: exchMock.MessageWithSizedAttachment(subjectText, 35), Data: exchMock.MessageWithSizedAttachment(subjectText, 35),
lookupKey: subjectText, LookupKey: subjectText,
}, },
}, },
}, },
@ -1122,7 +1126,7 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup_largeMailAttac
runRestoreBackupTest( runRestoreBackupTest(
suite.T(), suite.T(),
test, test,
suite.connector.tenant, suite.ctrl.tenant,
[]string{suite.user}, []string{suite.user},
control.Options{ control.Options{
RestorePermissions: true, RestorePermissions: true,
@ -1131,17 +1135,17 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup_largeMailAttac
) )
} }
func (suite *GraphConnectorIntegrationSuite) TestBackup_CreatesPrefixCollections() { func (suite *ControllerIntegrationSuite) TestBackup_CreatesPrefixCollections() {
table := []struct { table := []struct {
name string name string
resource Resource resourceCat resource.Category
selectorFunc func(t *testing.T) selectors.Selector selectorFunc func(t *testing.T) selectors.Selector
service path.ServiceType service path.ServiceType
categories []string categories []string
}{ }{
{ {
name: "Exchange", name: "Exchange",
resource: Users, resourceCat: resource.Users,
selectorFunc: func(t *testing.T) selectors.Selector { selectorFunc: func(t *testing.T) selectors.Selector {
sel := selectors.NewExchangeBackup([]string{suite.user}) sel := selectors.NewExchangeBackup([]string{suite.user})
sel.Include( sel.Include(
@ -1161,7 +1165,7 @@ func (suite *GraphConnectorIntegrationSuite) TestBackup_CreatesPrefixCollections
}, },
{ {
name: "OneDrive", name: "OneDrive",
resource: Users, resourceCat: resource.Users,
selectorFunc: func(t *testing.T) selectors.Selector { selectorFunc: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup([]string{suite.user}) sel := selectors.NewOneDriveBackup([]string{suite.user})
sel.Include(sel.Folders([]string{selectors.NoneTgt})) sel.Include(sel.Folders([]string{selectors.NoneTgt}))
@ -1175,7 +1179,7 @@ func (suite *GraphConnectorIntegrationSuite) TestBackup_CreatesPrefixCollections
}, },
{ {
name: "SharePoint", name: "SharePoint",
resource: Sites, resourceCat: resource.Sites,
selectorFunc: func(t *testing.T) selectors.Selector { selectorFunc: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup([]string{tester.M365SiteID(t)}) sel := selectors.NewSharePointBackup([]string{tester.M365SiteID(t)})
sel.Include( sel.Include(
@ -1205,18 +1209,18 @@ func (suite *GraphConnectorIntegrationSuite) TestBackup_CreatesPrefixCollections
defer flush() defer flush()
var ( var (
backupGC = loadConnector(ctx, t, test.resource) backupCtrl = newController(ctx, t, test.resourceCat)
backupSel = test.selectorFunc(t) backupSel = test.selectorFunc(t)
errs = fault.New(true) errs = fault.New(true)
start = time.Now() start = time.Now()
) )
id, name, err := backupGC.PopulateOwnerIDAndNamesFrom(ctx, backupSel.DiscreteOwner, nil) id, name, err := backupCtrl.PopulateOwnerIDAndNamesFrom(ctx, backupSel.DiscreteOwner, nil)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
backupSel.SetDiscreteOwnerIDName(id, name) backupSel.SetDiscreteOwnerIDName(id, name)
dcs, excludes, canUsePreviousBackup, err := backupGC.ProduceBackupCollections( dcs, excludes, canUsePreviousBackup, err := backupCtrl.ProduceBackupCollections(
ctx, ctx,
inMock.NewProvider(id, name), inMock.NewProvider(id, name),
backupSel, backupSel,
@ -1263,9 +1267,172 @@ func (suite *GraphConnectorIntegrationSuite) TestBackup_CreatesPrefixCollections
assert.ElementsMatch(t, test.categories, foundCategories) assert.ElementsMatch(t, test.categories, foundCategories)
backupGC.Wait() backupCtrl.Wait()
assert.NoError(t, errs.Failure()) assert.NoError(t, errs.Failure())
}) })
} }
} }
type DisconnectedUnitSuite struct {
tester.Suite
}
func TestDisconnectedUnitSuite(t *testing.T) {
s := &DisconnectedUnitSuite{
Suite: tester.NewUnitSuite(t),
}
suite.Run(t, s)
}
func statusTestTask(
t *testing.T,
ctrl *Controller,
objects, success, folder int,
) {
ctx, flush := tester.NewContext(t)
defer flush()
status := support.CreateStatus(
ctx,
support.Restore, folder,
support.CollectionMetrics{
Objects: objects,
Successes: success,
Bytes: 0,
},
"statusTestTask")
ctrl.UpdateStatus(status)
}
func (suite *DisconnectedUnitSuite) TestController_Status() {
t := suite.T()
ctrl := Controller{wg: &sync.WaitGroup{}}
// Two tasks
ctrl.incrementAwaitingMessages()
ctrl.incrementAwaitingMessages()
// Each helper task processes 4 objects, 1 success, and 1 folder
go statusTestTask(t, &ctrl, 4, 1, 1)
go statusTestTask(t, &ctrl, 4, 1, 1)
stats := ctrl.Wait()
assert.NotEmpty(t, ctrl.PrintableStatus())
// Expect 8 objects
assert.Equal(t, 8, stats.Objects)
// Expect 2 successes
assert.Equal(t, 2, stats.Successes)
// Expect 2 folders
assert.Equal(t, 2, stats.Folders)
}
func (suite *DisconnectedUnitSuite) TestVerifyBackupInputs_allServices() {
sites := []string{"abc.site.foo", "bar.site.baz"}
tests := []struct {
name string
excludes func(t *testing.T) selectors.Selector
filters func(t *testing.T) selectors.Selector
includes func(t *testing.T) selectors.Selector
checkError assert.ErrorAssertionFunc
}{
{
name: "Valid User",
checkError: assert.NoError,
excludes: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup([]string{"elliotReid@someHospital.org", "foo@SomeCompany.org"})
sel.Exclude(selTD.OneDriveBackupFolderScope(sel))
sel.DiscreteOwner = "elliotReid@someHospital.org"
return sel.Selector
},
filters: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup([]string{"elliotReid@someHospital.org", "foo@SomeCompany.org"})
sel.Filter(selTD.OneDriveBackupFolderScope(sel))
sel.DiscreteOwner = "elliotReid@someHospital.org"
return sel.Selector
},
includes: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup([]string{"elliotReid@someHospital.org", "foo@SomeCompany.org"})
sel.Include(selTD.OneDriveBackupFolderScope(sel))
sel.DiscreteOwner = "elliotReid@someHospital.org"
return sel.Selector
},
},
{
name: "Invalid User",
checkError: assert.NoError,
excludes: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup([]string{"foo@SomeCompany.org"})
sel.Exclude(selTD.OneDriveBackupFolderScope(sel))
return sel.Selector
},
filters: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup([]string{"foo@SomeCompany.org"})
sel.Filter(selTD.OneDriveBackupFolderScope(sel))
return sel.Selector
},
includes: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup([]string{"foo@SomeCompany.org"})
sel.Include(selTD.OneDriveBackupFolderScope(sel))
return sel.Selector
},
},
{
name: "valid sites",
checkError: assert.NoError,
excludes: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup([]string{"abc.site.foo", "bar.site.baz"})
sel.DiscreteOwner = "abc.site.foo"
sel.Exclude(sel.AllData())
return sel.Selector
},
filters: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup([]string{"abc.site.foo", "bar.site.baz"})
sel.DiscreteOwner = "abc.site.foo"
sel.Filter(sel.AllData())
return sel.Selector
},
includes: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup([]string{"abc.site.foo", "bar.site.baz"})
sel.DiscreteOwner = "abc.site.foo"
sel.Include(sel.AllData())
return sel.Selector
},
},
{
name: "invalid sites",
checkError: assert.Error,
excludes: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup([]string{"fnords.smarfs.brawnhilda"})
sel.Exclude(sel.AllData())
return sel.Selector
},
filters: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup([]string{"fnords.smarfs.brawnhilda"})
sel.Filter(sel.AllData())
return sel.Selector
},
includes: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup([]string{"fnords.smarfs.brawnhilda"})
sel.Include(sel.AllData())
return sel.Selector
},
},
}
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
err := verifyBackupInputs(test.excludes(t), sites)
test.checkError(t, err, clues.ToCore(err))
err = verifyBackupInputs(test.filters(t), sites)
test.checkError(t, err, clues.ToCore(err))
err = verifyBackupInputs(test.includes(t), sites)
test.checkError(t, err, clues.ToCore(err))
})
}
}

View File

@ -6,7 +6,7 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"

View File

@ -9,7 +9,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/discovery" "github.com/alcionai/corso/src/internal/m365/discovery"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/credentials" "github.com/alcionai/corso/src/pkg/credentials"

View File

@ -2,14 +2,18 @@ package exchange
import ( import (
"context" "context"
"encoding/json"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/pii" "github.com/alcionai/corso/src/internal/common/pii"
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
@ -18,7 +22,303 @@ import (
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
) )
// filterContainersAndFillCollections is a utility function // MetadataFileNames produces the category-specific set of filenames used to
// store graph metadata such as delta tokens and folderID->path references.
func MetadataFileNames(cat path.CategoryType) []string {
switch cat {
case path.EmailCategory, path.ContactsCategory:
return []string{graph.DeltaURLsFileName, graph.PreviousPathFileName}
default:
return []string{graph.PreviousPathFileName}
}
}
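Only the email and contacts categories track delta tokens here, so everything else (events included) falls through to the path-only default; for illustration:

MetadataFileNames(path.EmailCategory)  // [graph.DeltaURLsFileName, graph.PreviousPathFileName]
MetadataFileNames(path.EventsCategory) // [graph.PreviousPathFileName]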
type CatDeltaPaths map[path.CategoryType]DeltaPaths
type DeltaPaths map[string]DeltaPath
func (dps DeltaPaths) AddDelta(k, d string) {
dp, ok := dps[k]
if !ok {
dp = DeltaPath{}
}
dp.Delta = d
dps[k] = dp
}
func (dps DeltaPaths) AddPath(k, p string) {
dp, ok := dps[k]
if !ok {
dp = DeltaPath{}
}
dp.Path = p
dps[k] = dp
}
type DeltaPath struct {
Delta string
Path string
}
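AddDelta and AddPath upsert into the same entry, so a container's delta token and path may arrive in either order; a small illustration with hypothetical values:

dps := DeltaPaths{}
dps.AddDelta("container-id", "delta-token")
dps.AddPath("container-id", "tenant/exchange/user/email/Inbox")
// dps["container-id"] == DeltaPath{Delta: "delta-token", Path: "tenant/exchange/user/email/Inbox"}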
// parseMetadataCollections produces a map of structs holding delta
// and path lookup maps.
func parseMetadataCollections(
ctx context.Context,
colls []data.RestoreCollection,
) (CatDeltaPaths, bool, error) {
// cdp stores the delta token and previous path metadata, per category
cdp := CatDeltaPaths{
path.ContactsCategory: {},
path.EmailCategory: {},
path.EventsCategory: {},
}
// found tracks the metadata we've loaded, to make sure we don't
// fetch overlapping copies.
found := map[path.CategoryType]map[string]struct{}{
path.ContactsCategory: {},
path.EmailCategory: {},
path.EventsCategory: {},
}
// errors from metadata items should not stop the backup,
// but they should prevent us from using previous backups
errs := fault.New(true)
for _, coll := range colls {
var (
breakLoop bool
items = coll.Items(ctx, errs)
category = coll.FullPath().Category()
)
for {
select {
case <-ctx.Done():
return nil, false, clues.Wrap(ctx.Err(), "parsing collection metadata").WithClues(ctx)
case item, ok := <-items:
if !ok || errs.Failure() != nil {
breakLoop = true
break
}
var (
m = map[string]string{}
cdps = cdp[category]
)
err := json.NewDecoder(item.ToReader()).Decode(&m)
if err != nil {
return nil, false, clues.Wrap(err, "decoding metadata json").WithClues(ctx)
}
switch item.UUID() {
case graph.PreviousPathFileName:
if _, ok := found[category]["path"]; ok {
return nil, false, clues.Wrap(clues.New(category.String()), "multiple versions of path metadata").WithClues(ctx)
}
for k, p := range m {
cdps.AddPath(k, p)
}
found[category]["path"] = struct{}{}
case graph.DeltaURLsFileName:
if _, ok := found[category]["delta"]; ok {
return nil, false, clues.Wrap(clues.New(category.String()), "multiple versions of delta metadata").WithClues(ctx)
}
for k, d := range m {
cdps.AddDelta(k, d)
}
found[category]["delta"] = struct{}{}
}
cdp[category] = cdps
}
if breakLoop {
break
}
}
}
if errs.Failure() != nil {
logger.CtxErr(ctx, errs.Failure()).Info("reading metadata collection items")
return CatDeltaPaths{
path.ContactsCategory: {},
path.EmailCategory: {},
path.EventsCategory: {},
}, false, nil
}
// Remove any entry that lacks a previous path, even if it has a delta.
// Such metadata is considered incomplete, and the container will incur
// a complete backup on the next run.
for _, dps := range cdp {
for k, dp := range dps {
if len(dp.Path) == 0 {
delete(dps, k)
}
}
}
return cdp, true, nil
}
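Each metadata item decodes into a flat map[string]string keyed by container ID, one map per file. A hedged sketch of the payloads this parser expects, with illustrative IDs and values (metadataColls is assumed to hold the previous backup's metadata collections):

// graph.PreviousPathFileName item, per category:
//   {"folder-id": "tenant/exchange/user-id/email/Inbox"}
// graph.DeltaURLsFileName item, per category:
//   {"folder-id": "<opaque delta token>"}
cdps, canUsePrevious, err := parseMetadataCollections(ctx, metadataColls)
// cdps[path.EmailCategory]["folder-id"] then holds both values; an entry
// with a delta but no previous path is dropped as incomplete.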
// ProduceBackupCollections returns a set of BackupCollections which the
// caller can use to read mailbox data out for the specified user.
func ProduceBackupCollections(
ctx context.Context,
ac api.Client,
selector selectors.Selector,
tenantID string,
user idname.Provider,
metadata []data.RestoreCollection,
su support.StatusUpdater,
ctrlOpts control.Options,
errs *fault.Bus,
) ([]data.BackupCollection, *prefixmatcher.StringSetMatcher, bool, error) {
eb, err := selector.ToExchangeBackup()
if err != nil {
return nil, nil, false, clues.Wrap(err, "exchange dataCollection selector").WithClues(ctx)
}
var (
collections = []data.BackupCollection{}
el = errs.Local()
categories = map[path.CategoryType]struct{}{}
handlers = BackupHandlers(ac)
)
// Turn on concurrency limiter middleware for exchange backups
// unless explicitly disabled through the DisableConcurrencyLimiterFN CLI flag
if !ctrlOpts.ToggleFeatures.DisableConcurrencyLimiter {
graph.InitializeConcurrencyLimiter(ctrlOpts.Parallelism.ItemFetch)
}
cdps, canUsePreviousBackup, err := parseMetadataCollections(ctx, metadata)
if err != nil {
return nil, nil, false, err
}
for _, scope := range eb.Scopes() {
if el.Failure() != nil {
break
}
dcs, err := createCollections(
ctx,
handlers,
tenantID,
user,
scope,
cdps[scope.Category().PathType()],
ctrlOpts,
su,
errs)
if err != nil {
el.AddRecoverable(ctx, err)
continue
}
categories[scope.Category().PathType()] = struct{}{}
collections = append(collections, dcs...)
}
if len(collections) > 0 {
baseCols, err := graph.BaseCollections(
ctx,
collections,
tenantID,
user.ID(),
path.ExchangeService,
categories,
su,
errs)
if err != nil {
return nil, nil, false, err
}
collections = append(collections, baseCols...)
}
return collections, nil, canUsePreviousBackup, el.Failure()
}
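A sketch of how a caller might drive this entry point; the metadata argument is assumed to come from the previous backup, and passing nil forces a full enumeration:

cols, excluded, canUsePrev, err := exchange.ProduceBackupCollections(
	ctx,
	ac,            // api.Client
	sel.Selector,  // an Exchange backup selector
	tenantID,
	userProvider,  // idname.Provider for the mailbox owner
	nil,           // no prior metadata: full enumeration
	statusUpdater, // support.StatusUpdater callback
	control.Options{},
	fault.New(true))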
// createCollections is a utility function that retrieves M365
// IDs through the Microsoft Graph API. The selectors.ExchangeScope
// determines the type of collections that are retrieved.
func createCollections(
ctx context.Context,
handlers map[path.CategoryType]backupHandler,
tenantID string,
user idname.Provider,
scope selectors.ExchangeScope,
dps DeltaPaths,
ctrlOpts control.Options,
su support.StatusUpdater,
errs *fault.Bus,
) ([]data.BackupCollection, error) {
ctx = clues.Add(ctx, "category", scope.Category().PathType())
var (
allCollections = make([]data.BackupCollection, 0)
category = scope.Category().PathType()
qp = graph.QueryParams{
Category: category,
ResourceOwner: user,
TenantID: tenantID,
}
)
handler, ok := handlers[category]
if !ok {
return nil, clues.New("unsupported backup category type").WithClues(ctx)
}
foldersComplete := observe.MessageWithCompletion(
ctx,
observe.Bulletf("%s", qp.Category))
defer close(foldersComplete)
rootFolder, cc := handler.NewContainerCache(user.ID())
if err := cc.Populate(ctx, errs, rootFolder); err != nil {
return nil, clues.Wrap(err, "populating container cache")
}
collections, err := populateCollections(
ctx,
qp,
handler,
su,
cc,
scope,
dps,
ctrlOpts,
errs)
if err != nil {
return nil, clues.Wrap(err, "filling collections")
}
foldersComplete <- struct{}{}
for _, coll := range collections {
allCollections = append(allCollections, coll)
}
return allCollections, nil
}
// populateCollections is a utility function
// that places the M365 object ids belonging to specific directories // that places the M365 object ids belonging to specific directories
// into a BackupCollection. Messages outside of those directories are omitted. // into a BackupCollection. Messages outside of those directories are omitted.
// @param collection is filled in during this function. // @param collection is filled in during this function.
@ -27,7 +327,7 @@ import (
// TODO(ashmrtn): This should really return []data.BackupCollection but // TODO(ashmrtn): This should really return []data.BackupCollection but
// unfortunately some of our tests rely on being able to lookup returned // unfortunately some of our tests rely on being able to lookup returned
// collections by ID and it would be non-trivial to change them. // collections by ID and it would be non-trivial to change them.
func filterContainersAndFillCollections( func populateCollections(
ctx context.Context, ctx context.Context,
qp graph.QueryParams, qp graph.QueryParams,
bh backupHandler, bh backupHandler,
@ -104,7 +404,7 @@ func filterContainersAndFillCollections(
!ctrlOpts.ToggleFeatures.DisableDelta) !ctrlOpts.ToggleFeatures.DisableDelta)
if err != nil { if err != nil {
if !graph.IsErrDeletedInFlight(err) { if !graph.IsErrDeletedInFlight(err) {
el.AddRecoverable(clues.Stack(err).Label(fault.LabelForceNoBackupCreation)) el.AddRecoverable(ctx, clues.Stack(err).Label(fault.LabelForceNoBackupCreation))
continue continue
} }
@ -167,7 +467,7 @@ func filterContainersAndFillCollections(
) )
if collections[id] != nil { if collections[id] != nil {
el.AddRecoverable(clues.Wrap(err, "conflict: tombstone exists for a live collection").WithClues(ictx)) el.AddRecoverable(ctx, clues.Wrap(err, "conflict: tombstone exists for a live collection").WithClues(ictx))
continue continue
} }

View File

@ -1,7 +1,9 @@
package exchange package exchange
import ( import (
"bytes"
"context" "context"
"sync"
"testing" "testing"
"github.com/alcionai/clues" "github.com/alcionai/clues"
@ -11,9 +13,9 @@ import (
inMock "github.com/alcionai/corso/src/internal/common/idname/mock" inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
@ -107,12 +109,12 @@ func (m mockResolver) Items() []graph.CachedContainer {
return m.items return m.items
} }
func (m mockResolver) AddToCache(ctx context.Context, gc graph.Container) error { func (m mockResolver) AddToCache(ctx context.Context, ctrl graph.Container) error {
if len(m.added) == 0 { if len(m.added) == 0 {
m.added = map[string]string{} m.added = map[string]string{}
} }
m.added[ptr.Val(gc.GetDisplayName())] = ptr.Val(gc.GetId()) m.added[ptr.Val(ctrl.GetDisplayName())] = ptr.Val(ctrl.GetId())
return nil return nil
} }
@ -125,33 +127,765 @@ func (m mockResolver) LocationInCache(string) (string, bool)
func (m mockResolver) Populate(context.Context, *fault.Bus, string, ...string) error { return nil } func (m mockResolver) Populate(context.Context, *fault.Bus, string, ...string) error { return nil }
// ---------------------------------------------------------------------------
// tests // Unit tests
// ---------------------------------------------------------------------------
type ServiceIteratorsSuite struct { type DataCollectionsUnitSuite struct {
tester.Suite
}
func TestDataCollectionsUnitSuite(t *testing.T) {
suite.Run(t, &DataCollectionsUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
type fileValues struct {
fileName string
value string
}
table := []struct {
name string
data []fileValues
expect map[string]DeltaPath
canUsePreviousBackup bool
expectError assert.ErrorAssertionFunc
}{
{
name: "delta urls only",
data: []fileValues{
{graph.DeltaURLsFileName, "delta-link"},
},
expect: map[string]DeltaPath{},
canUsePreviousBackup: true,
expectError: assert.NoError,
},
{
name: "multiple delta urls",
data: []fileValues{
{graph.DeltaURLsFileName, "delta-link"},
{graph.DeltaURLsFileName, "delta-link-2"},
},
canUsePreviousBackup: false,
expectError: assert.Error,
},
{
name: "previous path only",
data: []fileValues{
{graph.PreviousPathFileName, "prev-path"},
},
expect: map[string]DeltaPath{
"key": {
Delta: "delta-link",
Path: "prev-path",
},
},
canUsePreviousBackup: true,
expectError: assert.NoError,
},
{
name: "multiple previous paths",
data: []fileValues{
{graph.PreviousPathFileName, "prev-path"},
{graph.PreviousPathFileName, "prev-path-2"},
},
canUsePreviousBackup: false,
expectError: assert.Error,
},
{
name: "delta urls and previous paths",
data: []fileValues{
{graph.DeltaURLsFileName, "delta-link"},
{graph.PreviousPathFileName, "prev-path"},
},
expect: map[string]DeltaPath{
"key": {
Delta: "delta-link",
Path: "prev-path",
},
},
canUsePreviousBackup: true,
expectError: assert.NoError,
},
{
name: "delta urls and empty previous paths",
data: []fileValues{
{graph.DeltaURLsFileName, "delta-link"},
{graph.PreviousPathFileName, ""},
},
expect: map[string]DeltaPath{},
canUsePreviousBackup: true,
expectError: assert.NoError,
},
{
name: "empty delta urls and previous paths",
data: []fileValues{
{graph.DeltaURLsFileName, ""},
{graph.PreviousPathFileName, "prev-path"},
},
expect: map[string]DeltaPath{
"key": {
Delta: "delta-link",
Path: "prev-path",
},
},
canUsePreviousBackup: true,
expectError: assert.NoError,
},
{
name: "delta urls with special chars",
data: []fileValues{
{graph.DeltaURLsFileName, "`!@#$%^&*()_[]{}/\"\\"},
{graph.PreviousPathFileName, "prev-path"},
},
expect: map[string]DeltaPath{
"key": {
Delta: "`!@#$%^&*()_[]{}/\"\\",
Path: "prev-path",
},
},
canUsePreviousBackup: true,
expectError: assert.NoError,
},
{
name: "delta urls with escaped chars",
data: []fileValues{
{graph.DeltaURLsFileName, `\n\r\t\b\f\v\0\\`},
{graph.PreviousPathFileName, "prev-path"},
},
expect: map[string]DeltaPath{
"key": {
Delta: "\\n\\r\\t\\b\\f\\v\\0\\\\",
Path: "prev-path",
},
},
canUsePreviousBackup: true,
expectError: assert.NoError,
},
{
name: "delta urls with newline char runes",
data: []fileValues{
// rune(92) = \, rune(110) = n. Ensuring it's not possible to
// error in serializing/deserializing and produce a single newline
// character from those two runes.
{graph.DeltaURLsFileName, string([]rune{rune(92), rune(110)})},
{graph.PreviousPathFileName, "prev-path"},
},
expect: map[string]DeltaPath{
"key": {
Delta: "\\n",
Path: "prev-path",
},
},
canUsePreviousBackup: true,
expectError: assert.NoError,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
entries := []graph.MetadataCollectionEntry{}
for _, d := range test.data {
entries = append(
entries,
graph.NewMetadataEntry(d.fileName, map[string]string{"key": d.value}))
}
coll, err := graph.MakeMetadataCollection(
"t", "u",
path.ExchangeService,
path.EmailCategory,
entries,
func(cos *support.ControllerOperationStatus) {},
)
require.NoError(t, err, clues.ToCore(err))
cdps, canUsePreviousBackup, err := parseMetadataCollections(ctx, []data.RestoreCollection{
data.NoFetchRestoreCollection{Collection: coll},
})
test.expectError(t, err, clues.ToCore(err))
assert.Equal(t, test.canUsePreviousBackup, canUsePreviousBackup, "can use previous backup")
emails := cdps[path.EmailCategory]
assert.Len(t, emails, len(test.expect))
for k, v := range test.expect {
assert.Equal(t, v.Delta, emails[k].Delta, "delta")
assert.Equal(t, v.Path, emails[k].Path, "path")
}
})
}
}
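// For reference, a hedged sketch of the metadata shape round-tripped above,
// assuming DeltaPaths is a map[string]DeltaPath: each metadata file serializes
// a map of container key -> value, and parseMetadataCollections zips the delta
// and path maps together. Per the table above, entries with an empty Path are
// dropped, while an empty Delta is preserved (non-delta incrementals remain
// possible).
//
//	deltas := map[string]string{"key": "delta-link"} // graph.DeltaURLsFileName
//	paths := map[string]string{"key": "prev-path"}   // graph.PreviousPathFileName
//	// expected merge:
//	_ = DeltaPaths{"key": DeltaPath{Delta: "delta-link", Path: "prev-path"}}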
type failingColl struct {
t *testing.T
}
func (f failingColl) Items(ctx context.Context, errs *fault.Bus) <-chan data.Stream {
ic := make(chan data.Stream)
defer close(ic)
errs.AddRecoverable(ctx, assert.AnError)
return ic
}
func (f failingColl) FullPath() path.Path {
tmp, err := path.Build(
"tenant",
"user",
path.ExchangeService,
path.EmailCategory,
false,
"inbox")
require.NoError(f.t, err, clues.ToCore(err))
return tmp
}
func (f failingColl) FetchItemByName(context.Context, string) (data.Stream, error) {
// no fetch calls will be made
return nil, nil
}
// This check ensures that we don't error out on read failures, but still
// return canUsePreviousBackup as false.
func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections_ReadFailure() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
fc := failingColl{t}
_, canUsePreviousBackup, err := parseMetadataCollections(ctx, []data.RestoreCollection{fc})
require.NoError(t, err)
require.False(t, canUsePreviousBackup)
}
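// A short sketch of the fault.Bus behavior this test leans on, assuming the
// corso fault semantics: AddRecoverable records an error without aborting,
// and Failure only reports a fatal (or fail-fast-promoted) error.
//
//	errs := fault.New(false)                 // fail-fast disabled
//	errs.AddRecoverable(ctx, assert.AnError) // recorded, not fatal
//	_ = errs.Failure()                       // nil: the run can still proceed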
// ---------------------------------------------------------------------------
// Integration tests
// ---------------------------------------------------------------------------
func newStatusUpdater(t *testing.T, wg *sync.WaitGroup) func(status *support.ControllerOperationStatus) {
updater := func(status *support.ControllerOperationStatus) {
defer wg.Done()
}
return updater
}
type DataCollectionsIntegrationSuite struct {
tester.Suite
user string
site string
tenantID string
ac api.Client
}
func TestDataCollectionsIntegrationSuite(t *testing.T) {
suite.Run(t, &DataCollectionsIntegrationSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tester.M365AcctCredEnvs},
),
})
}
func (suite *DataCollectionsIntegrationSuite) SetupSuite() {
suite.user = tester.M365UserID(suite.T())
suite.site = tester.M365SiteID(suite.T())
acct := tester.NewM365Account(suite.T())
creds, err := acct.M365Config()
require.NoError(suite.T(), err, clues.ToCore(err))
suite.ac, err = api.NewClient(creds)
require.NoError(suite.T(), err, clues.ToCore(err))
suite.tenantID = creds.AzureTenantID
tester.LogTimeOfTest(suite.T())
}
func (suite *DataCollectionsIntegrationSuite) TestMailFetch() {
var (
userID = tester.M365UserID(suite.T())
users = []string{userID}
handlers = BackupHandlers(suite.ac)
)
tests := []struct {
name string
scope selectors.ExchangeScope
folderNames map[string]struct{}
canMakeDeltaQueries bool
}{
{
name: "Folder Iterative Check Mail",
scope: selectors.NewExchangeBackup(users).MailFolders(
[]string{DefaultMailFolder},
selectors.PrefixMatch(),
)[0],
folderNames: map[string]struct{}{
DefaultMailFolder: {},
},
canMakeDeltaQueries: true,
},
{
name: "Folder Iterative Check Mail Non-Delta",
scope: selectors.NewExchangeBackup(users).MailFolders(
[]string{DefaultMailFolder},
selectors.PrefixMatch(),
)[0],
folderNames: map[string]struct{}{
DefaultMailFolder: {},
},
canMakeDeltaQueries: false,
},
}
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
ctrlOpts := control.Defaults()
ctrlOpts.ToggleFeatures.DisableDelta = !test.canMakeDeltaQueries
collections, err := createCollections(
ctx,
handlers,
suite.tenantID,
inMock.NewProvider(userID, userID),
test.scope,
DeltaPaths{},
ctrlOpts,
func(status *support.ControllerOperationStatus) {},
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
for _, c := range collections {
if c.FullPath().Service() == path.ExchangeMetadataService {
continue
}
require.NotEmpty(t, c.FullPath().Folder(false))
// TODO(ashmrtn): Remove when LocationPath is made part of BackupCollection
// interface.
if !assert.Implements(t, (*data.LocationPather)(nil), c) {
continue
}
loc := c.(data.LocationPather).LocationPath().String()
require.NotEmpty(t, loc)
delete(test.folderNames, loc)
}
assert.Empty(t, test.folderNames)
})
}
}
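// A minimal sketch of the optional-interface guard used above, assuming
// data.LocationPather is roughly `interface{ LocationPath() *path.Builder }`:
//
//	if lp, ok := c.(data.LocationPather); ok {
//		loc := lp.LocationPath().String() // human-readable folder location
//		_ = loc
//	}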
func (suite *DataCollectionsIntegrationSuite) TestDelta() {
var (
userID = tester.M365UserID(suite.T())
users = []string{userID}
handlers = BackupHandlers(suite.ac)
)
tests := []struct {
name string
scope selectors.ExchangeScope
}{
{
name: "Mail",
scope: selectors.NewExchangeBackup(users).MailFolders(
[]string{DefaultMailFolder},
selectors.PrefixMatch(),
)[0],
},
{
name: "Contacts",
scope: selectors.NewExchangeBackup(users).ContactFolders(
[]string{DefaultContactFolder},
selectors.PrefixMatch(),
)[0],
},
{
name: "Events",
scope: selectors.NewExchangeBackup(users).EventCalendars(
[]string{DefaultCalendar},
selectors.PrefixMatch(),
)[0],
},
}
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
// get collections without providing any delta history (ie: full backup)
collections, err := createCollections(
ctx,
handlers,
suite.tenantID,
inMock.NewProvider(userID, userID),
test.scope,
DeltaPaths{},
control.Defaults(),
func(status *support.ControllerOperationStatus) {},
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
assert.Less(t, 1, len(collections), "retrieved metadata and data collections")
var metadata data.BackupCollection
for _, coll := range collections {
if coll.FullPath().Service() == path.ExchangeMetadataService {
metadata = coll
}
}
require.NotNil(t, metadata, "collections contains a metadata collection")
cdps, canUsePreviousBackup, err := parseMetadataCollections(ctx, []data.RestoreCollection{
data.NoFetchRestoreCollection{Collection: metadata},
})
require.NoError(t, err, clues.ToCore(err))
assert.True(t, canUsePreviousBackup, "can use previous backup")
dps := cdps[test.scope.Category().PathType()]
// now do another backup with the previous delta tokens,
// which should only contain the difference.
collections, err = createCollections(
ctx,
handlers,
suite.tenantID,
inMock.NewProvider(userID, userID),
test.scope,
dps,
control.Defaults(),
func(status *support.ControllerOperationStatus) {},
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
// TODO(keepers): this isn't a very useful test at the moment. It needs to
// investigate the items in the original and delta collections to at least
// assert some minimum assumptions, such as "deltas should retrieve fewer items".
// Delta usage is commented out at the moment, anyway. So this is currently
// a sanity check that the minimum behavior won't break.
for _, coll := range collections {
if coll.FullPath().Service() != path.ExchangeMetadataService {
ec, ok := coll.(*Collection)
require.True(t, ok, "collection is *Collection")
assert.NotNil(t, ec)
}
}
})
}
}
// TestMailSerializationRegression verifies that all mail data stored in the
// test account can be successfully downloaded into bytes and restored into
// M365 mail objects
func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var (
wg sync.WaitGroup
users = []string{suite.user}
handlers = BackupHandlers(suite.ac)
)
sel := selectors.NewExchangeBackup(users)
sel.Include(sel.MailFolders([]string{DefaultMailFolder}, selectors.PrefixMatch()))
collections, err := createCollections(
ctx,
handlers,
suite.tenantID,
inMock.NewProvider(suite.user, suite.user),
sel.Scopes()[0],
DeltaPaths{},
control.Defaults(),
newStatusUpdater(t, &wg),
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
wg.Add(len(collections))
for _, edc := range collections {
suite.Run(edc.FullPath().String(), func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
isMetadata := edc.FullPath().Service() == path.ExchangeMetadataService
streamChannel := edc.Items(ctx, fault.New(true))
// Verify that each message can be restored
for stream := range streamChannel {
buf := &bytes.Buffer{}
read, err := buf.ReadFrom(stream.ToReader())
assert.NoError(t, err, clues.ToCore(err))
assert.NotZero(t, read)
if isMetadata {
continue
}
message, err := api.BytesToMessageable(buf.Bytes())
assert.NotNil(t, message)
assert.NoError(t, err, clues.ToCore(err))
}
})
}
wg.Wait()
}
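// The regression above reduces to a bytes-to-model round trip; a hedged
// sketch, assuming api.BytesToMessageable wraps kiota JSON deserialization:
//
//	buf := &bytes.Buffer{}
//	if _, err := buf.ReadFrom(stream.ToReader()); err == nil {
//		if msg, err := api.BytesToMessageable(buf.Bytes()); err == nil {
//			_ = msg // a restorable models.Messageable
//		}
//	}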
// TestContactSerializationRegression verifies the ability to query contact items
// and to store contacts within a Collection. Downloaded contacts are run through
// a regression test to ensure that downloaded items can be uploaded.
func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression() {
var (
users = []string{suite.user}
handlers = BackupHandlers(suite.ac)
)
tests := []struct {
name string
scope selectors.ExchangeScope
}{
{
name: "Default Contact Folder",
scope: selectors.NewExchangeBackup(users).ContactFolders(
[]string{DefaultContactFolder},
selectors.PrefixMatch())[0],
},
}
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var wg sync.WaitGroup
edcs, err := createCollections(
ctx,
handlers,
suite.tenantID,
inMock.NewProvider(suite.user, suite.user),
test.scope,
DeltaPaths{},
control.Defaults(),
newStatusUpdater(t, &wg),
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
wg.Add(len(edcs))
require.GreaterOrEqual(t, len(edcs), 1, "expected 1 <= num collections <= 2")
require.LessOrEqual(t, len(edcs), 2, "expected 1 <= num collections <= 2")
for _, edc := range edcs {
isMetadata := edc.FullPath().Service() == path.ExchangeMetadataService
count := 0
for stream := range edc.Items(ctx, fault.New(true)) {
buf := &bytes.Buffer{}
read, err := buf.ReadFrom(stream.ToReader())
assert.NoError(t, err, clues.ToCore(err))
assert.NotZero(t, read)
if isMetadata {
continue
}
contact, err := api.BytesToContactable(buf.Bytes())
assert.NotNil(t, contact)
assert.NoError(t, err, "converting contact bytes: "+buf.String(), clues.ToCore(err))
count++
}
if isMetadata {
continue
}
// TODO(ashmrtn): Remove when LocationPath is made part of BackupCollection
// interface.
if !assert.Implements(t, (*data.LocationPather)(nil), edc) {
continue
}
assert.Equal(
t,
edc.(data.LocationPather).LocationPath().String(),
DefaultContactFolder)
assert.NotZero(t, count)
}
wg.Wait()
})
}
}
// TestEventsSerializationRegression ensures that createCollections can
// successfully query, download, and restore event objects.
func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var (
users = []string{suite.user}
handlers = BackupHandlers(suite.ac)
calID string
bdayID string
)
fn := func(gcc graph.CachedContainer) error {
if ptr.Val(gcc.GetDisplayName()) == DefaultCalendar {
calID = ptr.Val(gcc.GetId())
}
if ptr.Val(gcc.GetDisplayName()) == "Birthdays" {
bdayID = ptr.Val(gcc.GetId())
}
return nil
}
err := suite.ac.Events().EnumerateContainers(ctx, suite.user, DefaultCalendar, fn, fault.New(true))
require.NoError(t, err, clues.ToCore(err))
tests := []struct {
name, expected string
scope selectors.ExchangeScope
}{
{
name: "Default Event Calendar",
expected: calID,
scope: selectors.NewExchangeBackup(users).EventCalendars(
[]string{DefaultCalendar},
selectors.PrefixMatch(),
)[0],
},
{
name: "Birthday Calendar",
expected: bdayID,
scope: selectors.NewExchangeBackup(users).EventCalendars(
[]string{"Birthdays"},
selectors.PrefixMatch(),
)[0],
},
}
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var wg sync.WaitGroup
collections, err := createCollections(
ctx,
handlers,
suite.tenantID,
inMock.NewProvider(suite.user, suite.user),
test.scope,
DeltaPaths{},
control.Defaults(),
newStatusUpdater(t, &wg),
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
require.Len(t, collections, 2)
wg.Add(len(collections))
for _, edc := range collections {
isMetadata := edc.FullPath().Service() == path.ExchangeMetadataService
if !isMetadata {
assert.Equal(t, test.expected, edc.FullPath().Folder(false))
} else {
assert.Equal(t, "", edc.FullPath().Folder(false))
}
for item := range edc.Items(ctx, fault.New(true)) {
buf := &bytes.Buffer{}
read, err := buf.ReadFrom(item.ToReader())
assert.NoError(t, err, clues.ToCore(err))
assert.NotZero(t, read)
if isMetadata {
continue
}
event, err := api.BytesToEventable(buf.Bytes())
assert.NotNil(t, event)
assert.NoError(t, err, "creating event from bytes: "+buf.String(), clues.ToCore(err))
}
}
wg.Wait()
})
}
}
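// A hedged sketch of the EnumerateContainers callback pattern used above;
// findContainerID is illustrative only, not part of this package:
//
//	func findContainerID(ctx context.Context, ac api.Client, userID, name string) (string, error) {
//		var id string
//		fn := func(gcc graph.CachedContainer) error {
//			if ptr.Val(gcc.GetDisplayName()) == name {
//				id = ptr.Val(gcc.GetId())
//			}
//			return nil
//		}
//		err := ac.Events().EnumerateContainers(ctx, userID, name, fn, fault.New(true))
//		return id, err
//	}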
type CollectionPopulationSuite struct {
tester.Suite tester.Suite
creds account.M365Config creds account.M365Config
} }
func TestServiceIteratorsUnitSuite(t *testing.T) { func TestServiceIteratorsUnitSuite(t *testing.T) {
suite.Run(t, &ServiceIteratorsSuite{Suite: tester.NewUnitSuite(t)}) suite.Run(t, &CollectionPopulationSuite{Suite: tester.NewUnitSuite(t)})
} }
func (suite *ServiceIteratorsSuite) SetupSuite() { func (suite *CollectionPopulationSuite) SetupSuite() {
a := tester.NewMockM365Account(suite.T()) a := tester.NewMockM365Account(suite.T())
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(suite.T(), err, clues.ToCore(err)) require.NoError(suite.T(), err, clues.ToCore(err))
suite.creds = m365 suite.creds = m365
} }
func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() { func (suite *CollectionPopulationSuite) TestPopulateCollections() {
var ( var (
qp = graph.QueryParams{ qp = graph.QueryParams{
Category: path.EmailCategory, // doesn't matter which one we use. Category: path.EmailCategory, // doesn't matter which one we use.
ResourceOwner: inMock.NewProvider("user_id", "user_name"), ResourceOwner: inMock.NewProvider("user_id", "user_name"),
TenantID: suite.creds.AzureTenantID, TenantID: suite.creds.AzureTenantID,
} }
statusUpdater = func(*support.ConnectorOperationStatus) {} statusUpdater = func(*support.ControllerOperationStatus) {}
allScope = selectors.NewExchangeBackup(nil).MailFolders(selectors.Any())[0] allScope = selectors.NewExchangeBackup(nil).MailFolders(selectors.Any())[0]
dps = DeltaPaths{} // incrementals are tested separately dps = DeltaPaths{} // incrementals are tested separately
commonResult = mockGetterResults{ commonResult = mockGetterResults{
@ -190,7 +924,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
getter mockGetter getter mockGetter
resolver graph.ContainerResolver resolver graph.ContainerResolver
scope selectors.ExchangeScope scope selectors.ExchangeScope
failFast control.FailureBehavior failFast control.FailurePolicy
expectErr assert.ErrorAssertionFunc expectErr assert.ErrorAssertionFunc
expectNewColls int expectNewColls int
expectMetadataColls int expectMetadataColls int
@ -349,7 +1083,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
category: qp.Category, category: qp.Category,
} }
collections, err := filterContainersAndFillCollections( collections, err := populateCollections(
ctx, ctx,
qp, qp,
mbh, mbh,
@ -435,7 +1169,7 @@ func checkMetadata(
assert.Equal(t, expect, catPaths[cat]) assert.Equal(t, expect, catPaths[cat])
} }
func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_DuplicateFolders() { func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_DuplicateFolders() {
type scopeCat struct { type scopeCat struct {
scope selectors.ExchangeScope scope selectors.ExchangeScope
cat path.CategoryType cat path.CategoryType
@ -447,7 +1181,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_Dupli
TenantID: suite.creds.AzureTenantID, TenantID: suite.creds.AzureTenantID,
} }
statusUpdater = func(*support.ConnectorOperationStatus) {} statusUpdater = func(*support.ControllerOperationStatus) {}
dataTypes = []scopeCat{ dataTypes = []scopeCat{
{ {
@ -687,7 +1421,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_Dupli
category: qp.Category, category: qp.Category,
} }
collections, err := filterContainersAndFillCollections( collections, err := populateCollections(
ctx, ctx,
qp, qp,
mbh, mbh,
@ -754,7 +1488,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_Dupli
} }
} }
func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_repeatedItems() { func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_repeatedItems() {
newDelta := api.DeltaUpdate{URL: "delta_url"} newDelta := api.DeltaUpdate{URL: "delta_url"}
table := []struct { table := []struct {
@ -832,7 +1566,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_repea
ResourceOwner: inMock.NewProvider("user_id", "user_name"), ResourceOwner: inMock.NewProvider("user_id", "user_name"),
TenantID: suite.creds.AzureTenantID, TenantID: suite.creds.AzureTenantID,
} }
statusUpdater = func(*support.ConnectorOperationStatus) {} statusUpdater = func(*support.ControllerOperationStatus) {}
allScope = selectors.NewExchangeBackup(nil).MailFolders(selectors.Any())[0] allScope = selectors.NewExchangeBackup(nil).MailFolders(selectors.Any())[0]
dps = DeltaPaths{} // incrementals are tested separately dps = DeltaPaths{} // incrementals are tested separately
container1 = mockContainer{ container1 = mockContainer{
@ -851,7 +1585,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_repea
require.Equal(t, "user_id", qp.ResourceOwner.ID(), qp.ResourceOwner) require.Equal(t, "user_id", qp.ResourceOwner.ID(), qp.ResourceOwner)
require.Equal(t, "user_name", qp.ResourceOwner.Name(), qp.ResourceOwner) require.Equal(t, "user_name", qp.ResourceOwner.Name(), qp.ResourceOwner)
collections, err := filterContainersAndFillCollections( collections, err := populateCollections(
ctx, ctx,
qp, qp,
mbh, mbh,
@ -907,7 +1641,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_repea
} }
} }
func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_incrementals_nondelta() { func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_incrementals_nondelta() {
var ( var (
userID = "user_id" userID = "user_id"
tenantID = suite.creds.AzureTenantID tenantID = suite.creds.AzureTenantID
@ -917,7 +1651,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_incre
ResourceOwner: inMock.NewProvider("user_id", "user_name"), ResourceOwner: inMock.NewProvider("user_id", "user_name"),
TenantID: suite.creds.AzureTenantID, TenantID: suite.creds.AzureTenantID,
} }
statusUpdater = func(*support.ConnectorOperationStatus) {} statusUpdater = func(*support.ControllerOperationStatus) {}
allScope = selectors.NewExchangeBackup(nil).MailFolders(selectors.Any())[0] allScope = selectors.NewExchangeBackup(nil).MailFolders(selectors.Any())[0]
commonResults = mockGetterResults{ commonResults = mockGetterResults{
added: []string{"added"}, added: []string{"added"},
@ -1270,7 +2004,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_incre
} }
} }
collections, err := filterContainersAndFillCollections( collections, err := populateCollections(
ctx, ctx,
qp, qp,
mbh, mbh,

View File

@ -4,7 +4,7 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/m365/graph"
) )
// checkIDAndName is a helper function to ensure that

View File

@ -13,9 +13,9 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
@ -125,7 +125,7 @@ func (col *Collection) LocationPath() *path.Builder {
return col.locationPath return col.locationPath
} }
// TODO(ashmrtn): Fill in with previous path once GraphConnector compares old // TODO(ashmrtn): Fill in with previous path once the Controller compares old
// and new folder hierarchies.
func (col Collection) PreviousPath() path.Path { func (col Collection) PreviousPath() path.Path {
return col.prevPath return col.prevPath
@ -230,7 +230,7 @@ func (col *Collection) streamItems(ctx context.Context, errs *fault.Bus) {
atomic.AddInt64(&success, 1) atomic.AddInt64(&success, 1)
log.With("err", err).Infow("item not found", clues.InErr(err).Slice()...) log.With("err", err).Infow("item not found", clues.InErr(err).Slice()...)
} else { } else {
errs.AddRecoverable(clues.Wrap(err, "fetching item").Label(fault.LabelForceNoBackupCreation)) errs.AddRecoverable(ctx, clues.Wrap(err, "fetching item").Label(fault.LabelForceNoBackupCreation))
} }
return return
@ -238,7 +238,7 @@ func (col *Collection) streamItems(ctx context.Context, errs *fault.Bus) {
data, err := col.items.Serialize(ctx, item, user, id) data, err := col.items.Serialize(ctx, item, user, id)
if err != nil { if err != nil {
errs.AddRecoverable(clues.Wrap(err, "serializing item").Label(fault.LabelForceNoBackupCreation)) errs.AddRecoverable(ctx, clues.Wrap(err, "serializing item").Label(fault.LabelForceNoBackupCreation))
return return
} }

View File

@ -11,8 +11,8 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
@ -46,20 +46,19 @@ func (mi *mockItemer) Serialize(
return nil, mi.serializeErr return nil, mi.serializeErr
} }
type ExchangeDataCollectionSuite struct { type CollectionSuite struct {
tester.Suite tester.Suite
} }
func TestExchangeDataCollectionSuite(t *testing.T) { func TestCollectionSuite(t *testing.T) {
suite.Run(t, &ExchangeDataCollectionSuite{Suite: tester.NewUnitSuite(t)}) suite.Run(t, &CollectionSuite{Suite: tester.NewUnitSuite(t)})
} }
func (suite *ExchangeDataCollectionSuite) TestExchangeDataReader_Valid() { func (suite *CollectionSuite) TestReader_Valid() {
m := []byte("test message") m := []byte("test message")
description := "aFile" description := "aFile"
ed := &Stream{id: description, message: m} ed := &Stream{id: description, message: m}
// Read the message using the `ExchangeData` reader and validate it matches what we set
buf := &bytes.Buffer{} buf := &bytes.Buffer{}
_, err := buf.ReadFrom(ed.ToReader()) _, err := buf.ReadFrom(ed.ToReader())
assert.NoError(suite.T(), err, clues.ToCore(err)) assert.NoError(suite.T(), err, clues.ToCore(err))
@ -67,7 +66,7 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataReader_Valid() {
assert.Equal(suite.T(), description, ed.UUID()) assert.Equal(suite.T(), description, ed.UUID())
} }
func (suite *ExchangeDataCollectionSuite) TestExchangeDataReader_Empty() { func (suite *CollectionSuite) TestReader_Empty() {
var ( var (
empty []byte empty []byte
expected int64 expected int64
@ -82,7 +81,7 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataReader_Empty() {
assert.NoError(t, err, clues.ToCore(err)) assert.NoError(t, err, clues.ToCore(err))
} }
func (suite *ExchangeDataCollectionSuite) TestExchangeData_FullPath() { func (suite *CollectionSuite) TestCollection_FullPath() {
t := suite.T() t := suite.T()
tenant := "a-tenant" tenant := "a-tenant"
user := "a-user" user := "a-user"
@ -105,7 +104,7 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeData_FullPath() {
assert.Equal(t, fullPath, edc.FullPath()) assert.Equal(t, fullPath, edc.FullPath())
} }
func (suite *ExchangeDataCollectionSuite) TestExchangeDataCollection_NewExchangeDataCollection() { func (suite *CollectionSuite) TestCollection_NewCollection() {
t := suite.T() t := suite.T()
tenant := "a-tenant" tenant := "a-tenant"
user := "a-user" user := "a-user"
@ -129,7 +128,7 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataCollection_NewExchange
assert.Equal(t, fullPath, edc.FullPath()) assert.Equal(t, fullPath, edc.FullPath())
} }
func (suite *ExchangeDataCollectionSuite) TestNewCollection_state() { func (suite *CollectionSuite) TestNewCollection_state() {
fooP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "foo") fooP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "foo")
require.NoError(suite.T(), err, clues.ToCore(err)) require.NoError(suite.T(), err, clues.ToCore(err))
barP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "bar") barP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "bar")
@ -189,7 +188,7 @@ func (suite *ExchangeDataCollectionSuite) TestNewCollection_state() {
} }
} }
func (suite *ExchangeDataCollectionSuite) TestGetItemWithRetries() { func (suite *CollectionSuite) TestGetItemWithRetries() {
table := []struct { table := []struct {
name string name string
items *mockItemer items *mockItemer

View File

@ -1,7 +1,7 @@
package exchange package exchange
import ( import (
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
) )
@ -32,7 +32,7 @@ func (h contactBackupHandler) itemHandler() itemGetterSerializer {
func (h contactBackupHandler) NewContainerCache( func (h contactBackupHandler) NewContainerCache(
userID string, userID string,
) (string, graph.ContainerResolver) { ) (string, graph.ContainerResolver) {
return DefaultContactFolder, &contactFolderCache{ return DefaultContactFolder, &contactContainerCache{
userID: userID, userID: userID,
enumer: h.ac, enumer: h.ac,
getter: h.ac, getter: h.ac,

View File

@ -6,13 +6,13 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
) )
var ( var (
_ graph.ContainerResolver = &contactFolderCache{} _ graph.ContainerResolver = &contactContainerCache{}
_ containerRefresher = &contactRefresher{} _ containerRefresher = &contactRefresher{}
) )
@ -35,14 +35,14 @@ func (r *contactRefresher) refreshContainer(
return &f, nil return &f, nil
} }
type contactFolderCache struct { type contactContainerCache struct {
*containerResolver *containerResolver
enumer containersEnumerator enumer containersEnumerator
getter containerGetter getter containerGetter
userID string userID string
} }
func (cfc *contactFolderCache) populateContactRoot( func (cfc *contactContainerCache) populateContactRoot(
ctx context.Context, ctx context.Context,
directoryID string, directoryID string,
baseContainerPath []string, baseContainerPath []string,
@ -67,7 +67,7 @@ func (cfc *contactFolderCache) populateContactRoot(
// objects into the Contact Folder Cache
// Function does NOT use Delta Queries as it is not supported
// as of (Oct-07-2022)
func (cfc *contactFolderCache) Populate( func (cfc *contactContainerCache) Populate(
ctx context.Context, ctx context.Context,
errs *fault.Bus, errs *fault.Bus,
baseID string, baseID string,
@ -89,7 +89,7 @@ func (cfc *contactFolderCache) Populate(
return nil return nil
} }
func (cfc *contactFolderCache) init( func (cfc *contactContainerCache) init(
ctx context.Context, ctx context.Context,
baseNode string, baseNode string,
baseContainerPath []string, baseContainerPath []string,

View File

@ -7,7 +7,7 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -31,7 +31,7 @@ func newContactRestoreHandler(
} }
func (h contactRestoreHandler) newContainerCache(userID string) graph.ContainerResolver { func (h contactRestoreHandler) newContainerCache(userID string) graph.ContainerResolver {
return &contactFolderCache{ return &contactContainerCache{
userID: userID, userID: userID,
enumer: h.ac, enumer: h.ac,
getter: h.ac, getter: h.ac,

View File

@ -9,6 +9,7 @@ import (
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
) )
@ -51,7 +52,7 @@ func (suite *ContactsRestoreIntgSuite) TestCreateContainerDestination() {
path.ContactsCategory, path.ContactsCategory,
suite.creds.AzureTenantID, suite.creds.AzureTenantID,
suite.userID, suite.userID,
tester.DefaultTestRestoreDestination("").ContainerName, testdata.DefaultRestoreConfig("").Location,
[]string{"Hufflepuff"}, []string{"Hufflepuff"},
[]string{"Ravenclaw"}) []string{"Ravenclaw"})
} }

View File

@ -6,7 +6,7 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -403,7 +403,7 @@ func (cr *containerResolver) populatePaths(
_, err := cr.idToPath(ctx, ptr.Val(f.GetId()), 0) _, err := cr.idToPath(ctx, ptr.Val(f.GetId()), 0)
if err != nil { if err != nil {
err = clues.Wrap(err, "populating path") err = clues.Wrap(err, "populating path")
el.AddRecoverable(err) el.AddRecoverable(ctx, err)
lastErr = err lastErr = err
} }
} }

View File

@ -13,10 +13,12 @@ import (
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
) )
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -671,6 +673,118 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestAddToCache() {
assert.Equal(t, m.expectedLocation, l.String(), "location path") assert.Equal(t, m.expectedLocation, l.String(), "location path")
} }
type ContainerResolverSuite struct {
tester.Suite
credentials account.M365Config
}
func TestContainerResolverIntegrationSuite(t *testing.T) {
suite.Run(t, &ContainerResolverSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tester.M365AcctCredEnvs}),
})
}
func (suite *ContainerResolverSuite) SetupSuite() {
t := suite.T()
a := tester.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.credentials = m365
}
func (suite *ContainerResolverSuite) TestPopulate() {
ac, err := api.NewClient(suite.credentials)
require.NoError(suite.T(), err, clues.ToCore(err))
eventFunc := func(t *testing.T) graph.ContainerResolver {
return &eventContainerCache{
userID: tester.M365UserID(t),
enumer: ac.Events(),
getter: ac.Events(),
}
}
contactFunc := func(t *testing.T) graph.ContainerResolver {
return &contactContainerCache{
userID: tester.M365UserID(t),
enumer: ac.Contacts(),
getter: ac.Contacts(),
}
}
tests := []struct {
name, folderInCache, root, basePath string
resolverFunc func(t *testing.T) graph.ContainerResolver
canFind assert.BoolAssertionFunc
}{
{
name: "Default Event Cache",
// Fine as long as this isn't running against a migrated Exchange server.
folderInCache: DefaultCalendar,
root: DefaultCalendar,
basePath: DefaultCalendar,
resolverFunc: eventFunc,
canFind: assert.True,
},
{
name: "Default Event Folder Hidden",
folderInCache: DefaultContactFolder,
root: DefaultCalendar,
canFind: assert.False,
resolverFunc: eventFunc,
},
{
name: "Name Not in Cache",
folderInCache: "testFooBarWhoBar",
root: DefaultCalendar,
canFind: assert.False,
resolverFunc: eventFunc,
},
{
name: "Default Contact Cache",
folderInCache: DefaultContactFolder,
root: DefaultContactFolder,
basePath: DefaultContactFolder,
canFind: assert.True,
resolverFunc: contactFunc,
},
{
name: "Default Contact Hidden",
folderInCache: DefaultContactFolder,
root: DefaultContactFolder,
canFind: assert.False,
resolverFunc: contactFunc,
},
{
name: "Name Not in Cache",
folderInCache: "testFooBarWhoBar",
root: DefaultContactFolder,
canFind: assert.False,
resolverFunc: contactFunc,
},
}
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
resolver := test.resolverFunc(t)
err := resolver.Populate(ctx, fault.New(true), test.root, test.basePath)
require.NoError(t, err, clues.ToCore(err))
_, isFound := resolver.LocationInCache(test.folderInCache)
test.canFind(t, isFound, "folder path", test.folderInCache)
})
}
}
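// A minimal sketch of the populate-then-lookup flow exercised above:
//
//	resolver := &eventContainerCache{userID: userID, enumer: ac.Events(), getter: ac.Events()}
//	if err := resolver.Populate(ctx, fault.New(true), DefaultCalendar, DefaultCalendar); err == nil {
//		if id, ok := resolver.LocationInCache(DefaultCalendar); ok {
//			_ = id // the container's M365 ID, keyed by display-name path
//		}
//	}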
// ---------------------------------------------------------------------------
// integration suite
// ---------------------------------------------------------------------------
@ -688,7 +802,7 @@ func runCreateDestinationTest(
var ( var (
svc = path.ExchangeService svc = path.ExchangeService
gcr = handler.newContainerCache(userID) gcc = handler.newContainerCache(userID)
) )
path1, err := path.Build( path1, err := path.Build(
@ -700,17 +814,17 @@ func runCreateDestinationTest(
containerNames1...) containerNames1...)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
containerID, gcr, err := createDestination( containerID, gcc, err := createDestination(
ctx, ctx,
handler, handler,
handler.formatRestoreDestination(destinationName, path1), handler.formatRestoreDestination(destinationName, path1),
userID, userID,
gcr, gcc,
true, true,
fault.New(true)) fault.New(true))
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
_, _, err = gcr.IDToPath(ctx, containerID) _, _, err = gcc.IDToPath(ctx, containerID)
assert.NoError(t, err, clues.ToCore(err)) assert.NoError(t, err, clues.ToCore(err))
path2, err := path.Build( path2, err := path.Build(
@ -722,22 +836,22 @@ func runCreateDestinationTest(
containerNames2...) containerNames2...)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
containerID, gcr, err = createDestination( containerID, gcc, err = createDestination(
ctx, ctx,
handler, handler,
handler.formatRestoreDestination(destinationName, path2), handler.formatRestoreDestination(destinationName, path2),
userID, userID,
gcr, gcc,
false, false,
fault.New(true)) fault.New(true))
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
p, l, err := gcr.IDToPath(ctx, containerID) p, l, err := gcc.IDToPath(ctx, containerID)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
_, ok := gcr.LocationInCache(l.String()) _, ok := gcc.LocationInCache(l.String())
require.True(t, ok, "looking for location in cache: %s", l) require.True(t, ok, "looking for location in cache: %s", l)
_, ok = gcr.PathInCache(p.String()) _, ok = gcc.PathInCache(p.String())
require.True(t, ok, "looking for path in cache: %s", p) require.True(t, ok, "looking for path in cache: %s", p)
} }
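// Design note (inferred from the flow above, not documented API behavior):
// the second createDestination call reuses the cache returned by the first,
// with isNewCache=false, so containers resolved during the first restore are
// not re-enumerated; only newly created leaf folders require Graph calls.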

View File

@ -1,7 +1,7 @@
package exchange package exchange
import ( import (
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
) )
@ -32,7 +32,7 @@ func (h eventBackupHandler) itemHandler() itemGetterSerializer {
func (h eventBackupHandler) NewContainerCache( func (h eventBackupHandler) NewContainerCache(
userID string, userID string,
) (string, graph.ContainerResolver) { ) (string, graph.ContainerResolver) {
return DefaultCalendar, &eventCalendarCache{ return DefaultCalendar, &eventContainerCache{
userID: userID, userID: userID,
enumer: h.ac, enumer: h.ac,
getter: h.ac, getter: h.ac,

View File

@ -6,14 +6,14 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
) )
var _ graph.ContainerResolver = &eventCalendarCache{} var _ graph.ContainerResolver = &eventContainerCache{}
type eventCalendarCache struct { type eventContainerCache struct {
*containerResolver *containerResolver
enumer containersEnumerator enumer containersEnumerator
getter containerGetter getter containerGetter
@ -23,7 +23,7 @@ type eventCalendarCache struct {
// init ensures that the structure's fields are initialized.
// Fields Initialized when cache == nil:
// [ecc.cache]
func (ecc *eventCalendarCache) init( func (ecc *eventContainerCache) init(
ctx context.Context, ctx context.Context,
) error { ) error {
if ecc.containerResolver == nil { if ecc.containerResolver == nil {
@ -37,7 +37,7 @@ func (ecc *eventCalendarCache) init(
// DefaultCalendar is the traditional "Calendar".
// Action ensures that cache will stop at appropriate level.
// @error iff the struct is not properly instantiated
func (ecc *eventCalendarCache) populateEventRoot(ctx context.Context) error { func (ecc *eventContainerCache) populateEventRoot(ctx context.Context) error {
container := DefaultCalendar container := DefaultCalendar
f, err := ecc.getter.GetContainerByID(ctx, ecc.userID, container) f, err := ecc.getter.GetContainerByID(ctx, ecc.userID, container)
@ -59,7 +59,7 @@ func (ecc *eventCalendarCache) populateEventRoot(ctx context.Context) error {
// Populate is a utility function for populating the eventContainerCache.
// Executes 1 additional Graph Query
// @param baseID: ignored. Present to conform to interface
func (ecc *eventCalendarCache) Populate( func (ecc *eventContainerCache) Populate(
ctx context.Context, ctx context.Context,
errs *fault.Bus, errs *fault.Bus,
baseID string, baseID string,
@ -88,7 +88,7 @@ func (ecc *eventCalendarCache) Populate(
// AddToCache adds container to map in field 'cache'
// @returns error iff the required values are not accessible.
func (ecc *eventCalendarCache) AddToCache(ctx context.Context, f graph.Container) error { func (ecc *eventContainerCache) AddToCache(ctx context.Context, f graph.Container) error {
if err := checkIDAndName(f); err != nil { if err := checkIDAndName(f); err != nil {
return clues.Wrap(err, "validating container").WithClues(ctx) return clues.Wrap(err, "validating container").WithClues(ctx)
} }

View File

@ -7,7 +7,7 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -33,7 +33,7 @@ func newEventRestoreHandler(
} }
func (h eventRestoreHandler) newContainerCache(userID string) graph.ContainerResolver { func (h eventRestoreHandler) newContainerCache(userID string) graph.ContainerResolver {
return &eventCalendarCache{ return &eventContainerCache{
userID: userID, userID: userID,
enumer: h.ac, enumer: h.ac,
getter: h.ac, getter: h.ac,

View File

@ -9,6 +9,7 @@ import (
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
) )
@ -51,7 +52,7 @@ func (suite *EventsRestoreIntgSuite) TestCreateContainerDestination() {
path.EventsCategory, path.EventsCategory,
suite.creds.AzureTenantID, suite.creds.AzureTenantID,
suite.userID, suite.userID,
tester.DefaultTestRestoreDestination("").ContainerName, testdata.DefaultRestoreConfig("").Location,
[]string{"Durmstrang"}, []string{"Durmstrang"},
[]string{"Beauxbatons"}) []string{"Beauxbatons"})
} }

View File

@ -5,7 +5,7 @@ import (
"github.com/microsoft/kiota-abstractions-go/serialization" "github.com/microsoft/kiota-abstractions-go/serialization"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"

View File

@ -1,7 +1,7 @@
package exchange package exchange
import ( import (
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
) )
@ -32,7 +32,7 @@ func (h mailBackupHandler) itemHandler() itemGetterSerializer {
func (h mailBackupHandler) NewContainerCache( func (h mailBackupHandler) NewContainerCache(
userID string, userID string,
) (string, graph.ContainerResolver) { ) (string, graph.ContainerResolver) {
return rootFolderAlias, &mailFolderCache{ return rootFolderAlias, &mailContainerCache{
userID: userID, userID: userID,
enumer: h.ac, enumer: h.ac,
getter: h.ac, getter: h.ac,

View File

@ -5,13 +5,13 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
) )
var ( var (
_ graph.ContainerResolver = &mailFolderCache{} _ graph.ContainerResolver = &mailContainerCache{}
_ containerRefresher = &mailRefresher{} _ containerRefresher = &mailRefresher{}
) )
@ -34,10 +34,10 @@ func (r *mailRefresher) refreshContainer(
return &f, nil return &f, nil
} }
// mailFolderCache struct used to improve lookup of directories within exchange.Mail // mailContainerCache struct used to improve lookup of directories within exchange.Mail
// cache map of cachedContainers where the key = M365ID
// nameLookup map: Key: DisplayName Value: ID
type mailFolderCache struct { type mailContainerCache struct {
*containerResolver *containerResolver
enumer containersEnumerator enumer containersEnumerator
getter containerGetter getter containerGetter
@ -47,7 +47,7 @@ type mailFolderCache struct {
// init ensures that the structure's fields are initialized.
// Fields Initialized when cache == nil:
// [mc.cache]
func (mc *mailFolderCache) init( func (mc *mailContainerCache) init(
ctx context.Context, ctx context.Context,
) error { ) error {
if mc.containerResolver == nil { if mc.containerResolver == nil {
@ -64,7 +64,7 @@ func (mc *mailFolderCache) init(
// rootFolderAlias is the top-level directory for exchange.Mail.
// Action ensures that cache will stop at appropriate level.
// @error iff the struct is not properly instantiated
func (mc *mailFolderCache) populateMailRoot(ctx context.Context) error { func (mc *mailContainerCache) populateMailRoot(ctx context.Context) error {
f, err := mc.getter.GetContainerByID(ctx, mc.userID, rootFolderAlias) f, err := mc.getter.GetContainerByID(ctx, mc.userID, rootFolderAlias)
if err != nil { if err != nil {
return clues.Wrap(err, "fetching root folder") return clues.Wrap(err, "fetching root folder")
@ -89,7 +89,7 @@ func (mc *mailFolderCache) populateMailRoot(ctx context.Context) error {
// @param baseID: M365ID of the base of the exchange.Mail.Folder
// @param baseContainerPath: the set of folder elements that make up the path
// for the base container in the cache.
func (mc *mailFolderCache) Populate( func (mc *mailContainerCache) Populate(
ctx context.Context, ctx context.Context,
errs *fault.Bus, errs *fault.Bus,
baseID string, baseID string,

View File

@ -87,7 +87,7 @@ func (suite *MailFolderCacheIntegrationSuite) TestDeltaFetch() {
acm := ac.Mail() acm := ac.Mail()
mfc := mailFolderCache{ mfc := mailContainerCache{
userID: userID, userID: userID,
enumer: acm, enumer: acm,
getter: acm, getter: acm,

View File

@ -8,7 +8,7 @@ import (
"github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -34,7 +34,7 @@ func newMailRestoreHandler(
} }
func (h mailRestoreHandler) newContainerCache(userID string) graph.ContainerResolver { func (h mailRestoreHandler) newContainerCache(userID string) graph.ContainerResolver {
return &mailFolderCache{ return &mailContainerCache{
userID: userID, userID: userID,
enumer: h.ac, enumer: h.ac,
getter: h.ac, getter: h.ac,

View File

@ -9,6 +9,7 @@ import (
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
) )
@ -51,7 +52,7 @@ func (suite *MailRestoreIntgSuite) TestCreateContainerDestination() {
path.EmailCategory, path.EmailCategory,
suite.creds.AzureTenantID, suite.creds.AzureTenantID,
suite.userID, suite.userID,
tester.DefaultTestRestoreDestination("").ContainerName, testdata.DefaultRestoreConfig("").Location,
[]string{"Griffindor", "Croix"}, []string{"Griffindor", "Croix"},
[]string{"Griffindor", "Felicius"}) []string{"Griffindor", "Felicius"})
} }

View File

@ -89,8 +89,6 @@ func NewContactCollection(pathRepresentation path.Path, numMessagesToReturn int)
Names: []string{}, Names: []string{},
} }
rand.Seed(time.Now().UnixNano())
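// Removal note: rand.Seed is deprecated as of Go 1.20; the global generator
// is seeded automatically, so the explicit call can simply be dropped.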
middleNames := []string{ middleNames := []string{
"Argon", "Argon",
"Bernard", "Bernard",
@ -140,6 +138,7 @@ func (medc *DataCollection) Items(
return res return res
} }
// TODO: move to data/mock for service-agnostic mocking
// Data represents a single item retrieved from exchange
type Data struct { type Data struct {
ID string ID string

View File

@@ -744,10 +744,10 @@ func serialize(t *testing.T, item serialization.Parsable) []byte {
 	return byteArray
 }

-func hydrateMessage(byteArray []byte) (models.Messageable, error) {
-	parseNode, err := kjson.NewJsonParseNodeFactory().GetRootParseNode("application/json", byteArray)
+func hydrateMessage(body []byte) (models.Messageable, error) {
+	parseNode, err := kjson.NewJsonParseNodeFactory().GetRootParseNode("application/json", body)
 	if err != nil {
-		return nil, clues.Wrap(err, "deserializing bytes into base m365 object")
+		return nil, clues.Wrap(err, "deserializing bytes into base m365 object").With("body_size", len(body))
 	}

 	anObject, err := parseNode.GetObjectValue(models.CreateMessageFromDiscriminatorValue)
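Assembled from the fragments above, the post-change hydrateMessage looks roughly like this; the error handling after GetObjectValue and the final type assertion are assumptions, since the diff cuts off there:

import (
	"github.com/alcionai/clues"
	kjson "github.com/microsoft/kiota-serialization-json-go"
	"github.com/microsoftgraph/msgraph-sdk-go/models"
)

func hydrateMessage(body []byte) (models.Messageable, error) {
	parseNode, err := kjson.NewJsonParseNodeFactory().GetRootParseNode("application/json", body)
	if err != nil {
		// Attaching body_size gives the log enough context to spot
		// truncated or empty payloads without dumping the payload itself.
		return nil, clues.Wrap(err, "deserializing bytes into base m365 object").With("body_size", len(body))
	}

	anObject, err := parseNode.GetObjectValue(models.CreateMessageFromDiscriminatorValue)
	if err != nil {
		return nil, clues.Wrap(err, "parsing m365 message") // assumed wrapper
	}

	msg, ok := anObject.(models.Messageable)
	if !ok {
		return nil, clues.New("parsed object is not a Messageable") // assumed guard
	}

	return msg, nil
}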

View File

@@ -9,10 +9,10 @@ import (
 	"github.com/microsoftgraph/msgraph-sdk-go/models"

 	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/internal/connector/graph"
-	"github.com/alcionai/corso/src/internal/connector/support"
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/diagnostics"
+	"github.com/alcionai/corso/src/internal/m365/graph"
+	"github.com/alcionai/corso/src/internal/m365/support"
 	"github.com/alcionai/corso/src/internal/observe"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/control"
@@ -22,16 +22,16 @@ import (
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )

-// RestoreCollections restores M365 objects in data.RestoreCollection to MSFT
+// ConsumeRestoreCollections restores M365 objects in data.RestoreCollection to MSFT
 // store through GraphAPI.
-func RestoreCollections(
+func ConsumeRestoreCollections(
 	ctx context.Context,
 	ac api.Client,
-	dest control.RestoreDestination,
+	restoreCfg control.RestoreConfig,
 	dcs []data.RestoreCollection,
 	deets *details.Builder,
 	errs *fault.Bus,
-) (*support.ConnectorOperationStatus, error) {
+) (*support.ControllerOperationStatus, error) {
 	if len(dcs) == 0 {
 		return support.CreateStatus(ctx, support.Restore, 0, support.CollectionMetrics{}, ""), nil
 	}
@@ -64,7 +64,7 @@ func RestoreCollections(
 		handler, ok := handlers[category]
 		if !ok {
-			el.AddRecoverable(clues.New("unsupported restore path category").WithClues(ictx))
+			el.AddRecoverable(ctx, clues.New("unsupported restore path category").WithClues(ictx))
 			continue
 		}
@@ -73,20 +73,20 @@ func RestoreCollections(
 			isNewCache = true
 		}

-		containerID, gcr, err := createDestination(
+		containerID, gcc, err := createDestination(
 			ictx,
 			handler,
-			handler.formatRestoreDestination(dest.ContainerName, dc.FullPath()),
+			handler.formatRestoreDestination(restoreCfg.Location, dc.FullPath()),
 			userID,
 			directoryCache[category],
 			isNewCache,
 			errs)
 		if err != nil {
-			el.AddRecoverable(err)
+			el.AddRecoverable(ctx, err)
 			continue
 		}

-		directoryCache[category] = gcr
+		directoryCache[category] = gcc

 		ictx = clues.Add(ictx, "restore_destination_id", containerID)
@@ -107,7 +107,7 @@ func RestoreCollections(
 				break
 			}

-			el.AddRecoverable(err)
+			el.AddRecoverable(ctx, err)
 		}
 	}
@@ -116,7 +116,7 @@ func RestoreCollections(
 		support.Restore,
 		len(dcs),
 		metrics,
-		dest.ContainerName)
+		restoreCfg.Location)

 	return status, el.Failure()
 }
@@ -131,7 +131,7 @@ func restoreCollection(
 	deets *details.Builder,
 	errs *fault.Bus,
 ) (support.CollectionMetrics, error) {
-	ctx, end := diagnostics.Span(ctx, "gc:exchange:restoreCollection", diagnostics.Label("path", dc.FullPath()))
+	ctx, end := diagnostics.Span(ctx, "m365:exchange:restoreCollection", diagnostics.Label("path", dc.FullPath()))
 	defer end()

 	var (
@@ -159,14 +159,14 @@ func restoreCollection(
 		}

 		ictx := clues.Add(ctx, "item_id", itemData.UUID())
-		trace.Log(ictx, "gc:exchange:restoreCollection:item", itemData.UUID())
+		trace.Log(ictx, "m365:exchange:restoreCollection:item", itemData.UUID())
 		metrics.Objects++

 		buf := &bytes.Buffer{}

 		_, err := buf.ReadFrom(itemData.ToReader())
 		if err != nil {
-			el.AddRecoverable(clues.Wrap(err, "reading item bytes").WithClues(ictx))
+			el.AddRecoverable(ctx, clues.Wrap(err, "reading item bytes").WithClues(ictx))
 			continue
 		}
@@ -174,7 +174,7 @@ func restoreCollection(
 		info, err := ir.restore(ictx, body, userID, destinationID, errs)
 		if err != nil {
-			el.AddRecoverable(err)
+			el.AddRecoverable(ictx, err)
 			continue
 		}
@@ -185,7 +185,7 @@ func restoreCollection(
 		// destination folder, then the restore path no longer matches the fullPath.
 		itemPath, err := fullPath.AppendItem(itemData.UUID())
 		if err != nil {
-			el.AddRecoverable(clues.Wrap(err, "adding item to collection path").WithClues(ctx))
+			el.AddRecoverable(ctx, clues.Wrap(err, "adding item to collection path").WithClues(ctx))
 			continue
 		}
@@ -331,7 +331,7 @@ func uploadAttachments(
 			itemID,
 			a)
 		if err != nil {
-			el.AddRecoverable(clues.Wrap(err, "uploading mail attachment").WithClues(ctx))
+			el.AddRecoverable(ctx, clues.Wrap(err, "uploading mail attachment").WithClues(ctx))
 		}
 	}
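The AddRecoverable signature change threads a context into every recoverable-error report, while el.Failure() still decides whether the operation as a whole failed. A self-contained toy of that pattern — this bus is a stand-in for illustration, not corso's fault.Bus:

package main

import (
	"context"
	"errors"
	"fmt"
)

// errBus mimics the recoverable-vs-fatal split used above: recoverable
// errors are collected and iteration continues; Failure reports them all.
type errBus struct {
	recovered []error
}

func (b *errBus) AddRecoverable(ctx context.Context, err error) {
	// A real bus can lift request-scoped metadata off ctx for logging.
	b.recovered = append(b.recovered, err)
}

func (b *errBus) Failure() error {
	return errors.Join(b.recovered...) // nil when nothing was recorded
}

func main() {
	el := &errBus{}
	ctx := context.Background()

	for _, item := range []string{"a", "bad", "c"} {
		if item == "bad" {
			// Record and continue, mirroring the restore loop.
			el.AddRecoverable(ctx, fmt.Errorf("restoring item %q", item))
			continue
		}

		fmt.Println("restored", item)
	}

	fmt.Println("failure:", el.Failure())
}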

View File

@@ -10,9 +10,10 @@ import (
 	"github.com/stretchr/testify/suite"

 	"github.com/alcionai/corso/src/internal/common/ptr"
-	exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
+	exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/account"
+	"github.com/alcionai/corso/src/pkg/control/testdata"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
@@ -54,7 +55,7 @@ func (suite *RestoreIntgSuite) TestRestoreContact() {
 	var (
 		userID     = tester.M365UserID(t)
-		folderName = tester.DefaultTestRestoreDestination("contact").ContainerName
+		folderName = testdata.DefaultRestoreConfig("contact").Location
 		handler    = newContactRestoreHandler(suite.ac)
 	)
@@ -88,7 +89,7 @@ func (suite *RestoreIntgSuite) TestRestoreEvent() {
 	var (
 		userID  = tester.M365UserID(t)
-		subject = tester.DefaultTestRestoreDestination("event").ContainerName
+		subject = testdata.DefaultRestoreConfig("event").Location
 		handler = newEventRestoreHandler(suite.ac)
 	)
@@ -154,7 +155,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
 			bytes:    exchMock.MessageBytes("Restore Exchange Object"),
 			category: path.EmailCategory,
 			destination: func(t *testing.T, ctx context.Context) string {
-				folderName := tester.DefaultTestRestoreDestination("mailobj").ContainerName
+				folderName := testdata.DefaultRestoreConfig("mailobj").Location
 				folder, err := handlers[path.EmailCategory].
 					CreateContainer(ctx, userID, folderName, "")
 				require.NoError(t, err, clues.ToCore(err))
@@ -167,7 +168,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
 			bytes:    exchMock.MessageWithDirectAttachment("Restore 1 Attachment"),
 			category: path.EmailCategory,
 			destination: func(t *testing.T, ctx context.Context) string {
-				folderName := tester.DefaultTestRestoreDestination("mailwattch").ContainerName
+				folderName := testdata.DefaultRestoreConfig("mailwattch").Location
 				folder, err := handlers[path.EmailCategory].
 					CreateContainer(ctx, userID, folderName, "")
 				require.NoError(t, err, clues.ToCore(err))
@@ -180,7 +181,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
 			bytes:    exchMock.MessageWithItemAttachmentEvent("Event Item Attachment"),
 			category: path.EmailCategory,
 			destination: func(t *testing.T, ctx context.Context) string {
-				folderName := tester.DefaultTestRestoreDestination("eventwattch").ContainerName
+				folderName := testdata.DefaultRestoreConfig("eventwattch").Location
 				folder, err := handlers[path.EmailCategory].
 					CreateContainer(ctx, userID, folderName, "")
 				require.NoError(t, err, clues.ToCore(err))
@@ -193,7 +194,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
 			bytes:    exchMock.MessageWithItemAttachmentMail("Mail Item Attachment"),
 			category: path.EmailCategory,
 			destination: func(t *testing.T, ctx context.Context) string {
-				folderName := tester.DefaultTestRestoreDestination("mailitemattch").ContainerName
+				folderName := testdata.DefaultRestoreConfig("mailitemattch").Location
 				folder, err := handlers[path.EmailCategory].
 					CreateContainer(ctx, userID, folderName, "")
 				require.NoError(t, err, clues.ToCore(err))
@@ -201,24 +202,22 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
 				return ptr.Val(folder.GetId())
 			},
 		},
-		// TODO: Neha complete as part of https://github.com/alcionai/corso/issues/2428
-		// {
-		// 	name:  "Test Mail: Hydrated Item Attachment Mail",
-		// 	bytes: exchMock.MessageWithNestedItemAttachmentMail(t,
-		// 		exchMock.MessageBytes("Basic Item Attachment"),
-		// 		"Mail Item Attachment",
-		// 	),
-		// 	category: path.EmailCategory,
-		// 	destination: func(t *testing.T, ctx context.Context) string {
-		// 		folderName := tester.DefaultTestRestoreDestination("mailbasicattch").ContainerName
-		// 		folder, err := handlers[path.EmailCategory].
-		// 			CreateContainer(ctx, userID, folderName, "")
-		// 		require.NoError(t, err, clues.ToCore(err))
-
-		// 		return ptr.Val(folder.GetId())
-		// 	},
-		// },
-		// vales here////
+		{
+			name:  "Test Mail: Hydrated Item Attachment Mail",
+			bytes: exchMock.MessageWithNestedItemAttachmentMail(t,
+				exchMock.MessageBytes("Basic Item Attachment"),
+				"Mail Item Attachment",
+			),
+			category: path.EmailCategory,
+			destination: func(t *testing.T, ctx context.Context) string {
+				folderName := testdata.DefaultRestoreConfig("mailbasicattch").Location
+				folder, err := handlers[path.EmailCategory].
+					CreateContainer(ctx, userID, folderName, "")
+				require.NoError(t, err, clues.ToCore(err))
+
+				return ptr.Val(folder.GetId())
+			},
+		},
 		{
 			name: "Test Mail: Hydrated Item Attachment Mail One Attach",
 			bytes: exchMock.MessageWithNestedItemAttachmentMail(t,
@@ -227,7 +226,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
 			),
 			category: path.EmailCategory,
 			destination: func(t *testing.T, ctx context.Context) string {
-				folderName := tester.DefaultTestRestoreDestination("mailnestattch").ContainerName
+				folderName := testdata.DefaultRestoreConfig("mailnestattch").Location
 				folder, err := handlers[path.EmailCategory].
 					CreateContainer(ctx, userID, folderName, "")
 				require.NoError(t, err, clues.ToCore(err))
@@ -244,7 +243,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
 			),
 			category: path.EmailCategory,
 			destination: func(t *testing.T, ctx context.Context) string {
-				folderName := tester.DefaultTestRestoreDestination("mailcontactattch").ContainerName
+				folderName := testdata.DefaultRestoreConfig("mailcontactattch").Location
 				folder, err := handlers[path.EmailCategory].
 					CreateContainer(ctx, userID, folderName, "")
 				require.NoError(t, err, clues.ToCore(err))
@@ -252,25 +251,25 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
 				return ptr.Val(folder.GetId())
 			},
 		},
-		// { // Restore will upload the Message without uploading the attachment
-		// 	name:     "Test Mail: Item Attachment_NestedEvent",
-		// 	bytes:    exchMock.MessageWithNestedItemAttachmentEvent("Nested Item Attachment"),
-		// 	category: path.EmailCategory,
-		// 	destination: func(t *testing.T, ctx context.Context) string {
-		// 		folderName := tester.DefaultTestRestoreDestination("nestedattch").ContainerName
-		// 		folder, err := handlers[path.EmailCategory].
-		// 			CreateContainer(ctx, userID, folderName, "")
-		// 		require.NoError(t, err, clues.ToCore(err))
-
-		// 		return ptr.Val(folder.GetId())
-		// 	},
-		// },
+		{ // Restore will upload the Message without uploading the attachment
+			name:     "Test Mail: Item Attachment_NestedEvent",
+			bytes:    exchMock.MessageWithNestedItemAttachmentEvent("Nested Item Attachment"),
+			category: path.EmailCategory,
+			destination: func(t *testing.T, ctx context.Context) string {
+				folderName := testdata.DefaultRestoreConfig("nestedattch").Location
+				folder, err := handlers[path.EmailCategory].
+					CreateContainer(ctx, userID, folderName, "")
+				require.NoError(t, err, clues.ToCore(err))
+
+				return ptr.Val(folder.GetId())
+			},
+		},
 		{
 			name:     "Test Mail: One Large Attachment",
 			bytes:    exchMock.MessageWithLargeAttachment("Restore Large Attachment"),
 			category: path.EmailCategory,
 			destination: func(t *testing.T, ctx context.Context) string {
-				folderName := tester.DefaultTestRestoreDestination("maillargeattch").ContainerName
+				folderName := testdata.DefaultRestoreConfig("maillargeattch").Location
 				folder, err := handlers[path.EmailCategory].
 					CreateContainer(ctx, userID, folderName, "")
 				require.NoError(t, err, clues.ToCore(err))
@@ -283,7 +282,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
 			bytes:    exchMock.MessageWithTwoAttachments("Restore 2 Attachments"),
 			category: path.EmailCategory,
 			destination: func(t *testing.T, ctx context.Context) string {
-				folderName := tester.DefaultTestRestoreDestination("mailtwoattch").ContainerName
+				folderName := testdata.DefaultRestoreConfig("mailtwoattch").Location
 				folder, err := handlers[path.EmailCategory].
 					CreateContainer(ctx, userID, folderName, "")
 				require.NoError(t, err, clues.ToCore(err))
@@ -296,7 +295,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
 			bytes:    exchMock.MessageWithOneDriveAttachment("Restore Reference(OneDrive) Attachment"),
 			category: path.EmailCategory,
 			destination: func(t *testing.T, ctx context.Context) string {
-				folderName := tester.DefaultTestRestoreDestination("mailrefattch").ContainerName
+				folderName := testdata.DefaultRestoreConfig("mailrefattch").Location
 				folder, err := handlers[path.EmailCategory].
 					CreateContainer(ctx, userID, folderName, "")
 				require.NoError(t, err, clues.ToCore(err))
@@ -309,7 +308,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
 			bytes:    exchMock.ContactBytes("Test_Omega"),
 			category: path.ContactsCategory,
 			destination: func(t *testing.T, ctx context.Context) string {
-				folderName := tester.DefaultTestRestoreDestination("contact").ContainerName
+				folderName := testdata.DefaultRestoreConfig("contact").Location
 				folder, err := handlers[path.ContactsCategory].
 					CreateContainer(ctx, userID, folderName, "")
 				require.NoError(t, err, clues.ToCore(err))
@@ -322,7 +321,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
 			bytes:    exchMock.EventBytes("Restored Event Object"),
 			category: path.EventsCategory,
 			destination: func(t *testing.T, ctx context.Context) string {
-				folderName := tester.DefaultTestRestoreDestination("event").ContainerName
+				folderName := testdata.DefaultRestoreConfig("event").Location
 				calendar, err := handlers[path.EventsCategory].
 					CreateContainer(ctx, userID, folderName, "")
 				require.NoError(t, err, clues.ToCore(err))
@@ -335,7 +334,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
 			bytes:    exchMock.EventWithAttachment("Restored Event Attachment"),
 			category: path.EventsCategory,
 			destination: func(t *testing.T, ctx context.Context) string {
-				folderName := tester.DefaultTestRestoreDestination("eventobj").ContainerName
+				folderName := testdata.DefaultRestoreConfig("eventobj").Location
 				calendar, err := handlers[path.EventsCategory].
 					CreateContainer(ctx, userID, folderName, "")
 				require.NoError(t, err, clues.ToCore(err))
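All of these cases share one table-driven shape: raw item bytes, a path category, and a destination closure that lazily creates the target container. The loop that consumes the table isn't shown in this diff; a sketch of what it plausibly looks like — the restore call and context helpers are assumptions based on the handler usage above:

for _, test := range tests {
	suite.Run(test.name, func() {
		t := suite.T()

		ctx, flush := tester.NewContext()
		defer flush()

		// Create (or reuse) the destination container, then restore the
		// serialized item into it.
		destID := test.destination(t, ctx)

		info, err := handlers[test.category].
			restore(ctx, test.bytes, userID, destID, fault.New(true))
		require.NoError(t, err, clues.ToCore(err))
		require.NotNil(t, info)
	})
}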

View File

@@ -7,8 +7,8 @@ import (
 	"github.com/alcionai/clues"
 	"github.com/stretchr/testify/require"

-	"github.com/alcionai/corso/src/internal/connector/exchange"
-	"github.com/alcionai/corso/src/internal/connector/graph"
+	"github.com/alcionai/corso/src/internal/m365/exchange"
+	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"

View File

@@ -10,7 +10,7 @@ import (
 	"github.com/stretchr/testify/suite"

 	"github.com/alcionai/corso/src/internal/common/ptr"
-	exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
+	exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )

View File

@@ -8,7 +8,7 @@ import (
 	ktext "github.com/microsoft/kiota-serialization-text-go"
 	msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"

-	i1a3c1a5501c5e41b7fd169f2d4c768dce9b096ac28fb5431bf02afcc57295411 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/sites"
+	i1a3c1a5501c5e41b7fd169f2d4c768dce9b096ac28fb5431bf02afcc57295411 "github.com/alcionai/corso/src/internal/m365/graph/betasdk/sites"
 )

 // BetaClient the main entry point of the SDK, exposes the configuration and the fluent API.
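Since BetaClient is kiota-generated (hence the hashed import alias for the sites package), it is wired up through a request adapter. A rough sketch of that wiring; the constructor and fluent-path names follow kiota conventions and are assumptions here, not verified against this repo:

// auth is an already-configured azure auth provider.
adapter, err := msgraphsdk.NewGraphRequestAdapter(auth)
if err != nil {
	return nil, clues.Wrap(err, "creating graph adapter")
}

// NewBetaClient is the conventional kiota constructor name (assumed).
client := betasdk.NewBetaClient(adapter)

// Fluent request building then hangs off the client, e.g.:
// client.SitesById(siteID).Pages().Get(ctx, nil)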

View File

@@ -8,7 +8,7 @@ import (
 	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"

-	"github.com/alcionai/corso/src/internal/connector/graph"
+	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/account"
 )

View File

@@ -2,7 +2,7 @@
   "lockFileVersion": "1.0.0",
   "kiotaVersion": "0.10.0.0",
   "clientClassName": "BetaClient",
-  "clientNamespaceName": "github.com/alcionai/corso/src/internal/connector/graph/betasdk",
+  "clientNamespaceName": "github.com/alcionai/corso/src/internal/m365/graph/betasdk",
   "language": "Go",
   "betaVersion": "0.53.0",
   "usesBackingStore": false,

Some files were not shown because too many files have changed in this diff.