Compare commits

...

19 Commits

Author SHA1 Message Date
ryanfkeepers
2eaeaf1f04 Wrap all assert/require error assertion funcs
Since structured error data from clues is not added
to error message logging from assert and require,
we're slicing out data that is valuable to debugging.
This adds a package in tester which wraps the error
assertion funcs so that structured error data is
appended to all failure outputs.
2023-02-23 15:25:27 -07:00
ryanfkeepers
4a882dec3b add logger CtxErr() qol func 2023-02-21 13:00:34 -07:00
ryanfkeepers
294ee7173e remove common errors
Now that clues is in place, we don't need the
errors.As handling provided by common errors.
Current errors.As can be replaced with either
errors.Is checks, or HasLabel checks on the
status code.
2023-02-21 10:42:41 -07:00
ryanfkeepers
5cf020d5c8 remove stats.Errs
Now that fault errors is in place, we don't need
the operation stats errs.
2023-02-20 18:58:26 -07:00
ryanfkeepers
3de82f9a5b refactor graph.ErrData 2023-02-20 18:53:44 -07:00
ryanfkeepers
27e9661e63 remove wrap and append support 2023-02-20 18:09:38 -07:00
ryanfkeepers
aa90a36e76 amend kopia error clues 2023-02-20 17:56:21 -07:00
ryanfkeepers
e2530a0bd9 remove readErr and writeErr from operations
Now that fault is in place, we can remove the
readErrs and writeErrs from operation persistence.
2023-02-20 17:32:49 -07:00
ryanfkeepers
7b50d2401a remove errors from connector status
Now that fault is in place, we can remove the error
tracking functionality of graph status, and let that
focus purely on metrics.
2023-02-20 17:21:03 -07:00
ryanfkeepers
5593352226 fix unit tests 2023-02-20 15:28:23 -07:00
ryanfkeepers
f88cf05117 fault package funcs rename
Renaming the funcs in the fault
package to be more clear about
their purpose and behavior.  Largely
just find&replace changes, except
for fault.go and the fault examples.
2023-02-20 14:44:09 -07:00
ryanfkeepers
7b1682c68d last little onedrive completion 2023-02-19 09:05:17 -07:00
ryanfkeepers
88b5df728b add clues & fault to onedrive collections 2023-02-19 09:03:10 -07:00
ryanfkeepers
c62c246ee2 fix clues addall and withall 2023-02-19 08:31:41 -07:00
ryanfkeepers
c5b5a60d4e use tracker 2023-02-19 08:30:38 -07:00
ryanfkeepers
734e90c960 adding clues & fault to onedrive restore 2023-02-19 08:30:38 -07:00
ryanfkeepers
3edc74c170 linter fix 2023-02-19 08:26:37 -07:00
ryanfkeepers
66f734b4db watch for et errors 2023-02-19 08:26:09 -07:00
ryanfkeepers
b4a31c08dd add fault.tracker for error additions
Realized we had a race condition: in an async
runtime it's possible for an errs.Err() to be
returned by multiple functions, even though that
Err() was only sourced by one of them.  The
addition of a tracker contains the returned
error into the scope of that func so that only
the error produced in the current iteration is
returned.
2023-02-19 08:25:02 -07:00
170 changed files with 2721 additions and 3104 deletions

View File

@ -314,8 +314,8 @@ func createExchangeCmd(cmd *cobra.Command, args []string) error {
bups, ferrs := r.Backups(ctx, bIDs) bups, ferrs := r.Backups(ctx, bIDs)
// TODO: print/log recoverable errors // TODO: print/log recoverable errors
if ferrs.Err() != nil { if ferrs.Failure() != nil {
return Only(ctx, errors.Wrap(ferrs.Err(), "Unable to retrieve backup results from storage")) return Only(ctx, errors.Wrap(ferrs.Failure(), "Unable to retrieve backup results from storage"))
} }
backup.PrintAll(ctx, bups) backup.PrintAll(ctx, bups)
@ -492,7 +492,7 @@ func detailsExchangeCmd(cmd *cobra.Command, args []string) error {
// runDetailsExchangeCmd actually performs the lookup in backup details. // runDetailsExchangeCmd actually performs the lookup in backup details.
// the fault.Errors return is always non-nil. Callers should check if // the fault.Errors return is always non-nil. Callers should check if
// errs.Err() == nil. // errs.Failure() == nil.
func runDetailsExchangeCmd( func runDetailsExchangeCmd(
ctx context.Context, ctx context.Context,
r repository.BackupGetter, r repository.BackupGetter,
@ -505,12 +505,12 @@ func runDetailsExchangeCmd(
d, _, errs := r.BackupDetails(ctx, backupID) d, _, errs := r.BackupDetails(ctx, backupID)
// TODO: log/track recoverable errors // TODO: log/track recoverable errors
if errs.Err() != nil { if errs.Failure() != nil {
if errors.Is(errs.Err(), data.ErrNotFound) { if errors.Is(errs.Failure(), data.ErrNotFound) {
return nil, errors.Errorf("No backup exists with the id %s", backupID) return nil, errors.Errorf("No backup exists with the id %s", backupID)
} }
return nil, errors.Wrap(errs.Err(), "Failed to get backup details in the repository") return nil, errors.Wrap(errs.Failure(), "Failed to get backup details in the repository")
} }
sel := utils.IncludeExchangeRestoreDataSelectors(opts) sel := utils.IncludeExchangeRestoreDataSelectors(opts)

View File

@ -18,6 +18,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/exchange" "github.com/alcionai/corso/src/internal/connector/exchange"
"github.com/alcionai/corso/src/internal/operations" "github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -73,7 +74,7 @@ func (suite *NoBackupExchangeIntegrationSuite) SetupSuite() {
suite.recorder = strings.Builder{} suite.recorder = strings.Builder{}
cfg, err := suite.st.S3Config() cfg, err := suite.st.S3Config()
require.NoError(t, err) aw.MustNoErr(t, err)
force := map[string]string{ force := map[string]string{
tester.TestCfgAccountProvider: "M365", tester.TestCfgAccountProvider: "M365",
@ -88,7 +89,7 @@ func (suite *NoBackupExchangeIntegrationSuite) SetupSuite() {
// init the repo first // init the repo first
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{}) suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
require.NoError(t, err) aw.MustNoErr(t, err)
} }
func (suite *NoBackupExchangeIntegrationSuite) TestExchangeBackupListCmd_empty() { func (suite *NoBackupExchangeIntegrationSuite) TestExchangeBackupListCmd_empty() {
@ -110,7 +111,7 @@ func (suite *NoBackupExchangeIntegrationSuite) TestExchangeBackupListCmd_empty()
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) aw.MustNoErr(t, cmd.ExecuteContext(ctx))
result := suite.recorder.String() result := suite.recorder.String()
@ -155,7 +156,7 @@ func (suite *BackupExchangeIntegrationSuite) SetupSuite() {
suite.st = tester.NewPrefixedS3Storage(t) suite.st = tester.NewPrefixedS3Storage(t)
cfg, err := suite.st.S3Config() cfg, err := suite.st.S3Config()
require.NoError(t, err) aw.MustNoErr(t, err)
force := map[string]string{ force := map[string]string{
tester.TestCfgAccountProvider: "M365", tester.TestCfgAccountProvider: "M365",
@ -170,7 +171,7 @@ func (suite *BackupExchangeIntegrationSuite) SetupSuite() {
// init the repo first // init the repo first
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{}) suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
require.NoError(t, err) aw.MustNoErr(t, err)
} }
func (suite *BackupExchangeIntegrationSuite) TestExchangeBackupCmd() { func (suite *BackupExchangeIntegrationSuite) TestExchangeBackupCmd() {
@ -196,7 +197,7 @@ func (suite *BackupExchangeIntegrationSuite) TestExchangeBackupCmd() {
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) aw.MustNoErr(t, cmd.ExecuteContext(ctx))
result := recorder.String() result := recorder.String()
t.Log("backup results", result) t.Log("backup results", result)
@ -243,7 +244,7 @@ func (suite *PreparedBackupExchangeIntegrationSuite) SetupSuite() {
suite.recorder = strings.Builder{} suite.recorder = strings.Builder{}
cfg, err := suite.st.S3Config() cfg, err := suite.st.S3Config()
require.NoError(t, err) aw.MustNoErr(t, err)
force := map[string]string{ force := map[string]string{
tester.TestCfgAccountProvider: "M365", tester.TestCfgAccountProvider: "M365",
@ -261,7 +262,7 @@ func (suite *PreparedBackupExchangeIntegrationSuite) SetupSuite() {
// init the repo first // init the repo first
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{}) suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
require.NoError(t, err) aw.MustNoErr(t, err)
suite.backupOps = make(map[path.CategoryType]string) suite.backupOps = make(map[path.CategoryType]string)
@ -287,18 +288,18 @@ func (suite *PreparedBackupExchangeIntegrationSuite) SetupSuite() {
sel.Include(scopes) sel.Include(scopes)
bop, err := suite.repo.NewBackup(ctx, sel.Selector) bop, err := suite.repo.NewBackup(ctx, sel.Selector)
require.NoError(t, bop.Run(ctx)) aw.MustNoErr(t, bop.Run(ctx))
require.NoError(t, err) aw.MustNoErr(t, err)
bIDs := string(bop.Results.BackupID) bIDs := string(bop.Results.BackupID)
// sanity check, ensure we can find the backup and its details immediately // sanity check, ensure we can find the backup and its details immediately
b, err := suite.repo.Backup(ctx, bop.Results.BackupID) b, err := suite.repo.Backup(ctx, bop.Results.BackupID)
require.NoError(t, err, "retrieving recent backup by ID") aw.MustNoErr(t, err, "retrieving recent backup by ID")
require.Equal(t, bIDs, string(b.ID), "repo backup matches results id") require.Equal(t, bIDs, string(b.ID), "repo backup matches results id")
_, b, errs := suite.repo.BackupDetails(ctx, bIDs) _, b, errs := suite.repo.BackupDetails(ctx, bIDs)
require.NoError(t, errs.Err(), "retrieving recent backup details by ID") aw.MustNoErr(t, errs.Failure(), "retrieving recent backup details by ID")
require.Empty(t, errs.Errs(), "retrieving recent backup details by ID") require.Empty(t, errs.Recovered(), "retrieving recent backup details by ID")
require.Equal(t, bIDs, string(b.ID), "repo details matches results id") require.Equal(t, bIDs, string(b.ID), "repo details matches results id")
suite.backupOps[set] = string(b.ID) suite.backupOps[set] = string(b.ID)
@ -324,7 +325,7 @@ func (suite *PreparedBackupExchangeIntegrationSuite) TestExchangeListCmd() {
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) aw.MustNoErr(t, cmd.ExecuteContext(ctx))
// compare the output // compare the output
result := suite.recorder.String() result := suite.recorder.String()
@ -355,7 +356,7 @@ func (suite *PreparedBackupExchangeIntegrationSuite) TestExchangeListCmd_singleI
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) aw.MustNoErr(t, cmd.ExecuteContext(ctx))
// compare the output // compare the output
result := suite.recorder.String() result := suite.recorder.String()
@ -380,7 +381,7 @@ func (suite *PreparedBackupExchangeIntegrationSuite) TestExchangeListCmd_badID()
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
// run the command // run the command
require.Error(t, cmd.ExecuteContext(ctx)) aw.MustErr(t, cmd.ExecuteContext(ctx))
}) })
} }
} }
@ -398,8 +399,8 @@ func (suite *PreparedBackupExchangeIntegrationSuite) TestExchangeDetailsCmd() {
// fetch the details from the repo first // fetch the details from the repo first
deets, _, errs := suite.repo.BackupDetails(ctx, string(bID)) deets, _, errs := suite.repo.BackupDetails(ctx, string(bID))
require.NoError(t, errs.Err()) aw.MustNoErr(t, errs.Failure())
require.Empty(t, errs.Errs()) require.Empty(t, errs.Recovered())
cmd := tester.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "details", "exchange", "backup", "details", "exchange",
@ -412,7 +413,7 @@ func (suite *PreparedBackupExchangeIntegrationSuite) TestExchangeDetailsCmd() {
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) aw.MustNoErr(t, cmd.ExecuteContext(ctx))
// compare the output // compare the output
result := suite.recorder.String() result := suite.recorder.String()
@ -473,7 +474,7 @@ func (suite *BackupDeleteExchangeIntegrationSuite) SetupSuite() {
suite.st = tester.NewPrefixedS3Storage(t) suite.st = tester.NewPrefixedS3Storage(t)
cfg, err := suite.st.S3Config() cfg, err := suite.st.S3Config()
require.NoError(t, err) aw.MustNoErr(t, err)
force := map[string]string{ force := map[string]string{
tester.TestCfgAccountProvider: "M365", tester.TestCfgAccountProvider: "M365",
@ -489,7 +490,7 @@ func (suite *BackupDeleteExchangeIntegrationSuite) SetupSuite() {
// init the repo first // init the repo first
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{}) suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
require.NoError(t, err) aw.MustNoErr(t, err)
m365UserID := tester.M365UserID(t) m365UserID := tester.M365UserID(t)
users := []string{m365UserID} users := []string{m365UserID}
@ -499,8 +500,8 @@ func (suite *BackupDeleteExchangeIntegrationSuite) SetupSuite() {
sel.Include(sel.MailFolders([]string{exchange.DefaultMailFolder}, selectors.PrefixMatch())) sel.Include(sel.MailFolders([]string{exchange.DefaultMailFolder}, selectors.PrefixMatch()))
suite.backupOp, err = suite.repo.NewBackup(ctx, sel.Selector) suite.backupOp, err = suite.repo.NewBackup(ctx, sel.Selector)
require.NoError(t, suite.backupOp.Run(ctx)) aw.MustNoErr(t, suite.backupOp.Run(ctx))
require.NoError(t, err) aw.MustNoErr(t, err)
} }
func (suite *BackupDeleteExchangeIntegrationSuite) TestExchangeBackupDeleteCmd() { func (suite *BackupDeleteExchangeIntegrationSuite) TestExchangeBackupDeleteCmd() {
@ -517,7 +518,7 @@ func (suite *BackupDeleteExchangeIntegrationSuite) TestExchangeBackupDeleteCmd()
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) aw.MustNoErr(t, cmd.ExecuteContext(ctx))
// a follow-up details call should fail, due to the backup ID being deleted // a follow-up details call should fail, due to the backup ID being deleted
cmd = tester.StubRootCmd( cmd = tester.StubRootCmd(
@ -526,7 +527,7 @@ func (suite *BackupDeleteExchangeIntegrationSuite) TestExchangeBackupDeleteCmd()
"--backup", string(suite.backupOp.Results.BackupID)) "--backup", string(suite.backupOp.Results.BackupID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
require.Error(t, cmd.ExecuteContext(ctx)) aw.MustErr(t, cmd.ExecuteContext(ctx))
} }
func (suite *BackupDeleteExchangeIntegrationSuite) TestExchangeBackupDeleteCmd_UnknownID() { func (suite *BackupDeleteExchangeIntegrationSuite) TestExchangeBackupDeleteCmd_UnknownID() {
@ -543,5 +544,5 @@ func (suite *BackupDeleteExchangeIntegrationSuite) TestExchangeBackupDeleteCmd_U
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// unknown backupIDs should error since the modelStore can't find the backup // unknown backupIDs should error since the modelStore can't find the backup
require.Error(t, cmd.ExecuteContext(ctx)) aw.MustErr(t, cmd.ExecuteContext(ctx))
} }

View File

@ -11,6 +11,7 @@ import (
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/cli/utils/testdata" "github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
) )
type ExchangeSuite struct { type ExchangeSuite struct {
@ -74,23 +75,23 @@ func (suite *ExchangeSuite) TestValidateBackupCreateFlags() {
}{ }{
{ {
name: "no users or data", name: "no users or data",
expect: assert.Error, expect: aw.Err,
}, },
{ {
name: "no users only data", name: "no users only data",
data: []string{dataEmail}, data: []string{dataEmail},
expect: assert.Error, expect: aw.Err,
}, },
{ {
name: "unrecognized data category", name: "unrecognized data category",
user: []string{"fnord"}, user: []string{"fnord"},
data: []string{"smurfs"}, data: []string{"smurfs"},
expect: assert.Error, expect: aw.Err,
}, },
{ {
name: "only users no data", name: "only users no data",
user: []string{"fnord"}, user: []string{"fnord"},
expect: assert.NoError, expect: aw.NoErr,
}, },
} }
for _, test := range table { for _, test := range table {
@ -224,7 +225,7 @@ func (suite *ExchangeSuite) TestExchangeBackupDetailsSelectors() {
test.BackupGetter, test.BackupGetter,
"backup-ID", "backup-ID",
test.Opts) test.Opts)
assert.NoError(t, err, "failure") aw.NoErr(t, err, "failure")
assert.ElementsMatch(t, test.Expected, output.Entries) assert.ElementsMatch(t, test.Expected, output.Entries)
}) })
} }
@ -241,7 +242,7 @@ func (suite *ExchangeSuite) TestExchangeBackupDetailsSelectorsBadFormats() {
test.BackupGetter, test.BackupGetter,
"backup-ID", "backup-ID",
test.Opts) test.Opts)
assert.Error(t, err, "failure") aw.Err(t, err, "failure")
assert.Empty(t, output) assert.Empty(t, output)
}) })
} }

View File

@ -237,8 +237,8 @@ func createOneDriveCmd(cmd *cobra.Command, args []string) error {
bups, ferrs := r.Backups(ctx, bIDs) bups, ferrs := r.Backups(ctx, bIDs)
// TODO: print/log recoverable errors // TODO: print/log recoverable errors
if ferrs.Err() != nil { if ferrs.Failure() != nil {
return Only(ctx, errors.Wrap(ferrs.Err(), "Unable to retrieve backup results from storage")) return Only(ctx, errors.Wrap(ferrs.Failure(), "Unable to retrieve backup results from storage"))
} }
backup.PrintAll(ctx, bups) backup.PrintAll(ctx, bups)
@ -384,7 +384,7 @@ func detailsOneDriveCmd(cmd *cobra.Command, args []string) error {
// runDetailsOneDriveCmd actually performs the lookup in backup details. // runDetailsOneDriveCmd actually performs the lookup in backup details.
// the fault.Errors return is always non-nil. Callers should check if // the fault.Errors return is always non-nil. Callers should check if
// errs.Err() == nil. // errs.Failure() == nil.
func runDetailsOneDriveCmd( func runDetailsOneDriveCmd(
ctx context.Context, ctx context.Context,
r repository.BackupGetter, r repository.BackupGetter,
@ -397,12 +397,12 @@ func runDetailsOneDriveCmd(
d, _, errs := r.BackupDetails(ctx, backupID) d, _, errs := r.BackupDetails(ctx, backupID)
// TODO: log/track recoverable errors // TODO: log/track recoverable errors
if errs.Err() != nil { if errs.Failure() != nil {
if errors.Is(errs.Err(), data.ErrNotFound) { if errors.Is(errs.Failure(), data.ErrNotFound) {
return nil, errors.Errorf("no backup exists with the id %s", backupID) return nil, errors.Errorf("no backup exists with the id %s", backupID)
} }
return nil, errors.Wrap(errs.Err(), "Failed to get backup details in the repository") return nil, errors.Wrap(errs.Failure(), "Failed to get backup details in the repository")
} }
sel := utils.IncludeOneDriveRestoreDataSelectors(opts) sel := utils.IncludeOneDriveRestoreDataSelectors(opts)

View File

@ -8,7 +8,6 @@ import (
"github.com/google/uuid" "github.com/google/uuid"
"github.com/spf13/viper" "github.com/spf13/viper"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
@ -17,6 +16,7 @@ import (
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/operations" "github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/repository" "github.com/alcionai/corso/src/pkg/repository"
@ -58,7 +58,7 @@ func (suite *NoBackupOneDriveIntegrationSuite) SetupSuite() {
suite.st = tester.NewPrefixedS3Storage(t) suite.st = tester.NewPrefixedS3Storage(t)
cfg, err := suite.st.S3Config() cfg, err := suite.st.S3Config()
require.NoError(t, err) aw.MustNoErr(t, err)
force := map[string]string{ force := map[string]string{
tester.TestCfgAccountProvider: "M365", tester.TestCfgAccountProvider: "M365",
@ -79,7 +79,7 @@ func (suite *NoBackupOneDriveIntegrationSuite) SetupSuite() {
control.Options{ control.Options{
ToggleFeatures: control.Toggles{EnablePermissionsBackup: true}, ToggleFeatures: control.Toggles{EnablePermissionsBackup: true},
}) })
require.NoError(t, err) aw.MustNoErr(t, err)
} }
func (suite *NoBackupOneDriveIntegrationSuite) TestOneDriveBackupListCmd_empty() { func (suite *NoBackupOneDriveIntegrationSuite) TestOneDriveBackupListCmd_empty() {
@ -101,7 +101,7 @@ func (suite *NoBackupOneDriveIntegrationSuite) TestOneDriveBackupListCmd_empty()
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) aw.MustNoErr(t, cmd.ExecuteContext(ctx))
result := suite.recorder.String() result := suite.recorder.String()
@ -143,7 +143,7 @@ func (suite *BackupDeleteOneDriveIntegrationSuite) SetupSuite() {
suite.st = tester.NewPrefixedS3Storage(t) suite.st = tester.NewPrefixedS3Storage(t)
cfg, err := suite.st.S3Config() cfg, err := suite.st.S3Config()
require.NoError(t, err) aw.MustNoErr(t, err)
force := map[string]string{ force := map[string]string{
tester.TestCfgAccountProvider: "M365", tester.TestCfgAccountProvider: "M365",
@ -165,7 +165,7 @@ func (suite *BackupDeleteOneDriveIntegrationSuite) SetupSuite() {
control.Options{ control.Options{
ToggleFeatures: control.Toggles{EnablePermissionsBackup: true}, ToggleFeatures: control.Toggles{EnablePermissionsBackup: true},
}) })
require.NoError(t, err) aw.MustNoErr(t, err)
m365UserID := tester.M365UserID(t) m365UserID := tester.M365UserID(t)
users := []string{m365UserID} users := []string{m365UserID}
@ -175,8 +175,8 @@ func (suite *BackupDeleteOneDriveIntegrationSuite) SetupSuite() {
sel.Include(sel.Folders(selectors.Any())) sel.Include(sel.Folders(selectors.Any()))
suite.backupOp, err = suite.repo.NewBackup(ctx, sel.Selector) suite.backupOp, err = suite.repo.NewBackup(ctx, sel.Selector)
require.NoError(t, suite.backupOp.Run(ctx)) aw.MustNoErr(t, suite.backupOp.Run(ctx))
require.NoError(t, err) aw.MustNoErr(t, err)
} }
func (suite *BackupDeleteOneDriveIntegrationSuite) TestOneDriveBackupDeleteCmd() { func (suite *BackupDeleteOneDriveIntegrationSuite) TestOneDriveBackupDeleteCmd() {
@ -198,7 +198,7 @@ func (suite *BackupDeleteOneDriveIntegrationSuite) TestOneDriveBackupDeleteCmd()
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) aw.MustNoErr(t, cmd.ExecuteContext(ctx))
result := suite.recorder.String() result := suite.recorder.String()
@ -211,7 +211,7 @@ func (suite *BackupDeleteOneDriveIntegrationSuite) TestOneDriveBackupDeleteCmd()
"--backup", string(suite.backupOp.Results.BackupID)) "--backup", string(suite.backupOp.Results.BackupID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
require.Error(t, cmd.ExecuteContext(ctx)) aw.MustErr(t, cmd.ExecuteContext(ctx))
} }
func (suite *BackupDeleteOneDriveIntegrationSuite) TestOneDriveBackupDeleteCmd_unknownID() { func (suite *BackupDeleteOneDriveIntegrationSuite) TestOneDriveBackupDeleteCmd_unknownID() {
@ -228,5 +228,5 @@ func (suite *BackupDeleteOneDriveIntegrationSuite) TestOneDriveBackupDeleteCmd_u
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// unknown backupIDs should error since the modelStore can't find the backup // unknown backupIDs should error since the modelStore can't find the backup
require.Error(t, cmd.ExecuteContext(ctx)) aw.MustErr(t, cmd.ExecuteContext(ctx))
} }

View File

@ -10,6 +10,7 @@ import (
"github.com/alcionai/corso/src/cli/utils/testdata" "github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
) )
type OneDriveSuite struct { type OneDriveSuite struct {
@ -73,12 +74,12 @@ func (suite *OneDriveSuite) TestValidateOneDriveBackupCreateFlags() {
}{ }{
{ {
name: "no users", name: "no users",
expect: assert.Error, expect: aw.Err,
}, },
{ {
name: "users", name: "users",
user: []string{"fnord"}, user: []string{"fnord"},
expect: assert.NoError, expect: aw.NoErr,
}, },
} }
for _, test := range table { for _, test := range table {
@ -99,7 +100,7 @@ func (suite *OneDriveSuite) TestOneDriveBackupDetailsSelectors() {
test.BackupGetter, test.BackupGetter,
"backup-ID", "backup-ID",
test.Opts) test.Opts)
assert.NoError(t, err) aw.NoErr(t, err)
assert.ElementsMatch(t, test.Expected, output.Entries) assert.ElementsMatch(t, test.Expected, output.Entries)
}) })
} }
@ -116,7 +117,7 @@ func (suite *OneDriveSuite) TestOneDriveBackupDetailsSelectorsBadFormats() {
test.BackupGetter, test.BackupGetter,
"backup-ID", "backup-ID",
test.Opts) test.Opts)
assert.Error(t, err) aw.Err(t, err)
assert.Empty(t, output) assert.Empty(t, output)
}) })
} }

View File

@ -257,8 +257,8 @@ func createSharePointCmd(cmd *cobra.Command, args []string) error {
bups, ferrs := r.Backups(ctx, bIDs) bups, ferrs := r.Backups(ctx, bIDs)
// TODO: print/log recoverable errors // TODO: print/log recoverable errors
if ferrs.Err() != nil { if ferrs.Failure() != nil {
return Only(ctx, errors.Wrap(ferrs.Err(), "Unable to retrieve backup results from storage")) return Only(ctx, errors.Wrap(ferrs.Failure(), "Unable to retrieve backup results from storage"))
} }
backup.PrintAll(ctx, bups) backup.PrintAll(ctx, bups)
@ -506,7 +506,7 @@ func detailsSharePointCmd(cmd *cobra.Command, args []string) error {
// runDetailsSharePointCmd actually performs the lookup in backup details. // runDetailsSharePointCmd actually performs the lookup in backup details.
// the fault.Errors return is always non-nil. Callers should check if // the fault.Errors return is always non-nil. Callers should check if
// errs.Err() == nil. // errs.Failure() == nil.
func runDetailsSharePointCmd( func runDetailsSharePointCmd(
ctx context.Context, ctx context.Context,
r repository.BackupGetter, r repository.BackupGetter,
@ -519,12 +519,12 @@ func runDetailsSharePointCmd(
d, _, errs := r.BackupDetails(ctx, backupID) d, _, errs := r.BackupDetails(ctx, backupID)
// TODO: log/track recoverable errors // TODO: log/track recoverable errors
if errs.Err() != nil { if errs.Failure() != nil {
if errors.Is(errs.Err(), data.ErrNotFound) { if errors.Is(errs.Failure(), data.ErrNotFound) {
return nil, errors.Errorf("no backup exists with the id %s", backupID) return nil, errors.Errorf("no backup exists with the id %s", backupID)
} }
return nil, errors.Wrap(errs.Err(), "Failed to get backup details in the repository") return nil, errors.Wrap(errs.Failure(), "Failed to get backup details in the repository")
} }
sel := utils.IncludeSharePointRestoreDataSelectors(opts) sel := utils.IncludeSharePointRestoreDataSelectors(opts)

View File

@ -8,7 +8,6 @@ import (
"github.com/google/uuid" "github.com/google/uuid"
"github.com/spf13/viper" "github.com/spf13/viper"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
@ -17,6 +16,7 @@ import (
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/operations" "github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/repository" "github.com/alcionai/corso/src/pkg/repository"
@ -58,7 +58,7 @@ func (suite *NoBackupSharePointIntegrationSuite) SetupSuite() {
suite.st = tester.NewPrefixedS3Storage(t) suite.st = tester.NewPrefixedS3Storage(t)
cfg, err := suite.st.S3Config() cfg, err := suite.st.S3Config()
require.NoError(t, err) aw.MustNoErr(t, err)
force := map[string]string{ force := map[string]string{
tester.TestCfgAccountProvider: "M365", tester.TestCfgAccountProvider: "M365",
@ -73,7 +73,7 @@ func (suite *NoBackupSharePointIntegrationSuite) SetupSuite() {
// init the repo first // init the repo first
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{}) suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
require.NoError(t, err) aw.MustNoErr(t, err)
} }
func (suite *NoBackupSharePointIntegrationSuite) TestSharePointBackupListCmd_empty() { func (suite *NoBackupSharePointIntegrationSuite) TestSharePointBackupListCmd_empty() {
@ -95,7 +95,7 @@ func (suite *NoBackupSharePointIntegrationSuite) TestSharePointBackupListCmd_emp
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) aw.MustNoErr(t, cmd.ExecuteContext(ctx))
result := suite.recorder.String() result := suite.recorder.String()
@ -133,7 +133,7 @@ func (suite *BackupDeleteSharePointIntegrationSuite) SetupSuite() {
suite.st = tester.NewPrefixedS3Storage(t) suite.st = tester.NewPrefixedS3Storage(t)
cfg, err := suite.st.S3Config() cfg, err := suite.st.S3Config()
require.NoError(t, err) aw.MustNoErr(t, err)
force := map[string]string{ force := map[string]string{
tester.TestCfgAccountProvider: "M365", tester.TestCfgAccountProvider: "M365",
@ -149,7 +149,7 @@ func (suite *BackupDeleteSharePointIntegrationSuite) SetupSuite() {
// init the repo first // init the repo first
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{}) suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
require.NoError(t, err) aw.MustNoErr(t, err)
m365SiteID := tester.M365SiteID(t) m365SiteID := tester.M365SiteID(t)
sites := []string{m365SiteID} sites := []string{m365SiteID}
@ -159,8 +159,8 @@ func (suite *BackupDeleteSharePointIntegrationSuite) SetupSuite() {
sel.Include(sel.Libraries(selectors.Any())) sel.Include(sel.Libraries(selectors.Any()))
suite.backupOp, err = suite.repo.NewBackup(ctx, sel.Selector) suite.backupOp, err = suite.repo.NewBackup(ctx, sel.Selector)
require.NoError(t, suite.backupOp.Run(ctx)) aw.MustNoErr(t, suite.backupOp.Run(ctx))
require.NoError(t, err) aw.MustNoErr(t, err)
} }
func (suite *BackupDeleteSharePointIntegrationSuite) TestSharePointBackupDeleteCmd() { func (suite *BackupDeleteSharePointIntegrationSuite) TestSharePointBackupDeleteCmd() {
@ -182,7 +182,7 @@ func (suite *BackupDeleteSharePointIntegrationSuite) TestSharePointBackupDeleteC
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) aw.MustNoErr(t, cmd.ExecuteContext(ctx))
result := suite.recorder.String() result := suite.recorder.String()
@ -197,7 +197,7 @@ func (suite *BackupDeleteSharePointIntegrationSuite) TestSharePointBackupDeleteC
// "--backup", string(suite.backupOp.Results.BackupID)) // "--backup", string(suite.backupOp.Results.BackupID))
// cli.BuildCommandTree(cmd) // cli.BuildCommandTree(cmd)
// require.Error(t, cmd.ExecuteContext(ctx)) // aw.MustErr(t, cmd.ExecuteContext(ctx))
func (suite *BackupDeleteSharePointIntegrationSuite) TestSharePointBackupDeleteCmd_unknownID() { func (suite *BackupDeleteSharePointIntegrationSuite) TestSharePointBackupDeleteCmd_unknownID() {
t := suite.T() t := suite.T()
@ -213,5 +213,5 @@ func (suite *BackupDeleteSharePointIntegrationSuite) TestSharePointBackupDeleteC
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// unknown backupIDs should error since the modelStore can't find the backup // unknown backupIDs should error since the modelStore can't find the backup
require.Error(t, cmd.ExecuteContext(ctx)) aw.MustErr(t, cmd.ExecuteContext(ctx))
} }

View File

@ -12,6 +12,7 @@ import (
"github.com/alcionai/corso/src/cli/utils/testdata" "github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/connector" "github.com/alcionai/corso/src/internal/connector"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
@ -77,23 +78,23 @@ func (suite *SharePointSuite) TestValidateSharePointBackupCreateFlags() {
}{ }{
{ {
name: "no sites or urls", name: "no sites or urls",
expect: assert.Error, expect: aw.Err,
}, },
{ {
name: "sites", name: "sites",
site: []string{"smarf"}, site: []string{"smarf"},
expect: assert.NoError, expect: aw.NoErr,
}, },
{ {
name: "urls", name: "urls",
weburl: []string{"fnord"}, weburl: []string{"fnord"},
expect: assert.NoError, expect: aw.NoErr,
}, },
{ {
name: "both", name: "both",
site: []string{"smarf"}, site: []string{"smarf"},
weburl: []string{"fnord"}, weburl: []string{"fnord"},
expect: assert.NoError, expect: aw.NoErr,
}, },
} }
for _, test := range table { for _, test := range table {
@ -196,7 +197,7 @@ func (suite *SharePointSuite) TestSharePointBackupCreateSelectors() {
defer flush() defer flush()
sel, err := sharePointBackupCreateSelectors(ctx, test.site, test.weburl, test.data, gc) sel, err := sharePointBackupCreateSelectors(ctx, test.site, test.weburl, test.data, gc)
require.NoError(t, err) aw.MustNoErr(t, err)
assert.ElementsMatch(t, test.expect, sel.DiscreteResourceOwners()) assert.ElementsMatch(t, test.expect, sel.DiscreteResourceOwners())
}) })
@ -214,7 +215,7 @@ func (suite *SharePointSuite) TestSharePointBackupDetailsSelectors() {
test.BackupGetter, test.BackupGetter,
"backup-ID", "backup-ID",
test.Opts) test.Opts)
assert.NoError(t, err) aw.NoErr(t, err)
assert.ElementsMatch(t, test.Expected, output.Entries) assert.ElementsMatch(t, test.Expected, output.Entries)
}) })
} }
@ -231,7 +232,7 @@ func (suite *SharePointSuite) TestSharePointBackupDetailsSelectorsBadFormats() {
test.BackupGetter, test.BackupGetter,
"backup-ID", "backup-ID",
test.Opts) test.Opts)
assert.Error(t, err) aw.Err(t, err)
assert.Empty(t, output) assert.Empty(t, output)
}) })
} }

View File

@ -6,7 +6,6 @@ import (
"regexp" "regexp"
"strings" "strings"
"github.com/alcionai/clues"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"golang.org/x/exp/slices" "golang.org/x/exp/slices"
@ -130,9 +129,7 @@ func Handle() {
}() }()
if err := corsoCmd.ExecuteContext(ctx); err != nil { if err := corsoCmd.ExecuteContext(ctx); err != nil {
logger.Ctx(ctx). logger.CtxErr(ctx, err).Error("cli execution")
With("err", err).
Errorw("cli execution", clues.InErr(err).Slice()...)
os.Exit(1) os.Exit(1)
} }
} }

View File

@ -8,10 +8,10 @@ import (
"github.com/spf13/viper" "github.com/spf13/viper"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/credentials" "github.com/alcionai/corso/src/pkg/credentials"
"github.com/alcionai/corso/src/pkg/storage" "github.com/alcionai/corso/src/pkg/storage"
@ -53,20 +53,20 @@ func (suite *ConfigSuite) TestReadRepoConfigBasic() {
testConfigData := fmt.Sprintf(configFileTemplate, b, tID) testConfigData := fmt.Sprintf(configFileTemplate, b, tID)
testConfigFilePath := filepath.Join(t.TempDir(), "corso.toml") testConfigFilePath := filepath.Join(t.TempDir(), "corso.toml")
err := os.WriteFile(testConfigFilePath, []byte(testConfigData), 0o700) err := os.WriteFile(testConfigFilePath, []byte(testConfigData), 0o700)
require.NoError(t, err) aw.MustNoErr(t, err)
// Configure viper to read test config file // Configure viper to read test config file
vpr.SetConfigFile(testConfigFilePath) vpr.SetConfigFile(testConfigFilePath)
// Read and validate config // Read and validate config
require.NoError(t, vpr.ReadInConfig(), "reading repo config") aw.MustNoErr(t, vpr.ReadInConfig(), "reading repo config")
s3Cfg, err := s3ConfigsFromViper(vpr) s3Cfg, err := s3ConfigsFromViper(vpr)
require.NoError(t, err) aw.MustNoErr(t, err)
assert.Equal(t, b, s3Cfg.Bucket) assert.Equal(t, b, s3Cfg.Bucket)
m365, err := m365ConfigsFromViper(vpr) m365, err := m365ConfigsFromViper(vpr)
require.NoError(t, err) aw.MustNoErr(t, err)
assert.Equal(t, tID, m365.AzureTenantID) assert.Equal(t, tID, m365.AzureTenantID)
} }
@ -83,22 +83,22 @@ func (suite *ConfigSuite) TestWriteReadConfig() {
// Configure viper to read test config file // Configure viper to read test config file
testConfigFilePath := filepath.Join(t.TempDir(), "corso.toml") testConfigFilePath := filepath.Join(t.TempDir(), "corso.toml")
require.NoError(t, initWithViper(vpr, testConfigFilePath), "initializing repo config") aw.MustNoErr(t, initWithViper(vpr, testConfigFilePath), "initializing repo config")
s3Cfg := storage.S3Config{Bucket: bkt, DoNotUseTLS: true, DoNotVerifyTLS: true} s3Cfg := storage.S3Config{Bucket: bkt, DoNotUseTLS: true, DoNotVerifyTLS: true}
m365 := account.M365Config{AzureTenantID: tid} m365 := account.M365Config{AzureTenantID: tid}
require.NoError(t, writeRepoConfigWithViper(vpr, s3Cfg, m365), "writing repo config") aw.MustNoErr(t, writeRepoConfigWithViper(vpr, s3Cfg, m365), "writing repo config")
require.NoError(t, vpr.ReadInConfig(), "reading repo config") aw.MustNoErr(t, vpr.ReadInConfig(), "reading repo config")
readS3Cfg, err := s3ConfigsFromViper(vpr) readS3Cfg, err := s3ConfigsFromViper(vpr)
require.NoError(t, err) aw.MustNoErr(t, err)
assert.Equal(t, readS3Cfg.Bucket, s3Cfg.Bucket) assert.Equal(t, readS3Cfg.Bucket, s3Cfg.Bucket)
assert.Equal(t, readS3Cfg.DoNotUseTLS, s3Cfg.DoNotUseTLS) assert.Equal(t, readS3Cfg.DoNotUseTLS, s3Cfg.DoNotUseTLS)
assert.Equal(t, readS3Cfg.DoNotVerifyTLS, s3Cfg.DoNotVerifyTLS) assert.Equal(t, readS3Cfg.DoNotVerifyTLS, s3Cfg.DoNotVerifyTLS)
readM365, err := m365ConfigsFromViper(vpr) readM365, err := m365ConfigsFromViper(vpr)
require.NoError(t, err) aw.MustNoErr(t, err)
assert.Equal(t, readM365.AzureTenantID, m365.AzureTenantID) assert.Equal(t, readM365.AzureTenantID, m365.AzureTenantID)
} }
@ -115,13 +115,13 @@ func (suite *ConfigSuite) TestMustMatchConfig() {
// Configure viper to read test config file // Configure viper to read test config file
testConfigFilePath := filepath.Join(t.TempDir(), "corso.toml") testConfigFilePath := filepath.Join(t.TempDir(), "corso.toml")
require.NoError(t, initWithViper(vpr, testConfigFilePath), "initializing repo config") aw.MustNoErr(t, initWithViper(vpr, testConfigFilePath), "initializing repo config")
s3Cfg := storage.S3Config{Bucket: bkt} s3Cfg := storage.S3Config{Bucket: bkt}
m365 := account.M365Config{AzureTenantID: tid} m365 := account.M365Config{AzureTenantID: tid}
require.NoError(t, writeRepoConfigWithViper(vpr, s3Cfg, m365), "writing repo config") aw.MustNoErr(t, writeRepoConfigWithViper(vpr, s3Cfg, m365), "writing repo config")
require.NoError(t, vpr.ReadInConfig(), "reading repo config") aw.MustNoErr(t, vpr.ReadInConfig(), "reading repo config")
table := []struct { table := []struct {
name string name string
@ -134,7 +134,7 @@ func (suite *ConfigSuite) TestMustMatchConfig() {
storage.Bucket: bkt, storage.Bucket: bkt,
account.AzureTenantID: tid, account.AzureTenantID: tid,
}, },
errCheck: assert.NoError, errCheck: aw.NoErr,
}, },
{ {
name: "empty values", name: "empty values",
@ -142,17 +142,17 @@ func (suite *ConfigSuite) TestMustMatchConfig() {
storage.Bucket: "", storage.Bucket: "",
account.AzureTenantID: "", account.AzureTenantID: "",
}, },
errCheck: assert.NoError, errCheck: aw.NoErr,
}, },
{ {
name: "no overrides", name: "no overrides",
input: map[string]string{}, input: map[string]string{},
errCheck: assert.NoError, errCheck: aw.NoErr,
}, },
{ {
name: "nil map", name: "nil map",
input: nil, input: nil,
errCheck: assert.NoError, errCheck: aw.NoErr,
}, },
{ {
name: "no recognized keys", name: "no recognized keys",
@ -160,7 +160,7 @@ func (suite *ConfigSuite) TestMustMatchConfig() {
"fnords": "smurfs", "fnords": "smurfs",
"nonsense": "", "nonsense": "",
}, },
errCheck: assert.NoError, errCheck: aw.NoErr,
}, },
{ {
name: "mismatch", name: "mismatch",
@ -168,7 +168,7 @@ func (suite *ConfigSuite) TestMustMatchConfig() {
storage.Bucket: tid, storage.Bucket: tid,
account.AzureTenantID: bkt, account.AzureTenantID: bkt,
}, },
errCheck: assert.Error, errCheck: aw.Err,
}, },
} }
for _, test := range table { for _, test := range table {
@ -212,7 +212,7 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount() {
// Configure viper to read test config file // Configure viper to read test config file
testConfigFilePath := filepath.Join(t.TempDir(), "corso.toml") testConfigFilePath := filepath.Join(t.TempDir(), "corso.toml")
require.NoError(t, initWithViper(vpr, testConfigFilePath), "initializing repo config") aw.MustNoErr(t, initWithViper(vpr, testConfigFilePath), "initializing repo config")
s3Cfg := storage.S3Config{ s3Cfg := storage.S3Config{
Bucket: bkt, Bucket: bkt,
@ -223,14 +223,14 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount() {
} }
m365 := account.M365Config{AzureTenantID: tid} m365 := account.M365Config{AzureTenantID: tid}
require.NoError(t, writeRepoConfigWithViper(vpr, s3Cfg, m365), "writing repo config") aw.MustNoErr(t, writeRepoConfigWithViper(vpr, s3Cfg, m365), "writing repo config")
require.NoError(t, vpr.ReadInConfig(), "reading repo config") aw.MustNoErr(t, vpr.ReadInConfig(), "reading repo config")
st, ac, err := getStorageAndAccountWithViper(vpr, true, nil) st, ac, err := getStorageAndAccountWithViper(vpr, true, nil)
require.NoError(t, err, "getting storage and account from config") aw.MustNoErr(t, err, "getting storage and account from config")
readS3Cfg, err := st.S3Config() readS3Cfg, err := st.S3Config()
require.NoError(t, err, "reading s3 config from storage") aw.MustNoErr(t, err, "reading s3 config from storage")
assert.Equal(t, readS3Cfg.Bucket, s3Cfg.Bucket) assert.Equal(t, readS3Cfg.Bucket, s3Cfg.Bucket)
assert.Equal(t, readS3Cfg.Endpoint, s3Cfg.Endpoint) assert.Equal(t, readS3Cfg.Endpoint, s3Cfg.Endpoint)
assert.Equal(t, readS3Cfg.Prefix, s3Cfg.Prefix) assert.Equal(t, readS3Cfg.Prefix, s3Cfg.Prefix)
@ -238,11 +238,11 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount() {
assert.Equal(t, readS3Cfg.DoNotVerifyTLS, s3Cfg.DoNotVerifyTLS) assert.Equal(t, readS3Cfg.DoNotVerifyTLS, s3Cfg.DoNotVerifyTLS)
common, err := st.CommonConfig() common, err := st.CommonConfig()
require.NoError(t, err, "reading common config from storage") aw.MustNoErr(t, err, "reading common config from storage")
assert.Equal(t, common.CorsoPassphrase, os.Getenv(credentials.CorsoPassphrase)) assert.Equal(t, common.CorsoPassphrase, os.Getenv(credentials.CorsoPassphrase))
readM365, err := ac.M365Config() readM365, err := ac.M365Config()
require.NoError(t, err, "reading m365 config from account") aw.MustNoErr(t, err, "reading m365 config from account")
assert.Equal(t, readM365.AzureTenantID, m365.AzureTenantID) assert.Equal(t, readM365.AzureTenantID, m365.AzureTenantID)
assert.Equal(t, readM365.AzureClientID, os.Getenv(credentials.AzureClientID)) assert.Equal(t, readM365.AzureClientID, os.Getenv(credentials.AzureClientID))
assert.Equal(t, readM365.AzureClientSecret, os.Getenv(credentials.AzureClientSecret)) assert.Equal(t, readM365.AzureClientSecret, os.Getenv(credentials.AzureClientSecret))
@ -273,10 +273,10 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount_noFileOnlyOverride
} }
st, ac, err := getStorageAndAccountWithViper(vpr, false, overrides) st, ac, err := getStorageAndAccountWithViper(vpr, false, overrides)
require.NoError(t, err, "getting storage and account from config") aw.MustNoErr(t, err, "getting storage and account from config")
readS3Cfg, err := st.S3Config() readS3Cfg, err := st.S3Config()
require.NoError(t, err, "reading s3 config from storage") aw.MustNoErr(t, err, "reading s3 config from storage")
assert.Equal(t, readS3Cfg.Bucket, bkt) assert.Equal(t, readS3Cfg.Bucket, bkt)
assert.Equal(t, readS3Cfg.Endpoint, end) assert.Equal(t, readS3Cfg.Endpoint, end)
assert.Equal(t, readS3Cfg.Prefix, pfx) assert.Equal(t, readS3Cfg.Prefix, pfx)
@ -284,11 +284,11 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount_noFileOnlyOverride
assert.True(t, readS3Cfg.DoNotVerifyTLS) assert.True(t, readS3Cfg.DoNotVerifyTLS)
common, err := st.CommonConfig() common, err := st.CommonConfig()
require.NoError(t, err, "reading common config from storage") aw.MustNoErr(t, err, "reading common config from storage")
assert.Equal(t, common.CorsoPassphrase, os.Getenv(credentials.CorsoPassphrase)) assert.Equal(t, common.CorsoPassphrase, os.Getenv(credentials.CorsoPassphrase))
readM365, err := ac.M365Config() readM365, err := ac.M365Config()
require.NoError(t, err, "reading m365 config from account") aw.MustNoErr(t, err, "reading m365 config from account")
assert.Equal(t, readM365.AzureTenantID, m365.AzureTenantID) assert.Equal(t, readM365.AzureTenantID, m365.AzureTenantID)
assert.Equal(t, readM365.AzureClientID, os.Getenv(credentials.AzureClientID)) assert.Equal(t, readM365.AzureClientID, os.Getenv(credentials.AzureClientID))
assert.Equal(t, readM365.AzureClientSecret, os.Getenv(credentials.AzureClientSecret)) assert.Equal(t, readM365.AzureClientSecret, os.Getenv(credentials.AzureClientSecret))

View File

@ -5,6 +5,7 @@ import (
"context" "context"
"testing" "testing"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
@ -24,7 +25,7 @@ func (suite *PrintUnitSuite) TestOnly() {
// cannot use tester.NewContext() here: circular imports // cannot use tester.NewContext() here: circular imports
//nolint:forbidigo //nolint:forbidigo
ctx := SetRootCmd(context.Background(), c) ctx := SetRootCmd(context.Background(), c)
assert.NoError(t, Only(ctx, nil)) aw.NoErr(t, Only(ctx, nil))
assert.True(t, c.SilenceUsage) assert.True(t, c.SilenceUsage)
} }

View File

@ -3,13 +3,12 @@ package repo_test
import ( import (
"testing" "testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config" "github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/repository" "github.com/alcionai/corso/src/pkg/repository"
@ -55,7 +54,7 @@ func (suite *S3IntegrationSuite) TestInitS3Cmd() {
st := tester.NewPrefixedS3Storage(t) st := tester.NewPrefixedS3Storage(t)
cfg, err := st.S3Config() cfg, err := st.S3Config()
require.NoError(t, err) aw.MustNoErr(t, err)
vpr, configFP := tester.MakeTempTestConfigClone(t, nil) vpr, configFP := tester.MakeTempTestConfigClone(t, nil)
@ -69,12 +68,12 @@ func (suite *S3IntegrationSuite) TestInitS3Cmd() {
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) aw.MustNoErr(t, cmd.ExecuteContext(ctx))
// a second initialization should result in an error // a second initialization should result in an error
err = cmd.ExecuteContext(ctx) err = cmd.ExecuteContext(ctx)
assert.Error(t, err) aw.Err(t, err)
assert.ErrorIs(t, err, repository.ErrorRepoAlreadyExists) aw.ErrIs(t, err, repository.ErrorRepoAlreadyExists)
}) })
} }
} }
@ -87,7 +86,7 @@ func (suite *S3IntegrationSuite) TestInitMultipleTimes() {
st := tester.NewPrefixedS3Storage(t) st := tester.NewPrefixedS3Storage(t)
cfg, err := st.S3Config() cfg, err := st.S3Config()
require.NoError(t, err) aw.MustNoErr(t, err)
vpr, configFP := tester.MakeTempTestConfigClone(t, nil) vpr, configFP := tester.MakeTempTestConfigClone(t, nil)
@ -104,7 +103,7 @@ func (suite *S3IntegrationSuite) TestInitMultipleTimes() {
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) aw.MustNoErr(t, cmd.ExecuteContext(ctx))
} }
} }
@ -116,7 +115,7 @@ func (suite *S3IntegrationSuite) TestInitS3Cmd_missingBucket() {
st := tester.NewPrefixedS3Storage(t) st := tester.NewPrefixedS3Storage(t)
cfg, err := st.S3Config() cfg, err := st.S3Config()
require.NoError(t, err) aw.MustNoErr(t, err)
vpr, configFP := tester.MakeTempTestConfigClone(t, nil) vpr, configFP := tester.MakeTempTestConfigClone(t, nil)
@ -129,7 +128,7 @@ func (suite *S3IntegrationSuite) TestInitS3Cmd_missingBucket() {
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
require.Error(t, cmd.ExecuteContext(ctx)) aw.MustErr(t, cmd.ExecuteContext(ctx))
} }
func (suite *S3IntegrationSuite) TestConnectS3Cmd() { func (suite *S3IntegrationSuite) TestConnectS3Cmd() {
@ -154,7 +153,7 @@ func (suite *S3IntegrationSuite) TestConnectS3Cmd() {
st := tester.NewPrefixedS3Storage(t) st := tester.NewPrefixedS3Storage(t)
cfg, err := st.S3Config() cfg, err := st.S3Config()
require.NoError(t, err) aw.MustNoErr(t, err)
force := map[string]string{ force := map[string]string{
tester.TestCfgAccountProvider: "M365", tester.TestCfgAccountProvider: "M365",
@ -167,7 +166,7 @@ func (suite *S3IntegrationSuite) TestConnectS3Cmd() {
// init the repo first // init the repo first
_, err = repository.Initialize(ctx, account.Account{}, st, control.Options{}) _, err = repository.Initialize(ctx, account.Account{}, st, control.Options{})
require.NoError(t, err) aw.MustNoErr(t, err)
// then test it // then test it
cmd := tester.StubRootCmd( cmd := tester.StubRootCmd(
@ -179,7 +178,7 @@ func (suite *S3IntegrationSuite) TestConnectS3Cmd() {
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
assert.NoError(t, cmd.ExecuteContext(ctx)) aw.NoErr(t, cmd.ExecuteContext(ctx))
}) })
} }
} }
@ -192,7 +191,7 @@ func (suite *S3IntegrationSuite) TestConnectS3Cmd_BadBucket() {
st := tester.NewPrefixedS3Storage(t) st := tester.NewPrefixedS3Storage(t)
cfg, err := st.S3Config() cfg, err := st.S3Config()
require.NoError(t, err) aw.MustNoErr(t, err)
vpr, configFP := tester.MakeTempTestConfigClone(t, nil) vpr, configFP := tester.MakeTempTestConfigClone(t, nil)
@ -206,7 +205,7 @@ func (suite *S3IntegrationSuite) TestConnectS3Cmd_BadBucket() {
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
require.Error(t, cmd.ExecuteContext(ctx)) aw.MustErr(t, cmd.ExecuteContext(ctx))
} }
func (suite *S3IntegrationSuite) TestConnectS3Cmd_BadPrefix() { func (suite *S3IntegrationSuite) TestConnectS3Cmd_BadPrefix() {
@ -217,7 +216,7 @@ func (suite *S3IntegrationSuite) TestConnectS3Cmd_BadPrefix() {
st := tester.NewPrefixedS3Storage(t) st := tester.NewPrefixedS3Storage(t)
cfg, err := st.S3Config() cfg, err := st.S3Config()
require.NoError(t, err) aw.MustNoErr(t, err)
vpr, configFP := tester.MakeTempTestConfigClone(t, nil) vpr, configFP := tester.MakeTempTestConfigClone(t, nil)
@ -231,5 +230,5 @@ func (suite *S3IntegrationSuite) TestConnectS3Cmd_BadPrefix() {
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
require.Error(t, cmd.ExecuteContext(ctx)) aw.MustErr(t, cmd.ExecuteContext(ctx))
} }

View File

@ -14,6 +14,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/exchange" "github.com/alcionai/corso/src/internal/connector/exchange"
"github.com/alcionai/corso/src/internal/operations" "github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -64,7 +65,7 @@ func (suite *RestoreExchangeIntegrationSuite) SetupSuite() {
suite.st = tester.NewPrefixedS3Storage(t) suite.st = tester.NewPrefixedS3Storage(t)
cfg, err := suite.st.S3Config() cfg, err := suite.st.S3Config()
require.NoError(t, err) aw.MustNoErr(t, err)
force := map[string]string{ force := map[string]string{
tester.TestCfgAccountProvider: "M365", tester.TestCfgAccountProvider: "M365",
@ -78,7 +79,7 @@ func (suite *RestoreExchangeIntegrationSuite) SetupSuite() {
// init the repo first // init the repo first
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{}) suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
require.NoError(t, err) aw.MustNoErr(t, err)
suite.backupOps = make(map[path.CategoryType]operations.BackupOperation) suite.backupOps = make(map[path.CategoryType]operations.BackupOperation)
@ -102,18 +103,18 @@ func (suite *RestoreExchangeIntegrationSuite) SetupSuite() {
sel.Include(scopes) sel.Include(scopes)
bop, err := suite.repo.NewBackup(ctx, sel.Selector) bop, err := suite.repo.NewBackup(ctx, sel.Selector)
require.NoError(t, bop.Run(ctx)) aw.MustNoErr(t, bop.Run(ctx))
require.NoError(t, err) aw.MustNoErr(t, err)
suite.backupOps[set] = bop suite.backupOps[set] = bop
// sanity check, ensure we can find the backup and its details immediately // sanity check, ensure we can find the backup and its details immediately
_, err = suite.repo.Backup(ctx, bop.Results.BackupID) _, err = suite.repo.Backup(ctx, bop.Results.BackupID)
require.NoError(t, err, "retrieving recent backup by ID") aw.MustNoErr(t, err, "retrieving recent backup by ID")
_, _, errs := suite.repo.BackupDetails(ctx, string(bop.Results.BackupID)) _, _, errs := suite.repo.BackupDetails(ctx, string(bop.Results.BackupID))
require.NoError(t, errs.Err(), "retrieving recent backup details by ID") aw.MustNoErr(t, errs.Failure(), "retrieving recent backup details by ID")
require.Empty(t, errs.Errs(), "retrieving recent backup details by ID") require.Empty(t, errs.Recovered(), "retrieving recent backup details by ID")
} }
} }
@ -132,7 +133,7 @@ func (suite *RestoreExchangeIntegrationSuite) TestExchangeRestoreCmd() {
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) aw.MustNoErr(t, cmd.ExecuteContext(ctx))
}) })
} }
} }
@ -165,7 +166,7 @@ func (suite *RestoreExchangeIntegrationSuite) TestExchangeRestoreCmd_badTimeFlag
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
require.Error(t, cmd.ExecuteContext(ctx)) aw.MustErr(t, cmd.ExecuteContext(ctx))
}) })
} }
} }
@ -196,7 +197,7 @@ func (suite *RestoreExchangeIntegrationSuite) TestExchangeRestoreCmd_badBoolFlag
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
require.Error(t, cmd.ExecuteContext(ctx)) aw.MustErr(t, cmd.ExecuteContext(ctx))
}) })
} }
} }

View File

@ -8,6 +8,7 @@ import (
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
@ -30,23 +31,23 @@ func (suite *ExchangeUtilsSuite) TestValidateRestoreFlags() {
name: "with backupid", name: "with backupid",
backupID: "bid", backupID: "bid",
opts: utils.ExchangeOpts{}, opts: utils.ExchangeOpts{},
expect: assert.NoError, expect: aw.NoErr,
}, },
{ {
name: "no backupid", name: "no backupid",
opts: utils.ExchangeOpts{}, opts: utils.ExchangeOpts{},
expect: assert.Error, expect: aw.Err,
}, },
{ {
name: "valid time", name: "valid time",
backupID: "bid", backupID: "bid",
opts: utils.ExchangeOpts{EmailReceivedAfter: common.Now()}, opts: utils.ExchangeOpts{EmailReceivedAfter: common.Now()},
expect: assert.NoError, expect: aw.NoErr,
}, },
{ {
name: "invalid time", name: "invalid time",
opts: utils.ExchangeOpts{EmailReceivedAfter: "fnords"}, opts: utils.ExchangeOpts{EmailReceivedAfter: "fnords"},
expect: assert.Error, expect: aw.Err,
}, },
} }
for _, test := range table { for _, test := range table {

View File

@ -501,7 +501,7 @@ func (MockBackupGetter) Backup(
func (MockBackupGetter) Backups( func (MockBackupGetter) Backups(
context.Context, context.Context,
[]model.StableID, []model.StableID,
) ([]*backup.Backup, *fault.Errors) { ) ([]*backup.Backup, *fault.Bus) {
return nil, fault.New(false).Fail(errors.New("unexpected call to mock")) return nil, fault.New(false).Fail(errors.New("unexpected call to mock"))
} }
@ -515,7 +515,7 @@ func (MockBackupGetter) BackupsByTag(
func (bg *MockBackupGetter) BackupDetails( func (bg *MockBackupGetter) BackupDetails(
ctx context.Context, ctx context.Context,
backupID string, backupID string,
) (*details.Details, *backup.Backup, *fault.Errors) { ) (*details.Details, *backup.Backup, *fault.Bus) {
if bg == nil { if bg == nil {
return testdata.GetDetailsSet(), nil, fault.New(true) return testdata.GetDetailsSet(), nil, fault.New(true)
} }

View File

@ -6,6 +6,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
@ -25,11 +26,11 @@ func (suite *CliUtilsSuite) TestRequireProps() {
}{ }{
{ {
props: map[string]string{"exists": "I have seen the fnords!"}, props: map[string]string{"exists": "I have seen the fnords!"},
errCheck: assert.NoError, errCheck: aw.NoErr,
}, },
{ {
props: map[string]string{"not-exists": ""}, props: map[string]string{"not-exists": ""},
errCheck: assert.Error, errCheck: aw.Err,
}, },
} }
for _, test := range table { for _, test := range table {

View File

@ -53,7 +53,7 @@ func generateAndRestoreItems(
howMany int, howMany int,
dbf dataBuilderFunc, dbf dataBuilderFunc,
opts control.Options, opts control.Options,
errs *fault.Errors, errs *fault.Bus,
) (*details.Details, error) { ) (*details.Details, error) {
items := make([]item, 0, howMany) items := make([]item, 0, howMany)

View File

@ -1,7 +1,6 @@
package impl package impl
import ( import (
"github.com/alcionai/clues"
"github.com/spf13/cobra" "github.com/spf13/cobra"
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
@ -78,9 +77,8 @@ func handleExchangeEmailFactory(cmd *cobra.Command, args []string) error {
return Only(ctx, err) return Only(ctx, err)
} }
log := logger.Ctx(ctx) for _, e := range errs.Recovered() {
for _, e := range errs.Errs() { logger.CtxErr(ctx, err).Error(e.Error())
log.Errorw(e.Error(), clues.InErr(err).Slice()...)
} }
deets.PrintEntries(ctx) deets.PrintEntries(ctx)
@ -125,9 +123,8 @@ func handleExchangeCalendarEventFactory(cmd *cobra.Command, args []string) error
return Only(ctx, err) return Only(ctx, err)
} }
log := logger.Ctx(ctx) for _, e := range errs.Recovered() {
for _, e := range errs.Errs() { logger.CtxErr(ctx, err).Error(e.Error())
log.Errorw(e.Error(), clues.InErr(err).Slice()...)
} }
deets.PrintEntries(ctx) deets.PrintEntries(ctx)
@ -177,9 +174,8 @@ func handleExchangeContactFactory(cmd *cobra.Command, args []string) error {
return Only(ctx, err) return Only(ctx, err)
} }
log := logger.Ctx(ctx) for _, e := range errs.Recovered() {
for _, e := range errs.Errs() { logger.CtxErr(ctx, err).Error(e.Error())
log.Errorw(e.Error(), clues.InErr(err).Slice()...)
} }
deets.PrintEntries(ctx) deets.PrintEntries(ctx)

View File

@ -93,7 +93,7 @@ func runDisplayM365JSON(
ctx context.Context, ctx context.Context,
creds account.M365Config, creds account.M365Config,
user, itemID string, user, itemID string,
errs *fault.Errors, errs *fault.Bus,
) error { ) error {
var ( var (
bs []byte bs []byte
@ -143,7 +143,7 @@ type itemer interface {
GetItem( GetItem(
ctx context.Context, ctx context.Context,
user, itemID string, user, itemID string,
errs *fault.Errors, errs *fault.Bus,
) (serialization.Parsable, *details.ExchangeInfo, error) ) (serialization.Parsable, *details.ExchangeInfo, error)
Serialize( Serialize(
ctx context.Context, ctx context.Context,
@ -156,7 +156,7 @@ func getItem(
ctx context.Context, ctx context.Context,
itm itemer, itm itemer,
user, itemID string, user, itemID string,
errs *fault.Errors, errs *fault.Bus,
) ([]byte, error) { ) ([]byte, error) {
sp, _, err := itm.GetItem(ctx, user, itemID, errs) sp, _, err := itm.GetItem(ctx, user, itemID, errs)
if err != nil { if err != nil {

View File

@ -157,7 +157,7 @@ func purgeOneDriveFolders(
return nil, err return nil, err
} }
cfs, err := onedrive.GetAllFolders(ctx, gs, pager, prefix) cfs, err := onedrive.GetAllFolders(ctx, gs, pager, prefix, fault.New(true))
if err != nil { if err != nil {
return nil, err return nil, err
} }

View File

@ -8,6 +8,7 @@ import (
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
) )
type CommonConfigsSuite struct { type CommonConfigsSuite struct {
@ -49,9 +50,9 @@ func (suite *CommonConfigsSuite) TestUnionConfigs_string() {
bc stringConfig2 bc stringConfig2
errCheck assert.ErrorAssertionFunc errCheck assert.ErrorAssertionFunc
}{ }{
{"no error", stringConfig{keyExpect, nil}, stringConfig2{keyExpect2, nil}, assert.NoError}, {"no error", stringConfig{keyExpect, nil}, stringConfig2{keyExpect2, nil}, aw.NoErr},
{"tc error", stringConfig{keyExpect, assert.AnError}, stringConfig2{keyExpect2, nil}, assert.Error}, {"tc error", stringConfig{keyExpect, assert.AnError}, stringConfig2{keyExpect2, nil}, aw.Err},
{"fc error", stringConfig{keyExpect, nil}, stringConfig2{keyExpect2, assert.AnError}, assert.Error}, {"fc error", stringConfig{keyExpect, nil}, stringConfig2{keyExpect2, assert.AnError}, aw.Err},
} }
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {

View File

@ -1,62 +0,0 @@
package common
import (
"fmt"
"io"
)
// TODO: Remove in favor of clues.Stack()
// Err provides boiler-plate functions that other types of errors can use
// if they wish to be compared with `errors.As()`. This struct ensures that
// stack traces are printed when requested (if present) and that Err
// chains `errors.As()`, `errors.Is()`, and `errors.Cause()` calls properly.
//
// When using errors.As, note that the variable that is passed as the second
// parameter must be a pointer to a type that exactly matches the returned type of the error previously. For
// example, if a struct was returned, the second parameter should be a pointer
// to said struct. If a pointer to a struct was returned, then a pointer to a
// pointer of the struct should be passed.
type Err struct {
Err error
}
func EncapsulateError(e error) *Err {
return &Err{Err: e}
}
func (e Err) Error() string {
return e.Err.Error()
}
func (e Err) Cause() error {
return e.Err
}
func (e Err) Unwrap() error {
return e.Err
}
// Format complies with the Formatter interface and gives pretty printing when
// functions like `fmt.Printf("%+v")` are called. Implementing this allows Err
// to print stack traces from the encapsulated error.
func (e Err) Format(s fmt.State, verb rune) {
if f, ok := e.Err.(fmt.Formatter); ok {
f.Format(s, verb)
return
}
// Formatting magic courtesy of github.com/pkg/errors.
switch verb {
case 'v':
if s.Flag('+') {
fmt.Fprintf(s, "%+v\n", e.Cause())
return
}
fallthrough
case 's', 'q':
// nolint:errcheck
_, _ = io.WriteString(s, e.Error())
}
}

View File

@ -1,84 +0,0 @@
package common_test
import (
"fmt"
"testing"
"github.com/pkg/errors"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/tester"
)
type testErr struct {
common.Err
}
type testErr2 struct {
common.Err
}
type ErrorsUnitSuite struct {
tester.Suite
}
func TestErrorsUnitSuite(t *testing.T) {
s := &ErrorsUnitSuite{Suite: tester.NewUnitSuite(t)}
suite.Run(t, s)
}
func (suite *ErrorsUnitSuite) TestPropagatesCause() {
err := assert.AnError
te := testErr{*common.EncapsulateError(err)}
te2 := testErr2{*common.EncapsulateError(te)}
assert.Equal(suite.T(), assert.AnError, errors.Cause(te2))
}
func (suite *ErrorsUnitSuite) TestPropagatesIs() {
err := assert.AnError
te := testErr{*common.EncapsulateError(err)}
te2 := testErr2{*common.EncapsulateError(te)}
assert.True(suite.T(), errors.Is(te2, err))
}
func (suite *ErrorsUnitSuite) TestPropagatesAs() {
err := assert.AnError
te := testErr{*common.EncapsulateError(err)}
te2 := testErr2{*common.EncapsulateError(te)}
tmp := testErr{}
assert.True(suite.T(), errors.As(te2, &tmp))
}
func (suite *ErrorsUnitSuite) TestAs() {
err := assert.AnError
te := testErr{*common.EncapsulateError(err)}
te2 := testErr2{*common.EncapsulateError(te)}
tmp := testErr2{}
assert.True(suite.T(), errors.As(te2, &tmp))
}
func (suite *ErrorsUnitSuite) TestAsIsUnique() {
err := assert.AnError
te := testErr{*common.EncapsulateError(err)}
tmp := testErr2{}
assert.False(suite.T(), errors.As(te, &tmp))
}
func (suite *ErrorsUnitSuite) TestPrintsStack() {
err := assert.AnError
err = errors.Wrap(err, "wrapped error")
te := testErr{*common.EncapsulateError(err)}
te2 := testErr2{*common.EncapsulateError(te)}
out := fmt.Sprintf("%+v", te2)
// Stack trace should include a line noting that we're running testify.
assert.Contains(suite.T(), out, "testify")
}

View File

@ -5,11 +5,11 @@ import (
"time" "time"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
) )
type CommonTimeUnitSuite struct { type CommonTimeUnitSuite struct {
@ -48,14 +48,14 @@ func (suite *CommonTimeUnitSuite) TestParseTime() {
nowStr := now.Format(time.RFC3339Nano) nowStr := now.Format(time.RFC3339Nano)
result, err := common.ParseTime(nowStr) result, err := common.ParseTime(nowStr)
require.NoError(t, err) aw.MustNoErr(t, err)
assert.Equal(t, now.UTC(), result) assert.Equal(t, now.UTC(), result)
_, err = common.ParseTime("") _, err = common.ParseTime("")
require.Error(t, err) aw.MustErr(t, err)
_, err = common.ParseTime("flablabls") _, err = common.ParseTime("flablabls")
require.Error(t, err) aw.MustErr(t, err)
} }
func (suite *CommonTimeUnitSuite) TestExtractTime() { func (suite *CommonTimeUnitSuite) TestExtractTime() {
@ -68,14 +68,14 @@ func (suite *CommonTimeUnitSuite) TestExtractTime() {
c, err := common.ParseTime(ts) c, err := common.ParseTime(ts)
require.NoError(t, err) aw.MustNoErr(t, err)
return c return c
} }
parseT := func(v string) time.Time { parseT := func(v string) time.Time {
t, err := time.Parse(time.RFC3339, v) t, err := time.Parse(time.RFC3339, v)
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
return t return t
} }
@ -151,7 +151,7 @@ func (suite *CommonTimeUnitSuite) TestExtractTime() {
t := suite.T() t := suite.T()
result, err := common.ExtractTime(test.input) result, err := common.ExtractTime(test.input)
require.NoError(t, err) aw.MustNoErr(t, err)
assert.Equal(t, test.expect, comparable(t, result, test.clippedFormat)) assert.Equal(t, test.expect, comparable(t, result, test.clippedFormat))
}) })
} }

View File

@ -39,7 +39,7 @@ func (gc *GraphConnector) DataCollections(
sels selectors.Selector, sels selectors.Selector,
metadata []data.RestoreCollection, metadata []data.RestoreCollection,
ctrlOpts control.Options, ctrlOpts control.Options,
errs *fault.Errors, errs *fault.Bus,
) ([]data.BackupCollection, map[string]struct{}, error) { ) ([]data.BackupCollection, map[string]struct{}, error) {
ctx, end := D.Span(ctx, "gc:dataCollections", D.Index("service", sels.Service.String())) ctx, end := D.Span(ctx, "gc:dataCollections", D.Index("service", sels.Service.String()))
defer end() defer end()
@ -91,7 +91,7 @@ func (gc *GraphConnector) DataCollections(
return colls, excludes, nil return colls, excludes, nil
case selectors.ServiceOneDrive: case selectors.ServiceOneDrive:
return gc.OneDriveDataCollections(ctx, sels, metadata, ctrlOpts) return gc.OneDriveDataCollections(ctx, sels, metadata, ctrlOpts, errs)
case selectors.ServiceSharePoint: case selectors.ServiceSharePoint:
colls, excludes, err := sharepoint.DataCollections( colls, excludes, err := sharepoint.DataCollections(
@ -193,6 +193,7 @@ func (gc *GraphConnector) OneDriveDataCollections(
selector selectors.Selector, selector selectors.Selector,
metadata []data.RestoreCollection, metadata []data.RestoreCollection,
ctrlOpts control.Options, ctrlOpts control.Options,
errs *fault.Bus,
) ([]data.BackupCollection, map[string]struct{}, error) { ) ([]data.BackupCollection, map[string]struct{}, error) {
odb, err := selector.ToOneDriveBackup() odb, err := selector.ToOneDriveBackup()
if err != nil { if err != nil {
@ -218,7 +219,7 @@ func (gc *GraphConnector) OneDriveDataCollections(
gc.Service, gc.Service,
gc.UpdateStatus, gc.UpdateStatus,
ctrlOpts, ctrlOpts,
).Get(ctx, metadata) ).Get(ctx, metadata, errs)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }
@ -249,7 +250,7 @@ func (gc *GraphConnector) RestoreDataCollections(
dest control.RestoreDestination, dest control.RestoreDestination,
opts control.Options, opts control.Options,
dcs []data.RestoreCollection, dcs []data.RestoreCollection,
errs *fault.Errors, errs *fault.Bus,
) (*details.Details, error) { ) (*details.Details, error) {
ctx, end := D.Span(ctx, "connector:restore") ctx, end := D.Span(ctx, "connector:restore")
defer end() defer end()
@ -268,7 +269,7 @@ func (gc *GraphConnector) RestoreDataCollections(
case selectors.ServiceExchange: case selectors.ServiceExchange:
status, err = exchange.RestoreExchangeDataCollections(ctx, creds, gc.Service, dest, dcs, deets, errs) status, err = exchange.RestoreExchangeDataCollections(ctx, creds, gc.Service, dest, dcs, deets, errs)
case selectors.ServiceOneDrive: case selectors.ServiceOneDrive:
status, err = onedrive.RestoreCollections(ctx, backupVersion, gc.Service, dest, opts, dcs, deets) status, err = onedrive.RestoreCollections(ctx, backupVersion, gc.Service, dest, opts, dcs, deets, errs)
case selectors.ServiceSharePoint: case selectors.ServiceSharePoint:
status, err = sharepoint.RestoreCollections(ctx, backupVersion, creds, gc.Service, dest, dcs, deets, errs) status, err = sharepoint.RestoreCollections(ctx, backupVersion, creds, gc.Service, dest, dcs, deets, errs)
default: default:

View File

@ -6,13 +6,13 @@ import (
"testing" "testing"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/exchange" "github.com/alcionai/corso/src/internal/connector/exchange"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/sharepoint" "github.com/alcionai/corso/src/internal/connector/sharepoint"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -108,7 +108,7 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestExchangeDataCollection
connector.UpdateStatus, connector.UpdateStatus,
control.Options{}, control.Options{},
fault.New(true)) fault.New(true))
require.NoError(t, err) aw.MustNoErr(t, err)
assert.Empty(t, excludes) assert.Empty(t, excludes)
for range collections { for range collections {
@ -124,12 +124,12 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestExchangeDataCollection
for object := range col.Items(ctx, fault.New(true)) { for object := range col.Items(ctx, fault.New(true)) {
buf := &bytes.Buffer{} buf := &bytes.Buffer{}
_, err := buf.ReadFrom(object.ToReader()) _, err := buf.ReadFrom(object.ToReader())
assert.NoError(t, err, "received a buf.Read error") aw.NoErr(t, err, "received a buf.Read error")
} }
} }
status := connector.AwaitStatus() status := connector.AwaitStatus()
assert.NotZero(t, status.Successful) assert.NotZero(t, status.Metrics.Successes)
t.Log(status.String()) t.Log(status.String())
}) })
} }
@ -208,7 +208,7 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestDataCollections_invali
nil, nil,
control.Options{}, control.Options{},
fault.New(true)) fault.New(true))
assert.Error(t, err) aw.Err(t, err)
assert.Empty(t, collections) assert.Empty(t, collections)
assert.Empty(t, excludes) assert.Empty(t, excludes)
}) })
@ -260,7 +260,7 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestSharePointDataCollecti
connector, connector,
control.Options{}, control.Options{},
fault.New(true)) fault.New(true))
require.NoError(t, err) aw.MustNoErr(t, err)
// Not expecting excludes as this isn't an incremental backup. // Not expecting excludes as this isn't an incremental backup.
assert.Empty(t, excludes) assert.Empty(t, excludes)
@ -276,12 +276,12 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestSharePointDataCollecti
for object := range coll.Items(ctx, fault.New(true)) { for object := range coll.Items(ctx, fault.New(true)) {
buf := &bytes.Buffer{} buf := &bytes.Buffer{}
_, err := buf.ReadFrom(object.ToReader()) _, err := buf.ReadFrom(object.ToReader())
assert.NoError(t, err, "reading item") aw.NoErr(t, err, "reading item")
} }
} }
status := connector.AwaitStatus() status := connector.AwaitStatus()
assert.NotZero(t, status.Successful) assert.NotZero(t, status.Metrics.Successes)
t.Log(status.String()) t.Log(status.String())
}) })
} }
@ -338,7 +338,7 @@ func (suite *ConnectorCreateSharePointCollectionIntegrationSuite) TestCreateShar
nil, nil,
control.Options{}, control.Options{},
fault.New(true)) fault.New(true))
require.NoError(t, err) aw.MustNoErr(t, err)
assert.Len(t, cols, 1) assert.Len(t, cols, 1)
// No excludes yet as this isn't an incremental backup. // No excludes yet as this isn't an incremental backup.
assert.Empty(t, excludes) assert.Empty(t, excludes)
@ -369,7 +369,7 @@ func (suite *ConnectorCreateSharePointCollectionIntegrationSuite) TestCreateShar
nil, nil,
control.Options{}, control.Options{},
fault.New(true)) fault.New(true))
require.NoError(t, err) aw.MustNoErr(t, err)
assert.Less(t, 0, len(cols)) assert.Less(t, 0, len(cols))
// No excludes yet as this isn't an incremental backup. // No excludes yet as this isn't an incremental backup.
assert.Empty(t, excludes) assert.Empty(t, excludes)
@ -381,7 +381,7 @@ func (suite *ConnectorCreateSharePointCollectionIntegrationSuite) TestCreateShar
t.Log("File: " + item.UUID()) t.Log("File: " + item.UUID())
bs, err := io.ReadAll(item.ToReader()) bs, err := io.ReadAll(item.ToReader())
require.NoError(t, err) aw.MustNoErr(t, err)
t.Log(string(bs)) t.Log(string(bs))
} }
} }

View File

@ -10,6 +10,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/graph/betasdk/models" "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
) )
type BetaUnitSuite struct { type BetaUnitSuite struct {
@ -24,14 +25,14 @@ func (suite *BetaUnitSuite) TestBetaService_Adapter() {
t := suite.T() t := suite.T()
a := tester.NewMockM365Account(t) a := tester.NewMockM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) aw.MustNoErr(t, err)
adpt, err := graph.CreateAdapter( adpt, err := graph.CreateAdapter(
m365.AzureTenantID, m365.AzureTenantID,
m365.AzureClientID, m365.AzureClientID,
m365.AzureClientSecret, m365.AzureClientSecret,
) )
require.NoError(t, err) aw.MustNoErr(t, err)
service := NewBetaService(adpt) service := NewBetaService(adpt)
require.NotNil(t, service) require.NotNil(t, service)
@ -45,5 +46,5 @@ func (suite *BetaUnitSuite) TestBetaService_Adapter() {
byteArray, err := service.Serialize(testPage) byteArray, err := service.Serialize(testPage)
assert.NotEmpty(t, byteArray) assert.NotEmpty(t, byteArray)
assert.NoError(t, err) aw.NoErr(t, err)
} }

View File

@ -86,7 +86,7 @@ func userOptions(fs *string) *users.UsersRequestBuilderGetRequestConfiguration {
} }
// GetAll retrieves all users. // GetAll retrieves all users.
func (c Users) GetAll(ctx context.Context, errs *fault.Errors) ([]models.Userable, error) { func (c Users) GetAll(ctx context.Context, errs *fault.Bus) ([]models.Userable, error) {
service, err := c.service() service, err := c.service()
if err != nil { if err != nil {
return nil, err return nil, err
@ -97,7 +97,7 @@ func (c Users) GetAll(ctx context.Context, errs *fault.Errors) ([]models.Userabl
resp, err = service.Client().Users().Get(ctx, userOptions(&userFilterNoGuests)) resp, err = service.Client().Users().Get(ctx, userOptions(&userFilterNoGuests))
if err != nil { if err != nil {
return nil, clues.Wrap(err, "getting all users").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "getting all users")
} }
iter, err := msgraphgocore.NewPageIterator( iter, err := msgraphgocore.NewPageIterator(
@ -105,19 +105,22 @@ func (c Users) GetAll(ctx context.Context, errs *fault.Errors) ([]models.Userabl
service.Adapter(), service.Adapter(),
models.CreateUserCollectionResponseFromDiscriminatorValue) models.CreateUserCollectionResponseFromDiscriminatorValue)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "creating users iterator").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "creating users iterator")
} }
us := make([]models.Userable, 0) var (
us = make([]models.Userable, 0)
el = errs.Local()
)
iterator := func(item any) bool { iterator := func(item any) bool {
if errs.Err() != nil { if el.Failure() != nil {
return false return false
} }
u, err := validateUser(item) u, err := validateUser(item)
if err != nil { if err != nil {
errs.Add(clues.Wrap(err, "validating user").WithClues(ctx).With(graph.ErrData(err)...)) el.AddRecoverable(graph.Wrap(ctx, err, "validating user"))
} else { } else {
us = append(us, u) us = append(us, u)
} }
@ -126,10 +129,10 @@ func (c Users) GetAll(ctx context.Context, errs *fault.Errors) ([]models.Userabl
} }
if err := iter.Iterate(ctx, iterator); err != nil { if err := iter.Iterate(ctx, iterator); err != nil {
return nil, clues.Wrap(err, "iterating all users").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "iterating all users")
} }
return us, errs.Err() return us, el.Failure()
} }
func (c Users) GetByID(ctx context.Context, userID string) (models.Userable, error) { func (c Users) GetByID(ctx context.Context, userID string) (models.Userable, error) {
@ -141,7 +144,7 @@ func (c Users) GetByID(ctx context.Context, userID string) (models.Userable, err
resp, err = c.stable.Client().UsersById(userID).Get(ctx, nil) resp, err = c.stable.Client().UsersById(userID).Get(ctx, nil)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "getting user").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "getting user")
} }
return resp, err return resp, err
@ -160,7 +163,7 @@ func (c Users) GetInfo(ctx context.Context, userID string) (*UserInfo, error) {
if err != nil { if err != nil {
if !graph.IsErrExchangeMailFolderNotFound(err) { if !graph.IsErrExchangeMailFolderNotFound(err) {
return nil, clues.Wrap(err, "getting user's mail folder").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "getting user's mail folder")
} }
delete(userInfo.DiscoveredServices, path.ExchangeService) delete(userInfo.DiscoveredServices, path.ExchangeService)

View File

@ -15,7 +15,7 @@ import (
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
type getAller interface { type getAller interface {
GetAll(context.Context, *fault.Errors) ([]models.Userable, error) GetAll(context.Context, *fault.Bus) ([]models.Userable, error)
} }
type getter interface { type getter interface {
@ -36,7 +36,7 @@ type getWithInfoer interface {
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Users fetches all users in the tenant. // Users fetches all users in the tenant.
func Users(ctx context.Context, ga getAller, errs *fault.Errors) ([]models.Userable, error) { func Users(ctx context.Context, ga getAller, errs *fault.Bus) ([]models.Userable, error) {
return ga.GetAll(ctx, errs) return ga.GetAll(ctx, errs)
} }

View File

@ -5,13 +5,13 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
) )
@ -37,7 +37,7 @@ func (suite *ExchangeServiceSuite) SetupSuite() {
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) aw.MustNoErr(t, err)
suite.credentials = m365 suite.credentials = m365
@ -45,7 +45,7 @@ func (suite *ExchangeServiceSuite) SetupSuite() {
m365.AzureTenantID, m365.AzureTenantID,
m365.AzureClientID, m365.AzureClientID,
m365.AzureClientSecret) m365.AzureClientSecret)
require.NoError(t, err) aw.MustNoErr(t, err)
suite.gs = graph.NewService(adpt) suite.gs = graph.NewService(adpt)
} }
@ -59,22 +59,22 @@ func (suite *ExchangeServiceSuite) TestOptionsForCalendars() {
{ {
name: "Empty Literal", name: "Empty Literal",
params: []string{}, params: []string{},
checkError: assert.NoError, checkError: aw.NoErr,
}, },
{ {
name: "Invalid Parameter", name: "Invalid Parameter",
params: []string{"status"}, params: []string{"status"},
checkError: assert.Error, checkError: aw.Err,
}, },
{ {
name: "Invalid Parameters", name: "Invalid Parameters",
params: []string{"status", "height", "month"}, params: []string{"status", "height", "month"},
checkError: assert.Error, checkError: aw.Err,
}, },
{ {
name: "Valid Parameters", name: "Valid Parameters",
params: []string{"changeKey", "events", "owner"}, params: []string{"changeKey", "events", "owner"},
checkError: assert.NoError, checkError: aw.NoErr,
}, },
} }
for _, test := range tests { for _, test := range tests {
@ -98,19 +98,19 @@ func (suite *ExchangeServiceSuite) TestOptionsForFolders() {
{ {
name: "Valid Folder Option", name: "Valid Folder Option",
params: []string{"parentFolderId"}, params: []string{"parentFolderId"},
checkError: assert.NoError, checkError: aw.NoErr,
expected: 2, expected: 2,
}, },
{ {
name: "Multiple Folder Options: Valid", name: "Multiple Folder Options: Valid",
params: []string{"displayName", "isHidden"}, params: []string{"displayName", "isHidden"},
checkError: assert.NoError, checkError: aw.NoErr,
expected: 3, expected: 3,
}, },
{ {
name: "Invalid Folder option param", name: "Invalid Folder option param",
params: []string{"status"}, params: []string{"status"},
checkError: assert.Error, checkError: aw.Err,
}, },
} }
for _, test := range tests { for _, test := range tests {
@ -135,19 +135,19 @@ func (suite *ExchangeServiceSuite) TestOptionsForContacts() {
{ {
name: "Valid Contact Option", name: "Valid Contact Option",
params: []string{"displayName"}, params: []string{"displayName"},
checkError: assert.NoError, checkError: aw.NoErr,
expected: 2, expected: 2,
}, },
{ {
name: "Multiple Contact Options: Valid", name: "Multiple Contact Options: Valid",
params: []string{"displayName", "parentFolderId"}, params: []string{"displayName", "parentFolderId"},
checkError: assert.NoError, checkError: aw.NoErr,
expected: 3, expected: 3,
}, },
{ {
name: "Invalid Contact Option param", name: "Invalid Contact Option param",
params: []string{"status"}, params: []string{"status"},
checkError: assert.Error, checkError: aw.Err,
}, },
} }
for _, test := range tests { for _, test := range tests {
@ -180,7 +180,7 @@ func (suite *ExchangeServiceSuite) TestHasAttachments() {
"This is testing", "This is testing",
) )
message, err := support.CreateMessageFromBytes(byteArray) message, err := support.CreateMessageFromBytes(byteArray)
require.NoError(t, err) aw.MustNoErr(t, err)
return message.GetBody() return message.GetBody()
}, },
}, },
@ -190,7 +190,7 @@ func (suite *ExchangeServiceSuite) TestHasAttachments() {
getBodyable: func(t *testing.T) models.ItemBodyable { getBodyable: func(t *testing.T) models.ItemBodyable {
byteArray := mockconnector.GetMessageWithOneDriveAttachment("Test legacy") byteArray := mockconnector.GetMessageWithOneDriveAttachment("Test legacy")
message, err := support.CreateMessageFromBytes(byteArray) message, err := support.CreateMessageFromBytes(byteArray)
require.NoError(t, err) aw.MustNoErr(t, err)
return message.GetBody() return message.GetBody()
}, },
}, },

View File

@ -49,7 +49,7 @@ func (c Contacts) CreateContactFolder(
mdl, err := c.stable.Client().UsersById(user).ContactFolders().Post(ctx, requestBody, nil) mdl, err := c.stable.Client().UsersById(user).ContactFolders().Post(ctx, requestBody, nil)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "creating contact folder").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "creating contact folder")
} }
return mdl, nil return mdl, nil
@ -62,7 +62,7 @@ func (c Contacts) DeleteContainer(
) error { ) error {
err := c.stable.Client().UsersById(user).ContactFoldersById(folderID).Delete(ctx, nil) err := c.stable.Client().UsersById(user).ContactFoldersById(folderID).Delete(ctx, nil)
if err != nil { if err != nil {
return clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return graph.Stack(ctx, err)
} }
return nil return nil
@ -72,11 +72,11 @@ func (c Contacts) DeleteContainer(
func (c Contacts) GetItem( func (c Contacts) GetItem(
ctx context.Context, ctx context.Context,
user, itemID string, user, itemID string,
_ *fault.Errors, // no attachments to iterate over, so this goes unused _ *fault.Bus, // no attachments to iterate over, so this goes unused
) (serialization.Parsable, *details.ExchangeInfo, error) { ) (serialization.Parsable, *details.ExchangeInfo, error) {
cont, err := c.stable.Client().UsersById(user).ContactsById(itemID).Get(ctx, nil) cont, err := c.stable.Client().UsersById(user).ContactsById(itemID).Get(ctx, nil)
if err != nil { if err != nil {
return nil, nil, clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return nil, nil, graph.Stack(ctx, err)
} }
return cont, ContactInfo(cont), nil return cont, ContactInfo(cont), nil
@ -88,12 +88,12 @@ func (c Contacts) GetContainerByID(
) (graph.Container, error) { ) (graph.Container, error) {
ofcf, err := optionsForContactFolderByID([]string{"displayName", "parentFolderId"}) ofcf, err := optionsForContactFolderByID([]string{"displayName", "parentFolderId"})
if err != nil { if err != nil {
return nil, clues.Wrap(err, "setting contact folder options").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "setting contact folder options")
} }
resp, err := c.stable.Client().UsersById(userID).ContactFoldersById(dirID).Get(ctx, ofcf) resp, err := c.stable.Client().UsersById(userID).ContactFoldersById(dirID).Get(ctx, ofcf)
if err != nil { if err != nil {
return nil, clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Stack(ctx, err)
} }
return resp, nil return resp, nil
@ -101,29 +101,25 @@ func (c Contacts) GetContainerByID(
// EnumerateContainers iterates through all of the users current // EnumerateContainers iterates through all of the users current
// contacts folders, converting each to a graph.CacheFolder, and calling // contacts folders, converting each to a graph.CacheFolder, and calling
// fn(cf) on each one. If fn(cf) errors, the error is aggregated // fn(cf) on each one.
// into a multierror that gets returned to the caller.
// Folder hierarchy is represented in its current state, and does // Folder hierarchy is represented in its current state, and does
// not contain historical data. // not contain historical data.
func (c Contacts) EnumerateContainers( func (c Contacts) EnumerateContainers(
ctx context.Context, ctx context.Context,
userID, baseDirID string, userID, baseDirID string,
fn func(graph.CacheFolder) error, fn func(graph.CacheFolder) error,
errs *fault.Errors, errs *fault.Bus,
) error { ) error {
service, err := c.service() service, err := c.service()
if err != nil { if err != nil {
return clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return graph.Stack(ctx, err)
} }
fields := []string{"displayName", "parentFolderId"} fields := []string{"displayName", "parentFolderId"}
ofcf, err := optionsForContactChildFolders(fields) ofcf, err := optionsForContactChildFolders(fields)
if err != nil { if err != nil {
return clues.Wrap(err, "setting contact child folder options"). return graph.Wrap(ctx, err, "setting contact child folder options")
WithClues(ctx).
With(graph.ErrData(err)...).
With("options_fields", fields)
} }
builder := service.Client(). builder := service.Client().
@ -134,16 +130,16 @@ func (c Contacts) EnumerateContainers(
for { for {
resp, err := builder.Get(ctx, ofcf) resp, err := builder.Get(ctx, ofcf)
if err != nil { if err != nil {
return clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return graph.Stack(ctx, err)
} }
for _, fold := range resp.GetValue() { for _, fold := range resp.GetValue() {
if errs.Err() != nil { if errs.Failure() != nil {
return errs.Err() return errs.Failure()
} }
if err := checkIDAndName(fold); err != nil { if err := checkIDAndName(fold); err != nil {
errs.Add(clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...)) errs.AddRecoverable(graph.Stack(ctx, err))
continue continue
} }
@ -154,7 +150,7 @@ func (c Contacts) EnumerateContainers(
temp := graph.NewCacheFolder(fold, nil, nil) temp := graph.NewCacheFolder(fold, nil, nil)
if err := fn(temp); err != nil { if err := fn(temp); err != nil {
errs.Add(clues.Stack(err).WithClues(fctx).With(graph.ErrData(err)...)) errs.AddRecoverable(graph.Stack(fctx, err))
continue continue
} }
} }
@ -167,7 +163,7 @@ func (c Contacts) EnumerateContainers(
builder = users.NewItemContactFoldersItemChildFoldersRequestBuilder(link, service.Adapter()) builder = users.NewItemContactFoldersItemChildFoldersRequestBuilder(link, service.Adapter())
} }
return errs.Err() return errs.Failure()
} }
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -185,7 +181,7 @@ type contactPager struct {
func (p *contactPager) getPage(ctx context.Context) (api.DeltaPageLinker, error) { func (p *contactPager) getPage(ctx context.Context) (api.DeltaPageLinker, error) {
resp, err := p.builder.Get(ctx, p.options) resp, err := p.builder.Get(ctx, p.options)
if err != nil { if err != nil {
return nil, clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Stack(ctx, err)
} }
return resp, nil return resp, nil
@ -205,7 +201,7 @@ func (c Contacts) GetAddedAndRemovedItemIDs(
) ([]string, []string, DeltaUpdate, error) { ) ([]string, []string, DeltaUpdate, error) {
service, err := c.service() service, err := c.service()
if err != nil { if err != nil {
return nil, nil, DeltaUpdate{}, clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return nil, nil, DeltaUpdate{}, graph.Stack(ctx, err)
} }
var resetDelta bool var resetDelta bool
@ -220,7 +216,7 @@ func (c Contacts) GetAddedAndRemovedItemIDs(
return nil, return nil,
nil, nil,
DeltaUpdate{}, DeltaUpdate{},
clues.Wrap(err, "setting contact folder options").WithClues(ctx).With(graph.ErrData(err)...) graph.Wrap(ctx, err, "setting contact folder options")
} }
if len(oldDelta) > 0 { if len(oldDelta) > 0 {
@ -238,7 +234,7 @@ func (c Contacts) GetAddedAndRemovedItemIDs(
// only return on error if it is NOT a delta issue. // only return on error if it is NOT a delta issue.
// on bad deltas we retry the call with the regular builder // on bad deltas we retry the call with the regular builder
if !graph.IsErrInvalidDelta(err) { if !graph.IsErrInvalidDelta(err) {
return nil, nil, DeltaUpdate{}, clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return nil, nil, DeltaUpdate{}, graph.Stack(ctx, err)
} }
resetDelta = true resetDelta = true
@ -294,12 +290,12 @@ func (c Contacts) Serialize(
defer writer.Close() defer writer.Close()
if err = writer.WriteObjectValue("", contact); err != nil { if err = writer.WriteObjectValue("", contact); err != nil {
return nil, clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Stack(ctx, err)
} }
bs, err := writer.GetSerializedContent() bs, err := writer.GetSerializedContent()
if err != nil { if err != nil {
return nil, clues.Wrap(err, "serializing contact").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "serializing contact")
} }
return bs, nil return bs, nil

View File

@ -50,7 +50,7 @@ func (c Events) CreateCalendar(
mdl, err := c.stable.Client().UsersById(user).Calendars().Post(ctx, requestbody, nil) mdl, err := c.stable.Client().UsersById(user).Calendars().Post(ctx, requestbody, nil)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "creating calendar").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "creating calendar")
} }
return mdl, nil return mdl, nil
@ -64,7 +64,7 @@ func (c Events) DeleteContainer(
) error { ) error {
err := c.stable.Client().UsersById(user).CalendarsById(calendarID).Delete(ctx, nil) err := c.stable.Client().UsersById(user).CalendarsById(calendarID).Delete(ctx, nil)
if err != nil { if err != nil {
return clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return graph.Stack(ctx, err)
} }
return nil return nil
@ -76,17 +76,17 @@ func (c Events) GetContainerByID(
) (graph.Container, error) { ) (graph.Container, error) {
service, err := c.service() service, err := c.service()
if err != nil { if err != nil {
return nil, clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Stack(ctx, err)
} }
ofc, err := optionsForCalendarsByID([]string{"name", "owner"}) ofc, err := optionsForCalendarsByID([]string{"name", "owner"})
if err != nil { if err != nil {
return nil, clues.Wrap(err, "setting event calendar options").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "setting event calendar options")
} }
cal, err := service.Client().UsersById(userID).CalendarsById(containerID).Get(ctx, ofc) cal, err := service.Client().UsersById(userID).CalendarsById(containerID).Get(ctx, ofc)
if err != nil { if err != nil {
return nil, clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Stack(ctx, err).WithClues(ctx)
} }
return graph.CalendarDisplayable{Calendarable: cal}, nil return graph.CalendarDisplayable{Calendarable: cal}, nil
@ -96,7 +96,7 @@ func (c Events) GetContainerByID(
func (c Events) GetItem( func (c Events) GetItem(
ctx context.Context, ctx context.Context,
user, itemID string, user, itemID string,
errs *fault.Errors, errs *fault.Bus,
) (serialization.Parsable, *details.ExchangeInfo, error) { ) (serialization.Parsable, *details.ExchangeInfo, error) {
var ( var (
err error err error
@ -105,7 +105,7 @@ func (c Events) GetItem(
event, err = c.stable.Client().UsersById(user).EventsById(itemID).Get(ctx, nil) event, err = c.stable.Client().UsersById(user).EventsById(itemID).Get(ctx, nil)
if err != nil { if err != nil {
return nil, nil, clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return nil, nil, graph.Stack(ctx, err)
} }
if *event.GetHasAttachments() || HasAttachments(event.GetBody()) { if *event.GetHasAttachments() || HasAttachments(event.GetBody()) {
@ -122,7 +122,7 @@ func (c Events) GetItem(
Attachments(). Attachments().
Get(ctx, options) Get(ctx, options)
if err != nil { if err != nil {
return nil, nil, clues.Wrap(err, "event attachment download").WithClues(ctx).With(graph.ErrData(err)...) return nil, nil, graph.Wrap(ctx, err, "event attachment download")
} }
event.SetAttachments(attached.GetValue()) event.SetAttachments(attached.GetValue())
@ -133,24 +133,23 @@ func (c Events) GetItem(
// EnumerateContainers iterates through all of the users current // EnumerateContainers iterates through all of the users current
// calendars, converting each to a graph.CacheFolder, and // calendars, converting each to a graph.CacheFolder, and
// calling fn(cf) on each one. If fn(cf) errors, the error is // calling fn(cf) on each one.
// aggregated into a multierror that gets returned to the caller.
// Folder hierarchy is represented in its current state, and does // Folder hierarchy is represented in its current state, and does
// not contain historical data. // not contain historical data.
func (c Events) EnumerateContainers( func (c Events) EnumerateContainers(
ctx context.Context, ctx context.Context,
userID, baseDirID string, userID, baseDirID string,
fn func(graph.CacheFolder) error, fn func(graph.CacheFolder) error,
errs *fault.Errors, errs *fault.Bus,
) error { ) error {
service, err := c.service() service, err := c.service()
if err != nil { if err != nil {
return clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return graph.Stack(ctx, err)
} }
ofc, err := optionsForCalendars([]string{"name"}) ofc, err := optionsForCalendars([]string{"name"})
if err != nil { if err != nil {
return clues.Wrap(err, "setting calendar options").WithClues(ctx).With(graph.ErrData(err)...) return graph.Wrap(ctx, err, "setting calendar options")
} }
builder := service.Client().UsersById(userID).Calendars() builder := service.Client().UsersById(userID).Calendars()
@ -158,13 +157,13 @@ func (c Events) EnumerateContainers(
for { for {
resp, err := builder.Get(ctx, ofc) resp, err := builder.Get(ctx, ofc)
if err != nil { if err != nil {
return clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return graph.Stack(ctx, err)
} }
for _, cal := range resp.GetValue() { for _, cal := range resp.GetValue() {
cd := CalendarDisplayable{Calendarable: cal} cd := CalendarDisplayable{Calendarable: cal}
if err := checkIDAndName(cd); err != nil { if err := checkIDAndName(cd); err != nil {
errs.Add(clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...)) errs.AddRecoverable(graph.Stack(ctx, err))
continue continue
} }
@ -178,7 +177,7 @@ func (c Events) EnumerateContainers(
path.Builder{}.Append(ptr.Val(cd.GetId())), // storage path path.Builder{}.Append(ptr.Val(cd.GetId())), // storage path
path.Builder{}.Append(ptr.Val(cd.GetDisplayName()))) // display location path.Builder{}.Append(ptr.Val(cd.GetDisplayName()))) // display location
if err := fn(temp); err != nil { if err := fn(temp); err != nil {
errs.Add(clues.Stack(err).WithClues(fctx).With(graph.ErrData(err)...)) errs.AddRecoverable(graph.Stack(fctx, err))
continue continue
} }
} }
@ -191,7 +190,7 @@ func (c Events) EnumerateContainers(
builder = users.NewItemCalendarsRequestBuilder(link, service.Adapter()) builder = users.NewItemCalendarsRequestBuilder(link, service.Adapter())
} }
return errs.Err() return errs.Failure()
} }
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -213,7 +212,7 @@ type eventPager struct {
func (p *eventPager) getPage(ctx context.Context) (api.DeltaPageLinker, error) { func (p *eventPager) getPage(ctx context.Context) (api.DeltaPageLinker, error) {
resp, err := p.builder.Get(ctx, p.options) resp, err := p.builder.Get(ctx, p.options)
if err != nil { if err != nil {
return nil, clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Stack(ctx, err)
} }
return resp, nil return resp, nil
@ -256,7 +255,7 @@ func (c Events) GetAddedAndRemovedItemIDs(
// only return on error if it is NOT a delta issue. // only return on error if it is NOT a delta issue.
// on bad deltas we retry the call with the regular builder // on bad deltas we retry the call with the regular builder
if !graph.IsErrInvalidDelta(err) { if !graph.IsErrInvalidDelta(err) {
return nil, nil, DeltaUpdate{}, clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return nil, nil, DeltaUpdate{}, graph.Stack(ctx, err)
} }
resetDelta = true resetDelta = true
@ -322,12 +321,12 @@ func (c Events) Serialize(
defer writer.Close() defer writer.Close()
if err = writer.WriteObjectValue("", event); err != nil { if err = writer.WriteObjectValue("", event); err != nil {
return nil, clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Stack(ctx, err)
} }
bs, err := writer.GetSerializedContent() bs, err := writer.GetSerializedContent()
if err != nil { if err != nil {
return nil, clues.Wrap(err, "serializing event").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "serializing event")
} }
return bs, nil return bs, nil

View File

@ -6,12 +6,12 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
) )
@ -128,7 +128,7 @@ func (suite *EventsAPIUnitSuite) TestEventInfo() {
event, err = support.CreateEventFromBytes(bytes) event, err = support.CreateEventFromBytes(bytes)
) )
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
return event, &details.ExchangeInfo{ return event, &details.ExchangeInfo{
ItemType: details.ExchangeEvent, ItemType: details.ExchangeEvent,

View File

@ -50,7 +50,7 @@ func (c Mail) CreateMailFolder(
mdl, err := c.stable.Client().UsersById(user).MailFolders().Post(ctx, requestBody, nil) mdl, err := c.stable.Client().UsersById(user).MailFolders().Post(ctx, requestBody, nil)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "creating mail folder").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "creating mail folder")
} }
return mdl, nil return mdl, nil
@ -62,7 +62,7 @@ func (c Mail) CreateMailFolderWithParent(
) (models.MailFolderable, error) { ) (models.MailFolderable, error) {
service, err := c.service() service, err := c.service()
if err != nil { if err != nil {
return nil, clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Stack(ctx, err)
} }
isHidden := false isHidden := false
@ -77,7 +77,7 @@ func (c Mail) CreateMailFolderWithParent(
ChildFolders(). ChildFolders().
Post(ctx, requestBody, nil) Post(ctx, requestBody, nil)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "creating nested mail folder").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "creating nested mail folder")
} }
return mdl, nil return mdl, nil
@ -91,7 +91,7 @@ func (c Mail) DeleteContainer(
) error { ) error {
err := c.stable.Client().UsersById(user).MailFoldersById(folderID).Delete(ctx, nil) err := c.stable.Client().UsersById(user).MailFoldersById(folderID).Delete(ctx, nil)
if err != nil { if err != nil {
return clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return graph.Stack(ctx, err)
} }
return nil return nil
@ -103,17 +103,17 @@ func (c Mail) GetContainerByID(
) (graph.Container, error) { ) (graph.Container, error) {
service, err := c.service() service, err := c.service()
if err != nil { if err != nil {
return nil, clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Stack(ctx, err)
} }
ofmf, err := optionsForMailFoldersItem([]string{"displayName", "parentFolderId"}) ofmf, err := optionsForMailFoldersItem([]string{"displayName", "parentFolderId"})
if err != nil { if err != nil {
return nil, clues.Wrap(err, "setting mail folder options").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "setting mail folder options")
} }
resp, err := service.Client().UsersById(userID).MailFoldersById(dirID).Get(ctx, ofmf) resp, err := service.Client().UsersById(userID).MailFoldersById(dirID).Get(ctx, ofmf)
if err != nil { if err != nil {
return nil, clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Stack(ctx, err)
} }
return resp, nil return resp, nil
@ -124,11 +124,11 @@ func (c Mail) GetContainerByID(
func (c Mail) GetItem( func (c Mail) GetItem(
ctx context.Context, ctx context.Context,
user, itemID string, user, itemID string,
errs *fault.Errors, errs *fault.Bus,
) (serialization.Parsable, *details.ExchangeInfo, error) { ) (serialization.Parsable, *details.ExchangeInfo, error) {
mail, err := c.stable.Client().UsersById(user).MessagesById(itemID).Get(ctx, nil) mail, err := c.stable.Client().UsersById(user).MessagesById(itemID).Get(ctx, nil)
if err != nil { if err != nil {
return nil, nil, clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return nil, nil, graph.Stack(ctx, err)
} }
if *mail.GetHasAttachments() || HasAttachments(mail.GetBody()) { if *mail.GetHasAttachments() || HasAttachments(mail.GetBody()) {
@ -145,7 +145,7 @@ func (c Mail) GetItem(
Attachments(). Attachments().
Get(ctx, options) Get(ctx, options)
if err != nil { if err != nil {
return nil, nil, clues.Wrap(err, "mail attachment download").WithClues(ctx).With(graph.ErrData(err)...) return nil, nil, graph.Wrap(ctx, err, "mail attachment download")
} }
mail.SetAttachments(attached.GetValue()) mail.SetAttachments(attached.GetValue())
@ -156,19 +156,18 @@ func (c Mail) GetItem(
// EnumerateContainers iterates through all of the users current // EnumerateContainers iterates through all of the users current
// mail folders, converting each to a graph.CacheFolder, and calling // mail folders, converting each to a graph.CacheFolder, and calling
// fn(cf) on each one. If fn(cf) errors, the error is aggregated // fn(cf) on each one.
// into a multierror that gets returned to the caller.
// Folder hierarchy is represented in its current state, and does // Folder hierarchy is represented in its current state, and does
// not contain historical data. // not contain historical data.
func (c Mail) EnumerateContainers( func (c Mail) EnumerateContainers(
ctx context.Context, ctx context.Context,
userID, baseDirID string, userID, baseDirID string,
fn func(graph.CacheFolder) error, fn func(graph.CacheFolder) error,
errs *fault.Errors, errs *fault.Bus,
) error { ) error {
service, err := c.service() service, err := c.service()
if err != nil { if err != nil {
return clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return graph.Stack(ctx, err)
} }
builder := service.Client(). builder := service.Client().
@ -179,7 +178,7 @@ func (c Mail) EnumerateContainers(
for { for {
resp, err := builder.Get(ctx, nil) resp, err := builder.Get(ctx, nil)
if err != nil { if err != nil {
return clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return graph.Stack(ctx, err)
} }
for _, v := range resp.GetValue() { for _, v := range resp.GetValue() {
@ -190,7 +189,7 @@ func (c Mail) EnumerateContainers(
temp := graph.NewCacheFolder(v, nil, nil) temp := graph.NewCacheFolder(v, nil, nil)
if err := fn(temp); err != nil { if err := fn(temp); err != nil {
errs.Add(clues.Stack(err).WithClues(fctx).With(graph.ErrData(err)...)) errs.AddRecoverable(graph.Stack(fctx, err))
continue continue
} }
} }
@ -203,7 +202,7 @@ func (c Mail) EnumerateContainers(
builder = users.NewItemMailFoldersDeltaRequestBuilder(link, service.Adapter()) builder = users.NewItemMailFoldersDeltaRequestBuilder(link, service.Adapter())
} }
return errs.Err() return errs.Failure()
} }
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -221,7 +220,7 @@ type mailPager struct {
func (p *mailPager) getPage(ctx context.Context) (api.DeltaPageLinker, error) { func (p *mailPager) getPage(ctx context.Context) (api.DeltaPageLinker, error) {
page, err := p.builder.Get(ctx, p.options) page, err := p.builder.Get(ctx, p.options)
if err != nil { if err != nil {
return nil, clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Stack(ctx, err)
} }
return page, nil return page, nil
@ -259,7 +258,7 @@ func (c Mail) GetAddedAndRemovedItemIDs(
return nil, return nil,
nil, nil,
DeltaUpdate{}, DeltaUpdate{},
clues.Wrap(err, "setting contact folder options").WithClues(ctx).With(graph.ErrData(err)...) graph.Wrap(ctx, err, "setting contact folder options")
} }
if len(oldDelta) > 0 { if len(oldDelta) > 0 {
@ -332,12 +331,12 @@ func (c Mail) Serialize(
defer writer.Close() defer writer.Close()
if err = writer.WriteObjectValue("", msg); err != nil { if err = writer.WriteObjectValue("", msg); err != nil {
return nil, clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Stack(ctx, err)
} }
bs, err := writer.GetSerializedContent() bs, err := writer.GetSerializedContent()
if err != nil { if err != nil {
return nil, clues.Wrap(err, "serializing email").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "serializing email")
} }
return bs, nil return bs, nil

View File

@ -74,14 +74,14 @@ func getItemsAddedAndRemovedFromContainer(
// get the next page of data, check for standard errors // get the next page of data, check for standard errors
resp, err := pager.getPage(ctx) resp, err := pager.getPage(ctx)
if err != nil { if err != nil {
return nil, nil, deltaURL, clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return nil, nil, deltaURL, graph.Stack(ctx, err)
} }
// each category type responds with a different interface, but all // each category type responds with a different interface, but all
// of them comply with GetValue, which is where we'll get our item data. // of them comply with GetValue, which is where we'll get our item data.
items, err := pager.valuesIn(resp) items, err := pager.valuesIn(resp)
if err != nil { if err != nil {
return nil, nil, "", clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return nil, nil, "", graph.Stack(ctx, err)
} }
itemCount += len(items) itemCount += len(items)

View File

@ -69,9 +69,7 @@ func uploadAttachment(
if attachmentType == models.ITEM_ATTACHMENTTYPE { if attachmentType == models.ITEM_ATTACHMENTTYPE {
a, err := support.ToItemAttachment(attachment) a, err := support.ToItemAttachment(attachment)
if err != nil { if err != nil {
logger.Ctx(ctx). logger.CtxErr(ctx, err).Info("item attachment restore not supported for this type. skipping upload.")
With("err", err).
Infow("item attachment restore not supported for this type. skipping upload.", clues.InErr(err).Slice()...)
return nil return nil
} }

View File

@ -3,7 +3,6 @@ package exchange
import ( import (
"context" "context"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
msusers "github.com/microsoftgraph/msgraph-sdk-go/users" msusers "github.com/microsoftgraph/msgraph-sdk-go/users"
@ -44,7 +43,7 @@ func (mau *mailAttachmentUploader) uploadSmallAttachment(ctx context.Context, at
Attachments(). Attachments().
Post(ctx, attach, nil) Post(ctx, attach, nil)
if err != nil { if err != nil {
return clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return graph.Stack(ctx, err)
} }
return nil return nil
@ -68,7 +67,7 @@ func (mau *mailAttachmentUploader) uploadSession(
CreateUploadSession(). CreateUploadSession().
Post(ctx, session, nil) Post(ctx, session, nil)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "uploading mail attachment").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "uploading mail attachment")
} }
return r, nil return r, nil
@ -94,7 +93,7 @@ func (eau *eventAttachmentUploader) uploadSmallAttachment(ctx context.Context, a
Attachments(). Attachments().
Post(ctx, attach, nil) Post(ctx, attach, nil)
if err != nil { if err != nil {
return clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return graph.Stack(ctx, err)
} }
return nil return nil
@ -116,7 +115,7 @@ func (eau *eventAttachmentUploader) uploadSession(
CreateUploadSession(). CreateUploadSession().
Post(ctx, session, nil) Post(ctx, session, nil)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "uploading event attachment").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "uploading event attachment")
} }
return r, nil return r, nil

View File

@ -47,7 +47,7 @@ func (cfc *contactFolderCache) populateContactRoot(
// as of (Oct-07-2022) // as of (Oct-07-2022)
func (cfc *contactFolderCache) Populate( func (cfc *contactFolderCache) Populate(
ctx context.Context, ctx context.Context,
errs *fault.Errors, errs *fault.Bus,
baseID string, baseID string,
baseContainerPather ...string, baseContainerPather ...string,
) error { ) error {

View File

@ -28,7 +28,7 @@ type containersEnumerator interface {
ctx context.Context, ctx context.Context,
userID, baseDirID string, userID, baseDirID string,
fn func(graph.CacheFolder) error, fn func(graph.CacheFolder) error,
errs *fault.Errors, errs *fault.Bus,
) error ) error
} }

View File

@ -12,6 +12,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -78,7 +79,7 @@ var (
displayName: &testName, displayName: &testName,
parentID: &testParentID, parentID: &testParentID,
}, },
check: assert.Error, check: aw.Err,
}, },
{ {
name: "NilDisplayName", name: "NilDisplayName",
@ -87,7 +88,7 @@ var (
displayName: nil, displayName: nil,
parentID: &testParentID, parentID: &testParentID,
}, },
check: assert.Error, check: aw.Err,
}, },
{ {
name: "EmptyID", name: "EmptyID",
@ -96,7 +97,7 @@ var (
displayName: &testName, displayName: &testName,
parentID: &testParentID, parentID: &testParentID,
}, },
check: assert.NoError, check: aw.NoErr,
}, },
{ {
name: "EmptyDisplayName", name: "EmptyDisplayName",
@ -105,7 +106,7 @@ var (
displayName: &emptyString, displayName: &emptyString,
parentID: &testParentID, parentID: &testParentID,
}, },
check: assert.NoError, check: aw.NoErr,
}, },
{ {
name: "AllValues", name: "AllValues",
@ -114,7 +115,7 @@ var (
displayName: &testName, displayName: &testName,
parentID: &testParentID, parentID: &testParentID,
}, },
check: assert.NoError, check: aw.NoErr,
}, },
} }
) )
@ -136,7 +137,7 @@ func (suite *FolderCacheUnitSuite) TestCheckRequiredValues() {
displayName: &testName, displayName: &testName,
parentID: nil, parentID: nil,
}, },
check: assert.Error, check: aw.Err,
}, },
{ {
name: "EmptyParentFolderID", name: "EmptyParentFolderID",
@ -145,7 +146,7 @@ func (suite *FolderCacheUnitSuite) TestCheckRequiredValues() {
displayName: &testName, displayName: &testName,
parentID: &emptyString, parentID: &emptyString,
}, },
check: assert.NoError, check: aw.NoErr,
}, },
} }
@ -174,7 +175,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
}, },
nil, nil,
nil), nil),
check: assert.Error, check: aw.Err,
}, },
{ {
name: "NoParentPath", name: "NoParentPath",
@ -186,7 +187,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
}, },
path.Builder{}.Append("foo"), path.Builder{}.Append("foo"),
path.Builder{}.Append("loc")), path.Builder{}.Append("loc")),
check: assert.NoError, check: aw.NoErr,
}, },
{ {
name: "NoName", name: "NoName",
@ -198,7 +199,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
}, },
path.Builder{}.Append("foo"), path.Builder{}.Append("foo"),
path.Builder{}.Append("loc")), path.Builder{}.Append("loc")),
check: assert.Error, check: aw.Err,
}, },
{ {
name: "NoID", name: "NoID",
@ -210,7 +211,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
}, },
path.Builder{}.Append("foo"), path.Builder{}.Append("foo"),
path.Builder{}.Append("loc")), path.Builder{}.Append("loc")),
check: assert.Error, check: aw.Err,
}, },
{ {
name: "NoPath", name: "NoPath",
@ -222,7 +223,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
}, },
nil, nil,
nil), nil),
check: assert.NoError, check: aw.NoErr,
}, },
} }
@ -341,12 +342,12 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestDepthLimit() {
{ {
name: "AtLimit", name: "AtLimit",
numContainers: maxIterations, numContainers: maxIterations,
check: assert.NoError, check: aw.NoErr,
}, },
{ {
name: "OverLimit", name: "OverLimit",
numContainers: maxIterations + 1, numContainers: maxIterations + 1,
check: assert.Error, check: aw.Err,
}, },
} }
@ -365,7 +366,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestPopulatePaths() {
t := suite.T() t := suite.T()
require.NoError(t, suite.fc.populatePaths(ctx, false)) aw.MustNoErr(t, suite.fc.populatePaths(ctx, false))
items := suite.fc.Items() items := suite.fc.Items()
gotPaths := make([]string, 0, len(items)) gotPaths := make([]string, 0, len(items))
@ -389,7 +390,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderNoPathsCached
for _, c := range suite.allContainers { for _, c := range suite.allContainers {
suite.T().Run(*c.GetDisplayName(), func(t *testing.T) { suite.T().Run(*c.GetDisplayName(), func(t *testing.T) {
p, l, err := suite.fc.IDToPath(ctx, c.id, false) p, l, err := suite.fc.IDToPath(ctx, c.id, false)
require.NoError(t, err) aw.MustNoErr(t, err)
assert.Equal(t, c.expectedPath, p.String()) assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String()) assert.Equal(t, c.expectedLocation, l.String())
}) })
@ -403,7 +404,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderNoPathsCached
for _, c := range suite.containersWithID { for _, c := range suite.containersWithID {
suite.T().Run(*c.GetDisplayName(), func(t *testing.T) { suite.T().Run(*c.GetDisplayName(), func(t *testing.T) {
p, l, err := suite.fcWithID.IDToPath(ctx, c.id, true) p, l, err := suite.fcWithID.IDToPath(ctx, c.id, true)
require.NoError(t, err) aw.MustNoErr(t, err)
assert.Equal(t, c.expectedPath, p.String()) assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String()) assert.Equal(t, c.expectedLocation, l.String())
}) })
@ -418,14 +419,14 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderCachesPaths()
c := suite.allContainers[len(suite.allContainers)-1] c := suite.allContainers[len(suite.allContainers)-1]
p, l, err := suite.fc.IDToPath(ctx, c.id, false) p, l, err := suite.fc.IDToPath(ctx, c.id, false)
require.NoError(t, err) aw.MustNoErr(t, err)
assert.Equal(t, c.expectedPath, p.String()) assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String()) assert.Equal(t, c.expectedLocation, l.String())
c.parentID = "foo" c.parentID = "foo"
p, l, err = suite.fc.IDToPath(ctx, c.id, false) p, l, err = suite.fc.IDToPath(ctx, c.id, false)
require.NoError(t, err) aw.MustNoErr(t, err)
assert.Equal(t, c.expectedPath, p.String()) assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String()) assert.Equal(t, c.expectedLocation, l.String())
} }
@ -438,14 +439,14 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderCachesPaths_u
c := suite.containersWithID[len(suite.containersWithID)-1] c := suite.containersWithID[len(suite.containersWithID)-1]
p, l, err := suite.fcWithID.IDToPath(ctx, c.id, true) p, l, err := suite.fcWithID.IDToPath(ctx, c.id, true)
require.NoError(t, err) aw.MustNoErr(t, err)
assert.Equal(t, c.expectedPath, p.String()) assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String()) assert.Equal(t, c.expectedLocation, l.String())
c.parentID = "foo" c.parentID = "foo"
p, l, err = suite.fcWithID.IDToPath(ctx, c.id, true) p, l, err = suite.fcWithID.IDToPath(ctx, c.id, true)
require.NoError(t, err) aw.MustNoErr(t, err)
assert.Equal(t, c.expectedPath, p.String()) assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String()) assert.Equal(t, c.expectedLocation, l.String())
} }
@ -461,7 +462,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsParentN
delete(suite.fc.cache, almostLast.id) delete(suite.fc.cache, almostLast.id)
_, _, err := suite.fc.IDToPath(ctx, last.id, false) _, _, err := suite.fc.IDToPath(ctx, last.id, false)
assert.Error(t, err) aw.Err(t, err)
} }
func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsNotFound() { func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsNotFound() {
@ -471,7 +472,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsNotFoun
t := suite.T() t := suite.T()
_, _, err := suite.fc.IDToPath(ctx, "foo", false) _, _, err := suite.fc.IDToPath(ctx, "foo", false)
assert.Error(t, err) aw.Err(t, err)
} }
func (suite *ConfiguredFolderCacheUnitSuite) TestAddToCache() { func (suite *ConfiguredFolderCacheUnitSuite) TestAddToCache() {
@ -490,12 +491,12 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestAddToCache() {
m.expectedLocation = stdpath.Join(last.expectedPath, m.displayName) m.expectedLocation = stdpath.Join(last.expectedPath, m.displayName)
require.Empty(t, suite.fc.DestinationNameToID(dest), "destination not yet added to cache") require.Empty(t, suite.fc.DestinationNameToID(dest), "destination not yet added to cache")
require.NoError(t, suite.fc.AddToCache(ctx, m, false)) aw.MustNoErr(t, suite.fc.AddToCache(ctx, m, false))
require.Empty(t, suite.fc.DestinationNameToID(dest), require.Empty(t, suite.fc.DestinationNameToID(dest),
"destination id from cache, still empty, because this is not a calendar") "destination id from cache, still empty, because this is not a calendar")
p, l, err := suite.fc.IDToPath(ctx, m.id, false) p, l, err := suite.fc.IDToPath(ctx, m.id, false)
require.NoError(t, err) aw.MustNoErr(t, err)
assert.Equal(t, m.expectedPath, p.String()) assert.Equal(t, m.expectedPath, p.String())
assert.Equal(t, m.expectedLocation, l.String()) assert.Equal(t, m.expectedLocation, l.String())
} }
@ -525,7 +526,7 @@ func (suite *FolderCacheIntegrationSuite) SetupSuite() {
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) aw.MustNoErr(t, err)
suite.credentials = m365 suite.credentials = m365
@ -533,11 +534,11 @@ func (suite *FolderCacheIntegrationSuite) SetupSuite() {
m365.AzureTenantID, m365.AzureTenantID,
m365.AzureClientID, m365.AzureClientID,
m365.AzureClientSecret) m365.AzureClientSecret)
require.NoError(t, err) aw.MustNoErr(t, err)
suite.gs = graph.NewService(adpt) suite.gs = graph.NewService(adpt)
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
} }
// Testing to ensure that cache system works for in multiple different environments // Testing to ensure that cache system works for in multiple different environments
@ -547,7 +548,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
a := tester.NewM365Account(suite.T()) a := tester.NewM365Account(suite.T())
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
var ( var (
user = tester.M365UserID(suite.T()) user = tester.M365UserID(suite.T())
@ -573,7 +574,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
user, user,
path.EmailCategory, path.EmailCategory,
false) false)
require.NoError(t, err) aw.MustNoErr(t, err)
return pth return pth
}, },
@ -586,7 +587,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
user, user,
path.EmailCategory, path.EmailCategory,
false) false)
require.NoError(t, err) aw.MustNoErr(t, err)
return pth return pth
}, },
@ -602,7 +603,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
user, user,
path.ContactsCategory, path.ContactsCategory,
false) false)
require.NoError(t, err) aw.MustNoErr(t, err)
return aPath return aPath
}, },
@ -614,7 +615,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
user, user,
path.ContactsCategory, path.ContactsCategory,
false) false)
require.NoError(t, err) aw.MustNoErr(t, err)
return aPath return aPath
}, },
@ -631,7 +632,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
user, user,
path.EventsCategory, path.EventsCategory,
false) false)
require.NoError(t, err) aw.MustNoErr(t, err)
return aPath return aPath
}, },
@ -643,7 +644,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
user, user,
path.EventsCategory, path.EventsCategory,
false) false)
require.NoError(t, err) aw.MustNoErr(t, err)
return aPath return aPath
}, },
@ -660,12 +661,12 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
folderName, folderName,
directoryCaches, directoryCaches,
fault.New(true)) fault.New(true))
require.NoError(t, err) aw.MustNoErr(t, err)
resolver := directoryCaches[test.category] resolver := directoryCaches[test.category]
_, _, err = resolver.IDToPath(ctx, folderID, test.useIDForPath) _, _, err = resolver.IDToPath(ctx, folderID, test.useIDForPath)
assert.NoError(t, err) aw.NoErr(t, err)
parentContainer := folderName parentContainer := folderName
if test.useIDForPath { if test.useIDForPath {
@ -679,10 +680,10 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
parentContainer, parentContainer,
directoryCaches, directoryCaches,
fault.New(true)) fault.New(true))
require.NoError(t, err) aw.MustNoErr(t, err)
_, _, err = resolver.IDToPath(ctx, secondID, test.useIDForPath) _, _, err = resolver.IDToPath(ctx, secondID, test.useIDForPath)
require.NoError(t, err) aw.MustNoErr(t, err)
p := stdpath.Join(test.folderPrefix, parentContainer) p := stdpath.Join(test.folderPrefix, parentContainer)
_, ok := resolver.PathInCache(p) _, ok := resolver.PathInCache(p)

View File

@ -64,7 +64,7 @@ type DeltaPath struct {
func parseMetadataCollections( func parseMetadataCollections(
ctx context.Context, ctx context.Context,
colls []data.RestoreCollection, colls []data.RestoreCollection,
errs *fault.Errors, errs *fault.Bus,
) (CatDeltaPaths, error) { ) (CatDeltaPaths, error) {
// cdp stores metadata // cdp stores metadata
cdp := CatDeltaPaths{ cdp := CatDeltaPaths{
@ -168,7 +168,7 @@ func DataCollections(
acct account.M365Config, acct account.M365Config,
su support.StatusUpdater, su support.StatusUpdater,
ctrlOpts control.Options, ctrlOpts control.Options,
errs *fault.Errors, errs *fault.Bus,
) ([]data.BackupCollection, map[string]struct{}, error) { ) ([]data.BackupCollection, map[string]struct{}, error) {
eb, err := selector.ToExchangeBackup() eb, err := selector.ToExchangeBackup()
if err != nil { if err != nil {
@ -178,6 +178,7 @@ func DataCollections(
var ( var (
user = selector.DiscreteOwner user = selector.DiscreteOwner
collections = []data.BackupCollection{} collections = []data.BackupCollection{}
el = errs.Local()
) )
cdps, err := parseMetadataCollections(ctx, metadata, errs) cdps, err := parseMetadataCollections(ctx, metadata, errs)
@ -186,7 +187,7 @@ func DataCollections(
} }
for _, scope := range eb.Scopes() { for _, scope := range eb.Scopes() {
if errs.Err() != nil { if el.Failure() != nil {
break break
} }
@ -200,14 +201,14 @@ func DataCollections(
su, su,
errs) errs)
if err != nil { if err != nil {
errs.Add(err) el.AddRecoverable(err)
continue continue
} }
collections = append(collections, dcs...) collections = append(collections, dcs...)
} }
return collections, nil, errs.Err() return collections, nil, el.Failure()
} }
func getterByType(ac api.Client, category path.CategoryType) (addedAndRemovedItemIDsGetter, error) { func getterByType(ac api.Client, category path.CategoryType) (addedAndRemovedItemIDsGetter, error) {
@ -234,7 +235,7 @@ func createCollections(
dps DeltaPaths, dps DeltaPaths,
ctrlOpts control.Options, ctrlOpts control.Options,
su support.StatusUpdater, su support.StatusUpdater,
errs *fault.Errors, errs *fault.Bus,
) ([]data.BackupCollection, error) { ) ([]data.BackupCollection, error) {
var ( var (
allCollections = make([]data.BackupCollection, 0) allCollections = make([]data.BackupCollection, 0)

View File

@ -14,6 +14,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -50,7 +51,7 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
{graph.DeltaURLsFileName, "delta-link"}, {graph.DeltaURLsFileName, "delta-link"},
}, },
expect: map[string]DeltaPath{}, expect: map[string]DeltaPath{},
expectError: assert.NoError, expectError: aw.NoErr,
}, },
{ {
name: "multiple delta urls", name: "multiple delta urls",
@ -58,7 +59,7 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
{graph.DeltaURLsFileName, "delta-link"}, {graph.DeltaURLsFileName, "delta-link"},
{graph.DeltaURLsFileName, "delta-link-2"}, {graph.DeltaURLsFileName, "delta-link-2"},
}, },
expectError: assert.Error, expectError: aw.Err,
}, },
{ {
name: "previous path only", name: "previous path only",
@ -66,7 +67,7 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
{graph.PreviousPathFileName, "prev-path"}, {graph.PreviousPathFileName, "prev-path"},
}, },
expect: map[string]DeltaPath{}, expect: map[string]DeltaPath{},
expectError: assert.NoError, expectError: aw.NoErr,
}, },
{ {
name: "multiple previous paths", name: "multiple previous paths",
@ -74,7 +75,7 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
{graph.PreviousPathFileName, "prev-path"}, {graph.PreviousPathFileName, "prev-path"},
{graph.PreviousPathFileName, "prev-path-2"}, {graph.PreviousPathFileName, "prev-path-2"},
}, },
expectError: assert.Error, expectError: aw.Err,
}, },
{ {
name: "delta urls and previous paths", name: "delta urls and previous paths",
@ -88,7 +89,7 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
path: "prev-path", path: "prev-path",
}, },
}, },
expectError: assert.NoError, expectError: aw.NoErr,
}, },
{ {
name: "delta urls and empty previous paths", name: "delta urls and empty previous paths",
@ -97,7 +98,7 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
{graph.PreviousPathFileName, ""}, {graph.PreviousPathFileName, ""},
}, },
expect: map[string]DeltaPath{}, expect: map[string]DeltaPath{},
expectError: assert.NoError, expectError: aw.NoErr,
}, },
{ {
name: "empty delta urls and previous paths", name: "empty delta urls and previous paths",
@ -106,7 +107,7 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
{graph.PreviousPathFileName, "prev-path"}, {graph.PreviousPathFileName, "prev-path"},
}, },
expect: map[string]DeltaPath{}, expect: map[string]DeltaPath{},
expectError: assert.NoError, expectError: aw.NoErr,
}, },
{ {
name: "delta urls with special chars", name: "delta urls with special chars",
@ -120,7 +121,7 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
path: "prev-path", path: "prev-path",
}, },
}, },
expectError: assert.NoError, expectError: aw.NoErr,
}, },
{ {
name: "delta urls with escaped chars", name: "delta urls with escaped chars",
@ -134,7 +135,7 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
path: "prev-path", path: "prev-path",
}, },
}, },
expectError: assert.NoError, expectError: aw.NoErr,
}, },
{ {
name: "delta urls with newline char runes", name: "delta urls with newline char runes",
@ -151,7 +152,7 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
path: "prev-path", path: "prev-path",
}, },
}, },
expectError: assert.NoError, expectError: aw.NoErr,
}, },
} }
for _, test := range table { for _, test := range table {
@ -174,7 +175,7 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
entries, entries,
func(cos *support.ConnectorOperationStatus) {}, func(cos *support.ConnectorOperationStatus) {},
) )
require.NoError(t, err) aw.MustNoErr(t, err)
cdps, err := parseMetadataCollections(ctx, []data.RestoreCollection{ cdps, err := parseMetadataCollections(ctx, []data.RestoreCollection{
data.NotFoundRestoreCollection{Collection: coll}, data.NotFoundRestoreCollection{Collection: coll},
@ -200,7 +201,6 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
func newStatusUpdater(t *testing.T, wg *sync.WaitGroup) func(status *support.ConnectorOperationStatus) { func newStatusUpdater(t *testing.T, wg *sync.WaitGroup) func(status *support.ConnectorOperationStatus) {
updater := func(status *support.ConnectorOperationStatus) { updater := func(status *support.ConnectorOperationStatus) {
defer wg.Done() defer wg.Done()
assert.Zero(t, status.ErrorCount)
} }
return updater return updater
@ -240,7 +240,7 @@ func (suite *DataCollectionsIntegrationSuite) TestMailFetch() {
acct, err = tester.NewM365Account(suite.T()).M365Config() acct, err = tester.NewM365Account(suite.T()).M365Config()
) )
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
tests := []struct { tests := []struct {
name string name string
@ -270,7 +270,7 @@ func (suite *DataCollectionsIntegrationSuite) TestMailFetch() {
control.Options{}, control.Options{},
func(status *support.ConnectorOperationStatus) {}, func(status *support.ConnectorOperationStatus) {},
fault.New(true)) fault.New(true))
require.NoError(t, err) aw.MustNoErr(t, err)
for _, c := range collections { for _, c := range collections {
if c.FullPath().Service() == path.ExchangeMetadataService { if c.FullPath().Service() == path.ExchangeMetadataService {
@ -298,7 +298,7 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
acct, err = tester.NewM365Account(suite.T()).M365Config() acct, err = tester.NewM365Account(suite.T()).M365Config()
) )
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
tests := []struct { tests := []struct {
name string name string
@ -338,7 +338,7 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
control.Options{}, control.Options{},
func(status *support.ConnectorOperationStatus) {}, func(status *support.ConnectorOperationStatus) {},
fault.New(true)) fault.New(true))
require.NoError(t, err) aw.MustNoErr(t, err)
assert.Less(t, 1, len(collections), "retrieved metadata and data collections") assert.Less(t, 1, len(collections), "retrieved metadata and data collections")
var metadata data.BackupCollection var metadata data.BackupCollection
@ -354,7 +354,7 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
cdps, err := parseMetadataCollections(ctx, []data.RestoreCollection{ cdps, err := parseMetadataCollections(ctx, []data.RestoreCollection{
data.NotFoundRestoreCollection{Collection: metadata}, data.NotFoundRestoreCollection{Collection: metadata},
}, fault.New(true)) }, fault.New(true))
require.NoError(t, err) aw.MustNoErr(t, err)
dps := cdps[test.scope.Category().PathType()] dps := cdps[test.scope.Category().PathType()]
@ -369,7 +369,7 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
control.Options{}, control.Options{},
func(status *support.ConnectorOperationStatus) {}, func(status *support.ConnectorOperationStatus) {},
fault.New(true)) fault.New(true))
require.NoError(t, err) aw.MustNoErr(t, err)
// TODO(keepers): this isn't a very useful test at the moment. It needs to // TODO(keepers): this isn't a very useful test at the moment. It needs to
// investigate the items in the original and delta collections to at least // investigate the items in the original and delta collections to at least
@ -401,7 +401,7 @@ func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression()
) )
acct, err := tester.NewM365Account(t).M365Config() acct, err := tester.NewM365Account(t).M365Config()
require.NoError(t, err) aw.MustNoErr(t, err)
sel := selectors.NewExchangeBackup(users) sel := selectors.NewExchangeBackup(users)
sel.Include(sel.MailFolders([]string{DefaultMailFolder}, selectors.PrefixMatch())) sel.Include(sel.MailFolders([]string{DefaultMailFolder}, selectors.PrefixMatch()))
@ -415,7 +415,7 @@ func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression()
control.Options{}, control.Options{},
newStatusUpdater(t, &wg), newStatusUpdater(t, &wg),
fault.New(true)) fault.New(true))
require.NoError(t, err) aw.MustNoErr(t, err)
wg.Add(len(collections)) wg.Add(len(collections))
@ -429,7 +429,7 @@ func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression()
buf := &bytes.Buffer{} buf := &bytes.Buffer{}
read, err := buf.ReadFrom(stream.ToReader()) read, err := buf.ReadFrom(stream.ToReader())
assert.NoError(t, err) aw.NoErr(t, err)
assert.NotZero(t, read) assert.NotZero(t, read)
if isMetadata { if isMetadata {
@ -438,7 +438,7 @@ func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression()
message, err := support.CreateMessageFromBytes(buf.Bytes()) message, err := support.CreateMessageFromBytes(buf.Bytes())
assert.NotNil(t, message) assert.NotNil(t, message)
assert.NoError(t, err) aw.NoErr(t, err)
} }
}) })
} }
@ -454,7 +454,7 @@ func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression
defer flush() defer flush()
acct, err := tester.NewM365Account(suite.T()).M365Config() acct, err := tester.NewM365Account(suite.T()).M365Config()
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
users := []string{suite.user} users := []string{suite.user}
@ -483,7 +483,7 @@ func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression
control.Options{}, control.Options{},
newStatusUpdater(t, &wg), newStatusUpdater(t, &wg),
fault.New(true)) fault.New(true))
require.NoError(t, err) aw.MustNoErr(t, err)
wg.Add(len(edcs)) wg.Add(len(edcs))
@ -497,7 +497,7 @@ func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression
for stream := range edc.Items(ctx, fault.New(true)) { for stream := range edc.Items(ctx, fault.New(true)) {
buf := &bytes.Buffer{} buf := &bytes.Buffer{}
read, err := buf.ReadFrom(stream.ToReader()) read, err := buf.ReadFrom(stream.ToReader())
assert.NoError(t, err) aw.NoErr(t, err)
assert.NotZero(t, read) assert.NotZero(t, read)
if isMetadata { if isMetadata {
@ -506,7 +506,7 @@ func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression
contact, err := support.CreateContactFromBytes(buf.Bytes()) contact, err := support.CreateContactFromBytes(buf.Bytes())
assert.NotNil(t, contact) assert.NotNil(t, contact)
assert.NoError(t, err, "error on converting contact bytes: "+buf.String()) aw.NoErr(t, err, "error on converting contact bytes: "+buf.String())
count++ count++
} }
@ -530,12 +530,12 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
defer flush() defer flush()
acct, err := tester.NewM365Account(suite.T()).M365Config() acct, err := tester.NewM365Account(suite.T()).M365Config()
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
users := []string{suite.user} users := []string{suite.user}
ac, err := api.NewClient(acct) ac, err := api.NewClient(acct)
require.NoError(suite.T(), err, "creating client") aw.MustNoErr(suite.T(), err, "creating client")
var ( var (
calID string calID string
@ -554,7 +554,7 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
return nil return nil
} }
require.NoError(suite.T(), ac.Events().EnumerateContainers(ctx, suite.user, DefaultCalendar, fn, fault.New(true))) aw.MustNoErr(suite.T(), ac.Events().EnumerateContainers(ctx, suite.user, DefaultCalendar, fn, fault.New(true)))
tests := []struct { tests := []struct {
name, expected string name, expected string
@ -591,7 +591,7 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
control.Options{}, control.Options{},
newStatusUpdater(t, &wg), newStatusUpdater(t, &wg),
fault.New(true)) fault.New(true))
require.NoError(t, err) aw.MustNoErr(t, err)
require.Len(t, collections, 2) require.Len(t, collections, 2)
wg.Add(len(collections)) wg.Add(len(collections))
@ -610,7 +610,7 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
buf := &bytes.Buffer{} buf := &bytes.Buffer{}
read, err := buf.ReadFrom(item.ToReader()) read, err := buf.ReadFrom(item.ToReader())
assert.NoError(t, err) aw.NoErr(t, err)
assert.NotZero(t, read) assert.NotZero(t, read)
if isMetadata { if isMetadata {
@ -619,7 +619,7 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
event, err := support.CreateEventFromBytes(buf.Bytes()) event, err := support.CreateEventFromBytes(buf.Bytes())
assert.NotNil(t, event) assert.NotNil(t, event)
assert.NoError(t, err, "creating event from bytes: "+buf.String()) aw.NoErr(t, err, "creating event from bytes: "+buf.String())
} }
} }

View File

@ -62,7 +62,7 @@ func (ecc *eventCalendarCache) populateEventRoot(ctx context.Context) error {
// @param baseID: ignored. Present to conform to interface // @param baseID: ignored. Present to conform to interface
func (ecc *eventCalendarCache) Populate( func (ecc *eventCalendarCache) Populate(
ctx context.Context, ctx context.Context,
errs *fault.Errors, errs *fault.Bus,
baseID string, baseID string,
baseContainerPath ...string, baseContainerPath ...string,
) error { ) error {

View File

@ -45,7 +45,7 @@ type itemer interface {
GetItem( GetItem(
ctx context.Context, ctx context.Context,
user, itemID string, user, itemID string,
errs *fault.Errors, errs *fault.Bus,
) (serialization.Parsable, *details.ExchangeInfo, error) ) (serialization.Parsable, *details.ExchangeInfo, error)
Serialize( Serialize(
ctx context.Context, ctx context.Context,
@ -127,7 +127,7 @@ func NewCollection(
// Items utility function to asynchronously execute process to fill data channel with // Items utility function to asynchronously execute process to fill data channel with
// M365 exchange objects and returns the data channel // M365 exchange objects and returns the data channel
func (col *Collection) Items(ctx context.Context, errs *fault.Errors) <-chan data.Stream { func (col *Collection) Items(ctx context.Context, errs *fault.Bus) <-chan data.Stream {
go col.streamItems(ctx, errs) go col.streamItems(ctx, errs)
return col.data return col.data
} }
@ -163,7 +163,7 @@ func (col Collection) DoNotMergeItems() bool {
// streamItems is a utility function that uses col.collectionType to be able to serialize // streamItems is a utility function that uses col.collectionType to be able to serialize
// all the M365IDs defined in the added field. data channel is closed by this function // all the M365IDs defined in the added field. data channel is closed by this function
func (col *Collection) streamItems(ctx context.Context, errs *fault.Errors) { func (col *Collection) streamItems(ctx context.Context, errs *fault.Bus) {
var ( var (
success int64 success int64
totalBytes int64 totalBytes int64
@ -177,7 +177,7 @@ func (col *Collection) streamItems(ctx context.Context, errs *fault.Errors) {
) )
defer func() { defer func() {
col.finishPopulation(ctx, int(success), totalBytes, errs.Err()) col.finishPopulation(ctx, int(success), totalBytes, errs.Failure())
}() }()
if len(col.added)+len(col.removed) > 0 { if len(col.added)+len(col.removed) > 0 {
@ -226,7 +226,7 @@ func (col *Collection) streamItems(ctx context.Context, errs *fault.Errors) {
// add any new items // add any new items
for id := range col.added { for id := range col.added {
if errs.Err() != nil { if errs.Failure() != nil {
break break
} }
@ -253,7 +253,7 @@ func (col *Collection) streamItems(ctx context.Context, errs *fault.Errors) {
atomic.AddInt64(&success, 1) atomic.AddInt64(&success, 1)
log.With("err", err).Infow("item not found", clues.InErr(err).Slice()...) log.With("err", err).Infow("item not found", clues.InErr(err).Slice()...)
} else { } else {
errs.Add(clues.Wrap(err, "fetching item")) errs.AddRecoverable(clues.Wrap(err, "fetching item"))
} }
return return
@ -261,7 +261,7 @@ func (col *Collection) streamItems(ctx context.Context, errs *fault.Errors) {
data, err := col.items.Serialize(ctx, item, user, id) data, err := col.items.Serialize(ctx, item, user, id)
if err != nil { if err != nil {
errs.Add(clues.Wrap(err, "serializing item")) errs.AddRecoverable(clues.Wrap(err, "serializing item"))
return return
} }
@ -291,7 +291,7 @@ func getItemWithRetries(
ctx context.Context, ctx context.Context,
userID, itemID string, userID, itemID string,
items itemer, items itemer,
errs *fault.Errors, errs *fault.Bus,
) (serialization.Parsable, *details.ExchangeInfo, error) { ) (serialization.Parsable, *details.ExchangeInfo, error) {
item, info, err := items.GetItem(ctx, userID, itemID, errs) item, info, err := items.GetItem(ctx, userID, itemID, errs)
if err != nil { if err != nil {
@ -318,9 +318,8 @@ func (col *Collection) finishPopulation(
support.CollectionMetrics{ support.CollectionMetrics{
Objects: attempted, Objects: attempted,
Successes: success, Successes: success,
TotalBytes: totalBytes, Bytes: totalBytes,
}, },
err,
col.fullPath.Folder(false)) col.fullPath.Folder(false))
logger.Ctx(ctx).Debugw("done streaming items", "status", status.String()) logger.Ctx(ctx).Debugw("done streaming items", "status", status.String())

View File

@ -7,13 +7,12 @@ import (
"github.com/microsoft/kiota-abstractions-go/serialization" "github.com/microsoft/kiota-abstractions-go/serialization"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
@ -30,7 +29,7 @@ type mockItemer struct {
func (mi *mockItemer) GetItem( func (mi *mockItemer) GetItem(
context.Context, context.Context,
string, string, string, string,
*fault.Errors, *fault.Bus,
) (serialization.Parsable, *details.ExchangeInfo, error) { ) (serialization.Parsable, *details.ExchangeInfo, error) {
mi.getCount++ mi.getCount++
return nil, nil, mi.getErr return nil, nil, mi.getErr
@ -92,7 +91,7 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeData_FullPath() {
path.EmailCategory, path.EmailCategory,
false, false,
) )
require.NoError(t, err) aw.MustNoErr(t, err)
edc := Collection{ edc := Collection{
user: user, user: user,
@ -114,7 +113,7 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataCollection_NewExchange
path.EmailCategory, path.EmailCategory,
false, false,
) )
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
edc := Collection{ edc := Collection{
user: name, user: name,
@ -128,15 +127,15 @@ func (suite *ExchangeDataCollectionSuite) TestNewCollection_state() {
fooP, err := path.Builder{}. fooP, err := path.Builder{}.
Append("foo"). Append("foo").
ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false) ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false)
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
barP, err := path.Builder{}. barP, err := path.Builder{}.
Append("bar"). Append("bar").
ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false) ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false)
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
locP, err := path.Builder{}. locP, err := path.Builder{}.
Append("human-readable"). Append("human-readable").
ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false) ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false)
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
table := []struct { table := []struct {
name string name string
@ -199,7 +198,7 @@ func (suite *ExchangeDataCollectionSuite) TestGetItemWithRetries() {
name: "happy", name: "happy",
items: &mockItemer{}, items: &mockItemer{},
expectErr: func(t *testing.T, err error) { expectErr: func(t *testing.T, err error) {
assert.NoError(t, err) aw.NoErr(t, err)
}, },
expectGetCalls: 1, expectGetCalls: 1,
}, },
@ -207,16 +206,14 @@ func (suite *ExchangeDataCollectionSuite) TestGetItemWithRetries() {
name: "an error", name: "an error",
items: &mockItemer{getErr: assert.AnError}, items: &mockItemer{getErr: assert.AnError},
expectErr: func(t *testing.T, err error) { expectErr: func(t *testing.T, err error) {
assert.Error(t, err) aw.Err(t, err)
}, },
expectGetCalls: 3, expectGetCalls: 3,
}, },
{ {
name: "deleted in flight", name: "deleted in flight",
items: &mockItemer{ items: &mockItemer{
getErr: graph.ErrDeletedInFlight{ getErr: graph.ErrDeletedInFlight,
Err: *common.EncapsulateError(assert.AnError),
},
}, },
expectErr: func(t *testing.T, err error) { expectErr: func(t *testing.T, err error) {
assert.True(t, graph.IsErrDeletedInFlight(err), "is ErrDeletedInFlight") assert.True(t, graph.IsErrDeletedInFlight(err), "is ErrDeletedInFlight")

View File

@ -4,12 +4,12 @@ import (
"testing" "testing"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/exchange/api" "github.com/alcionai/corso/src/internal/connector/exchange/api"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
) )
@ -36,7 +36,7 @@ func (suite *CacheResolverSuite) SetupSuite() {
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) aw.MustNoErr(t, err)
suite.credentials = m365 suite.credentials = m365
} }
@ -46,10 +46,10 @@ func (suite *CacheResolverSuite) TestPopulate() {
defer flush() defer flush()
ac, err := api.NewClient(suite.credentials) ac, err := api.NewClient(suite.credentials)
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
cal, err := ac.Events().GetContainerByID(ctx, tester.M365UserID(suite.T()), DefaultCalendar) cal, err := ac.Events().GetContainerByID(ctx, tester.M365UserID(suite.T()), DefaultCalendar)
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
eventFunc := func(t *testing.T) graph.ContainerResolver { eventFunc := func(t *testing.T) graph.ContainerResolver {
return &eventCalendarCache{ return &eventCalendarCache{
@ -120,7 +120,7 @@ func (suite *CacheResolverSuite) TestPopulate() {
for _, test := range tests { for _, test := range tests {
suite.T().Run(test.name, func(t *testing.T) { suite.T().Run(test.name, func(t *testing.T) {
resolver := test.resolverFunc(t) resolver := test.resolverFunc(t)
require.NoError(t, resolver.Populate(ctx, fault.New(true), test.root, test.basePath)) aw.MustNoErr(t, resolver.Populate(ctx, fault.New(true), test.root, test.basePath))
_, isFound := resolver.PathInCache(test.folderInCache) _, isFound := resolver.PathInCache(test.folderInCache)
test.canFind(t, isFound) test.canFind(t, isFound)

View File

@ -4,13 +4,13 @@ import (
"testing" "testing"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
) )
type ExchangeIteratorSuite struct { type ExchangeIteratorSuite struct {
@ -31,7 +31,7 @@ func (suite *ExchangeIteratorSuite) TestDisplayable() {
t := suite.T() t := suite.T()
bytes := mockconnector.GetMockContactBytes("Displayable") bytes := mockconnector.GetMockContactBytes("Displayable")
contact, err := support.CreateContactFromBytes(bytes) contact, err := support.CreateContactFromBytes(bytes)
require.NoError(t, err) aw.MustNoErr(t, err)
aDisplayable, ok := contact.(graph.Displayable) aDisplayable, ok := contact.(graph.Displayable)
assert.True(t, ok) assert.True(t, ok)
@ -43,7 +43,7 @@ func (suite *ExchangeIteratorSuite) TestDescendable() {
t := suite.T() t := suite.T()
bytes := mockconnector.GetMockMessageBytes("Descendable") bytes := mockconnector.GetMockMessageBytes("Descendable")
message, err := support.CreateMessageFromBytes(bytes) message, err := support.CreateMessageFromBytes(bytes)
require.NoError(t, err) aw.MustNoErr(t, err)
aDescendable, ok := message.(graph.Descendable) aDescendable, ok := message.(graph.Descendable)
assert.True(t, ok) assert.True(t, ok)

View File

@ -72,7 +72,7 @@ func (mc *mailFolderCache) populateMailRoot(ctx context.Context) error {
// for the base container in the cache. // for the base container in the cache.
func (mc *mailFolderCache) Populate( func (mc *mailFolderCache) Populate(
ctx context.Context, ctx context.Context,
errs *fault.Errors, errs *fault.Bus,
baseID string, baseID string,
baseContainerPath ...string, baseContainerPath ...string,
) error { ) error {

View File

@ -5,11 +5,11 @@ import (
"testing" "testing"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/exchange/api" "github.com/alcionai/corso/src/internal/connector/exchange/api"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
) )
@ -48,7 +48,7 @@ func (suite *MailFolderCacheIntegrationSuite) SetupSuite() {
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) aw.MustNoErr(t, err)
suite.credentials = m365 suite.credentials = m365
} }
@ -83,7 +83,7 @@ func (suite *MailFolderCacheIntegrationSuite) TestDeltaFetch() {
for _, test := range tests { for _, test := range tests {
suite.T().Run(test.name, func(t *testing.T) { suite.T().Run(test.name, func(t *testing.T) {
ac, err := api.NewClient(suite.credentials) ac, err := api.NewClient(suite.credentials)
require.NoError(t, err) aw.MustNoErr(t, err)
acm := ac.Mail() acm := ac.Mail()
@ -93,10 +93,10 @@ func (suite *MailFolderCacheIntegrationSuite) TestDeltaFetch() {
getter: acm, getter: acm,
} }
require.NoError(t, mfc.Populate(ctx, fault.New(true), test.root, test.path...)) aw.MustNoErr(t, mfc.Populate(ctx, fault.New(true), test.root, test.path...))
p, l, err := mfc.IDToPath(ctx, testFolderID, true) p, l, err := mfc.IDToPath(ctx, testFolderID, true)
require.NoError(t, err) aw.MustNoErr(t, err)
t.Logf("Path: %s\n", p.String()) t.Logf("Path: %s\n", p.String())
t.Logf("Location: %s\n", l.String()) t.Logf("Location: %s\n", l.String())

View File

@ -6,15 +6,14 @@ import (
"time" "time"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/connector/exchange/api" "github.com/alcionai/corso/src/internal/connector/exchange/api"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
@ -43,18 +42,18 @@ func (suite *ExchangeRestoreSuite) SetupSuite() {
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) aw.MustNoErr(t, err)
suite.credentials = m365 suite.credentials = m365
suite.ac, err = api.NewClient(m365) suite.ac, err = api.NewClient(m365)
require.NoError(t, err) aw.MustNoErr(t, err)
adpt, err := graph.CreateAdapter(m365.AzureTenantID, m365.AzureClientID, m365.AzureClientSecret) adpt, err := graph.CreateAdapter(m365.AzureTenantID, m365.AzureClientID, m365.AzureClientSecret)
require.NoError(t, err) aw.MustNoErr(t, err)
suite.gs = graph.NewService(adpt) suite.gs = graph.NewService(adpt)
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
} }
// TestRestoreContact ensures contact object can be created, placed into // TestRestoreContact ensures contact object can be created, placed into
@ -71,14 +70,14 @@ func (suite *ExchangeRestoreSuite) TestRestoreContact() {
) )
aFolder, err := suite.ac.Contacts().CreateContactFolder(ctx, userID, folderName) aFolder, err := suite.ac.Contacts().CreateContactFolder(ctx, userID, folderName)
require.NoError(t, err) aw.MustNoErr(t, err)
folderID := *aFolder.GetId() folderID := *aFolder.GetId()
defer func() { defer func() {
// Remove the folder containing contact prior to exiting test // Remove the folder containing contact prior to exiting test
err = suite.ac.Contacts().DeleteContainer(ctx, userID, folderID) err = suite.ac.Contacts().DeleteContainer(ctx, userID, folderID)
assert.NoError(t, err) aw.NoErr(t, err)
}() }()
info, err := RestoreExchangeContact( info, err := RestoreExchangeContact(
@ -88,7 +87,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreContact() {
control.Copy, control.Copy,
folderID, folderID,
userID) userID)
assert.NoError(t, err, support.ConnectorStackErrorTrace(err)) aw.NoErr(t, err)
assert.NotNil(t, info, "contact item info") assert.NotNil(t, info, "contact item info")
} }
@ -105,14 +104,14 @@ func (suite *ExchangeRestoreSuite) TestRestoreEvent() {
) )
calendar, err := suite.ac.Events().CreateCalendar(ctx, userID, name) calendar, err := suite.ac.Events().CreateCalendar(ctx, userID, name)
require.NoError(t, err) aw.MustNoErr(t, err)
calendarID := *calendar.GetId() calendarID := *calendar.GetId()
defer func() { defer func() {
// Removes calendar containing events created during the test // Removes calendar containing events created during the test
err = suite.ac.Events().DeleteContainer(ctx, userID, calendarID) err = suite.ac.Events().DeleteContainer(ctx, userID, calendarID)
assert.NoError(t, err) aw.NoErr(t, err)
}() }()
info, err := RestoreExchangeEvent(ctx, info, err := RestoreExchangeEvent(ctx,
@ -122,7 +121,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreEvent() {
calendarID, calendarID,
userID, userID,
fault.New(true)) fault.New(true))
assert.NoError(t, err, support.ConnectorStackErrorTrace(err)) aw.NoErr(t, err)
assert.NotNil(t, info, "event item info") assert.NotNil(t, info, "event item info")
} }
@ -135,10 +134,10 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
t := suite.T() t := suite.T()
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) aw.MustNoErr(t, err)
service, err := createService(m365) service, err := createService(m365)
require.NoError(t, err) aw.MustNoErr(t, err)
deleters := map[path.CategoryType]containerDeleter{ deleters := map[path.CategoryType]containerDeleter{
path.EmailCategory: suite.ac.Mail(), path.EmailCategory: suite.ac.Mail(),
@ -161,7 +160,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailObject: " + common.FormatSimpleDateTime(now) folderName := "TestRestoreMailObject: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err) aw.MustNoErr(t, err)
return *folder.GetId() return *folder.GetId()
}, },
@ -173,7 +172,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailwithAttachment: " + common.FormatSimpleDateTime(now) folderName := "TestRestoreMailwithAttachment: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err) aw.MustNoErr(t, err)
return *folder.GetId() return *folder.GetId()
}, },
@ -185,7 +184,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreEventItemAttachment: " + common.FormatSimpleDateTime(now) folderName := "TestRestoreEventItemAttachment: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err) aw.MustNoErr(t, err)
return *folder.GetId() return *folder.GetId()
}, },
@ -197,7 +196,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailItemAttachment: " + common.FormatSimpleDateTime(now) folderName := "TestRestoreMailItemAttachment: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err) aw.MustNoErr(t, err)
return *folder.GetId() return *folder.GetId()
}, },
@ -212,7 +211,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailBasicItemAttachment: " + common.FormatSimpleDateTime(now) folderName := "TestRestoreMailBasicItemAttachment: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err) aw.MustNoErr(t, err)
return *folder.GetId() return *folder.GetId()
}, },
@ -227,7 +226,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "ItemMailAttachmentwAttachment " + common.FormatSimpleDateTime(now) folderName := "ItemMailAttachmentwAttachment " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err) aw.MustNoErr(t, err)
return *folder.GetId() return *folder.GetId()
}, },
@ -242,7 +241,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "ItemMailAttachment_Contact " + common.FormatSimpleDateTime(now) folderName := "ItemMailAttachment_Contact " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err) aw.MustNoErr(t, err)
return *folder.GetId() return *folder.GetId()
}, },
@ -254,7 +253,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreNestedEventItemAttachment: " + common.FormatSimpleDateTime(now) folderName := "TestRestoreNestedEventItemAttachment: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err) aw.MustNoErr(t, err)
return *folder.GetId() return *folder.GetId()
}, },
@ -266,7 +265,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailwithLargeAttachment: " + common.FormatSimpleDateTime(now) folderName := "TestRestoreMailwithLargeAttachment: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err) aw.MustNoErr(t, err)
return *folder.GetId() return *folder.GetId()
}, },
@ -278,7 +277,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailwithAttachments: " + common.FormatSimpleDateTime(now) folderName := "TestRestoreMailwithAttachments: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err) aw.MustNoErr(t, err)
return *folder.GetId() return *folder.GetId()
}, },
@ -290,7 +289,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailwithReferenceAttachment: " + common.FormatSimpleDateTime(now) folderName := "TestRestoreMailwithReferenceAttachment: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err) aw.MustNoErr(t, err)
return *folder.GetId() return *folder.GetId()
}, },
@ -303,7 +302,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreContactObject: " + common.FormatSimpleDateTime(now) folderName := "TestRestoreContactObject: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Contacts().CreateContactFolder(ctx, userID, folderName) folder, err := suite.ac.Contacts().CreateContactFolder(ctx, userID, folderName)
require.NoError(t, err) aw.MustNoErr(t, err)
return *folder.GetId() return *folder.GetId()
}, },
@ -315,7 +314,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
calendarName := "TestRestoreEventObject: " + common.FormatSimpleDateTime(now) calendarName := "TestRestoreEventObject: " + common.FormatSimpleDateTime(now)
calendar, err := suite.ac.Events().CreateCalendar(ctx, userID, calendarName) calendar, err := suite.ac.Events().CreateCalendar(ctx, userID, calendarName)
require.NoError(t, err) aw.MustNoErr(t, err)
return *calendar.GetId() return *calendar.GetId()
}, },
@ -327,7 +326,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
calendarName := "TestRestoreEventObject_" + common.FormatSimpleDateTime(now) calendarName := "TestRestoreEventObject_" + common.FormatSimpleDateTime(now)
calendar, err := suite.ac.Events().CreateCalendar(ctx, userID, calendarName) calendar, err := suite.ac.Events().CreateCalendar(ctx, userID, calendarName)
require.NoError(t, err) aw.MustNoErr(t, err)
return *calendar.GetId() return *calendar.GetId()
}, },
@ -349,10 +348,10 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination, destination,
userID, userID,
fault.New(true)) fault.New(true))
assert.NoError(t, err, support.ConnectorStackErrorTrace(err)) aw.NoErr(t, err)
assert.NotNil(t, info, "item info was not populated") assert.NotNil(t, info, "item info was not populated")
assert.NotNil(t, deleters) assert.NotNil(t, deleters)
assert.NoError(t, deleters[test.category].DeleteContainer(ctx, userID, destination)) aw.NoErr(t, deleters[test.category].DeleteContainer(ctx, userID, destination))
}) })
} }
} }

View File

@ -36,7 +36,7 @@ func createService(credentials account.M365Config) (*graph.Service, error) {
func PopulateExchangeContainerResolver( func PopulateExchangeContainerResolver(
ctx context.Context, ctx context.Context,
qp graph.QueryParams, qp graph.QueryParams,
errs *fault.Errors, errs *fault.Bus,
) (graph.ContainerResolver, error) { ) (graph.ContainerResolver, error) {
var ( var (
res graph.ContainerResolver res graph.ContainerResolver

View File

@ -39,7 +39,7 @@ func filterContainersAndFillCollections(
scope selectors.ExchangeScope, scope selectors.ExchangeScope,
dps DeltaPaths, dps DeltaPaths,
ctrlOpts control.Options, ctrlOpts control.Options,
errs *fault.Errors, errs *fault.Bus,
) error { ) error {
var ( var (
// folder ID -> delta url or folder path lookups // folder ID -> delta url or folder path lookups
@ -68,9 +68,11 @@ func filterContainersAndFillCollections(
return err return err
} }
el := errs.Local()
for _, c := range resolver.Items() { for _, c := range resolver.Items() {
if errs.Err() != nil { if el.Failure() != nil {
return errs.Err() return el.Failure()
} }
cID := *c.GetId() cID := *c.GetId()
@ -91,7 +93,7 @@ func filterContainersAndFillCollections(
if len(prevPathStr) > 0 { if len(prevPathStr) > 0 {
if prevPath, err = pathFromPrevString(prevPathStr); err != nil { if prevPath, err = pathFromPrevString(prevPathStr); err != nil {
logger.Ctx(ctx).With("err", err).Errorw("parsing prev path", clues.InErr(err).Slice()...) logger.CtxErr(ctx, err).Error("parsing prev path")
// if the previous path is unusable, then the delta must be, too. // if the previous path is unusable, then the delta must be, too.
prevDelta = "" prevDelta = ""
} }
@ -100,7 +102,7 @@ func filterContainersAndFillCollections(
added, removed, newDelta, err := getter.GetAddedAndRemovedItemIDs(ctx, qp.ResourceOwner, cID, prevDelta) added, removed, newDelta, err := getter.GetAddedAndRemovedItemIDs(ctx, qp.ResourceOwner, cID, prevDelta)
if err != nil { if err != nil {
if !graph.IsErrDeletedInFlight(err) { if !graph.IsErrDeletedInFlight(err) {
errs.Add(err) el.AddRecoverable(err)
continue continue
} }
@ -155,8 +157,12 @@ func filterContainersAndFillCollections(
// in the `previousPath` set, but does not exist in the current container // in the `previousPath` set, but does not exist in the current container
// resolver (which contains all the resource owners' current containers). // resolver (which contains all the resource owners' current containers).
for id, p := range tombstones { for id, p := range tombstones {
if el.Failure() != nil {
return el.Failure()
}
if collections[id] != nil { if collections[id] != nil {
errs.Add(clues.Wrap(err, "conflict: tombstone exists for a live collection").WithClues(ctx)) el.AddRecoverable(clues.Wrap(err, "conflict: tombstone exists for a live collection").WithClues(ctx))
continue continue
} }
@ -169,7 +175,7 @@ func filterContainersAndFillCollections(
prevPath, err := pathFromPrevString(p) prevPath, err := pathFromPrevString(p)
if err != nil { if err != nil {
// technically shouldn't ever happen. But just in case... // technically shouldn't ever happen. But just in case...
logger.Ctx(ctx).With("err", err).Errorw("parsing tombstone prev path", clues.InErr(err).Slice()...) logger.CtxErr(ctx, err).Error("parsing tombstone prev path")
continue continue
} }
@ -205,15 +211,14 @@ func filterContainersAndFillCollections(
path.ExchangeService, path.ExchangeService,
qp.Category, qp.Category,
entries, entries,
statusUpdater, statusUpdater)
)
if err != nil { if err != nil {
return clues.Wrap(err, "making metadata collection") return clues.Wrap(err, "making metadata collection")
} }
collections["metadata"] = col collections["metadata"] = col
return errs.Err() return el.Failure()
} }
// produces a set of id:path pairs from the deltapaths map. // produces a set of id:path pairs from the deltapaths map.

View File

@ -9,12 +9,12 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/connector/exchange/api" "github.com/alcionai/corso/src/internal/connector/exchange/api"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
@ -92,7 +92,7 @@ func (m mockResolver) IDToPath(context.Context, string, bool) (*path.Builder, *p
return nil, nil, nil return nil, nil, nil
} }
func (m mockResolver) PathInCache(string) (string, bool) { return "", false } func (m mockResolver) PathInCache(string) (string, bool) { return "", false }
func (m mockResolver) Populate(context.Context, *fault.Errors, string, ...string) error { return nil } func (m mockResolver) Populate(context.Context, *fault.Bus, string, ...string) error { return nil }
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// tests // tests
@ -110,7 +110,7 @@ func TestServiceIteratorsSuite(t *testing.T) {
func (suite *ServiceIteratorsSuite) SetupSuite() { func (suite *ServiceIteratorsSuite) SetupSuite() {
a := tester.NewMockM365Account(suite.T()) a := tester.NewMockM365Account(suite.T())
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
suite.creds = m365 suite.creds = m365
} }
@ -140,7 +140,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
added: []string{"a1", "a2", "a3"}, added: []string{"a1", "a2", "a3"},
removed: []string{"r1", "r2", "r3"}, removed: []string{"r1", "r2", "r3"},
newDelta: api.DeltaUpdate{URL: "delta_url"}, newDelta: api.DeltaUpdate{URL: "delta_url"},
err: graph.ErrDeletedInFlight{Err: *common.EncapsulateError(assert.AnError)}, err: graph.ErrDeletedInFlight,
} }
container1 = mockContainer{ container1 = mockContainer{
id: strPtr("1"), id: strPtr("1"),
@ -172,7 +172,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
}, },
resolver: newMockResolver(container1), resolver: newMockResolver(container1),
scope: allScope, scope: allScope,
expectErr: assert.NoError, expectErr: aw.NoErr,
expectNewColls: 1, expectNewColls: 1,
expectMetadataColls: 1, expectMetadataColls: 1,
}, },
@ -184,7 +184,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
}, },
resolver: newMockResolver(container1, container2), resolver: newMockResolver(container1, container2),
scope: allScope, scope: allScope,
expectErr: assert.NoError, expectErr: aw.NoErr,
expectNewColls: 2, expectNewColls: 2,
expectMetadataColls: 1, expectMetadataColls: 1,
}, },
@ -196,7 +196,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
}, },
resolver: newMockResolver(container1, container2), resolver: newMockResolver(container1, container2),
scope: allScope, scope: allScope,
expectErr: assert.NoError, expectErr: aw.NoErr,
expectNewColls: 2, expectNewColls: 2,
expectMetadataColls: 1, expectMetadataColls: 1,
}, },
@ -208,7 +208,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
}, },
resolver: newMockResolver(container1, container2), resolver: newMockResolver(container1, container2),
scope: selectors.NewExchangeBackup(nil).MailFolders(selectors.None())[0], scope: selectors.NewExchangeBackup(nil).MailFolders(selectors.None())[0],
expectErr: assert.NoError, expectErr: aw.NoErr,
expectNewColls: 0, expectNewColls: 0,
expectMetadataColls: 1, expectMetadataColls: 1,
}, },
@ -219,7 +219,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
}, },
resolver: newMockResolver(container1), resolver: newMockResolver(container1),
scope: allScope, scope: allScope,
expectErr: assert.NoError, expectErr: aw.NoErr,
expectNewColls: 1, expectNewColls: 1,
expectMetadataColls: 1, expectMetadataColls: 1,
expectDoNotMergeColls: 1, expectDoNotMergeColls: 1,
@ -231,7 +231,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
}, },
resolver: newMockResolver(container1), resolver: newMockResolver(container1),
scope: allScope, scope: allScope,
expectErr: assert.NoError, expectErr: aw.NoErr,
expectNewColls: 0, expectNewColls: 0,
expectMetadataColls: 1, expectMetadataColls: 1,
}, },
@ -243,7 +243,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
}, },
resolver: newMockResolver(container1, container2), resolver: newMockResolver(container1, container2),
scope: allScope, scope: allScope,
expectErr: assert.NoError, expectErr: aw.NoErr,
expectNewColls: 2, expectNewColls: 2,
expectMetadataColls: 1, expectMetadataColls: 1,
expectDoNotMergeColls: 1, expectDoNotMergeColls: 1,
@ -256,7 +256,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
}, },
resolver: newMockResolver(container1, container2), resolver: newMockResolver(container1, container2),
scope: allScope, scope: allScope,
expectErr: assert.NoError, expectErr: aw.NoErr,
expectNewColls: 1, expectNewColls: 1,
expectMetadataColls: 1, expectMetadataColls: 1,
}, },
@ -269,7 +269,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
resolver: newMockResolver(container1, container2), resolver: newMockResolver(container1, container2),
scope: allScope, scope: allScope,
failFast: true, failFast: true,
expectErr: assert.NoError, expectErr: aw.NoErr,
expectNewColls: 2, expectNewColls: 2,
expectMetadataColls: 1, expectMetadataColls: 1,
expectDoNotMergeColls: 1, expectDoNotMergeColls: 1,
@ -283,7 +283,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
resolver: newMockResolver(container1, container2), resolver: newMockResolver(container1, container2),
scope: allScope, scope: allScope,
failFast: true, failFast: true,
expectErr: assert.Error, expectErr: aw.Err,
expectNewColls: 0, expectNewColls: 0,
expectMetadataColls: 0, expectMetadataColls: 0,
}, },
@ -459,7 +459,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_repea
dps, dps,
control.Options{FailFast: true}, control.Options{FailFast: true},
fault.New(true)) fault.New(true))
require.NoError(t, err) aw.MustNoErr(t, err)
// collection assertions // collection assertions
@ -534,7 +534,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_incre
p, err := path.Builder{}. p, err := path.Builder{}.
Append(at...). Append(at...).
ToDataLayerExchangePathForCategory(tenantID, userID, cat, false) ToDataLayerExchangePathForCategory(tenantID, userID, cat, false)
require.NoError(t, err) aw.MustNoErr(t, err)
return p return p
} }
@ -811,7 +811,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_incre
test.dps, test.dps,
control.Options{}, control.Options{},
fault.New(true)) fault.New(true))
assert.NoError(t, err) aw.NoErr(t, err)
metadatas := 0 metadatas := 0
for _, c := range collections { for _, c := range collections {

View File

@ -37,7 +37,7 @@ func RestoreExchangeObject(
policy control.CollisionPolicy, policy control.CollisionPolicy,
service graph.Servicer, service graph.Servicer,
destination, user string, destination, user string,
errs *fault.Errors, errs *fault.Bus,
) (*details.ExchangeInfo, error) { ) (*details.ExchangeInfo, error) {
if policy != control.Copy { if policy != control.Copy {
return nil, clues.Wrap(clues.New(policy.String()), "policy not supported for Exchange restore").WithClues(ctx) return nil, clues.Wrap(clues.New(policy.String()), "policy not supported for Exchange restore").WithClues(ctx)
@ -70,14 +70,14 @@ func RestoreExchangeContact(
) (*details.ExchangeInfo, error) { ) (*details.ExchangeInfo, error) {
contact, err := support.CreateContactFromBytes(bits) contact, err := support.CreateContactFromBytes(bits)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "creating contact from bytes").WithClues(ctx) return nil, graph.Wrap(ctx, err, "creating contact from bytes")
} }
ctx = clues.Add(ctx, "item_id", ptr.Val(contact.GetId())) ctx = clues.Add(ctx, "item_id", ptr.Val(contact.GetId()))
response, err := service.Client().UsersById(user).ContactFoldersById(destination).Contacts().Post(ctx, contact, nil) response, err := service.Client().UsersById(user).ContactFoldersById(destination).Contacts().Post(ctx, contact, nil)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "uploading Contact").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "uploading Contact")
} }
if response == nil { if response == nil {
@ -102,7 +102,7 @@ func RestoreExchangeEvent(
service graph.Servicer, service graph.Servicer,
cp control.CollisionPolicy, cp control.CollisionPolicy,
destination, user string, destination, user string,
errs *fault.Errors, errs *fault.Bus,
) (*details.ExchangeInfo, error) { ) (*details.ExchangeInfo, error) {
event, err := support.CreateEventFromBytes(bits) event, err := support.CreateEventFromBytes(bits)
if err != nil { if err != nil {
@ -112,6 +112,7 @@ func RestoreExchangeEvent(
ctx = clues.Add(ctx, "item_id", ptr.Val(event.GetId())) ctx = clues.Add(ctx, "item_id", ptr.Val(event.GetId()))
var ( var (
el = errs.Local()
transformedEvent = support.ToEventSimplified(event) transformedEvent = support.ToEventSimplified(event)
attached []models.Attachmentable attached []models.Attachmentable
) )
@ -124,7 +125,7 @@ func RestoreExchangeEvent(
response, err := service.Client().UsersById(user).CalendarsById(destination).Events().Post(ctx, transformedEvent, nil) response, err := service.Client().UsersById(user).CalendarsById(destination).Events().Post(ctx, transformedEvent, nil)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "uploading event").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "uploading event")
} }
if response == nil { if response == nil {
@ -139,19 +140,19 @@ func RestoreExchangeEvent(
} }
for _, attach := range attached { for _, attach := range attached {
if errs.Err() != nil { if el.Failure() != nil {
break break
} }
if err := uploadAttachment(ctx, uploader, attach); err != nil { if err := uploadAttachment(ctx, uploader, attach); err != nil {
errs.Add(err) el.AddRecoverable(err)
} }
} }
info := api.EventInfo(event) info := api.EventInfo(event)
info.Size = int64(len(bits)) info.Size = int64(len(bits))
return info, errs.Err() return info, el.Failure()
} }
// RestoreMailMessage utility function to place an exchange.Mail // RestoreMailMessage utility function to place an exchange.Mail
@ -166,7 +167,7 @@ func RestoreMailMessage(
service graph.Servicer, service graph.Servicer,
cp control.CollisionPolicy, cp control.CollisionPolicy,
destination, user string, destination, user string,
errs *fault.Errors, errs *fault.Bus,
) (*details.ExchangeInfo, error) { ) (*details.ExchangeInfo, error) {
// Creates messageable object from original bytes // Creates messageable object from original bytes
originalMessage, err := support.CreateMessageFromBytes(bits) originalMessage, err := support.CreateMessageFromBytes(bits)
@ -239,7 +240,7 @@ func SendMailToBackStore(
service graph.Servicer, service graph.Servicer,
user, destination string, user, destination string,
message models.Messageable, message models.Messageable,
errs *fault.Errors, errs *fault.Bus,
) error { ) error {
attached := message.GetAttachments() attached := message.GetAttachments()
@ -248,24 +249,26 @@ func SendMailToBackStore(
response, err := service.Client().UsersById(user).MailFoldersById(destination).Messages().Post(ctx, message, nil) response, err := service.Client().UsersById(user).MailFoldersById(destination).Messages().Post(ctx, message, nil)
if err != nil { if err != nil {
return clues.Wrap(err, "restoring mail").WithClues(ctx).With(graph.ErrData(err)...) return graph.Wrap(ctx, err, "restoring mail")
} }
if response == nil { if response == nil {
return clues.New("nil response from post").WithClues(ctx) return clues.New("nil response from post").WithClues(ctx)
} }
id := ptr.Val(response.GetId()) var (
el = errs.Local()
uploader := &mailAttachmentUploader{ id = ptr.Val(response.GetId())
uploader = &mailAttachmentUploader{
userID: user, userID: user,
folderID: destination, folderID: destination,
itemID: id, itemID: id,
service: service, service: service,
} }
)
for _, attachment := range attached { for _, attachment := range attached {
if errs.Err() != nil { if el.Failure() != nil {
break break
} }
@ -273,20 +276,20 @@ func SendMailToBackStore(
if ptr.Val(attachment.GetOdataType()) == "#microsoft.graph.itemAttachment" { if ptr.Val(attachment.GetOdataType()) == "#microsoft.graph.itemAttachment" {
name := ptr.Val(attachment.GetName()) name := ptr.Val(attachment.GetName())
logger.Ctx(ctx). logger.CtxErr(ctx, err).
With("err", err, "attachment_name", name). With("attachment_name", name).
Infow("mail upload failed", clues.InErr(err).Slice()...) Info("mail upload failed")
continue continue
} }
errs.Add(errors.Wrap(err, "uploading mail attachment")) el.AddRecoverable(errors.Wrap(err, "uploading mail attachment"))
break break
} }
} }
return errs.Err() return el.Failure()
} }
// RestoreExchangeDataCollections restores M365 objects in data.RestoreCollection to MSFT // RestoreExchangeDataCollections restores M365 objects in data.RestoreCollection to MSFT
@ -299,7 +302,7 @@ func RestoreExchangeDataCollections(
dest control.RestoreDestination, dest control.RestoreDestination,
dcs []data.RestoreCollection, dcs []data.RestoreCollection,
deets *details.Builder, deets *details.Builder,
errs *fault.Errors, errs *fault.Bus,
) (*support.ConnectorOperationStatus, error) { ) (*support.ConnectorOperationStatus, error) {
var ( var (
directoryCaches = make(map[string]map[path.CategoryType]graph.ContainerResolver) directoryCaches = make(map[string]map[path.CategoryType]graph.ContainerResolver)
@ -307,6 +310,7 @@ func RestoreExchangeDataCollections(
userID string userID string
// TODO policy to be updated from external source after completion of refactoring // TODO policy to be updated from external source after completion of refactoring
policy = control.Copy policy = control.Copy
el = errs.Local()
) )
if len(dcs) > 0 { if len(dcs) > 0 {
@ -315,8 +319,8 @@ func RestoreExchangeDataCollections(
} }
for _, dc := range dcs { for _, dc := range dcs {
if errs.Err() != nil { if el.Failure() != nil {
return nil, errs.Err() break
} }
userCaches := directoryCaches[userID] userCaches := directoryCaches[userID]
@ -333,13 +337,13 @@ func RestoreExchangeDataCollections(
userCaches, userCaches,
errs) errs)
if err != nil { if err != nil {
errs.Add(clues.Wrap(err, "creating destination").WithClues(ctx)) el.AddRecoverable(clues.Wrap(err, "creating destination").WithClues(ctx))
continue continue
} }
temp, canceled := restoreCollection(ctx, gs, dc, containerID, policy, deets, errs) temp, canceled := restoreCollection(ctx, gs, dc, containerID, policy, deets, errs)
metrics.Combine(temp) metrics = support.CombineMetrics(metrics, temp)
if canceled { if canceled {
break break
@ -351,10 +355,9 @@ func RestoreExchangeDataCollections(
support.Restore, support.Restore,
len(dcs), len(dcs),
metrics, metrics,
errs.Err(),
dest.ContainerName) dest.ContainerName)
return status, errs.Err() return status, el.Failure()
} }
// restoreCollection handles restoration of an individual collection. // restoreCollection handles restoration of an individual collection.
@ -365,7 +368,7 @@ func restoreCollection(
folderID string, folderID string,
policy control.CollisionPolicy, policy control.CollisionPolicy,
deets *details.Builder, deets *details.Builder,
errs *fault.Errors, errs *fault.Bus,
) (support.CollectionMetrics, bool) { ) (support.CollectionMetrics, bool) {
ctx, end := D.Span(ctx, "gc:exchange:restoreCollection", D.Label("path", dc.FullPath())) ctx, end := D.Span(ctx, "gc:exchange:restoreCollection", D.Label("path", dc.FullPath()))
defer end() defer end()
@ -396,11 +399,11 @@ func restoreCollection(
for { for {
select { select {
case <-ctx.Done(): case <-ctx.Done():
errs.Add(clues.Wrap(ctx.Err(), "context cancelled").WithClues(ctx)) errs.AddRecoverable(clues.Wrap(ctx.Err(), "context cancelled").WithClues(ctx))
return metrics, true return metrics, true
case itemData, ok := <-items: case itemData, ok := <-items:
if !ok || errs.Err() != nil { if !ok || errs.Failure() != nil {
return metrics, false return metrics, false
} }
@ -412,7 +415,7 @@ func restoreCollection(
_, err := buf.ReadFrom(itemData.ToReader()) _, err := buf.ReadFrom(itemData.ToReader())
if err != nil { if err != nil {
errs.Add(clues.Wrap(err, "reading item bytes").WithClues(ictx)) errs.AddRecoverable(clues.Wrap(err, "reading item bytes").WithClues(ictx))
continue continue
} }
@ -428,16 +431,16 @@ func restoreCollection(
user, user,
errs) errs)
if err != nil { if err != nil {
errs.Add(err) errs.AddRecoverable(err)
continue continue
} }
metrics.TotalBytes += int64(len(byteArray)) metrics.Bytes += int64(len(byteArray))
metrics.Successes++ metrics.Successes++
itemPath, err := dc.FullPath().Append(itemData.UUID(), true) itemPath, err := dc.FullPath().Append(itemData.UUID(), true)
if err != nil { if err != nil {
errs.Add(clues.Wrap(err, "building full path with item").WithClues(ctx)) errs.AddRecoverable(clues.Wrap(err, "building full path with item").WithClues(ctx))
continue continue
} }
@ -472,7 +475,7 @@ func CreateContainerDestination(
directory path.Path, directory path.Path,
destination string, destination string,
caches map[path.CategoryType]graph.ContainerResolver, caches map[path.CategoryType]graph.ContainerResolver,
errs *fault.Errors, errs *fault.Bus,
) (string, error) { ) (string, error) {
var ( var (
newCache = false newCache = false
@ -585,7 +588,7 @@ func establishMailRestoreLocation(
mfc graph.ContainerResolver, mfc graph.ContainerResolver,
user string, user string,
isNewCache bool, isNewCache bool,
errs *fault.Errors, errs *fault.Bus,
) (string, error) { ) (string, error) {
// Process starts with the root folder in order to recreate // Process starts with the root folder in order to recreate
// the top-level folder with the same tactic // the top-level folder with the same tactic
@ -606,7 +609,7 @@ func establishMailRestoreLocation(
temp, err := ac.Mail().CreateMailFolderWithParent(ctx, user, folder, folderID) temp, err := ac.Mail().CreateMailFolderWithParent(ctx, user, folder, folderID)
if err != nil { if err != nil {
// Should only error if cache malfunctions or incorrect parameters // Should only error if cache malfunctions or incorrect parameters
return "", errors.Wrap(err, support.ConnectorStackErrorTrace(err)) return "", err
} }
folderID = *temp.GetId() folderID = *temp.GetId()
@ -644,7 +647,7 @@ func establishContactsRestoreLocation(
cfc graph.ContainerResolver, cfc graph.ContainerResolver,
user string, user string,
isNewCache bool, isNewCache bool,
errs *fault.Errors, errs *fault.Bus,
) (string, error) { ) (string, error) {
cached, ok := cfc.PathInCache(folders[0]) cached, ok := cfc.PathInCache(folders[0])
if ok { if ok {
@ -655,7 +658,7 @@ func establishContactsRestoreLocation(
temp, err := ac.Contacts().CreateContactFolder(ctx, user, folders[0]) temp, err := ac.Contacts().CreateContactFolder(ctx, user, folders[0])
if err != nil { if err != nil {
return "", errors.Wrap(err, support.ConnectorStackErrorTrace(err)) return "", err
} }
folderID := *temp.GetId() folderID := *temp.GetId()
@ -680,7 +683,7 @@ func establishEventsRestoreLocation(
ecc graph.ContainerResolver, // eventCalendarCache ecc graph.ContainerResolver, // eventCalendarCache
user string, user string,
isNewCache bool, isNewCache bool,
errs *fault.Errors, errs *fault.Bus,
) (string, error) { ) (string, error) {
// Need to prefix with the "Other Calendars" folder so lookup happens properly. // Need to prefix with the "Other Calendars" folder so lookup happens properly.
cached, ok := ecc.PathInCache(folders[0]) cached, ok := ecc.PathInCache(folders[0])
@ -692,7 +695,7 @@ func establishEventsRestoreLocation(
temp, err := ac.Events().CreateCalendar(ctx, user, folders[0]) temp, err := ac.Events().CreateCalendar(ctx, user, folders[0])
if err != nil { if err != nil {
return "", errors.Wrap(err, support.ConnectorStackErrorTrace(err)) return "", err
} }
folderID := *temp.GetId() folderID := *temp.GetId()

View File

@ -9,6 +9,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
) )
@ -31,7 +32,7 @@ func (suite *BetaClientSuite) SetupSuite() {
t := suite.T() t := suite.T()
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) aw.MustNoErr(t, err)
suite.credentials = m365 suite.credentials = m365
} }
@ -44,7 +45,7 @@ func (suite *BetaClientSuite) TestCreateBetaClient() {
suite.credentials.AzureClientSecret, suite.credentials.AzureClientSecret,
) )
require.NoError(t, err) aw.MustNoErr(t, err)
client := NewBetaClient(adpt) client := NewBetaClient(adpt)
assert.NotNil(t, client) assert.NotNil(t, client)
@ -64,7 +65,7 @@ func (suite *BetaClientSuite) TestBasicClientGetFunctionality() {
suite.credentials.AzureClientSecret, suite.credentials.AzureClientSecret,
) )
require.NoError(t, err) aw.MustNoErr(t, err)
client := NewBetaClient(adpt) client := NewBetaClient(adpt)
require.NotNil(t, client) require.NotNil(t, client)
@ -73,7 +74,7 @@ func (suite *BetaClientSuite) TestBasicClientGetFunctionality() {
collection, err := client.SitesById(siteID).Pages().Get(ctx, nil) collection, err := client.SitesById(siteID).Pages().Get(ctx, nil)
// Ensures that the client is able to receive data from beta // Ensures that the client is able to receive data from beta
// Not Registered Error: content type application/json does not have a factory registered to be parsed // Not Registered Error: content type application/json does not have a factory registered to be parsed
require.NoError(t, err) aw.MustNoErr(t, err)
for _, page := range collection.GetValue() { for _, page := range collection.GetValue() {
assert.NotNil(t, page, "betasdk call for page does not return value.") assert.NotNil(t, page, "betasdk call for page does not return value.")

View File

@ -65,7 +65,7 @@ type ContainerResolver interface {
// @param ctx is necessary param for Graph API tracing // @param ctx is necessary param for Graph API tracing
// @param baseFolderID represents the M365ID base that the resolver will // @param baseFolderID represents the M365ID base that the resolver will
// conclude its search. Default input is "". // conclude its search. Default input is "".
Populate(ctx context.Context, errs *fault.Errors, baseFolderID string, baseContainerPather ...string) error Populate(ctx context.Context, errs *fault.Bus, baseFolderID string, baseContainerPather ...string) error
// PathInCache performs a look up of a path reprensentation // PathInCache performs a look up of a path reprensentation
// and returns the m365ID of directory iff the pathString // and returns the m365ID of directory iff the pathString

View File

@ -6,12 +6,14 @@ import (
"net/http" "net/http"
"net/url" "net/url"
"os" "os"
"strings"
"github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors" "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
"github.com/pkg/errors" "github.com/pkg/errors"
"golang.org/x/exp/slices" "golang.org/x/exp/slices"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr"
) )
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -29,26 +31,36 @@ const (
errCodeMailboxNotEnabledForRESTAPI = "MailboxNotEnabledForRESTAPI" errCodeMailboxNotEnabledForRESTAPI = "MailboxNotEnabledForRESTAPI"
) )
var ( const (
Err401Unauthorized = errors.New("401 unauthorized") mysiteURLNotFound = "unable to retrieve user's mysite URL"
// normally the graph client will catch this for us, but in case we mysiteNotFound = "user's mysite not found"
// run our own client Do(), we need to translate it to a timeout type
// failure locally.
Err429TooManyRequests = errors.New("429 too many requests")
Err503ServiceUnavailable = errors.New("503 Service Unavailable")
Err504GatewayTimeout = errors.New("504 Gateway Timeout")
Err500InternalServerError = errors.New("500 Internal Server Error")
) )
// The folder or item was deleted between the time we identified var Labels = struct {
// it and when we tried to fetch data for it. MysiteNotFound string
type ErrDeletedInFlight struct { }{
common.Err MysiteNotFound: "mysite_not_found",
} }
var (
// The folder or item was deleted between the time we identified
// it and when we tried to fetch data for it.
ErrDeletedInFlight = clues.New("deleted in flight")
// Delta tokens can be desycned or expired. In either case, the token
// becomes invalid, and cannot be used again.
// https://learn.microsoft.com/en-us/graph/errors#code-property
ErrInvalidDelta = clues.New("inalid delta token")
// Timeout errors are identified for tracking the need to retry calls.
// Other delay errors, like throttling, are already handled by the
// graph client's built-in retries.
// https://github.com/microsoftgraph/msgraph-sdk-go/issues/302
ErrTimeout = clues.New("communication timeout")
)
func IsErrDeletedInFlight(err error) bool { func IsErrDeletedInFlight(err error) bool {
e := ErrDeletedInFlight{} if errors.Is(err, ErrDeletedInFlight) {
if errors.As(err, &e) {
return true return true
} }
@ -59,103 +71,39 @@ func IsErrDeletedInFlight(err error) bool {
return false return false
} }
// Delta tokens can be desycned or expired. In either case, the token
// becomes invalid, and cannot be used again.
// https://learn.microsoft.com/en-us/graph/errors#code-property
type ErrInvalidDelta struct {
common.Err
}
func IsErrInvalidDelta(err error) bool { func IsErrInvalidDelta(err error) bool {
e := ErrInvalidDelta{} return hasErrorCode(err, errCodeSyncStateNotFound, errCodeResyncRequired) ||
if errors.As(err, &e) { errors.Is(err, ErrInvalidDelta)
return true
}
if hasErrorCode(err, errCodeSyncStateNotFound, errCodeResyncRequired) {
return true
}
return false
} }
func IsErrExchangeMailFolderNotFound(err error) bool { func IsErrExchangeMailFolderNotFound(err error) bool {
return hasErrorCode(err, errCodeResourceNotFound, errCodeMailboxNotEnabledForRESTAPI) return hasErrorCode(err, errCodeResourceNotFound, errCodeMailboxNotEnabledForRESTAPI)
} }
// Timeout errors are identified for tracking the need to retry calls.
// Other delay errors, like throttling, are already handled by the
// graph client's built-in retries.
// https://github.com/microsoftgraph/msgraph-sdk-go/issues/302
type ErrTimeout struct {
common.Err
}
func IsErrTimeout(err error) bool { func IsErrTimeout(err error) bool {
e := ErrTimeout{}
if errors.As(err, &e) {
return true
}
if errors.Is(err, context.DeadlineExceeded) || os.IsTimeout(err) || errors.Is(err, http.ErrHandlerTimeout) {
return true
}
switch err := err.(type) { switch err := err.(type) {
case *url.Error: case *url.Error:
return err.Timeout() return err.Timeout()
default:
return false
}
} }
type ErrThrottled struct { return errors.Is(err, ErrTimeout) ||
common.Err errors.Is(err, context.DeadlineExceeded) ||
} errors.Is(err, http.ErrHandlerTimeout) ||
os.IsTimeout(err)
func IsErrThrottled(err error) bool {
if errors.Is(err, Err429TooManyRequests) {
return true
}
if hasErrorCode(err, errCodeActivityLimitReached) {
return true
}
e := ErrThrottled{}
return errors.As(err, &e)
}
type ErrUnauthorized struct {
common.Err
} }
func IsErrUnauthorized(err error) bool { func IsErrUnauthorized(err error) bool {
// TODO: refine this investigation. We don't currently know if // TODO: refine this investigation. We don't currently know if
// a specific item download url expired, or if the full connection // a specific item download url expired, or if the full connection
// auth expired. // auth expired.
if errors.Is(err, Err401Unauthorized) { return clues.HasLabel(err, LabelStatus(http.StatusUnauthorized))
return true
} }
e := ErrUnauthorized{} // LabelStatus transforms the provided statusCode into
// a standard label that can be attached to a clues error
return errors.As(err, &e) // and later reviewed when checking error statuses.
} func LabelStatus(statusCode int) string {
return fmt.Sprintf("status_code_%d", statusCode)
type ErrInternalServerError struct {
common.Err
}
func IsInternalServerError(err error) bool {
if errors.Is(err, Err500InternalServerError) {
return true
}
e := ErrInternalServerError{}
return errors.As(err, &e)
} }
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -179,43 +127,74 @@ func hasErrorCode(err error, codes ...string) bool {
return slices.Contains(codes, *oDataError.GetError().GetCode()) return slices.Contains(codes, *oDataError.GetError().GetCode())
} }
// ErrData is a helper function that extracts ODataError metadata from // Wrap is a helper function that extracts ODataError metadata from
// the error. If the error is not an ODataError type, returns an empty // the error. If the error is not an ODataError type, returns the error.
// slice. The returned value is guaranteed to be an even-length pairing func Wrap(ctx context.Context, e error, msg string) *clues.Err {
// of key, value tuples.
func ErrData(e error) []any {
result := make([]any, 0)
if e == nil { if e == nil {
return result return nil
} }
odErr, ok := e.(odataerrors.ODataErrorable) odErr, ok := e.(odataerrors.ODataErrorable)
if !ok { if !ok {
return result return clues.Wrap(e, msg).WithClues(ctx)
} }
// Get MainError data, innerMsg := ErrData(odErr)
mainErr := odErr.GetError()
result = appendIf(result, "odataerror_code", mainErr.GetCode()) return setLabels(clues.Wrap(e, msg).WithClues(ctx).With(data...), innerMsg)
result = appendIf(result, "odataerror_message", mainErr.GetMessage()) }
result = appendIf(result, "odataerror_target", mainErr.GetTarget())
// Stack is a helper function that extracts ODataError metadata from
// the error. If the error is not an ODataError type, returns the error.
func Stack(ctx context.Context, e error) *clues.Err {
if e == nil {
return nil
}
odErr, ok := e.(odataerrors.ODataErrorable)
if !ok {
return clues.Stack(e).WithClues(ctx)
}
data, innerMsg := ErrData(odErr)
return setLabels(clues.Stack(e).WithClues(ctx).With(data...), innerMsg)
}
func setLabels(err *clues.Err, msg string) *clues.Err {
if strings.Contains(msg, mysiteNotFound) || strings.Contains(msg, mysiteURLNotFound) {
err = err.Label(Labels.MysiteNotFound)
}
return err
}
func ErrData(err odataerrors.ODataErrorable) ([]any, string) {
data := make([]any, 0)
// Get MainError
mainErr := err.GetError()
data = appendIf(data, "odataerror_code", mainErr.GetCode())
data = appendIf(data, "odataerror_message", mainErr.GetMessage())
data = appendIf(data, "odataerror_target", mainErr.GetTarget())
msgConcat := ptr.Val(mainErr.GetMessage())
for i, d := range mainErr.GetDetails() { for i, d := range mainErr.GetDetails() {
pfx := fmt.Sprintf("odataerror_details_%d_", i) pfx := fmt.Sprintf("odataerror_details_%d_", i)
result = appendIf(result, pfx+"code", d.GetCode()) data = appendIf(data, pfx+"code", d.GetCode())
result = appendIf(result, pfx+"message", d.GetMessage()) data = appendIf(data, pfx+"message", d.GetMessage())
result = appendIf(result, pfx+"target", d.GetTarget()) data = appendIf(data, pfx+"target", d.GetTarget())
msgConcat += ptr.Val(d.GetMessage())
} }
inner := mainErr.GetInnererror() inner := mainErr.GetInnererror()
if inner != nil { if inner != nil {
result = appendIf(result, "odataerror_inner_cli_req_id", inner.GetClientRequestId()) data = appendIf(data, "odataerror_inner_cli_req_id", inner.GetClientRequestId())
result = appendIf(result, "odataerror_inner_req_id", inner.GetRequestId()) data = appendIf(data, "odataerror_inner_req_id", inner.GetRequestId())
} }
return result return data, strings.ToLower(msgConcat)
} }
func appendIf(a []any, k string, v *string) []any { func appendIf(a []any, k string, v *string) []any {

View File

@ -7,8 +7,6 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors" "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common"
) )
type GraphErrorsUnitSuite struct { type GraphErrorsUnitSuite struct {
@ -46,7 +44,7 @@ func (suite *GraphErrorsUnitSuite) TestIsErrDeletedInFlight() {
}, },
{ {
name: "as", name: "as",
err: ErrDeletedInFlight{Err: *common.EncapsulateError(assert.AnError)}, err: ErrDeletedInFlight,
expect: assert.True, expect: assert.True,
}, },
{ {
@ -90,7 +88,7 @@ func (suite *GraphErrorsUnitSuite) TestIsErrInvalidDelta() {
}, },
{ {
name: "as", name: "as",
err: ErrInvalidDelta{Err: *common.EncapsulateError(assert.AnError)}, err: ErrInvalidDelta,
expect: assert.True, expect: assert.True,
}, },
{ {
@ -129,7 +127,7 @@ func (suite *GraphErrorsUnitSuite) TestIsErrTimeout() {
}, },
{ {
name: "as", name: "as",
err: ErrTimeout{Err: *common.EncapsulateError(assert.AnError)}, err: ErrTimeout,
expect: assert.True, expect: assert.True,
}, },
{ {
@ -144,105 +142,3 @@ func (suite *GraphErrorsUnitSuite) TestIsErrTimeout() {
}) })
} }
} }
func (suite *GraphErrorsUnitSuite) TestIsErrThrottled() {
table := []struct {
name string
err error
expect assert.BoolAssertionFunc
}{
{
name: "nil",
err: nil,
expect: assert.False,
},
{
name: "non-matching",
err: assert.AnError,
expect: assert.False,
},
{
name: "as",
err: ErrThrottled{Err: *common.EncapsulateError(assert.AnError)},
expect: assert.True,
},
{
name: "is429",
err: Err429TooManyRequests,
expect: assert.True,
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
test.expect(t, IsErrThrottled(test.err))
})
}
}
func (suite *GraphErrorsUnitSuite) TestIsErrUnauthorized() {
table := []struct {
name string
err error
expect assert.BoolAssertionFunc
}{
{
name: "nil",
err: nil,
expect: assert.False,
},
{
name: "non-matching",
err: assert.AnError,
expect: assert.False,
},
{
name: "as",
err: ErrUnauthorized{Err: *common.EncapsulateError(assert.AnError)},
expect: assert.True,
},
{
name: "is429",
err: Err401Unauthorized,
expect: assert.True,
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
test.expect(t, IsErrUnauthorized(test.err))
})
}
}
func (suite *GraphErrorsUnitSuite) TestIsInternalServerError() {
table := []struct {
name string
err error
expect assert.BoolAssertionFunc
}{
{
name: "nil",
err: nil,
expect: assert.False,
},
{
name: "non-matching",
err: assert.AnError,
expect: assert.False,
},
{
name: "as",
err: ErrInternalServerError{Err: *common.EncapsulateError(assert.AnError)},
expect: assert.True,
},
{
name: "is429",
err: Err500InternalServerError,
expect: assert.True,
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
test.expect(t, IsInternalServerError(test.err))
})
}
}

View File

@ -134,7 +134,7 @@ func (md MetadataCollection) DoNotMergeItems() bool {
func (md MetadataCollection) Items( func (md MetadataCollection) Items(
ctx context.Context, ctx context.Context,
errs *fault.Errors, errs *fault.Bus,
) <-chan data.Stream { ) <-chan data.Stream {
res := make(chan data.Stream) res := make(chan data.Stream)
@ -152,9 +152,8 @@ func (md MetadataCollection) Items(
support.CollectionMetrics{ support.CollectionMetrics{
Objects: len(md.items), Objects: len(md.items),
Successes: len(md.items), Successes: len(md.items),
TotalBytes: totalBytes, Bytes: totalBytes,
}, },
nil,
md.fullPath.Folder(false), md.fullPath.Folder(false),
) )

View File

@ -12,6 +12,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
) )
@ -35,7 +36,7 @@ func (suite *MetadataCollectionUnitSuite) TestFullPath() {
path.EmailCategory, path.EmailCategory,
false, false,
) )
require.NoError(t, err) aw.MustNoErr(t, err)
c := NewMetadataCollection(p, nil, nil) c := NewMetadataCollection(p, nil, nil)
@ -78,14 +79,14 @@ func (suite *MetadataCollectionUnitSuite) TestItems() {
path.EmailCategory, path.EmailCategory,
false, false,
) )
require.NoError(t, err) aw.MustNoErr(t, err)
c := NewMetadataCollection( c := NewMetadataCollection(
p, p,
items, items,
func(c *support.ConnectorOperationStatus) { func(c *support.ConnectorOperationStatus) {
assert.Equal(t, len(itemNames), c.ObjectCount) assert.Equal(t, len(itemNames), c.Metrics.Objects)
assert.Equal(t, len(itemNames), c.Successful) assert.Equal(t, len(itemNames), c.Metrics.Successes)
}, },
) )
@ -96,7 +97,7 @@ func (suite *MetadataCollectionUnitSuite) TestItems() {
gotNames = append(gotNames, s.UUID()) gotNames = append(gotNames, s.UUID())
buf, err := io.ReadAll(s.ToReader()) buf, err := io.ReadAll(s.ToReader())
if !assert.NoError(t, err) { if !aw.NoErr(t, err) {
continue continue
} }
@ -125,7 +126,7 @@ func (suite *MetadataCollectionUnitSuite) TestMakeMetadataCollection() {
cat: path.EmailCategory, cat: path.EmailCategory,
metadata: NewMetadataEntry("", nil), metadata: NewMetadataEntry("", nil),
collectionCheck: assert.Nil, collectionCheck: assert.Nil,
errCheck: assert.Error, errCheck: aw.Err,
}, },
{ {
name: "Tokens", name: "Tokens",
@ -138,7 +139,7 @@ func (suite *MetadataCollectionUnitSuite) TestMakeMetadataCollection() {
"hola": "mundo", "hola": "mundo",
}), }),
collectionCheck: assert.NotNil, collectionCheck: assert.NotNil,
errCheck: assert.NoError, errCheck: aw.NoErr,
}, },
{ {
name: "BadCategory", name: "BadCategory",
@ -151,7 +152,7 @@ func (suite *MetadataCollectionUnitSuite) TestMakeMetadataCollection() {
"hola": "mundo", "hola": "mundo",
}), }),
collectionCheck: assert.Nil, collectionCheck: assert.Nil,
errCheck: assert.Error, errCheck: aw.Err,
}, },
} }
@ -187,7 +188,7 @@ func (suite *MetadataCollectionUnitSuite) TestMakeMetadataCollection() {
itemCount++ itemCount++
err := decoder.Decode(&gotMap) err := decoder.Decode(&gotMap)
if !assert.NoError(t, err) { if !aw.NoErr(t, err) {
continue continue
} }

View File

@ -6,7 +6,6 @@ import (
"strconv" "strconv"
"time" "time"
"github.com/alcionai/clues"
backoff "github.com/cenkalti/backoff/v4" backoff "github.com/cenkalti/backoff/v4"
khttp "github.com/microsoft/kiota-http-go" khttp "github.com/microsoft/kiota-http-go"
) )
@ -50,10 +49,7 @@ func (middleware RetryHandler) retryRequest(
response, err := pipeline.Next(req, middlewareIndex) response, err := pipeline.Next(req, middlewareIndex)
if err != nil && !IsErrTimeout(err) { if err != nil && !IsErrTimeout(err) {
return response, clues.Stack(err). return response, Stack(ctx, err).With("retry_count", executionCount)
WithClues(ctx).
With("retry_count", executionCount).
With(ErrData(err)...)
} }
return middleware.retryRequest(ctx, return middleware.retryRequest(ctx,
@ -68,10 +64,7 @@ func (middleware RetryHandler) retryRequest(
} }
if respErr != nil { if respErr != nil {
return nil, clues.Stack(respErr). return nil, Stack(ctx, respErr).With("retry_count", executionCount)
WithClues(ctx).
With("retry_count", executionCount).
With(ErrData(respErr)...)
} }
return resp, nil return resp, nil

View File

@ -8,7 +8,6 @@ import (
"time" "time"
"github.com/Azure/azure-sdk-for-go/sdk/azidentity" "github.com/Azure/azure-sdk-for-go/sdk/azidentity"
"github.com/alcionai/clues"
backoff "github.com/cenkalti/backoff/v4" backoff "github.com/cenkalti/backoff/v4"
"github.com/microsoft/kiota-abstractions-go/serialization" "github.com/microsoft/kiota-abstractions-go/serialization"
ka "github.com/microsoft/kiota-authentication-azure-go" ka "github.com/microsoft/kiota-authentication-azure-go"
@ -336,7 +335,7 @@ func (middleware RetryHandler) Intercept(
response, err := pipeline.Next(req, middlewareIndex) response, err := pipeline.Next(req, middlewareIndex)
if err != nil && !IsErrTimeout(err) { if err != nil && !IsErrTimeout(err) {
return response, clues.Stack(err).WithClues(ctx).With(ErrData(err)...) return response, Stack(ctx, err)
} }
exponentialBackOff := backoff.NewExponentialBackOff() exponentialBackOff := backoff.NewExponentialBackOff()
@ -354,7 +353,7 @@ func (middleware RetryHandler) Intercept(
exponentialBackOff, exponentialBackOff,
err) err)
if err != nil { if err != nil {
return nil, clues.Stack(err).WithClues(ctx).With(ErrData(err)...) return nil, Stack(ctx, err)
} }
return response, nil return response, nil

View File

@ -7,10 +7,10 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
) )
@ -27,7 +27,7 @@ func (suite *GraphUnitSuite) SetupSuite() {
t := suite.T() t := suite.T()
a := tester.NewMockM365Account(t) a := tester.NewMockM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) aw.MustNoErr(t, err)
suite.credentials = m365 suite.credentials = m365
} }
@ -39,7 +39,7 @@ func (suite *GraphUnitSuite) TestCreateAdapter() {
suite.credentials.AzureClientID, suite.credentials.AzureClientID,
suite.credentials.AzureClientSecret) suite.credentials.AzureClientSecret)
assert.NoError(t, err) aw.NoErr(t, err)
assert.NotNil(t, adpt) assert.NotNil(t, adpt)
} }
@ -79,7 +79,7 @@ func (suite *GraphUnitSuite) TestSerializationEndPoint() {
suite.credentials.AzureTenantID, suite.credentials.AzureTenantID,
suite.credentials.AzureClientID, suite.credentials.AzureClientID,
suite.credentials.AzureClientSecret) suite.credentials.AzureClientSecret)
require.NoError(t, err) aw.MustNoErr(t, err)
serv := NewService(adpt) serv := NewService(adpt)
email := models.NewMessage() email := models.NewMessage()
@ -87,7 +87,7 @@ func (suite *GraphUnitSuite) TestSerializationEndPoint() {
email.SetSubject(&subject) email.SetSubject(&subject)
byteArray, err := serv.Serialize(email) byteArray, err := serv.Serialize(email)
assert.NoError(t, err) aw.NoErr(t, err)
assert.NotNil(t, byteArray) assert.NotNil(t, byteArray)
t.Log(string(byteArray)) t.Log(string(byteArray))
} }

View File

@ -68,7 +68,7 @@ func NewGraphConnector(
itemClient *http.Client, itemClient *http.Client,
acct account.Account, acct account.Account,
r resource, r resource,
errs *fault.Errors, errs *fault.Bus,
) (*GraphConnector, error) { ) (*GraphConnector, error) {
m365, err := acct.M365Config() m365, err := acct.M365Config()
if err != nil { if err != nil {
@ -129,7 +129,7 @@ func (gc *GraphConnector) createService() (*graph.Service, error) {
// setTenantUsers queries the M365 to identify the users in the // setTenantUsers queries the M365 to identify the users in the
// workspace. The users field is updated during this method // workspace. The users field is updated during this method
// iff the returned error is nil // iff the returned error is nil
func (gc *GraphConnector) setTenantUsers(ctx context.Context, errs *fault.Errors) error { func (gc *GraphConnector) setTenantUsers(ctx context.Context, errs *fault.Bus) error {
ctx, end := D.Span(ctx, "gc:setTenantUsers") ctx, end := D.Span(ctx, "gc:setTenantUsers")
defer end() defer end()
@ -160,7 +160,7 @@ func (gc *GraphConnector) GetUsersIds() []string {
// setTenantSites queries the M365 to identify the sites in the // setTenantSites queries the M365 to identify the sites in the
// workspace. The sites field is updated during this method // workspace. The sites field is updated during this method
// iff the returned error is nil. // iff the returned error is nil.
func (gc *GraphConnector) setTenantSites(ctx context.Context, errs *fault.Errors) error { func (gc *GraphConnector) setTenantSites(ctx context.Context, errs *fault.Bus) error {
gc.Sites = map[string]string{} gc.Sites = map[string]string{}
ctx, end := D.Span(ctx, "gc:setTenantSites") ctx, end := D.Span(ctx, "gc:setTenantSites")
@ -232,7 +232,7 @@ func (gc *GraphConnector) GetSiteIDs() []string {
func (gc *GraphConnector) UnionSiteIDsAndWebURLs( func (gc *GraphConnector) UnionSiteIDsAndWebURLs(
ctx context.Context, ctx context.Context,
ids, urls []string, ids, urls []string,
errs *fault.Errors, errs *fault.Bus,
) ([]string, error) { ) ([]string, error) {
if len(gc.Sites) == 0 { if len(gc.Sites) == 0 {
if err := gc.setTenantSites(ctx, errs); err != nil { if err := gc.setTenantSites(ctx, errs); err != nil {
@ -314,31 +314,31 @@ func getResources(
query func(context.Context, graph.Servicer) (serialization.Parsable, error), query func(context.Context, graph.Servicer) (serialization.Parsable, error),
parser func(parseNode serialization.ParseNode) (serialization.Parsable, error), parser func(parseNode serialization.ParseNode) (serialization.Parsable, error),
identify func(any) (string, string, error), identify func(any) (string, string, error),
errs *fault.Errors, errs *fault.Bus,
) (map[string]string, error) { ) (map[string]string, error) {
resources := map[string]string{} resources := map[string]string{}
response, err := query(ctx, gs) response, err := query(ctx, gs)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "retrieving tenant's resources"). return nil, graph.Wrap(ctx, err, "retrieving tenant's resources")
WithClues(ctx).
With(graph.ErrData(err)...)
} }
iter, err := msgraphgocore.NewPageIterator(response, gs.Adapter(), parser) iter, err := msgraphgocore.NewPageIterator(response, gs.Adapter(), parser)
if err != nil { if err != nil {
return nil, clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Stack(ctx, err)
} }
el := errs.Local()
callbackFunc := func(item any) bool { callbackFunc := func(item any) bool {
if errs.Err() != nil { if el.Failure() != nil {
return false return false
} }
k, v, err := identify(item) k, v, err := identify(item)
if err != nil { if err != nil {
if !errors.Is(err, errKnownSkippableCase) { if !errors.Is(err, errKnownSkippableCase) {
errs.Add(clues.Stack(err). el.AddRecoverable(clues.Stack(err).
WithClues(ctx). WithClues(ctx).
With("query_url", gs.Adapter().GetBaseUrl())) With("query_url", gs.Adapter().GetBaseUrl()))
} }
@ -352,8 +352,8 @@ func getResources(
} }
if err := iter.Iterate(ctx, callbackFunc); err != nil { if err := iter.Iterate(ctx, callbackFunc); err != nil {
return nil, clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Stack(ctx, err)
} }
return resources, errs.Err() return resources, el.Failure()
} }

View File

@ -4,14 +4,13 @@ import (
"sync" "sync"
"testing" "testing"
"github.com/pkg/errors"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/credentials" "github.com/alcionai/corso/src/pkg/credentials"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
@ -54,7 +53,7 @@ func (suite *DisconnectedGraphConnectorSuite) TestBadConnection() {
AzureTenantID: "data", AzureTenantID: "data",
}, },
) )
require.NoError(t, err) aw.MustNoErr(t, err)
return a return a
}, },
}, },
@ -94,15 +93,9 @@ func statusTestTask(gc *GraphConnector, objects, success, folder int) {
support.CollectionMetrics{ support.CollectionMetrics{
Objects: objects, Objects: objects,
Successes: success, Successes: success,
TotalBytes: 0, Bytes: 0,
}, },
support.WrapAndAppend( "statusTestTask")
"tres",
errors.New("three"),
support.WrapAndAppend("arc376", errors.New("one"), errors.New("two")),
),
"statusTestTask",
)
gc.UpdateStatus(status) gc.UpdateStatus(status)
} }
@ -123,11 +116,11 @@ func (suite *DisconnectedGraphConnectorSuite) TestGraphConnector_Status() {
assert.NotEmpty(t, gc.PrintableStatus()) assert.NotEmpty(t, gc.PrintableStatus())
// Expect 8 objects // Expect 8 objects
assert.Equal(t, 8, gc.Status().ObjectCount) assert.Equal(t, 8, gc.Status().Metrics.Objects)
// Expect 2 success // Expect 2 success
assert.Equal(t, 2, gc.Status().Successful) assert.Equal(t, 2, gc.Status().Metrics.Successes)
// Expect 2 folders // Expect 2 folders
assert.Equal(t, 2, gc.Status().FolderCount) assert.Equal(t, 2, gc.Status().Folders)
} }
func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs() { func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs() {
@ -146,14 +139,14 @@ func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs() {
}{ }{
{ {
name: "No scopes", name: "No scopes",
checkError: assert.Error, checkError: aw.Err,
getSelector: func(t *testing.T) selectors.Selector { getSelector: func(t *testing.T) selectors.Selector {
return selectors.NewExchangeBackup(nil).Selector return selectors.NewExchangeBackup(nil).Selector
}, },
}, },
{ {
name: "Valid Single User", name: "Valid Single User",
checkError: assert.NoError, checkError: aw.NoErr,
getSelector: func(t *testing.T) selectors.Selector { getSelector: func(t *testing.T) selectors.Selector {
sel := selectors.NewExchangeBackup([]string{"bobKelso@someHospital.org"}) sel := selectors.NewExchangeBackup([]string{"bobKelso@someHospital.org"})
sel.Include(sel.MailFolders(selectors.Any())) sel.Include(sel.MailFolders(selectors.Any()))
@ -162,7 +155,7 @@ func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs() {
}, },
{ {
name: "Partial invalid user", name: "Partial invalid user",
checkError: assert.Error, checkError: aw.Err,
getSelector: func(t *testing.T) selectors.Selector { getSelector: func(t *testing.T) selectors.Selector {
sel := selectors.NewExchangeBackup([]string{"bobkelso@someHospital.org", "janitor@someHospital.org"}) sel := selectors.NewExchangeBackup([]string{"bobkelso@someHospital.org", "janitor@someHospital.org"})
sel.Include(sel.MailFolders(selectors.Any())) sel.Include(sel.MailFolders(selectors.Any()))
@ -172,7 +165,7 @@ func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs() {
}, },
{ {
name: "Invalid discrete owner", name: "Invalid discrete owner",
checkError: assert.Error, checkError: aw.Err,
getSelector: func(t *testing.T) selectors.Selector { getSelector: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup([]string{"janitor@someHospital.org"}) sel := selectors.NewOneDriveBackup([]string{"janitor@someHospital.org"})
sel.Include(sel.AllData()) sel.Include(sel.AllData())
@ -202,7 +195,7 @@ func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs_allServices
}{ }{
{ {
name: "Valid User", name: "Valid User",
checkError: assert.NoError, checkError: aw.NoErr,
excludes: func(t *testing.T) selectors.Selector { excludes: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup([]string{"elliotReid@someHospital.org", "foo@SomeCompany.org"}) sel := selectors.NewOneDriveBackup([]string{"elliotReid@someHospital.org", "foo@SomeCompany.org"})
sel.Exclude(sel.Folders(selectors.Any())) sel.Exclude(sel.Folders(selectors.Any()))
@ -224,7 +217,7 @@ func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs_allServices
}, },
{ {
name: "Invalid User", name: "Invalid User",
checkError: assert.Error, checkError: aw.Err,
excludes: func(t *testing.T) selectors.Selector { excludes: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup([]string{"foo@SomeCompany.org"}) sel := selectors.NewOneDriveBackup([]string{"foo@SomeCompany.org"})
sel.Exclude(sel.Folders(selectors.Any())) sel.Exclude(sel.Folders(selectors.Any()))
@ -243,7 +236,7 @@ func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs_allServices
}, },
{ {
name: "valid sites", name: "valid sites",
checkError: assert.NoError, checkError: aw.NoErr,
excludes: func(t *testing.T) selectors.Selector { excludes: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup([]string{"abc.site.foo", "bar.site.baz"}) sel := selectors.NewSharePointBackup([]string{"abc.site.foo", "bar.site.baz"})
sel.DiscreteOwner = "abc.site.foo" sel.DiscreteOwner = "abc.site.foo"
@ -265,7 +258,7 @@ func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs_allServices
}, },
{ {
name: "invalid sites", name: "invalid sites",
checkError: assert.Error, checkError: aw.Err,
excludes: func(t *testing.T) selectors.Selector { excludes: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup([]string{"fnords.smarfs.brawnhilda"}) sel := selectors.NewSharePointBackup([]string{"fnords.smarfs.brawnhilda"})
sel.Exclude(sel.AllData()) sel.Exclude(sel.AllData())

View File

@ -22,6 +22,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
@ -58,7 +59,7 @@ func mustToDataLayerPath(
err = errors.Errorf("bad service type %s", service.String()) err = errors.Errorf("bad service type %s", service.String())
} }
require.NoError(t, err) aw.MustNoErr(t, err)
return res return res
} }
@ -600,12 +601,12 @@ func compareExchangeEmail(
item data.Stream, item data.Stream,
) { ) {
itemData, err := io.ReadAll(item.ToReader()) itemData, err := io.ReadAll(item.ToReader())
if !assert.NoError(t, err, "reading collection item: %s", item.UUID()) { if !aw.NoErr(t, err, "reading collection item: %s", item.UUID()) {
return return
} }
itemMessage, err := support.CreateMessageFromBytes(itemData) itemMessage, err := support.CreateMessageFromBytes(itemData)
if !assert.NoError(t, err, "deserializing backed up message") { if !aw.NoErr(t, err, "deserializing backed up message") {
return return
} }
@ -615,7 +616,7 @@ func compareExchangeEmail(
} }
expectedMessage, err := support.CreateMessageFromBytes(expectedBytes) expectedMessage, err := support.CreateMessageFromBytes(expectedBytes)
assert.NoError(t, err, "deserializing source message") aw.NoErr(t, err, "deserializing source message")
checkMessage(t, expectedMessage, itemMessage) checkMessage(t, expectedMessage, itemMessage)
} }
@ -626,12 +627,12 @@ func compareExchangeContact(
item data.Stream, item data.Stream,
) { ) {
itemData, err := io.ReadAll(item.ToReader()) itemData, err := io.ReadAll(item.ToReader())
if !assert.NoError(t, err, "reading collection item: %s", item.UUID()) { if !aw.NoErr(t, err, "reading collection item: %s", item.UUID()) {
return return
} }
itemContact, err := support.CreateContactFromBytes(itemData) itemContact, err := support.CreateContactFromBytes(itemData)
if !assert.NoError(t, err, "deserializing backed up contact") { if !aw.NoErr(t, err, "deserializing backed up contact") {
return return
} }
@ -641,7 +642,7 @@ func compareExchangeContact(
} }
expectedContact, err := support.CreateContactFromBytes(expectedBytes) expectedContact, err := support.CreateContactFromBytes(expectedBytes)
assert.NoError(t, err, "deserializing source contact") aw.NoErr(t, err, "deserializing source contact")
checkContact(t, expectedContact, itemContact) checkContact(t, expectedContact, itemContact)
} }
@ -652,12 +653,12 @@ func compareExchangeEvent(
item data.Stream, item data.Stream,
) { ) {
itemData, err := io.ReadAll(item.ToReader()) itemData, err := io.ReadAll(item.ToReader())
if !assert.NoError(t, err, "reading collection item: %s", item.UUID()) { if !aw.NoErr(t, err, "reading collection item: %s", item.UUID()) {
return return
} }
itemEvent, err := support.CreateEventFromBytes(itemData) itemEvent, err := support.CreateEventFromBytes(itemData)
if !assert.NoError(t, err, "deserializing backed up contact") { if !aw.NoErr(t, err, "deserializing backed up contact") {
return return
} }
@ -667,7 +668,7 @@ func compareExchangeEvent(
} }
expectedEvent, err := support.CreateEventFromBytes(expectedBytes) expectedEvent, err := support.CreateEventFromBytes(expectedBytes)
assert.NoError(t, err, "deserializing source contact") aw.NoErr(t, err, "deserializing source contact")
checkEvent(t, expectedEvent, itemEvent) checkEvent(t, expectedEvent, itemEvent)
} }
@ -715,7 +716,7 @@ func compareOneDriveItem(
} }
buf, err := io.ReadAll(item.ToReader()) buf, err := io.ReadAll(item.ToReader())
if !assert.NoError(t, err) { if !aw.NoErr(t, err) {
return return
} }
@ -1108,7 +1109,7 @@ func loadConnector(ctx context.Context, t *testing.T, itemClient *http.Client, r
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
connector, err := NewGraphConnector(ctx, itemClient, a, r, fault.New(true)) connector, err := NewGraphConnector(ctx, itemClient, a, r, fault.New(true))
require.NoError(t, err) aw.MustNoErr(t, err)
return connector return connector
} }

View File

@ -5,18 +5,18 @@ import (
"testing" "testing"
"time" "time"
"github.com/pkg/errors"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/discovery/api" "github.com/alcionai/corso/src/internal/connector/discovery/api"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup" "github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
@ -126,7 +126,7 @@ func (suite *GraphConnectorUnitSuite) TestUnionSiteIDsAndWebURLs() {
defer flush() defer flush()
result, err := gc.UnionSiteIDsAndWebURLs(ctx, test.ids, test.urls, fault.New(true)) result, err := gc.UnionSiteIDsAndWebURLs(ctx, test.ids, test.urls, fault.New(true))
assert.NoError(t, err) aw.NoErr(t, err)
assert.ElementsMatch(t, test.expect, result) assert.ElementsMatch(t, test.expect, result)
}) })
} }
@ -181,7 +181,7 @@ func (suite *GraphConnectorIntegrationSuite) TestSetTenantUsers() {
defer flush() defer flush()
owners, err := api.NewClient(suite.connector.credentials) owners, err := api.NewClient(suite.connector.credentials)
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
newConnector.Owners = owners newConnector.Owners = owners
suite.Empty(len(newConnector.Users)) suite.Empty(len(newConnector.Users))
@ -208,13 +208,13 @@ func (suite *GraphConnectorIntegrationSuite) TestSetTenantSites() {
t := suite.T() t := suite.T()
service, err := newConnector.createService() service, err := newConnector.createService()
require.NoError(t, err) aw.MustNoErr(t, err)
newConnector.Service = service newConnector.Service = service
assert.Equal(t, 0, len(newConnector.Sites)) assert.Equal(t, 0, len(newConnector.Sites))
err = newConnector.setTenantSites(ctx, fault.New(true)) err = newConnector.setTenantSites(ctx, fault.New(true))
assert.NoError(t, err) aw.NoErr(t, err)
assert.Less(t, 0, len(newConnector.Sites)) assert.Less(t, 0, len(newConnector.Sites))
for _, site := range newConnector.Sites { for _, site := range newConnector.Sites {
@ -247,13 +247,13 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreFailsBadService() {
}, },
nil, nil,
fault.New(true)) fault.New(true))
assert.Error(t, err) aw.Err(t, err)
assert.NotNil(t, deets) assert.NotNil(t, deets)
status := suite.connector.AwaitStatus() status := suite.connector.AwaitStatus()
assert.Equal(t, 0, status.ObjectCount) assert.Equal(t, 0, status.Metrics.Objects)
assert.Equal(t, 0, status.FolderCount) assert.Equal(t, 0, status.Folders)
assert.Equal(t, 0, status.Successful) assert.Equal(t, 0, status.Metrics.Successes)
} }
func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() { func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
@ -324,13 +324,13 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
}, },
test.col, test.col,
fault.New(true)) fault.New(true))
require.NoError(t, err) aw.MustNoErr(t, err)
assert.NotNil(t, deets) assert.NotNil(t, deets)
stats := suite.connector.AwaitStatus() stats := suite.connector.AwaitStatus()
assert.Zero(t, stats.ObjectCount) assert.Zero(t, stats.Metrics.Objects)
assert.Zero(t, stats.FolderCount) assert.Zero(t, stats.Folders)
assert.Zero(t, stats.Successful) assert.Zero(t, stats.Metrics.Successes)
}) })
} }
} }
@ -349,19 +349,15 @@ func mustGetDefaultDriveID(
//revive:enable:context-as-argument //revive:enable:context-as-argument
d, err := service.Client().UsersById(userID).Drive().Get(ctx, nil) d, err := service.Client().UsersById(userID).Drive().Get(ctx, nil)
if err != nil { if err != nil {
err = errors.Wrapf( err = graph.Wrap(ctx, err, "retrieving drive")
err,
"failed to retrieve default user drive. user: %s, details: %s",
userID,
support.ConnectorStackErrorTrace(err),
)
} }
require.NoError(t, err) aw.MustNoErr(t, err)
require.NotNil(t, d.GetId())
require.NotEmpty(t, *d.GetId())
return *d.GetId() id := ptr.Val(d.GetId())
require.NotEmpty(t, id)
return id
} }
func getCollectionsAndExpected( func getCollectionsAndExpected(
@ -426,16 +422,14 @@ func runRestore(
config.opts, config.opts,
collections, collections,
fault.New(true)) fault.New(true))
require.NoError(t, err) aw.MustNoErr(t, err)
assert.NotNil(t, deets) assert.NotNil(t, deets)
status := restoreGC.AwaitStatus() status := restoreGC.AwaitStatus()
runTime := time.Since(start) runTime := time.Since(start)
assert.NoError(t, status.Err, "restored status.Err") assert.Equal(t, numRestoreItems, status.Metrics.Objects, "restored status.Metrics.Objects")
assert.Zero(t, status.ErrorCount, "restored status.ErrorCount") assert.Equal(t, numRestoreItems, status.Metrics.Successes, "restored status.Metrics.Successes")
assert.Equal(t, numRestoreItems, status.ObjectCount, "restored status.ObjectCount")
assert.Equal(t, numRestoreItems, status.Successful, "restored status.Successful")
assert.Len( assert.Len(
t, t,
deets.Entries, deets.Entries,
@ -485,7 +479,7 @@ func runBackupAndCompare(
ToggleFeatures: control.Toggles{EnablePermissionsBackup: true}, ToggleFeatures: control.Toggles{EnablePermissionsBackup: true},
}, },
fault.New(true)) fault.New(true))
require.NoError(t, err) aw.MustNoErr(t, err)
// No excludes yet because this isn't an incremental backup. // No excludes yet because this isn't an incremental backup.
assert.Empty(t, excludes) assert.Empty(t, excludes)
@ -497,12 +491,10 @@ func runBackupAndCompare(
status := backupGC.AwaitStatus() status := backupGC.AwaitStatus()
assert.NoError(t, status.Err, "backup status.Err") assert.Equalf(t, totalItems+skipped, status.Metrics.Objects,
assert.Zero(t, status.ErrorCount, "backup status.ErrorCount") "backup status.Metrics.Objects; wanted %d items + %d skipped", totalItems, skipped)
assert.Equalf(t, totalItems+skipped, status.ObjectCount, assert.Equalf(t, totalItems+skipped, status.Metrics.Successes,
"backup status.ObjectCount; wanted %d items + %d skipped", totalItems, skipped) "backup status.Metrics.Successes; wanted %d items + %d skipped", totalItems, skipped)
assert.Equalf(t, totalItems+skipped, status.Successful,
"backup status.Successful; wanted %d items + %d skipped", totalItems, skipped)
} }
func runRestoreBackupTest( func runRestoreBackupTest(
@ -961,13 +953,13 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
}, },
collections, collections,
fault.New(true)) fault.New(true))
require.NoError(t, err) aw.MustNoErr(t, err)
require.NotNil(t, deets) require.NotNil(t, deets)
status := restoreGC.AwaitStatus() status := restoreGC.AwaitStatus()
// Always just 1 because it's just 1 collection. // Always just 1 because it's just 1 collection.
assert.Equal(t, totalItems, status.ObjectCount, "status.ObjectCount") assert.Equal(t, totalItems, status.Metrics.Objects, "status.Metrics.Objects")
assert.Equal(t, totalItems, status.Successful, "status.Successful") assert.Equal(t, totalItems, status.Metrics.Successes, "status.Metrics.Successes")
assert.Equal( assert.Equal(
t, totalItems, len(deets.Entries), t, totalItems, len(deets.Entries),
"details entries contains same item count as total successful items restored") "details entries contains same item count as total successful items restored")
@ -990,7 +982,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
ToggleFeatures: control.Toggles{EnablePermissionsBackup: true}, ToggleFeatures: control.Toggles{EnablePermissionsBackup: true},
}, },
fault.New(true)) fault.New(true))
require.NoError(t, err) aw.MustNoErr(t, err)
// No excludes yet because this isn't an incremental backup. // No excludes yet because this isn't an incremental backup.
assert.Empty(t, excludes) assert.Empty(t, excludes)
@ -1001,8 +993,8 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
skipped := checkCollections(t, ctx, allItems, allExpectedData, dcs, true) skipped := checkCollections(t, ctx, allItems, allExpectedData, dcs, true)
status := backupGC.AwaitStatus() status := backupGC.AwaitStatus()
assert.Equal(t, allItems+skipped, status.ObjectCount, "status.ObjectCount") assert.Equal(t, allItems+skipped, status.Metrics.Objects, "status.Metrics.Objects")
assert.Equal(t, allItems+skipped, status.Successful, "status.Successful") assert.Equal(t, allItems+skipped, status.Metrics.Successes, "status.Metrics.Successes")
}) })
} }
} }

View File

@ -111,7 +111,7 @@ func (medc MockExchangeDataCollection) DoNotMergeItems() bool { return med
// channel is closed when there are no more items available. // channel is closed when there are no more items available.
func (medc *MockExchangeDataCollection) Items( func (medc *MockExchangeDataCollection) Items(
ctx context.Context, ctx context.Context,
_ *fault.Errors, // unused _ *fault.Bus, // unused
) <-chan data.Stream { ) <-chan data.Stream {
res := make(chan data.Stream) res := make(chan data.Stream)

View File

@ -15,6 +15,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
) )
@ -35,7 +36,7 @@ func (suite *MockExchangeCollectionSuite) TestMockExchangeCollection() {
for item := range mdc.Items(ctx, fault.New(true)) { for item := range mdc.Items(ctx, fault.New(true)) {
_, err := io.ReadAll(item.ToReader()) _, err := io.ReadAll(item.ToReader())
assert.NoError(suite.T(), err) aw.NoErr(suite.T(), err)
messagesRead++ messagesRead++
} }
@ -52,7 +53,7 @@ func (suite *MockExchangeCollectionSuite) TestMockExchangeCollectionItemSize() {
for item := range mdc.Items(ctx, fault.New(true)) { for item := range mdc.Items(ctx, fault.New(true)) {
buf, err := io.ReadAll(item.ToReader()) buf, err := io.ReadAll(item.ToReader())
assert.NoError(t, err) aw.NoErr(t, err)
assert.Implements(t, (*data.StreamSize)(nil), item) assert.Implements(t, (*data.StreamSize)(nil), item)
s := item.(data.StreamSize) s := item.(data.StreamSize)
@ -72,11 +73,11 @@ func (suite *MockExchangeCollectionSuite) TestMockExchangeCollection_NewExchange
for stream := range mdc.Items(ctx, fault.New(true)) { for stream := range mdc.Items(ctx, fault.New(true)) {
_, err := buf.ReadFrom(stream.ToReader()) _, err := buf.ReadFrom(stream.ToReader())
assert.NoError(t, err) aw.NoErr(t, err)
byteArray := buf.Bytes() byteArray := buf.Bytes()
something, err := support.CreateFromBytes(byteArray, models.CreateMessageFromDiscriminatorValue) something, err := support.CreateFromBytes(byteArray, models.CreateMessageFromDiscriminatorValue)
assert.NoError(t, err) aw.NoErr(t, err)
assert.NotNil(t, something) assert.NotNil(t, something)
} }
} }
@ -104,7 +105,7 @@ func (suite *MockExchangeDataSuite) TestMockExchangeData() {
ID: id, ID: id,
Reader: io.NopCloser(bytes.NewReader(itemData)), Reader: io.NopCloser(bytes.NewReader(itemData)),
}, },
check: require.NoError, check: aw.MustNoErr,
}, },
{ {
name: "Error", name: "Error",
@ -112,7 +113,7 @@ func (suite *MockExchangeDataSuite) TestMockExchangeData() {
ID: id, ID: id,
ReadErr: assert.AnError, ReadErr: assert.AnError,
}, },
check: require.Error, check: aw.MustErr,
}, },
} }
@ -192,10 +193,10 @@ func (suite *MockExchangeDataSuite) TestMockByteHydration() {
temp := mockconnector.GetMockList(subject, "Artist", emptyMap) temp := mockconnector.GetMockList(subject, "Artist", emptyMap)
writer := kioser.NewJsonSerializationWriter() writer := kioser.NewJsonSerializationWriter()
err := writer.WriteObjectValue("", temp) err := writer.WriteObjectValue("", temp)
require.NoError(t, err) aw.MustNoErr(t, err)
bytes, err := writer.GetSerializedContent() bytes, err := writer.GetSerializedContent()
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
_, err = support.CreateListFromBytes(bytes) _, err = support.CreateListFromBytes(bytes)
@ -206,7 +207,7 @@ func (suite *MockExchangeDataSuite) TestMockByteHydration() {
name: "SharePoint: List 6 Items", name: "SharePoint: List 6 Items",
transformation: func(t *testing.T) error { transformation: func(t *testing.T) error {
bytes, err := mockconnector.GetMockListBytes(subject) bytes, err := mockconnector.GetMockListBytes(subject)
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
_, err = support.CreateListFromBytes(bytes) _, err = support.CreateListFromBytes(bytes)
return err return err
}, },
@ -225,7 +226,7 @@ func (suite *MockExchangeDataSuite) TestMockByteHydration() {
for _, test := range tests { for _, test := range tests {
suite.T().Run(test.name, func(t *testing.T) { suite.T().Run(test.name, func(t *testing.T) {
err := test.transformation(t) err := test.transformation(t)
assert.NoError(t, err) aw.NoErr(t, err)
}) })
} }
} }

View File

@ -8,9 +8,9 @@ import (
kw "github.com/microsoft/kiota-serialization-json-go" kw "github.com/microsoft/kiota-serialization-json-go"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
) )
@ -48,7 +48,7 @@ func (mlc *MockListCollection) PreviousPath() path.Path {
func (mlc *MockListCollection) Items( func (mlc *MockListCollection) Items(
ctx context.Context, ctx context.Context,
_ *fault.Errors, // unused _ *fault.Bus, // unused
) <-chan data.Stream { ) <-chan data.Stream {
res := make(chan data.Stream) res := make(chan data.Stream)
@ -159,7 +159,7 @@ func GetMockListBytes(title string) ([]byte, error) {
// of the Mocked SharePoint List // of the Mocked SharePoint List
func GetMockListStream(t *testing.T, title string, numOfItems int) *MockListData { func GetMockListStream(t *testing.T, title string, numOfItems int) *MockListData {
byteArray, err := GetMockListBytes(title) byteArray, err := GetMockListBytes(title)
require.NoError(t, err) aw.MustNoErr(t, err)
listData := &MockListData{ listData := &MockListData{
ID: title, ID: title,

View File

@ -9,9 +9,9 @@ import (
js "github.com/microsoft/kiota-serialization-json-go" js "github.com/microsoft/kiota-serialization-json-go"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/tester/aw"
) )
//nolint:lll //nolint:lll
@ -693,10 +693,10 @@ func GetMockMessageWithNestedItemAttachmentEvent(subject string) []byte {
func GetMockMessageWithNestedItemAttachmentMail(t *testing.T, nested []byte, subject string) []byte { func GetMockMessageWithNestedItemAttachmentMail(t *testing.T, nested []byte, subject string) []byte {
base := GetMockMessageBytes(subject) base := GetMockMessageBytes(subject)
message, err := hydrateMessage(base) message, err := hydrateMessage(base)
require.NoError(t, err) aw.MustNoErr(t, err)
nestedMessage, err := hydrateMessage(nested) nestedMessage, err := hydrateMessage(nested)
require.NoError(t, err) aw.MustNoErr(t, err)
iaNode := models.NewItemAttachment() iaNode := models.NewItemAttachment()
attachmentSize := int32(len(nested)) attachmentSize := int32(len(nested))
@ -713,13 +713,13 @@ func GetMockMessageWithNestedItemAttachmentMail(t *testing.T, nested []byte, sub
func GetMockMessageWithNestedItemAttachmentContact(t *testing.T, nested []byte, subject string) []byte { func GetMockMessageWithNestedItemAttachmentContact(t *testing.T, nested []byte, subject string) []byte {
base := GetMockMessageBytes(subject) base := GetMockMessageBytes(subject)
message, err := hydrateMessage(base) message, err := hydrateMessage(base)
require.NoError(t, err) aw.MustNoErr(t, err)
parseNode, err := js.NewJsonParseNodeFactory().GetRootParseNode("application/json", nested) parseNode, err := js.NewJsonParseNodeFactory().GetRootParseNode("application/json", nested)
require.NoError(t, err) aw.MustNoErr(t, err)
anObject, err := parseNode.GetObjectValue(models.CreateContactFromDiscriminatorValue) anObject, err := parseNode.GetObjectValue(models.CreateContactFromDiscriminatorValue)
require.NoError(t, err) aw.MustNoErr(t, err)
contact := anObject.(models.Contactable) contact := anObject.(models.Contactable)
internalName := "Nested Contact" internalName := "Nested Contact"
@ -736,10 +736,10 @@ func GetMockMessageWithNestedItemAttachmentContact(t *testing.T, nested []byte,
func serialize(t *testing.T, item absser.Parsable) []byte { func serialize(t *testing.T, item absser.Parsable) []byte {
wtr := js.NewJsonSerializationWriter() wtr := js.NewJsonSerializationWriter()
err := wtr.WriteObjectValue("", item) err := wtr.WriteObjectValue("", item)
require.NoError(t, err) aw.MustNoErr(t, err)
byteArray, err := wtr.GetSerializedContent() byteArray, err := wtr.GetSerializedContent()
require.NoError(t, err) aw.MustNoErr(t, err)
return byteArray return byteArray
} }

View File

@ -2,12 +2,13 @@ package api
import ( import (
"context" "context"
"fmt"
"github.com/alcionai/clues"
msdrives "github.com/microsoftgraph/msgraph-sdk-go/drives" msdrives "github.com/microsoftgraph/msgraph-sdk-go/drives"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
mssites "github.com/microsoftgraph/msgraph-sdk-go/sites" mssites "github.com/microsoftgraph/msgraph-sdk-go/sites"
msusers "github.com/microsoftgraph/msgraph-sdk-go/users" msusers "github.com/microsoftgraph/msgraph-sdk-go/users"
"github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/graph/api" "github.com/alcionai/corso/src/internal/connector/graph/api"
@ -16,10 +17,7 @@ import (
func getValues[T any](l api.PageLinker) ([]T, error) { func getValues[T any](l api.PageLinker) ([]T, error) {
page, ok := l.(interface{ GetValue() []T }) page, ok := l.(interface{ GetValue() []T })
if !ok { if !ok {
return nil, errors.Errorf( return nil, clues.New("page does not comply with GetValue() interface").With("page_item_type", fmt.Sprintf("%T", l))
"response of type [%T] does not comply with GetValue() interface",
l,
)
} }
return page.GetValue(), nil return page.GetValue(), nil
@ -69,8 +67,11 @@ func (p *driveItemPager) GetPage(ctx context.Context) (api.DeltaPageLinker, erro
) )
resp, err = p.builder.Get(ctx, p.options) resp, err = p.builder.Get(ctx, p.options)
if err != nil {
return nil, graph.Stack(ctx, err)
}
return resp, err return resp, nil
} }
func (p *driveItemPager) SetNext(link string) { func (p *driveItemPager) SetNext(link string) {
@ -163,8 +164,11 @@ func (p *siteDrivePager) GetPage(ctx context.Context) (api.PageLinker, error) {
) )
resp, err = p.builder.Get(ctx, p.options) resp, err = p.builder.Get(ctx, p.options)
if err != nil {
return nil, graph.Stack(ctx, err)
}
return resp, err return resp, nil
} }
func (p *siteDrivePager) SetNext(link string) { func (p *siteDrivePager) SetNext(link string) {

View File

@ -10,10 +10,12 @@ import (
"sync/atomic" "sync/atomic"
"time" "time"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/spatialcurrent/go-lazy/pkg/lazy" "github.com/spatialcurrent/go-lazy/pkg/lazy"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
@ -80,9 +82,10 @@ type Collection struct {
// itemReadFunc returns a reader for the specified item // itemReadFunc returns a reader for the specified item
type itemReaderFunc func( type itemReaderFunc func(
ctx context.Context,
hc *http.Client, hc *http.Client,
item models.DriveItemable, item models.DriveItemable,
) (itemInfo details.ItemInfo, itemData io.ReadCloser, err error) ) (details.ItemInfo, io.ReadCloser, error)
// itemMetaReaderFunc returns a reader for the metadata of the // itemMetaReaderFunc returns a reader for the metadata of the
// specified item // specified item
@ -141,9 +144,9 @@ func (oc *Collection) Add(item models.DriveItemable) {
// Items() returns the channel containing M365 Exchange objects // Items() returns the channel containing M365 Exchange objects
func (oc *Collection) Items( func (oc *Collection) Items(
ctx context.Context, ctx context.Context,
errs *fault.Errors, // TODO: currently unused while onedrive isn't up to date with clues/fault errs *fault.Bus, // TODO: currently unused while onedrive isn't up to date with clues/fault
) <-chan data.Stream { ) <-chan data.Stream {
go oc.populateItems(ctx) go oc.populateItems(ctx, errs)
return oc.data return oc.data
} }
@ -216,23 +219,22 @@ func (od *Item) ModTime() time.Time {
// populateItems iterates through items added to the collection // populateItems iterates through items added to the collection
// and uses the collection `itemReader` to read the item // and uses the collection `itemReader` to read the item
func (oc *Collection) populateItems(ctx context.Context) { func (oc *Collection) populateItems(ctx context.Context, errs *fault.Bus) {
var ( var (
errs error
byteCount int64 byteCount int64
itemsRead int64 itemsRead int64
dirsRead int64 dirsRead int64
itemsFound int64 itemsFound int64
dirsFound int64 dirsFound int64
wg sync.WaitGroup wg sync.WaitGroup
m sync.Mutex el = errs.Local()
) )
// Retrieve the OneDrive folder path to set later in // Retrieve the OneDrive folder path to set later in
// `details.OneDriveInfo` // `details.OneDriveInfo`
parentPathString, err := path.GetDriveFolderPath(oc.folderPath) parentPathString, err := path.GetDriveFolderPath(oc.folderPath)
if err != nil { if err != nil {
oc.reportAsCompleted(ctx, 0, 0, 0, err) oc.reportAsCompleted(ctx, 0, 0, 0)
return return
} }
@ -247,14 +249,8 @@ func (oc *Collection) populateItems(ctx context.Context) {
semaphoreCh := make(chan struct{}, urlPrefetchChannelBufferSize) semaphoreCh := make(chan struct{}, urlPrefetchChannelBufferSize)
defer close(semaphoreCh) defer close(semaphoreCh)
errUpdater := func(id string, err error) {
m.Lock()
errs = support.WrapAndAppend(id, err, errs)
m.Unlock()
}
for _, item := range oc.driveItems { for _, item := range oc.driveItems {
if oc.ctrl.FailFast && errs != nil { if el.Failure() != nil {
break break
} }
@ -262,22 +258,27 @@ func (oc *Collection) populateItems(ctx context.Context) {
wg.Add(1) wg.Add(1)
go func(item models.DriveItemable) { go func(ctx context.Context, item models.DriveItemable) {
defer wg.Done() defer wg.Done()
defer func() { <-semaphoreCh }() defer func() { <-semaphoreCh }()
// Read the item // Read the item
var ( var (
itemID = *item.GetId() itemID = ptr.Val(item.GetId())
itemName = *item.GetName() itemName = ptr.Val(item.GetName())
itemSize = *item.GetSize() itemSize = ptr.Val(item.GetSize())
itemInfo details.ItemInfo itemInfo details.ItemInfo
itemMeta io.ReadCloser itemMeta io.ReadCloser
itemMetaSize int itemMetaSize int
metaSuffix string metaSuffix string
err error
) )
ctx = clues.Add(ctx,
"restore_item_id", itemID,
"restore_item_name", itemName,
"restore_item_size", itemSize,
"restore_item_info", itemInfo)
isFile := item.GetFile() != nil isFile := item.GetFile() != nil
if isFile { if isFile {
@ -301,9 +302,8 @@ func (oc *Collection) populateItems(ctx context.Context) {
itemMetaSize = 2 itemMetaSize = 2
} else { } else {
itemMeta, itemMetaSize, err = oc.itemMetaReader(ctx, oc.service, oc.driveID, item) itemMeta, itemMetaSize, err = oc.itemMetaReader(ctx, oc.service, oc.driveID, item)
if err != nil { if err != nil {
errUpdater(*item.GetId(), errors.Wrap(err, "failed to get item permissions")) el.AddRecoverable(clues.Wrap(err, "getting item permissions"))
return return
} }
} }
@ -335,7 +335,7 @@ func (oc *Collection) populateItems(ctx context.Context) {
err error err error
) )
_, itemData, err = oc.itemReader(oc.itemClient, item) _, itemData, err = oc.itemReader(ctx, oc.itemClient, item)
if err != nil && graph.IsErrUnauthorized(err) { if err != nil && graph.IsErrUnauthorized(err) {
// assume unauthorized requests are a sign of an expired // assume unauthorized requests are a sign of an expired
@ -351,7 +351,7 @@ func (oc *Collection) populateItems(ctx context.Context) {
// check for errors following retries // check for errors following retries
if err != nil { if err != nil {
errUpdater(itemID, err) el.AddRecoverable(clues.Stack(err).WithClues(ctx))
return nil, err return nil, err
} }
@ -361,8 +361,7 @@ func (oc *Collection) populateItems(ctx context.Context) {
itemData, itemData,
observe.ItemBackupMsg, observe.ItemBackupMsg,
observe.PII(itemName+dataSuffix), observe.PII(itemName+dataSuffix),
itemSize, itemSize)
)
go closer() go closer()
return progReader, nil return progReader, nil
@ -419,27 +418,27 @@ func (oc *Collection) populateItems(ctx context.Context) {
atomic.AddInt64(&byteCount, itemSize) atomic.AddInt64(&byteCount, itemSize)
folderProgress <- struct{}{} folderProgress <- struct{}{}
}(item) }(ctx, item)
} }
wg.Wait() wg.Wait()
oc.reportAsCompleted(ctx, int(itemsFound), int(itemsRead), byteCount, errs) oc.reportAsCompleted(ctx, int(itemsFound), int(itemsRead), byteCount)
} }
func (oc *Collection) reportAsCompleted(ctx context.Context, itemsFound, itemsRead int, byteCount int64, errs error) { func (oc *Collection) reportAsCompleted(ctx context.Context, itemsFound, itemsRead int, byteCount int64) {
close(oc.data) close(oc.data)
status := support.CreateStatus(ctx, support.Backup, status := support.CreateStatus(ctx, support.Backup,
1, // num folders (always 1) 1, // num folders (always 1)
support.CollectionMetrics{ support.CollectionMetrics{
Objects: itemsFound, // items to read, Objects: itemsFound,
Successes: itemsRead, // items read successfully, Successes: itemsRead,
TotalBytes: byteCount, // Number of bytes read in the operation, Bytes: byteCount,
}, },
errs, oc.folderPath.Folder(false))
oc.folderPath.Folder(false), // Additional details
)
logger.Ctx(ctx).Debugw("done streaming items", "status", status.String()) logger.Ctx(ctx).Debugw("done streaming items", "status", status.String())
oc.statusUpdater(status) oc.statusUpdater(status)
} }

View File

@ -21,6 +21,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
@ -52,7 +53,7 @@ func (suite *CollectionUnitTestSuite) testStatusUpdater(
statusToUpdate *support.ConnectorOperationStatus, statusToUpdate *support.ConnectorOperationStatus,
) support.StatusUpdater { ) support.StatusUpdater {
return func(s *support.ConnectorOperationStatus) { return func(s *support.ConnectorOperationStatus) {
suite.T().Logf("Update status %v, count %d, success %d", s, s.ObjectCount, s.Successful) suite.T().Logf("Update status %v, count %d, success %d", s, s.Metrics.Objects, s.Metrics.Successes)
*statusToUpdate = *s *statusToUpdate = *s
wg.Done() wg.Done()
@ -94,7 +95,7 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
numInstances: 1, numInstances: 1,
source: OneDriveSource, source: OneDriveSource,
itemDeets: nst{testItemName, 42, now}, itemDeets: nst{testItemName, 42, now},
itemReader: func(*http.Client, models.DriveItemable) (details.ItemInfo, io.ReadCloser, error) { itemReader: func(context.Context, *http.Client, models.DriveItemable) (details.ItemInfo, io.ReadCloser, error) {
return details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: testItemName, Modified: now}}, return details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: testItemName, Modified: now}},
io.NopCloser(bytes.NewReader(testItemData)), io.NopCloser(bytes.NewReader(testItemData)),
nil nil
@ -109,7 +110,7 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
numInstances: 3, numInstances: 3,
source: OneDriveSource, source: OneDriveSource,
itemDeets: nst{testItemName, 42, now}, itemDeets: nst{testItemName, 42, now},
itemReader: func(*http.Client, models.DriveItemable) (details.ItemInfo, io.ReadCloser, error) { itemReader: func(context.Context, *http.Client, models.DriveItemable) (details.ItemInfo, io.ReadCloser, error) {
return details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: testItemName, Modified: now}}, return details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: testItemName, Modified: now}},
io.NopCloser(bytes.NewReader(testItemData)), io.NopCloser(bytes.NewReader(testItemData)),
nil nil
@ -124,7 +125,7 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
numInstances: 1, numInstances: 1,
source: SharePointSource, source: SharePointSource,
itemDeets: nst{testItemName, 42, now}, itemDeets: nst{testItemName, 42, now},
itemReader: func(*http.Client, models.DriveItemable) (details.ItemInfo, io.ReadCloser, error) { itemReader: func(context.Context, *http.Client, models.DriveItemable) (details.ItemInfo, io.ReadCloser, error) {
return details.ItemInfo{SharePoint: &details.SharePointInfo{ItemName: testItemName, Modified: now}}, return details.ItemInfo{SharePoint: &details.SharePointInfo{ItemName: testItemName, Modified: now}},
io.NopCloser(bytes.NewReader(testItemData)), io.NopCloser(bytes.NewReader(testItemData)),
nil nil
@ -139,7 +140,7 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
numInstances: 3, numInstances: 3,
source: SharePointSource, source: SharePointSource,
itemDeets: nst{testItemName, 42, now}, itemDeets: nst{testItemName, 42, now},
itemReader: func(*http.Client, models.DriveItemable) (details.ItemInfo, io.ReadCloser, error) { itemReader: func(context.Context, *http.Client, models.DriveItemable) (details.ItemInfo, io.ReadCloser, error) {
return details.ItemInfo{SharePoint: &details.SharePointInfo{ItemName: testItemName, Modified: now}}, return details.ItemInfo{SharePoint: &details.SharePointInfo{ItemName: testItemName, Modified: now}},
io.NopCloser(bytes.NewReader(testItemData)), io.NopCloser(bytes.NewReader(testItemData)),
nil nil
@ -162,9 +163,9 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
) )
folderPath, err := GetCanonicalPath("drive/driveID1/root:/dir1/dir2/dir3", "tenant", "owner", test.source) folderPath, err := GetCanonicalPath("drive/driveID1/root:/dir1/dir2/dir3", "tenant", "owner", test.source)
require.NoError(t, err) aw.MustNoErr(t, err)
driveFolderPath, err := path.GetDriveFolderPath(folderPath) driveFolderPath, err := path.GetDriveFolderPath(folderPath)
require.NoError(t, err) aw.MustNoErr(t, err)
coll := NewCollection( coll := NewCollection(
graph.HTTPClient(graph.NoTimeout()), graph.HTTPClient(graph.NoTimeout()),
@ -222,8 +223,8 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
} }
// Expect only 1 item // Expect only 1 item
require.Equal(t, 1, collStatus.ObjectCount) require.Equal(t, 1, collStatus.Metrics.Objects)
require.Equal(t, 1, collStatus.Successful) require.Equal(t, 1, collStatus.Metrics.Successes)
// Validate item info and data // Validate item info and data
readItem := readItems[0] readItem := readItems[0]
@ -240,7 +241,7 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
assert.Equal(t, now, mt.ModTime()) assert.Equal(t, now, mt.ModTime())
readData, err := io.ReadAll(readItem.ToReader()) readData, err := io.ReadAll(readItem.ToReader())
require.NoError(t, err) aw.MustNoErr(t, err)
name, parentPath := test.infoFrom(t, readItemInfo.Info()) name, parentPath := test.infoFrom(t, readItemInfo.Info())
@ -254,7 +255,7 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
assert.Equal(t, testItemName+MetaFileSuffix, readItemMeta.UUID()) assert.Equal(t, testItemName+MetaFileSuffix, readItemMeta.UUID())
readMetaData, err := io.ReadAll(readItemMeta.ToReader()) readMetaData, err := io.ReadAll(readItemMeta.ToReader())
require.NoError(t, err) aw.MustNoErr(t, err)
tm, err := json.Marshal(testItemMeta) tm, err := json.Marshal(testItemMeta)
if err != nil { if err != nil {
@ -301,7 +302,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadError() {
wg.Add(1) wg.Add(1)
folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source) folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source)
require.NoError(t, err) aw.MustNoErr(t, err)
coll := NewCollection( coll := NewCollection(
graph.HTTPClient(graph.NoTimeout()), graph.HTTPClient(graph.NoTimeout()),
@ -323,7 +324,11 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadError() {
mockItem.SetLastModifiedDateTime(&now) mockItem.SetLastModifiedDateTime(&now)
coll.Add(mockItem) coll.Add(mockItem)
coll.itemReader = func(*http.Client, models.DriveItemable) (details.ItemInfo, io.ReadCloser, error) { coll.itemReader = func(
context.Context,
*http.Client,
models.DriveItemable,
) (details.ItemInfo, io.ReadCloser, error) {
return details.ItemInfo{}, nil, assert.AnError return details.ItemInfo{}, nil, assert.AnError
} }
@ -339,13 +344,13 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadError() {
assert.True(t, ok) assert.True(t, ok)
_, err = io.ReadAll(collItem.ToReader()) _, err = io.ReadAll(collItem.ToReader())
assert.Error(t, err) aw.Err(t, err)
wg.Wait() wg.Wait()
// Expect no items // Expect no items
require.Equal(t, 1, collStatus.ObjectCount, "only one object should be counted") require.Equal(t, 1, collStatus.Metrics.Objects, "only one object should be counted")
require.Equal(t, 1, collStatus.Successful, "TODO: should be 0, but allowing 1 to reduce async management") require.Equal(t, 1, collStatus.Metrics.Successes, "TODO: should be 0, but allowing 1 to reduce async management")
}) })
} }
} }
@ -376,7 +381,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionDisablePermissionsBackup() {
wg.Add(1) wg.Add(1)
folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source) folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source)
require.NoError(t, err) aw.MustNoErr(t, err)
coll := NewCollection( coll := NewCollection(
graph.HTTPClient(graph.NoTimeout()), graph.HTTPClient(graph.NoTimeout()),
@ -400,6 +405,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionDisablePermissionsBackup() {
coll.Add(mockItem) coll.Add(mockItem)
coll.itemReader = func( coll.itemReader = func(
context.Context,
*http.Client, *http.Client,
models.DriveItemable, models.DriveItemable,
) (details.ItemInfo, io.ReadCloser, error) { ) (details.ItemInfo, io.ReadCloser, error) {
@ -424,13 +430,13 @@ func (suite *CollectionUnitTestSuite) TestCollectionDisablePermissionsBackup() {
wg.Wait() wg.Wait()
// Expect no items // Expect no items
require.Equal(t, 1, collStatus.ObjectCount) require.Equal(t, 1, collStatus.Metrics.Objects)
require.Equal(t, 1, collStatus.Successful) require.Equal(t, 1, collStatus.Metrics.Successes)
for _, i := range readItems { for _, i := range readItems {
if strings.HasSuffix(i.UUID(), MetaFileSuffix) { if strings.HasSuffix(i.UUID(), MetaFileSuffix) {
content, err := io.ReadAll(i.ToReader()) content, err := io.ReadAll(i.ToReader())
require.NoError(t, err) aw.MustNoErr(t, err)
require.Equal(t, content, []byte("{}")) require.Equal(t, content, []byte("{}"))
} }
} }
@ -466,7 +472,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionPermissionBackupLatestModTim
wg.Add(1) wg.Add(1)
folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source) folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source)
require.NoError(t, err) aw.MustNoErr(t, err)
coll := NewCollection( coll := NewCollection(
graph.HTTPClient(graph.NoTimeout()), graph.HTTPClient(graph.NoTimeout()),
@ -490,6 +496,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionPermissionBackupLatestModTim
coll.Add(mockItem) coll.Add(mockItem)
coll.itemReader = func( coll.itemReader = func(
context.Context,
*http.Client, *http.Client,
models.DriveItemable, models.DriveItemable,
) (details.ItemInfo, io.ReadCloser, error) { ) (details.ItemInfo, io.ReadCloser, error) {
@ -514,13 +521,13 @@ func (suite *CollectionUnitTestSuite) TestCollectionPermissionBackupLatestModTim
wg.Wait() wg.Wait()
// Expect no items // Expect no items
require.Equal(t, 1, collStatus.ObjectCount) require.Equal(t, 1, collStatus.Metrics.Objects)
require.Equal(t, 1, collStatus.Successful) require.Equal(t, 1, collStatus.Metrics.Successes)
for _, i := range readItems { for _, i := range readItems {
if strings.HasSuffix(i.UUID(), MetaFileSuffix) { if strings.HasSuffix(i.UUID(), MetaFileSuffix) {
content, err := io.ReadAll(i.ToReader()) content, err := io.ReadAll(i.ToReader())
require.NoError(t, err) aw.MustNoErr(t, err)
require.Equal(t, content, []byte("{}")) require.Equal(t, content, []byte("{}"))
im, ok := i.(data.StreamModTime) im, ok := i.(data.StreamModTime)
require.Equal(t, ok, true, "modtime interface") require.Equal(t, ok, true, "modtime interface")

View File

@ -13,11 +13,13 @@ import (
"github.com/pkg/errors" "github.com/pkg/errors"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
) )
@ -117,32 +119,40 @@ func NewCollections(
func deserializeMetadata( func deserializeMetadata(
ctx context.Context, ctx context.Context,
cols []data.RestoreCollection, cols []data.RestoreCollection,
errs *fault.Bus,
) (map[string]string, map[string]map[string]string, error) { ) (map[string]string, map[string]map[string]string, error) {
logger.Ctx(ctx).Infow( logger.Ctx(ctx).Infow(
"deserialzing previous backup metadata", "deserialzing previous backup metadata",
"num_collections", "num_collections", len(cols))
len(cols),
var (
prevDeltas = map[string]string{}
prevFolders = map[string]map[string]string{}
el = errs.Local()
) )
prevDeltas := map[string]string{}
prevFolders := map[string]map[string]string{}
for _, col := range cols { for _, col := range cols {
items := col.Items(ctx, nil) // TODO: fault.Errors instead of nil if el.Failure() != nil {
break
}
items := col.Items(ctx, errs)
for breakLoop := false; !breakLoop; { for breakLoop := false; !breakLoop; {
select { select {
case <-ctx.Done(): case <-ctx.Done():
return nil, nil, errors.Wrap(ctx.Err(), "deserialzing previous backup metadata") return nil, nil, clues.Wrap(ctx.Err(), "deserialzing previous backup metadata").WithClues(ctx)
case item, ok := <-items: case item, ok := <-items:
if !ok { if !ok {
// End of collection items.
breakLoop = true breakLoop = true
break break
} }
var err error var (
err error
ictx = clues.Add(ctx, "item_uuid", item.UUID())
)
switch item.UUID() { switch item.UUID() {
case graph.PreviousPathFileName: case graph.PreviousPathFileName:
@ -152,11 +162,9 @@ func deserializeMetadata(
err = deserializeMap(item.ToReader(), prevDeltas) err = deserializeMap(item.ToReader(), prevDeltas)
default: default:
logger.Ctx(ctx).Infow( logger.Ctx(ictx).Infow(
"skipping unknown metadata file", "skipping unknown metadata file",
"file_name", "file_name", item.UUID())
item.UUID(),
)
continue continue
} }
@ -173,20 +181,13 @@ func deserializeMetadata(
// we end up in a situation where we're sourcing items from the wrong // we end up in a situation where we're sourcing items from the wrong
// base in kopia wrapper. // base in kopia wrapper.
if errors.Is(err, errExistingMapping) { if errors.Is(err, errExistingMapping) {
return nil, nil, errors.Wrapf( return nil, nil, clues.Wrap(err, "deserializing metadata file").WithClues(ictx)
err,
"deserializing metadata file %s",
item.UUID(),
)
} }
logger.Ctx(ctx).Errorw( err = clues.Stack(err).WithClues(ictx)
"deserializing base backup metadata. Falling back to full backup for selected drives",
"error", el.AddRecoverable(err)
err, logger.CtxErr(ictx, err).Error("deserializing base backup metadata")
"file_name",
item.UUID(),
)
} }
} }
@ -213,10 +214,10 @@ func deserializeMetadata(
} }
} }
return prevDeltas, prevFolders, nil return prevDeltas, prevFolders, el.Failure()
} }
var errExistingMapping = errors.New("mapping already exists for same drive ID") var errExistingMapping = clues.New("mapping already exists for same drive ID")
// deserializeMap takes an reader and a map of already deserialized items and // deserializeMap takes an reader and a map of already deserialized items and
// adds the newly deserialized items to alreadyFound. Items are only added to // adds the newly deserialized items to alreadyFound. Items are only added to
@ -242,7 +243,7 @@ func deserializeMap[T any](reader io.ReadCloser, alreadyFound map[string]T) erro
} }
if duplicate { if duplicate {
return errors.WithStack(errExistingMapping) return clues.Stack(errExistingMapping)
} }
maps.Copy(alreadyFound, tmp) maps.Copy(alreadyFound, tmp)
@ -255,8 +256,9 @@ func deserializeMap[T any](reader io.ReadCloser, alreadyFound map[string]T) erro
func (c *Collections) Get( func (c *Collections) Get(
ctx context.Context, ctx context.Context,
prevMetadata []data.RestoreCollection, prevMetadata []data.RestoreCollection,
errs *fault.Bus,
) ([]data.BackupCollection, map[string]struct{}, error) { ) ([]data.BackupCollection, map[string]struct{}, error) {
prevDeltas, oldPathsByDriveID, err := deserializeMetadata(ctx, prevMetadata) prevDeltas, oldPathsByDriveID, err := deserializeMetadata(ctx, prevMetadata, errs)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }
@ -264,7 +266,7 @@ func (c *Collections) Get(
// Enumerate drives for the specified resourceOwner // Enumerate drives for the specified resourceOwner
pager, err := c.drivePagerFunc(c.source, c.service, c.resourceOwner, nil) pager, err := c.drivePagerFunc(c.source, c.service, c.resourceOwner, nil)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, graph.Stack(ctx, err)
} }
retry := c.source == OneDriveSource retry := c.source == OneDriveSource
@ -287,39 +289,33 @@ func (c *Collections) Get(
excludedItems = map[string]struct{}{} excludedItems = map[string]struct{}{}
) )
// Update the collection map with items from each drive
for _, d := range drives { for _, d := range drives {
driveID := *d.GetId() var (
driveName := *d.GetName() driveID = ptr.Val(d.GetId())
driveName = ptr.Val(d.GetName())
prevDelta = prevDeltas[driveID]
oldPaths = oldPathsByDriveID[driveID]
numOldDelta = 0
)
prevDelta := prevDeltas[driveID]
oldPaths := oldPathsByDriveID[driveID]
numOldDelta := 0
if len(prevDelta) > 0 { if len(prevDelta) > 0 {
numOldDelta++ numOldDelta++
} }
logger.Ctx(ctx).Infow( logger.Ctx(ctx).Infow(
"previous metadata for drive", "previous metadata for drive",
"num_paths_entries", "num_paths_entries", len(oldPaths),
len(oldPaths), "num_deltas_entries", numOldDelta)
"num_deltas_entries",
numOldDelta)
delta, paths, excluded, err := collectItems( delta, paths, excluded, err := collectItems(
ctx, ctx,
c.itemPagerFunc( c.itemPagerFunc(c.service, driveID, ""),
c.service,
driveID,
"",
),
driveID, driveID,
driveName, driveName,
c.UpdateCollections, c.UpdateCollections,
oldPaths, oldPaths,
prevDelta, prevDelta,
) errs)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }
@ -343,7 +339,6 @@ func (c *Collections) Get(
// token because it thinks the folder paths weren't persisted. // token because it thinks the folder paths weren't persisted.
folderPaths[driveID] = map[string]string{} folderPaths[driveID] = map[string]string{}
maps.Copy(folderPaths[driveID], paths) maps.Copy(folderPaths[driveID], paths)
maps.Copy(excludedItems, excluded) maps.Copy(excludedItems, excluded)
logger.Ctx(ctx).Infow( logger.Ctx(ctx).Infow(
@ -372,18 +367,13 @@ func (c *Collections) Get(
graph.NewMetadataEntry(graph.PreviousPathFileName, folderPaths), graph.NewMetadataEntry(graph.PreviousPathFileName, folderPaths),
graph.NewMetadataEntry(graph.DeltaURLsFileName, deltaURLs), graph.NewMetadataEntry(graph.DeltaURLsFileName, deltaURLs),
}, },
c.statusUpdater, c.statusUpdater)
)
if err != nil { if err != nil {
// Technically it's safe to continue here because the logic for starting an // Technically it's safe to continue here because the logic for starting an
// incremental backup should eventually find that the metadata files are // incremental backup should eventually find that the metadata files are
// empty/missing and default to a full backup. // empty/missing and default to a full backup.
logger.Ctx(ctx).Warnw( logger.CtxErr(ctx, err).Info("making metadata collection for future incremental backups")
"making metadata collection for future incremental backups",
"error",
err,
)
} else { } else {
collections = append(collections, metadata) collections = append(collections, metadata)
} }
@ -453,8 +443,15 @@ func (c *Collections) UpdateCollections(
newPaths map[string]string, newPaths map[string]string,
excluded map[string]struct{}, excluded map[string]struct{},
invalidPrevDelta bool, invalidPrevDelta bool,
errs *fault.Bus,
) error { ) error {
el := errs.Local()
for _, item := range items { for _, item := range items {
if el.Failure() != nil {
break
}
var ( var (
prevPath path.Path prevPath path.Path
prevCollectionPath path.Path prevCollectionPath path.Path
@ -480,25 +477,30 @@ func (c *Collections) UpdateCollections(
continue continue
} }
var (
itemID = ptr.Val(item.GetId())
ictx = clues.Add(ctx, "update_item_id", itemID)
)
if item.GetParentReference() == nil || if item.GetParentReference() == nil ||
item.GetParentReference().GetId() == nil || item.GetParentReference().GetId() == nil ||
(item.GetDeleted() == nil && item.GetParentReference().GetPath() == nil) { (item.GetDeleted() == nil && item.GetParentReference().GetPath() == nil) {
err := clues.New("no parent reference").With("item_id", *item.GetId()) el.AddRecoverable(clues.New("item missing parent reference").
if item.GetName() != nil { WithClues(ictx).
err = err.With("item_name", *item.GetName()) With("item_id", itemID, "item_name", ptr.Val(item.GetName())))
}
return err continue
} }
// Create a collection for the parent of this item // Create a collection for the parent of this item
collectionID := *item.GetParentReference().GetId() collectionID := ptr.Val(item.GetParentReference().GetId())
ictx = clues.Add(ictx, "collection_id", collectionID)
var collectionPathStr string var collectionPathStr string
if item.GetDeleted() == nil { if item.GetDeleted() == nil {
collectionPathStr = *item.GetParentReference().GetPath() collectionPathStr = ptr.Val(item.GetParentReference().GetPath())
} else { } else {
collectionPathStr, ok = oldPaths[*item.GetParentReference().GetId()] collectionPathStr, ok = oldPaths[ptr.Val(item.GetParentReference().GetId())]
if !ok { if !ok {
// This collection was created and destroyed in // This collection was created and destroyed in
// between the current and previous invocation // between the current and previous invocation
@ -510,25 +512,26 @@ func (c *Collections) UpdateCollections(
collectionPathStr, collectionPathStr,
c.tenant, c.tenant,
c.resourceOwner, c.resourceOwner,
c.source, c.source)
)
if err != nil { if err != nil {
return err return clues.Stack(err).WithClues(ictx)
} }
// Skip items that don't match the folder selectors we were given. // Skip items that don't match the folder selectors we were given.
if shouldSkipDrive(ctx, collectionPath, c.matcher, driveName) { if shouldSkipDrive(ictx, collectionPath, c.matcher, driveName) {
logger.Ctx(ctx).Infof("Skipping path %s", collectionPath.String()) logger.Ctx(ictx).Infow("Skipping path", "skipped_path", collectionPath.String())
continue continue
} }
switch { switch {
case item.GetFolder() != nil, item.GetPackage() != nil: case item.GetFolder() != nil, item.GetPackage() != nil:
prevPathStr, ok := oldPaths[*item.GetId()] prevPathStr, ok := oldPaths[itemID]
if ok { if ok {
prevPath, err = path.FromDataLayerPath(prevPathStr, false) prevPath, err = path.FromDataLayerPath(prevPathStr, false)
if err != nil { if err != nil {
return clues.Wrap(err, "invalid previous path").With("path_string", prevPathStr) el.AddRecoverable(clues.Wrap(err, "invalid previous path").
WithClues(ictx).
With("path_string", prevPathStr))
} }
} }
@ -536,7 +539,7 @@ func (c *Collections) UpdateCollections(
// Nested folders also return deleted delta results so we don't have to // Nested folders also return deleted delta results so we don't have to
// worry about doing a prefix search in the map to remove the subtree of // worry about doing a prefix search in the map to remove the subtree of
// the deleted folder/package. // the deleted folder/package.
delete(newPaths, *item.GetId()) delete(newPaths, itemID)
if prevPath == nil { if prevPath == nil {
// It is possible that an item was created and // It is possible that an item was created and
@ -555,10 +558,9 @@ func (c *Collections) UpdateCollections(
c.statusUpdater, c.statusUpdater,
c.source, c.source,
c.ctrl, c.ctrl,
invalidPrevDelta, invalidPrevDelta)
)
c.CollectionMap[*item.GetId()] = col c.CollectionMap[itemID] = col
break break
} }
@ -568,14 +570,16 @@ func (c *Collections) UpdateCollections(
// parentRef or such. // parentRef or such.
folderPath, err := collectionPath.Append(*item.GetName(), false) folderPath, err := collectionPath.Append(*item.GetName(), false)
if err != nil { if err != nil {
logger.Ctx(ctx).Errorw("failed building collection path", "error", err) logger.Ctx(ictx).Errorw("building collection path", "error", err)
return err el.AddRecoverable(clues.Stack(err).WithClues(ictx))
continue
} }
// Moved folders don't cause delta results for any subfolders nested in // Moved folders don't cause delta results for any subfolders nested in
// them. We need to go through and update paths to handle that. We only // them. We need to go through and update paths to handle that. We only
// update newPaths so we don't accidentally clobber previous deletes. // update newPaths so we don't accidentally clobber previous deletes.
updatePath(newPaths, *item.GetId(), folderPath.String()) updatePath(newPaths, itemID, folderPath.String())
found, err := updateCollectionPaths(*item.GetId(), c.CollectionMap, folderPath) found, err := updateCollectionPaths(*item.GetId(), c.CollectionMap, folderPath)
if err != nil { if err != nil {
@ -598,7 +602,7 @@ func (c *Collections) UpdateCollections(
c.ctrl, c.ctrl,
invalidPrevDelta, invalidPrevDelta,
) )
c.CollectionMap[*item.GetId()] = col c.CollectionMap[itemID] = col
c.NumContainers++ c.NumContainers++
} }
} }
@ -615,7 +619,7 @@ func (c *Collections) UpdateCollections(
// deleted, we want to avoid it. If it was // deleted, we want to avoid it. If it was
// renamed/moved/modified, we still have to drop the // renamed/moved/modified, we still have to drop the
// original one and download a fresh copy. // original one and download a fresh copy.
excluded[*item.GetId()] = struct{}{} excluded[itemID] = struct{}{}
} }
if item.GetDeleted() != nil { if item.GetDeleted() != nil {
@ -679,11 +683,11 @@ func (c *Collections) UpdateCollections(
} }
default: default:
return errors.Errorf("item type not supported. item name : %s", *item.GetName()) return clues.New("item type not supported").WithClues(ctx)
} }
} }
return nil return el.Failure()
} }
func shouldSkipDrive(ctx context.Context, drivePath path.Path, m folderMatcher, driveName string) bool { func shouldSkipDrive(ctx context.Context, drivePath path.Path, m folderMatcher, driveName string) bool {
@ -705,7 +709,7 @@ func GetCanonicalPath(p, tenant, resourceOwner string, source driveSource) (path
case SharePointSource: case SharePointSource:
result, err = pathBuilder.ToDataLayerSharePointPath(tenant, resourceOwner, path.LibrariesCategory, false) result, err = pathBuilder.ToDataLayerSharePointPath(tenant, resourceOwner, path.LibrariesCategory, false)
default: default:
return nil, errors.Errorf("unrecognized drive data source") return nil, clues.New("unrecognized data source")
} }
if err != nil { if err != nil {
@ -719,7 +723,7 @@ func includePath(ctx context.Context, m folderMatcher, folderPath path.Path) boo
// Check if the folder is allowed by the scope. // Check if the folder is allowed by the scope.
folderPathString, err := path.GetDriveFolderPath(folderPath) folderPathString, err := path.GetDriveFolderPath(folderPath)
if err != nil { if err != nil {
logger.Ctx(ctx).Error(err) logger.Ctx(ctx).With("err", err).Error("getting drive folder path")
return true return true
} }

View File

@ -19,7 +19,9 @@ import (
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
@ -48,11 +50,11 @@ func getExpectedStatePathGenerator(
} else { } else {
require.Len(t, pths, 2, "invalid number of paths to getExpectedStatePathGenerator") require.Len(t, pths, 2, "invalid number of paths to getExpectedStatePathGenerator")
p2, err = GetCanonicalPath(base+pths[1], tenant, user, OneDriveSource) p2, err = GetCanonicalPath(base+pths[1], tenant, user, OneDriveSource)
require.NoError(t, err) aw.MustNoErr(t, err)
} }
p1, err = GetCanonicalPath(base+pths[0], tenant, user, OneDriveSource) p1, err = GetCanonicalPath(base+pths[0], tenant, user, OneDriveSource)
require.NoError(t, err) aw.MustNoErr(t, err)
switch state { switch state {
case data.NewState: case data.NewState:
@ -80,7 +82,7 @@ func getExpectedPathGenerator(t *testing.T,
) func(string) string { ) func(string) string {
return func(path string) string { return func(path string) string {
p, err := GetCanonicalPath(base+path, tenant, user, OneDriveSource) p, err := GetCanonicalPath(base+path, tenant, user, OneDriveSource)
require.NoError(t, err) aw.MustNoErr(t, err)
return p.String() return p.String()
} }
@ -109,20 +111,20 @@ func (suite *OneDriveCollectionsSuite) TestGetCanonicalPath() {
source: OneDriveSource, source: OneDriveSource,
dir: []string{"onedrive"}, dir: []string{"onedrive"},
expect: "tenant/onedrive/resourceOwner/files/onedrive", expect: "tenant/onedrive/resourceOwner/files/onedrive",
expectErr: assert.NoError, expectErr: aw.NoErr,
}, },
{ {
name: "sharepoint", name: "sharepoint",
source: SharePointSource, source: SharePointSource,
dir: []string{"sharepoint"}, dir: []string{"sharepoint"},
expect: "tenant/sharepoint/resourceOwner/libraries/sharepoint", expect: "tenant/sharepoint/resourceOwner/libraries/sharepoint",
expectErr: assert.NoError, expectErr: aw.NoErr,
}, },
{ {
name: "unknown", name: "unknown",
source: unknownDriveSource, source: unknownDriveSource,
dir: []string{"unknown"}, dir: []string{"unknown"},
expectErr: assert.Error, expectErr: aw.Err,
}, },
} }
for _, test := range table { for _, test := range table {
@ -173,7 +175,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
}, },
inputFolderMap: map[string]string{}, inputFolderMap: map[string]string{},
scope: anyFolder, scope: anyFolder,
expect: assert.Error, expect: aw.Err,
expectedMetadataPaths: map[string]string{ expectedMetadataPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
}, },
@ -187,7 +189,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
}, },
inputFolderMap: map[string]string{}, inputFolderMap: map[string]string{},
scope: anyFolder, scope: anyFolder,
expect: assert.NoError, expect: aw.NoErr,
expectedCollectionIDs: map[string]statePath{ expectedCollectionIDs: map[string]statePath{
"root": expectedStatePath(data.NotMovedState, ""), "root": expectedStatePath(data.NotMovedState, ""),
}, },
@ -208,7 +210,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
}, },
inputFolderMap: map[string]string{}, inputFolderMap: map[string]string{},
scope: anyFolder, scope: anyFolder,
expect: assert.NoError, expect: aw.NoErr,
expectedCollectionIDs: map[string]statePath{ expectedCollectionIDs: map[string]statePath{
"root": expectedStatePath(data.NotMovedState, ""), "root": expectedStatePath(data.NotMovedState, ""),
}, },
@ -228,7 +230,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
}, },
inputFolderMap: map[string]string{}, inputFolderMap: map[string]string{},
scope: anyFolder, scope: anyFolder,
expect: assert.NoError, expect: aw.NoErr,
expectedCollectionIDs: map[string]statePath{ expectedCollectionIDs: map[string]statePath{
"root": expectedStatePath(data.NotMovedState, ""), "root": expectedStatePath(data.NotMovedState, ""),
}, },
@ -252,7 +254,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
}, },
inputFolderMap: map[string]string{}, inputFolderMap: map[string]string{},
scope: anyFolder, scope: anyFolder,
expect: assert.NoError, expect: aw.NoErr,
expectedCollectionIDs: map[string]statePath{ expectedCollectionIDs: map[string]statePath{
"root": expectedStatePath(data.NotMovedState, ""), "root": expectedStatePath(data.NotMovedState, ""),
"folder": expectedStatePath(data.NewState, folder), "folder": expectedStatePath(data.NewState, folder),
@ -283,7 +285,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
}, },
inputFolderMap: map[string]string{}, inputFolderMap: map[string]string{},
scope: (&selectors.OneDriveBackup{}).Folders([]string{"folder"})[0], scope: (&selectors.OneDriveBackup{}).Folders([]string{"folder"})[0],
expect: assert.NoError, expect: aw.NoErr,
expectedCollectionIDs: map[string]statePath{ expectedCollectionIDs: map[string]statePath{
"folder": expectedStatePath(data.NewState, folder), "folder": expectedStatePath(data.NewState, folder),
"subfolder": expectedStatePath(data.NewState, folderSub), "subfolder": expectedStatePath(data.NewState, folderSub),
@ -317,7 +319,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
inputFolderMap: map[string]string{}, inputFolderMap: map[string]string{},
scope: (&selectors.OneDriveBackup{}). scope: (&selectors.OneDriveBackup{}).
Folders([]string{"/folder/subfolder"}, selectors.PrefixMatch())[0], Folders([]string{"/folder/subfolder"}, selectors.PrefixMatch())[0],
expect: assert.NoError, expect: aw.NoErr,
expectedCollectionIDs: map[string]statePath{ expectedCollectionIDs: map[string]statePath{
"subfolder": expectedStatePath(data.NewState, folderSub), "subfolder": expectedStatePath(data.NewState, folderSub),
"folder2": expectedStatePath(data.NewState, folderSub+folder), "folder2": expectedStatePath(data.NewState, folderSub+folder),
@ -345,7 +347,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
}, },
inputFolderMap: map[string]string{}, inputFolderMap: map[string]string{},
scope: (&selectors.OneDriveBackup{}).Folders([]string{"folder/subfolder"})[0], scope: (&selectors.OneDriveBackup{}).Folders([]string{"folder/subfolder"})[0],
expect: assert.NoError, expect: aw.NoErr,
expectedCollectionIDs: map[string]statePath{ expectedCollectionIDs: map[string]statePath{
"subfolder": expectedStatePath(data.NewState, folderSub), "subfolder": expectedStatePath(data.NewState, folderSub),
}, },
@ -369,7 +371,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
"subfolder": expectedPath("/folder/subfolder"), "subfolder": expectedPath("/folder/subfolder"),
}, },
scope: anyFolder, scope: anyFolder,
expect: assert.NoError, expect: aw.NoErr,
expectedCollectionIDs: map[string]statePath{ expectedCollectionIDs: map[string]statePath{
"root": expectedStatePath(data.NotMovedState, ""), "root": expectedStatePath(data.NotMovedState, ""),
"folder": expectedStatePath(data.NotMovedState, "/folder"), "folder": expectedStatePath(data.NotMovedState, "/folder"),
@ -395,7 +397,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
"subfolder": expectedPath("/a-folder/subfolder"), "subfolder": expectedPath("/a-folder/subfolder"),
}, },
scope: anyFolder, scope: anyFolder,
expect: assert.NoError, expect: aw.NoErr,
expectedCollectionIDs: map[string]statePath{ expectedCollectionIDs: map[string]statePath{
"root": expectedStatePath(data.NotMovedState, ""), "root": expectedStatePath(data.NotMovedState, ""),
"folder": expectedStatePath(data.MovedState, "/folder", "/a-folder"), "folder": expectedStatePath(data.MovedState, "/folder", "/a-folder"),
@ -421,7 +423,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
"folder": expectedPath("/folder"), "folder": expectedPath("/folder"),
}, },
scope: anyFolder, scope: anyFolder,
expect: assert.NoError, expect: aw.NoErr,
expectedCollectionIDs: map[string]statePath{ expectedCollectionIDs: map[string]statePath{
"root": expectedStatePath(data.NotMovedState, ""), "root": expectedStatePath(data.NotMovedState, ""),
"folder": expectedStatePath(data.NotMovedState, "/folder"), "folder": expectedStatePath(data.NotMovedState, "/folder"),
@ -445,7 +447,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
}, },
inputFolderMap: map[string]string{}, inputFolderMap: map[string]string{},
scope: anyFolder, scope: anyFolder,
expect: assert.NoError, expect: aw.NoErr,
expectedCollectionIDs: map[string]statePath{ expectedCollectionIDs: map[string]statePath{
"root": expectedStatePath(data.NotMovedState, ""), "root": expectedStatePath(data.NotMovedState, ""),
"folder": expectedStatePath(data.NewState, "/folder2"), "folder": expectedStatePath(data.NewState, "/folder2"),
@ -468,7 +470,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
}, },
inputFolderMap: map[string]string{}, inputFolderMap: map[string]string{},
scope: anyFolder, scope: anyFolder,
expect: assert.NoError, expect: aw.NoErr,
expectedCollectionIDs: map[string]statePath{ expectedCollectionIDs: map[string]statePath{
"root": expectedStatePath(data.NotMovedState, ""), "root": expectedStatePath(data.NotMovedState, ""),
"folder": expectedStatePath(data.NewState, "/folder"), "folder": expectedStatePath(data.NewState, "/folder"),
@ -494,7 +496,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
"subfolder": expectedPath("/a-folder/subfolder"), "subfolder": expectedPath("/a-folder/subfolder"),
}, },
scope: anyFolder, scope: anyFolder,
expect: assert.NoError, expect: aw.NoErr,
expectedCollectionIDs: map[string]statePath{ expectedCollectionIDs: map[string]statePath{
"root": expectedStatePath(data.NotMovedState, ""), "root": expectedStatePath(data.NotMovedState, ""),
"folder": expectedStatePath(data.MovedState, "/folder", "/a-folder"), "folder": expectedStatePath(data.MovedState, "/folder", "/a-folder"),
@ -522,7 +524,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
"subfolder": expectedPath("/a-folder/subfolder"), "subfolder": expectedPath("/a-folder/subfolder"),
}, },
scope: anyFolder, scope: anyFolder,
expect: assert.NoError, expect: aw.NoErr,
expectedCollectionIDs: map[string]statePath{ expectedCollectionIDs: map[string]statePath{
"root": expectedStatePath(data.NotMovedState, ""), "root": expectedStatePath(data.NotMovedState, ""),
"folder": expectedStatePath(data.MovedState, "/folder", "/a-folder"), "folder": expectedStatePath(data.MovedState, "/folder", "/a-folder"),
@ -561,7 +563,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
"subfolder": expectedPath("/a-folder/subfolder"), "subfolder": expectedPath("/a-folder/subfolder"),
}, },
scope: anyFolder, scope: anyFolder,
expect: assert.NoError, expect: aw.NoErr,
expectedCollectionIDs: map[string]statePath{ expectedCollectionIDs: map[string]statePath{
"root": expectedStatePath(data.NotMovedState, ""), "root": expectedStatePath(data.NotMovedState, ""),
"folder": expectedStatePath(data.MovedState, "/folder", "/a-folder"), "folder": expectedStatePath(data.MovedState, "/folder", "/a-folder"),
@ -592,7 +594,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
"package": expectedPath("/package"), "package": expectedPath("/package"),
}, },
scope: anyFolder, scope: anyFolder,
expect: assert.NoError, expect: aw.NoErr,
expectedCollectionIDs: map[string]statePath{ expectedCollectionIDs: map[string]statePath{
"folder": expectedStatePath(data.DeletedState, folder), "folder": expectedStatePath(data.DeletedState, folder),
"package": expectedStatePath(data.DeletedState, pkg), "package": expectedStatePath(data.DeletedState, pkg),
@ -615,7 +617,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
"root": expectedPath(""), "root": expectedPath(""),
}, },
scope: anyFolder, scope: anyFolder,
expect: assert.NoError, expect: aw.NoErr,
expectedCollectionIDs: map[string]statePath{}, expectedCollectionIDs: map[string]statePath{},
expectedItemCount: 0, expectedItemCount: 0,
expectedFileCount: 0, expectedFileCount: 0,
@ -638,7 +640,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
"subfolder": expectedPath("/folder/subfolder"), "subfolder": expectedPath("/folder/subfolder"),
}, },
scope: anyFolder, scope: anyFolder,
expect: assert.NoError, expect: aw.NoErr,
expectedCollectionIDs: map[string]statePath{ expectedCollectionIDs: map[string]statePath{
"root": expectedStatePath(data.NotMovedState, ""), "root": expectedStatePath(data.NotMovedState, ""),
"folder": expectedStatePath(data.DeletedState, folder), "folder": expectedStatePath(data.DeletedState, folder),
@ -662,7 +664,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
"root": expectedPath(""), "root": expectedPath(""),
}, },
scope: anyFolder, scope: anyFolder,
expect: assert.NoError, expect: aw.NoErr,
expectedItemCount: 1, expectedItemCount: 1,
expectedFileCount: 1, expectedFileCount: 1,
expectedContainerCount: 0, expectedContainerCount: 0,
@ -702,7 +704,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
outputFolderMap, outputFolderMap,
excludes, excludes,
false, false,
) fault.New(true))
tt.expect(t, err) tt.expect(t, err)
assert.Equal(t, len(tt.expectedCollectionIDs), len(c.CollectionMap), "total collections") assert.Equal(t, len(tt.expectedCollectionIDs), len(c.CollectionMap), "total collections")
assert.Equal(t, tt.expectedItemCount, c.NumItems, "item count") assert.Equal(t, tt.expectedItemCount, c.NumItems, "item count")
@ -771,7 +773,7 @@ func (suite *OneDriveCollectionsSuite) TestDeserializeMetadata() {
folderID1: path1, folderID1: path1,
}, },
}, },
errCheck: assert.NoError, errCheck: aw.NoErr,
}, },
{ {
name: "MissingPaths", name: "MissingPaths",
@ -787,7 +789,7 @@ func (suite *OneDriveCollectionsSuite) TestDeserializeMetadata() {
}, },
expectedDeltas: map[string]string{}, expectedDeltas: map[string]string{},
expectedPaths: map[string]map[string]string{}, expectedPaths: map[string]map[string]string{},
errCheck: assert.NoError, errCheck: aw.NoErr,
}, },
{ {
name: "MissingDeltas", name: "MissingDeltas",
@ -807,7 +809,7 @@ func (suite *OneDriveCollectionsSuite) TestDeserializeMetadata() {
}, },
expectedDeltas: map[string]string{}, expectedDeltas: map[string]string{},
expectedPaths: map[string]map[string]string{}, expectedPaths: map[string]map[string]string{},
errCheck: assert.NoError, errCheck: aw.NoErr,
}, },
{ {
// An empty path map but valid delta results in metadata being returned // An empty path map but valid delta results in metadata being returned
@ -832,7 +834,7 @@ func (suite *OneDriveCollectionsSuite) TestDeserializeMetadata() {
}, },
expectedDeltas: map[string]string{driveID1: deltaURL1}, expectedDeltas: map[string]string{driveID1: deltaURL1},
expectedPaths: map[string]map[string]string{driveID1: {}}, expectedPaths: map[string]map[string]string{driveID1: {}},
errCheck: assert.NoError, errCheck: aw.NoErr,
}, },
{ {
// An empty delta map but valid path results in no metadata for that drive // An empty delta map but valid path results in no metadata for that drive
@ -861,7 +863,7 @@ func (suite *OneDriveCollectionsSuite) TestDeserializeMetadata() {
}, },
expectedDeltas: map[string]string{}, expectedDeltas: map[string]string{},
expectedPaths: map[string]map[string]string{}, expectedPaths: map[string]map[string]string{},
errCheck: assert.NoError, errCheck: aw.NoErr,
}, },
{ {
name: "SuccessTwoDrivesTwoCollections", name: "SuccessTwoDrivesTwoCollections",
@ -911,7 +913,7 @@ func (suite *OneDriveCollectionsSuite) TestDeserializeMetadata() {
folderID2: path2, folderID2: path2,
}, },
}, },
errCheck: assert.NoError, errCheck: aw.NoErr,
}, },
{ {
// Bad formats are logged but skip adding entries to the maps and don't // Bad formats are logged but skip adding entries to the maps and don't
@ -929,7 +931,7 @@ func (suite *OneDriveCollectionsSuite) TestDeserializeMetadata() {
}, },
expectedDeltas: map[string]string{}, expectedDeltas: map[string]string{},
expectedPaths: map[string]map[string]string{}, expectedPaths: map[string]map[string]string{},
errCheck: assert.NoError, errCheck: aw.NoErr,
}, },
{ {
// Unexpected files are logged and skipped. They don't cause an error to // Unexpected files are logged and skipped. They don't cause an error to
@ -965,7 +967,7 @@ func (suite *OneDriveCollectionsSuite) TestDeserializeMetadata() {
folderID1: path1, folderID1: path1,
}, },
}, },
errCheck: assert.NoError, errCheck: aw.NoErr,
}, },
{ {
name: "DriveAlreadyFound_Paths", name: "DriveAlreadyFound_Paths",
@ -1001,7 +1003,7 @@ func (suite *OneDriveCollectionsSuite) TestDeserializeMetadata() {
}, },
expectedDeltas: nil, expectedDeltas: nil,
expectedPaths: nil, expectedPaths: nil,
errCheck: assert.Error, errCheck: aw.Err,
}, },
{ {
name: "DriveAlreadyFound_Deltas", name: "DriveAlreadyFound_Deltas",
@ -1033,7 +1035,7 @@ func (suite *OneDriveCollectionsSuite) TestDeserializeMetadata() {
}, },
expectedDeltas: nil, expectedDeltas: nil,
expectedPaths: nil, expectedPaths: nil,
errCheck: assert.Error, errCheck: aw.Err,
}, },
} }
@ -1053,12 +1055,12 @@ func (suite *OneDriveCollectionsSuite) TestDeserializeMetadata() {
c(), c(),
func(*support.ConnectorOperationStatus) {}, func(*support.ConnectorOperationStatus) {},
) )
require.NoError(t, err) aw.MustNoErr(t, err)
cols = append(cols, data.NotFoundRestoreCollection{Collection: mc}) cols = append(cols, data.NotFoundRestoreCollection{Collection: mc})
} }
deltas, paths, err := deserializeMetadata(ctx, cols) deltas, paths, err := deserializeMetadata(ctx, cols, fault.New(true))
test.errCheck(t, err) test.errCheck(t, err)
assert.Equal(t, test.expectedDeltas, deltas) assert.Equal(t, test.expectedDeltas, deltas)
@ -1137,7 +1139,7 @@ func (suite *OneDriveCollectionsSuite) TestGet() {
path.FilesCategory, path.FilesCategory,
false, false,
) )
require.NoError(suite.T(), err, "making metadata path") aw.MustNoErr(suite.T(), err, "making metadata path")
empty := "" empty := ""
next := "next" next := "next"
@ -1194,7 +1196,7 @@ func (suite *OneDriveCollectionsSuite) TestGet() {
}, },
}, },
}, },
errCheck: assert.NoError, errCheck: aw.NoErr,
prevFolderPaths: map[string]map[string]string{ prevFolderPaths: map[string]map[string]string{
driveID1: {"root": rootFolderPath1}, driveID1: {"root": rootFolderPath1},
}, },
@ -1223,7 +1225,7 @@ func (suite *OneDriveCollectionsSuite) TestGet() {
}, },
}, },
}, },
errCheck: assert.NoError, errCheck: aw.NoErr,
prevFolderPaths: map[string]map[string]string{ prevFolderPaths: map[string]map[string]string{
driveID1: {"root": expectedPath1("")}, driveID1: {"root": expectedPath1("")},
}, },
@ -1253,7 +1255,7 @@ func (suite *OneDriveCollectionsSuite) TestGet() {
}, },
}, },
}, },
errCheck: assert.NoError, errCheck: aw.NoErr,
prevFolderPaths: map[string]map[string]string{ prevFolderPaths: map[string]map[string]string{
driveID1: {}, driveID1: {},
}, },
@ -1287,7 +1289,7 @@ func (suite *OneDriveCollectionsSuite) TestGet() {
}, },
}, },
}, },
errCheck: assert.NoError, errCheck: aw.NoErr,
prevFolderPaths: map[string]map[string]string{ prevFolderPaths: map[string]map[string]string{
driveID1: {}, driveID1: {},
}, },
@ -1322,7 +1324,7 @@ func (suite *OneDriveCollectionsSuite) TestGet() {
}, },
}, },
}, },
errCheck: assert.NoError, errCheck: aw.NoErr,
prevFolderPaths: map[string]map[string]string{ prevFolderPaths: map[string]map[string]string{
driveID1: {}, driveID1: {},
}, },
@ -1369,7 +1371,7 @@ func (suite *OneDriveCollectionsSuite) TestGet() {
}, },
}, },
}, },
errCheck: assert.NoError, errCheck: aw.NoErr,
prevFolderPaths: map[string]map[string]string{ prevFolderPaths: map[string]map[string]string{
driveID1: {}, driveID1: {},
driveID2: {}, driveID2: {},
@ -1406,7 +1408,7 @@ func (suite *OneDriveCollectionsSuite) TestGet() {
}, },
}, },
}, },
errCheck: assert.Error, errCheck: aw.Err,
prevFolderPaths: map[string]map[string]string{ prevFolderPaths: map[string]map[string]string{
driveID1: {}, driveID1: {},
}, },
@ -1432,7 +1434,7 @@ func (suite *OneDriveCollectionsSuite) TestGet() {
}, },
}, },
}, },
errCheck: assert.NoError, errCheck: aw.NoErr,
expectedCollections: map[string]map[data.CollectionState][]string{ expectedCollections: map[string]map[data.CollectionState][]string{
expectedPath1(""): {data.NotMovedState: {"file"}}, expectedPath1(""): {data.NotMovedState: {"file"}},
}, },
@ -1472,7 +1474,7 @@ func (suite *OneDriveCollectionsSuite) TestGet() {
}, },
}, },
}, },
errCheck: assert.NoError, errCheck: aw.NoErr,
expectedCollections: map[string]map[data.CollectionState][]string{ expectedCollections: map[string]map[data.CollectionState][]string{
expectedPath1(""): {data.NotMovedState: {"file", "folder"}}, expectedPath1(""): {data.NotMovedState: {"file", "folder"}},
expectedPath1("/folder"): {data.NewState: {"file"}}, expectedPath1("/folder"): {data.NewState: {"file"}},
@ -1511,7 +1513,7 @@ func (suite *OneDriveCollectionsSuite) TestGet() {
}, },
}, },
}, },
errCheck: assert.NoError, errCheck: aw.NoErr,
prevFolderPaths: map[string]map[string]string{ prevFolderPaths: map[string]map[string]string{
driveID1: {}, driveID1: {},
}, },
@ -1594,10 +1596,10 @@ func (suite *OneDriveCollectionsSuite) TestGet() {
}, },
func(*support.ConnectorOperationStatus) {}, func(*support.ConnectorOperationStatus) {},
) )
assert.NoError(t, err, "creating metadata collection") aw.NoErr(t, err, "creating metadata collection")
prevMetadata := []data.RestoreCollection{data.NotFoundRestoreCollection{Collection: mc}} prevMetadata := []data.RestoreCollection{data.NotFoundRestoreCollection{Collection: mc}}
cols, delList, err := c.Get(ctx, prevMetadata) cols, delList, err := c.Get(ctx, prevMetadata, fault.New(true))
test.errCheck(t, err) test.errCheck(t, err)
if err != nil { if err != nil {
@ -1607,10 +1609,13 @@ func (suite *OneDriveCollectionsSuite) TestGet() {
for _, baseCol := range cols { for _, baseCol := range cols {
folderPath := baseCol.FullPath().String() folderPath := baseCol.FullPath().String()
if folderPath == metadataPath.String() { if folderPath == metadataPath.String() {
deltas, paths, err := deserializeMetadata(ctx, []data.RestoreCollection{ deltas, paths, err := deserializeMetadata(
ctx,
[]data.RestoreCollection{
data.NotFoundRestoreCollection{Collection: baseCol}, data.NotFoundRestoreCollection{Collection: baseCol},
}) },
if !assert.NoError(t, err, "deserializing metadata") { fault.New(true))
if !aw.NoErr(t, err, "deserializing metadata") {
continue continue
} }
@ -1804,6 +1809,7 @@ func (suite *OneDriveCollectionsSuite) TestCollectItems() {
newPaths map[string]string, newPaths map[string]string,
excluded map[string]struct{}, excluded map[string]struct{},
doNotMergeItems bool, doNotMergeItems bool,
errs *fault.Bus,
) error { ) error {
return nil return nil
} }
@ -1816,9 +1822,9 @@ func (suite *OneDriveCollectionsSuite) TestCollectItems() {
collectorFunc, collectorFunc,
map[string]string{}, map[string]string{},
test.prevDelta, test.prevDelta,
) fault.New(true))
require.ErrorIs(suite.T(), err, test.err, "delta fetch err") aw.MustErrIs(suite.T(), err, test.err, "delta fetch err")
require.Equal(suite.T(), test.deltaURL, delta.URL, "delta url") require.Equal(suite.T(), test.deltaURL, delta.URL, "delta url")
require.Equal(suite.T(), !test.prevDeltaSuccess, delta.Reset, "delta reset") require.Equal(suite.T(), !test.prevDeltaSuccess, delta.Reset, "delta reset")
}) })

View File

@ -6,9 +6,9 @@ import (
"strings" "strings"
"time" "time"
"github.com/alcionai/clues"
msdrive "github.com/microsoftgraph/msgraph-sdk-go/drive" msdrive "github.com/microsoftgraph/msgraph-sdk-go/drive"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
"github.com/pkg/errors" "github.com/pkg/errors"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
@ -16,11 +16,11 @@ import (
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
gapi "github.com/alcionai/corso/src/internal/connector/graph/api" gapi "github.com/alcionai/corso/src/internal/connector/graph/api"
"github.com/alcionai/corso/src/internal/connector/onedrive/api" "github.com/alcionai/corso/src/internal/connector/onedrive/api"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
) )
var errFolderNotFound = errors.New("folder not found") var errFolderNotFound = clues.New("folder not found")
const ( const (
getDrivesRetries = 3 getDrivesRetries = 3
@ -32,9 +32,7 @@ const (
itemByPathRawURLFmt = "https://graph.microsoft.com/v1.0/drives/%s/items/%s:/%s" itemByPathRawURLFmt = "https://graph.microsoft.com/v1.0/drives/%s/items/%s:/%s"
itemNotFoundErrorCode = "itemNotFound" itemNotFoundErrorCode = "itemNotFound"
userMysiteURLNotFound = "BadRequest Unable to retrieve user's mysite URL" userMysiteURLNotFound = "BadRequest Unable to retrieve user's mysite URL"
userMysiteURLNotFoundMsg = "Unable to retrieve user's mysite URL"
userMysiteNotFound = "ResourceNotFound User's mysite not found" userMysiteNotFound = "ResourceNotFound User's mysite not found"
userMysiteNotFoundMsg = "User's mysite not found"
contextDeadlineExceeded = "context deadline exceeded" contextDeadlineExceeded = "context deadline exceeded"
) )
@ -77,8 +75,6 @@ func drives(
retry bool, retry bool,
) ([]models.Driveable, error) { ) ([]models.Driveable, error) {
var ( var (
err error
page gapi.PageLinker
numberOfRetries = getDrivesRetries numberOfRetries = getDrivesRetries
drives = []models.Driveable{} drives = []models.Driveable{}
) )
@ -89,30 +85,26 @@ func drives(
// Loop through all pages returned by Graph API. // Loop through all pages returned by Graph API.
for { for {
var (
err error
page gapi.PageLinker
)
// Retry Loop for Drive retrieval. Request can timeout // Retry Loop for Drive retrieval. Request can timeout
for i := 0; i <= numberOfRetries; i++ { for i := 0; i <= numberOfRetries; i++ {
page, err = pager.GetPage(ctx) page, err = pager.GetPage(ctx)
if err != nil { if err != nil {
// Various error handling. May return an error or perform a retry. if clues.HasLabel(err, graph.Labels.MysiteNotFound) {
detailedError := support.ConnectorStackErrorTraceWrap(err, "").Error()
if strings.Contains(detailedError, userMysiteURLNotFound) ||
strings.Contains(detailedError, userMysiteURLNotFoundMsg) ||
strings.Contains(detailedError, userMysiteNotFound) ||
strings.Contains(detailedError, userMysiteNotFoundMsg) {
logger.Ctx(ctx).Infof("resource owner does not have a drive") logger.Ctx(ctx).Infof("resource owner does not have a drive")
return make([]models.Driveable, 0), nil // no license or drives. return make([]models.Driveable, 0), nil // no license or drives.
} }
if strings.Contains(detailedError, contextDeadlineExceeded) && i < numberOfRetries { if errors.Is(err, context.DeadlineExceeded) && i < numberOfRetries {
time.Sleep(time.Duration(3*(i+1)) * time.Second) time.Sleep(time.Duration(3*(i+1)) * time.Second)
continue continue
} }
return nil, errors.Wrapf( return nil, graph.Wrap(ctx, err, "retrieving drives")
err,
"failed to retrieve drives. details: %s",
detailedError,
)
} }
// No error encountered, break the retry loop so we can extract results // No error encountered, break the retry loop so we can extract results
@ -122,7 +114,7 @@ func drives(
tmp, err := pager.ValuesIn(page) tmp, err := pager.ValuesIn(page)
if err != nil { if err != nil {
return nil, errors.Wrap(err, "extracting drives from response") return nil, graph.Wrap(ctx, err, "extracting drives from response")
} }
drives = append(drives, tmp...) drives = append(drives, tmp...)
@ -135,7 +127,7 @@ func drives(
pager.SetNext(nextLink) pager.SetNext(nextLink)
} }
logger.Ctx(ctx).Debugf("Found %d drives", len(drives)) logger.Ctx(ctx).Debugf("retrieved %d valid drives", len(drives))
return drives, nil return drives, nil
} }
@ -149,6 +141,7 @@ type itemCollector func(
newPaths map[string]string, newPaths map[string]string,
excluded map[string]struct{}, excluded map[string]struct{},
validPrevDelta bool, validPrevDelta bool,
errs *fault.Bus,
) error ) error
type itemPager interface { type itemPager interface {
@ -193,6 +186,7 @@ func collectItems(
collector itemCollector, collector itemCollector,
oldPaths map[string]string, oldPaths map[string]string,
prevDelta string, prevDelta string,
errs *fault.Bus,
) (DeltaUpdate, map[string]string, map[string]struct{}, error) { ) (DeltaUpdate, map[string]string, map[string]struct{}, error) {
var ( var (
newDeltaURL = "" newDeltaURL = ""
@ -220,19 +214,15 @@ func collectItems(
} }
if err != nil { if err != nil {
return DeltaUpdate{}, nil, nil, errors.Wrapf( return DeltaUpdate{}, nil, nil, graph.Wrap(ctx, err, "getting page")
err,
"failed to query drive items. details: %s",
support.ConnectorStackErrorTrace(err),
)
} }
vals, err := pager.ValuesIn(page) vals, err := pager.ValuesIn(page)
if err != nil { if err != nil {
return DeltaUpdate{}, nil, nil, errors.Wrap(err, "extracting items from response") return DeltaUpdate{}, nil, nil, graph.Wrap(ctx, err, "extracting items from response")
} }
err = collector(ctx, driveID, driveName, vals, oldPaths, newPaths, excluded, invalidPrevDelta) err = collector(ctx, driveID, driveName, vals, oldPaths, newPaths, excluded, invalidPrevDelta, errs)
if err != nil { if err != nil {
return DeltaUpdate{}, nil, nil, err return DeltaUpdate{}, nil, nil, err
} }
@ -277,25 +267,16 @@ func getFolder(
foundItem, err = builder.Get(ctx, nil) foundItem, err = builder.Get(ctx, nil)
if err != nil { if err != nil {
var oDataError *odataerrors.ODataError if graph.IsErrDeletedInFlight(err) {
if errors.As(err, &oDataError) && return nil, graph.Stack(ctx, clues.Stack(errFolderNotFound, err))
oDataError.GetError() != nil &&
oDataError.GetError().GetCode() != nil &&
*oDataError.GetError().GetCode() == itemNotFoundErrorCode {
return nil, errors.WithStack(errFolderNotFound)
} }
return nil, errors.Wrapf(err, return nil, graph.Wrap(ctx, err, "getting folder")
"failed to get folder %s/%s. details: %s",
parentFolderID,
folderName,
support.ConnectorStackErrorTrace(err),
)
} }
// Check if the item found is a folder, fail the call if not // Check if the item found is a folder, fail the call if not
if foundItem.GetFolder() == nil { if foundItem.GetFolder() == nil {
return nil, errors.WithStack(errFolderNotFound) return nil, graph.Stack(ctx, errFolderNotFound)
} }
return foundItem, nil return foundItem, nil
@ -311,16 +292,11 @@ func createItem(
// Graph SDK doesn't yet provide a POST method for `/children` so we set the `rawUrl` ourselves as recommended // Graph SDK doesn't yet provide a POST method for `/children` so we set the `rawUrl` ourselves as recommended
// here: https://github.com/microsoftgraph/msgraph-sdk-go/issues/155#issuecomment-1136254310 // here: https://github.com/microsoftgraph/msgraph-sdk-go/issues/155#issuecomment-1136254310
rawURL := fmt.Sprintf(itemChildrenRawURLFmt, driveID, parentFolderID) rawURL := fmt.Sprintf(itemChildrenRawURLFmt, driveID, parentFolderID)
builder := msdrive.NewItemsRequestBuilder(rawURL, service.Adapter()) builder := msdrive.NewItemsRequestBuilder(rawURL, service.Adapter())
newItem, err := builder.Post(ctx, newItem, nil) newItem, err := builder.Post(ctx, newItem, nil)
if err != nil { if err != nil {
return nil, errors.Wrapf( return nil, graph.Wrap(ctx, err, "creating item")
err,
"failed to create item. details: %s",
support.ConnectorStackErrorTrace(err),
)
} }
return newItem, nil return newItem, nil
@ -356,25 +332,30 @@ func GetAllFolders(
gs graph.Servicer, gs graph.Servicer,
pager drivePager, pager drivePager,
prefix string, prefix string,
errs *fault.Bus,
) ([]*Displayable, error) { ) ([]*Displayable, error) {
drives, err := drives(ctx, pager, true) drives, err := drives(ctx, pager, true)
if err != nil { if err != nil {
return nil, errors.Wrap(err, "getting OneDrive folders") return nil, errors.Wrap(err, "getting OneDrive folders")
} }
folders := map[string]*Displayable{} var (
folders = map[string]*Displayable{}
el = errs.Local()
)
for _, d := range drives { for _, d := range drives {
_, _, _, err = collectItems( if el.Failure() != nil {
ctx, break
defaultItemPager( }
gs,
*d.GetId(), var (
"", id = ptr.Val(d.GetId())
), name = ptr.Val(d.GetName())
*d.GetId(), )
*d.GetName(),
func( ictx := clues.Add(ctx, "drive_id", id, "drive_name", name) // TODO: pii
collector := func(
innerCtx context.Context, innerCtx context.Context,
driveID, driveName string, driveID, driveName string,
items []models.DriveItemable, items []models.DriveItemable,
@ -382,6 +363,7 @@ func GetAllFolders(
newPaths map[string]string, newPaths map[string]string,
excluded map[string]struct{}, excluded map[string]struct{},
doNotMergeItems bool, doNotMergeItems bool,
errs *fault.Bus,
) error { ) error {
for _, item := range items { for _, item := range items {
// Skip the root item. // Skip the root item.
@ -394,8 +376,9 @@ func GetAllFolders(
continue continue
} }
if item.GetId() == nil || len(*item.GetId()) == 0 { itemID := ptr.Val(item.GetId())
logger.Ctx(ctx).Warn("folder without ID") if len(itemID) == 0 {
logger.Ctx(ctx).Info("folder missing ID")
continue continue
} }
@ -405,16 +388,15 @@ func GetAllFolders(
// Add the item instead of the folder because the item has more // Add the item instead of the folder because the item has more
// functionality. // functionality.
folders[*item.GetId()] = &Displayable{item} folders[itemID] = &Displayable{item}
} }
return nil return nil
}, }
map[string]string{},
"", _, _, _, err = collectItems(ictx, defaultItemPager(gs, id, ""), id, name, collector, map[string]string{}, "", errs)
)
if err != nil { if err != nil {
return nil, errors.Wrapf(err, "getting items for drive %s", *d.GetName()) el.AddRecoverable(clues.Wrap(err, "enumerating items in drive"))
} }
} }
@ -424,7 +406,7 @@ func GetAllFolders(
res = append(res, f) res = append(res, f)
} }
return res, nil return res, el.Failure()
} }
func DeleteItem( func DeleteItem(
@ -435,7 +417,7 @@ func DeleteItem(
) error { ) error {
err := gs.Client().DrivesById(driveID).ItemsById(itemID).Delete(ctx, nil) err := gs.Client().DrivesById(driveID).ItemsById(itemID).Delete(ctx, nil)
if err != nil { if err != nil {
return errors.Wrapf(err, "deleting item with ID %s", itemID) return graph.Wrap(ctx, err, "deleting item").With("item_id", itemID)
} }
return nil return nil

View File

@ -15,9 +15,10 @@ import (
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/graph/api" "github.com/alcionai/corso/src/internal/connector/graph/api"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
@ -94,18 +95,9 @@ func (suite *OneDriveUnitSuite) TestDrives() {
// These errors won't be the "correct" format when compared to what graph // These errors won't be the "correct" format when compared to what graph
// returns, but they're close enough to have the same info when the inner // returns, but they're close enough to have the same info when the inner
// details are extracted via support package. // details are extracted via support package.
mySiteURLNotFound := support.ConnectorStackErrorTraceWrap( mySiteURLNotFound := odErr(userMysiteURLNotFound)
odErr(userMysiteURLNotFound), mySiteNotFound := odErr(userMysiteNotFound)
"maximum retries or unretryable", deadlineExceeded := odErr(contextDeadlineExceeded)
)
mySiteNotFound := support.ConnectorStackErrorTraceWrap(
odErr(userMysiteNotFound),
"maximum retries or unretryable",
)
deadlineExceeded := support.ConnectorStackErrorTraceWrap(
odErr(contextDeadlineExceeded),
"maximum retries or unretryable",
)
resultDrives := make([]models.Driveable, 0, numDriveResults) resultDrives := make([]models.Driveable, 0, numDriveResults)
@ -142,7 +134,7 @@ func (suite *OneDriveUnitSuite) TestDrives() {
}, },
}, },
retry: false, retry: false,
expectedErr: assert.NoError, expectedErr: aw.NoErr,
expectedResults: resultDrives, expectedResults: resultDrives,
}, },
{ {
@ -155,7 +147,7 @@ func (suite *OneDriveUnitSuite) TestDrives() {
}, },
}, },
retry: false, retry: false,
expectedErr: assert.NoError, expectedErr: aw.NoErr,
expectedResults: resultDrives, expectedResults: resultDrives,
}, },
{ {
@ -173,7 +165,7 @@ func (suite *OneDriveUnitSuite) TestDrives() {
}, },
}, },
retry: false, retry: false,
expectedErr: assert.NoError, expectedErr: aw.NoErr,
expectedResults: resultDrives, expectedResults: resultDrives,
}, },
{ {
@ -191,7 +183,7 @@ func (suite *OneDriveUnitSuite) TestDrives() {
}, },
}, },
retry: false, retry: false,
expectedErr: assert.NoError, expectedErr: aw.NoErr,
expectedResults: resultDrives, expectedResults: resultDrives,
}, },
{ {
@ -209,7 +201,7 @@ func (suite *OneDriveUnitSuite) TestDrives() {
}, },
}, },
retry: true, retry: true,
expectedErr: assert.Error, expectedErr: aw.Err,
expectedResults: nil, expectedResults: nil,
}, },
{ {
@ -222,7 +214,7 @@ func (suite *OneDriveUnitSuite) TestDrives() {
}, },
}, },
retry: true, retry: true,
expectedErr: assert.NoError, expectedErr: aw.NoErr,
expectedResults: nil, expectedResults: nil,
}, },
{ {
@ -235,7 +227,7 @@ func (suite *OneDriveUnitSuite) TestDrives() {
}, },
}, },
retry: true, retry: true,
expectedErr: assert.NoError, expectedErr: aw.NoErr,
expectedResults: nil, expectedResults: nil,
}, },
{ {
@ -258,7 +250,7 @@ func (suite *OneDriveUnitSuite) TestDrives() {
}, },
}, },
retry: true, retry: true,
expectedErr: assert.NoError, expectedErr: aw.NoErr,
expectedResults: resultDrives, expectedResults: resultDrives,
}, },
{ {
@ -281,7 +273,7 @@ func (suite *OneDriveUnitSuite) TestDrives() {
}, },
}, },
retry: false, retry: false,
expectedErr: assert.Error, expectedErr: aw.Err,
expectedResults: nil, expectedResults: nil,
}, },
{ {
@ -297,7 +289,7 @@ func (suite *OneDriveUnitSuite) TestDrives() {
tooManyRetries..., tooManyRetries...,
), ),
retry: true, retry: true,
expectedErr: assert.Error, expectedErr: aw.Err,
expectedResults: nil, expectedResults: nil,
}, },
} }
@ -349,10 +341,10 @@ func (suite *OneDriveSuite) TestCreateGetDeleteFolder() {
gs := loadTestService(t) gs := loadTestService(t)
pager, err := PagerForSource(OneDriveSource, gs, suite.userID, nil) pager, err := PagerForSource(OneDriveSource, gs, suite.userID, nil)
require.NoError(t, err) aw.MustNoErr(t, err)
drives, err := drives(ctx, pager, true) drives, err := drives(ctx, pager, true)
require.NoError(t, err) aw.MustNoErr(t, err)
require.NotEmpty(t, drives) require.NotEmpty(t, drives)
// TODO: Verify the intended drive // TODO: Verify the intended drive
@ -368,7 +360,7 @@ func (suite *OneDriveSuite) TestCreateGetDeleteFolder() {
}() }()
folderID, err := CreateRestoreFolders(ctx, gs, driveID, folderElements) folderID, err := CreateRestoreFolders(ctx, gs, driveID, folderElements)
require.NoError(t, err) aw.MustNoErr(t, err)
folderIDs = append(folderIDs, folderID) folderIDs = append(folderIDs, folderID)
@ -376,7 +368,7 @@ func (suite *OneDriveSuite) TestCreateGetDeleteFolder() {
folderElements = append(folderElements, folderName2) folderElements = append(folderElements, folderName2)
folderID, err = CreateRestoreFolders(ctx, gs, driveID, folderElements) folderID, err = CreateRestoreFolders(ctx, gs, driveID, folderElements)
require.NoError(t, err) aw.MustNoErr(t, err)
folderIDs = append(folderIDs, folderID) folderIDs = append(folderIDs, folderID)
@ -397,10 +389,10 @@ func (suite *OneDriveSuite) TestCreateGetDeleteFolder() {
for _, test := range table { for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) { suite.T().Run(test.name, func(t *testing.T) {
pager, err := PagerForSource(OneDriveSource, gs, suite.userID, nil) pager, err := PagerForSource(OneDriveSource, gs, suite.userID, nil)
require.NoError(t, err) aw.MustNoErr(t, err)
allFolders, err := GetAllFolders(ctx, gs, pager, test.prefix) allFolders, err := GetAllFolders(ctx, gs, pager, test.prefix, fault.New(true))
require.NoError(t, err) aw.MustNoErr(t, err)
foundFolderIDs := []string{} foundFolderIDs := []string{}
@ -435,7 +427,7 @@ func (suite *OneDriveSuite) TestOneDriveNewCollections() {
defer flush() defer flush()
creds, err := tester.NewM365Account(suite.T()).M365Config() creds, err := tester.NewM365Account(suite.T()).M365Config()
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
tests := []struct { tests := []struct {
name, user string name, user string
@ -465,8 +457,8 @@ func (suite *OneDriveSuite) TestOneDriveNewCollections() {
service, service,
service.updateStatus, service.updateStatus,
control.Options{ToggleFeatures: control.Toggles{EnablePermissionsBackup: true}}, control.Options{ToggleFeatures: control.Toggles{EnablePermissionsBackup: true}},
).Get(ctx, nil) ).Get(ctx, nil, fault.New(true))
assert.NoError(t, err) aw.NoErr(t, err)
// Don't expect excludes as this isn't an incremental backup. // Don't expect excludes as this isn't an incremental backup.
assert.Empty(t, excludes) assert.Empty(t, excludes)

View File

@ -4,17 +4,17 @@ import (
"bytes" "bytes"
"context" "context"
"encoding/json" "encoding/json"
"fmt"
"io" "io"
"net/http" "net/http"
"strings" "strings"
"github.com/alcionai/clues"
msdrives "github.com/microsoftgraph/msgraph-sdk-go/drives" msdrives "github.com/microsoftgraph/msgraph-sdk-go/drives"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/connector/uploadsession" "github.com/alcionai/corso/src/internal/connector/uploadsession"
"github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
@ -33,7 +33,12 @@ func getDriveItem(
srv graph.Servicer, srv graph.Servicer,
driveID, itemID string, driveID, itemID string,
) (models.DriveItemable, error) { ) (models.DriveItemable, error) {
return srv.Client().DrivesById(driveID).ItemsById(itemID).Get(ctx, nil) di, err := srv.Client().DrivesById(driveID).ItemsById(itemID).Get(ctx, nil)
if err != nil {
return nil, graph.Wrap(ctx, err, "getting item")
}
return di, nil
} }
// sharePointItemReader will return a io.ReadCloser for the specified item // sharePointItemReader will return a io.ReadCloser for the specified item
@ -41,10 +46,11 @@ func getDriveItem(
// and using a http client to initialize a reader // and using a http client to initialize a reader
// TODO: Add metadata fetching to SharePoint // TODO: Add metadata fetching to SharePoint
func sharePointItemReader( func sharePointItemReader(
ctx context.Context,
hc *http.Client, hc *http.Client,
item models.DriveItemable, item models.DriveItemable,
) (details.ItemInfo, io.ReadCloser, error) { ) (details.ItemInfo, io.ReadCloser, error) {
resp, err := downloadItem(hc, item) resp, err := downloadItem(ctx, hc, item)
if err != nil { if err != nil {
return details.ItemInfo{}, nil, errors.Wrap(err, "downloading item") return details.ItemInfo{}, nil, errors.Wrap(err, "downloading item")
} }
@ -69,7 +75,7 @@ func oneDriveItemMetaReader(
metaJSON, err := json.Marshal(meta) metaJSON, err := json.Marshal(meta)
if err != nil { if err != nil {
return nil, 0, err return nil, 0, clues.Wrap(err, "marshalling json").WithClues(ctx)
} }
return io.NopCloser(bytes.NewReader(metaJSON)), len(metaJSON), nil return io.NopCloser(bytes.NewReader(metaJSON)), len(metaJSON), nil
@ -79,6 +85,7 @@ func oneDriveItemMetaReader(
// It crafts this by querying M365 for a download URL for the item // It crafts this by querying M365 for a download URL for the item
// and using a http client to initialize a reader // and using a http client to initialize a reader
func oneDriveItemReader( func oneDriveItemReader(
ctx context.Context,
hc *http.Client, hc *http.Client,
item models.DriveItemable, item models.DriveItemable,
) (details.ItemInfo, io.ReadCloser, error) { ) (details.ItemInfo, io.ReadCloser, error) {
@ -88,7 +95,7 @@ func oneDriveItemReader(
) )
if isFile { if isFile {
resp, err := downloadItem(hc, item) resp, err := downloadItem(ctx, hc, item)
if err != nil { if err != nil {
return details.ItemInfo{}, nil, errors.Wrap(err, "downloading item") return details.ItemInfo{}, nil, errors.Wrap(err, "downloading item")
} }
@ -103,15 +110,15 @@ func oneDriveItemReader(
return dii, rc, nil return dii, rc, nil
} }
func downloadItem(hc *http.Client, item models.DriveItemable) (*http.Response, error) { func downloadItem(ctx context.Context, hc *http.Client, item models.DriveItemable) (*http.Response, error) {
url, ok := item.GetAdditionalData()[downloadURLKey].(*string) url, ok := item.GetAdditionalData()[downloadURLKey].(*string)
if !ok { if !ok {
return nil, fmt.Errorf("extracting file url: file %s", *item.GetId()) return nil, clues.New("extracting file url").With("item_id", ptr.Val(item.GetId()))
} }
req, err := http.NewRequest(http.MethodGet, *url, nil) req, err := http.NewRequest(http.MethodGet, *url, nil)
if err != nil { if err != nil {
return nil, errors.Wrap(err, "new request") return nil, graph.Wrap(ctx, err, "new request")
} }
//nolint:lll //nolint:lll
@ -128,23 +135,12 @@ func downloadItem(hc *http.Client, item models.DriveItemable) (*http.Response, e
return resp, nil return resp, nil
} }
if resp.StatusCode == http.StatusTooManyRequests { // upstream error checks can compare the status with
return resp, graph.Err429TooManyRequests // clues.HasLabel(err, graph.LabelStatus(http.KnownStatusCode))
} cerr := clues.Wrap(clues.New(resp.Status), "non-2xx http response").
Label(graph.LabelStatus(resp.StatusCode))
if resp.StatusCode == http.StatusUnauthorized { return resp, cerr
return resp, graph.Err401Unauthorized
}
if resp.StatusCode == http.StatusInternalServerError {
return resp, graph.Err500InternalServerError
}
if resp.StatusCode == http.StatusServiceUnavailable {
return resp, graph.Err503ServiceUnavailable
}
return resp, errors.New("non-2xx http response: " + resp.Status)
} }
// oneDriveItemInfo will populate a details.OneDriveInfo struct // oneDriveItemInfo will populate a details.OneDriveInfo struct
@ -171,9 +167,9 @@ func oneDriveItemInfo(di models.DriveItemable, itemSize int64) *details.OneDrive
return &details.OneDriveInfo{ return &details.OneDriveInfo{
ItemType: details.OneDriveItem, ItemType: details.OneDriveItem,
ItemName: *di.GetName(), ItemName: ptr.Val(di.GetName()),
Created: *di.GetCreatedDateTime(), Created: ptr.Val(di.GetCreatedDateTime()),
Modified: *di.GetLastModifiedDateTime(), Modified: ptr.Val(di.GetLastModifiedDateTime()),
DriveName: parent, DriveName: parent,
Size: itemSize, Size: itemSize,
Owner: email, Owner: email,
@ -187,11 +183,13 @@ func oneDriveItemMetaInfo(
ctx context.Context, service graph.Servicer, ctx context.Context, service graph.Servicer,
driveID string, di models.DriveItemable, driveID string, di models.DriveItemable,
) (Metadata, error) { ) (Metadata, error) {
itemID := di.GetId() perm, err := service.Client().
DrivesById(driveID).
perm, err := service.Client().DrivesById(driveID).ItemsById(*itemID).Permissions().Get(ctx, nil) ItemsById(ptr.Val(di.GetId())).
Permissions().
Get(ctx, nil)
if err != nil { if err != nil {
return Metadata{}, err return Metadata{}, graph.Wrap(ctx, err, "getting item metadata")
} }
uperms := filterUserPermissions(perm.GetValue()) uperms := filterUserPermissions(perm.GetValue())
@ -223,7 +221,7 @@ func filterUserPermissions(perms []models.Permissionable) []UserPermission {
} }
up = append(up, UserPermission{ up = append(up, UserPermission{
ID: *p.GetId(), ID: ptr.Val(p.GetId()),
Roles: roles, Roles: roles,
Email: *p.GetGrantedToV2().GetUser().GetAdditionalData()["email"].(*string), Email: *p.GetGrantedToV2().GetUser().GetAdditionalData()["email"].(*string),
Expiration: p.GetExpirationDateTime(), Expiration: p.GetExpirationDateTime(),
@ -275,9 +273,9 @@ func sharePointItemInfo(di models.DriveItemable, itemSize int64) *details.ShareP
return &details.SharePointInfo{ return &details.SharePointInfo{
ItemType: details.OneDriveItem, ItemType: details.OneDriveItem,
ItemName: *di.GetName(), ItemName: ptr.Val(di.GetName()),
Created: *di.GetCreatedDateTime(), Created: ptr.Val(di.GetCreatedDateTime()),
Modified: *di.GetLastModifiedDateTime(), Modified: ptr.Val(di.GetLastModifiedDateTime()),
DriveName: parent, DriveName: parent,
Size: itemSize, Size: itemSize,
Owner: id, Owner: id,
@ -295,20 +293,16 @@ func driveItemWriter(
itemSize int64, itemSize int64,
) (io.Writer, error) { ) (io.Writer, error) {
session := msdrives.NewItemItemsItemCreateUploadSessionPostRequestBody() session := msdrives.NewItemItemsItemCreateUploadSessionPostRequestBody()
ctx = clues.Add(ctx, "upload_item_id", itemID)
r, err := service.Client().DrivesById(driveID).ItemsById(itemID).CreateUploadSession().Post(ctx, session, nil) r, err := service.Client().DrivesById(driveID).ItemsById(itemID).CreateUploadSession().Post(ctx, session, nil)
if err != nil { if err != nil {
return nil, errors.Wrapf( return nil, graph.Wrap(ctx, err, "creating item upload session")
err,
"failed to create upload session for item %s. details: %s",
itemID,
support.ConnectorStackErrorTrace(err),
)
} }
url := *r.GetUploadUrl() logger.Ctx(ctx).Debug("created an upload session")
logger.Ctx(ctx).Debugf("Created an upload session for item %s. URL: %s", itemID, url) url := ptr.Val(r.GetUploadUrl())
return uploadsession.NewWriter(itemID, url, itemSize), nil return uploadsession.NewWriter(itemID, url, itemSize), nil
} }

View File

@ -15,6 +15,8 @@ import (
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/fault"
) )
type ItemIntegrationSuite struct { type ItemIntegrationSuite struct {
@ -55,10 +57,10 @@ func (suite *ItemIntegrationSuite) SetupSuite() {
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) aw.MustNoErr(t, err)
adapter, err := graph.CreateAdapter(m365.AzureTenantID, m365.AzureClientID, m365.AzureClientSecret) adapter, err := graph.CreateAdapter(m365.AzureTenantID, m365.AzureClientID, m365.AzureClientSecret)
require.NoError(t, err) aw.MustNoErr(t, err)
suite.client = msgraphsdk.NewGraphServiceClient(adapter) suite.client = msgraphsdk.NewGraphServiceClient(adapter)
suite.adapter = adapter suite.adapter = adapter
@ -67,7 +69,7 @@ func (suite *ItemIntegrationSuite) SetupSuite() {
// and guarateed drive read-write access) // and guarateed drive read-write access)
// suite.site = tester.M365SiteID(t) // suite.site = tester.M365SiteID(t)
// spDrives, err := drives(ctx, suite, suite.site, SharePointSource) // spDrives, err := drives(ctx, suite, suite.site, SharePointSource)
// require.NoError(t, err) // aw.MustNoErr(t, err)
// // Test Requirement 1: Need a drive // // Test Requirement 1: Need a drive
// require.Greaterf(t, len(spDrives), 0, "site %s does not have a drive", suite.site) // require.Greaterf(t, len(spDrives), 0, "site %s does not have a drive", suite.site)
@ -77,10 +79,10 @@ func (suite *ItemIntegrationSuite) SetupSuite() {
suite.user = tester.SecondaryM365UserID(t) suite.user = tester.SecondaryM365UserID(t)
pager, err := PagerForSource(OneDriveSource, suite, suite.user, nil) pager, err := PagerForSource(OneDriveSource, suite, suite.user, nil)
require.NoError(t, err) aw.MustNoErr(t, err)
odDrives, err := drives(ctx, pager, true) odDrives, err := drives(ctx, pager, true)
require.NoError(t, err) aw.MustNoErr(t, err)
// Test Requirement 1: Need a drive // Test Requirement 1: Need a drive
require.Greaterf(t, len(odDrives), 0, "user %s does not have a drive", suite.user) require.Greaterf(t, len(odDrives), 0, "user %s does not have a drive", suite.user)
@ -107,6 +109,7 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
newPaths map[string]string, newPaths map[string]string,
excluded map[string]struct{}, excluded map[string]struct{},
doNotMergeItems bool, doNotMergeItems bool,
errs *fault.Bus,
) error { ) error {
for _, item := range items { for _, item := range items {
if item.GetFile() != nil { if item.GetFile() != nil {
@ -129,8 +132,8 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
itemCollector, itemCollector,
map[string]string{}, map[string]string{},
"", "",
) fault.New(true))
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
// Test Requirement 2: Need a file // Test Requirement 2: Need a file
require.NotEmpty( require.NotEmpty(
@ -142,14 +145,14 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
) )
// Read data for the file // Read data for the file
itemInfo, itemData, err := oneDriveItemReader(graph.HTTPClient(graph.NoTimeout()), driveItem) itemInfo, itemData, err := oneDriveItemReader(ctx, graph.HTTPClient(graph.NoTimeout()), driveItem)
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
require.NotNil(suite.T(), itemInfo.OneDrive) require.NotNil(suite.T(), itemInfo.OneDrive)
require.NotEmpty(suite.T(), itemInfo.OneDrive.ItemName) require.NotEmpty(suite.T(), itemInfo.OneDrive.ItemName)
size, err := io.Copy(io.Discard, itemData) size, err := io.Copy(io.Discard, itemData)
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
require.NotZero(suite.T(), size) require.NotZero(suite.T(), size)
require.Equal(suite.T(), size, itemInfo.OneDrive.Size) require.Equal(suite.T(), size, itemInfo.OneDrive.Size)
suite.T().Logf("Read %d bytes from file %s.", size, itemInfo.OneDrive.ItemName) suite.T().Logf("Read %d bytes from file %s.", size, itemInfo.OneDrive.ItemName)
@ -178,17 +181,17 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
defer flush() defer flush()
root, err := suite.Client().DrivesById(test.driveID).Root().Get(ctx, nil) root, err := suite.Client().DrivesById(test.driveID).Root().Get(ctx, nil)
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
// Test Requirement 2: "Test Folder" should exist // Test Requirement 2: "Test Folder" should exist
folder, err := getFolder(ctx, suite, test.driveID, *root.GetId(), "Test Folder") folder, err := getFolder(ctx, suite, test.driveID, *root.GetId(), "Test Folder")
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
newFolderName := "testfolder_" + common.FormatNow(common.SimpleTimeTesting) newFolderName := "testfolder_" + common.FormatNow(common.SimpleTimeTesting)
suite.T().Logf("Test will create folder %s", newFolderName) suite.T().Logf("Test will create folder %s", newFolderName)
newFolder, err := createItem(ctx, suite, test.driveID, *folder.GetId(), newItem(newFolderName, true)) newFolder, err := createItem(ctx, suite, test.driveID, *folder.GetId(), newItem(newFolderName, true))
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
require.NotNil(suite.T(), newFolder.GetId()) require.NotNil(suite.T(), newFolder.GetId())
@ -196,20 +199,20 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
suite.T().Logf("Test will create item %s", newItemName) suite.T().Logf("Test will create item %s", newItemName)
newItem, err := createItem(ctx, suite, test.driveID, *newFolder.GetId(), newItem(newItemName, false)) newItem, err := createItem(ctx, suite, test.driveID, *newFolder.GetId(), newItem(newItemName, false))
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
require.NotNil(suite.T(), newItem.GetId()) require.NotNil(suite.T(), newItem.GetId())
// HACK: Leveraging this to test getFolder behavior for a file. `getFolder()` on the // HACK: Leveraging this to test getFolder behavior for a file. `getFolder()` on the
// newly created item should fail because it's a file not a folder // newly created item should fail because it's a file not a folder
_, err = getFolder(ctx, suite, test.driveID, *newFolder.GetId(), newItemName) _, err = getFolder(ctx, suite, test.driveID, *newFolder.GetId(), newItemName)
require.ErrorIs(suite.T(), err, errFolderNotFound) aw.MustErrIs(suite.T(), err, errFolderNotFound)
// Initialize a 100KB mockDataProvider // Initialize a 100KB mockDataProvider
td, writeSize := mockDataReader(int64(100 * 1024)) td, writeSize := mockDataReader(int64(100 * 1024))
w, err := driveItemWriter(ctx, suite, test.driveID, *newItem.GetId(), writeSize) w, err := driveItemWriter(ctx, suite, test.driveID, *newItem.GetId(), writeSize)
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
// Using a 32 KB buffer for the copy allows us to validate the // Using a 32 KB buffer for the copy allows us to validate the
// multi-part upload. `io.CopyBuffer` will only write 32 KB at // multi-part upload. `io.CopyBuffer` will only write 32 KB at
@ -217,7 +220,7 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
copyBuffer := make([]byte, 32*1024) copyBuffer := make([]byte, 32*1024)
size, err := io.CopyBuffer(w, td, copyBuffer) size, err := io.CopyBuffer(w, td, copyBuffer)
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
require.Equal(suite.T(), writeSize, size) require.Equal(suite.T(), writeSize, size)
}) })
@ -249,15 +252,15 @@ func (suite *ItemIntegrationSuite) TestDriveGetFolder() {
defer flush() defer flush()
root, err := suite.Client().DrivesById(test.driveID).Root().Get(ctx, nil) root, err := suite.Client().DrivesById(test.driveID).Root().Get(ctx, nil)
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
// Lookup a folder that doesn't exist // Lookup a folder that doesn't exist
_, err = getFolder(ctx, suite, test.driveID, *root.GetId(), "FolderDoesNotExist") _, err = getFolder(ctx, suite, test.driveID, *root.GetId(), "FolderDoesNotExist")
require.ErrorIs(suite.T(), err, errFolderNotFound) aw.MustErrIs(suite.T(), err, errFolderNotFound)
// Lookup a folder that does exist // Lookup a folder that does exist
_, err = getFolder(ctx, suite, test.driveID, *root.GetId(), "") _, err = getFolder(ctx, suite, test.driveID, *root.GetId(), "")
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
}) })
} }
} }

View File

@ -3,7 +3,6 @@ package onedrive
import ( import (
"context" "context"
"encoding/json" "encoding/json"
"fmt"
"io" "io"
"runtime/trace" "runtime/trace"
"sort" "sort"
@ -14,6 +13,7 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
@ -21,6 +21,7 @@ import (
"github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
) )
@ -49,7 +50,7 @@ func getParentPermissions(
} }
if len(onedrivePath.Folders) != 0 { if len(onedrivePath.Folders) != 0 {
return nil, errors.Wrap(err, "unable to compute item permissions") return nil, errors.Wrap(err, "computing item permissions")
} }
parentPerms = []UserPermission{} parentPerms = []UserPermission{}
@ -69,7 +70,6 @@ func getParentAndCollectionPermissions(
} }
var ( var (
err error
parentPerms []UserPermission parentPerms []UserPermission
colPerms []UserPermission colPerms []UserPermission
) )
@ -89,7 +89,7 @@ func getParentAndCollectionPermissions(
// TODO(ashmrtn): For versions after this pull the permissions from the // TODO(ashmrtn): For versions after this pull the permissions from the
// current collection with Fetch(). // current collection with Fetch().
colPerms, err = getParentPermissions(collectionPath, permissions) colPerms, err := getParentPermissions(collectionPath, permissions)
if err != nil { if err != nil {
return nil, nil, clues.Wrap(err, "getting collection permissions") return nil, nil, clues.Wrap(err, "getting collection permissions")
} }
@ -106,22 +106,22 @@ func RestoreCollections(
opts control.Options, opts control.Options,
dcs []data.RestoreCollection, dcs []data.RestoreCollection,
deets *details.Builder, deets *details.Builder,
errs *fault.Bus,
) (*support.ConnectorOperationStatus, error) { ) (*support.ConnectorOperationStatus, error) {
var ( var (
restoreMetrics support.CollectionMetrics restoreMetrics support.CollectionMetrics
restoreErrors error
metrics support.CollectionMetrics metrics support.CollectionMetrics
folderPerms map[string][]UserPermission folderPerms map[string][]UserPermission
canceled bool
// permissionIDMappings is used to map between old and new id // permissionIDMappings is used to map between old and new id
// of permissions as we restore them // of permissions as we restore them
permissionIDMappings = map[string]string{} permissionIDMappings = map[string]string{}
) )
errUpdater := func(id string, err error) { ctx = clues.Add(
restoreErrors = support.WrapAndAppend(id, err, restoreErrors) ctx,
} "backup_version", backupVersion,
"destination", dest.ContainerName)
// Reorder collections so that the parents directories are created // Reorder collections so that the parents directories are created
// before the child directories // before the child directories
@ -129,12 +129,28 @@ func RestoreCollections(
return dcs[i].FullPath().String() < dcs[j].FullPath().String() return dcs[i].FullPath().String() < dcs[j].FullPath().String()
}) })
parentPermissions := map[string][]UserPermission{} var (
el = errs.Local()
parentPermissions = map[string][]UserPermission{}
)
// Iterate through the data collections and restore the contents of each // Iterate through the data collections and restore the contents of each
for _, dc := range dcs { for _, dc := range dcs {
metrics, folderPerms, permissionIDMappings, canceled = RestoreCollection( if el.Failure() != nil {
break
}
var (
err error
ictx = clues.Add(
ctx, ctx,
"resource_owner", dc.FullPath().ResourceOwner(), // TODO: pii
"category", dc.FullPath().Category(),
"path", dc.FullPath()) // TODO: pii
)
metrics, folderPerms, permissionIDMappings, err = RestoreCollection(
ictx,
backupVersion, backupVersion,
service, service,
dc, dc,
@ -142,30 +158,32 @@ func RestoreCollections(
OneDriveSource, OneDriveSource,
dest.ContainerName, dest.ContainerName,
deets, deets,
errUpdater,
permissionIDMappings, permissionIDMappings,
opts.RestorePermissions, opts.RestorePermissions,
) errs)
if err != nil {
el.AddRecoverable(err)
}
for k, v := range folderPerms { for k, v := range folderPerms {
parentPermissions[k] = v parentPermissions[k] = v
} }
restoreMetrics.Combine(metrics) restoreMetrics = support.CombineMetrics(restoreMetrics, metrics)
if canceled { if errors.Is(err, context.Canceled) {
break break
} }
} }
return support.CreateStatus( status := support.CreateStatus(
ctx, ctx,
support.Restore, support.Restore,
len(dcs), len(dcs),
restoreMetrics, restoreMetrics,
restoreErrors, dest.ContainerName)
dest.ContainerName),
nil return status, el.Failure()
} }
// RestoreCollection handles restoration of an individual collection. // RestoreCollection handles restoration of an individual collection.
@ -181,10 +199,10 @@ func RestoreCollection(
source driveSource, source driveSource,
restoreContainerName string, restoreContainerName string,
deets *details.Builder, deets *details.Builder,
errUpdater func(string, error),
permissionIDMappings map[string]string, permissionIDMappings map[string]string,
restorePerms bool, restorePerms bool,
) (support.CollectionMetrics, map[string][]UserPermission, map[string]string, bool) { errs *fault.Bus,
) (support.CollectionMetrics, map[string][]UserPermission, map[string]string, error) {
ctx, end := D.Span(ctx, "gc:oneDrive:restoreCollection", D.Label("path", dc.FullPath())) ctx, end := D.Span(ctx, "gc:oneDrive:restoreCollection", D.Label("path", dc.FullPath()))
defer end() defer end()
@ -199,8 +217,7 @@ func RestoreCollection(
drivePath, err := path.ToOneDrivePath(directory) drivePath, err := path.ToOneDrivePath(directory)
if err != nil { if err != nil {
errUpdater(directory.String(), err) return metrics, folderPerms, permissionIDMappings, clues.Wrap(err, "creating drive path").WithClues(ctx)
return metrics, folderPerms, permissionIDMappings, false
} }
// Assemble folder hierarchy we're going to restore into (we recreate the folder hierarchy // Assemble folder hierarchy we're going to restore into (we recreate the folder hierarchy
@ -210,11 +227,13 @@ func RestoreCollection(
restoreFolderElements := []string{restoreContainerName} restoreFolderElements := []string{restoreContainerName}
restoreFolderElements = append(restoreFolderElements, drivePath.Folders...) restoreFolderElements = append(restoreFolderElements, drivePath.Folders...)
ctx = clues.Add(
ctx,
"destination_elements", restoreFolderElements,
"drive_id", drivePath.DriveID)
trace.Log(ctx, "gc:oneDrive:restoreCollection", directory.String()) trace.Log(ctx, "gc:oneDrive:restoreCollection", directory.String())
logger.Ctx(ctx).Infow( logger.Ctx(ctx).Info("restoring onedrive collection")
"restoring to destination",
"origin", dc.FullPath().Folder(false),
"destination", restoreFolderElements)
parentPerms, colPerms, err := getParentAndCollectionPermissions( parentPerms, colPerms, err := getParentAndCollectionPermissions(
drivePath, drivePath,
@ -222,8 +241,7 @@ func RestoreCollection(
parentPermissions, parentPermissions,
restorePerms) restorePerms)
if err != nil { if err != nil {
errUpdater(directory.String(), err) return metrics, folderPerms, permissionIDMappings, clues.Wrap(err, "getting permissions").WithClues(ctx)
return metrics, folderPerms, permissionIDMappings, false
} }
// Create restore folders and get the folder ID of the folder the data stream will be restored in // Create restore folders and get the folder ID of the folder the data stream will be restored in
@ -237,38 +255,40 @@ func RestoreCollection(
permissionIDMappings, permissionIDMappings,
) )
if err != nil { if err != nil {
errUpdater(directory.String(), errors.Wrapf(err, "failed to create folders %v", restoreFolderElements)) return metrics, folderPerms, permissionIDMappings, clues.Wrap(err, "creating folders for restore")
return metrics, folderPerms, permissionIDMappings, false
} }
// Restore items from the collection var (
items := dc.Items(ctx, nil) // TODO: fault.Errors instead of nil el = errs.Local()
items = dc.Items(ctx, errs)
)
for { for {
if el.Failure() != nil {
break
}
select { select {
case <-ctx.Done(): case <-ctx.Done():
errUpdater("context canceled", ctx.Err()) return metrics, folderPerms, permissionIDMappings, err
return metrics, folderPerms, permissionIDMappings, true
case itemData, ok := <-items: case itemData, ok := <-items:
if !ok { if !ok {
return metrics, folderPerms, permissionIDMappings, false return metrics, folderPerms, permissionIDMappings, nil
} }
itemPath, err := dc.FullPath().Append(itemData.UUID(), true) itemPath, err := dc.FullPath().Append(itemData.UUID(), true)
if err != nil { if err != nil {
logger.Ctx(ctx).DPanicw("transforming item to full path", "error", err) el.AddRecoverable(clues.Wrap(err, "appending item to full path").WithClues(ctx))
errUpdater(itemData.UUID(), err)
continue continue
} }
if source == OneDriveSource && backupVersion >= versionWithDataAndMetaFiles { if source == OneDriveSource && backupVersion >= versionWithDataAndMetaFiles {
name := itemData.UUID() name := itemData.UUID()
if strings.HasSuffix(name, DataFileSuffix) { if strings.HasSuffix(name, DataFileSuffix) {
metrics.Objects++ metrics.Objects++
metrics.TotalBytes += int64(len(copyBuffer)) metrics.Bytes += int64(len(copyBuffer))
trimmedName := strings.TrimSuffix(name, DataFileSuffix) trimmedName := strings.TrimSuffix(name, DataFileSuffix)
itemID, itemInfo, err = restoreData( itemID, itemInfo, err = restoreData(
@ -281,7 +301,7 @@ func RestoreCollection(
copyBuffer, copyBuffer,
source) source)
if err != nil { if err != nil {
errUpdater(itemData.UUID(), err) el.AddRecoverable(err)
continue continue
} }
@ -305,7 +325,7 @@ func RestoreCollection(
permsFile, err := dc.Fetch(ctx, metaName) permsFile, err := dc.Fetch(ctx, metaName)
if err != nil { if err != nil {
errUpdater(metaName, clues.Wrap(err, "getting item metadata")) el.AddRecoverable(clues.Wrap(err, "getting item metadata"))
continue continue
} }
@ -314,7 +334,7 @@ func RestoreCollection(
metaReader.Close() metaReader.Close()
if err != nil { if err != nil {
errUpdater(metaName, clues.Wrap(err, "deserializing item metadata")) el.AddRecoverable(clues.Wrap(err, "deserializing item metadata"))
continue continue
} }
@ -325,10 +345,9 @@ func RestoreCollection(
itemID, itemID,
colPerms, colPerms,
meta.Permissions, meta.Permissions,
permissionIDMappings, permissionIDMappings)
)
if err != nil { if err != nil {
errUpdater(trimmedName, clues.Wrap(err, "restoring item permissions")) el.AddRecoverable(clues.Wrap(err, "restoring item permissions"))
continue continue
} }
@ -344,28 +363,25 @@ func RestoreCollection(
} }
metaReader := itemData.ToReader() metaReader := itemData.ToReader()
meta, err := getMetadata(metaReader) defer metaReader.Close()
metaReader.Close()
meta, err := getMetadata(metaReader)
if err != nil { if err != nil {
errUpdater(itemData.UUID(), clues.Wrap(err, "folder metadata")) el.AddRecoverable(clues.Wrap(err, "getting directory metadata").WithClues(ctx))
continue continue
} }
trimmedPath := strings.TrimSuffix(itemPath.String(), DirMetaFileSuffix) trimmedPath := strings.TrimSuffix(itemPath.String(), DirMetaFileSuffix)
folderPerms[trimmedPath] = meta.Permissions folderPerms[trimmedPath] = meta.Permissions
} else {
if !ok {
errUpdater(itemData.UUID(), fmt.Errorf("invalid backup format, you might be using an old backup"))
continue
}
} }
} else { } else {
metrics.Objects++ metrics.Objects++
metrics.TotalBytes += int64(len(copyBuffer)) metrics.Bytes += int64(len(copyBuffer))
// No permissions stored at the moment for SharePoint // No permissions stored at the moment for SharePoint
_, itemInfo, err = restoreData(ctx, _, itemInfo, err = restoreData(
ctx,
service, service,
itemData.UUID(), itemData.UUID(),
itemData, itemData,
@ -374,7 +390,7 @@ func RestoreCollection(
copyBuffer, copyBuffer,
source) source)
if err != nil { if err != nil {
errUpdater(itemData.UUID(), err) el.AddRecoverable(err)
continue continue
} }
@ -389,6 +405,8 @@ func RestoreCollection(
} }
} }
} }
return metrics, folderPerms, permissionIDMappings, el.Failure()
} }
// createRestoreFoldersWithPermissions creates the restore folder hierarchy in // createRestoreFoldersWithPermissions creates the restore folder hierarchy in
@ -431,42 +449,31 @@ func CreateRestoreFolders(
) (string, error) { ) (string, error) {
driveRoot, err := service.Client().DrivesById(driveID).Root().Get(ctx, nil) driveRoot, err := service.Client().DrivesById(driveID).Root().Get(ctx, nil)
if err != nil { if err != nil {
return "", errors.Wrapf( return "", graph.Wrap(ctx, err, "getting drive root")
err,
"failed to get drive root. details: %s",
support.ConnectorStackErrorTrace(err),
)
} }
logger.Ctx(ctx).Debugf("Found Root for Drive %s with ID %s", driveID, *driveRoot.GetId()) parentFolderID := ptr.Val(driveRoot.GetId())
ctx = clues.Add(ctx, "drive_root_id", parentFolderID)
logger.Ctx(ctx).Debug("found drive root")
parentFolderID := *driveRoot.GetId()
for _, folder := range restoreFolders { for _, folder := range restoreFolders {
folderItem, err := getFolder(ctx, service, driveID, parentFolderID, folder) folderItem, err := getFolder(ctx, service, driveID, parentFolderID, folder)
if err == nil { if err == nil {
parentFolderID = *folderItem.GetId() parentFolderID = ptr.Val(folderItem.GetId())
logger.Ctx(ctx).Debugf("Found %s with ID %s", folder, parentFolderID)
continue continue
} }
if !errors.Is(err, errFolderNotFound) { if errors.Is(err, errFolderNotFound) {
return "", errors.Wrapf(err, "folder %s not found in drive(%s) parentFolder(%s)", folder, driveID, parentFolderID) return "", clues.Wrap(err, "folder not found").With("folder_id", folder).WithClues(ctx)
} }
folderItem, err = createItem(ctx, service, driveID, parentFolderID, newItem(folder, true)) folderItem, err = createItem(ctx, service, driveID, parentFolderID, newItem(folder, true))
if err != nil { if err != nil {
return "", errors.Wrapf( return "", clues.Wrap(err, "creating folder")
err,
"failed to create folder %s/%s. details: %s", parentFolderID, folder,
support.ConnectorStackErrorTrace(err),
)
} }
logger.Ctx(ctx).Debugw("resolved restore destination", logger.Ctx(ctx).Debugw("resolved restore destination", "dest_id", *folderItem.GetId())
"dest_name", folder,
"parent", parentFolderID,
"dest_id", *folderItem.GetId())
parentFolderID = *folderItem.GetId() parentFolderID = *folderItem.GetId()
} }
@ -487,25 +494,27 @@ func restoreData(
ctx, end := D.Span(ctx, "gc:oneDrive:restoreItem", D.Label("item_uuid", itemData.UUID())) ctx, end := D.Span(ctx, "gc:oneDrive:restoreItem", D.Label("item_uuid", itemData.UUID()))
defer end() defer end()
ctx = clues.Add(ctx, "item_name", itemData.UUID())
itemName := itemData.UUID() itemName := itemData.UUID()
trace.Log(ctx, "gc:oneDrive:restoreItem", itemName) trace.Log(ctx, "gc:oneDrive:restoreItem", itemName)
// Get the stream size (needed to create the upload session) // Get the stream size (needed to create the upload session)
ss, ok := itemData.(data.StreamSize) ss, ok := itemData.(data.StreamSize)
if !ok { if !ok {
return "", details.ItemInfo{}, errors.Errorf("item %q does not implement DataStreamInfo", itemName) return "", details.ItemInfo{}, clues.New("item does not implement DataStreamInfo").WithClues(ctx)
} }
// Create Item // Create Item
newItem, err := createItem(ctx, service, driveID, parentFolderID, newItem(name, false)) newItem, err := createItem(ctx, service, driveID, parentFolderID, newItem(name, false))
if err != nil { if err != nil {
return "", details.ItemInfo{}, errors.Wrapf(err, "failed to create item %s", itemName) return "", details.ItemInfo{}, clues.Wrap(err, "creating item")
} }
// Get a drive item writer // Get a drive item writer
w, err := driveItemWriter(ctx, service, driveID, *newItem.GetId(), ss.Size()) w, err := driveItemWriter(ctx, service, driveID, *newItem.GetId(), ss.Size())
if err != nil { if err != nil {
return "", details.ItemInfo{}, errors.Wrapf(err, "failed to create item upload session %s", itemName) return "", details.ItemInfo{}, clues.Wrap(err, "creating item writer")
} }
iReader := itemData.ToReader() iReader := itemData.ToReader()
@ -516,7 +525,7 @@ func restoreData(
// Upload the stream data // Upload the stream data
written, err := io.CopyBuffer(w, progReader, copyBuffer) written, err := io.CopyBuffer(w, progReader, copyBuffer)
if err != nil { if err != nil {
return "", details.ItemInfo{}, errors.Wrapf(err, "failed to upload data: item %s", itemName) return "", details.ItemInfo{}, graph.Wrap(ctx, err, "writing item bytes")
} }
dii := details.ItemInfo{} dii := details.ItemInfo{}
@ -607,16 +616,16 @@ func restorePermissions(
) (map[string]string, error) { ) (map[string]string, error) {
permAdded, permRemoved := getChildPermissions(childPerms, parentPerms) permAdded, permRemoved := getChildPermissions(childPerms, parentPerms)
ctx = clues.Add(ctx, "permission_item_id", itemID)
for _, p := range permRemoved { for _, p := range permRemoved {
err := service.Client().DrivesById(driveID).ItemsById(itemID). err := service.Client().
PermissionsById(permissionIDMappings[p.ID]).Delete(ctx, nil) DrivesById(driveID).
ItemsById(itemID).
PermissionsById(permissionIDMappings[p.ID]).
Delete(ctx, nil)
if err != nil { if err != nil {
return permissionIDMappings, errors.Wrapf( return permissionIDMappings, graph.Wrap(ctx, err, "removing permissions")
err,
"failed to remove permission for item %s. details: %s",
itemID,
support.ConnectorStackErrorTrace(err),
)
} }
} }
@ -641,12 +650,7 @@ func restorePermissions(
np, err := service.Client().DrivesById(driveID).ItemsById(itemID).Invite().Post(ctx, pbody, nil) np, err := service.Client().DrivesById(driveID).ItemsById(itemID).Invite().Post(ctx, pbody, nil)
if err != nil { if err != nil {
return permissionIDMappings, errors.Wrapf( return permissionIDMappings, graph.Wrap(ctx, err, "setting permissions")
err,
"failed to set permission for item %s. details: %s",
itemID,
support.ConnectorStackErrorTrace(err),
)
} }
permissionIDMappings[p.ID] = *np.GetValue()[0].GetId() permissionIDMappings[p.ID] = *np.GetValue()[0].GetId()

View File

@ -4,11 +4,11 @@ import (
"testing" "testing"
msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go" msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
"github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
) )
@ -68,10 +68,10 @@ func (ods *oneDriveService) updateStatus(status *support.ConnectorOperationStatu
func loadTestService(t *testing.T) *oneDriveService { func loadTestService(t *testing.T) *oneDriveService {
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) aw.MustNoErr(t, err)
service, err := NewOneDriveService(m365) service, err := NewOneDriveService(m365)
require.NoError(t, err) aw.MustNoErr(t, err)
return service return service
} }

View File

@ -3,10 +3,9 @@ package api_test
import ( import (
"testing" "testing"
"github.com/stretchr/testify/require"
discover "github.com/alcionai/corso/src/internal/connector/discovery/api" discover "github.com/alcionai/corso/src/internal/connector/discovery/api"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
) )
@ -16,7 +15,7 @@ func createTestBetaService(t *testing.T, credentials account.M365Config) *discov
credentials.AzureClientID, credentials.AzureClientID,
credentials.AzureClientSecret, credentials.AzureClientSecret,
) )
require.NoError(t, err) aw.MustNoErr(t, err)
return discover.NewBetaService(adapter) return discover.NewBetaService(adapter)
} }

View File

@ -28,13 +28,13 @@ func GetSitePages(
serv *discover.BetaService, serv *discover.BetaService,
siteID string, siteID string,
pages []string, pages []string,
errs *fault.Errors, errs *fault.Bus,
) ([]models.SitePageable, error) { ) ([]models.SitePageable, error) {
var ( var (
col = make([]models.SitePageable, 0) col = make([]models.SitePageable, 0)
semaphoreCh = make(chan struct{}, fetchChannelSize) semaphoreCh = make(chan struct{}, fetchChannelSize)
opts = retrieveSitePageOptions() opts = retrieveSitePageOptions()
err error el = errs.Local()
wg sync.WaitGroup wg sync.WaitGroup
m sync.Mutex m sync.Mutex
) )
@ -49,7 +49,7 @@ func GetSitePages(
} }
for _, entry := range pages { for _, entry := range pages {
if errs.Err() != nil { if el.Failure() != nil {
break break
} }
@ -61,11 +61,14 @@ func GetSitePages(
defer wg.Done() defer wg.Done()
defer func() { <-semaphoreCh }() defer func() { <-semaphoreCh }()
var page models.SitePageable var (
page models.SitePageable
err error
)
page, err = serv.Client().SitesById(siteID).PagesById(pageID).Get(ctx, opts) page, err = serv.Client().SitesById(siteID).PagesById(pageID).Get(ctx, opts)
if err != nil { if err != nil {
errs.Add(clues.Wrap(err, "fetching page").WithClues(ctx).With(graph.ErrData(err)...)) el.AddRecoverable(graph.Wrap(ctx, err, "fetching page"))
return return
} }
@ -75,7 +78,7 @@ func GetSitePages(
wg.Wait() wg.Wait()
return col, errs.Err() return col, el.Failure()
} }
// fetchPages utility function to return the tuple of item // fetchPages utility function to return the tuple of item
@ -91,7 +94,7 @@ func FetchPages(ctx context.Context, bs *discover.BetaService, siteID string) ([
for { for {
resp, err = builder.Get(ctx, opts) resp, err = builder.Get(ctx, opts)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "fetching site page").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "fetching site page")
} }
for _, entry := range resp.GetValue() { for _, entry := range resp.GetValue() {
@ -140,7 +143,7 @@ func DeleteSitePage(
) error { ) error {
err := serv.Client().SitesById(siteID).PagesById(pageID).Delete(ctx, nil) err := serv.Client().SitesById(siteID).PagesById(pageID).Delete(ctx, nil)
if err != nil { if err != nil {
return clues.Wrap(err, "deleting page").WithClues(ctx).With(graph.ErrData(err)...) return graph.Wrap(ctx, err, "deleting page")
} }
return nil return nil
@ -200,7 +203,7 @@ func RestoreSitePage(
// See: https://learn.microsoft.com/en-us/graph/api/sitepage-create?view=graph-rest-beta // See: https://learn.microsoft.com/en-us/graph/api/sitepage-create?view=graph-rest-beta
restoredPage, err := service.Client().SitesById(siteID).Pages().Post(ctx, page, nil) restoredPage, err := service.Client().SitesById(siteID).Pages().Post(ctx, page, nil)
if err != nil { if err != nil {
return dii, clues.Wrap(err, "creating page").WithClues(ctx).With(graph.ErrData(err)...) return dii, graph.Wrap(ctx, err, "creating page")
} }
pageID = ptr.Val(restoredPage.GetId()) pageID = ptr.Val(restoredPage.GetId())
@ -218,7 +221,7 @@ func RestoreSitePage(
Publish(). Publish().
Post(ctx, nil) Post(ctx, nil)
if err != nil { if err != nil {
return dii, clues.Wrap(err, "publishing page").WithClues(ctx).With(graph.ErrData(err)...) return dii, graph.Wrap(ctx, err, "publishing page")
} }
dii.SharePoint = PageInfo(restoredPage, int64(len(byteArray))) dii.SharePoint = PageInfo(restoredPage, int64(len(byteArray)))

View File

@ -15,6 +15,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/sharepoint" "github.com/alcionai/corso/src/internal/connector/sharepoint"
"github.com/alcionai/corso/src/internal/connector/sharepoint/api" "github.com/alcionai/corso/src/internal/connector/sharepoint/api"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
) )
@ -33,7 +34,7 @@ func (suite *SharePointPageSuite) SetupSuite() {
suite.siteID = tester.M365SiteID(t) suite.siteID = tester.M365SiteID(t)
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) aw.MustNoErr(t, err)
suite.creds = m365 suite.creds = m365
suite.service = createTestBetaService(t, suite.creds) suite.service = createTestBetaService(t, suite.creds)
@ -53,7 +54,7 @@ func (suite *SharePointPageSuite) TestFetchPages() {
t := suite.T() t := suite.T()
pgs, err := api.FetchPages(ctx, suite.service, suite.siteID) pgs, err := api.FetchPages(ctx, suite.service, suite.siteID)
assert.NoError(t, err) aw.NoErr(t, err)
require.NotNil(t, pgs) require.NotNil(t, pgs)
assert.NotZero(t, len(pgs)) assert.NotZero(t, len(pgs))
@ -68,12 +69,12 @@ func (suite *SharePointPageSuite) TestGetSitePages() {
t := suite.T() t := suite.T()
tuples, err := api.FetchPages(ctx, suite.service, suite.siteID) tuples, err := api.FetchPages(ctx, suite.service, suite.siteID)
require.NoError(t, err) aw.MustNoErr(t, err)
require.NotNil(t, tuples) require.NotNil(t, tuples)
jobs := []string{tuples[0].ID} jobs := []string{tuples[0].ID}
pages, err := api.GetSitePages(ctx, suite.service, suite.siteID, jobs, fault.New(true)) pages, err := api.GetSitePages(ctx, suite.service, suite.siteID, jobs, fault.New(true))
assert.NoError(t, err) aw.NoErr(t, err)
assert.NotEmpty(t, pages) assert.NotEmpty(t, pages)
} }
@ -103,11 +104,11 @@ func (suite *SharePointPageSuite) TestRestoreSinglePage() {
destName, destName,
) )
require.NoError(t, err) aw.MustNoErr(t, err)
require.NotNil(t, info) require.NotNil(t, info)
// Clean Up // Clean Up
pageID := info.SharePoint.ParentPath pageID := info.SharePoint.ParentPath
err = api.DeleteSitePage(ctx, suite.service, suite.siteID, pageID) err = api.DeleteSitePage(ctx, suite.service, suite.siteID, pageID)
assert.NoError(t, err) aw.NoErr(t, err)
} }

View File

@ -43,12 +43,6 @@ var (
_ data.StreamModTime = &Item{} _ data.StreamModTime = &Item{}
) )
type numMetrics struct {
attempts int
success int
totalBytes int64
}
// Collection is the SharePoint.List implementation of data.Collection. SharePoint.Libraries collections are supported // Collection is the SharePoint.List implementation of data.Collection. SharePoint.Libraries collections are supported
// by the oneDrive.Collection as the calls are identical for populating the Collection // by the oneDrive.Collection as the calls are identical for populating the Collection
type Collection struct { type Collection struct {
@ -112,7 +106,7 @@ func (sc Collection) DoNotMergeItems() bool {
func (sc *Collection) Items( func (sc *Collection) Items(
ctx context.Context, ctx context.Context,
errs *fault.Errors, errs *fault.Bus,
) <-chan data.Stream { ) <-chan data.Stream {
go sc.populate(ctx, errs) go sc.populate(ctx, errs)
return sc.data return sc.data
@ -157,24 +151,17 @@ func (sd *Item) ModTime() time.Time {
func (sc *Collection) finishPopulation( func (sc *Collection) finishPopulation(
ctx context.Context, ctx context.Context,
attempts, success int, metrics support.CollectionMetrics,
totalBytes int64,
err error,
) { ) {
close(sc.data) close(sc.data)
attempted := attempts
status := support.CreateStatus( status := support.CreateStatus(
ctx, ctx,
support.Backup, support.Backup,
1, // 1 folder 1, // 1 folder
support.CollectionMetrics{ metrics,
Objects: attempted,
Successes: success,
TotalBytes: totalBytes,
},
err,
sc.fullPath.Folder(false)) sc.fullPath.Folder(false))
logger.Ctx(ctx).Debug(status.String()) logger.Ctx(ctx).Debug(status.String())
if sc.statusUpdater != nil { if sc.statusUpdater != nil {
@ -183,16 +170,17 @@ func (sc *Collection) finishPopulation(
} }
// populate utility function to retrieve data from back store for a given collection // populate utility function to retrieve data from back store for a given collection
func (sc *Collection) populate(ctx context.Context, errs *fault.Errors) { func (sc *Collection) populate(ctx context.Context, errs *fault.Bus) {
var ( metrics, _ := sc.runPopulate(ctx, errs)
metrics numMetrics sc.finishPopulation(ctx, metrics)
writer = kw.NewJsonSerializationWriter() }
err error
)
defer func() { func (sc *Collection) runPopulate(ctx context.Context, errs *fault.Bus) (support.CollectionMetrics, error) {
sc.finishPopulation(ctx, metrics.attempts, metrics.success, int64(metrics.totalBytes), err) var (
}() err error
metrics support.CollectionMetrics
writer = kw.NewJsonSerializationWriter()
)
// TODO: Insert correct ID for CollectionProgress // TODO: Insert correct ID for CollectionProgress
colProgress, closer := observe.CollectionProgress( colProgress, closer := observe.CollectionProgress(
@ -213,6 +201,8 @@ func (sc *Collection) populate(ctx context.Context, errs *fault.Errors) {
case Pages: case Pages:
metrics, err = sc.retrievePages(ctx, writer, colProgress, errs) metrics, err = sc.retrievePages(ctx, writer, colProgress, errs)
} }
return metrics, err
} }
// retrieveLists utility function for collection that downloads and serializes // retrieveLists utility function for collection that downloads and serializes
@ -221,26 +211,29 @@ func (sc *Collection) retrieveLists(
ctx context.Context, ctx context.Context,
wtr *kw.JsonSerializationWriter, wtr *kw.JsonSerializationWriter,
progress chan<- struct{}, progress chan<- struct{},
errs *fault.Errors, errs *fault.Bus,
) (numMetrics, error) { ) (support.CollectionMetrics, error) {
var metrics numMetrics var (
metrics support.CollectionMetrics
el = errs.Local()
)
lists, err := loadSiteLists(ctx, sc.service, sc.fullPath.ResourceOwner(), sc.jobs, errs) lists, err := loadSiteLists(ctx, sc.service, sc.fullPath.ResourceOwner(), sc.jobs, errs)
if err != nil { if err != nil {
return metrics, err return metrics, err
} }
metrics.attempts += len(lists) metrics.Objects += len(lists)
// For each models.Listable, object is serialized and the metrics are collected. // For each models.Listable, object is serialized and the metrics are collected.
// The progress is objected via the passed in channel. // The progress is objected via the passed in channel.
for _, lst := range lists { for _, lst := range lists {
if errs.Err() != nil { if el.Failure() != nil {
break break
} }
byteArray, err := serializeContent(wtr, lst) byteArray, err := serializeContent(ctx, wtr, lst)
if err != nil { if err != nil {
errs.Add(clues.Wrap(err, "serializing list").WithClues(ctx)) el.AddRecoverable(clues.Wrap(err, "serializing list").WithClues(ctx))
continue continue
} }
@ -252,9 +245,9 @@ func (sc *Collection) retrieveLists(
t = *t1 t = *t1
} }
metrics.totalBytes += size metrics.Bytes += size
metrics.success++ metrics.Successes++
sc.data <- &Item{ sc.data <- &Item{
id: *lst.GetId(), id: *lst.GetId(),
data: io.NopCloser(bytes.NewReader(byteArray)), data: io.NopCloser(bytes.NewReader(byteArray)),
@ -266,16 +259,19 @@ func (sc *Collection) retrieveLists(
} }
} }
return metrics, errs.Err() return metrics, el.Failure()
} }
func (sc *Collection) retrievePages( func (sc *Collection) retrievePages(
ctx context.Context, ctx context.Context,
wtr *kw.JsonSerializationWriter, wtr *kw.JsonSerializationWriter,
progress chan<- struct{}, progress chan<- struct{},
errs *fault.Errors, errs *fault.Bus,
) (numMetrics, error) { ) (support.CollectionMetrics, error) {
var metrics numMetrics var (
metrics support.CollectionMetrics
el = errs.Local()
)
betaService := sc.betaService betaService := sc.betaService
if betaService == nil { if betaService == nil {
@ -287,26 +283,26 @@ func (sc *Collection) retrievePages(
return metrics, err return metrics, err
} }
metrics.attempts = len(pages) metrics.Objects = len(pages)
// For each models.Pageable, object is serialize and the metrics are collected and returned. // For each models.Pageable, object is serialize and the metrics are collected and returned.
// Pageable objects are not supported in v1.0 of msgraph at this time. // Pageable objects are not supported in v1.0 of msgraph at this time.
// TODO: Verify Parsable interface supported with modified-Pageable // TODO: Verify Parsable interface supported with modified-Pageable
for _, pg := range pages { for _, pg := range pages {
if errs.Err() != nil { if el.Failure() != nil {
break break
} }
byteArray, err := serializeContent(wtr, pg) byteArray, err := serializeContent(ctx, wtr, pg)
if err != nil { if err != nil {
errs.Add(clues.Wrap(err, "serializing page").WithClues(ctx)) el.AddRecoverable(clues.Wrap(err, "serializing page").WithClues(ctx))
continue continue
} }
size := int64(len(byteArray)) size := int64(len(byteArray))
if size > 0 { if size > 0 {
metrics.totalBytes += size metrics.Bytes += size
metrics.success++ metrics.Successes++
sc.data <- &Item{ sc.data <- &Item{
id: *pg.GetId(), id: *pg.GetId(),
data: io.NopCloser(bytes.NewReader(byteArray)), data: io.NopCloser(bytes.NewReader(byteArray)),
@ -318,20 +314,24 @@ func (sc *Collection) retrievePages(
} }
} }
return metrics, errs.Err() return metrics, el.Failure()
} }
func serializeContent(writer *kw.JsonSerializationWriter, obj absser.Parsable) ([]byte, error) { func serializeContent(
ctx context.Context,
writer *kw.JsonSerializationWriter,
obj absser.Parsable,
) ([]byte, error) {
defer writer.Close() defer writer.Close()
err := writer.WriteObjectValue("", obj) err := writer.WriteObjectValue("", obj)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "writing object").With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "writing object")
} }
byteArray, err := writer.GetSerializedContent() byteArray, err := writer.GetSerializedContent()
if err != nil { if err != nil {
return nil, clues.Wrap(err, "getting content from writer").With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "getting content from writer")
} }
return byteArray, nil return byteArray, nil

View File

@ -18,6 +18,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
@ -37,7 +38,7 @@ func (suite *SharePointCollectionSuite) SetupSuite() {
suite.siteID = tester.M365SiteID(t) suite.siteID = tester.M365SiteID(t)
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) aw.MustNoErr(t, err)
suite.creds = m365 suite.creds = m365
} }
@ -61,7 +62,7 @@ func (suite *SharePointCollectionSuite) TestCollection_Item_Read() {
data: io.NopCloser(bytes.NewReader(m)), data: io.NopCloser(bytes.NewReader(m)),
} }
readData, err := io.ReadAll(sc.ToReader()) readData, err := io.ReadAll(sc.ToReader())
require.NoError(t, err) aw.MustNoErr(t, err)
assert.Equal(t, name, sc.id) assert.Equal(t, name, sc.id)
assert.Equal(t, readData, m) assert.Equal(t, readData, m)
@ -91,7 +92,7 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
user, user,
path.ListsCategory, path.ListsCategory,
false) false)
require.NoError(t, err) aw.MustNoErr(t, err)
return dir return dir
}, },
@ -101,10 +102,10 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
listing.SetDisplayName(&name) listing.SetDisplayName(&name)
err := ow.WriteObjectValue("", listing) err := ow.WriteObjectValue("", listing)
require.NoError(t, err) aw.MustNoErr(t, err)
byteArray, err := ow.GetSerializedContent() byteArray, err := ow.GetSerializedContent()
require.NoError(t, err) aw.MustNoErr(t, err)
data := &Item{ data := &Item{
id: name, id: name,
@ -126,14 +127,14 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
user, user,
path.PagesCategory, path.PagesCategory,
false) false)
require.NoError(t, err) aw.MustNoErr(t, err)
return dir return dir
}, },
getItem: func(t *testing.T, itemName string) *Item { getItem: func(t *testing.T, itemName string) *Item {
byteArray := mockconnector.GetMockPage(itemName) byteArray := mockconnector.GetMockPage(itemName)
page, err := support.CreatePageFromBytes(byteArray) page, err := support.CreatePageFromBytes(byteArray)
require.NoError(t, err) aw.MustNoErr(t, err)
data := &Item{ data := &Item{
id: itemName, id: itemName,
@ -183,7 +184,7 @@ func (suite *SharePointCollectionSuite) TestListCollection_Restore() {
testName := "MockListing" testName := "MockListing"
listing.SetDisplayName(&testName) listing.SetDisplayName(&testName)
byteArray, err := service.Serialize(listing) byteArray, err := service.Serialize(listing)
require.NoError(t, err) aw.MustNoErr(t, err)
listData := &Item{ listData := &Item{
id: testName, id: testName,
@ -194,7 +195,7 @@ func (suite *SharePointCollectionSuite) TestListCollection_Restore() {
destName := "Corso_Restore_" + common.FormatNow(common.SimpleTimeTesting) destName := "Corso_Restore_" + common.FormatNow(common.SimpleTimeTesting)
deets, err := restoreListItem(ctx, service, listData, suite.siteID, destName) deets, err := restoreListItem(ctx, service, listData, suite.siteID, destName)
assert.NoError(t, err) aw.NoErr(t, err)
t.Logf("List created: %s\n", deets.SharePoint.ItemName) t.Logf("List created: %s\n", deets.SharePoint.ItemName)
// Clean-Up // Clean-Up
@ -206,7 +207,7 @@ func (suite *SharePointCollectionSuite) TestListCollection_Restore() {
for { for {
resp, err := builder.Get(ctx, nil) resp, err := builder.Get(ctx, nil)
assert.NoError(t, err, "experienced query error during clean up. Details: "+support.ConnectorStackErrorTrace(err)) aw.NoErr(t, err, "getting site lists")
for _, temp := range resp.GetValue() { for _, temp := range resp.GetValue() {
if *temp.GetDisplayName() == deets.SharePoint.ItemName { if *temp.GetDisplayName() == deets.SharePoint.ItemName {
@ -227,7 +228,7 @@ func (suite *SharePointCollectionSuite) TestListCollection_Restore() {
if isFound { if isFound {
err := DeleteList(ctx, service, suite.siteID, deleteID) err := DeleteList(ctx, service, suite.siteID, deleteID)
assert.NoError(t, err) aw.NoErr(t, err)
} }
} }
@ -242,17 +243,17 @@ func (suite *SharePointCollectionSuite) TestRestoreLocation() {
service := createTestService(t, suite.creds) service := createTestService(t, suite.creds)
rootFolder := "General_" + common.FormatNow(common.SimpleTimeTesting) rootFolder := "General_" + common.FormatNow(common.SimpleTimeTesting)
folderID, err := createRestoreFolders(ctx, service, suite.siteID, []string{rootFolder}) folderID, err := createRestoreFolders(ctx, service, suite.siteID, []string{rootFolder})
assert.NoError(t, err) aw.NoErr(t, err)
t.Log("FolderID: " + folderID) t.Log("FolderID: " + folderID)
_, err = createRestoreFolders(ctx, service, suite.siteID, []string{rootFolder, "Tsao"}) _, err = createRestoreFolders(ctx, service, suite.siteID, []string{rootFolder, "Tsao"})
assert.NoError(t, err) aw.NoErr(t, err)
// CleanUp // CleanUp
siteDrive, err := service.Client().SitesById(suite.siteID).Drive().Get(ctx, nil) siteDrive, err := service.Client().SitesById(suite.siteID).Drive().Get(ctx, nil)
require.NoError(t, err) aw.MustNoErr(t, err)
driveID := *siteDrive.GetId() driveID := *siteDrive.GetId()
err = onedrive.DeleteItem(ctx, service, driveID, folderID) err = onedrive.DeleteItem(ctx, service, driveID, folderID)
assert.NoError(t, err) aw.NoErr(t, err)
} }

View File

@ -36,7 +36,7 @@ func DataCollections(
serv graph.Servicer, serv graph.Servicer,
su statusUpdater, su statusUpdater,
ctrlOpts control.Options, ctrlOpts control.Options,
errs *fault.Errors, errs *fault.Bus,
) ([]data.BackupCollection, map[string]struct{}, error) { ) ([]data.BackupCollection, map[string]struct{}, error) {
b, err := selector.ToSharePointBackup() b, err := selector.ToSharePointBackup()
if err != nil { if err != nil {
@ -44,12 +44,13 @@ func DataCollections(
} }
var ( var (
el = errs.Local()
site = b.DiscreteOwner site = b.DiscreteOwner
collections = []data.BackupCollection{} collections = []data.BackupCollection{}
) )
for _, scope := range b.Scopes() { for _, scope := range b.Scopes() {
if errs.Err() != nil { if el.Failure() != nil {
break break
} }
@ -73,7 +74,7 @@ func DataCollections(
ctrlOpts, ctrlOpts,
errs) errs)
if err != nil { if err != nil {
errs.Add(err) el.AddRecoverable(err)
continue continue
} }
@ -86,9 +87,10 @@ func DataCollections(
site, site,
scope, scope,
su, su,
ctrlOpts) ctrlOpts,
errs)
if err != nil { if err != nil {
errs.Add(err) el.AddRecoverable(err)
continue continue
} }
@ -102,7 +104,7 @@ func DataCollections(
ctrlOpts, ctrlOpts,
errs) errs)
if err != nil { if err != nil {
errs.Add(err) el.AddRecoverable(err)
continue continue
} }
} }
@ -111,7 +113,7 @@ func DataCollections(
foldersComplete <- struct{}{} foldersComplete <- struct{}{}
} }
return collections, nil, errs.Err() return collections, nil, el.Failure()
} }
func collectLists( func collectLists(
@ -120,11 +122,14 @@ func collectLists(
tenantID, siteID string, tenantID, siteID string,
updater statusUpdater, updater statusUpdater,
ctrlOpts control.Options, ctrlOpts control.Options,
errs *fault.Errors, errs *fault.Bus,
) ([]data.BackupCollection, error) { ) ([]data.BackupCollection, error) {
logger.Ctx(ctx).With("site", siteID).Debug("Creating SharePoint List Collections") logger.Ctx(ctx).With("site", siteID).Debug("Creating SharePoint List Collections")
spcs := make([]data.BackupCollection, 0) var (
el = errs.Local()
spcs = make([]data.BackupCollection, 0)
)
lists, err := preFetchLists(ctx, serv, siteID) lists, err := preFetchLists(ctx, serv, siteID)
if err != nil { if err != nil {
@ -132,7 +137,7 @@ func collectLists(
} }
for _, tuple := range lists { for _, tuple := range lists {
if errs.Err() != nil { if el.Failure() != nil {
break break
} }
@ -143,7 +148,7 @@ func collectLists(
path.ListsCategory, path.ListsCategory,
false) false)
if err != nil { if err != nil {
errs.Add(clues.Wrap(err, "creating list collection path").WithClues(ctx)) el.AddRecoverable(clues.Wrap(err, "creating list collection path").WithClues(ctx))
} }
collection := NewCollection(dir, serv, List, updater.UpdateStatus, ctrlOpts) collection := NewCollection(dir, serv, List, updater.UpdateStatus, ctrlOpts)
@ -152,7 +157,7 @@ func collectLists(
spcs = append(spcs, collection) spcs = append(spcs, collection)
} }
return spcs, errs.Err() return spcs, el.Failure()
} }
// collectLibraries constructs a onedrive Collections struct and Get()s // collectLibraries constructs a onedrive Collections struct and Get()s
@ -165,6 +170,7 @@ func collectLibraries(
scope selectors.SharePointScope, scope selectors.SharePointScope,
updater statusUpdater, updater statusUpdater,
ctrlOpts control.Options, ctrlOpts control.Options,
errs *fault.Bus,
) ([]data.BackupCollection, map[string]struct{}, error) { ) ([]data.BackupCollection, map[string]struct{}, error) {
logger.Ctx(ctx).Debug("creating SharePoint Library collections") logger.Ctx(ctx).Debug("creating SharePoint Library collections")
@ -183,9 +189,9 @@ func collectLibraries(
// TODO(ashmrtn): Pass previous backup metadata when SharePoint supports delta // TODO(ashmrtn): Pass previous backup metadata when SharePoint supports delta
// token-based incrementals. // token-based incrementals.
odcs, excludes, err := colls.Get(ctx, nil) odcs, excludes, err := colls.Get(ctx, nil, errs)
if err != nil { if err != nil {
return nil, nil, clues.Wrap(err, "getting library").WithClues(ctx).With(graph.ErrData(err)...) return nil, nil, graph.Wrap(ctx, err, "getting library")
} }
return append(collections, odcs...), excludes, nil return append(collections, odcs...), excludes, nil
@ -200,11 +206,14 @@ func collectPages(
siteID string, siteID string,
updater statusUpdater, updater statusUpdater,
ctrlOpts control.Options, ctrlOpts control.Options,
errs *fault.Errors, errs *fault.Bus,
) ([]data.BackupCollection, error) { ) ([]data.BackupCollection, error) {
logger.Ctx(ctx).Debug("creating SharePoint Pages collections") logger.Ctx(ctx).Debug("creating SharePoint Pages collections")
spcs := make([]data.BackupCollection, 0) var (
el = errs.Local()
spcs = make([]data.BackupCollection, 0)
)
// make the betaClient // make the betaClient
// Need to receive From DataCollection Call // Need to receive From DataCollection Call
@ -221,7 +230,7 @@ func collectPages(
} }
for _, tuple := range tuples { for _, tuple := range tuples {
if errs.Err() != nil { if el.Failure() != nil {
break break
} }
@ -232,7 +241,7 @@ func collectPages(
path.PagesCategory, path.PagesCategory,
false) false)
if err != nil { if err != nil {
errs.Add(clues.Wrap(err, "creating page collection path").WithClues(ctx)) el.AddRecoverable(clues.Wrap(err, "creating page collection path").WithClues(ctx))
} }
collection := NewCollection(dir, serv, Pages, updater.UpdateStatus, ctrlOpts) collection := NewCollection(dir, serv, Pages, updater.UpdateStatus, ctrlOpts)
@ -242,7 +251,7 @@ func collectPages(
spcs = append(spcs, collection) spcs = append(spcs, collection)
} }
return spcs, errs.Err() return spcs, el.Failure()
} }
type folderMatcher struct { type folderMatcher struct {

View File

@ -5,12 +5,12 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive" "github.com/alcionai/corso/src/internal/connector/onedrive"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
@ -74,7 +74,7 @@ func (suite *SharePointLibrariesSuite) TestUpdateCollections() {
driveItem("file", testBaseDrivePath, "root", true), driveItem("file", testBaseDrivePath, "root", true),
}, },
scope: anyFolder, scope: anyFolder,
expect: assert.NoError, expect: aw.NoErr,
expectedCollectionIDs: []string{"root"}, expectedCollectionIDs: []string{"root"},
expectedCollectionPaths: expectedPathAsSlice( expectedCollectionPaths: expectedPathAsSlice(
suite.T(), suite.T(),
@ -105,7 +105,7 @@ func (suite *SharePointLibrariesSuite) TestUpdateCollections() {
&MockGraphService{}, &MockGraphService{},
nil, nil,
control.Options{}) control.Options{})
err := c.UpdateCollections(ctx, "driveID1", "General", test.items, paths, newPaths, excluded, true) err := c.UpdateCollections(ctx, "driveID1", "General", test.items, paths, newPaths, excluded, true, fault.New(true))
test.expect(t, err) test.expect(t, err)
assert.Equal(t, len(test.expectedCollectionIDs), len(c.CollectionMap), "collection paths") assert.Equal(t, len(test.expectedCollectionIDs), len(c.CollectionMap), "collection paths")
assert.Equal(t, test.expectedItemCount, c.NumItems, "item count") assert.Equal(t, test.expectedItemCount, c.NumItems, "item count")
@ -169,7 +169,7 @@ func (suite *SharePointPagesSuite) TestCollectPages() {
siteID := tester.M365SiteID(t) siteID := tester.M365SiteID(t)
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
account, err := a.M365Config() account, err := a.M365Config()
require.NoError(t, err) aw.MustNoErr(t, err)
col, err := collectPages( col, err := collectPages(
ctx, ctx,
@ -179,6 +179,6 @@ func (suite *SharePointPagesSuite) TestCollectPages() {
&MockGraphService{}, &MockGraphService{},
control.Defaults(), control.Defaults(),
fault.New(true)) fault.New(true))
assert.NoError(t, err) aw.NoErr(t, err)
assert.NotEmpty(t, col) assert.NotEmpty(t, col)
} }

View File

@ -4,11 +4,11 @@ import (
"testing" "testing"
msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go" msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
"github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive" "github.com/alcionai/corso/src/internal/connector/onedrive"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
) )
@ -52,7 +52,7 @@ func createTestService(t *testing.T, credentials account.M365Config) *graph.Serv
credentials.AzureClientID, credentials.AzureClientID,
credentials.AzureClientSecret, credentials.AzureClientSecret,
) )
require.NoError(t, err, "creating microsoft graph service for exchange") aw.MustNoErr(t, err, "creating microsoft graph service for exchange")
return graph.NewService(adapter) return graph.NewService(adapter)
} }
@ -62,7 +62,7 @@ func expectedPathAsSlice(t *testing.T, tenant, user string, rest ...string) []st
for _, r := range rest { for _, r := range rest {
p, err := onedrive.GetCanonicalPath(r, tenant, user, onedrive.SharePointSource) p, err := onedrive.GetCanonicalPath(r, tenant, user, onedrive.SharePointSource)
require.NoError(t, err) aw.MustNoErr(t, err)
res = append(res, p.String()) res = append(res, p.String())
} }

View File

@ -7,11 +7,9 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
mssite "github.com/microsoftgraph/msgraph-sdk-go/sites" mssite "github.com/microsoftgraph/msgraph-sdk-go/sites"
"github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
) )
@ -46,7 +44,7 @@ func preFetchLists(
for { for {
resp, err := builder.Get(ctx, options) resp, err := builder.Get(ctx, options)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "getting lists").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "getting lists")
} }
for _, entry := range resp.GetValue() { for _, entry := range resp.GetValue() {
@ -92,11 +90,12 @@ func loadSiteLists(
gs graph.Servicer, gs graph.Servicer,
siteID string, siteID string,
listIDs []string, listIDs []string,
errs *fault.Errors, errs *fault.Bus,
) ([]models.Listable, error) { ) ([]models.Listable, error) {
var ( var (
results = make([]models.Listable, 0) results = make([]models.Listable, 0)
semaphoreCh = make(chan struct{}, fetchChannelSize) semaphoreCh = make(chan struct{}, fetchChannelSize)
el = errs.Local()
wg sync.WaitGroup wg sync.WaitGroup
m sync.Mutex m sync.Mutex
) )
@ -111,8 +110,8 @@ func loadSiteLists(
} }
for _, listID := range listIDs { for _, listID := range listIDs {
if errs.Err() != nil { if el.Failure() != nil {
return nil, errs.Err() break
} }
semaphoreCh <- struct{}{} semaphoreCh <- struct{}{}
@ -130,13 +129,13 @@ func loadSiteLists(
entry, err = gs.Client().SitesById(siteID).ListsById(id).Get(ctx, nil) entry, err = gs.Client().SitesById(siteID).ListsById(id).Get(ctx, nil)
if err != nil { if err != nil {
errs.Add(clues.Wrap(err, "getting site list").WithClues(ctx).With(graph.ErrData(err)...)) el.AddRecoverable(graph.Wrap(ctx, err, "getting site list"))
return return
} }
cols, cTypes, lItems, err := fetchListContents(ctx, gs, siteID, id, errs) cols, cTypes, lItems, err := fetchListContents(ctx, gs, siteID, id, errs)
if err != nil { if err != nil {
errs.Add(clues.Wrap(err, "getting list contents")) el.AddRecoverable(clues.Wrap(err, "getting list contents"))
return return
} }
@ -149,7 +148,7 @@ func loadSiteLists(
wg.Wait() wg.Wait()
return results, errs.Err() return results, el.Failure()
} }
// fetchListContents utility function to retrieve associated M365 relationships // fetchListContents utility function to retrieve associated M365 relationships
@ -159,7 +158,7 @@ func fetchListContents(
ctx context.Context, ctx context.Context,
service graph.Servicer, service graph.Servicer,
siteID, listID string, siteID, listID string,
errs *fault.Errors, errs *fault.Bus,
) ( ) (
[]models.ColumnDefinitionable, []models.ColumnDefinitionable,
[]models.ContentTypeable, []models.ContentTypeable,
@ -192,26 +191,27 @@ func fetchListItems(
ctx context.Context, ctx context.Context,
gs graph.Servicer, gs graph.Servicer,
siteID, listID string, siteID, listID string,
errs *fault.Errors, errs *fault.Bus,
) ([]models.ListItemable, error) { ) ([]models.ListItemable, error) {
var ( var (
prefix = gs.Client().SitesById(siteID).ListsById(listID) prefix = gs.Client().SitesById(siteID).ListsById(listID)
builder = prefix.Items() builder = prefix.Items()
itms = make([]models.ListItemable, 0) itms = make([]models.ListItemable, 0)
el = errs.Local()
) )
for { for {
if errs.Err() != nil { if errs.Failure() != nil {
break break
} }
resp, err := builder.Get(ctx, nil) resp, err := builder.Get(ctx, nil)
if err != nil { if err != nil {
return nil, errors.Wrap(err, support.ConnectorStackErrorTrace(err)) return nil, err
} }
for _, itm := range resp.GetValue() { for _, itm := range resp.GetValue() {
if errs.Err() != nil { if el.Failure() != nil {
break break
} }
@ -219,7 +219,7 @@ func fetchListItems(
fields, err := newPrefix.Fields().Get(ctx, nil) fields, err := newPrefix.Fields().Get(ctx, nil)
if err != nil { if err != nil {
errs.Add(clues.Wrap(err, "getting list fields").WithClues(ctx).With(graph.ErrData(err)...)) el.AddRecoverable(graph.Wrap(ctx, err, "getting list fields"))
continue continue
} }
@ -235,7 +235,7 @@ func fetchListItems(
builder = mssite.NewItemListsItemItemsRequestBuilder(*resp.GetOdataNextLink(), gs.Adapter()) builder = mssite.NewItemListsItemItemsRequestBuilder(*resp.GetOdataNextLink(), gs.Adapter())
} }
return itms, errs.Err() return itms, el.Failure()
} }
// fetchColumns utility function to return columns from a site. // fetchColumns utility function to return columns from a site.
@ -255,7 +255,7 @@ func fetchColumns(
for { for {
resp, err := builder.Get(ctx, nil) resp, err := builder.Get(ctx, nil)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "getting list columns").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "getting list columns")
} }
cs = append(cs, resp.GetValue()...) cs = append(cs, resp.GetValue()...)
@ -272,7 +272,7 @@ func fetchColumns(
for { for {
resp, err := builder.Get(ctx, nil) resp, err := builder.Get(ctx, nil)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "getting content columns").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "getting content columns")
} }
cs = append(cs, resp.GetValue()...) cs = append(cs, resp.GetValue()...)
@ -298,25 +298,26 @@ func fetchContentTypes(
ctx context.Context, ctx context.Context,
gs graph.Servicer, gs graph.Servicer,
siteID, listID string, siteID, listID string,
errs *fault.Errors, errs *fault.Bus,
) ([]models.ContentTypeable, error) { ) ([]models.ContentTypeable, error) {
var ( var (
el = errs.Local()
cTypes = make([]models.ContentTypeable, 0) cTypes = make([]models.ContentTypeable, 0)
builder = gs.Client().SitesById(siteID).ListsById(listID).ContentTypes() builder = gs.Client().SitesById(siteID).ListsById(listID).ContentTypes()
) )
for { for {
if errs.Err() != nil { if errs.Failure() != nil {
break break
} }
resp, err := builder.Get(ctx, nil) resp, err := builder.Get(ctx, nil)
if err != nil { if err != nil {
return nil, errors.Wrap(err, support.ConnectorStackErrorTrace(err)) return nil, err
} }
for _, cont := range resp.GetValue() { for _, cont := range resp.GetValue() {
if errs.Err() != nil { if el.Failure() != nil {
break break
} }
@ -324,7 +325,7 @@ func fetchContentTypes(
links, err := fetchColumnLinks(ctx, gs, siteID, listID, id) links, err := fetchColumnLinks(ctx, gs, siteID, listID, id)
if err != nil { if err != nil {
errs.Add(err) el.AddRecoverable(err)
continue continue
} }
@ -332,7 +333,7 @@ func fetchContentTypes(
cs, err := fetchColumns(ctx, gs, siteID, listID, id) cs, err := fetchColumns(ctx, gs, siteID, listID, id)
if err != nil { if err != nil {
errs.Add(err) el.AddRecoverable(err)
continue continue
} }
@ -348,7 +349,7 @@ func fetchContentTypes(
builder = mssite.NewItemListsItemContentTypesRequestBuilder(*resp.GetOdataNextLink(), gs.Adapter()) builder = mssite.NewItemListsItemContentTypesRequestBuilder(*resp.GetOdataNextLink(), gs.Adapter())
} }
return cTypes, errs.Err() return cTypes, el.Failure()
} }
func fetchColumnLinks( func fetchColumnLinks(
@ -364,7 +365,7 @@ func fetchColumnLinks(
for { for {
resp, err := builder.Get(ctx, nil) resp, err := builder.Get(ctx, nil)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "getting column links").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "getting column links")
} }
links = append(links, resp.GetValue()...) links = append(links, resp.GetValue()...)
@ -391,7 +392,7 @@ func DeleteList(
) error { ) error {
err := gs.Client().SitesById(siteID).ListsById(listID).Delete(ctx, nil) err := gs.Client().SitesById(siteID).ListsById(listID).Delete(ctx, nil)
if err != nil { if err != nil {
return clues.Wrap(err, "deleting list").WithClues(ctx).With(graph.ErrData(err)...) return graph.Wrap(ctx, err, "deleting list")
} }
return nil return nil

View File

@ -4,10 +4,10 @@ import (
"testing" "testing"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/aw"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
) )
@ -21,7 +21,7 @@ func (suite *SharePointSuite) SetupSuite() {
t := suite.T() t := suite.T()
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) aw.MustNoErr(t, err)
suite.creds = m365 suite.creds = m365
} }
@ -52,11 +52,11 @@ func (suite *SharePointSuite) TestLoadList() {
t := suite.T() t := suite.T()
service := createTestService(t, suite.creds) service := createTestService(t, suite.creds)
tuples, err := preFetchLists(ctx, service, "root") tuples, err := preFetchLists(ctx, service, "root")
require.NoError(t, err) aw.MustNoErr(t, err)
job := []string{tuples[0].id} job := []string{tuples[0].id}
lists, err := loadSiteLists(ctx, service, "root", job, fault.New(true)) lists, err := loadSiteLists(ctx, service, "root", job, fault.New(true))
assert.NoError(t, err) aw.NoErr(t, err)
assert.Greater(t, len(lists), 0) assert.Greater(t, len(lists), 0)
t.Logf("Length: %d\n", len(lists)) t.Logf("Length: %d\n", len(lists))
} }

View File

@ -6,7 +6,6 @@ import (
absser "github.com/microsoft/kiota-abstractions-go/serialization" absser "github.com/microsoft/kiota-abstractions-go/serialization"
mssite "github.com/microsoftgraph/msgraph-sdk-go/sites" mssite "github.com/microsoftgraph/msgraph-sdk-go/sites"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
) )
@ -22,7 +21,7 @@ func GetAllSitesForTenant(ctx context.Context, gs graph.Servicer) (absser.Parsab
sites, err := gs.Client().Sites().Get(ctx, options) sites, err := gs.Client().Sites().Get(ctx, options)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "getting sites").WithClues(ctx).With(graph.ErrData(err)...) return nil, graph.Wrap(ctx, err, "getting sites")
} }
return sites, nil return sites, nil

View File

@ -46,7 +46,7 @@ func RestoreCollections(
dest control.RestoreDestination, dest control.RestoreDestination,
dcs []data.RestoreCollection, dcs []data.RestoreCollection,
deets *details.Builder, deets *details.Builder,
errs *fault.Errors, errs *fault.Bus,
) (*support.ConnectorOperationStatus, error) { ) (*support.ConnectorOperationStatus, error) {
var ( var (
err error err error
@ -56,7 +56,6 @@ func RestoreCollections(
// Iterate through the data collections and restore the contents of each // Iterate through the data collections and restore the contents of each
for _, dc := range dcs { for _, dc := range dcs {
var ( var (
canceled bool
category = dc.FullPath().Category() category = dc.FullPath().Category()
metrics support.CollectionMetrics metrics support.CollectionMetrics
ictx = clues.Add(ctx, ictx = clues.Add(ctx,
@ -67,7 +66,7 @@ func RestoreCollections(
switch dc.FullPath().Category() { switch dc.FullPath().Category() {
case path.LibrariesCategory: case path.LibrariesCategory:
metrics, _, _, canceled = onedrive.RestoreCollection( metrics, _, _, err = onedrive.RestoreCollection(
ictx, ictx,
backupVersion, backupVersion,
service, service,
@ -76,9 +75,9 @@ func RestoreCollections(
onedrive.SharePointSource, onedrive.SharePointSource,
dest.ContainerName, dest.ContainerName,
deets, deets,
func(s string, err error) { errs.Add(err) },
map[string]string{}, map[string]string{},
false) false,
errs)
case path.ListsCategory: case path.ListsCategory:
metrics, err = RestoreListCollection( metrics, err = RestoreListCollection(
ictx, ictx,
@ -99,9 +98,9 @@ func RestoreCollections(
return nil, clues.Wrap(clues.New(category.String()), "category not supported") return nil, clues.Wrap(clues.New(category.String()), "category not supported")
} }
restoreMetrics.Combine(metrics) restoreMetrics = support.CombineMetrics(restoreMetrics, metrics)
if canceled || err != nil { if err != nil {
break break
} }
} }
@ -111,7 +110,6 @@ func RestoreCollections(
support.Restore, support.Restore,
len(dcs), len(dcs),
restoreMetrics, restoreMetrics,
err,
dest.ContainerName) dest.ContainerName)
return status, err return status, err
@ -128,7 +126,7 @@ func createRestoreFolders(
// Get Main Drive for Site, Documents // Get Main Drive for Site, Documents
mainDrive, err := service.Client().SitesById(siteID).Drive().Get(ctx, nil) mainDrive, err := service.Client().SitesById(siteID).Drive().Get(ctx, nil)
if err != nil { if err != nil {
return "", clues.Wrap(err, "getting site drive root").WithClues(ctx).With(graph.ErrData(err)...) return "", graph.Wrap(ctx, err, "getting site drive root")
} }
return onedrive.CreateRestoreFolders(ctx, service, *mainDrive.GetId(), restoreFolders) return onedrive.CreateRestoreFolders(ctx, service, *mainDrive.GetId(), restoreFolders)
@ -184,7 +182,7 @@ func restoreListItem(
// Restore to List base to M365 back store // Restore to List base to M365 back store
restoredList, err := service.Client().SitesById(siteID).Lists().Post(ctx, newList, nil) restoredList, err := service.Client().SitesById(siteID).Lists().Post(ctx, newList, nil)
if err != nil { if err != nil {
return dii, clues.Wrap(err, "restoring list").WithClues(ctx).With(graph.ErrData(err)...) return dii, graph.Wrap(ctx, err, "restoring list")
} }
// Uploading of ListItems is conducted after the List is restored // Uploading of ListItems is conducted after the List is restored
@ -197,10 +195,8 @@ func restoreListItem(
Items(). Items().
Post(ctx, lItem, nil) Post(ctx, lItem, nil)
if err != nil { if err != nil {
return dii, clues.Wrap(err, "restoring list items"). return dii, graph.Wrap(ctx, err, "restoring list items").
With("restored_list_id", ptr.Val(restoredList.GetId())). With("restored_list_id", ptr.Val(restoredList.GetId()))
WithClues(ctx).
With(graph.ErrData(err)...)
} }
} }
} }
@ -216,7 +212,7 @@ func RestoreListCollection(
dc data.RestoreCollection, dc data.RestoreCollection,
restoreContainerName string, restoreContainerName string,
deets *details.Builder, deets *details.Builder,
errs *fault.Errors, errs *fault.Bus,
) (support.CollectionMetrics, error) { ) (support.CollectionMetrics, error) {
ctx, end := D.Span(ctx, "gc:sharepoint:restoreListCollection", D.Label("path", dc.FullPath())) ctx, end := D.Span(ctx, "gc:sharepoint:restoreListCollection", D.Label("path", dc.FullPath()))
defer end() defer end()
@ -226,13 +222,14 @@ func RestoreListCollection(
directory = dc.FullPath() directory = dc.FullPath()
siteID = directory.ResourceOwner() siteID = directory.ResourceOwner()
items = dc.Items(ctx, errs) items = dc.Items(ctx, errs)
el = errs.Local()
) )
trace.Log(ctx, "gc:sharepoint:restoreListCollection", directory.String()) trace.Log(ctx, "gc:sharepoint:restoreListCollection", directory.String())
for { for {
if errs.Err() != nil { if el.Failure() != nil {
return metrics, errs.Err() break
} }
select { select {
@ -252,15 +249,15 @@ func RestoreListCollection(
siteID, siteID,
restoreContainerName) restoreContainerName)
if err != nil { if err != nil {
errs.Add(err) el.AddRecoverable(err)
continue continue
} }
metrics.TotalBytes += itemInfo.SharePoint.Size metrics.Bytes += itemInfo.SharePoint.Size
itemPath, err := dc.FullPath().Append(itemData.UUID(), true) itemPath, err := dc.FullPath().Append(itemData.UUID(), true)
if err != nil { if err != nil {
errs.Add(clues.Wrap(err, "appending item to full path").WithClues(ctx)) el.AddRecoverable(clues.Wrap(err, "appending item to full path").WithClues(ctx))
continue continue
} }
@ -275,6 +272,8 @@ func RestoreListCollection(
metrics.Successes++ metrics.Successes++
} }
} }
return metrics, el.Failure()
} }
// RestorePageCollection handles restoration of an individual site page collection. // RestorePageCollection handles restoration of an individual site page collection.
@ -287,7 +286,7 @@ func RestorePageCollection(
dc data.RestoreCollection, dc data.RestoreCollection,
restoreContainerName string, restoreContainerName string,
deets *details.Builder, deets *details.Builder,
errs *fault.Errors, errs *fault.Bus,
) (support.CollectionMetrics, error) { ) (support.CollectionMetrics, error) {
var ( var (
metrics = support.CollectionMetrics{} metrics = support.CollectionMetrics{}
@ -305,14 +304,15 @@ func RestorePageCollection(
return metrics, clues.Wrap(err, "constructing graph client") return metrics, clues.Wrap(err, "constructing graph client")
} }
service := discover.NewBetaService(adpt) var (
el = errs.Local()
// Restore items from collection service = discover.NewBetaService(adpt)
items := dc.Items(ctx, errs) items = dc.Items(ctx, errs)
)
for { for {
if errs.Err() != nil { if el.Failure() != nil {
return metrics, errs.Err() break
} }
select { select {
@ -332,15 +332,15 @@ func RestorePageCollection(
siteID, siteID,
restoreContainerName) restoreContainerName)
if err != nil { if err != nil {
errs.Add(err) el.AddRecoverable(err)
continue continue
} }
metrics.TotalBytes += itemInfo.SharePoint.Size metrics.Bytes += itemInfo.SharePoint.Size
itemPath, err := dc.FullPath().Append(itemData.UUID(), true) itemPath, err := dc.FullPath().Append(itemData.UUID(), true)
if err != nil { if err != nil {
errs.Add(clues.Wrap(err, "appending item to full path").WithClues(ctx)) el.AddRecoverable(clues.Wrap(err, "appending item to full path").WithClues(ctx))
continue continue
} }
@ -355,4 +355,6 @@ func RestorePageCollection(
metrics.Successes++ metrics.Successes++
} }
} }
return metrics, el.Failure()
} }

View File

@ -1,130 +0,0 @@
package support
import (
"fmt"
"strconv"
"strings"
multierror "github.com/hashicorp/go-multierror"
msgraph_errors "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
"github.com/pkg/errors"
)
// WrapErrorAndAppend helper function used to attach identifying information to an error
// and return it as a mulitierror
func WrapAndAppend(identifier string, e, previous error) error {
return multierror.Append(previous, errors.Wrap(e, identifier))
}
// WrapErrorAndAppendf format version of WrapErrorAndAppend
func WrapAndAppendf(identifier interface{}, e, previous error) error {
return multierror.Append(previous, errors.Wrapf(e, "%v", identifier))
}
// GetNumberOfErrors reports how many errors are aggregated in err.  It
// leans on the multierror message format ("<N> errors occurred: ...") by
// reading the integer before the first space when one is present.  A nil
// error counts as 0; any other non-parseable error counts as 1.
func GetNumberOfErrors(err error) int {
	if err == nil {
		return 0
	}

	head, _, ok := strings.Cut(err.Error(), " ")
	if !ok {
		return 1
	}

	if count, convErr := strconv.Atoi(head); convErr == nil {
		return count
	}

	return 1
}
// ListErrors formats every error held in the multierror into a single
// human-readable string, one "\n\tErr: <n> <detail>" entry per error.
// Each entry prefers the oData stack trace extracted by
// ConnectorStackErrorTrace, falling back to the error's own message
// when no trace is available.
func ListErrors(multi multierror.Error) string {
	// strings.Builder avoids the quadratic cost of += concatenation
	// in the original loop.
	var sb strings.Builder

	for idx, err := range multi.Errors {
		detail := ConnectorStackErrorTrace(err)
		if detail == "" {
			detail = fmt.Sprintf("%v", err)
		}

		fmt.Fprintf(&sb, "\n\tErr: %d %v", idx+1, detail)
	}

	return sb.String()
}
// concatenateStringFromPointers appends each non-nil pointer's string to
// orig, separated by single spaces, and returns the accumulated result.
// Nil pointers are skipped without adding a separator.
func concatenateStringFromPointers(orig string, pointers []*string) string {
	out := orig

	for _, p := range pointers {
		if p == nil {
			continue
		}

		out = out + " " + *p
	}

	return out
}
// ConnectorStackErrorTraceWrap wraps e with the given prefix.  When the
// error carries an oData stack trace, the trace text is appended to the
// prefix; otherwise the prefix is used alone.
func ConnectorStackErrorTraceWrap(e error, prefix string) error {
	trace := ConnectorStackErrorTrace(e)
	if trace == "" {
		return errors.Wrap(e, prefix)
	}

	return errors.Wrap(e, prefix+": "+trace)
}
// ConnectorStackErrorTrace extracts a printable trace from an oData
// error: the main error's code/message/target, followed by a Details
// section (one entry per detail) and a Connector section (client and
// request IDs) when those are present.  Non-oData errors produce "".
func ConnectorStackErrorTrace(e error) string {
	oDataError, ok := e.(msgraph_errors.ODataErrorable)
	if !ok {
		return ""
	}

	// Pull every field up front; additional detail data is ignored.
	var (
		mainErr = oDataError.GetError()
		code    = mainErr.GetCode()
		subject = mainErr.GetMessage()
		target  = mainErr.GetTarget()
		details = mainErr.GetDetails()
		inners  = mainErr.GetInnererror()
	)

	msg := concatenateStringFromPointers("", []*string{code, subject, target})

	// Details section: code, message, target per detail entry.
	if details != nil {
		msg += "\nDetails Section:"

		for idx, detail := range details {
			entry := fmt.Sprintf("Detail %d:", idx)
			entry = concatenateStringFromPointers(
				entry,
				[]*string{detail.GetCode(), detail.GetMessage(), detail.GetTarget()})
			msg += entry
		}
	}

	// Connector section: request identifiers from the inner error.
	if inners != nil {
		msg += "\nConnector Section:"
		msg = concatenateStringFromPointers(
			msg,
			[]*string{inners.GetClientRequestId(), inners.GetRequestId()})
	}

	return msg
}

View File

@ -1,103 +0,0 @@
package support
import (
"errors"
"fmt"
"strings"
"testing"
multierror "github.com/hashicorp/go-multierror"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
)
// GraphConnectorErrorSuite exercises the multierror wrapping and
// inspection helpers in this package.
type GraphConnectorErrorSuite struct {
	suite.Suite
}

// TestGraphConnectorErrorSuite runs the suite under the standard test runner.
func TestGraphConnectorErrorSuite(t *testing.T) {
	suite.Run(t, new(GraphConnectorErrorSuite))
}
// TestWrapAndAppend checks that the identifier appears in the combined
// error text, and that ListErrors surfaces the underlying messages
// (without the wrapped identifier information).
func (suite *GraphConnectorErrorSuite) TestWrapAndAppend() {
	var (
		base    = fmt.Errorf("New Error")
		second  = errors.New("I have two")
		wrapped = WrapAndAppend("arc376", second, base)
	)

	suite.True(strings.Contains(wrapped.Error(), "arc376"))
	suite.Error(wrapped)

	multi := &multierror.Error{Errors: []error{base, second}}
	suite.True(strings.Contains(ListErrors(*multi), "two")) // wrapped info is not included
	suite.T().Log(ListErrors(*multi))
}
// TestWrapAndAppend_OnVar checks wrapping when the previous error is a
// nil-valued variable: the identifier must still appear in the result.
func (suite *GraphConnectorErrorSuite) TestWrapAndAppend_OnVar() {
	const id = "xi2058"

	var prior error

	got := WrapAndAppend(id, errors.New("network error"), prior)
	suite.True(strings.Contains(got.Error(), id))
}
// TestWrapAndAppend_Add3 chains three wraps and checks that every
// identifier survives into the accumulated error messages.
func (suite *GraphConnectorErrorSuite) TestWrapAndAppend_Add3() {
	first := WrapAndAppend("user1", assert.AnError, assert.AnError)
	second := WrapAndAppend("unix36", assert.AnError, first)
	third := WrapAndAppend("fxi92874", assert.AnError, second)

	for _, identifier := range []string{"unix36", "user1"} {
		suite.True(strings.Contains(second.Error(), identifier))
	}

	suite.True(strings.Contains(third.Error(), "fxi92874"))
}
// TestWrapAndAppendf checks that a non-string identifier is rendered
// into the combined error text.
func (suite *GraphConnectorErrorSuite) TestWrapAndAppendf() {
	combined := WrapAndAppendf(134323, assert.AnError, assert.AnError)
	suite.True(strings.Contains(combined.Error(), "134323"))
}
// TestConcatenateStringFromPointers checks that non-nil pointer values
// are joined into the output while nil pointers are skipped.
func (suite *GraphConnectorErrorSuite) TestConcatenateStringFromPointers() {
	var (
		first  = "Corso"
		third  = "remains"
		absent *string
	)

	result := concatenateStringFromPointers("", []*string{&first, absent, &third})
	suite.True(strings.Contains(result, first))
	suite.True(strings.Contains(result, third))
}
// TestGetNumberOfErrors verifies error counting for nil, single, and
// aggregated multierror values.
func (suite *GraphConnectorErrorSuite) TestGetNumberOfErrors() {
	table := []struct {
		name     string
		errs     error
		expected int
	}{
		{
			name:     "No error",
			errs:     nil,
			expected: 0,
		},
		{
			name:     "Not an ErrorList",
			errs:     errors.New("network error"),
			expected: 1,
		},
		{
			name:     "Three Errors",
			errs:     WrapAndAppend("tres", errors.New("three"), WrapAndAppend("arc376", errors.New("one"), errors.New("two"))),
			expected: 3,
		},
	}
	for _, test := range table {
		suite.T().Run(test.name, func(t *testing.T) {
			result := GetNumberOfErrors(test.errs)
			// testify's Equal takes (expected, actual); the original call
			// passed them reversed, which makes failure output misleading.
			suite.Equal(test.expected, result)
		})
	}
}

View File

@ -6,11 +6,11 @@ import (
kioser "github.com/microsoft/kiota-serialization-json-go" kioser "github.com/microsoft/kiota-serialization-json-go"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
bmodels "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models" bmodels "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/tester/aw"
) )
type DataSupportSuite struct { type DataSupportSuite struct {
@ -38,13 +38,13 @@ func (suite *DataSupportSuite) TestCreateMessageFromBytes() {
{ {
name: "Empty Bytes", name: "Empty Bytes",
byteArray: make([]byte, 0), byteArray: make([]byte, 0),
checkError: assert.Error, checkError: aw.Err,
checkObject: assert.Nil, checkObject: assert.Nil,
}, },
{ {
name: "aMessage bytes", name: "aMessage bytes",
byteArray: mockconnector.GetMockMessageBytes("m365 mail support test"), byteArray: mockconnector.GetMockMessageBytes("m365 mail support test"),
checkError: assert.NoError, checkError: aw.NoErr,
checkObject: assert.NotNil, checkObject: assert.NotNil,
}, },
} }
@ -69,19 +69,19 @@ func (suite *DataSupportSuite) TestCreateContactFromBytes() {
{ {
name: empty, name: empty,
byteArray: make([]byte, 0), byteArray: make([]byte, 0),
checkError: assert.Error, checkError: aw.Err,
isNil: assert.Nil, isNil: assert.Nil,
}, },
{ {
name: invalid, name: invalid,
byteArray: []byte("A random sentence doesn't make an object"), byteArray: []byte("A random sentence doesn't make an object"),
checkError: assert.Error, checkError: aw.Err,
isNil: assert.Nil, isNil: assert.Nil,
}, },
{ {
name: "Valid Contact", name: "Valid Contact",
byteArray: mockconnector.GetMockContactBytes("Support Test"), byteArray: mockconnector.GetMockContactBytes("Support Test"),
checkError: assert.NoError, checkError: aw.NoErr,
isNil: assert.NotNil, isNil: assert.NotNil,
}, },
} }
@ -104,19 +104,19 @@ func (suite *DataSupportSuite) TestCreateEventFromBytes() {
{ {
name: empty, name: empty,
byteArray: make([]byte, 0), byteArray: make([]byte, 0),
checkError: assert.Error, checkError: aw.Err,
isNil: assert.Nil, isNil: assert.Nil,
}, },
{ {
name: invalid, name: invalid,
byteArray: []byte("Invalid byte stream \"subject:\" Not going to work"), byteArray: []byte("Invalid byte stream \"subject:\" Not going to work"),
checkError: assert.Error, checkError: aw.Err,
isNil: assert.Nil, isNil: assert.Nil,
}, },
{ {
name: "Valid Event", name: "Valid Event",
byteArray: mockconnector.GetDefaultMockEventBytes("Event Test"), byteArray: mockconnector.GetDefaultMockEventBytes("Event Test"),
checkError: assert.NoError, checkError: aw.NoErr,
isNil: assert.NotNil, isNil: assert.NotNil,
}, },
} }
@ -131,7 +131,7 @@ func (suite *DataSupportSuite) TestCreateEventFromBytes() {
func (suite *DataSupportSuite) TestCreateListFromBytes() { func (suite *DataSupportSuite) TestCreateListFromBytes() {
listBytes, err := mockconnector.GetMockListBytes("DataSupportSuite") listBytes, err := mockconnector.GetMockListBytes("DataSupportSuite")
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
tests := []struct { tests := []struct {
name string name string
@ -142,19 +142,19 @@ func (suite *DataSupportSuite) TestCreateListFromBytes() {
{ {
name: empty, name: empty,
byteArray: make([]byte, 0), byteArray: make([]byte, 0),
checkError: assert.Error, checkError: aw.Err,
isNil: assert.Nil, isNil: assert.Nil,
}, },
{ {
name: invalid, name: invalid,
byteArray: []byte("Invalid byte stream \"subject:\" Not going to work"), byteArray: []byte("Invalid byte stream \"subject:\" Not going to work"),
checkError: assert.Error, checkError: aw.Err,
isNil: assert.Nil, isNil: assert.Nil,
}, },
{ {
name: "Valid List", name: "Valid List",
byteArray: listBytes, byteArray: listBytes,
checkError: assert.NoError, checkError: aw.NoErr,
isNil: assert.NotNil, isNil: assert.NotNil,
}, },
} }
@ -177,7 +177,7 @@ func (suite *DataSupportSuite) TestCreatePageFromBytes() {
}{ }{
{ {
empty, empty,
assert.Error, aw.Err,
assert.Nil, assert.Nil,
func(t *testing.T) []byte { func(t *testing.T) []byte {
return make([]byte, 0) return make([]byte, 0)
@ -185,7 +185,7 @@ func (suite *DataSupportSuite) TestCreatePageFromBytes() {
}, },
{ {
invalid, invalid,
assert.Error, aw.Err,
assert.Nil, assert.Nil,
func(t *testing.T) []byte { func(t *testing.T) []byte {
return []byte("snarf") return []byte("snarf")
@ -193,7 +193,7 @@ func (suite *DataSupportSuite) TestCreatePageFromBytes() {
}, },
{ {
"Valid Page", "Valid Page",
assert.NoError, aw.NoErr,
assert.NotNil, assert.NotNil,
func(t *testing.T) []byte { func(t *testing.T) []byte {
pg := bmodels.NewSitePage() pg := bmodels.NewSitePage()
@ -204,10 +204,10 @@ func (suite *DataSupportSuite) TestCreatePageFromBytes() {
writer := kioser.NewJsonSerializationWriter() writer := kioser.NewJsonSerializationWriter()
err := pg.Serialize(writer) err := pg.Serialize(writer)
require.NoError(t, err) aw.MustNoErr(t, err)
byteArray, err := writer.GetSerializedContent() byteArray, err := writer.GetSerializedContent()
require.NoError(t, err) aw.MustNoErr(t, err)
return byteArray return byteArray
}, },
@ -239,7 +239,7 @@ func (suite *DataSupportSuite) TestHasAttachments() {
"This is testing", "This is testing",
) )
message, err := CreateMessageFromBytes(byteArray) message, err := CreateMessageFromBytes(byteArray)
require.NoError(t, err) aw.MustNoErr(t, err)
return message.GetBody() return message.GetBody()
}, },
}, },
@ -249,7 +249,7 @@ func (suite *DataSupportSuite) TestHasAttachments() {
getBodyable: func(t *testing.T) models.ItemBodyable { getBodyable: func(t *testing.T) models.ItemBodyable {
byteArray := mockconnector.GetMessageWithOneDriveAttachment("Test legacy") byteArray := mockconnector.GetMessageWithOneDriveAttachment("Test legacy")
message, err := CreateMessageFromBytes(byteArray) message, err := CreateMessageFromBytes(byteArray)
require.NoError(t, err) aw.MustNoErr(t, err)
return message.GetBody() return message.GetBody()
}, },
}, },

View File

@ -5,10 +5,10 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/tester/aw"
) )
type SupportTestSuite struct { type SupportTestSuite struct {
@ -22,7 +22,7 @@ func TestSupportTestSuite(t *testing.T) {
func (suite *SupportTestSuite) TestToMessage() { func (suite *SupportTestSuite) TestToMessage() {
bytes := mockconnector.GetMockMessageBytes("m365 mail support test") bytes := mockconnector.GetMockMessageBytes("m365 mail support test")
message, err := CreateMessageFromBytes(bytes) message, err := CreateMessageFromBytes(bytes)
require.NoError(suite.T(), err) aw.MustNoErr(suite.T(), err)
clone := ToMessage(message) clone := ToMessage(message)
suite.Equal(message.GetBccRecipients(), clone.GetBccRecipients()) suite.Equal(message.GetBccRecipients(), clone.GetBccRecipients())
@ -36,7 +36,7 @@ func (suite *SupportTestSuite) TestToEventSimplified() {
t := suite.T() t := suite.T()
bytes := mockconnector.GetMockEventWithAttendeesBytes("M365 Event Support Test") bytes := mockconnector.GetMockEventWithAttendeesBytes("M365 Event Support Test")
event, err := CreateEventFromBytes(bytes) event, err := CreateEventFromBytes(bytes)
require.NoError(t, err) aw.MustNoErr(t, err)
attendees := event.GetAttendees() attendees := event.GetAttendees()
newEvent := ToEventSimplified(event) newEvent := ToEventSimplified(event)

View File

@ -5,7 +5,6 @@ import (
"fmt" "fmt"
"github.com/dustin/go-humanize" "github.com/dustin/go-humanize"
multierror "github.com/hashicorp/go-multierror"
) )
// ConnectorOperationStatus is a data type used to describe the state of // ConnectorOperationStatus is a data type used to describe the state of
@ -15,27 +14,23 @@ import (
// @param incomplete: Bool representation of whether all intended items were download or uploaded. // @param incomplete: Bool representation of whether all intended items were download or uploaded.
// @param bytes: represents the total number of bytes that have been downloaded or uploaded. // @param bytes: represents the total number of bytes that have been downloaded or uploaded.
type ConnectorOperationStatus struct { type ConnectorOperationStatus struct {
lastOperation Operation Folders int
ObjectCount int Metrics CollectionMetrics
FolderCount int details string
Successful int op Operation
ErrorCount int
Err error
incomplete bool
incompleteReason string
additionalDetails string
bytes int64
} }
type CollectionMetrics struct { type CollectionMetrics struct {
Objects, Successes int Objects, Successes int
TotalBytes int64 Bytes int64
} }
func (cm *CollectionMetrics) Combine(additional CollectionMetrics) { func CombineMetrics(a, b CollectionMetrics) CollectionMetrics {
cm.Objects += additional.Objects return CollectionMetrics{
cm.Successes += additional.Successes Objects: a.Objects + b.Objects,
cm.TotalBytes += additional.TotalBytes Successes: a.Successes + b.Successes,
Bytes: a.Bytes + b.Bytes,
}
} }
type Operation int type Operation int
@ -53,30 +48,13 @@ func CreateStatus(
op Operation, op Operation,
folders int, folders int,
cm CollectionMetrics, cm CollectionMetrics,
err error,
details string, details string,
) *ConnectorOperationStatus { ) *ConnectorOperationStatus {
var reason string
if err != nil {
reason = err.Error()
}
hasErrors := err != nil
// TODO(keeprs): remove
numErr := GetNumberOfErrors(err)
status := ConnectorOperationStatus{ status := ConnectorOperationStatus{
lastOperation: op, Folders: folders,
ObjectCount: cm.Objects, Metrics: cm,
FolderCount: folders, details: details,
Successful: cm.Successes, op: op,
ErrorCount: numErr,
Err: err,
incomplete: hasErrors,
incompleteReason: reason,
bytes: cm.TotalBytes,
additionalDetails: details,
} }
return &status return &status
@ -89,32 +67,19 @@ type StatusUpdater func(*ConnectorOperationStatus)
// MergeStatus combines ConnectorOperationsStatus value into a single status // MergeStatus combines ConnectorOperationsStatus value into a single status
func MergeStatus(one, two ConnectorOperationStatus) ConnectorOperationStatus { func MergeStatus(one, two ConnectorOperationStatus) ConnectorOperationStatus {
var hasErrors bool if one.op == OpUnknown {
if one.lastOperation == OpUnknown {
return two return two
} }
if two.lastOperation == OpUnknown { if two.op == OpUnknown {
return one return one
} }
if one.incomplete || two.incomplete {
hasErrors = true
}
status := ConnectorOperationStatus{ status := ConnectorOperationStatus{
lastOperation: one.lastOperation, Folders: one.Folders + two.Folders,
ObjectCount: one.ObjectCount + two.ObjectCount, Metrics: CombineMetrics(one.Metrics, two.Metrics),
FolderCount: one.FolderCount + two.FolderCount, details: one.details + ", " + two.details,
Successful: one.Successful + two.Successful, op: one.op,
// TODO: remove in favor of fault.Errors
ErrorCount: one.ErrorCount + two.ErrorCount,
Err: multierror.Append(one.Err, two.Err).ErrorOrNil(),
bytes: one.bytes + two.bytes,
incomplete: hasErrors,
incompleteReason: one.incompleteReason + ", " + two.incompleteReason,
additionalDetails: one.additionalDetails + ", " + two.additionalDetails,
} }
return status return status
@ -123,23 +88,19 @@ func MergeStatus(one, two ConnectorOperationStatus) ConnectorOperationStatus {
func (cos *ConnectorOperationStatus) String() string { func (cos *ConnectorOperationStatus) String() string {
var operationStatement string var operationStatement string
switch cos.lastOperation { switch cos.op {
case Backup: case Backup:
operationStatement = "Downloaded from " operationStatement = "Downloaded from "
case Restore: case Restore:
operationStatement = "Restored content to " operationStatement = "Restored content to "
} }
message := fmt.Sprintf("Action: %s performed on %d of %d objects (%s) within %d directories.", return fmt.Sprintf("Action: %s performed on %d of %d objects (%s) within %d directories. %s %s",
cos.lastOperation.String(), cos.op.String(),
cos.Successful, cos.Metrics.Successes,
cos.ObjectCount, cos.Metrics.Objects,
humanize.Bytes(uint64(cos.bytes)), humanize.Bytes(uint64(cos.Metrics.Bytes)),
cos.FolderCount) cos.Folders,
operationStatement,
if cos.incomplete { cos.details)
message += " " + cos.incompleteReason
}
return message + " " + operationStatement + cos.additionalDetails
} }

Some files were not shown because too many files have changed in this diff Show More