append ToCore to all errors in tests (#2793)

To retrieve all of clues' structured error data in tests, we need to extract it from the error using the clues library.

This change appends `clues.ToCore(err)` to all variations of `assert.NoError(t, err)`. The only other changes are those necessary to preserve linting, or to produce an error variable for the `ToCore` call.
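
As a minimal sketch of the pattern (the `doWork` helper below is hypothetical; only `clues.ToCore` comes from the change itself), an assertion that previously read `require.NoError(t, doWork())` now keeps an explicit `err` variable so the structured clues data can be attached to the assertion message:

```go
package example_test

import (
	"testing"

	"github.com/alcionai/clues"
	"github.com/stretchr/testify/require"
)

// doWork stands in for any call under test (hypothetical helper).
func doWork() error {
	return nil
}

func TestToCorePattern(t *testing.T) {
	// Before: require.NoError(t, doWork())
	// After: keep an err variable so clues.ToCore(err) can report the
	// error's structured data if the assertion fails.
	err := doWork()
	require.NoError(t, err, clues.ToCore(err))
}
```

On failure, the assertion message then carries the error's structured clues data rather than just its message string.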

---

#### Does this PR need a docs update or release note?

- [x]  No

#### Type of change

- [x] 🤖 Test
- [x] 🧹 Tech Debt/Cleanup

#### Issue(s)

* #1970

#### Test Plan

- [x]  Unit test
- [x] 💚 E2E
Keepers committed 76b3fe3b86 (parent 0125876192) on 2023-03-15 13:02:47 -06:00 via GitHub
100 changed files with 1164 additions and 931 deletions

View File

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config" "github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/print"
@ -70,7 +71,7 @@ func (suite *NoBackupExchangeE2ESuite) SetupSuite() {
suite.recorder = strings.Builder{} suite.recorder = strings.Builder{}
cfg, err := suite.st.S3Config() cfg, err := suite.st.S3Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
force := map[string]string{ force := map[string]string{
tester.TestCfgAccountProvider: "M365", tester.TestCfgAccountProvider: "M365",
@ -85,7 +86,7 @@ func (suite *NoBackupExchangeE2ESuite) SetupSuite() {
// init the repo first // init the repo first
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{}) suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
} }
func (suite *NoBackupExchangeE2ESuite) TestExchangeBackupListCmd_empty() { func (suite *NoBackupExchangeE2ESuite) TestExchangeBackupListCmd_empty() {
@ -107,7 +108,8 @@ func (suite *NoBackupExchangeE2ESuite) TestExchangeBackupListCmd_empty() {
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := suite.recorder.String() result := suite.recorder.String()
@ -149,7 +151,7 @@ func (suite *BackupExchangeE2ESuite) SetupSuite() {
suite.st = tester.NewPrefixedS3Storage(t) suite.st = tester.NewPrefixedS3Storage(t)
cfg, err := suite.st.S3Config() cfg, err := suite.st.S3Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
force := map[string]string{ force := map[string]string{
tester.TestCfgAccountProvider: "M365", tester.TestCfgAccountProvider: "M365",
@ -164,7 +166,7 @@ func (suite *BackupExchangeE2ESuite) SetupSuite() {
// init the repo first // init the repo first
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{}) suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
} }
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd() { func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd() {
@ -192,7 +194,8 @@ func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd() {
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := recorder.String() result := recorder.String()
t.Log("backup results", result) t.Log("backup results", result)
@ -229,7 +232,7 @@ func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_UserNotInTenant() {
// run the command // run the command
err := cmd.ExecuteContext(ctx) err := cmd.ExecuteContext(ctx)
require.Error(t, err) require.Error(t, err, clues.ToCore(err))
assert.Contains( assert.Contains(
t, t,
err.Error(), err.Error(),
@ -278,7 +281,7 @@ func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() {
suite.recorder = strings.Builder{} suite.recorder = strings.Builder{}
cfg, err := suite.st.S3Config() cfg, err := suite.st.S3Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
force := map[string]string{ force := map[string]string{
tester.TestCfgAccountProvider: "M365", tester.TestCfgAccountProvider: "M365",
@ -296,7 +299,7 @@ func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() {
// init the repo first // init the repo first
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{}) suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
suite.backupOps = make(map[path.CategoryType]string) suite.backupOps = make(map[path.CategoryType]string)
@ -322,8 +325,10 @@ func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() {
sel.Include(scopes) sel.Include(scopes)
bop, err := suite.repo.NewBackup(ctx, sel.Selector) bop, err := suite.repo.NewBackup(ctx, sel.Selector)
require.NoError(t, bop.Run(ctx)) require.NoError(t, err, clues.ToCore(err))
require.NoError(t, err)
err = bop.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
bIDs := string(bop.Results.BackupID) bIDs := string(bop.Results.BackupID)
@ -361,7 +366,8 @@ func (suite *PreparedBackupExchangeE2ESuite) TestExchangeListCmd() {
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output // compare the output
result := suite.recorder.String() result := suite.recorder.String()
@ -394,7 +400,8 @@ func (suite *PreparedBackupExchangeE2ESuite) TestExchangeListCmd_singleID() {
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output // compare the output
result := suite.recorder.String() result := suite.recorder.String()
@ -421,7 +428,8 @@ func (suite *PreparedBackupExchangeE2ESuite) TestExchangeListCmd_badID() {
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
// run the command // run the command
require.Error(t, cmd.ExecuteContext(ctx)) err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}) })
} }
} }
@ -441,7 +449,7 @@ func (suite *PreparedBackupExchangeE2ESuite) TestExchangeDetailsCmd() {
// fetch the details from the repo first // fetch the details from the repo first
deets, _, errs := suite.repo.BackupDetails(ctx, string(bID)) deets, _, errs := suite.repo.BackupDetails(ctx, string(bID))
require.NoError(t, errs.Failure()) require.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
require.Empty(t, errs.Recovered()) require.Empty(t, errs.Recovered())
cmd := tester.StubRootCmd( cmd := tester.StubRootCmd(
@ -455,7 +463,8 @@ func (suite *PreparedBackupExchangeE2ESuite) TestExchangeDetailsCmd() {
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output // compare the output
result := suite.recorder.String() result := suite.recorder.String()
@ -516,7 +525,7 @@ func (suite *BackupDeleteExchangeE2ESuite) SetupSuite() {
suite.st = tester.NewPrefixedS3Storage(t) suite.st = tester.NewPrefixedS3Storage(t)
cfg, err := suite.st.S3Config() cfg, err := suite.st.S3Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
force := map[string]string{ force := map[string]string{
tester.TestCfgAccountProvider: "M365", tester.TestCfgAccountProvider: "M365",
@ -532,7 +541,7 @@ func (suite *BackupDeleteExchangeE2ESuite) SetupSuite() {
// init the repo first // init the repo first
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{}) suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
m365UserID := tester.M365UserID(t) m365UserID := tester.M365UserID(t)
users := []string{m365UserID} users := []string{m365UserID}
@ -542,8 +551,10 @@ func (suite *BackupDeleteExchangeE2ESuite) SetupSuite() {
sel.Include(sel.MailFolders([]string{exchange.DefaultMailFolder}, selectors.PrefixMatch())) sel.Include(sel.MailFolders([]string{exchange.DefaultMailFolder}, selectors.PrefixMatch()))
suite.backupOp, err = suite.repo.NewBackup(ctx, sel.Selector) suite.backupOp, err = suite.repo.NewBackup(ctx, sel.Selector)
require.NoError(t, suite.backupOp.Run(ctx)) require.NoError(t, err, clues.ToCore(err))
require.NoError(t, err)
err = suite.backupOp.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
} }
func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd() { func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd() {
@ -560,7 +571,8 @@ func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd() {
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// a follow-up details call should fail, due to the backup ID being deleted // a follow-up details call should fail, due to the backup ID being deleted
cmd = tester.StubRootCmd( cmd = tester.StubRootCmd(
@ -569,7 +581,8 @@ func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd() {
"--backup", string(suite.backupOp.Results.BackupID)) "--backup", string(suite.backupOp.Results.BackupID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
require.Error(t, cmd.ExecuteContext(ctx)) err = cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
} }
func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd_UnknownID() { func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd_UnknownID() {
@ -586,5 +599,6 @@ func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd_UnknownID
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// unknown backupIDs should error since the modelStore can't find the backup // unknown backupIDs should error since the modelStore can't find the backup
require.Error(t, cmd.ExecuteContext(ctx)) err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
} }

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/cli/utils/testdata" "github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -99,7 +100,8 @@ func (suite *ExchangeSuite) TestValidateBackupCreateFlags() {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
test.expect(t, validateExchangeBackupCreateFlags(test.user, test.data)) err := validateExchangeBackupCreateFlags(test.user, test.data)
test.expect(t, err, clues.ToCore(err))
}) })
} }
} }
@ -233,7 +235,7 @@ func (suite *ExchangeSuite) TestExchangeBackupDetailsSelectors() {
"backup-ID", "backup-ID",
test.Opts, test.Opts,
false) false)
assert.NoError(t, err, "failure") assert.NoError(t, err, clues.ToCore(err))
assert.ElementsMatch(t, test.Expected, output.Entries) assert.ElementsMatch(t, test.Expected, output.Entries)
}) })
} }
@ -253,7 +255,7 @@ func (suite *ExchangeSuite) TestExchangeBackupDetailsSelectorsBadFormats() {
"backup-ID", "backup-ID",
test.Opts, test.Opts,
false) false)
assert.Error(t, err, "failure") assert.Error(t, err, clues.ToCore(err))
assert.Empty(t, output) assert.Empty(t, output)
}) })
} }

View File

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config" "github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/print"
@ -61,7 +62,7 @@ func (suite *NoBackupOneDriveE2ESuite) SetupSuite() {
suite.st = tester.NewPrefixedS3Storage(t) suite.st = tester.NewPrefixedS3Storage(t)
cfg, err := suite.st.S3Config() cfg, err := suite.st.S3Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
force := map[string]string{ force := map[string]string{
tester.TestCfgAccountProvider: "M365", tester.TestCfgAccountProvider: "M365",
@ -83,7 +84,7 @@ func (suite *NoBackupOneDriveE2ESuite) SetupSuite() {
// TODO: turn back on when this stops throttling-out the tests. // TODO: turn back on when this stops throttling-out the tests.
// ToggleFeatures: control.Toggles{EnablePermissionsBackup: true}, // ToggleFeatures: control.Toggles{EnablePermissionsBackup: true},
}) })
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
} }
func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupListCmd_empty() { func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupListCmd_empty() {
@ -105,7 +106,8 @@ func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupListCmd_empty() {
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := suite.recorder.String() result := suite.recorder.String()
@ -135,7 +137,7 @@ func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupCmd_UserNotInTenant() {
// run the command // run the command
err := cmd.ExecuteContext(ctx) err := cmd.ExecuteContext(ctx)
require.Error(t, err) require.Error(t, err, clues.ToCore(err))
assert.Contains( assert.Contains(
t, t,
err.Error(), err.Error(),
@ -182,7 +184,7 @@ func (suite *BackupDeleteOneDriveE2ESuite) SetupSuite() {
suite.st = tester.NewPrefixedS3Storage(t) suite.st = tester.NewPrefixedS3Storage(t)
cfg, err := suite.st.S3Config() cfg, err := suite.st.S3Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
force := map[string]string{ force := map[string]string{
tester.TestCfgAccountProvider: "M365", tester.TestCfgAccountProvider: "M365",
@ -205,7 +207,7 @@ func (suite *BackupDeleteOneDriveE2ESuite) SetupSuite() {
// TODO: turn back on when this stops throttling-out the tests. // TODO: turn back on when this stops throttling-out the tests.
// ToggleFeatures: control.Toggles{EnablePermissionsBackup: true}, // ToggleFeatures: control.Toggles{EnablePermissionsBackup: true},
}) })
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
m365UserID := tester.M365UserID(t) m365UserID := tester.M365UserID(t)
users := []string{m365UserID} users := []string{m365UserID}
@ -215,8 +217,10 @@ func (suite *BackupDeleteOneDriveE2ESuite) SetupSuite() {
sel.Include(sel.Folders(selectors.Any())) sel.Include(sel.Folders(selectors.Any()))
suite.backupOp, err = suite.repo.NewBackup(ctx, sel.Selector) suite.backupOp, err = suite.repo.NewBackup(ctx, sel.Selector)
require.NoError(t, suite.backupOp.Run(ctx)) require.NoError(t, err, clues.ToCore(err))
require.NoError(t, err)
err = suite.backupOp.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
} }
func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd() { func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd() {
@ -238,7 +242,8 @@ func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd() {
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := suite.recorder.String() result := suite.recorder.String()
@ -251,7 +256,8 @@ func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd() {
"--backup", string(suite.backupOp.Results.BackupID)) "--backup", string(suite.backupOp.Results.BackupID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
require.Error(t, cmd.ExecuteContext(ctx)) err = cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
} }
func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd_unknownID() { func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd_unknownID() {
@ -268,5 +274,6 @@ func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd_unknownID
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// unknown backupIDs should error since the modelStore can't find the backup // unknown backupIDs should error since the modelStore can't find the backup
require.Error(t, cmd.ExecuteContext(ctx)) err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
} }

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/cli/utils/testdata" "github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
) )
@ -85,7 +86,8 @@ func (suite *OneDriveSuite) TestValidateOneDriveBackupCreateFlags() {
} }
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
test.expect(suite.T(), validateOneDriveBackupCreateFlags(test.user)) err := validateOneDriveBackupCreateFlags(test.user)
test.expect(suite.T(), err, clues.ToCore(err))
}) })
} }
} }
@ -104,7 +106,7 @@ func (suite *OneDriveSuite) TestOneDriveBackupDetailsSelectors() {
"backup-ID", "backup-ID",
test.Opts, test.Opts,
false) false)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.ElementsMatch(t, test.Expected, output.Entries) assert.ElementsMatch(t, test.Expected, output.Entries)
}) })
} }
@ -124,7 +126,7 @@ func (suite *OneDriveSuite) TestOneDriveBackupDetailsSelectorsBadFormats() {
"backup-ID", "backup-ID",
test.Opts, test.Opts,
false) false)
assert.Error(t, err) assert.Error(t, err, clues.ToCore(err))
assert.Empty(t, output) assert.Empty(t, output)
}) })
} }

View File

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config" "github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/print"
@ -59,7 +60,7 @@ func (suite *NoBackupSharePointE2ESuite) SetupSuite() {
suite.st = tester.NewPrefixedS3Storage(t) suite.st = tester.NewPrefixedS3Storage(t)
cfg, err := suite.st.S3Config() cfg, err := suite.st.S3Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
force := map[string]string{ force := map[string]string{
tester.TestCfgAccountProvider: "M365", tester.TestCfgAccountProvider: "M365",
@ -74,7 +75,7 @@ func (suite *NoBackupSharePointE2ESuite) SetupSuite() {
// init the repo first // init the repo first
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{}) suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
} }
func (suite *NoBackupSharePointE2ESuite) TestSharePointBackupListCmd_empty() { func (suite *NoBackupSharePointE2ESuite) TestSharePointBackupListCmd_empty() {
@ -96,7 +97,8 @@ func (suite *NoBackupSharePointE2ESuite) TestSharePointBackupListCmd_empty() {
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := suite.recorder.String() result := suite.recorder.String()
@ -138,7 +140,7 @@ func (suite *BackupDeleteSharePointE2ESuite) SetupSuite() {
suite.st = tester.NewPrefixedS3Storage(t) suite.st = tester.NewPrefixedS3Storage(t)
cfg, err := suite.st.S3Config() cfg, err := suite.st.S3Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
force := map[string]string{ force := map[string]string{
tester.TestCfgAccountProvider: "M365", tester.TestCfgAccountProvider: "M365",
@ -154,7 +156,7 @@ func (suite *BackupDeleteSharePointE2ESuite) SetupSuite() {
// init the repo first // init the repo first
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{}) suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
m365SiteID := tester.M365SiteID(t) m365SiteID := tester.M365SiteID(t)
sites := []string{m365SiteID} sites := []string{m365SiteID}
@ -164,8 +166,10 @@ func (suite *BackupDeleteSharePointE2ESuite) SetupSuite() {
sel.Include(sel.LibraryFolders(selectors.Any())) sel.Include(sel.LibraryFolders(selectors.Any()))
suite.backupOp, err = suite.repo.NewBackup(ctx, sel.Selector) suite.backupOp, err = suite.repo.NewBackup(ctx, sel.Selector)
require.NoError(t, suite.backupOp.Run(ctx)) require.NoError(t, err, clues.ToCore(err))
require.NoError(t, err)
err = suite.backupOp.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
} }
func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd() { func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd() {
@ -187,7 +191,8 @@ func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd() {
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := suite.recorder.String() result := suite.recorder.String()
@ -202,7 +207,8 @@ func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd() {
// "--backup", string(suite.backupOp.Results.BackupID)) // "--backup", string(suite.backupOp.Results.BackupID))
// cli.BuildCommandTree(cmd) // cli.BuildCommandTree(cmd)
// require.Error(t, cmd.ExecuteContext(ctx)) // err := cmd.ExecuteContext(ctx)
// require.Error(t, err, clues.ToCore(err))
func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd_unknownID() { func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd_unknownID() {
t := suite.T() t := suite.T()
@ -218,5 +224,6 @@ func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd_unkno
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// unknown backupIDs should error since the modelStore can't find the backup // unknown backupIDs should error since the modelStore can't find the backup
require.Error(t, cmd.ExecuteContext(ctx)) err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
} }

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/cli/utils/testdata" "github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/connector" "github.com/alcionai/corso/src/internal/connector"
@ -100,7 +101,8 @@ func (suite *SharePointSuite) TestValidateSharePointBackupCreateFlags() {
} }
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
test.expect(suite.T(), validateSharePointBackupCreateFlags(test.site, test.weburl, nil)) err := validateSharePointBackupCreateFlags(test.site, test.weburl, nil)
test.expect(suite.T(), err, clues.ToCore(err))
}) })
} }
} }
@ -200,7 +202,7 @@ func (suite *SharePointSuite) TestSharePointBackupCreateSelectors() {
defer flush() defer flush()
sel, err := sharePointBackupCreateSelectors(ctx, test.site, test.weburl, test.data, gc) sel, err := sharePointBackupCreateSelectors(ctx, test.site, test.weburl, test.data, gc)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.ElementsMatch(t, test.expect, sel.DiscreteResourceOwners()) assert.ElementsMatch(t, test.expect, sel.DiscreteResourceOwners())
}) })
@ -221,7 +223,7 @@ func (suite *SharePointSuite) TestSharePointBackupDetailsSelectors() {
"backup-ID", "backup-ID",
test.Opts, test.Opts,
false) false)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.ElementsMatch(t, test.Expected, output.Entries) assert.ElementsMatch(t, test.Expected, output.Entries)
}) })
} }
@ -241,7 +243,7 @@ func (suite *SharePointSuite) TestSharePointBackupDetailsSelectorsBadFormats() {
"backup-ID", "backup-ID",
test.Opts, test.Opts,
false) false)
assert.Error(t, err) assert.Error(t, err, clues.ToCore(err))
assert.Empty(t, output) assert.Empty(t, output)
}) })
} }

View File

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/credentials" "github.com/alcionai/corso/src/pkg/credentials"
@ -53,20 +54,21 @@ func (suite *ConfigSuite) TestReadRepoConfigBasic() {
testConfigData := fmt.Sprintf(configFileTemplate, b, tID) testConfigData := fmt.Sprintf(configFileTemplate, b, tID)
testConfigFilePath := filepath.Join(t.TempDir(), "corso.toml") testConfigFilePath := filepath.Join(t.TempDir(), "corso.toml")
err := os.WriteFile(testConfigFilePath, []byte(testConfigData), 0o700) err := os.WriteFile(testConfigFilePath, []byte(testConfigData), 0o700)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
// Configure viper to read test config file // Configure viper to read test config file
vpr.SetConfigFile(testConfigFilePath) vpr.SetConfigFile(testConfigFilePath)
// Read and validate config // Read and validate config
require.NoError(t, vpr.ReadInConfig(), "reading repo config") err = vpr.ReadInConfig()
require.NoError(t, err, "reading repo config", clues.ToCore(err))
s3Cfg, err := s3ConfigsFromViper(vpr) s3Cfg, err := s3ConfigsFromViper(vpr)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, b, s3Cfg.Bucket) assert.Equal(t, b, s3Cfg.Bucket)
m365, err := m365ConfigsFromViper(vpr) m365, err := m365ConfigsFromViper(vpr)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, tID, m365.AzureTenantID) assert.Equal(t, tID, m365.AzureTenantID)
} }
@ -74,6 +76,8 @@ func (suite *ConfigSuite) TestWriteReadConfig() {
var ( var (
t = suite.T() t = suite.T()
vpr = viper.New() vpr = viper.New()
// Configure viper to read test config file
testConfigFilePath = filepath.Join(t.TempDir(), "corso.toml")
) )
const ( const (
@ -81,23 +85,26 @@ func (suite *ConfigSuite) TestWriteReadConfig() {
tid = "3c0748d2-470e-444c-9064-1268e52609d5" tid = "3c0748d2-470e-444c-9064-1268e52609d5"
) )
// Configure viper to read test config file err := initWithViper(vpr, testConfigFilePath)
testConfigFilePath := filepath.Join(t.TempDir(), "corso.toml") require.NoError(t, err, "initializing repo config", clues.ToCore(err))
require.NoError(t, initWithViper(vpr, testConfigFilePath), "initializing repo config")
s3Cfg := storage.S3Config{Bucket: bkt, DoNotUseTLS: true, DoNotVerifyTLS: true} s3Cfg := storage.S3Config{Bucket: bkt, DoNotUseTLS: true, DoNotVerifyTLS: true}
m365 := account.M365Config{AzureTenantID: tid} m365 := account.M365Config{AzureTenantID: tid}
require.NoError(t, writeRepoConfigWithViper(vpr, s3Cfg, m365, "repoid"), "writing repo config")
require.NoError(t, vpr.ReadInConfig(), "reading repo config") err = writeRepoConfigWithViper(vpr, s3Cfg, m365, "repoid")
require.NoError(t, err, "writing repo config", clues.ToCore(err))
err = vpr.ReadInConfig()
require.NoError(t, err, "reading repo config", clues.ToCore(err))
readS3Cfg, err := s3ConfigsFromViper(vpr) readS3Cfg, err := s3ConfigsFromViper(vpr)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, readS3Cfg.Bucket, s3Cfg.Bucket) assert.Equal(t, readS3Cfg.Bucket, s3Cfg.Bucket)
assert.Equal(t, readS3Cfg.DoNotUseTLS, s3Cfg.DoNotUseTLS) assert.Equal(t, readS3Cfg.DoNotUseTLS, s3Cfg.DoNotUseTLS)
assert.Equal(t, readS3Cfg.DoNotVerifyTLS, s3Cfg.DoNotVerifyTLS) assert.Equal(t, readS3Cfg.DoNotVerifyTLS, s3Cfg.DoNotVerifyTLS)
readM365, err := m365ConfigsFromViper(vpr) readM365, err := m365ConfigsFromViper(vpr)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, readM365.AzureTenantID, m365.AzureTenantID) assert.Equal(t, readM365.AzureTenantID, m365.AzureTenantID)
} }
@ -105,6 +112,8 @@ func (suite *ConfigSuite) TestMustMatchConfig() {
var ( var (
t = suite.T() t = suite.T()
vpr = viper.New() vpr = viper.New()
// Configure viper to read test config file
testConfigFilePath = filepath.Join(t.TempDir(), "corso.toml")
) )
const ( const (
@ -112,15 +121,17 @@ func (suite *ConfigSuite) TestMustMatchConfig() {
tid = "dfb12063-7598-458b-85ab-42352c5c25e2" tid = "dfb12063-7598-458b-85ab-42352c5c25e2"
) )
// Configure viper to read test config file err := initWithViper(vpr, testConfigFilePath)
testConfigFilePath := filepath.Join(t.TempDir(), "corso.toml") require.NoError(t, err, "initializing repo config")
require.NoError(t, initWithViper(vpr, testConfigFilePath), "initializing repo config")
s3Cfg := storage.S3Config{Bucket: bkt} s3Cfg := storage.S3Config{Bucket: bkt}
m365 := account.M365Config{AzureTenantID: tid} m365 := account.M365Config{AzureTenantID: tid}
require.NoError(t, writeRepoConfigWithViper(vpr, s3Cfg, m365, "repoid"), "writing repo config") err = writeRepoConfigWithViper(vpr, s3Cfg, m365, "repoid")
require.NoError(t, vpr.ReadInConfig(), "reading repo config") require.NoError(t, err, "writing repo config", clues.ToCore(err))
err = vpr.ReadInConfig()
require.NoError(t, err, "reading repo config", clues.ToCore(err))
table := []struct { table := []struct {
name string name string
@ -172,7 +183,7 @@ func (suite *ConfigSuite) TestMustMatchConfig() {
} }
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
test.errCheck(suite.T(), mustMatchConfig(vpr, test.input)) test.errCheck(suite.T(), mustMatchConfig(vpr, test.input), clues.ToCore(err))
}) })
} }
} }
@ -205,7 +216,9 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount() {
// Configure viper to read test config file // Configure viper to read test config file
testConfigFilePath := filepath.Join(t.TempDir(), "corso.toml") testConfigFilePath := filepath.Join(t.TempDir(), "corso.toml")
require.NoError(t, initWithViper(vpr, testConfigFilePath), "initializing repo config")
err := initWithViper(vpr, testConfigFilePath)
require.NoError(t, err, "initializing repo config", clues.ToCore(err))
s3Cfg := storage.S3Config{ s3Cfg := storage.S3Config{
Bucket: bkt, Bucket: bkt,
@ -216,14 +229,17 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount() {
} }
m365 := account.M365Config{AzureTenantID: tid} m365 := account.M365Config{AzureTenantID: tid}
require.NoError(t, writeRepoConfigWithViper(vpr, s3Cfg, m365, "repoid"), "writing repo config") err = writeRepoConfigWithViper(vpr, s3Cfg, m365, "repoid")
require.NoError(t, vpr.ReadInConfig(), "reading repo config") require.NoError(t, err, "writing repo config", clues.ToCore(err))
err = vpr.ReadInConfig()
require.NoError(t, err, "reading repo config", clues.ToCore(err))
config, err := getStorageAndAccountWithViper(vpr, true, nil) config, err := getStorageAndAccountWithViper(vpr, true, nil)
require.NoError(t, err, "getting storage and account from config") require.NoError(t, err, "getting storage and account from config", clues.ToCore(err))
readS3Cfg, err := config.Storage.S3Config() readS3Cfg, err := config.Storage.S3Config()
require.NoError(t, err, "reading s3 config from storage") require.NoError(t, err, "reading s3 config from storage", clues.ToCore(err))
assert.Equal(t, readS3Cfg.Bucket, s3Cfg.Bucket) assert.Equal(t, readS3Cfg.Bucket, s3Cfg.Bucket)
assert.Equal(t, readS3Cfg.Endpoint, s3Cfg.Endpoint) assert.Equal(t, readS3Cfg.Endpoint, s3Cfg.Endpoint)
assert.Equal(t, readS3Cfg.Prefix, s3Cfg.Prefix) assert.Equal(t, readS3Cfg.Prefix, s3Cfg.Prefix)
@ -232,11 +248,11 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount() {
assert.Equal(t, config.RepoID, "repoid") assert.Equal(t, config.RepoID, "repoid")
common, err := config.Storage.CommonConfig() common, err := config.Storage.CommonConfig()
require.NoError(t, err, "reading common config from storage") require.NoError(t, err, "reading common config from storage", clues.ToCore(err))
assert.Equal(t, common.CorsoPassphrase, os.Getenv(credentials.CorsoPassphrase)) assert.Equal(t, common.CorsoPassphrase, os.Getenv(credentials.CorsoPassphrase))
readM365, err := config.Account.M365Config() readM365, err := config.Account.M365Config()
require.NoError(t, err, "reading m365 config from account") require.NoError(t, err, "reading m365 config from account", clues.ToCore(err))
assert.Equal(t, readM365.AzureTenantID, m365.AzureTenantID) assert.Equal(t, readM365.AzureTenantID, m365.AzureTenantID)
assert.Equal(t, readM365.AzureClientID, os.Getenv(credentials.AzureClientID)) assert.Equal(t, readM365.AzureClientID, os.Getenv(credentials.AzureClientID))
assert.Equal(t, readM365.AzureClientSecret, os.Getenv(credentials.AzureClientSecret)) assert.Equal(t, readM365.AzureClientSecret, os.Getenv(credentials.AzureClientSecret))
@ -267,10 +283,10 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount_noFileOnlyOverride
} }
config, err := getStorageAndAccountWithViper(vpr, false, overrides) config, err := getStorageAndAccountWithViper(vpr, false, overrides)
require.NoError(t, err, "getting storage and account from config") require.NoError(t, err, "getting storage and account from config", clues.ToCore(err))
readS3Cfg, err := config.Storage.S3Config() readS3Cfg, err := config.Storage.S3Config()
require.NoError(t, err, "reading s3 config from storage") require.NoError(t, err, "reading s3 config from storage", clues.ToCore(err))
assert.Equal(t, readS3Cfg.Bucket, bkt) assert.Equal(t, readS3Cfg.Bucket, bkt)
assert.Equal(t, config.RepoID, "") assert.Equal(t, config.RepoID, "")
assert.Equal(t, readS3Cfg.Endpoint, end) assert.Equal(t, readS3Cfg.Endpoint, end)
@ -279,11 +295,11 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount_noFileOnlyOverride
assert.True(t, readS3Cfg.DoNotVerifyTLS) assert.True(t, readS3Cfg.DoNotVerifyTLS)
common, err := config.Storage.CommonConfig() common, err := config.Storage.CommonConfig()
require.NoError(t, err, "reading common config from storage") require.NoError(t, err, "reading common config from storage", clues.ToCore(err))
assert.Equal(t, common.CorsoPassphrase, os.Getenv(credentials.CorsoPassphrase)) assert.Equal(t, common.CorsoPassphrase, os.Getenv(credentials.CorsoPassphrase))
readM365, err := config.Account.M365Config() readM365, err := config.Account.M365Config()
require.NoError(t, err, "reading m365 config from account") require.NoError(t, err, "reading m365 config from account", clues.ToCore(err))
assert.Equal(t, readM365.AzureTenantID, m365.AzureTenantID) assert.Equal(t, readM365.AzureTenantID, m365.AzureTenantID)
assert.Equal(t, readM365.AzureClientID, os.Getenv(credentials.AzureClientID)) assert.Equal(t, readM365.AzureClientID, os.Getenv(credentials.AzureClientID))
assert.Equal(t, readM365.AzureClientSecret, os.Getenv(credentials.AzureClientSecret)) assert.Equal(t, readM365.AzureClientSecret, os.Getenv(credentials.AzureClientSecret))

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
) )
@ -20,14 +21,15 @@ func TestPrintUnitSuite(t *testing.T) {
} }
func (suite *PrintUnitSuite) TestOnly() { func (suite *PrintUnitSuite) TestOnly() {
t := suite.T()
c := &cobra.Command{}
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
t := suite.T()
c := &cobra.Command{}
ctx = SetRootCmd(ctx, c) ctx = SetRootCmd(ctx, c)
assert.NoError(t, Only(ctx, nil))
err := Only(ctx, nil)
assert.NoError(t, err, clues.ToCore(err))
assert.True(t, c.SilenceUsage) assert.True(t, c.SilenceUsage)
} }

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config" "github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -61,7 +62,7 @@ func (suite *S3E2ESuite) TestInitS3Cmd() {
st := tester.NewPrefixedS3Storage(t) st := tester.NewPrefixedS3Storage(t)
cfg, err := st.S3Config() cfg, err := st.S3Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
vpr, configFP := tester.MakeTempTestConfigClone(t, nil) vpr, configFP := tester.MakeTempTestConfigClone(t, nil)
if !test.hasConfigFile { if !test.hasConfigFile {
@ -80,12 +81,12 @@ func (suite *S3E2ESuite) TestInitS3Cmd() {
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) err = cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// a second initialization should result in an error // a second initialization should result in an error
err = cmd.ExecuteContext(ctx) err = cmd.ExecuteContext(ctx)
assert.Error(t, err) assert.ErrorIs(t, err, repository.ErrorRepoAlreadyExists, clues.ToCore(err))
assert.ErrorIs(t, err, repository.ErrorRepoAlreadyExists)
}) })
} }
} }
@ -98,7 +99,7 @@ func (suite *S3E2ESuite) TestInitMultipleTimes() {
st := tester.NewPrefixedS3Storage(t) st := tester.NewPrefixedS3Storage(t)
cfg, err := st.S3Config() cfg, err := st.S3Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
vpr, configFP := tester.MakeTempTestConfigClone(t, nil) vpr, configFP := tester.MakeTempTestConfigClone(t, nil)
@ -115,7 +116,8 @@ func (suite *S3E2ESuite) TestInitMultipleTimes() {
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) err = cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
} }
} }
@ -127,7 +129,7 @@ func (suite *S3E2ESuite) TestInitS3Cmd_missingBucket() {
st := tester.NewPrefixedS3Storage(t) st := tester.NewPrefixedS3Storage(t)
cfg, err := st.S3Config() cfg, err := st.S3Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
vpr, configFP := tester.MakeTempTestConfigClone(t, nil) vpr, configFP := tester.MakeTempTestConfigClone(t, nil)
@ -140,7 +142,8 @@ func (suite *S3E2ESuite) TestInitS3Cmd_missingBucket() {
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
require.Error(t, cmd.ExecuteContext(ctx)) err = cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
} }
func (suite *S3E2ESuite) TestConnectS3Cmd() { func (suite *S3E2ESuite) TestConnectS3Cmd() {
@ -175,7 +178,7 @@ func (suite *S3E2ESuite) TestConnectS3Cmd() {
st := tester.NewPrefixedS3Storage(t) st := tester.NewPrefixedS3Storage(t)
cfg, err := st.S3Config() cfg, err := st.S3Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
force := map[string]string{ force := map[string]string{
tester.TestCfgAccountProvider: "M365", tester.TestCfgAccountProvider: "M365",
@ -193,7 +196,7 @@ func (suite *S3E2ESuite) TestConnectS3Cmd() {
// init the repo first // init the repo first
_, err = repository.Initialize(ctx, account.Account{}, st, control.Options{}) _, err = repository.Initialize(ctx, account.Account{}, st, control.Options{})
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
// then test it // then test it
cmd := tester.StubRootCmd( cmd := tester.StubRootCmd(
@ -205,7 +208,8 @@ func (suite *S3E2ESuite) TestConnectS3Cmd() {
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
assert.NoError(t, cmd.ExecuteContext(ctx)) err = cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
}) })
} }
} }
@ -218,7 +222,7 @@ func (suite *S3E2ESuite) TestConnectS3Cmd_BadBucket() {
st := tester.NewPrefixedS3Storage(t) st := tester.NewPrefixedS3Storage(t)
cfg, err := st.S3Config() cfg, err := st.S3Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
vpr, configFP := tester.MakeTempTestConfigClone(t, nil) vpr, configFP := tester.MakeTempTestConfigClone(t, nil)
@ -232,7 +236,8 @@ func (suite *S3E2ESuite) TestConnectS3Cmd_BadBucket() {
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
require.Error(t, cmd.ExecuteContext(ctx)) err = cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
} }
func (suite *S3E2ESuite) TestConnectS3Cmd_BadPrefix() { func (suite *S3E2ESuite) TestConnectS3Cmd_BadPrefix() {
@ -243,7 +248,7 @@ func (suite *S3E2ESuite) TestConnectS3Cmd_BadPrefix() {
st := tester.NewPrefixedS3Storage(t) st := tester.NewPrefixedS3Storage(t)
cfg, err := st.S3Config() cfg, err := st.S3Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
vpr, configFP := tester.MakeTempTestConfigClone(t, nil) vpr, configFP := tester.MakeTempTestConfigClone(t, nil)
@ -257,5 +262,6 @@ func (suite *S3E2ESuite) TestConnectS3Cmd_BadPrefix() {
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
require.Error(t, cmd.ExecuteContext(ctx)) err = cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
} }

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config" "github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
@ -63,7 +64,7 @@ func (suite *RestoreExchangeE2ESuite) SetupSuite() {
suite.st = tester.NewPrefixedS3Storage(t) suite.st = tester.NewPrefixedS3Storage(t)
cfg, err := suite.st.S3Config() cfg, err := suite.st.S3Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
force := map[string]string{ force := map[string]string{
tester.TestCfgAccountProvider: "M365", tester.TestCfgAccountProvider: "M365",
@ -77,7 +78,7 @@ func (suite *RestoreExchangeE2ESuite) SetupSuite() {
// init the repo first // init the repo first
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{}) suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
suite.backupOps = make(map[path.CategoryType]operations.BackupOperation) suite.backupOps = make(map[path.CategoryType]operations.BackupOperation)
@ -101,17 +102,19 @@ func (suite *RestoreExchangeE2ESuite) SetupSuite() {
sel.Include(scopes) sel.Include(scopes)
bop, err := suite.repo.NewBackup(ctx, sel.Selector) bop, err := suite.repo.NewBackup(ctx, sel.Selector)
require.NoError(t, bop.Run(ctx)) require.NoError(t, err, clues.ToCore(err))
require.NoError(t, err)
err = bop.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
suite.backupOps[set] = bop suite.backupOps[set] = bop
// sanity check, ensure we can find the backup and its details immediately // sanity check, ensure we can find the backup and its details immediately
_, err = suite.repo.Backup(ctx, bop.Results.BackupID) _, err = suite.repo.Backup(ctx, bop.Results.BackupID)
require.NoError(t, err, "retrieving recent backup by ID") require.NoError(t, err, "retrieving recent backup by ID", clues.ToCore(err))
_, _, errs := suite.repo.BackupDetails(ctx, string(bop.Results.BackupID)) _, _, errs := suite.repo.BackupDetails(ctx, string(bop.Results.BackupID))
require.NoError(t, errs.Failure(), "retrieving recent backup details by ID") require.NoError(t, errs.Failure(), "retrieving recent backup details by ID", clues.ToCore(err))
require.Empty(t, errs.Recovered(), "retrieving recent backup details by ID") require.Empty(t, errs.Recovered(), "retrieving recent backup details by ID")
} }
} }
@ -133,7 +136,8 @@ func (suite *RestoreExchangeE2ESuite) TestExchangeRestoreCmd() {
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
require.NoError(t, cmd.ExecuteContext(ctx)) err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
}) })
} }
} }
@ -168,7 +172,8 @@ func (suite *RestoreExchangeE2ESuite) TestExchangeRestoreCmd_badTimeFlags() {
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
require.Error(t, cmd.ExecuteContext(ctx)) err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}) })
} }
} }
@ -201,7 +206,8 @@ func (suite *RestoreExchangeE2ESuite) TestExchangeRestoreCmd_badBoolFlags() {
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
require.Error(t, cmd.ExecuteContext(ctx)) err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}) })
} }
} }

View File

@ -6,6 +6,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -52,7 +53,8 @@ func (suite *ExchangeUtilsSuite) TestValidateRestoreFlags() {
} }
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
test.expect(suite.T(), utils.ValidateExchangeRestoreFlags(test.backupID, test.opts)) err := utils.ValidateExchangeRestoreFlags(test.backupID, test.opts)
test.expect(suite.T(), err, clues.ToCore(err))
}) })
} }
} }

View File

@ -6,6 +6,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
@ -34,7 +35,8 @@ func (suite *CliUtilsSuite) TestRequireProps() {
}, },
} }
for _, test := range table { for _, test := range table {
test.errCheck(suite.T(), RequireProps(test.props)) err := RequireProps(test.props)
test.errCheck(suite.T(), err, clues.ToCore(err))
} }
} }

View File

@ -4,8 +4,8 @@ go 1.19
require ( require (
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0 github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0
github.com/alcionai/clues v0.0.0-20230217203352-c3714e5e9013 github.com/alcionai/clues v0.0.0-20230314154528-c469e1adafb6
github.com/aws/aws-sdk-go v1.44.221 github.com/aws/aws-sdk-go v1.44.218
github.com/aws/aws-xray-sdk-go v1.8.1 github.com/aws/aws-xray-sdk-go v1.8.1
github.com/cenkalti/backoff/v4 v4.2.0 github.com/cenkalti/backoff/v4 v4.2.0
github.com/google/uuid v1.3.0 github.com/google/uuid v1.3.0

View File

@ -52,8 +52,8 @@ github.com/VividCortex/ewma v1.2.0 h1:f58SaIzcDXrSy3kWaHNvuJgJ3Nmz59Zji6XoJR/q1o
github.com/VividCortex/ewma v1.2.0/go.mod h1:nz4BbCtbLyFDeC9SUHbtcT5644juEuWfUAUnGx7j5l4= github.com/VividCortex/ewma v1.2.0/go.mod h1:nz4BbCtbLyFDeC9SUHbtcT5644juEuWfUAUnGx7j5l4=
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8= github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8=
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo= github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo=
github.com/alcionai/clues v0.0.0-20230217203352-c3714e5e9013 h1:WWQ6E8hnoITmc/adlEySSVo3SOVvo1xhJKO08A1YPYY= github.com/alcionai/clues v0.0.0-20230314154528-c469e1adafb6 h1:U3uDQhdiI8rkwV/56duel5zWN1XESPuf+xs3EviwGHA=
github.com/alcionai/clues v0.0.0-20230217203352-c3714e5e9013/go.mod h1:DeaMbAwDvYM6ZfPMR/GUl3hceqI5C8jIQ1lstjB2IW8= github.com/alcionai/clues v0.0.0-20230314154528-c469e1adafb6/go.mod h1:DeaMbAwDvYM6ZfPMR/GUl3hceqI5C8jIQ1lstjB2IW8=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
@ -62,8 +62,8 @@ github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk5
github.com/alessio/shellescape v1.4.1 h1:V7yhSDDn8LP4lc4jS8pFkt0zCnzVJlG5JXy9BVKJUX0= github.com/alessio/shellescape v1.4.1 h1:V7yhSDDn8LP4lc4jS8pFkt0zCnzVJlG5JXy9BVKJUX0=
github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY= github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY=
github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/aws/aws-sdk-go v1.44.221 h1:yndn4uvLolKXPoXIwKHhO5XtwlTnJfXLBKXs84C5+hQ= github.com/aws/aws-sdk-go v1.44.218 h1:p707+xOCazWhkSpZOeyhtTcg7Z+asxxvueGgYPSitn4=
github.com/aws/aws-sdk-go v1.44.221/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= github.com/aws/aws-sdk-go v1.44.218/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
github.com/aws/aws-xray-sdk-go v1.8.1 h1:O4pXV+hnCskaamGsZnFpzHyAmgPGusBMN6i7nnsy0Fo= github.com/aws/aws-xray-sdk-go v1.8.1 h1:O4pXV+hnCskaamGsZnFpzHyAmgPGusBMN6i7nnsy0Fo=
github.com/aws/aws-xray-sdk-go v1.8.1/go.mod h1:wMmVYzej3sykAttNBkXQHK/+clAPWTOrPiajEk7Cp3A= github.com/aws/aws-xray-sdk-go v1.8.1/go.mod h1:wMmVYzej3sykAttNBkXQHK/+clAPWTOrPiajEk7Cp3A=
github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8=

View File

@ -6,6 +6,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
) )
@ -58,7 +59,7 @@ func (suite *CommonConfigsSuite) TestUnionConfigs_string() {
t := suite.T() t := suite.T()
cs, err := common.UnionStringConfigs(test.ac, test.bc) cs, err := common.UnionStringConfigs(test.ac, test.bc)
test.errCheck(t, err) test.errCheck(t, err, clues.ToCore(err))
// remaining tests depend on error-free state // remaining tests depend on error-free state
if test.ac.err != nil || test.bc.err != nil { if test.ac.err != nil || test.bc.err != nil {
return return

View File

@ -3,6 +3,7 @@ package crash_test
import ( import (
"testing" "testing"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/crash" "github.com/alcionai/corso/src/internal/common/crash"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
@ -50,7 +51,8 @@ func (suite *CrashTestDummySuite) TestRecovery() {
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer func() { defer func() {
test.expect(t, crash.Recovery(ctx, recover())) err := crash.Recovery(ctx, recover())
test.expect(t, err, clues.ToCore(err))
flush() flush()
}() }()

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
) )
@ -48,14 +49,14 @@ func (suite *CommonTimeUnitSuite) TestParseTime() {
nowStr := now.Format(time.RFC3339Nano) nowStr := now.Format(time.RFC3339Nano)
result, err := common.ParseTime(nowStr) result, err := common.ParseTime(nowStr)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, now.UTC(), result) assert.Equal(t, now.UTC(), result)
_, err = common.ParseTime("") _, err = common.ParseTime("")
require.Error(t, err) require.Error(t, err, clues.ToCore(err))
_, err = common.ParseTime("flablabls") _, err = common.ParseTime("flablabls")
require.Error(t, err) require.Error(t, err, clues.ToCore(err))
} }
func (suite *CommonTimeUnitSuite) TestExtractTime() { func (suite *CommonTimeUnitSuite) TestExtractTime() {
@ -68,14 +69,14 @@ func (suite *CommonTimeUnitSuite) TestExtractTime() {
c, err := common.ParseTime(ts) c, err := common.ParseTime(ts)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return c return c
} }
parseT := func(v string) time.Time { parseT := func(v string) time.Time {
t, err := time.Parse(time.RFC3339, v) t, err := time.Parse(time.RFC3339, v)
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
return t return t
} }
@ -151,7 +152,7 @@ func (suite *CommonTimeUnitSuite) TestExtractTime() {
t := suite.T() t := suite.T()
result, err := common.ExtractTime(test.input) result, err := common.ExtractTime(test.input)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, test.expect, comparable(t, result, test.clippedFormat)) assert.Equal(t, test.expect, comparable(t, result, test.clippedFormat))
}) })
} }

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/exchange" "github.com/alcionai/corso/src/internal/connector/exchange"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/sharepoint" "github.com/alcionai/corso/src/internal/connector/sharepoint"
@ -109,7 +110,7 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestExchangeDataCollection
connector.UpdateStatus, connector.UpdateStatus,
control.Options{}, control.Options{},
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Empty(t, excludes) assert.Empty(t, excludes)
for range collections { for range collections {
@ -125,7 +126,7 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestExchangeDataCollection
for object := range col.Items(ctx, fault.New(true)) { for object := range col.Items(ctx, fault.New(true)) {
buf := &bytes.Buffer{} buf := &bytes.Buffer{}
_, err := buf.ReadFrom(object.ToReader()) _, err := buf.ReadFrom(object.ToReader())
assert.NoError(t, err, "received a buf.Read error") assert.NoError(t, err, "received a buf.Read error", clues.ToCore(err))
} }
} }
@ -211,7 +212,7 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestDataCollections_invali
nil, nil,
control.Options{}, control.Options{},
fault.New(true)) fault.New(true))
assert.Error(t, err) assert.Error(t, err, clues.ToCore(err))
assert.Empty(t, collections) assert.Empty(t, collections)
assert.Empty(t, excludes) assert.Empty(t, excludes)
}) })
@ -265,7 +266,7 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestSharePointDataCollecti
connector, connector,
control.Options{}, control.Options{},
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
// Not expecting excludes as this isn't an incremental backup. // Not expecting excludes as this isn't an incremental backup.
assert.Empty(t, excludes) assert.Empty(t, excludes)
@ -281,7 +282,7 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestSharePointDataCollecti
for object := range coll.Items(ctx, fault.New(true)) { for object := range coll.Items(ctx, fault.New(true)) {
buf := &bytes.Buffer{} buf := &bytes.Buffer{}
_, err := buf.ReadFrom(object.ToReader()) _, err := buf.ReadFrom(object.ToReader())
assert.NoError(t, err, "reading item") assert.NoError(t, err, "reading item", clues.ToCore(err))
} }
} }
@ -342,7 +343,7 @@ func (suite *ConnectorCreateSharePointCollectionIntegrationSuite) TestCreateShar
nil, nil,
control.Options{}, control.Options{},
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Len(t, cols, 1) assert.Len(t, cols, 1)
// No excludes yet as this isn't an incremental backup. // No excludes yet as this isn't an incremental backup.
assert.Empty(t, excludes) assert.Empty(t, excludes)
@ -376,7 +377,7 @@ func (suite *ConnectorCreateSharePointCollectionIntegrationSuite) TestCreateShar
nil, nil,
control.Options{}, control.Options{},
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Less(t, 0, len(cols)) assert.Less(t, 0, len(cols))
// No excludes yet as this isn't an incremental backup. // No excludes yet as this isn't an incremental backup.
assert.Empty(t, excludes) assert.Empty(t, excludes)
@ -388,7 +389,7 @@ func (suite *ConnectorCreateSharePointCollectionIntegrationSuite) TestCreateShar
t.Log("File: " + item.UUID()) t.Log("File: " + item.UUID())
bs, err := io.ReadAll(item.ToReader()) bs, err := io.ReadAll(item.ToReader())
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
t.Log(string(bs)) t.Log(string(bs))
} }
} }

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/graph/betasdk/models" "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -24,14 +25,14 @@ func (suite *BetaUnitSuite) TestBetaService_Adapter() {
t := suite.T() t := suite.T()
a := tester.NewMockM365Account(t) a := tester.NewMockM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
adpt, err := graph.CreateAdapter( adpt, err := graph.CreateAdapter(
m365.AzureTenantID, m365.AzureTenantID,
m365.AzureClientID, m365.AzureClientID,
m365.AzureClientSecret, m365.AzureClientSecret,
) )
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
service := NewBetaService(adpt) service := NewBetaService(adpt)
require.NotNil(t, service) require.NotNil(t, service)
@ -45,5 +46,5 @@ func (suite *BetaUnitSuite) TestBetaService_Adapter() {
byteArray, err := service.Serialize(testPage) byteArray, err := service.Serialize(testPage)
assert.NotEmpty(t, byteArray) assert.NotEmpty(t, byteArray)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
} }

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
) )
@ -64,7 +65,7 @@ func (suite *UsersUnitSuite) TestValidateUser() {
t := suite.T() t := suite.T()
got, err := validateUser(tt.args) got, err := validateUser(tt.args)
tt.errCheck(t, err) tt.errCheck(t, err, clues.ToCore(err))
assert.Equal(t, tt.want, got) assert.Equal(t, tt.want, got)
}) })
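The table-driven suites keep working because their check fields match testify's `assert.ErrorAssertionFunc`, whose variadic tail accepts the clue core alongside the error. A sketch of that pattern, with `validate` as a hypothetical stand-in for the function each row exercises:

```go
package example_test

import (
	"errors"
	"testing"

	"github.com/alcionai/clues"
	"github.com/stretchr/testify/assert"
)

// validate is a hypothetical function under test.
func validate(in string) error {
	if in == "" {
		return errors.New("empty input")
	}

	return nil
}

func TestValidateTableSketch(t *testing.T) {
	table := []struct {
		name     string
		input    string
		errCheck assert.ErrorAssertionFunc
	}{
		{name: "valid input", input: "user@example.com", errCheck: assert.NoError},
		{name: "empty input", input: "", errCheck: assert.Error},
	}

	for _, test := range table {
		t.Run(test.name, func(t *testing.T) {
			err := validate(test.input)
			// The variadic tail of ErrorAssertionFunc carries the clue core.
			test.errCheck(t, err, clues.ToCore(err))
		})
	}
}
```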

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/discovery" "github.com/alcionai/corso/src/internal/connector/discovery"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
@ -36,10 +37,10 @@ func (suite *DiscoveryIntegrationSuite) TestUsers() {
errs := fault.New(true) errs := fault.New(true)
users, err := discovery.Users(ctx, acct, errs) users, err := discovery.Users(ctx, acct, errs)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
ferrs := errs.Errors() ferrs := errs.Errors()
assert.NoError(t, ferrs.Failure) assert.NoError(t, ferrs.Failure, clues.ToCore(ferrs.Failure))
assert.Empty(t, ferrs.Recovered) assert.Empty(t, ferrs.Recovered)
assert.Less(t, 0, len(users)) assert.Less(t, 0, len(users))
@ -66,7 +67,7 @@ func (suite *DiscoveryIntegrationSuite) TestUsers_InvalidCredentials() {
AzureTenantID: "data", AzureTenantID: "data",
}, },
) )
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return a return a
}, },

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
@ -36,7 +37,7 @@ func (suite *ExchangeServiceSuite) SetupSuite() {
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
suite.credentials = m365 suite.credentials = m365
@ -44,7 +45,7 @@ func (suite *ExchangeServiceSuite) SetupSuite() {
m365.AzureTenantID, m365.AzureTenantID,
m365.AzureClientID, m365.AzureClientID,
m365.AzureClientSecret) m365.AzureClientSecret)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
suite.gs = graph.NewService(adpt) suite.gs = graph.NewService(adpt)
} }
@ -79,7 +80,7 @@ func (suite *ExchangeServiceSuite) TestOptionsForCalendars() {
for _, test := range tests { for _, test := range tests {
suite.Run(test.name, func() { suite.Run(test.name, func() {
_, err := optionsForCalendars(test.params) _, err := optionsForCalendars(test.params)
test.checkError(suite.T(), err) test.checkError(suite.T(), err, clues.ToCore(err))
}) })
} }
} }
@ -117,7 +118,7 @@ func (suite *ExchangeServiceSuite) TestOptionsForFolders() {
t := suite.T() t := suite.T()
config, err := optionsForMailFolders(test.params) config, err := optionsForMailFolders(test.params)
test.checkError(t, err) test.checkError(t, err, clues.ToCore(err))
if err == nil { if err == nil {
assert.Equal(t, test.expected, len(config.QueryParameters.Select)) assert.Equal(t, test.expected, len(config.QueryParameters.Select))
} }
@ -156,7 +157,7 @@ func (suite *ExchangeServiceSuite) TestOptionsForContacts() {
t := suite.T() t := suite.T()
options, err := optionsForContacts(test.params) options, err := optionsForContacts(test.params)
test.checkError(t, err) test.checkError(t, err, clues.ToCore(err))
if err == nil { if err == nil {
assert.Equal(t, test.expected, len(options.QueryParameters.Select)) assert.Equal(t, test.expected, len(options.QueryParameters.Select))
} }
@ -183,7 +184,7 @@ func (suite *ExchangeServiceSuite) TestHasAttachments() {
"This is testing", "This is testing",
) )
message, err := support.CreateMessageFromBytes(byteArray) message, err := support.CreateMessageFromBytes(byteArray)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return message.GetBody() return message.GetBody()
}, },
}, },
@ -193,7 +194,7 @@ func (suite *ExchangeServiceSuite) TestHasAttachments() {
getBodyable: func(t *testing.T) models.ItemBodyable { getBodyable: func(t *testing.T) models.ItemBodyable {
byteArray := mockconnector.GetMessageWithOneDriveAttachment("Test legacy") byteArray := mockconnector.GetMessageWithOneDriveAttachment("Test legacy")
message, err := support.CreateMessageFromBytes(byteArray) message, err := support.CreateMessageFromBytes(byteArray)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return message.GetBody() return message.GetBody()
}, },
}, },

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
@ -129,7 +130,7 @@ func (suite *EventsAPIUnitSuite) TestEventInfo() {
event, err = support.CreateEventFromBytes(bytes) event, err = support.CreateEventFromBytes(bytes)
) )
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
return event, &details.ExchangeInfo{ return event, &details.ExchangeInfo{
ItemType: details.ExchangeEvent, ItemType: details.ExchangeEvent,

View File

@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -123,7 +124,8 @@ var (
func (suite *FolderCacheUnitSuite) TestCheckIDAndName() { func (suite *FolderCacheUnitSuite) TestCheckIDAndName() {
for _, test := range containerCheckTests { for _, test := range containerCheckTests {
suite.Run(test.name, func() { suite.Run(test.name, func() {
test.check(suite.T(), checkIDAndName(test.c)) err := checkIDAndName(test.c)
test.check(suite.T(), err, clues.ToCore(err))
}) })
} }
} }
@ -154,7 +156,8 @@ func (suite *FolderCacheUnitSuite) TestCheckRequiredValues() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
test.check(suite.T(), checkRequiredValues(test.c)) err := checkRequiredValues(test.c)
test.check(suite.T(), err, clues.ToCore(err))
}) })
} }
} }
@ -230,7 +233,8 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
fc := newContainerResolver() fc := newContainerResolver()
test.check(suite.T(), fc.addFolder(test.cf)) err := fc.addFolder(test.cf)
test.check(suite.T(), err, clues.ToCore(err))
}) })
} }
} }
@ -355,7 +359,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestDepthLimit() {
suite.Run(test.name, func() { suite.Run(test.name, func() {
resolver, containers := resolverWithContainers(test.numContainers, false) resolver, containers := resolverWithContainers(test.numContainers, false)
_, _, err := resolver.IDToPath(ctx, containers[len(containers)-1].id, false) _, _, err := resolver.IDToPath(ctx, containers[len(containers)-1].id, false)
test.check(suite.T(), err) test.check(suite.T(), err, clues.ToCore(err))
}) })
} }
} }
@ -366,7 +370,8 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestPopulatePaths() {
t := suite.T() t := suite.T()
require.NoError(t, suite.fc.populatePaths(ctx, false)) err := suite.fc.populatePaths(ctx, false)
require.NoError(t, err, clues.ToCore(err))
items := suite.fc.Items() items := suite.fc.Items()
gotPaths := make([]string, 0, len(items)) gotPaths := make([]string, 0, len(items))
@ -392,7 +397,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderNoPathsCached
t := suite.T() t := suite.T()
p, l, err := suite.fc.IDToPath(ctx, c.id, false) p, l, err := suite.fc.IDToPath(ctx, c.id, false)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, c.expectedPath, p.String()) assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String()) assert.Equal(t, c.expectedLocation, l.String())
}) })
@ -408,7 +413,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderNoPathsCached
t := suite.T() t := suite.T()
p, l, err := suite.fcWithID.IDToPath(ctx, c.id, true) p, l, err := suite.fcWithID.IDToPath(ctx, c.id, true)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, c.expectedPath, p.String()) assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String()) assert.Equal(t, c.expectedLocation, l.String())
}) })
@ -423,14 +428,14 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderCachesPaths()
c := suite.allContainers[len(suite.allContainers)-1] c := suite.allContainers[len(suite.allContainers)-1]
p, l, err := suite.fc.IDToPath(ctx, c.id, false) p, l, err := suite.fc.IDToPath(ctx, c.id, false)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, c.expectedPath, p.String()) assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String()) assert.Equal(t, c.expectedLocation, l.String())
c.parentID = "foo" c.parentID = "foo"
p, l, err = suite.fc.IDToPath(ctx, c.id, false) p, l, err = suite.fc.IDToPath(ctx, c.id, false)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, c.expectedPath, p.String()) assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String()) assert.Equal(t, c.expectedLocation, l.String())
} }
@ -443,14 +448,14 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderCachesPaths_u
c := suite.containersWithID[len(suite.containersWithID)-1] c := suite.containersWithID[len(suite.containersWithID)-1]
p, l, err := suite.fcWithID.IDToPath(ctx, c.id, true) p, l, err := suite.fcWithID.IDToPath(ctx, c.id, true)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, c.expectedPath, p.String()) assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String()) assert.Equal(t, c.expectedLocation, l.String())
c.parentID = "foo" c.parentID = "foo"
p, l, err = suite.fcWithID.IDToPath(ctx, c.id, true) p, l, err = suite.fcWithID.IDToPath(ctx, c.id, true)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, c.expectedPath, p.String()) assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String()) assert.Equal(t, c.expectedLocation, l.String())
} }
@ -466,7 +471,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsParentN
delete(suite.fc.cache, almostLast.id) delete(suite.fc.cache, almostLast.id)
_, _, err := suite.fc.IDToPath(ctx, last.id, false) _, _, err := suite.fc.IDToPath(ctx, last.id, false)
assert.Error(t, err) assert.Error(t, err, clues.ToCore(err))
} }
func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsNotFound() { func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsNotFound() {
@ -476,7 +481,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsNotFoun
t := suite.T() t := suite.T()
_, _, err := suite.fc.IDToPath(ctx, "foo", false) _, _, err := suite.fc.IDToPath(ctx, "foo", false)
assert.Error(t, err) assert.Error(t, err, clues.ToCore(err))
} }
func (suite *ConfiguredFolderCacheUnitSuite) TestAddToCache() { func (suite *ConfiguredFolderCacheUnitSuite) TestAddToCache() {
@ -495,12 +500,14 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestAddToCache() {
m.expectedLocation = stdpath.Join(last.expectedPath, m.displayName) m.expectedLocation = stdpath.Join(last.expectedPath, m.displayName)
require.Empty(t, suite.fc.DestinationNameToID(dest), "destination not yet added to cache") require.Empty(t, suite.fc.DestinationNameToID(dest), "destination not yet added to cache")
require.NoError(t, suite.fc.AddToCache(ctx, m, false))
err := suite.fc.AddToCache(ctx, m, false)
require.NoError(t, err, clues.ToCore(err))
require.Empty(t, suite.fc.DestinationNameToID(dest), require.Empty(t, suite.fc.DestinationNameToID(dest),
"destination id from cache, still empty, because this is not a calendar") "destination id from cache, still empty, because this is not a calendar")
p, l, err := suite.fc.IDToPath(ctx, m.id, false) p, l, err := suite.fc.IDToPath(ctx, m.id, false)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, m.expectedPath, p.String()) assert.Equal(t, m.expectedPath, p.String())
assert.Equal(t, m.expectedLocation, l.String()) assert.Equal(t, m.expectedLocation, l.String())
} }
@ -531,7 +538,7 @@ func (suite *FolderCacheIntegrationSuite) SetupSuite() {
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
suite.credentials = m365 suite.credentials = m365
@ -539,11 +546,9 @@ func (suite *FolderCacheIntegrationSuite) SetupSuite() {
m365.AzureTenantID, m365.AzureTenantID,
m365.AzureClientID, m365.AzureClientID,
m365.AzureClientSecret) m365.AzureClientSecret)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
suite.gs = graph.NewService(adpt) suite.gs = graph.NewService(adpt)
require.NoError(suite.T(), err)
} }
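The one non-mechanical edit in the hunk above is dropping the second `require.NoError(suite.T(), err)`: it re-checked the same `err` that the adapter creation already required, so once the first check carries the clue core the duplicate adds nothing. A compact sketch of the resulting setup shape, with `newAdapter` as a hypothetical stand-in for `graph.CreateAdapter`:

```go
package example_test

import (
	"testing"

	"github.com/alcionai/clues"
	"github.com/stretchr/testify/require"
)

// fakeAdapter and newAdapter are hypothetical stand-ins for the graph adapter
// and graph.CreateAdapter used in the suite setup above.
type fakeAdapter struct{}

func newAdapter(tenantID, clientID, secret string) (*fakeAdapter, error) {
	return &fakeAdapter{}, nil
}

func TestSetupSketch(t *testing.T) {
	adpt, err := newAdapter("tenant", "client", "secret")
	require.NoError(t, err, clues.ToCore(err))

	// A trailing require.NoError(t, err) here would re-check the value
	// already required above, so the cleanup removes it.
	_ = adpt
}
```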
// Testing to ensure that the cache system works in multiple different environments // Testing to ensure that the cache system works in multiple different environments
@ -553,7 +558,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
a := tester.NewM365Account(suite.T()) a := tester.NewM365Account(suite.T())
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
var ( var (
user = tester.M365UserID(suite.T()) user = tester.M365UserID(suite.T())
@ -578,7 +583,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
path.EmailCategory, path.EmailCategory,
false, false,
"Griffindor", "Croix") "Griffindor", "Croix")
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return pth return pth
}, },
@ -590,7 +595,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
path.EmailCategory, path.EmailCategory,
false, false,
"Griffindor", "Felicius") "Griffindor", "Felicius")
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return pth return pth
}, },
@ -606,7 +611,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
path.ContactsCategory, path.ContactsCategory,
false, false,
"HufflePuff") "HufflePuff")
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return pth return pth
}, },
@ -618,7 +623,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
path.ContactsCategory, path.ContactsCategory,
false, false,
"Ravenclaw") "Ravenclaw")
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return pth return pth
}, },
@ -635,7 +640,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
path.EventsCategory, path.EventsCategory,
false, false,
"Durmstrang") "Durmstrang")
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return pth return pth
}, },
@ -647,7 +652,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
path.EventsCategory, path.EventsCategory,
false, false,
"Beauxbatons") "Beauxbatons")
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return pth return pth
}, },
@ -666,12 +671,12 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
folderName, folderName,
directoryCaches, directoryCaches,
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
resolver := directoryCaches[test.category] resolver := directoryCaches[test.category]
_, _, err = resolver.IDToPath(ctx, folderID, test.useIDForPath) _, _, err = resolver.IDToPath(ctx, folderID, test.useIDForPath)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
parentContainer := folderName parentContainer := folderName
if test.useIDForPath { if test.useIDForPath {
@ -685,10 +690,10 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
parentContainer, parentContainer,
directoryCaches, directoryCaches,
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
_, _, err = resolver.IDToPath(ctx, secondID, test.useIDForPath) _, _, err = resolver.IDToPath(ctx, secondID, test.useIDForPath)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
p := stdpath.Join(test.folderPrefix, parentContainer) p := stdpath.Join(test.folderPrefix, parentContainer)
_, ok := resolver.PathInCache(p) _, ok := resolver.PathInCache(p)

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/exchange/api" "github.com/alcionai/corso/src/internal/connector/exchange/api"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
@ -177,12 +178,12 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
entries, entries,
func(cos *support.ConnectorOperationStatus) {}, func(cos *support.ConnectorOperationStatus) {},
) )
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
cdps, err := parseMetadataCollections(ctx, []data.RestoreCollection{ cdps, err := parseMetadataCollections(ctx, []data.RestoreCollection{
data.NotFoundRestoreCollection{Collection: coll}, data.NotFoundRestoreCollection{Collection: coll},
}, fault.New(true)) }, fault.New(true))
test.expectError(t, err) test.expectError(t, err, clues.ToCore(err))
emails := cdps[path.EmailCategory] emails := cdps[path.EmailCategory]
@ -242,7 +243,7 @@ func (suite *DataCollectionsIntegrationSuite) TestMailFetch() {
acct, err = tester.NewM365Account(suite.T()).M365Config() acct, err = tester.NewM365Account(suite.T()).M365Config()
) )
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
tests := []struct { tests := []struct {
name string name string
@ -274,7 +275,7 @@ func (suite *DataCollectionsIntegrationSuite) TestMailFetch() {
control.Options{}, control.Options{},
func(status *support.ConnectorOperationStatus) {}, func(status *support.ConnectorOperationStatus) {},
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
for _, c := range collections { for _, c := range collections {
if c.FullPath().Service() == path.ExchangeMetadataService { if c.FullPath().Service() == path.ExchangeMetadataService {
@ -302,7 +303,7 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
acct, err = tester.NewM365Account(suite.T()).M365Config() acct, err = tester.NewM365Account(suite.T()).M365Config()
) )
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
tests := []struct { tests := []struct {
name string name string
@ -344,7 +345,7 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
control.Options{}, control.Options{},
func(status *support.ConnectorOperationStatus) {}, func(status *support.ConnectorOperationStatus) {},
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Less(t, 1, len(collections), "retrieved metadata and data collections") assert.Less(t, 1, len(collections), "retrieved metadata and data collections")
var metadata data.BackupCollection var metadata data.BackupCollection
@ -360,7 +361,7 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
cdps, err := parseMetadataCollections(ctx, []data.RestoreCollection{ cdps, err := parseMetadataCollections(ctx, []data.RestoreCollection{
data.NotFoundRestoreCollection{Collection: metadata}, data.NotFoundRestoreCollection{Collection: metadata},
}, fault.New(true)) }, fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
dps := cdps[test.scope.Category().PathType()] dps := cdps[test.scope.Category().PathType()]
@ -375,7 +376,7 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
control.Options{}, control.Options{},
func(status *support.ConnectorOperationStatus) {}, func(status *support.ConnectorOperationStatus) {},
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
// TODO(keepers): this isn't a very useful test at the moment. It needs to // TODO(keepers): this isn't a very useful test at the moment. It needs to
// investigate the items in the original and delta collections to at least // investigate the items in the original and delta collections to at least
@ -407,7 +408,7 @@ func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression()
) )
acct, err := tester.NewM365Account(t).M365Config() acct, err := tester.NewM365Account(t).M365Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
sel := selectors.NewExchangeBackup(users) sel := selectors.NewExchangeBackup(users)
sel.Include(sel.MailFolders([]string{DefaultMailFolder}, selectors.PrefixMatch())) sel.Include(sel.MailFolders([]string{DefaultMailFolder}, selectors.PrefixMatch()))
@ -421,7 +422,7 @@ func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression()
control.Options{}, control.Options{},
newStatusUpdater(t, &wg), newStatusUpdater(t, &wg),
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
wg.Add(len(collections)) wg.Add(len(collections))
@ -437,7 +438,7 @@ func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression()
buf := &bytes.Buffer{} buf := &bytes.Buffer{}
read, err := buf.ReadFrom(stream.ToReader()) read, err := buf.ReadFrom(stream.ToReader())
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.NotZero(t, read) assert.NotZero(t, read)
if isMetadata { if isMetadata {
@ -446,7 +447,7 @@ func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression()
message, err := support.CreateMessageFromBytes(buf.Bytes()) message, err := support.CreateMessageFromBytes(buf.Bytes())
assert.NotNil(t, message) assert.NotNil(t, message)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
} }
}) })
} }
@ -462,7 +463,7 @@ func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression
defer flush() defer flush()
acct, err := tester.NewM365Account(suite.T()).M365Config() acct, err := tester.NewM365Account(suite.T()).M365Config()
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
users := []string{suite.user} users := []string{suite.user}
@ -494,7 +495,7 @@ func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression
control.Options{}, control.Options{},
newStatusUpdater(t, &wg), newStatusUpdater(t, &wg),
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
wg.Add(len(edcs)) wg.Add(len(edcs))
@ -508,7 +509,7 @@ func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression
for stream := range edc.Items(ctx, fault.New(true)) { for stream := range edc.Items(ctx, fault.New(true)) {
buf := &bytes.Buffer{} buf := &bytes.Buffer{}
read, err := buf.ReadFrom(stream.ToReader()) read, err := buf.ReadFrom(stream.ToReader())
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.NotZero(t, read) assert.NotZero(t, read)
if isMetadata { if isMetadata {
@ -517,7 +518,7 @@ func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression
contact, err := support.CreateContactFromBytes(buf.Bytes()) contact, err := support.CreateContactFromBytes(buf.Bytes())
assert.NotNil(t, contact) assert.NotNil(t, contact)
assert.NoError(t, err, "error on converting contact bytes: "+buf.String()) assert.NoError(t, err, "converting contact bytes: "+buf.String(), clues.ToCore(err))
count++ count++
} }
@ -541,12 +542,12 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
defer flush() defer flush()
acct, err := tester.NewM365Account(suite.T()).M365Config() acct, err := tester.NewM365Account(suite.T()).M365Config()
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
users := []string{suite.user} users := []string{suite.user}
ac, err := api.NewClient(acct) ac, err := api.NewClient(acct)
require.NoError(suite.T(), err, "creating client") require.NoError(suite.T(), err, "creating client", clues.ToCore(err))
var ( var (
calID string calID string
@ -565,7 +566,8 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
return nil return nil
} }
require.NoError(suite.T(), ac.Events().EnumerateContainers(ctx, suite.user, DefaultCalendar, fn, fault.New(true))) err = ac.Events().EnumerateContainers(ctx, suite.user, DefaultCalendar, fn, fault.New(true))
require.NoError(suite.T(), err, clues.ToCore(err))
tests := []struct { tests := []struct {
name, expected string name, expected string
@ -605,7 +607,7 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
control.Options{}, control.Options{},
newStatusUpdater(t, &wg), newStatusUpdater(t, &wg),
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.Len(t, collections, 2) require.Len(t, collections, 2)
wg.Add(len(collections)) wg.Add(len(collections))
@ -624,7 +626,7 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
buf := &bytes.Buffer{} buf := &bytes.Buffer{}
read, err := buf.ReadFrom(item.ToReader()) read, err := buf.ReadFrom(item.ToReader())
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.NotZero(t, read) assert.NotZero(t, read)
if isMetadata { if isMetadata {
@ -633,7 +635,7 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
event, err := support.CreateEventFromBytes(buf.Bytes()) event, err := support.CreateEventFromBytes(buf.Bytes())
assert.NotNil(t, event) assert.NotNil(t, event)
assert.NoError(t, err, "creating event from bytes: "+buf.String()) assert.NoError(t, err, "creating event from bytes: "+buf.String(), clues.ToCore(err))
} }
} }

View File

@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -60,7 +61,7 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataReader_Valid() {
// Read the message using the `ExchangeData` reader and validate it matches what we set // Read the message using the `ExchangeData` reader and validate it matches what we set
buf := &bytes.Buffer{} buf := &bytes.Buffer{}
_, err := buf.ReadFrom(ed.ToReader()) _, err := buf.ReadFrom(ed.ToReader())
assert.Nil(suite.T(), err, "received a buf.Read error") assert.NoError(suite.T(), err, clues.ToCore(err))
assert.Equal(suite.T(), buf.Bytes(), m) assert.Equal(suite.T(), buf.Bytes(), m)
assert.Equal(suite.T(), description, ed.UUID()) assert.Equal(suite.T(), description, ed.UUID())
} }
@ -77,7 +78,7 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataReader_Empty() {
received, err := buf.ReadFrom(ed.ToReader()) received, err := buf.ReadFrom(ed.ToReader())
assert.Equal(t, expected, received) assert.Equal(t, expected, received)
assert.Nil(t, err, "received buf.Readfrom error ") assert.NoError(t, err, clues.ToCore(err))
} }
func (suite *ExchangeDataCollectionSuite) TestExchangeData_FullPath() { func (suite *ExchangeDataCollectionSuite) TestExchangeData_FullPath() {
@ -93,7 +94,7 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeData_FullPath() {
path.EmailCategory, path.EmailCategory,
false, false,
folder) folder)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
edc := Collection{ edc := Collection{
user: user, user: user,
@ -117,7 +118,7 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataCollection_NewExchange
path.EmailCategory, path.EmailCategory,
false, false,
folder) folder)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
edc := Collection{ edc := Collection{
user: name, user: name,
@ -129,11 +130,11 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataCollection_NewExchange
func (suite *ExchangeDataCollectionSuite) TestNewCollection_state() { func (suite *ExchangeDataCollectionSuite) TestNewCollection_state() {
fooP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "foo") fooP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "foo")
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
barP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "bar") barP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "bar")
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
locP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "human-readable") locP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "human-readable")
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
table := []struct { table := []struct {
name string name string
@ -198,7 +199,7 @@ func (suite *ExchangeDataCollectionSuite) TestGetItemWithRetries() {
name: "happy", name: "happy",
items: &mockItemer{}, items: &mockItemer{},
expectErr: func(t *testing.T, err error) { expectErr: func(t *testing.T, err error) {
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
}, },
expectGetCalls: 1, expectGetCalls: 1,
}, },
@ -206,7 +207,7 @@ func (suite *ExchangeDataCollectionSuite) TestGetItemWithRetries() {
name: "an error", name: "an error",
items: &mockItemer{getErr: assert.AnError}, items: &mockItemer{getErr: assert.AnError},
expectErr: func(t *testing.T, err error) { expectErr: func(t *testing.T, err error) {
assert.Error(t, err) assert.Error(t, err, clues.ToCore(err))
}, },
expectGetCalls: 3, expectGetCalls: 3,
}, },

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/exchange/api" "github.com/alcionai/corso/src/internal/connector/exchange/api"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
@ -35,7 +36,7 @@ func (suite *CacheResolverSuite) SetupSuite() {
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
suite.credentials = m365 suite.credentials = m365
} }
@ -45,10 +46,10 @@ func (suite *CacheResolverSuite) TestPopulate() {
defer flush() defer flush()
ac, err := api.NewClient(suite.credentials) ac, err := api.NewClient(suite.credentials)
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
cal, err := ac.Events().GetContainerByID(ctx, tester.M365UserID(suite.T()), DefaultCalendar) cal, err := ac.Events().GetContainerByID(ctx, tester.M365UserID(suite.T()), DefaultCalendar)
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
eventFunc := func(t *testing.T) graph.ContainerResolver { eventFunc := func(t *testing.T) graph.ContainerResolver {
return &eventCalendarCache{ return &eventCalendarCache{
@ -119,9 +120,10 @@ func (suite *CacheResolverSuite) TestPopulate() {
for _, test := range tests { for _, test := range tests {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
resolver := test.resolverFunc(t) resolver := test.resolverFunc(t)
require.NoError(t, resolver.Populate(ctx, fault.New(true), test.root, test.basePath))
err := resolver.Populate(ctx, fault.New(true), test.root, test.basePath)
require.NoError(t, err, clues.ToCore(err))
_, isFound := resolver.PathInCache(test.folderInCache) _, isFound := resolver.PathInCache(test.folderInCache)
test.canFind(t, isFound) test.canFind(t, isFound)

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
@ -25,7 +26,7 @@ func (suite *ExchangeIteratorSuite) TestDisplayable() {
t := suite.T() t := suite.T()
bytes := mockconnector.GetMockContactBytes("Displayable") bytes := mockconnector.GetMockContactBytes("Displayable")
contact, err := support.CreateContactFromBytes(bytes) contact, err := support.CreateContactFromBytes(bytes)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
aDisplayable, ok := contact.(graph.Displayable) aDisplayable, ok := contact.(graph.Displayable)
assert.True(t, ok) assert.True(t, ok)
@ -37,7 +38,7 @@ func (suite *ExchangeIteratorSuite) TestDescendable() {
t := suite.T() t := suite.T()
bytes := mockconnector.GetMockMessageBytes("Descendable") bytes := mockconnector.GetMockMessageBytes("Descendable")
message, err := support.CreateMessageFromBytes(bytes) message, err := support.CreateMessageFromBytes(bytes)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
aDescendable, ok := message.(graph.Descendable) aDescendable, ok := message.(graph.Descendable)
assert.True(t, ok) assert.True(t, ok)

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/exchange/api" "github.com/alcionai/corso/src/internal/connector/exchange/api"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
@ -46,7 +47,7 @@ func (suite *MailFolderCacheIntegrationSuite) SetupSuite() {
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
suite.credentials = m365 suite.credentials = m365
} }
@ -83,7 +84,7 @@ func (suite *MailFolderCacheIntegrationSuite) TestDeltaFetch() {
t := suite.T() t := suite.T()
ac, err := api.NewClient(suite.credentials) ac, err := api.NewClient(suite.credentials)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
acm := ac.Mail() acm := ac.Mail()
@ -93,10 +94,11 @@ func (suite *MailFolderCacheIntegrationSuite) TestDeltaFetch() {
getter: acm, getter: acm,
} }
require.NoError(t, mfc.Populate(ctx, fault.New(true), test.root, test.path...)) err = mfc.Populate(ctx, fault.New(true), test.root, test.path...)
require.NoError(t, err, clues.ToCore(err))
p, l, err := mfc.IDToPath(ctx, testFolderID, true) p, l, err := mfc.IDToPath(ctx, testFolderID, true)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
t.Logf("Path: %s\n", p.String()) t.Logf("Path: %s\n", p.String())
t.Logf("Location: %s\n", l.String()) t.Logf("Location: %s\n", l.String())

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/exchange/api" "github.com/alcionai/corso/src/internal/connector/exchange/api"
@ -44,18 +45,16 @@ func (suite *ExchangeRestoreSuite) SetupSuite() {
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
suite.credentials = m365 suite.credentials = m365
suite.ac, err = api.NewClient(m365) suite.ac, err = api.NewClient(m365)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
adpt, err := graph.CreateAdapter(m365.AzureTenantID, m365.AzureClientID, m365.AzureClientSecret) adpt, err := graph.CreateAdapter(m365.AzureTenantID, m365.AzureClientID, m365.AzureClientSecret)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
suite.gs = graph.NewService(adpt) suite.gs = graph.NewService(adpt)
require.NoError(suite.T(), err)
} }
// TestRestoreContact ensures contact object can be created, placed into // TestRestoreContact ensures contact object can be created, placed into
@ -72,14 +71,14 @@ func (suite *ExchangeRestoreSuite) TestRestoreContact() {
) )
aFolder, err := suite.ac.Contacts().CreateContactFolder(ctx, userID, folderName) aFolder, err := suite.ac.Contacts().CreateContactFolder(ctx, userID, folderName)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
folderID := ptr.Val(aFolder.GetId()) folderID := ptr.Val(aFolder.GetId())
defer func() { defer func() {
// Remove the folder containing contact prior to exiting test // Remove the folder containing contact prior to exiting test
err = suite.ac.Contacts().DeleteContainer(ctx, userID, folderID) err = suite.ac.Contacts().DeleteContainer(ctx, userID, folderID)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
}() }()
info, err := RestoreExchangeContact( info, err := RestoreExchangeContact(
@ -89,7 +88,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreContact() {
control.Copy, control.Copy,
folderID, folderID,
userID) userID)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, info, "contact item info") assert.NotNil(t, info, "contact item info")
} }
@ -106,14 +105,14 @@ func (suite *ExchangeRestoreSuite) TestRestoreEvent() {
) )
calendar, err := suite.ac.Events().CreateCalendar(ctx, userID, name) calendar, err := suite.ac.Events().CreateCalendar(ctx, userID, name)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
calendarID := ptr.Val(calendar.GetId()) calendarID := ptr.Val(calendar.GetId())
defer func() { defer func() {
// Removes calendar containing events created during the test // Removes calendar containing events created during the test
err = suite.ac.Events().DeleteContainer(ctx, userID, calendarID) err = suite.ac.Events().DeleteContainer(ctx, userID, calendarID)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
}() }()
info, err := RestoreExchangeEvent(ctx, info, err := RestoreExchangeEvent(ctx,
@ -123,7 +122,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreEvent() {
calendarID, calendarID,
userID, userID,
fault.New(true)) fault.New(true))
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, info, "event item info") assert.NotNil(t, info, "event item info")
} }
@ -136,10 +135,10 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
t := suite.T() t := suite.T()
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
service, err := createService(m365) service, err := createService(m365)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
deleters := map[path.CategoryType]containerDeleter{ deleters := map[path.CategoryType]containerDeleter{
path.EmailCategory: suite.ac.Mail(), path.EmailCategory: suite.ac.Mail(),
@ -162,7 +161,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailObject: " + common.FormatSimpleDateTime(now) folderName := "TestRestoreMailObject: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId()) return ptr.Val(folder.GetId())
}, },
@ -174,7 +173,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailwithAttachment: " + common.FormatSimpleDateTime(now) folderName := "TestRestoreMailwithAttachment: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId()) return ptr.Val(folder.GetId())
}, },
@ -186,7 +185,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreEventItemAttachment: " + common.FormatSimpleDateTime(now) folderName := "TestRestoreEventItemAttachment: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId()) return ptr.Val(folder.GetId())
}, },
@ -198,7 +197,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailItemAttachment: " + common.FormatSimpleDateTime(now) folderName := "TestRestoreMailItemAttachment: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId()) return ptr.Val(folder.GetId())
}, },
@ -213,7 +212,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailBasicItemAttachment: " + common.FormatSimpleDateTime(now) folderName := "TestRestoreMailBasicItemAttachment: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId()) return ptr.Val(folder.GetId())
}, },
@ -228,7 +227,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "ItemMailAttachmentwAttachment " + common.FormatSimpleDateTime(now) folderName := "ItemMailAttachmentwAttachment " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId()) return ptr.Val(folder.GetId())
}, },
@ -243,7 +242,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "ItemMailAttachment_Contact " + common.FormatSimpleDateTime(now) folderName := "ItemMailAttachment_Contact " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId()) return ptr.Val(folder.GetId())
}, },
@ -255,7 +254,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreNestedEventItemAttachment: " + common.FormatSimpleDateTime(now) folderName := "TestRestoreNestedEventItemAttachment: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId()) return ptr.Val(folder.GetId())
}, },
@ -267,7 +266,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailwithLargeAttachment: " + common.FormatSimpleDateTime(now) folderName := "TestRestoreMailwithLargeAttachment: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId()) return ptr.Val(folder.GetId())
}, },
@ -279,7 +278,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailwithAttachments: " + common.FormatSimpleDateTime(now) folderName := "TestRestoreMailwithAttachments: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId()) return ptr.Val(folder.GetId())
}, },
@ -291,7 +290,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailwithReferenceAttachment: " + common.FormatSimpleDateTime(now) folderName := "TestRestoreMailwithReferenceAttachment: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId()) return ptr.Val(folder.GetId())
}, },
@ -304,7 +303,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreContactObject: " + common.FormatSimpleDateTime(now) folderName := "TestRestoreContactObject: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Contacts().CreateContactFolder(ctx, userID, folderName) folder, err := suite.ac.Contacts().CreateContactFolder(ctx, userID, folderName)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId()) return ptr.Val(folder.GetId())
}, },
@ -316,7 +315,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
calendarName := "TestRestoreEventObject: " + common.FormatSimpleDateTime(now) calendarName := "TestRestoreEventObject: " + common.FormatSimpleDateTime(now)
calendar, err := suite.ac.Events().CreateCalendar(ctx, userID, calendarName) calendar, err := suite.ac.Events().CreateCalendar(ctx, userID, calendarName)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return ptr.Val(calendar.GetId()) return ptr.Val(calendar.GetId())
}, },
@ -328,7 +327,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
calendarName := "TestRestoreEventObject_" + common.FormatSimpleDateTime(now) calendarName := "TestRestoreEventObject_" + common.FormatSimpleDateTime(now)
calendar, err := suite.ac.Events().CreateCalendar(ctx, userID, calendarName) calendar, err := suite.ac.Events().CreateCalendar(ctx, userID, calendarName)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return ptr.Val(calendar.GetId()) return ptr.Val(calendar.GetId())
}, },
@ -352,10 +351,12 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination, destination,
userID, userID,
fault.New(true)) fault.New(true))
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, info, "item info was not populated") assert.NotNil(t, info, "item info was not populated")
assert.NotNil(t, deleters) assert.NotNil(t, deleters)
assert.NoError(t, deleters[test.category].DeleteContainer(ctx, userID, destination))
err = deleters[test.category].DeleteContainer(ctx, userID, destination)
assert.NoError(t, err, clues.ToCore(err))
}) })
} }
} }

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/exchange/api" "github.com/alcionai/corso/src/internal/connector/exchange/api"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
@ -110,7 +111,7 @@ func TestServiceIteratorsSuite(t *testing.T) {
func (suite *ServiceIteratorsSuite) SetupSuite() { func (suite *ServiceIteratorsSuite) SetupSuite() {
a := tester.NewMockM365Account(suite.T()) a := tester.NewMockM365Account(suite.T())
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
suite.creds = m365 suite.creds = m365
} }
@ -308,7 +309,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
dps, dps,
control.Options{FailFast: test.failFast}, control.Options{FailFast: test.failFast},
fault.New(test.failFast)) fault.New(test.failFast))
test.expectErr(t, err) test.expectErr(t, err, clues.ToCore(err))
// collection assertions // collection assertions
@ -463,7 +464,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_repea
dps, dps,
control.Options{FailFast: true}, control.Options{FailFast: true},
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
// collection assertions // collection assertions
@ -536,7 +537,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_incre
prevPath := func(t *testing.T, at ...string) path.Path { prevPath := func(t *testing.T, at ...string) path.Path {
p, err := path.Build(tenantID, userID, path.ExchangeService, cat, false, at...) p, err := path.Build(tenantID, userID, path.ExchangeService, cat, false, at...)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return p return p
} }
@ -815,7 +816,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_incre
test.dps, test.dps,
control.Options{}, control.Options{},
fault.New(true)) fault.New(true))
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
metadatas := 0 metadatas := 0
for _, c := range collections { for _, c := range collections {

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
@ -30,7 +31,7 @@ func (suite *BetaClientSuite) SetupSuite() {
t := suite.T() t := suite.T()
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
suite.credentials = m365 suite.credentials = m365
} }
@ -43,7 +44,7 @@ func (suite *BetaClientSuite) TestCreateBetaClient() {
suite.credentials.AzureClientSecret, suite.credentials.AzureClientSecret,
) )
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
client := NewBetaClient(adpt) client := NewBetaClient(adpt)
assert.NotNil(t, client) assert.NotNil(t, client)
@ -63,7 +64,7 @@ func (suite *BetaClientSuite) TestBasicClientGetFunctionality() {
suite.credentials.AzureTenantID, suite.credentials.AzureTenantID,
suite.credentials.AzureClientID, suite.credentials.AzureClientID,
suite.credentials.AzureClientSecret) suite.credentials.AzureClientSecret)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
client := NewBetaClient(adpt) client := NewBetaClient(adpt)
require.NotNil(t, client) require.NotNil(t, client)
@ -74,7 +75,7 @@ func (suite *BetaClientSuite) TestBasicClientGetFunctionality() {
collection, err := client.SitesById(siteID).Pages().Get(ctx, nil) collection, err := client.SitesById(siteID).Pages().Get(ctx, nil)
// Ensures that the client is able to receive data from beta // Ensures that the client is able to receive data from beta
// Not Registered Error: content type application/json does not have a factory registered to be parsed // Not Registered Error: content type application/json does not have a factory registered to be parsed
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
for _, page := range collection.GetValue() { for _, page := range collection.GetValue() {
assert.NotNil(t, page, "betasdk call for page does not return value.") assert.NotNil(t, page, "betasdk call for page does not return value.")

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph/metadata" "github.com/alcionai/corso/src/internal/connector/graph/metadata"
"github.com/alcionai/corso/src/internal/connector/onedrive" "github.com/alcionai/corso/src/internal/connector/onedrive"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -96,7 +97,7 @@ func (suite *MetadataUnitSuite) TestIsMetadataFile_Files_MetaSuffixes() {
test.category, test.category,
true, true,
"file"+ext) "file"+ext)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
test.expected(t, metadata.IsMetadataFile(p), "extension %s", ext) test.expected(t, metadata.IsMetadataFile(p), "extension %s", ext)
}) })
@ -117,7 +118,7 @@ func (suite *MetadataUnitSuite) TestIsMetadataFile_Files_NotMetaSuffixes() {
test.category, test.category,
true, true,
"file"+ext) "file"+ext)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Falsef(t, metadata.IsMetadataFile(p), "extension %s", ext) assert.Falsef(t, metadata.IsMetadataFile(p), "extension %s", ext)
}) })
@ -140,7 +141,7 @@ func (suite *MetadataUnitSuite) TestIsMetadataFile_Directories() {
test.category, test.category,
false, false,
"file"+ext) "file"+ext)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Falsef(t, metadata.IsMetadataFile(p), "extension %s", ext) assert.Falsef(t, metadata.IsMetadataFile(p), "extension %s", ext)
}) })

View File

@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
@ -34,7 +35,7 @@ func (suite *MetadataCollectionUnitSuite) TestFullPath() {
path.EmailCategory, path.EmailCategory,
false, false,
"foo") "foo")
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
c := NewMetadataCollection(p, nil, nil) c := NewMetadataCollection(p, nil, nil)
@ -76,7 +77,7 @@ func (suite *MetadataCollectionUnitSuite) TestItems() {
path.EmailCategory, path.EmailCategory,
false, false,
"foo") "foo")
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
c := NewMetadataCollection( c := NewMetadataCollection(
p, p,
@ -94,7 +95,7 @@ func (suite *MetadataCollectionUnitSuite) TestItems() {
gotNames = append(gotNames, s.UUID()) gotNames = append(gotNames, s.UUID())
buf, err := io.ReadAll(s.ToReader()) buf, err := io.ReadAll(s.ToReader())
if !assert.NoError(t, err) { if !assert.NoError(t, err, clues.ToCore(err)) {
continue continue
} }
@ -168,7 +169,7 @@ func (suite *MetadataCollectionUnitSuite) TestMakeMetadataCollection() {
[]MetadataCollectionEntry{test.metadata}, []MetadataCollectionEntry{test.metadata},
func(*support.ConnectorOperationStatus) {}) func(*support.ConnectorOperationStatus) {})
test.errCheck(t, err) test.errCheck(t, err, clues.ToCore(err))
if err != nil { if err != nil {
return return
} }
@ -187,7 +188,7 @@ func (suite *MetadataCollectionUnitSuite) TestMakeMetadataCollection() {
itemCount++ itemCount++
err := decoder.Decode(&gotMap) err := decoder.Decode(&gotMap)
if !assert.NoError(t, err) { if !assert.NoError(t, err, clues.ToCore(err)) {
continue continue
} }
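
The table-driven cases above (test.expectErr, test.errCheck) need no signature change: assert.NoError and assert.Error already satisfy testify's assert.ErrorAssertionFunc, whose variadic msgAndArgs absorbs the clues.ToCore(err) argument. A minimal sketch under that assumption, with a hypothetical table:

    package example

    import (
    	"errors"
    	"testing"

    	"github.com/alcionai/clues"
    	"github.com/stretchr/testify/assert"
    )

    func TestErrCheckSketch(t *testing.T) {
    	table := []struct {
    		name     string
    		run      func() error
    		errCheck assert.ErrorAssertionFunc
    	}{
    		{name: "no error", run: func() error { return nil }, errCheck: assert.NoError},
    		{name: "error", run: func() error { return errors.New("bad") }, errCheck: assert.Error},
    	}

    	for _, test := range table {
    		t.Run(test.name, func(t *testing.T) {
    			err := test.run()
    			// clues.ToCore(err) passes through the variadic msgAndArgs,
    			// so structured error data appears whenever the check fails.
    			test.errCheck(t, err, clues.ToCore(err))
    		})
    	}
    }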

View File

@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
) )
@ -27,7 +28,7 @@ func (suite *GraphUnitSuite) SetupSuite() {
t := suite.T() t := suite.T()
a := tester.NewMockM365Account(t) a := tester.NewMockM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
suite.credentials = m365 suite.credentials = m365
} }
@ -39,7 +40,7 @@ func (suite *GraphUnitSuite) TestCreateAdapter() {
suite.credentials.AzureClientID, suite.credentials.AzureClientID,
suite.credentials.AzureClientSecret) suite.credentials.AzureClientSecret)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, adpt) assert.NotNil(t, adpt)
} }
@ -81,7 +82,7 @@ func (suite *GraphUnitSuite) TestSerializationEndPoint() {
suite.credentials.AzureTenantID, suite.credentials.AzureTenantID,
suite.credentials.AzureClientID, suite.credentials.AzureClientID,
suite.credentials.AzureClientSecret) suite.credentials.AzureClientSecret)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
serv := NewService(adpt) serv := NewService(adpt)
email := models.NewMessage() email := models.NewMessage()
@ -89,7 +90,7 @@ func (suite *GraphUnitSuite) TestSerializationEndPoint() {
email.SetSubject(&subject) email.SetSubject(&subject)
byteArray, err := serv.Serialize(email) byteArray, err := serv.Serialize(email)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, byteArray) assert.NotNil(t, byteArray)
t.Log(string(byteArray)) t.Log(string(byteArray))
} }

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -53,7 +54,7 @@ func (suite *DisconnectedGraphConnectorSuite) TestBadConnection() {
AzureTenantID: "data", AzureTenantID: "data",
}, },
) )
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return a return a
}, },
}, },
@ -222,11 +223,11 @@ func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs_allServices
t := suite.T() t := suite.T()
err := verifyBackupInputs(test.excludes(t), sites) err := verifyBackupInputs(test.excludes(t), sites)
test.checkError(t, err) test.checkError(t, err, clues.ToCore(err))
err = verifyBackupInputs(test.filters(t), sites) err = verifyBackupInputs(test.filters(t), sites)
test.checkError(t, err) test.checkError(t, err, clues.ToCore(err))
err = verifyBackupInputs(test.includes(t), sites) err = verifyBackupInputs(test.includes(t), sites)
test.checkError(t, err) test.checkError(t, err, clues.ToCore(err))
}) })
} }
} }

View File

@ -16,6 +16,7 @@ import (
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
"golang.org/x/exp/slices" "golang.org/x/exp/slices"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/connector/onedrive" "github.com/alcionai/corso/src/internal/connector/onedrive"
@ -38,7 +39,7 @@ func mustToDataLayerPath(
isItem bool, isItem bool,
) path.Path { ) path.Path {
res, err := path.Build(tenant, resourceOwner, service, category, isItem, elements...) res, err := path.Build(tenant, resourceOwner, service, category, isItem, elements...)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return res return res
} }
@ -617,12 +618,12 @@ func compareExchangeEmail(
item data.Stream, item data.Stream,
) { ) {
itemData, err := io.ReadAll(item.ToReader()) itemData, err := io.ReadAll(item.ToReader())
if !assert.NoError(t, err, "reading collection item: %s", item.UUID()) { if !assert.NoError(t, err, "reading collection item", item.UUID(), clues.ToCore(err)) {
return return
} }
itemMessage, err := support.CreateMessageFromBytes(itemData) itemMessage, err := support.CreateMessageFromBytes(itemData)
if !assert.NoError(t, err, "deserializing backed up message") { if !assert.NoError(t, err, "deserializing backed up message", clues.ToCore(err)) {
return return
} }
@ -632,7 +633,7 @@ func compareExchangeEmail(
} }
expectedMessage, err := support.CreateMessageFromBytes(expectedBytes) expectedMessage, err := support.CreateMessageFromBytes(expectedBytes)
assert.NoError(t, err, "deserializing source message") assert.NoError(t, err, "deserializing source message", clues.ToCore(err))
checkMessage(t, expectedMessage, itemMessage) checkMessage(t, expectedMessage, itemMessage)
} }
@ -644,12 +645,12 @@ func compareExchangeContact(
item data.Stream, item data.Stream,
) { ) {
itemData, err := io.ReadAll(item.ToReader()) itemData, err := io.ReadAll(item.ToReader())
if !assert.NoError(t, err, "reading collection item: %s", item.UUID()) { if !assert.NoError(t, err, "reading collection item", item.UUID(), clues.ToCore(err)) {
return return
} }
itemContact, err := support.CreateContactFromBytes(itemData) itemContact, err := support.CreateContactFromBytes(itemData)
if !assert.NoError(t, err, "deserializing backed up contact") { if !assert.NoError(t, err, "deserializing backed up contact", clues.ToCore(err)) {
return return
} }
@ -672,12 +673,12 @@ func compareExchangeEvent(
item data.Stream, item data.Stream,
) { ) {
itemData, err := io.ReadAll(item.ToReader()) itemData, err := io.ReadAll(item.ToReader())
if !assert.NoError(t, err, "reading collection item: %s", item.UUID()) { if !assert.NoError(t, err, "reading collection item", item.UUID(), clues.ToCore(err)) {
return return
} }
itemEvent, err := support.CreateEventFromBytes(itemData) itemEvent, err := support.CreateEventFromBytes(itemData)
if !assert.NoError(t, err, "deserializing backed up contact") { if !assert.NoError(t, err, "deserializing backed up contact", clues.ToCore(err)) {
return return
} }
@ -687,7 +688,7 @@ func compareExchangeEvent(
} }
expectedEvent, err := support.CreateEventFromBytes(expectedBytes) expectedEvent, err := support.CreateEventFromBytes(expectedBytes)
assert.NoError(t, err, "deserializing source contact") assert.NoError(t, err, "deserializing source contact", clues.ToCore(err))
checkEvent(t, expectedEvent, itemEvent) checkEvent(t, expectedEvent, itemEvent)
} }
@ -735,7 +736,7 @@ func compareOneDriveItem(
} }
buf, err := io.ReadAll(item.ToReader()) buf, err := io.ReadAll(item.ToReader())
if !assert.NoError(t, err) { if !assert.NoError(t, err, clues.ToCore(err)) {
return true return true
} }
@ -749,7 +750,7 @@ func compareOneDriveItem(
) )
err = json.Unmarshal(buf, &itemMeta) err = json.Unmarshal(buf, &itemMeta)
if !assert.NoErrorf(t, err, "unmarshalling retrieved metadata for file %s", name) { if !assert.NoError(t, err, "unmarshalling retrieved metadata for file", name, clues.ToCore(err)) {
return true return true
} }
@ -769,7 +770,7 @@ func compareOneDriveItem(
} }
err = json.Unmarshal(expectedData, &expectedMeta) err = json.Unmarshal(expectedData, &expectedMeta)
if !assert.NoError(t, err, "unmarshalling expected metadata") { if !assert.NoError(t, err, "unmarshalling expected metadata", clues.ToCore(err)) {
return true return true
} }
@ -797,12 +798,12 @@ func compareOneDriveItem(
var fileData testOneDriveData var fileData testOneDriveData
err = json.Unmarshal(buf, &fileData) err = json.Unmarshal(buf, &fileData)
if !assert.NoErrorf(t, err, "unmarshalling file data for file %s", name) { if !assert.NoError(t, err, "unmarshalling file data for file", name, clues.ToCore(err)) {
return true return true
} }
expectedData := expected[fileData.FileName] expectedData := expected[fileData.FileName]
if !assert.NotNil(t, expectedData, "unexpected file with name %s", name) { if !assert.NotNil(t, expectedData, "unexpected file with name", name) {
return true return true
} }
@ -1201,7 +1202,7 @@ func loadConnector(ctx context.Context, t *testing.T, itemClient *http.Client, r
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
connector, err := NewGraphConnector(ctx, itemClient, a, r, fault.New(true)) connector, err := NewGraphConnector(ctx, itemClient, a, r, fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return connector return connector
} }
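
The compare* helpers above keep the guard-clause form: assert inside the condition, return early on failure, and still pass clues.ToCore(err) so the structured error is logged. A minimal sketch with a hypothetical readItem helper:

    package example

    import (
    	"io"
    	"testing"

    	"github.com/alcionai/clues"
    	"github.com/stretchr/testify/assert"
    )

    // readItem mirrors the guard-clause pattern in the compare* functions:
    // bail out on a failed read so later comparisons never run against
    // partial data, while clues.ToCore(err) still reports the cause.
    func readItem(t *testing.T, r io.Reader) ([]byte, bool) {
    	buf, err := io.ReadAll(r)
    	if !assert.NoError(t, err, "reading collection item", clues.ToCore(err)) {
    		return nil, false
    	}

    	return buf, true
    }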

View File

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive" "github.com/alcionai/corso/src/internal/connector/onedrive"
@ -69,7 +70,7 @@ func onedriveItemWithData(
} }
serialized, err := json.Marshal(content) serialized, err := json.Marshal(content)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return itemInfo{ return itemInfo{
name: name, name: name,
@ -89,7 +90,7 @@ func onedriveMetadata(
testMeta := getMetadata(fileName, perm, permUseID) testMeta := getMetadata(fileName, perm, permUseID)
testMetaJSON, err := json.Marshal(testMeta) testMetaJSON, err := json.Marshal(testMeta)
require.NoError(t, err, "marshalling metadata") require.NoError(t, err, "marshalling metadata", clues.ToCore(err))
return itemInfo{ return itemInfo{
name: itemID, name: itemID,
@ -128,11 +129,11 @@ func (suite *GraphConnectorOneDriveIntegrationSuite) SetupSuite() {
suite.acct = tester.NewM365Account(suite.T()) suite.acct = tester.NewM365Account(suite.T())
user, err := suite.connector.Owners.Users().GetByID(ctx, suite.user) user, err := suite.connector.Owners.Users().GetByID(ctx, suite.user)
require.NoErrorf(suite.T(), err, "fetching user %s", suite.user) require.NoError(suite.T(), err, "fetching user", suite.user, clues.ToCore(err))
suite.userID = ptr.Val(user.GetId()) suite.userID = ptr.Val(user.GetId())
secondaryUser, err := suite.connector.Owners.Users().GetByID(ctx, suite.secondaryUser) secondaryUser, err := suite.connector.Owners.Users().GetByID(ctx, suite.secondaryUser)
require.NoErrorf(suite.T(), err, "fetching user %s", suite.secondaryUser) require.NoError(suite.T(), err, "fetching user", suite.secondaryUser, clues.ToCore(err))
suite.secondaryUserID = ptr.Val(secondaryUser.GetId()) suite.secondaryUserID = ptr.Val(secondaryUser.GetId())
tester.LogTimeOfTest(suite.T()) tester.LogTimeOfTest(suite.T())

View File

@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
@ -126,7 +127,7 @@ func (suite *GraphConnectorUnitSuite) TestUnionSiteIDsAndWebURLs() {
defer flush() defer flush()
result, err := gc.UnionSiteIDsAndWebURLs(ctx, test.ids, test.urls, fault.New(true)) result, err := gc.UnionSiteIDsAndWebURLs(ctx, test.ids, test.urls, fault.New(true))
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.ElementsMatch(t, test.expect, result) assert.ElementsMatch(t, test.expect, result)
}) })
} }
@ -181,13 +182,13 @@ func (suite *GraphConnectorIntegrationSuite) TestSetTenantSites() {
t := suite.T() t := suite.T()
service, err := newConnector.createService() service, err := newConnector.createService()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
newConnector.Service = service newConnector.Service = service
assert.Equal(t, 0, len(newConnector.Sites)) assert.Equal(t, 0, len(newConnector.Sites))
err = newConnector.setTenantSites(ctx, fault.New(true)) err = newConnector.setTenantSites(ctx, fault.New(true))
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.Less(t, 0, len(newConnector.Sites)) assert.Less(t, 0, len(newConnector.Sites))
for _, site := range newConnector.Sites { for _, site := range newConnector.Sites {
@ -220,7 +221,7 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreFailsBadService() {
}, },
nil, nil,
fault.New(true)) fault.New(true))
assert.Error(t, err) assert.Error(t, err, clues.ToCore(err))
assert.NotNil(t, deets) assert.NotNil(t, deets)
status := suite.connector.AwaitStatus() status := suite.connector.AwaitStatus()
@ -299,7 +300,7 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
}, },
test.col, test.col,
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, deets) assert.NotNil(t, deets)
stats := suite.connector.AwaitStatus() stats := suite.connector.AwaitStatus()
@ -327,7 +328,7 @@ func mustGetDefaultDriveID(
err = graph.Wrap(ctx, err, "retrieving drive") err = graph.Wrap(ctx, err, "retrieving drive")
} }
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
id := ptr.Val(d.GetId()) id := ptr.Val(d.GetId())
require.NotEmpty(t, id) require.NotEmpty(t, id)
@ -397,7 +398,7 @@ func runRestore(
config.opts, config.opts,
collections, collections,
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, deets) assert.NotNil(t, deets)
status := restoreGC.AwaitStatus() status := restoreGC.AwaitStatus()
@ -451,7 +452,7 @@ func runBackupAndCompare(
nil, nil,
config.opts, config.opts,
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
// No excludes yet because this isn't an incremental backup. // No excludes yet because this isn't an incremental backup.
assert.Empty(t, excludes) assert.Empty(t, excludes)
@ -934,7 +935,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
}, },
collections, collections,
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, deets) require.NotNil(t, deets)
status := restoreGC.AwaitStatus() status := restoreGC.AwaitStatus()
@ -963,7 +964,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
ToggleFeatures: control.Toggles{EnablePermissionsBackup: true}, ToggleFeatures: control.Toggles{EnablePermissionsBackup: true},
}, },
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
// No excludes yet because this isn't an incremental backup. // No excludes yet because this isn't an incremental backup.
assert.Empty(t, excludes) assert.Empty(t, excludes)

View File

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
@ -35,7 +36,7 @@ func (suite *MockExchangeCollectionSuite) TestMockExchangeCollection() {
for item := range mdc.Items(ctx, fault.New(true)) { for item := range mdc.Items(ctx, fault.New(true)) {
_, err := io.ReadAll(item.ToReader()) _, err := io.ReadAll(item.ToReader())
assert.NoError(suite.T(), err) assert.NoError(suite.T(), err, clues.ToCore(err))
messagesRead++ messagesRead++
} }
@ -52,7 +53,7 @@ func (suite *MockExchangeCollectionSuite) TestMockExchangeCollectionItemSize() {
for item := range mdc.Items(ctx, fault.New(true)) { for item := range mdc.Items(ctx, fault.New(true)) {
buf, err := io.ReadAll(item.ToReader()) buf, err := io.ReadAll(item.ToReader())
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.Implements(t, (*data.StreamSize)(nil), item) assert.Implements(t, (*data.StreamSize)(nil), item)
s := item.(data.StreamSize) s := item.(data.StreamSize)
@ -72,11 +73,11 @@ func (suite *MockExchangeCollectionSuite) TestMockExchangeCollection_NewExchange
for stream := range mdc.Items(ctx, fault.New(true)) { for stream := range mdc.Items(ctx, fault.New(true)) {
_, err := buf.ReadFrom(stream.ToReader()) _, err := buf.ReadFrom(stream.ToReader())
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
byteArray := buf.Bytes() byteArray := buf.Bytes()
something, err := support.CreateFromBytes(byteArray, models.CreateMessageFromDiscriminatorValue) something, err := support.CreateFromBytes(byteArray, models.CreateMessageFromDiscriminatorValue)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, something) assert.NotNil(t, something)
} }
} }
@ -123,7 +124,7 @@ func (suite *MockExchangeDataSuite) TestMockExchangeData() {
assert.Equal(t, id, test.reader.UUID()) assert.Equal(t, id, test.reader.UUID())
buf, err := io.ReadAll(test.reader.ToReader()) buf, err := io.ReadAll(test.reader.ToReader())
test.check(t, err) test.check(t, err, clues.ToCore(err))
if err != nil { if err != nil {
return return
} }
@ -194,10 +195,10 @@ func (suite *MockExchangeDataSuite) TestMockByteHydration() {
temp := mockconnector.GetMockList(subject, "Artist", emptyMap) temp := mockconnector.GetMockList(subject, "Artist", emptyMap)
writer := kioser.NewJsonSerializationWriter() writer := kioser.NewJsonSerializationWriter()
err := writer.WriteObjectValue("", temp) err := writer.WriteObjectValue("", temp)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
bytes, err := writer.GetSerializedContent() bytes, err := writer.GetSerializedContent()
require.NoError(suite.T(), err) require.NoError(t, err, clues.ToCore(err))
_, err = support.CreateListFromBytes(bytes) _, err = support.CreateListFromBytes(bytes)
@ -208,7 +209,7 @@ func (suite *MockExchangeDataSuite) TestMockByteHydration() {
name: "SharePoint: List 6 Items", name: "SharePoint: List 6 Items",
transformation: func(t *testing.T) error { transformation: func(t *testing.T) error {
bytes, err := mockconnector.GetMockListBytes(subject) bytes, err := mockconnector.GetMockListBytes(subject)
require.NoError(suite.T(), err) require.NoError(t, err, clues.ToCore(err))
_, err = support.CreateListFromBytes(bytes) _, err = support.CreateListFromBytes(bytes)
return err return err
}, },
@ -229,7 +230,7 @@ func (suite *MockExchangeDataSuite) TestMockByteHydration() {
t := suite.T() t := suite.T()
err := test.transformation(t) err := test.transformation(t)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
}) })
} }
} }

View File

@ -10,6 +10,7 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -159,7 +160,7 @@ func GetMockListBytes(title string) ([]byte, error) {
// of the Mocked SharePoint List // of the Mocked SharePoint List
func GetMockListStream(t *testing.T, title string, numOfItems int) *MockListData { func GetMockListStream(t *testing.T, title string, numOfItems int) *MockListData {
byteArray, err := GetMockListBytes(title) byteArray, err := GetMockListBytes(title)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
listData := &MockListData{ listData := &MockListData{
ID: title, ID: title,

View File

@ -11,6 +11,7 @@ import (
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
) )
@ -693,10 +694,10 @@ func GetMockMessageWithNestedItemAttachmentEvent(subject string) []byte {
func GetMockMessageWithNestedItemAttachmentMail(t *testing.T, nested []byte, subject string) []byte { func GetMockMessageWithNestedItemAttachmentMail(t *testing.T, nested []byte, subject string) []byte {
base := GetMockMessageBytes(subject) base := GetMockMessageBytes(subject)
message, err := hydrateMessage(base) message, err := hydrateMessage(base)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
nestedMessage, err := hydrateMessage(nested) nestedMessage, err := hydrateMessage(nested)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
iaNode := models.NewItemAttachment() iaNode := models.NewItemAttachment()
attachmentSize := int32(len(nested)) attachmentSize := int32(len(nested))
@ -713,13 +714,13 @@ func GetMockMessageWithNestedItemAttachmentMail(t *testing.T, nested []byte, sub
func GetMockMessageWithNestedItemAttachmentContact(t *testing.T, nested []byte, subject string) []byte { func GetMockMessageWithNestedItemAttachmentContact(t *testing.T, nested []byte, subject string) []byte {
base := GetMockMessageBytes(subject) base := GetMockMessageBytes(subject)
message, err := hydrateMessage(base) message, err := hydrateMessage(base)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
parseNode, err := js.NewJsonParseNodeFactory().GetRootParseNode("application/json", nested) parseNode, err := js.NewJsonParseNodeFactory().GetRootParseNode("application/json", nested)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
anObject, err := parseNode.GetObjectValue(models.CreateContactFromDiscriminatorValue) anObject, err := parseNode.GetObjectValue(models.CreateContactFromDiscriminatorValue)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
contact := anObject.(models.Contactable) contact := anObject.(models.Contactable)
internalName := "Nested Contact" internalName := "Nested Contact"
@ -736,10 +737,10 @@ func GetMockMessageWithNestedItemAttachmentContact(t *testing.T, nested []byte,
func serialize(t *testing.T, item absser.Parsable) []byte { func serialize(t *testing.T, item absser.Parsable) []byte {
wtr := js.NewJsonSerializationWriter() wtr := js.NewJsonSerializationWriter()
err := wtr.WriteObjectValue("", item) err := wtr.WriteObjectValue("", item)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
byteArray, err := wtr.GetSerializedContent() byteArray, err := wtr.GetSerializedContent()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return byteArray return byteArray
} }

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive/api" "github.com/alcionai/corso/src/internal/connector/onedrive/api"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -23,11 +24,11 @@ func (suite *OneDriveAPISuite) SetupSuite() {
t := suite.T() t := suite.T()
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
suite.creds = m365 suite.creds = m365
adpt, err := graph.CreateAdapter(m365.AzureTenantID, m365.AzureClientID, m365.AzureClientSecret) adpt, err := graph.CreateAdapter(m365.AzureTenantID, m365.AzureClientID, m365.AzureClientSecret)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
suite.service = graph.NewService(adpt) suite.service = graph.NewService(adpt)
} }
@ -49,7 +50,7 @@ func (suite *OneDriveAPISuite) TestCreatePagerAndGetPage() {
siteID := tester.M365SiteID(t) siteID := tester.M365SiteID(t)
pager := api.NewSiteDrivePager(suite.service, siteID, []string{"name"}) pager := api.NewSiteDrivePager(suite.service, siteID, []string{"name"})
a, err := pager.GetPage(ctx) a, err := pager.GetPage(ctx)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, a) assert.NotNil(t, a)
} }
@ -61,7 +62,7 @@ func (suite *OneDriveAPISuite) TestGetDriveIDByName() {
siteID := tester.M365SiteID(t) siteID := tester.M365SiteID(t)
pager := api.NewSiteDrivePager(suite.service, siteID, []string{"id", "name"}) pager := api.NewSiteDrivePager(suite.service, siteID, []string{"id", "name"})
id, err := pager.GetDriveIDByName(ctx, "Documents") id, err := pager.GetDriveIDByName(ctx, "Documents")
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.NotEmpty(t, id) assert.NotEmpty(t, id)
} }
@ -73,9 +74,9 @@ func (suite *OneDriveAPISuite) TestGetDriveFolderByName() {
siteID := tester.M365SiteID(t) siteID := tester.M365SiteID(t)
pager := api.NewSiteDrivePager(suite.service, siteID, []string{"id", "name"}) pager := api.NewSiteDrivePager(suite.service, siteID, []string{"id", "name"})
id, err := pager.GetDriveIDByName(ctx, "Documents") id, err := pager.GetDriveIDByName(ctx, "Documents")
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, id) require.NotEmpty(t, id)
_, err = pager.GetFolderIDByName(ctx, id, "folder") _, err = pager.GetFolderIDByName(ctx, id, "folder")
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
} }

View File

@ -201,9 +201,9 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
) )
folderPath, err := GetCanonicalPath("drive/driveID1/root:/dir1/dir2/dir3", "tenant", "owner", test.source) folderPath, err := GetCanonicalPath("drive/driveID1/root:/dir1/dir2/dir3", "tenant", "owner", test.source)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
driveFolderPath, err := path.GetDriveFolderPath(folderPath) driveFolderPath, err := path.GetDriveFolderPath(folderPath)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
coll := NewCollection( coll := NewCollection(
graph.HTTPClient(graph.NoTimeout()), graph.HTTPClient(graph.NoTimeout()),
@ -302,7 +302,7 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
assert.Equal(t, testItemID+MetaFileSuffix, readItemMeta.UUID()) assert.Equal(t, testItemID+MetaFileSuffix, readItemMeta.UUID())
readMetaData, err := io.ReadAll(readItemMeta.ToReader()) readMetaData, err := io.ReadAll(readItemMeta.ToReader())
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
tm, err := json.Marshal(testItemMeta) tm, err := json.Marshal(testItemMeta)
if err != nil { if err != nil {
@ -350,7 +350,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadError() {
wg.Add(1) wg.Add(1)
folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source) folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
coll := NewCollection( coll := NewCollection(
graph.HTTPClient(graph.NoTimeout()), graph.HTTPClient(graph.NoTimeout()),
@ -393,7 +393,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadError() {
assert.True(t, ok) assert.True(t, ok)
_, err = io.ReadAll(collItem.ToReader()) _, err = io.ReadAll(collItem.ToReader())
assert.Error(t, err) assert.Error(t, err, clues.ToCore(err))
wg.Wait() wg.Wait()
@ -538,7 +538,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionPermissionBackupLatestModTim
wg.Add(1) wg.Add(1)
folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source) folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
coll := NewCollection( coll := NewCollection(
graph.HTTPClient(graph.NoTimeout()), graph.HTTPClient(graph.NoTimeout()),
@ -594,8 +594,9 @@ func (suite *CollectionUnitTestSuite) TestCollectionPermissionBackupLatestModTim
for _, i := range readItems { for _, i := range readItems {
if strings.HasSuffix(i.UUID(), MetaFileSuffix) { if strings.HasSuffix(i.UUID(), MetaFileSuffix) {
content, err := io.ReadAll(i.ToReader()) content, err := io.ReadAll(i.ToReader())
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.Equal(t, content, []byte("{}")) require.Equal(t, content, []byte("{}"))
im, ok := i.(data.StreamModTime) im, ok := i.(data.StreamModTime)
require.Equal(t, ok, true, "modtime interface") require.Equal(t, ok, true, "modtime interface")
require.Greater(t, im.ModTime(), mtime, "permissions time greater than mod time") require.Greater(t, im.ModTime(), mtime, "permissions time greater than mod time")

View File

@ -14,6 +14,7 @@ import (
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
gapi "github.com/alcionai/corso/src/internal/connector/graph/api" gapi "github.com/alcionai/corso/src/internal/connector/graph/api"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
@ -49,11 +50,11 @@ func getExpectedStatePathGenerator(
} else { } else {
require.Len(t, pths, 2, "invalid number of paths to getExpectedStatePathGenerator") require.Len(t, pths, 2, "invalid number of paths to getExpectedStatePathGenerator")
p2, err = GetCanonicalPath(base+pths[1], tenant, user, OneDriveSource) p2, err = GetCanonicalPath(base+pths[1], tenant, user, OneDriveSource)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
} }
p1, err = GetCanonicalPath(base+pths[0], tenant, user, OneDriveSource) p1, err = GetCanonicalPath(base+pths[0], tenant, user, OneDriveSource)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
switch state { switch state {
case data.NewState: case data.NewState:
@ -81,7 +82,7 @@ func getExpectedPathGenerator(t *testing.T,
) func(string) string { ) func(string) string {
return func(path string) string { return func(path string) string {
p, err := GetCanonicalPath(base+path, tenant, user, OneDriveSource) p, err := GetCanonicalPath(base+path, tenant, user, OneDriveSource)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return p.String() return p.String()
} }
@ -129,10 +130,11 @@ func (suite *OneDriveCollectionsUnitSuite) TestGetCanonicalPath() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
p := strings.Join(test.dir, "/") p := strings.Join(test.dir, "/")
result, err := GetCanonicalPath(p, tenant, resourceOwner, test.source) result, err := GetCanonicalPath(p, tenant, resourceOwner, test.source)
test.expectErr(t, err) test.expectErr(t, err, clues.ToCore(err))
if result != nil { if result != nil {
assert.Equal(t, test.expect, result.String()) assert.Equal(t, test.expect, result.String())
} }
@ -797,7 +799,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
itemCollection, itemCollection,
false, false,
errs) errs)
tt.expect(t, err) tt.expect(t, err, clues.ToCore(err))
assert.Equal(t, len(tt.expectedCollectionIDs), len(c.CollectionMap[driveID]), "total collections") assert.Equal(t, len(tt.expectedCollectionIDs), len(c.CollectionMap[driveID]), "total collections")
assert.Equal(t, tt.expectedItemCount, c.NumItems, "item count") assert.Equal(t, tt.expectedItemCount, c.NumItems, "item count")
assert.Equal(t, tt.expectedFileCount, c.NumFiles, "file count") assert.Equal(t, tt.expectedFileCount, c.NumFiles, "file count")
@ -1138,10 +1140,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
t := suite.T()
cols := []data.RestoreCollection{} cols := []data.RestoreCollection{}
for _, c := range test.cols { for _, c := range test.cols {
@ -1152,7 +1154,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
path.FilesCategory, path.FilesCategory,
c(), c(),
func(*support.ConnectorOperationStatus) {}) func(*support.ConnectorOperationStatus) {})
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
cols = append(cols, data.NotFoundRestoreCollection{Collection: mc}) cols = append(cols, data.NotFoundRestoreCollection{Collection: mc})
} }
@ -1241,7 +1243,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
path.FilesCategory, path.FilesCategory,
false, false,
) )
require.NoError(suite.T(), err, "making metadata path") require.NoError(suite.T(), err, "making metadata path", clues.ToCore(err))
driveID1 := uuid.NewString() driveID1 := uuid.NewString()
drive1 := models.NewDrive() drive1 := models.NewDrive()
@ -1918,7 +1920,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
}, },
func(*support.ConnectorOperationStatus) {}, func(*support.ConnectorOperationStatus) {},
) )
assert.NoError(t, err, "creating metadata collection") assert.NoError(t, err, "creating metadata collection", clues.ToCore(err))
prevMetadata := []data.RestoreCollection{data.NotFoundRestoreCollection{Collection: mc}} prevMetadata := []data.RestoreCollection{data.NotFoundRestoreCollection{Collection: mc}}
errs := fault.New(true) errs := fault.New(true)
@ -1947,7 +1949,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
data.NotFoundRestoreCollection{Collection: baseCol}, data.NotFoundRestoreCollection{Collection: baseCol},
}, },
fault.New(true)) fault.New(true))
if !assert.NoError(t, err, "deserializing metadata") { if !assert.NoError(t, err, "deserializing metadata", clues.ToCore(err)) {
continue continue
} }
@ -2201,7 +2203,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestCollectItems() {
test.prevDelta, test.prevDelta,
fault.New(true)) fault.New(true))
require.ErrorIs(t, err, test.err, "delta fetch err") require.ErrorIs(t, err, test.err, "delta fetch err", clues.ToCore(err))
require.Equal(t, test.deltaURL, delta.URL, "delta url") require.Equal(t, test.deltaURL, delta.URL, "delta url")
require.Equal(t, !test.prevDeltaSuccess, delta.Reset, "delta reset") require.Equal(t, !test.prevDeltaSuccess, delta.Reset, "delta reset")
}) })

View File

@ -313,7 +313,7 @@ func (suite *OneDriveUnitSuite) TestDrives() {
} }
drives, err := drives(ctx, pager, test.retry) drives, err := drives(ctx, pager, test.retry)
test.expectedErr(t, err) test.expectedErr(t, err, clues.ToCore(err))
assert.ElementsMatch(t, test.expectedResults, drives) assert.ElementsMatch(t, test.expectedResults, drives)
}) })
@ -352,10 +352,10 @@ func (suite *OneDriveSuite) TestCreateGetDeleteFolder() {
gs := loadTestService(t) gs := loadTestService(t)
pager, err := PagerForSource(OneDriveSource, gs, suite.userID, nil) pager, err := PagerForSource(OneDriveSource, gs, suite.userID, nil)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
drives, err := drives(ctx, pager, true) drives, err := drives(ctx, pager, true)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, drives) require.NotEmpty(t, drives)
// TODO: Verify the intended drive // TODO: Verify the intended drive
@ -371,7 +371,7 @@ func (suite *OneDriveSuite) TestCreateGetDeleteFolder() {
}() }()
folderID, err := CreateRestoreFolders(ctx, gs, driveID, folderElements) folderID, err := CreateRestoreFolders(ctx, gs, driveID, folderElements)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
folderIDs = append(folderIDs, folderID) folderIDs = append(folderIDs, folderID)
@ -379,7 +379,7 @@ func (suite *OneDriveSuite) TestCreateGetDeleteFolder() {
folderElements = append(folderElements, folderName2) folderElements = append(folderElements, folderName2)
folderID, err = CreateRestoreFolders(ctx, gs, driveID, folderElements) folderID, err = CreateRestoreFolders(ctx, gs, driveID, folderElements)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
folderIDs = append(folderIDs, folderID) folderIDs = append(folderIDs, folderID)
@ -402,10 +402,10 @@ func (suite *OneDriveSuite) TestCreateGetDeleteFolder() {
t := suite.T() t := suite.T()
pager, err := PagerForSource(OneDriveSource, gs, suite.userID, nil) pager, err := PagerForSource(OneDriveSource, gs, suite.userID, nil)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
allFolders, err := GetAllFolders(ctx, gs, pager, test.prefix, fault.New(true)) allFolders, err := GetAllFolders(ctx, gs, pager, test.prefix, fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
foundFolderIDs := []string{} foundFolderIDs := []string{}
@ -437,7 +437,7 @@ func (fm testFolderMatcher) Matches(path string) bool {
func (suite *OneDriveSuite) TestOneDriveNewCollections() { func (suite *OneDriveSuite) TestOneDriveNewCollections() {
creds, err := tester.NewM365Account(suite.T()).M365Config() creds, err := tester.NewM365Account(suite.T()).M365Config()
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
tests := []struct { tests := []struct {
name, user string name, user string
@ -478,7 +478,7 @@ func (suite *OneDriveSuite) TestOneDriveNewCollections() {
}) })
odcs, excludes, err := colls.Get(ctx, nil, fault.New(true)) odcs, excludes, err := colls.Get(ctx, nil, fault.New(true))
assert.NoError(t, err, clues.InErr(err)) assert.NoError(t, err, clues.ToCore(err))
// Don't expect excludes as this isn't an incremental backup. // Don't expect excludes as this isn't an incremental backup.
assert.Empty(t, excludes) assert.Empty(t, excludes)

View File

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
@ -45,10 +46,10 @@ func (suite *ItemIntegrationSuite) SetupSuite() {
suite.user = tester.SecondaryM365UserID(t) suite.user = tester.SecondaryM365UserID(t)
pager, err := PagerForSource(OneDriveSource, suite.service, suite.user, nil) pager, err := PagerForSource(OneDriveSource, suite.service, suite.user, nil)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
odDrives, err := drives(ctx, pager, true) odDrives, err := drives(ctx, pager, true)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
// Test Requirement 1: Need a drive // Test Requirement 1: Need a drive
require.Greaterf(t, len(odDrives), 0, "user %s does not have a drive", suite.user) require.Greaterf(t, len(odDrives), 0, "user %s does not have a drive", suite.user)
@ -99,7 +100,7 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
map[string]string{}, map[string]string{},
"", "",
fault.New(true)) fault.New(true))
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
// Test Requirement 2: Need a file // Test Requirement 2: Need a file
require.NotEmpty( require.NotEmpty(
@ -113,14 +114,15 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
// Read data for the file // Read data for the file
itemInfo, itemData, err := oneDriveItemReader(ctx, graph.HTTPClient(graph.NoTimeout()), driveItem) itemInfo, itemData, err := oneDriveItemReader(ctx, graph.HTTPClient(graph.NoTimeout()), driveItem)
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
require.NotNil(suite.T(), itemInfo.OneDrive) require.NotNil(suite.T(), itemInfo.OneDrive)
require.NotEmpty(suite.T(), itemInfo.OneDrive.ItemName) require.NotEmpty(suite.T(), itemInfo.OneDrive.ItemName)
size, err := io.Copy(io.Discard, itemData) size, err := io.Copy(io.Discard, itemData)
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
require.NotZero(suite.T(), size) require.NotZero(suite.T(), size)
require.Equal(suite.T(), size, itemInfo.OneDrive.Size) require.Equal(suite.T(), size, itemInfo.OneDrive.Size)
suite.T().Logf("Read %d bytes from file %s.", size, itemInfo.OneDrive.ItemName) suite.T().Logf("Read %d bytes from file %s.", size, itemInfo.OneDrive.ItemName)
} }
@ -150,11 +152,11 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
srv := suite.service srv := suite.service
root, err := srv.Client().DrivesById(test.driveID).Root().Get(ctx, nil) root, err := srv.Client().DrivesById(test.driveID).Root().Get(ctx, nil)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
// Test Requirement 2: "Test Folder" should exist // Test Requirement 2: "Test Folder" should exist
folder, err := getFolder(ctx, srv, test.driveID, ptr.Val(root.GetId()), "Test Folder") folder, err := getFolder(ctx, srv, test.driveID, ptr.Val(root.GetId()), "Test Folder")
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
newFolderName := "testfolder_" + common.FormatNow(common.SimpleTimeTesting) newFolderName := "testfolder_" + common.FormatNow(common.SimpleTimeTesting)
t.Logf("Test will create folder %s", newFolderName) t.Logf("Test will create folder %s", newFolderName)
@ -165,7 +167,7 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
test.driveID, test.driveID,
ptr.Val(folder.GetId()), ptr.Val(folder.GetId()),
newItem(newFolderName, true)) newItem(newFolderName, true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, newFolder.GetId()) require.NotNil(t, newFolder.GetId())
newItemName := "testItem_" + common.FormatNow(common.SimpleTimeTesting) newItemName := "testItem_" + common.FormatNow(common.SimpleTimeTesting)
@ -177,19 +179,19 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
test.driveID, test.driveID,
ptr.Val(newFolder.GetId()), ptr.Val(newFolder.GetId()),
newItem(newItemName, false)) newItem(newItemName, false))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, newItem.GetId()) require.NotNil(t, newItem.GetId())
// HACK: Leveraging this to test getFolder behavior for a file. `getFolder()` on the // HACK: Leveraging this to test getFolder behavior for a file. `getFolder()` on the
// newly created item should fail because it's a file not a folder // newly created item should fail because it's a file not a folder
_, err = getFolder(ctx, srv, test.driveID, ptr.Val(newFolder.GetId()), newItemName) _, err = getFolder(ctx, srv, test.driveID, ptr.Val(newFolder.GetId()), newItemName)
require.ErrorIs(t, err, errFolderNotFound) require.ErrorIs(t, err, errFolderNotFound, clues.ToCore(err))
// Initialize a 100KB mockDataProvider // Initialize a 100KB mockDataProvider
td, writeSize := mockDataReader(int64(100 * 1024)) td, writeSize := mockDataReader(int64(100 * 1024))
w, err := driveItemWriter(ctx, srv, test.driveID, ptr.Val(newItem.GetId()), writeSize) w, err := driveItemWriter(ctx, srv, test.driveID, ptr.Val(newItem.GetId()), writeSize)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
// Using a 32 KB buffer for the copy allows us to validate the // Using a 32 KB buffer for the copy allows us to validate the
// multi-part upload. `io.CopyBuffer` will only write 32 KB at // multi-part upload. `io.CopyBuffer` will only write 32 KB at
@ -197,7 +199,7 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
copyBuffer := make([]byte, 32*1024) copyBuffer := make([]byte, 32*1024)
size, err := io.CopyBuffer(w, td, copyBuffer) size, err := io.CopyBuffer(w, td, copyBuffer)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.Equal(t, writeSize, size) require.Equal(t, writeSize, size)
}) })
@ -232,15 +234,15 @@ func (suite *ItemIntegrationSuite) TestDriveGetFolder() {
srv := suite.service srv := suite.service
root, err := srv.Client().DrivesById(test.driveID).Root().Get(ctx, nil) root, err := srv.Client().DrivesById(test.driveID).Root().Get(ctx, nil)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
// Lookup a folder that doesn't exist // Lookup a folder that doesn't exist
_, err = getFolder(ctx, srv, test.driveID, ptr.Val(root.GetId()), "FolderDoesNotExist") _, err = getFolder(ctx, srv, test.driveID, ptr.Val(root.GetId()), "FolderDoesNotExist")
require.ErrorIs(t, err, errFolderNotFound) require.ErrorIs(t, err, errFolderNotFound, clues.ToCore(err))
// Lookup a folder that does exist // Lookup a folder that does exist
_, err = getFolder(ctx, srv, test.driveID, ptr.Val(root.GetId()), "") _, err = getFolder(ctx, srv, test.driveID, ptr.Val(root.GetId()), "")
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
}) })
} }
} }

View File

@ -3,6 +3,7 @@ package onedrive
import ( import (
"testing" "testing"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -134,7 +135,7 @@ func (suite *RestoreUnitSuite) TestAugmentRestorePaths() {
inPaths := []path.Path{} inPaths := []path.Path{}
for _, ps := range test.input { for _, ps := range test.input {
p, err := path.FromDataLayerPath(base+ps, true) p, err := path.FromDataLayerPath(base+ps, true)
require.NoError(t, err, "creating path") require.NoError(t, err, "creating path", clues.ToCore(err))
inPaths = append(inPaths, p) inPaths = append(inPaths, p)
} }
@ -142,13 +143,13 @@ func (suite *RestoreUnitSuite) TestAugmentRestorePaths() {
outPaths := []path.Path{} outPaths := []path.Path{}
for _, ps := range test.output { for _, ps := range test.output {
p, err := path.FromDataLayerPath(base+ps, true) p, err := path.FromDataLayerPath(base+ps, true)
require.NoError(t, err, "creating path") require.NoError(t, err, "creating path", clues.ToCore(err))
outPaths = append(outPaths, p) outPaths = append(outPaths, p)
} }
actual, err := AugmentRestorePaths(test.version, inPaths) actual, err := AugmentRestorePaths(test.version, inPaths)
require.NoError(t, err, "augmenting paths") require.NoError(t, err, "augmenting paths", clues.ToCore(err))
// Ordering of paths matter here as we need dirmeta files // Ordering of paths matter here as we need dirmeta files
// to show up before file in dir // to show up before file in dir

View File

@ -6,6 +6,7 @@ import (
msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go" msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -67,11 +68,12 @@ func (ods *oneDriveService) updateStatus(status *support.ConnectorOperationStatu
func loadTestService(t *testing.T) *oneDriveService { func loadTestService(t *testing.T) *oneDriveService {
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
service, err := NewOneDriveService(m365) service, err := NewOneDriveService(m365)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return service return service
} }
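
Constructor-style helpers like loadTestService above use require rather than assert, so a failed setup halts the test before a nil service is ever returned; clues.ToCore(err) then carries the structured cause into the failure output. A minimal sketch; newService and service are hypothetical stand-ins:

    package example

    import (
    	"testing"

    	"github.com/alcionai/clues"
    	"github.com/stretchr/testify/require"
    )

    type service struct{}

    // newService is a hypothetical constructor standing in for NewOneDriveService.
    func newService() (*service, error) {
    	return &service{}, nil
    }

    // loadService stops the test immediately on error (require, not assert),
    // and clues.ToCore(err) includes the structured cause in the failure output.
    func loadService(t *testing.T) *service {
    	svc, err := newService()
    	require.NoError(t, err, clues.ToCore(err))

    	return svc
    }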

View File

@ -5,6 +5,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/alcionai/clues"
discover "github.com/alcionai/corso/src/internal/connector/discovery/api" discover "github.com/alcionai/corso/src/internal/connector/discovery/api"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
@ -16,7 +17,7 @@ func createTestBetaService(t *testing.T, credentials account.M365Config) *discov
credentials.AzureClientID, credentials.AzureClientID,
credentials.AzureClientSecret, credentials.AzureClientSecret,
) )
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return discover.NewBetaService(adapter) return discover.NewBetaService(adapter)
} }

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
discover "github.com/alcionai/corso/src/internal/connector/discovery/api" discover "github.com/alcionai/corso/src/internal/connector/discovery/api"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
@ -32,7 +33,7 @@ func (suite *SharePointPageSuite) SetupSuite() {
suite.siteID = tester.M365SiteID(t) suite.siteID = tester.M365SiteID(t)
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
suite.creds = m365 suite.creds = m365
suite.service = createTestBetaService(t, suite.creds) suite.service = createTestBetaService(t, suite.creds)
@ -54,7 +55,7 @@ func (suite *SharePointPageSuite) TestFetchPages() {
t := suite.T() t := suite.T()
pgs, err := api.FetchPages(ctx, suite.service, suite.siteID) pgs, err := api.FetchPages(ctx, suite.service, suite.siteID)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
require.NotNil(t, pgs) require.NotNil(t, pgs)
assert.NotZero(t, len(pgs)) assert.NotZero(t, len(pgs))
@ -69,12 +70,12 @@ func (suite *SharePointPageSuite) TestGetSitePages() {
t := suite.T() t := suite.T()
tuples, err := api.FetchPages(ctx, suite.service, suite.siteID) tuples, err := api.FetchPages(ctx, suite.service, suite.siteID)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, tuples) require.NotNil(t, tuples)
jobs := []string{tuples[0].ID} jobs := []string{tuples[0].ID}
pages, err := api.GetSitePages(ctx, suite.service, suite.siteID, jobs, fault.New(true)) pages, err := api.GetSitePages(ctx, suite.service, suite.siteID, jobs, fault.New(true))
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.NotEmpty(t, pages) assert.NotEmpty(t, pages)
} }
@ -104,11 +105,12 @@ func (suite *SharePointPageSuite) TestRestoreSinglePage() {
destName, destName,
) )
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, info) require.NotNil(t, info)
// Clean Up // Clean Up
pageID := info.SharePoint.ParentPath pageID := info.SharePoint.ParentPath
err = api.DeleteSitePage(ctx, suite.service, suite.siteID, pageID) err = api.DeleteSitePage(ctx, suite.service, suite.siteID, pageID)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
} }

View File

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
@ -37,7 +38,7 @@ func (suite *SharePointCollectionSuite) SetupSuite() {
suite.siteID = tester.M365SiteID(t) suite.siteID = tester.M365SiteID(t)
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
suite.creds = m365 suite.creds = m365
} }
@ -61,7 +62,7 @@ func (suite *SharePointCollectionSuite) TestCollection_Item_Read() {
data: io.NopCloser(bytes.NewReader(m)), data: io.NopCloser(bytes.NewReader(m)),
} }
readData, err := io.ReadAll(sc.ToReader()) readData, err := io.ReadAll(sc.ToReader())
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, name, sc.id) assert.Equal(t, name, sc.id)
assert.Equal(t, readData, m) assert.Equal(t, readData, m)
@ -91,7 +92,7 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
path.ListsCategory, path.ListsCategory,
false, false,
dirRoot) dirRoot)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return dir return dir
}, },
@ -101,10 +102,10 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
listing.SetDisplayName(&name) listing.SetDisplayName(&name)
err := ow.WriteObjectValue("", listing) err := ow.WriteObjectValue("", listing)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
byteArray, err := ow.GetSerializedContent() byteArray, err := ow.GetSerializedContent()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
data := &Item{ data := &Item{
id: name, id: name,
@ -127,14 +128,14 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
path.PagesCategory, path.PagesCategory,
false, false,
dirRoot) dirRoot)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return dir return dir
}, },
getItem: func(t *testing.T, itemName string) *Item { getItem: func(t *testing.T, itemName string) *Item {
byteArray := mockconnector.GetMockPage(itemName) byteArray := mockconnector.GetMockPage(itemName)
page, err := support.CreatePageFromBytes(byteArray) page, err := support.CreatePageFromBytes(byteArray)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
data := &Item{ data := &Item{
id: itemName, id: itemName,
@ -186,7 +187,7 @@ func (suite *SharePointCollectionSuite) TestListCollection_Restore() {
testName := "MockListing" testName := "MockListing"
listing.SetDisplayName(&testName) listing.SetDisplayName(&testName)
byteArray, err := service.Serialize(listing) byteArray, err := service.Serialize(listing)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
listData := &Item{ listData := &Item{
id: testName, id: testName,
@ -197,7 +198,7 @@ func (suite *SharePointCollectionSuite) TestListCollection_Restore() {
destName := "Corso_Restore_" + common.FormatNow(common.SimpleTimeTesting) destName := "Corso_Restore_" + common.FormatNow(common.SimpleTimeTesting)
deets, err := restoreListItem(ctx, service, listData, suite.siteID, destName) deets, err := restoreListItem(ctx, service, listData, suite.siteID, destName)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
t.Logf("List created: %s\n", deets.SharePoint.ItemName) t.Logf("List created: %s\n", deets.SharePoint.ItemName)
// Clean-Up // Clean-Up
@ -209,7 +210,7 @@ func (suite *SharePointCollectionSuite) TestListCollection_Restore() {
for { for {
resp, err := builder.Get(ctx, nil) resp, err := builder.Get(ctx, nil)
assert.NoError(t, err, "getting site lists") assert.NoError(t, err, "getting site lists", clues.ToCore(err))
for _, temp := range resp.GetValue() { for _, temp := range resp.GetValue() {
if ptr.Val(temp.GetDisplayName()) == deets.SharePoint.ItemName { if ptr.Val(temp.GetDisplayName()) == deets.SharePoint.ItemName {
@ -230,7 +231,7 @@ func (suite *SharePointCollectionSuite) TestListCollection_Restore() {
if isFound { if isFound {
err := DeleteList(ctx, service, suite.siteID, deleteID) err := DeleteList(ctx, service, suite.siteID, deleteID)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
} }
} }
@ -245,17 +246,17 @@ func (suite *SharePointCollectionSuite) TestRestoreLocation() {
service := createTestService(t, suite.creds) service := createTestService(t, suite.creds)
rootFolder := "General_" + common.FormatNow(common.SimpleTimeTesting) rootFolder := "General_" + common.FormatNow(common.SimpleTimeTesting)
folderID, err := createRestoreFolders(ctx, service, suite.siteID, []string{rootFolder}) folderID, err := createRestoreFolders(ctx, service, suite.siteID, []string{rootFolder})
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
t.Log("FolderID: " + folderID) t.Log("FolderID: " + folderID)
_, err = createRestoreFolders(ctx, service, suite.siteID, []string{rootFolder, "Tsao"}) _, err = createRestoreFolders(ctx, service, suite.siteID, []string{rootFolder, "Tsao"})
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
// CleanUp // CleanUp
siteDrive, err := service.Client().SitesById(suite.siteID).Drive().Get(ctx, nil) siteDrive, err := service.Client().SitesById(suite.siteID).Drive().Get(ctx, nil)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
driveID := ptr.Val(siteDrive.GetId()) driveID := ptr.Val(siteDrive.GetId())
err = onedrive.DeleteItem(ctx, service, driveID, folderID) err = onedrive.DeleteItem(ctx, service, driveID, folderID)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
} }

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive" "github.com/alcionai/corso/src/internal/connector/onedrive"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -131,7 +132,7 @@ func (suite *SharePointLibrariesUnitSuite) TestUpdateCollections() {
true, true,
fault.New(true)) fault.New(true))
test.expect(t, err) test.expect(t, err, clues.ToCore(err))
assert.Equal(t, len(test.expectedCollectionIDs), len(c.CollectionMap), "collection paths") assert.Equal(t, len(test.expectedCollectionIDs), len(c.CollectionMap), "collection paths")
assert.Equal(t, test.expectedItemCount, c.NumItems, "item count") assert.Equal(t, test.expectedItemCount, c.NumItems, "item count")
assert.Equal(t, test.expectedFileCount, c.NumFiles, "file count") assert.Equal(t, test.expectedFileCount, c.NumFiles, "file count")
@ -197,8 +198,9 @@ func (suite *SharePointPagesSuite) TestCollectPages() {
t := suite.T() t := suite.T()
siteID := tester.M365SiteID(t) siteID := tester.M365SiteID(t)
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
account, err := a.M365Config() account, err := a.M365Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
col, err := collectPages( col, err := collectPages(
ctx, ctx,
@ -208,6 +210,6 @@ func (suite *SharePointPagesSuite) TestCollectPages() {
&MockGraphService{}, &MockGraphService{},
control.Defaults(), control.Defaults(),
fault.New(true)) fault.New(true))
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.NotEmpty(t, col) assert.NotEmpty(t, col)
} }

View File

@ -6,6 +6,7 @@ import (
msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go" msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive" "github.com/alcionai/corso/src/internal/connector/onedrive"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
@ -52,7 +53,7 @@ func createTestService(t *testing.T, credentials account.M365Config) *graph.Serv
credentials.AzureClientID, credentials.AzureClientID,
credentials.AzureClientSecret, credentials.AzureClientSecret,
) )
require.NoError(t, err, "creating microsoft graph service for exchange") require.NoError(t, err, "creating microsoft graph service for exchange", clues.ToCore(err))
return graph.NewService(adapter) return graph.NewService(adapter)
} }
@ -62,7 +63,7 @@ func expectedPathAsSlice(t *testing.T, tenant, user string, rest ...string) []st
for _, r := range rest { for _, r := range rest {
p, err := onedrive.GetCanonicalPath(r, tenant, user, onedrive.SharePointSource) p, err := onedrive.GetCanonicalPath(r, tenant, user, onedrive.SharePointSource)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
res = append(res, p.String()) res = append(res, p.String())
} }

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
@ -21,7 +22,7 @@ func (suite *SharePointSuite) SetupSuite() {
t := suite.T() t := suite.T()
a := tester.NewM365Account(t) a := tester.NewM365Account(t)
m365, err := a.M365Config() m365, err := a.M365Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
suite.creds = m365 suite.creds = m365
} }
@ -54,11 +55,11 @@ func (suite *SharePointSuite) TestLoadList() {
t := suite.T() t := suite.T()
service := createTestService(t, suite.creds) service := createTestService(t, suite.creds)
tuples, err := preFetchLists(ctx, service, "root") tuples, err := preFetchLists(ctx, service, "root")
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
job := []string{tuples[0].id} job := []string{tuples[0].id}
lists, err := loadSiteLists(ctx, service, "root", job, fault.New(true)) lists, err := loadSiteLists(ctx, service, "root", job, fault.New(true))
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.Greater(t, len(lists), 0) assert.Greater(t, len(lists), 0)
t.Logf("Length: %d\n", len(lists)) t.Logf("Length: %d\n", len(lists))
} }

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
bmodels "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models" bmodels "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -53,7 +54,7 @@ func (suite *DataSupportSuite) TestCreateMessageFromBytes() {
t := suite.T() t := suite.T()
result, err := CreateMessageFromBytes(test.byteArray) result, err := CreateMessageFromBytes(test.byteArray)
test.checkError(t, err) test.checkError(t, err, clues.ToCore(err))
test.checkObject(t, result) test.checkObject(t, result)
}) })
} }
@ -92,7 +93,7 @@ func (suite *DataSupportSuite) TestCreateContactFromBytes() {
t := suite.T() t := suite.T()
result, err := CreateContactFromBytes(test.byteArray) result, err := CreateContactFromBytes(test.byteArray)
test.checkError(t, err) test.checkError(t, err, clues.ToCore(err))
test.isNil(t, result) test.isNil(t, result)
}) })
} }
@ -129,7 +130,7 @@ func (suite *DataSupportSuite) TestCreateEventFromBytes() {
t := suite.T() t := suite.T()
result, err := CreateEventFromBytes(test.byteArray) result, err := CreateEventFromBytes(test.byteArray)
test.checkError(t, err) test.checkError(t, err, clues.ToCore(err))
test.isNil(t, result) test.isNil(t, result)
}) })
} }
@ -170,7 +171,7 @@ func (suite *DataSupportSuite) TestCreateListFromBytes() {
t := suite.T() t := suite.T()
result, err := CreateListFromBytes(test.byteArray) result, err := CreateListFromBytes(test.byteArray)
test.checkError(t, err) test.checkError(t, err, clues.ToCore(err))
test.isNil(t, result) test.isNil(t, result)
}) })
} }
@ -212,10 +213,10 @@ func (suite *DataSupportSuite) TestCreatePageFromBytes() {
writer := kioser.NewJsonSerializationWriter() writer := kioser.NewJsonSerializationWriter()
err := pg.Serialize(writer) err := pg.Serialize(writer)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
byteArray, err := writer.GetSerializedContent() byteArray, err := writer.GetSerializedContent()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return byteArray return byteArray
}, },
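
In the table-driven tests above only the call sites change (`test.checkError(t, err, clues.ToCore(err))`); the table fields stay as they are. That works because testify's `assert.ErrorAssertionFunc` is variadic, so the clues core is just one more `msgAndArgs` value. A hedged sketch with a made-up table:

```go
package example

import (
	"errors"
	"testing"

	"github.com/alcionai/clues"
	"github.com/stretchr/testify/assert"
)

func TestCheckErrorTable(t *testing.T) {
	table := []struct {
		name       string
		input      error
		checkError assert.ErrorAssertionFunc
	}{
		{name: "good input", input: nil, checkError: assert.NoError},
		{name: "bad input", input: errors.New("boom"), checkError: assert.Error},
	}

	for _, test := range table {
		t.Run(test.name, func(t *testing.T) {
			err := test.input
			// ErrorAssertionFunc accepts ...interface{}, so the ToCore
			// output is passed as an extra message argument.
			test.checkError(t, err, clues.ToCore(err))
		})
	}
}
```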

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -26,7 +27,7 @@ func (suite *SupportTestSuite) TestToMessage() {
bytes := mockconnector.GetMockMessageBytes("m365 mail support test") bytes := mockconnector.GetMockMessageBytes("m365 mail support test")
message, err := CreateMessageFromBytes(bytes) message, err := CreateMessageFromBytes(bytes)
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
clone := ToMessage(message) clone := ToMessage(message)
assert.Equal(t, message.GetBccRecipients(), clone.GetBccRecipients()) assert.Equal(t, message.GetBccRecipients(), clone.GetBccRecipients())
@ -40,7 +41,7 @@ func (suite *SupportTestSuite) TestToEventSimplified() {
t := suite.T() t := suite.T()
bytes := mockconnector.GetMockEventWithAttendeesBytes("M365 Event Support Test") bytes := mockconnector.GetMockEventWithAttendeesBytes("M365 Event Support Test")
event, err := CreateEventFromBytes(bytes) event, err := CreateEventFromBytes(bytes)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
attendees := event.GetAttendees() attendees := event.GetAttendees()
newEvent := ToEventSimplified(event) newEvent := ToEventSimplified(event)

View File

@ -14,6 +14,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
) )
@ -33,8 +34,8 @@ func (suite *UploadSessionSuite) TestWriter() {
// Expected Content-Range value format // Expected Content-Range value format
contentRangeRegex := regexp.MustCompile(`^bytes (?P<rangestart>\d+)-(?P<rangeend>\d+)/(?P<length>\d+)$`) contentRangeRegex := regexp.MustCompile(`^bytes (?P<rangestart>\d+)-(?P<rangeend>\d+)/(?P<length>\d+)$`)
nextOffset := -1 nextOffset := -1
// Initialize a test http server that validates expected headers // Initialize a test http server that validates expected headers
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
assert.Equal(t, r.Method, http.MethodPut) assert.Equal(t, r.Method, http.MethodPut)
@ -45,12 +46,15 @@ func (suite *UploadSessionSuite) TestWriter() {
// Extract the Content-Range components // Extract the Content-Range components
matches := contentRangeRegex.FindStringSubmatch(r.Header[contentRangeHeaderKey][0]) matches := contentRangeRegex.FindStringSubmatch(r.Header[contentRangeHeaderKey][0])
rangeStart, err := strconv.Atoi(matches[contentRangeRegex.SubexpIndex("rangestart")]) rangeStart, err := strconv.Atoi(matches[contentRangeRegex.SubexpIndex("rangestart")])
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
rangeEnd, err := strconv.Atoi(matches[contentRangeRegex.SubexpIndex("rangeend")]) rangeEnd, err := strconv.Atoi(matches[contentRangeRegex.SubexpIndex("rangeend")])
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
length, err := strconv.Atoi(matches[contentRangeRegex.SubexpIndex("length")]) length, err := strconv.Atoi(matches[contentRangeRegex.SubexpIndex("length")])
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
// Validate total size and range start/end // Validate total size and range start/end
assert.Equal(t, int(writeSize), length) assert.Equal(t, int(writeSize), length)
@ -62,6 +66,7 @@ func (suite *UploadSessionSuite) TestWriter() {
nextOffset = rangeEnd nextOffset = rangeEnd
})) }))
defer ts.Close() defer ts.Close()
writer := NewWriter("item", ts.URL, writeSize) writer := NewWriter("item", ts.URL, writeSize)
@ -72,7 +77,7 @@ func (suite *UploadSessionSuite) TestWriter() {
copyBuffer := make([]byte, 32*1024) copyBuffer := make([]byte, 32*1024)
size, err := io.CopyBuffer(writer, td, copyBuffer) size, err := io.CopyBuffer(writer, td, copyBuffer)
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
require.Equal(suite.T(), writeSize, size) require.Equal(suite.T(), writeSize, size)
} }

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
) )
@ -21,9 +22,9 @@ func TestDataCollectionSuite(t *testing.T) {
func (suite *DataCollectionSuite) TestStateOf() { func (suite *DataCollectionSuite) TestStateOf() {
fooP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "foo") fooP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "foo")
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
barP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "bar") barP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "bar")
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
table := []struct { table := []struct {
name string name string

View File

@ -6,6 +6,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/events" "github.com/alcionai/corso/src/internal/events"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
@ -37,7 +38,7 @@ func (suite *EventsIntegrationSuite) TestNewBus() {
Prefix: "prfx", Prefix: "prfx",
}, },
) )
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
a, err := account.NewAccount( a, err := account.NewAccount(
account.ProviderM365, account.ProviderM365,
@ -49,15 +50,19 @@ func (suite *EventsIntegrationSuite) TestNewBus() {
AzureTenantID: "tid", AzureTenantID: "tid",
}, },
) )
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
b, err := events.NewBus(ctx, s, a.ID(), control.Options{}) b, err := events.NewBus(ctx, s, a.ID(), control.Options{})
require.NotEmpty(t, b) require.NotEmpty(t, b)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.NoError(t, b.Close())
err = b.Close()
require.NoError(t, err, clues.ToCore(err))
b2, err := events.NewBus(ctx, s, a.ID(), control.Options{DisableMetrics: true}) b2, err := events.NewBus(ctx, s, a.ID(), control.Options{DisableMetrics: true})
require.Empty(t, b2) require.Empty(t, b2)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.NoError(t, b2.Close())
err = b2.Close()
require.NoError(t, err, clues.ToCore(err))
} }
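
Where the original asserted directly on a call, as in `require.NoError(t, b.Close())`, there is no error variable for `ToCore` to read, so the call is split into an assignment followed by the assertion. A minimal sketch of that rewrite, with a hypothetical `bus` type standing in:

```go
package example

import (
	"testing"

	"github.com/alcionai/clues"
	"github.com/stretchr/testify/require"
)

// bus is a hypothetical stand-in for any value whose method returns an error.
type bus struct{}

func (bus) Close() error { return nil }

func TestBusClose(t *testing.T) {
	b := bus{}

	// Before: require.NoError(t, b.Close())
	// After: capture the result so clues.ToCore can be applied to it.
	err := b.Close()
	require.NoError(t, err, clues.ToCore(err))
}
```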

View File

@ -12,6 +12,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/storage" "github.com/alcionai/corso/src/pkg/storage"
) )
@ -76,12 +77,15 @@ func (suite *WrapperIntegrationSuite) TestRepoExistsError() {
st := tester.NewPrefixedS3Storage(t) st := tester.NewPrefixedS3Storage(t)
k := NewConn(st) k := NewConn(st)
require.NoError(t, k.Initialize(ctx))
require.NoError(t, k.Close(ctx))
err := k.Initialize(ctx) err := k.Initialize(ctx)
assert.Error(t, err) require.NoError(t, err, clues.ToCore(err))
err = k.Close(ctx)
require.NoError(t, err, clues.ToCore(err))
err = k.Initialize(ctx)
assert.Error(t, err, clues.ToCore(err))
assert.ErrorIs(t, err, ErrorRepoAlreadyExists) assert.ErrorIs(t, err, ErrorRepoAlreadyExists)
} }
@ -90,12 +94,12 @@ func (suite *WrapperIntegrationSuite) TestBadProviderErrors() {
defer flush() defer flush()
t := suite.T() t := suite.T()
st := tester.NewPrefixedS3Storage(t) st := tester.NewPrefixedS3Storage(t)
st.Provider = storage.ProviderUnknown st.Provider = storage.ProviderUnknown
k := NewConn(st) k := NewConn(st)
assert.Error(t, k.Initialize(ctx))
err := k.Initialize(ctx)
assert.Error(t, err, clues.ToCore(err))
} }
func (suite *WrapperIntegrationSuite) TestConnectWithoutInitErrors() { func (suite *WrapperIntegrationSuite) TestConnectWithoutInitErrors() {
@ -103,10 +107,11 @@ func (suite *WrapperIntegrationSuite) TestConnectWithoutInitErrors() {
defer flush() defer flush()
t := suite.T() t := suite.T()
st := tester.NewPrefixedS3Storage(t) st := tester.NewPrefixedS3Storage(t)
k := NewConn(st) k := NewConn(st)
assert.Error(t, k.Connect(ctx))
err := k.Connect(ctx)
assert.Error(t, err, clues.ToCore(err))
} }
func (suite *WrapperIntegrationSuite) TestCloseTwiceDoesNotCrash() { func (suite *WrapperIntegrationSuite) TestCloseTwiceDoesNotCrash() {
@ -116,10 +121,14 @@ func (suite *WrapperIntegrationSuite) TestCloseTwiceDoesNotCrash() {
t := suite.T() t := suite.T()
k, err := openKopiaRepo(t, ctx) k, err := openKopiaRepo(t, ctx)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.NoError(t, k.Close(ctx))
err = k.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
assert.Nil(t, k.Repository) assert.Nil(t, k.Repository)
assert.NoError(t, k.Close(ctx))
err = k.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
} }
func (suite *WrapperIntegrationSuite) TestCloseAfterWrap() { func (suite *WrapperIntegrationSuite) TestCloseAfterWrap() {
@ -129,17 +138,20 @@ func (suite *WrapperIntegrationSuite) TestCloseAfterWrap() {
t := suite.T() t := suite.T()
k, err := openKopiaRepo(t, ctx) k, err := openKopiaRepo(t, ctx)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.NoError(t, k.wrap()) err = k.wrap()
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, 2, k.refCount) assert.Equal(t, 2, k.refCount)
require.NoError(t, k.Close(ctx)) err = k.Close(ctx)
require.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, k.Repository) assert.NotNil(t, k.Repository)
assert.Equal(t, 1, k.refCount) assert.Equal(t, 1, k.refCount)
require.NoError(t, k.Close(ctx)) err = k.Close(ctx)
require.NoError(t, err, clues.ToCore(err))
assert.Nil(t, k.Repository) assert.Nil(t, k.Repository)
assert.Equal(t, 0, k.refCount) assert.Equal(t, 0, k.refCount)
} }
@ -151,10 +163,13 @@ func (suite *WrapperIntegrationSuite) TestOpenAfterClose() {
t := suite.T() t := suite.T()
k, err := openKopiaRepo(t, ctx) k, err := openKopiaRepo(t, ctx)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.NoError(t, k.Close(ctx)) err = k.Close(ctx)
assert.Error(t, k.wrap()) assert.NoError(t, err, clues.ToCore(err))
err = k.wrap()
assert.Error(t, err, clues.ToCore(err))
} }
func (suite *WrapperIntegrationSuite) TestBadCompressorType() { func (suite *WrapperIntegrationSuite) TestBadCompressorType() {
@ -164,13 +179,15 @@ func (suite *WrapperIntegrationSuite) TestBadCompressorType() {
t := suite.T() t := suite.T()
k, err := openKopiaRepo(t, ctx) k, err := openKopiaRepo(t, ctx)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
defer func() { defer func() {
assert.NoError(t, k.Close(ctx)) err := k.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}() }()
assert.Error(t, k.Compression(ctx, "not-a-compressor")) err = k.Compression(ctx, "not-a-compressor")
assert.Error(t, err, clues.ToCore(err))
} }
func (suite *WrapperIntegrationSuite) TestGetPolicyOrDefault_GetsDefault() { func (suite *WrapperIntegrationSuite) TestGetPolicyOrDefault_GetsDefault() {
@ -180,10 +197,11 @@ func (suite *WrapperIntegrationSuite) TestGetPolicyOrDefault_GetsDefault() {
t := suite.T() t := suite.T()
k, err := openKopiaRepo(t, ctx) k, err := openKopiaRepo(t, ctx)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
defer func() { defer func() {
assert.NoError(t, k.Close(ctx)) err := k.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}() }()
si := snapshot.SourceInfo{ si := snapshot.SourceInfo{
@ -193,8 +211,7 @@ func (suite *WrapperIntegrationSuite) TestGetPolicyOrDefault_GetsDefault() {
} }
p, err := k.getPolicyOrEmpty(ctx, si) p, err := k.getPolicyOrEmpty(ctx, si)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, policy.Policy{}, *p) assert.Equal(t, policy.Policy{}, *p)
} }
@ -206,18 +223,19 @@ func (suite *WrapperIntegrationSuite) TestSetCompressor() {
compressor := "pgzip" compressor := "pgzip"
k, err := openKopiaRepo(t, ctx) k, err := openKopiaRepo(t, ctx)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
defer func() { defer func() {
assert.NoError(t, k.Close(ctx)) err := k.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}() }()
assert.NoError(t, k.Compression(ctx, compressor)) err = k.Compression(ctx, compressor)
assert.NoError(t, err, clues.ToCore(err))
// Check the policy was actually created and has the right compressor. // Check the policy was actually created and has the right compressor.
p, err := k.getPolicyOrEmpty(ctx, policy.GlobalPolicySourceInfo) p, err := k.getPolicyOrEmpty(ctx, policy.GlobalPolicySourceInfo)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, compressor, string(p.CompressionPolicy.CompressorName)) assert.Equal(t, compressor, string(p.CompressionPolicy.CompressorName))
// Check the global policy will be the effective policy in future snapshots // Check the global policy will be the effective policy in future snapshots
@ -229,13 +247,11 @@ func (suite *WrapperIntegrationSuite) TestSetCompressor() {
} }
policyTree, err := policy.TreeForSource(ctx, k, si) policyTree, err := policy.TreeForSource(ctx, k, si)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal( assert.Equal(
t, t,
compressor, compressor,
string(policyTree.EffectivePolicy().CompressionPolicy.CompressorName), string(policyTree.EffectivePolicy().CompressionPolicy.CompressorName))
)
} }
func (suite *WrapperIntegrationSuite) TestConfigDefaultsSetOnInitAndNotOnConnect() { func (suite *WrapperIntegrationSuite) TestConfigDefaultsSetOnInitAndNotOnConnect() {
@ -325,26 +341,32 @@ func (suite *WrapperIntegrationSuite) TestConfigDefaultsSetOnInitAndNotOnConnect
t := suite.T() t := suite.T()
k, err := openKopiaRepo(t, ctx) k, err := openKopiaRepo(t, ctx)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
p, err := k.getPolicyOrEmpty(ctx, policy.GlobalPolicySourceInfo) p, err := k.getPolicyOrEmpty(ctx, policy.GlobalPolicySourceInfo)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
test.checkInitFunc(t, p) test.checkInitFunc(t, p)
require.NoError(t, test.mutator(ctx, p)) err = test.mutator(ctx, p)
require.NoError(t, k.writeGlobalPolicy(ctx, "TestDefaultPolicyConfigSet", p)) require.NoError(t, err, clues.ToCore(err))
require.NoError(t, k.Close(ctx))
require.NoError(t, k.Connect(ctx)) err = k.writeGlobalPolicy(ctx, "TestDefaultPolicyConfigSet", p)
require.NoError(t, err, clues.ToCore(err))
err = k.Close(ctx)
require.NoError(t, err, clues.ToCore(err))
err = k.Connect(ctx)
require.NoError(t, err, clues.ToCore(err))
defer func() { defer func() {
assert.NoError(t, k.Close(ctx)) err := k.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}() }()
p, err = k.getPolicyOrEmpty(ctx, policy.GlobalPolicySourceInfo) p, err = k.getPolicyOrEmpty(ctx, policy.GlobalPolicySourceInfo)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
test.checkFunc(t, p) test.checkFunc(t, p)
}) })
} }
@ -357,10 +379,15 @@ func (suite *WrapperIntegrationSuite) TestInitAndConnWithTempDirectory() {
t := suite.T() t := suite.T()
k, err := openKopiaRepo(t, ctx) k, err := openKopiaRepo(t, ctx)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.NoError(t, k.Close(ctx))
err = k.Close(ctx)
require.NoError(t, err, clues.ToCore(err))
// Re-open with Connect. // Re-open with Connect.
require.NoError(t, k.Connect(ctx)) err = k.Connect(ctx)
assert.NoError(t, k.Close(ctx)) require.NoError(t, err, clues.ToCore(err))
err = k.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
} }
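
The kopia wrapper tests repeat one more variant of the same split: deferred cleanup that used to read `defer func() { assert.NoError(t, k.Close(ctx)) }()` now assigns the error inside the deferred function before asserting. A sketch of that shape, assuming a hypothetical `conn` type with a context-taking `Close`:

```go
package example

import (
	"context"
	"testing"

	"github.com/alcionai/clues"
	"github.com/stretchr/testify/assert"
)

// conn is a hypothetical stand-in for a resource closed during test cleanup.
type conn struct{}

func (conn) Close(ctx context.Context) error { return nil }

func TestDeferredClose(t *testing.T) {
	ctx := context.Background()
	k := conn{}

	defer func() {
		// Assign first so the assertion can include the structured clues
		// data when Close fails.
		err := k.Close(ctx)
		assert.NoError(t, err, clues.ToCore(err))
	}()
}
```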

View File

@ -13,6 +13,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -41,7 +42,7 @@ func (suite *KopiaDataCollectionUnitSuite) TestReturnsPath() {
path.EmailCategory, path.EmailCategory,
false, false,
"some", "path", "for", "data") "some", "path", "for", "data")
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
c := kopiaDataCollection{ c := kopiaDataCollection{
streams: []data.Stream{}, streams: []data.Stream{},
@ -108,13 +109,13 @@ func (suite *KopiaDataCollectionUnitSuite) TestReturnsStreams() {
count := 0 count := 0
for returnedStream := range c.Items(ctx, fault.New(true)) { for returnedStream := range c.Items(ctx, fault.New(true)) {
require.Less(t, count, len(test.streams)) require.Less(t, count, len(test.streams))
assert.Equal(t, returnedStream.UUID(), uuids[count]) assert.Equal(t, returnedStream.UUID(), uuids[count])
buf, err := io.ReadAll(returnedStream.ToReader()) buf, err := io.ReadAll(returnedStream.ToReader())
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, buf, testData[count]) assert.Equal(t, buf, testData[count])
require.Implements(t, (*data.StreamSize)(nil), returnedStream) require.Implements(t, (*data.StreamSize)(nil), returnedStream)
ss := returnedStream.(data.StreamSize) ss := returnedStream.(data.StreamSize)
assert.Equal(t, len(buf), int(ss.Size())) assert.Equal(t, len(buf), int(ss.Size()))
@ -217,7 +218,7 @@ func (suite *KopiaDataCollectionUnitSuite) TestFetch() {
category, category,
false, false,
folder1, folder2) folder1, folder2)
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
table := []struct { table := []struct {
name string name string
@ -276,15 +277,14 @@ func (suite *KopiaDataCollectionUnitSuite) TestFetch() {
if err != nil { if err != nil {
if test.notFoundErr { if test.notFoundErr {
assert.ErrorIs(t, err, data.ErrNotFound) assert.ErrorIs(t, err, data.ErrNotFound, clues.ToCore(err))
} }
return return
} }
fileData, err := io.ReadAll(s.ToReader()) fileData, err := io.ReadAll(s.ToReader())
test.readErr(t, err, clues.ToCore(err))
test.readErr(t, err)
if err != nil { if err != nil {
return return

View File

@ -12,6 +12,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/model" "github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -26,7 +27,7 @@ type fooModel struct {
//revive:disable-next-line:context-as-argument //revive:disable-next-line:context-as-argument
func getModelStore(t *testing.T, ctx context.Context) *ModelStore { func getModelStore(t *testing.T, ctx context.Context) *ModelStore {
c, err := openKopiaRepo(t, ctx) c, err := openKopiaRepo(t, ctx)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return &ModelStore{c: c, modelVersion: globalModelVersion} return &ModelStore{c: c, modelVersion: globalModelVersion}
} }
@ -79,7 +80,8 @@ func (suite *ModelStoreIntegrationSuite) SetupTest() {
func (suite *ModelStoreIntegrationSuite) TearDownTest() { func (suite *ModelStoreIntegrationSuite) TearDownTest() {
defer suite.flush() defer suite.flush()
assert.NoError(suite.T(), suite.m.Close(suite.ctx)) err := suite.m.Close(suite.ctx)
assert.NoError(suite.T(), err, clues.ToCore(err))
} }
func (suite *ModelStoreIntegrationSuite) TestBadTagsErrors() { func (suite *ModelStoreIntegrationSuite) TestBadTagsErrors() {
@ -114,32 +116,25 @@ func (suite *ModelStoreIntegrationSuite) TestBadTagsErrors() {
foo := &fooModel{Bar: uuid.NewString()} foo := &fooModel{Bar: uuid.NewString()}
foo.Tags = test.tags foo.Tags = test.tags
assert.ErrorIs( err := suite.m.Put(suite.ctx, model.BackupOpSchema, foo)
t, assert.ErrorIs(t, err, errBadTagKey, clues.ToCore(err))
suite.m.Put(suite.ctx, model.BackupOpSchema, foo),
errBadTagKey,
)
// Add model for update/get ID checks. // Add model for update/get ID checks.
foo.Tags = map[string]string{} foo.Tags = map[string]string{}
require.NoError(
t, err = suite.m.Put(suite.ctx, model.BackupOpSchema, foo)
suite.m.Put(suite.ctx, model.BackupOpSchema, foo), require.NoError(t, err, clues.ToCore(err))
)
foo.Tags = test.tags foo.Tags = test.tags
assert.ErrorIs(
t,
suite.m.Update(suite.ctx, model.BackupOpSchema, foo),
errBadTagKey,
)
_, err := suite.m.GetIDsForType( err = suite.m.Update(suite.ctx, model.BackupOpSchema, foo)
assert.ErrorIs(t, err, errBadTagKey, clues.ToCore(err))
_, err = suite.m.GetIDsForType(
suite.ctx, suite.ctx,
model.BackupOpSchema, model.BackupOpSchema,
test.tags, test.tags)
) assert.ErrorIs(t, err, errBadTagKey, clues.ToCore(err))
assert.ErrorIs(t, err, errBadTagKey)
}) })
} }
} }
@ -156,58 +151,56 @@ func (suite *ModelStoreIntegrationSuite) TestNoIDsErrors() {
noModelStoreID.ID = model.StableID(uuid.NewString()) noModelStoreID.ID = model.StableID(uuid.NewString())
noModelStoreID.ModelStoreID = "" noModelStoreID.ModelStoreID = ""
assert.Error(t, suite.m.Update(suite.ctx, theModelType, noStableID)) err := suite.m.Update(suite.ctx, theModelType, noStableID)
assert.Error(t, suite.m.Update(suite.ctx, theModelType, noModelStoreID)) assert.Error(t, err, clues.ToCore(err))
assert.Error(t, suite.m.Get(suite.ctx, theModelType, "", nil)) err = suite.m.Update(suite.ctx, theModelType, noModelStoreID)
assert.Error(t, suite.m.GetWithModelStoreID(suite.ctx, theModelType, "", nil)) assert.Error(t, err, clues.ToCore(err))
assert.Error(t, suite.m.Delete(suite.ctx, theModelType, "")) err = suite.m.Get(suite.ctx, theModelType, "", nil)
assert.Error(t, suite.m.DeleteWithModelStoreID(suite.ctx, "")) assert.Error(t, err, clues.ToCore(err))
err = suite.m.GetWithModelStoreID(suite.ctx, theModelType, "", nil)
assert.Error(t, err, clues.ToCore(err))
err = suite.m.Delete(suite.ctx, theModelType, "")
assert.Error(t, err, clues.ToCore(err))
err = suite.m.DeleteWithModelStoreID(suite.ctx, "")
assert.Error(t, err, clues.ToCore(err))
} }
func (suite *ModelStoreIntegrationSuite) TestBadModelTypeErrors() { func (suite *ModelStoreIntegrationSuite) TestBadModelTypeErrors() {
t := suite.T() t := suite.T()
foo := &fooModel{Bar: uuid.NewString()} foo := &fooModel{Bar: uuid.NewString()}
assert.ErrorIs( err := suite.m.Put(suite.ctx, model.UnknownSchema, foo)
t, assert.ErrorIs(t, err, errUnrecognizedSchema, clues.ToCore(err))
suite.m.Put(suite.ctx, model.UnknownSchema, foo),
errUnrecognizedSchema,
)
require.NoError(t, suite.m.Put(suite.ctx, model.BackupOpSchema, foo)) err = suite.m.Put(suite.ctx, model.BackupOpSchema, foo)
require.NoError(t, err, clues.ToCore(err))
_, err := suite.m.GetIDsForType(suite.ctx, model.UnknownSchema, nil) _, err = suite.m.GetIDsForType(suite.ctx, model.UnknownSchema, nil)
assert.ErrorIs(t, err, errUnrecognizedSchema) assert.ErrorIs(t, err, errUnrecognizedSchema, clues.ToCore(err))
} }
func (suite *ModelStoreIntegrationSuite) TestBadTypeErrors() { func (suite *ModelStoreIntegrationSuite) TestBadTypeErrors() {
t := suite.T() t := suite.T()
foo := &fooModel{Bar: uuid.NewString()} foo := &fooModel{Bar: uuid.NewString()}
require.NoError(t, suite.m.Put(suite.ctx, model.BackupOpSchema, foo)) err := suite.m.Put(suite.ctx, model.BackupOpSchema, foo)
require.NoError(t, err, clues.ToCore(err))
returned := &fooModel{} returned := &fooModel{}
assert.ErrorIs(
t,
suite.m.Get(suite.ctx, model.RestoreOpSchema, foo.ID, returned),
errModelTypeMismatch,
)
assert.ErrorIs( err = suite.m.Get(suite.ctx, model.RestoreOpSchema, foo.ID, returned)
t, assert.ErrorIs(t, err, errModelTypeMismatch, clues.ToCore(err))
suite.m.GetWithModelStoreID(suite.ctx, model.RestoreOpSchema, foo.ModelStoreID, returned),
errModelTypeMismatch,
)
assert.ErrorIs( err = suite.m.GetWithModelStoreID(suite.ctx, model.RestoreOpSchema, foo.ModelStoreID, returned)
t, assert.ErrorIs(t, err, errModelTypeMismatch, clues.ToCore(err))
suite.m.Delete(suite.ctx, model.RestoreOpSchema, foo.ID),
errModelTypeMismatch, err = suite.m.Delete(suite.ctx, model.RestoreOpSchema, foo.ID)
) assert.ErrorIs(t, err, errModelTypeMismatch, clues.ToCore(err))
} }
func (suite *ModelStoreIntegrationSuite) TestPutGetBadVersion() { func (suite *ModelStoreIntegrationSuite) TestPutGetBadVersion() {
@ -218,13 +211,13 @@ func (suite *ModelStoreIntegrationSuite) TestPutGetBadVersion() {
foo.Tags = map[string]string{} foo.Tags = map[string]string{}
err := suite.m.Put(suite.ctx, schema, foo) err := suite.m.Put(suite.ctx, schema, foo)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
suite.m.modelVersion = 42 suite.m.modelVersion = 42
returned := &fooModel{} returned := &fooModel{}
err = suite.m.Get(suite.ctx, schema, foo.ID, returned) err = suite.m.Get(suite.ctx, schema, foo.ID, returned)
assert.Error(t, err) assert.Error(t, err, clues.ToCore(err))
} }
func (suite *ModelStoreIntegrationSuite) TestPutGet() { func (suite *ModelStoreIntegrationSuite) TestPutGet() {
@ -264,7 +257,7 @@ func (suite *ModelStoreIntegrationSuite) TestPutGet() {
foo.Tags = map[string]string{} foo.Tags = map[string]string{}
err := suite.m.Put(suite.ctx, test.s, foo) err := suite.m.Put(suite.ctx, test.s, foo)
test.check(t, err) test.check(t, err, clues.ToCore(err))
if test.hasErr { if test.hasErr {
return return
@ -276,11 +269,11 @@ func (suite *ModelStoreIntegrationSuite) TestPutGet() {
returned := &fooModel{} returned := &fooModel{}
err = suite.m.Get(suite.ctx, test.s, foo.ID, returned) err = suite.m.Get(suite.ctx, test.s, foo.ID, returned)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned) assert.Equal(t, foo, returned)
err = suite.m.GetWithModelStoreID(suite.ctx, test.s, foo.ModelStoreID, returned) err = suite.m.GetWithModelStoreID(suite.ctx, test.s, foo.ModelStoreID, returned)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned) assert.Equal(t, foo, returned)
}) })
} }
@ -318,19 +311,20 @@ func (suite *ModelStoreIntegrationSuite) TestPutGet_PreSetID() {
foo.Tags = map[string]string{} foo.Tags = map[string]string{}
err := suite.m.Put(suite.ctx, mdl, foo) err := suite.m.Put(suite.ctx, mdl, foo)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
test.expect(t, model.StableID(test.baseID), foo.ID) test.expect(t, model.StableID(test.baseID), foo.ID)
require.NotEmpty(t, foo.ModelStoreID) require.NotEmpty(t, foo.ModelStoreID)
require.NotEmpty(t, foo.ID) require.NotEmpty(t, foo.ID)
returned := &fooModel{} returned := &fooModel{}
err = suite.m.Get(suite.ctx, mdl, foo.ID, returned) err = suite.m.Get(suite.ctx, mdl, foo.ID, returned)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned) assert.Equal(t, foo, returned)
err = suite.m.GetWithModelStoreID(suite.ctx, mdl, foo.ModelStoreID, returned) err = suite.m.GetWithModelStoreID(suite.ctx, mdl, foo.ModelStoreID, returned)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned) assert.Equal(t, foo, returned)
}) })
} }
@ -345,42 +339,43 @@ func (suite *ModelStoreIntegrationSuite) TestPutGet_WithTags() {
"bar": "baz", "bar": "baz",
} }
require.NoError(t, suite.m.Put(suite.ctx, theModelType, foo)) err := suite.m.Put(suite.ctx, theModelType, foo)
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, foo.ModelStoreID) require.NotEmpty(t, foo.ModelStoreID)
require.NotEmpty(t, foo.ID) require.NotEmpty(t, foo.ID)
returned := &fooModel{} returned := &fooModel{}
err := suite.m.Get(suite.ctx, theModelType, foo.ID, returned) err = suite.m.Get(suite.ctx, theModelType, foo.ID, returned)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned) assert.Equal(t, foo, returned)
err = suite.m.GetWithModelStoreID(suite.ctx, theModelType, foo.ModelStoreID, returned) err = suite.m.GetWithModelStoreID(suite.ctx, theModelType, foo.ModelStoreID, returned)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned) assert.Equal(t, foo, returned)
} }
func (suite *ModelStoreIntegrationSuite) TestGet_NotFoundErrors() { func (suite *ModelStoreIntegrationSuite) TestGet_NotFoundErrors() {
t := suite.T() t := suite.T()
assert.ErrorIs(t, suite.m.Get(suite.ctx, model.BackupOpSchema, "baz", nil), data.ErrNotFound) err := suite.m.Get(suite.ctx, model.BackupOpSchema, "baz", nil)
assert.ErrorIs( assert.ErrorIs(t, err, data.ErrNotFound, clues.ToCore(err))
t, suite.m.GetWithModelStoreID(suite.ctx, model.BackupOpSchema, "baz", nil), data.ErrNotFound)
err = suite.m.GetWithModelStoreID(suite.ctx, model.BackupOpSchema, "baz", nil)
assert.ErrorIs(t, err, data.ErrNotFound, clues.ToCore(err))
} }
func (suite *ModelStoreIntegrationSuite) TestPutGetOfTypeBadVersion() { func (suite *ModelStoreIntegrationSuite) TestPutGetOfTypeBadVersion() {
t := suite.T() t := suite.T()
schema := model.BackupOpSchema schema := model.BackupOpSchema
foo := &fooModel{Bar: uuid.NewString()} foo := &fooModel{Bar: uuid.NewString()}
err := suite.m.Put(suite.ctx, schema, foo) err := suite.m.Put(suite.ctx, schema, foo)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
suite.m.modelVersion = 42 suite.m.modelVersion = 42
ids, err := suite.m.GetIDsForType(suite.ctx, schema, nil) ids, err := suite.m.GetIDsForType(suite.ctx, schema, nil)
assert.Error(t, err) assert.Error(t, err, clues.ToCore(err))
assert.Empty(t, ids) assert.Empty(t, ids)
} }
@ -426,8 +421,7 @@ func (suite *ModelStoreIntegrationSuite) TestPutGetOfType() {
} }
ids, err := suite.m.GetIDsForType(suite.ctx, test.s, nil) ids, err := suite.m.GetIDsForType(suite.ctx, test.s, nil)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Len(t, ids, 1) assert.Len(t, ids, 1)
}) })
} }
@ -545,7 +539,8 @@ func (suite *ModelStoreIntegrationSuite) TestGetOfTypeWithTags() {
// Setup the store by adding all the inputs. // Setup the store by adding all the inputs.
for _, in := range inputs { for _, in := range inputs {
require.NoError(suite.T(), suite.m.Put(suite.ctx, in.schema, in.dataModel)) err := suite.m.Put(suite.ctx, in.schema, in.dataModel)
require.NoError(suite.T(), err, clues.ToCore(err))
} }
// Check we can properly execute our tests. // Check we can properly execute our tests.
@ -559,7 +554,7 @@ func (suite *ModelStoreIntegrationSuite) TestGetOfTypeWithTags() {
} }
ids, err := suite.m.GetIDsForType(suite.ctx, test.s, test.tags) ids, err := suite.m.GetIDsForType(suite.ctx, test.s, test.tags)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.ElementsMatch(t, expected, ids) assert.ElementsMatch(t, expected, ids)
}) })
@ -592,23 +587,24 @@ func (suite *ModelStoreIntegrationSuite) TestPutUpdate() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
t := suite.T()
theModelType := model.BackupOpSchema theModelType := model.BackupOpSchema
m := getModelStore(t, ctx) m := getModelStore(t, ctx)
defer func() { defer func() {
assert.NoError(t, m.c.Close(ctx)) err := m.c.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}() }()
foo := &fooModel{Bar: uuid.NewString()} foo := &fooModel{Bar: uuid.NewString()}
// Avoid some silly test errors from comparing nil to empty map. // Avoid some silly test errors from comparing nil to empty map.
foo.Tags = map[string]string{} foo.Tags = map[string]string{}
require.NoError(t, m.Put(ctx, theModelType, foo)) err := m.Put(ctx, theModelType, foo)
require.NoError(t, err, clues.ToCore(err))
oldModelID := foo.ModelStoreID oldModelID := foo.ModelStoreID
oldStableID := foo.ID oldStableID := foo.ID
@ -616,19 +612,21 @@ func (suite *ModelStoreIntegrationSuite) TestPutUpdate() {
test.mutator(foo) test.mutator(foo)
require.NoError(t, m.Update(ctx, theModelType, foo)) err = m.Update(ctx, theModelType, foo)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, oldStableID, foo.ID) assert.Equal(t, oldStableID, foo.ID)
// The version in the model store has not changed so we get the old // The version in the model store has not changed so we get the old
// version back. // version back.
assert.Equal(t, oldVersion, foo.Version) assert.Equal(t, oldVersion, foo.Version)
returned := &fooModel{} returned := &fooModel{}
require.NoError(
t, m.GetWithModelStoreID(ctx, theModelType, foo.ModelStoreID, returned)) err = m.GetWithModelStoreID(ctx, theModelType, foo.ModelStoreID, returned)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned) assert.Equal(t, foo, returned)
ids, err := m.GetIDsForType(ctx, theModelType, nil) ids, err := m.GetIDsForType(ctx, theModelType, nil)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.Len(t, ids, 1) require.Len(t, ids, 1)
assert.Equal(t, globalModelVersion, ids[0].Version) assert.Equal(t, globalModelVersion, ids[0].Version)
@ -639,7 +637,7 @@ func (suite *ModelStoreIntegrationSuite) TestPutUpdate() {
} }
err = m.GetWithModelStoreID(ctx, theModelType, oldModelID, nil) err = m.GetWithModelStoreID(ctx, theModelType, oldModelID, nil)
assert.ErrorIs(t, err, data.ErrNotFound) assert.ErrorIs(t, err, data.ErrNotFound, clues.ToCore(err))
}) })
} }
} }
@ -676,16 +674,19 @@ func (suite *ModelStoreIntegrationSuite) TestPutUpdate_FailsNotMatchingPrev() {
m := getModelStore(t, ctx) m := getModelStore(t, ctx)
defer func() { defer func() {
assert.NoError(t, m.c.Close(ctx)) err := m.c.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}() }()
foo := &fooModel{Bar: uuid.NewString()} foo := &fooModel{Bar: uuid.NewString()}
require.NoError(t, m.Put(ctx, startModelType, foo)) err := m.Put(ctx, startModelType, foo)
require.NoError(t, err, clues.ToCore(err))
test.mutator(foo) test.mutator(foo)
assert.Error(t, m.Update(ctx, test.s, foo)) err = m.Update(ctx, test.s, foo)
assert.Error(t, err, clues.ToCore(err))
}) })
} }
} }
@ -693,23 +694,27 @@ func (suite *ModelStoreIntegrationSuite) TestPutUpdate_FailsNotMatchingPrev() {
func (suite *ModelStoreIntegrationSuite) TestPutDelete() { func (suite *ModelStoreIntegrationSuite) TestPutDelete() {
t := suite.T() t := suite.T()
theModelType := model.BackupOpSchema theModelType := model.BackupOpSchema
foo := &fooModel{Bar: uuid.NewString()} foo := &fooModel{Bar: uuid.NewString()}
require.NoError(t, suite.m.Put(suite.ctx, theModelType, foo)) err := suite.m.Put(suite.ctx, theModelType, foo)
require.NoError(t, err, clues.ToCore(err))
require.NoError(t, suite.m.Delete(suite.ctx, theModelType, foo.ID)) err = suite.m.Delete(suite.ctx, theModelType, foo.ID)
require.NoError(t, err, clues.ToCore(err))
returned := &fooModel{} returned := &fooModel{}
err := suite.m.GetWithModelStoreID(suite.ctx, theModelType, foo.ModelStoreID, returned) err = suite.m.GetWithModelStoreID(suite.ctx, theModelType, foo.ModelStoreID, returned)
assert.ErrorIs(t, err, data.ErrNotFound) assert.ErrorIs(t, err, data.ErrNotFound, clues.ToCore(err))
} }
func (suite *ModelStoreIntegrationSuite) TestPutDelete_BadIDsNoop() { func (suite *ModelStoreIntegrationSuite) TestPutDelete_BadIDsNoop() {
t := suite.T() t := suite.T()
assert.NoError(t, suite.m.Delete(suite.ctx, model.BackupOpSchema, "foo")) err := suite.m.Delete(suite.ctx, model.BackupOpSchema, "foo")
assert.NoError(t, suite.m.DeleteWithModelStoreID(suite.ctx, "foo")) assert.NoError(t, err, clues.ToCore(err))
err = suite.m.DeleteWithModelStoreID(suite.ctx, "foo")
assert.NoError(t, err, clues.ToCore(err))
} }
// --------------- // ---------------
@ -742,7 +747,8 @@ func (suite *ModelStoreRegressionSuite) TestFailDuringWriteSessionHasNoVisibleEf
m := getModelStore(t, ctx) m := getModelStore(t, ctx)
defer func() { defer func() {
assert.NoError(t, m.c.Close(ctx)) err := m.c.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}() }()
foo := &fooModel{Bar: uuid.NewString()} foo := &fooModel{Bar: uuid.NewString()}
@ -750,13 +756,13 @@ func (suite *ModelStoreRegressionSuite) TestFailDuringWriteSessionHasNoVisibleEf
foo.ModelStoreID = manifest.ID(uuid.NewString()) foo.ModelStoreID = manifest.ID(uuid.NewString())
// Avoid some silly test errors from comparing nil to empty map. // Avoid some silly test errors from comparing nil to empty map.
foo.Tags = map[string]string{} foo.Tags = map[string]string{}
theModelType := model.BackupOpSchema theModelType := model.BackupOpSchema
require.NoError(t, m.Put(ctx, theModelType, foo)) err := m.Put(ctx, theModelType, foo)
require.NoError(t, err, clues.ToCore(err))
newID := manifest.ID("") newID := manifest.ID("")
err := repo.WriteSession( err = repo.WriteSession(
ctx, ctx,
m.c, m.c,
repo.WriteSessionOptions{Purpose: "WriteSessionFailureTest"}, repo.WriteSessionOptions{Purpose: "WriteSessionFailureTest"},
@ -772,7 +778,7 @@ func (suite *ModelStoreRegressionSuite) TestFailDuringWriteSessionHasNoVisibleEf
}() }()
innerErr = putInner(innerCtx, w, theModelType, foo, false) innerErr = putInner(innerCtx, w, theModelType, foo, false)
require.NoError(t, innerErr) require.NoError(t, innerErr, clues.ToCore(innerErr))
newID = foo.ModelStoreID newID = foo.ModelStoreID
@ -780,14 +786,15 @@ func (suite *ModelStoreRegressionSuite) TestFailDuringWriteSessionHasNoVisibleEf
}, },
) )
assert.ErrorIs(t, err, assert.AnError) assert.ErrorIs(t, err, assert.AnError, clues.ToCore(err))
err = m.GetWithModelStoreID(ctx, theModelType, newID, nil) err = m.GetWithModelStoreID(ctx, theModelType, newID, nil)
assert.ErrorIs(t, err, data.ErrNotFound) assert.ErrorIs(t, err, data.ErrNotFound, clues.ToCore(err))
returned := &fooModel{} returned := &fooModel{}
require.NoError(
t, m.GetWithModelStoreID(ctx, theModelType, foo.ModelStoreID, returned)) err = m.GetWithModelStoreID(ctx, theModelType, foo.ModelStoreID, returned)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned) assert.Equal(t, foo, returned)
} }
@ -798,14 +805,16 @@ func openConnAndModelStore(
st := tester.NewPrefixedS3Storage(t) st := tester.NewPrefixedS3Storage(t)
c := NewConn(st) c := NewConn(st)
require.NoError(t, c.Initialize(ctx)) err := c.Initialize(ctx)
require.NoError(t, err, clues.ToCore(err))
defer func() { defer func() {
require.NoError(t, c.Close(ctx)) err := c.Close(ctx)
require.NoError(t, err, clues.ToCore(err))
}() }()
ms, err := NewModelStore(c) ms, err := NewModelStore(c)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return c, ms return c, ms
} }
@ -815,14 +824,16 @@ func reconnectToModelStore(
ctx context.Context, //revive:disable-line:context-as-argument ctx context.Context, //revive:disable-line:context-as-argument
c *conn, c *conn,
) *ModelStore { ) *ModelStore {
require.NoError(t, c.Connect(ctx)) err := c.Connect(ctx)
require.NoError(t, err, clues.ToCore(err))
defer func() { defer func() {
assert.NoError(t, c.Close(ctx)) err := c.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}() }()
ms, err := NewModelStore(c) ms, err := NewModelStore(c)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return ms return ms
} }
@ -837,11 +848,13 @@ func (suite *ModelStoreRegressionSuite) TestMultipleConfigs() {
backupModel := backup.Backup{ backupModel := backup.Backup{
SnapshotID: "snapshotID", SnapshotID: "snapshotID",
} }
conn1, ms1 := openConnAndModelStore(t, ctx) conn1, ms1 := openConnAndModelStore(t, ctx)
require.NoError(t, ms1.Put(ctx, model.BackupSchema, &backupModel)) err := ms1.Put(ctx, model.BackupSchema, &backupModel)
require.NoError(t, ms1.Close(ctx)) require.NoError(t, err, clues.ToCore(err))
err = ms1.Close(ctx)
require.NoError(t, err, clues.ToCore(err))
start := make(chan struct{}) start := make(chan struct{})
ready := sync.WaitGroup{} ready := sync.WaitGroup{}
@ -871,22 +884,24 @@ func (suite *ModelStoreRegressionSuite) TestMultipleConfigs() {
ready.Wait() ready.Wait()
defer func() { defer func() {
assert.NoError(t, ms2.Close(ctx)) err := ms2.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}() }()
defer func() { defer func() {
assert.NoError(t, ms1.Close(ctx)) err := ms1.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}() }()
// New instance should not have model we added. // New instance should not have model we added.
gotBackup := backup.Backup{} gotBackup := backup.Backup{}
err := ms2.GetWithModelStoreID( err = ms2.GetWithModelStoreID(
ctx, ctx,
model.BackupSchema, model.BackupSchema,
backupModel.ModelStoreID, backupModel.ModelStoreID,
&gotBackup, &gotBackup,
) )
assert.Error(t, err) assert.Error(t, err, clues.ToCore(err))
// Old instance should still be able to access added model. // Old instance should still be able to access added model.
gotBackup = backup.Backup{} gotBackup = backup.Backup{}
@ -896,5 +911,5 @@ func (suite *ModelStoreRegressionSuite) TestMultipleConfigs() {
backupModel.ModelStoreID, backupModel.ModelStoreID,
&gotBackup, &gotBackup,
) )
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
} }
@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
) )
@ -22,14 +23,12 @@ func TestPathEncoderSuite(t *testing.T) {
func (suite *PathEncoderSuite) TestEncodeDecode() { func (suite *PathEncoderSuite) TestEncodeDecode() {
t := suite.T() t := suite.T()
elements := []string{"these", "are", "some", "path", "elements"} elements := []string{"these", "are", "some", "path", "elements"}
encoded := encodeElements(elements...) encoded := encodeElements(elements...)
decoded := make([]string, 0, len(elements)) decoded := make([]string, 0, len(elements))
for _, e := range encoded { for _, e := range encoded {
dec, err := decodeElement(e) dec, err := decodeElement(e)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
decoded = append(decoded, dec) decoded = append(decoded, dec)
} }
@ -70,7 +69,6 @@ func (suite *PathEncoderSuite) TestEncodeAsPathDecode() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
encoded := encodeAsPath(test.elements...) encoded := encodeAsPath(test.elements...)
// Sanity check, first and last character should not be '/'. // Sanity check, first and last character should not be '/'.
@ -80,7 +78,7 @@ func (suite *PathEncoderSuite) TestEncodeAsPathDecode() {
for _, e := range strings.Split(encoded, "/") { for _, e := range strings.Split(encoded, "/") {
dec, err := decodeElement(e) dec, err := decodeElement(e)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
decoded = append(decoded, dec) decoded = append(decoded, dec)
} }
@ -97,7 +95,7 @@ func FuzzEncodeDecodeSingleString(f *testing.F) {
assert.False(t, strings.ContainsRune(encoded[0], '/')) assert.False(t, strings.ContainsRune(encoded[0], '/'))
decoded, err := decodeElement(encoded[0]) decoded, err := decodeElement(encoded[0])
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, in, decoded) assert.Equal(t, in, decoded)
}) })
} }
@ -18,6 +18,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -28,7 +29,7 @@ import (
func makePath(t *testing.T, elements []string, isItem bool) path.Path { func makePath(t *testing.T, elements []string, isItem bool) path.Path {
p, err := path.FromDataLayerPath(stdpath.Join(elements...), isItem) p, err := path.FromDataLayerPath(stdpath.Join(elements...), isItem)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return p return p
} }
@ -111,7 +112,7 @@ func expectFileData(
} }
r, err := f.GetReader(ctx) r, err := f.GetReader(ctx)
if !assert.NoErrorf(t, err, "getting reader for file: %s", name) { if !assert.NoError(t, err, "getting reader for file:", name, clues.ToCore(err)) {
return return
} }
@ -122,11 +123,11 @@ func expectFileData(
} }
got, err := io.ReadAll(r) got, err := io.ReadAll(r)
if !assert.NoErrorf(t, err, "reading data in file: %s", name) { if !assert.NoError(t, err, "reading data in file", name, clues.ToCore(err)) {
return return
} }
assert.Equalf(t, expected, got, "data in file: %s", name) assert.Equal(t, expected, got, "data in file", name, clues.ToCore(err))
} }
func expectTree( func expectTree(
@ -203,7 +204,7 @@ func getDirEntriesForEntry(
require.True(t, ok, "entry is not a directory") require.True(t, ok, "entry is not a directory")
entries, err := fs.GetAllEntries(ctx, d) entries, err := fs.GetAllEntries(ctx, d)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return entries return entries
} }
@ -277,7 +278,7 @@ func (suite *VersionReadersUnitSuite) TestWriteAndRead() {
defer reversible.Close() defer reversible.Close()
allData, err := io.ReadAll(reversible) allData, err := io.ReadAll(reversible)
test.check(t, err) test.check(t, err, clues.ToCore(err))
if err != nil { if err != nil {
return return
@ -303,7 +304,7 @@ func readAllInParts(
break break
} }
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
read += n read += n
res = append(res, tmp[:n]...) res = append(res, tmp[:n]...)
@ -359,7 +360,7 @@ func (suite *CorsoProgressUnitSuite) SetupSuite() {
path.EmailCategory, path.EmailCategory,
true, true,
testInboxDir, "testFile") testInboxDir, "testFile")
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
suite.targetFilePath = p suite.targetFilePath = p
suite.targetFileName = suite.targetFilePath.ToBuilder().Dir().String() suite.targetFileName = suite.targetFilePath.ToBuilder().Dir().String()
@ -474,7 +475,6 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFile() {
assert.Empty(t, cp.pending) assert.Empty(t, cp.pending)
entries := bd.Details().Entries entries := bd.Details().Entries
assert.Len(t, entries, test.expectedNumEntries) assert.Len(t, entries, test.expectedNumEntries)
for _, entry := range entries { for _, entry := range entries {
@ -516,7 +516,7 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFileCachedNoPrevPathErrors() {
assert.Empty(t, cp.pending) assert.Empty(t, cp.pending)
assert.Empty(t, bd.Details().Entries) assert.Empty(t, bd.Details().Entries)
assert.Error(t, cp.errs.Failure()) assert.Error(t, cp.errs.Failure(), clues.ToCore(cp.errs.Failure()))
} }
func (suite *CorsoProgressUnitSuite) TestFinishedFileBuildsHierarchyNewItem() { func (suite *CorsoProgressUnitSuite) TestFinishedFileBuildsHierarchyNewItem() {
@ -722,12 +722,12 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree() {
// - Inbox // - Inbox
// - 42 separate files // - 42 separate files
dirTree, err := inflateDirTree(ctx, nil, nil, collections, nil, progress) dirTree, err := inflateDirTree(ctx, nil, nil, collections, nil, progress)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, encodeAsPath(testTenant), dirTree.Name()) assert.Equal(t, encodeAsPath(testTenant), dirTree.Name())
entries, err := fs.GetAllEntries(ctx, dirTree) entries, err := fs.GetAllEntries(ctx, dirTree)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
expectDirs(t, entries, encodeElements(service), true) expectDirs(t, entries, encodeElements(service), true)
@ -818,12 +818,12 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_MixedDirectory()
} }
dirTree, err := inflateDirTree(ctx, nil, nil, test.layout, nil, progress) dirTree, err := inflateDirTree(ctx, nil, nil, test.layout, nil, progress)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, encodeAsPath(testTenant), dirTree.Name()) assert.Equal(t, encodeAsPath(testTenant), dirTree.Name())
entries, err := fs.GetAllEntries(ctx, dirTree) entries, err := fs.GetAllEntries(ctx, dirTree)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
expectDirs(t, entries, encodeElements(service), true) expectDirs(t, entries, encodeElements(service), true)
@ -919,7 +919,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_Fails() {
t := suite.T() t := suite.T()
_, err := inflateDirTree(ctx, nil, nil, test.layout, nil, nil) _, err := inflateDirTree(ctx, nil, nil, test.layout, nil, nil)
assert.Error(t, err) assert.Error(t, err, clues.ToCore(err))
}) })
} }
} }
@ -1031,7 +1031,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeErrors() {
} }
_, err := inflateDirTree(ctx, nil, nil, cols, nil, progress) _, err := inflateDirTree(ctx, nil, nil, cols, nil, progress)
require.Error(t, err) require.Error(t, err, clues.ToCore(err))
}) })
} }
} }
@ -1315,7 +1315,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
nil, nil,
progress, progress,
) )
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
expectTree(t, ctx, test.expected, dirTree) expectTree(t, ctx, test.expected, dirTree)
}) })
@ -2093,7 +2093,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
test.inputCollections(t), test.inputCollections(t),
test.inputExcludes, test.inputExcludes,
progress) progress)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
expectTree(t, ctx, test.expected, dirTree) expectTree(t, ctx, test.expected, dirTree)
}) })
@ -2256,7 +2256,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSkipsDeletedSubtre
collections, collections,
nil, nil,
progress) progress)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
expectTree(t, ctx, expected, dirTree) expectTree(t, ctx, expected, dirTree)
} }
@ -2360,7 +2360,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_HandleEmptyBase()
collections, collections,
nil, nil,
progress) progress)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
expectTree(t, ctx, expected, dirTree) expectTree(t, ctx, expected, dirTree)
} }
@ -2611,7 +2611,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsCorrectSubt
nil, nil,
progress, progress,
) )
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
expectTree(t, ctx, expected, dirTree) expectTree(t, ctx, expected, dirTree)
} }
@ -16,6 +16,7 @@ import (
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/connector/onedrive" "github.com/alcionai/corso/src/internal/connector/onedrive"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
@ -70,17 +71,17 @@ func testForFiles(
count++ count++
fullPath, err := c.FullPath().Append(s.UUID(), true) fullPath, err := c.FullPath().Append(s.UUID(), true)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
expected, ok := expected[fullPath.String()] expected, ok := expected[fullPath.String()]
require.True(t, ok, "unexpected file with path %q", fullPath) require.True(t, ok, "unexpected file with path %q", fullPath)
buf, err := io.ReadAll(s.ToReader()) buf, err := io.ReadAll(s.ToReader())
require.NoError(t, err, "reading collection item: %s", fullPath) require.NoError(t, err, "reading collection item", fullPath, clues.ToCore(err))
assert.Equal(t, expected, buf, "comparing collection item", fullPath)
assert.Equal(t, expected, buf, "comparing collection item: %s", fullPath)
require.Implements(t, (*data.StreamSize)(nil), s) require.Implements(t, (*data.StreamSize)(nil), s)
ss := s.(data.StreamSize) ss := s.(data.StreamSize)
assert.Equal(t, len(buf), int(ss.Size())) assert.Equal(t, len(buf), int(ss.Size()))
} }
@ -97,7 +98,7 @@ func checkSnapshotTags(
snapshotID string, snapshotID string,
) { ) {
man, err := snapshot.LoadSnapshot(ctx, rep, manifest.ID(snapshotID)) man, err := snapshot.LoadSnapshot(ctx, rep, manifest.ID(snapshotID))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, expectedTags, man.Tags) assert.Equal(t, expectedTags, man.Tags)
} }
@ -120,7 +121,7 @@ func (suite *KopiaUnitSuite) SetupSuite() {
), ),
false, false,
) )
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
suite.testPath = tmp suite.testPath = tmp
} }
@ -172,7 +173,7 @@ func (suite *KopiaIntegrationSuite) SetupSuite() {
path.EmailCategory, path.EmailCategory,
false, false,
testInboxDir) testInboxDir)
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
suite.storePath1 = tmp suite.storePath1 = tmp
suite.locPath1 = tmp suite.locPath1 = tmp
@ -184,7 +185,7 @@ func (suite *KopiaIntegrationSuite) SetupSuite() {
path.EmailCategory, path.EmailCategory,
false, false,
testArchiveDir) testArchiveDir)
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
suite.storePath2 = tmp suite.storePath2 = tmp
suite.locPath2 = tmp suite.locPath2 = tmp
@ -195,14 +196,16 @@ func (suite *KopiaIntegrationSuite) SetupTest() {
suite.ctx, suite.flush = tester.NewContext() suite.ctx, suite.flush = tester.NewContext()
c, err := openKopiaRepo(t, suite.ctx) c, err := openKopiaRepo(t, suite.ctx)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
suite.w = &Wrapper{c} suite.w = &Wrapper{c}
} }
func (suite *KopiaIntegrationSuite) TearDownTest() { func (suite *KopiaIntegrationSuite) TearDownTest() {
defer suite.flush() defer suite.flush()
assert.NoError(suite.T(), suite.w.Close(suite.ctx))
err := suite.w.Close(suite.ctx)
assert.NoError(suite.T(), err, clues.ToCore(err))
} }
func (suite *KopiaIntegrationSuite) TestBackupCollections() { func (suite *KopiaIntegrationSuite) TestBackupCollections() {
@ -282,7 +285,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
tags, tags,
true, true,
fault.New(true)) fault.New(true))
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.Equal(t, test.expectedUploadedFiles, stats.TotalFileCount, "total files") assert.Equal(t, test.expectedUploadedFiles, stats.TotalFileCount, "total files")
assert.Equal(t, test.expectedUploadedFiles, stats.UncachedFileCount, "uncached files") assert.Equal(t, test.expectedUploadedFiles, stats.UncachedFileCount, "uncached files")
@ -317,7 +320,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
suite.w.c, suite.w.c,
manifest.ID(stats.SnapshotID), manifest.ID(stats.SnapshotID),
) )
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
prevSnaps = append(prevSnaps, IncrementalBase{ prevSnaps = append(prevSnaps, IncrementalBase{
Manifest: snap, Manifest: snap,
@ -337,7 +340,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_NoDetailsForMeta() {
path.FilesCategory, path.FilesCategory,
false, false,
testInboxDir) testInboxDir)
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
storePath := tmp storePath := tmp
locPath := tmp locPath := tmp
@ -429,7 +432,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_NoDetailsForMeta() {
tags, tags,
true, true,
fault.New(true)) fault.New(true))
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.Equal(t, test.expectedUploadedFiles, stats.TotalFileCount, "total files") assert.Equal(t, test.expectedUploadedFiles, stats.TotalFileCount, "total files")
assert.Equal(t, test.expectedUploadedFiles, stats.UncachedFileCount, "uncached files") assert.Equal(t, test.expectedUploadedFiles, stats.UncachedFileCount, "uncached files")
@ -476,9 +479,8 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_NoDetailsForMeta() {
snap, err := snapshot.LoadSnapshot( snap, err := snapshot.LoadSnapshot(
suite.ctx, suite.ctx,
suite.w.c, suite.w.c,
manifest.ID(stats.SnapshotID), manifest.ID(stats.SnapshotID))
) require.NoError(t, err, clues.ToCore(err))
require.NoError(t, err)
prevSnaps = append(prevSnaps, IncrementalBase{ prevSnaps = append(prevSnaps, IncrementalBase{
Manifest: snap, Manifest: snap,
@ -491,15 +493,16 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_NoDetailsForMeta() {
} }
func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() { func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() {
t := suite.T()
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
k, err := openKopiaRepo(t, ctx) t := suite.T()
require.NoError(t, err)
require.NoError(t, k.Compression(ctx, "s2-default")) k, err := openKopiaRepo(t, ctx)
require.NoError(t, err, clues.ToCore(err))
err = k.Compression(ctx, "s2-default")
require.NoError(t, err, clues.ToCore(err))
w := &Wrapper{k} w := &Wrapper{k}
@ -518,10 +521,10 @@ func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() {
dc2 := mockconnector.NewMockExchangeCollection(suite.storePath2, suite.locPath2, 1) dc2 := mockconnector.NewMockExchangeCollection(suite.storePath2, suite.locPath2, 1)
fp1, err := suite.storePath1.Append(dc1.Names[0], true) fp1, err := suite.storePath1.Append(dc1.Names[0], true)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
fp2, err := suite.storePath2.Append(dc2.Names[0], true) fp2, err := suite.storePath2.Append(dc2.Names[0], true)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
stats, _, _, err := w.BackupCollections( stats, _, _, err := w.BackupCollections(
ctx, ctx,
@ -531,9 +534,10 @@ func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() {
tags, tags,
true, true,
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.NoError(t, k.Compression(ctx, "gzip")) err = k.Compression(ctx, "gzip")
require.NoError(t, err, clues.ToCore(err))
expected := map[string][]byte{ expected := map[string][]byte{
fp1.String(): dc1.Data[0], fp1.String(): dc1.Data[0],
@ -549,7 +553,7 @@ func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() {
}, },
nil, nil,
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, 2, len(result)) assert.Equal(t, 2, len(result))
testForFiles(t, ctx, expected, result) testForFiles(t, ctx, expected, result)
@ -649,8 +653,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
tags, tags,
true, true,
fault.New(true)) fault.New(true))
require.Error(t, err) require.Error(t, err, clues.ToCore(err))
assert.Equal(t, 0, stats.ErrorCount) assert.Equal(t, 0, stats.ErrorCount)
assert.Equal(t, 5, stats.TotalFileCount) assert.Equal(t, 5, stats.TotalFileCount)
assert.Equal(t, 6, stats.TotalDirectoryCount) assert.Equal(t, 6, stats.TotalDirectoryCount)
@ -660,7 +663,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
assert.Len(t, deets.Details().Entries, 5+6) assert.Len(t, deets.Details().Entries, 5+6)
failedPath, err := suite.storePath2.Append(testFileName4, true) failedPath, err := suite.storePath2.Append(testFileName4, true)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
ic := i64counter{} ic := i64counter{}
@ -673,7 +676,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
// Files that had an error shouldn't make a dir entry in kopia. If they do we // Files that had an error shouldn't make a dir entry in kopia. If they do we
// may run into kopia-assisted incrementals issues because only mod time and // may run into kopia-assisted incrementals issues because only mod time and
// not file size is checked for StreamingFiles. // not file size is checked for StreamingFiles.
assert.ErrorIs(t, err, data.ErrNotFound, "errored file is restorable") assert.ErrorIs(t, err, data.ErrNotFound, "errored file is restorable", clues.ToCore(err))
} }
type backedupFile struct { type backedupFile struct {
@ -712,7 +715,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollectionsHandlesNoCollections()
nil, nil,
true, true,
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, BackupStats{}, *s) assert.Equal(t, BackupStats{}, *s)
assert.Empty(t, d.Details().Entries) assert.Empty(t, d.Details().Entries)
@ -753,7 +756,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) SetupSuite() {
path.EmailCategory, path.EmailCategory,
false, false,
testInboxDir) testInboxDir)
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
suite.testPath1 = tmp suite.testPath1 = tmp
@ -764,7 +767,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) SetupSuite() {
path.EmailCategory, path.EmailCategory,
false, false,
testArchiveDir) testArchiveDir)
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
suite.testPath2 = tmp suite.testPath2 = tmp
@ -810,7 +813,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) SetupSuite() {
for _, item := range filesInfo { for _, item := range filesInfo {
pth, err := item.parentPath.Append(item.name, true) pth, err := item.parentPath.Append(item.name, true)
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
mapKey := item.parentPath.String() mapKey := item.parentPath.String()
f := &backedupFile{ f := &backedupFile{
@ -830,8 +833,9 @@ func (suite *KopiaSimpleRepoIntegrationSuite) SetupTest() {
expectedFiles := len(suite.filesByPath) expectedFiles := len(suite.filesByPath)
//nolint:forbidigo //nolint:forbidigo
suite.ctx, _ = logger.SeedLevel(context.Background(), logger.Development) suite.ctx, _ = logger.SeedLevel(context.Background(), logger.Development)
c, err := openKopiaRepo(t, suite.ctx) c, err := openKopiaRepo(t, suite.ctx)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
suite.w = &Wrapper{c} suite.w = &Wrapper{c}
@ -872,7 +876,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) SetupTest() {
tags, tags,
false, false,
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.Equal(t, stats.ErrorCount, 0) require.Equal(t, stats.ErrorCount, 0)
require.Equal(t, stats.TotalFileCount, expectedFiles) require.Equal(t, stats.TotalFileCount, expectedFiles)
require.Equal(t, stats.TotalDirectoryCount, expectedDirs) require.Equal(t, stats.TotalDirectoryCount, expectedDirs)
@ -885,7 +889,8 @@ func (suite *KopiaSimpleRepoIntegrationSuite) SetupTest() {
} }
func (suite *KopiaSimpleRepoIntegrationSuite) TearDownTest() { func (suite *KopiaSimpleRepoIntegrationSuite) TearDownTest() {
assert.NoError(suite.T(), suite.w.Close(suite.ctx)) err := suite.w.Close(suite.ctx)
assert.NoError(suite.T(), err, clues.ToCore(err))
logger.Flush(suite.ctx) logger.Flush(suite.ctx)
} }
@ -911,7 +916,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
path.EmailCategory, path.EmailCategory,
false, false,
"tmp") "tmp")
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
subtreePath := subtreePathTmp.ToBuilder().Dir() subtreePath := subtreePathTmp.ToBuilder().Dir()
@ -920,7 +925,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
[]Reason{reason}, []Reason{reason},
nil, nil,
) )
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
require.Len(suite.T(), manifests, 1) require.Len(suite.T(), manifests, 1)
require.Equal(suite.T(), suite.snapshotID, manifests[0].ID) require.Equal(suite.T(), suite.snapshotID, manifests[0].ID)
@ -1030,7 +1035,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
tags, tags,
true, true,
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, test.expectedCachedItems, stats.CachedFileCount) assert.Equal(t, test.expectedCachedItems, stats.CachedFileCount)
assert.Equal(t, test.expectedUncachedItems, stats.UncachedFileCount) assert.Equal(t, test.expectedUncachedItems, stats.UncachedFileCount)
@ -1050,7 +1055,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
}, },
&ic, &ic,
fault.New(true)) fault.New(true))
test.restoreCheck(t, err) test.restoreCheck(t, err, clues.ToCore(err))
}) })
} }
} }
@ -1151,7 +1156,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestRestoreMultipleItems() {
test.inputPaths, test.inputPaths,
&ic, &ic,
fault.New(true)) fault.New(true))
test.expectedErr(t, err) test.expectedErr(t, err, clues.ToCore(err))
if err != nil { if err != nil {
return return
@ -1166,7 +1171,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestRestoreMultipleItems() {
func (suite *KopiaSimpleRepoIntegrationSuite) TestRestoreMultipleItems_Errors() { func (suite *KopiaSimpleRepoIntegrationSuite) TestRestoreMultipleItems_Errors() {
itemPath, err := suite.testPath1.Append(testFileName, true) itemPath, err := suite.testPath1.Append(testFileName, true)
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
table := []struct { table := []struct {
name string name string
@ -1200,7 +1205,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestRestoreMultipleItems_Errors()
test.paths, test.paths,
nil, nil,
fault.New(true)) fault.New(true))
assert.Error(t, err) assert.Error(t, err, clues.ToCore(err))
assert.Empty(t, c) assert.Empty(t, c)
}) })
} }
@ -1209,7 +1214,8 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestRestoreMultipleItems_Errors()
func (suite *KopiaSimpleRepoIntegrationSuite) TestDeleteSnapshot() { func (suite *KopiaSimpleRepoIntegrationSuite) TestDeleteSnapshot() {
t := suite.T() t := suite.T()
assert.NoError(t, suite.w.DeleteSnapshot(suite.ctx, string(suite.snapshotID))) err := suite.w.DeleteSnapshot(suite.ctx, string(suite.snapshotID))
assert.NoError(t, err, clues.ToCore(err))
// assert the deletion worked // assert the deletion worked
itemPath := suite.files[suite.testPath1.String()][0].itemPath itemPath := suite.files[suite.testPath1.String()][0].itemPath
@ -1221,7 +1227,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestDeleteSnapshot() {
[]path.Path{itemPath}, []path.Path{itemPath},
&ic, &ic,
fault.New(true)) fault.New(true))
assert.Error(t, err, "snapshot should be deleted") assert.Error(t, err, "snapshot should be deleted", clues.ToCore(err))
assert.Empty(t, c) assert.Empty(t, c)
assert.Zero(t, ic.i) assert.Zero(t, ic.i)
} }
@ -1247,7 +1253,8 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestDeleteSnapshot_BadIDs() {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
test.expect(t, suite.w.DeleteSnapshot(suite.ctx, test.snapshotID)) err := suite.w.DeleteSnapshot(suite.ctx, test.snapshotID)
test.expect(t, err, clues.ToCore(err))
}) })
} }
} }
@ -14,6 +14,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
) )
@ -71,7 +72,7 @@ func (suite *ObserveProgressUnitSuite) TestItemProgress() {
break break
} }
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.Equal(t, 25, n) assert.Equal(t, 25, n)
i++ i++
} }
@ -16,6 +16,7 @@ import (
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector" "github.com/alcionai/corso/src/internal/connector"
@ -62,19 +63,19 @@ func prepNewTestBackupOp(
) (BackupOperation, account.Account, *kopia.Wrapper, *kopia.ModelStore, func()) { ) (BackupOperation, account.Account, *kopia.Wrapper, *kopia.ModelStore, func()) {
//revive:enable:context-as-argument //revive:enable:context-as-argument
acct := tester.NewM365Account(t) acct := tester.NewM365Account(t)
// need to initialize the repository before we can test connecting to it. // need to initialize the repository before we can test connecting to it.
st := tester.NewPrefixedS3Storage(t) st := tester.NewPrefixedS3Storage(t)
k := kopia.NewConn(st) k := kopia.NewConn(st)
require.NoError(t, k.Initialize(ctx))
err := k.Initialize(ctx)
require.NoError(t, err, clues.ToCore(err))
// kopiaRef comes with a count of 1 and Wrapper bumps it again so safe // kopiaRef comes with a count of 1 and Wrapper bumps it again so safe
// to close here. // to close here.
closer := func() { k.Close(ctx) } closer := func() { k.Close(ctx) }
kw, err := kopia.NewWrapper(k) kw, err := kopia.NewWrapper(k)
if !assert.NoError(t, err) { if !assert.NoError(t, err, clues.ToCore(err)) {
closer() closer()
t.FailNow() t.FailNow()
} }
@ -85,7 +86,7 @@ func prepNewTestBackupOp(
} }
ms, err := kopia.NewModelStore(k) ms, err := kopia.NewModelStore(k)
if !assert.NoError(t, err) { if !assert.NoError(t, err, clues.ToCore(err)) {
closer() closer()
t.FailNow() t.FailNow()
} }
@ -127,7 +128,7 @@ func newTestBackupOp(
opts.ToggleFeatures = featureToggles opts.ToggleFeatures = featureToggles
bo, err := NewBackupOperation(ctx, opts, kw, sw, acct, sel, bus) bo, err := NewBackupOperation(ctx, opts, kw, sw, acct, sel, bus)
if !assert.NoError(t, err) { if !assert.NoError(t, err, clues.ToCore(err)) {
closer() closer()
t.FailNow() t.FailNow()
} }
@ -143,7 +144,8 @@ func runAndCheckBackup(
mb *evmock.Bus, mb *evmock.Bus,
) { ) {
//revive:enable:context-as-argument //revive:enable:context-as-argument
require.NoError(t, bo.Run(ctx)) err := bo.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, bo.Results, "the backup had non-zero results") require.NotEmpty(t, bo.Results, "the backup had non-zero results")
require.NotEmpty(t, bo.Results.BackupID, "the backup generated an ID") require.NotEmpty(t, bo.Results.BackupID, "the backup generated an ID")
require.Equalf( require.Equalf(
@ -158,7 +160,7 @@ func runAndCheckBackup(
assert.Less(t, int64(0), bo.Results.BytesRead, "bytes read") assert.Less(t, int64(0), bo.Results.BytesRead, "bytes read")
assert.Less(t, int64(0), bo.Results.BytesUploaded, "bytes uploaded") assert.Less(t, int64(0), bo.Results.BytesUploaded, "bytes uploaded")
assert.Equal(t, 1, bo.Results.ResourceOwners, "count of resource owners") assert.Equal(t, 1, bo.Results.ResourceOwners, "count of resource owners")
assert.NoError(t, bo.Errors.Failure(), "incremental non-recoverable error") assert.NoError(t, bo.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(bo.Errors.Failure()))
assert.Empty(t, bo.Errors.Recovered(), "incremental recoverable/iteration errors") assert.Empty(t, bo.Errors.Recovered(), "incremental recoverable/iteration errors")
assert.Equal(t, 1, mb.TimesCalled[events.BackupStart], "backup-start events") assert.Equal(t, 1, mb.TimesCalled[events.BackupStart], "backup-start events")
assert.Equal(t, 1, mb.TimesCalled[events.BackupEnd], "backup-end events") assert.Equal(t, 1, mb.TimesCalled[events.BackupEnd], "backup-end events")
@ -193,7 +195,7 @@ func checkBackupIsInManifests(
) )
mans, err := kw.FetchPrevSnapshotManifests(ctx, reasons, tags) mans, err := kw.FetchPrevSnapshotManifests(ctx, reasons, tags)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
for _, man := range mans { for _, man := range mans {
bID, ok := man.GetTag(kopia.TagBackupID) bID, ok := man.GetTag(kopia.TagBackupID)
@ -229,7 +231,7 @@ func checkMetadataFilesExist(
bup := &backup.Backup{} bup := &backup.Backup{}
err := ms.Get(ctx, model.BackupSchema, backupID, bup) err := ms.Get(ctx, model.BackupSchema, backupID, bup)
if !assert.NoError(t, err) { if !assert.NoError(t, err, clues.ToCore(err)) {
return return
} }
@ -240,12 +242,12 @@ func checkMetadataFilesExist(
p, err := path.Builder{}. p, err := path.Builder{}.
Append(fName). Append(fName).
ToServiceCategoryMetadataPath(tenant, user, service, category, true) ToServiceCategoryMetadataPath(tenant, user, service, category, true)
if !assert.NoError(t, err, "bad metadata path") { if !assert.NoError(t, err, "bad metadata path", clues.ToCore(err)) {
continue continue
} }
dir, err := p.Dir() dir, err := p.Dir()
if !assert.NoError(t, err, "parent path") { if !assert.NoError(t, err, "parent path", clues.ToCore(err)) {
continue continue
} }
@ -254,7 +256,7 @@ func checkMetadataFilesExist(
} }
cols, err := kw.RestoreMultipleItems(ctx, bup.SnapshotID, paths, nil, fault.New(true)) cols, err := kw.RestoreMultipleItems(ctx, bup.SnapshotID, paths, nil, fault.New(true))
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
for _, col := range cols { for _, col := range cols {
itemNames := []string{} itemNames := []string{}
@ -357,7 +359,7 @@ func generateContainerOfItems(
control.Options{RestorePermissions: true}, control.Options{RestorePermissions: true},
dataColls, dataColls,
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return deets return deets
} }
@ -449,7 +451,7 @@ func toDataLayerPath(
err = errors.Errorf("unknown service %s", service.String()) err = errors.Errorf("unknown service %s", service.String())
} }
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return p return p
} }
@ -509,7 +511,7 @@ func (suite *BackupOpIntegrationSuite) TestNewBackupOperation() {
test.acct, test.acct,
selectors.Selector{DiscreteOwner: "test"}, selectors.Selector{DiscreteOwner: "test"},
evmock.NewBus()) evmock.NewBus())
test.errCheck(suite.T(), err) test.errCheck(suite.T(), err, clues.ToCore(err))
}) })
} }
} }
@ -585,7 +587,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchange() {
defer closer() defer closer()
m365, err := acct.M365Config() m365, err := acct.M365Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
// run the tests // run the tests
runAndCheckBackup(t, ctx, &bo, mb) runAndCheckBackup(t, ctx, &bo, mb)
@ -634,7 +636,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchange() {
assert.Greater(t, bo.Results.BytesRead, incBO.Results.BytesRead, "incremental bytes read") assert.Greater(t, bo.Results.BytesRead, incBO.Results.BytesRead, "incremental bytes read")
assert.Greater(t, bo.Results.BytesUploaded, incBO.Results.BytesUploaded, "incremental bytes uploaded") assert.Greater(t, bo.Results.BytesUploaded, incBO.Results.BytesUploaded, "incremental bytes uploaded")
assert.Equal(t, bo.Results.ResourceOwners, incBO.Results.ResourceOwners, "incremental backup resource owner") assert.Equal(t, bo.Results.ResourceOwners, incBO.Results.ResourceOwners, "incremental backup resource owner")
assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error") assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(bo.Errors.Failure()))
assert.Empty(t, incBO.Errors.Recovered(), "count incremental recoverable/iteration errors") assert.Empty(t, incBO.Errors.Recovered(), "count incremental recoverable/iteration errors")
assert.Equal(t, 1, incMB.TimesCalled[events.BackupStart], "incremental backup-start events") assert.Equal(t, 1, incMB.TimesCalled[events.BackupStart], "incremental backup-start events")
assert.Equal(t, 1, incMB.TimesCalled[events.BackupEnd], "incremental backup-end events") assert.Equal(t, 1, incMB.TimesCalled[events.BackupEnd], "incremental backup-end events")
@ -673,7 +675,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
) )
m365, err := acct.M365Config() m365, err := acct.M365Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
gc, err := connector.NewGraphConnector( gc, err := connector.NewGraphConnector(
ctx, ctx,
@ -681,10 +683,10 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
acct, acct,
connector.Users, connector.Users,
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
ac, err := api.NewClient(m365) ac, err := api.NewClient(m365)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
// generate 3 new folders with two items each. // generate 3 new folders with two items each.
// Only the first two folders will be part of the initial backup and // Only the first two folders will be part of the initial backup and
@ -778,11 +780,11 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
Credentials: m365, Credentials: m365,
} }
cr, err := exchange.PopulateExchangeContainerResolver(ctx, qp, fault.New(true)) cr, err := exchange.PopulateExchangeContainerResolver(ctx, qp, fault.New(true))
require.NoError(t, err, "populating %s container resolver", category) require.NoError(t, err, "populating container resolver", category, clues.ToCore(err))
for destName, dest := range gen.dests { for destName, dest := range gen.dests {
p, err := path.FromDataLayerPath(dest.deets.Entries[0].RepoRef, true) p, err := path.FromDataLayerPath(dest.deets.Entries[0].RepoRef, true)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
id, ok := cr.PathInCache(p.Folder(false)) id, ok := cr.PathInCache(p.Folder(false))
require.True(t, ok, "dir %s found in %s cache", p.Folder(false), category) require.True(t, ok, "dir %s found in %s cache", p.Folder(false), category)
@ -842,7 +844,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
MailFoldersById(fromContainer). MailFoldersById(fromContainer).
Move(). Move().
Post(ctx, body, nil) Post(ctx, body, nil)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
}, },
itemsRead: 0, // zero because we don't count container reads itemsRead: 0, // zero because we don't count container reads
itemsWritten: 2, itemsWritten: 2,
@ -855,20 +857,14 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
switch category { switch category {
case path.EmailCategory: case path.EmailCategory:
require.NoError( err := ac.Mail().DeleteContainer(ctx, suite.user, containerID)
t, require.NoError(t, err, "deleting an email folder", clues.ToCore(err))
ac.Mail().DeleteContainer(ctx, suite.user, containerID),
"deleting an email folder")
case path.ContactsCategory: case path.ContactsCategory:
require.NoError( err := ac.Contacts().DeleteContainer(ctx, suite.user, containerID)
t, require.NoError(t, err, "deleting a contacts folder", clues.ToCore(err))
ac.Contacts().DeleteContainer(ctx, suite.user, containerID),
"deleting a contacts folder")
case path.EventsCategory: case path.EventsCategory:
require.NoError( err := ac.Events().DeleteContainer(ctx, suite.user, containerID)
t, require.NoError(t, err, "deleting a calendar", clues.ToCore(err))
ac.Events().DeleteContainer(ctx, suite.user, containerID),
"deleting a calendar")
} }
} }
}, },
@ -898,13 +894,13 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
Credentials: m365, Credentials: m365,
} }
cr, err := exchange.PopulateExchangeContainerResolver(ctx, qp, fault.New(true)) cr, err := exchange.PopulateExchangeContainerResolver(ctx, qp, fault.New(true))
require.NoError(t, err, "populating %s container resolver", category) require.NoError(t, err, "populating container resolver", category, clues.ToCore(err))
p, err := path.FromDataLayerPath(deets.Entries[0].RepoRef, true) p, err := path.FromDataLayerPath(deets.Entries[0].RepoRef, true)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
id, ok := cr.PathInCache(p.Folder(false)) id, ok := cr.PathInCache(p.Folder(false))
require.True(t, ok, "dir %s found in %s cache", p.Folder(false), category) require.Truef(t, ok, "dir %s found in %s cache", p.Folder(false), category)
dataset[category].dests[container3] = contDeets{id, deets} dataset[category].dests[container3] = contDeets{id, deets}
} }
@ -932,31 +928,31 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
cmf := cli.MailFoldersById(containerID) cmf := cli.MailFoldersById(containerID)
body, err := cmf.Get(ctx, nil) body, err := cmf.Get(ctx, nil)
require.NoError(t, err, "getting mail folder") require.NoError(t, err, "getting mail folder", clues.ToCore(err))
body.SetDisplayName(&containerRename) body.SetDisplayName(&containerRename)
_, err = cmf.Patch(ctx, body, nil) _, err = cmf.Patch(ctx, body, nil)
require.NoError(t, err, "updating mail folder name") require.NoError(t, err, "updating mail folder name", clues.ToCore(err))
case path.ContactsCategory: case path.ContactsCategory:
ccf := cli.ContactFoldersById(containerID) ccf := cli.ContactFoldersById(containerID)
body, err := ccf.Get(ctx, nil) body, err := ccf.Get(ctx, nil)
require.NoError(t, err, "getting contact folder") require.NoError(t, err, "getting contact folder", clues.ToCore(err))
body.SetDisplayName(&containerRename) body.SetDisplayName(&containerRename)
_, err = ccf.Patch(ctx, body, nil) _, err = ccf.Patch(ctx, body, nil)
require.NoError(t, err, "updating contact folder name") require.NoError(t, err, "updating contact folder name", clues.ToCore(err))
case path.EventsCategory: case path.EventsCategory:
cbi := cli.CalendarsById(containerID) cbi := cli.CalendarsById(containerID)
body, err := cbi.Get(ctx, nil) body, err := cbi.Get(ctx, nil)
require.NoError(t, err, "getting calendar") require.NoError(t, err, "getting calendar", clues.ToCore(err))
body.SetName(&containerRename) body.SetName(&containerRename)
_, err = cbi.Patch(ctx, body, nil) _, err = cbi.Patch(ctx, body, nil)
require.NoError(t, err, "updating calendar name") require.NoError(t, err, "updating calendar name", clues.ToCore(err))
} }
} }
}, },
@ -974,26 +970,26 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
case path.EmailCategory: case path.EmailCategory:
_, itemData := generateItemData(t, category, suite.user, mailDBF) _, itemData := generateItemData(t, category, suite.user, mailDBF)
body, err := support.CreateMessageFromBytes(itemData) body, err := support.CreateMessageFromBytes(itemData)
require.NoError(t, err, "transforming mail bytes to messageable") require.NoError(t, err, "transforming mail bytes to messageable", clues.ToCore(err))
_, err = cli.MailFoldersById(containerID).Messages().Post(ctx, body, nil) _, err = cli.MailFoldersById(containerID).Messages().Post(ctx, body, nil)
require.NoError(t, err, "posting email item") require.NoError(t, err, "posting email item", clues.ToCore(err))
case path.ContactsCategory: case path.ContactsCategory:
_, itemData := generateItemData(t, category, suite.user, contactDBF) _, itemData := generateItemData(t, category, suite.user, contactDBF)
body, err := support.CreateContactFromBytes(itemData) body, err := support.CreateContactFromBytes(itemData)
require.NoError(t, err, "transforming contact bytes to contactable") require.NoError(t, err, "transforming contact bytes to contactable", clues.ToCore(err))
_, err = cli.ContactFoldersById(containerID).Contacts().Post(ctx, body, nil) _, err = cli.ContactFoldersById(containerID).Contacts().Post(ctx, body, nil)
require.NoError(t, err, "posting contact item") require.NoError(t, err, "posting contact item", clues.ToCore(err))
case path.EventsCategory: case path.EventsCategory:
_, itemData := generateItemData(t, category, suite.user, eventDBF) _, itemData := generateItemData(t, category, suite.user, eventDBF)
body, err := support.CreateEventFromBytes(itemData) body, err := support.CreateEventFromBytes(itemData)
require.NoError(t, err, "transforming event bytes to eventable") require.NoError(t, err, "transforming event bytes to eventable", clues.ToCore(err))
_, err = cli.CalendarsById(containerID).Events().Post(ctx, body, nil) _, err = cli.CalendarsById(containerID).Events().Post(ctx, body, nil)
require.NoError(t, err, "posting events item") require.NoError(t, err, "posting events item", clues.ToCore(err))
} }
} }
}, },
@ -1010,27 +1006,27 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
switch category { switch category {
case path.EmailCategory: case path.EmailCategory:
ids, _, _, err := ac.Mail().GetAddedAndRemovedItemIDs(ctx, suite.user, containerID, "") ids, _, _, err := ac.Mail().GetAddedAndRemovedItemIDs(ctx, suite.user, containerID, "")
require.NoError(t, err, "getting message ids") require.NoError(t, err, "getting message ids", clues.ToCore(err))
require.NotEmpty(t, ids, "message ids in folder") require.NotEmpty(t, ids, "message ids in folder")
err = cli.MessagesById(ids[0]).Delete(ctx, nil) err = cli.MessagesById(ids[0]).Delete(ctx, nil)
require.NoError(t, err, "deleting email item") require.NoError(t, err, "deleting email item", clues.ToCore(err))
case path.ContactsCategory: case path.ContactsCategory:
ids, _, _, err := ac.Contacts().GetAddedAndRemovedItemIDs(ctx, suite.user, containerID, "") ids, _, _, err := ac.Contacts().GetAddedAndRemovedItemIDs(ctx, suite.user, containerID, "")
require.NoError(t, err, "getting contact ids") require.NoError(t, err, "getting contact ids", clues.ToCore(err))
require.NotEmpty(t, ids, "contact ids in folder") require.NotEmpty(t, ids, "contact ids in folder")
err = cli.ContactsById(ids[0]).Delete(ctx, nil) err = cli.ContactsById(ids[0]).Delete(ctx, nil)
require.NoError(t, err, "deleting contact item") require.NoError(t, err, "deleting contact item", clues.ToCore(err))
case path.EventsCategory: case path.EventsCategory:
ids, _, _, err := ac.Events().GetAddedAndRemovedItemIDs(ctx, suite.user, containerID, "") ids, _, _, err := ac.Events().GetAddedAndRemovedItemIDs(ctx, suite.user, containerID, "")
require.NoError(t, err, "getting event ids") require.NoError(t, err, "getting event ids", clues.ToCore(err))
require.NotEmpty(t, ids, "event ids in folder") require.NotEmpty(t, ids, "event ids in folder")
err = cli.CalendarsById(ids[0]).Delete(ctx, nil) err = cli.CalendarsById(ids[0]).Delete(ctx, nil)
require.NoError(t, err, "deleting calendar") require.NoError(t, err, "deleting calendar", clues.ToCore(err))
} }
} }
}, },
@ -1047,7 +1043,9 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
) )
test.updateUserData(t) test.updateUserData(t)
require.NoError(t, incBO.Run(ctx))
err := incBO.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
checkBackupIsInManifests(t, ctx, kw, &incBO, sel.Selector, suite.user, maps.Keys(categories)...) checkBackupIsInManifests(t, ctx, kw, &incBO, sel.Selector, suite.user, maps.Keys(categories)...)
checkMetadataFilesExist( checkMetadataFilesExist(
t, t,
@ -1058,14 +1056,13 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
m365.AzureTenantID, m365.AzureTenantID,
suite.user, suite.user,
path.ExchangeService, path.ExchangeService,
categories, categories)
)
// do some additional checks to ensure the incremental dealt with fewer items. // do some additional checks to ensure the incremental dealt with fewer items.
// +4 on read/writes to account for metadata: 1 delta and 1 path for each type. // +4 on read/writes to account for metadata: 1 delta and 1 path for each type.
assert.Equal(t, test.itemsWritten+4, incBO.Results.ItemsWritten, "incremental items written") assert.Equal(t, test.itemsWritten+4, incBO.Results.ItemsWritten, "incremental items written")
assert.Equal(t, test.itemsRead+4, incBO.Results.ItemsRead, "incremental items read") assert.Equal(t, test.itemsRead+4, incBO.Results.ItemsRead, "incremental items read")
assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error") assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(incBO.Errors.Failure()))
assert.Empty(t, incBO.Errors.Recovered(), "incremental recoverable/iteration errors") assert.Empty(t, incBO.Errors.Recovered(), "incremental recoverable/iteration errors")
assert.Equal(t, 1, incMB.TimesCalled[events.BackupStart], "incremental backup-start events") assert.Equal(t, 1, incMB.TimesCalled[events.BackupStart], "incremental backup-start events")
assert.Equal(t, 1, incMB.TimesCalled[events.BackupEnd], "incremental backup-end events") assert.Equal(t, 1, incMB.TimesCalled[events.BackupEnd], "incremental backup-end events")
@ -1115,7 +1112,7 @@ func mustGetDefaultDriveID(
With("user", userID) With("user", userID)
} }
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
id := ptr.Val(d.GetId()) id := ptr.Val(d.GetId())
require.NotEmpty(t, id, "drive ID not set") require.NotEmpty(t, id, "drive ID not set")
@ -1154,7 +1151,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
) )
m365, err := acct.M365Config() m365, err := acct.M365Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
gc, err := connector.NewGraphConnector( gc, err := connector.NewGraphConnector(
ctx, ctx,
@ -1162,7 +1159,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
acct, acct,
connector.Users, connector.Users,
fault.New(true)) fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
driveID := mustGetDefaultDriveID(t, ctx, gc.Service, suite.user) driveID := mustGetDefaultDriveID(t, ctx, gc.Service, suite.user)
@ -1204,7 +1201,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
destName) destName)
resp, err := drive.NewItemsDriveItemItemRequestBuilder(itemURL, gc.Service.Adapter()). resp, err := drive.NewItemsDriveItemItemRequestBuilder(itemURL, gc.Service.Adapter()).
Get(ctx, nil) Get(ctx, nil)
require.NoErrorf(t, err, "getting drive folder ID", "folder name: %s", destName) require.NoError(t, err, "getting drive folder ID", "folder name", destName, clues.ToCore(err))
containerIDs[destName] = ptr.Val(resp.GetId()) containerIDs[destName] = ptr.Val(resp.GetId())
} }
@ -1256,7 +1253,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
driveID, driveID,
targetContainer, targetContainer,
driveItem) driveItem)
require.NoError(t, err, "creating new file") require.NoError(t, err, "creating new file", clues.ToCore(err))
}, },
itemsRead: 1, // .data file for newitem itemsRead: 1, // .data file for newitem
itemsWritten: 3, // .data and .meta for newitem, .dirmeta for parent itemsWritten: 3, // .data and .meta for newitem, .dirmeta for parent
@ -1292,7 +1289,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
DrivesById(driveID). DrivesById(driveID).
ItemsById(ptr.Val(newFile.GetId())). ItemsById(ptr.Val(newFile.GetId())).
Patch(ctx, driveItem, nil) Patch(ctx, driveItem, nil)
require.NoError(t, err, "renaming file") require.NoError(t, err, "renaming file", clues.ToCore(err))
}, },
itemsRead: 1, // .data file for newitem itemsRead: 1, // .data file for newitem
itemsWritten: 3, // .data and .meta for newitem, .dirmeta for parent itemsWritten: 3, // .data and .meta for newitem, .dirmeta for parent
@ -1313,7 +1310,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
DrivesById(driveID). DrivesById(driveID).
ItemsById(ptr.Val(newFile.GetId())). ItemsById(ptr.Val(newFile.GetId())).
Patch(ctx, driveItem, nil) Patch(ctx, driveItem, nil)
require.NoError(t, err, "moving file between folders") require.NoError(t, err, "moving file between folders", clues.ToCore(err))
}, },
itemsRead: 1, // .data file for newitem itemsRead: 1, // .data file for newitem
itemsWritten: 3, // .data and .meta for newitem, .dirmeta for parent itemsWritten: 3, // .data and .meta for newitem, .dirmeta for parent
@ -1326,7 +1323,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
DrivesById(driveID). DrivesById(driveID).
ItemsById(ptr.Val(newFile.GetId())). ItemsById(ptr.Val(newFile.GetId())).
Delete(ctx, nil) Delete(ctx, nil)
require.NoError(t, err, "deleting file") require.NoError(t, err, "deleting file", clues.ToCore(err))
}, },
itemsRead: 0, itemsRead: 0,
itemsWritten: 0, itemsWritten: 0,
@ -1348,7 +1345,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
DrivesById(driveID). DrivesById(driveID).
ItemsById(source). ItemsById(source).
Patch(ctx, driveItem, nil) Patch(ctx, driveItem, nil)
require.NoError(t, err, "moving folder") require.NoError(t, err, "moving folder", clues.ToCore(err))
}, },
itemsRead: 0, itemsRead: 0,
itemsWritten: 7, // 2*2(data and meta of 2 files) + 3 (dirmeta of two moved folders and target) itemsWritten: 7, // 2*2(data and meta of 2 files) + 3 (dirmeta of two moved folders and target)
@ -1371,7 +1368,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
DrivesById(driveID). DrivesById(driveID).
ItemsById(child). ItemsById(child).
Patch(ctx, driveItem, nil) Patch(ctx, driveItem, nil)
require.NoError(t, err, "renaming folder") require.NoError(t, err, "renaming folder", clues.ToCore(err))
}, },
itemsRead: 0, itemsRead: 0,
itemsWritten: 7, // 2*2(data and meta of 2 files) + 3 (dirmeta of two moved folders and target) itemsWritten: 7, // 2*2(data and meta of 2 files) + 3 (dirmeta of two moved folders and target)
@ -1385,7 +1382,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
DrivesById(driveID). DrivesById(driveID).
ItemsById(container). ItemsById(container).
Delete(ctx, nil) Delete(ctx, nil)
require.NoError(t, err, "deleting folder") require.NoError(t, err, "deleting folder", clues.ToCore(err))
}, },
itemsRead: 0, itemsRead: 0,
itemsWritten: 0, itemsWritten: 0,
@ -1413,7 +1410,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
container3) container3)
resp, err := drive.NewItemsDriveItemItemRequestBuilder(itemURL, gc.Service.Adapter()). resp, err := drive.NewItemsDriveItemItemRequestBuilder(itemURL, gc.Service.Adapter()).
Get(ctx, nil) Get(ctx, nil)
require.NoErrorf(t, err, "getting drive folder ID", "folder name: %s", container3) require.NoError(t, err, "getting drive folder ID", "folder name", container3, clues.ToCore(err))
containerIDs[container3] = ptr.Val(resp.GetId()) containerIDs[container3] = ptr.Val(resp.GetId())
}, },
@ -1432,7 +1429,9 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
tester.LogTimeOfTest(suite.T()) tester.LogTimeOfTest(suite.T())
test.updateUserData(t) test.updateUserData(t)
require.NoError(t, incBO.Run(ctx))
err := incBO.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
checkBackupIsInManifests(t, ctx, kw, &incBO, sel.Selector, suite.user, maps.Keys(categories)...) checkBackupIsInManifests(t, ctx, kw, &incBO, sel.Selector, suite.user, maps.Keys(categories)...)
checkMetadataFilesExist( checkMetadataFilesExist(
t, t,
@ -1450,7 +1449,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
// +2 on read/writes to account for metadata: 1 delta and 1 path. // +2 on read/writes to account for metadata: 1 delta and 1 path.
assert.Equal(t, test.itemsWritten+2, incBO.Results.ItemsWritten, "incremental items written") assert.Equal(t, test.itemsWritten+2, incBO.Results.ItemsWritten, "incremental items written")
assert.Equal(t, test.itemsRead+2, incBO.Results.ItemsRead, "incremental items read") assert.Equal(t, test.itemsRead+2, incBO.Results.ItemsRead, "incremental items read")
assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error") assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(incBO.Errors.Failure()))
assert.Empty(t, incBO.Errors.Recovered(), "incremental recoverable/iteration errors") assert.Empty(t, incBO.Errors.Recovered(), "incremental recoverable/iteration errors")
assert.Equal(t, 1, incMB.TimesCalled[events.BackupStart], "incremental backup-start events") assert.Equal(t, 1, incMB.TimesCalled[events.BackupStart], "incremental backup-start events")
assert.Equal(t, 1, incMB.TimesCalled[events.BackupEnd], "incremental backup-end events") assert.Equal(t, 1, incMB.TimesCalled[events.BackupEnd], "incremental backup-end events")
@ -13,6 +13,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
evmock "github.com/alcionai/corso/src/internal/events/mock" evmock "github.com/alcionai/corso/src/internal/events/mock"
@ -194,7 +195,7 @@ func makeMetadataBasePath(
service, service,
category, category,
false) false)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return p return p
} }
@ -215,7 +216,7 @@ func makeMetadataPath(
service, service,
category, category,
true) true)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return p return p
} }
@ -250,7 +251,7 @@ func makePath(t *testing.T, elements []string, isItem bool) path.Path {
t.Helper() t.Helper()
p, err := path.FromDataLayerPath(stdpath.Join(elements...), isItem) p, err := path.FromDataLayerPath(stdpath.Join(elements...), isItem)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return p return p
} }
@ -295,7 +296,7 @@ func makeDetailsEntry(
case path.OneDriveService: case path.OneDriveService:
parent, err := path.GetDriveFolderPath(p) parent, err := path.GetDriveFolderPath(p)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
res.OneDrive = &details.OneDriveInfo{ res.OneDrive = &details.OneDriveInfo{
ItemType: details.OneDriveItem, ItemType: details.OneDriveItem,
@ -408,7 +409,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_PersistResults() {
acct, acct,
sel, sel,
evmock.NewBus()) evmock.NewBus())
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
op.Errors.Fail(test.fail) op.Errors.Fail(test.fail)
@ -696,7 +697,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
) )
itemParents1, err := path.GetDriveFolderPath(itemPath1) itemParents1, err := path.GetDriveFolderPath(itemPath1)
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
table := []struct { table := []struct {
name string name string
@ -1213,7 +1214,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
test.inputShortRefsFromPrevBackup, test.inputShortRefsFromPrevBackup,
&deets, &deets,
fault.New(true)) fault.New(true))
test.errCheck(t, err) test.errCheck(t, err, clues.ToCore(err))
if err != nil { if err != nil {
return return
@ -1329,7 +1330,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsFolde
inputToMerge, inputToMerge,
&deets, &deets,
fault.New(true)) fault.New(true))
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
compareDeetEntries(t, expectedEntries, deets.Details().Entries) compareDeetEntries(t, expectedEntries, deets.Details().Entries)
} }

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/kopia" "github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/model" "github.com/alcionai/corso/src/internal/model"
@ -117,7 +118,7 @@ func (suite *OperationsManifestsUnitSuite) TestCollectMetadata() {
for _, f := range files { for _, f := range files {
p, err := emailPath.Append(f, true) p, err := emailPath.Append(f, true)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
ps = append(ps, p) ps = append(ps, p)
} }
@ -140,7 +141,7 @@ func (suite *OperationsManifestsUnitSuite) TestCollectMetadata() {
for _, f := range files { for _, f := range files {
p, err := emailPath.Append(f, true) p, err := emailPath.Append(f, true)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
ps = append(ps, p) ps = append(ps, p)
} }
@ -168,10 +169,10 @@ func (suite *OperationsManifestsUnitSuite) TestCollectMetadata() {
for _, f := range files { for _, f := range files {
p, err := emailPath.Append(f, true) p, err := emailPath.Append(f, true)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
ps = append(ps, p) ps = append(ps, p)
p, err = contactPath.Append(f, true) p, err = contactPath.Append(f, true)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
ps = append(ps, p) ps = append(ps, p)
} }
@ -199,10 +200,10 @@ func (suite *OperationsManifestsUnitSuite) TestCollectMetadata() {
for _, f := range files { for _, f := range files {
p, err := emailPath.Append(f, true) p, err := emailPath.Append(f, true)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
ps = append(ps, p) ps = append(ps, p)
p, err = contactPath.Append(f, true) p, err = contactPath.Append(f, true)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
ps = append(ps, p) ps = append(ps, p)
} }
@ -229,7 +230,7 @@ func (suite *OperationsManifestsUnitSuite) TestCollectMetadata() {
} }
_, err := collectMetadata(ctx, &mr, man, test.fileNames, tid, fault.New(true)) _, err := collectMetadata(ctx, &mr, man, test.fileNames, tid, fault.New(true))
assert.ErrorIs(t, err, test.expectErr) assert.ErrorIs(t, err, test.expectErr, clues.ToCore(err))
}) })
} }
} }
@ -394,7 +395,7 @@ func (suite *OperationsManifestsUnitSuite) TestVerifyDistinctBases() {
defer flush() defer flush()
err := verifyDistinctBases(ctx, test.mans) err := verifyDistinctBases(ctx, test.mans)
test.expect(suite.T(), err) test.expect(suite.T(), err, clues.ToCore(err))
}) })
} }
} }
@ -649,7 +650,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
tid, tid,
test.getMeta, test.getMeta,
fault.New(true)) fault.New(true))
test.assertErr(t, err) test.assertErr(t, err, clues.ToCore(err))
test.assertB(t, b) test.assertB(t, b)
expectMans := test.mr.mans expectMans := test.mr.mans
@ -837,7 +838,8 @@ func (suite *BackupManifestUnitSuite) TestBackupOperation_VerifyDistinctBases()
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
test.errCheck(suite.T(), verifyDistinctBases(ctx, test.input)) err := verifyDistinctBases(ctx, test.input)
test.errCheck(suite.T(), err, clues.ToCore(err))
}) })
} }
} }
@ -958,7 +960,7 @@ func (suite *BackupManifestUnitSuite) TestBackupOperation_CollectMetadata() {
mr := &mockRestorer{} mr := &mockRestorer{}
_, err := collectMetadata(ctx, mr, test.inputMan, test.inputFiles, tenant, fault.New(true)) _, err := collectMetadata(ctx, mr, test.inputMan, test.inputFiles, tenant, fault.New(true))
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
checkPaths(t, test.expected, mr.gotPaths) checkPaths(t, test.expected, mr.gotPaths)
}) })

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/events" "github.com/alcionai/corso/src/internal/events"
"github.com/alcionai/corso/src/internal/kopia" "github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -44,8 +45,8 @@ func (suite *OperationSuite) TestOperation_Validate() {
} }
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
op := newOperation(control.Options{}, events.Bus{}, test.kw, test.sw) err := newOperation(control.Options{}, events.Bus{}, test.kw, test.sw).validate()
test.errCheck(suite.T(), op.validate()) test.errCheck(suite.T(), err, clues.ToCore(err))
}) })
} }
} }

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/connector/exchange" "github.com/alcionai/corso/src/internal/connector/exchange"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
@ -113,11 +114,12 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() {
selectors.Selector{DiscreteOwner: "test"}, selectors.Selector{DiscreteOwner: "test"},
dest, dest,
evmock.NewBus()) evmock.NewBus())
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
op.Errors.Fail(test.fail) op.Errors.Fail(test.fail)
test.expectErr(t, op.persistResults(ctx, now, &test.stats)) err = op.persistResults(ctx, now, &test.stats)
test.expectErr(t, err, clues.ToCore(err))
assert.Equal(t, test.expectStatus.String(), op.Status.String(), "status") assert.Equal(t, test.expectStatus.String(), op.Status.String(), "status")
assert.Equal(t, len(test.stats.cs), op.Results.ItemsRead, "items read") assert.Equal(t, len(test.stats.cs), op.Results.ItemsRead, "items read")
@ -161,27 +163,26 @@ func (suite *RestoreOpIntegrationSuite) SetupSuite() {
defer flush() defer flush()
t := suite.T() t := suite.T()
m365UserID := tester.M365UserID(t) m365UserID := tester.M365UserID(t)
acct := tester.NewM365Account(t) acct := tester.NewM365Account(t)
// need to initialize the repository before we can test connecting to it. // need to initialize the repository before we can test connecting to it.
st := tester.NewPrefixedS3Storage(t) st := tester.NewPrefixedS3Storage(t)
k := kopia.NewConn(st) k := kopia.NewConn(st)
require.NoError(t, k.Initialize(ctx))
err := k.Initialize(ctx)
require.NoError(t, err, clues.ToCore(err))
suite.kopiaCloser = func(ctx context.Context) { suite.kopiaCloser = func(ctx context.Context) {
k.Close(ctx) k.Close(ctx)
} }
kw, err := kopia.NewWrapper(k) kw, err := kopia.NewWrapper(k)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
suite.kw = kw suite.kw = kw
ms, err := kopia.NewModelStore(k) ms, err := kopia.NewModelStore(k)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
suite.ms = ms suite.ms = ms
@ -206,8 +207,10 @@ func (suite *RestoreOpIntegrationSuite) SetupSuite() {
acct, acct,
bsel.Selector, bsel.Selector,
evmock.NewBus()) evmock.NewBus())
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.NoError(t, bo.Run(ctx))
err = bo.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, bo.Results.BackupID) require.NotEmpty(t, bo.Results.BackupID)
suite.backupID = bo.Results.BackupID suite.backupID = bo.Results.BackupID
@ -230,8 +233,10 @@ func (suite *RestoreOpIntegrationSuite) SetupSuite() {
csel.Selector, csel.Selector,
evmock.NewBus(), evmock.NewBus(),
) )
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.NoError(t, bo.Run(ctx))
err = bo.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, bo.Results.BackupID) require.NotEmpty(t, bo.Results.BackupID)
suite.sharepointID = bo.Results.BackupID suite.sharepointID = bo.Results.BackupID
// Discount MetaData files (1 path, 1 delta) // Discount MetaData files (1 path, 1 delta)
@ -289,7 +294,7 @@ func (suite *RestoreOpIntegrationSuite) TestNewRestoreOperation() {
selectors.Selector{DiscreteOwner: "test"}, selectors.Selector{DiscreteOwner: "test"},
dest, dest,
evmock.NewBus()) evmock.NewBus())
test.errCheck(suite.T(), err) test.errCheck(suite.T(), err, clues.ToCore(err))
}) })
} }
} }
@ -334,22 +339,22 @@ func (suite *RestoreOpIntegrationSuite) TestRestore_Run() {
cleanup: func(t *testing.T, dest string) { cleanup: func(t *testing.T, dest string) {
act := tester.NewM365Account(t) act := tester.NewM365Account(t)
m365, err := act.M365Config() m365, err := act.M365Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
adpt, err := graph.CreateAdapter(m365.AzureTenantID, m365.AzureClientID, m365.AzureClientSecret) adpt, err := graph.CreateAdapter(m365.AzureTenantID, m365.AzureClientID, m365.AzureClientSecret)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
service := graph.NewService(adpt) service := graph.NewService(adpt)
pager := api.NewSiteDrivePager(service, tester.M365SiteID(t), []string{"id", "name"}) pager := api.NewSiteDrivePager(service, tester.M365SiteID(t), []string{"id", "name"})
driveID, err := pager.GetDriveIDByName(ctx, "Documents") driveID, err := pager.GetDriveIDByName(ctx, "Documents")
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, driveID) require.NotEmpty(t, driveID)
folderID, err := pager.GetFolderIDByName(ctx, driveID, dest) folderID, err := pager.GetFolderIDByName(ctx, driveID, dest)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, folderID) require.NotEmpty(t, folderID)
err = onedrive.DeleteItem(ctx, service, driveID, folderID) err = onedrive.DeleteItem(ctx, service, driveID, folderID)
assert.NoError(t, err, "failed to delete restore folder: operations_SharePoint_Restore") assert.NoError(t, err, "deleting restore folder", clues.ToCore(err))
}, },
}, },
} }
@ -367,11 +372,11 @@ func (suite *RestoreOpIntegrationSuite) TestRestore_Run() {
test.getSelector(t), test.getSelector(t),
test.dest, test.dest,
mb) mb)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
ds, err := ro.Run(ctx) ds, err := ro.Run(ctx)
require.NoError(t, err, "restoreOp.Run()") require.NoError(t, err, "restoreOp.Run()", clues.ToCore(err))
require.NotEmpty(t, ro.Results, "restoreOp results") require.NotEmpty(t, ro.Results, "restoreOp results")
require.NotNil(t, ds, "restored details") require.NotNil(t, ds, "restored details")
assert.Equal(t, ro.Status, Completed, "restoreOp status") assert.Equal(t, ro.Status, Completed, "restoreOp status")
@ -379,7 +384,7 @@ func (suite *RestoreOpIntegrationSuite) TestRestore_Run() {
assert.Less(t, 0, ro.Results.ItemsRead, "restore items read") assert.Less(t, 0, ro.Results.ItemsRead, "restore items read")
assert.Less(t, int64(0), ro.Results.BytesRead, "bytes read") assert.Less(t, int64(0), ro.Results.BytesRead, "bytes read")
assert.Equal(t, 1, ro.Results.ResourceOwners, "resource Owners") assert.Equal(t, 1, ro.Results.ResourceOwners, "resource Owners")
assert.NoError(t, ro.Errors.Failure(), "non-recoverable error") assert.NoError(t, ro.Errors.Failure(), "non-recoverable error", clues.ToCore(ro.Errors.Failure()))
assert.Empty(t, ro.Errors.Recovered(), "recoverable errors") assert.Empty(t, ro.Errors.Recovered(), "recoverable errors")
assert.Equal(t, test.expectedItems, ro.Results.ItemsWritten, "backup and restore wrote the same num of items") assert.Equal(t, test.expectedItems, ro.Results.ItemsWritten, "backup and restore wrote the same num of items")
assert.Equal(t, 1, mb.TimesCalled[events.RestoreStart], "restore-start events") assert.Equal(t, 1, mb.TimesCalled[events.RestoreStart], "restore-start events")
@ -415,7 +420,7 @@ func (suite *RestoreOpIntegrationSuite) TestRestore_Run_ErrorNoResults() {
rsel.Selector, rsel.Selector,
dest, dest,
mb) mb)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
ds, err := ro.Run(ctx) ds, err := ro.Run(ctx)
require.Error(t, err, "restoreOp.Run() should have errored") require.Error(t, err, "restoreOp.Run() should have errored")

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/kopia" "github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
@ -27,21 +28,21 @@ func TestStreamDetailsIntegrationSuite(t *testing.T) {
} }
func (suite *StreamDetailsIntegrationSuite) TestDetails() { func (suite *StreamDetailsIntegrationSuite) TestDetails() {
t := suite.T()
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
t := suite.T()
// need to initialize the repository before we can test connecting to it. // need to initialize the repository before we can test connecting to it.
st := tester.NewPrefixedS3Storage(t) st := tester.NewPrefixedS3Storage(t)
k := kopia.NewConn(st) k := kopia.NewConn(st)
require.NoError(t, k.Initialize(ctx))
err := k.Initialize(ctx)
require.NoError(t, err, clues.ToCore(err))
defer k.Close(ctx) defer k.Close(ctx)
kw, err := kopia.NewWrapper(k) kw, err := kopia.NewWrapper(k)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
defer kw.Close(ctx) defer kw.Close(ctx)
@ -62,12 +63,12 @@ func (suite *StreamDetailsIntegrationSuite) TestDetails() {
) )
id, err := sd.Write(ctx, deets, fault.New(true)) id, err := sd.Write(ctx, deets, fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, id) require.NotNil(t, id)
var readDeets details.Details var readDeets details.Details
err = sd.Read(ctx, id, details.UnmarshalTo(&readDeets), fault.New(true)) err = sd.Read(ctx, id, details.UnmarshalTo(&readDeets), fault.New(true))
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, readDeets) require.NotEmpty(t, readDeets)
assert.Equal(t, len(deets.Entries), len(readDeets.Entries)) assert.Equal(t, len(deets.Entries), len(readDeets.Entries))

View File

@ -5,6 +5,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/credentials" "github.com/alcionai/corso/src/pkg/credentials"
) )
@ -18,7 +19,7 @@ var M365AcctCredEnvs = []string{
// variables used for integration tests that use Graph Connector. // variables used for integration tests that use Graph Connector.
func NewM365Account(t *testing.T) account.Account { func NewM365Account(t *testing.T) account.Account {
cfg, err := readTestConfig() cfg, err := readTestConfig()
require.NoError(t, err, "configuring m365 account from test configuration") require.NoError(t, err, "configuring m365 account from test configuration", clues.ToCore(err))
acc, err := account.NewAccount( acc, err := account.NewAccount(
account.ProviderM365, account.ProviderM365,
@ -27,7 +28,7 @@ func NewM365Account(t *testing.T) account.Account {
AzureTenantID: cfg[TestCfgAzureTenantID], AzureTenantID: cfg[TestCfgAzureTenantID],
}, },
) )
require.NoError(t, err, "initializing account") require.NoError(t, err, "initializing account", clues.ToCore(err))
return acc return acc
} }
@ -43,7 +44,7 @@ func NewMockM365Account(t *testing.T) account.Account {
AzureTenantID: "09876", AzureTenantID: "09876",
}, },
) )
require.NoError(t, err, "initializing mock account") require.NoError(t, err, "initializing mock account", clues.ToCore(err))
return acc return acc
} }

View File

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
) )
@ -159,7 +160,7 @@ func readTestConfig() (map[string]string, error) {
// Returns a filepath string pointing to the location of the temp file. // Returns a filepath string pointing to the location of the temp file.
func MakeTempTestConfigClone(t *testing.T, overrides map[string]string) (*viper.Viper, string) { func MakeTempTestConfigClone(t *testing.T, overrides map[string]string) (*viper.Viper, string) {
cfg, err := readTestConfig() cfg, err := readTestConfig()
require.NoError(t, err, "reading tester config") require.NoError(t, err, "reading tester config", clues.ToCore(err))
fName := filepath.Base(os.Getenv(EnvCorsoTestConfigFilePath)) fName := filepath.Base(os.Getenv(EnvCorsoTestConfigFilePath))
if len(fName) == 0 || fName == "." || fName == "/" { if len(fName) == 0 || fName == "." || fName == "/" {
@ -170,7 +171,7 @@ func MakeTempTestConfigClone(t *testing.T, overrides map[string]string) (*viper.
tDirFp := filepath.Join(tDir, fName) tDirFp := filepath.Join(tDir, fName)
_, err = os.Create(tDirFp) _, err = os.Create(tDirFp)
require.NoError(t, err, "creating temp test dir") require.NoError(t, err, "creating temp test dir", clues.ToCore(err))
ext := filepath.Ext(fName) ext := filepath.Ext(fName)
vpr := viper.New() vpr := viper.New()
@ -188,7 +189,8 @@ func MakeTempTestConfigClone(t *testing.T, overrides map[string]string) (*viper.
vpr.Set(k, v) vpr.Set(k, v)
} }
require.NoError(t, vpr.WriteConfig(), "writing temp dir viper config file") err = vpr.WriteConfig()
require.NoError(t, err, "writing temp dir viper config file", clues.ToCore(err))
return vpr, tDirFp return vpr, tDirFp
} }

View File

@ -5,6 +5,7 @@ import (
"os" "os"
"testing" "testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -13,7 +14,7 @@ func LoadAFile(t *testing.T, fileName string) []byte {
bytes, err := os.ReadFile(fileName) bytes, err := os.ReadFile(fileName)
if err != nil { if err != nil {
f, err := os.Open(fileName) f, err := os.Open(fileName)
require.NoError(t, err, "opening file: "+fileName) require.NoError(t, err, "opening file:", fileName, clues.ToCore(err))
defer f.Close() defer f.Close()
@ -25,7 +26,7 @@ func LoadAFile(t *testing.T, fileName string) []byte {
buffer = append(buffer, temp...) buffer = append(buffer, temp...)
} }
require.NoError(t, reader.Err(), "reading file: "+fileName) require.NoError(t, reader.Err(), "reading file:", fileName, clues.ToCore(err))
return buffer return buffer
} }

View File

@ -5,6 +5,7 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -14,7 +15,7 @@ import (
// last-attempt fallback that will only work on alcion's testing org. // last-attempt fallback that will only work on alcion's testing org.
func M365TenantID(t *testing.T) string { func M365TenantID(t *testing.T) string {
cfg, err := readTestConfig() cfg, err := readTestConfig()
require.NoError(t, err, "retrieving m365 user id from test configuration") require.NoError(t, err, "retrieving m365 user id from test configuration", clues.ToCore(err))
return cfg[TestCfgAzureTenantID] return cfg[TestCfgAzureTenantID]
} }
@ -25,7 +26,7 @@ func M365TenantID(t *testing.T) string {
// last-attempt fallback that will only work on alcion's testing org. // last-attempt fallback that will only work on alcion's testing org.
func M365UserID(t *testing.T) string { func M365UserID(t *testing.T) string {
cfg, err := readTestConfig() cfg, err := readTestConfig()
require.NoError(t, err, "retrieving m365 user id from test configuration") require.NoError(t, err, "retrieving m365 user id from test configuration", clues.ToCore(err))
return cfg[TestCfgUserID] return cfg[TestCfgUserID]
} }
@ -37,7 +38,7 @@ func M365UserID(t *testing.T) string {
// testing org. // testing org.
func SecondaryM365UserID(t *testing.T) string { func SecondaryM365UserID(t *testing.T) string {
cfg, err := readTestConfig() cfg, err := readTestConfig()
require.NoError(t, err, "retrieving secondary m365 user id from test configuration") require.NoError(t, err, "retrieving secondary m365 user id from test configuration", clues.ToCore(err))
return cfg[TestCfgSecondaryUserID] return cfg[TestCfgSecondaryUserID]
} }
@ -49,7 +50,7 @@ func SecondaryM365UserID(t *testing.T) string {
// testing org. // testing org.
func LoadTestM365SiteID(t *testing.T) string { func LoadTestM365SiteID(t *testing.T) string {
cfg, err := readTestConfig() cfg, err := readTestConfig()
require.NoError(t, err, "retrieving load test m365 site id from test configuration") require.NoError(t, err, "retrieving load test m365 site id from test configuration", clues.ToCore(err))
// TODO: load test site id, not standard test site id // TODO: load test site id, not standard test site id
return cfg[TestCfgSiteID] return cfg[TestCfgSiteID]
@ -62,7 +63,7 @@ func LoadTestM365SiteID(t *testing.T) string {
// testing org. // testing org.
func LoadTestM365UserID(t *testing.T) string { func LoadTestM365UserID(t *testing.T) string {
cfg, err := readTestConfig() cfg, err := readTestConfig()
require.NoError(t, err, "retrieving load test m365 user id from test configuration") require.NoError(t, err, "retrieving load test m365 user id from test configuration", clues.ToCore(err))
return cfg[TestCfgLoadTestUserID] return cfg[TestCfgLoadTestUserID]
} }
@ -72,7 +73,7 @@ func LoadTestM365UserID(t *testing.T) string {
// the delimiter must be a |. // the delimiter must be a |.
func LoadTestM365OrgSites(t *testing.T) []string { func LoadTestM365OrgSites(t *testing.T) []string {
cfg, err := readTestConfig() cfg, err := readTestConfig()
require.NoError(t, err, "retrieving load test m365 org sites from test configuration") require.NoError(t, err, "retrieving load test m365 org sites from test configuration", clues.ToCore(err))
// TODO: proper handling of site slice input. // TODO: proper handling of site slice input.
// sites := cfg[TestCfgLoadTestOrgSites] // sites := cfg[TestCfgLoadTestOrgSites]
@ -92,7 +93,7 @@ func LoadTestM365OrgSites(t *testing.T) []string {
// the delimiter may be either a , or |. // the delimiter may be either a , or |.
func LoadTestM365OrgUsers(t *testing.T) []string { func LoadTestM365OrgUsers(t *testing.T) []string {
cfg, err := readTestConfig() cfg, err := readTestConfig()
require.NoError(t, err, "retrieving load test m365 org users from test configuration") require.NoError(t, err, "retrieving load test m365 org users from test configuration", clues.ToCore(err))
users := cfg[TestCfgLoadTestOrgUsers] users := cfg[TestCfgLoadTestOrgUsers]
users = strings.TrimPrefix(users, "[") users = strings.TrimPrefix(users, "[")
@ -128,7 +129,7 @@ func LoadTestM365OrgUsers(t *testing.T) []string {
// last-attempt fallback that will only work on alcion's testing org. // last-attempt fallback that will only work on alcion's testing org.
func M365SiteID(t *testing.T) string { func M365SiteID(t *testing.T) string {
cfg, err := readTestConfig() cfg, err := readTestConfig()
require.NoError(t, err, "retrieving m365 site id from test configuration") require.NoError(t, err, "retrieving m365 site id from test configuration", clues.ToCore(err))
return cfg[TestCfgSiteID] return cfg[TestCfgSiteID]
} }

View File

@ -5,6 +5,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/pkg/credentials" "github.com/alcionai/corso/src/pkg/credentials"
"github.com/alcionai/corso/src/pkg/storage" "github.com/alcionai/corso/src/pkg/storage"
) )
@ -27,7 +28,7 @@ func NewPrefixedS3Storage(t *testing.T) storage.Storage {
now := LogTimeOfTest(t) now := LogTimeOfTest(t)
cfg, err := readTestConfig() cfg, err := readTestConfig()
require.NoError(t, err, "configuring storage from test file") require.NoError(t, err, "configuring storage from test file", clues.ToCore(err))
prefix := testRepoRootPrefix + t.Name() + "-" + now prefix := testRepoRootPrefix + t.Name() + "-" + now
t.Logf("testing at s3 bucket [%s] prefix [%s]", cfg[TestCfgBucket], prefix) t.Logf("testing at s3 bucket [%s] prefix [%s]", cfg[TestCfgBucket], prefix)
@ -43,7 +44,7 @@ func NewPrefixedS3Storage(t *testing.T) storage.Storage {
KopiaCfgDir: t.TempDir(), KopiaCfgDir: t.TempDir(),
}, },
) )
require.NoError(t, err, "creating storage") require.NoError(t, err, "creating storage", clues.ToCore(err))
return st return st
} }

View File

@ -3,6 +3,7 @@ package account
import ( import (
"testing" "testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
) )
@ -44,7 +45,7 @@ func (suite *AccountSuite) TestNewAccount() {
for _, test := range table { for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) { suite.T().Run(test.name, func(t *testing.T) {
s, err := NewAccount(test.p, test.c) s, err := NewAccount(test.p, test.c)
test.errCheck(t, err) test.errCheck(t, err, clues.ToCore(err))
// remaining tests are dependent upon error-free state // remaining tests are dependent upon error-free state
if test.c.err != nil { if test.c.err != nil {
return return

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/credentials" "github.com/alcionai/corso/src/pkg/credentials"
) )
@ -30,7 +31,7 @@ var goodM365Config = account.M365Config{
func (suite *M365CfgSuite) TestM365Config_Config() { func (suite *M365CfgSuite) TestM365Config_Config() {
m365 := goodM365Config m365 := goodM365Config
c, err := m365.StringConfig() c, err := m365.StringConfig()
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
table := []struct { table := []struct {
key string key string
@ -50,9 +51,10 @@ func (suite *M365CfgSuite) TestAccount_M365Config() {
in := goodM365Config in := goodM365Config
a, err := account.NewAccount(account.ProviderM365, in) a, err := account.NewAccount(account.ProviderM365, in)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
out, err := a.M365Config() out, err := a.M365Config()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, in.AzureClientID, out.AzureClientID) assert.Equal(t, in.AzureClientID, out.AzureClientID)
assert.Equal(t, in.AzureClientSecret, out.AzureClientSecret) assert.Equal(t, in.AzureClientSecret, out.AzureClientSecret)
@ -113,8 +115,9 @@ func (suite *M365CfgSuite) TestAccount_M365Config_InvalidCases() {
for _, test := range table2 { for _, test := range table2 {
suite.T().Run(test.name, func(t *testing.T) { suite.T().Run(test.name, func(t *testing.T) {
st, err := account.NewAccount(account.ProviderUnknown, goodM365Config) st, err := account.NewAccount(account.ProviderUnknown, goodM365Config)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
test.amend(st) test.amend(st)
_, err = st.M365Config() _, err = st.M365Config()
assert.Error(t, err) assert.Error(t, err)
}) })

View File

@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -31,7 +32,7 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
initial := time.Now() initial := time.Now()
nowStr := common.FormatTimeWith(initial, common.TabularOutput) nowStr := common.FormatTimeWith(initial, common.TabularOutput)
now, err := common.ParseTime(nowStr) now, err := common.ParseTime(nowStr)
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
table := []struct { table := []struct {
name string name string
@ -846,7 +847,7 @@ func makeItemPath(
category, category,
true, true,
elems...) elems...)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return p return p
} }
@ -1001,10 +1002,10 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
item := test.input item := test.input
err := UpdateItem(&item, test.repoPath) err := UpdateItem(&item, test.repoPath)
test.errCheck(t, err) test.errCheck(t, err, clues.ToCore(err))
if err != nil { if err != nil {
return return
@ -1184,7 +1185,7 @@ func (suite *DetailsUnitSuite) TestDetails_Marshal() {
}} }}
bs, err := d.Marshal() bs, err := d.Marshal()
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
assert.NotEmpty(suite.T(), bs) assert.NotEmpty(suite.T(), bs)
}) })
} }
@ -1198,7 +1199,7 @@ func (suite *DetailsUnitSuite) TestUnarshalTo() {
}} }}
bs, err := orig.Marshal() bs, err := orig.Marshal()
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
assert.NotEmpty(suite.T(), bs) assert.NotEmpty(suite.T(), bs)
var result Details var result Details
@ -1206,7 +1207,7 @@ func (suite *DetailsUnitSuite) TestUnarshalTo() {
err = umt(io.NopCloser(bytes.NewReader(bs))) err = umt(io.NopCloser(bytes.NewReader(bs)))
t := suite.T() t := suite.T()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, result) require.NotNil(t, result)
assert.ElementsMatch(t, orig.Entries, result.Entries) assert.ElementsMatch(t, orig.Entries, result.Entries)
}) })

View File

@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
) )
@ -78,7 +79,7 @@ func (suite *FaultErrorsUnitSuite) TestErr() {
n := fault.New(test.failFast) n := fault.New(test.failFast)
require.NotNil(t, n) require.NotNil(t, n)
require.NoError(t, n.Failure()) require.NoError(t, n.Failure(), clues.ToCore(n.Failure()))
require.Empty(t, n.Recovered()) require.Empty(t, n.Recovered())
e := n.Fail(test.fail) e := n.Fail(test.fail)
@ -97,11 +98,11 @@ func (suite *FaultErrorsUnitSuite) TestFail() {
n := fault.New(false) n := fault.New(false)
require.NotNil(t, n) require.NotNil(t, n)
require.NoError(t, n.Failure()) require.NoError(t, n.Failure(), clues.ToCore(n.Failure()))
require.Empty(t, n.Recovered()) require.Empty(t, n.Recovered())
n.Fail(assert.AnError) n.Fail(assert.AnError)
assert.Error(t, n.Failure()) assert.Error(t, n.Failure(), clues.ToCore(n.Failure()))
assert.Empty(t, n.Recovered()) assert.Empty(t, n.Recovered())
n.Fail(assert.AnError) n.Fail(assert.AnError)
@ -337,10 +338,10 @@ func (suite *FaultErrorsUnitSuite) TestMarshalUnmarshal() {
n.AddRecoverable(errors.New("2")) n.AddRecoverable(errors.New("2"))
bs, err := json.Marshal(n.Errors()) bs, err := json.Marshal(n.Errors())
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
err = json.Unmarshal(bs, &fault.Errors{}) err = json.Unmarshal(bs, &fault.Errors{})
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
} }
type legacyErrorsData struct { type legacyErrorsData struct {
@ -357,14 +358,14 @@ func (suite *FaultErrorsUnitSuite) TestUnmarshalLegacy() {
} }
jsonStr, err := json.Marshal(oldData) jsonStr, err := json.Marshal(oldData)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
t.Logf("jsonStr is %s\n", jsonStr) t.Logf("jsonStr is %s\n", jsonStr)
um := fault.Errors{} um := fault.Errors{}
err = json.Unmarshal(jsonStr, &um) err = json.Unmarshal(jsonStr, &um)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
} }
func (suite *FaultErrorsUnitSuite) TestTracker() { func (suite *FaultErrorsUnitSuite) TestTracker() {
@ -373,18 +374,18 @@ func (suite *FaultErrorsUnitSuite) TestTracker() {
eb := fault.New(false) eb := fault.New(false)
lb := eb.Local() lb := eb.Local()
assert.NoError(t, lb.Failure()) assert.NoError(t, lb.Failure(), clues.ToCore(lb.Failure()))
assert.Empty(t, eb.Recovered()) assert.Empty(t, eb.Recovered())
lb.AddRecoverable(assert.AnError) lb.AddRecoverable(assert.AnError)
assert.NoError(t, lb.Failure()) assert.NoError(t, lb.Failure(), clues.ToCore(lb.Failure()))
assert.NoError(t, eb.Failure()) assert.NoError(t, eb.Failure(), clues.ToCore(eb.Failure()))
assert.NotEmpty(t, eb.Recovered()) assert.NotEmpty(t, eb.Recovered())
ebt := fault.New(true) ebt := fault.New(true)
lbt := ebt.Local() lbt := ebt.Local()
assert.NoError(t, lbt.Failure()) assert.NoError(t, lbt.Failure(), clues.ToCore(lbt.Failure()))
assert.Empty(t, ebt.Recovered()) assert.Empty(t, ebt.Recovered())
lbt.AddRecoverable(assert.AnError) lbt.AddRecoverable(assert.AnError)

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
) )
@ -49,7 +50,7 @@ func (suite *OneDrivePathSuite) Test_ToOneDrivePath() {
t := suite.T() t := suite.T()
p, err := path.Build("tenant", "user", path.OneDriveService, path.FilesCategory, false, tt.pathElements...) p, err := path.Build("tenant", "user", path.OneDriveService, path.FilesCategory, false, tt.pathElements...)
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
got, err := path.ToOneDrivePath(p) got, err := path.ToOneDrivePath(p)
tt.errCheck(t, err) tt.errCheck(t, err)

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
) )
@ -241,7 +242,7 @@ func (suite *PathUnitSuite) TestUnescapeAndAppend() {
t := suite.T() t := suite.T()
p, err := Builder{}.UnescapeAndAppend(test.input...) p, err := Builder{}.UnescapeAndAppend(test.input...)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, test.expectedString, p.String()) assert.Equal(t, test.expectedString, p.String())
}) })
@ -256,7 +257,7 @@ func (suite *PathUnitSuite) TestEscapedFailure() {
tmp := strings.ReplaceAll(target, "_", string(c)) tmp := strings.ReplaceAll(target, "_", string(c))
_, err := Builder{}.UnescapeAndAppend("this", tmp, "path") _, err := Builder{}.UnescapeAndAppend("this", tmp, "path")
assert.Error(t, err, "path with unescaped %s did not error", string(c)) assert.Errorf(t, err, "path with unescaped %s did not error", string(c))
}) })
} }
} }
@ -270,13 +271,12 @@ func (suite *PathUnitSuite) TestBadEscapeSequenceErrors() {
tmp := strings.ReplaceAll(target, "_", string(c)) tmp := strings.ReplaceAll(target, "_", string(c))
_, err := Builder{}.UnescapeAndAppend("this", tmp, "path") _, err := Builder{}.UnescapeAndAppend("this", tmp, "path")
assert.Error( assert.Errorf(
t, t,
err, err,
"path with bad escape sequence %c%c did not error", "path with bad escape sequence %c%c did not error",
escapeCharacter, escapeCharacter,
c, c)
)
}) })
} }
} }
@ -294,8 +294,7 @@ func (suite *PathUnitSuite) TestTrailingEscapeChar() {
assert.Error( assert.Error(
t, t,
err, err,
"path with trailing escape character did not error", "path with trailing escape character did not error")
)
}) })
} }
} }
@ -337,7 +336,7 @@ func (suite *PathUnitSuite) TestElements() {
t := suite.T() t := suite.T()
p, err := test.pathFunc(test.input) p, err := test.pathFunc(test.input)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, test.output, p.Elements()) assert.Equal(t, test.output, p.Elements())
}) })
@ -506,7 +505,7 @@ func (suite *PathUnitSuite) TestFolder() {
p, err := Builder{}. p, err := Builder{}.
Append("a", "b", "c"). Append("a", "b", "c").
ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false) ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return p return p
}, },
@ -519,7 +518,7 @@ func (suite *PathUnitSuite) TestFolder() {
p, err := Builder{}. p, err := Builder{}.
Append("a", "b", "c"). Append("a", "b", "c").
ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false) ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return p return p
}, },
@ -533,7 +532,7 @@ func (suite *PathUnitSuite) TestFolder() {
p, err := Builder{}. p, err := Builder{}.
Append("a/", "b", "c"). Append("a/", "b", "c").
ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false) ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return p return p
}, },
@ -546,7 +545,7 @@ func (suite *PathUnitSuite) TestFolder() {
p, err := Builder{}. p, err := Builder{}.
Append("a/", "b", "c"). Append("a/", "b", "c").
ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false) ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return p return p
}, },
@ -680,7 +679,7 @@ func (suite *PathUnitSuite) TestFromString() {
testPath := fmt.Sprintf(test.unescapedPath, service, cat) testPath := fmt.Sprintf(test.unescapedPath, service, cat)
p, err := FromDataLayerPath(testPath, item.isItem) p, err := FromDataLayerPath(testPath, item.isItem)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, service, p.Service(), "service") assert.Equal(t, service, p.Service(), "service")
assert.Equal(t, cat, p.Category(), "category") assert.Equal(t, cat, p.Category(), "category")

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
) )
@ -175,7 +176,7 @@ func (suite *DataLayerResourcePath) TestMailItemNoFolder() {
testUser, testUser,
true, true,
) )
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Empty(t, p.Folder(false)) assert.Empty(t, p.Folder(false))
assert.Empty(t, p.Folders()) assert.Empty(t, p.Folders())
@ -201,7 +202,7 @@ func (suite *DataLayerResourcePath) TestPopFront() {
path.EmailCategory, path.EmailCategory,
m.isItem, m.isItem,
) )
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
b := p.PopFront() b := p.PopFront()
assert.Equal(t, expected.String(), b.String()) assert.Equal(t, expected.String(), b.String())
@ -226,14 +227,14 @@ func (suite *DataLayerResourcePath) TestDir() {
path.EmailCategory, path.EmailCategory,
m.isItem, m.isItem,
) )
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
for i := 1; i <= len(rest); i++ { for i := 1; i <= len(rest); i++ {
suite.Run(fmt.Sprintf("%v", i), func() { suite.Run(fmt.Sprintf("%v", i), func() {
t := suite.T() t := suite.T()
p, err = p.Dir() p, err = p.Dir()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
expected := path.Builder{}.Append(elements...).Append(rest[:len(rest)-i]...) expected := path.Builder{}.Append(elements...).Append(rest[:len(rest)-i]...)
assert.Equal(t, expected.String(), p.String()) assert.Equal(t, expected.String(), p.String())
@ -332,17 +333,15 @@ func (suite *DataLayerResourcePath) TestToServiceCategoryMetadataPath() {
test.category.String(), test.category.String(),
}, "_"), func() { }, "_"), func() {
t := suite.T() t := suite.T()
pb := path.Builder{}.Append(test.postfix...) pb := path.Builder{}.Append(test.postfix...)
p, err := pb.ToServiceCategoryMetadataPath( p, err := pb.ToServiceCategoryMetadataPath(
tenant, tenant,
user, user,
test.service, test.service,
test.category, test.category,
false, false)
) test.check(t, err, clues.ToCore(err))
test.check(t, err)
if err != nil { if err != nil {
return return
@ -391,10 +390,8 @@ func (suite *DataLayerResourcePath) TestToExchangePathForCategory() {
testTenant, testTenant,
testUser, testUser,
test.category, test.category,
m.isItem, m.isItem)
) test.check(t, err, clues.ToCore(err))
test.check(t, err)
if err != nil { if err != nil {
return return
@ -434,7 +431,7 @@ func (suite *PopulatedDataLayerResourcePath) SetupSuite() {
path.EmailCategory, path.EmailCategory,
t, t,
) )
require.NoError(suite.T(), err) require.NoError(suite.T(), err, clues.ToCore(err))
suite.paths[t] = p suite.paths[t] = p
} }

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
) )
@ -123,7 +124,7 @@ func (suite *ServiceCategoryUnitSuite) TestValidateServiceAndCategory() {
t := suite.T() t := suite.T()
s, c, err := validateServiceAndCategoryStrings(test.service, test.category) s, c, err := validateServiceAndCategoryStrings(test.service, test.category)
test.check(t, err) test.check(t, err, clues.ToCore(err))
if err != nil { if err != nil {
return return

View File

@ -12,6 +12,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
D "github.com/alcionai/corso/src/internal/diagnostics" D "github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/operations" "github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -98,7 +99,7 @@ func initM365Repo(t *testing.T) (
} }
repo, err := repository.Initialize(ctx, ac, st, opts) repo, err := repository.Initialize(ctx, ac, st, opts)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return ctx, repo, ac, st return ctx, repo, ac, st
} }
@ -120,7 +121,7 @@ func runLoadTest(
//revive:enable:context-as-argument //revive:enable:context-as-argument
t.Run(prefix+"_load_test_main", func(t *testing.T) { t.Run(prefix+"_load_test_main", func(t *testing.T) {
b, err := r.NewBackup(ctx, bupSel) b, err := r.NewBackup(ctx, bupSel)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
runBackupLoadTest(t, ctx, &b, service, usersUnderTest) runBackupLoadTest(t, ctx, &b, service, usersUnderTest)
bid := string(b.Results.BackupID) bid := string(b.Results.BackupID)
@ -152,7 +153,7 @@ func runRestoreLoadTest(
dest := tester.DefaultTestRestoreDestination() dest := tester.DefaultTestRestoreDestination()
rst, err := r.NewRestore(ctx, backupID, restSel, dest) rst, err := r.NewRestore(ctx, backupID, restSel, dest)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
doRestoreLoadTest(t, ctx, rst, service, bup.Results.ItemsWritten, usersUnderTest) doRestoreLoadTest(t, ctx, rst, service, bup.Results.ItemsWritten, usersUnderTest)
}) })
@ -177,7 +178,7 @@ func runBackupLoadTest(
err = b.Run(ctx) err = b.Run(ctx)
}) })
require.NoError(t, err, "running backup") require.NoError(t, err, "running backup", clues.ToCore(err))
require.NotEmpty(t, b.Results, "has results after run") require.NotEmpty(t, b.Results, "has results after run")
assert.NotEmpty(t, b.Results.BackupID, "has an ID after run") assert.NotEmpty(t, b.Results.BackupID, "has an ID after run")
assert.Equal(t, b.Status, operations.Completed, "backup status") assert.Equal(t, b.Status, operations.Completed, "backup status")
@ -185,7 +186,7 @@ func runBackupLoadTest(
assert.Less(t, 0, b.Results.ItemsWritten, "items written") assert.Less(t, 0, b.Results.ItemsWritten, "items written")
assert.Less(t, int64(0), b.Results.BytesUploaded, "bytes uploaded") assert.Less(t, int64(0), b.Results.BytesUploaded, "bytes uploaded")
assert.Equal(t, len(users), b.Results.ResourceOwners, "resource owners") assert.Equal(t, len(users), b.Results.ResourceOwners, "resource owners")
assert.NoError(t, b.Errors.Failure(), "non-recoverable error") assert.NoError(t, b.Errors.Failure(), "non-recoverable error", clues.ToCore(b.Errors.Failure()))
assert.Empty(t, b.Errors.Recovered(), "recoverable errors") assert.Empty(t, b.Errors.Recovered(), "recoverable errors")
}) })
} }
@ -209,7 +210,7 @@ func runBackupListLoadTest(
bs, err = r.BackupsByTag(ctx) bs, err = r.BackupsByTag(ctx)
}) })
require.NoError(t, err, "retrieving backups") require.NoError(t, err, "retrieving backups", clues.ToCore(err))
require.Less(t, 0, len(bs), "at least one backup is recorded") require.Less(t, 0, len(bs), "at least one backup is recorded")
var found bool var found bool
@ -250,8 +251,8 @@ func runBackupDetailsLoadTest(
ds, b, errs = r.BackupDetails(ctx, backupID) ds, b, errs = r.BackupDetails(ctx, backupID)
}) })
require.NoError(t, errs.Failure(), "retrieving details in backup "+backupID) require.NoError(t, errs.Failure(), "retrieving details in backup", backupID, clues.ToCore(errs.Failure()))
require.Empty(t, errs.Recovered(), "retrieving details in backup "+backupID) require.Empty(t, errs.Recovered(), "retrieving details in backup", backupID)
require.NotNil(t, ds, "backup details must exist") require.NotNil(t, ds, "backup details must exist")
require.NotNil(t, b, "backup must exist") require.NotNil(t, b, "backup must exist")
@ -284,7 +285,7 @@ func doRestoreLoadTest(
ds, err = r.Run(ctx) ds, err = r.Run(ctx)
}) })
require.NoError(t, err, "running restore") require.NoError(t, err, "running restore", clues.ToCore(err))
require.NotEmpty(t, r.Results, "has results after run") require.NotEmpty(t, r.Results, "has results after run")
require.NotNil(t, ds, "has restored details") require.NotNil(t, ds, "has restored details")
assert.Equal(t, r.Status, operations.Completed, "restore status") assert.Equal(t, r.Status, operations.Completed, "restore status")

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
@ -51,9 +52,10 @@ func (suite *RepositorySuite) TestInitialize() {
defer flush() defer flush()
st, err := test.storage() st, err := test.storage()
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
_, err = repository.Initialize(ctx, test.account, st, control.Options{}) _, err = repository.Initialize(ctx, test.account, st, control.Options{})
test.errCheck(t, err, "") test.errCheck(t, err, clues.ToCore(err))
}) })
} }
} }
@ -84,9 +86,10 @@ func (suite *RepositorySuite) TestConnect() {
defer flush() defer flush()
st, err := test.storage() st, err := test.storage()
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
_, err = repository.Connect(ctx, test.account, st, control.Options{}) _, err = repository.Connect(ctx, test.account, st, control.Options{})
test.errCheck(t, err) test.errCheck(t, err, clues.ToCore(err))
}) })
} }
} }
@ -133,11 +136,12 @@ func (suite *RepositoryIntegrationSuite) TestInitialize() {
r, err := repository.Initialize(ctx, test.account, st, control.Options{}) r, err := repository.Initialize(ctx, test.account, st, control.Options{})
if err == nil { if err == nil {
defer func() { defer func() {
assert.NoError(t, r.Close(ctx)) err := r.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}() }()
} }
test.errCheck(t, err) test.errCheck(t, err, clues.ToCore(err))
}) })
} }
} }
@ -152,11 +156,11 @@ func (suite *RepositoryIntegrationSuite) TestConnect() {
st := tester.NewPrefixedS3Storage(t) st := tester.NewPrefixedS3Storage(t)
_, err := repository.Initialize(ctx, account.Account{}, st, control.Options{}) _, err := repository.Initialize(ctx, account.Account{}, st, control.Options{})
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
// now re-connect // now re-connect
_, err = repository.Connect(ctx, account.Account{}, st, control.Options{}) _, err = repository.Connect(ctx, account.Account{}, st, control.Options{})
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
} }
func (suite *RepositoryIntegrationSuite) TestConnect_sameID() { func (suite *RepositoryIntegrationSuite) TestConnect_sameID() {
@ -169,15 +173,16 @@ func (suite *RepositoryIntegrationSuite) TestConnect_sameID() {
st := tester.NewPrefixedS3Storage(t) st := tester.NewPrefixedS3Storage(t)
r, err := repository.Initialize(ctx, account.Account{}, st, control.Options{}) r, err := repository.Initialize(ctx, account.Account{}, st, control.Options{})
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
oldID := r.GetID() oldID := r.GetID()
require.NoError(t, r.Close(ctx)) err = r.Close(ctx)
require.NoError(t, err, clues.ToCore(err))
// now re-connect // now re-connect
r, err = repository.Connect(ctx, account.Account{}, st, control.Options{}) r, err = repository.Connect(ctx, account.Account{}, st, control.Options{})
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, oldID, r.GetID()) assert.Equal(t, oldID, r.GetID())
} }
@ -193,10 +198,10 @@ func (suite *RepositoryIntegrationSuite) TestNewBackup() {
st := tester.NewPrefixedS3Storage(t) st := tester.NewPrefixedS3Storage(t)
r, err := repository.Initialize(ctx, acct, st, control.Options{}) r, err := repository.Initialize(ctx, acct, st, control.Options{})
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
bo, err := r.NewBackup(ctx, selectors.Selector{DiscreteOwner: "test"}) bo, err := r.NewBackup(ctx, selectors.Selector{DiscreteOwner: "test"})
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, bo) require.NotNil(t, bo)
} }
@ -213,10 +218,10 @@ func (suite *RepositoryIntegrationSuite) TestNewRestore() {
st := tester.NewPrefixedS3Storage(t) st := tester.NewPrefixedS3Storage(t)
r, err := repository.Initialize(ctx, acct, st, control.Options{}) r, err := repository.Initialize(ctx, acct, st, control.Options{})
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
ro, err := r.NewRestore(ctx, "backup-id", selectors.Selector{DiscreteOwner: "test"}, dest) ro, err := r.NewRestore(ctx, "backup-id", selectors.Selector{DiscreteOwner: "test"}, dest)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, ro) require.NotNil(t, ro)
} }

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/kopia" "github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
) )
@ -35,19 +36,23 @@ func (suite *RepositoryModelSuite) TestWriteGetModel() {
kopiaRef = kopia.NewConn(s) kopiaRef = kopia.NewConn(s)
) )
require.NoError(t, kopiaRef.Initialize(ctx)) err := kopiaRef.Initialize(ctx)
require.NoError(t, kopiaRef.Connect(ctx)) require.NoError(t, err, clues.ToCore(err))
err = kopiaRef.Connect(ctx)
require.NoError(t, err, clues.ToCore(err))
defer kopiaRef.Close(ctx) defer kopiaRef.Close(ctx)
ms, err := kopia.NewModelStore(kopiaRef) ms, err := kopia.NewModelStore(kopiaRef)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
defer ms.Close(ctx) defer ms.Close(ctx)
require.NoError(t, newRepoModel(ctx, ms, "fnords")) err = newRepoModel(ctx, ms, "fnords")
require.NoError(t, err, clues.ToCore(err))
got, err := getRepoModel(ctx, ms) got, err := getRepoModel(ctx, ms)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, "fnords", string(got.ID)) assert.Equal(t, "fnords", string(got.ID))
} }

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
@ -37,7 +38,7 @@ func (suite *ExchangeSelectorSuite) TestToExchangeBackup() {
eb := NewExchangeBackup(nil) eb := NewExchangeBackup(nil)
s := eb.Selector s := eb.Selector
eb, err := s.ToExchangeBackup() eb, err := s.ToExchangeBackup()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, eb.Service, ServiceExchange) assert.Equal(t, eb.Service, ServiceExchange)
assert.NotZero(t, eb.Scopes()) assert.NotZero(t, eb.Scopes())
} }
@ -54,7 +55,7 @@ func (suite *ExchangeSelectorSuite) TestToExchangeRestore() {
eb := NewExchangeRestore(nil) eb := NewExchangeRestore(nil)
s := eb.Selector s := eb.Selector
eb, err := s.ToExchangeRestore() eb, err := s.ToExchangeRestore()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, eb.Service, ServiceExchange) assert.Equal(t, eb.Service, ServiceExchange)
assert.NotZero(t, eb.Scopes()) assert.NotZero(t, eb.Scopes())
} }

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/filters" "github.com/alcionai/corso/src/pkg/filters"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -188,7 +189,7 @@ func scopeMustHave[T scopeT](t *testing.T, sc T, m map[categorizer]string) {
// stubbing out static values where necessary. // stubbing out static values where necessary.
func stubPath(t *testing.T, user string, s []string, cat path.CategoryType) path.Path { func stubPath(t *testing.T, user string, s []string, cat path.CategoryType) path.Path {
pth, err := path.Build("tid", user, path.ExchangeService, cat, true, s...) pth, err := path.Build("tid", user, path.ExchangeService, cat, true, s...)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
return pth return pth
} }

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
@ -35,7 +36,7 @@ func (suite *OneDriveSelectorSuite) TestToOneDriveBackup() {
ob := NewOneDriveBackup(Any()) ob := NewOneDriveBackup(Any())
s := ob.Selector s := ob.Selector
ob, err := s.ToOneDriveBackup() ob, err := s.ToOneDriveBackup()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, ob.Service, ServiceOneDrive) assert.Equal(t, ob.Service, ServiceOneDrive)
assert.NotZero(t, ob.Scopes()) assert.NotZero(t, ob.Scopes())
} }
@ -155,7 +156,7 @@ func (suite *OneDriveSelectorSuite) TestToOneDriveRestore() {
eb := NewOneDriveRestore(Any()) eb := NewOneDriveRestore(Any())
s := eb.Selector s := eb.Selector
or, err := s.ToOneDriveRestore() or, err := s.ToOneDriveRestore()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, or.Service, ServiceOneDrive) assert.Equal(t, or.Service, ServiceOneDrive)
assert.NotZero(t, or.Scopes()) assert.NotZero(t, or.Scopes())
} }
@ -265,7 +266,7 @@ func (suite *OneDriveSelectorSuite) TestOneDriveCategory_PathValues() {
elems := []string{"drive", "driveID", "root:", "dir1", "dir2", fileName + "-id"} elems := []string{"drive", "driveID", "root:", "dir1", "dir2", fileName + "-id"}
filePath, err := path.Build("tenant", "user", path.OneDriveService, path.FilesCategory, true, elems...) filePath, err := path.Build("tenant", "user", path.OneDriveService, path.FilesCategory, true, elems...)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
expected := map[categorizer][]string{ expected := map[categorizer][]string{
OneDriveFolder: {"dir1/dir2"}, OneDriveFolder: {"dir1/dir2"},

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
@ -288,7 +289,7 @@ func (suite *SelectorScopesSuite) TestReduce() {
dataCats, dataCats,
errs) errs)
require.NotNil(t, result) require.NotNil(t, result)
require.NoError(t, errs.Failure(), "no recoverable errors") require.NoError(t, errs.Failure(), "no recoverable errors", clues.ToCore(errs.Failure()))
assert.Len(t, result.Entries, test.expectLen) assert.Len(t, result.Entries, test.expectLen)
}) })
} }
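The Reduce test above attaches `ToCore` to the fault bus's failure rather than to a plain error. A rough sketch of that shape, assuming `fault.New(true)` and `Failure()` behave as the diff implies (`Failure` returning the hard error, or nil when there is none); the failure is captured once here for brevity, rather than calling `Failure()` twice as the diff does:

```go
package example_test

import (
	"testing"

	"github.com/alcionai/clues"
	"github.com/alcionai/corso/src/pkg/fault"
	"github.com/stretchr/testify/require"
)

func TestReduceSketch(t *testing.T) {
	errs := fault.New(true) // fail-fast bus, as in the test above

	// ...run the code under test, passing it errs...

	// Capturing Failure() once keeps the message and the ToCore payload
	// describing the same error value.
	failure := errs.Failure()
	require.NoError(t, failure, "no recoverable errors", clues.ToCore(failure))
}
```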

View File

@ -6,6 +6,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/filters" "github.com/alcionai/corso/src/pkg/filters"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -29,7 +30,7 @@ func (suite *SelectorSuite) TestNewSelector() {
func (suite *SelectorSuite) TestBadCastErr() { func (suite *SelectorSuite) TestBadCastErr() {
err := badCastErr(ServiceUnknown, ServiceExchange) err := badCastErr(ServiceUnknown, ServiceExchange)
assert.Error(suite.T(), err) assert.Error(suite.T(), err, clues.ToCore(err))
} }
func (suite *SelectorSuite) TestResourceOwnersIn() { func (suite *SelectorSuite) TestResourceOwnersIn() {
@ -365,13 +366,14 @@ func (suite *SelectorSuite) TestPathCategories_includes() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
obj := test.getSelector(t) obj := test.getSelector(t)
cats, err := obj.PathCategories() cats, err := obj.PathCategories()
for _, entry := range cats.Includes { for _, entry := range cats.Includes {
assert.NotEqual(t, entry, path.UnknownCategory) assert.NotEqual(t, entry, path.UnknownCategory)
} }
test.isErr(t, err) test.isErr(t, err, clues.ToCore(err))
}) })
} }
} }
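Several of the table-driven suites in this change (`isErr`, `errCheck`, `expect`) store a testify `assert.ErrorAssertionFunc` in the test case, so the `ToCore` value simply flows through its variadic tail. A minimal sketch with a hypothetical `categorize` helper in place of the selector calls above:

```go
package example_test

import (
	"errors"
	"testing"

	"github.com/alcionai/clues"
	"github.com/stretchr/testify/assert"
)

// categorize stands in for the selector calls exercised in the tables above.
func categorize(s string) error {
	if s == "" {
		return errors.New("empty selector")
	}

	return nil
}

func TestCategorizeTable(t *testing.T) {
	table := []struct {
		name  string
		input string
		isErr assert.ErrorAssertionFunc // assert.NoError and assert.Error both satisfy this
	}{
		{name: "populated", input: "fnords", isErr: assert.NoError},
		{name: "empty", input: "", isErr: assert.Error},
	}

	for _, test := range table {
		t.Run(test.name, func(t *testing.T) {
			err := categorize(test.input)
			// ErrorAssertionFunc's variadic tail is msgAndArgs, so the clues
			// core tags along exactly as in test.isErr / test.expect above.
			test.isErr(t, err, clues.ToCore(err))
		})
	}
}
```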

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
@ -35,7 +36,7 @@ func (suite *SharePointSelectorSuite) TestToSharePointBackup() {
ob := NewSharePointBackup(nil) ob := NewSharePointBackup(nil)
s := ob.Selector s := ob.Selector
ob, err := s.ToSharePointBackup() ob, err := s.ToSharePointBackup()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, ob.Service, ServiceSharePoint) assert.Equal(t, ob.Service, ServiceSharePoint)
assert.NotZero(t, ob.Scopes()) assert.NotZero(t, ob.Scopes())
} }
@ -191,7 +192,7 @@ func (suite *SharePointSelectorSuite) TestToSharePointRestore() {
eb := NewSharePointRestore(nil) eb := NewSharePointRestore(nil)
s := eb.Selector s := eb.Selector
or, err := s.ToSharePointRestore() or, err := s.ToSharePointRestore()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, or.Service, ServiceSharePoint) assert.Equal(t, or.Service, ServiceSharePoint)
assert.NotZero(t, or.Scopes()) assert.NotZero(t, or.Scopes())
} }
@ -364,7 +365,7 @@ func (suite *SharePointSelectorSuite) TestSharePointCategory_PathValues() {
test.sc.PathType(), test.sc.PathType(),
true, true,
elems...) elems...)
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
ent := details.DetailsEntry{ ent := details.DetailsEntry{
RepoRef: itemPath.String(), RepoRef: itemPath.String(),

View File

@ -6,6 +6,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
) )
@ -33,7 +34,7 @@ func (suite *M365IntegrationSuite) TestUsers() {
) )
users, err := Users(ctx, acct, fault.New(true)) users, err := Users(ctx, acct, fault.New(true))
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.NotEmpty(t, users) assert.NotEmpty(t, users)
for _, u := range users { for _, u := range users {
@ -57,7 +58,7 @@ func (suite *M365IntegrationSuite) TestSites() {
) )
sites, err := Sites(ctx, acct, fault.New(true)) sites, err := Sites(ctx, acct, fault.New(true))
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.NotEmpty(t, sites) assert.NotEmpty(t, sites)
for _, s := range sites { for _, s := range sites {

View File

@ -6,6 +6,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/pkg/credentials" "github.com/alcionai/corso/src/pkg/credentials"
"github.com/alcionai/corso/src/pkg/storage" "github.com/alcionai/corso/src/pkg/storage"
) )
@ -27,7 +28,7 @@ var goodCommonConfig = storage.CommonConfig{
func (suite *CommonCfgSuite) TestCommonConfig_Config() { func (suite *CommonCfgSuite) TestCommonConfig_Config() {
cfg := goodCommonConfig cfg := goodCommonConfig
c, err := cfg.StringConfig() c, err := cfg.StringConfig()
assert.NoError(suite.T(), err) assert.NoError(suite.T(), err, clues.ToCore(err))
table := []struct { table := []struct {
key string key string
@ -47,9 +48,9 @@ func (suite *CommonCfgSuite) TestStorage_CommonConfig() {
in := goodCommonConfig in := goodCommonConfig
s, err := storage.NewStorage(storage.ProviderUnknown, in) s, err := storage.NewStorage(storage.ProviderUnknown, in)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
out, err := s.CommonConfig() out, err := s.CommonConfig()
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.Equal(t, in.CorsoPassphrase, out.CorsoPassphrase) assert.Equal(t, in.CorsoPassphrase, out.CorsoPassphrase)
} }
@ -84,7 +85,7 @@ func (suite *CommonCfgSuite) TestStorage_CommonConfig_InvalidCases() {
for _, test := range table2 { for _, test := range table2 {
suite.T().Run(test.name, func(t *testing.T) { suite.T().Run(test.name, func(t *testing.T) {
st, err := storage.NewStorage(storage.ProviderUnknown, goodCommonConfig) st, err := storage.NewStorage(storage.ProviderUnknown, goodCommonConfig)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
test.amend(st) test.amend(st)
_, err = st.CommonConfig() _, err = st.CommonConfig()
assert.Error(t, err) assert.Error(t, err)

View File

@ -3,6 +3,7 @@ package storage
import ( import (
"testing" "testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
@ -36,8 +37,9 @@ var (
func (suite *S3CfgSuite) TestS3Config_Config() { func (suite *S3CfgSuite) TestS3Config_Config() {
s3 := goodS3Config s3 := goodS3Config
c, err := s3.StringConfig() c, err := s3.StringConfig()
assert.NoError(suite.T(), err) assert.NoError(suite.T(), err, clues.ToCore(err))
table := []struct { table := []struct {
key string key string
@ -57,9 +59,9 @@ func (suite *S3CfgSuite) TestStorage_S3Config() {
in := goodS3Config in := goodS3Config
s, err := NewStorage(ProviderS3, in) s, err := NewStorage(ProviderS3, in)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
out, err := s.S3Config() out, err := s.S3Config()
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
assert.Equal(t, in.Bucket, out.Bucket) assert.Equal(t, in.Bucket, out.Bucket)
assert.Equal(t, in.Endpoint, out.Endpoint) assert.Equal(t, in.Endpoint, out.Endpoint)
@ -104,7 +106,7 @@ func (suite *S3CfgSuite) TestStorage_S3Config_invalidCases() {
for _, test := range table2 { for _, test := range table2 {
suite.T().Run(test.name, func(t *testing.T) { suite.T().Run(test.name, func(t *testing.T) {
st, err := NewStorage(ProviderUnknown, goodS3Config) st, err := NewStorage(ProviderUnknown, goodS3Config)
assert.NoError(t, err) assert.NoError(t, err, clues.ToCore(err))
test.amend(st) test.amend(st)
_, err = st.S3Config() _, err = st.S3Config()
assert.Error(t, err) assert.Error(t, err)
@ -149,7 +151,7 @@ func (suite *S3CfgSuite) TestStorage_S3Config_StringConfig() {
for _, test := range table { for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) { suite.T().Run(test.name, func(t *testing.T) {
result, err := test.input.StringConfig() result, err := test.input.StringConfig()
require.NoError(t, err) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, test.expect, result) assert.Equal(t, test.expect, result)
}) })
} }

View File

@ -3,6 +3,7 @@ package storage
import ( import (
"testing" "testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
) )
@ -39,11 +40,13 @@ func (suite *StorageSuite) TestNewStorage() {
for _, test := range table { for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) { suite.T().Run(test.name, func(t *testing.T) {
s, err := NewStorage(test.p, test.c) s, err := NewStorage(test.p, test.c)
test.errCheck(t, err) test.errCheck(t, err, clues.ToCore(err))
// remaining tests are dependent upon error-free state // remaining tests are dependent upon error-free state
if test.c.err != nil { if test.c.err != nil {
return return
} }
assert.Equalf(t, assert.Equalf(t,
test.p, test.p,
s.Provider, s.Provider,

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/model" "github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup" "github.com/alcionai/corso/src/pkg/backup"
@ -64,13 +65,15 @@ func (suite *StoreBackupUnitSuite) TestGetBackup() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
sm := &store.Wrapper{Storer: test.mock} sm := &store.Wrapper{Storer: test.mock}
result, err := sm.GetBackup(ctx, model.StableID(uuid.NewString())) result, err := sm.GetBackup(ctx, model.StableID(uuid.NewString()))
test.expect(t, err) test.expect(t, err, clues.ToCore(err))
if err != nil { if err != nil {
return return
} }
assert.Equal(t, bu.ID, result.ID) assert.Equal(t, bu.ID, result.ID)
}) })
} }
@ -99,13 +102,15 @@ func (suite *StoreBackupUnitSuite) TestGetBackups() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
sm := &store.Wrapper{Storer: test.mock} sm := &store.Wrapper{Storer: test.mock}
result, err := sm.GetBackups(ctx) result, err := sm.GetBackups(ctx)
test.expect(t, err) test.expect(t, err, clues.ToCore(err))
if err != nil { if err != nil {
return return
} }
assert.Equal(t, 1, len(result)) assert.Equal(t, 1, len(result))
assert.Equal(t, bu.ID, result[0].ID) assert.Equal(t, bu.ID, result[0].ID)
}) })
@ -135,10 +140,10 @@ func (suite *StoreBackupUnitSuite) TestDeleteBackup() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
sm := &store.Wrapper{Storer: test.mock} sm := &store.Wrapper{Storer: test.mock}
err := sm.DeleteBackup(ctx, model.StableID(uuid.NewString())) err := sm.DeleteBackup(ctx, model.StableID(uuid.NewString()))
test.expect(t, err) test.expect(t, err, clues.ToCore(err))
}) })
} }
} }
@ -166,13 +171,15 @@ func (suite *StoreBackupUnitSuite) TestGetDetailsIDFromBackupID() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
store := &store.Wrapper{Storer: test.mock} store := &store.Wrapper{Storer: test.mock}
dResult, bResult, err := store.GetDetailsIDFromBackupID(ctx, model.StableID(uuid.NewString())) dResult, bResult, err := store.GetDetailsIDFromBackupID(ctx, model.StableID(uuid.NewString()))
test.expect(t, err) test.expect(t, err, clues.ToCore(err))
if err != nil { if err != nil {
return return
} }
assert.Equal(t, bu.DetailsID, dResult) assert.Equal(t, bu.DetailsID, dResult)
assert.Equal(t, bu.ID, bResult.ID) assert.Equal(t, bu.ID, bResult.ID)
}) })