append ToCore to all error tests (#2793)

In order to retrieve the structured error data that clues attaches to an error, tests need to extract it from the error using the clues library.

This change appends `clues.ToCore(err)` to all
variations of `assert.NoError(t, err)`. The only
other changes are those needed to keep the linters
passing, or to produce an error variable for the
`ToCore` call.
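The pattern looks like the following. This is a minimal, hypothetical sketch: `doSomething` stands in for whatever call the surrounding test already makes, and is not part of this PR.

```go
package example_test

import (
	"testing"

	"github.com/alcionai/clues"
	"github.com/stretchr/testify/require"
)

// doSomething is a placeholder for any call under test; it is not part of this PR.
func doSomething() error { return nil }

func TestToCorePattern(t *testing.T) {
	// Before: the error was asserted inline, so a failure printed only err.Error().
	//   require.NoError(t, doSomething())

	// After: capture the error first so clues.ToCore(err) can be passed as an
	// extra message argument to the assertion.
	err := doSomething()
	require.NoError(t, err, clues.ToCore(err))
}
```

On failure, testify prints the extra argument, so the structured data clues attached to the error shows up in the test output instead of only `err.Error()`.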

---

#### Does this PR need a docs update or release note?

- [x]  No

#### Type of change

- [x] 🤖 Test
- [x] 🧹 Tech Debt/Cleanup

#### Issue(s)

* #1970

#### Test Plan

- [x]  Unit test
- [x] 💚 E2E
Keepers authored 2023-03-15 13:02:47 -06:00; committed by GitHub
parent 0125876192
commit 76b3fe3b86
100 changed files with 1164 additions and 931 deletions

View File

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/print"
@ -70,7 +71,7 @@ func (suite *NoBackupExchangeE2ESuite) SetupSuite() {
suite.recorder = strings.Builder{}
cfg, err := suite.st.S3Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
force := map[string]string{
tester.TestCfgAccountProvider: "M365",
@ -85,7 +86,7 @@ func (suite *NoBackupExchangeE2ESuite) SetupSuite() {
// init the repo first
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
}
func (suite *NoBackupExchangeE2ESuite) TestExchangeBackupListCmd_empty() {
@ -107,7 +108,8 @@ func (suite *NoBackupExchangeE2ESuite) TestExchangeBackupListCmd_empty() {
ctx = print.SetRootCmd(ctx, cmd)
// run the command
require.NoError(t, cmd.ExecuteContext(ctx))
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := suite.recorder.String()
@ -149,7 +151,7 @@ func (suite *BackupExchangeE2ESuite) SetupSuite() {
suite.st = tester.NewPrefixedS3Storage(t)
cfg, err := suite.st.S3Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
force := map[string]string{
tester.TestCfgAccountProvider: "M365",
@ -164,7 +166,7 @@ func (suite *BackupExchangeE2ESuite) SetupSuite() {
// init the repo first
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
}
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd() {
@ -192,7 +194,8 @@ func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd() {
ctx = print.SetRootCmd(ctx, cmd)
// run the command
require.NoError(t, cmd.ExecuteContext(ctx))
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := recorder.String()
t.Log("backup results", result)
@ -229,7 +232,7 @@ func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_UserNotInTenant() {
// run the command
err := cmd.ExecuteContext(ctx)
require.Error(t, err)
require.Error(t, err, clues.ToCore(err))
assert.Contains(
t,
err.Error(),
@ -278,7 +281,7 @@ func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() {
suite.recorder = strings.Builder{}
cfg, err := suite.st.S3Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
force := map[string]string{
tester.TestCfgAccountProvider: "M365",
@ -296,7 +299,7 @@ func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() {
// init the repo first
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.backupOps = make(map[path.CategoryType]string)
@ -322,8 +325,10 @@ func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() {
sel.Include(scopes)
bop, err := suite.repo.NewBackup(ctx, sel.Selector)
require.NoError(t, bop.Run(ctx))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
err = bop.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
bIDs := string(bop.Results.BackupID)
@ -361,7 +366,8 @@ func (suite *PreparedBackupExchangeE2ESuite) TestExchangeListCmd() {
ctx = print.SetRootCmd(ctx, cmd)
// run the command
require.NoError(t, cmd.ExecuteContext(ctx))
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output
result := suite.recorder.String()
@ -394,7 +400,8 @@ func (suite *PreparedBackupExchangeE2ESuite) TestExchangeListCmd_singleID() {
ctx = print.SetRootCmd(ctx, cmd)
// run the command
require.NoError(t, cmd.ExecuteContext(ctx))
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output
result := suite.recorder.String()
@ -421,7 +428,8 @@ func (suite *PreparedBackupExchangeE2ESuite) TestExchangeListCmd_badID() {
ctx = print.SetRootCmd(ctx, cmd)
// run the command
require.Error(t, cmd.ExecuteContext(ctx))
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
})
}
}
@ -441,7 +449,7 @@ func (suite *PreparedBackupExchangeE2ESuite) TestExchangeDetailsCmd() {
// fetch the details from the repo first
deets, _, errs := suite.repo.BackupDetails(ctx, string(bID))
require.NoError(t, errs.Failure())
require.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
require.Empty(t, errs.Recovered())
cmd := tester.StubRootCmd(
@ -455,7 +463,8 @@ func (suite *PreparedBackupExchangeE2ESuite) TestExchangeDetailsCmd() {
ctx = print.SetRootCmd(ctx, cmd)
// run the command
require.NoError(t, cmd.ExecuteContext(ctx))
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output
result := suite.recorder.String()
@ -516,7 +525,7 @@ func (suite *BackupDeleteExchangeE2ESuite) SetupSuite() {
suite.st = tester.NewPrefixedS3Storage(t)
cfg, err := suite.st.S3Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
force := map[string]string{
tester.TestCfgAccountProvider: "M365",
@ -532,7 +541,7 @@ func (suite *BackupDeleteExchangeE2ESuite) SetupSuite() {
// init the repo first
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
m365UserID := tester.M365UserID(t)
users := []string{m365UserID}
@ -542,8 +551,10 @@ func (suite *BackupDeleteExchangeE2ESuite) SetupSuite() {
sel.Include(sel.MailFolders([]string{exchange.DefaultMailFolder}, selectors.PrefixMatch()))
suite.backupOp, err = suite.repo.NewBackup(ctx, sel.Selector)
require.NoError(t, suite.backupOp.Run(ctx))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
err = suite.backupOp.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
}
func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd() {
@ -560,7 +571,8 @@ func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd() {
cli.BuildCommandTree(cmd)
// run the command
require.NoError(t, cmd.ExecuteContext(ctx))
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// a follow-up details call should fail, due to the backup ID being deleted
cmd = tester.StubRootCmd(
@ -569,7 +581,8 @@ func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd() {
"--backup", string(suite.backupOp.Results.BackupID))
cli.BuildCommandTree(cmd)
require.Error(t, cmd.ExecuteContext(ctx))
err = cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd_UnknownID() {
@ -586,5 +599,6 @@ func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd_UnknownID
cli.BuildCommandTree(cmd)
// unknown backupIDs should error since the modelStore can't find the backup
require.Error(t, cmd.ExecuteContext(ctx))
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/tester"
@ -99,7 +100,8 @@ func (suite *ExchangeSuite) TestValidateBackupCreateFlags() {
suite.Run(test.name, func() {
t := suite.T()
test.expect(t, validateExchangeBackupCreateFlags(test.user, test.data))
err := validateExchangeBackupCreateFlags(test.user, test.data)
test.expect(t, err, clues.ToCore(err))
})
}
}
@ -233,7 +235,7 @@ func (suite *ExchangeSuite) TestExchangeBackupDetailsSelectors() {
"backup-ID",
test.Opts,
false)
assert.NoError(t, err, "failure")
assert.NoError(t, err, clues.ToCore(err))
assert.ElementsMatch(t, test.Expected, output.Entries)
})
}
@ -253,7 +255,7 @@ func (suite *ExchangeSuite) TestExchangeBackupDetailsSelectorsBadFormats() {
"backup-ID",
test.Opts,
false)
assert.Error(t, err, "failure")
assert.Error(t, err, clues.ToCore(err))
assert.Empty(t, output)
})
}

View File

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/print"
@ -61,7 +62,7 @@ func (suite *NoBackupOneDriveE2ESuite) SetupSuite() {
suite.st = tester.NewPrefixedS3Storage(t)
cfg, err := suite.st.S3Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
force := map[string]string{
tester.TestCfgAccountProvider: "M365",
@ -83,7 +84,7 @@ func (suite *NoBackupOneDriveE2ESuite) SetupSuite() {
// TODO: turn back on when this stops throttling-out the tests.
// ToggleFeatures: control.Toggles{EnablePermissionsBackup: true},
})
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
}
func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupListCmd_empty() {
@ -105,7 +106,8 @@ func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupListCmd_empty() {
ctx = print.SetRootCmd(ctx, cmd)
// run the command
require.NoError(t, cmd.ExecuteContext(ctx))
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := suite.recorder.String()
@ -135,7 +137,7 @@ func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupCmd_UserNotInTenant() {
// run the command
err := cmd.ExecuteContext(ctx)
require.Error(t, err)
require.Error(t, err, clues.ToCore(err))
assert.Contains(
t,
err.Error(),
@ -182,7 +184,7 @@ func (suite *BackupDeleteOneDriveE2ESuite) SetupSuite() {
suite.st = tester.NewPrefixedS3Storage(t)
cfg, err := suite.st.S3Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
force := map[string]string{
tester.TestCfgAccountProvider: "M365",
@ -205,7 +207,7 @@ func (suite *BackupDeleteOneDriveE2ESuite) SetupSuite() {
// TODO: turn back on when this stops throttling-out the tests.
// ToggleFeatures: control.Toggles{EnablePermissionsBackup: true},
})
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
m365UserID := tester.M365UserID(t)
users := []string{m365UserID}
@ -215,8 +217,10 @@ func (suite *BackupDeleteOneDriveE2ESuite) SetupSuite() {
sel.Include(sel.Folders(selectors.Any()))
suite.backupOp, err = suite.repo.NewBackup(ctx, sel.Selector)
require.NoError(t, suite.backupOp.Run(ctx))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
err = suite.backupOp.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
}
func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd() {
@ -238,7 +242,8 @@ func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd() {
ctx = print.SetRootCmd(ctx, cmd)
// run the command
require.NoError(t, cmd.ExecuteContext(ctx))
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := suite.recorder.String()
@ -251,7 +256,8 @@ func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd() {
"--backup", string(suite.backupOp.Results.BackupID))
cli.BuildCommandTree(cmd)
require.Error(t, cmd.ExecuteContext(ctx))
err = cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd_unknownID() {
@ -268,5 +274,6 @@ func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd_unknownID
cli.BuildCommandTree(cmd)
// unknown backupIDs should error since the modelStore can't find the backup
require.Error(t, cmd.ExecuteContext(ctx))
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/tester"
)
@ -85,7 +86,8 @@ func (suite *OneDriveSuite) TestValidateOneDriveBackupCreateFlags() {
}
for _, test := range table {
suite.Run(test.name, func() {
test.expect(suite.T(), validateOneDriveBackupCreateFlags(test.user))
err := validateOneDriveBackupCreateFlags(test.user)
test.expect(suite.T(), err, clues.ToCore(err))
})
}
}
@ -104,7 +106,7 @@ func (suite *OneDriveSuite) TestOneDriveBackupDetailsSelectors() {
"backup-ID",
test.Opts,
false)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.ElementsMatch(t, test.Expected, output.Entries)
})
}
@ -124,7 +126,7 @@ func (suite *OneDriveSuite) TestOneDriveBackupDetailsSelectorsBadFormats() {
"backup-ID",
test.Opts,
false)
assert.Error(t, err)
assert.Error(t, err, clues.ToCore(err))
assert.Empty(t, output)
})
}

View File

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/print"
@ -59,7 +60,7 @@ func (suite *NoBackupSharePointE2ESuite) SetupSuite() {
suite.st = tester.NewPrefixedS3Storage(t)
cfg, err := suite.st.S3Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
force := map[string]string{
tester.TestCfgAccountProvider: "M365",
@ -74,7 +75,7 @@ func (suite *NoBackupSharePointE2ESuite) SetupSuite() {
// init the repo first
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
}
func (suite *NoBackupSharePointE2ESuite) TestSharePointBackupListCmd_empty() {
@ -96,7 +97,8 @@ func (suite *NoBackupSharePointE2ESuite) TestSharePointBackupListCmd_empty() {
ctx = print.SetRootCmd(ctx, cmd)
// run the command
require.NoError(t, cmd.ExecuteContext(ctx))
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := suite.recorder.String()
@ -138,7 +140,7 @@ func (suite *BackupDeleteSharePointE2ESuite) SetupSuite() {
suite.st = tester.NewPrefixedS3Storage(t)
cfg, err := suite.st.S3Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
force := map[string]string{
tester.TestCfgAccountProvider: "M365",
@ -154,7 +156,7 @@ func (suite *BackupDeleteSharePointE2ESuite) SetupSuite() {
// init the repo first
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
m365SiteID := tester.M365SiteID(t)
sites := []string{m365SiteID}
@ -164,8 +166,10 @@ func (suite *BackupDeleteSharePointE2ESuite) SetupSuite() {
sel.Include(sel.LibraryFolders(selectors.Any()))
suite.backupOp, err = suite.repo.NewBackup(ctx, sel.Selector)
require.NoError(t, suite.backupOp.Run(ctx))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
err = suite.backupOp.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
}
func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd() {
@ -187,7 +191,8 @@ func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd() {
ctx = print.SetRootCmd(ctx, cmd)
// run the command
require.NoError(t, cmd.ExecuteContext(ctx))
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := suite.recorder.String()
@ -202,7 +207,8 @@ func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd() {
// "--backup", string(suite.backupOp.Results.BackupID))
// cli.BuildCommandTree(cmd)
// require.Error(t, cmd.ExecuteContext(ctx))
// err := cmd.ExecuteContext(ctx)
// require.Error(t, err, clues.ToCore(err))
func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd_unknownID() {
t := suite.T()
@ -218,5 +224,6 @@ func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd_unkno
cli.BuildCommandTree(cmd)
// unknown backupIDs should error since the modelStore can't find the backup
require.Error(t, cmd.ExecuteContext(ctx))
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/connector"
@ -100,7 +101,8 @@ func (suite *SharePointSuite) TestValidateSharePointBackupCreateFlags() {
}
for _, test := range table {
suite.Run(test.name, func() {
test.expect(suite.T(), validateSharePointBackupCreateFlags(test.site, test.weburl, nil))
err := validateSharePointBackupCreateFlags(test.site, test.weburl, nil)
test.expect(suite.T(), err, clues.ToCore(err))
})
}
}
@ -200,7 +202,7 @@ func (suite *SharePointSuite) TestSharePointBackupCreateSelectors() {
defer flush()
sel, err := sharePointBackupCreateSelectors(ctx, test.site, test.weburl, test.data, gc)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.ElementsMatch(t, test.expect, sel.DiscreteResourceOwners())
})
@ -221,7 +223,7 @@ func (suite *SharePointSuite) TestSharePointBackupDetailsSelectors() {
"backup-ID",
test.Opts,
false)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.ElementsMatch(t, test.Expected, output.Entries)
})
}
@ -241,7 +243,7 @@ func (suite *SharePointSuite) TestSharePointBackupDetailsSelectorsBadFormats() {
"backup-ID",
test.Opts,
false)
assert.Error(t, err)
assert.Error(t, err, clues.ToCore(err))
assert.Empty(t, output)
})
}

View File

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/credentials"
@ -53,20 +54,21 @@ func (suite *ConfigSuite) TestReadRepoConfigBasic() {
testConfigData := fmt.Sprintf(configFileTemplate, b, tID)
testConfigFilePath := filepath.Join(t.TempDir(), "corso.toml")
err := os.WriteFile(testConfigFilePath, []byte(testConfigData), 0o700)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
// Configure viper to read test config file
vpr.SetConfigFile(testConfigFilePath)
// Read and validate config
require.NoError(t, vpr.ReadInConfig(), "reading repo config")
err = vpr.ReadInConfig()
require.NoError(t, err, "reading repo config", clues.ToCore(err))
s3Cfg, err := s3ConfigsFromViper(vpr)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, b, s3Cfg.Bucket)
m365, err := m365ConfigsFromViper(vpr)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, tID, m365.AzureTenantID)
}
@ -74,6 +76,8 @@ func (suite *ConfigSuite) TestWriteReadConfig() {
var (
t = suite.T()
vpr = viper.New()
// Configure viper to read test config file
testConfigFilePath = filepath.Join(t.TempDir(), "corso.toml")
)
const (
@ -81,23 +85,26 @@ func (suite *ConfigSuite) TestWriteReadConfig() {
tid = "3c0748d2-470e-444c-9064-1268e52609d5"
)
// Configure viper to read test config file
testConfigFilePath := filepath.Join(t.TempDir(), "corso.toml")
require.NoError(t, initWithViper(vpr, testConfigFilePath), "initializing repo config")
err := initWithViper(vpr, testConfigFilePath)
require.NoError(t, err, "initializing repo config", clues.ToCore(err))
s3Cfg := storage.S3Config{Bucket: bkt, DoNotUseTLS: true, DoNotVerifyTLS: true}
m365 := account.M365Config{AzureTenantID: tid}
require.NoError(t, writeRepoConfigWithViper(vpr, s3Cfg, m365, "repoid"), "writing repo config")
require.NoError(t, vpr.ReadInConfig(), "reading repo config")
err = writeRepoConfigWithViper(vpr, s3Cfg, m365, "repoid")
require.NoError(t, err, "writing repo config", clues.ToCore(err))
err = vpr.ReadInConfig()
require.NoError(t, err, "reading repo config", clues.ToCore(err))
readS3Cfg, err := s3ConfigsFromViper(vpr)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, readS3Cfg.Bucket, s3Cfg.Bucket)
assert.Equal(t, readS3Cfg.DoNotUseTLS, s3Cfg.DoNotUseTLS)
assert.Equal(t, readS3Cfg.DoNotVerifyTLS, s3Cfg.DoNotVerifyTLS)
readM365, err := m365ConfigsFromViper(vpr)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, readM365.AzureTenantID, m365.AzureTenantID)
}
@ -105,6 +112,8 @@ func (suite *ConfigSuite) TestMustMatchConfig() {
var (
t = suite.T()
vpr = viper.New()
// Configure viper to read test config file
testConfigFilePath = filepath.Join(t.TempDir(), "corso.toml")
)
const (
@ -112,15 +121,17 @@ func (suite *ConfigSuite) TestMustMatchConfig() {
tid = "dfb12063-7598-458b-85ab-42352c5c25e2"
)
// Configure viper to read test config file
testConfigFilePath := filepath.Join(t.TempDir(), "corso.toml")
require.NoError(t, initWithViper(vpr, testConfigFilePath), "initializing repo config")
err := initWithViper(vpr, testConfigFilePath)
require.NoError(t, err, "initializing repo config")
s3Cfg := storage.S3Config{Bucket: bkt}
m365 := account.M365Config{AzureTenantID: tid}
require.NoError(t, writeRepoConfigWithViper(vpr, s3Cfg, m365, "repoid"), "writing repo config")
require.NoError(t, vpr.ReadInConfig(), "reading repo config")
err = writeRepoConfigWithViper(vpr, s3Cfg, m365, "repoid")
require.NoError(t, err, "writing repo config", clues.ToCore(err))
err = vpr.ReadInConfig()
require.NoError(t, err, "reading repo config", clues.ToCore(err))
table := []struct {
name string
@ -172,7 +183,7 @@ func (suite *ConfigSuite) TestMustMatchConfig() {
}
for _, test := range table {
suite.Run(test.name, func() {
test.errCheck(suite.T(), mustMatchConfig(vpr, test.input))
test.errCheck(suite.T(), mustMatchConfig(vpr, test.input), clues.ToCore(err))
})
}
}
@ -205,7 +216,9 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount() {
// Configure viper to read test config file
testConfigFilePath := filepath.Join(t.TempDir(), "corso.toml")
require.NoError(t, initWithViper(vpr, testConfigFilePath), "initializing repo config")
err := initWithViper(vpr, testConfigFilePath)
require.NoError(t, err, "initializing repo config", clues.ToCore(err))
s3Cfg := storage.S3Config{
Bucket: bkt,
@ -216,14 +229,17 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount() {
}
m365 := account.M365Config{AzureTenantID: tid}
require.NoError(t, writeRepoConfigWithViper(vpr, s3Cfg, m365, "repoid"), "writing repo config")
require.NoError(t, vpr.ReadInConfig(), "reading repo config")
err = writeRepoConfigWithViper(vpr, s3Cfg, m365, "repoid")
require.NoError(t, err, "writing repo config", clues.ToCore(err))
err = vpr.ReadInConfig()
require.NoError(t, err, "reading repo config", clues.ToCore(err))
config, err := getStorageAndAccountWithViper(vpr, true, nil)
require.NoError(t, err, "getting storage and account from config")
require.NoError(t, err, "getting storage and account from config", clues.ToCore(err))
readS3Cfg, err := config.Storage.S3Config()
require.NoError(t, err, "reading s3 config from storage")
require.NoError(t, err, "reading s3 config from storage", clues.ToCore(err))
assert.Equal(t, readS3Cfg.Bucket, s3Cfg.Bucket)
assert.Equal(t, readS3Cfg.Endpoint, s3Cfg.Endpoint)
assert.Equal(t, readS3Cfg.Prefix, s3Cfg.Prefix)
@ -232,11 +248,11 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount() {
assert.Equal(t, config.RepoID, "repoid")
common, err := config.Storage.CommonConfig()
require.NoError(t, err, "reading common config from storage")
require.NoError(t, err, "reading common config from storage", clues.ToCore(err))
assert.Equal(t, common.CorsoPassphrase, os.Getenv(credentials.CorsoPassphrase))
readM365, err := config.Account.M365Config()
require.NoError(t, err, "reading m365 config from account")
require.NoError(t, err, "reading m365 config from account", clues.ToCore(err))
assert.Equal(t, readM365.AzureTenantID, m365.AzureTenantID)
assert.Equal(t, readM365.AzureClientID, os.Getenv(credentials.AzureClientID))
assert.Equal(t, readM365.AzureClientSecret, os.Getenv(credentials.AzureClientSecret))
@ -267,10 +283,10 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount_noFileOnlyOverride
}
config, err := getStorageAndAccountWithViper(vpr, false, overrides)
require.NoError(t, err, "getting storage and account from config")
require.NoError(t, err, "getting storage and account from config", clues.ToCore(err))
readS3Cfg, err := config.Storage.S3Config()
require.NoError(t, err, "reading s3 config from storage")
require.NoError(t, err, "reading s3 config from storage", clues.ToCore(err))
assert.Equal(t, readS3Cfg.Bucket, bkt)
assert.Equal(t, config.RepoID, "")
assert.Equal(t, readS3Cfg.Endpoint, end)
@ -279,11 +295,11 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount_noFileOnlyOverride
assert.True(t, readS3Cfg.DoNotVerifyTLS)
common, err := config.Storage.CommonConfig()
require.NoError(t, err, "reading common config from storage")
require.NoError(t, err, "reading common config from storage", clues.ToCore(err))
assert.Equal(t, common.CorsoPassphrase, os.Getenv(credentials.CorsoPassphrase))
readM365, err := config.Account.M365Config()
require.NoError(t, err, "reading m365 config from account")
require.NoError(t, err, "reading m365 config from account", clues.ToCore(err))
assert.Equal(t, readM365.AzureTenantID, m365.AzureTenantID)
assert.Equal(t, readM365.AzureClientID, os.Getenv(credentials.AzureClientID))
assert.Equal(t, readM365.AzureClientSecret, os.Getenv(credentials.AzureClientSecret))

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester"
)
@ -20,14 +21,15 @@ func TestPrintUnitSuite(t *testing.T) {
}
func (suite *PrintUnitSuite) TestOnly() {
t := suite.T()
c := &cobra.Command{}
ctx, flush := tester.NewContext()
defer flush()
t := suite.T()
c := &cobra.Command{}
ctx = SetRootCmd(ctx, c)
assert.NoError(t, Only(ctx, nil))
err := Only(ctx, nil)
assert.NoError(t, err, clues.ToCore(err))
assert.True(t, c.SilenceUsage)
}

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/internal/tester"
@ -61,7 +62,7 @@ func (suite *S3E2ESuite) TestInitS3Cmd() {
st := tester.NewPrefixedS3Storage(t)
cfg, err := st.S3Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
vpr, configFP := tester.MakeTempTestConfigClone(t, nil)
if !test.hasConfigFile {
@ -80,12 +81,12 @@ func (suite *S3E2ESuite) TestInitS3Cmd() {
cli.BuildCommandTree(cmd)
// run the command
require.NoError(t, cmd.ExecuteContext(ctx))
err = cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// a second initialization should result in an error
err = cmd.ExecuteContext(ctx)
assert.Error(t, err)
assert.ErrorIs(t, err, repository.ErrorRepoAlreadyExists)
assert.ErrorIs(t, err, repository.ErrorRepoAlreadyExists, clues.ToCore(err))
})
}
}
@ -98,7 +99,7 @@ func (suite *S3E2ESuite) TestInitMultipleTimes() {
st := tester.NewPrefixedS3Storage(t)
cfg, err := st.S3Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
vpr, configFP := tester.MakeTempTestConfigClone(t, nil)
@ -115,7 +116,8 @@ func (suite *S3E2ESuite) TestInitMultipleTimes() {
cli.BuildCommandTree(cmd)
// run the command
require.NoError(t, cmd.ExecuteContext(ctx))
err = cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
}
}
@ -127,7 +129,7 @@ func (suite *S3E2ESuite) TestInitS3Cmd_missingBucket() {
st := tester.NewPrefixedS3Storage(t)
cfg, err := st.S3Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
vpr, configFP := tester.MakeTempTestConfigClone(t, nil)
@ -140,7 +142,8 @@ func (suite *S3E2ESuite) TestInitS3Cmd_missingBucket() {
cli.BuildCommandTree(cmd)
// run the command
require.Error(t, cmd.ExecuteContext(ctx))
err = cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *S3E2ESuite) TestConnectS3Cmd() {
@ -175,7 +178,7 @@ func (suite *S3E2ESuite) TestConnectS3Cmd() {
st := tester.NewPrefixedS3Storage(t)
cfg, err := st.S3Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
force := map[string]string{
tester.TestCfgAccountProvider: "M365",
@ -193,7 +196,7 @@ func (suite *S3E2ESuite) TestConnectS3Cmd() {
// init the repo first
_, err = repository.Initialize(ctx, account.Account{}, st, control.Options{})
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
// then test it
cmd := tester.StubRootCmd(
@ -205,7 +208,8 @@ func (suite *S3E2ESuite) TestConnectS3Cmd() {
cli.BuildCommandTree(cmd)
// run the command
assert.NoError(t, cmd.ExecuteContext(ctx))
err = cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
})
}
}
@ -218,7 +222,7 @@ func (suite *S3E2ESuite) TestConnectS3Cmd_BadBucket() {
st := tester.NewPrefixedS3Storage(t)
cfg, err := st.S3Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
vpr, configFP := tester.MakeTempTestConfigClone(t, nil)
@ -232,7 +236,8 @@ func (suite *S3E2ESuite) TestConnectS3Cmd_BadBucket() {
cli.BuildCommandTree(cmd)
// run the command
require.Error(t, cmd.ExecuteContext(ctx))
err = cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *S3E2ESuite) TestConnectS3Cmd_BadPrefix() {
@ -243,7 +248,7 @@ func (suite *S3E2ESuite) TestConnectS3Cmd_BadPrefix() {
st := tester.NewPrefixedS3Storage(t)
cfg, err := st.S3Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
vpr, configFP := tester.MakeTempTestConfigClone(t, nil)
@ -257,5 +262,6 @@ func (suite *S3E2ESuite) TestConnectS3Cmd_BadPrefix() {
cli.BuildCommandTree(cmd)
// run the command
require.Error(t, cmd.ExecuteContext(ctx))
err = cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/utils"
@ -63,7 +64,7 @@ func (suite *RestoreExchangeE2ESuite) SetupSuite() {
suite.st = tester.NewPrefixedS3Storage(t)
cfg, err := suite.st.S3Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
force := map[string]string{
tester.TestCfgAccountProvider: "M365",
@ -77,7 +78,7 @@ func (suite *RestoreExchangeE2ESuite) SetupSuite() {
// init the repo first
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.backupOps = make(map[path.CategoryType]operations.BackupOperation)
@ -101,17 +102,19 @@ func (suite *RestoreExchangeE2ESuite) SetupSuite() {
sel.Include(scopes)
bop, err := suite.repo.NewBackup(ctx, sel.Selector)
require.NoError(t, bop.Run(ctx))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
err = bop.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
suite.backupOps[set] = bop
// sanity check, ensure we can find the backup and its details immediately
_, err = suite.repo.Backup(ctx, bop.Results.BackupID)
require.NoError(t, err, "retrieving recent backup by ID")
require.NoError(t, err, "retrieving recent backup by ID", clues.ToCore(err))
_, _, errs := suite.repo.BackupDetails(ctx, string(bop.Results.BackupID))
require.NoError(t, errs.Failure(), "retrieving recent backup details by ID")
require.NoError(t, errs.Failure(), "retrieving recent backup details by ID", clues.ToCore(err))
require.Empty(t, errs.Recovered(), "retrieving recent backup details by ID")
}
}
@ -133,7 +136,8 @@ func (suite *RestoreExchangeE2ESuite) TestExchangeRestoreCmd() {
cli.BuildCommandTree(cmd)
// run the command
require.NoError(t, cmd.ExecuteContext(ctx))
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
})
}
}
@ -168,7 +172,8 @@ func (suite *RestoreExchangeE2ESuite) TestExchangeRestoreCmd_badTimeFlags() {
cli.BuildCommandTree(cmd)
// run the command
require.Error(t, cmd.ExecuteContext(ctx))
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
})
}
}
@ -201,7 +206,8 @@ func (suite *RestoreExchangeE2ESuite) TestExchangeRestoreCmd_badBoolFlags() {
cli.BuildCommandTree(cmd)
// run the command
require.Error(t, cmd.ExecuteContext(ctx))
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
})
}
}

View File

@ -6,6 +6,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/tester"
@ -52,7 +53,8 @@ func (suite *ExchangeUtilsSuite) TestValidateRestoreFlags() {
}
for _, test := range table {
suite.Run(test.name, func() {
test.expect(suite.T(), utils.ValidateExchangeRestoreFlags(test.backupID, test.opts))
err := utils.ValidateExchangeRestoreFlags(test.backupID, test.opts)
test.expect(suite.T(), err, clues.ToCore(err))
})
}
}

View File

@ -6,6 +6,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/selectors"
)
@ -34,7 +35,8 @@ func (suite *CliUtilsSuite) TestRequireProps() {
},
}
for _, test := range table {
test.errCheck(suite.T(), RequireProps(test.props))
err := RequireProps(test.props)
test.errCheck(suite.T(), err, clues.ToCore(err))
}
}

View File

@ -4,8 +4,8 @@ go 1.19
require (
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0
github.com/alcionai/clues v0.0.0-20230217203352-c3714e5e9013
github.com/aws/aws-sdk-go v1.44.221
github.com/alcionai/clues v0.0.0-20230314154528-c469e1adafb6
github.com/aws/aws-sdk-go v1.44.218
github.com/aws/aws-xray-sdk-go v1.8.1
github.com/cenkalti/backoff/v4 v4.2.0
github.com/google/uuid v1.3.0

View File

@ -52,8 +52,8 @@ github.com/VividCortex/ewma v1.2.0 h1:f58SaIzcDXrSy3kWaHNvuJgJ3Nmz59Zji6XoJR/q1o
github.com/VividCortex/ewma v1.2.0/go.mod h1:nz4BbCtbLyFDeC9SUHbtcT5644juEuWfUAUnGx7j5l4=
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8=
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo=
github.com/alcionai/clues v0.0.0-20230217203352-c3714e5e9013 h1:WWQ6E8hnoITmc/adlEySSVo3SOVvo1xhJKO08A1YPYY=
github.com/alcionai/clues v0.0.0-20230217203352-c3714e5e9013/go.mod h1:DeaMbAwDvYM6ZfPMR/GUl3hceqI5C8jIQ1lstjB2IW8=
github.com/alcionai/clues v0.0.0-20230314154528-c469e1adafb6 h1:U3uDQhdiI8rkwV/56duel5zWN1XESPuf+xs3EviwGHA=
github.com/alcionai/clues v0.0.0-20230314154528-c469e1adafb6/go.mod h1:DeaMbAwDvYM6ZfPMR/GUl3hceqI5C8jIQ1lstjB2IW8=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
@ -62,8 +62,8 @@ github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk5
github.com/alessio/shellescape v1.4.1 h1:V7yhSDDn8LP4lc4jS8pFkt0zCnzVJlG5JXy9BVKJUX0=
github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY=
github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/aws/aws-sdk-go v1.44.221 h1:yndn4uvLolKXPoXIwKHhO5XtwlTnJfXLBKXs84C5+hQ=
github.com/aws/aws-sdk-go v1.44.221/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
github.com/aws/aws-sdk-go v1.44.218 h1:p707+xOCazWhkSpZOeyhtTcg7Z+asxxvueGgYPSitn4=
github.com/aws/aws-sdk-go v1.44.218/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
github.com/aws/aws-xray-sdk-go v1.8.1 h1:O4pXV+hnCskaamGsZnFpzHyAmgPGusBMN6i7nnsy0Fo=
github.com/aws/aws-xray-sdk-go v1.8.1/go.mod h1:wMmVYzej3sykAttNBkXQHK/+clAPWTOrPiajEk7Cp3A=
github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8=

View File

@ -6,6 +6,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/tester"
)
@ -58,7 +59,7 @@ func (suite *CommonConfigsSuite) TestUnionConfigs_string() {
t := suite.T()
cs, err := common.UnionStringConfigs(test.ac, test.bc)
test.errCheck(t, err)
test.errCheck(t, err, clues.ToCore(err))
// remaining tests depend on error-free state
if test.ac.err != nil || test.bc.err != nil {
return

View File

@ -3,6 +3,7 @@ package crash_test
import (
"testing"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/crash"
"github.com/alcionai/corso/src/internal/tester"
"github.com/stretchr/testify/assert"
@ -50,7 +51,8 @@ func (suite *CrashTestDummySuite) TestRecovery() {
ctx, flush := tester.NewContext()
defer func() {
test.expect(t, crash.Recovery(ctx, recover()))
err := crash.Recovery(ctx, recover())
test.expect(t, err, clues.ToCore(err))
flush()
}()

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/tester"
)
@ -48,14 +49,14 @@ func (suite *CommonTimeUnitSuite) TestParseTime() {
nowStr := now.Format(time.RFC3339Nano)
result, err := common.ParseTime(nowStr)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, now.UTC(), result)
_, err = common.ParseTime("")
require.Error(t, err)
require.Error(t, err, clues.ToCore(err))
_, err = common.ParseTime("flablabls")
require.Error(t, err)
require.Error(t, err, clues.ToCore(err))
}
func (suite *CommonTimeUnitSuite) TestExtractTime() {
@ -68,14 +69,14 @@ func (suite *CommonTimeUnitSuite) TestExtractTime() {
c, err := common.ParseTime(ts)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return c
}
parseT := func(v string) time.Time {
t, err := time.Parse(time.RFC3339, v)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
return t
}
@ -151,7 +152,7 @@ func (suite *CommonTimeUnitSuite) TestExtractTime() {
t := suite.T()
result, err := common.ExtractTime(test.input)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, test.expect, comparable(t, result, test.clippedFormat))
})
}

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/exchange"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/sharepoint"
@ -109,7 +110,7 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestExchangeDataCollection
connector.UpdateStatus,
control.Options{},
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Empty(t, excludes)
for range collections {
@ -125,7 +126,7 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestExchangeDataCollection
for object := range col.Items(ctx, fault.New(true)) {
buf := &bytes.Buffer{}
_, err := buf.ReadFrom(object.ToReader())
assert.NoError(t, err, "received a buf.Read error")
assert.NoError(t, err, "received a buf.Read error", clues.ToCore(err))
}
}
@ -211,7 +212,7 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestDataCollections_invali
nil,
control.Options{},
fault.New(true))
assert.Error(t, err)
assert.Error(t, err, clues.ToCore(err))
assert.Empty(t, collections)
assert.Empty(t, excludes)
})
@ -265,7 +266,7 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestSharePointDataCollecti
connector,
control.Options{},
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
// Not expecting excludes as this isn't an incremental backup.
assert.Empty(t, excludes)
@ -281,7 +282,7 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestSharePointDataCollecti
for object := range coll.Items(ctx, fault.New(true)) {
buf := &bytes.Buffer{}
_, err := buf.ReadFrom(object.ToReader())
assert.NoError(t, err, "reading item")
assert.NoError(t, err, "reading item", clues.ToCore(err))
}
}
@ -342,7 +343,7 @@ func (suite *ConnectorCreateSharePointCollectionIntegrationSuite) TestCreateShar
nil,
control.Options{},
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Len(t, cols, 1)
// No excludes yet as this isn't an incremental backup.
assert.Empty(t, excludes)
@ -376,7 +377,7 @@ func (suite *ConnectorCreateSharePointCollectionIntegrationSuite) TestCreateShar
nil,
control.Options{},
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Less(t, 0, len(cols))
// No excludes yet as this isn't an incremental backup.
assert.Empty(t, excludes)
@ -388,7 +389,7 @@ func (suite *ConnectorCreateSharePointCollectionIntegrationSuite) TestCreateShar
t.Log("File: " + item.UUID())
bs, err := io.ReadAll(item.ToReader())
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
t.Log(string(bs))
}
}

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
"github.com/alcionai/corso/src/internal/tester"
@ -24,14 +25,14 @@ func (suite *BetaUnitSuite) TestBetaService_Adapter() {
t := suite.T()
a := tester.NewMockM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
adpt, err := graph.CreateAdapter(
m365.AzureTenantID,
m365.AzureClientID,
m365.AzureClientSecret,
)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
service := NewBetaService(adpt)
require.NotNil(t, service)
@ -45,5 +46,5 @@ func (suite *BetaUnitSuite) TestBetaService_Adapter() {
byteArray, err := service.Serialize(testPage)
assert.NotEmpty(t, byteArray)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
}

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester"
)
@ -64,7 +65,7 @@ func (suite *UsersUnitSuite) TestValidateUser() {
t := suite.T()
got, err := validateUser(tt.args)
tt.errCheck(t, err)
tt.errCheck(t, err, clues.ToCore(err))
assert.Equal(t, tt.want, got)
})

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/discovery"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
@ -36,10 +37,10 @@ func (suite *DiscoveryIntegrationSuite) TestUsers() {
errs := fault.New(true)
users, err := discovery.Users(ctx, acct, errs)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
ferrs := errs.Errors()
assert.NoError(t, ferrs.Failure)
assert.NoError(t, ferrs.Failure, clues.ToCore(ferrs.Failure))
assert.Empty(t, ferrs.Recovered)
assert.Less(t, 0, len(users))
@ -66,7 +67,7 @@ func (suite *DiscoveryIntegrationSuite) TestUsers_InvalidCredentials() {
AzureTenantID: "data",
},
)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return a
},

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/connector/support"
@ -36,7 +37,7 @@ func (suite *ExchangeServiceSuite) SetupSuite() {
a := tester.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.credentials = m365
@ -44,7 +45,7 @@ func (suite *ExchangeServiceSuite) SetupSuite() {
m365.AzureTenantID,
m365.AzureClientID,
m365.AzureClientSecret)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.gs = graph.NewService(adpt)
}
@ -79,7 +80,7 @@ func (suite *ExchangeServiceSuite) TestOptionsForCalendars() {
for _, test := range tests {
suite.Run(test.name, func() {
_, err := optionsForCalendars(test.params)
test.checkError(suite.T(), err)
test.checkError(suite.T(), err, clues.ToCore(err))
})
}
}
@ -117,7 +118,7 @@ func (suite *ExchangeServiceSuite) TestOptionsForFolders() {
t := suite.T()
config, err := optionsForMailFolders(test.params)
test.checkError(t, err)
test.checkError(t, err, clues.ToCore(err))
if err == nil {
assert.Equal(t, test.expected, len(config.QueryParameters.Select))
}
@ -156,7 +157,7 @@ func (suite *ExchangeServiceSuite) TestOptionsForContacts() {
t := suite.T()
options, err := optionsForContacts(test.params)
test.checkError(t, err)
test.checkError(t, err, clues.ToCore(err))
if err == nil {
assert.Equal(t, test.expected, len(options.QueryParameters.Select))
}
@ -183,7 +184,7 @@ func (suite *ExchangeServiceSuite) TestHasAttachments() {
"This is testing",
)
message, err := support.CreateMessageFromBytes(byteArray)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return message.GetBody()
},
},
@ -193,7 +194,7 @@ func (suite *ExchangeServiceSuite) TestHasAttachments() {
getBodyable: func(t *testing.T) models.ItemBodyable {
byteArray := mockconnector.GetMessageWithOneDriveAttachment("Test legacy")
message, err := support.CreateMessageFromBytes(byteArray)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return message.GetBody()
},
},

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/connector/support"
@ -129,7 +130,7 @@ func (suite *EventsAPIUnitSuite) TestEventInfo() {
event, err = support.CreateEventFromBytes(bytes)
)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
return event, &details.ExchangeInfo{
ItemType: details.ExchangeEvent,

View File

@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/tester"
@ -123,7 +124,8 @@ var (
func (suite *FolderCacheUnitSuite) TestCheckIDAndName() {
for _, test := range containerCheckTests {
suite.Run(test.name, func() {
test.check(suite.T(), checkIDAndName(test.c))
err := checkIDAndName(test.c)
test.check(suite.T(), err, clues.ToCore(err))
})
}
}
@ -154,7 +156,8 @@ func (suite *FolderCacheUnitSuite) TestCheckRequiredValues() {
for _, test := range table {
suite.Run(test.name, func() {
test.check(suite.T(), checkRequiredValues(test.c))
err := checkRequiredValues(test.c)
test.check(suite.T(), err, clues.ToCore(err))
})
}
}
@ -230,7 +233,8 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
for _, test := range table {
suite.Run(test.name, func() {
fc := newContainerResolver()
test.check(suite.T(), fc.addFolder(test.cf))
err := fc.addFolder(test.cf)
test.check(suite.T(), err, clues.ToCore(err))
})
}
}
@ -355,7 +359,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestDepthLimit() {
suite.Run(test.name, func() {
resolver, containers := resolverWithContainers(test.numContainers, false)
_, _, err := resolver.IDToPath(ctx, containers[len(containers)-1].id, false)
test.check(suite.T(), err)
test.check(suite.T(), err, clues.ToCore(err))
})
}
}
@ -366,7 +370,8 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestPopulatePaths() {
t := suite.T()
require.NoError(t, suite.fc.populatePaths(ctx, false))
err := suite.fc.populatePaths(ctx, false)
require.NoError(t, err, clues.ToCore(err))
items := suite.fc.Items()
gotPaths := make([]string, 0, len(items))
@ -392,7 +397,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderNoPathsCached
t := suite.T()
p, l, err := suite.fc.IDToPath(ctx, c.id, false)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String())
})
@ -408,7 +413,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderNoPathsCached
t := suite.T()
p, l, err := suite.fcWithID.IDToPath(ctx, c.id, true)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String())
})
@ -423,14 +428,14 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderCachesPaths()
c := suite.allContainers[len(suite.allContainers)-1]
p, l, err := suite.fc.IDToPath(ctx, c.id, false)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String())
c.parentID = "foo"
p, l, err = suite.fc.IDToPath(ctx, c.id, false)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String())
}
@ -443,14 +448,14 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderCachesPaths_u
c := suite.containersWithID[len(suite.containersWithID)-1]
p, l, err := suite.fcWithID.IDToPath(ctx, c.id, true)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String())
c.parentID = "foo"
p, l, err = suite.fcWithID.IDToPath(ctx, c.id, true)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String())
}
@ -466,7 +471,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsParentN
delete(suite.fc.cache, almostLast.id)
_, _, err := suite.fc.IDToPath(ctx, last.id, false)
assert.Error(t, err)
assert.Error(t, err, clues.ToCore(err))
}
func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsNotFound() {
@ -476,7 +481,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsNotFoun
t := suite.T()
_, _, err := suite.fc.IDToPath(ctx, "foo", false)
assert.Error(t, err)
assert.Error(t, err, clues.ToCore(err))
}
func (suite *ConfiguredFolderCacheUnitSuite) TestAddToCache() {
@ -495,12 +500,14 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestAddToCache() {
m.expectedLocation = stdpath.Join(last.expectedPath, m.displayName)
require.Empty(t, suite.fc.DestinationNameToID(dest), "destination not yet added to cache")
require.NoError(t, suite.fc.AddToCache(ctx, m, false))
err := suite.fc.AddToCache(ctx, m, false)
require.NoError(t, err, clues.ToCore(err))
require.Empty(t, suite.fc.DestinationNameToID(dest),
"destination id from cache, still empty, because this is not a calendar")
p, l, err := suite.fc.IDToPath(ctx, m.id, false)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, m.expectedPath, p.String())
assert.Equal(t, m.expectedLocation, l.String())
}
@ -531,7 +538,7 @@ func (suite *FolderCacheIntegrationSuite) SetupSuite() {
a := tester.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.credentials = m365
@ -539,11 +546,9 @@ func (suite *FolderCacheIntegrationSuite) SetupSuite() {
m365.AzureTenantID,
m365.AzureClientID,
m365.AzureClientSecret)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.gs = graph.NewService(adpt)
require.NoError(suite.T(), err)
}
// Testing to ensure that cache system works for in multiple different environments
@ -553,7 +558,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
a := tester.NewM365Account(suite.T())
m365, err := a.M365Config()
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
var (
user = tester.M365UserID(suite.T())
@ -578,7 +583,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
path.EmailCategory,
false,
"Griffindor", "Croix")
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return pth
},
@ -590,7 +595,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
path.EmailCategory,
false,
"Griffindor", "Felicius")
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return pth
},
@ -606,7 +611,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
path.ContactsCategory,
false,
"HufflePuff")
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return pth
},
@ -618,7 +623,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
path.ContactsCategory,
false,
"Ravenclaw")
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return pth
},
@ -635,7 +640,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
path.EventsCategory,
false,
"Durmstrang")
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return pth
},
@ -647,7 +652,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
path.EventsCategory,
false,
"Beauxbatons")
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return pth
},
@ -666,12 +671,12 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
folderName,
directoryCaches,
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
resolver := directoryCaches[test.category]
_, _, err = resolver.IDToPath(ctx, folderID, test.useIDForPath)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
parentContainer := folderName
if test.useIDForPath {
@ -685,10 +690,10 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
parentContainer,
directoryCaches,
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
_, _, err = resolver.IDToPath(ctx, secondID, test.useIDForPath)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
p := stdpath.Join(test.folderPrefix, parentContainer)
_, ok := resolver.PathInCache(p)

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/exchange/api"
"github.com/alcionai/corso/src/internal/connector/graph"
@ -177,12 +178,12 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
entries,
func(cos *support.ConnectorOperationStatus) {},
)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
cdps, err := parseMetadataCollections(ctx, []data.RestoreCollection{
data.NotFoundRestoreCollection{Collection: coll},
}, fault.New(true))
test.expectError(t, err)
test.expectError(t, err, clues.ToCore(err))
emails := cdps[path.EmailCategory]
@ -242,7 +243,7 @@ func (suite *DataCollectionsIntegrationSuite) TestMailFetch() {
acct, err = tester.NewM365Account(suite.T()).M365Config()
)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
tests := []struct {
name string
@ -274,7 +275,7 @@ func (suite *DataCollectionsIntegrationSuite) TestMailFetch() {
control.Options{},
func(status *support.ConnectorOperationStatus) {},
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
for _, c := range collections {
if c.FullPath().Service() == path.ExchangeMetadataService {
@ -302,7 +303,7 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
acct, err = tester.NewM365Account(suite.T()).M365Config()
)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
tests := []struct {
name string
@ -344,7 +345,7 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
control.Options{},
func(status *support.ConnectorOperationStatus) {},
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Less(t, 1, len(collections), "retrieved metadata and data collections")
var metadata data.BackupCollection
@ -360,7 +361,7 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
cdps, err := parseMetadataCollections(ctx, []data.RestoreCollection{
data.NotFoundRestoreCollection{Collection: metadata},
}, fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
dps := cdps[test.scope.Category().PathType()]
@ -375,7 +376,7 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
control.Options{},
func(status *support.ConnectorOperationStatus) {},
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
// TODO(keepers): this isn't a very useful test at the moment. It needs to
// investigate the items in the original and delta collections to at least
@ -407,7 +408,7 @@ func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression()
)
acct, err := tester.NewM365Account(t).M365Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
sel := selectors.NewExchangeBackup(users)
sel.Include(sel.MailFolders([]string{DefaultMailFolder}, selectors.PrefixMatch()))
@ -421,7 +422,7 @@ func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression()
control.Options{},
newStatusUpdater(t, &wg),
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
wg.Add(len(collections))
@ -437,7 +438,7 @@ func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression()
buf := &bytes.Buffer{}
read, err := buf.ReadFrom(stream.ToReader())
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.NotZero(t, read)
if isMetadata {
@ -446,7 +447,7 @@ func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression()
message, err := support.CreateMessageFromBytes(buf.Bytes())
assert.NotNil(t, message)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
}
})
}
@ -462,7 +463,7 @@ func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression
defer flush()
acct, err := tester.NewM365Account(suite.T()).M365Config()
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
users := []string{suite.user}
@ -494,7 +495,7 @@ func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression
control.Options{},
newStatusUpdater(t, &wg),
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
wg.Add(len(edcs))
@ -508,7 +509,7 @@ func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression
for stream := range edc.Items(ctx, fault.New(true)) {
buf := &bytes.Buffer{}
read, err := buf.ReadFrom(stream.ToReader())
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.NotZero(t, read)
if isMetadata {
@ -517,7 +518,7 @@ func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression
contact, err := support.CreateContactFromBytes(buf.Bytes())
assert.NotNil(t, contact)
assert.NoError(t, err, "error on converting contact bytes: "+buf.String())
assert.NoError(t, err, "converting contact bytes: "+buf.String(), clues.ToCore(err))
count++
}
@ -541,12 +542,12 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
defer flush()
acct, err := tester.NewM365Account(suite.T()).M365Config()
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
users := []string{suite.user}
ac, err := api.NewClient(acct)
require.NoError(suite.T(), err, "creating client")
require.NoError(suite.T(), err, "creating client", clues.ToCore(err))
var (
calID string
@ -565,7 +566,8 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
return nil
}
require.NoError(suite.T(), ac.Events().EnumerateContainers(ctx, suite.user, DefaultCalendar, fn, fault.New(true)))
err = ac.Events().EnumerateContainers(ctx, suite.user, DefaultCalendar, fn, fault.New(true))
require.NoError(suite.T(), err, clues.ToCore(err))
tests := []struct {
name, expected string
@ -605,7 +607,7 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
control.Options{},
newStatusUpdater(t, &wg),
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
require.Len(t, collections, 2)
wg.Add(len(collections))
@ -624,7 +626,7 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
buf := &bytes.Buffer{}
read, err := buf.ReadFrom(item.ToReader())
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.NotZero(t, read)
if isMetadata {
@ -633,7 +635,7 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
event, err := support.CreateEventFromBytes(buf.Bytes())
assert.NotNil(t, event)
assert.NoError(t, err, "creating event from bytes: "+buf.String())
assert.NoError(t, err, "creating event from bytes: "+buf.String(), clues.ToCore(err))
}
}


@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester"
@ -60,7 +61,7 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataReader_Valid() {
// Read the message using the `ExchangeData` reader and validate it matches what we set
buf := &bytes.Buffer{}
_, err := buf.ReadFrom(ed.ToReader())
assert.Nil(suite.T(), err, "received a buf.Read error")
assert.NoError(suite.T(), err, clues.ToCore(err))
assert.Equal(suite.T(), buf.Bytes(), m)
assert.Equal(suite.T(), description, ed.UUID())
}
@ -77,7 +78,7 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataReader_Empty() {
received, err := buf.ReadFrom(ed.ToReader())
assert.Equal(t, expected, received)
assert.Nil(t, err, "received buf.Readfrom error ")
assert.NoError(t, err, clues.ToCore(err))
}
func (suite *ExchangeDataCollectionSuite) TestExchangeData_FullPath() {
@ -93,7 +94,7 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeData_FullPath() {
path.EmailCategory,
false,
folder)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
edc := Collection{
user: user,
@ -117,7 +118,7 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataCollection_NewExchange
path.EmailCategory,
false,
folder)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
edc := Collection{
user: name,
@ -129,11 +130,11 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataCollection_NewExchange
func (suite *ExchangeDataCollectionSuite) TestNewCollection_state() {
fooP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "foo")
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
barP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "bar")
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
locP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "human-readable")
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
table := []struct {
name string
@ -198,7 +199,7 @@ func (suite *ExchangeDataCollectionSuite) TestGetItemWithRetries() {
name: "happy",
items: &mockItemer{},
expectErr: func(t *testing.T, err error) {
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
},
expectGetCalls: 1,
},
@ -206,7 +207,7 @@ func (suite *ExchangeDataCollectionSuite) TestGetItemWithRetries() {
name: "an error",
items: &mockItemer{getErr: assert.AnError},
expectErr: func(t *testing.T, err error) {
assert.Error(t, err)
assert.Error(t, err, clues.ToCore(err))
},
expectGetCalls: 3,
},


@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/exchange/api"
"github.com/alcionai/corso/src/internal/connector/graph"
@ -35,7 +36,7 @@ func (suite *CacheResolverSuite) SetupSuite() {
a := tester.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.credentials = m365
}
@ -45,10 +46,10 @@ func (suite *CacheResolverSuite) TestPopulate() {
defer flush()
ac, err := api.NewClient(suite.credentials)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
cal, err := ac.Events().GetContainerByID(ctx, tester.M365UserID(suite.T()), DefaultCalendar)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
eventFunc := func(t *testing.T) graph.ContainerResolver {
return &eventCalendarCache{
@ -119,9 +120,10 @@ func (suite *CacheResolverSuite) TestPopulate() {
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
resolver := test.resolverFunc(t)
require.NoError(t, resolver.Populate(ctx, fault.New(true), test.root, test.basePath))
err := resolver.Populate(ctx, fault.New(true), test.root, test.basePath)
require.NoError(t, err, clues.ToCore(err))
_, isFound := resolver.PathInCache(test.folderInCache)
test.canFind(t, isFound)


@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/connector/support"
@ -25,7 +26,7 @@ func (suite *ExchangeIteratorSuite) TestDisplayable() {
t := suite.T()
bytes := mockconnector.GetMockContactBytes("Displayable")
contact, err := support.CreateContactFromBytes(bytes)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
aDisplayable, ok := contact.(graph.Displayable)
assert.True(t, ok)
@ -37,7 +38,7 @@ func (suite *ExchangeIteratorSuite) TestDescendable() {
t := suite.T()
bytes := mockconnector.GetMockMessageBytes("Descendable")
message, err := support.CreateMessageFromBytes(bytes)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
aDescendable, ok := message.(graph.Descendable)
assert.True(t, ok)


@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/exchange/api"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
@ -46,7 +47,7 @@ func (suite *MailFolderCacheIntegrationSuite) SetupSuite() {
a := tester.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.credentials = m365
}
@ -83,7 +84,7 @@ func (suite *MailFolderCacheIntegrationSuite) TestDeltaFetch() {
t := suite.T()
ac, err := api.NewClient(suite.credentials)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
acm := ac.Mail()
@ -93,10 +94,11 @@ func (suite *MailFolderCacheIntegrationSuite) TestDeltaFetch() {
getter: acm,
}
require.NoError(t, mfc.Populate(ctx, fault.New(true), test.root, test.path...))
err = mfc.Populate(ctx, fault.New(true), test.root, test.path...)
require.NoError(t, err, clues.ToCore(err))
p, l, err := mfc.IDToPath(ctx, testFolderID, true)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
t.Logf("Path: %s\n", p.String())
t.Logf("Location: %s\n", l.String())


@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/exchange/api"
@ -44,18 +45,16 @@ func (suite *ExchangeRestoreSuite) SetupSuite() {
a := tester.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.credentials = m365
suite.ac, err = api.NewClient(m365)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
adpt, err := graph.CreateAdapter(m365.AzureTenantID, m365.AzureClientID, m365.AzureClientSecret)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.gs = graph.NewService(adpt)
require.NoError(suite.T(), err)
}
// TestRestoreContact ensures contact object can be created, placed into
@ -72,14 +71,14 @@ func (suite *ExchangeRestoreSuite) TestRestoreContact() {
)
aFolder, err := suite.ac.Contacts().CreateContactFolder(ctx, userID, folderName)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
folderID := ptr.Val(aFolder.GetId())
defer func() {
// Remove the folder containing contact prior to exiting test
err = suite.ac.Contacts().DeleteContainer(ctx, userID, folderID)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
}()
info, err := RestoreExchangeContact(
@ -89,7 +88,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreContact() {
control.Copy,
folderID,
userID)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, info, "contact item info")
}
@ -106,14 +105,14 @@ func (suite *ExchangeRestoreSuite) TestRestoreEvent() {
)
calendar, err := suite.ac.Events().CreateCalendar(ctx, userID, name)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
calendarID := ptr.Val(calendar.GetId())
defer func() {
// Removes calendar containing events created during the test
err = suite.ac.Events().DeleteContainer(ctx, userID, calendarID)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
}()
info, err := RestoreExchangeEvent(ctx,
@ -123,7 +122,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreEvent() {
calendarID,
userID,
fault.New(true))
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, info, "event item info")
}
@ -136,10 +135,10 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
t := suite.T()
a := tester.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
service, err := createService(m365)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
deleters := map[path.CategoryType]containerDeleter{
path.EmailCategory: suite.ac.Mail(),
@ -162,7 +161,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailObject: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
},
@ -174,7 +173,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailwithAttachment: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
},
@ -186,7 +185,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreEventItemAttachment: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
},
@ -198,7 +197,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailItemAttachment: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
},
@ -213,7 +212,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailBasicItemAttachment: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
},
@ -228,7 +227,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := "ItemMailAttachmentwAttachment " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
},
@ -243,7 +242,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := "ItemMailAttachment_Contact " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
},
@ -255,7 +254,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreNestedEventItemAttachment: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
},
@ -267,7 +266,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailwithLargeAttachment: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
},
@ -279,7 +278,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailwithAttachments: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
},
@ -291,7 +290,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailwithReferenceAttachment: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
},
@ -304,7 +303,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreContactObject: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Contacts().CreateContactFolder(ctx, userID, folderName)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
},
@ -316,7 +315,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
calendarName := "TestRestoreEventObject: " + common.FormatSimpleDateTime(now)
calendar, err := suite.ac.Events().CreateCalendar(ctx, userID, calendarName)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(calendar.GetId())
},
@ -328,7 +327,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
calendarName := "TestRestoreEventObject_" + common.FormatSimpleDateTime(now)
calendar, err := suite.ac.Events().CreateCalendar(ctx, userID, calendarName)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(calendar.GetId())
},
@ -352,10 +351,12 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
destination,
userID,
fault.New(true))
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, info, "item info was not populated")
assert.NotNil(t, deleters)
assert.NoError(t, deleters[test.category].DeleteContainer(ctx, userID, destination))
err = deleters[test.category].DeleteContainer(ctx, userID, destination)
assert.NoError(t, err, clues.ToCore(err))
})
}
}


@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/exchange/api"
"github.com/alcionai/corso/src/internal/connector/graph"
@ -110,7 +111,7 @@ func TestServiceIteratorsSuite(t *testing.T) {
func (suite *ServiceIteratorsSuite) SetupSuite() {
a := tester.NewMockM365Account(suite.T())
m365, err := a.M365Config()
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
suite.creds = m365
}
@ -308,7 +309,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
dps,
control.Options{FailFast: test.failFast},
fault.New(test.failFast))
test.expectErr(t, err)
test.expectErr(t, err, clues.ToCore(err))
// collection assertions
@ -463,7 +464,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_repea
dps,
control.Options{FailFast: true},
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
// collection assertions
@ -536,7 +537,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_incre
prevPath := func(t *testing.T, at ...string) path.Path {
p, err := path.Build(tenantID, userID, path.ExchangeService, cat, false, at...)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return p
}
@ -815,7 +816,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_incre
test.dps,
control.Options{},
fault.New(true))
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
metadatas := 0
for _, c := range collections {


@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
@ -30,7 +31,7 @@ func (suite *BetaClientSuite) SetupSuite() {
t := suite.T()
a := tester.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.credentials = m365
}
@ -43,7 +44,7 @@ func (suite *BetaClientSuite) TestCreateBetaClient() {
suite.credentials.AzureClientSecret,
)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
client := NewBetaClient(adpt)
assert.NotNil(t, client)
@ -63,7 +64,7 @@ func (suite *BetaClientSuite) TestBasicClientGetFunctionality() {
suite.credentials.AzureTenantID,
suite.credentials.AzureClientID,
suite.credentials.AzureClientSecret)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
client := NewBetaClient(adpt)
require.NotNil(t, client)
@ -74,7 +75,7 @@ func (suite *BetaClientSuite) TestBasicClientGetFunctionality() {
collection, err := client.SitesById(siteID).Pages().Get(ctx, nil)
// Ensures that the client is able to receive data from beta
// Not Registered Error: content type application/json does not have a factory registered to be parsed
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
for _, page := range collection.GetValue() {
assert.NotNil(t, page, "betasdk call for page does not return value.")


@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph/metadata"
"github.com/alcionai/corso/src/internal/connector/onedrive"
"github.com/alcionai/corso/src/internal/tester"
@ -96,7 +97,7 @@ func (suite *MetadataUnitSuite) TestIsMetadataFile_Files_MetaSuffixes() {
test.category,
true,
"file"+ext)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
test.expected(t, metadata.IsMetadataFile(p), "extension %s", ext)
})
@ -117,7 +118,7 @@ func (suite *MetadataUnitSuite) TestIsMetadataFile_Files_NotMetaSuffixes() {
test.category,
true,
"file"+ext)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Falsef(t, metadata.IsMetadataFile(p), "extension %s", ext)
})
@ -140,7 +141,7 @@ func (suite *MetadataUnitSuite) TestIsMetadataFile_Directories() {
test.category,
false,
"file"+ext)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Falsef(t, metadata.IsMetadataFile(p), "extension %s", ext)
})


@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/fault"
@ -34,7 +35,7 @@ func (suite *MetadataCollectionUnitSuite) TestFullPath() {
path.EmailCategory,
false,
"foo")
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
c := NewMetadataCollection(p, nil, nil)
@ -76,7 +77,7 @@ func (suite *MetadataCollectionUnitSuite) TestItems() {
path.EmailCategory,
false,
"foo")
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
c := NewMetadataCollection(
p,
@ -94,7 +95,7 @@ func (suite *MetadataCollectionUnitSuite) TestItems() {
gotNames = append(gotNames, s.UUID())
buf, err := io.ReadAll(s.ToReader())
if !assert.NoError(t, err) {
if !assert.NoError(t, err, clues.ToCore(err)) {
continue
}
@ -168,7 +169,7 @@ func (suite *MetadataCollectionUnitSuite) TestMakeMetadataCollection() {
[]MetadataCollectionEntry{test.metadata},
func(*support.ConnectorOperationStatus) {})
test.errCheck(t, err)
test.errCheck(t, err, clues.ToCore(err))
if err != nil {
return
}
@ -187,7 +188,7 @@ func (suite *MetadataCollectionUnitSuite) TestMakeMetadataCollection() {
itemCount++
err := decoder.Decode(&gotMap)
if !assert.NoError(t, err) {
if !assert.NoError(t, err, clues.ToCore(err)) {
continue
}


@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
)
@ -27,7 +28,7 @@ func (suite *GraphUnitSuite) SetupSuite() {
t := suite.T()
a := tester.NewMockM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.credentials = m365
}
@ -39,7 +40,7 @@ func (suite *GraphUnitSuite) TestCreateAdapter() {
suite.credentials.AzureClientID,
suite.credentials.AzureClientSecret)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, adpt)
}
@ -81,7 +82,7 @@ func (suite *GraphUnitSuite) TestSerializationEndPoint() {
suite.credentials.AzureTenantID,
suite.credentials.AzureClientID,
suite.credentials.AzureClientSecret)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
serv := NewService(adpt)
email := models.NewMessage()
@ -89,7 +90,7 @@ func (suite *GraphUnitSuite) TestSerializationEndPoint() {
email.SetSubject(&subject)
byteArray, err := serv.Serialize(email)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, byteArray)
t.Log(string(byteArray))
}


@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/tester"
@ -53,7 +54,7 @@ func (suite *DisconnectedGraphConnectorSuite) TestBadConnection() {
AzureTenantID: "data",
},
)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return a
},
},
@ -222,11 +223,11 @@ func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs_allServices
t := suite.T()
err := verifyBackupInputs(test.excludes(t), sites)
test.checkError(t, err)
test.checkError(t, err, clues.ToCore(err))
err = verifyBackupInputs(test.filters(t), sites)
test.checkError(t, err)
test.checkError(t, err, clues.ToCore(err))
err = verifyBackupInputs(test.includes(t), sites)
test.checkError(t, err)
test.checkError(t, err, clues.ToCore(err))
})
}
}


@ -16,6 +16,7 @@ import (
"golang.org/x/exp/maps"
"golang.org/x/exp/slices"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/connector/onedrive"
@ -38,7 +39,7 @@ func mustToDataLayerPath(
isItem bool,
) path.Path {
res, err := path.Build(tenant, resourceOwner, service, category, isItem, elements...)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return res
}
@ -617,12 +618,12 @@ func compareExchangeEmail(
item data.Stream,
) {
itemData, err := io.ReadAll(item.ToReader())
if !assert.NoError(t, err, "reading collection item: %s", item.UUID()) {
if !assert.NoError(t, err, "reading collection item", item.UUID(), clues.ToCore(err)) {
return
}
itemMessage, err := support.CreateMessageFromBytes(itemData)
if !assert.NoError(t, err, "deserializing backed up message") {
if !assert.NoError(t, err, "deserializing backed up message", clues.ToCore(err)) {
return
}
@ -632,7 +633,7 @@ func compareExchangeEmail(
}
expectedMessage, err := support.CreateMessageFromBytes(expectedBytes)
assert.NoError(t, err, "deserializing source message")
assert.NoError(t, err, "deserializing source message", clues.ToCore(err))
checkMessage(t, expectedMessage, itemMessage)
}
@ -644,12 +645,12 @@ func compareExchangeContact(
item data.Stream,
) {
itemData, err := io.ReadAll(item.ToReader())
if !assert.NoError(t, err, "reading collection item: %s", item.UUID()) {
if !assert.NoError(t, err, "reading collection item", item.UUID(), clues.ToCore(err)) {
return
}
itemContact, err := support.CreateContactFromBytes(itemData)
if !assert.NoError(t, err, "deserializing backed up contact") {
if !assert.NoError(t, err, "deserializing backed up contact", clues.ToCore(err)) {
return
}
@ -672,12 +673,12 @@ func compareExchangeEvent(
item data.Stream,
) {
itemData, err := io.ReadAll(item.ToReader())
if !assert.NoError(t, err, "reading collection item: %s", item.UUID()) {
if !assert.NoError(t, err, "reading collection item", item.UUID(), clues.ToCore(err)) {
return
}
itemEvent, err := support.CreateEventFromBytes(itemData)
if !assert.NoError(t, err, "deserializing backed up contact") {
if !assert.NoError(t, err, "deserializing backed up contact", clues.ToCore(err)) {
return
}
@ -687,7 +688,7 @@ func compareExchangeEvent(
}
expectedEvent, err := support.CreateEventFromBytes(expectedBytes)
assert.NoError(t, err, "deserializing source contact")
assert.NoError(t, err, "deserializing source contact", clues.ToCore(err))
checkEvent(t, expectedEvent, itemEvent)
}
@ -735,7 +736,7 @@ func compareOneDriveItem(
}
buf, err := io.ReadAll(item.ToReader())
if !assert.NoError(t, err) {
if !assert.NoError(t, err, clues.ToCore(err)) {
return true
}
@ -749,7 +750,7 @@ func compareOneDriveItem(
)
err = json.Unmarshal(buf, &itemMeta)
if !assert.NoErrorf(t, err, "unmarshalling retrieved metadata for file %s", name) {
if !assert.NoError(t, err, "unmarshalling retrieved metadata for file", name, clues.ToCore(err)) {
return true
}
@ -769,7 +770,7 @@ func compareOneDriveItem(
}
err = json.Unmarshal(expectedData, &expectedMeta)
if !assert.NoError(t, err, "unmarshalling expected metadata") {
if !assert.NoError(t, err, "unmarshalling expected metadata", clues.ToCore(err)) {
return true
}
@ -797,12 +798,12 @@ func compareOneDriveItem(
var fileData testOneDriveData
err = json.Unmarshal(buf, &fileData)
if !assert.NoErrorf(t, err, "unmarshalling file data for file %s", name) {
if !assert.NoError(t, err, "unmarshalling file data for file", name, clues.ToCore(err)) {
return true
}
expectedData := expected[fileData.FileName]
if !assert.NotNil(t, expectedData, "unexpected file with name %s", name) {
if !assert.NotNil(t, expectedData, "unexpected file with name", name) {
return true
}
@ -1201,7 +1202,7 @@ func loadConnector(ctx context.Context, t *testing.T, itemClient *http.Client, r
a := tester.NewM365Account(t)
connector, err := NewGraphConnector(ctx, itemClient, a, r, fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return connector
}


@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive"
@ -69,7 +70,7 @@ func onedriveItemWithData(
}
serialized, err := json.Marshal(content)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return itemInfo{
name: name,
@ -89,7 +90,7 @@ func onedriveMetadata(
testMeta := getMetadata(fileName, perm, permUseID)
testMetaJSON, err := json.Marshal(testMeta)
require.NoError(t, err, "marshalling metadata")
require.NoError(t, err, "marshalling metadata", clues.ToCore(err))
return itemInfo{
name: itemID,
@ -128,11 +129,11 @@ func (suite *GraphConnectorOneDriveIntegrationSuite) SetupSuite() {
suite.acct = tester.NewM365Account(suite.T())
user, err := suite.connector.Owners.Users().GetByID(ctx, suite.user)
require.NoErrorf(suite.T(), err, "fetching user %s", suite.user)
require.NoError(suite.T(), err, "fetching user", suite.user, clues.ToCore(err))
suite.userID = ptr.Val(user.GetId())
secondaryUser, err := suite.connector.Owners.Users().GetByID(ctx, suite.secondaryUser)
require.NoErrorf(suite.T(), err, "fetching user %s", suite.secondaryUser)
require.NoError(suite.T(), err, "fetching user", suite.secondaryUser, clues.ToCore(err))
suite.secondaryUserID = ptr.Val(secondaryUser.GetId())
tester.LogTimeOfTest(suite.T())


@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/suite"
"golang.org/x/exp/maps"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/mockconnector"
@ -126,7 +127,7 @@ func (suite *GraphConnectorUnitSuite) TestUnionSiteIDsAndWebURLs() {
defer flush()
result, err := gc.UnionSiteIDsAndWebURLs(ctx, test.ids, test.urls, fault.New(true))
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.ElementsMatch(t, test.expect, result)
})
}
@ -181,13 +182,13 @@ func (suite *GraphConnectorIntegrationSuite) TestSetTenantSites() {
t := suite.T()
service, err := newConnector.createService()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
newConnector.Service = service
assert.Equal(t, 0, len(newConnector.Sites))
err = newConnector.setTenantSites(ctx, fault.New(true))
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.Less(t, 0, len(newConnector.Sites))
for _, site := range newConnector.Sites {
@ -220,7 +221,7 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreFailsBadService() {
},
nil,
fault.New(true))
assert.Error(t, err)
assert.Error(t, err, clues.ToCore(err))
assert.NotNil(t, deets)
status := suite.connector.AwaitStatus()
@ -299,7 +300,7 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
},
test.col,
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, deets)
stats := suite.connector.AwaitStatus()
@ -327,7 +328,7 @@ func mustGetDefaultDriveID(
err = graph.Wrap(ctx, err, "retrieving drive")
}
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
id := ptr.Val(d.GetId())
require.NotEmpty(t, id)
@ -397,7 +398,7 @@ func runRestore(
config.opts,
collections,
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, deets)
status := restoreGC.AwaitStatus()
@ -451,7 +452,7 @@ func runBackupAndCompare(
nil,
config.opts,
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
// No excludes yet because this isn't an incremental backup.
assert.Empty(t, excludes)
@ -934,7 +935,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
},
collections,
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, deets)
status := restoreGC.AwaitStatus()
@ -963,7 +964,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
ToggleFeatures: control.Toggles{EnablePermissionsBackup: true},
},
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
// No excludes yet because this isn't an incremental backup.
assert.Empty(t, excludes)


@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
@ -35,7 +36,7 @@ func (suite *MockExchangeCollectionSuite) TestMockExchangeCollection() {
for item := range mdc.Items(ctx, fault.New(true)) {
_, err := io.ReadAll(item.ToReader())
assert.NoError(suite.T(), err)
assert.NoError(suite.T(), err, clues.ToCore(err))
messagesRead++
}
@ -52,7 +53,7 @@ func (suite *MockExchangeCollectionSuite) TestMockExchangeCollectionItemSize() {
for item := range mdc.Items(ctx, fault.New(true)) {
buf, err := io.ReadAll(item.ToReader())
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.Implements(t, (*data.StreamSize)(nil), item)
s := item.(data.StreamSize)
@ -72,11 +73,11 @@ func (suite *MockExchangeCollectionSuite) TestMockExchangeCollection_NewExchange
for stream := range mdc.Items(ctx, fault.New(true)) {
_, err := buf.ReadFrom(stream.ToReader())
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
byteArray := buf.Bytes()
something, err := support.CreateFromBytes(byteArray, models.CreateMessageFromDiscriminatorValue)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, something)
}
}
@ -123,7 +124,7 @@ func (suite *MockExchangeDataSuite) TestMockExchangeData() {
assert.Equal(t, id, test.reader.UUID())
buf, err := io.ReadAll(test.reader.ToReader())
test.check(t, err)
test.check(t, err, clues.ToCore(err))
if err != nil {
return
}
@ -194,10 +195,10 @@ func (suite *MockExchangeDataSuite) TestMockByteHydration() {
temp := mockconnector.GetMockList(subject, "Artist", emptyMap)
writer := kioser.NewJsonSerializationWriter()
err := writer.WriteObjectValue("", temp)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
bytes, err := writer.GetSerializedContent()
require.NoError(suite.T(), err)
require.NoError(t, err, clues.ToCore(err))
_, err = support.CreateListFromBytes(bytes)
@ -208,7 +209,7 @@ func (suite *MockExchangeDataSuite) TestMockByteHydration() {
name: "SharePoint: List 6 Items",
transformation: func(t *testing.T) error {
bytes, err := mockconnector.GetMockListBytes(subject)
require.NoError(suite.T(), err)
require.NoError(t, err, clues.ToCore(err))
_, err = support.CreateListFromBytes(bytes)
return err
},
@ -229,7 +230,7 @@ func (suite *MockExchangeDataSuite) TestMockByteHydration() {
t := suite.T()
err := test.transformation(t)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
})
}
}


@ -10,6 +10,7 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/require"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
@ -159,7 +160,7 @@ func GetMockListBytes(title string) ([]byte, error) {
// of the Mocked SharePoint List
func GetMockListStream(t *testing.T, title string, numOfItems int) *MockListData {
byteArray, err := GetMockListBytes(title)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
listData := &MockListData{
ID: title,


@ -11,6 +11,7 @@ import (
"github.com/pkg/errors"
"github.com/stretchr/testify/require"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common"
)
@ -693,10 +694,10 @@ func GetMockMessageWithNestedItemAttachmentEvent(subject string) []byte {
func GetMockMessageWithNestedItemAttachmentMail(t *testing.T, nested []byte, subject string) []byte {
base := GetMockMessageBytes(subject)
message, err := hydrateMessage(base)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
nestedMessage, err := hydrateMessage(nested)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
iaNode := models.NewItemAttachment()
attachmentSize := int32(len(nested))
@ -713,13 +714,13 @@ func GetMockMessageWithNestedItemAttachmentMail(t *testing.T, nested []byte, sub
func GetMockMessageWithNestedItemAttachmentContact(t *testing.T, nested []byte, subject string) []byte {
base := GetMockMessageBytes(subject)
message, err := hydrateMessage(base)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
parseNode, err := js.NewJsonParseNodeFactory().GetRootParseNode("application/json", nested)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
anObject, err := parseNode.GetObjectValue(models.CreateContactFromDiscriminatorValue)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
contact := anObject.(models.Contactable)
internalName := "Nested Contact"
@ -736,10 +737,10 @@ func GetMockMessageWithNestedItemAttachmentContact(t *testing.T, nested []byte,
func serialize(t *testing.T, item absser.Parsable) []byte {
wtr := js.NewJsonSerializationWriter()
err := wtr.WriteObjectValue("", item)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
byteArray, err := wtr.GetSerializedContent()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return byteArray
}


@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive/api"
"github.com/alcionai/corso/src/internal/tester"
@ -23,11 +24,11 @@ func (suite *OneDriveAPISuite) SetupSuite() {
t := suite.T()
a := tester.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.creds = m365
adpt, err := graph.CreateAdapter(m365.AzureTenantID, m365.AzureClientID, m365.AzureClientSecret)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.service = graph.NewService(adpt)
}
@ -49,7 +50,7 @@ func (suite *OneDriveAPISuite) TestCreatePagerAndGetPage() {
siteID := tester.M365SiteID(t)
pager := api.NewSiteDrivePager(suite.service, siteID, []string{"name"})
a, err := pager.GetPage(ctx)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, a)
}
@ -61,7 +62,7 @@ func (suite *OneDriveAPISuite) TestGetDriveIDByName() {
siteID := tester.M365SiteID(t)
pager := api.NewSiteDrivePager(suite.service, siteID, []string{"id", "name"})
id, err := pager.GetDriveIDByName(ctx, "Documents")
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.NotEmpty(t, id)
}
@ -73,9 +74,9 @@ func (suite *OneDriveAPISuite) TestGetDriveFolderByName() {
siteID := tester.M365SiteID(t)
pager := api.NewSiteDrivePager(suite.service, siteID, []string{"id", "name"})
id, err := pager.GetDriveIDByName(ctx, "Documents")
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, id)
_, err = pager.GetFolderIDByName(ctx, id, "folder")
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
}


@ -201,9 +201,9 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
)
folderPath, err := GetCanonicalPath("drive/driveID1/root:/dir1/dir2/dir3", "tenant", "owner", test.source)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
driveFolderPath, err := path.GetDriveFolderPath(folderPath)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
coll := NewCollection(
graph.HTTPClient(graph.NoTimeout()),
@ -302,7 +302,7 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
assert.Equal(t, testItemID+MetaFileSuffix, readItemMeta.UUID())
readMetaData, err := io.ReadAll(readItemMeta.ToReader())
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
tm, err := json.Marshal(testItemMeta)
if err != nil {
@ -350,7 +350,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadError() {
wg.Add(1)
folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
coll := NewCollection(
graph.HTTPClient(graph.NoTimeout()),
@ -393,7 +393,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadError() {
assert.True(t, ok)
_, err = io.ReadAll(collItem.ToReader())
assert.Error(t, err)
assert.Error(t, err, clues.ToCore(err))
wg.Wait()
@ -538,7 +538,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionPermissionBackupLatestModTim
wg.Add(1)
folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
coll := NewCollection(
graph.HTTPClient(graph.NoTimeout()),
@ -594,8 +594,9 @@ func (suite *CollectionUnitTestSuite) TestCollectionPermissionBackupLatestModTim
for _, i := range readItems {
if strings.HasSuffix(i.UUID(), MetaFileSuffix) {
content, err := io.ReadAll(i.ToReader())
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
require.Equal(t, content, []byte("{}"))
im, ok := i.(data.StreamModTime)
require.Equal(t, ok, true, "modtime interface")
require.Greater(t, im.ModTime(), mtime, "permissions time greater than mod time")


@ -14,6 +14,7 @@ import (
"github.com/stretchr/testify/suite"
"golang.org/x/exp/maps"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph"
gapi "github.com/alcionai/corso/src/internal/connector/graph/api"
"github.com/alcionai/corso/src/internal/connector/support"
@ -49,11 +50,11 @@ func getExpectedStatePathGenerator(
} else {
require.Len(t, pths, 2, "invalid number of paths to getExpectedStatePathGenerator")
p2, err = GetCanonicalPath(base+pths[1], tenant, user, OneDriveSource)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
}
p1, err = GetCanonicalPath(base+pths[0], tenant, user, OneDriveSource)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
switch state {
case data.NewState:
@ -81,7 +82,7 @@ func getExpectedPathGenerator(t *testing.T,
) func(string) string {
return func(path string) string {
p, err := GetCanonicalPath(base+path, tenant, user, OneDriveSource)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return p.String()
}
@ -129,10 +130,11 @@ func (suite *OneDriveCollectionsUnitSuite) TestGetCanonicalPath() {
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
p := strings.Join(test.dir, "/")
result, err := GetCanonicalPath(p, tenant, resourceOwner, test.source)
test.expectErr(t, err)
test.expectErr(t, err, clues.ToCore(err))
if result != nil {
assert.Equal(t, test.expect, result.String())
}
@ -797,7 +799,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
itemCollection,
false,
errs)
tt.expect(t, err)
tt.expect(t, err, clues.ToCore(err))
assert.Equal(t, len(tt.expectedCollectionIDs), len(c.CollectionMap[driveID]), "total collections")
assert.Equal(t, tt.expectedItemCount, c.NumItems, "item count")
assert.Equal(t, tt.expectedFileCount, c.NumFiles, "file count")
@ -1138,10 +1140,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext()
defer flush()
t := suite.T()
cols := []data.RestoreCollection{}
for _, c := range test.cols {
@ -1152,7 +1154,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
path.FilesCategory,
c(),
func(*support.ConnectorOperationStatus) {})
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
cols = append(cols, data.NotFoundRestoreCollection{Collection: mc})
}
@ -1241,7 +1243,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
path.FilesCategory,
false,
)
require.NoError(suite.T(), err, "making metadata path")
require.NoError(suite.T(), err, "making metadata path", clues.ToCore(err))
driveID1 := uuid.NewString()
drive1 := models.NewDrive()
@ -1918,7 +1920,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
},
func(*support.ConnectorOperationStatus) {},
)
assert.NoError(t, err, "creating metadata collection")
assert.NoError(t, err, "creating metadata collection", clues.ToCore(err))
prevMetadata := []data.RestoreCollection{data.NotFoundRestoreCollection{Collection: mc}}
errs := fault.New(true)
@ -1947,7 +1949,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
data.NotFoundRestoreCollection{Collection: baseCol},
},
fault.New(true))
if !assert.NoError(t, err, "deserializing metadata") {
if !assert.NoError(t, err, "deserializing metadata", clues.ToCore(err)) {
continue
}
@ -2201,7 +2203,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestCollectItems() {
test.prevDelta,
fault.New(true))
require.ErrorIs(t, err, test.err, "delta fetch err")
require.ErrorIs(t, err, test.err, "delta fetch err", clues.ToCore(err))
require.Equal(t, test.deltaURL, delta.URL, "delta url")
require.Equal(t, !test.prevDeltaSuccess, delta.Reset, "delta reset")
})


@ -313,7 +313,7 @@ func (suite *OneDriveUnitSuite) TestDrives() {
}
drives, err := drives(ctx, pager, test.retry)
test.expectedErr(t, err)
test.expectedErr(t, err, clues.ToCore(err))
assert.ElementsMatch(t, test.expectedResults, drives)
})
@ -352,10 +352,10 @@ func (suite *OneDriveSuite) TestCreateGetDeleteFolder() {
gs := loadTestService(t)
pager, err := PagerForSource(OneDriveSource, gs, suite.userID, nil)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
drives, err := drives(ctx, pager, true)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, drives)
// TODO: Verify the intended drive
@ -371,7 +371,7 @@ func (suite *OneDriveSuite) TestCreateGetDeleteFolder() {
}()
folderID, err := CreateRestoreFolders(ctx, gs, driveID, folderElements)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
folderIDs = append(folderIDs, folderID)
@ -379,7 +379,7 @@ func (suite *OneDriveSuite) TestCreateGetDeleteFolder() {
folderElements = append(folderElements, folderName2)
folderID, err = CreateRestoreFolders(ctx, gs, driveID, folderElements)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
folderIDs = append(folderIDs, folderID)
@ -402,10 +402,10 @@ func (suite *OneDriveSuite) TestCreateGetDeleteFolder() {
t := suite.T()
pager, err := PagerForSource(OneDriveSource, gs, suite.userID, nil)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
allFolders, err := GetAllFolders(ctx, gs, pager, test.prefix, fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
foundFolderIDs := []string{}
@ -437,7 +437,7 @@ func (fm testFolderMatcher) Matches(path string) bool {
func (suite *OneDriveSuite) TestOneDriveNewCollections() {
creds, err := tester.NewM365Account(suite.T()).M365Config()
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
tests := []struct {
name, user string
@ -478,7 +478,7 @@ func (suite *OneDriveSuite) TestOneDriveNewCollections() {
})
odcs, excludes, err := colls.Get(ctx, nil, fault.New(true))
assert.NoError(t, err, clues.InErr(err))
assert.NoError(t, err, clues.ToCore(err))
// Don't expect excludes as this isn't an incremental backup.
assert.Empty(t, excludes)


@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
@ -45,10 +46,10 @@ func (suite *ItemIntegrationSuite) SetupSuite() {
suite.user = tester.SecondaryM365UserID(t)
pager, err := PagerForSource(OneDriveSource, suite.service, suite.user, nil)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
odDrives, err := drives(ctx, pager, true)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
// Test Requirement 1: Need a drive
require.Greaterf(t, len(odDrives), 0, "user %s does not have a drive", suite.user)
@ -99,7 +100,7 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
map[string]string{},
"",
fault.New(true))
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
// Test Requirement 2: Need a file
require.NotEmpty(
@ -113,14 +114,15 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
// Read data for the file
itemInfo, itemData, err := oneDriveItemReader(ctx, graph.HTTPClient(graph.NoTimeout()), driveItem)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
require.NotNil(suite.T(), itemInfo.OneDrive)
require.NotEmpty(suite.T(), itemInfo.OneDrive.ItemName)
size, err := io.Copy(io.Discard, itemData)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
require.NotZero(suite.T(), size)
require.Equal(suite.T(), size, itemInfo.OneDrive.Size)
suite.T().Logf("Read %d bytes from file %s.", size, itemInfo.OneDrive.ItemName)
}
@ -150,11 +152,11 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
srv := suite.service
root, err := srv.Client().DrivesById(test.driveID).Root().Get(ctx, nil)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
// Test Requirement 2: "Test Folder" should exist
folder, err := getFolder(ctx, srv, test.driveID, ptr.Val(root.GetId()), "Test Folder")
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
newFolderName := "testfolder_" + common.FormatNow(common.SimpleTimeTesting)
t.Logf("Test will create folder %s", newFolderName)
@ -165,7 +167,7 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
test.driveID,
ptr.Val(folder.GetId()),
newItem(newFolderName, true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, newFolder.GetId())
newItemName := "testItem_" + common.FormatNow(common.SimpleTimeTesting)
@ -177,19 +179,19 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
test.driveID,
ptr.Val(newFolder.GetId()),
newItem(newItemName, false))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, newItem.GetId())
// HACK: Leveraging this to test getFolder behavior for a file. `getFolder()` on the
// newly created item should fail because it's a file not a folder
_, err = getFolder(ctx, srv, test.driveID, ptr.Val(newFolder.GetId()), newItemName)
require.ErrorIs(t, err, errFolderNotFound)
require.ErrorIs(t, err, errFolderNotFound, clues.ToCore(err))
// Initialize a 100KB mockDataProvider
td, writeSize := mockDataReader(int64(100 * 1024))
w, err := driveItemWriter(ctx, srv, test.driveID, ptr.Val(newItem.GetId()), writeSize)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
// Using a 32 KB buffer for the copy allows us to validate the
// multi-part upload. `io.CopyBuffer` will only write 32 KB at
@ -197,7 +199,7 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
copyBuffer := make([]byte, 32*1024)
size, err := io.CopyBuffer(w, td, copyBuffer)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
require.Equal(t, writeSize, size)
})
@ -232,15 +234,15 @@ func (suite *ItemIntegrationSuite) TestDriveGetFolder() {
srv := suite.service
root, err := srv.Client().DrivesById(test.driveID).Root().Get(ctx, nil)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
// Lookup a folder that doesn't exist
_, err = getFolder(ctx, srv, test.driveID, ptr.Val(root.GetId()), "FolderDoesNotExist")
require.ErrorIs(t, err, errFolderNotFound)
require.ErrorIs(t, err, errFolderNotFound, clues.ToCore(err))
// Lookup a folder that does exist
_, err = getFolder(ctx, srv, test.driveID, ptr.Val(root.GetId()), "")
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
})
}
}


@ -3,6 +3,7 @@ package onedrive
import (
"testing"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/path"
@ -134,7 +135,7 @@ func (suite *RestoreUnitSuite) TestAugmentRestorePaths() {
inPaths := []path.Path{}
for _, ps := range test.input {
p, err := path.FromDataLayerPath(base+ps, true)
require.NoError(t, err, "creating path")
require.NoError(t, err, "creating path", clues.ToCore(err))
inPaths = append(inPaths, p)
}
@ -142,13 +143,13 @@ func (suite *RestoreUnitSuite) TestAugmentRestorePaths() {
outPaths := []path.Path{}
for _, ps := range test.output {
p, err := path.FromDataLayerPath(base+ps, true)
require.NoError(t, err, "creating path")
require.NoError(t, err, "creating path", clues.ToCore(err))
outPaths = append(outPaths, p)
}
actual, err := AugmentRestorePaths(test.version, inPaths)
require.NoError(t, err, "augmenting paths")
require.NoError(t, err, "augmenting paths", clues.ToCore(err))
// Ordering of paths matters here as we need dirmeta files
// to show up before file in dir

View File

@ -6,6 +6,7 @@ import (
msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
"github.com/stretchr/testify/require"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/tester"
@ -67,11 +68,12 @@ func (ods *oneDriveService) updateStatus(status *support.ConnectorOperationStatu
func loadTestService(t *testing.T) *oneDriveService {
a := tester.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
service, err := NewOneDriveService(m365)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return service
}

View File

@ -5,6 +5,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/alcionai/clues"
discover "github.com/alcionai/corso/src/internal/connector/discovery/api"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/pkg/account"
@ -16,7 +17,7 @@ func createTestBetaService(t *testing.T, credentials account.M365Config) *discov
credentials.AzureClientID,
credentials.AzureClientSecret,
)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return discover.NewBetaService(adapter)
}

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common"
discover "github.com/alcionai/corso/src/internal/connector/discovery/api"
"github.com/alcionai/corso/src/internal/connector/mockconnector"
@ -32,7 +33,7 @@ func (suite *SharePointPageSuite) SetupSuite() {
suite.siteID = tester.M365SiteID(t)
a := tester.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.creds = m365
suite.service = createTestBetaService(t, suite.creds)
@ -54,7 +55,7 @@ func (suite *SharePointPageSuite) TestFetchPages() {
t := suite.T()
pgs, err := api.FetchPages(ctx, suite.service, suite.siteID)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
require.NotNil(t, pgs)
assert.NotZero(t, len(pgs))
@ -69,12 +70,12 @@ func (suite *SharePointPageSuite) TestGetSitePages() {
t := suite.T()
tuples, err := api.FetchPages(ctx, suite.service, suite.siteID)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, tuples)
jobs := []string{tuples[0].ID}
pages, err := api.GetSitePages(ctx, suite.service, suite.siteID, jobs, fault.New(true))
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.NotEmpty(t, pages)
}
@ -104,11 +105,12 @@ func (suite *SharePointPageSuite) TestRestoreSinglePage() {
destName,
)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, info)
// Clean Up
pageID := info.SharePoint.ParentPath
err = api.DeleteSitePage(ctx, suite.service, suite.siteID, pageID)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
}

View File

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/mockconnector"
@ -37,7 +38,7 @@ func (suite *SharePointCollectionSuite) SetupSuite() {
suite.siteID = tester.M365SiteID(t)
a := tester.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.creds = m365
}
@ -61,7 +62,7 @@ func (suite *SharePointCollectionSuite) TestCollection_Item_Read() {
data: io.NopCloser(bytes.NewReader(m)),
}
readData, err := io.ReadAll(sc.ToReader())
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, name, sc.id)
assert.Equal(t, readData, m)
@ -91,7 +92,7 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
path.ListsCategory,
false,
dirRoot)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return dir
},
@ -101,10 +102,10 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
listing.SetDisplayName(&name)
err := ow.WriteObjectValue("", listing)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
byteArray, err := ow.GetSerializedContent()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
data := &Item{
id: name,
@ -127,14 +128,14 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
path.PagesCategory,
false,
dirRoot)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return dir
},
getItem: func(t *testing.T, itemName string) *Item {
byteArray := mockconnector.GetMockPage(itemName)
page, err := support.CreatePageFromBytes(byteArray)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
data := &Item{
id: itemName,
@ -186,7 +187,7 @@ func (suite *SharePointCollectionSuite) TestListCollection_Restore() {
testName := "MockListing"
listing.SetDisplayName(&testName)
byteArray, err := service.Serialize(listing)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
listData := &Item{
id: testName,
@ -197,7 +198,7 @@ func (suite *SharePointCollectionSuite) TestListCollection_Restore() {
destName := "Corso_Restore_" + common.FormatNow(common.SimpleTimeTesting)
deets, err := restoreListItem(ctx, service, listData, suite.siteID, destName)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
t.Logf("List created: %s\n", deets.SharePoint.ItemName)
// Clean-Up
@ -209,7 +210,7 @@ func (suite *SharePointCollectionSuite) TestListCollection_Restore() {
for {
resp, err := builder.Get(ctx, nil)
assert.NoError(t, err, "getting site lists")
assert.NoError(t, err, "getting site lists", clues.ToCore(err))
for _, temp := range resp.GetValue() {
if ptr.Val(temp.GetDisplayName()) == deets.SharePoint.ItemName {
@ -230,7 +231,7 @@ func (suite *SharePointCollectionSuite) TestListCollection_Restore() {
if isFound {
err := DeleteList(ctx, service, suite.siteID, deleteID)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
}
}
@ -245,17 +246,17 @@ func (suite *SharePointCollectionSuite) TestRestoreLocation() {
service := createTestService(t, suite.creds)
rootFolder := "General_" + common.FormatNow(common.SimpleTimeTesting)
folderID, err := createRestoreFolders(ctx, service, suite.siteID, []string{rootFolder})
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
t.Log("FolderID: " + folderID)
_, err = createRestoreFolders(ctx, service, suite.siteID, []string{rootFolder, "Tsao"})
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
// CleanUp
siteDrive, err := service.Client().SitesById(suite.siteID).Drive().Get(ctx, nil)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
driveID := ptr.Val(siteDrive.GetId())
err = onedrive.DeleteItem(ctx, service, driveID, folderID)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
}

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive"
"github.com/alcionai/corso/src/internal/tester"
@ -131,7 +132,7 @@ func (suite *SharePointLibrariesUnitSuite) TestUpdateCollections() {
true,
fault.New(true))
test.expect(t, err)
test.expect(t, err, clues.ToCore(err))
assert.Equal(t, len(test.expectedCollectionIDs), len(c.CollectionMap), "collection paths")
assert.Equal(t, test.expectedItemCount, c.NumItems, "item count")
assert.Equal(t, test.expectedFileCount, c.NumFiles, "file count")
@ -197,8 +198,9 @@ func (suite *SharePointPagesSuite) TestCollectPages() {
t := suite.T()
siteID := tester.M365SiteID(t)
a := tester.NewM365Account(t)
account, err := a.M365Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
col, err := collectPages(
ctx,
@ -208,6 +210,6 @@ func (suite *SharePointPagesSuite) TestCollectPages() {
&MockGraphService{},
control.Defaults(),
fault.New(true))
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.NotEmpty(t, col)
}

View File

@ -6,6 +6,7 @@ import (
msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
"github.com/stretchr/testify/require"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive"
"github.com/alcionai/corso/src/internal/connector/support"
@ -52,7 +53,7 @@ func createTestService(t *testing.T, credentials account.M365Config) *graph.Serv
credentials.AzureClientID,
credentials.AzureClientSecret,
)
require.NoError(t, err, "creating microsoft graph service for exchange")
require.NoError(t, err, "creating microsoft graph service for exchange", clues.ToCore(err))
return graph.NewService(adapter)
}
@ -62,7 +63,7 @@ func expectedPathAsSlice(t *testing.T, tenant, user string, rest ...string) []st
for _, r := range rest {
p, err := onedrive.GetCanonicalPath(r, tenant, user, onedrive.SharePointSource)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
res = append(res, p.String())
}

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault"
@ -21,7 +22,7 @@ func (suite *SharePointSuite) SetupSuite() {
t := suite.T()
a := tester.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.creds = m365
}
@ -54,11 +55,11 @@ func (suite *SharePointSuite) TestLoadList() {
t := suite.T()
service := createTestService(t, suite.creds)
tuples, err := preFetchLists(ctx, service, "root")
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
job := []string{tuples[0].id}
lists, err := loadSiteLists(ctx, service, "root", job, fault.New(true))
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.Greater(t, len(lists), 0)
t.Logf("Length: %d\n", len(lists))
}

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
bmodels "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
"github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/tester"
@ -53,7 +54,7 @@ func (suite *DataSupportSuite) TestCreateMessageFromBytes() {
t := suite.T()
result, err := CreateMessageFromBytes(test.byteArray)
test.checkError(t, err)
test.checkError(t, err, clues.ToCore(err))
test.checkObject(t, result)
})
}
@ -92,7 +93,7 @@ func (suite *DataSupportSuite) TestCreateContactFromBytes() {
t := suite.T()
result, err := CreateContactFromBytes(test.byteArray)
test.checkError(t, err)
test.checkError(t, err, clues.ToCore(err))
test.isNil(t, result)
})
}
@ -129,7 +130,7 @@ func (suite *DataSupportSuite) TestCreateEventFromBytes() {
t := suite.T()
result, err := CreateEventFromBytes(test.byteArray)
test.checkError(t, err)
test.checkError(t, err, clues.ToCore(err))
test.isNil(t, result)
})
}
@ -170,7 +171,7 @@ func (suite *DataSupportSuite) TestCreateListFromBytes() {
t := suite.T()
result, err := CreateListFromBytes(test.byteArray)
test.checkError(t, err)
test.checkError(t, err, clues.ToCore(err))
test.isNil(t, result)
})
}
@ -212,10 +213,10 @@ func (suite *DataSupportSuite) TestCreatePageFromBytes() {
writer := kioser.NewJsonSerializationWriter()
err := pg.Serialize(writer)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
byteArray, err := writer.GetSerializedContent()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return byteArray
},

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/tester"
@ -26,7 +27,7 @@ func (suite *SupportTestSuite) TestToMessage() {
bytes := mockconnector.GetMockMessageBytes("m365 mail support test")
message, err := CreateMessageFromBytes(bytes)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
clone := ToMessage(message)
assert.Equal(t, message.GetBccRecipients(), clone.GetBccRecipients())
@ -40,7 +41,7 @@ func (suite *SupportTestSuite) TestToEventSimplified() {
t := suite.T()
bytes := mockconnector.GetMockEventWithAttendeesBytes("M365 Event Support Test")
event, err := CreateEventFromBytes(bytes)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
attendees := event.GetAttendees()
newEvent := ToEventSimplified(event)

View File

@ -14,6 +14,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester"
)
@ -33,8 +34,8 @@ func (suite *UploadSessionSuite) TestWriter() {
// Expected Content-Range value format
contentRangeRegex := regexp.MustCompile(`^bytes (?P<rangestart>\d+)-(?P<rangeend>\d+)/(?P<length>\d+)$`)
nextOffset := -1
// Initialize a test http server that validates expected headers
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
assert.Equal(t, r.Method, http.MethodPut)
@ -45,12 +46,15 @@ func (suite *UploadSessionSuite) TestWriter() {
// Extract the Content-Range components
matches := contentRangeRegex.FindStringSubmatch(r.Header[contentRangeHeaderKey][0])
rangeStart, err := strconv.Atoi(matches[contentRangeRegex.SubexpIndex("rangestart")])
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
rangeEnd, err := strconv.Atoi(matches[contentRangeRegex.SubexpIndex("rangeend")])
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
length, err := strconv.Atoi(matches[contentRangeRegex.SubexpIndex("length")])
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
// Validate total size and range start/end
assert.Equal(t, int(writeSize), length)
@ -62,6 +66,7 @@ func (suite *UploadSessionSuite) TestWriter() {
nextOffset = rangeEnd
}))
defer ts.Close()
writer := NewWriter("item", ts.URL, writeSize)
@ -72,7 +77,7 @@ func (suite *UploadSessionSuite) TestWriter() {
copyBuffer := make([]byte, 32*1024)
size, err := io.CopyBuffer(writer, td, copyBuffer)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
require.Equal(suite.T(), writeSize, size)
}

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path"
)
@ -21,9 +22,9 @@ func TestDataCollectionSuite(t *testing.T) {
func (suite *DataCollectionSuite) TestStateOf() {
fooP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "foo")
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
barP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "bar")
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
table := []struct {
name string

View File

@ -6,6 +6,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/events"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
@ -37,7 +38,7 @@ func (suite *EventsIntegrationSuite) TestNewBus() {
Prefix: "prfx",
},
)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
a, err := account.NewAccount(
account.ProviderM365,
@ -49,15 +50,19 @@ func (suite *EventsIntegrationSuite) TestNewBus() {
AzureTenantID: "tid",
},
)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
b, err := events.NewBus(ctx, s, a.ID(), control.Options{})
require.NotEmpty(t, b)
require.NoError(t, err)
require.NoError(t, b.Close())
require.NoError(t, err, clues.ToCore(err))
err = b.Close()
require.NoError(t, err, clues.ToCore(err))
b2, err := events.NewBus(ctx, s, a.ID(), control.Options{DisableMetrics: true})
require.Empty(t, b2)
require.NoError(t, err)
require.NoError(t, b2.Close())
require.NoError(t, err, clues.ToCore(err))
err = b2.Close()
require.NoError(t, err, clues.ToCore(err))
}

View File

@ -12,6 +12,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/storage"
)
@ -76,12 +77,15 @@ func (suite *WrapperIntegrationSuite) TestRepoExistsError() {
st := tester.NewPrefixedS3Storage(t)
k := NewConn(st)
require.NoError(t, k.Initialize(ctx))
require.NoError(t, k.Close(ctx))
err := k.Initialize(ctx)
assert.Error(t, err)
require.NoError(t, err, clues.ToCore(err))
err = k.Close(ctx)
require.NoError(t, err, clues.ToCore(err))
err = k.Initialize(ctx)
assert.Error(t, err, clues.ToCore(err))
assert.ErrorIs(t, err, ErrorRepoAlreadyExists)
}
@ -90,12 +94,12 @@ func (suite *WrapperIntegrationSuite) TestBadProviderErrors() {
defer flush()
t := suite.T()
st := tester.NewPrefixedS3Storage(t)
st.Provider = storage.ProviderUnknown
k := NewConn(st)
assert.Error(t, k.Initialize(ctx))
err := k.Initialize(ctx)
assert.Error(t, err, clues.ToCore(err))
}
func (suite *WrapperIntegrationSuite) TestConnectWithoutInitErrors() {
@ -103,10 +107,11 @@ func (suite *WrapperIntegrationSuite) TestConnectWithoutInitErrors() {
defer flush()
t := suite.T()
st := tester.NewPrefixedS3Storage(t)
k := NewConn(st)
assert.Error(t, k.Connect(ctx))
err := k.Connect(ctx)
assert.Error(t, err, clues.ToCore(err))
}
func (suite *WrapperIntegrationSuite) TestCloseTwiceDoesNotCrash() {
@ -116,10 +121,14 @@ func (suite *WrapperIntegrationSuite) TestCloseTwiceDoesNotCrash() {
t := suite.T()
k, err := openKopiaRepo(t, ctx)
require.NoError(t, err)
assert.NoError(t, k.Close(ctx))
require.NoError(t, err, clues.ToCore(err))
err = k.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
assert.Nil(t, k.Repository)
assert.NoError(t, k.Close(ctx))
err = k.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}
func (suite *WrapperIntegrationSuite) TestCloseAfterWrap() {
@ -129,17 +138,20 @@ func (suite *WrapperIntegrationSuite) TestCloseAfterWrap() {
t := suite.T()
k, err := openKopiaRepo(t, ctx)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
require.NoError(t, k.wrap())
err = k.wrap()
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, 2, k.refCount)
require.NoError(t, k.Close(ctx))
err = k.Close(ctx)
require.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, k.Repository)
assert.Equal(t, 1, k.refCount)
require.NoError(t, k.Close(ctx))
err = k.Close(ctx)
require.NoError(t, err, clues.ToCore(err))
assert.Nil(t, k.Repository)
assert.Equal(t, 0, k.refCount)
}
@ -151,10 +163,13 @@ func (suite *WrapperIntegrationSuite) TestOpenAfterClose() {
t := suite.T()
k, err := openKopiaRepo(t, ctx)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.NoError(t, k.Close(ctx))
assert.Error(t, k.wrap())
err = k.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
err = k.wrap()
assert.Error(t, err, clues.ToCore(err))
}
func (suite *WrapperIntegrationSuite) TestBadCompressorType() {
@ -164,13 +179,15 @@ func (suite *WrapperIntegrationSuite) TestBadCompressorType() {
t := suite.T()
k, err := openKopiaRepo(t, ctx)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
defer func() {
assert.NoError(t, k.Close(ctx))
err := k.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}()
assert.Error(t, k.Compression(ctx, "not-a-compressor"))
err = k.Compression(ctx, "not-a-compressor")
assert.Error(t, err, clues.ToCore(err))
}
func (suite *WrapperIntegrationSuite) TestGetPolicyOrDefault_GetsDefault() {
@ -180,10 +197,11 @@ func (suite *WrapperIntegrationSuite) TestGetPolicyOrDefault_GetsDefault() {
t := suite.T()
k, err := openKopiaRepo(t, ctx)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
defer func() {
assert.NoError(t, k.Close(ctx))
err := k.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}()
si := snapshot.SourceInfo{
@ -193,8 +211,7 @@ func (suite *WrapperIntegrationSuite) TestGetPolicyOrDefault_GetsDefault() {
}
p, err := k.getPolicyOrEmpty(ctx, si)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, policy.Policy{}, *p)
}
@ -206,18 +223,19 @@ func (suite *WrapperIntegrationSuite) TestSetCompressor() {
compressor := "pgzip"
k, err := openKopiaRepo(t, ctx)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
defer func() {
assert.NoError(t, k.Close(ctx))
err := k.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}()
assert.NoError(t, k.Compression(ctx, compressor))
err = k.Compression(ctx, compressor)
assert.NoError(t, err, clues.ToCore(err))
// Check the policy was actually created and has the right compressor.
p, err := k.getPolicyOrEmpty(ctx, policy.GlobalPolicySourceInfo)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, compressor, string(p.CompressionPolicy.CompressorName))
// Check the global policy will be the effective policy in future snapshots
@ -229,13 +247,11 @@ func (suite *WrapperIntegrationSuite) TestSetCompressor() {
}
policyTree, err := policy.TreeForSource(ctx, k, si)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(
t,
compressor,
string(policyTree.EffectivePolicy().CompressionPolicy.CompressorName),
)
string(policyTree.EffectivePolicy().CompressionPolicy.CompressorName))
}
func (suite *WrapperIntegrationSuite) TestConfigDefaultsSetOnInitAndNotOnConnect() {
@ -325,26 +341,32 @@ func (suite *WrapperIntegrationSuite) TestConfigDefaultsSetOnInitAndNotOnConnect
t := suite.T()
k, err := openKopiaRepo(t, ctx)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
p, err := k.getPolicyOrEmpty(ctx, policy.GlobalPolicySourceInfo)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
test.checkInitFunc(t, p)
require.NoError(t, test.mutator(ctx, p))
require.NoError(t, k.writeGlobalPolicy(ctx, "TestDefaultPolicyConfigSet", p))
require.NoError(t, k.Close(ctx))
err = test.mutator(ctx, p)
require.NoError(t, err, clues.ToCore(err))
require.NoError(t, k.Connect(ctx))
err = k.writeGlobalPolicy(ctx, "TestDefaultPolicyConfigSet", p)
require.NoError(t, err, clues.ToCore(err))
err = k.Close(ctx)
require.NoError(t, err, clues.ToCore(err))
err = k.Connect(ctx)
require.NoError(t, err, clues.ToCore(err))
defer func() {
assert.NoError(t, k.Close(ctx))
err := k.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}()
p, err = k.getPolicyOrEmpty(ctx, policy.GlobalPolicySourceInfo)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
test.checkFunc(t, p)
})
}
@ -357,10 +379,15 @@ func (suite *WrapperIntegrationSuite) TestInitAndConnWithTempDirectory() {
t := suite.T()
k, err := openKopiaRepo(t, ctx)
require.NoError(t, err)
require.NoError(t, k.Close(ctx))
require.NoError(t, err, clues.ToCore(err))
err = k.Close(ctx)
require.NoError(t, err, clues.ToCore(err))
// Re-open with Connect.
require.NoError(t, k.Connect(ctx))
assert.NoError(t, k.Close(ctx))
err = k.Connect(ctx)
require.NoError(t, err, clues.ToCore(err))
err = k.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}
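
The wrapper tests above also show the second shape of this change: calls that were previously inlined in the assertion are first assigned to an error variable so `clues.ToCore(err)` has something to reference. A minimal sketch under that assumption (the `closer` type is hypothetical):

```go
package example_test

import (
	"context"
	"errors"
	"testing"

	"github.com/alcionai/clues"
	"github.com/stretchr/testify/require"
)

// closer is a hypothetical stand-in for the connection values closed in the
// tests above; only its Close signature matters for the pattern.
type closer struct{}

func (closer) Close(ctx context.Context) error {
	if ctx == nil {
		return errors.New("nil context")
	}

	return nil
}

func TestCloseSurfacesClues(t *testing.T) {
	c := closer{}

	// Before: require.NoError(t, c.Close(ctx)) left no err variable to hand
	// to clues.ToCore. Splitting the call out captures the error so its
	// structured data can be reported if the assertion fails.
	err := c.Close(context.Background())
	require.NoError(t, err, clues.ToCore(err))
}
```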

View File

@ -13,6 +13,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester"
@ -41,7 +42,7 @@ func (suite *KopiaDataCollectionUnitSuite) TestReturnsPath() {
path.EmailCategory,
false,
"some", "path", "for", "data")
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
c := kopiaDataCollection{
streams: []data.Stream{},
@ -108,13 +109,13 @@ func (suite *KopiaDataCollectionUnitSuite) TestReturnsStreams() {
count := 0
for returnedStream := range c.Items(ctx, fault.New(true)) {
require.Less(t, count, len(test.streams))
assert.Equal(t, returnedStream.UUID(), uuids[count])
buf, err := io.ReadAll(returnedStream.ToReader())
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, buf, testData[count])
require.Implements(t, (*data.StreamSize)(nil), returnedStream)
ss := returnedStream.(data.StreamSize)
assert.Equal(t, len(buf), int(ss.Size()))
@ -217,7 +218,7 @@ func (suite *KopiaDataCollectionUnitSuite) TestFetch() {
category,
false,
folder1, folder2)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
table := []struct {
name string
@ -276,15 +277,14 @@ func (suite *KopiaDataCollectionUnitSuite) TestFetch() {
if err != nil {
if test.notFoundErr {
assert.ErrorIs(t, err, data.ErrNotFound)
assert.ErrorIs(t, err, data.ErrNotFound, clues.ToCore(err))
}
return
}
fileData, err := io.ReadAll(s.ToReader())
test.readErr(t, err)
test.readErr(t, err, clues.ToCore(err))
if err != nil {
return

View File

@ -12,6 +12,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/internal/tester"
@ -26,7 +27,7 @@ type fooModel struct {
//revive:disable-next-line:context-as-argument
func getModelStore(t *testing.T, ctx context.Context) *ModelStore {
c, err := openKopiaRepo(t, ctx)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return &ModelStore{c: c, modelVersion: globalModelVersion}
}
@ -79,7 +80,8 @@ func (suite *ModelStoreIntegrationSuite) SetupTest() {
func (suite *ModelStoreIntegrationSuite) TearDownTest() {
defer suite.flush()
assert.NoError(suite.T(), suite.m.Close(suite.ctx))
err := suite.m.Close(suite.ctx)
assert.NoError(suite.T(), err, clues.ToCore(err))
}
func (suite *ModelStoreIntegrationSuite) TestBadTagsErrors() {
@ -114,32 +116,25 @@ func (suite *ModelStoreIntegrationSuite) TestBadTagsErrors() {
foo := &fooModel{Bar: uuid.NewString()}
foo.Tags = test.tags
assert.ErrorIs(
t,
suite.m.Put(suite.ctx, model.BackupOpSchema, foo),
errBadTagKey,
)
err := suite.m.Put(suite.ctx, model.BackupOpSchema, foo)
assert.ErrorIs(t, err, errBadTagKey, clues.ToCore(err))
// Add model for update/get ID checks.
foo.Tags = map[string]string{}
require.NoError(
t,
suite.m.Put(suite.ctx, model.BackupOpSchema, foo),
)
err = suite.m.Put(suite.ctx, model.BackupOpSchema, foo)
require.NoError(t, err, clues.ToCore(err))
foo.Tags = test.tags
assert.ErrorIs(
t,
suite.m.Update(suite.ctx, model.BackupOpSchema, foo),
errBadTagKey,
)
_, err := suite.m.GetIDsForType(
err = suite.m.Update(suite.ctx, model.BackupOpSchema, foo)
assert.ErrorIs(t, err, errBadTagKey, clues.ToCore(err))
_, err = suite.m.GetIDsForType(
suite.ctx,
model.BackupOpSchema,
test.tags,
)
assert.ErrorIs(t, err, errBadTagKey)
test.tags)
assert.ErrorIs(t, err, errBadTagKey, clues.ToCore(err))
})
}
}
@ -156,58 +151,56 @@ func (suite *ModelStoreIntegrationSuite) TestNoIDsErrors() {
noModelStoreID.ID = model.StableID(uuid.NewString())
noModelStoreID.ModelStoreID = ""
assert.Error(t, suite.m.Update(suite.ctx, theModelType, noStableID))
assert.Error(t, suite.m.Update(suite.ctx, theModelType, noModelStoreID))
err := suite.m.Update(suite.ctx, theModelType, noStableID)
assert.Error(t, err, clues.ToCore(err))
assert.Error(t, suite.m.Get(suite.ctx, theModelType, "", nil))
assert.Error(t, suite.m.GetWithModelStoreID(suite.ctx, theModelType, "", nil))
err = suite.m.Update(suite.ctx, theModelType, noModelStoreID)
assert.Error(t, err, clues.ToCore(err))
assert.Error(t, suite.m.Delete(suite.ctx, theModelType, ""))
assert.Error(t, suite.m.DeleteWithModelStoreID(suite.ctx, ""))
err = suite.m.Get(suite.ctx, theModelType, "", nil)
assert.Error(t, err, clues.ToCore(err))
err = suite.m.GetWithModelStoreID(suite.ctx, theModelType, "", nil)
assert.Error(t, err, clues.ToCore(err))
err = suite.m.Delete(suite.ctx, theModelType, "")
assert.Error(t, err, clues.ToCore(err))
err = suite.m.DeleteWithModelStoreID(suite.ctx, "")
assert.Error(t, err, clues.ToCore(err))
}
func (suite *ModelStoreIntegrationSuite) TestBadModelTypeErrors() {
t := suite.T()
foo := &fooModel{Bar: uuid.NewString()}
assert.ErrorIs(
t,
suite.m.Put(suite.ctx, model.UnknownSchema, foo),
errUnrecognizedSchema,
)
err := suite.m.Put(suite.ctx, model.UnknownSchema, foo)
assert.ErrorIs(t, err, errUnrecognizedSchema, clues.ToCore(err))
require.NoError(t, suite.m.Put(suite.ctx, model.BackupOpSchema, foo))
err = suite.m.Put(suite.ctx, model.BackupOpSchema, foo)
require.NoError(t, err, clues.ToCore(err))
_, err := suite.m.GetIDsForType(suite.ctx, model.UnknownSchema, nil)
assert.ErrorIs(t, err, errUnrecognizedSchema)
_, err = suite.m.GetIDsForType(suite.ctx, model.UnknownSchema, nil)
assert.ErrorIs(t, err, errUnrecognizedSchema, clues.ToCore(err))
}
func (suite *ModelStoreIntegrationSuite) TestBadTypeErrors() {
t := suite.T()
foo := &fooModel{Bar: uuid.NewString()}
require.NoError(t, suite.m.Put(suite.ctx, model.BackupOpSchema, foo))
err := suite.m.Put(suite.ctx, model.BackupOpSchema, foo)
require.NoError(t, err, clues.ToCore(err))
returned := &fooModel{}
assert.ErrorIs(
t,
suite.m.Get(suite.ctx, model.RestoreOpSchema, foo.ID, returned),
errModelTypeMismatch,
)
assert.ErrorIs(
t,
suite.m.GetWithModelStoreID(suite.ctx, model.RestoreOpSchema, foo.ModelStoreID, returned),
errModelTypeMismatch,
)
err = suite.m.Get(suite.ctx, model.RestoreOpSchema, foo.ID, returned)
assert.ErrorIs(t, err, errModelTypeMismatch, clues.ToCore(err))
assert.ErrorIs(
t,
suite.m.Delete(suite.ctx, model.RestoreOpSchema, foo.ID),
errModelTypeMismatch,
)
err = suite.m.GetWithModelStoreID(suite.ctx, model.RestoreOpSchema, foo.ModelStoreID, returned)
assert.ErrorIs(t, err, errModelTypeMismatch, clues.ToCore(err))
err = suite.m.Delete(suite.ctx, model.RestoreOpSchema, foo.ID)
assert.ErrorIs(t, err, errModelTypeMismatch, clues.ToCore(err))
}
func (suite *ModelStoreIntegrationSuite) TestPutGetBadVersion() {
@ -218,13 +211,13 @@ func (suite *ModelStoreIntegrationSuite) TestPutGetBadVersion() {
foo.Tags = map[string]string{}
err := suite.m.Put(suite.ctx, schema, foo)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.m.modelVersion = 42
returned := &fooModel{}
err = suite.m.Get(suite.ctx, schema, foo.ID, returned)
assert.Error(t, err)
assert.Error(t, err, clues.ToCore(err))
}
func (suite *ModelStoreIntegrationSuite) TestPutGet() {
@ -264,7 +257,7 @@ func (suite *ModelStoreIntegrationSuite) TestPutGet() {
foo.Tags = map[string]string{}
err := suite.m.Put(suite.ctx, test.s, foo)
test.check(t, err)
test.check(t, err, clues.ToCore(err))
if test.hasErr {
return
@ -276,11 +269,11 @@ func (suite *ModelStoreIntegrationSuite) TestPutGet() {
returned := &fooModel{}
err = suite.m.Get(suite.ctx, test.s, foo.ID, returned)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned)
err = suite.m.GetWithModelStoreID(suite.ctx, test.s, foo.ModelStoreID, returned)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned)
})
}
@ -318,19 +311,20 @@ func (suite *ModelStoreIntegrationSuite) TestPutGet_PreSetID() {
foo.Tags = map[string]string{}
err := suite.m.Put(suite.ctx, mdl, foo)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
test.expect(t, model.StableID(test.baseID), foo.ID)
require.NotEmpty(t, foo.ModelStoreID)
require.NotEmpty(t, foo.ID)
returned := &fooModel{}
err = suite.m.Get(suite.ctx, mdl, foo.ID, returned)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned)
err = suite.m.GetWithModelStoreID(suite.ctx, mdl, foo.ModelStoreID, returned)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned)
})
}
@ -345,42 +339,43 @@ func (suite *ModelStoreIntegrationSuite) TestPutGet_WithTags() {
"bar": "baz",
}
require.NoError(t, suite.m.Put(suite.ctx, theModelType, foo))
err := suite.m.Put(suite.ctx, theModelType, foo)
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, foo.ModelStoreID)
require.NotEmpty(t, foo.ID)
returned := &fooModel{}
err := suite.m.Get(suite.ctx, theModelType, foo.ID, returned)
require.NoError(t, err)
err = suite.m.Get(suite.ctx, theModelType, foo.ID, returned)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned)
err = suite.m.GetWithModelStoreID(suite.ctx, theModelType, foo.ModelStoreID, returned)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned)
}
func (suite *ModelStoreIntegrationSuite) TestGet_NotFoundErrors() {
t := suite.T()
assert.ErrorIs(t, suite.m.Get(suite.ctx, model.BackupOpSchema, "baz", nil), data.ErrNotFound)
assert.ErrorIs(
t, suite.m.GetWithModelStoreID(suite.ctx, model.BackupOpSchema, "baz", nil), data.ErrNotFound)
err := suite.m.Get(suite.ctx, model.BackupOpSchema, "baz", nil)
assert.ErrorIs(t, err, data.ErrNotFound, clues.ToCore(err))
err = suite.m.GetWithModelStoreID(suite.ctx, model.BackupOpSchema, "baz", nil)
assert.ErrorIs(t, err, data.ErrNotFound, clues.ToCore(err))
}
func (suite *ModelStoreIntegrationSuite) TestPutGetOfTypeBadVersion() {
t := suite.T()
schema := model.BackupOpSchema
foo := &fooModel{Bar: uuid.NewString()}
err := suite.m.Put(suite.ctx, schema, foo)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.m.modelVersion = 42
ids, err := suite.m.GetIDsForType(suite.ctx, schema, nil)
assert.Error(t, err)
assert.Error(t, err, clues.ToCore(err))
assert.Empty(t, ids)
}
@ -426,8 +421,7 @@ func (suite *ModelStoreIntegrationSuite) TestPutGetOfType() {
}
ids, err := suite.m.GetIDsForType(suite.ctx, test.s, nil)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Len(t, ids, 1)
})
}
@ -545,7 +539,8 @@ func (suite *ModelStoreIntegrationSuite) TestGetOfTypeWithTags() {
// Setup the store by adding all the inputs.
for _, in := range inputs {
require.NoError(suite.T(), suite.m.Put(suite.ctx, in.schema, in.dataModel))
err := suite.m.Put(suite.ctx, in.schema, in.dataModel)
require.NoError(suite.T(), err, clues.ToCore(err))
}
// Check we can properly execute our tests.
@ -559,7 +554,7 @@ func (suite *ModelStoreIntegrationSuite) TestGetOfTypeWithTags() {
}
ids, err := suite.m.GetIDsForType(suite.ctx, test.s, test.tags)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.ElementsMatch(t, expected, ids)
})
@ -592,23 +587,24 @@ func (suite *ModelStoreIntegrationSuite) TestPutUpdate() {
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext()
defer flush()
t := suite.T()
theModelType := model.BackupOpSchema
m := getModelStore(t, ctx)
defer func() {
assert.NoError(t, m.c.Close(ctx))
err := m.c.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}()
foo := &fooModel{Bar: uuid.NewString()}
// Avoid some silly test errors from comparing nil to empty map.
foo.Tags = map[string]string{}
require.NoError(t, m.Put(ctx, theModelType, foo))
err := m.Put(ctx, theModelType, foo)
require.NoError(t, err, clues.ToCore(err))
oldModelID := foo.ModelStoreID
oldStableID := foo.ID
@ -616,19 +612,21 @@ func (suite *ModelStoreIntegrationSuite) TestPutUpdate() {
test.mutator(foo)
require.NoError(t, m.Update(ctx, theModelType, foo))
err = m.Update(ctx, theModelType, foo)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, oldStableID, foo.ID)
// The version in the model store has not changed so we get the old
// version back.
assert.Equal(t, oldVersion, foo.Version)
returned := &fooModel{}
require.NoError(
t, m.GetWithModelStoreID(ctx, theModelType, foo.ModelStoreID, returned))
err = m.GetWithModelStoreID(ctx, theModelType, foo.ModelStoreID, returned)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned)
ids, err := m.GetIDsForType(ctx, theModelType, nil)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
require.Len(t, ids, 1)
assert.Equal(t, globalModelVersion, ids[0].Version)
@ -639,7 +637,7 @@ func (suite *ModelStoreIntegrationSuite) TestPutUpdate() {
}
err = m.GetWithModelStoreID(ctx, theModelType, oldModelID, nil)
assert.ErrorIs(t, err, data.ErrNotFound)
assert.ErrorIs(t, err, data.ErrNotFound, clues.ToCore(err))
})
}
}
@ -676,16 +674,19 @@ func (suite *ModelStoreIntegrationSuite) TestPutUpdate_FailsNotMatchingPrev() {
m := getModelStore(t, ctx)
defer func() {
assert.NoError(t, m.c.Close(ctx))
err := m.c.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}()
foo := &fooModel{Bar: uuid.NewString()}
require.NoError(t, m.Put(ctx, startModelType, foo))
err := m.Put(ctx, startModelType, foo)
require.NoError(t, err, clues.ToCore(err))
test.mutator(foo)
assert.Error(t, m.Update(ctx, test.s, foo))
err = m.Update(ctx, test.s, foo)
assert.Error(t, err, clues.ToCore(err))
})
}
}
@ -693,23 +694,27 @@ func (suite *ModelStoreIntegrationSuite) TestPutUpdate_FailsNotMatchingPrev() {
func (suite *ModelStoreIntegrationSuite) TestPutDelete() {
t := suite.T()
theModelType := model.BackupOpSchema
foo := &fooModel{Bar: uuid.NewString()}
require.NoError(t, suite.m.Put(suite.ctx, theModelType, foo))
err := suite.m.Put(suite.ctx, theModelType, foo)
require.NoError(t, err, clues.ToCore(err))
require.NoError(t, suite.m.Delete(suite.ctx, theModelType, foo.ID))
err = suite.m.Delete(suite.ctx, theModelType, foo.ID)
require.NoError(t, err, clues.ToCore(err))
returned := &fooModel{}
err := suite.m.GetWithModelStoreID(suite.ctx, theModelType, foo.ModelStoreID, returned)
assert.ErrorIs(t, err, data.ErrNotFound)
err = suite.m.GetWithModelStoreID(suite.ctx, theModelType, foo.ModelStoreID, returned)
assert.ErrorIs(t, err, data.ErrNotFound, clues.ToCore(err))
}
func (suite *ModelStoreIntegrationSuite) TestPutDelete_BadIDsNoop() {
t := suite.T()
assert.NoError(t, suite.m.Delete(suite.ctx, model.BackupOpSchema, "foo"))
assert.NoError(t, suite.m.DeleteWithModelStoreID(suite.ctx, "foo"))
err := suite.m.Delete(suite.ctx, model.BackupOpSchema, "foo")
assert.NoError(t, err, clues.ToCore(err))
err = suite.m.DeleteWithModelStoreID(suite.ctx, "foo")
assert.NoError(t, err, clues.ToCore(err))
}
// ---------------
@ -742,7 +747,8 @@ func (suite *ModelStoreRegressionSuite) TestFailDuringWriteSessionHasNoVisibleEf
m := getModelStore(t, ctx)
defer func() {
assert.NoError(t, m.c.Close(ctx))
err := m.c.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}()
foo := &fooModel{Bar: uuid.NewString()}
@ -750,13 +756,13 @@ func (suite *ModelStoreRegressionSuite) TestFailDuringWriteSessionHasNoVisibleEf
foo.ModelStoreID = manifest.ID(uuid.NewString())
// Avoid some silly test errors from comparing nil to empty map.
foo.Tags = map[string]string{}
theModelType := model.BackupOpSchema
require.NoError(t, m.Put(ctx, theModelType, foo))
err := m.Put(ctx, theModelType, foo)
require.NoError(t, err, clues.ToCore(err))
newID := manifest.ID("")
err := repo.WriteSession(
err = repo.WriteSession(
ctx,
m.c,
repo.WriteSessionOptions{Purpose: "WriteSessionFailureTest"},
@ -772,7 +778,7 @@ func (suite *ModelStoreRegressionSuite) TestFailDuringWriteSessionHasNoVisibleEf
}()
innerErr = putInner(innerCtx, w, theModelType, foo, false)
require.NoError(t, innerErr)
require.NoError(t, innerErr, clues.ToCore(innerErr))
newID = foo.ModelStoreID
@ -780,14 +786,15 @@ func (suite *ModelStoreRegressionSuite) TestFailDuringWriteSessionHasNoVisibleEf
},
)
assert.ErrorIs(t, err, assert.AnError)
assert.ErrorIs(t, err, assert.AnError, clues.ToCore(err))
err = m.GetWithModelStoreID(ctx, theModelType, newID, nil)
assert.ErrorIs(t, err, data.ErrNotFound)
assert.ErrorIs(t, err, data.ErrNotFound, clues.ToCore(err))
returned := &fooModel{}
require.NoError(
t, m.GetWithModelStoreID(ctx, theModelType, foo.ModelStoreID, returned))
err = m.GetWithModelStoreID(ctx, theModelType, foo.ModelStoreID, returned)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned)
}
@ -798,14 +805,16 @@ func openConnAndModelStore(
st := tester.NewPrefixedS3Storage(t)
c := NewConn(st)
require.NoError(t, c.Initialize(ctx))
err := c.Initialize(ctx)
require.NoError(t, err, clues.ToCore(err))
defer func() {
require.NoError(t, c.Close(ctx))
err := c.Close(ctx)
require.NoError(t, err, clues.ToCore(err))
}()
ms, err := NewModelStore(c)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return c, ms
}
@ -815,14 +824,16 @@ func reconnectToModelStore(
ctx context.Context, //revive:disable-line:context-as-argument
c *conn,
) *ModelStore {
require.NoError(t, c.Connect(ctx))
err := c.Connect(ctx)
require.NoError(t, err, clues.ToCore(err))
defer func() {
assert.NoError(t, c.Close(ctx))
err := c.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}()
ms, err := NewModelStore(c)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return ms
}
@ -837,11 +848,13 @@ func (suite *ModelStoreRegressionSuite) TestMultipleConfigs() {
backupModel := backup.Backup{
SnapshotID: "snapshotID",
}
conn1, ms1 := openConnAndModelStore(t, ctx)
require.NoError(t, ms1.Put(ctx, model.BackupSchema, &backupModel))
require.NoError(t, ms1.Close(ctx))
err := ms1.Put(ctx, model.BackupSchema, &backupModel)
require.NoError(t, err, clues.ToCore(err))
err = ms1.Close(ctx)
require.NoError(t, err, clues.ToCore(err))
start := make(chan struct{})
ready := sync.WaitGroup{}
@ -871,22 +884,24 @@ func (suite *ModelStoreRegressionSuite) TestMultipleConfigs() {
ready.Wait()
defer func() {
assert.NoError(t, ms2.Close(ctx))
err := ms2.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}()
defer func() {
assert.NoError(t, ms1.Close(ctx))
err := ms1.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}()
// New instance should not have model we added.
gotBackup := backup.Backup{}
err := ms2.GetWithModelStoreID(
err = ms2.GetWithModelStoreID(
ctx,
model.BackupSchema,
backupModel.ModelStoreID,
&gotBackup,
)
assert.Error(t, err)
assert.Error(t, err, clues.ToCore(err))
// Old instance should still be able to access added model.
gotBackup = backup.Backup{}
@ -896,5 +911,5 @@ func (suite *ModelStoreRegressionSuite) TestMultipleConfigs() {
backupModel.ModelStoreID,
&gotBackup,
)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
}

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester"
)
@ -22,14 +23,12 @@ func TestPathEncoderSuite(t *testing.T) {
func (suite *PathEncoderSuite) TestEncodeDecode() {
t := suite.T()
elements := []string{"these", "are", "some", "path", "elements"}
encoded := encodeElements(elements...)
decoded := make([]string, 0, len(elements))
for _, e := range encoded {
dec, err := decodeElement(e)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
decoded = append(decoded, dec)
}
@ -70,7 +69,6 @@ func (suite *PathEncoderSuite) TestEncodeAsPathDecode() {
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
encoded := encodeAsPath(test.elements...)
// Sanity check, first and last character should not be '/'.
@ -80,7 +78,7 @@ func (suite *PathEncoderSuite) TestEncodeAsPathDecode() {
for _, e := range strings.Split(encoded, "/") {
dec, err := decodeElement(e)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
decoded = append(decoded, dec)
}
@ -97,7 +95,7 @@ func FuzzEncodeDecodeSingleString(f *testing.F) {
assert.False(t, strings.ContainsRune(encoded[0], '/'))
decoded, err := decodeElement(encoded[0])
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, in, decoded)
})
}

View File

@ -18,6 +18,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester"
@ -28,7 +29,7 @@ import (
func makePath(t *testing.T, elements []string, isItem bool) path.Path {
p, err := path.FromDataLayerPath(stdpath.Join(elements...), isItem)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return p
}
@ -111,7 +112,7 @@ func expectFileData(
}
r, err := f.GetReader(ctx)
if !assert.NoErrorf(t, err, "getting reader for file: %s", name) {
if !assert.NoError(t, err, "getting reader for file:", name, clues.ToCore(err)) {
return
}
@ -122,11 +123,11 @@ func expectFileData(
}
got, err := io.ReadAll(r)
if !assert.NoErrorf(t, err, "reading data in file: %s", name) {
if !assert.NoError(t, err, "reading data in file", name, clues.ToCore(err)) {
return
}
assert.Equalf(t, expected, got, "data in file: %s", name)
assert.Equal(t, expected, got, "data in file", name, clues.ToCore(err))
}
func expectTree(
@ -203,7 +204,7 @@ func getDirEntriesForEntry(
require.True(t, ok, "entry is not a directory")
entries, err := fs.GetAllEntries(ctx, d)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return entries
}
@ -277,7 +278,7 @@ func (suite *VersionReadersUnitSuite) TestWriteAndRead() {
defer reversible.Close()
allData, err := io.ReadAll(reversible)
test.check(t, err)
test.check(t, err, clues.ToCore(err))
if err != nil {
return
@ -303,7 +304,7 @@ func readAllInParts(
break
}
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
read += n
res = append(res, tmp[:n]...)
@ -359,7 +360,7 @@ func (suite *CorsoProgressUnitSuite) SetupSuite() {
path.EmailCategory,
true,
testInboxDir, "testFile")
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
suite.targetFilePath = p
suite.targetFileName = suite.targetFilePath.ToBuilder().Dir().String()
@ -474,7 +475,6 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFile() {
assert.Empty(t, cp.pending)
entries := bd.Details().Entries
assert.Len(t, entries, test.expectedNumEntries)
for _, entry := range entries {
@ -516,7 +516,7 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFileCachedNoPrevPathErrors() {
assert.Empty(t, cp.pending)
assert.Empty(t, bd.Details().Entries)
assert.Error(t, cp.errs.Failure())
assert.Error(t, cp.errs.Failure(), clues.ToCore(cp.errs.Failure()))
}
func (suite *CorsoProgressUnitSuite) TestFinishedFileBuildsHierarchyNewItem() {
@ -722,12 +722,12 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree() {
// - Inbox
// - 42 separate files
dirTree, err := inflateDirTree(ctx, nil, nil, collections, nil, progress)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, encodeAsPath(testTenant), dirTree.Name())
entries, err := fs.GetAllEntries(ctx, dirTree)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
expectDirs(t, entries, encodeElements(service), true)
@ -818,12 +818,12 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_MixedDirectory()
}
dirTree, err := inflateDirTree(ctx, nil, nil, test.layout, nil, progress)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, encodeAsPath(testTenant), dirTree.Name())
entries, err := fs.GetAllEntries(ctx, dirTree)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
expectDirs(t, entries, encodeElements(service), true)
@ -919,7 +919,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_Fails() {
t := suite.T()
_, err := inflateDirTree(ctx, nil, nil, test.layout, nil, nil)
assert.Error(t, err)
assert.Error(t, err, clues.ToCore(err))
})
}
}
@ -1031,7 +1031,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeErrors() {
}
_, err := inflateDirTree(ctx, nil, nil, cols, nil, progress)
require.Error(t, err)
require.Error(t, err, clues.ToCore(err))
})
}
}
@ -1315,7 +1315,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
nil,
progress,
)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
expectTree(t, ctx, test.expected, dirTree)
})
@ -2093,7 +2093,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
test.inputCollections(t),
test.inputExcludes,
progress)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
expectTree(t, ctx, test.expected, dirTree)
})
@ -2256,7 +2256,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSkipsDeletedSubtre
collections,
nil,
progress)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
expectTree(t, ctx, expected, dirTree)
}
@ -2360,7 +2360,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_HandleEmptyBase()
collections,
nil,
progress)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
expectTree(t, ctx, expected, dirTree)
}
@ -2611,7 +2611,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsCorrectSubt
nil,
progress,
)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
expectTree(t, ctx, expected, dirTree)
}

View File

@ -16,6 +16,7 @@ import (
"github.com/stretchr/testify/suite"
"golang.org/x/exp/maps"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/connector/onedrive"
"github.com/alcionai/corso/src/internal/data"
@ -70,17 +71,17 @@ func testForFiles(
count++
fullPath, err := c.FullPath().Append(s.UUID(), true)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
expected, ok := expected[fullPath.String()]
require.True(t, ok, "unexpected file with path %q", fullPath)
buf, err := io.ReadAll(s.ToReader())
require.NoError(t, err, "reading collection item: %s", fullPath)
assert.Equal(t, expected, buf, "comparing collection item: %s", fullPath)
require.NoError(t, err, "reading collection item", fullPath, clues.ToCore(err))
assert.Equal(t, expected, buf, "comparing collection item", fullPath)
require.Implements(t, (*data.StreamSize)(nil), s)
ss := s.(data.StreamSize)
assert.Equal(t, len(buf), int(ss.Size()))
}
@ -97,7 +98,7 @@ func checkSnapshotTags(
snapshotID string,
) {
man, err := snapshot.LoadSnapshot(ctx, rep, manifest.ID(snapshotID))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, expectedTags, man.Tags)
}
@ -120,7 +121,7 @@ func (suite *KopiaUnitSuite) SetupSuite() {
),
false,
)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
suite.testPath = tmp
}
@ -172,7 +173,7 @@ func (suite *KopiaIntegrationSuite) SetupSuite() {
path.EmailCategory,
false,
testInboxDir)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
suite.storePath1 = tmp
suite.locPath1 = tmp
@ -184,7 +185,7 @@ func (suite *KopiaIntegrationSuite) SetupSuite() {
path.EmailCategory,
false,
testArchiveDir)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
suite.storePath2 = tmp
suite.locPath2 = tmp
@ -195,14 +196,16 @@ func (suite *KopiaIntegrationSuite) SetupTest() {
suite.ctx, suite.flush = tester.NewContext()
c, err := openKopiaRepo(t, suite.ctx)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.w = &Wrapper{c}
}
func (suite *KopiaIntegrationSuite) TearDownTest() {
defer suite.flush()
assert.NoError(suite.T(), suite.w.Close(suite.ctx))
err := suite.w.Close(suite.ctx)
assert.NoError(suite.T(), err, clues.ToCore(err))
}
func (suite *KopiaIntegrationSuite) TestBackupCollections() {
@ -282,7 +285,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
tags,
true,
fault.New(true))
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.Equal(t, test.expectedUploadedFiles, stats.TotalFileCount, "total files")
assert.Equal(t, test.expectedUploadedFiles, stats.UncachedFileCount, "uncached files")
@ -317,7 +320,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
suite.w.c,
manifest.ID(stats.SnapshotID),
)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
prevSnaps = append(prevSnaps, IncrementalBase{
Manifest: snap,
@ -337,7 +340,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_NoDetailsForMeta() {
path.FilesCategory,
false,
testInboxDir)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
storePath := tmp
locPath := tmp
@ -429,7 +432,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_NoDetailsForMeta() {
tags,
true,
fault.New(true))
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.Equal(t, test.expectedUploadedFiles, stats.TotalFileCount, "total files")
assert.Equal(t, test.expectedUploadedFiles, stats.UncachedFileCount, "uncached files")
@ -476,9 +479,8 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_NoDetailsForMeta() {
snap, err := snapshot.LoadSnapshot(
suite.ctx,
suite.w.c,
manifest.ID(stats.SnapshotID),
)
require.NoError(t, err)
manifest.ID(stats.SnapshotID))
require.NoError(t, err, clues.ToCore(err))
prevSnaps = append(prevSnaps, IncrementalBase{
Manifest: snap,
@ -491,15 +493,16 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_NoDetailsForMeta() {
}
func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() {
t := suite.T()
ctx, flush := tester.NewContext()
defer flush()
k, err := openKopiaRepo(t, ctx)
require.NoError(t, err)
t := suite.T()
require.NoError(t, k.Compression(ctx, "s2-default"))
k, err := openKopiaRepo(t, ctx)
require.NoError(t, err, clues.ToCore(err))
err = k.Compression(ctx, "s2-default")
require.NoError(t, err, clues.ToCore(err))
w := &Wrapper{k}
@ -518,10 +521,10 @@ func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() {
dc2 := mockconnector.NewMockExchangeCollection(suite.storePath2, suite.locPath2, 1)
fp1, err := suite.storePath1.Append(dc1.Names[0], true)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
fp2, err := suite.storePath2.Append(dc2.Names[0], true)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
stats, _, _, err := w.BackupCollections(
ctx,
@ -531,9 +534,10 @@ func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() {
tags,
true,
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
require.NoError(t, k.Compression(ctx, "gzip"))
err = k.Compression(ctx, "gzip")
require.NoError(t, err, clues.ToCore(err))
expected := map[string][]byte{
fp1.String(): dc1.Data[0],
@ -549,7 +553,7 @@ func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() {
},
nil,
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, 2, len(result))
testForFiles(t, ctx, expected, result)
@ -649,8 +653,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
tags,
true,
fault.New(true))
require.Error(t, err)
require.Error(t, err, clues.ToCore(err))
assert.Equal(t, 0, stats.ErrorCount)
assert.Equal(t, 5, stats.TotalFileCount)
assert.Equal(t, 6, stats.TotalDirectoryCount)
@ -660,7 +663,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
assert.Len(t, deets.Details().Entries, 5+6)
failedPath, err := suite.storePath2.Append(testFileName4, true)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
ic := i64counter{}
@ -673,7 +676,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
// Files that had an error shouldn't make a dir entry in kopia. If they do we
// may run into kopia-assisted incrementals issues because only mod time and
// not file size is checked for StreamingFiles.
assert.ErrorIs(t, err, data.ErrNotFound, "errored file is restorable")
assert.ErrorIs(t, err, data.ErrNotFound, "errored file is restorable", clues.ToCore(err))
}
type backedupFile struct {
@ -712,7 +715,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollectionsHandlesNoCollections()
nil,
true,
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, BackupStats{}, *s)
assert.Empty(t, d.Details().Entries)
@ -753,7 +756,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) SetupSuite() {
path.EmailCategory,
false,
testInboxDir)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
suite.testPath1 = tmp
@ -764,7 +767,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) SetupSuite() {
path.EmailCategory,
false,
testArchiveDir)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
suite.testPath2 = tmp
@ -810,7 +813,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) SetupSuite() {
for _, item := range filesInfo {
pth, err := item.parentPath.Append(item.name, true)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
mapKey := item.parentPath.String()
f := &backedupFile{
@ -830,8 +833,9 @@ func (suite *KopiaSimpleRepoIntegrationSuite) SetupTest() {
expectedFiles := len(suite.filesByPath)
//nolint:forbidigo
suite.ctx, _ = logger.SeedLevel(context.Background(), logger.Development)
c, err := openKopiaRepo(t, suite.ctx)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.w = &Wrapper{c}
@ -872,7 +876,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) SetupTest() {
tags,
false,
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
require.Equal(t, stats.ErrorCount, 0)
require.Equal(t, stats.TotalFileCount, expectedFiles)
require.Equal(t, stats.TotalDirectoryCount, expectedDirs)
@ -885,7 +889,8 @@ func (suite *KopiaSimpleRepoIntegrationSuite) SetupTest() {
}
func (suite *KopiaSimpleRepoIntegrationSuite) TearDownTest() {
assert.NoError(suite.T(), suite.w.Close(suite.ctx))
err := suite.w.Close(suite.ctx)
assert.NoError(suite.T(), err, clues.ToCore(err))
logger.Flush(suite.ctx)
}
@ -911,7 +916,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
path.EmailCategory,
false,
"tmp")
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
subtreePath := subtreePathTmp.ToBuilder().Dir()
@ -920,7 +925,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
[]Reason{reason},
nil,
)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
require.Len(suite.T(), manifests, 1)
require.Equal(suite.T(), suite.snapshotID, manifests[0].ID)
@ -1030,7 +1035,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
tags,
true,
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, test.expectedCachedItems, stats.CachedFileCount)
assert.Equal(t, test.expectedUncachedItems, stats.UncachedFileCount)
@ -1050,7 +1055,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
},
&ic,
fault.New(true))
test.restoreCheck(t, err)
test.restoreCheck(t, err, clues.ToCore(err))
})
}
}
@ -1151,7 +1156,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestRestoreMultipleItems() {
test.inputPaths,
&ic,
fault.New(true))
test.expectedErr(t, err)
test.expectedErr(t, err, clues.ToCore(err))
if err != nil {
return
@ -1166,7 +1171,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestRestoreMultipleItems() {
func (suite *KopiaSimpleRepoIntegrationSuite) TestRestoreMultipleItems_Errors() {
itemPath, err := suite.testPath1.Append(testFileName, true)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
table := []struct {
name string
@ -1200,7 +1205,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestRestoreMultipleItems_Errors()
test.paths,
nil,
fault.New(true))
assert.Error(t, err)
assert.Error(t, err, clues.ToCore(err))
assert.Empty(t, c)
})
}
@ -1209,7 +1214,8 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestRestoreMultipleItems_Errors()
func (suite *KopiaSimpleRepoIntegrationSuite) TestDeleteSnapshot() {
t := suite.T()
assert.NoError(t, suite.w.DeleteSnapshot(suite.ctx, string(suite.snapshotID)))
err := suite.w.DeleteSnapshot(suite.ctx, string(suite.snapshotID))
assert.NoError(t, err, clues.ToCore(err))
// assert the deletion worked
itemPath := suite.files[suite.testPath1.String()][0].itemPath
@ -1221,7 +1227,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestDeleteSnapshot() {
[]path.Path{itemPath},
&ic,
fault.New(true))
assert.Error(t, err, "snapshot should be deleted")
assert.Error(t, err, "snapshot should be deleted", clues.ToCore(err))
assert.Empty(t, c)
assert.Zero(t, ic.i)
}
@ -1247,7 +1253,8 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestDeleteSnapshot_BadIDs() {
suite.Run(test.name, func() {
t := suite.T()
test.expect(t, suite.w.DeleteSnapshot(suite.ctx, test.snapshotID))
err := suite.w.DeleteSnapshot(suite.ctx, test.snapshotID)
test.expect(t, err, clues.ToCore(err))
})
}
}
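
For readers skimming the hunks above: the change repeated throughout this file splits a call whose error used to be asserted inline into an explicit err variable, so the same value can be handed to clues.ToCore as a trailing message argument. A minimal, self-contained sketch of that shape (the doWork helper is hypothetical; clues.ToCore and the testify calls are the ones used in the diff):

package example

import (
	"testing"

	"github.com/alcionai/clues"
	"github.com/stretchr/testify/require"
)

// doWork is a hypothetical stand-in for any call whose error the test asserts.
func doWork() error { return nil }

func TestToCorePattern(t *testing.T) {
	// before: require.NoError(t, doWork())
	// after:  extract the error, then pass clues.ToCore(err) as an extra
	//         message argument so the structured clues data is printed
	//         whenever the assertion fails.
	err := doWork()
	require.NoError(t, err, clues.ToCore(err))
}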

View File

@ -14,6 +14,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester"
)
@ -71,7 +72,7 @@ func (suite *ObserveProgressUnitSuite) TestItemProgress() {
break
}
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.Equal(t, 25, n)
i++
}

View File

@ -16,6 +16,7 @@ import (
"github.com/stretchr/testify/suite"
"golang.org/x/exp/maps"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector"
@ -62,19 +63,19 @@ func prepNewTestBackupOp(
) (BackupOperation, account.Account, *kopia.Wrapper, *kopia.ModelStore, func()) {
//revive:enable:context-as-argument
acct := tester.NewM365Account(t)
// need to initialize the repository before we can test connecting to it.
st := tester.NewPrefixedS3Storage(t)
k := kopia.NewConn(st)
require.NoError(t, k.Initialize(ctx))
err := k.Initialize(ctx)
require.NoError(t, err, clues.ToCore(err))
// kopiaRef comes with a count of 1 and Wrapper bumps it again so safe
// to close here.
closer := func() { k.Close(ctx) }
kw, err := kopia.NewWrapper(k)
if !assert.NoError(t, err) {
if !assert.NoError(t, err, clues.ToCore(err)) {
closer()
t.FailNow()
}
@ -85,7 +86,7 @@ func prepNewTestBackupOp(
}
ms, err := kopia.NewModelStore(k)
if !assert.NoError(t, err) {
if !assert.NoError(t, err, clues.ToCore(err)) {
closer()
t.FailNow()
}
@ -127,7 +128,7 @@ func newTestBackupOp(
opts.ToggleFeatures = featureToggles
bo, err := NewBackupOperation(ctx, opts, kw, sw, acct, sel, bus)
if !assert.NoError(t, err) {
if !assert.NoError(t, err, clues.ToCore(err)) {
closer()
t.FailNow()
}
@ -143,7 +144,8 @@ func runAndCheckBackup(
mb *evmock.Bus,
) {
//revive:enable:context-as-argument
require.NoError(t, bo.Run(ctx))
err := bo.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, bo.Results, "the backup had non-zero results")
require.NotEmpty(t, bo.Results.BackupID, "the backup generated an ID")
require.Equalf(
@ -158,7 +160,7 @@ func runAndCheckBackup(
assert.Less(t, int64(0), bo.Results.BytesRead, "bytes read")
assert.Less(t, int64(0), bo.Results.BytesUploaded, "bytes uploaded")
assert.Equal(t, 1, bo.Results.ResourceOwners, "count of resource owners")
assert.NoError(t, bo.Errors.Failure(), "incremental non-recoverable error")
assert.NoError(t, bo.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(bo.Errors.Failure()))
assert.Empty(t, bo.Errors.Recovered(), "incremental recoverable/iteration errors")
assert.Equal(t, 1, mb.TimesCalled[events.BackupStart], "backup-start events")
assert.Equal(t, 1, mb.TimesCalled[events.BackupEnd], "backup-end events")
@ -193,7 +195,7 @@ func checkBackupIsInManifests(
)
mans, err := kw.FetchPrevSnapshotManifests(ctx, reasons, tags)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
for _, man := range mans {
bID, ok := man.GetTag(kopia.TagBackupID)
@ -229,7 +231,7 @@ func checkMetadataFilesExist(
bup := &backup.Backup{}
err := ms.Get(ctx, model.BackupSchema, backupID, bup)
if !assert.NoError(t, err) {
if !assert.NoError(t, err, clues.ToCore(err)) {
return
}
@ -240,12 +242,12 @@ func checkMetadataFilesExist(
p, err := path.Builder{}.
Append(fName).
ToServiceCategoryMetadataPath(tenant, user, service, category, true)
if !assert.NoError(t, err, "bad metadata path") {
if !assert.NoError(t, err, "bad metadata path", clues.ToCore(err)) {
continue
}
dir, err := p.Dir()
if !assert.NoError(t, err, "parent path") {
if !assert.NoError(t, err, "parent path", clues.ToCore(err)) {
continue
}
@ -254,7 +256,7 @@ func checkMetadataFilesExist(
}
cols, err := kw.RestoreMultipleItems(ctx, bup.SnapshotID, paths, nil, fault.New(true))
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
for _, col := range cols {
itemNames := []string{}
@ -357,7 +359,7 @@ func generateContainerOfItems(
control.Options{RestorePermissions: true},
dataColls,
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return deets
}
@ -449,7 +451,7 @@ func toDataLayerPath(
err = errors.Errorf("unknown service %s", service.String())
}
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return p
}
@ -509,7 +511,7 @@ func (suite *BackupOpIntegrationSuite) TestNewBackupOperation() {
test.acct,
selectors.Selector{DiscreteOwner: "test"},
evmock.NewBus())
test.errCheck(suite.T(), err)
test.errCheck(suite.T(), err, clues.ToCore(err))
})
}
}
@ -585,7 +587,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchange() {
defer closer()
m365, err := acct.M365Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
// run the tests
runAndCheckBackup(t, ctx, &bo, mb)
@ -634,7 +636,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchange() {
assert.Greater(t, bo.Results.BytesRead, incBO.Results.BytesRead, "incremental bytes read")
assert.Greater(t, bo.Results.BytesUploaded, incBO.Results.BytesUploaded, "incremental bytes uploaded")
assert.Equal(t, bo.Results.ResourceOwners, incBO.Results.ResourceOwners, "incremental backup resource owner")
assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error")
assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(incBO.Errors.Failure()))
assert.Empty(t, incBO.Errors.Recovered(), "count incremental recoverable/iteration errors")
assert.Equal(t, 1, incMB.TimesCalled[events.BackupStart], "incremental backup-start events")
assert.Equal(t, 1, incMB.TimesCalled[events.BackupEnd], "incremental backup-end events")
@ -673,7 +675,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
)
m365, err := acct.M365Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
gc, err := connector.NewGraphConnector(
ctx,
@ -681,10 +683,10 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
acct,
connector.Users,
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
ac, err := api.NewClient(m365)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
// generate 3 new folders with two items each.
// Only the first two folders will be part of the initial backup and
@ -778,11 +780,11 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
Credentials: m365,
}
cr, err := exchange.PopulateExchangeContainerResolver(ctx, qp, fault.New(true))
require.NoError(t, err, "populating %s container resolver", category)
require.NoError(t, err, "populating container resolver", category, clues.ToCore(err))
for destName, dest := range gen.dests {
p, err := path.FromDataLayerPath(dest.deets.Entries[0].RepoRef, true)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
id, ok := cr.PathInCache(p.Folder(false))
require.True(t, ok, "dir %s found in %s cache", p.Folder(false), category)
@ -842,7 +844,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
MailFoldersById(fromContainer).
Move().
Post(ctx, body, nil)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
},
itemsRead: 0, // zero because we don't count container reads
itemsWritten: 2,
@ -855,20 +857,14 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
switch category {
case path.EmailCategory:
require.NoError(
t,
ac.Mail().DeleteContainer(ctx, suite.user, containerID),
"deleting an email folder")
err := ac.Mail().DeleteContainer(ctx, suite.user, containerID)
require.NoError(t, err, "deleting an email folder", clues.ToCore(err))
case path.ContactsCategory:
require.NoError(
t,
ac.Contacts().DeleteContainer(ctx, suite.user, containerID),
"deleting a contacts folder")
err := ac.Contacts().DeleteContainer(ctx, suite.user, containerID)
require.NoError(t, err, "deleting a contacts folder", clues.ToCore(err))
case path.EventsCategory:
require.NoError(
t,
ac.Events().DeleteContainer(ctx, suite.user, containerID),
"deleting a calendar")
err := ac.Events().DeleteContainer(ctx, suite.user, containerID)
require.NoError(t, err, "deleting a calendar", clues.ToCore(err))
}
}
},
@ -898,13 +894,13 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
Credentials: m365,
}
cr, err := exchange.PopulateExchangeContainerResolver(ctx, qp, fault.New(true))
require.NoError(t, err, "populating %s container resolver", category)
require.NoError(t, err, "populating container resolver", category, clues.ToCore(err))
p, err := path.FromDataLayerPath(deets.Entries[0].RepoRef, true)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
id, ok := cr.PathInCache(p.Folder(false))
require.True(t, ok, "dir %s found in %s cache", p.Folder(false), category)
require.Truef(t, ok, "dir %s found in %s cache", p.Folder(false), category)
dataset[category].dests[container3] = contDeets{id, deets}
}
@ -932,31 +928,31 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
cmf := cli.MailFoldersById(containerID)
body, err := cmf.Get(ctx, nil)
require.NoError(t, err, "getting mail folder")
require.NoError(t, err, "getting mail folder", clues.ToCore(err))
body.SetDisplayName(&containerRename)
_, err = cmf.Patch(ctx, body, nil)
require.NoError(t, err, "updating mail folder name")
require.NoError(t, err, "updating mail folder name", clues.ToCore(err))
case path.ContactsCategory:
ccf := cli.ContactFoldersById(containerID)
body, err := ccf.Get(ctx, nil)
require.NoError(t, err, "getting contact folder")
require.NoError(t, err, "getting contact folder", clues.ToCore(err))
body.SetDisplayName(&containerRename)
_, err = ccf.Patch(ctx, body, nil)
require.NoError(t, err, "updating contact folder name")
require.NoError(t, err, "updating contact folder name", clues.ToCore(err))
case path.EventsCategory:
cbi := cli.CalendarsById(containerID)
body, err := cbi.Get(ctx, nil)
require.NoError(t, err, "getting calendar")
require.NoError(t, err, "getting calendar", clues.ToCore(err))
body.SetName(&containerRename)
_, err = cbi.Patch(ctx, body, nil)
require.NoError(t, err, "updating calendar name")
require.NoError(t, err, "updating calendar name", clues.ToCore(err))
}
}
},
@ -974,26 +970,26 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
case path.EmailCategory:
_, itemData := generateItemData(t, category, suite.user, mailDBF)
body, err := support.CreateMessageFromBytes(itemData)
require.NoError(t, err, "transforming mail bytes to messageable")
require.NoError(t, err, "transforming mail bytes to messageable", clues.ToCore(err))
_, err = cli.MailFoldersById(containerID).Messages().Post(ctx, body, nil)
require.NoError(t, err, "posting email item")
require.NoError(t, err, "posting email item", clues.ToCore(err))
case path.ContactsCategory:
_, itemData := generateItemData(t, category, suite.user, contactDBF)
body, err := support.CreateContactFromBytes(itemData)
require.NoError(t, err, "transforming contact bytes to contactable")
require.NoError(t, err, "transforming contact bytes to contactable", clues.ToCore(err))
_, err = cli.ContactFoldersById(containerID).Contacts().Post(ctx, body, nil)
require.NoError(t, err, "posting contact item")
require.NoError(t, err, "posting contact item", clues.ToCore(err))
case path.EventsCategory:
_, itemData := generateItemData(t, category, suite.user, eventDBF)
body, err := support.CreateEventFromBytes(itemData)
require.NoError(t, err, "transforming event bytes to eventable")
require.NoError(t, err, "transforming event bytes to eventable", clues.ToCore(err))
_, err = cli.CalendarsById(containerID).Events().Post(ctx, body, nil)
require.NoError(t, err, "posting events item")
require.NoError(t, err, "posting events item", clues.ToCore(err))
}
}
},
@ -1010,27 +1006,27 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
switch category {
case path.EmailCategory:
ids, _, _, err := ac.Mail().GetAddedAndRemovedItemIDs(ctx, suite.user, containerID, "")
require.NoError(t, err, "getting message ids")
require.NoError(t, err, "getting message ids", clues.ToCore(err))
require.NotEmpty(t, ids, "message ids in folder")
err = cli.MessagesById(ids[0]).Delete(ctx, nil)
require.NoError(t, err, "deleting email item")
require.NoError(t, err, "deleting email item", clues.ToCore(err))
case path.ContactsCategory:
ids, _, _, err := ac.Contacts().GetAddedAndRemovedItemIDs(ctx, suite.user, containerID, "")
require.NoError(t, err, "getting contact ids")
require.NoError(t, err, "getting contact ids", clues.ToCore(err))
require.NotEmpty(t, ids, "contact ids in folder")
err = cli.ContactsById(ids[0]).Delete(ctx, nil)
require.NoError(t, err, "deleting contact item")
require.NoError(t, err, "deleting contact item", clues.ToCore(err))
case path.EventsCategory:
ids, _, _, err := ac.Events().GetAddedAndRemovedItemIDs(ctx, suite.user, containerID, "")
require.NoError(t, err, "getting event ids")
require.NoError(t, err, "getting event ids", clues.ToCore(err))
require.NotEmpty(t, ids, "event ids in folder")
err = cli.CalendarsById(ids[0]).Delete(ctx, nil)
require.NoError(t, err, "deleting calendar")
require.NoError(t, err, "deleting calendar", clues.ToCore(err))
}
}
},
@ -1047,7 +1043,9 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
)
test.updateUserData(t)
require.NoError(t, incBO.Run(ctx))
err := incBO.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
checkBackupIsInManifests(t, ctx, kw, &incBO, sel.Selector, suite.user, maps.Keys(categories)...)
checkMetadataFilesExist(
t,
@ -1058,14 +1056,13 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
m365.AzureTenantID,
suite.user,
path.ExchangeService,
categories,
)
categories)
// do some additional checks to ensure the incremental dealt with fewer items.
// +4 on read/writes to account for metadata: 1 delta and 1 path for each type.
assert.Equal(t, test.itemsWritten+4, incBO.Results.ItemsWritten, "incremental items written")
assert.Equal(t, test.itemsRead+4, incBO.Results.ItemsRead, "incremental items read")
assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error")
assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(incBO.Errors.Failure()))
assert.Empty(t, incBO.Errors.Recovered(), "incremental recoverable/iteration errors")
assert.Equal(t, 1, incMB.TimesCalled[events.BackupStart], "incremental backup-start events")
assert.Equal(t, 1, incMB.TimesCalled[events.BackupEnd], "incremental backup-end events")
@ -1115,7 +1112,7 @@ func mustGetDefaultDriveID(
With("user", userID)
}
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
id := ptr.Val(d.GetId())
require.NotEmpty(t, id, "drive ID not set")
@ -1154,7 +1151,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
)
m365, err := acct.M365Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
gc, err := connector.NewGraphConnector(
ctx,
@ -1162,7 +1159,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
acct,
connector.Users,
fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
driveID := mustGetDefaultDriveID(t, ctx, gc.Service, suite.user)
@ -1204,7 +1201,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
destName)
resp, err := drive.NewItemsDriveItemItemRequestBuilder(itemURL, gc.Service.Adapter()).
Get(ctx, nil)
require.NoErrorf(t, err, "getting drive folder ID", "folder name: %s", destName)
require.NoError(t, err, "getting drive folder ID", "folder name", destName, clues.ToCore(err))
containerIDs[destName] = ptr.Val(resp.GetId())
}
@ -1256,7 +1253,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
driveID,
targetContainer,
driveItem)
require.NoError(t, err, "creating new file")
require.NoError(t, err, "creating new file", clues.ToCore(err))
},
itemsRead: 1, // .data file for newitem
itemsWritten: 3, // .data and .meta for newitem, .dirmeta for parent
@ -1292,7 +1289,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
DrivesById(driveID).
ItemsById(ptr.Val(newFile.GetId())).
Patch(ctx, driveItem, nil)
require.NoError(t, err, "renaming file")
require.NoError(t, err, "renaming file", clues.ToCore(err))
},
itemsRead: 1, // .data file for newitem
itemsWritten: 3, // .data and .meta for newitem, .dirmeta for parent
@ -1313,7 +1310,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
DrivesById(driveID).
ItemsById(ptr.Val(newFile.GetId())).
Patch(ctx, driveItem, nil)
require.NoError(t, err, "moving file between folders")
require.NoError(t, err, "moving file between folders", clues.ToCore(err))
},
itemsRead: 1, // .data file for newitem
itemsWritten: 3, // .data and .meta for newitem, .dirmeta for parent
@ -1326,7 +1323,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
DrivesById(driveID).
ItemsById(ptr.Val(newFile.GetId())).
Delete(ctx, nil)
require.NoError(t, err, "deleting file")
require.NoError(t, err, "deleting file", clues.ToCore(err))
},
itemsRead: 0,
itemsWritten: 0,
@ -1348,7 +1345,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
DrivesById(driveID).
ItemsById(source).
Patch(ctx, driveItem, nil)
require.NoError(t, err, "moving folder")
require.NoError(t, err, "moving folder", clues.ToCore(err))
},
itemsRead: 0,
itemsWritten: 7, // 2*2(data and meta of 2 files) + 3 (dirmeta of two moved folders and target)
@ -1371,7 +1368,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
DrivesById(driveID).
ItemsById(child).
Patch(ctx, driveItem, nil)
require.NoError(t, err, "renaming folder")
require.NoError(t, err, "renaming folder", clues.ToCore(err))
},
itemsRead: 0,
itemsWritten: 7, // 2*2(data and meta of 2 files) + 3 (dirmeta of two moved folders and target)
@ -1385,7 +1382,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
DrivesById(driveID).
ItemsById(container).
Delete(ctx, nil)
require.NoError(t, err, "deleting folder")
require.NoError(t, err, "deleting folder", clues.ToCore(err))
},
itemsRead: 0,
itemsWritten: 0,
@ -1413,7 +1410,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
container3)
resp, err := drive.NewItemsDriveItemItemRequestBuilder(itemURL, gc.Service.Adapter()).
Get(ctx, nil)
require.NoErrorf(t, err, "getting drive folder ID", "folder name: %s", container3)
require.NoError(t, err, "getting drive folder ID", "folder name", container3, clues.ToCore(err))
containerIDs[container3] = ptr.Val(resp.GetId())
},
@ -1432,7 +1429,9 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
tester.LogTimeOfTest(suite.T())
test.updateUserData(t)
require.NoError(t, incBO.Run(ctx))
err := incBO.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
checkBackupIsInManifests(t, ctx, kw, &incBO, sel.Selector, suite.user, maps.Keys(categories)...)
checkMetadataFilesExist(
t,
@ -1450,7 +1449,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
// +2 on read/writes to account for metadata: 1 delta and 1 path.
assert.Equal(t, test.itemsWritten+2, incBO.Results.ItemsWritten, "incremental items written")
assert.Equal(t, test.itemsRead+2, incBO.Results.ItemsRead, "incremental items read")
assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error")
assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(incBO.Errors.Failure()))
assert.Empty(t, incBO.Errors.Recovered(), "incremental recoverable/iteration errors")
assert.Equal(t, 1, incMB.TimesCalled[events.BackupStart], "incremental backup-start events")
assert.Equal(t, 1, incMB.TimesCalled[events.BackupEnd], "incremental backup-end events")
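
The setup helpers earlier in this file (prepNewTestBackupOp and friends) apply the same idea in a guarded form: assert rather than require, so a failed precondition still runs its cleanup before the test aborts. A sketch of that shape under hypothetical names (openResource stands in for steps like kopia.NewWrapper):

package example

import (
	"testing"

	"github.com/alcionai/clues"
	"github.com/stretchr/testify/assert"
)

// openResource is a hypothetical fallible setup step.
func openResource() error { return nil }

func TestGuardedSetup(t *testing.T) {
	closer := func() { /* release anything opened by earlier steps */ }

	err := openResource()
	// assert (not require) so the closer can run before the test aborts;
	// clues.ToCore(err) adds the structured error data to the failure output.
	if !assert.NoError(t, err, clues.ToCore(err)) {
		closer()
		t.FailNow()
	}

	defer closer()
	// the remaining setup and assertions would follow here.
}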

View File

@ -13,6 +13,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
evmock "github.com/alcionai/corso/src/internal/events/mock"
@ -194,7 +195,7 @@ func makeMetadataBasePath(
service,
category,
false)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return p
}
@ -215,7 +216,7 @@ func makeMetadataPath(
service,
category,
true)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return p
}
@ -250,7 +251,7 @@ func makePath(t *testing.T, elements []string, isItem bool) path.Path {
t.Helper()
p, err := path.FromDataLayerPath(stdpath.Join(elements...), isItem)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return p
}
@ -295,7 +296,7 @@ func makeDetailsEntry(
case path.OneDriveService:
parent, err := path.GetDriveFolderPath(p)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
res.OneDrive = &details.OneDriveInfo{
ItemType: details.OneDriveItem,
@ -408,7 +409,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_PersistResults() {
acct,
sel,
evmock.NewBus())
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
op.Errors.Fail(test.fail)
@ -696,7 +697,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
)
itemParents1, err := path.GetDriveFolderPath(itemPath1)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
table := []struct {
name string
@ -1213,7 +1214,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
test.inputShortRefsFromPrevBackup,
&deets,
fault.New(true))
test.errCheck(t, err)
test.errCheck(t, err, clues.ToCore(err))
if err != nil {
return
@ -1329,7 +1330,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsFolde
inputToMerge,
&deets,
fault.New(true))
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
compareDeetEntries(t, expectedEntries, deets.Details().Entries)
}

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/model"
@ -117,7 +118,7 @@ func (suite *OperationsManifestsUnitSuite) TestCollectMetadata() {
for _, f := range files {
p, err := emailPath.Append(f, true)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
ps = append(ps, p)
}
@ -140,7 +141,7 @@ func (suite *OperationsManifestsUnitSuite) TestCollectMetadata() {
for _, f := range files {
p, err := emailPath.Append(f, true)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
ps = append(ps, p)
}
@ -168,10 +169,10 @@ func (suite *OperationsManifestsUnitSuite) TestCollectMetadata() {
for _, f := range files {
p, err := emailPath.Append(f, true)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
ps = append(ps, p)
p, err = contactPath.Append(f, true)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
ps = append(ps, p)
}
@ -199,10 +200,10 @@ func (suite *OperationsManifestsUnitSuite) TestCollectMetadata() {
for _, f := range files {
p, err := emailPath.Append(f, true)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
ps = append(ps, p)
p, err = contactPath.Append(f, true)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
ps = append(ps, p)
}
@ -229,7 +230,7 @@ func (suite *OperationsManifestsUnitSuite) TestCollectMetadata() {
}
_, err := collectMetadata(ctx, &mr, man, test.fileNames, tid, fault.New(true))
assert.ErrorIs(t, err, test.expectErr)
assert.ErrorIs(t, err, test.expectErr, clues.ToCore(err))
})
}
}
@ -394,7 +395,7 @@ func (suite *OperationsManifestsUnitSuite) TestVerifyDistinctBases() {
defer flush()
err := verifyDistinctBases(ctx, test.mans)
test.expect(suite.T(), err)
test.expect(suite.T(), err, clues.ToCore(err))
})
}
}
@ -649,7 +650,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
tid,
test.getMeta,
fault.New(true))
test.assertErr(t, err)
test.assertErr(t, err, clues.ToCore(err))
test.assertB(t, b)
expectMans := test.mr.mans
@ -837,7 +838,8 @@ func (suite *BackupManifestUnitSuite) TestBackupOperation_VerifyDistinctBases()
ctx, flush := tester.NewContext()
defer flush()
test.errCheck(suite.T(), verifyDistinctBases(ctx, test.input))
err := verifyDistinctBases(ctx, test.input)
test.errCheck(suite.T(), err, clues.ToCore(err))
})
}
}
@ -958,7 +960,7 @@ func (suite *BackupManifestUnitSuite) TestBackupOperation_CollectMetadata() {
mr := &mockRestorer{}
_, err := collectMetadata(ctx, mr, test.inputMan, test.inputFiles, tenant, fault.New(true))
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
checkPaths(t, test.expected, mr.gotPaths)
})

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/events"
"github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/tester"
@ -44,8 +45,8 @@ func (suite *OperationSuite) TestOperation_Validate() {
}
for _, test := range table {
suite.Run(test.name, func() {
op := newOperation(control.Options{}, events.Bus{}, test.kw, test.sw)
test.errCheck(suite.T(), op.validate())
err := newOperation(control.Options{}, events.Bus{}, test.kw, test.sw).validate()
test.errCheck(suite.T(), err, clues.ToCore(err))
})
}
}

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/connector/exchange"
"github.com/alcionai/corso/src/internal/connector/graph"
@ -113,11 +114,12 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() {
selectors.Selector{DiscreteOwner: "test"},
dest,
evmock.NewBus())
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
op.Errors.Fail(test.fail)
test.expectErr(t, op.persistResults(ctx, now, &test.stats))
err = op.persistResults(ctx, now, &test.stats)
test.expectErr(t, err, clues.ToCore(err))
assert.Equal(t, test.expectStatus.String(), op.Status.String(), "status")
assert.Equal(t, len(test.stats.cs), op.Results.ItemsRead, "items read")
@ -161,27 +163,26 @@ func (suite *RestoreOpIntegrationSuite) SetupSuite() {
defer flush()
t := suite.T()
m365UserID := tester.M365UserID(t)
acct := tester.NewM365Account(t)
// need to initialize the repository before we can test connecting to it.
st := tester.NewPrefixedS3Storage(t)
k := kopia.NewConn(st)
require.NoError(t, k.Initialize(ctx))
err := k.Initialize(ctx)
require.NoError(t, err, clues.ToCore(err))
suite.kopiaCloser = func(ctx context.Context) {
k.Close(ctx)
}
kw, err := kopia.NewWrapper(k)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.kw = kw
ms, err := kopia.NewModelStore(k)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.ms = ms
@ -206,8 +207,10 @@ func (suite *RestoreOpIntegrationSuite) SetupSuite() {
acct,
bsel.Selector,
evmock.NewBus())
require.NoError(t, err)
require.NoError(t, bo.Run(ctx))
require.NoError(t, err, clues.ToCore(err))
err = bo.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, bo.Results.BackupID)
suite.backupID = bo.Results.BackupID
@ -230,8 +233,10 @@ func (suite *RestoreOpIntegrationSuite) SetupSuite() {
csel.Selector,
evmock.NewBus(),
)
require.NoError(t, err)
require.NoError(t, bo.Run(ctx))
require.NoError(t, err, clues.ToCore(err))
err = bo.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, bo.Results.BackupID)
suite.sharepointID = bo.Results.BackupID
// Discount MetaData files (1 path, 1 delta)
@ -289,7 +294,7 @@ func (suite *RestoreOpIntegrationSuite) TestNewRestoreOperation() {
selectors.Selector{DiscreteOwner: "test"},
dest,
evmock.NewBus())
test.errCheck(suite.T(), err)
test.errCheck(suite.T(), err, clues.ToCore(err))
})
}
}
@ -334,22 +339,22 @@ func (suite *RestoreOpIntegrationSuite) TestRestore_Run() {
cleanup: func(t *testing.T, dest string) {
act := tester.NewM365Account(t)
m365, err := act.M365Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
adpt, err := graph.CreateAdapter(m365.AzureTenantID, m365.AzureClientID, m365.AzureClientSecret)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
service := graph.NewService(adpt)
pager := api.NewSiteDrivePager(service, tester.M365SiteID(t), []string{"id", "name"})
driveID, err := pager.GetDriveIDByName(ctx, "Documents")
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, driveID)
folderID, err := pager.GetFolderIDByName(ctx, driveID, dest)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, folderID)
err = onedrive.DeleteItem(ctx, service, driveID, folderID)
assert.NoError(t, err, "failed to delete restore folder: operations_SharePoint_Restore")
assert.NoError(t, err, "deleting restore folder", clues.ToCore(err))
},
},
}
@ -367,11 +372,11 @@ func (suite *RestoreOpIntegrationSuite) TestRestore_Run() {
test.getSelector(t),
test.dest,
mb)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
ds, err := ro.Run(ctx)
require.NoError(t, err, "restoreOp.Run()")
require.NoError(t, err, "restoreOp.Run()", clues.ToCore(err))
require.NotEmpty(t, ro.Results, "restoreOp results")
require.NotNil(t, ds, "restored details")
assert.Equal(t, ro.Status, Completed, "restoreOp status")
@ -379,7 +384,7 @@ func (suite *RestoreOpIntegrationSuite) TestRestore_Run() {
assert.Less(t, 0, ro.Results.ItemsRead, "restore items read")
assert.Less(t, int64(0), ro.Results.BytesRead, "bytes read")
assert.Equal(t, 1, ro.Results.ResourceOwners, "resource Owners")
assert.NoError(t, ro.Errors.Failure(), "non-recoverable error")
assert.NoError(t, ro.Errors.Failure(), "non-recoverable error", clues.ToCore(ro.Errors.Failure()))
assert.Empty(t, ro.Errors.Recovered(), "recoverable errors")
assert.Equal(t, test.expectedItems, ro.Results.ItemsWritten, "backup and restore wrote the same num of items")
assert.Equal(t, 1, mb.TimesCalled[events.RestoreStart], "restore-start events")
@ -415,7 +420,7 @@ func (suite *RestoreOpIntegrationSuite) TestRestore_Run_ErrorNoResults() {
rsel.Selector,
dest,
mb)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
ds, err := ro.Run(ctx)
require.Error(t, err, "restoreOp.Run() should have errored")

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details"
@ -27,21 +28,21 @@ func TestStreamDetailsIntegrationSuite(t *testing.T) {
}
func (suite *StreamDetailsIntegrationSuite) TestDetails() {
t := suite.T()
ctx, flush := tester.NewContext()
defer flush()
t := suite.T()
// need to initialize the repository before we can test connecting to it.
st := tester.NewPrefixedS3Storage(t)
k := kopia.NewConn(st)
require.NoError(t, k.Initialize(ctx))
err := k.Initialize(ctx)
require.NoError(t, err, clues.ToCore(err))
defer k.Close(ctx)
kw, err := kopia.NewWrapper(k)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
defer kw.Close(ctx)
@ -62,12 +63,12 @@ func (suite *StreamDetailsIntegrationSuite) TestDetails() {
)
id, err := sd.Write(ctx, deets, fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, id)
var readDeets details.Details
err = sd.Read(ctx, id, details.UnmarshalTo(&readDeets), fault.New(true))
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, readDeets)
assert.Equal(t, len(deets.Entries), len(readDeets.Entries))

View File

@ -5,6 +5,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/credentials"
)
@ -18,7 +19,7 @@ var M365AcctCredEnvs = []string{
// variables used for integration tests that use Graph Connector.
func NewM365Account(t *testing.T) account.Account {
cfg, err := readTestConfig()
require.NoError(t, err, "configuring m365 account from test configuration")
require.NoError(t, err, "configuring m365 account from test configuration", clues.ToCore(err))
acc, err := account.NewAccount(
account.ProviderM365,
@ -27,7 +28,7 @@ func NewM365Account(t *testing.T) account.Account {
AzureTenantID: cfg[TestCfgAzureTenantID],
},
)
require.NoError(t, err, "initializing account")
require.NoError(t, err, "initializing account", clues.ToCore(err))
return acc
}
@ -43,7 +44,7 @@ func NewMockM365Account(t *testing.T) account.Account {
AzureTenantID: "09876",
},
)
require.NoError(t, err, "initializing mock account")
require.NoError(t, err, "initializing mock account", clues.ToCore(err))
return acc
}

View File

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/require"
"golang.org/x/exp/maps"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/pkg/account"
)
@ -159,7 +160,7 @@ func readTestConfig() (map[string]string, error) {
// Returns a filepath string pointing to the location of the temp file.
func MakeTempTestConfigClone(t *testing.T, overrides map[string]string) (*viper.Viper, string) {
cfg, err := readTestConfig()
require.NoError(t, err, "reading tester config")
require.NoError(t, err, "reading tester config", clues.ToCore(err))
fName := filepath.Base(os.Getenv(EnvCorsoTestConfigFilePath))
if len(fName) == 0 || fName == "." || fName == "/" {
@ -170,7 +171,7 @@ func MakeTempTestConfigClone(t *testing.T, overrides map[string]string) (*viper.
tDirFp := filepath.Join(tDir, fName)
_, err = os.Create(tDirFp)
require.NoError(t, err, "creating temp test dir")
require.NoError(t, err, "creating temp test dir", clues.ToCore(err))
ext := filepath.Ext(fName)
vpr := viper.New()
@ -188,7 +189,8 @@ func MakeTempTestConfigClone(t *testing.T, overrides map[string]string) (*viper.
vpr.Set(k, v)
}
require.NoError(t, vpr.WriteConfig(), "writing temp dir viper config file")
err = vpr.WriteConfig()
require.NoError(t, err, "writing temp dir viper config file", clues.ToCore(err))
return vpr, tDirFp
}

View File

@ -5,6 +5,7 @@ import (
"os"
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/require"
)
@ -13,7 +14,7 @@ func LoadAFile(t *testing.T, fileName string) []byte {
bytes, err := os.ReadFile(fileName)
if err != nil {
f, err := os.Open(fileName)
require.NoError(t, err, "opening file: "+fileName)
require.NoError(t, err, "opening file:", fileName, clues.ToCore(err))
defer f.Close()
@ -25,7 +26,7 @@ func LoadAFile(t *testing.T, fileName string) []byte {
buffer = append(buffer, temp...)
}
require.NoError(t, reader.Err(), "reading file: "+fileName)
require.NoError(t, reader.Err(), "reading file:", fileName, clues.ToCore(reader.Err()))
return buffer
}

View File

@ -5,6 +5,7 @@ import (
"strings"
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/require"
)
@ -14,7 +15,7 @@ import (
// last-attempt fallback that will only work on alcion's testing org.
func M365TenantID(t *testing.T) string {
cfg, err := readTestConfig()
require.NoError(t, err, "retrieving m365 user id from test configuration")
require.NoError(t, err, "retrieving m365 user id from test configuration", clues.ToCore(err))
return cfg[TestCfgAzureTenantID]
}
@ -25,7 +26,7 @@ func M365TenantID(t *testing.T) string {
// last-attempt fallback that will only work on alcion's testing org.
func M365UserID(t *testing.T) string {
cfg, err := readTestConfig()
require.NoError(t, err, "retrieving m365 user id from test configuration")
require.NoError(t, err, "retrieving m365 user id from test configuration", clues.ToCore(err))
return cfg[TestCfgUserID]
}
@ -37,7 +38,7 @@ func M365UserID(t *testing.T) string {
// testing org.
func SecondaryM365UserID(t *testing.T) string {
cfg, err := readTestConfig()
require.NoError(t, err, "retrieving secondary m365 user id from test configuration")
require.NoError(t, err, "retrieving secondary m365 user id from test configuration", clues.ToCore(err))
return cfg[TestCfgSecondaryUserID]
}
@ -49,7 +50,7 @@ func SecondaryM365UserID(t *testing.T) string {
// testing org.
func LoadTestM365SiteID(t *testing.T) string {
cfg, err := readTestConfig()
require.NoError(t, err, "retrieving load test m365 site id from test configuration")
require.NoError(t, err, "retrieving load test m365 site id from test configuration", clues.ToCore(err))
// TODO: load test site id, not standard test site id
return cfg[TestCfgSiteID]
@ -62,7 +63,7 @@ func LoadTestM365SiteID(t *testing.T) string {
// testing org.
func LoadTestM365UserID(t *testing.T) string {
cfg, err := readTestConfig()
require.NoError(t, err, "retrieving load test m365 user id from test configuration")
require.NoError(t, err, "retrieving load test m365 user id from test configuration", clues.ToCore(err))
return cfg[TestCfgLoadTestUserID]
}
@ -72,7 +73,7 @@ func LoadTestM365UserID(t *testing.T) string {
// the delimiter must be a |.
func LoadTestM365OrgSites(t *testing.T) []string {
cfg, err := readTestConfig()
require.NoError(t, err, "retrieving load test m365 org sites from test configuration")
require.NoError(t, err, "retrieving load test m365 org sites from test configuration", clues.ToCore(err))
// TODO: proper handling of site slice input.
// sites := cfg[TestCfgLoadTestOrgSites]
@ -92,7 +93,7 @@ func LoadTestM365OrgSites(t *testing.T) []string {
// the delimiter may be either a , or |.
func LoadTestM365OrgUsers(t *testing.T) []string {
cfg, err := readTestConfig()
require.NoError(t, err, "retrieving load test m365 org users from test configuration")
require.NoError(t, err, "retrieving load test m365 org users from test configuration", clues.ToCore(err))
users := cfg[TestCfgLoadTestOrgUsers]
users = strings.TrimPrefix(users, "[")
@ -128,7 +129,7 @@ func LoadTestM365OrgUsers(t *testing.T) []string {
// last-attempt fallback that will only work on alcion's testing org.
func M365SiteID(t *testing.T) string {
cfg, err := readTestConfig()
require.NoError(t, err, "retrieving m365 site id from test configuration")
require.NoError(t, err, "retrieving m365 site id from test configuration", clues.ToCore(err))
return cfg[TestCfgSiteID]
}

View File

@ -5,6 +5,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/pkg/credentials"
"github.com/alcionai/corso/src/pkg/storage"
)
@ -27,7 +28,7 @@ func NewPrefixedS3Storage(t *testing.T) storage.Storage {
now := LogTimeOfTest(t)
cfg, err := readTestConfig()
require.NoError(t, err, "configuring storage from test file")
require.NoError(t, err, "configuring storage from test file", clues.ToCore(err))
prefix := testRepoRootPrefix + t.Name() + "-" + now
t.Logf("testing at s3 bucket [%s] prefix [%s]", cfg[TestCfgBucket], prefix)
@ -43,7 +44,7 @@ func NewPrefixedS3Storage(t *testing.T) storage.Storage {
KopiaCfgDir: t.TempDir(),
},
)
require.NoError(t, err, "creating storage")
require.NoError(t, err, "creating storage", clues.ToCore(err))
return st
}

View File

@ -3,6 +3,7 @@ package account
import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
)
@ -44,7 +45,7 @@ func (suite *AccountSuite) TestNewAccount() {
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
s, err := NewAccount(test.p, test.c)
test.errCheck(t, err)
test.errCheck(t, err, clues.ToCore(err))
// remaining tests are dependent upon error-free state
if test.c.err != nil {
return

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/credentials"
)
@ -30,7 +31,7 @@ var goodM365Config = account.M365Config{
func (suite *M365CfgSuite) TestM365Config_Config() {
m365 := goodM365Config
c, err := m365.StringConfig()
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
table := []struct {
key string
@ -50,9 +51,10 @@ func (suite *M365CfgSuite) TestAccount_M365Config() {
in := goodM365Config
a, err := account.NewAccount(account.ProviderM365, in)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
out, err := a.M365Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, in.AzureClientID, out.AzureClientID)
assert.Equal(t, in.AzureClientSecret, out.AzureClientSecret)
@ -113,8 +115,9 @@ func (suite *M365CfgSuite) TestAccount_M365Config_InvalidCases() {
for _, test := range table2 {
suite.T().Run(test.name, func(t *testing.T) {
st, err := account.NewAccount(account.ProviderUnknown, goodM365Config)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
test.amend(st)
_, err = st.M365Config()
assert.Error(t, err)
})

View File

@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path"
@ -31,7 +32,7 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
initial := time.Now()
nowStr := common.FormatTimeWith(initial, common.TabularOutput)
now, err := common.ParseTime(nowStr)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
table := []struct {
name string
@ -846,7 +847,7 @@ func makeItemPath(
category,
true,
elems...)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return p
}
@ -1001,10 +1002,10 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
item := test.input
err := UpdateItem(&item, test.repoPath)
test.errCheck(t, err)
test.errCheck(t, err, clues.ToCore(err))
if err != nil {
return
@ -1184,7 +1185,7 @@ func (suite *DetailsUnitSuite) TestDetails_Marshal() {
}}
bs, err := d.Marshal()
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
assert.NotEmpty(suite.T(), bs)
})
}
@ -1198,7 +1199,7 @@ func (suite *DetailsUnitSuite) TestUnarshalTo() {
}}
bs, err := orig.Marshal()
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
assert.NotEmpty(suite.T(), bs)
var result Details
@ -1206,7 +1207,7 @@ func (suite *DetailsUnitSuite) TestUnarshalTo() {
err = umt(io.NopCloser(bytes.NewReader(bs)))
t := suite.T()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, result)
assert.ElementsMatch(t, orig.Entries, result.Entries)
})

View File

@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/fault"
)
@ -78,7 +79,7 @@ func (suite *FaultErrorsUnitSuite) TestErr() {
n := fault.New(test.failFast)
require.NotNil(t, n)
require.NoError(t, n.Failure())
require.NoError(t, n.Failure(), clues.ToCore(n.Failure()))
require.Empty(t, n.Recovered())
e := n.Fail(test.fail)
@ -97,11 +98,11 @@ func (suite *FaultErrorsUnitSuite) TestFail() {
n := fault.New(false)
require.NotNil(t, n)
require.NoError(t, n.Failure())
require.NoError(t, n.Failure(), clues.ToCore(n.Failure()))
require.Empty(t, n.Recovered())
n.Fail(assert.AnError)
assert.Error(t, n.Failure())
assert.Error(t, n.Failure(), clues.ToCore(n.Failure()))
assert.Empty(t, n.Recovered())
n.Fail(assert.AnError)
@ -337,10 +338,10 @@ func (suite *FaultErrorsUnitSuite) TestMarshalUnmarshal() {
n.AddRecoverable(errors.New("2"))
bs, err := json.Marshal(n.Errors())
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
err = json.Unmarshal(bs, &fault.Errors{})
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
}
type legacyErrorsData struct {
@ -357,14 +358,14 @@ func (suite *FaultErrorsUnitSuite) TestUnmarshalLegacy() {
}
jsonStr, err := json.Marshal(oldData)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
t.Logf("jsonStr is %s\n", jsonStr)
um := fault.Errors{}
err = json.Unmarshal(jsonStr, &um)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
}
func (suite *FaultErrorsUnitSuite) TestTracker() {
@ -373,18 +374,18 @@ func (suite *FaultErrorsUnitSuite) TestTracker() {
eb := fault.New(false)
lb := eb.Local()
assert.NoError(t, lb.Failure())
assert.NoError(t, lb.Failure(), clues.ToCore(lb.Failure()))
assert.Empty(t, eb.Recovered())
lb.AddRecoverable(assert.AnError)
assert.NoError(t, lb.Failure())
assert.NoError(t, eb.Failure())
assert.NoError(t, lb.Failure(), clues.ToCore(lb.Failure()))
assert.NoError(t, eb.Failure(), clues.ToCore(eb.Failure()))
assert.NotEmpty(t, eb.Recovered())
ebt := fault.New(true)
lbt := ebt.Local()
assert.NoError(t, lbt.Failure())
assert.NoError(t, lbt.Failure(), clues.ToCore(lbt.Failure()))
assert.Empty(t, ebt.Recovered())
lbt.AddRecoverable(assert.AnError)
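
When the asserted error comes from an accessor such as Failure(), the hunks above call clues.ToCore on that same expression rather than introducing a variable, even where the failure is expected to be nil; the change assumes ToCore copes with a nil error, which every one of these assertions relies on. A sketch with a hypothetical errBus standing in for fault.Bus:

package example

import (
	"testing"

	"github.com/alcionai/clues"
	"github.com/stretchr/testify/assert"
)

// errBus is a hypothetical stand-in for fault.Bus; Failure returns the first
// non-recoverable error, or nil when none was recorded.
type errBus struct{ failure error }

func (b errBus) Failure() error { return b.failure }

func TestFailureToCore(t *testing.T) {
	eb := errBus{}

	// ToCore is called on the same accessor that gets asserted; with a nil
	// failure there is simply no extra data to print.
	assert.NoError(t, eb.Failure(), clues.ToCore(eb.Failure()))
}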

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path"
)
@ -49,7 +50,7 @@ func (suite *OneDrivePathSuite) Test_ToOneDrivePath() {
t := suite.T()
p, err := path.Build("tenant", "user", path.OneDriveService, path.FilesCategory, false, tt.pathElements...)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
got, err := path.ToOneDrivePath(p)
tt.errCheck(t, err)

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester"
)
@ -241,7 +242,7 @@ func (suite *PathUnitSuite) TestUnescapeAndAppend() {
t := suite.T()
p, err := Builder{}.UnescapeAndAppend(test.input...)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, test.expectedString, p.String())
})
@ -256,7 +257,7 @@ func (suite *PathUnitSuite) TestEscapedFailure() {
tmp := strings.ReplaceAll(target, "_", string(c))
_, err := Builder{}.UnescapeAndAppend("this", tmp, "path")
assert.Error(t, err, "path with unescaped %s did not error", string(c))
assert.Errorf(t, err, "path with unescaped %s did not error", string(c))
})
}
}
@ -270,13 +271,12 @@ func (suite *PathUnitSuite) TestBadEscapeSequenceErrors() {
tmp := strings.ReplaceAll(target, "_", string(c))
_, err := Builder{}.UnescapeAndAppend("this", tmp, "path")
assert.Error(
assert.Errorf(
t,
err,
"path with bad escape sequence %c%c did not error",
escapeCharacter,
c,
)
c)
})
}
}
@ -294,8 +294,7 @@ func (suite *PathUnitSuite) TestTrailingEscapeChar() {
assert.Error(
t,
err,
"path with trailing escape character did not error",
)
"path with trailing escape character did not error")
})
}
}
@ -337,7 +336,7 @@ func (suite *PathUnitSuite) TestElements() {
t := suite.T()
p, err := test.pathFunc(test.input)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, test.output, p.Elements())
})
@ -506,7 +505,7 @@ func (suite *PathUnitSuite) TestFolder() {
p, err := Builder{}.
Append("a", "b", "c").
ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return p
},
@ -519,7 +518,7 @@ func (suite *PathUnitSuite) TestFolder() {
p, err := Builder{}.
Append("a", "b", "c").
ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return p
},
@ -533,7 +532,7 @@ func (suite *PathUnitSuite) TestFolder() {
p, err := Builder{}.
Append("a/", "b", "c").
ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return p
},
@ -546,7 +545,7 @@ func (suite *PathUnitSuite) TestFolder() {
p, err := Builder{}.
Append("a/", "b", "c").
ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return p
},
@ -680,7 +679,7 @@ func (suite *PathUnitSuite) TestFromString() {
testPath := fmt.Sprintf(test.unescapedPath, service, cat)
p, err := FromDataLayerPath(testPath, item.isItem)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, service, p.Service(), "service")
assert.Equal(t, cat, p.Category(), "category")

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path"
)
@ -175,7 +176,7 @@ func (suite *DataLayerResourcePath) TestMailItemNoFolder() {
testUser,
true,
)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Empty(t, p.Folder(false))
assert.Empty(t, p.Folders())
@ -201,7 +202,7 @@ func (suite *DataLayerResourcePath) TestPopFront() {
path.EmailCategory,
m.isItem,
)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
b := p.PopFront()
assert.Equal(t, expected.String(), b.String())
@ -226,14 +227,14 @@ func (suite *DataLayerResourcePath) TestDir() {
path.EmailCategory,
m.isItem,
)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
for i := 1; i <= len(rest); i++ {
suite.Run(fmt.Sprintf("%v", i), func() {
t := suite.T()
p, err = p.Dir()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
expected := path.Builder{}.Append(elements...).Append(rest[:len(rest)-i]...)
assert.Equal(t, expected.String(), p.String())
@ -332,17 +333,15 @@ func (suite *DataLayerResourcePath) TestToServiceCategoryMetadataPath() {
test.category.String(),
}, "_"), func() {
t := suite.T()
pb := path.Builder{}.Append(test.postfix...)
p, err := pb.ToServiceCategoryMetadataPath(
tenant,
user,
test.service,
test.category,
false,
)
test.check(t, err)
false)
test.check(t, err, clues.ToCore(err))
if err != nil {
return
@ -391,10 +390,8 @@ func (suite *DataLayerResourcePath) TestToExchangePathForCategory() {
testTenant,
testUser,
test.category,
m.isItem,
)
test.check(t, err)
m.isItem)
test.check(t, err, clues.ToCore(err))
if err != nil {
return
@ -434,7 +431,7 @@ func (suite *PopulatedDataLayerResourcePath) SetupSuite() {
path.EmailCategory,
t,
)
require.NoError(suite.T(), err)
require.NoError(suite.T(), err, clues.ToCore(err))
suite.paths[t] = p
}
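
The table-driven tests in these files pass clues.ToCore(err) straight through fields like check and errCheck; that works without changing the field types because testify's assertion-func types accept variadic message arguments. A sketch of that wiring, with illustrative table contents:

package example

import (
	"errors"
	"testing"

	"github.com/alcionai/clues"
	"github.com/stretchr/testify/assert"
)

func TestTableDrivenErrCheck(t *testing.T) {
	table := []struct {
		name     string
		input    error
		errCheck assert.ErrorAssertionFunc
	}{
		{name: "no error", input: nil, errCheck: assert.NoError},
		{name: "some error", input: errors.New("bad"), errCheck: assert.Error},
	}

	for _, test := range table {
		t.Run(test.name, func(t *testing.T) {
			err := test.input
			// ToCore(err) rides along as an extra message argument, so every
			// entry's check gets the structured error data on failure.
			test.errCheck(t, err, clues.ToCore(err))
		})
	}
}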

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester"
)
@ -123,7 +124,7 @@ func (suite *ServiceCategoryUnitSuite) TestValidateServiceAndCategory() {
t := suite.T()
s, c, err := validateServiceAndCategoryStrings(test.service, test.category)
test.check(t, err)
test.check(t, err, clues.ToCore(err))
if err != nil {
return

View File

@ -12,6 +12,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
D "github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester"
@ -98,7 +99,7 @@ func initM365Repo(t *testing.T) (
}
repo, err := repository.Initialize(ctx, ac, st, opts)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return ctx, repo, ac, st
}
@ -120,7 +121,7 @@ func runLoadTest(
//revive:enable:context-as-argument
t.Run(prefix+"_load_test_main", func(t *testing.T) {
b, err := r.NewBackup(ctx, bupSel)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
runBackupLoadTest(t, ctx, &b, service, usersUnderTest)
bid := string(b.Results.BackupID)
@ -152,7 +153,7 @@ func runRestoreLoadTest(
dest := tester.DefaultTestRestoreDestination()
rst, err := r.NewRestore(ctx, backupID, restSel, dest)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
doRestoreLoadTest(t, ctx, rst, service, bup.Results.ItemsWritten, usersUnderTest)
})
@ -177,7 +178,7 @@ func runBackupLoadTest(
err = b.Run(ctx)
})
require.NoError(t, err, "running backup")
require.NoError(t, err, "running backup", clues.ToCore(err))
require.NotEmpty(t, b.Results, "has results after run")
assert.NotEmpty(t, b.Results.BackupID, "has an ID after run")
assert.Equal(t, b.Status, operations.Completed, "backup status")
@ -185,7 +186,7 @@ func runBackupLoadTest(
assert.Less(t, 0, b.Results.ItemsWritten, "items written")
assert.Less(t, int64(0), b.Results.BytesUploaded, "bytes uploaded")
assert.Equal(t, len(users), b.Results.ResourceOwners, "resource owners")
assert.NoError(t, b.Errors.Failure(), "non-recoverable error")
assert.NoError(t, b.Errors.Failure(), "non-recoverable error", clues.ToCore(b.Errors.Failure()))
assert.Empty(t, b.Errors.Recovered(), "recoverable errors")
})
}
@ -209,7 +210,7 @@ func runBackupListLoadTest(
bs, err = r.BackupsByTag(ctx)
})
require.NoError(t, err, "retrieving backups")
require.NoError(t, err, "retrieving backups", clues.ToCore(err))
require.Less(t, 0, len(bs), "at least one backup is recorded")
var found bool
@ -250,8 +251,8 @@ func runBackupDetailsLoadTest(
ds, b, errs = r.BackupDetails(ctx, backupID)
})
require.NoError(t, errs.Failure(), "retrieving details in backup "+backupID)
require.Empty(t, errs.Recovered(), "retrieving details in backup "+backupID)
require.NoError(t, errs.Failure(), "retrieving details in backup", backupID, clues.ToCore(errs.Failure()))
require.Empty(t, errs.Recovered(), "retrieving details in backup", backupID)
require.NotNil(t, ds, "backup details must exist")
require.NotNil(t, b, "backup must exist")
@ -284,7 +285,7 @@ func doRestoreLoadTest(
ds, err = r.Run(ctx)
})
require.NoError(t, err, "running restore")
require.NoError(t, err, "running restore", clues.ToCore(err))
require.NotEmpty(t, r.Results, "has results after run")
require.NotNil(t, ds, "has restored details")
assert.Equal(t, r.Status, operations.Completed, "restore status")


@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control"
@ -51,9 +52,10 @@ func (suite *RepositorySuite) TestInitialize() {
defer flush()
st, err := test.storage()
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
_, err = repository.Initialize(ctx, test.account, st, control.Options{})
test.errCheck(t, err, "")
test.errCheck(t, err, clues.ToCore(err))
})
}
}
@ -84,9 +86,10 @@ func (suite *RepositorySuite) TestConnect() {
defer flush()
st, err := test.storage()
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
_, err = repository.Connect(ctx, test.account, st, control.Options{})
test.errCheck(t, err)
test.errCheck(t, err, clues.ToCore(err))
})
}
}
@ -133,11 +136,12 @@ func (suite *RepositoryIntegrationSuite) TestInitialize() {
r, err := repository.Initialize(ctx, test.account, st, control.Options{})
if err == nil {
defer func() {
assert.NoError(t, r.Close(ctx))
err := r.Close(ctx)
assert.NoError(t, err, clues.ToCore(err))
}()
}
test.errCheck(t, err)
test.errCheck(t, err, clues.ToCore(err))
})
}
}
@ -152,11 +156,11 @@ func (suite *RepositoryIntegrationSuite) TestConnect() {
st := tester.NewPrefixedS3Storage(t)
_, err := repository.Initialize(ctx, account.Account{}, st, control.Options{})
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
// now re-connect
_, err = repository.Connect(ctx, account.Account{}, st, control.Options{})
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
}
func (suite *RepositoryIntegrationSuite) TestConnect_sameID() {
@ -169,15 +173,16 @@ func (suite *RepositoryIntegrationSuite) TestConnect_sameID() {
st := tester.NewPrefixedS3Storage(t)
r, err := repository.Initialize(ctx, account.Account{}, st, control.Options{})
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
oldID := r.GetID()
require.NoError(t, r.Close(ctx))
err = r.Close(ctx)
require.NoError(t, err, clues.ToCore(err))
// now re-connect
r, err = repository.Connect(ctx, account.Account{}, st, control.Options{})
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, oldID, r.GetID())
}
@ -193,10 +198,10 @@ func (suite *RepositoryIntegrationSuite) TestNewBackup() {
st := tester.NewPrefixedS3Storage(t)
r, err := repository.Initialize(ctx, acct, st, control.Options{})
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
bo, err := r.NewBackup(ctx, selectors.Selector{DiscreteOwner: "test"})
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, bo)
}
@ -213,10 +218,10 @@ func (suite *RepositoryIntegrationSuite) TestNewRestore() {
st := tester.NewPrefixedS3Storage(t)
r, err := repository.Initialize(ctx, acct, st, control.Options{})
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
ro, err := r.NewRestore(ctx, "backup-id", selectors.Selector{DiscreteOwner: "test"}, dest)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, ro)
}


@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/tester"
)
@ -35,19 +36,23 @@ func (suite *RepositoryModelSuite) TestWriteGetModel() {
kopiaRef = kopia.NewConn(s)
)
require.NoError(t, kopiaRef.Initialize(ctx))
require.NoError(t, kopiaRef.Connect(ctx))
err := kopiaRef.Initialize(ctx)
require.NoError(t, err, clues.ToCore(err))
err = kopiaRef.Connect(ctx)
require.NoError(t, err, clues.ToCore(err))
defer kopiaRef.Close(ctx)
ms, err := kopia.NewModelStore(kopiaRef)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
defer ms.Close(ctx)
require.NoError(t, newRepoModel(ctx, ms, "fnords"))
err = newRepoModel(ctx, ms, "fnords")
require.NoError(t, err, clues.ToCore(err))
got, err := getRepoModel(ctx, ms)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, "fnords", string(got.ID))
}


@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details"
@ -37,7 +38,7 @@ func (suite *ExchangeSelectorSuite) TestToExchangeBackup() {
eb := NewExchangeBackup(nil)
s := eb.Selector
eb, err := s.ToExchangeBackup()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, eb.Service, ServiceExchange)
assert.NotZero(t, eb.Scopes())
}
@ -54,7 +55,7 @@ func (suite *ExchangeSelectorSuite) TestToExchangeRestore() {
eb := NewExchangeRestore(nil)
s := eb.Selector
eb, err := s.ToExchangeRestore()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, eb.Service, ServiceExchange)
assert.NotZero(t, eb.Scopes())
}


@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/filters"
"github.com/alcionai/corso/src/pkg/path"
@ -188,7 +189,7 @@ func scopeMustHave[T scopeT](t *testing.T, sc T, m map[categorizer]string) {
// stubbing out static values where necessary.
func stubPath(t *testing.T, user string, s []string, cat path.CategoryType) path.Path {
pth, err := path.Build("tid", user, path.ExchangeService, cat, true, s...)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
return pth
}


@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details"
@ -35,7 +36,7 @@ func (suite *OneDriveSelectorSuite) TestToOneDriveBackup() {
ob := NewOneDriveBackup(Any())
s := ob.Selector
ob, err := s.ToOneDriveBackup()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, ob.Service, ServiceOneDrive)
assert.NotZero(t, ob.Scopes())
}
@ -155,7 +156,7 @@ func (suite *OneDriveSelectorSuite) TestToOneDriveRestore() {
eb := NewOneDriveRestore(Any())
s := eb.Selector
or, err := s.ToOneDriveRestore()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, or.Service, ServiceOneDrive)
assert.NotZero(t, or.Scopes())
}
@ -265,7 +266,7 @@ func (suite *OneDriveSelectorSuite) TestOneDriveCategory_PathValues() {
elems := []string{"drive", "driveID", "root:", "dir1", "dir2", fileName + "-id"}
filePath, err := path.Build("tenant", "user", path.OneDriveService, path.FilesCategory, true, elems...)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
expected := map[categorizer][]string{
OneDriveFolder: {"dir1/dir2"},


@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault"
@ -288,7 +289,7 @@ func (suite *SelectorScopesSuite) TestReduce() {
dataCats,
errs)
require.NotNil(t, result)
require.NoError(t, errs.Failure(), "no recoverable errors")
require.NoError(t, errs.Failure(), "no recoverable errors", clues.ToCore(errs.Failure()))
assert.Len(t, result.Entries, test.expectLen)
})
}


@ -6,6 +6,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/filters"
"github.com/alcionai/corso/src/pkg/path"
@ -29,7 +30,7 @@ func (suite *SelectorSuite) TestNewSelector() {
func (suite *SelectorSuite) TestBadCastErr() {
err := badCastErr(ServiceUnknown, ServiceExchange)
assert.Error(suite.T(), err)
assert.Error(suite.T(), err, clues.ToCore(err))
}
func (suite *SelectorSuite) TestResourceOwnersIn() {
@ -365,13 +366,14 @@ func (suite *SelectorSuite) TestPathCategories_includes() {
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
obj := test.getSelector(t)
cats, err := obj.PathCategories()
for _, entry := range cats.Includes {
assert.NotEqual(t, entry, path.UnknownCategory)
}
test.isErr(t, err)
test.isErr(t, err, clues.ToCore(err))
})
}
}


@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details"
@ -35,7 +36,7 @@ func (suite *SharePointSelectorSuite) TestToSharePointBackup() {
ob := NewSharePointBackup(nil)
s := ob.Selector
ob, err := s.ToSharePointBackup()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, ob.Service, ServiceSharePoint)
assert.NotZero(t, ob.Scopes())
}
@ -191,7 +192,7 @@ func (suite *SharePointSelectorSuite) TestToSharePointRestore() {
eb := NewSharePointRestore(nil)
s := eb.Selector
or, err := s.ToSharePointRestore()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, or.Service, ServiceSharePoint)
assert.NotZero(t, or.Scopes())
}
@ -364,7 +365,7 @@ func (suite *SharePointSelectorSuite) TestSharePointCategory_PathValues() {
test.sc.PathType(),
true,
elems...)
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
ent := details.DetailsEntry{
RepoRef: itemPath.String(),


@ -6,6 +6,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/fault"
)
@ -33,7 +34,7 @@ func (suite *M365IntegrationSuite) TestUsers() {
)
users, err := Users(ctx, acct, fault.New(true))
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.NotEmpty(t, users)
for _, u := range users {
@ -57,7 +58,7 @@ func (suite *M365IntegrationSuite) TestSites() {
)
sites, err := Sites(ctx, acct, fault.New(true))
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.NotEmpty(t, sites)
for _, s := range sites {


@ -6,6 +6,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/pkg/credentials"
"github.com/alcionai/corso/src/pkg/storage"
)
@ -27,7 +28,7 @@ var goodCommonConfig = storage.CommonConfig{
func (suite *CommonCfgSuite) TestCommonConfig_Config() {
cfg := goodCommonConfig
c, err := cfg.StringConfig()
assert.NoError(suite.T(), err)
assert.NoError(suite.T(), err, clues.ToCore(err))
table := []struct {
key string
@ -47,9 +48,9 @@ func (suite *CommonCfgSuite) TestStorage_CommonConfig() {
in := goodCommonConfig
s, err := storage.NewStorage(storage.ProviderUnknown, in)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
out, err := s.CommonConfig()
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.Equal(t, in.CorsoPassphrase, out.CorsoPassphrase)
}
@ -84,7 +85,7 @@ func (suite *CommonCfgSuite) TestStorage_CommonConfig_InvalidCases() {
for _, test := range table2 {
suite.T().Run(test.name, func(t *testing.T) {
st, err := storage.NewStorage(storage.ProviderUnknown, goodCommonConfig)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
test.amend(st)
_, err = st.CommonConfig()
assert.Error(t, err)


@ -3,6 +3,7 @@ package storage
import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
@ -36,8 +37,9 @@ var (
func (suite *S3CfgSuite) TestS3Config_Config() {
s3 := goodS3Config
c, err := s3.StringConfig()
assert.NoError(suite.T(), err)
assert.NoError(suite.T(), err, clues.ToCore(err))
table := []struct {
key string
@ -57,9 +59,9 @@ func (suite *S3CfgSuite) TestStorage_S3Config() {
in := goodS3Config
s, err := NewStorage(ProviderS3, in)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
out, err := s.S3Config()
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
assert.Equal(t, in.Bucket, out.Bucket)
assert.Equal(t, in.Endpoint, out.Endpoint)
@ -104,7 +106,7 @@ func (suite *S3CfgSuite) TestStorage_S3Config_invalidCases() {
for _, test := range table2 {
suite.T().Run(test.name, func(t *testing.T) {
st, err := NewStorage(ProviderUnknown, goodS3Config)
assert.NoError(t, err)
assert.NoError(t, err, clues.ToCore(err))
test.amend(st)
_, err = st.S3Config()
assert.Error(t, err)
@ -149,7 +151,7 @@ func (suite *S3CfgSuite) TestStorage_S3Config_StringConfig() {
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
result, err := test.input.StringConfig()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, test.expect, result)
})
}


@ -3,6 +3,7 @@ package storage
import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
)
@ -39,11 +40,13 @@ func (suite *StorageSuite) TestNewStorage() {
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
s, err := NewStorage(test.p, test.c)
test.errCheck(t, err)
test.errCheck(t, err, clues.ToCore(err))
// remaining tests are dependent upon error-free state
if test.c.err != nil {
return
}
assert.Equalf(t,
test.p,
s.Provider,


@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup"
@ -64,13 +65,15 @@ func (suite *StoreBackupUnitSuite) TestGetBackup() {
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
sm := &store.Wrapper{Storer: test.mock}
result, err := sm.GetBackup(ctx, model.StableID(uuid.NewString()))
test.expect(t, err)
test.expect(t, err, clues.ToCore(err))
if err != nil {
return
}
assert.Equal(t, bu.ID, result.ID)
})
}
@ -99,13 +102,15 @@ func (suite *StoreBackupUnitSuite) TestGetBackups() {
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
sm := &store.Wrapper{Storer: test.mock}
result, err := sm.GetBackups(ctx)
test.expect(t, err)
test.expect(t, err, clues.ToCore(err))
if err != nil {
return
}
assert.Equal(t, 1, len(result))
assert.Equal(t, bu.ID, result[0].ID)
})
@ -135,10 +140,10 @@ func (suite *StoreBackupUnitSuite) TestDeleteBackup() {
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
sm := &store.Wrapper{Storer: test.mock}
err := sm.DeleteBackup(ctx, model.StableID(uuid.NewString()))
test.expect(t, err)
test.expect(t, err, clues.ToCore(err))
})
}
}
@ -166,13 +171,15 @@ func (suite *StoreBackupUnitSuite) TestGetDetailsIDFromBackupID() {
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
store := &store.Wrapper{Storer: test.mock}
dResult, bResult, err := store.GetDetailsIDFromBackupID(ctx, model.StableID(uuid.NewString()))
test.expect(t, err)
test.expect(t, err, clues.ToCore(err))
if err != nil {
return
}
assert.Equal(t, bu.DetailsID, dResult)
assert.Equal(t, bu.ID, bResult.ID)
})