Merge branch 'main' into 2447-onedrive-folder-order-2

Ashlie Martinez 2023-02-22 12:05:46 -08:00
commit e3488b9a5c
48 changed files with 788 additions and 355 deletions


@ -39,7 +39,7 @@ var backupDataSets = []path.CategoryType{email, contacts, events}
// ---------------------------------------------------------------------------
type NoBackupExchangeIntegrationSuite struct {
suite.Suite
tester.Suite
acct account.Account
st storage.Storage
vpr *viper.Viper
@ -50,13 +50,11 @@ type NoBackupExchangeIntegrationSuite struct {
}
func TestNoBackupExchangeIntegrationSuite(t *testing.T) {
tester.RunOnAny(
suite.Run(t, &NoBackupExchangeIntegrationSuite{Suite: tester.NewIntegrationSuite(
t,
tester.CorsoCITests,
[][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
tester.CorsoCLITests,
tester.CorsoCLIBackupTests)
suite.Run(t, new(NoBackupExchangeIntegrationSuite))
tester.CorsoCLIBackupTests)})
}
func (suite *NoBackupExchangeIntegrationSuite) SetupSuite() {
@ -65,8 +63,6 @@ func (suite *NoBackupExchangeIntegrationSuite) SetupSuite() {
defer flush()
tester.MustGetEnvSets(t, tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs)
// prepare common details
suite.acct = tester.NewM365Account(t)
suite.st = tester.NewPrefixedS3Storage(t)
@ -123,7 +119,7 @@ func (suite *NoBackupExchangeIntegrationSuite) TestExchangeBackupListCmd_empty()
// ---------------------------------------------------------------------------
type BackupExchangeIntegrationSuite struct {
suite.Suite
tester.Suite
acct account.Account
st storage.Storage
vpr *viper.Viper
@ -133,13 +129,11 @@ type BackupExchangeIntegrationSuite struct {
}
func TestBackupExchangeIntegrationSuite(t *testing.T) {
tester.RunOnAny(
suite.Run(t, &BackupExchangeIntegrationSuite{Suite: tester.NewIntegrationSuite(
t,
tester.CorsoCITests,
[][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
tester.CorsoCLITests,
tester.CorsoCLIBackupTests)
suite.Run(t, new(BackupExchangeIntegrationSuite))
tester.CorsoCLIBackupTests)})
}
func (suite *BackupExchangeIntegrationSuite) SetupSuite() {
@ -148,8 +142,6 @@ func (suite *BackupExchangeIntegrationSuite) SetupSuite() {
defer flush()
tester.MustGetEnvSets(t, tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs)
// prepare common details
suite.acct = tester.NewM365Account(t)
suite.st = tester.NewPrefixedS3Storage(t)
@ -179,7 +171,9 @@ func (suite *BackupExchangeIntegrationSuite) TestExchangeBackupCmd() {
for _, set := range backupDataSets {
recorder.Reset()
suite.T().Run(set.String(), func(t *testing.T) {
suite.Run(set.String(), func() {
t := suite.T()
ctx, flush := tester.NewContext()
ctx = config.SetViper(ctx, suite.vpr)
defer flush()
@ -212,7 +206,7 @@ func (suite *BackupExchangeIntegrationSuite) TestExchangeBackupCmd() {
// ---------------------------------------------------------------------------
type PreparedBackupExchangeIntegrationSuite struct {
suite.Suite
tester.Suite
acct account.Account
st storage.Storage
vpr *viper.Viper
@ -224,18 +218,15 @@ type PreparedBackupExchangeIntegrationSuite struct {
}
func TestPreparedBackupExchangeIntegrationSuite(t *testing.T) {
tester.RunOnAny(
suite.Run(t, &PreparedBackupExchangeIntegrationSuite{Suite: tester.NewIntegrationSuite(
t,
tester.CorsoCITests,
[][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
tester.CorsoCLITests,
tester.CorsoCLIBackupTests)
suite.Run(t, new(PreparedBackupExchangeIntegrationSuite))
tester.CorsoCLIBackupTests)})
}
func (suite *PreparedBackupExchangeIntegrationSuite) SetupSuite() {
t := suite.T()
tester.MustGetEnvSets(t, tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs)
// prepare common details
suite.acct = tester.NewM365Account(t)
@ -309,7 +300,9 @@ func (suite *PreparedBackupExchangeIntegrationSuite) TestExchangeListCmd() {
for _, set := range backupDataSets {
suite.recorder.Reset()
suite.T().Run(set.String(), func(t *testing.T) {
suite.Run(set.String(), func() {
t := suite.T()
ctx, flush := tester.NewContext()
ctx = config.SetViper(ctx, suite.vpr)
defer flush()
@ -337,7 +330,9 @@ func (suite *PreparedBackupExchangeIntegrationSuite) TestExchangeListCmd_singleI
for _, set := range backupDataSets {
suite.recorder.Reset()
suite.T().Run(set.String(), func(t *testing.T) {
suite.Run(set.String(), func() {
t := suite.T()
ctx, flush := tester.NewContext()
ctx = config.SetViper(ctx, suite.vpr)
defer flush()
@ -366,7 +361,9 @@ func (suite *PreparedBackupExchangeIntegrationSuite) TestExchangeListCmd_singleI
func (suite *PreparedBackupExchangeIntegrationSuite) TestExchangeListCmd_badID() {
for _, set := range backupDataSets {
suite.T().Run(set.String(), func(t *testing.T) {
suite.Run(set.String(), func() {
t := suite.T()
ctx, flush := tester.NewContext()
ctx = config.SetViper(ctx, suite.vpr)
defer flush()
@ -389,7 +386,9 @@ func (suite *PreparedBackupExchangeIntegrationSuite) TestExchangeDetailsCmd() {
for _, set := range backupDataSets {
suite.recorder.Reset()
suite.T().Run(set.String(), func(t *testing.T) {
suite.Run(set.String(), func() {
t := suite.T()
ctx, flush := tester.NewContext()
ctx = config.SetViper(ctx, suite.vpr)
defer flush()
@ -427,8 +426,8 @@ func (suite *PreparedBackupExchangeIntegrationSuite) TestExchangeDetailsCmd() {
continue
}
t.Run(fmt.Sprintf("detail %d", i), func(t *testing.T) {
assert.Contains(t, result, ent.ShortRef)
suite.Run(fmt.Sprintf("detail %d", i), func() {
assert.Contains(suite.T(), result, ent.ShortRef)
})
i++
@ -445,7 +444,7 @@ func (suite *PreparedBackupExchangeIntegrationSuite) TestExchangeDetailsCmd() {
// ---------------------------------------------------------------------------
type BackupDeleteExchangeIntegrationSuite struct {
suite.Suite
tester.Suite
acct account.Account
st storage.Storage
vpr *viper.Viper
@ -455,18 +454,17 @@ type BackupDeleteExchangeIntegrationSuite struct {
}
func TestBackupDeleteExchangeIntegrationSuite(t *testing.T) {
tester.RunOnAny(
t,
tester.CorsoCITests,
tester.CorsoCLITests,
tester.CorsoCLIBackupTests)
suite.Run(t, new(BackupDeleteExchangeIntegrationSuite))
suite.Run(t, &BackupDeleteExchangeIntegrationSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
tester.CorsoCLITests,
tester.CorsoCLIBackupTests),
})
}
func (suite *BackupDeleteExchangeIntegrationSuite) SetupSuite() {
t := suite.T()
tester.MustGetEnvSets(t, tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs)
// prepare common details
suite.acct = tester.NewM365Account(t)
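
The dominant change in this commit follows one template, shown below as a minimal sketch. The suite and test names are hypothetical; the tester package signatures (NewIntegrationSuite, NewUnitSuite, the suite-level Run) are assumed from their usage in this diff. testify's bare suite.Suite is replaced by tester.Suite, whose constructor absorbs the old RunOnAny and MustGetEnvSets guards, and subtests move from suite.T().Run to suite.Run.

package example_test // illustrative file, not part of this commit

import (
	"testing"

	"github.com/stretchr/testify/suite"

	"github.com/alcionai/corso/src/internal/tester"
)

type ExampleIntegrationSuite struct {
	tester.Suite
}

func TestExampleIntegrationSuite(t *testing.T) {
	suite.Run(t, &ExampleIntegrationSuite{Suite: tester.NewIntegrationSuite(
		t,
		[][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
		tester.CorsoCLITests,
		tester.CorsoCLIBackupTests)})
}

func (suite *ExampleIntegrationSuite) TestSomething() {
	// suite.Run replaces suite.T().Run; testify rebinds the suite's T to the
	// subtest, so fetch t inside the closure rather than taking a parameter.
	suite.Run("case", func() {
		t := suite.T()
		t.Log("runs only when both credential env sets are present")
	})
}

Unit suites follow the same shape with tester.NewUnitSuite(t), which needs no env sets or category flags.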


@ -14,11 +14,11 @@ import (
)
type ExchangeSuite struct {
suite.Suite
tester.Suite
}
func TestExchangeSuite(t *testing.T) {
suite.Run(t, new(ExchangeSuite))
suite.Run(t, &ExchangeSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *ExchangeSuite) TestAddExchangeCommands() {
@ -49,7 +49,9 @@ func (suite *ExchangeSuite) TestAddExchangeCommands() {
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
suite.Run(test.name, func() {
t := suite.T()
cmd := &cobra.Command{Use: test.use}
c := addExchangeCommands(cmd)
@ -94,7 +96,9 @@ func (suite *ExchangeSuite) TestValidateBackupCreateFlags() {
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
suite.Run(test.name, func() {
t := suite.T()
test.expect(t, validateExchangeBackupCreateFlags(test.user, test.data))
})
}
@ -206,7 +210,9 @@ func (suite *ExchangeSuite) TestExchangeBackupCreateSelectors() {
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
suite.Run(test.name, func() {
t := suite.T()
sel := exchangeBackupCreateSelectors(test.user, test.data)
assert.Equal(t, test.expectIncludeLen, len(sel.Includes))
})
@ -218,7 +224,9 @@ func (suite *ExchangeSuite) TestExchangeBackupDetailsSelectors() {
defer flush()
for _, test := range testdata.ExchangeOptionDetailLookups {
suite.T().Run(test.Name, func(t *testing.T) {
suite.Run(test.Name, func() {
t := suite.T()
output, err := runDetailsExchangeCmd(
ctx,
test.BackupGetter,
@ -235,7 +243,9 @@ func (suite *ExchangeSuite) TestExchangeBackupDetailsSelectorsBadFormats() {
defer flush()
for _, test := range testdata.BadExchangeOptionsFormats {
suite.T().Run(test.Name, func(t *testing.T) {
suite.Run(test.Name, func() {
t := suite.T()
output, err := runDetailsExchangeCmd(
ctx,
test.BackupGetter,


@ -29,7 +29,7 @@ import (
// ---------------------------------------------------------------------------
type NoBackupOneDriveIntegrationSuite struct {
suite.Suite
tester.Suite
acct account.Account
st storage.Storage
vpr *viper.Viper
@ -40,9 +40,13 @@ type NoBackupOneDriveIntegrationSuite struct {
}
func TestNoBackupOneDriveIntegrationSuite(t *testing.T) {
tester.RunOnAny(t, tester.CorsoCITests, tester.CorsoCLITests, tester.CorsoCLIBackupTests)
suite.Run(t, new(NoBackupOneDriveIntegrationSuite))
suite.Run(t, &NoBackupOneDriveIntegrationSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
tester.CorsoCLITests,
tester.CorsoCLIBackupTests),
})
}
func (suite *NoBackupOneDriveIntegrationSuite) SetupSuite() {
@ -51,8 +55,6 @@ func (suite *NoBackupOneDriveIntegrationSuite) SetupSuite() {
defer flush()
tester.MustGetEnvSets(t, tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs)
// prepare common details
suite.acct = tester.NewM365Account(t)
suite.st = tester.NewPrefixedS3Storage(t)
@ -114,7 +116,7 @@ func (suite *NoBackupOneDriveIntegrationSuite) TestOneDriveBackupListCmd_empty()
// ---------------------------------------------------------------------------
type BackupDeleteOneDriveIntegrationSuite struct {
suite.Suite
tester.Suite
acct account.Account
st storage.Storage
vpr *viper.Viper
@ -125,18 +127,17 @@ type BackupDeleteOneDriveIntegrationSuite struct {
}
func TestBackupDeleteOneDriveIntegrationSuite(t *testing.T) {
tester.RunOnAny(
t,
tester.CorsoCITests,
tester.CorsoCLITests,
tester.CorsoCLIBackupTests)
suite.Run(t, new(BackupDeleteOneDriveIntegrationSuite))
suite.Run(t, &BackupDeleteOneDriveIntegrationSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
tester.CorsoCLITests,
tester.CorsoCLIBackupTests),
})
}
func (suite *BackupDeleteOneDriveIntegrationSuite) SetupSuite() {
t := suite.T()
tester.MustGetEnvSets(t, tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs)
// prepare common details
suite.acct = tester.NewM365Account(t)


@ -13,11 +13,11 @@ import (
)
type OneDriveSuite struct {
suite.Suite
tester.Suite
}
func TestOneDriveSuite(t *testing.T) {
suite.Run(t, new(OneDriveSuite))
suite.Run(t, &OneDriveSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *OneDriveSuite) TestAddOneDriveCommands() {
@ -48,7 +48,9 @@ func (suite *OneDriveSuite) TestAddOneDriveCommands() {
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
suite.Run(test.name, func() {
t := suite.T()
cmd := &cobra.Command{Use: test.use}
c := addOneDriveCommands(cmd)
@ -82,8 +84,8 @@ func (suite *OneDriveSuite) TestValidateOneDriveBackupCreateFlags() {
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
test.expect(t, validateOneDriveBackupCreateFlags(test.user))
suite.Run(test.name, func() {
test.expect(suite.T(), validateOneDriveBackupCreateFlags(test.user))
})
}
}
@ -93,7 +95,9 @@ func (suite *OneDriveSuite) TestOneDriveBackupDetailsSelectors() {
defer flush()
for _, test := range testdata.OneDriveOptionDetailLookups {
suite.T().Run(test.Name, func(t *testing.T) {
suite.Run(test.Name, func() {
t := suite.T()
output, err := runDetailsOneDriveCmd(
ctx,
test.BackupGetter,
@ -110,7 +114,9 @@ func (suite *OneDriveSuite) TestOneDriveBackupDetailsSelectorsBadFormats() {
defer flush()
for _, test := range testdata.BadOneDriveOptionsFormats {
suite.T().Run(test.Name, func(t *testing.T) {
suite.Run(test.Name, func() {
t := suite.T()
output, err := runDetailsOneDriveCmd(
ctx,
test.BackupGetter,


@ -29,7 +29,7 @@ import (
// ---------------------------------------------------------------------------
type NoBackupSharePointIntegrationSuite struct {
suite.Suite
tester.Suite
acct account.Account
st storage.Storage
vpr *viper.Viper
@ -40,9 +40,10 @@ type NoBackupSharePointIntegrationSuite struct {
}
func TestNoBackupSharePointIntegrationSuite(t *testing.T) {
tester.RunOnAny(t, tester.CorsoCITests, tester.CorsoCLITests, tester.CorsoCLIBackupTests)
suite.Run(t, new(NoBackupSharePointIntegrationSuite))
suite.Run(t, &NoBackupSharePointIntegrationSuite{Suite: tester.NewIntegrationSuite(
t,
[][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
tester.CorsoCLITests, tester.CorsoCLIBackupTests)})
}
func (suite *NoBackupSharePointIntegrationSuite) SetupSuite() {
@ -51,8 +52,6 @@ func (suite *NoBackupSharePointIntegrationSuite) SetupSuite() {
defer flush()
tester.MustGetEnvSets(t, tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs)
// prepare common details
suite.acct = tester.NewM365Account(t)
suite.st = tester.NewPrefixedS3Storage(t)
@ -108,7 +107,7 @@ func (suite *NoBackupSharePointIntegrationSuite) TestSharePointBackupListCmd_emp
// ---------------------------------------------------------------------------
type BackupDeleteSharePointIntegrationSuite struct {
suite.Suite
tester.Suite
acct account.Account
st storage.Storage
vpr *viper.Viper
@ -119,14 +118,16 @@ type BackupDeleteSharePointIntegrationSuite struct {
}
func TestBackupDeleteSharePointIntegrationSuite(t *testing.T) {
tester.RunOnAny(t, tester.CorsoCITests, tester.CorsoCLITests, tester.CorsoCLIBackupTests)
suite.Run(t, new(BackupDeleteSharePointIntegrationSuite))
suite.Run(t, &BackupDeleteSharePointIntegrationSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
tester.CorsoCLITests, tester.CorsoCLIBackupTests),
})
}
func (suite *BackupDeleteSharePointIntegrationSuite) SetupSuite() {
t := suite.T()
tester.MustGetEnvSets(t, tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs)
// prepare common details
suite.acct = tester.NewM365Account(t)


@ -16,11 +16,11 @@ import (
)
type SharePointSuite struct {
suite.Suite
tester.Suite
}
func TestSharePointSuite(t *testing.T) {
suite.Run(t, new(SharePointSuite))
suite.Run(t, &SharePointSuite{tester.NewUnitSuite(t)})
}
func (suite *SharePointSuite) TestAddSharePointCommands() {
@ -51,7 +51,9 @@ func (suite *SharePointSuite) TestAddSharePointCommands() {
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
suite.Run(test.name, func() {
t := suite.T()
cmd := &cobra.Command{Use: test.use}
c := addSharePointCommands(cmd)
@ -97,8 +99,8 @@ func (suite *SharePointSuite) TestValidateSharePointBackupCreateFlags() {
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
test.expect(t, validateSharePointBackupCreateFlags(test.site, test.weburl, nil))
suite.Run(test.name, func() {
test.expect(suite.T(), validateSharePointBackupCreateFlags(test.site, test.weburl, nil))
})
}
}
@ -191,7 +193,9 @@ func (suite *SharePointSuite) TestSharePointBackupCreateSelectors() {
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext()
defer flush()
@ -208,7 +212,9 @@ func (suite *SharePointSuite) TestSharePointBackupDetailsSelectors() {
defer flush()
for _, test := range testdata.SharePointOptionDetailLookups {
suite.T().Run(test.Name, func(t *testing.T) {
suite.Run(test.Name, func() {
t := suite.T()
output, err := runDetailsSharePointCmd(
ctx,
test.BackupGetter,
@ -225,7 +231,9 @@ func (suite *SharePointSuite) TestSharePointBackupDetailsSelectorsBadFormats() {
defer flush()
for _, test := range testdata.BadSharePointOptionsFormats {
suite.T().Run(test.Name, func(t *testing.T) {
suite.Run(test.Name, func() {
t := suite.T()
output, err := runDetailsSharePointCmd(
ctx,
test.BackupGetter,


@ -8,14 +8,15 @@ import (
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/internal/tester"
)
type CLISuite struct {
suite.Suite
tester.Suite
}
func TestCLISuite(t *testing.T) {
suite.Run(t, new(CLISuite))
suite.Run(t, &CLISuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *CLISuite) TestAddCommands_noPanics() {


@ -31,11 +31,11 @@ const (
)
type ConfigSuite struct {
suite.Suite
tester.Suite
}
func TestConfigSuite(t *testing.T) {
suite.Run(t, new(ConfigSuite))
suite.Run(t, &ConfigSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *ConfigSuite) TestReadRepoConfigBasic() {
@ -172,8 +172,8 @@ func (suite *ConfigSuite) TestMustMatchConfig() {
},
}
for _, test := range table {
t.Run(test.name, func(t *testing.T) {
test.errCheck(t, mustMatchConfig(vpr, test.input))
suite.Run(test.name, func() {
test.errCheck(suite.T(), mustMatchConfig(vpr, test.input))
})
}
}
@ -183,20 +183,14 @@ func (suite *ConfigSuite) TestMustMatchConfig() {
// ------------------------------------------------------------
type ConfigIntegrationSuite struct {
suite.Suite
tester.Suite
}
func TestConfigIntegrationSuite(t *testing.T) {
tester.RunOnAny(
suite.Run(t, &ConfigIntegrationSuite{Suite: tester.NewIntegrationSuite(
t,
tester.CorsoCITests,
tester.CorsoCLIConfigTests)
suite.Run(t, new(ConfigIntegrationSuite))
}
func (suite *ConfigIntegrationSuite) SetupSuite() {
tester.MustGetEnvSets(suite.T(), tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs)
[][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
tester.CorsoCLIConfigTests)})
}
func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount() {


@ -12,11 +12,11 @@ import (
)
type EnvSuite struct {
suite.Suite
tester.Suite
}
func TestEnvSuite(t *testing.T) {
suite.Run(t, new(EnvSuite))
suite.Run(t, &EnvSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *EnvSuite) TestAddEnvCommands() {


@ -2,28 +2,31 @@ package print
import (
"bytes"
"context"
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
)
type PrintUnitSuite struct {
suite.Suite
tester.Suite
}
func TestPrintUnitSuite(t *testing.T) {
suite.Run(t, new(PrintUnitSuite))
suite.Run(t, &PrintUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *PrintUnitSuite) TestOnly() {
t := suite.T()
c := &cobra.Command{}
// cannot use tester.NewContext() here: circular imports
//nolint:forbidigo
ctx := SetRootCmd(context.Background(), c)
ctx, flush := tester.NewContext()
defer flush()
ctx = SetRootCmd(ctx, c)
assert.NoError(t, Only(ctx, nil))
assert.True(t, c.SilenceUsage)
}


@ -16,21 +16,15 @@ import (
)
type S3IntegrationSuite struct {
suite.Suite
tester.Suite
}
func TestS3IntegrationSuite(t *testing.T) {
tester.RunOnAny(
suite.Run(t, &S3IntegrationSuite{Suite: tester.NewIntegrationSuite(
t,
tester.CorsoCITests,
[][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
tester.CorsoCLITests,
tester.CorsoCLIRepoTests)
suite.Run(t, new(S3IntegrationSuite))
}
func (suite *S3IntegrationSuite) SetupSuite() {
tester.MustGetEnvSets(suite.T(), tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs)
tester.CorsoCLIRepoTests)})
}
func (suite *S3IntegrationSuite) TestInitS3Cmd() {
@ -49,7 +43,9 @@ func (suite *S3IntegrationSuite) TestInitS3Cmd() {
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext()
defer flush()
@ -148,7 +144,9 @@ func (suite *S3IntegrationSuite) TestConnectS3Cmd() {
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext()
defer flush()


@ -12,11 +12,11 @@ import (
)
type S3Suite struct {
suite.Suite
tester.Suite
}
func TestS3Suite(t *testing.T) {
suite.Run(t, new(S3Suite))
suite.Run(t, &S3Suite{Suite: tester.NewUnitSuite(t)})
}
func (suite *S3Suite) TestAddS3Commands() {
@ -33,7 +33,9 @@ func (suite *S3Suite) TestAddS3Commands() {
{"connect s3", connectCommand, expectUse, s3ConnectCmd().Short, connectS3Cmd},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
suite.Run(test.name, func() {
t := suite.T()
cmd := &cobra.Command{Use: test.use}
c := addS3Commands(cmd)


@ -31,7 +31,7 @@ var (
var backupDataSets = []path.CategoryType{email, contacts, events}
type RestoreExchangeIntegrationSuite struct {
suite.Suite
tester.Suite
acct account.Account
st storage.Storage
vpr *viper.Viper
@ -42,13 +42,13 @@ type RestoreExchangeIntegrationSuite struct {
}
func TestRestoreExchangeIntegrationSuite(t *testing.T) {
tester.RunOnAny(
t,
tester.CorsoCITests,
tester.CorsoCLITests,
tester.CorsoCLIRestoreTests)
suite.Run(t, new(RestoreExchangeIntegrationSuite))
suite.Run(t, &RestoreExchangeIntegrationSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
tester.CorsoCLITests,
tester.CorsoCLIRestoreTests),
})
}
func (suite *RestoreExchangeIntegrationSuite) SetupSuite() {
@ -57,8 +57,6 @@ func (suite *RestoreExchangeIntegrationSuite) SetupSuite() {
ctx, flush := tester.NewContext()
defer flush()
tester.MustGetEnvSets(t, tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs)
// aggregate required details
suite.acct = tester.NewM365Account(t)
suite.st = tester.NewPrefixedS3Storage(t)
@ -119,7 +117,9 @@ func (suite *RestoreExchangeIntegrationSuite) SetupSuite() {
func (suite *RestoreExchangeIntegrationSuite) TestExchangeRestoreCmd() {
for _, set := range backupDataSets {
suite.T().Run(set.String(), func(t *testing.T) {
suite.Run(set.String(), func() {
t := suite.T()
ctx, flush := tester.NewContext()
ctx = config.SetViper(ctx, suite.vpr)
@ -143,7 +143,9 @@ func (suite *RestoreExchangeIntegrationSuite) TestExchangeRestoreCmd_badTimeFlag
suite.T().Skip()
}
suite.T().Run(set.String(), func(t *testing.T) {
suite.Run(set.String(), func() {
t := suite.T()
ctx, flush := tester.NewContext()
ctx = config.SetViper(ctx, suite.vpr)
@ -176,7 +178,9 @@ func (suite *RestoreExchangeIntegrationSuite) TestExchangeRestoreCmd_badBoolFlag
suite.T().Skip()
}
suite.T().Run(set.String(), func(t *testing.T) {
suite.Run(set.String(), func() {
t := suite.T()
//nolint:forbidigo
ctx := config.SetViper(context.Background(), suite.vpr)
ctx, flush := tester.WithContext(ctx)


@ -12,11 +12,11 @@ import (
)
type ExchangeSuite struct {
suite.Suite
tester.Suite
}
func TestExchangeSuite(t *testing.T) {
suite.Run(t, new(ExchangeSuite))
suite.Run(t, &ExchangeSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *ExchangeSuite) TestAddExchangeCommands() {
@ -32,7 +32,9 @@ func (suite *ExchangeSuite) TestAddExchangeCommands() {
{"restore exchange", restoreCommand, expectUse, exchangeRestoreCmd().Short, restoreExchangeCmd},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
suite.Run(test.name, func() {
t := suite.T()
cmd := &cobra.Command{Use: test.use}
c := addExchangeCommands(cmd)


@ -12,11 +12,11 @@ import (
)
type OneDriveSuite struct {
suite.Suite
tester.Suite
}
func TestOneDriveSuite(t *testing.T) {
suite.Run(t, new(OneDriveSuite))
suite.Run(t, &OneDriveSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *OneDriveSuite) TestAddOneDriveCommands() {
@ -32,7 +32,9 @@ func (suite *OneDriveSuite) TestAddOneDriveCommands() {
{"restore onedrive", restoreCommand, expectUse, oneDriveRestoreCmd().Short, restoreOneDriveCmd},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
suite.Run(test.name, func() {
t := suite.T()
cmd := &cobra.Command{Use: test.use}
c := addOneDriveCommands(cmd)


@ -12,11 +12,11 @@ import (
)
type SharePointSuite struct {
suite.Suite
tester.Suite
}
func TestSharePointSuite(t *testing.T) {
suite.Run(t, new(SharePointSuite))
suite.Run(t, &SharePointSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *SharePointSuite) TestAddSharePointCommands() {
@ -32,7 +32,9 @@ func (suite *SharePointSuite) TestAddSharePointCommands() {
{"restore onedrive", restoreCommand, expectUse, sharePointRestoreCmd().Short, restoreSharePointCmd},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
suite.Run(test.name, func() {
t := suite.T()
cmd := &cobra.Command{Use: test.use}
c := addSharePointCommands(cmd)


@ -8,15 +8,16 @@ import (
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/selectors"
)
type ExchangeUtilsSuite struct {
suite.Suite
tester.Suite
}
func TestExchangeUtilsSuite(t *testing.T) {
suite.Run(t, new(ExchangeUtilsSuite))
suite.Run(t, &ExchangeUtilsSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *ExchangeUtilsSuite) TestValidateRestoreFlags() {
@ -50,8 +51,8 @@ func (suite *ExchangeUtilsSuite) TestValidateRestoreFlags() {
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
test.expect(t, utils.ValidateExchangeRestoreFlags(test.backupID, test.opts))
suite.Run(test.name, func() {
test.expect(suite.T(), utils.ValidateExchangeRestoreFlags(test.backupID, test.opts))
})
}
}
@ -300,9 +301,9 @@ func (suite *ExchangeUtilsSuite) TestIncludeExchangeRestoreDataSelectors() {
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
suite.Run(test.name, func() {
sel := utils.IncludeExchangeRestoreDataSelectors(test.opts)
assert.Len(t, sel.Includes, test.expectIncludeLen)
assert.Len(suite.T(), sel.Includes, test.expectIncludeLen)
})
}
}
@ -361,11 +362,11 @@ func (suite *ExchangeUtilsSuite) TestAddExchangeInclude() {
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
suite.Run(test.name, func() {
sel := selectors.NewExchangeRestore(nil)
// no return, mutates sel as a side effect
utils.AddExchangeInclude(sel, test.folders, test.items, eisc)
assert.Len(t, sel.Includes, test.expectIncludeLen)
assert.Len(suite.T(), sel.Includes, test.expectIncludeLen)
})
}
}
@ -477,10 +478,10 @@ func (suite *ExchangeUtilsSuite) TestFilterExchangeRestoreInfoSelectors() {
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
suite.Run(test.name, func() {
sel := selectors.NewExchangeRestore(nil)
utils.FilterExchangeRestoreInfoSelectors(sel, test.opts)
assert.Len(t, sel.Filters, test.expectFilterLen)
assert.Len(suite.T(), sel.Filters, test.expectFilterLen)
})
}
}


@ -7,14 +7,15 @@ import (
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/tester"
)
type OneDriveUtilsSuite struct {
suite.Suite
tester.Suite
}
func TestOneDriveUtilsSuite(t *testing.T) {
suite.Run(t, new(OneDriveUtilsSuite))
suite.Run(t, &OneDriveUtilsSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *OneDriveUtilsSuite) TestIncludeOneDriveRestoreDataSelectors() {
@ -88,9 +89,9 @@ func (suite *OneDriveUtilsSuite) TestIncludeOneDriveRestoreDataSelectors() {
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
suite.Run(test.name, func() {
sel := utils.IncludeOneDriveRestoreDataSelectors(test.opts)
assert.Len(t, sel.Includes, test.expectIncludeLen)
assert.Len(suite.T(), sel.Includes, test.expectIncludeLen)
})
}
}


@ -7,14 +7,15 @@ import (
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/tester"
)
type SharePointUtilsSuite struct {
suite.Suite
tester.Suite
}
func TestSharePointUtilsSuite(t *testing.T) {
suite.Run(t, new(SharePointUtilsSuite))
suite.Run(t, &SharePointUtilsSuite{Suite: tester.NewUnitSuite(t)})
}
// Tests that selectors are properly built for SharePoint
@ -181,9 +182,9 @@ func (suite *SharePointUtilsSuite) TestIncludeSharePointRestoreDataSelectors() {
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
suite.Run(test.name, func() {
sel := utils.IncludeSharePointRestoreDataSelectors(test.opts)
assert.Len(t, sel.Includes, test.expectIncludeLen)
assert.Len(suite.T(), sel.Includes, test.expectIncludeLen)
})
}
}


@ -6,15 +6,16 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/selectors"
)
type CliUtilsSuite struct {
suite.Suite
tester.Suite
}
func TestCliUtilsSuite(t *testing.T) {
suite.Run(t, new(CliUtilsSuite))
suite.Run(t, &CliUtilsSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *CliUtilsSuite) TestRequireProps() {
@ -75,7 +76,9 @@ func (suite *CliUtilsSuite) TestSplitFoldersIntoContainsAndPrefix() {
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
suite.Run(test.name, func() {
t := suite.T()
c, p := splitFoldersIntoContainsAndPrefix(test.input)
assert.ElementsMatch(t, test.expectC, c, "contains set")
assert.ElementsMatch(t, test.expectP, p, "prefix set")


@ -5,7 +5,7 @@ go 1.19
require (
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0
github.com/alcionai/clues v0.0.0-20230217203352-c3714e5e9013
github.com/aws/aws-sdk-go v1.44.205
github.com/aws/aws-sdk-go v1.44.206
github.com/aws/aws-xray-sdk-go v1.8.0
github.com/cenkalti/backoff/v4 v4.2.0
github.com/google/uuid v1.3.0


@ -62,8 +62,8 @@ github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk5
github.com/alessio/shellescape v1.4.1 h1:V7yhSDDn8LP4lc4jS8pFkt0zCnzVJlG5JXy9BVKJUX0=
github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY=
github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/aws/aws-sdk-go v1.44.205 h1:q23NJXgLPIuBMn4zaluWWz57HPP5z7Ut8ZtK1D3N9bs=
github.com/aws/aws-sdk-go v1.44.205/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
github.com/aws/aws-sdk-go v1.44.206 h1:xC7O40wdnKH4A95KdYt+smXl9hig1vu9b3mFxAxUoak=
github.com/aws/aws-sdk-go v1.44.206/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
github.com/aws/aws-xray-sdk-go v1.8.0 h1:0xncHZ588wB/geLjbM/esoW3FOEThWy2TJyb4VXfLFY=
github.com/aws/aws-xray-sdk-go v1.8.0/go.mod h1:7LKe47H+j3evfvS1+q0wzpoaGXGrF3mUsfM+thqVO+A=
github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8=


@ -0,0 +1,54 @@
package crash
import (
"context"
"fmt"
"runtime"
"runtime/debug"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/pkg/logger"
)
// Recovery provides a deferrable func that can be called
// to recover from, and log context about, crashes.
// If an error is returned, then a panic recovery occurred.
//
// Call it as follows:
//
// defer func() {
// if crErr := crash.Recovery(ctx, recover()); crErr != nil {
// err = crErr // err needs to be a named return variable
// }
// }()
func Recovery(ctx context.Context, r any) error {
var (
err error
inFile string
)
if r != nil {
if re, ok := r.(error); ok {
err = re
} else if re, ok := r.(string); ok {
err = clues.New(re)
} else {
err = clues.New(fmt.Sprintf("%v", r))
}
_, file, _, ok := runtime.Caller(2)
if ok {
inFile = " in file: " + file
}
err = clues.Wrap(err, "panic recovery"+inFile).
WithClues(ctx).
With("stacktrace", string(debug.Stack()))
logger.Ctx(ctx).
With("err", err).
Errorw("backup panic", clues.InErr(err).Slice()...)
}
return err
}
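
A minimal sketch of the intended call site, mirroring the operations Run changes later in this commit (runSafely is a hypothetical function). The named err return is essential: recover() only reaches the deferred closure, which must assign into err for the caller to see the panic.

func runSafely(ctx context.Context) (err error) {
	defer func() {
		// recover() must be called directly in the deferred closure.
		// Recovery wraps the recovered value, logs it with its stacktrace,
		// and returns it so we can surface it as an ordinary error.
		if crErr := crash.Recovery(ctx, recover()); crErr != nil {
			err = crErr
		}
	}()

	panic("boom") // reaches the caller as a wrapped error, not a crash
}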


@ -0,0 +1,62 @@
package crash_test
import (
"testing"
"github.com/alcionai/corso/src/internal/common/crash"
"github.com/alcionai/corso/src/internal/tester"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
)
type CrashTestDummySuite struct {
tester.Suite
}
func TestCrashTestDummySuite(t *testing.T) {
suite.Run(t, &CrashTestDummySuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *CrashTestDummySuite) TestRecovery() {
table := []struct {
name string
v any
expect assert.ErrorAssertionFunc
}{
{
name: "no panic",
v: nil,
expect: assert.NoError,
},
{
name: "error panic",
v: assert.AnError,
expect: assert.Error,
},
{
name: "string panic",
v: "an error",
expect: assert.Error,
},
{
name: "any panic",
v: map[string]string{"error": "yes"},
expect: assert.Error,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext()
defer func() {
test.expect(t, crash.Recovery(ctx, recover()))
flush()
}()
if test.v != nil {
panic(test.v)
}
})
}
}


@ -108,16 +108,19 @@ func (c Users) GetAll(ctx context.Context, errs *fault.Errors) ([]models.Userabl
return nil, clues.Wrap(err, "creating users iterator").WithClues(ctx).With(graph.ErrData(err)...)
}
us := make([]models.Userable, 0)
var (
us = make([]models.Userable, 0)
et = errs.Tracker()
)
iterator := func(item any) bool {
if errs.Err() != nil {
if et.Err() != nil {
return false
}
u, err := validateUser(item)
if err != nil {
errs.Add(clues.Wrap(err, "validating user").WithClues(ctx).With(graph.ErrData(err)...))
et.Add(clues.Wrap(err, "validating user").WithClues(ctx).With(graph.ErrData(err)...))
} else {
us = append(us, u)
}
@ -129,7 +132,7 @@ func (c Users) GetAll(ctx context.Context, errs *fault.Errors) ([]models.Userabl
return nil, clues.Wrap(err, "iterating all users").WithClues(ctx).With(graph.ErrData(err)...)
}
return us, errs.Err()
return us, et.Err()
}
func (c Users) GetByID(ctx context.Context, userID string) (models.Userable, error) {


@ -178,6 +178,7 @@ func DataCollections(
var (
user = selector.DiscreteOwner
collections = []data.BackupCollection{}
et = errs.Tracker()
)
cdps, err := parseMetadataCollections(ctx, metadata, errs)
@ -186,7 +187,7 @@ func DataCollections(
}
for _, scope := range eb.Scopes() {
if errs.Err() != nil {
if et.Err() != nil {
break
}
@ -200,14 +201,14 @@ func DataCollections(
su,
errs)
if err != nil {
errs.Add(err)
et.Add(err)
continue
}
collections = append(collections, dcs...)
}
return collections, nil, errs.Err()
return collections, nil, et.Err()
}
func getterByType(ac api.Client, category path.CategoryType) (addedAndRemovedItemIDsGetter, error) {


@ -68,9 +68,11 @@ func filterContainersAndFillCollections(
return err
}
et := errs.Tracker()
for _, c := range resolver.Items() {
if errs.Err() != nil {
return errs.Err()
if et.Err() != nil {
return et.Err()
}
cID := *c.GetId()
@ -100,7 +102,7 @@ func filterContainersAndFillCollections(
added, removed, newDelta, err := getter.GetAddedAndRemovedItemIDs(ctx, qp.ResourceOwner, cID, prevDelta)
if err != nil {
if !graph.IsErrDeletedInFlight(err) {
errs.Add(err)
et.Add(err)
continue
}
@ -155,8 +157,12 @@ func filterContainersAndFillCollections(
// in the `previousPath` set, but does not exist in the current container
// resolver (which contains all the resource owners' current containers).
for id, p := range tombstones {
if et.Err() != nil {
return et.Err()
}
if collections[id] != nil {
errs.Add(clues.Wrap(err, "conflict: tombstone exists for a live collection").WithClues(ctx))
et.Add(clues.New("conflict: tombstone exists for a live collection").WithClues(ctx))
continue
}
@ -205,15 +211,14 @@ func filterContainersAndFillCollections(
path.ExchangeService,
qp.Category,
entries,
statusUpdater,
)
statusUpdater)
if err != nil {
return clues.Wrap(err, "making metadata collection")
}
collections["metadata"] = col
return errs.Err()
return et.Err()
}
// produces a set of id:path pairs from the deltapaths map.


@ -112,6 +112,7 @@ func RestoreExchangeEvent(
ctx = clues.Add(ctx, "item_id", ptr.Val(event.GetId()))
var (
et = errs.Tracker()
transformedEvent = support.ToEventSimplified(event)
attached []models.Attachmentable
)
@ -139,19 +140,19 @@ func RestoreExchangeEvent(
}
for _, attach := range attached {
if errs.Err() != nil {
if et.Err() != nil {
break
}
if err := uploadAttachment(ctx, uploader, attach); err != nil {
errs.Add(err)
et.Add(err)
}
}
info := api.EventInfo(event)
info.Size = int64(len(bits))
return info, errs.Err()
return info, et.Err()
}
// RestoreMailMessage utility function to place an exchange.Mail
@ -255,17 +256,19 @@ func SendMailToBackStore(
return clues.New("nil response from post").WithClues(ctx)
}
id := ptr.Val(response.GetId())
uploader := &mailAttachmentUploader{
userID: user,
folderID: destination,
itemID: id,
service: service,
}
var (
et = errs.Tracker()
id = ptr.Val(response.GetId())
uploader = &mailAttachmentUploader{
userID: user,
folderID: destination,
itemID: id,
service: service,
}
)
for _, attachment := range attached {
if errs.Err() != nil {
if et.Err() != nil {
break
}
@ -280,13 +283,13 @@ func SendMailToBackStore(
continue
}
errs.Add(errors.Wrap(err, "uploading mail attachment"))
et.Add(errors.Wrap(err, "uploading mail attachment"))
break
}
}
return errs.Err()
return et.Err()
}
// RestoreExchangeDataCollections restores M365 objects in data.RestoreCollection to MSFT
@ -307,6 +310,7 @@ func RestoreExchangeDataCollections(
userID string
// TODO policy to be updated from external source after completion of refactoring
policy = control.Copy
et = errs.Tracker()
)
if len(dcs) > 0 {
@ -315,8 +319,8 @@ func RestoreExchangeDataCollections(
}
for _, dc := range dcs {
if errs.Err() != nil {
return nil, errs.Err()
if et.Err() != nil {
break
}
userCaches := directoryCaches[userID]
@ -333,7 +337,7 @@ func RestoreExchangeDataCollections(
userCaches,
errs)
if err != nil {
errs.Add(clues.Wrap(err, "creating destination").WithClues(ctx))
et.Add(clues.Wrap(err, "creating destination").WithClues(ctx))
continue
}
@ -351,10 +355,10 @@ func RestoreExchangeDataCollections(
support.Restore,
len(dcs),
metrics,
errs.Err(),
et.Err(),
dest.ContainerName)
return status, errs.Err()
return status, et.Err()
}
// restoreCollection handles restoration of an individual collection.


@ -320,15 +320,17 @@ func getResources(
return nil, clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...)
}
et := errs.Tracker()
callbackFunc := func(item any) bool {
if errs.Err() != nil {
if et.Err() != nil {
return false
}
k, v, err := identify(item)
if err != nil {
if !errors.Is(err, errKnownSkippableCase) {
errs.Add(clues.Stack(err).
et.Add(clues.Stack(err).
WithClues(ctx).
With("query_url", gs.Adapter().GetBaseUrl()))
}
@ -345,5 +347,5 @@ func getResources(
return nil, clues.Stack(err).WithClues(ctx).With(graph.ErrData(err)...)
}
return resources, errs.Err()
return resources, et.Err()
}


@ -179,6 +179,7 @@ func defaultItemPager(
"package",
"parentReference",
"root",
"sharepointIds",
"size",
"deleted",
},


@ -14,6 +14,7 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/connector/uploadsession"
@ -194,9 +195,9 @@ func oneDriveItemInfo(di models.DriveItemable, itemSize int64) *details.OneDrive
return &details.OneDriveInfo{
ItemType: details.OneDriveItem,
ItemName: *di.GetName(),
Created: *di.GetCreatedDateTime(),
Modified: *di.GetLastModifiedDateTime(),
ItemName: ptr.Val(di.GetName()),
Created: ptr.Val(di.GetCreatedDateTime()),
Modified: ptr.Val(di.GetLastModifiedDateTime()),
DriveName: parent,
Size: itemSize,
Owner: email,
@ -223,6 +224,10 @@ func oneDriveItemPermissionInfo(
Permissions().
Get(ctx, nil)
if err != nil {
msg := support.ConnectorStackErrorTrace(err)
err = clues.Wrap(err, "fetching item permissions: "+msg).
With("item_id", *di.GetId())
return nil, err
}
@ -280,36 +285,32 @@ func sharePointItemInfo(di models.DriveItemable, itemSize int64) *details.ShareP
// TODO: we rely on this info for details/restore lookups,
// so if it's nil we have an issue, and will need an alternative
// way to source the data.
gsi := di.GetSharepointIds()
if gsi != nil {
if gsi.GetSiteId() != nil {
id = *gsi.GetSiteId()
}
id = ptr.Val(gsi.GetSiteId())
url = ptr.Val(gsi.GetSiteUrl())
if gsi.GetSiteUrl() != nil {
url = *gsi.GetSiteUrl()
if len(url) == 0 {
url = constructWebURL(di.GetAdditionalData())
}
}
if reference != nil {
parent = *reference.GetDriveId()
parent = ptr.Val(reference.GetDriveId())
temp := ptr.Val(reference.GetName())
temp = strings.TrimSpace(temp)
if reference.GetName() != nil {
// EndPoint is not always populated from external apps
temp := *reference.GetName()
temp = strings.TrimSpace(temp)
if temp != "" {
parent = temp
}
if temp != "" {
parent = temp
}
}
return &details.SharePointInfo{
ItemType: details.OneDriveItem,
ItemName: *di.GetName(),
Created: *di.GetCreatedDateTime(),
Modified: *di.GetLastModifiedDateTime(),
ItemName: ptr.Val(di.GetName()),
Created: ptr.Val(di.GetCreatedDateTime()),
Modified: ptr.Val(di.GetLastModifiedDateTime()),
DriveName: parent,
Size: itemSize,
Owner: id,
@ -344,3 +345,36 @@ func driveItemWriter(
return uploadsession.NewWriter(itemID, url, itemSize), nil
}
// constructWebURL helper function for recreating the webURL
// for the originating SharePoint site. Uses additional data map
// from a models.DriveItemable that possesses a downloadURL within the map.
// Returns "" if map nil or key is not present.
func constructWebURL(adtl map[string]any) string {
var (
desiredKey = "@microsoft.graph.downloadUrl"
sep = `/_layouts`
url string
)
if adtl == nil {
return url
}
r := adtl[desiredKey]
point, ok := r.(*string)
if !ok {
return url
}
value := ptr.Val(point)
if len(value) == 0 {
return url
}
temp := strings.Split(value, sep)
url = temp[0]
return url
}
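
Illustrative values only (the URL is hypothetical): the helper keeps everything before the /_layouts segment of the download URL, and degrades to "" when the map, key, or value is absent.

// the map value must be a *string to satisfy the type assertion above
u := "https://contoso.sharepoint.com/sites/team/_layouts/15/download.aspx?id=x"
adtl := map[string]any{"@microsoft.graph.downloadUrl": &u}

constructWebURL(adtl) // "https://contoso.sharepoint.com/sites/team"
constructWebURL(nil)  // ""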


@ -34,7 +34,7 @@ func GetSitePages(
col = make([]models.SitePageable, 0)
semaphoreCh = make(chan struct{}, fetchChannelSize)
opts = retrieveSitePageOptions()
err error
et = errs.Tracker()
wg sync.WaitGroup
m sync.Mutex
)
@ -49,7 +49,7 @@ func GetSitePages(
}
for _, entry := range pages {
if errs.Err() != nil {
if et.Err() != nil {
break
}
@ -61,11 +61,14 @@ func GetSitePages(
defer wg.Done()
defer func() { <-semaphoreCh }()
var page models.SitePageable
var (
page models.SitePageable
err error
)
page, err = serv.Client().SitesById(siteID).PagesById(pageID).Get(ctx, opts)
if err != nil {
errs.Add(clues.Wrap(err, "fetching page").WithClues(ctx).With(graph.ErrData(err)...))
et.Add(clues.Wrap(err, "fetching page").WithClues(ctx).With(graph.ErrData(err)...))
return
}
@ -75,7 +78,7 @@ func GetSitePages(
wg.Wait()
return col, errs.Err()
return col, et.Err()
}
// fetchPages utility function to return the tuple of item


@ -223,7 +223,10 @@ func (sc *Collection) retrieveLists(
progress chan<- struct{},
errs *fault.Errors,
) (numMetrics, error) {
var metrics numMetrics
var (
metrics numMetrics
et = errs.Tracker()
)
lists, err := loadSiteLists(ctx, sc.service, sc.fullPath.ResourceOwner(), sc.jobs, errs)
if err != nil {
@ -234,13 +237,13 @@ func (sc *Collection) retrieveLists(
// For each models.Listable, object is serialized and the metrics are collected.
// The progress is objected via the passed in channel.
for _, lst := range lists {
if errs.Err() != nil {
if et.Err() != nil {
break
}
byteArray, err := serializeContent(wtr, lst)
if err != nil {
errs.Add(clues.Wrap(err, "serializing list").WithClues(ctx))
et.Add(clues.Wrap(err, "serializing list").WithClues(ctx))
continue
}
@ -266,7 +269,7 @@ func (sc *Collection) retrieveLists(
}
}
return metrics, errs.Err()
return metrics, et.Err()
}
func (sc *Collection) retrievePages(
@ -275,7 +278,10 @@ func (sc *Collection) retrievePages(
progress chan<- struct{},
errs *fault.Errors,
) (numMetrics, error) {
var metrics numMetrics
var (
metrics numMetrics
et = errs.Tracker()
)
betaService := sc.betaService
if betaService == nil {
@ -292,13 +298,13 @@ func (sc *Collection) retrievePages(
// Pageable objects are not supported in v1.0 of msgraph at this time.
// TODO: Verify Parsable interface supported with modified-Pageable
for _, pg := range pages {
if errs.Err() != nil {
if et.Err() != nil {
break
}
byteArray, err := serializeContent(wtr, pg)
if err != nil {
errs.Add(clues.Wrap(err, "serializing page").WithClues(ctx))
et.Add(clues.Wrap(err, "serializing page").WithClues(ctx))
continue
}
@ -318,7 +324,7 @@ func (sc *Collection) retrievePages(
}
}
return metrics, errs.Err()
return metrics, et.Err()
}
func serializeContent(writer *kw.JsonSerializationWriter, obj absser.Parsable) ([]byte, error) {


@ -44,12 +44,13 @@ func DataCollections(
}
var (
et = errs.Tracker()
site = b.DiscreteOwner
collections = []data.BackupCollection{}
)
for _, scope := range b.Scopes() {
if errs.Err() != nil {
if et.Err() != nil {
break
}
@ -73,7 +74,7 @@ func DataCollections(
ctrlOpts,
errs)
if err != nil {
errs.Add(err)
et.Add(err)
continue
}
@ -88,7 +89,7 @@ func DataCollections(
su,
ctrlOpts)
if err != nil {
errs.Add(err)
et.Add(err)
continue
}
@ -102,7 +103,7 @@ func DataCollections(
ctrlOpts,
errs)
if err != nil {
errs.Add(err)
et.Add(err)
continue
}
}
@ -111,7 +112,7 @@ func DataCollections(
foldersComplete <- struct{}{}
}
return collections, nil, errs.Err()
return collections, nil, et.Err()
}
func collectLists(
@ -124,7 +125,10 @@ func collectLists(
) ([]data.BackupCollection, error) {
logger.Ctx(ctx).With("site", siteID).Debug("Creating SharePoint List Collections")
spcs := make([]data.BackupCollection, 0)
var (
et = errs.Tracker()
spcs = make([]data.BackupCollection, 0)
)
lists, err := preFetchLists(ctx, serv, siteID)
if err != nil {
@ -132,7 +136,7 @@ func collectLists(
}
for _, tuple := range lists {
if errs.Err() != nil {
if et.Err() != nil {
break
}
@ -143,7 +147,7 @@ func collectLists(
path.ListsCategory,
false)
if err != nil {
errs.Add(clues.Wrap(err, "creating list collection path").WithClues(ctx))
et.Add(clues.Wrap(err, "creating list collection path").WithClues(ctx))
}
collection := NewCollection(dir, serv, List, updater.UpdateStatus, ctrlOpts)
@ -152,7 +156,7 @@ func collectLists(
spcs = append(spcs, collection)
}
return spcs, errs.Err()
return spcs, et.Err()
}
// collectLibraries constructs a onedrive Collections struct and Get()s
@ -204,7 +208,10 @@ func collectPages(
) ([]data.BackupCollection, error) {
logger.Ctx(ctx).Debug("creating SharePoint Pages collections")
spcs := make([]data.BackupCollection, 0)
var (
et = errs.Tracker()
spcs = make([]data.BackupCollection, 0)
)
// make the betaClient
// Need to receive From DataCollection Call
@ -221,7 +228,7 @@ func collectPages(
}
for _, tuple := range tuples {
if errs.Err() != nil {
if et.Err() != nil {
break
}
@ -232,7 +239,7 @@ func collectPages(
path.PagesCategory,
false)
if err != nil {
errs.Add(clues.Wrap(err, "creating page collection path").WithClues(ctx))
et.Add(clues.Wrap(err, "creating page collection path").WithClues(ctx))
}
collection := NewCollection(dir, serv, Pages, updater.UpdateStatus, ctrlOpts)
@ -242,7 +249,7 @@ func collectPages(
spcs = append(spcs, collection)
}
return spcs, errs.Err()
return spcs, et.Err()
}
type folderMatcher struct {


@ -97,6 +97,7 @@ func loadSiteLists(
var (
results = make([]models.Listable, 0)
semaphoreCh = make(chan struct{}, fetchChannelSize)
et = errs.Tracker()
wg sync.WaitGroup
m sync.Mutex
)
@ -111,8 +112,8 @@ func loadSiteLists(
}
for _, listID := range listIDs {
if errs.Err() != nil {
return nil, errs.Err()
if et.Err() != nil {
break
}
semaphoreCh <- struct{}{}
@ -130,13 +131,13 @@ func loadSiteLists(
entry, err = gs.Client().SitesById(siteID).ListsById(id).Get(ctx, nil)
if err != nil {
errs.Add(clues.Wrap(err, "getting site list").WithClues(ctx).With(graph.ErrData(err)...))
et.Add(clues.Wrap(err, "getting site list").WithClues(ctx).With(graph.ErrData(err)...))
return
}
cols, cTypes, lItems, err := fetchListContents(ctx, gs, siteID, id, errs)
if err != nil {
errs.Add(clues.Wrap(err, "getting list contents"))
et.Add(clues.Wrap(err, "getting list contents"))
return
}
@ -149,7 +150,7 @@ func loadSiteLists(
wg.Wait()
return results, errs.Err()
return results, et.Err()
}
// fetchListContents utility function to retrieve associated M365 relationships
@ -198,6 +199,7 @@ func fetchListItems(
prefix = gs.Client().SitesById(siteID).ListsById(listID)
builder = prefix.Items()
itms = make([]models.ListItemable, 0)
et = errs.Tracker()
)
for {
@ -211,7 +213,7 @@ func fetchListItems(
}
for _, itm := range resp.GetValue() {
if errs.Err() != nil {
if et.Err() != nil {
break
}
@ -219,7 +221,7 @@ func fetchListItems(
fields, err := newPrefix.Fields().Get(ctx, nil)
if err != nil {
errs.Add(clues.Wrap(err, "getting list fields").WithClues(ctx).With(graph.ErrData(err)...))
et.Add(clues.Wrap(err, "getting list fields").WithClues(ctx).With(graph.ErrData(err)...))
continue
}
@ -235,7 +237,7 @@ func fetchListItems(
builder = mssite.NewItemListsItemItemsRequestBuilder(*resp.GetOdataNextLink(), gs.Adapter())
}
return itms, errs.Err()
return itms, et.Err()
}
// fetchColumns utility function to return columns from a site.
@ -301,6 +303,7 @@ func fetchContentTypes(
errs *fault.Errors,
) ([]models.ContentTypeable, error) {
var (
et = errs.Tracker()
cTypes = make([]models.ContentTypeable, 0)
builder = gs.Client().SitesById(siteID).ListsById(listID).ContentTypes()
)
@ -316,7 +319,7 @@ func fetchContentTypes(
}
for _, cont := range resp.GetValue() {
if errs.Err() != nil {
if et.Err() != nil {
break
}
@ -324,7 +327,7 @@ func fetchContentTypes(
links, err := fetchColumnLinks(ctx, gs, siteID, listID, id)
if err != nil {
errs.Add(err)
et.Add(err)
continue
}
@ -332,7 +335,7 @@ func fetchContentTypes(
cs, err := fetchColumns(ctx, gs, siteID, listID, id)
if err != nil {
errs.Add(err)
et.Add(err)
continue
}
@ -348,7 +351,7 @@ func fetchContentTypes(
builder = mssite.NewItemListsItemContentTypesRequestBuilder(*resp.GetOdataNextLink(), gs.Adapter())
}
return cTypes, errs.Err()
return cTypes, et.Err()
}
func fetchColumnLinks(


@ -226,13 +226,14 @@ func RestoreListCollection(
directory = dc.FullPath()
siteID = directory.ResourceOwner()
items = dc.Items(ctx, errs)
et = errs.Tracker()
)
trace.Log(ctx, "gc:sharepoint:restoreListCollection", directory.String())
for {
if errs.Err() != nil {
return metrics, errs.Err()
if et.Err() != nil {
break
}
select {
@ -252,7 +253,7 @@ func RestoreListCollection(
siteID,
restoreContainerName)
if err != nil {
errs.Add(err)
et.Add(err)
continue
}
@ -260,7 +261,7 @@ func RestoreListCollection(
itemPath, err := dc.FullPath().Append(itemData.UUID(), true)
if err != nil {
errs.Add(clues.Wrap(err, "appending item to full path").WithClues(ctx))
et.Add(clues.Wrap(err, "appending item to full path").WithClues(ctx))
continue
}
@ -275,6 +276,8 @@ func RestoreListCollection(
metrics.Successes++
}
}
return metrics, et.Err()
}
// RestorePageCollection handles restoration of an individual site page collection.
@ -305,14 +308,15 @@ func RestorePageCollection(
return metrics, clues.Wrap(err, "constructing graph client")
}
service := discover.NewBetaService(adpt)
// Restore items from collection
items := dc.Items(ctx, errs)
var (
et = errs.Tracker()
service = discover.NewBetaService(adpt)
items = dc.Items(ctx, errs)
)
for {
if errs.Err() != nil {
return metrics, errs.Err()
if et.Err() != nil {
break
}
select {
@ -332,7 +336,7 @@ func RestorePageCollection(
siteID,
restoreContainerName)
if err != nil {
errs.Add(err)
et.Add(err)
continue
}
@ -340,7 +344,7 @@ func RestorePageCollection(
itemPath, err := dc.FullPath().Append(itemData.UUID(), true)
if err != nil {
errs.Add(clues.Wrap(err, "appending item to full path").WithClues(ctx))
et.Add(clues.Wrap(err, "appending item to full path").WithClues(ctx))
continue
}
@ -355,4 +359,6 @@ func RestorePageCollection(
metrics.Successes++
}
}
return metrics, et.Err()
}


@ -397,23 +397,26 @@ func (w Wrapper) RestoreMultipleItems(
return nil, err
}
// Maps short ID of parent path to data collection for that folder.
cols := map[string]*kopiaDataCollection{}
var (
// Maps short ID of parent path to data collection for that folder.
cols = map[string]*kopiaDataCollection{}
et = errs.Tracker()
)
for _, itemPath := range paths {
if errs.Err() != nil {
return nil, errs.Err()
if et.Err() != nil {
return nil, et.Err()
}
ds, err := getItemStream(ctx, itemPath, snapshotRoot, bcounter)
if err != nil {
errs.Add(err)
et.Add(err)
continue
}
parentPath, err := itemPath.Dir()
if err != nil {
errs.Add(clues.Wrap(err, "making directory collection").WithClues(ctx))
et.Add(clues.Wrap(err, "making directory collection").WithClues(ctx))
continue
}
@ -437,7 +440,7 @@ func (w Wrapper) RestoreMultipleItems(
res = append(res, c)
}
return res, errs.Err()
return res, et.Err()
}
// DeleteSnapshot removes the provided manifest from kopia.


@ -2,8 +2,6 @@ package operations
import (
"context"
"fmt"
"runtime/debug"
"time"
"github.com/alcionai/clues"
@ -12,6 +10,7 @@ import (
"github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/common/crash"
"github.com/alcionai/corso/src/internal/connector"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
@ -111,22 +110,8 @@ type detailsWriter interface {
// Run begins a synchronous backup operation.
func (op *BackupOperation) Run(ctx context.Context) (err error) {
defer func() {
if r := recover(); r != nil {
var rerr error
if re, ok := r.(error); ok {
rerr = re
} else if re, ok := r.(string); ok {
rerr = clues.New(re)
} else {
rerr = clues.New(fmt.Sprintf("%v", r))
}
err = clues.Wrap(rerr, "panic recovery").
WithClues(ctx).
With("stacktrace", string(debug.Stack()))
logger.Ctx(ctx).
With("err", err).
Errorw("backup panic", clues.InErr(err).Slice()...)
if crErr := crash.Recovery(ctx, recover()); crErr != nil {
err = crErr
}
}()
@ -184,6 +169,15 @@ func (op *BackupOperation) Run(ctx context.Context) (err error) {
opStats.readErr = op.Errors.Err()
}
// TODO: the consumer (sdk or cli) should run this, not operations.
recoverableCount := len(op.Errors.Errs())
for i, err := range op.Errors.Errs() {
logger.Ctx(ctx).
With("error", err).
With(clues.InErr(err).Slice()...).
Errorf("doing backup: recoverable error %d of %d", i+1, recoverableCount)
}
// -----
// Persistence
// -----
@ -223,6 +217,7 @@ func (op *BackupOperation) do(
backupID model.StableID,
) (*details.Builder, error) {
reasons := selectorToReasons(op.Selectors)
logger.Ctx(ctx).With("selectors", op.Selectors).Info("backing up selection")
// should always be 1, since backups are 1:1 with resourceOwners.
opStats.resourceCount = 1


@ -139,10 +139,11 @@ func verifyDistinctBases(ctx context.Context, mans []*kopia.ManifestEntry, errs
var (
failed bool
reasons = map[string]manifest.ID{}
et = errs.Tracker()
)
for _, man := range mans {
if errs.Err() != nil {
if et.Err() != nil {
break
}
@ -162,7 +163,7 @@ func verifyDistinctBases(ctx context.Context, mans []*kopia.ManifestEntry, errs
if b, ok := reasons[reasonKey]; ok {
failed = true
errs.Add(clues.New("manifests have overlapping reasons").
et.Add(clues.New("manifests have overlapping reasons").
WithClues(ctx).
With("other_manifest_id", b))
@ -177,7 +178,7 @@ func verifyDistinctBases(ctx context.Context, mans []*kopia.ManifestEntry, errs
return clues.New("multiple base snapshots qualify").WithClues(ctx)
}
return errs.Err()
return et.Err()
}
// collectMetadata retrieves all metadata files associated with the manifest.


@ -3,7 +3,6 @@ package operations
import (
"context"
"fmt"
"runtime/debug"
"sort"
"time"
@ -13,6 +12,7 @@ import (
"github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/common/crash"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
D "github.com/alcionai/corso/src/internal/diagnostics"
@ -111,22 +111,8 @@ type restorer interface {
// Run begins a synchronous restore operation.
func (op *RestoreOperation) Run(ctx context.Context) (restoreDetails *details.Details, err error) {
defer func() {
if r := recover(); r != nil {
var rerr error
if re, ok := r.(error); ok {
rerr = re
} else if re, ok := r.(string); ok {
rerr = clues.New(re)
} else {
rerr = clues.New(fmt.Sprintf("%v", r))
}
err = clues.Wrap(rerr, "panic recovery").
WithClues(ctx).
With("stacktrace", string(debug.Stack()))
logger.Ctx(ctx).
With("err", err).
Errorw("backup panic", clues.InErr(err).Slice()...)
if crErr := crash.Recovery(ctx, recover()); crErr != nil {
err = crErr
}
}()
@ -170,6 +156,15 @@ func (op *RestoreOperation) Run(ctx context.Context) (restoreDetails *details.De
opStats.readErr = op.Errors.Err()
}
// TODO: the consumer (sdk or cli) should run this, not operations.
recoverableCount := len(op.Errors.Errs())
for i, err := range op.Errors.Errs() {
logger.Ctx(ctx).
With("error", err).
With(clues.InErr(err).Slice()...).
Errorf("doing restore: recoverable error %d of %d", i+1, recoverableCount)
}
// -----
// Persistence
// -----
@ -224,6 +219,7 @@ func (op *RestoreOperation) do(
})
observe.Message(ctx, observe.Safe(fmt.Sprintf("Discovered %d items in backup %s to restore", len(paths), op.BackupID)))
logger.Ctx(ctx).With("selectors", op.Selectors).Info("restoring selection")
kopiaComplete, closer := observe.MessageWithCompletion(ctx, observe.Safe("Enumerating items in repository"))
defer closer()
@ -352,18 +348,20 @@ func formatDetailsForRestoration(
}
var (
fdsPaths = fds.Paths()
paths = make([]path.Path, len(fdsPaths))
fdsPaths = fds.Paths()
paths = make([]path.Path, len(fdsPaths))
shortRefs = make([]string, len(fdsPaths))
et = errs.Tracker()
)
for i := range fdsPaths {
if errs.Err() != nil {
return nil, errs.Err()
if et.Err() != nil {
break
}
p, err := path.FromDataLayerPath(fdsPaths[i], true)
if err != nil {
errs.Add(clues.
et.Add(clues.
Wrap(err, "parsing details path after reduction").
WithMap(clues.In(ctx)).
With("path", fdsPaths[i]))
@ -372,6 +370,7 @@ func formatDetailsForRestoration(
}
paths[i] = p
shortRefs[i] = p.ShortRef()
}
// TODO(meain): Move this to onedrive specific component, but as
@ -385,5 +384,7 @@ func formatDetailsForRestoration(
return paths[i].String() < paths[j].String()
})
return paths, errs.Err()
logger.Ctx(ctx).With("short_refs", shortRefs).Infof("found %d details entries to restore", len(shortRefs))
return paths, et.Err()
}

View File

@ -224,8 +224,41 @@ func ExampleErrors_Errs() {
// Err() is nil
}
// ExampleErrors_e2e showcases a more complex integration.
func ExampleErrors_e2e() {
func ExampleErrors_Tracker() {
// It is common for Corso to run operations in parallel,
// and for iterations to be nested within iterations. To
// avoid mistakenly returning an error that was sourced
// from some other async iteration, recoverable instances
// are aggregated into a Tracker.
errs := fault.New(false)
trkr := errs.Tracker()
err := func() error {
for i := range items {
if trkr.Err() != nil {
break
}
if err := getIthItem(i); err != nil {
// instead of calling errs.Add(err), we call the
// tracker's Add method. The error will still get
// added to the errs.Errs() set. But if this err
// causes the run to fail, only this tracker treats
// it as the causal failure.
trkr.Add(err)
}
}
return trkr.Err()
}()
if err != nil {
// handle the Err() that appeared in the tracker
fmt.Println("err occurred", errs.Err())
}
}
// Example_errors_e2e showcases a more complex integration.
func Example_errors_e2e() {
oper := newOperation()
// imagine that we're a user, calling into corso SDK.

View File

@ -124,3 +124,48 @@ func (e *Errors) addErr(err error) *Errors {
return e
}
// ---------------------------------------------------------------------------
// Iteration Tracker
// ---------------------------------------------------------------------------
// Tracker constructs a new errors tracker for aggregating errors
// in a single iteration loop. Trackers shouldn't be passed down
// to other funcs, and the function that spawned the tracker should
// always return `tracker.Err()` to ensure that hard failures are
// propagated upstream.
func (e *Errors) Tracker() *tracker {
return &tracker{
mu: &sync.Mutex{},
errs: e,
}
}
type tracker struct {
mu *sync.Mutex
errs *Errors
current error
}
func (e *tracker) Add(err error) {
if err == nil {
return
}
e.mu.Lock()
defer e.mu.Unlock()
if e.current == nil && e.errs.failFast {
e.current = err
}
e.errs.Add(err)
}
// Err returns the primary error in the tracker. Will be nil if the
// original Errors is set to bestEffort handling. Does not return the
// underlying Errors.Err(). Should be called as the return value of
// any func which created a new tracker.
func (e *tracker) Err() error {
return e.current
}

View File

@ -251,3 +251,29 @@ func (suite *FaultErrorsUnitSuite) TestUnmarshalLegacy() {
err = json.Unmarshal(jsonStr, &um)
require.NoError(t, err)
}
func (suite *FaultErrorsUnitSuite) TestTracker() {
t := suite.T()
be := fault.New(false)
ba := be.Tracker()
assert.NoError(t, ba.Err())
assert.Empty(t, be.Errs())
ba.Add(assert.AnError)
assert.NoError(t, ba.Err())
assert.NoError(t, be.Err())
assert.NotEmpty(t, be.Errs())
fe := fault.New(true)
fa := fe.Tracker()
assert.NoError(t, fa.Err())
assert.Empty(t, fe.Errs())
fa.Add(assert.AnError)
assert.Error(t, fa.Err())
assert.Error(t, fe.Err())
assert.NotEmpty(t, fe.Errs())
}

View File

@ -8,6 +8,7 @@ import (
"github.com/google/uuid"
"github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/common/crash"
"github.com/alcionai/corso/src/internal/events"
"github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/model"
@ -88,13 +89,19 @@ func Initialize(
acct account.Account,
s storage.Storage,
opts control.Options,
) (Repository, error) {
) (repo Repository, err error) {
ctx = clues.Add(
ctx,
"acct_provider", acct.Provider.String(),
"acct_id", acct.ID(), // TODO: pii
"storage_provider", s.Provider.String())
defer func() {
if crErr := crash.Recovery(ctx, recover()); crErr != nil {
err = crErr
}
}()
kopiaRef := kopia.NewConn(s)
if err := kopiaRef.Initialize(ctx); err != nil {
// replace common internal errors so that sdk users can check results with errors.Is()
@ -156,13 +163,19 @@ func Connect(
acct account.Account,
s storage.Storage,
opts control.Options,
) (Repository, error) {
) (r Repository, err error) {
ctx = clues.Add(
ctx,
"acct_provider", acct.Provider.String(),
"acct_id", acct.ID(), // TODO: pii
"storage_provider", s.Provider.String())
defer func() {
if crErr := crash.Recovery(ctx, recover()); crErr != nil {
err = crErr
}
}()
// Close/Reset the progress bar. This ensures callers don't have to worry about
// their output getting clobbered (#1720)
defer observe.Complete()

View File

@ -0,0 +1,65 @@
---
slug: large-microsoft-365-exchange-backups
title: "Backing up large Microsoft 365 Exchange mailboxes with Corso"
description: "A guide to using Corso to back up very large Exchange mailboxes in Microsoft 365"
authors: nica
tags: [corso, microsoft 365, backups, S3]
date: 2023-02-21
image: ./images/heavy-mover.jpg
---
![heavy earth mover By Lowcarb23 - Own work, CC BY-SA 4.0, https://commons.wikimedia.org/w/index.php?curid=114344394](./images/heavy-mover.jpg)
Over the last few months it's been amazing sharing Corso with more and more users. One pleasant surprise has been users
operating large, often multi-tenant deployments of Microsoft 365 who want to use Corso to back up all their
data. In our discussions on the [Corso User Discord](https://discord.gg/63DTTSnuhT), we've found some best practices for
backing up large Exchange mailboxes with Corso.
<!-- truncate -->
### Make sure you're using the latest version of Corso
We've recently done a lot of work to harden Corso against transient network outages and Graph API timeouts. This
hardening has the greatest impact on large backups, as their long runtimes increase the probability of running
into transient errors.
Our recent work has also included support for incremental backups, which you'll definitely need for larger data sets.
This means that while your first backup of a user with a large mailbox can take some time, all subsequent backups
will be quite fast as Corso will only capture the incremental changes while still constructing a full backup.
### Don't be afraid to restart your backups
Fundamentally, Corso is a consumer of the Microsoft Graph API, which, like all complex APIs, isn't 100% predictable.
Even when a backup fails, Corso will often have already stored many objects along the way. Corso
will work hard to reuse these stored objects in the next backup, meaning your next backup isn't starting from
zero. A second attempt is likely to run faster, with a better chance of completing successfully.
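
If you drive Corso from a script, a simple retry loop captures this advice. Here's a minimal sketch (the user address
and the three-attempt limit are placeholders, not recommendations):

```bash
# Retry a backup a few times; each attempt reuses the objects
# that earlier, failed attempts already stored in the repository.
for attempt in 1 2 3; do
  if corso backup create exchange --user "alice@example.com"; then
    echo "backup succeeded on attempt ${attempt}"
    break
  fi
  echo "attempt ${attempt} failed; retrying..."
done
```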
### Batch your users
If many of your users have large file attachments (or if you have more than a few hundred users), youll want to batch
your users for your first backup. A tool like [Microsoft365dsc](https://microsoft365dsc.com/) can help you get a list
of all user emails ready for parsing. After that you can back up a few users or even a single user at a time with the
Corso command `corso backup create exchange --user "alice@example.com,bob@example.com"`
Why can't you just run them all in one go with `--user '*'`? Again, we're limited by Microsoft's Graph API, which
often has timeouts, 5xx errors, and throttles its clients.
The good news is that with Corso's robust ability to do incremental backups, after your first backup, you can
absolutely use larger batches of users, as all future backups to the same repository will run **much** faster.
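
To put the batching advice into practice, here's a minimal sketch of a first-backup loop. It assumes a hypothetical
`users.txt` with one email address per line (for example, exported with the help of Microsoft365dsc), and the batch
size of 5 is arbitrary:

```bash
#!/usr/bin/env bash
# Run the initial Exchange backup in small batches of users.
set -euo pipefail

batch_size=5
mapfile -t users < users.txt  # one email address per line

for ((i = 0; i < ${#users[@]}; i += batch_size)); do
  # Join the next slice of users into a comma-separated --user value.
  batch=$(IFS=,; echo "${users[*]:i:batch_size}")
  echo "backing up: ${batch}"
  corso backup create exchange --user "${batch}"
done
```

Once the first pass is done and incremental backups kick in, subsequent runs can safely use much larger batches.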
### Use multiple repositories for different tenants
If you're a managed service provider or otherwise running a multi-tenant architecture, you should use multiple separate
repositories with Corso. Two ways to pursue this:
- Point to separate buckets
- Place other repositories in subfolders of the same bucket with the `prefix` option
In both cases, the best way to keep these settings tidy is by using multiple `.corso.toml`
[configuration files](../../docs/setup/configuration/#configuration-file). Use the
`--config-file` option to point to separate config files.
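
As a sketch of what that looks like in practice (the bucket name, prefixes, and file paths below are hypothetical;
check `corso repo init s3 --help` for the exact flags in your Corso version):

```bash
# Tenant A: repository under the tenant-a/ prefix of a shared bucket,
# with its settings kept in a dedicated config file.
corso repo init s3 \
  --bucket corso-backups \
  --prefix tenant-a \
  --config-file ~/tenants/tenant-a.corso.toml

# Tenant B: same bucket, separate prefix and config file.
corso repo init s3 \
  --bucket corso-backups \
  --prefix tenant-b \
  --config-file ~/tenants/tenant-b.corso.toml

# Later commands select a tenant by pointing at its config file.
corso backup create exchange \
  --user "alice@tenant-a.example.com" \
  --config-file ~/tenants/tenant-a.corso.toml
```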
### Have questions?
Corso is under active development, and we expect our support for this type of use case to improve rapidly.
If you have feedback for us, please [join our Discord](https://discord.gg/63DTTSnuhT) and talk directly with the team!

Binary file not shown.

After

Width:  |  Height:  |  Size: 544 KiB

View File

@ -20,7 +20,7 @@
"feather-icons": "^4.29.0",
"jarallax": "^2.1.3",
"mdx-mermaid": "^1.3.2",
"mermaid": "^9.4.0",
"mermaid": "^10.0.0",
"prism-react-renderer": "^1.3.5",
"react": "^17.0.2",
"react-dom": "^17.0.2",
@ -33,7 +33,7 @@
"@iconify/react": "^4.1.0",
"autoprefixer": "^10.4.13",
"postcss": "^8.4.21",
"tailwindcss": "^3.2.6"
"tailwindcss": "^3.2.7"
}
},
"node_modules/@algolia/autocomplete-core": {
@ -9131,25 +9131,26 @@
}
},
"node_modules/mermaid": {
"version": "9.4.0",
"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-9.4.0.tgz",
"integrity": "sha512-4PWbOND7CNRbjHrdG3WUUGBreKAFVnMhdlPjttuUkeHbCQmAHkwzSh5dGwbrKmXGRaR4uTvfFVYzUcg++h0DkA==",
"version": "10.0.0",
"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-10.0.0.tgz",
"integrity": "sha512-syS1qyYCd3EPXCVSpYtefY4D9z9WZAK8hFgjeHR9PAtanybLO162Tu7o5i/nZkqRrJq0Rk8RqskQlhBPgT8eBw==",
"dependencies": {
"@braintree/sanitize-url": "^6.0.0",
"cytoscape": "^3.23.0",
"cytoscape-cose-bilkent": "^4.1.0",
"cytoscape-fcose": "^2.1.0",
"d3": "^7.0.0",
"d3": "^7.4.0",
"dagre-d3-es": "7.0.8",
"dompurify": "2.4.3",
"elkjs": "^0.8.2",
"khroma": "^2.0.0",
"lodash-es": "^4.17.21",
"moment": "^2.29.4",
"moment-mini": "^2.29.4",
"non-layered-tidy-tree-layout": "^2.0.2",
"stylis": "^4.1.2",
"ts-dedent": "^2.2.0",
"uuid": "^9.0.0"
"uuid": "^9.0.0",
"web-worker": "^1.2.0"
}
},
"node_modules/mermaid/node_modules/uuid": {
@ -9343,6 +9344,11 @@
"node": "*"
}
},
"node_modules/moment-mini": {
"version": "2.29.4",
"resolved": "https://registry.npmjs.org/moment-mini/-/moment-mini-2.29.4.tgz",
"integrity": "sha512-uhXpYwHFeiTbY9KSgPPRoo1nt8OxNVdMVoTBYHfSEKeRkIkwGpO+gERmhuhBtzfaeOyTkykSrm2+noJBgqt3Hg=="
},
"node_modules/mrmime": {
"version": "1.0.0",
"license": "MIT",
@ -12736,9 +12742,9 @@
}
},
"node_modules/tailwindcss": {
"version": "3.2.6",
"resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.2.6.tgz",
"integrity": "sha512-BfgQWZrtqowOQMC2bwaSNe7xcIjdDEgixWGYOd6AL0CbKHJlvhfdbINeAW76l1sO+1ov/MJ93ODJ9yluRituIw==",
"version": "3.2.7",
"resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.2.7.tgz",
"integrity": "sha512-B6DLqJzc21x7wntlH/GsZwEXTBttVSl1FtCzC8WP4oBc/NKef7kaax5jeihkkCEWc831/5NDJ9gRNDK6NEioQQ==",
"dev": true,
"dependencies": {
"arg": "^5.0.2",
@ -13856,6 +13862,11 @@
"url": "https://github.com/sponsors/wooorm"
}
},
"node_modules/web-worker": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/web-worker/-/web-worker-1.2.0.tgz",
"integrity": "sha512-PgF341avzqyx60neE9DD+XS26MMNMoUQRz9NOZwW32nPQrF6p77f1htcnjBSEV8BGMKZ16choqUG4hyI0Hx7mA=="
},
"node_modules/webidl-conversions": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
@ -20783,25 +20794,26 @@
"integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="
},
"mermaid": {
"version": "9.4.0",
"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-9.4.0.tgz",
"integrity": "sha512-4PWbOND7CNRbjHrdG3WUUGBreKAFVnMhdlPjttuUkeHbCQmAHkwzSh5dGwbrKmXGRaR4uTvfFVYzUcg++h0DkA==",
"version": "10.0.0",
"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-10.0.0.tgz",
"integrity": "sha512-syS1qyYCd3EPXCVSpYtefY4D9z9WZAK8hFgjeHR9PAtanybLO162Tu7o5i/nZkqRrJq0Rk8RqskQlhBPgT8eBw==",
"requires": {
"@braintree/sanitize-url": "^6.0.0",
"cytoscape": "^3.23.0",
"cytoscape-cose-bilkent": "^4.1.0",
"cytoscape-fcose": "^2.1.0",
"d3": "^7.0.0",
"d3": "^7.4.0",
"dagre-d3-es": "7.0.8",
"dompurify": "2.4.3",
"elkjs": "^0.8.2",
"khroma": "^2.0.0",
"lodash-es": "^4.17.21",
"moment": "^2.29.4",
"moment-mini": "^2.29.4",
"non-layered-tidy-tree-layout": "^2.0.2",
"stylis": "^4.1.2",
"ts-dedent": "^2.2.0",
"uuid": "^9.0.0"
"uuid": "^9.0.0",
"web-worker": "^1.2.0"
},
"dependencies": {
"uuid": {
@ -20922,6 +20934,11 @@
"resolved": "https://registry.npmjs.org/moment/-/moment-2.29.4.tgz",
"integrity": "sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w=="
},
"moment-mini": {
"version": "2.29.4",
"resolved": "https://registry.npmjs.org/moment-mini/-/moment-mini-2.29.4.tgz",
"integrity": "sha512-uhXpYwHFeiTbY9KSgPPRoo1nt8OxNVdMVoTBYHfSEKeRkIkwGpO+gERmhuhBtzfaeOyTkykSrm2+noJBgqt3Hg=="
},
"mrmime": {
"version": "1.0.0"
},
@ -23148,9 +23165,9 @@
}
},
"tailwindcss": {
"version": "3.2.6",
"resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.2.6.tgz",
"integrity": "sha512-BfgQWZrtqowOQMC2bwaSNe7xcIjdDEgixWGYOd6AL0CbKHJlvhfdbINeAW76l1sO+1ov/MJ93ODJ9yluRituIw==",
"version": "3.2.7",
"resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.2.7.tgz",
"integrity": "sha512-B6DLqJzc21x7wntlH/GsZwEXTBttVSl1FtCzC8WP4oBc/NKef7kaax5jeihkkCEWc831/5NDJ9gRNDK6NEioQQ==",
"dev": true,
"requires": {
"arg": "^5.0.2",
@ -23859,6 +23876,11 @@
"resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-1.1.4.tgz",
"integrity": "sha512-wYxSGajtmoP4WxfejAPIr4l0fVh+jeMXZb08wNc0tMg6xsfZXj3cECqIK0G7ZAqUq0PP8WlMDtaOGVBTAWztNw=="
},
"web-worker": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/web-worker/-/web-worker-1.2.0.tgz",
"integrity": "sha512-PgF341avzqyx60neE9DD+XS26MMNMoUQRz9NOZwW32nPQrF6p77f1htcnjBSEV8BGMKZ16choqUG4hyI0Hx7mA=="
},
"webidl-conversions": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",

View File

@ -26,7 +26,7 @@
"feather-icons": "^4.29.0",
"jarallax": "^2.1.3",
"mdx-mermaid": "^1.3.2",
"mermaid": "^9.4.0",
"mermaid": "^10.0.0",
"prism-react-renderer": "^1.3.5",
"react": "^17.0.2",
"react-dom": "^17.0.2",
@ -39,7 +39,7 @@
"@iconify/react": "^4.1.0",
"autoprefixer": "^10.4.13",
"postcss": "^8.4.21",
"tailwindcss": "^3.2.6"
"tailwindcss": "^3.2.7"
},
"browserslist": {
"production": [