Fix cases where we had a trailing comma (#4208)

Not sure if we want to merge this, since it might generate a lot of conflicts, but it should help us add a linter in CI. If we're good with it, I'll add a lint for this in a follow-up PR.

Super hacky, but this fix was created by running `while true ; do tree-grepper -q go '(argument_list "," @nope .)' | tail -n1| awk -F: "{print \$1,\"+\"\$2\" -c ':norm \$xJZZ'\"}" | xargs vim ; done`. Each iteration finds the last remaining argument list that ends with a trailing comma before the closing paren, opens vim at that line, and runs `$xJZZ` to delete the comma, join the closing paren onto the line, and save.
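
If we do add the CI lint, here is a minimal sketch of what it could look like, reusing the same tree-grepper query as the loop above. This is illustrative only (not part of this PR), and it assumes tree-grepper is installed on the CI runner:

```sh
#!/usr/bin/env bash
# Illustrative CI check (not part of this PR): fail the job if any argument
# list still ends with a trailing comma directly before the closing paren.
set -euo pipefail

# Same tree-sitter query used to produce this change; `|| true` guards against
# a nonzero exit when there are no matches (exact behavior may vary).
matches="$(tree-grepper -q go '(argument_list "," @nope .)' || true)"

if [ -n "${matches}" ]; then
  printf '%s\n' "${matches}"
  echo "error: found trailing commas before closing parens in argument lists"
  exit 1
fi
```

Whether this lives as a standalone script or gets folded into an existing lint step is left to the follow-up PR.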

---

#### Does this PR need a docs update or release note?

- [ ] Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [ ] No

#### Type of change

<!--- Please check the type of change your PR introduces: --->
- [ ] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [ ] 🧹 Tech Debt/Cleanup

#### Issue(s)

<!-- Can reference multiple issues. Use one of the following "magic words" - "closes, fixes" to auto-close the Github issue. -->
* https://github.com/alcionai/corso/issues/3654

#### Test Plan

<!-- How will this be tested prior to merging? -->
- [ ] 💪 Manual
- [ ] Unit test
- [ ] 💚 E2E
Abin Simon 2023-09-08 22:40:29 +05:30 committed by GitHub
parent ce082162b5
commit be59928f98
81 changed files with 451 additions and 913 deletions

View File

@ -45,8 +45,7 @@ type NoBackupExchangeE2ESuite struct {
func TestNoBackupExchangeE2ESuite(t *testing.T) {
suite.Run(t, &BackupExchangeE2ESuite{Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs},
)})
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
}
func (suite *NoBackupExchangeE2ESuite) SetupSuite() {
@ -100,8 +99,7 @@ type BackupExchangeE2ESuite struct {
func TestBackupExchangeE2ESuite(t *testing.T) {
suite.Run(t, &BackupExchangeE2ESuite{Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs},
)})
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
}
func (suite *BackupExchangeE2ESuite) SetupSuite() {
@ -244,8 +242,7 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_badAzureClientIDFl
cmd := cliTD.StubRootCmd(
"backup", "create", "exchange",
"--user", suite.its.user.ID,
"--azure-client-id", "invalid-value",
)
"--azure-client-id", "invalid-value")
cli.BuildCommandTree(cmd)
cmd.SetErr(&suite.dpnd.recorder)
@ -300,8 +297,7 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_badAWSFlags() {
"backup", "create", "exchange",
"--user", suite.its.user.ID,
"--aws-access-key", "invalid-value",
"--aws-secret-access-key", "some-invalid-value",
)
"--aws-secret-access-key", "some-invalid-value")
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)

View File

@ -312,8 +312,7 @@ func validateGroupsBackupCreateFlags(groups, cats []string) error {
return clues.New(
"requires one or more --" +
flags.GroupFN + " ids, or the wildcard --" +
flags.GroupFN + " *",
)
flags.GroupFN + " *")
}
msg := fmt.Sprintf(

View File

@ -187,9 +187,7 @@ func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd() {
assert.True(t,
strings.HasSuffix(
result,
fmt.Sprintf("Deleted OneDrive backup %s\n", string(suite.backupOp.Results.BackupID)),
),
)
fmt.Sprintf("Deleted OneDrive backup %s\n", string(suite.backupOp.Results.BackupID))))
// a follow-up details call should fail, due to the backup ID being deleted
cmd = cliTD.StubRootCmd(

View File

@ -198,15 +198,13 @@ func validateSharePointBackupCreateFlags(sites, weburls, cats []string) error {
return clues.New(
"requires one or more --" +
flags.SiteFN + " urls, or the wildcard --" +
flags.SiteFN + " *",
)
flags.SiteFN + " *")
}
for _, d := range cats {
if d != flags.DataLibraries && d != flags.DataPages {
return clues.New(
d + " is an unrecognized data type; either " + flags.DataLibraries + "or " + flags.DataPages,
)
d + " is an unrecognized data type; either " + flags.DataLibraries + "or " + flags.DataPages)
}
}

View File

@ -37,8 +37,7 @@ type NoBackupSharePointE2ESuite struct {
func TestNoBackupSharePointE2ESuite(t *testing.T) {
suite.Run(t, &NoBackupSharePointE2ESuite{Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs},
)})
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
}
func (suite *NoBackupSharePointE2ESuite) SetupSuite() {
@ -151,9 +150,7 @@ func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd() {
assert.True(t,
strings.HasSuffix(
result,
fmt.Sprintf("Deleted SharePoint backup %s\n", string(suite.backupOp.Results.BackupID)),
),
)
fmt.Sprintf("Deleted SharePoint backup %s\n", string(suite.backupOp.Results.BackupID))))
}
// moved out of the func above to make the linter happy

View File

@ -250,8 +250,7 @@ func validateTeamsBackupCreateFlags(teams, cats []string) error {
return clues.New(
"requires one or more --" +
flags.TeamFN + " ids, or the wildcard --" +
flags.TeamFN + " *",
)
flags.TeamFN + " *")
}
msg := fmt.Sprintf(

View File

@ -328,8 +328,7 @@ func (suite *ConfigSuite) TestReadFromFlags() {
vpr,
true,
false,
overrides,
)
overrides)
m365Config, _ := repoDetails.Account.M365Config()
s3Cfg, _ := repoDetails.Storage.S3Config()
@ -366,8 +365,7 @@ type ConfigIntegrationSuite struct {
func TestConfigIntegrationSuite(t *testing.T) {
suite.Run(t, &ConfigIntegrationSuite{Suite: tester.NewIntegrationSuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs},
)})
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
}
func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount() {

View File

@ -119,13 +119,11 @@ func configureStorage(
DoNotUseTLS: str.ParseBool(str.First(
overrides[storage.DoNotUseTLS],
strconv.FormatBool(s3Cfg.DoNotUseTLS),
"false",
)),
"false")),
DoNotVerifyTLS: str.ParseBool(str.First(
overrides[storage.DoNotVerifyTLS],
strconv.FormatBool(s3Cfg.DoNotVerifyTLS),
"false",
)),
"false")),
}
// compose the common config and credentials

View File

@ -93,8 +93,7 @@ func envGuide(cmd *cobra.Command, args []string) {
Info(ctx,
"\n--- Environment Variable Guide ---\n",
"As a best practice, Corso retrieves credentials and sensitive information from environment variables.\n ",
"\n",
)
"\n")
Table(ctx, toPrintable(corsoEVs))
Info(ctx, "\n")
Table(ctx, toPrintable(azureEVs))

View File

@ -28,8 +28,7 @@ type S3E2ESuite struct {
func TestS3E2ESuite(t *testing.T) {
suite.Run(t, &S3E2ESuite{Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs},
)})
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
}
func (suite *S3E2ESuite) TestInitS3Cmd() {
@ -113,8 +112,7 @@ func (suite *S3E2ESuite) TestInitMultipleTimes() {
"--config-file", configFP,
"--bucket", cfg.Bucket,
"--prefix", cfg.Prefix,
"--succeed-if-exists",
)
"--succeed-if-exists")
cli.BuildCommandTree(cmd)
// run the command
@ -214,8 +212,7 @@ func (suite *S3E2ESuite) TestConnectS3Cmd() {
"repo", "connect", "s3",
"--config-file", configFP,
"--bucket", test.bucketPrefix+cfg.Bucket,
"--prefix", cfg.Prefix,
)
"--prefix", cfg.Prefix)
cli.BuildCommandTree(cmd)
// run the command

View File

@ -168,8 +168,7 @@ func handleExchangeContactFactory(cmd *cobra.Command, args []string) error {
given+" "+sur,
sur+", "+given,
given, mid, sur,
"123-456-7890",
)
"123-456-7890")
},
control.DefaultOptions(),
errs,

View File

@ -199,8 +199,7 @@ func (c *Client) ObjectRetention(
return mode, retainUntil, clues.Wrap(err, fmt.Sprintf(
"getting object (key) %q (versionID) %q",
obj.Key,
obj.Version,
)).
obj.Version)).
With("object_key", obj.Key, "object_version", obj.Version).
OrNil()
}

View File

@ -36,8 +36,7 @@ func (suite *EventsIntegrationSuite) TestNewBus() {
storage.S3Config{
Bucket: "bckt",
Prefix: "prfx",
},
)
})
require.NoError(t, err, clues.ToCore(err))
a, err := account.NewAccount(
@ -48,8 +47,7 @@ func (suite *EventsIntegrationSuite) TestNewBus() {
AzureClientSecret: "secret",
},
AzureTenantID: "tid",
},
)
})
require.NoError(t, err, clues.ToCore(err))
b, err := events.NewBus(ctx, s, a.ID(), control.DefaultOptions())

View File

@ -348,8 +348,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup2,
assert.AnError,
testMail,
testUser1,
),
testUser1),
newManifestInfo(
testID1,
testT1,
@ -357,8 +356,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup1,
nil,
testMail,
testUser1,
),
testUser1),
},
expectedBaseReasons: map[int][]identity.Reasoner{
1: testUser1Mail,
@ -383,8 +381,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup2,
assert.AnError,
testMail,
testUser1,
),
testUser1),
newManifestInfo(
testID1,
testT1,
@ -392,8 +389,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup1,
nil,
testMail,
testUser1,
),
testUser1),
},
expectedBaseReasons: map[int][]identity.Reasoner{},
expectedAssistManifestReasons: map[int][]identity.Reasoner{
@ -424,8 +420,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup2,
nil,
testMail,
testUser1,
),
testUser1),
newManifestInfo(
testID1,
testT1,
@ -433,8 +428,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup1,
nil,
testMail,
testUser1,
),
testUser1),
},
expectedBaseReasons: map[int][]identity.Reasoner{
1: testUser1Mail,
@ -458,8 +452,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup2,
nil,
testMail,
testUser1,
),
testUser1),
newManifestInfo(
testID1,
testT1,
@ -467,8 +460,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup1,
nil,
testMail,
testUser1,
),
testUser1),
},
expectedBaseReasons: map[int][]identity.Reasoner{
1: testUser1Mail,
@ -496,8 +488,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testEvents,
testUser1,
testUser2,
testUser3,
),
testUser3),
},
expectedBaseReasons: map[int][]identity.Reasoner{
0: testUser1Mail,
@ -524,8 +515,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testEvents,
testUser1,
testUser2,
testUser3,
),
testUser3),
},
expectedBaseReasons: map[int][]identity.Reasoner{
0: testAllUsersAllCats,
@ -552,8 +542,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testEvents,
testUser1,
testUser2,
testUser3,
),
testUser3),
},
expectedBaseReasons: map[int][]identity.Reasoner{},
expectedAssistManifestReasons: map[int][]identity.Reasoner{
@ -586,8 +575,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testEvents,
testUser1,
testUser2,
testUser3,
),
testUser3),
newManifestInfo(
testID2,
testT2,
@ -597,8 +585,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testEvents,
testUser1,
testUser2,
testUser3,
),
testUser3),
},
expectedBaseReasons: map[int][]identity.Reasoner{
0: {
@ -641,8 +628,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
nil,
testEvents,
testUser1,
testUser2,
),
testUser2),
newManifestInfo(
testID2,
testT2,
@ -651,8 +637,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
nil,
testMail,
testUser1,
testUser2,
),
testUser2),
newManifestInfo(
testID1,
testT1,
@ -662,8 +647,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testMail,
testEvents,
testUser1,
testUser2,
),
testUser2),
},
expectedBaseReasons: map[int][]identity.Reasoner{
2: {
@ -728,8 +712,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup1,
nil,
testMail,
testUser1,
),
testUser1),
newManifestInfo(
testID2,
testT2,
@ -737,8 +720,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup2,
nil,
testMail,
testUser1,
),
testUser1),
},
expectedBaseReasons: map[int][]identity.Reasoner{
0: testUser1Mail,
@ -763,8 +745,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup1,
nil,
testMail,
testUser1,
),
testUser1),
newManifestInfo(
testID2,
testT2,
@ -772,8 +753,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup2,
nil,
testMail,
testUser1,
),
testUser1),
},
expectedBaseReasons: map[int][]identity.Reasoner{
1: testUser1Mail,
@ -798,8 +778,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup1,
nil,
testMail,
testUser1,
),
testUser1),
newManifestInfo(
testID2,
testT2,
@ -807,8 +786,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup2,
nil,
testMail,
testUser1,
),
testUser1),
},
expectedBaseReasons: map[int][]identity.Reasoner{},
expectedAssistManifestReasons: map[int][]identity.Reasoner{},
@ -829,8 +807,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup1,
nil,
testMail,
testUser1,
),
testUser1),
},
expectedBaseReasons: map[int][]identity.Reasoner{
0: testUser1Mail,
@ -855,8 +832,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup2,
nil,
testMail,
testUser1,
),
testUser1),
newManifestInfo(
testID1,
testT1,
@ -864,8 +840,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup1,
nil,
testMail,
testUser1,
),
testUser1),
},
expectedBaseReasons: map[int][]identity.Reasoner{
0: testUser1Mail,
@ -890,8 +865,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup4,
nil,
testMail,
testUser1,
),
testUser1),
newManifestInfo(
testID3,
testT3,
@ -899,8 +873,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup3,
nil,
testMail,
testUser1,
),
testUser1),
newManifestInfo(
testID2,
testT2,
@ -908,8 +881,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup2,
nil,
testMail,
testUser1,
),
testUser1),
newManifestInfo(
testID1,
testT1,
@ -917,8 +889,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup1,
nil,
testMail,
testUser1,
),
testUser1),
},
expectedBaseReasons: map[int][]identity.Reasoner{
2: testUser1Mail,
@ -960,8 +931,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup2,
nil,
testMail,
testUser1,
),
testUser1),
newManifestInfo(
testID1,
testT1,
@ -969,8 +939,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup1,
nil,
testMail,
testUser1,
),
testUser1),
},
expectedBaseReasons: map[int][]identity.Reasoner{
0: testUser1Mail,
@ -1001,8 +970,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup2,
nil,
testMail,
testUser1,
),
testUser1),
newManifestInfo(
testID1,
testT1,
@ -1010,8 +978,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup1,
nil,
testMail,
testUser1,
),
testUser1),
},
expectedBaseReasons: map[int][]identity.Reasoner{},
expectedAssistManifestReasons: map[int][]identity.Reasoner{
@ -1048,8 +1015,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
testBackup2,
nil,
testMail,
testUser1,
),
testUser1),
},
expectedBaseReasons: map[int][]identity.Reasoner{
0: testUser1Mail,
@ -1124,8 +1090,7 @@ func (suite *BaseFinderUnitSuite) TestFindBases_CustomTags() {
testMail,
testUser1,
"fnords",
"smarf",
),
"smarf"),
}
backupData := []backupInfo{
newBackupModel(testBackup1, true, true, false, nil, nil),
@ -1234,8 +1199,7 @@ func checkManifestEntriesMatch(
reasons,
found.Reasons,
"incorrect reasons for snapshot with ID %s",
found.ID,
)
found.ID)
}
}
@ -1276,7 +1240,6 @@ func checkBackupEntriesMatch(
reasons,
found.Reasons,
"incorrect reasons for snapshot with ID %s",
found.ID,
)
found.ID)
}
}

View File

@ -154,8 +154,7 @@ func cleanupOrphanedData(
ctx,
model.BackupSchema,
bup.ModelStoreID,
&bm,
); err != nil {
&bm); err != nil {
if !errors.Is(err, data.ErrNotFound) {
return clues.Wrap(err, "getting backup model").
With("search_backup_id", bup.ID)

View File

@ -137,8 +137,7 @@ func (w *conn) Initialize(
cfg.KopiaCfgDir,
bst,
cfg.CorsoPassphrase,
defaultCompressor,
)
defaultCompressor)
if err != nil {
return err
}
@ -171,8 +170,7 @@ func (w *conn) Connect(ctx context.Context, opts repository.Options) error {
cfg.KopiaCfgDir,
bst,
cfg.CorsoPassphrase,
defaultCompressor,
)
defaultCompressor)
}
func (w *conn) commonConnect(
@ -205,8 +203,7 @@ func (w *conn) commonConnect(
cfgFile,
bst,
password,
kopiaOpts,
); err != nil {
kopiaOpts); err != nil {
return clues.Wrap(err, "connecting to repo").WithClues(ctx)
}
@ -532,8 +529,7 @@ func persistRetentionConfigs(
if !opts.ParamsChanged() {
return clues.Wrap(
dr.FormatManager().SetParameters(ctx, mp, blobCfg, requiredFeatures),
"persisting storage config",
).WithClues(ctx).OrNil()
"persisting storage config").WithClues(ctx).OrNil()
}
// Both blob and maintenance changed. A DirectWriteSession is required to
@ -558,8 +554,7 @@ func persistRetentionConfigs(
return clues.Wrap(
dr.FormatManager().SetParameters(ctx, mp, blobCfg, requiredFeatures),
"storage config",
).WithClues(ctx).OrNil()
"storage config").WithClues(ctx).OrNil()
})
return clues.Wrap(err, "persisting config changes").WithClues(ctx).OrNil()

View File

@ -71,8 +71,7 @@ func TestWrapperIntegrationSuite(t *testing.T) {
suite.Run(t, &WrapperIntegrationSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{storeTD.AWSStorageCredEnvs},
),
[][]string{storeTD.AWSStorageCredEnvs}),
})
}
@ -298,26 +297,22 @@ func (suite *WrapperIntegrationSuite) TestConfigDefaultsSetOnInitAndNotOnConnect
require.Equal(
t,
defaultRetention,
p.RetentionPolicy,
)
p.RetentionPolicy)
assert.Equal(
t,
math.MaxInt,
p.RetentionPolicy.EffectiveKeepLatest().OrDefault(42),
)
p.RetentionPolicy.EffectiveKeepLatest().OrDefault(42))
},
checkFunc: func(t *testing.T, p *policy.Policy) {
t.Helper()
require.Equal(
t,
newRetention,
p.RetentionPolicy,
)
p.RetentionPolicy)
assert.Equal(
t,
42,
p.RetentionPolicy.EffectiveKeepLatest().OrDefault(42),
)
p.RetentionPolicy.EffectiveKeepLatest().OrDefault(42))
},
mutator: func(innerCtx context.Context, p *policy.Policy) error {
updateRetentionOnPolicy(newRetention, p)
@ -468,8 +463,7 @@ func TestConnRetentionIntegrationSuite(t *testing.T) {
suite.Run(t, &ConnRetentionIntegrationSuite{
Suite: tester.NewRetentionSuite(
t,
[][]string{storeTD.AWSStorageCredEnvs},
),
[][]string{storeTD.AWSStorageCredEnvs}),
})
}

View File

@ -126,30 +126,25 @@ func (suite *KopiaDataCollectionUnitSuite) TestReturnsStreams() {
&mockFile{
StreamingFile: virtualfs.StreamingFileFromReader(
encodeAsPath(files[0].uuid),
nil,
),
nil),
r: newBackupStreamReader(
serializationVersion,
io.NopCloser(bytes.NewReader(files[0].data)),
),
io.NopCloser(bytes.NewReader(files[0].data))),
size: int64(len(files[0].data) + versionSize),
},
&mockFile{
StreamingFile: virtualfs.StreamingFileFromReader(
encodeAsPath(files[1].uuid),
nil,
),
nil),
r: newBackupStreamReader(
serializationVersion,
io.NopCloser(bytes.NewReader(files[1].data)),
),
io.NopCloser(bytes.NewReader(files[1].data))),
size: int64(len(files[1].data) + versionSize),
},
&mockFile{
StreamingFile: virtualfs.StreamingFileFromReader(
encodeAsPath(fileOpenErrName),
nil,
),
nil),
openErr: assert.AnError,
},
virtualfs.NewStaticDirectory(encodeAsPath(notFileErrName), []fs.Entry{}),
@ -301,28 +296,23 @@ func (suite *KopiaDataCollectionUnitSuite) TestFetchItemByName() {
&mockFile{
StreamingFile: virtualfs.StreamingFileFromReader(
encodeAsPath(noErrFileName),
nil,
),
nil),
r: newBackupStreamReader(
serVersion,
io.NopCloser(bytes.NewReader([]byte(noErrFileData))),
),
io.NopCloser(bytes.NewReader([]byte(noErrFileData)))),
},
&mockFile{
StreamingFile: virtualfs.StreamingFileFromReader(
encodeAsPath(errFileName),
nil,
),
nil),
r: newBackupStreamReader(
serVersion,
errReader.ToReader(),
),
errReader.ToReader()),
},
&mockFile{
StreamingFile: virtualfs.StreamingFileFromReader(
encodeAsPath(errFileName2),
nil,
),
nil),
openErr: assert.AnError,
},
})

View File

@ -159,19 +159,16 @@ func (suite *MergeCollectionUnitSuite) TestFetchItemByName() {
&mockFile{
StreamingFile: virtualfs.StreamingFileFromReader(
encodeAsPath(fileName1),
nil,
),
nil),
r: newBackupStreamReader(
serializationVersion,
io.NopCloser(bytes.NewReader(fileData1)),
),
io.NopCloser(bytes.NewReader(fileData1))),
size: int64(len(fileData1) + versionSize),
},
&mockFile{
StreamingFile: virtualfs.StreamingFileFromReader(
encodeAsPath(fileOpenErrName),
nil,
),
nil),
openErr: assert.AnError,
},
})
@ -186,34 +183,28 @@ func (suite *MergeCollectionUnitSuite) TestFetchItemByName() {
&mockFile{
StreamingFile: virtualfs.StreamingFileFromReader(
encodeAsPath(fileName1),
nil,
),
nil),
r: newBackupStreamReader(
serializationVersion,
io.NopCloser(bytes.NewReader(fileData2)),
),
io.NopCloser(bytes.NewReader(fileData2))),
size: int64(len(fileData2) + versionSize),
},
&mockFile{
StreamingFile: virtualfs.StreamingFileFromReader(
encodeAsPath(fileName2),
nil,
),
nil),
r: newBackupStreamReader(
serializationVersion,
io.NopCloser(bytes.NewReader(fileData1)),
),
io.NopCloser(bytes.NewReader(fileData1))),
size: int64(len(fileData1) + versionSize),
},
&mockFile{
StreamingFile: virtualfs.StreamingFileFromReader(
encodeAsPath(fileOpenErrName),
nil,
),
nil),
r: newBackupStreamReader(
serializationVersion,
io.NopCloser(bytes.NewReader(fileData3)),
),
io.NopCloser(bytes.NewReader(fileData3))),
size: int64(len(fileData3) + versionSize),
},
})

View File

@ -472,8 +472,7 @@ func (ms *ModelStore) Update(
}
return nil
},
)
})
if err != nil {
return clues.Wrap(err, "updating model").WithClues(ctx)
}

View File

@ -84,8 +84,7 @@ func TestModelStoreIntegrationSuite(t *testing.T) {
suite.Run(t, &ModelStoreIntegrationSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{storeTD.AWSStorageCredEnvs},
),
[][]string{storeTD.AWSStorageCredEnvs}),
})
}
@ -786,8 +785,7 @@ func TestModelStoreRegressionSuite(t *testing.T) {
suite.Run(t, &ModelStoreRegressionSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{storeTD.AWSStorageCredEnvs},
),
[][]string{storeTD.AWSStorageCredEnvs}),
})
}
@ -840,8 +838,7 @@ func (suite *ModelStoreRegressionSuite) TestFailDuringWriteSessionHasNoVisibleEf
newID = foo.ModelStoreID
return assert.AnError
},
)
})
assert.ErrorIs(t, err, assert.AnError, clues.ToCore(err))
@ -957,8 +954,7 @@ func (suite *ModelStoreRegressionSuite) TestMultipleConfigs() {
ctx,
model.BackupSchema,
backupModel.ModelStoreID,
&gotBackup,
)
&gotBackup)
assert.Error(t, err, clues.ToCore(err))
// Old instance should still be able to access added model.
@ -967,7 +963,6 @@ func (suite *ModelStoreRegressionSuite) TestMultipleConfigs() {
ctx,
model.BackupSchema,
backupModel.ModelStoreID,
&gotBackup,
)
&gotBackup)
assert.NoError(t, err, clues.ToCore(err))
}

View File

@ -42,8 +42,7 @@ func (r *Opts) AsConfigs(
if err := maintenance.CheckExtendRetention(ctx, r.blobCfg, &r.params); err != nil {
return format.BlobStorageConfiguration{}, maintenance.Params{}, clues.Wrap(
err,
"invalid retention config",
).WithClues(ctx)
"invalid retention config").WithClues(ctx)
}
}
@ -63,8 +62,7 @@ func (r *Opts) Set(opts repository.Retention) error {
return clues.Wrap(
r.setBlobConfigParams(opts.Mode, opts.Duration),
"setting mode or duration",
).OrNil()
"setting mode or duration").OrNil()
}
func (r *Opts) setMaintenanceParams(extend *bool) {

View File

@ -585,8 +585,7 @@ func getStreamItemFunc(
baseDir,
seen,
globalExcludeSet,
progress,
); err != nil {
progress); err != nil {
return clues.Wrap(err, "streaming base snapshot entries")
}
@ -633,9 +632,7 @@ func buildKopiaDirs(
dir.collection,
dir.baseDir,
globalExcludeSet,
progress,
),
), nil
progress)), nil
}
type treeMap struct {
@ -1144,8 +1141,7 @@ func inflateBaseTree(
newSubtreePath.Dir(),
subtreeDir,
roots,
stats,
); err != nil {
stats); err != nil {
return clues.Wrap(err, "traversing base snapshot").WithClues(ictx)
}

View File

@ -50,16 +50,14 @@ func baseWithChildren(
if len(basic) == 1 {
return virtualfs.NewStaticDirectory(
encodeElements(basic[0])[0],
children,
)
children)
}
return virtualfs.NewStaticDirectory(
encodeElements(basic[0])[0],
[]fs.Entry{
baseWithChildren(basic[1:], children),
},
)
})
}
type expectedNode struct {
@ -279,8 +277,7 @@ func (suite *VersionReadersUnitSuite) TestWriteAndRead() {
expectedVersion: test.readVersion,
ReadCloser: newBackupStreamReader(
test.writeVersion,
io.NopCloser(baseReader),
),
io.NopCloser(baseReader)),
}
defer reversible.Close()
@ -635,8 +632,7 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFileBaseItemDoesntBuildHierarch
prevPath := makePath(
suite.T(),
[]string{testTenant, service, testUser, category, testInboxDir, testFileName2},
true,
)
true)
// Location is sourced from collections now so we don't need to check it here.
expectedToMerge := []expectedRef{
@ -1151,12 +1147,9 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
virtualfs.StreamingFileWithModTimeFromReader(
encodeElements(testFileName)[0],
time.Time{},
io.NopCloser(bytes.NewReader(testFileData)),
),
},
),
},
)
io.NopCloser(bytes.NewReader(testFileData))),
}),
})
}
table := []struct {
@ -1185,8 +1178,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
name: testInboxID,
children: []*expectedNode{},
},
},
),
}),
},
{
name: "AddsNewItems",
@ -1221,8 +1213,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
},
},
},
},
),
}),
},
{
name: "SkipsUpdatedItems",
@ -1253,8 +1244,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
},
},
},
},
),
}),
},
{
name: "DeleteAndNew",
@ -1288,8 +1278,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
},
},
},
},
),
}),
},
{
name: "MovedAndNew",
@ -1332,8 +1321,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
},
},
},
},
),
}),
},
{
name: "NewDoesntMerge",
@ -1363,8 +1351,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
},
},
},
},
),
}),
},
}
@ -1478,37 +1465,29 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
virtualfs.StreamingFileWithModTimeFromReader(
encodeElements(inboxFileName1)[0],
time.Time{},
io.NopCloser(bytes.NewReader(inboxFileData1)),
),
io.NopCloser(bytes.NewReader(inboxFileData1))),
virtualfs.NewStaticDirectory(
encodeElements(personalID)[0],
[]fs.Entry{
virtualfs.StreamingFileWithModTimeFromReader(
encodeElements(personalFileName1)[0],
time.Time{},
io.NopCloser(bytes.NewReader(testFileData)),
),
io.NopCloser(bytes.NewReader(testFileData))),
virtualfs.StreamingFileWithModTimeFromReader(
encodeElements(personalFileName2)[0],
time.Time{},
io.NopCloser(bytes.NewReader(testFileData2)),
),
},
),
io.NopCloser(bytes.NewReader(testFileData2))),
}),
virtualfs.NewStaticDirectory(
encodeElements(workID)[0],
[]fs.Entry{
virtualfs.StreamingFileWithModTimeFromReader(
encodeElements(workFileName1)[0],
time.Time{},
io.NopCloser(bytes.NewReader(testFileData3)),
),
},
),
},
),
},
)
io.NopCloser(bytes.NewReader(testFileData3))),
}),
}),
})
}
table := []struct {
@ -1558,8 +1537,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
},
},
},
},
),
}),
},
{
name: "MovesSubtree",
@ -1618,8 +1596,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
},
},
},
},
),
}),
},
{
name: "MovesChildAfterAncestorMove",
@ -1690,8 +1667,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
},
},
},
},
),
}),
},
{
name: "MovesChildAfterAncestorDelete",
@ -1732,8 +1708,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
},
},
},
},
),
}),
},
{
name: "ReplaceDeletedDirectory",
@ -1773,8 +1748,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
},
},
},
},
),
}),
},
{
name: "ReplaceDeletedDirectoryWithNew",
@ -1824,8 +1798,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
},
},
},
},
),
}),
},
{
name: "ReplaceDeletedSubtreeWithNew",
@ -1858,8 +1831,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
},
},
},
},
),
}),
},
{
name: "ReplaceMovedDirectory",
@ -1919,8 +1891,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
},
},
},
},
),
}),
},
{
name: "MoveDirectoryAndMergeItems",
@ -1986,8 +1957,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
},
},
},
},
),
}),
},
{
name: "MoveParentDeleteFileNoMergeSubtreeMerge",
@ -2075,8 +2045,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
},
},
},
},
),
}),
},
{
name: "NoMoveParentDeleteFileNoMergeSubtreeMerge",
@ -2143,8 +2112,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
},
},
},
},
),
}),
},
{
// This could happen if a subfolder is moved out of the parent, the parent
@ -2197,8 +2165,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
},
},
},
},
),
}),
},
{
// This could happen if a subfolder is moved out of the parent, the parent
@ -2243,8 +2210,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
},
},
},
},
),
}),
},
{
// This could happen if a subfolder is moved out of the parent, the parent
@ -2287,8 +2253,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
},
},
},
},
),
}),
},
}
@ -2377,22 +2342,17 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSkipsDeletedSubtre
virtualfs.StreamingFileWithModTimeFromReader(
encodeElements(testFileName)[0],
time.Time{},
io.NopCloser(bytes.NewReader(testFileData)),
),
},
),
io.NopCloser(bytes.NewReader(testFileData))),
}),
virtualfs.NewStaticDirectory(
encodeElements(workDir)[0],
[]fs.Entry{
virtualfs.StreamingFileWithModTimeFromReader(
encodeElements(testFileName2)[0],
time.Time{},
io.NopCloser(bytes.NewReader(testFileData2)),
),
},
),
},
),
io.NopCloser(bytes.NewReader(testFileData2))),
}),
}),
virtualfs.NewStaticDirectory(
encodeElements(testArchiveID)[0],
[]fs.Entry{
@ -2402,24 +2362,18 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSkipsDeletedSubtre
virtualfs.StreamingFileWithModTimeFromReader(
encodeElements(testFileName3)[0],
time.Time{},
io.NopCloser(bytes.NewReader(testFileData3)),
),
},
),
io.NopCloser(bytes.NewReader(testFileData3))),
}),
virtualfs.NewStaticDirectory(
encodeElements(workDir)[0],
[]fs.Entry{
virtualfs.StreamingFileWithModTimeFromReader(
encodeElements(testFileName4)[0],
time.Time{},
io.NopCloser(bytes.NewReader(testFileData4)),
),
},
),
},
),
},
)
io.NopCloser(bytes.NewReader(testFileData4))),
}),
}),
})
}
expected := expectedTreeWithChildren(
@ -2453,8 +2407,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSkipsDeletedSubtre
},
},
},
},
)
})
progress := &corsoProgress{
ctx: ctx,
@ -2531,10 +2484,8 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_HandleEmptyBase()
virtualfs.StreamingFileWithModTimeFromReader(
encodeElements(testFileName)[0],
time.Time{},
io.NopCloser(bytes.NewReader(testFileData)),
),
},
)
io.NopCloser(bytes.NewReader(testFileData))),
})
}
// Metadata subtree doesn't appear because we don't select it as one of the
@ -2556,8 +2507,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_HandleEmptyBase()
},
},
},
},
)
})
progress := &corsoProgress{
ctx: ctx,
@ -2671,12 +2621,9 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsCorrectSubt
virtualfs.StreamingFileWithModTimeFromReader(
encodeElements(inboxFileName1)[0],
time.Time{},
io.NopCloser(bytes.NewReader(inboxFileData1)),
),
},
),
},
),
io.NopCloser(bytes.NewReader(inboxFileData1))),
}),
}),
virtualfs.NewStaticDirectory(
encodeElements(path.ContactsCategory.String())[0],
[]fs.Entry{
@ -2686,14 +2633,10 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsCorrectSubt
virtualfs.StreamingFileWithModTimeFromReader(
encodeElements(contactsFileName1)[0],
time.Time{},
io.NopCloser(bytes.NewReader(contactsFileData1)),
),
},
),
},
),
},
)
io.NopCloser(bytes.NewReader(contactsFileData1))),
}),
}),
})
}
// Must be a function that returns a new instance each time as StreamingFile
@ -2728,13 +2671,9 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsCorrectSubt
// Wrap with a backup reader so it gets the version injected.
newBackupStreamReader(
serializationVersion,
io.NopCloser(bytes.NewReader(inboxFileData1v2)),
),
),
},
),
},
),
io.NopCloser(bytes.NewReader(inboxFileData1v2)))),
}),
}),
virtualfs.NewStaticDirectory(
encodeElements(path.EventsCategory.String())[0],
[]fs.Entry{
@ -2744,14 +2683,10 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsCorrectSubt
virtualfs.StreamingFileWithModTimeFromReader(
encodeElements(eventsFileName1)[0],
time.Time{},
io.NopCloser(bytes.NewReader(eventsFileData1)),
),
},
),
},
),
},
)
io.NopCloser(bytes.NewReader(eventsFileData1))),
}),
}),
})
}
// Check the following:
@ -2813,8 +2748,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsCorrectSubt
},
},
},
},
)
})
progress := &corsoProgress{
ctx: ctx,
@ -2931,8 +2865,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsMigrateSubt
io.NopCloser(bytes.NewReader(contactsFileData1)))),
}),
}),
},
)
})
}
// Check the following:
@ -2982,8 +2915,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsMigrateSubt
},
},
},
},
)
})
progress := &corsoProgress{
ctx: ctx,

View File

@ -320,8 +320,7 @@ func (w Wrapper) makeSnapshotWithRoot(
}
return nil
},
)
})
// Telling kopia to always flush may hide other errors if it fails while
// flushing the write session (hence logging above).
if err != nil {

View File

@ -140,10 +140,8 @@ func (suite *KopiaUnitSuite) SetupSuite() {
path.ExchangeService.String(),
testUser,
path.EmailCategory.String(),
testInboxDir,
),
false,
)
testInboxDir),
false)
require.NoError(suite.T(), err, clues.ToCore(err))
suite.testPath = tmp
@ -174,8 +172,7 @@ func TestBasicKopiaIntegrationSuite(t *testing.T) {
suite.Run(t, &BasicKopiaIntegrationSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{storeTD.AWSStorageCredEnvs},
),
[][]string{storeTD.AWSStorageCredEnvs}),
})
}
@ -398,8 +395,7 @@ func TestRetentionIntegrationSuite(t *testing.T) {
suite.Run(t, &RetentionIntegrationSuite{
Suite: tester.NewRetentionSuite(
t,
[][]string{storeTD.AWSStorageCredEnvs},
),
[][]string{storeTD.AWSStorageCredEnvs}),
})
}
@ -717,8 +713,7 @@ func TestKopiaIntegrationSuite(t *testing.T) {
suite.Run(t, &KopiaIntegrationSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{storeTD.AWSStorageCredEnvs},
),
[][]string{storeTD.AWSStorageCredEnvs}),
})
}
@ -813,14 +808,12 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
testTenant,
suite.storePath1.ResourceOwner(),
suite.storePath1.Service(),
suite.storePath1.Category(),
),
suite.storePath1.Category()),
NewReason(
testTenant,
suite.storePath2.ResourceOwner(),
suite.storePath2.Service(),
suite.storePath2.Category(),
),
suite.storePath2.Category()),
}
expectedTags := map[string]string{}
@ -929,8 +922,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
t,
details,
// 47 file and 2 folder entries.
test.expectedUploadedFiles+test.expectedCachedFiles+2,
)
test.expectedUploadedFiles+test.expectedCachedFiles+2)
}
checkSnapshotTags(
@ -938,14 +930,12 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
ctx,
suite.w.c,
expectedTags,
stats.SnapshotID,
)
stats.SnapshotID)
snap, err := snapshot.LoadSnapshot(
ctx,
suite.w.c,
manifest.ID(stats.SnapshotID),
)
manifest.ID(stats.SnapshotID))
require.NoError(t, err, clues.ToCore(err))
res = ManifestEntry{
@ -1206,8 +1196,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_NoDetailsForMeta() {
assert.Len(
t,
details,
test.numDeetsEntries+1,
)
test.numDeetsEntries+1)
for _, entry := range details {
if test.hasMetaDeets {
@ -1226,8 +1215,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_NoDetailsForMeta() {
suite.ctx,
suite.w.c,
expectedTags,
stats.SnapshotID,
)
stats.SnapshotID)
snap, err := snapshot.LoadSnapshot(
suite.ctx,
@ -1239,8 +1227,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_NoDetailsForMeta() {
ManifestEntry{
Manifest: snap,
Reasons: reasons,
},
)
})
})
}
}
@ -1464,8 +1451,7 @@ func TestKopiaSimpleRepoIntegrationSuite(t *testing.T) {
suite.Run(t, &KopiaSimpleRepoIntegrationSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{storeTD.AWSStorageCredEnvs},
),
[][]string{storeTD.AWSStorageCredEnvs}),
})
}
@ -1578,8 +1564,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) SetupTest() {
ItemID: item.itemPath.Item(),
Reader: io.NopCloser(bytes.NewReader(item.data)),
ItemInfo: exchMock.StubMailInfo(),
},
)
})
}
collections = append(collections, collection)
@ -1721,8 +1706,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
ManifestEntry{
Manifest: man,
Reasons: []identity.Reasoner{r},
},
),
}),
test.cols(),
excluded,
nil,

View File

@ -351,8 +351,7 @@ func TestSPCollectionIntgSuite(t *testing.T) {
suite.Run(t, &SPCollectionIntgSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tconfig.M365AcctCredEnvs},
),
[][]string{tconfig.M365AcctCredEnvs}),
})
}

View File

@ -728,8 +728,7 @@ func (c *Collections) UpdateCollections(
isFolder,
excluded,
itemCollection,
invalidPrevDelta,
); err != nil {
invalidPrevDelta); err != nil {
return clues.Stack(err).WithClues(ictx)
}

View File

@ -519,8 +519,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
"subfolder",
true,
false,
false,
),
false),
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
},
inputFolderMap: map[string]string{
@ -816,16 +815,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.DeltaURLsFileName,
map[string]string{driveID1: deltaURL1},
),
map[string]string{driveID1: deltaURL1}),
graph.NewMetadataEntry(
graph.PreviousPathFileName,
map[string]map[string]string{
driveID1: {
folderID1: path1,
},
},
),
}),
}
},
},
@ -847,8 +844,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.DeltaURLsFileName,
map[string]string{driveID1: deltaURL1},
),
map[string]string{driveID1: deltaURL1}),
}
},
},
@ -868,8 +864,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
driveID1: {
folderID1: path1,
},
},
),
}),
}
},
},
@ -892,14 +887,12 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.DeltaURLsFileName,
map[string]string{driveID1: deltaURL1},
),
map[string]string{driveID1: deltaURL1}),
graph.NewMetadataEntry(
graph.PreviousPathFileName,
map[string]map[string]string{
driveID1: {},
},
),
}),
}
},
},
@ -920,16 +913,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
graph.DeltaURLsFileName,
map[string]string{
driveID1: "",
},
),
}),
graph.NewMetadataEntry(
graph.PreviousPathFileName,
map[string]map[string]string{
driveID1: {
folderID1: path1,
},
},
),
}),
}
},
},
@ -949,32 +940,28 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.DeltaURLsFileName,
map[string]string{driveID1: deltaURL1},
),
map[string]string{driveID1: deltaURL1}),
graph.NewMetadataEntry(
graph.PreviousPathFileName,
map[string]map[string]string{
driveID1: {
folderID1: path1,
},
},
),
}),
}
},
func() []graph.MetadataCollectionEntry {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.DeltaURLsFileName,
map[string]string{driveID2: deltaURL2},
),
map[string]string{driveID2: deltaURL2}),
graph.NewMetadataEntry(
graph.PreviousPathFileName,
map[string]map[string]string{
driveID2: {
folderID2: path2,
},
},
),
}),
}
},
},
@ -1002,8 +989,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.PreviousPathFileName,
map[string]string{driveID1: deltaURL1},
),
map[string]string{driveID1: deltaURL1}),
}
},
},
@ -1019,20 +1005,17 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.DeltaURLsFileName,
map[string]string{driveID1: deltaURL1},
),
map[string]string{driveID1: deltaURL1}),
graph.NewMetadataEntry(
graph.PreviousPathFileName,
map[string]map[string]string{
driveID1: {
folderID1: path1,
},
},
),
}),
graph.NewMetadataEntry(
"foo",
map[string]string{driveID1: deltaURL1},
),
map[string]string{driveID1: deltaURL1}),
}
},
},
@ -1054,16 +1037,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.DeltaURLsFileName,
map[string]string{driveID1: deltaURL1},
),
map[string]string{driveID1: deltaURL1}),
graph.NewMetadataEntry(
graph.PreviousPathFileName,
map[string]map[string]string{
driveID1: {
folderID1: path1,
},
},
),
}),
}
},
func() []graph.MetadataCollectionEntry {
@ -1074,8 +1055,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
driveID1: {
folderID2: path2,
},
},
),
}),
}
},
},
@ -1091,24 +1071,21 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.DeltaURLsFileName,
map[string]string{driveID1: deltaURL1},
),
map[string]string{driveID1: deltaURL1}),
graph.NewMetadataEntry(
graph.PreviousPathFileName,
map[string]map[string]string{
driveID1: {
folderID1: path1,
},
},
),
}),
}
},
func() []graph.MetadataCollectionEntry {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.DeltaURLsFileName,
map[string]string{driveID1: deltaURL2},
),
map[string]string{driveID1: deltaURL2}),
}
},
},
@ -2313,8 +2290,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
graph.PreviousPathFileName,
test.prevFolderPaths),
},
func(*support.ControllerOperationStatus) {},
)
func(*support.ControllerOperationStatus) {})
assert.NoError(t, err, "creating metadata collection", clues.ToCore(err))
prevMetadata := []data.RestoreCollection{data.NoFetchRestoreCollection{Collection: mc}}

View File

@ -82,8 +82,7 @@ func (h groupBackupHandler) CanonicalPath(
path.LibrariesCategory,
false,
odConsts.SitesPathDir,
h.siteID,
)
h.siteID)
}
func (h groupBackupHandler) SitePathPrefix(tenantID string) (path.Path, error) {

View File

@ -249,8 +249,7 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
Err: nil,
},
},
tooManyRetries...,
),
tooManyRetries...),
retry: true,
expectedErr: assert.Error,
expectedResults: nil,

View File

@ -381,8 +381,7 @@ func includeContainer(
logger.Ctx(ctx).With(
"included", ok,
"scope", scope,
"matches_input", directory,
).Debug("backup folder selection filter")
"matches_input", directory).Debug("backup folder selection filter")
return dirPath, loc, ok
}

View File

@ -309,8 +309,7 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
coll, err := graph.MakeMetadataCollection(
pathPrefix,
entries,
func(cos *support.ControllerOperationStatus) {},
)
func(cos *support.ControllerOperationStatus) {})
require.NoError(t, err, clues.ToCore(err))
cdps, canUsePreviousBackup, err := ParseMetadataCollections(ctx, []data.RestoreCollection{
@ -446,8 +445,7 @@ func (suite *BackupIntgSuite) TestMailFetch() {
name: "Folder Iterative Check Mail",
scope: selectors.NewExchangeBackup(users).MailFolders(
[]string{api.MailInbox},
selectors.PrefixMatch(),
)[0],
selectors.PrefixMatch())[0],
folderNames: map[string]struct{}{
api.MailInbox: {},
},
@ -457,8 +455,7 @@ func (suite *BackupIntgSuite) TestMailFetch() {
name: "Folder Iterative Check Mail Non-Delta",
scope: selectors.NewExchangeBackup(users).MailFolders(
[]string{api.MailInbox},
selectors.PrefixMatch(),
)[0],
selectors.PrefixMatch())[0],
folderNames: map[string]struct{}{
api.MailInbox: {},
},
@ -533,22 +530,19 @@ func (suite *BackupIntgSuite) TestDelta() {
name: "Mail",
scope: selectors.NewExchangeBackup(users).MailFolders(
[]string{api.MailInbox},
selectors.PrefixMatch(),
)[0],
selectors.PrefixMatch())[0],
},
{
name: "Contacts",
scope: selectors.NewExchangeBackup(users).ContactFolders(
[]string{api.DefaultContacts},
selectors.PrefixMatch(),
)[0],
selectors.PrefixMatch())[0],
},
{
name: "Events",
scope: selectors.NewExchangeBackup(users).EventCalendars(
[]string{api.DefaultCalendar},
selectors.PrefixMatch(),
)[0],
selectors.PrefixMatch())[0],
},
}
for _, test := range tests {
@ -832,16 +826,14 @@ func (suite *BackupIntgSuite) TestEventsSerializationRegression() {
expected: calID,
scope: selectors.NewExchangeBackup(users).EventCalendars(
[]string{api.DefaultCalendar},
selectors.PrefixMatch(),
)[0],
selectors.PrefixMatch())[0],
},
{
name: "Birthday Calendar",
expected: bdayID,
scope: selectors.NewExchangeBackup(users).EventCalendars(
[]string{"Birthdays"},
selectors.PrefixMatch(),
)[0],
selectors.PrefixMatch())[0],
},
}
@ -1878,8 +1870,7 @@ func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_i
displayName: strPtr("prev"),
p: path.Builder{}.Append("2", "prev"),
l: path.Builder{}.Append("2", "prev"),
},
),
}),
dps: DeltaPaths{
"1": DeltaPath{
Delta: "old_delta_url",
@ -1977,8 +1968,7 @@ func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_i
displayName: strPtr("moved"),
p: path.Builder{}.Append("4", "moved"),
l: path.Builder{}.Append("4", "moved"),
},
),
}),
dps: DeltaPaths{
"2": DeltaPath{
Delta: "old_delta_url",

View File

@ -187,8 +187,7 @@ func restoreEvent(
destinationID,
ptr.Val(item.GetId()),
event,
errs,
)
errs)
if err != nil {
return nil, clues.Stack(err)
}

View File

@ -38,8 +38,7 @@ func TestMailFolderCacheIntegrationSuite(t *testing.T) {
suite.Run(t, &MailFolderCacheIntegrationSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tconfig.M365AcctCredEnvs},
),
[][]string{tconfig.M365AcctCredEnvs}),
})
}

View File

@ -232,8 +232,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
name: "Test Mail: Hydrated Item Attachment Mail",
bytes: exchMock.MessageWithNestedItemAttachmentMail(t,
exchMock.MessageBytes("Basic Item Attachment"),
"Mail Item Attachment",
),
"Mail Item Attachment"),
category: path.EmailCategory,
destination: func(t *testing.T, ctx context.Context) string {
folderName := testdata.DefaultRestoreConfig("mailbasicattch").Location
@ -248,8 +247,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
name: "Test Mail: Hydrated Item Attachment Mail One Attach",
bytes: exchMock.MessageWithNestedItemAttachmentMail(t,
exchMock.MessageWithDirectAttachment("Item Attachment Included"),
"Mail Item Attachment",
),
"Mail Item Attachment"),
category: path.EmailCategory,
destination: func(t *testing.T, ctx context.Context) string {
folderName := testdata.DefaultRestoreConfig("mailnestattch").Location
@ -264,8 +262,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
name: "Test Mail: Item Attachment_Contact",
bytes: exchMock.MessageWithNestedItemAttachmentContact(t,
exchMock.ContactBytes("Victor"),
"Contact Item Attachment",
),
"Contact Item Attachment"),
category: path.EmailCategory,
destination: func(t *testing.T, ctx context.Context) string {
folderName := testdata.DefaultRestoreConfig("mailcontactattch").Location

View File

@ -54,8 +54,7 @@ func TestSharePointCollectionSuite(t *testing.T) {
suite.Run(t, &SharePointCollectionSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tconfig.M365AcctCredEnvs},
),
[][]string{tconfig.M365AcctCredEnvs}),
})
}

View File

@ -40,8 +40,7 @@ func TestListsUnitSuite(t *testing.T) {
suite.Run(t, &ListsUnitSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tconfig.M365AcctCredEnvs},
),
[][]string{tconfig.M365AcctCredEnvs}),
})
}

View File

@ -355,8 +355,7 @@ func TestControllerIntegrationSuite(t *testing.T) {
suite.Run(t, &ControllerIntegrationSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tconfig.M365AcctCredEnvs},
),
[][]string{tconfig.M365AcctCredEnvs}),
})
}
@ -732,15 +731,13 @@ func (suite *ControllerIntegrationSuite) TestRestoreAndBackup_core() {
{
Name: "someencodeditemID",
Data: exchMock.MessageWithDirectAttachment(
subjectText + "-1",
),
subjectText + "-1"),
LookupKey: subjectText + "-1",
},
{
Name: "someencodeditemID2",
Data: exchMock.MessageWithTwoAttachments(
subjectText + "-2",
),
subjectText + "-2"),
LookupKey: subjectText + "-2",
},
},
@ -760,8 +757,7 @@ func (suite *ControllerIntegrationSuite) TestRestoreAndBackup_core() {
Data: exchMock.MessageWithBodyBytes(
subjectText+"-1",
bodyText+" 1.",
bodyText+" 1.",
),
bodyText+" 1."),
LookupKey: subjectText + "-1",
},
},
@ -775,8 +771,7 @@ func (suite *ControllerIntegrationSuite) TestRestoreAndBackup_core() {
Data: exchMock.MessageWithBodyBytes(
subjectText+"-2",
bodyText+" 2.",
bodyText+" 2.",
),
bodyText+" 2."),
LookupKey: subjectText + "-2",
},
{
@ -784,8 +779,7 @@ func (suite *ControllerIntegrationSuite) TestRestoreAndBackup_core() {
Data: exchMock.MessageWithBodyBytes(
subjectText+"-3",
bodyText+" 3.",
bodyText+" 3.",
),
bodyText+" 3."),
LookupKey: subjectText + "-3",
},
},
@ -799,8 +793,7 @@ func (suite *ControllerIntegrationSuite) TestRestoreAndBackup_core() {
Data: exchMock.MessageWithBodyBytes(
subjectText+"-4",
bodyText+" 4.",
bodyText+" 4.",
),
bodyText+" 4."),
LookupKey: subjectText + "-4",
},
},
@ -814,8 +807,7 @@ func (suite *ControllerIntegrationSuite) TestRestoreAndBackup_core() {
Data: exchMock.MessageWithBodyBytes(
subjectText+"-5",
bodyText+" 5.",
bodyText+" 5.",
),
bodyText+" 5."),
LookupKey: subjectText + "-5",
},
},
@ -1068,8 +1060,7 @@ func (suite *ControllerIntegrationSuite) TestMultiFolderBackupDifferentNames() {
suite.user,
restoreCfg,
[]stub.ColInfo{collection},
version.Backup,
)
version.Backup)
require.NoError(t, err)
allItems += totalItems
@ -1082,8 +1073,7 @@ func (suite *ControllerIntegrationSuite) TestMultiFolderBackupDifferentNames() {
"Restoring %v/%v collections to %s\n",
i+1,
len(test.collections),
restoreCfg.Location,
)
restoreCfg.Location)
restoreCtrl := newController(ctx, t, path.ExchangeService)

View File

@ -114,8 +114,7 @@ func NewItemPagesItemCanvasLayoutHorizontalSectionsHorizontalSectionItemRequestB
return NewItemPagesItemCanvasLayoutHorizontalSectionsHorizontalSectionItemRequestBuilderInternal(
urlParams,
requestAdapter,
)
requestAdapter)
}
// CreateDeleteRequestInformation delete navigation property horizontalSections for sites

View File

@ -119,8 +119,7 @@ func IsErrDeletedInFlight(err error) bool {
err,
errorItemNotFound,
itemNotFound,
syncFolderNotFound,
) {
syncFolderNotFound) {
return true
}

View File

@ -141,8 +141,7 @@ func (md MetadataCollection) Items(
Successes: len(md.items),
Bytes: totalBytes,
},
md.fullPath.Folder(false),
)
md.fullPath.Folder(false))
md.statusUpdater(status)
}()

View File

@ -61,8 +61,7 @@ func (suite *MetadataCollectionUnitSuite) TestItems() {
t,
len(itemNames),
len(itemData),
"Requires same number of items and data",
)
"Requires same number of items and data")
items := []MetadataItem{}
@ -85,8 +84,7 @@ func (suite *MetadataCollectionUnitSuite) TestItems() {
func(c *support.ControllerOperationStatus) {
assert.Equal(t, len(itemNames), c.Metrics.Objects)
assert.Equal(t, len(itemNames), c.Metrics.Successes)
},
)
})
gotData := [][]byte{}
gotNames := []string{}

View File

@ -142,8 +142,7 @@ func GetAuth(tenant string, client string, secret string) (*kauth.AzureIdentityA
auth, err := kauth.NewAzureIdentityAuthenticationProviderWithScopes(
cred,
[]string{"https://graph.microsoft.com/.default"},
)
[]string{"https://graph.microsoft.com/.default"})
if err != nil {
return nil, clues.Wrap(err, "creating azure authentication")
}

View File

@ -162,8 +162,7 @@ func recipientEqual(
// Don't compare names as M365 will override the name if the address is known.
return reflect.DeepEqual(
ptr.Val(expected.GetEmailAddress().GetAddress()),
ptr.Val(got.GetEmailAddress().GetAddress()),
)
ptr.Val(got.GetEmailAddress().GetAddress()))
}
func checkMessage(
@ -222,8 +221,7 @@ func checkMessage(
t,
ptr.Val(expected.GetIsDeliveryReceiptRequested()),
ptr.Val(got.GetIsDeliveryReceiptRequested()),
"IsDeliverReceiptRequested",
)
"IsDeliverReceiptRequested")
assert.Equal(t, ptr.Val(expected.GetIsDraft()), ptr.Val(got.GetIsDraft()), "IsDraft")
@ -542,8 +540,7 @@ func checkEvent(
t,
ptr.Val(expected.GetReminderMinutesBeforeStart()),
ptr.Val(got.GetReminderMinutesBeforeStart()),
"ReminderMinutesBeforeStart",
)
"ReminderMinutesBeforeStart")
assert.Equal(
t,
@ -777,8 +774,7 @@ func compareDriveItem(
t,
expectedData,
"unexpected metadata file with name %s",
name,
) {
name) {
return true
}
@ -1057,8 +1053,7 @@ func makeExchangeBackupSel(
toInclude = append(toInclude, builder(
[]string{d.dest},
selectors.PrefixMatch(),
))
selectors.PrefixMatch()))
}
}
@ -1084,8 +1079,7 @@ func makeOneDriveBackupSel(
toInclude = append(toInclude, sel.Folders(
[]string{d.dest},
selectors.PrefixMatch(),
))
selectors.PrefixMatch()))
}
sel := selectors.NewOneDriveBackup(maps.Keys(resourceOwners))
@ -1115,8 +1109,7 @@ func makeSharePointBackupSel(
toInclude = append(toInclude, sel.LibraryFolders(
[]string{d.dest},
selectors.PrefixMatch(),
))
selectors.PrefixMatch()))
}
}

View File

@ -81,8 +81,7 @@ func GetMailboxInfo(
mi.ErrGetMailBoxSetting = append(
mi.ErrGetMailBoxSetting,
api.ErrMailBoxSettingsAccessDenied,
)
api.ErrMailBoxSettingsAccessDenied)
} else {
mi = api.ParseMailboxSettings(mboxSettings, mi)
}

View File

@ -54,8 +54,7 @@ func ContactBytes(middleName string) []byte {
defaultContactGivenName,
middleName,
defaultContactSurname,
phone,
)
phone)
}
func ContactBytesWith(
@ -70,6 +69,5 @@ func ContactBytesWith(
phone,
givenName,
middleName,
surname,
))
surname))
}

View File

@ -240,8 +240,7 @@ func EventWithSubjectBytes(subject string) []byte {
defaultEventOrganizer, subject,
defaultEventBody, defaultEventBodyPreview,
NoOriginalStartDate, atTime, endTime, NoRecurrence, NoAttendees,
NoAttachments, NoCancelledOccurrences, NoExceptionOccurrences,
)
NoAttachments, NoCancelledOccurrences, NoExceptionOccurrences)
}
func EventWithAttachment(subject string) []byte {
@ -255,8 +254,7 @@ func EventWithAttachment(subject string) []byte {
defaultEventOrganizer, subject,
defaultEventBody, defaultEventBodyPreview,
NoOriginalStartDate, atTime, atTime, NoRecurrence, NoAttendees,
defaultEventAttachments, NoCancelledOccurrences, NoExceptionOccurrences,
)
defaultEventAttachments, NoCancelledOccurrences, NoExceptionOccurrences)
}
func EventWithRecurrenceBytes(subject, recurrenceTimeZone string) []byte {
@ -272,15 +270,13 @@ func EventWithRecurrenceBytes(subject, recurrenceTimeZone string) []byte {
strconv.Itoa(int(at.Month())),
strconv.Itoa(at.Day()),
timeSlice[0],
recurrenceTimeZone,
))
recurrenceTimeZone))
return EventWith(
defaultEventOrganizer, subject,
defaultEventBody, defaultEventBodyPreview,
NoOriginalStartDate, atTime, atTime, recurrence, attendeesTmpl,
NoAttachments, NoCancelledOccurrences, NoExceptionOccurrences,
)
NoAttachments, NoCancelledOccurrences, NoExceptionOccurrences)
}
func EventWithRecurrenceAndCancellationBytes(subject string) []byte {
@ -297,8 +293,7 @@ func EventWithRecurrenceAndCancellationBytes(subject string) []byte {
strconv.Itoa(int(at.Month())),
strconv.Itoa(at.Day()),
timeSlice[0],
`"UTC"`,
))
`"UTC"`))
cancelledInstances := []string{fmt.Sprintf(cancelledOccurrenceInstanceFormat, dttm.FormatTo(nextYear, dttm.DateOnly))}
cancelledOccurrences := fmt.Sprintf(cancelledOccurrencesFormat, strings.Join(cancelledInstances, ","))
@ -307,8 +302,7 @@ func EventWithRecurrenceAndCancellationBytes(subject string) []byte {
defaultEventOrganizer, subject,
defaultEventBody, defaultEventBodyPreview,
NoOriginalStartDate, atTime, atTime, recurrence, attendeesTmpl,
defaultEventAttachments, cancelledOccurrences, NoExceptionOccurrences,
)
defaultEventAttachments, cancelledOccurrences, NoExceptionOccurrences)
}
func EventWithRecurrenceAndExceptionBytes(subject string) []byte {
@ -326,24 +320,21 @@ func EventWithRecurrenceAndExceptionBytes(subject string) []byte {
strconv.Itoa(int(at.Month())),
strconv.Itoa(at.Day()),
timeSlice[0],
`"UTC"`,
))
`"UTC"`))
exceptionEvent := EventWith(
defaultEventOrganizer, subject+"(modified)",
defaultEventBody, defaultEventBodyPreview,
fmt.Sprintf(originalStartDateFormat, originalStartDate),
newTime, newTime, NoRecurrence, attendeesTmpl,
NoAttachments, NoCancelledOccurrences, NoExceptionOccurrences,
)
NoAttachments, NoCancelledOccurrences, NoExceptionOccurrences)
exceptionOccurrences := fmt.Sprintf(exceptionOccurrencesFormat, exceptionEvent)
return EventWith(
defaultEventOrganizer, subject,
defaultEventBody, defaultEventBodyPreview,
NoOriginalStartDate, atTime, atTime, recurrence, attendeesTmpl,
defaultEventAttachments, NoCancelledOccurrences, exceptionOccurrences,
)
defaultEventAttachments, NoCancelledOccurrences, exceptionOccurrences)
}
func EventWithRecurrenceAndExceptionAndAttachmentBytes(subject string) []byte {
@ -361,8 +352,7 @@ func EventWithRecurrenceAndExceptionAndAttachmentBytes(subject string) []byte {
strconv.Itoa(int(at.Month())),
strconv.Itoa(at.Day()),
timeSlice[0],
`"UTC"`,
))
`"UTC"`))
exceptionEvent := EventWith(
defaultEventOrganizer, subject+"(modified)",
@ -370,19 +360,16 @@ func EventWithRecurrenceAndExceptionAndAttachmentBytes(subject string) []byte {
fmt.Sprintf(originalStartDateFormat, originalStartDate),
newTime, newTime, NoRecurrence, attendeesTmpl,
"\"attachments\":["+fmt.Sprintf(eventAttachmentFormat, "exception-database.db")+"],",
NoCancelledOccurrences, NoExceptionOccurrences,
)
NoCancelledOccurrences, NoExceptionOccurrences)
exceptionOccurrences := fmt.Sprintf(
exceptionOccurrencesFormat,
strings.Join([]string{string(exceptionEvent)}, ","),
)
strings.Join([]string{string(exceptionEvent)}, ","))
return EventWith(
defaultEventOrganizer, subject,
defaultEventBody, defaultEventBodyPreview,
NoOriginalStartDate, atTime, atTime, recurrence, attendeesTmpl,
defaultEventAttachments, NoCancelledOccurrences, exceptionOccurrences,
)
defaultEventAttachments, NoCancelledOccurrences, exceptionOccurrences)
}
func EventWithAttendeesBytes(subject string) []byte {
@ -396,8 +383,7 @@ func EventWithAttendeesBytes(subject string) []byte {
defaultEventOrganizer, subject,
defaultEventBody, defaultEventBodyPreview,
NoOriginalStartDate, atTime, atTime, NoRecurrence, attendeesTmpl,
defaultEventAttachments, NoCancelledOccurrences, NoExceptionOccurrences,
)
defaultEventAttachments, NoCancelledOccurrences, NoExceptionOccurrences)
}
// EventWith returns bytes for an Eventable item.
@ -442,6 +428,5 @@ func EventWith(
recurrence,
cancelledOccurrences,
exceptionOccurrences,
attendees,
))
attendees))
}

View File

@ -125,8 +125,7 @@ func MessageWithBodyBytes(subject, body, preview string) []byte {
defaultMessageCreatedTime,
defaultMessageModifiedTime,
defaultMessageSentTime,
defaultMessageReceivedTime,
)
defaultMessageReceivedTime)
}
// MessageWith returns bytes for a Messageable item.
@ -498,8 +497,7 @@ func MessageWithItemAttachmentMail(subject string) []byte {
defaultAlias,
defaultMessageSender,
defaultMessageFrom,
defaultMessageTo,
)
defaultMessageTo)
return []byte(message)
}
@ -684,8 +682,7 @@ func MessageWithNestedItemAttachmentEvent(subject string) []byte {
defaultAlias,
defaultMessageSender,
defaultMessageFrom,
defaultMessageTo,
)
defaultMessageTo)
return []byte(message)
}

View File

@ -110,8 +110,7 @@ func (suite *SharePointPageSuite) TestRestoreSinglePage() {
pageData := site.NewItem(
testName,
io.NopCloser(bytes.NewReader(byteArray)),
)
io.NopCloser(bytes.NewReader(byteArray)))
info, err := api.RestoreSitePage(
ctx,

View File

@ -95,8 +95,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
dpb = odConsts.DriveFolderPrefixBuilder(driveID)
cache = idname.NewCache(
// Cache check with lowercased ids
map[string]string{strings.ToLower(driveID): driveName},
)
map[string]string{strings.ToLower(driveID): driveName})
dii = odStub.DriveItemInfo()
expectedPath = "Libraries/" + driveName
expectedItems = []export.Item{

View File

@ -523,8 +523,7 @@ func CollectionProgress(
mpb.PrependDecorators(decor.Name(string(category))),
mpb.AppendDecorators(
decor.CurrentNoUnit("%d - ", decor.WCSyncSpace),
decor.Name(plain),
),
decor.Name(plain)),
mpb.BarFillerOnComplete(spinFrames[0]),
}

View File

@ -295,8 +295,7 @@ func makeDetailsEntry(
assert.FailNowf(
t,
"category %s not supported in helper function",
p.Category().String(),
)
p.Category().String())
}
res.Exchange = &details.ExchangeInfo{
@ -546,8 +545,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
"work",
"item1",
},
true,
)
true)
locationPath1 = path.Builder{}.Append(odConsts.RootPathDir, "work-display-name")
itemPath2 = makePath(
suite.T(),
@ -562,8 +560,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
"personal",
"item2",
},
true,
)
true)
locationPath2 = path.Builder{}.Append(odConsts.RootPathDir, "personal-display-name")
itemPath3 = makePath(
suite.T(),
@ -575,8 +572,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
"personal",
"item3",
},
true,
)
true)
locationPath3 = path.Builder{}.Append("personal-display-name")
backup1 = backup.Backup{
@ -734,9 +730,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
itemPath1.ResourceOwner(),
path.UnknownCategory.String(),
},
itemPath1.Folders()...,
)...,
),
itemPath1.Folders()...)...),
ItemInfo: details.ItemInfo{
OneDrive: &details.OneDriveInfo{
ItemType: details.OneDriveItem,
@ -765,8 +759,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
"personal",
"item1",
},
true,
)
true)
res.add(itemPath1, p, nil)
@ -1616,16 +1609,14 @@ func makeMetadataCollectionEntries(
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.DeltaURLsFileName,
map[string]string{driveID: deltaURL},
),
map[string]string{driveID: deltaURL}),
graph.NewMetadataEntry(
graph.PreviousPathFileName,
map[string]map[string]string{
driveID: {
folderID: p.PlainString(),
},
},
),
}),
}
}

View File

@ -277,8 +277,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan("id1", "", path.EmailCategory),
),
makeMan("id1", "", path.EmailCategory)),
},
},
rp: mockRestoreProducer{},
@ -290,16 +289,14 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
assertB: assert.False,
expectDCS: nil,
expectMans: kopia.NewMockBackupBases().WithAssistBases(
makeMan("id1", "", path.EmailCategory),
),
makeMan("id1", "", path.EmailCategory)),
},
{
name: "don't get metadata, incomplete manifest",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithAssistBases(
makeMan("id1", "checkpoint", path.EmailCategory),
),
makeMan("id1", "checkpoint", path.EmailCategory)),
},
},
rp: mockRestoreProducer{},
@ -314,16 +311,14 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
assertB: assert.True,
expectDCS: nil,
expectMans: kopia.NewMockBackupBases().WithAssistBases(
makeMan("id1", "checkpoint", path.EmailCategory),
),
makeMan("id1", "checkpoint", path.EmailCategory)),
},
{
name: "one valid man, multiple reasons",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan("id1", "", path.EmailCategory, path.ContactsCategory),
),
makeMan("id1", "", path.EmailCategory, path.ContactsCategory)),
},
},
rp: mockRestoreProducer{
@ -354,23 +349,19 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
path.ContactsCategory,
},
p.Category(),
"read data category doesn't match a given reason",
)
"read data category doesn't match a given reason")
}
},
expectMans: kopia.NewMockBackupBases().WithMergeBases(
makeMan("id1", "", path.EmailCategory, path.ContactsCategory),
),
makeMan("id1", "", path.EmailCategory, path.ContactsCategory)),
},
{
name: "one valid man, extra incomplete man",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan("id1", "", path.EmailCategory),
).WithAssistBases(
makeMan("id2", "checkpoint", path.EmailCategory),
),
makeMan("id1", "", path.EmailCategory)).WithAssistBases(
makeMan("id2", "checkpoint", path.EmailCategory)),
},
},
rp: mockRestoreProducer{
@ -387,20 +378,16 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}},
expectMans: kopia.NewMockBackupBases().WithMergeBases(
makeMan("id1", "", path.EmailCategory),
).WithAssistBases(
makeMan("id2", "checkpoint", path.EmailCategory),
),
makeMan("id1", "", path.EmailCategory)).WithAssistBases(
makeMan("id2", "checkpoint", path.EmailCategory)),
},
{
name: "one valid man, extra incomplete man, no assist bases",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan("id1", "", path.EmailCategory),
).WithAssistBases(
makeMan("id2", "checkpoint", path.EmailCategory),
),
makeMan("id1", "", path.EmailCategory)).WithAssistBases(
makeMan("id2", "checkpoint", path.EmailCategory)),
},
},
rp: mockRestoreProducer{
@ -418,8 +405,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}},
expectMans: kopia.NewMockBackupBases().WithMergeBases(
makeMan("id1", "", path.EmailCategory),
).
makeMan("id1", "", path.EmailCategory)).
ClearMockAssistBases(),
},
{
@ -428,8 +414,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan("id1", "", path.EmailCategory),
makeMan("id2", "", path.EmailCategory),
),
makeMan("id2", "", path.EmailCategory)),
},
},
rp: mockRestoreProducer{
@ -447,16 +432,14 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
expectDCS: []mockColl{{id: "id1"}, {id: "id2"}},
expectMans: kopia.NewMockBackupBases().WithMergeBases(
makeMan("id1", "", path.EmailCategory),
makeMan("id2", "", path.EmailCategory),
),
makeMan("id2", "", path.EmailCategory)),
},
{
name: "error collecting metadata",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan("id1", "", path.EmailCategory),
),
makeMan("id1", "", path.EmailCategory)),
},
},
rp: mockRestoreProducer{err: assert.AnError},
@ -503,8 +486,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
data.NoFetchRestoreCollection{},
dc,
"unexpected type returned [%T]",
dc,
) {
dc) {
continue
}
@ -515,8 +497,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
mockColl{},
tmp.Collection,
"unexpected type returned [%T]",
tmp.Collection,
) {
tmp.Collection) {
continue
}
@ -595,10 +576,8 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
fbro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(fbro, "fb_id1", "", path.EmailCategory),
).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory),
),
makeMan(fbro, "fb_id1", "", path.EmailCategory)).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory)),
},
},
rp: mockRestoreProducer{},
@ -608,18 +587,15 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
assertB: assert.False,
expectDCS: nil,
expectMans: kopia.NewMockBackupBases().WithAssistBases(
makeMan(fbro, "fb_id1", "", path.EmailCategory),
),
makeMan(fbro, "fb_id1", "", path.EmailCategory)),
},
{
name: "only fallbacks",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
fbro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(fbro, "fb_id1", "", path.EmailCategory),
).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory),
),
makeMan(fbro, "fb_id1", "", path.EmailCategory)).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory)),
},
},
rp: mockRestoreProducer{
@ -633,20 +609,16 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
assertB: assert.True,
expectDCS: []mockColl{{id: "fb_id1"}},
expectMans: kopia.NewMockBackupBases().WithMergeBases(
makeMan(fbro, "fb_id1", "", path.EmailCategory),
).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory),
),
makeMan(fbro, "fb_id1", "", path.EmailCategory)).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory)),
},
{
name: "only fallbacks, no assist",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
fbro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(fbro, "fb_id1", "", path.EmailCategory),
).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory),
),
makeMan(fbro, "fb_id1", "", path.EmailCategory)).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory)),
},
},
rp: mockRestoreProducer{
@ -661,10 +633,8 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
assertB: assert.True,
expectDCS: []mockColl{{id: "fb_id1"}},
expectMans: kopia.NewMockBackupBases().WithMergeBases(
makeMan(fbro, "fb_id1", "", path.EmailCategory),
).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory),
).
makeMan(fbro, "fb_id1", "", path.EmailCategory)).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory)).
ClearMockAssistBases(),
},
{
@ -672,13 +642,10 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory),
),
makeMan(ro, "id1", "", path.EmailCategory)),
fbro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(fbro, "fb_id1", "", path.EmailCategory),
).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory),
),
makeMan(fbro, "fb_id1", "", path.EmailCategory)).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory)),
},
},
rp: mockRestoreProducer{
@ -694,19 +661,16 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}},
expectMans: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory),
),
makeMan(ro, "id1", "", path.EmailCategory)),
},
{
name: "incomplete mans and fallbacks",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithAssistBases(
makeMan(ro, "id2", "checkpoint", path.EmailCategory),
),
makeMan(ro, "id2", "checkpoint", path.EmailCategory)),
fbro: kopia.NewMockBackupBases().WithAssistBases(
makeMan(fbro, "fb_id2", "checkpoint", path.EmailCategory),
),
makeMan(fbro, "fb_id2", "checkpoint", path.EmailCategory)),
},
},
rp: mockRestoreProducer{
@ -722,25 +686,19 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
assertB: assert.True,
expectDCS: nil,
expectMans: kopia.NewMockBackupBases().WithAssistBases(
makeMan(ro, "id2", "checkpoint", path.EmailCategory),
),
makeMan(ro, "id2", "checkpoint", path.EmailCategory)),
},
{
name: "complete and incomplete mans and fallbacks",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory),
).WithAssistBases(
makeMan(ro, "id2", "checkpoint", path.EmailCategory),
),
makeMan(ro, "id1", "", path.EmailCategory)).WithAssistBases(
makeMan(ro, "id2", "checkpoint", path.EmailCategory)),
fbro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(fbro, "fb_id1", "", path.EmailCategory),
).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory),
).WithAssistBases(
makeMan(fbro, "fb_id2", "checkpoint", path.EmailCategory),
),
makeMan(fbro, "fb_id1", "", path.EmailCategory)).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory)).WithAssistBases(
makeMan(fbro, "fb_id2", "checkpoint", path.EmailCategory)),
},
},
rp: mockRestoreProducer{
@ -758,23 +716,18 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}},
expectMans: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory),
).WithAssistBases(
makeMan(ro, "id2", "checkpoint", path.EmailCategory),
),
makeMan(ro, "id1", "", path.EmailCategory)).WithAssistBases(
makeMan(ro, "id2", "checkpoint", path.EmailCategory)),
},
{
name: "incomplete mans and complete fallbacks",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithAssistBases(
makeMan(ro, "id2", "checkpoint", path.EmailCategory),
),
makeMan(ro, "id2", "checkpoint", path.EmailCategory)),
fbro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(fbro, "fb_id1", "", path.EmailCategory),
).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory),
),
makeMan(fbro, "fb_id1", "", path.EmailCategory)).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory)),
},
},
rp: mockRestoreProducer{
@ -790,25 +743,19 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
assertB: assert.True,
expectDCS: []mockColl{{id: "fb_id1"}},
expectMans: kopia.NewMockBackupBases().WithMergeBases(
makeMan(fbro, "fb_id1", "", path.EmailCategory),
).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory),
).WithAssistBases(
makeMan(ro, "id2", "checkpoint", path.EmailCategory),
),
makeMan(fbro, "fb_id1", "", path.EmailCategory)).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory)).WithAssistBases(
makeMan(ro, "id2", "checkpoint", path.EmailCategory)),
},
{
name: "incomplete mans and complete fallbacks, no assist bases",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithAssistBases(
makeMan(ro, "id2", "checkpoint", path.EmailCategory),
),
makeMan(ro, "id2", "checkpoint", path.EmailCategory)),
fbro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(fbro, "fb_id1", "", path.EmailCategory),
).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory),
),
makeMan(fbro, "fb_id1", "", path.EmailCategory)).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory)),
},
},
rp: mockRestoreProducer{
@ -825,10 +772,8 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
assertB: assert.True,
expectDCS: []mockColl{{id: "fb_id1"}},
expectMans: kopia.NewMockBackupBases().WithMergeBases(
makeMan(fbro, "fb_id1", "", path.EmailCategory),
).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory),
).
makeMan(fbro, "fb_id1", "", path.EmailCategory)).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory)).
ClearMockAssistBases(),
},
{
@ -836,11 +781,9 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory),
),
makeMan(ro, "id1", "", path.EmailCategory)),
fbro: kopia.NewMockBackupBases().WithAssistBases(
makeMan(fbro, "fb_id2", "checkpoint", path.EmailCategory),
),
makeMan(fbro, "fb_id2", "checkpoint", path.EmailCategory)),
},
},
rp: mockRestoreProducer{
@ -856,21 +799,17 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}},
expectMans: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory),
),
makeMan(ro, "id1", "", path.EmailCategory)),
},
{
name: "complete mans and complete fallbacks, multiple reasons",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory, path.ContactsCategory),
),
makeMan(ro, "id1", "", path.EmailCategory, path.ContactsCategory)),
fbro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(fbro, "fb_id1", "", path.EmailCategory, path.ContactsCategory),
).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory, path.ContactsCategory),
),
makeMan(fbro, "fb_id1", "", path.EmailCategory, path.ContactsCategory)).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory, path.ContactsCategory)),
},
},
rp: mockRestoreProducer{
@ -892,21 +831,17 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}},
expectMans: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory, path.ContactsCategory),
),
makeMan(ro, "id1", "", path.EmailCategory, path.ContactsCategory)),
},
{
name: "complete mans and complete fallbacks, distinct reasons",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory),
),
makeMan(ro, "id1", "", path.EmailCategory)),
fbro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(fbro, "fb_id1", "", path.ContactsCategory),
).WithBackups(
makeBackup(fbro, "fb_id1", path.ContactsCategory),
),
makeMan(fbro, "fb_id1", "", path.ContactsCategory)).WithBackups(
makeBackup(fbro, "fb_id1", path.ContactsCategory)),
},
},
rp: mockRestoreProducer{
@ -925,23 +860,18 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
expectDCS: []mockColl{{id: "id1"}, {id: "fb_id1"}},
expectMans: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory),
makeMan(fbro, "fb_id1", "", path.ContactsCategory),
).WithBackups(
makeBackup(fbro, "fb_id1", path.ContactsCategory),
),
makeMan(fbro, "fb_id1", "", path.ContactsCategory)).WithBackups(
makeBackup(fbro, "fb_id1", path.ContactsCategory)),
},
{
name: "complete mans and complete fallbacks, fallback has superset of reasons",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory),
),
makeMan(ro, "id1", "", path.EmailCategory)),
fbro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(fbro, "fb_id1", "", path.EmailCategory, path.ContactsCategory),
).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory, path.ContactsCategory),
),
makeMan(fbro, "fb_id1", "", path.EmailCategory, path.ContactsCategory)).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory, path.ContactsCategory)),
},
},
rp: mockRestoreProducer{
@ -964,10 +894,8 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
expectDCS: []mockColl{{id: "id1"}, {id: "fb_id1"}},
expectMans: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory),
makeMan(fbro, "fb_id1", "", path.ContactsCategory),
).WithBackups(
makeBackup(fbro, "fb_id1", path.ContactsCategory),
),
makeMan(fbro, "fb_id1", "", path.ContactsCategory)).WithBackups(
makeBackup(fbro, "fb_id1", path.ContactsCategory)),
},
}
@ -1003,8 +931,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
data.NoFetchRestoreCollection{},
dc,
"unexpected type returned [%T]",
dc,
) {
dc) {
continue
}
@ -1015,8 +942,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
mockColl{},
tmp.Collection,
"unexpected type returned [%T]",
tmp.Collection,
) {
tmp.Collection) {
continue
}

View File

@ -323,8 +323,7 @@ func checkMetadataFilesExist(
int64(0),
"empty metadata file: %s/%s",
col.FullPath(),
item.ID(),
)
item.ID())
itemNames = append(itemNames, item.ID())
}
@ -334,8 +333,7 @@ func checkMetadataFilesExist(
pathsByRef[col.FullPath().ShortRef()],
itemNames,
"collection %s missing expected files",
col.FullPath(),
)
col.FullPath())
}
})
}

View File

@ -53,9 +53,7 @@ func NewIntegrationSuite(
t,
append(
[]string{CorsoCITests},
runOnAnyEnv...,
)...,
)
runOnAnyEnv...)...)
MustGetEnvSets(t, envSets...)
@ -82,9 +80,7 @@ func NewE2ESuite(
t,
append(
[]string{CorsoE2ETests},
runOnAnyEnv...,
)...,
)
runOnAnyEnv...)...)
MustGetEnvSets(t, envSets...)
@ -111,9 +107,7 @@ func NewLoadSuite(
t,
append(
[]string{CorsoLoadTests},
runOnAnyEnv...,
)...,
)
runOnAnyEnv...)...)
MustGetEnvSets(t, envSets...)
@ -140,9 +134,7 @@ func NewNightlySuite(
t,
append(
[]string{CorsoNightlyTests},
runOnAnyEnv...,
)...,
)
runOnAnyEnv...)...)
MustGetEnvSets(t, envSets...)
@ -169,9 +161,7 @@ func NewRetentionSuite(
t,
append(
[]string{CorsoRetentionTests},
runOnAnyEnv...,
)...,
)
runOnAnyEnv...)...)
MustGetEnvSets(t, envSets...)

View File

@ -26,8 +26,7 @@ func NewM365Account(t *testing.T) account.Account {
account.M365Config{
M365: credentials.GetM365(),
AzureTenantID: cfg[TestCfgAzureTenantID],
},
)
})
require.NoError(t, err, "initializing account", clues.ToCore(err))
return acc
@ -42,8 +41,7 @@ func NewFakeM365Account(t *testing.T) account.Account {
AzureClientSecret: "abcde",
},
AzureTenantID: "09876",
},
)
})
require.NoError(t, err, "initializing mock account", clues.ToCore(err))
return acc

View File

@ -17,14 +17,11 @@ func AreSameFunc(t *testing.T, expect, have any) {
runtime.FuncForPC(
reflect.
ValueOf(expect).
Pointer(),
).Name(),
Pointer()).Name(),
runtime.FuncForPC(
reflect.
ValueOf(have).
Pointer(),
).Name(),
)
Pointer()).Name())
}
type TestT interface {

View File

@ -42,8 +42,7 @@ func (b *Builder) Add(
if err := b.addFolderEntries(
repoRef.ToBuilder().Dir(),
locationRef,
entry,
); err != nil {
entry); err != nil {
return clues.Wrap(err, "adding folder entries")
}

View File

@ -434,8 +434,7 @@ func ExampleBus_AddSkip() {
"deduplication-namespace",
"file-id",
"file-name",
map[string]any{"foo": "bar"},
))
map[string]any{"foo": "bar"}))
// later on, after processing, end users can scrutinize the skipped items.
fmt.Println(errs.Skipped()[0].String())

View File

@ -330,8 +330,7 @@ func genLogger(set Settings) (*zapcore.Core, *zap.SugaredLogger) {
out = zapcore.Lock(os.Stderr)
consoleEncoder = zapcore.NewConsoleEncoder(zap.NewDevelopmentEncoderConfig())
core = zapcore.NewTee(
zapcore.NewCore(consoleEncoder, out, levelFilter),
)
zapcore.NewCore(consoleEncoder, out, levelFilter))
cfg zap.Config
)

View File

@ -208,8 +208,7 @@ func FromDataLayerPath(p string, isItem bool) (Path, error) {
service, category, err := validateServiceAndCategoryStrings(
pb.elements[1],
pb.elements[3],
)
pb.elements[3])
if err != nil {
return nil, clues.Stack(errParsingPath, err).With("path_string", p)
}

View File

@ -334,14 +334,12 @@ func (suite *PathUnitSuite) TestFromDataLayerPath() {
testUser,
testElement1,
testElement2,
testElement3,
),
testElement3),
expectedFolder: fmt.Sprintf(
"%s/%s/%s",
testElementTrimmed,
testElement2,
testElement3,
),
testElement3),
expectedSplit: []string{
testElementTrimmed,
testElement2,
@ -351,8 +349,7 @@ func (suite *PathUnitSuite) TestFromDataLayerPath() {
expectedItemFolder: fmt.Sprintf(
"%s/%s",
testElementTrimmed,
testElement2,
),
testElement2),
expectedItemSplit: []string{
testElementTrimmed,
testElement2,
@ -366,14 +363,12 @@ func (suite *PathUnitSuite) TestFromDataLayerPath() {
testUser,
testElementTrimmed,
testElement2,
testElement3,
),
testElement3),
expectedFolder: fmt.Sprintf(
"%s/%s/%s",
testElementTrimmed,
testElement2,
testElement3,
),
testElement3),
expectedSplit: []string{
testElementTrimmed,
testElement2,
@ -383,8 +378,7 @@ func (suite *PathUnitSuite) TestFromDataLayerPath() {
expectedItemFolder: fmt.Sprintf(
"%s/%s",
testElementTrimmed,
testElement2,
),
testElement2),
expectedItemSplit: []string{
testElementTrimmed,
testElement2,

View File

@ -155,8 +155,7 @@ func (suite *DataLayerResourcePath) TestMissingInfoErrors() {
b,
test.tenant,
test.user,
m.isItem,
)
m.isItem)
assert.Error(t, err)
})
}
@ -178,8 +177,7 @@ func (suite *DataLayerResourcePath) TestMailItemNoFolder() {
b,
testTenant,
testUser,
true,
)
true)
require.NoError(t, err, clues.ToCore(err))
assert.Empty(t, p.Folder(false))
@ -192,8 +190,7 @@ func (suite *DataLayerResourcePath) TestMailItemNoFolder() {
func (suite *DataLayerResourcePath) TestPopFront() {
expected := path.Builder{}.Append(append(
[]string{path.ExchangeService.String(), testUser, path.EmailCategory.String()},
rest...,
)...)
rest...)...)
for _, m := range modes {
suite.Run(m.name, func() {
@ -204,8 +201,7 @@ func (suite *DataLayerResourcePath) TestPopFront() {
testTenant,
testUser,
path.EmailCategory,
m.isItem,
)
m.isItem)
require.NoError(t, err, clues.ToCore(err))
b := p.PopFront()
@ -229,8 +225,7 @@ func (suite *DataLayerResourcePath) TestDir() {
testTenant,
testUser,
path.EmailCategory,
m.isItem,
)
m.isItem)
require.NoError(suite.T(), err, clues.ToCore(err))
for i := 1; i <= len(rest); i++ {
@ -439,8 +434,7 @@ func (suite *PopulatedDataLayerResourcePath) SetupSuite() {
testTenant,
testUser,
path.EmailCategory,
t,
)
t)
require.NoError(suite.T(), err, clues.ToCore(err))
suite.paths[t] = p
@ -495,8 +489,7 @@ func (suite *PopulatedDataLayerResourcePath) TestFolder() {
assert.Equal(
t,
strings.Join(m.expectedFolders, "/"),
suite.paths[m.isItem].Folder(false),
)
suite.paths[m.isItem].Folder(false))
})
}
}
@ -541,8 +534,7 @@ func (suite *PopulatedDataLayerResourcePath) TestAppend() {
hasItem: false,
expectedFolder: strings.Join(
append(append([]string{}, rest...), newElement),
"/",
),
"/"),
expectedItem: "",
},
}

View File

@ -419,8 +419,7 @@ func TestLoadExchangeSuite(t *testing.T) {
suite.Run(t, &LoadExchangeSuite{
Suite: tester.NewLoadSuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs},
),
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
})
}
@ -470,8 +469,7 @@ func TestIndividualLoadExchangeSuite(t *testing.T) {
suite.Run(t, &IndividualLoadExchangeSuite{
Suite: tester.NewLoadSuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs},
),
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
})
}
@ -524,8 +522,7 @@ func TestLoadOneDriveSuite(t *testing.T) {
suite.Run(t, &LoadOneDriveSuite{
Suite: tester.NewLoadSuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs},
),
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
})
}
@ -572,8 +569,7 @@ func TestIndividualLoadOneDriveSuite(t *testing.T) {
suite.Run(t, &IndividualLoadOneDriveSuite{
Suite: tester.NewLoadSuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs},
),
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
})
}
@ -623,8 +619,7 @@ func TestLoadSharePointSuite(t *testing.T) {
suite.Run(t, &LoadSharePointSuite{
Suite: tester.NewLoadSuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs},
),
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
})
}
@ -671,8 +666,7 @@ func TestIndividualLoadSharePointSuite(t *testing.T) {
suite.Run(t, &IndividualLoadSharePointSuite{
Suite: tester.NewLoadSuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs},
),
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
})
}

View File

@ -24,8 +24,7 @@ func Example_newSelector() {
// owners (users, in this example). Only these users
// will be involved in the backup.
seb = selectors.NewExchangeBackup(
[]string{"your-user-id", "foo-user-id", "bar-user-id"},
)
[]string{"your-user-id", "foo-user-id", "bar-user-id"})
// The core selector can be passed around without slicing any
// application-specific data.
@ -59,8 +58,7 @@ func Example_newSelector() {
// ExampleIncludeFoldersAndItems demonstrates how to select for granular data.
func Example_includeFoldersAndItems() {
seb := selectors.NewExchangeBackup(
[]string{"your-user-id", "foo-user-id", "bar-user-id"},
)
[]string{"your-user-id", "foo-user-id", "bar-user-id"})
// Much of the data handled by Corso exists within an established hierarchy.
// Resource Owner-level data (such as users) sits at the top, with Folder
@ -88,15 +86,13 @@ func Example_includeFoldersAndItems() {
// usually unique, and have a low chance of collision.
seb.Mails(
selectors.Any(),
[]string{"item-id-1", "item-id-2"},
)
[]string{"item-id-1", "item-id-2"})
}
// ExampleFilters demonstrates selector filters.
func Example_filters() {
ser := selectors.NewExchangeRestore(
[]string{"your-user-id", "foo-user-id", "bar-user-id"},
)
[]string{"your-user-id", "foo-user-id", "bar-user-id"})
// In addition to data ownership details (user, folder, itemID), certain operations
// like `backup details` and restores allow items to be selected by filtering on
@ -112,8 +108,7 @@ func Example_filters() {
// make much sense to accept multiple values here.
ser.MailReceivedBefore("2006-01-02"),
// But you can still make a compound filter by adding each scope individually.
ser.MailSubject("the answer to life, the universe, and everything"),
)
ser.MailSubject("the answer to life, the universe, and everything"))
}
var (
@ -142,8 +137,7 @@ var (
// ExampleReduceDetails demonstrates how selectors are used to filter backup details.
func Example_reduceDetails() {
ser := selectors.NewExchangeRestore(
[]string{"your-user-id", "foo-user-id", "bar-user-id"},
)
[]string{"your-user-id", "foo-user-id", "bar-user-id"})
errAgg := fault.New(false)
// The Reduce() call is where our constructed selectors are applied to the data
@ -170,8 +164,7 @@ func Example_scopeMatching() {
// if an individual bit of data matches our scopes, too.
scope := selectors.
NewExchangeBackup(
[]string{"your-user-id", "foo-user-id", "bar-user-id"},
).
[]string{"your-user-id", "foo-user-id", "bar-user-id"}).
Mails([]string{"Inbox"}, selectors.Any())[0]
// To compare data against a scope, you need to specify the category of data,

View File

@ -81,8 +81,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeSelector_Exclude_Contacts() {
map[categorizer][]string{
ExchangeContactFolder: {folder},
ExchangeContact: {c1, c2},
},
)
})
}
func (suite *ExchangeSelectorSuite) TestExchangeSelector_Include_Contacts() {
@ -106,8 +105,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeSelector_Include_Contacts() {
map[categorizer][]string{
ExchangeContactFolder: {folder},
ExchangeContact: {c1, c2},
},
)
})
assert.Equal(t, sel.Scopes()[0].Category(), ExchangeContact)
}
@ -132,8 +130,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeSelector_Exclude_ContactFolders(
map[categorizer][]string{
ExchangeContactFolder: {f1, f2},
ExchangeContact: Any(),
},
)
})
}
func (suite *ExchangeSelectorSuite) TestExchangeSelector_Include_ContactFolders() {
@ -156,8 +153,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeSelector_Include_ContactFolders(
map[categorizer][]string{
ExchangeContactFolder: {f1, f2},
ExchangeContact: Any(),
},
)
})
assert.Equal(t, sel.Scopes()[0].Category(), ExchangeContactFolder)
}
@ -183,8 +179,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeSelector_Exclude_Events() {
map[categorizer][]string{
ExchangeEventCalendar: {c1},
ExchangeEvent: {e1, e2},
},
)
})
}
func (suite *ExchangeSelectorSuite) TestExchangeSelector_Exclude_EventCalendars() {
@ -207,8 +202,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeSelector_Exclude_EventCalendars(
map[categorizer][]string{
ExchangeEventCalendar: {c1, c2},
ExchangeEvent: Any(),
},
)
})
}
func (suite *ExchangeSelectorSuite) TestExchangeSelector_Include_Events() {
@ -232,8 +226,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeSelector_Include_Events() {
map[categorizer][]string{
ExchangeEventCalendar: {c1},
ExchangeEvent: {e1, e2},
},
)
})
}
func (suite *ExchangeSelectorSuite) TestExchangeSelector_Include_EventCalendars() {
@ -256,8 +249,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeSelector_Include_EventCalendars(
map[categorizer][]string{
ExchangeEventCalendar: {c1, c2},
ExchangeEvent: Any(),
},
)
})
}
func (suite *ExchangeSelectorSuite) TestExchangeSelector_Exclude_Mails() {
@ -281,8 +273,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeSelector_Exclude_Mails() {
map[categorizer][]string{
ExchangeMailFolder: {folder},
ExchangeMail: {m1, m2},
},
)
})
}
func (suite *ExchangeSelectorSuite) TestExchangeSelector_Include_Mails() {
@ -306,8 +297,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeSelector_Include_Mails() {
map[categorizer][]string{
ExchangeMailFolder: {folder},
ExchangeMail: {m1, m2},
},
)
})
assert.Equal(t, sel.Scopes()[0].Category(), ExchangeMail)
}
@ -332,8 +322,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeSelector_Exclude_MailFolders() {
map[categorizer][]string{
ExchangeMailFolder: {f1, f2},
ExchangeMail: Any(),
},
)
})
}
func (suite *ExchangeSelectorSuite) TestExchangeSelector_Include_MailFolders() {
@ -356,8 +345,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeSelector_Include_MailFolders() {
map[categorizer][]string{
ExchangeMailFolder: {f1, f2},
ExchangeMail: Any(),
},
)
})
assert.Equal(t, sel.Scopes()[0].Category(), ExchangeMailFolder)
}
@ -383,8 +371,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeSelector_Exclude_AllData() {
map[categorizer][]string{
ExchangeContact: Any(),
ExchangeContactFolder: Any(),
},
)
})
}
if sc[scopeKeyCategory].Compare(ExchangeEvent.String()) {
@ -393,8 +380,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeSelector_Exclude_AllData() {
ExchangeScope(sc),
map[categorizer][]string{
ExchangeEvent: Any(),
},
)
})
}
if sc[scopeKeyCategory].Compare(ExchangeMailFolder.String()) {
@ -404,8 +390,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeSelector_Exclude_AllData() {
map[categorizer][]string{
ExchangeMail: Any(),
ExchangeMailFolder: Any(),
},
)
})
}
}
}
@ -431,8 +416,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeSelector_Include_AllData() {
map[categorizer][]string{
ExchangeContact: Any(),
ExchangeContactFolder: Any(),
},
)
})
}
if sc[scopeKeyCategory].Compare(ExchangeEvent.String()) {
@ -441,8 +425,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeSelector_Include_AllData() {
ExchangeScope(sc),
map[categorizer][]string{
ExchangeEvent: Any(),
},
)
})
}
if sc[scopeKeyCategory].Compare(ExchangeMailFolder.String()) {
@ -452,8 +435,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeSelector_Include_AllData() {
map[categorizer][]string{
ExchangeMail: Any(),
ExchangeMailFolder: Any(),
},
)
})
}
}
}

View File

@ -74,8 +74,7 @@ func (suite *OneDriveSelectorSuite) TestOneDriveSelector_AllData() {
map[categorizer][]string{
OneDriveItem: Any(),
OneDriveFolder: Any(),
},
)
})
}
})
}
@ -106,8 +105,7 @@ func (suite *OneDriveSelectorSuite) TestOneDriveSelector_Include_AllData() {
map[categorizer][]string{
OneDriveItem: Any(),
OneDriveFolder: Any(),
},
)
})
}
}
@ -136,8 +134,7 @@ func (suite *OneDriveSelectorSuite) TestOneDriveSelector_Exclude_AllData() {
map[categorizer][]string{
OneDriveItem: Any(),
OneDriveFolder: Any(),
},
)
})
}
}

View File

@ -264,8 +264,7 @@ func (suite *SelectorScopesSuite) TestReduce() {
pathCatStub,
rootCatStub.String(),
"stub",
leafCatStub.String(),
),
leafCatStub.String()),
},
},
},
@ -309,8 +308,7 @@ func (suite *SelectorScopesSuite) TestReduce_locationRef() {
pathCatStub,
rootCatStub.String(),
"stub",
leafCatStub.String(),
),
leafCatStub.String()),
LocationRef: "a/b/c//defg",
},
},

View File

@ -54,8 +54,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
selFunc: func(t *testing.T, wantVersion int) selectors.Reducer {
sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.MailFolders(
[]string{testdata.ExchangeEmailInboxPath.FolderLocation()},
))
[]string{testdata.ExchangeEmailInboxPath.FolderLocation()}))
return sel
},
@ -98,8 +97,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
sel.Filter(sel.MailSender("a-person"))
sel.Exclude(sel.Mails(
selectors.Any(),
[]string{deets[1].ShortRef},
))
[]string{deets[1].ShortRef}))
return sel
},
@ -134,8 +132,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
selFunc: func(t *testing.T, wantVersion int) selectors.Reducer {
sel := selectors.NewExchangeRestore(selectors.Any())
sel.Filter(sel.MailReceivedBefore(
dttm.Format(testdata.Time1.Add(time.Second)),
))
dttm.Format(testdata.Time1.Add(time.Second))))
return sel
},
@ -154,8 +151,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.Mails(
selectors.Any(),
[]string{testdata.ExchangeEmailItemPath1.ItemLocation()},
))
[]string{testdata.ExchangeEmailItemPath1.ItemLocation()}))
return sel
},
@ -180,8 +176,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.Mails(
selectors.Any(),
[]string{deets[0].ShortRef},
))
[]string{deets[0].ShortRef}))
return sel
},
@ -200,8 +195,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.Events(
selectors.Any(),
selectors.Any(),
))
selectors.Any()))
sel.Filter(sel.MailSubject("foo"))
return sel
@ -264,8 +258,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
selFunc: func(t *testing.T, wantVersion int) selectors.Reducer {
sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.MailFolders(
[]string{testdata.ExchangeEmailBasePath.FolderLocation()},
))
[]string{testdata.ExchangeEmailBasePath.FolderLocation()}))
return sel
},
@ -306,8 +299,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
selFunc: func(t *testing.T, wantVersion int) selectors.Reducer {
sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.MailFolders(
[]string{testdata.ExchangeEmailInboxPath.FolderLocation()},
))
[]string{testdata.ExchangeEmailInboxPath.FolderLocation()}))
return sel
},
@ -325,8 +317,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
selFunc: func(t *testing.T, wantVersion int) selectors.Reducer {
sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.ContactFolders(
[]string{testdata.ExchangeContactsBasePath.FolderLocation()},
))
[]string{testdata.ExchangeContactsBasePath.FolderLocation()}))
return sel
},
@ -344,8 +335,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
selFunc: func(t *testing.T, wantVersion int) selectors.Reducer {
sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.ContactFolders(
[]string{testdata.ExchangeContactsRootPath.FolderLocation()},
))
[]string{testdata.ExchangeContactsRootPath.FolderLocation()}))
return sel
},
@ -364,8 +354,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
selFunc: func(t *testing.T, wantVersion int) selectors.Reducer {
sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.EventCalendars(
[]string{testdata.ExchangeEventsBasePath.FolderLocation()},
))
[]string{testdata.ExchangeEventsBasePath.FolderLocation()}))
return sel
},

View File

@ -236,8 +236,7 @@ func (s *SharePointRestore) WebURL(urls []string, opts ...option) []SharePointSc
SharePointPage,
SharePointWebURL,
urls,
pathFilterFactory(os...)),
)
pathFilterFactory(os...)))
return scopes
}

View File

@ -63,8 +63,7 @@ func (suite *SharePointSelectorSuite) TestSharePointSelector_Include_WebURLs() {
scopeMustHave(
t,
SharePointScope(sc),
map[categorizer][]string{SharePointWebURL: s12},
)
map[categorizer][]string{SharePointWebURL: s12})
}
}
@ -98,8 +97,7 @@ func (suite *SharePointSelectorSuite) TestSharePointSelector_Include_WebURLs_any
scopeMustHave(
t,
SharePointScope(sc),
map[categorizer][]string{SharePointWebURL: test.expect},
)
map[categorizer][]string{SharePointWebURL: test.expect})
}
})
}
@ -124,8 +122,7 @@ func (suite *SharePointSelectorSuite) TestSharePointSelector_Exclude_WebURLs() {
scopeMustHave(
t,
SharePointScope(sc),
map[categorizer][]string{SharePointWebURL: s12},
)
map[categorizer][]string{SharePointWebURL: s12})
}
}

View File

@ -24,8 +24,7 @@ func TestExchangeServiceSuite(t *testing.T) {
suite.Run(t, &ExchangeServiceSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tconfig.M365AcctCredEnvs},
),
[][]string{tconfig.M365AcctCredEnvs}),
})
}
@ -55,8 +54,7 @@ func (suite *ExchangeServiceSuite) TestHasAttachments() {
byteArray := exchMock.MessageWithBodyBytes(
"Test",
"This is testing",
"This is testing",
)
"This is testing")
message, err := BytesToMessageable(byteArray)
require.NoError(t, err, clues.ToCore(err))
return message.GetBody()

View File

@ -272,8 +272,7 @@ func (suite *EventsAPIIntgSuite) TestEvents_RestoreLargeAttachment() {
ptr.Val(calendar.GetId()),
ptr.Val(item.GetId()),
"raboganm",
[]byte("mangobar"),
)
[]byte("mangobar"))
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, id, "empty id for large attachment")
}

View File

@ -346,8 +346,7 @@ func (c Mail) GetItem(
With(
"skipped_reason", fault.SkipNotFound,
"attachment_id", ptr.Val(a.GetId()),
"attachment_size", ptr.Val(a.GetSize()),
).Info("attachment not found")
"attachment_size", ptr.Val(a.GetSize())).Info("attachment not found")
// TODO This should use a `AddSkip` once we have
// figured out the semantics for skipping
// subcomponents of an item

View File

@ -398,8 +398,7 @@ func (suite *MailAPIIntgSuite) TestMail_RestoreLargeAttachment() {
ptr.Val(mailfolder.GetId()),
ptr.Val(item.GetId()),
"raboganm",
[]byte("mangobar"),
)
[]byte("mangobar"))
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, id, "empty id for large attachment")
}

View File

@ -206,8 +206,7 @@ func (c Users) GetMailboxSettings(
settings, err := users.
NewUserItemRequestBuilder(
fmt.Sprintf("https://graph.microsoft.com/v1.0/users/%s/mailboxSettings", userID),
c.Stable.Adapter(),
).
c.Stable.Adapter()).
Get(ctx, nil)
if err != nil {
return nil, graph.Stack(ctx, err)

View File

@ -109,8 +109,7 @@ func (suite *GroupsIntgSuite) TestGroups_InvalidCredentials() {
AzureClientSecret: "without",
},
AzureTenantID: "data",
},
)
})
require.NoError(t, err, clues.ToCore(err))
return a

View File

@ -77,8 +77,7 @@ func (suite *siteIntegrationSuite) TestSites_InvalidCredentials() {
AzureClientSecret: "without",
},
AzureTenantID: "data",
},
)
})
require.NoError(t, err, clues.ToCore(err))
return a

View File

@ -218,8 +218,7 @@ func (suite *userIntegrationSuite) TestUsers_InvalidCredentials() {
AzureClientSecret: "without",
},
AzureTenantID: "data",
},
)
})
require.NoError(t, err, clues.ToCore(err))
return a

View File

@ -45,8 +45,7 @@ func NewPrefixedS3Storage(t tester.TestT) storage.Storage {
storage.CommonConfig{
Corso: GetAndInsertCorso(""),
KopiaCfgDir: t.TempDir(),
},
)
})
require.NoError(t, err, "creating storage", clues.ToCore(err))
return st