Integration tests for assist backups (#3987)

<!-- PR description-->

1. Tests that exercise every combination of failurePolicy setting vs. fault-bus error kind
2. Incremental tests for deets propagation from assist/merge backups

---

#### Does this PR need a docs update or release note?

- [ ]  Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x]  No

#### Type of change

<!--- Please check the type of change your PR introduces: --->
- [ ] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [x] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [ ] 🧹 Tech Debt/Cleanup

#### Issue(s)

<!-- Can reference multiple issues. Use one of the following "magic words" - "closes, fixes" to auto-close the Github issue. -->
* #<issue>

#### Test Plan

<!-- How will this be tested prior to merging.-->
- [ ] 💪 Manual
- [ ]  Unit test
- [x] 💚 E2E
This commit is contained in:
Abhishek Pandey 2023-08-10 09:46:17 +05:30 committed by GitHub
parent 4e1b1d8fc6
commit 9ce6b9c5c1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 831 additions and 14 deletions

View File

@ -1300,6 +1300,7 @@ func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() {
testForFiles(t, ctx, expected, result)
}
// TODO(pandeyabs): Switch to m365/mock/BackupCollection.
type mockBackupCollection struct {
path path.Path
loc *path.Builder

View File

@ -4,6 +4,8 @@ import (
"context"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
)
type RestoreCollection struct {
@ -22,3 +24,44 @@ func (rc RestoreCollection) FetchItemByName(
return res, nil
}
// BackupCollection is a mock data.BackupCollection backed by a fixed slice
// of streams. Its accessor methods simply return the fields below.
type BackupCollection struct {
	Path    path.Path
	Loc     *path.Builder
	Streams []data.Stream
	CState  data.CollectionState
}
// Items streams each entry in c.Streams over a freshly created channel,
// closing the channel after the final entry is sent.
//
// The receiver is a value receiver for consistency with the type's other
// methods (a value receiver keeps the method in both the value and pointer
// method sets). The producer goroutine also honors context cancellation, so
// a caller that stops draining the channel does not leak the goroutine.
func (c BackupCollection) Items(ctx context.Context, _ *fault.Bus) <-chan data.Stream {
	res := make(chan data.Stream)

	go func() {
		defer close(res)

		for _, s := range c.Streams {
			select {
			case <-ctx.Done():
				// Abandoned reader; stop sending instead of blocking forever.
				return
			case res <- s:
			}
		}
	}()

	return res
}
// FullPath returns the collection's current repo path.
func (c BackupCollection) FullPath() path.Path {
	return c.Path
}
// PreviousPath returns the same value as FullPath; this mock models a
// collection whose location did not change between backups.
func (c BackupCollection) PreviousPath() path.Path {
	return c.Path
}
// LocationPath returns the collection's location builder (Loc).
func (c BackupCollection) LocationPath() *path.Builder {
	return c.Loc
}
// State returns the collection state configured in CState.
func (c BackupCollection) State() data.CollectionState {
	return c.CState
}
// DoNotMergeItems always reports false: this mock never opts out of
// merging items from a prior backup.
func (c BackupCollection) DoNotMergeItems() bool {
	return false
}

View File

@ -8,6 +8,7 @@ import (
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/extensions"
)
// ---------------------------------------------------------------------------
@ -16,21 +17,22 @@ import (
var _ data.Stream = &Data{}
// TODO: move to data/mock for service-agnostic mocking
// Data represents a single item retrieved from, or restored to, onedrive
type Data struct {
ID string
Reader io.ReadCloser
ReadErr error
size int64
modifiedTime time.Time
deleted bool
ID string
DriveID string
DriveName string
Reader io.ReadCloser
ReadErr error
Sz int64
ModifiedTime time.Time
Del bool
ExtensionData *details.ExtensionData
}
func (d *Data) UUID() string { return d.ID }
func (d *Data) Deleted() bool { return d.deleted }
func (d *Data) Size() int64 { return d.size }
func (d *Data) ModTime() time.Time { return d.modifiedTime }
func (d *Data) Deleted() bool { return d.Del }
func (d *Data) Size() int64 { return d.Sz }
func (d *Data) ModTime() time.Time { return d.ModifiedTime }
func (d *Data) ToReader() io.ReadCloser {
if d.ReadErr != nil {
@ -43,10 +45,14 @@ func (d *Data) ToReader() io.ReadCloser {
func (d *Data) Info() details.ItemInfo {
return details.ItemInfo{
OneDrive: &details.OneDriveInfo{
ItemType: details.OneDriveItem,
ItemName: "test.txt",
Size: 1,
ItemType: details.OneDriveItem,
ItemName: "test.txt",
Size: d.Sz,
DriveID: d.DriveID,
DriveName: d.DriveName,
Modified: d.ModifiedTime,
},
Extension: d.ExtensionData,
}
}
@ -81,6 +87,18 @@ func FileRespReadCloser(pl string) io.ReadCloser {
return io.NopCloser(bytes.NewReader([]byte(pl)))
}
// FileRespWithExtensions wraps the standard file-payload reader for pl in a
// MockExtension that records into extData.
//
// NOTE(review): the io.NopCloser wrap means callers' Close calls never reach
// the MockExtension itself — confirm that is intended.
func FileRespWithExtensions(pl string, extData *details.ExtensionData) io.ReadCloser {
	ext := &extensions.MockExtension{
		Ctx:     context.Background(),
		InnerRc: FileRespReadCloser(pl),
		ExtData: extData,
	}

	return io.NopCloser(ext)
}
const (
DriveItemFileName = "fnords.txt"
DriveFileMetaData = `{"fileName": "` + DriveItemFileName + `"}`

View File

@ -13,6 +13,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/data"
evmock "github.com/alcionai/corso/src/internal/events/mock"
@ -20,19 +21,26 @@ import (
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/mock"
odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts"
odMock "github.com/alcionai/corso/src/internal/m365/onedrive/mock"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/streamstore"
ssmock "github.com/alcionai/corso/src/internal/streamstore/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details"
deeTD "github.com/alcionai/corso/src/pkg/backup/details/testdata"
"github.com/alcionai/corso/src/pkg/backup/identity"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/repository"
"github.com/alcionai/corso/src/pkg/extensions"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
"github.com/alcionai/corso/src/pkg/services/m365/api"
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
"github.com/alcionai/corso/src/pkg/store"
@ -1433,3 +1441,750 @@ func (suite *BackupOpIntegrationSuite) TestNewBackupOperation() {
})
}
}
// AssistBackupIntegrationSuite exercises backup operations against a real,
// test-scoped kopia repo to verify assist/merge backup behavior.
type AssistBackupIntegrationSuite struct {
	tester.Suite
	kopiaCloser func(ctx context.Context) // closes the kopia conn opened in SetupSuite
	acct        account.Account
	kw          *kopia.Wrapper
	sw          *store.Wrapper
	ms          *kopia.ModelStore
}
func TestAssistBackupIntegrationSuite(t *testing.T) {
suite.Run(t, &AssistBackupIntegrationSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
})
}
// SetupSuite initializes a fresh prefixed-S3 kopia repo and the wrapper,
// model store, and store wrapper shared by every test in the suite.
func (suite *AssistBackupIntegrationSuite) SetupSuite() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	var (
		st = storeTD.NewPrefixedS3Storage(t)
		k  = kopia.NewConn(st)
	)

	suite.acct = tconfig.NewM365Account(t)

	err := k.Initialize(ctx, repository.Options{}, repository.Retention{})
	require.NoError(t, err, clues.ToCore(err))

	// NOTE(review): any error returned by k.Close is discarded by this
	// closure; likely fine for teardown, but worth confirming.
	suite.kopiaCloser = func(ctx context.Context) {
		k.Close(ctx)
	}

	kw, err := kopia.NewWrapper(k)
	require.NoError(t, err, clues.ToCore(err))

	suite.kw = kw

	ms, err := kopia.NewModelStore(k)
	require.NoError(t, err, clues.ToCore(err))

	suite.ms = ms

	sw := store.NewKopiaStore(ms)
	suite.sw = sw
}
// TearDownSuite closes the model store, kopia wrapper, and kopia connection,
// in that order. Close errors (if the Close calls return any) are ignored.
func (suite *AssistBackupIntegrationSuite) TearDownSuite() {
	ctx, flush := tester.NewContext(suite.T())
	defer flush()

	if suite.ms != nil {
		suite.ms.Close(ctx)
	}

	if suite.kw != nil {
		suite.kw.Close(ctx)
	}

	if suite.kopiaCloser != nil {
		suite.kopiaCloser(ctx)
	}
}
// Compile-time check that mockBackupProducer satisfies inject.BackupProducer.
var _ inject.BackupProducer = &mockBackupProducer{}

// mockBackupProducer stands in for a real M365 backup producer, returning a
// canned set of collections, or a fabricated non-recoverable error.
type mockBackupProducer struct {
	colls                   []data.BackupCollection
	dcs                     data.CollectionStats
	injectNonRecoverableErr bool // when set, ProduceBackupCollections fails outright
}
// ProduceBackupCollections hands back the producer's canned collections, or
// a fabricated non-recoverable error when injectNonRecoverableErr is set.
func (mbp *mockBackupProducer) ProduceBackupCollections(
	context.Context,
	inject.BackupProducerConfig,
	*fault.Bus,
) ([]data.BackupCollection, prefixmatcher.StringSetReader, bool, error) {
	if !mbp.injectNonRecoverableErr {
		return mbp.colls, nil, true, nil
	}

	return nil, nil, false, clues.New("non-recoverable error")
}
// IsBackupRunnable always reports that the backup may proceed.
func (mbp *mockBackupProducer) IsBackupRunnable(
	context.Context,
	path.ServiceType,
	string,
) (bool, error) {
	return true, nil
}
// Wait returns the producer's canned collection stats.
func (mbp *mockBackupProducer) Wait() *data.CollectionStats {
	return &mbp.dcs
}
// makeBackupCollection bundles the given mock items into a
// mock.BackupCollection rooted at p / locPath.
func makeBackupCollection(
	p path.Path,
	locPath *path.Builder,
	items []odMock.Data,
) data.BackupCollection {
	ss := make([]data.Stream, 0, len(items))

	// Index into the slice (rather than ranging by value) so each stream
	// points at the slice element itself, not a loop-local copy.
	for i := range items {
		ss = append(ss, &items[i])
	}

	return &mock.BackupCollection{
		Path:    p,
		Loc:     locPath,
		Streams: ss,
	}
}
// makeMetadataCollectionEntries builds the delta-URL and previous-path
// metadata entries that a OneDrive backup would persist for driveID.
func makeMetadataCollectionEntries(
	deltaURL, driveID, folderID string,
	p path.Path,
) []graph.MetadataCollectionEntry {
	deltas := graph.NewMetadataEntry(
		graph.DeltaURLsFileName,
		map[string]string{driveID: deltaURL})

	prevPaths := graph.NewMetadataEntry(
		graph.PreviousPathFileName,
		map[string]map[string]string{
			driveID: {folderID: p.PlainString()},
		})

	return []graph.MetadataCollectionEntry{deltas, prevPaths}
}
// Fixed identifiers shared by the mocked OneDrive data in these tests.
const (
	userID    = "user-id"
	driveID   = "drive-id"
	driveName = "drive-name"
	folderID  = "folder-id"
)
// makeODMockData fabricates a single OneDrive mock item with the given id,
// optional extension data, modified time, deletion flag, and read error.
// Size is fixed at 100 bytes.
func makeODMockData(
	fileID string,
	extData *details.ExtensionData,
	modTime time.Time,
	del bool,
	readErr error,
) odMock.Data {
	item := odMock.Data{
		ID:            fileID,
		DriveID:       driveID,
		DriveName:     driveName,
		Reader:        odMock.FileRespReadCloser(odMock.DriveFilePayloadData),
		ReadErr:       readErr,
		Sz:            100,
		ModifiedTime:  modTime,
		Del:           del,
		ExtensionData: extData,
	}

	// Swap in the extension-wrapping reader only when extension data is wanted.
	if extData != nil {
		item.Reader = odMock.FileRespWithExtensions(odMock.DriveFilePayloadData, extData)
	}

	return item
}
// Check what kind of backup is produced for a given failurePolicy/observed fault
// bus combination.
//
// It's currently using errors generated during mockBackupProducer phase.
// Ideally we would test with errors generated in various phases of backup, but
// that needs putting produceManifestsAndMetadata and mergeDetails behind mockable
// interfaces.
//
// Note: Tests are incremental since we are reusing kopia repo between tests,
// but this is irrelevant here.
func (suite *AssistBackupIntegrationSuite) TestBackupTypesForFailureModes() {
	var (
		acct     = tconfig.NewM365Account(suite.T())
		tenantID = acct.Config[config.AzureTenantIDKey]
		opts     = control.DefaultOptions()
		osel     = selectors.NewOneDriveBackup([]string{userID})
	)

	osel.Include(selTD.OneDriveBackupFolderScope(osel))

	// NOTE(review): the literal "drive-id" duplicates the driveID const above;
	// consider using the constant.
	pathElements := []string{odConsts.DrivesPathDir, "drive-id", odConsts.RootPathDir, folderID}

	tmp, err := path.Build(tenantID, userID, path.OneDriveService, path.FilesCategory, false, pathElements...)
	require.NoError(suite.T(), err, clues.ToCore(err))

	locPath := path.Builder{}.Append(tmp.Folders()...)

	// Each case supplies a collection set (possibly containing item read
	// errors), a failure policy, and the backup tag + fault-bus state we
	// expect after the run. An empty expectBackupTag means the run should
	// not have produced a persisted backup model to check.
	table := []struct {
		name                    string
		collFunc                func() []data.BackupCollection
		injectNonRecoverableErr bool
		failurePolicy           control.FailurePolicy
		expectRunErr            assert.ErrorAssertionFunc
		expectBackupTag         string
		expectFaults            func(t *testing.T, errs *fault.Bus)
	}{
		{
			name: "fail fast, no errors",
			collFunc: func() []data.BackupCollection {
				bc := []data.BackupCollection{
					makeBackupCollection(
						tmp,
						locPath,
						[]odMock.Data{
							makeODMockData("file1", nil, time.Now(), false, nil),
						}),
				}
				return bc
			},
			failurePolicy:   control.FailFast,
			expectRunErr:    assert.NoError,
			expectBackupTag: model.MergeBackup,
			expectFaults: func(t *testing.T, errs *fault.Bus) {
				assert.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
				assert.Empty(t, errs.Recovered(), "recovered errors")
			},
		},
		{
			name: "fail fast, any errors",
			collFunc: func() []data.BackupCollection {
				bc := []data.BackupCollection{
					makeBackupCollection(
						tmp,
						locPath,
						[]odMock.Data{
							makeODMockData("file1", nil, time.Now(), false, assert.AnError),
						}),
				}
				return bc
			},
			failurePolicy:   control.FailFast,
			expectRunErr:    assert.Error,
			expectBackupTag: "",
			expectFaults: func(t *testing.T, errs *fault.Bus) {
				assert.Error(t, errs.Failure(), clues.ToCore(errs.Failure()))
			},
		},
		{
			name: "best effort, no errors",
			collFunc: func() []data.BackupCollection {
				bc := []data.BackupCollection{
					makeBackupCollection(
						tmp,
						locPath,
						[]odMock.Data{
							makeODMockData("file1", nil, time.Now(), false, nil),
						}),
				}
				return bc
			},
			failurePolicy:   control.BestEffort,
			expectRunErr:    assert.NoError,
			expectBackupTag: model.MergeBackup,
			expectFaults: func(t *testing.T, errs *fault.Bus) {
				assert.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
				assert.Empty(t, errs.Recovered(), "recovered errors")
			},
		},
		{
			name: "best effort, non-recoverable errors",
			collFunc: func() []data.BackupCollection {
				return nil
			},
			injectNonRecoverableErr: true,
			failurePolicy:           control.BestEffort,
			expectRunErr:            assert.Error,
			expectBackupTag:         "",
			expectFaults: func(t *testing.T, errs *fault.Bus) {
				assert.Error(t, errs.Failure(), clues.ToCore(errs.Failure()))
			},
		},
		{
			name: "best effort, recoverable errors",
			collFunc: func() []data.BackupCollection {
				bc := []data.BackupCollection{
					makeBackupCollection(
						tmp,
						locPath,
						[]odMock.Data{
							makeODMockData("file1", nil, time.Now(), false, assert.AnError),
						}),
				}
				return bc
			},
			failurePolicy:   control.BestEffort,
			expectRunErr:    assert.NoError,
			expectBackupTag: model.MergeBackup,
			expectFaults: func(t *testing.T, errs *fault.Bus) {
				assert.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
				assert.Greater(t, len(errs.Recovered()), 0, "recovered errors")
			},
		},
		{
			name: "fail after recovery, no errors",
			collFunc: func() []data.BackupCollection {
				bc := []data.BackupCollection{
					makeBackupCollection(
						tmp,
						locPath,
						[]odMock.Data{
							makeODMockData("file1", nil, time.Now(), false, nil),
							makeODMockData("file2", nil, time.Now(), false, nil),
						}),
				}
				return bc
			},
			failurePolicy:   control.FailAfterRecovery,
			expectRunErr:    assert.NoError,
			expectBackupTag: model.MergeBackup,
			expectFaults: func(t *testing.T, errs *fault.Bus) {
				assert.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
				assert.Empty(t, errs.Recovered(), "recovered errors")
			},
		},
		{
			name: "fail after recovery, non-recoverable errors",
			collFunc: func() []data.BackupCollection {
				return nil
			},
			injectNonRecoverableErr: true,
			failurePolicy:           control.FailAfterRecovery,
			expectRunErr:            assert.Error,
			expectBackupTag:         "",
			expectFaults: func(t *testing.T, errs *fault.Bus) {
				assert.Error(t, errs.Failure(), clues.ToCore(errs.Failure()))
			},
		},
		{
			name: "fail after recovery, recoverable errors",
			collFunc: func() []data.BackupCollection {
				bc := []data.BackupCollection{
					makeBackupCollection(
						tmp,
						locPath,
						[]odMock.Data{
							makeODMockData("file1", nil, time.Now(), false, nil),
							makeODMockData("file2", nil, time.Now(), false, assert.AnError),
						}),
				}
				return bc
			},
			failurePolicy:   control.FailAfterRecovery,
			expectRunErr:    assert.Error,
			expectBackupTag: model.AssistBackup,
			expectFaults: func(t *testing.T, errs *fault.Bus) {
				assert.Error(t, errs.Failure(), clues.ToCore(errs.Failure()))
				assert.Greater(t, len(errs.Recovered()), 0, "recovered errors")
			},
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			cs := test.collFunc()

			// Append the metadata collection a real producer would emit, so
			// the operation treats this as an incremental-capable backup.
			mc, err := graph.MakeMetadataCollection(
				tenantID,
				userID,
				path.OneDriveService,
				path.FilesCategory,
				makeMetadataCollectionEntries("url/1", driveID, folderID, tmp),
				func(*support.ControllerOperationStatus) {})
			require.NoError(t, err, clues.ToCore(err))

			cs = append(cs, mc)

			bp := &mockBackupProducer{
				colls:                   cs,
				injectNonRecoverableErr: test.injectNonRecoverableErr,
			}

			// NOTE: mutates the shared opts var; safe while subtests run
			// sequentially.
			opts.FailureHandling = test.failurePolicy

			bo, err := NewBackupOperation(
				ctx,
				opts,
				suite.kw,
				suite.sw,
				bp,
				acct,
				osel.Selector,
				selectors.Selector{DiscreteOwner: userID},
				evmock.NewBus())
			require.NoError(t, err, clues.ToCore(err))

			err = bo.Run(ctx)
			test.expectRunErr(t, err, clues.ToCore(err))

			test.expectFaults(t, bo.Errors)

			// No tag expected means no persisted backup model to verify.
			if len(test.expectBackupTag) == 0 {
				return
			}

			bID := bo.Results.BackupID
			require.NotEmpty(t, bID)

			// Fetch the persisted backup model and check its type tag.
			bup := backup.Backup{}

			err = suite.ms.Get(ctx, model.BackupSchema, bID, &bup)
			require.NoError(t, err, clues.ToCore(err))

			require.Equal(t, test.expectBackupTag, bup.Tags[model.BackupTypeTag])
		})
	}
}
// selectFilesFromDeets filters d down to its file entries (anything without
// folder info), keyed by ItemRef.
func selectFilesFromDeets(d details.Details) map[string]details.Entry {
	out := map[string]details.Entry{}

	for _, ent := range d.Entries {
		if ent.Folder == nil {
			out[ent.ItemRef] = ent
		}
	}

	return out
}
// TestExtensionsIncrementals tests presence of corso extension data in details
// Note that since we are mocking out backup producer here, corso extensions can't be
// attached as they would in prod. However, this is fine here, since we are more interested
// in testing whether deets get carried over correctly for various scenarios.
func (suite *AssistBackupIntegrationSuite) TestExtensionsIncrementals() {
	var (
		acct     = tconfig.NewM365Account(suite.T())
		tenantID = acct.Config[config.AzureTenantIDKey]
		opts     = control.DefaultOptions()
		osel     = selectors.NewOneDriveBackup([]string{userID})
		// Default policy used by SDK clients
		failurePolicy = control.FailAfterRecovery
		// Three distinct mod times; Truncate(0) strips the monotonic clock
		// reading so later Equal comparisons are stable.
		T1      = time.Now().Truncate(0)
		T2      = T1.Add(time.Hour).Truncate(0)
		T3      = T2.Add(time.Hour).Truncate(0)
		extData = make(map[int]*details.ExtensionData)
	)

	// One empty extension-data holder per mock file.
	for i := 0; i < 3; i++ {
		d := make(map[string]any)
		extData[i] = &details.ExtensionData{
			Data: d,
		}
	}

	osel.Include(selTD.OneDriveBackupFolderScope(osel))

	sss := streamstore.NewStreamer(
		suite.kw,
		suite.acct.ID(),
		osel.PathService())

	// NOTE(review): the literal "drive-id" duplicates the driveID const above;
	// consider using the constant.
	pathElements := []string{odConsts.DrivesPathDir, "drive-id", odConsts.RootPathDir, folderID}

	tmp, err := path.Build(tenantID, userID, path.OneDriveService, path.FilesCategory, false, pathElements...)
	require.NoError(suite.T(), err, clues.ToCore(err))

	locPath := path.Builder{}.Append(tmp.Folders()...)

	// Cases run in order against the same repo, so each builds on the
	// previous backup's state (assist -> assist -> merge -> reset -> ...).
	table := []struct {
		name          string
		collFunc      func() []data.BackupCollection
		expectRunErr  assert.ErrorAssertionFunc
		validateDeets func(t *testing.T, gotDeets details.Details)
	}{
		{
			name: "Assist backup, 1 new deets",
			collFunc: func() []data.BackupCollection {
				bc := []data.BackupCollection{
					makeBackupCollection(
						tmp,
						locPath,
						[]odMock.Data{
							makeODMockData("file1", extData[0], T1, false, nil),
							makeODMockData("file2", extData[1], T1, false, assert.AnError),
						}),
				}
				return bc
			},
			expectRunErr: assert.Error,
			validateDeets: func(t *testing.T, d details.Details) {
				files := selectFilesFromDeets(d)
				require.Len(t, files, 1)

				f := files["file1"]
				require.NotNil(t, f)
				require.True(t, T1.Equal(f.Modified()))
				require.NotZero(t, f.Extension.Data[extensions.KNumBytes])
			},
		},
		{
			name: "Assist backup after assist backup, 1 existing, 1 new deets",
			collFunc: func() []data.BackupCollection {
				bc := []data.BackupCollection{
					makeBackupCollection(
						tmp,
						locPath,
						[]odMock.Data{
							makeODMockData("file1", extData[0], T1, false, nil),
							makeODMockData("file2", extData[1], T2, false, nil),
							makeODMockData("file3", extData[2], T2, false, assert.AnError),
						}),
				}
				return bc
			},
			expectRunErr: assert.Error,
			validateDeets: func(t *testing.T, d details.Details) {
				files := selectFilesFromDeets(d)
				require.Len(t, files, 2)

				for _, f := range files {
					switch f.ItemRef {
					case "file1":
						require.True(t, T1.Equal(f.Modified()))
						require.NotZero(t, f.Extension.Data[extensions.KNumBytes])
					case "file2":
						require.True(t, T2.Equal(f.Modified()))
						require.NotZero(t, f.Extension.Data[extensions.KNumBytes])
					default:
						require.Fail(t, "unexpected file", f.ItemRef)
					}
				}
			},
		},
		{
			name: "Merge backup, 2 existing deets, 1 new deet",
			collFunc: func() []data.BackupCollection {
				bc := []data.BackupCollection{
					makeBackupCollection(
						tmp,
						locPath,
						[]odMock.Data{
							makeODMockData("file1", extData[0], T1, false, nil),
							makeODMockData("file2", extData[1], T2, false, nil),
							makeODMockData("file3", extData[2], T3, false, nil),
						}),
				}
				return bc
			},
			expectRunErr: assert.NoError,
			validateDeets: func(t *testing.T, d details.Details) {
				files := selectFilesFromDeets(d)
				require.Len(t, files, 3)

				for _, f := range files {
					switch f.ItemRef {
					case "file1":
						require.True(t, T1.Equal(f.Modified()))
						require.NotZero(t, f.Extension.Data[extensions.KNumBytes])
					case "file2":
						require.True(t, T2.Equal(f.Modified()))
						require.NotZero(t, f.Extension.Data[extensions.KNumBytes])
					case "file3":
						require.True(t, T3.Equal(f.Modified()))
						require.NotZero(t, f.Extension.Data[extensions.KNumBytes])
					default:
						require.Fail(t, "unexpected file", f.ItemRef)
					}
				}
			},
		},
		{
			// Reset state so we can reuse the same test data
			name: "All files deleted",
			collFunc: func() []data.BackupCollection {
				bc := []data.BackupCollection{
					makeBackupCollection(
						tmp,
						locPath,
						[]odMock.Data{
							makeODMockData("file1", extData[0], T1, true, nil),
							makeODMockData("file2", extData[1], T2, true, nil),
							makeODMockData("file3", extData[2], T3, true, nil),
						}),
				}
				return bc
			},
			expectRunErr: assert.NoError,
			validateDeets: func(t *testing.T, d details.Details) {
				files := selectFilesFromDeets(d)
				require.Len(t, files, 0)
			},
		},
		{
			name: "Merge backup, 1 new deets",
			collFunc: func() []data.BackupCollection {
				bc := []data.BackupCollection{
					makeBackupCollection(
						tmp,
						locPath,
						[]odMock.Data{
							makeODMockData("file1", extData[0], T1, false, nil),
						}),
				}
				return bc
			},
			expectRunErr: assert.NoError,
			validateDeets: func(t *testing.T, d details.Details) {
				files := selectFilesFromDeets(d)
				require.Len(t, files, 1)

				for _, f := range files {
					switch f.ItemRef {
					case "file1":
						require.True(t, T1.Equal(f.Modified()))
						require.NotZero(t, f.Extension.Data[extensions.KNumBytes])
					default:
						require.Fail(t, "unexpected file", f.ItemRef)
					}
				}
			},
		},
		// This test fails currently, need to rerun with Ashlie's PR.
		{
			name: "Assist backup after merge backup, 1 new deets, 1 existing deet",
			collFunc: func() []data.BackupCollection {
				bc := []data.BackupCollection{
					makeBackupCollection(
						tmp,
						locPath,
						[]odMock.Data{
							makeODMockData("file1", extData[0], T1, false, nil),
							makeODMockData("file2", extData[1], T2, false, nil),
							makeODMockData("file3", extData[2], T3, false, assert.AnError),
						}),
				}
				return bc
			},
			expectRunErr: assert.Error,
			validateDeets: func(t *testing.T, d details.Details) {
				files := selectFilesFromDeets(d)
				require.Len(t, files, 2)

				for _, f := range files {
					switch f.ItemRef {
					case "file1":
						require.True(t, T1.Equal(f.Modified()))
						require.NotZero(t, f.Extension.Data[extensions.KNumBytes])
					case "file2":
						require.True(t, T2.Equal(f.Modified()))
						require.NotZero(t, f.Extension.Data[extensions.KNumBytes])
					default:
						require.Fail(t, "unexpected file", f.ItemRef)
					}
				}
			},
		},
		// TODO(pandeyabs): Remaining tests.
		// 1. Deets updated in assist backup. Following backup should have updated deets.
		// 2. Concurrent overlapping reasons.
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			cs := test.collFunc()

			// Append the metadata collection a real producer would emit.
			mc, err := graph.MakeMetadataCollection(
				tenantID,
				userID,
				path.OneDriveService,
				path.FilesCategory,
				makeMetadataCollectionEntries("url/1", driveID, folderID, tmp),
				func(*support.ControllerOperationStatus) {})
			require.NoError(t, err, clues.ToCore(err))

			cs = append(cs, mc)

			bp := &mockBackupProducer{
				colls: cs,
			}

			opts.FailureHandling = failurePolicy

			bo, err := NewBackupOperation(
				ctx,
				opts,
				suite.kw,
				suite.sw,
				bp,
				acct,
				osel.Selector,
				selectors.Selector{DiscreteOwner: userID},
				evmock.NewBus())
			require.NoError(t, err, clues.ToCore(err))

			err = bo.Run(ctx)
			test.expectRunErr(t, err, clues.ToCore(err))

			assert.NotEmpty(t, bo.Results.BackupID)

			// Pull the persisted details for this backup and validate them.
			deets, _ := deeTD.GetDeetsInBackup(
				t,
				ctx,
				bo.Results.BackupID,
				tenantID,
				userID,
				path.OneDriveService,
				deeTD.DriveIDFromRepoRef,
				suite.ms,
				sss)
			assert.NotNil(t, deets)

			test.validateDeets(t, deets)

			// Clear extension data between test runs
			for i := 0; i < 3; i++ {
				d := make(map[string]any)
				extData[i] = &details.ExtensionData{
					Data: d,
				}
			}
		})
	}
}