Cleanup API for BackupBases (#4471)

Update the BackupBases API to return BackupBase
structs that contain both the item data snapshot
and the backup model, instead of exposing separate
accessor functions for each. Includes minor logic
updates to accommodate the change.

This PR also updates the tests and mock code for
the new API.

Reviewing commit-by-commit is suggested.
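
For quick reference, here's a condensed sketch of the consolidated API as it appears in the diff below (doc comments trimmed; import paths follow the ones used elsewhere in this change):

```go
package kopia

import (
    "context"

    "github.com/kopia/kopia/snapshot"

    "github.com/alcionai/corso/src/internal/model"
    "github.com/alcionai/corso/src/pkg/backup"
    "github.com/alcionai/corso/src/pkg/backup/identity"
)

// BackupBase pairs a backup model with the item data snapshot it was created
// from, replacing the separate BackupEntry/ManifestEntry types.
type BackupBase struct {
    Backup           *backup.Backup
    ItemDataSnapshot *snapshot.Manifest
    // Reasons contains the tenant, protected resource, and service/categories
    // that caused this snapshot to be selected as a base.
    Reasons []identity.Reasoner
}

// BackupBases is the consolidated interface; every accessor now returns
// BackupBase values instead of backup- or manifest-specific entries.
type BackupBases interface {
    ConvertToAssistBase(backupID model.StableID)
    MergeBases() []BackupBase
    DisableMergeBases()
    UniqueAssistBases() []BackupBase
    DisableAssistBases()
    MinBackupVersion() int
    MergeBackupBases(
        ctx context.Context,
        other BackupBases,
        reasonToKey func(reason identity.Reasoner) string,
    ) BackupBases
    SnapshotAssistBases() []BackupBase
}
```

Call sites that previously combined Backups() with MergeBases(), or UniqueAssistBackups() with UniqueAssistBases(), now make a single call and read the Backup or ItemDataSnapshot field off each returned BackupBase.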

---

#### Does this PR need a docs update or release note?

- [ ]  Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x]  No

#### Type of change

- [ ] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [x] 🧹 Tech Debt/Cleanup

#### Issue(s)

* closes #3943

#### Test Plan

- [ ] 💪 Manual
- [x]  Unit test
- [x] 💚 E2E
Commit 09531de08c (parent 1470776f3c)
Authored by ashmrtn on 2023-10-26 11:38:14 -07:00; committed by GitHub
19 changed files with 450 additions and 657 deletions

View File

@@ -17,16 +17,49 @@ import (
 // TODO(ashmrtn): Move this into some inject package. Here to avoid import
 // cycles.
 type BackupBases interface {
-    // ConvertToAssistBase converts the base with the given item data snapshot ID
-    // from a merge base to an assist base.
-    ConvertToAssistBase(manifestID manifest.ID)
-    Backups() []BackupEntry
-    UniqueAssistBackups() []BackupEntry
-    MinBackupVersion() int
-    MergeBases() []ManifestEntry
+    // ConvertToAssistBase converts the base with the given backup ID from a merge
+    // base to an assist base.
+    ConvertToAssistBase(backupID model.StableID)
+    // MergeBases returns a []BackupBase that corresponds to all the bases that
+    // will source unchanged information for this backup during hierarchy merging,
+    // snapshot creation, and details merging.
+    MergeBases() []BackupBase
+    // DisableMergeBases converts all merge bases in this BackupBases to assist
+    // bases. These bases can still participate in sourcing data kopia considers
+    // "cached" during the snapshot process and can source backup details entries
+    // for those cached items. However, they won't be used to source unchanged
+    // items during hierarchy merging, snapshot creation, or details merging.
+    //
+    // This call is order sensitive with DisableAssistBases.
     DisableMergeBases()
-    UniqueAssistBases() []ManifestEntry
+    // UniqueAssistBases returns the set of assist bases for the backup operation.
+    // Assist bases are used to source item data and details entries if the item
+    // is considered "cached" by kopia. They are not used to source unchanged
+    // items during hierarchy merging.
+    UniqueAssistBases() []BackupBase
+    // DisableAssistBases clears the set of assist bases for this backup. Doing so
+    // will result in kopia not finding any "cached" items and assist bases won't
+    // participate in details merging.
+    //
+    // This call is order sensitive with DisableMergeBases.
     DisableAssistBases()
+    // MinBackupVersion returns the lowest version of all merge backups in the
+    // BackupBases.
+    MinBackupVersion() int
+    // MergeBackupBases takes another BackupBases and merges it's contained assist
+    // and merge bases into this BackupBases. The passed in BackupBases is
+    // considered an older alternative to this BackupBases meaning bases from
+    // other won't be selected unless there's no item in this BackupBases to cover
+    // that Reason.
+    //
+    // Callers pass in reasonToKey to control how individual BackupBase items are
+    // selected. For example, to migrate from using user name to user ID as the
+    // protected resource in the Reason the reasonToKey function could map
+    // BackupBase items with the same tenant, service, and category to the same
+    // key. This works because backup operations are already per protected
+    // resource.
+    //
+    // This call is order sensitive with DisableMergeBases and DisableAssistBases.
     MergeBackupBases(
         ctx context.Context,
         other BackupBases,
@@ -36,12 +69,7 @@ type BackupBases interface {
     // incremental snapshot operations. It consists of the union of merge bases
     // and assist bases. If DisableAssistBases has been called then it returns
     // nil.
-    SnapshotAssistBases() []ManifestEntry
-
-    // TODO(ashmrtn): Remove other functions and just have these once other code
-    // is updated. Here for now so changes in this file can be made.
-    NewMergeBases() []BackupBase
-    NewUniqueAssistBases() []BackupBase
+    SnapshotAssistBases() []BackupBase
 }
 
 type backupBases struct {
@@ -53,37 +81,21 @@ type backupBases struct {
     disableAssistBases bool
 }
 
-func (bb *backupBases) SnapshotAssistBases() []ManifestEntry {
+func (bb *backupBases) SnapshotAssistBases() []BackupBase {
     if bb.disableAssistBases {
         return nil
     }
 
-    res := []ManifestEntry{}
-
-    for _, ab := range bb.assistBases {
-        res = append(res, ManifestEntry{
-            Manifest: ab.ItemDataSnapshot,
-            Reasons:  ab.Reasons,
-        })
-    }
-
-    for _, mb := range bb.mergeBases {
-        res = append(res, ManifestEntry{
-            Manifest: mb.ItemDataSnapshot,
-            Reasons:  mb.Reasons,
-        })
-    }
-
     // Need to use the actual variables here because the functions will return nil
     // depending on what's been marked as disabled.
-    return res
+    return append(slices.Clone(bb.mergeBases), bb.assistBases...)
 }
 
-func (bb *backupBases) ConvertToAssistBase(manifestID manifest.ID) {
+func (bb *backupBases) ConvertToAssistBase(backupID model.StableID) {
     idx := slices.IndexFunc(
         bb.mergeBases,
         func(base BackupBase) bool {
-            return base.ItemDataSnapshot.ID == manifestID
+            return base.Backup.ID == backupID
         })
 
     if idx >= 0 {
         bb.assistBases = append(bb.assistBases, bb.mergeBases[idx])
@@ -91,36 +103,6 @@ func (bb *backupBases) ConvertToAssistBase(manifestID manifest.ID) {
     }
 }
 
-func (bb backupBases) Backups() []BackupEntry {
-    res := []BackupEntry{}
-
-    for _, mb := range bb.mergeBases {
-        res = append(res, BackupEntry{
-            Backup:  mb.Backup,
-            Reasons: mb.Reasons,
-        })
-    }
-
-    return res
-}
-
-func (bb backupBases) UniqueAssistBackups() []BackupEntry {
-    if bb.disableAssistBases {
-        return nil
-    }
-
-    res := []BackupEntry{}
-
-    for _, ab := range bb.assistBases {
-        res = append(res, BackupEntry{
-            Backup:  ab.Backup,
-            Reasons: ab.Reasons,
-        })
-    }
-
-    return res
-}
-
 func (bb *backupBases) MinBackupVersion() int {
     min := version.NoBackup
 
@@ -137,20 +119,7 @@ func (bb *backupBases) MinBackupVersion() int {
     return min
 }
 
-func (bb backupBases) MergeBases() []ManifestEntry {
-    res := []ManifestEntry{}
-
-    for _, mb := range bb.mergeBases {
-        res = append(res, ManifestEntry{
-            Manifest: mb.ItemDataSnapshot,
-            Reasons:  mb.Reasons,
-        })
-    }
-
-    return res
-}
-
-func (bb backupBases) NewMergeBases() []BackupBase {
+func (bb backupBases) MergeBases() []BackupBase {
     return slices.Clone(bb.mergeBases)
 }
 
@@ -165,24 +134,7 @@ func (bb *backupBases) DisableMergeBases() {
     bb.mergeBases = nil
 }
 
-func (bb backupBases) UniqueAssistBases() []ManifestEntry {
-    if bb.disableAssistBases {
-        return nil
-    }
-
-    res := []ManifestEntry{}
-
-    for _, ab := range bb.assistBases {
-        res = append(res, ManifestEntry{
-            Manifest: ab.ItemDataSnapshot,
-            Reasons:  ab.Reasons,
-        })
-    }
-
-    return res
-}
-
-func (bb backupBases) NewUniqueAssistBases() []BackupBase {
+func (bb backupBases) UniqueAssistBases() []BackupBase {
     if bb.disableAssistBases {
         return nil
     }
@@ -250,11 +202,11 @@ func (bb *backupBases) MergeBackupBases(
     other BackupBases,
     reasonToKey func(reason identity.Reasoner) string,
 ) BackupBases {
-    if other == nil || (len(other.NewMergeBases()) == 0 && len(other.NewUniqueAssistBases()) == 0) {
+    if other == nil || (len(other.MergeBases()) == 0 && len(other.UniqueAssistBases()) == 0) {
         return bb
     }
 
-    if bb == nil || (len(bb.NewMergeBases()) == 0 && len(bb.NewUniqueAssistBases()) == 0) {
+    if bb == nil || (len(bb.MergeBases()) == 0 && len(bb.UniqueAssistBases()) == 0) {
         return other
     }
@@ -282,12 +234,12 @@ func (bb *backupBases) MergeBackupBases(
         }
     }
 
-    addMerge := getMissingBases(reasonToKey, toMerge, other.NewMergeBases())
-    addAssist := getMissingBases(reasonToKey, assist, other.NewUniqueAssistBases())
+    addMerge := getMissingBases(reasonToKey, toMerge, other.MergeBases())
+    addAssist := getMissingBases(reasonToKey, assist, other.UniqueAssistBases())
 
     res := &backupBases{
-        mergeBases:  append(addMerge, bb.NewMergeBases()...),
-        assistBases: append(addAssist, bb.NewUniqueAssistBases()...),
+        mergeBases:  append(addMerge, bb.MergeBases()...),
+        assistBases: append(addAssist, bb.UniqueAssistBases()...),
     }
 
     return res
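
As an aside on the reasonToKey parameter documented in MergeBackupBases above, here's a minimal sketch of two possible key functions. The first mirrors the package's existing reasonKey helper; the second is the hypothetical migration-style mapping the doc comment describes, and it assumes identity.Reasoner exposes a Tenant() accessor alongside ProtectedResource(), Service(), and Category():

```go
package kopia

import "github.com/alcionai/corso/src/pkg/backup/identity"

// exactReasonKey only lets a base from other cover a Reason when the
// protected resource, service, and category all match (same shape as the
// reasonKey helper in the base finder).
func exactReasonKey(r identity.Reasoner) string {
    return r.ProtectedResource() + r.Service().String() + r.Category().String()
}

// migrationReasonKey drops the protected resource from the key so a base
// recorded under a user name can still cover the same tenant, service, and
// category once backups switch to user IDs. This works because a backup
// operation only covers a single protected resource. Tenant() is assumed.
func migrationReasonKey(r identity.Reasoner) string {
    return r.Tenant() + r.Service().String() + r.Category().String()
}
```

A caller migrating protected resource identifiers would then use something along the lines of bb.MergeBackupBases(ctx, olderBases, migrationReasonKey).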

View File

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"golang.org/x/exp/slices"
"github.com/alcionai/corso/src/internal/model" "github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -191,7 +192,7 @@ func (suite *BackupBasesUnitSuite) TestConvertToAssistBase() {
}, },
} }
delID := manifest.ID("its3") delID := model.StableID("3")
table := []struct { table := []struct {
name string name string
@ -298,39 +299,19 @@ func (suite *BackupBasesUnitSuite) TestDisableMergeBases() {
} }
bb.DisableMergeBases() bb.DisableMergeBases()
assert.Empty(t, bb.Backups())
assert.Empty(t, bb.MergeBases()) assert.Empty(t, bb.MergeBases())
// Merge bases should still appear in the assist base set passed in for kopia // Merge bases should still appear in the assist base set passed in for kopia
// snapshots and details merging. // snapshots and details merging.
assert.ElementsMatch( assert.ElementsMatch(
t, t,
[]ManifestEntry{ append(slices.Clone(merge), assist...),
{Manifest: merge[0].ItemDataSnapshot},
{Manifest: merge[1].ItemDataSnapshot},
{Manifest: assist[0].ItemDataSnapshot},
{Manifest: assist[1].ItemDataSnapshot},
},
bb.SnapshotAssistBases()) bb.SnapshotAssistBases())
assert.ElementsMatch( assert.ElementsMatch(
t, t,
[]ManifestEntry{ append(slices.Clone(merge), assist...),
{Manifest: merge[0].ItemDataSnapshot},
{Manifest: merge[1].ItemDataSnapshot},
{Manifest: assist[0].ItemDataSnapshot},
{Manifest: assist[1].ItemDataSnapshot},
},
bb.UniqueAssistBases()) bb.UniqueAssistBases())
assert.ElementsMatch(
t,
[]BackupEntry{
{Backup: merge[0].Backup},
{Backup: merge[1].Backup},
{Backup: assist[0].Backup},
{Backup: assist[1].Backup},
},
bb.UniqueAssistBackups())
} }
func (suite *BackupBasesUnitSuite) TestDisableAssistBases() { func (suite *BackupBasesUnitSuite) TestDisableAssistBases() {
@ -342,11 +323,9 @@ func (suite *BackupBasesUnitSuite) TestDisableAssistBases() {
bb.DisableAssistBases() bb.DisableAssistBases()
assert.Empty(t, bb.UniqueAssistBases()) assert.Empty(t, bb.UniqueAssistBases())
assert.Empty(t, bb.UniqueAssistBackups())
assert.Empty(t, bb.SnapshotAssistBases()) assert.Empty(t, bb.SnapshotAssistBases())
// Merge base should be unchanged. // Merge base should be unchanged.
assert.Len(t, bb.Backups(), 2)
assert.Len(t, bb.MergeBases(), 2) assert.Len(t, bb.MergeBases(), 2)
} }

View File

@@ -42,16 +42,12 @@ func reasonKey(r identity.Reasoner) string {
     return r.ProtectedResource() + r.Service().String() + r.Category().String()
 }
 
-type BackupEntry struct {
-    *backup.Backup
-    Reasons []identity.Reasoner
-}
-
-type ManifestEntry struct {
-    *snapshot.Manifest
-    // Reasons contains the ResourceOwners and Service/Categories that caused this
-    // snapshot to be selected as a base. We can't reuse OwnersCats here because
-    // it's possible some ResourceOwners will have a subset of the Categories as
+type BackupBase struct {
+    Backup           *backup.Backup
+    ItemDataSnapshot *snapshot.Manifest
+    // Reasons contains the tenant, protected resource and service/categories that
+    // caused this snapshot to be selected as a base. It's possible some
+    // (tenant, protected resources) will have a subset of the categories as
     // the reason for selecting a snapshot. For example:
     // 1. backup user1 email,contacts -> B1
     // 2. backup user1 contacts -> B2 (uses B1 as base)
@@ -59,9 +55,9 @@ type ManifestEntry struct {
     Reasons []identity.Reasoner
 }
 
-func (me ManifestEntry) GetTag(key string) (string, bool) {
+func (bb BackupBase) GetSnapshotTag(key string) (string, bool) {
     k, _ := makeTagKV(key)
-    v, ok := me.Tags[k]
+    v, ok := bb.ItemDataSnapshot.Tags[k]
 
     return v, ok
 }
@@ -136,19 +132,6 @@ func (b *baseFinder) getBackupModel(
     return bup, nil
 }
 
-type BackupBase struct {
-    Backup           *backup.Backup
-    ItemDataSnapshot *snapshot.Manifest
-    // Reasons contains the tenant, protected resource and service/categories that
-    // caused this snapshot to be selected as a base. It's possible some
-    // (tenant, protected resources) will have a subset of the categories as
-    // the reason for selecting a snapshot. For example:
-    // 1. backup user1 email,contacts -> B1
-    // 2. backup user1 contacts -> B2 (uses B1 as base)
-    // 3. backup user1 email,contacts,events (uses B1 for email, B2 for contacts)
-    Reasons []identity.Reasoner
-}
-
 // findBasesInSet goes through manifest metadata entries and sees if they're
 // incomplete or not. Manifests which don't have an associated backup
 // are discarded as incomplete. Manifests are then checked to see if they

View File

@ -970,12 +970,12 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
checkBackupEntriesMatch( checkBackupEntriesMatch(
t, t,
bb.Backups(), bb.MergeBases(),
test.backupData, test.backupData,
test.expectedBaseReasons) test.expectedBaseReasons)
checkBackupEntriesMatch( checkBackupEntriesMatch(
t, t,
bb.UniqueAssistBackups(), bb.UniqueAssistBases(),
test.backupData, test.backupData,
test.expectedAssistReasons) test.expectedAssistReasons)
@ -1078,7 +1078,7 @@ func (suite *BaseFinderUnitSuite) TestFindBases_CustomTags() {
func checkManifestEntriesMatch( func checkManifestEntriesMatch(
t *testing.T, t *testing.T,
retSnaps []ManifestEntry, retSnaps []BackupBase,
allExpected []manifestInfo, allExpected []manifestInfo,
expectedIdxsAndReasons map[int][]identity.Reasoner, expectedIdxsAndReasons map[int][]identity.Reasoner,
) { ) {
@ -1090,7 +1090,7 @@ func checkManifestEntriesMatch(
got := make([]*snapshot.Manifest, 0, len(retSnaps)) got := make([]*snapshot.Manifest, 0, len(retSnaps))
for _, s := range retSnaps { for _, s := range retSnaps {
got = append(got, s.Manifest) got = append(got, s.ItemDataSnapshot)
} }
assert.ElementsMatch(t, expected, got) assert.ElementsMatch(t, expected, got)
@ -1102,7 +1102,7 @@ func checkManifestEntriesMatch(
} }
for _, found := range retSnaps { for _, found := range retSnaps {
reasons, ok := expectedReasons[found.ID] reasons, ok := expectedReasons[found.ItemDataSnapshot.ID]
if !ok { if !ok {
// Missing or extra snapshots will be reported by earlier checks. // Missing or extra snapshots will be reported by earlier checks.
continue continue
@ -1113,13 +1113,13 @@ func checkManifestEntriesMatch(
reasons, reasons,
found.Reasons, found.Reasons,
"incorrect reasons for snapshot with ID %s", "incorrect reasons for snapshot with ID %s",
found.ID) found.ItemDataSnapshot.ID)
} }
} }
func checkBackupEntriesMatch( func checkBackupEntriesMatch(
t *testing.T, t *testing.T,
retBups []BackupEntry, retBups []BackupBase,
allExpected []backupInfo, allExpected []backupInfo,
expectedIdxsAndReasons map[int][]identity.Reasoner, expectedIdxsAndReasons map[int][]identity.Reasoner,
) { ) {
@ -1143,7 +1143,7 @@ func checkBackupEntriesMatch(
} }
for _, found := range retBups { for _, found := range retBups {
reasons, ok := expectedReasons[found.ID] reasons, ok := expectedReasons[found.Backup.ID]
if !ok { if !ok {
// Missing or extra snapshots will be reported by earlier checks. // Missing or extra snapshots will be reported by earlier checks.
continue continue
@ -1154,6 +1154,6 @@ func checkBackupEntriesMatch(
reasons, reasons,
found.Reasons, found.Reasons,
"incorrect reasons for snapshot with ID %s", "incorrect reasons for snapshot with ID %s",
found.ID) found.Backup.ID)
} }
} }

View File

@ -14,71 +14,30 @@ import (
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
) )
// TODO(ashmrtn): Temp function until all PRs in the series merge. func basesMatch(t *testing.T, expect, got []BackupBase, dataType string) {
func backupsMatch(t *testing.T, expect, got []BackupEntry, dataType string) {
expectBups := make([]*backup.Backup, 0, len(expect)) expectBups := make([]*backup.Backup, 0, len(expect))
expectMans := make([]*snapshot.Manifest, 0, len(expect))
gotBups := make([]*backup.Backup, 0, len(got)) gotBups := make([]*backup.Backup, 0, len(got))
gotBasesByID := map[model.StableID]BackupEntry{} gotMans := make([]*snapshot.Manifest, 0, len(got))
gotBasesByID := map[model.StableID]BackupBase{}
for _, e := range expect { for _, e := range expect {
if e.Backup != nil { expectBups = append(expectBups, e.Backup)
expectBups = append(expectBups, e.Backup) expectMans = append(expectMans, e.ItemDataSnapshot)
}
} }
for _, g := range got { for _, g := range got {
if g.Backup != nil { gotBups = append(gotBups, g.Backup)
gotBups = append(gotBups, g.Backup) gotMans = append(gotMans, g.ItemDataSnapshot)
gotBasesByID[g.Backup.ID] = g gotBasesByID[g.Backup.ID] = g
}
} }
assert.ElementsMatch(t, expectBups, gotBups, dataType+" backup model") assert.ElementsMatch(t, expectBups, gotBups, dataType+" backup model")
// Need to compare Reasons separately since they're also a slice.
for _, e := range expect {
if e.Backup == nil {
continue
}
b, ok := gotBasesByID[e.Backup.ID]
if !ok {
// Missing bases will be reported above.
continue
}
assert.ElementsMatch(t, e.Reasons, b.Reasons)
}
}
// TODO(ashmrtn): Temp function until all PRs in the series merge.
func manifestsMatch(t *testing.T, expect, got []ManifestEntry, dataType string) {
expectMans := make([]*snapshot.Manifest, 0, len(expect))
gotMans := make([]*snapshot.Manifest, 0, len(got))
gotBasesByID := map[manifest.ID]ManifestEntry{}
for _, e := range expect {
if e.Manifest != nil {
expectMans = append(expectMans, e.Manifest)
}
}
for _, g := range got {
if g.Manifest != nil {
gotMans = append(gotMans, g.Manifest)
gotBasesByID[g.Manifest.ID] = g
}
}
assert.ElementsMatch(t, expectMans, gotMans, dataType+" item data snapshot") assert.ElementsMatch(t, expectMans, gotMans, dataType+" item data snapshot")
// Need to compare Reasons separately since they're also a slice. // Need to compare Reasons separately since they're also a slice.
for _, e := range expect { for _, e := range expect {
if e.Manifest == nil { b, ok := gotBasesByID[e.Backup.ID]
continue
}
b, ok := gotBasesByID[e.Manifest.ID]
if !ok { if !ok {
// Missing bases will be reported above. // Missing bases will be reported above.
continue continue
@ -94,9 +53,7 @@ func AssertBackupBasesEqual(t *testing.T, expect, got BackupBases) {
} }
if expect == nil { if expect == nil {
assert.Empty(t, got.Backups(), "backups")
assert.Empty(t, got.MergeBases(), "merge bases") assert.Empty(t, got.MergeBases(), "merge bases")
assert.Empty(t, got.UniqueAssistBackups(), "assist backups")
assert.Empty(t, got.UniqueAssistBases(), "assist bases") assert.Empty(t, got.UniqueAssistBases(), "assist bases")
assert.Empty(t, got.SnapshotAssistBases(), "snapshot assist bases") assert.Empty(t, got.SnapshotAssistBases(), "snapshot assist bases")
@ -104,9 +61,7 @@ func AssertBackupBasesEqual(t *testing.T, expect, got BackupBases) {
} }
if got == nil { if got == nil {
if len(expect.Backups()) > 0 || if len(expect.MergeBases()) > 0 ||
len(expect.MergeBases()) > 0 ||
len(expect.UniqueAssistBackups()) > 0 ||
len(expect.UniqueAssistBases()) > 0 || len(expect.UniqueAssistBases()) > 0 ||
len(expect.SnapshotAssistBases()) > 0 { len(expect.SnapshotAssistBases()) > 0 {
assert.Fail(t, "got was nil but expected non-nil result %v", expect) assert.Fail(t, "got was nil but expected non-nil result %v", expect)
@ -115,11 +70,9 @@ func AssertBackupBasesEqual(t *testing.T, expect, got BackupBases) {
return return
} }
backupsMatch(t, expect.Backups(), got.Backups(), "merge backups") basesMatch(t, expect.MergeBases(), got.MergeBases(), "merge bases")
manifestsMatch(t, expect.MergeBases(), got.MergeBases(), "merge manifests") basesMatch(t, expect.UniqueAssistBases(), got.UniqueAssistBases(), "assist bases")
backupsMatch(t, expect.UniqueAssistBackups(), got.UniqueAssistBackups(), "assist backups") basesMatch(t, expect.SnapshotAssistBases(), got.SnapshotAssistBases(), "snapshot assist bases")
manifestsMatch(t, expect.UniqueAssistBases(), got.UniqueAssistBases(), "assist manifests")
manifestsMatch(t, expect.SnapshotAssistBases(), got.SnapshotAssistBases(), "snapshot assist bases")
} }
func NewMockBackupBases() *MockBackupBases { func NewMockBackupBases() *MockBackupBases {
@ -130,64 +83,13 @@ type MockBackupBases struct {
*backupBases *backupBases
} }
func (bb *MockBackupBases) WithBackups(b ...BackupEntry) *MockBackupBases { func (bb *MockBackupBases) WithMergeBases(b ...BackupBase) *MockBackupBases {
bases := make([]BackupBase, 0, len(b)) bb.backupBases.mergeBases = append(bb.MergeBases(), b...)
for _, base := range b {
bases = append(bases, BackupBase{
Backup: base.Backup,
Reasons: base.Reasons,
})
}
bb.backupBases.mergeBases = append(bb.NewMergeBases(), bases...)
return bb return bb
} }
func (bb *MockBackupBases) WithMergeBases(m ...ManifestEntry) *MockBackupBases { func (bb *MockBackupBases) WithAssistBases(b ...BackupBase) *MockBackupBases {
bases := make([]BackupBase, 0, len(m)) bb.backupBases.assistBases = append(bb.UniqueAssistBases(), b...)
for _, base := range m {
bases = append(bases, BackupBase{
ItemDataSnapshot: base.Manifest,
Reasons: base.Reasons,
})
}
bb.backupBases.mergeBases = append(bb.NewMergeBases(), bases...)
return bb
}
func (bb *MockBackupBases) WithAssistBackups(b ...BackupEntry) *MockBackupBases {
bases := make([]BackupBase, 0, len(b))
for _, base := range b {
bases = append(bases, BackupBase{
Backup: base.Backup,
Reasons: base.Reasons,
})
}
bb.backupBases.assistBases = append(bb.NewUniqueAssistBases(), bases...)
return bb
}
func (bb *MockBackupBases) WithAssistBases(m ...ManifestEntry) *MockBackupBases {
bases := make([]BackupBase, 0, len(m))
for _, base := range m {
bases = append(bases, BackupBase{
ItemDataSnapshot: base.Manifest,
Reasons: base.Reasons,
})
}
bb.backupBases.assistBases = append(bb.NewUniqueAssistBases(), bases...)
return bb
}
func (bb *MockBackupBases) NewWithMergeBases(b ...BackupBase) *MockBackupBases {
bb.backupBases.mergeBases = append(bb.NewMergeBases(), b...)
return bb return bb
} }

View File

@ -13,7 +13,6 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/kopia/kopia/fs" "github.com/kopia/kopia/fs"
"github.com/kopia/kopia/fs/virtualfs" "github.com/kopia/kopia/fs/virtualfs"
"github.com/kopia/kopia/repo/manifest"
"github.com/kopia/kopia/snapshot/snapshotfs" "github.com/kopia/kopia/snapshot/snapshotfs"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
@ -1047,26 +1046,20 @@ func traverseBaseDir(
return nil return nil
} }
func logBaseInfo(ctx context.Context, m ManifestEntry) { func logBaseInfo(ctx context.Context, b BackupBase) {
svcs := map[string]struct{}{} svcs := map[string]struct{}{}
cats := map[string]struct{}{} cats := map[string]struct{}{}
for _, r := range m.Reasons { for _, r := range b.Reasons {
svcs[r.Service().String()] = struct{}{} svcs[r.Service().String()] = struct{}{}
cats[r.Category().String()] = struct{}{} cats[r.Category().String()] = struct{}{}
} }
mbID, _ := m.GetTag(TagBackupID) // Base backup ID and base snapshot ID are already in context clues.
if len(mbID) == 0 {
mbID = "no_backup_id_tag"
}
logger.Ctx(ctx).Infow( logger.Ctx(ctx).Infow(
"using base for backup", "using base for backup",
"base_snapshot_id", m.ID,
"services", maps.Keys(svcs), "services", maps.Keys(svcs),
"categories", maps.Keys(cats), "categories", maps.Keys(cats))
"base_backup_id", mbID)
} }
const ( const (
@ -1093,20 +1086,32 @@ const (
func inflateBaseTree( func inflateBaseTree(
ctx context.Context, ctx context.Context,
loader snapshotLoader, loader snapshotLoader,
snap ManifestEntry, base BackupBase,
updatedPaths map[string]path.Path, updatedPaths map[string]path.Path,
roots map[string]*treeMap, roots map[string]*treeMap,
) error { ) error {
bupID := "no_backup_id"
if base.Backup != nil && len(base.Backup.ID) > 0 {
bupID = string(base.Backup.ID)
}
ctx = clues.Add(
ctx,
"base_backup_id", bupID,
"base_snapshot_id", base.ItemDataSnapshot.ID)
// Only complete snapshots should be used to source base information. // Only complete snapshots should be used to source base information.
// Snapshots for checkpoints will rely on kopia-assisted dedupe to efficiently // Snapshots for checkpoints will rely on kopia-assisted dedupe to efficiently
// handle items that were completely uploaded before Corso crashed. // handle items that were completely uploaded before Corso crashed.
if len(snap.IncompleteReason) > 0 { if len(base.ItemDataSnapshot.IncompleteReason) > 0 {
logger.Ctx(ctx).Info("skipping incomplete snapshot")
return nil return nil
} }
ctx = clues.Add(ctx, "snapshot_base_id", snap.ID) // Some logging to help track things.
logBaseInfo(ctx, base)
root, err := loader.SnapshotRoot(snap.Manifest) root, err := loader.SnapshotRoot(base.ItemDataSnapshot)
if err != nil { if err != nil {
return clues.Wrap(err, "getting snapshot root directory").WithClues(ctx) return clues.Wrap(err, "getting snapshot root directory").WithClues(ctx)
} }
@ -1116,13 +1121,10 @@ func inflateBaseTree(
return clues.New("snapshot root is not a directory").WithClues(ctx) return clues.New("snapshot root is not a directory").WithClues(ctx)
} }
// Some logging to help track things.
logBaseInfo(ctx, snap)
// For each subtree corresponding to the tuple // For each subtree corresponding to the tuple
// (resource owner, service, category) merge the directories in the base with // (resource owner, service, category) merge the directories in the base with
// what has been reported in the collections we got. // what has been reported in the collections we got.
for _, r := range snap.Reasons { for _, r := range base.Reasons {
ictx := clues.Add( ictx := clues.Add(
ctx, ctx,
"subtree_service", r.Service().String(), "subtree_service", r.Service().String(),
@ -1204,7 +1206,7 @@ func inflateBaseTree(
func inflateDirTree( func inflateDirTree(
ctx context.Context, ctx context.Context,
loader snapshotLoader, loader snapshotLoader,
baseSnaps []ManifestEntry, bases []BackupBase,
collections []data.BackupCollection, collections []data.BackupCollection,
globalExcludeSet prefixmatcher.StringSetReader, globalExcludeSet prefixmatcher.StringSetReader,
progress *corsoProgress, progress *corsoProgress,
@ -1214,22 +1216,18 @@ func inflateDirTree(
return nil, clues.Wrap(err, "inflating collection tree") return nil, clues.Wrap(err, "inflating collection tree")
} }
baseIDs := make([]manifest.ID, 0, len(baseSnaps)) // Individual backup/snapshot IDs will be logged when merging their hierarchy.
for _, snap := range baseSnaps { ctx = clues.Add(ctx, "len_bases", len(bases))
baseIDs = append(baseIDs, snap.ID)
}
ctx = clues.Add(ctx, "len_base_snapshots", len(baseSnaps), "base_snapshot_ids", baseIDs) if len(bases) > 0 {
logger.Ctx(ctx).Info("merging hierarchies from base backups")
if len(baseIDs) > 0 {
logger.Ctx(ctx).Info("merging hierarchies from base snapshots")
} else { } else {
logger.Ctx(ctx).Info("no base snapshots to merge") logger.Ctx(ctx).Info("no base backups to merge")
} }
for _, snap := range baseSnaps { for _, base := range bases {
if err = inflateBaseTree(ctx, loader, snap, updatedPaths, roots); err != nil { if err = inflateBaseTree(ctx, loader, base, updatedPaths, roots); err != nil {
return nil, clues.Wrap(err, "inflating base snapshot tree(s)") return nil, clues.Wrap(err, "inflating base backup tree(s)")
} }
} }

View File

@ -878,19 +878,19 @@ func (msw *mockSnapshotWalker) SnapshotRoot(*snapshot.Manifest) (fs.Entry, error
return msw.snapshotRoot, nil return msw.snapshotRoot, nil
} }
func makeManifestEntry( func makeBackupBase(
id, tenant, resourceOwner string, id, tenant, resourceOwner string,
service path.ServiceType, service path.ServiceType,
categories ...path.CategoryType, categories ...path.CategoryType,
) ManifestEntry { ) BackupBase {
var reasons []identity.Reasoner var reasons []identity.Reasoner
for _, c := range categories { for _, c := range categories {
reasons = append(reasons, identity.NewReason(tenant, resourceOwner, service, c)) reasons = append(reasons, identity.NewReason(tenant, resourceOwner, service, c))
} }
return ManifestEntry{ return BackupBase{
Manifest: &snapshot.Manifest{ ItemDataSnapshot: &snapshot.Manifest{
ID: manifest.ID(id), ID: manifest.ID(id),
}, },
Reasons: reasons, Reasons: reasons,
@ -1201,8 +1201,8 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
dirTree, err := inflateDirTree( dirTree, err := inflateDirTree(
ctx, ctx,
msw, msw,
[]ManifestEntry{ []BackupBase{
makeManifestEntry("", testTenant, testUser, path.ExchangeService, path.EmailCategory), makeBackupBase("", testTenant, testUser, path.ExchangeService, path.EmailCategory),
}, },
test.inputCollections(), test.inputCollections(),
pmMock.NewPrefixMap(nil), pmMock.NewPrefixMap(nil),
@ -1916,8 +1916,8 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
dirTree, err := inflateDirTree( dirTree, err := inflateDirTree(
ctx, ctx,
msw, msw,
[]ManifestEntry{ []BackupBase{
makeManifestEntry("", testTenant, testUser, path.ExchangeService, path.EmailCategory), makeBackupBase("", testTenant, testUser, path.ExchangeService, path.EmailCategory),
}, },
test.inputCollections(t), test.inputCollections(t),
ie, ie,
@ -2060,8 +2060,8 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSkipsDeletedSubtre
dirTree, err := inflateDirTree( dirTree, err := inflateDirTree(
ctx, ctx,
msw, msw,
[]ManifestEntry{ []BackupBase{
makeManifestEntry("", testTenant, testUser, path.ExchangeService, path.EmailCategory), makeBackupBase("", testTenant, testUser, path.ExchangeService, path.EmailCategory),
}, },
collections, collections,
pmMock.NewPrefixMap(nil), pmMock.NewPrefixMap(nil),
@ -2160,8 +2160,8 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_HandleEmptyBase()
dirTree, err := inflateDirTree( dirTree, err := inflateDirTree(
ctx, ctx,
msw, msw,
[]ManifestEntry{ []BackupBase{
makeManifestEntry("", testTenant, testUser, path.ExchangeService, path.EmailCategory), makeBackupBase("", testTenant, testUser, path.ExchangeService, path.EmailCategory),
}, },
collections, collections,
pmMock.NewPrefixMap(nil), pmMock.NewPrefixMap(nil),
@ -2376,9 +2376,9 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsCorrectSubt
dirTree, err := inflateDirTree( dirTree, err := inflateDirTree(
ctx, ctx,
msw, msw,
[]ManifestEntry{ []BackupBase{
makeManifestEntry("id1", testTenant, testUser, path.ExchangeService, path.ContactsCategory), makeBackupBase("id1", testTenant, testUser, path.ExchangeService, path.ContactsCategory),
makeManifestEntry("id2", testTenant, testUser, path.ExchangeService, path.EmailCategory), makeBackupBase("id2", testTenant, testUser, path.ExchangeService, path.EmailCategory),
}, },
collections, collections,
pmMock.NewPrefixMap(nil), pmMock.NewPrefixMap(nil),
@ -2529,8 +2529,8 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsMigrateSubt
dirTree, err := inflateDirTree( dirTree, err := inflateDirTree(
ctx, ctx,
msw, msw,
[]ManifestEntry{ []BackupBase{
makeManifestEntry("id1", testTenant, testUser, path.ExchangeService, path.EmailCategory, path.ContactsCategory), makeBackupBase("id1", testTenant, testUser, path.ExchangeService, path.EmailCategory, path.ContactsCategory),
}, },
[]data.BackupCollection{mce, mcc}, []data.BackupCollection{mce, mcc},
pmMock.NewPrefixMap(nil), pmMock.NewPrefixMap(nil),
@ -3454,8 +3454,13 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_SelectiveSubtreeP
dirTree, err := inflateDirTree( dirTree, err := inflateDirTree(
ctx, ctx,
msw, msw,
[]ManifestEntry{ []BackupBase{
makeManifestEntry("", tenant, user, path.OneDriveService, path.FilesCategory), makeBackupBase(
"id1",
tenant,
user,
path.OneDriveService,
path.FilesCategory),
}, },
test.inputCollections(t), test.inputCollections(t),
ie, ie,

View File

@ -179,8 +179,8 @@ func (w Wrapper) ConsumeBackupCollections(
// snapshot bases into inflateDirTree so that the new snapshot // snapshot bases into inflateDirTree so that the new snapshot
// includes historical data. // includes historical data.
var ( var (
mergeBase []ManifestEntry mergeBase []BackupBase
assistBase []ManifestEntry assistBase []BackupBase
) )
if bases != nil { if bases != nil {
@ -230,7 +230,7 @@ func (w Wrapper) ConsumeBackupCollections(
func (w Wrapper) makeSnapshotWithRoot( func (w Wrapper) makeSnapshotWithRoot(
ctx context.Context, ctx context.Context,
prevSnapEntries []ManifestEntry, prevBases []BackupBase,
root fs.Directory, root fs.Directory,
addlTags map[string]string, addlTags map[string]string,
progress *corsoProgress, progress *corsoProgress,
@ -244,17 +244,17 @@ func (w Wrapper) makeSnapshotWithRoot(
} }
) )
snapIDs := make([]manifest.ID, 0, len(prevSnapEntries)) // just for logging snapIDs := make([]manifest.ID, 0, len(prevBases)) // just for logging
prevSnaps := make([]*snapshot.Manifest, 0, len(prevSnapEntries)) prevSnaps := make([]*snapshot.Manifest, 0, len(prevBases))
for _, ent := range prevSnapEntries { for _, ent := range prevBases {
prevSnaps = append(prevSnaps, ent.Manifest) prevSnaps = append(prevSnaps, ent.ItemDataSnapshot)
snapIDs = append(snapIDs, ent.ID) snapIDs = append(snapIDs, ent.ItemDataSnapshot.ID)
} }
ctx = clues.Add( ctx = clues.Add(
ctx, ctx,
"num_assist_snapshots", len(prevSnapEntries), "num_assist_snapshots", len(prevBases),
"assist_snapshot_ids", snapIDs, "assist_snapshot_ids", snapIDs,
"additional_tags", addlTags) "additional_tags", addlTags)

View File

@ -80,14 +80,14 @@ func BenchmarkHierarchyMerge(b *testing.B) {
type testCase struct { type testCase struct {
name string name string
baseBackups func(base ManifestEntry) BackupBases baseBackups func(base BackupBase) BackupBases
collections []data.BackupCollection collections []data.BackupCollection
} }
// Initial backup. All files should be considered new by kopia. // Initial backup. All files should be considered new by kopia.
baseBackupCase := testCase{ baseBackupCase := testCase{
name: "Setup", name: "Setup",
baseBackups: func(ManifestEntry) BackupBases { baseBackups: func(BackupBase) BackupBases {
return NewMockBackupBases() return NewMockBackupBases()
}, },
collections: cols, collections: cols,
@ -97,8 +97,8 @@ func BenchmarkHierarchyMerge(b *testing.B) {
t tester.TestT, t tester.TestT,
ctx context.Context, ctx context.Context,
test testCase, test testCase,
base ManifestEntry, base BackupBase,
) ManifestEntry { ) BackupBase {
bbs := test.baseBackups(base) bbs := test.baseBackups(base)
counter := count.New() counter := count.New()
@ -126,20 +126,20 @@ func BenchmarkHierarchyMerge(b *testing.B) {
manifest.ID(stats.SnapshotID)) manifest.ID(stats.SnapshotID))
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
return ManifestEntry{ return BackupBase{
Manifest: snap, ItemDataSnapshot: snap,
Reasons: reasons, Reasons: reasons,
} }
} }
b.Logf("setting up base backup\n") b.Logf("setting up base backup\n")
base := runAndTestBackup(b, ctx, baseBackupCase, ManifestEntry{}) base := runAndTestBackup(b, ctx, baseBackupCase, BackupBase{})
table := []testCase{ table := []testCase{
{ {
name: "Merge All", name: "Merge All",
baseBackups: func(base ManifestEntry) BackupBases { baseBackups: func(base BackupBase) BackupBases {
return NewMockBackupBases().WithMergeBases(base) return NewMockBackupBases().WithMergeBases(base)
}, },
collections: func() []data.BackupCollection { collections: func() []data.BackupCollection {

View File

@ -839,7 +839,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
type testCase struct { type testCase struct {
name string name string
baseBackups func(base ManifestEntry) BackupBases baseBackups func(base BackupBase) BackupBases
collections []data.BackupCollection collections []data.BackupCollection
expectedUploadedFiles int expectedUploadedFiles int
expectedCachedFiles int expectedCachedFiles int
@ -864,7 +864,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
// Initial backup. All files should be considered new by kopia. // Initial backup. All files should be considered new by kopia.
baseBackupCase := testCase{ baseBackupCase := testCase{
name: "Uncached", name: "Uncached",
baseBackups: func(ManifestEntry) BackupBases { baseBackups: func(BackupBase) BackupBases {
return NewMockBackupBases() return NewMockBackupBases()
}, },
collections: collections, collections: collections,
@ -875,8 +875,8 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
uploadedBytes: []int64{8000, 10000}, uploadedBytes: []int64{8000, 10000},
} }
runAndTestBackup := func(test testCase, base ManifestEntry) ManifestEntry { runAndTestBackup := func(test testCase, base BackupBase) BackupBase {
var res ManifestEntry var man *snapshot.Manifest
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
@ -966,21 +966,22 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
manifest.ID(stats.SnapshotID)) manifest.ID(stats.SnapshotID))
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
res = ManifestEntry{ man = snap
Manifest: snap,
Reasons: reasons,
}
}) })
return res return BackupBase{
ItemDataSnapshot: man,
Reasons: reasons,
}
} }
base := runAndTestBackup(baseBackupCase, ManifestEntry{}) base := runAndTestBackup(baseBackupCase, BackupBase{})
require.NotNil(suite.T(), base.ItemDataSnapshot)
table := []testCase{ table := []testCase{
{ {
name: "Kopia Assist And Merge All Files Changed", name: "Kopia Assist And Merge All Files Changed",
baseBackups: func(base ManifestEntry) BackupBases { baseBackups: func(base BackupBase) BackupBases {
return NewMockBackupBases().WithMergeBases(base) return NewMockBackupBases().WithMergeBases(base)
}, },
collections: collections, collections: collections,
@ -994,7 +995,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
}, },
{ {
name: "Kopia Assist And Merge No Files Changed", name: "Kopia Assist And Merge No Files Changed",
baseBackups: func(base ManifestEntry) BackupBases { baseBackups: func(base BackupBase) BackupBases {
return NewMockBackupBases().WithMergeBases(base) return NewMockBackupBases().WithMergeBases(base)
}, },
// Pass in empty collections to force a backup. Otherwise we'll skip // Pass in empty collections to force a backup. Otherwise we'll skip
@ -1016,7 +1017,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
}, },
{ {
name: "Kopia Assist Only", name: "Kopia Assist Only",
baseBackups: func(base ManifestEntry) BackupBases { baseBackups: func(base BackupBase) BackupBases {
return NewMockBackupBases().WithAssistBases(base) return NewMockBackupBases().WithAssistBases(base)
}, },
collections: collections, collections: collections,
@ -1029,7 +1030,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
}, },
{ {
name: "Merge Only", name: "Merge Only",
baseBackups: func(base ManifestEntry) BackupBases { baseBackups: func(base BackupBase) BackupBases {
return NewMockBackupBases().WithMergeBases(base).MockDisableAssistBases() return NewMockBackupBases().WithMergeBases(base).MockDisableAssistBases()
}, },
// Pass in empty collections to force a backup. Otherwise we'll skip // Pass in empty collections to force a backup. Otherwise we'll skip
@ -1049,7 +1050,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
}, },
{ {
name: "Content Hash Only", name: "Content Hash Only",
baseBackups: func(base ManifestEntry) BackupBases { baseBackups: func(base BackupBase) BackupBases {
return NewMockBackupBases() return NewMockBackupBases()
}, },
collections: collections, collections: collections,
@ -1265,9 +1266,9 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_NoDetailsForMeta() {
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
prevSnaps.WithMergeBases( prevSnaps.WithMergeBases(
ManifestEntry{ BackupBase{
Manifest: snap, ItemDataSnapshot: snap,
Reasons: reasons, Reasons: reasons,
}) })
}) })
} }
@ -1777,9 +1778,9 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
suite.ctx, suite.ctx,
[]identity.Reasoner{r}, []identity.Reasoner{r},
NewMockBackupBases().WithMergeBases( NewMockBackupBases().WithMergeBases(
ManifestEntry{ BackupBase{
Manifest: man, ItemDataSnapshot: man,
Reasons: []identity.Reasoner{r}, Reasons: []identity.Reasoner{r},
}), }),
test.cols(t), test.cols(t),
excluded, excluded,

View File

@ -176,7 +176,7 @@ func verifyBackupInputs(sels selectors.Selector, cachedIDs []string) error {
func (ctrl *Controller) GetMetadataPaths( func (ctrl *Controller) GetMetadataPaths(
ctx context.Context, ctx context.Context,
r kinject.RestoreProducer, r kinject.RestoreProducer,
man kopia.ManifestEntry, base kopia.BackupBase,
errs *fault.Bus, errs *fault.Bus,
) ([]path.RestorePaths, error) { ) ([]path.RestorePaths, error) {
var ( var (
@ -184,12 +184,12 @@ func (ctrl *Controller) GetMetadataPaths(
err error err error
) )
for _, reason := range man.Reasons { for _, reason := range base.Reasons {
filePaths := [][]string{} filePaths := [][]string{}
switch true { switch true {
case reason.Service() == path.GroupsService && reason.Category() == path.LibrariesCategory: case reason.Service() == path.GroupsService && reason.Category() == path.LibrariesCategory:
filePaths, err = groups.MetadataFiles(ctx, reason, r, man.ID, errs) filePaths, err = groups.MetadataFiles(ctx, reason, r, base.ItemDataSnapshot.ID, errs)
if err != nil { if err != nil {
return nil, err return nil, err
} }

View File

@ -53,7 +53,7 @@ func (ctrl Controller) ProduceBackupCollections(
func (ctrl *Controller) GetMetadataPaths( func (ctrl *Controller) GetMetadataPaths(
ctx context.Context, ctx context.Context,
r kinject.RestoreProducer, r kinject.RestoreProducer,
man kopia.ManifestEntry, base kopia.BackupBase,
errs *fault.Bus, errs *fault.Bus,
) ([]path.RestorePaths, error) { ) ([]path.RestorePaths, error) {
return nil, clues.New("not implemented") return nil, clues.New("not implemented")

View File

@ -604,7 +604,7 @@ func getNewPathRefs(
func mergeItemsFromBase( func mergeItemsFromBase(
ctx context.Context, ctx context.Context,
checkReason bool, checkReason bool,
baseBackup kopia.BackupEntry, baseBackup kopia.BackupBase,
detailsStore streamstore.Streamer, detailsStore streamstore.Streamer,
dataFromBackup kopia.DetailsMergeInfoer, dataFromBackup kopia.DetailsMergeInfoer,
deets *details.Builder, deets *details.Builder,
@ -617,7 +617,7 @@ func mergeItemsFromBase(
) )
// Can't be in the above block else it's counted as a redeclaration. // Can't be in the above block else it's counted as a redeclaration.
ctx = clues.Add(ctx, "base_backup_id", baseBackup.ID) ctx = clues.Add(ctx, "base_backup_id", baseBackup.Backup.ID)
baseDeets, err := getDetailsFromBackup( baseDeets, err := getDetailsFromBackup(
ctx, ctx,
@ -665,7 +665,7 @@ func mergeItemsFromBase(
dataFromBackup, dataFromBackup,
entry, entry,
rr, rr,
baseBackup.Version) baseBackup.Backup.Version)
if err != nil { if err != nil {
return manifestAddedEntries, return manifestAddedEntries,
clues.Wrap(err, "getting updated info for entry").WithClues(ictx) clues.Wrap(err, "getting updated info for entry").WithClues(ictx)
@ -746,7 +746,7 @@ func mergeDetails(
// leaves us in a bit of a pickle if the user has run any concurrent backups // leaves us in a bit of a pickle if the user has run any concurrent backups
// with overlapping Reasons that turn into assist bases, but the modTime check // with overlapping Reasons that turn into assist bases, but the modTime check
// in DetailsMergeInfoer should handle that. // in DetailsMergeInfoer should handle that.
for _, base := range bases.UniqueAssistBackups() { for _, base := range bases.UniqueAssistBases() {
added, err := mergeItemsFromBase( added, err := mergeItemsFromBase(
ctx, ctx,
false, false,
@ -771,7 +771,7 @@ func mergeDetails(
// We do want to enable matching entries based on Reasons because we // We do want to enable matching entries based on Reasons because we
// explicitly control which subtrees from the merge base backup are grafted // explicitly control which subtrees from the merge base backup are grafted
// onto the hierarchy for the currently running backup. // onto the hierarchy for the currently running backup.
for _, base := range bases.Backups() { for _, base := range bases.MergeBases() {
added, err := mergeItemsFromBase( added, err := mergeItemsFromBase(
ctx, ctx,
true, true,

View File

@ -479,14 +479,16 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_ConsumeBackupDataCollections
} }
bases = kopia.NewMockBackupBases().WithMergeBases( bases = kopia.NewMockBackupBases().WithMergeBases(
kopia.ManifestEntry{ kopia.BackupBase{
Manifest: manifest1, Backup: &backup.Backup{BaseModel: model.BaseModel{ID: "1"}},
ItemDataSnapshot: manifest1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
emailReason, emailReason,
}, },
}).WithAssistBases( }).WithAssistBases(
kopia.ManifestEntry{ kopia.BackupBase{
Manifest: manifest2, Backup: &backup.Backup{BaseModel: model.BaseModel{ID: "2"}},
ItemDataSnapshot: manifest2,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
contactsReason, contactsReason,
}, },
@ -634,8 +636,8 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
table := []struct { table := []struct {
name string name string
populatedDetails map[string]*details.Details populatedDetails map[string]*details.Details
inputBackups []kopia.BackupEntry inputBackups []kopia.BackupBase
inputAssistBackups []kopia.BackupEntry inputAssistBackups []kopia.BackupBase
mdm *mockDetailsMergeInfoer mdm *mockDetailsMergeInfoer
errCheck assert.ErrorAssertionFunc errCheck assert.ErrorAssertionFunc
@ -662,7 +664,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []kopia.BackupBase{
{ {
Backup: &backup.Backup{ Backup: &backup.Backup{
BaseModel: model.BaseModel{ BaseModel: model.BaseModel{
@ -686,7 +688,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []kopia.BackupBase{
{ {
Backup: &backup1, Backup: &backup1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
@ -713,7 +715,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []kopia.BackupBase{
{ {
Backup: &backup1, Backup: &backup1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
@ -769,7 +771,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []kopia.BackupBase{
{ {
Backup: &backup1, Backup: &backup1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
@ -796,7 +798,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []kopia.BackupBase{
{ {
Backup: &backup1, Backup: &backup1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
@ -826,7 +828,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []kopia.BackupBase{
{ {
Backup: &backup1, Backup: &backup1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
@ -856,7 +858,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []kopia.BackupBase{
{ {
Backup: &backup1, Backup: &backup1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
@ -886,7 +888,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []kopia.BackupBase{
{ {
Backup: &backup1, Backup: &backup1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
@ -917,7 +919,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []kopia.BackupBase{
{ {
Backup: &backup1, Backup: &backup1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
@ -948,7 +950,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []kopia.BackupBase{
{ {
Backup: &backup1, Backup: &backup1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
@ -996,7 +998,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []kopia.BackupBase{
{ {
Backup: &backup1, Backup: &backup1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
@ -1005,7 +1007,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
}, },
}, },
}, },
inputAssistBackups: []kopia.BackupEntry{ inputAssistBackups: []kopia.BackupBase{
{Backup: &backup2}, {Backup: &backup2},
}, },
populatedDetails: map[string]*details.Details{ populatedDetails: map[string]*details.Details{
@ -1040,7 +1042,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []kopia.BackupBase{
{ {
Backup: &backup1, Backup: &backup1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
@ -1048,7 +1050,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
}, },
}, },
}, },
inputAssistBackups: []kopia.BackupEntry{ inputAssistBackups: []kopia.BackupBase{
{Backup: &backup2}, {Backup: &backup2},
}, },
populatedDetails: map[string]*details.Details{ populatedDetails: map[string]*details.Details{
@ -1080,7 +1082,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputAssistBackups: []kopia.BackupEntry{ inputAssistBackups: []kopia.BackupBase{
{Backup: &backup1}, {Backup: &backup1},
{Backup: &backup2}, {Backup: &backup2},
}, },
@ -1113,7 +1115,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputAssistBackups: []kopia.BackupEntry{ inputAssistBackups: []kopia.BackupBase{
{Backup: &backup1}, {Backup: &backup1},
{Backup: &backup2}, {Backup: &backup2},
}, },
@ -1146,7 +1148,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputAssistBackups: []kopia.BackupEntry{ inputAssistBackups: []kopia.BackupBase{
{Backup: &backup1}, {Backup: &backup1},
{Backup: &backup2}, {Backup: &backup2},
}, },
@ -1176,7 +1178,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
mdm: func() *mockDetailsMergeInfoer { mdm: func() *mockDetailsMergeInfoer {
return newMockDetailsMergeInfoer() return newMockDetailsMergeInfoer()
}(), }(),
inputAssistBackups: []kopia.BackupEntry{ inputAssistBackups: []kopia.BackupBase{
{Backup: &backup1}, {Backup: &backup1},
}, },
populatedDetails: map[string]*details.Details{ populatedDetails: map[string]*details.Details{
@ -1205,8 +1207,8 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
writeStats := kopia.BackupStats{} writeStats := kopia.BackupStats{}
bb := kopia.NewMockBackupBases(). bb := kopia.NewMockBackupBases().
WithBackups(test.inputBackups...). WithMergeBases(test.inputBackups...).
WithAssistBackups(test.inputAssistBackups...) WithAssistBases(test.inputAssistBackups...)
err := mergeDetails( err := mergeDetails(
ctx, ctx,
@ -1276,7 +1278,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsFolde
itemPath1.Service(), itemPath1.Service(),
itemPath1.Category()) itemPath1.Category())
backup1 = kopia.BackupEntry{ backup1 = kopia.BackupBase{
Backup: &backup.Backup{ Backup: &backup.Backup{
BaseModel: model.BaseModel{ BaseModel: model.BaseModel{
ID: "bid1", ID: "bid1",
@ -1300,7 +1302,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsFolde
// itemDetails.Exchange.Modified = now // itemDetails.Exchange.Modified = now
populatedDetails := map[string]*details.Details{ populatedDetails := map[string]*details.Details{
backup1.DetailsID: { backup1.Backup.DetailsID: {
DetailsModel: details.DetailsModel{ DetailsModel: details.DetailsModel{
Entries: []details.Entry{*itemDetails}, Entries: []details.Entry{*itemDetails},
}, },
@ -1335,7 +1337,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsFolde
err := mergeDetails( err := mergeDetails(
ctx, ctx,
mds, mds,
kopia.NewMockBackupBases().WithBackups(backup1), kopia.NewMockBackupBases().WithMergeBases(backup1),
mdm, mdm,
&deets, &deets,
&writeStats, &writeStats,

View File

@ -37,7 +37,7 @@ type (
GetMetadataPaths( GetMetadataPaths(
ctx context.Context, ctx context.Context,
r inject.RestoreProducer, r inject.RestoreProducer,
man kopia.ManifestEntry, base kopia.BackupBase,
errs *fault.Bus, errs *fault.Bus,
) ([]path.RestorePaths, error) ) ([]path.RestorePaths, error)

View File

@ -62,9 +62,9 @@ func (mbp *mockBackupProducer) Wait() *data.CollectionStats {
func (mbp mockBackupProducer) GetMetadataPaths( func (mbp mockBackupProducer) GetMetadataPaths(
ctx context.Context, ctx context.Context,
r kinject.RestoreProducer, r kinject.RestoreProducer,
man kopia.ManifestEntry, base kopia.BackupBase,
errs *fault.Bus, errs *fault.Bus,
) ([]path.RestorePaths, error) { ) ([]path.RestorePaths, error) {
ctrl := m365.Controller{} ctrl := m365.Controller{}
return ctrl.GetMetadataPaths(ctx, r, man, errs) return ctrl.GetMetadataPaths(ctx, r, base, errs)
} }

View File

@ -92,8 +92,11 @@ func getManifestsAndMetadata(
return bb, nil, false, nil return bb, nil, false, nil
} }
for _, man := range bb.MergeBases() { for _, base := range bb.MergeBases() {
mctx := clues.Add(ctx, "manifest_id", man.ID) mctx := clues.Add(
ctx,
"base_item_data_snapshot_id", base.ItemDataSnapshot.ID,
"base_backup_id", base.Backup.ID)
// a local fault.Bus intance is used to collect metadata files here. // a local fault.Bus intance is used to collect metadata files here.
// we avoid the global fault.Bus because all failures here are ignorable, // we avoid the global fault.Bus because all failures here are ignorable,
@ -103,13 +106,18 @@ func getManifestsAndMetadata(
// spread around. Need to find more idiomatic handling. // spread around. Need to find more idiomatic handling.
fb := fault.New(true) fb := fault.New(true)
paths, err := bp.GetMetadataPaths(mctx, rp, man, fb) paths, err := bp.GetMetadataPaths(mctx, rp, base, fb)
if err != nil { if err != nil {
LogFaultErrors(ctx, fb.Errors(), "collecting metadata paths") LogFaultErrors(ctx, fb.Errors(), "collecting metadata paths")
return nil, nil, false, err return nil, nil, false, err
} }
colls, err := rp.ProduceRestoreCollections(ctx, string(man.ID), paths, nil, fb) colls, err := rp.ProduceRestoreCollections(
ctx,
string(base.ItemDataSnapshot.ID),
paths,
nil,
fb)
if err != nil { if err != nil {
// Restore is best-effort and we want to keep it that way since we want to // Restore is best-effort and we want to keep it that way since we want to
// return as much metadata as we can to reduce the work we'll need to do. // return as much metadata as we can to reduce the work we'll need to do.
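
// NOTE (editorial): compared to the old ManifestEntry-based loop, each
// BackupBase supplies two distinct identifiers: base.Backup.ID (the backup
// model) for log/clues annotation, and base.ItemDataSnapshot.ID (the kopia
// snapshot) for keying ProduceRestoreCollections. Previously the single
// man.ID served both purposes.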

View File

@ -3,6 +3,7 @@ package operations
import ( import (
"bytes" "bytes"
"context" "context"
"fmt"
"io" "io"
"testing" "testing"
@ -17,10 +18,8 @@ import (
"github.com/alcionai/corso/src/internal/kopia" "github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/m365" "github.com/alcionai/corso/src/internal/m365"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts" odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/internal/operations/inject/mock" "github.com/alcionai/corso/src/internal/operations/inject/mock"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/identity" "github.com/alcionai/corso/src/pkg/backup/identity"
"github.com/alcionai/corso/src/pkg/backup/metadata" "github.com/alcionai/corso/src/pkg/backup/metadata"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
@ -290,66 +289,34 @@ func (suite *OperationsManifestsUnitSuite) TestGetMetadataPaths() {
mr := mockRestoreProducer{err: test.expectErr, colls: test.preFetchCollection} mr := mockRestoreProducer{err: test.expectErr, colls: test.preFetchCollection}
mr.buildRestoreFunc(t, test.manID, paths) mr.buildRestoreFunc(t, test.manID, paths)
man := kopia.ManifestEntry{ base := kopia.BackupBase{
Manifest: &snapshot.Manifest{ID: manifest.ID(test.manID)}, ItemDataSnapshot: &snapshot.Manifest{ID: manifest.ID(test.manID)},
Reasons: test.reasons, Reasons: test.reasons,
} }
controller := m365.Controller{} controller := m365.Controller{}
pths, err := controller.GetMetadataPaths(ctx, &mr, man, fault.New(true)) pths, err := controller.GetMetadataPaths(ctx, &mr, base, fault.New(true))
assert.ErrorIs(t, err, test.expectErr, clues.ToCore(err)) assert.ErrorIs(t, err, test.expectErr, clues.ToCore(err))
assert.ElementsMatch(t, test.restorePaths, pths, "restore paths") assert.ElementsMatch(t, test.restorePaths, pths, "restore paths")
}) })
} }
} }
func buildReasons(
tenant string,
ro string,
service path.ServiceType,
cats ...path.CategoryType,
) []identity.Reasoner {
var reasons []identity.Reasoner
for _, cat := range cats {
reasons = append(
reasons,
identity.NewReason(tenant, ro, service, cat))
}
return reasons
}
func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() { func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
const ( var (
ro = "resourceowner" ro = "resourceowner"
tid = "tenantid" tid = "tenantid"
did = "detailsid" emailReason = identity.NewReason(tid, ro, path.ExchangeService, path.EmailCategory)
baseBuilder = func(id int) *kopia.BackupBaseBuilder {
return kopia.NewBackupBaseBuilder("", id).
WithReasons(emailReason)
}
colID = func(id int) string {
return fmt.Sprintf("ID%d-item-data", id)
}
) )
makeMan := func(id, incmpl string, cats ...path.CategoryType) kopia.ManifestEntry {
return kopia.ManifestEntry{
Manifest: &snapshot.Manifest{
ID: manifest.ID(id),
IncompleteReason: incmpl,
},
Reasons: buildReasons(tid, ro, path.ExchangeService, cats...),
}
}
makeBackup := func(snapID string, cats ...path.CategoryType) kopia.BackupEntry {
return kopia.BackupEntry{
Backup: &backup.Backup{
BaseModel: model.BaseModel{
ID: model.StableID(snapID + "bup"),
},
SnapshotID: snapID,
StreamStoreID: snapID + "store",
},
Reasons: buildReasons(tid, ro, path.ExchangeService, cats...),
}
}
table := []struct { table := []struct {
name string name string
bf *mockBackupFinder bf *mockBackupFinder
@ -378,35 +345,28 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases(). ro: kopia.NewMockBackupBases().
WithMergeBases(makeMan("id1", "", path.EmailCategory)). WithMergeBases(baseBuilder(1).Build()),
WithBackups(makeBackup("id1", path.EmailCategory)),
}, },
}, },
rp: mockRestoreProducer{}, rp: mockRestoreProducer{},
reasons: []identity.Reasoner{ reasons: []identity.Reasoner{emailReason},
identity.NewReason("", ro, path.ExchangeService, path.EmailCategory),
},
getMeta: false, getMeta: false,
assertErr: assert.NoError, assertErr: assert.NoError,
assertB: assert.False, assertB: assert.False,
expectDCS: nil, expectDCS: nil,
expectMans: kopia.NewMockBackupBases(). expectMans: kopia.NewMockBackupBases().
WithMergeBases(makeMan("id1", "", path.EmailCategory)). WithMergeBases(baseBuilder(1).Build()).MockDisableMergeBases(),
WithBackups(makeBackup("id1", path.EmailCategory)).
MockDisableMergeBases(),
}, },
{ {
name: "don't get metadata, incomplete manifest", name: "don't get metadata, assist base",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithAssistBases( ro: kopia.NewMockBackupBases().
makeMan("id1", "checkpoint", path.EmailCategory)), WithAssistBases(baseBuilder(1).MarkAssistBase().Build()),
}, },
}, },
rp: mockRestoreProducer{}, rp: mockRestoreProducer{},
reasons: []identity.Reasoner{ reasons: []identity.Reasoner{emailReason},
identity.NewReason("", ro, path.ExchangeService, path.EmailCategory),
},
getMeta: true, getMeta: true,
assertErr: assert.NoError, assertErr: assert.NoError,
// Doesn't matter if it's true or false as merge/assist bases are // Doesn't matter if it's true or false as merge/assist bases are
@ -414,30 +374,32 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
// flag to kopia and just pass it the bases instead. // flag to kopia and just pass it the bases instead.
assertB: assert.True, assertB: assert.True,
expectDCS: nil, expectDCS: nil,
expectMans: kopia.NewMockBackupBases().WithAssistBases( expectMans: kopia.NewMockBackupBases().
makeMan("id1", "checkpoint", path.EmailCategory)), WithAssistBases(baseBuilder(1).MarkAssistBase().Build()),
}, },
{ {
name: "one valid man, multiple reasons", name: "one valid man, multiple reasons",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases( ro: kopia.NewMockBackupBases().
makeMan("id1", "", path.EmailCategory, path.ContactsCategory)), WithMergeBases(baseBuilder(1).AppendReasons(
identity.NewReason(tid, ro, path.ExchangeService, path.ContactsCategory)).
Build()),
}, },
}, },
rp: mockRestoreProducer{ rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{ collsByID: map[string][]data.RestoreCollection{
"id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}}, colID(1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID(1)}}},
}, },
}, },
reasons: []identity.Reasoner{ reasons: []identity.Reasoner{
identity.NewReason("", ro, path.ExchangeService, path.EmailCategory), emailReason,
identity.NewReason("", ro, path.ExchangeService, path.ContactsCategory), identity.NewReason(tid, ro, path.ExchangeService, path.ContactsCategory),
}, },
getMeta: true, getMeta: true,
assertErr: assert.NoError, assertErr: assert.NoError,
assertB: assert.True, assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}}, expectDCS: []mockColl{{id: colID(1)}},
expectPaths: func(t *testing.T, gotPaths []path.Path) { expectPaths: func(t *testing.T, gotPaths []path.Path) {
for _, p := range gotPaths { for _, p := range gotPaths {
assert.Equal( assert.Equal(
@ -456,60 +418,58 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
"read data category doesn't match a given reason") "read data category doesn't match a given reason")
} }
}, },
expectMans: kopia.NewMockBackupBases().WithMergeBases( expectMans: kopia.NewMockBackupBases().
makeMan("id1", "", path.EmailCategory, path.ContactsCategory)), WithMergeBases(baseBuilder(1).AppendReasons(
identity.NewReason(tid, ro, path.ExchangeService, path.ContactsCategory)).
Build()),
}, },
{ {
name: "one valid man, extra incomplete man", name: "one valid man, extra incomplete man",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases(). ro: kopia.NewMockBackupBases().
WithMergeBases(makeMan("id1", "", path.EmailCategory)). WithMergeBases(baseBuilder(1).Build()).
WithAssistBases(makeMan("id2", "checkpoint", path.EmailCategory)), WithAssistBases(baseBuilder(2).MarkAssistBase().Build()),
}, },
}, },
rp: mockRestoreProducer{ rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{ collsByID: map[string][]data.RestoreCollection{
"id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}}, colID(1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID(1)}}},
"id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id2"}}}, colID(2): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID(2)}}},
}, },
}, },
reasons: []identity.Reasoner{ reasons: []identity.Reasoner{emailReason},
identity.NewReason("", ro, path.ExchangeService, path.EmailCategory),
},
getMeta: true, getMeta: true,
assertErr: assert.NoError, assertErr: assert.NoError,
assertB: assert.True, assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}}, expectDCS: []mockColl{{id: colID(1)}},
expectMans: kopia.NewMockBackupBases(). expectMans: kopia.NewMockBackupBases().
WithMergeBases(makeMan("id1", "", path.EmailCategory)). WithMergeBases(baseBuilder(1).Build()).
WithAssistBases(makeMan("id2", "checkpoint", path.EmailCategory)), WithAssistBases(baseBuilder(2).MarkAssistBase().Build()),
}, },
{ {
name: "one valid man, extra incomplete man, drop assist bases", name: "one valid man, extra incomplete man, drop assist bases",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases(). ro: kopia.NewMockBackupBases().
WithMergeBases(makeMan("id1", "", path.EmailCategory)). WithMergeBases(baseBuilder(1).Build()).
WithAssistBases(makeMan("id2", "checkpoint", path.EmailCategory)), WithAssistBases(baseBuilder(2).MarkAssistBase().Build()),
}, },
}, },
rp: mockRestoreProducer{ rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{ collsByID: map[string][]data.RestoreCollection{
"id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}}, colID(1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID(1)}}},
"id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id2"}}}, colID(2): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID(2)}}},
}, },
}, },
reasons: []identity.Reasoner{ reasons: []identity.Reasoner{emailReason},
identity.NewReason("", ro, path.ExchangeService, path.EmailCategory),
},
getMeta: true, getMeta: true,
dropAssist: true, dropAssist: true,
assertErr: assert.NoError, assertErr: assert.NoError,
assertB: assert.True, assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}}, expectDCS: []mockColl{{id: colID(1)}},
expectMans: kopia.NewMockBackupBases(). expectMans: kopia.NewMockBackupBases().
WithMergeBases(makeMan("id1", "", path.EmailCategory)). WithMergeBases(baseBuilder(1).Build()).
MockDisableAssistBases(), MockDisableAssistBases(),
}, },
{ {
@ -517,39 +477,44 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases( ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan("id1", "", path.EmailCategory), baseBuilder(1).Build(),
makeMan("id2", "", path.EmailCategory)), baseBuilder(2).
WithReasons(
identity.NewReason(tid, ro, path.ExchangeService, path.EventsCategory)).
Build()),
}, },
}, },
rp: mockRestoreProducer{ rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{ collsByID: map[string][]data.RestoreCollection{
"id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}}, colID(1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID(1)}}},
"id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id2"}}}, colID(2): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID(2)}}},
}, },
}, },
reasons: []identity.Reasoner{ reasons: []identity.Reasoner{
identity.NewReason("", ro, path.ExchangeService, path.EmailCategory), emailReason,
identity.NewReason(tid, ro, path.ExchangeService, path.EventsCategory),
}, },
getMeta: true, getMeta: true,
assertErr: assert.NoError, assertErr: assert.NoError,
assertB: assert.True, assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}, {id: "id2"}}, expectDCS: []mockColl{{id: colID(1)}, {id: colID(2)}},
expectMans: kopia.NewMockBackupBases().WithMergeBases( expectMans: kopia.NewMockBackupBases().WithMergeBases(
makeMan("id1", "", path.EmailCategory), baseBuilder(1).Build(),
makeMan("id2", "", path.EmailCategory)), baseBuilder(2).
WithReasons(
identity.NewReason(tid, ro, path.ExchangeService, path.EventsCategory)).
Build()),
}, },
{ {
name: "error collecting metadata", name: "error collecting metadata",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases(). ro: kopia.NewMockBackupBases().
WithMergeBases(makeMan("id1", "", path.EmailCategory)), WithMergeBases(baseBuilder(1).Build()),
}, },
}, },
rp: mockRestoreProducer{err: assert.AnError}, rp: mockRestoreProducer{err: assert.AnError},
reasons: []identity.Reasoner{ reasons: []identity.Reasoner{emailReason},
identity.NewReason("", ro, path.ExchangeService, path.EmailCategory),
},
getMeta: true, getMeta: true,
assertErr: assert.Error, assertErr: assert.Error,
assertB: assert.False, assertB: assert.False,
@ -621,54 +586,36 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
} }
func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_FallbackReasons() { func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_FallbackReasons() {
const ( var (
ro = "resourceowner" ro = "resourceowner"
fbro = "fb_resourceowner" fbro = "fb_resourceowner"
tid = "tenantid" tid = "tenantid"
did = "detailsid"
emailReason = identity.NewReason(
tid,
ro,
path.ExchangeService,
path.EmailCategory)
fbEmailReason = identity.NewReason(
tid,
fbro,
path.ExchangeService,
path.EmailCategory)
baseBuilder = func(id int) *kopia.BackupBaseBuilder {
return kopia.NewBackupBaseBuilder("", id).
WithReasons(emailReason)
}
fbBaseBuilder = func(id int) *kopia.BackupBaseBuilder {
return kopia.NewBackupBaseBuilder("fb", id).
WithReasons(fbEmailReason)
}
colID = func(prefix string, id int) string {
return fmt.Sprintf("%sID%d-item-data", prefix, id)
}
) )
makeMan := func(ro, id, incmpl string, cats ...path.CategoryType) kopia.ManifestEntry {
return kopia.ManifestEntry{
Manifest: &snapshot.Manifest{
ID: manifest.ID(id),
IncompleteReason: incmpl,
Tags: map[string]string{"tag:" + kopia.TagBackupID: id + "bup"},
},
Reasons: buildReasons(tid, ro, path.ExchangeService, cats...),
}
}
makeBackupBase := func(ro, snapID, incmpl string, cats ...path.CategoryType) kopia.BackupBase {
return kopia.BackupBase{
Backup: &backup.Backup{
BaseModel: model.BaseModel{
ID: model.StableID(snapID + "bup"),
},
SnapshotID: snapID,
StreamStoreID: snapID + "store",
},
ItemDataSnapshot: &snapshot.Manifest{
ID: manifest.ID(snapID),
IncompleteReason: incmpl,
Tags: map[string]string{"tag:" + kopia.TagBackupID: snapID + "bup"},
},
Reasons: buildReasons(tid, ro, path.ExchangeService, cats...),
}
}
emailReason := identity.NewReason(
"",
ro,
path.ExchangeService,
path.EmailCategory)
fbEmailReason := identity.NewReason(
"",
fbro,
path.ExchangeService,
path.EmailCategory)
table := []struct { table := []struct {
name string name string
bf *mockBackupFinder bf *mockBackupFinder
@ -687,7 +634,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]kopia.BackupBases{
fbro: kopia.NewMockBackupBases(). fbro: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)), WithMergeBases(fbBaseBuilder(1).Build()),
}, },
}, },
rp: mockRestoreProducer{}, rp: mockRestoreProducer{},
@ -697,7 +644,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
assertB: assert.False, assertB: assert.False,
expectDCS: nil, expectDCS: nil,
expectMans: kopia.NewMockBackupBases(). expectMans: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)). WithMergeBases(fbBaseBuilder(1).Build()).
MockDisableMergeBases(), MockDisableMergeBases(),
}, },
{ {
@ -705,33 +652,33 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]kopia.BackupBases{
fbro: kopia.NewMockBackupBases(). fbro: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)), WithMergeBases(fbBaseBuilder(1).Build()),
}, },
}, },
rp: mockRestoreProducer{ rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{ collsByID: map[string][]data.RestoreCollection{
"fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}}, colID("fb", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 1)}}},
}, },
}, },
fallbackReasons: []identity.Reasoner{fbEmailReason}, fallbackReasons: []identity.Reasoner{fbEmailReason},
getMeta: true, getMeta: true,
assertErr: assert.NoError, assertErr: assert.NoError,
assertB: assert.True, assertB: assert.True,
expectDCS: []mockColl{{id: "fb_id1"}}, expectDCS: []mockColl{{id: colID("fb", 1)}},
expectMans: kopia.NewMockBackupBases(). expectMans: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)), WithMergeBases(fbBaseBuilder(1).Build()),
}, },
{ {
name: "only fallbacks, drop assist", name: "only fallbacks, drop assist",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]kopia.BackupBases{
fbro: kopia.NewMockBackupBases(). fbro: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)), WithMergeBases(fbBaseBuilder(1).Build()),
}, },
}, },
rp: mockRestoreProducer{ rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{ collsByID: map[string][]data.RestoreCollection{
"fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}}, colID("fb", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 1)}}},
}, },
}, },
fallbackReasons: []identity.Reasoner{fbEmailReason}, fallbackReasons: []identity.Reasoner{fbEmailReason},
@ -739,9 +686,9 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
dropAssist: true, dropAssist: true,
assertErr: assert.NoError, assertErr: assert.NoError,
assertB: assert.True, assertB: assert.True,
expectDCS: []mockColl{{id: "fb_id1"}}, expectDCS: []mockColl{{id: colID("fb", 1)}},
expectMans: kopia.NewMockBackupBases(). expectMans: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)). WithMergeBases(fbBaseBuilder(1).Build()).
MockDisableAssistBases(), MockDisableAssistBases(),
}, },
{ {
@ -749,15 +696,15 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases(). ro: kopia.NewMockBackupBases().
WithMergeBases(makeMan(ro, "id1", "", path.EmailCategory)), WithMergeBases(baseBuilder(1).Build()),
fbro: kopia.NewMockBackupBases(). fbro: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)), WithMergeBases(fbBaseBuilder(1).Build()),
}, },
}, },
rp: mockRestoreProducer{ rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{ collsByID: map[string][]data.RestoreCollection{
"id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}}, colID("", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("", 1)}}},
"fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}}, colID("fb", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 1)}}},
}, },
}, },
reasons: []identity.Reasoner{emailReason}, reasons: []identity.Reasoner{emailReason},
@ -765,24 +712,25 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
getMeta: true, getMeta: true,
assertErr: assert.NoError, assertErr: assert.NoError,
assertB: assert.True, assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}}, expectDCS: []mockColl{{id: colID("", 1)}},
expectMans: kopia.NewMockBackupBases().WithMergeBases( expectMans: kopia.NewMockBackupBases().
makeMan(ro, "id1", "", path.EmailCategory)), WithMergeBases(baseBuilder(1).Build()),
}, },
{ {
name: "incomplete mans and fallbacks", name: "incomplete mans and fallbacks",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithAssistBases( ro: kopia.NewMockBackupBases().
makeMan(ro, "id2", "checkpoint", path.EmailCategory)), WithAssistBases(
fbro: kopia.NewMockBackupBases().WithAssistBases( baseBuilder(2).MarkAssistBase().Build()),
makeMan(fbro, "fb_id2", "checkpoint", path.EmailCategory)), fbro: kopia.NewMockBackupBases().
WithAssistBases(fbBaseBuilder(2).MarkAssistBase().Build()),
}, },
}, },
rp: mockRestoreProducer{ rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{ collsByID: map[string][]data.RestoreCollection{
"id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id2"}}}, colID("", 2): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("", 2)}}},
"fb_id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id2"}}}, colID("fb", 2): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 2)}}},
}, },
}, },
reasons: []identity.Reasoner{emailReason}, reasons: []identity.Reasoner{emailReason},
@ -791,27 +739,27 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
assertErr: assert.NoError, assertErr: assert.NoError,
assertB: assert.True, assertB: assert.True,
expectDCS: nil, expectDCS: nil,
expectMans: kopia.NewMockBackupBases().WithAssistBases( expectMans: kopia.NewMockBackupBases().
makeMan(ro, "id2", "checkpoint", path.EmailCategory)), WithAssistBases(baseBuilder(2).MarkAssistBase().Build()),
}, },
{ {
name: "complete and incomplete mans and fallbacks", name: "complete and incomplete mans and fallbacks",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases(). ro: kopia.NewMockBackupBases().
WithMergeBases(makeMan(ro, "id1", "", path.EmailCategory)). WithMergeBases(baseBuilder(1).Build()).
WithAssistBases(makeMan(ro, "id2", "checkpoint", path.EmailCategory)), WithAssistBases(baseBuilder(2).MarkAssistBase().Build()),
fbro: kopia.NewMockBackupBases(). fbro: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)). WithMergeBases(fbBaseBuilder(1).Build()).
WithAssistBases(makeMan(fbro, "fb_id2", "checkpoint", path.EmailCategory)), WithAssistBases(fbBaseBuilder(2).MarkAssistBase().Build()),
}, },
}, },
rp: mockRestoreProducer{ rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{ collsByID: map[string][]data.RestoreCollection{
"id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}}, colID("", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("", 1)}}},
"id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id2"}}}, colID("", 2): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("", 2)}}},
"fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}}, colID("fb", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 1)}}},
"fb_id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id2"}}}, colID("fb", 2): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 2)}}},
}, },
}, },
reasons: []identity.Reasoner{emailReason}, reasons: []identity.Reasoner{emailReason},
@ -819,25 +767,26 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
getMeta: true, getMeta: true,
assertErr: assert.NoError, assertErr: assert.NoError,
assertB: assert.True, assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}}, expectDCS: []mockColl{{id: colID("", 1)}},
expectMans: kopia.NewMockBackupBases(). expectMans: kopia.NewMockBackupBases().
WithMergeBases(makeMan(ro, "id1", "", path.EmailCategory)). WithMergeBases(baseBuilder(1).Build()).
WithAssistBases(makeMan(ro, "id2", "checkpoint", path.EmailCategory)), WithAssistBases(baseBuilder(2).MarkAssistBase().Build()),
}, },
{ {
name: "incomplete mans and complete fallbacks", name: "incomplete mans and complete fallbacks",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithAssistBases( ro: kopia.NewMockBackupBases().
makeMan(ro, "id2", "checkpoint", path.EmailCategory)), WithAssistBases(
baseBuilder(2).MarkAssistBase().Build()),
fbro: kopia.NewMockBackupBases(). fbro: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)), WithMergeBases(fbBaseBuilder(1).Build()),
}, },
}, },
rp: mockRestoreProducer{ rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{ collsByID: map[string][]data.RestoreCollection{
"id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id2"}}}, colID("", 2): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("", 2)}}},
"fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}}, colID("fb", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 1)}}},
}, },
}, },
reasons: []identity.Reasoner{emailReason}, reasons: []identity.Reasoner{emailReason},
@ -845,25 +794,25 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
getMeta: true, getMeta: true,
assertErr: assert.NoError, assertErr: assert.NoError,
assertB: assert.True, assertB: assert.True,
expectDCS: []mockColl{{id: "fb_id1"}}, expectDCS: []mockColl{{id: colID("fb", 1)}},
expectMans: kopia.NewMockBackupBases(). expectMans: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)). WithMergeBases(fbBaseBuilder(1).Build()).
WithAssistBases(makeMan(ro, "id2", "checkpoint", path.EmailCategory)), WithAssistBases(baseBuilder(2).MarkAssistBase().Build()),
}, },
{ {
name: "incomplete mans and complete fallbacks, no assist bases", name: "incomplete mans and complete fallbacks, no assist bases",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithAssistBases( ro: kopia.NewMockBackupBases().
makeMan(ro, "id2", "checkpoint", path.EmailCategory)), WithAssistBases(baseBuilder(2).MarkAssistBase().Build()),
fbro: kopia.NewMockBackupBases(). fbro: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)), WithMergeBases(fbBaseBuilder(1).Build()),
}, },
}, },
rp: mockRestoreProducer{ rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{ collsByID: map[string][]data.RestoreCollection{
"id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id2"}}}, colID("", 2): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("", 2)}}},
"fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}}, colID("fb", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 1)}}},
}, },
}, },
reasons: []identity.Reasoner{emailReason}, reasons: []identity.Reasoner{emailReason},
@ -872,25 +821,25 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
dropAssist: true, dropAssist: true,
assertErr: assert.NoError, assertErr: assert.NoError,
assertB: assert.True, assertB: assert.True,
expectDCS: []mockColl{{id: "fb_id1"}}, expectDCS: []mockColl{{id: colID("fb", 1)}},
expectMans: kopia.NewMockBackupBases(). expectMans: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)). WithMergeBases(fbBaseBuilder(1).Build()).
MockDisableAssistBases(), MockDisableAssistBases(),
}, },
{ {
name: "complete mans and incomplete fallbacks", name: "complete mans and incomplete fallbacks",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases( ro: kopia.NewMockBackupBases().
makeMan(ro, "id1", "", path.EmailCategory)), WithMergeBases(baseBuilder(1).Build()),
fbro: kopia.NewMockBackupBases().WithAssistBases( fbro: kopia.NewMockBackupBases().
makeMan(fbro, "fb_id2", "checkpoint", path.EmailCategory)), WithAssistBases(fbBaseBuilder(2).MarkAssistBase().Build()),
}, },
}, },
rp: mockRestoreProducer{ rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{ collsByID: map[string][]data.RestoreCollection{
"id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}}, colID("", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("", 1)}}},
"fb_id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id2"}}}, colID("fb", 2): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 2)}}},
}, },
}, },
reasons: []identity.Reasoner{emailReason}, reasons: []identity.Reasoner{emailReason},
@ -898,100 +847,114 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
getMeta: true, getMeta: true,
assertErr: assert.NoError, assertErr: assert.NoError,
assertB: assert.True, assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}}, expectDCS: []mockColl{{id: colID("", 1)}},
expectMans: kopia.NewMockBackupBases().WithMergeBases( expectMans: kopia.NewMockBackupBases().
makeMan(ro, "id1", "", path.EmailCategory)), WithMergeBases(baseBuilder(1).Build()),
}, },
{ {
name: "complete mans and complete fallbacks, multiple reasons", name: "complete mans and complete fallbacks, multiple reasons",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases( ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory, path.ContactsCategory)), baseBuilder(1).
fbro: kopia.NewMockBackupBases(). AppendReasons(identity.NewReason(tid, ro, path.ExchangeService, path.ContactsCategory)).
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory, path.ContactsCategory)), Build()),
fbro: kopia.NewMockBackupBases().WithMergeBases(
fbBaseBuilder(1).
AppendReasons(identity.NewReason(tid, fbro, path.ExchangeService, path.ContactsCategory)).
Build()),
}, },
}, },
rp: mockRestoreProducer{ rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{ collsByID: map[string][]data.RestoreCollection{
"id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}}, colID("", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("", 1)}}},
"fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}}, colID("fb", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 1)}}},
}, },
}, },
reasons: []identity.Reasoner{ reasons: []identity.Reasoner{
emailReason, emailReason,
identity.NewReason("", ro, path.ExchangeService, path.ContactsCategory), identity.NewReason(tid, ro, path.ExchangeService, path.ContactsCategory),
}, },
fallbackReasons: []identity.Reasoner{ fallbackReasons: []identity.Reasoner{
fbEmailReason, fbEmailReason,
identity.NewReason("", fbro, path.ExchangeService, path.ContactsCategory), identity.NewReason(tid, fbro, path.ExchangeService, path.ContactsCategory),
}, },
getMeta: true, getMeta: true,
assertErr: assert.NoError, assertErr: assert.NoError,
assertB: assert.True, assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}}, expectDCS: []mockColl{{id: colID("", 1)}},
expectMans: kopia.NewMockBackupBases().WithMergeBases( expectMans: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory, path.ContactsCategory)), baseBuilder(1).
AppendReasons(identity.NewReason(tid, ro, path.ExchangeService, path.ContactsCategory)).
Build()),
}, },
{ {
name: "complete mans and complete fallbacks, distinct reasons", name: "complete mans and complete fallbacks, distinct reasons",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases( ro: kopia.NewMockBackupBases().
makeMan(ro, "id1", "", path.EmailCategory)), WithMergeBases(baseBuilder(1).Build()),
fbro: kopia.NewMockBackupBases(). fbro: kopia.NewMockBackupBases().WithMergeBases(
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.ContactsCategory)), fbBaseBuilder(1).
WithReasons(identity.NewReason(tid, fbro, path.ExchangeService, path.ContactsCategory)).
Build()),
}, },
}, },
rp: mockRestoreProducer{ rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{ collsByID: map[string][]data.RestoreCollection{
"id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}}, colID("", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("", 1)}}},
"fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}}, colID("fb", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 1)}}},
}, },
}, },
reasons: []identity.Reasoner{emailReason}, reasons: []identity.Reasoner{emailReason},
fallbackReasons: []identity.Reasoner{ fallbackReasons: []identity.Reasoner{
identity.NewReason("", fbro, path.ExchangeService, path.ContactsCategory), identity.NewReason(tid, fbro, path.ExchangeService, path.ContactsCategory),
}, },
getMeta: true, getMeta: true,
assertErr: assert.NoError, assertErr: assert.NoError,
assertB: assert.True, assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}, {id: "fb_id1"}}, expectDCS: []mockColl{{id: colID("", 1)}, {id: colID("fb", 1)}},
expectMans: kopia.NewMockBackupBases(). expectMans: kopia.NewMockBackupBases().WithMergeBases(
WithMergeBases(makeMan(ro, "id1", "", path.EmailCategory)). baseBuilder(1).Build(),
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.ContactsCategory)), fbBaseBuilder(1).
WithReasons(identity.NewReason(tid, fbro, path.ExchangeService, path.ContactsCategory)).
Build()),
}, },
{ {
name: "complete mans and complete fallbacks, fallback has superset of reasons", name: "complete mans and complete fallbacks, fallback has superset of reasons",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases( ro: kopia.NewMockBackupBases().
makeMan(ro, "id1", "", path.EmailCategory)), WithMergeBases(baseBuilder(1).Build()),
fbro: kopia.NewMockBackupBases(). fbro: kopia.NewMockBackupBases().WithMergeBases(
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory, path.ContactsCategory)), fbBaseBuilder(1).
AppendReasons(identity.NewReason(tid, fbro, path.ExchangeService, path.ContactsCategory)).
Build()),
}, },
}, },
rp: mockRestoreProducer{ rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{ collsByID: map[string][]data.RestoreCollection{
"id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}}, colID("", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("", 1)}}},
"fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}}, colID("fb", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 1)}}},
}, },
}, },
reasons: []identity.Reasoner{ reasons: []identity.Reasoner{
emailReason, emailReason,
identity.NewReason("", ro, path.ExchangeService, path.ContactsCategory), identity.NewReason(tid, ro, path.ExchangeService, path.ContactsCategory),
}, },
fallbackReasons: []identity.Reasoner{ fallbackReasons: []identity.Reasoner{
fbEmailReason, fbEmailReason,
identity.NewReason("", fbro, path.ExchangeService, path.ContactsCategory), identity.NewReason(tid, fbro, path.ExchangeService, path.ContactsCategory),
}, },
getMeta: true, getMeta: true,
assertErr: assert.NoError, assertErr: assert.NoError,
assertB: assert.True, assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}, {id: "fb_id1"}}, expectDCS: []mockColl{{id: colID("", 1)}, {id: colID("fb", 1)}},
expectMans: kopia.NewMockBackupBases(). expectMans: kopia.NewMockBackupBases().WithMergeBases(
WithMergeBases(makeMan(ro, "id1", "", path.EmailCategory)). baseBuilder(1).Build(),
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.ContactsCategory)), fbBaseBuilder(1).
WithReasons(identity.NewReason(tid, fbro, path.ExchangeService, path.ContactsCategory)).
Build()),
}, },
} }

View File

@ -274,8 +274,8 @@ func checkBackupIsInManifests(
mans := bf.FindBases(ctx, []identity.Reasoner{r}, tags) mans := bf.FindBases(ctx, []identity.Reasoner{r}, tags)
for _, man := range mans.MergeBases() { for _, man := range mans.MergeBases() {
bID, ok := man.GetTag(kopia.TagBackupID) bID, ok := man.GetSnapshotTag(kopia.TagBackupID)
if !assert.Truef(t, ok, "snapshot manifest %s missing backup ID tag", man.ID) { if !assert.Truef(t, ok, "snapshot manifest %s missing backup ID tag", man.ItemDataSnapshot.ID) {
continue continue
} }