Cleanup API for BackupBases (#4471)

Update the BackupBases API to return BackupBase
structs that contain both the item data snapshot
and the backup model, instead of exposing separate
functions for each. Includes minor logic updates
to accommodate the change.
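
A rough before/after sketch of the caller-facing shape change (illustrative
only; `bases` stands in for any BackupBases value, and the loops just show
which fields each accessor now exposes):

```go
// Before: backup models and snapshot manifests came from separate accessors.
for _, bup := range bases.Backups() {
	_ = bup.Backup // *backup.Backup (embedded in BackupEntry)
}

for _, man := range bases.MergeBases() {
	_ = man.Manifest // *snapshot.Manifest (embedded in ManifestEntry)
}

// After: each BackupBase pairs the backup model, its item data snapshot, and
// the Reasons it was selected.
for _, base := range bases.MergeBases() {
	_ = base.Backup           // *backup.Backup
	_ = base.ItemDataSnapshot // *snapshot.Manifest
	_ = base.Reasons          // []identity.Reasoner
}
```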

This PR also updates tests and mock code for the
new API.

Suggest reviewing the changes commit by commit.

---

#### Does this PR need a docs update or release note?

- [ ] Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x] No

#### Type of change

- [ ] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [x] 🧹 Tech Debt/Cleanup

#### Issue(s)

* closes #3943

#### Test Plan

- [ ] 💪 Manual
- [x] Unit test
- [x] 💚 E2E

Commit 09531de08c (parent 1470776f3c)
ashmrtn, 2023-10-26 11:38:14 -07:00, committed via GitHub
19 changed files with 450 additions and 657 deletions


@ -17,16 +17,49 @@ import (
// TODO(ashmrtn): Move this into some inject package. Here to avoid import
// cycles.
type BackupBases interface {
// ConvertToAssistBase converts the base with the given item data snapshot ID
// from a merge base to an assist base.
ConvertToAssistBase(manifestID manifest.ID)
Backups() []BackupEntry
UniqueAssistBackups() []BackupEntry
MinBackupVersion() int
MergeBases() []ManifestEntry
// ConvertToAssistBase converts the base with the given backup ID from a merge
// base to an assist base.
ConvertToAssistBase(backupID model.StableID)
// MergeBases returns a []BackupBase that corresponds to all the bases that
// will source unchanged information for this backup during hierarchy merging,
// snapshot creation, and details merging.
MergeBases() []BackupBase
// DisableMergeBases converts all merge bases in this BackupBases to assist
// bases. These bases can still participate in sourcing data kopia considers
// "cached" during the snapshot process and can source backup details entries
// for those cached items. However, they won't be used to source unchanged
// items during hierarchy merging, snapshot creation, or details merging.
//
// This call is order sensitive with DisableAssistBases.
DisableMergeBases()
UniqueAssistBases() []ManifestEntry
// UniqueAssistBases returns the set of assist bases for the backup operation.
// Assist bases are used to source item data and details entries if the item
// is considered "cached" by kopia. They are not used to source unchanged
// items during hierarchy merging.
UniqueAssistBases() []BackupBase
// DisableAssistBases clears the set of assist bases for this backup. Doing so
// will result in kopia not finding any "cached" items and assist bases won't
// participate in details merging.
//
// This call is order sensitive with DisableMergeBases.
DisableAssistBases()
// MinBackupVersion returns the lowest version of all merge backups in the
// BackupBases.
MinBackupVersion() int
// MergeBackupBases takes another BackupBases and merges its contained assist
// and merge bases into this BackupBases. The passed-in BackupBases is
// considered an older alternative to this BackupBases, meaning bases from
// other won't be selected unless there's no item in this BackupBases to cover
// that Reason.
//
// Callers pass in reasonToKey to control how individual BackupBase items are
// selected. For example, to migrate from using user name to user ID as the
// protected resource in the Reason, the reasonToKey function could map
// BackupBase items with the same tenant, service, and category to the same
// key. This works because backup operations are already per protected
// resource.
//
// This call is order sensitive with DisableMergeBases and DisableAssistBases.
MergeBackupBases(
ctx context.Context,
other BackupBases,
@ -36,12 +69,7 @@ type BackupBases interface {
// incremental snapshot operations. It consists of the union of merge bases
// and assist bases. If DisableAssistBases has been called then it returns
// nil.
SnapshotAssistBases() []ManifestEntry
// TODO(ashmrtn): Remove other functions and just have these once other code
// is updated. Here for now so changes in this file can be made.
NewMergeBases() []BackupBase
NewUniqueAssistBases() []BackupBase
SnapshotAssistBases() []BackupBase
}
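
// Illustrative sketch, not part of this change: one way a caller might shape
// the reasonToKey argument for the migration scenario described above. It
// ignores the protected resource entirely so bases recorded under a user name
// and bases recorded under a user ID land on the same key, and it assumes the
// tenant is constant for a single backup operation, so it only relies on the
// Reasoner accessors used elsewhere in this package.
//
//	func migrationKey(r identity.Reasoner) string {
//		return r.Service().String() + r.Category().String()
//	}
//
//	merged := current.MergeBackupBases(ctx, older, migrationKey)
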
type backupBases struct {
@ -53,37 +81,21 @@ type backupBases struct {
disableAssistBases bool
}
func (bb *backupBases) SnapshotAssistBases() []ManifestEntry {
func (bb *backupBases) SnapshotAssistBases() []BackupBase {
if bb.disableAssistBases {
return nil
}
res := []ManifestEntry{}
for _, ab := range bb.assistBases {
res = append(res, ManifestEntry{
Manifest: ab.ItemDataSnapshot,
Reasons: ab.Reasons,
})
}
for _, mb := range bb.mergeBases {
res = append(res, ManifestEntry{
Manifest: mb.ItemDataSnapshot,
Reasons: mb.Reasons,
})
}
// Need to use the actual variables here because the functions will return nil
// depending on what's been marked as disabled.
return res
return append(slices.Clone(bb.mergeBases), bb.assistBases...)
}
func (bb *backupBases) ConvertToAssistBase(manifestID manifest.ID) {
func (bb *backupBases) ConvertToAssistBase(backupID model.StableID) {
idx := slices.IndexFunc(
bb.mergeBases,
func(base BackupBase) bool {
return base.ItemDataSnapshot.ID == manifestID
return base.Backup.ID == backupID
})
if idx >= 0 {
bb.assistBases = append(bb.assistBases, bb.mergeBases[idx])
@ -91,36 +103,6 @@ func (bb *backupBases) ConvertToAssistBase(manifestID manifest.ID) {
}
}
func (bb backupBases) Backups() []BackupEntry {
res := []BackupEntry{}
for _, mb := range bb.mergeBases {
res = append(res, BackupEntry{
Backup: mb.Backup,
Reasons: mb.Reasons,
})
}
return res
}
func (bb backupBases) UniqueAssistBackups() []BackupEntry {
if bb.disableAssistBases {
return nil
}
res := []BackupEntry{}
for _, ab := range bb.assistBases {
res = append(res, BackupEntry{
Backup: ab.Backup,
Reasons: ab.Reasons,
})
}
return res
}
func (bb *backupBases) MinBackupVersion() int {
min := version.NoBackup
@ -137,20 +119,7 @@ func (bb *backupBases) MinBackupVersion() int {
return min
}
func (bb backupBases) MergeBases() []ManifestEntry {
res := []ManifestEntry{}
for _, mb := range bb.mergeBases {
res = append(res, ManifestEntry{
Manifest: mb.ItemDataSnapshot,
Reasons: mb.Reasons,
})
}
return res
}
func (bb backupBases) NewMergeBases() []BackupBase {
func (bb backupBases) MergeBases() []BackupBase {
return slices.Clone(bb.mergeBases)
}
@ -165,24 +134,7 @@ func (bb *backupBases) DisableMergeBases() {
bb.mergeBases = nil
}
func (bb backupBases) UniqueAssistBases() []ManifestEntry {
if bb.disableAssistBases {
return nil
}
res := []ManifestEntry{}
for _, ab := range bb.assistBases {
res = append(res, ManifestEntry{
Manifest: ab.ItemDataSnapshot,
Reasons: ab.Reasons,
})
}
return res
}
func (bb backupBases) NewUniqueAssistBases() []BackupBase {
func (bb backupBases) UniqueAssistBases() []BackupBase {
if bb.disableAssistBases {
return nil
}
@ -250,11 +202,11 @@ func (bb *backupBases) MergeBackupBases(
other BackupBases,
reasonToKey func(reason identity.Reasoner) string,
) BackupBases {
if other == nil || (len(other.NewMergeBases()) == 0 && len(other.NewUniqueAssistBases()) == 0) {
if other == nil || (len(other.MergeBases()) == 0 && len(other.UniqueAssistBases()) == 0) {
return bb
}
if bb == nil || (len(bb.NewMergeBases()) == 0 && len(bb.NewUniqueAssistBases()) == 0) {
if bb == nil || (len(bb.MergeBases()) == 0 && len(bb.UniqueAssistBases()) == 0) {
return other
}
@ -282,12 +234,12 @@ func (bb *backupBases) MergeBackupBases(
}
}
addMerge := getMissingBases(reasonToKey, toMerge, other.NewMergeBases())
addAssist := getMissingBases(reasonToKey, assist, other.NewUniqueAssistBases())
addMerge := getMissingBases(reasonToKey, toMerge, other.MergeBases())
addAssist := getMissingBases(reasonToKey, assist, other.UniqueAssistBases())
res := &backupBases{
mergeBases: append(addMerge, bb.NewMergeBases()...),
assistBases: append(addAssist, bb.NewUniqueAssistBases()...),
mergeBases: append(addMerge, bb.MergeBases()...),
assistBases: append(addAssist, bb.UniqueAssistBases()...),
}
return res


@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"golang.org/x/exp/slices"
"github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/internal/tester"
@ -191,7 +192,7 @@ func (suite *BackupBasesUnitSuite) TestConvertToAssistBase() {
},
}
delID := manifest.ID("its3")
delID := model.StableID("3")
table := []struct {
name string
@ -298,39 +299,19 @@ func (suite *BackupBasesUnitSuite) TestDisableMergeBases() {
}
bb.DisableMergeBases()
assert.Empty(t, bb.Backups())
assert.Empty(t, bb.MergeBases())
// Merge bases should still appear in the assist base set passed in for kopia
// snapshots and details merging.
assert.ElementsMatch(
t,
[]ManifestEntry{
{Manifest: merge[0].ItemDataSnapshot},
{Manifest: merge[1].ItemDataSnapshot},
{Manifest: assist[0].ItemDataSnapshot},
{Manifest: assist[1].ItemDataSnapshot},
},
append(slices.Clone(merge), assist...),
bb.SnapshotAssistBases())
assert.ElementsMatch(
t,
[]ManifestEntry{
{Manifest: merge[0].ItemDataSnapshot},
{Manifest: merge[1].ItemDataSnapshot},
{Manifest: assist[0].ItemDataSnapshot},
{Manifest: assist[1].ItemDataSnapshot},
},
append(slices.Clone(merge), assist...),
bb.UniqueAssistBases())
assert.ElementsMatch(
t,
[]BackupEntry{
{Backup: merge[0].Backup},
{Backup: merge[1].Backup},
{Backup: assist[0].Backup},
{Backup: assist[1].Backup},
},
bb.UniqueAssistBackups())
}
func (suite *BackupBasesUnitSuite) TestDisableAssistBases() {
@ -342,11 +323,9 @@ func (suite *BackupBasesUnitSuite) TestDisableAssistBases() {
bb.DisableAssistBases()
assert.Empty(t, bb.UniqueAssistBases())
assert.Empty(t, bb.UniqueAssistBackups())
assert.Empty(t, bb.SnapshotAssistBases())
// Merge base should be unchanged.
assert.Len(t, bb.Backups(), 2)
assert.Len(t, bb.MergeBases(), 2)
}


@ -42,16 +42,12 @@ func reasonKey(r identity.Reasoner) string {
return r.ProtectedResource() + r.Service().String() + r.Category().String()
}
type BackupEntry struct {
*backup.Backup
Reasons []identity.Reasoner
}
type ManifestEntry struct {
*snapshot.Manifest
// Reasons contains the ResourceOwners and Service/Categories that caused this
// snapshot to be selected as a base. We can't reuse OwnersCats here because
// it's possible some ResourceOwners will have a subset of the Categories as
type BackupBase struct {
Backup *backup.Backup
ItemDataSnapshot *snapshot.Manifest
// Reasons contains the tenant, protected resource and service/categories that
// caused this snapshot to be selected as a base. It's possible some
// (tenant, protected resources) will have a subset of the categories as
// the reason for selecting a snapshot. For example:
// 1. backup user1 email,contacts -> B1
// 2. backup user1 contacts -> B2 (uses B1 as base)
@ -59,9 +55,9 @@ type ManifestEntry struct {
Reasons []identity.Reasoner
}
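
// Illustrative sketch, not part of this change: a BackupBase pairs the backup
// model with the item data snapshot it was created from, plus the Reasons it
// was selected for. All values below are placeholders.
//
//	base := BackupBase{
//		Backup:           &backup.Backup{BaseModel: model.BaseModel{ID: "bid1"}},
//		ItemDataSnapshot: &snapshot.Manifest{ID: "snap1"},
//		Reasons: []identity.Reasoner{
//			identity.NewReason("tenant", "user-id", path.ExchangeService, path.EmailCategory),
//		},
//	}
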
func (me ManifestEntry) GetTag(key string) (string, bool) {
func (bb BackupBase) GetSnapshotTag(key string) (string, bool) {
k, _ := makeTagKV(key)
v, ok := me.Tags[k]
v, ok := bb.ItemDataSnapshot.Tags[k]
return v, ok
}
@ -136,19 +132,6 @@ func (b *baseFinder) getBackupModel(
return bup, nil
}
type BackupBase struct {
Backup *backup.Backup
ItemDataSnapshot *snapshot.Manifest
// Reasons contains the tenant, protected resource and service/categories that
// caused this snapshot to be selected as a base. It's possible some
// (tenant, protected resources) will have a subset of the categories as
// the reason for selecting a snapshot. For example:
// 1. backup user1 email,contacts -> B1
// 2. backup user1 contacts -> B2 (uses B1 as base)
// 3. backup user1 email,contacts,events (uses B1 for email, B2 for contacts)
Reasons []identity.Reasoner
}
// findBasesInSet goes through manifest metadata entries and sees if they're
// incomplete or not. Manifests which don't have an associated backup
// are discarded as incomplete. Manifests are then checked to see if they


@ -970,12 +970,12 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
checkBackupEntriesMatch(
t,
bb.Backups(),
bb.MergeBases(),
test.backupData,
test.expectedBaseReasons)
checkBackupEntriesMatch(
t,
bb.UniqueAssistBackups(),
bb.UniqueAssistBases(),
test.backupData,
test.expectedAssistReasons)
@ -1078,7 +1078,7 @@ func (suite *BaseFinderUnitSuite) TestFindBases_CustomTags() {
func checkManifestEntriesMatch(
t *testing.T,
retSnaps []ManifestEntry,
retSnaps []BackupBase,
allExpected []manifestInfo,
expectedIdxsAndReasons map[int][]identity.Reasoner,
) {
@ -1090,7 +1090,7 @@ func checkManifestEntriesMatch(
got := make([]*snapshot.Manifest, 0, len(retSnaps))
for _, s := range retSnaps {
got = append(got, s.Manifest)
got = append(got, s.ItemDataSnapshot)
}
assert.ElementsMatch(t, expected, got)
@ -1102,7 +1102,7 @@ func checkManifestEntriesMatch(
}
for _, found := range retSnaps {
reasons, ok := expectedReasons[found.ID]
reasons, ok := expectedReasons[found.ItemDataSnapshot.ID]
if !ok {
// Missing or extra snapshots will be reported by earlier checks.
continue
@ -1113,13 +1113,13 @@ func checkManifestEntriesMatch(
reasons,
found.Reasons,
"incorrect reasons for snapshot with ID %s",
found.ID)
found.ItemDataSnapshot.ID)
}
}
func checkBackupEntriesMatch(
t *testing.T,
retBups []BackupEntry,
retBups []BackupBase,
allExpected []backupInfo,
expectedIdxsAndReasons map[int][]identity.Reasoner,
) {
@ -1143,7 +1143,7 @@ func checkBackupEntriesMatch(
}
for _, found := range retBups {
reasons, ok := expectedReasons[found.ID]
reasons, ok := expectedReasons[found.Backup.ID]
if !ok {
// Missing or extra snapshots will be reported by earlier checks.
continue
@ -1154,6 +1154,6 @@ func checkBackupEntriesMatch(
reasons,
found.Reasons,
"incorrect reasons for snapshot with ID %s",
found.ID)
found.Backup.ID)
}
}


@ -14,71 +14,30 @@ import (
"github.com/alcionai/corso/src/pkg/path"
)
// TODO(ashmrtn): Temp function until all PRs in the series merge.
func backupsMatch(t *testing.T, expect, got []BackupEntry, dataType string) {
func basesMatch(t *testing.T, expect, got []BackupBase, dataType string) {
expectBups := make([]*backup.Backup, 0, len(expect))
expectMans := make([]*snapshot.Manifest, 0, len(expect))
gotBups := make([]*backup.Backup, 0, len(got))
gotBasesByID := map[model.StableID]BackupEntry{}
gotMans := make([]*snapshot.Manifest, 0, len(got))
gotBasesByID := map[model.StableID]BackupBase{}
for _, e := range expect {
if e.Backup != nil {
expectBups = append(expectBups, e.Backup)
}
expectMans = append(expectMans, e.ItemDataSnapshot)
}
for _, g := range got {
if g.Backup != nil {
gotBups = append(gotBups, g.Backup)
gotMans = append(gotMans, g.ItemDataSnapshot)
gotBasesByID[g.Backup.ID] = g
}
}
assert.ElementsMatch(t, expectBups, gotBups, dataType+" backup model")
// Need to compare Reasons separately since they're also a slice.
for _, e := range expect {
if e.Backup == nil {
continue
}
b, ok := gotBasesByID[e.Backup.ID]
if !ok {
// Missing bases will be reported above.
continue
}
assert.ElementsMatch(t, e.Reasons, b.Reasons)
}
}
// TODO(ashmrtn): Temp function until all PRs in the series merge.
func manifestsMatch(t *testing.T, expect, got []ManifestEntry, dataType string) {
expectMans := make([]*snapshot.Manifest, 0, len(expect))
gotMans := make([]*snapshot.Manifest, 0, len(got))
gotBasesByID := map[manifest.ID]ManifestEntry{}
for _, e := range expect {
if e.Manifest != nil {
expectMans = append(expectMans, e.Manifest)
}
}
for _, g := range got {
if g.Manifest != nil {
gotMans = append(gotMans, g.Manifest)
gotBasesByID[g.Manifest.ID] = g
}
}
assert.ElementsMatch(t, expectMans, gotMans, dataType+" item data snapshot")
// Need to compare Reasons separately since they're also a slice.
for _, e := range expect {
if e.Manifest == nil {
continue
}
b, ok := gotBasesByID[e.Manifest.ID]
b, ok := gotBasesByID[e.Backup.ID]
if !ok {
// Missing bases will be reported above.
continue
@ -94,9 +53,7 @@ func AssertBackupBasesEqual(t *testing.T, expect, got BackupBases) {
}
if expect == nil {
assert.Empty(t, got.Backups(), "backups")
assert.Empty(t, got.MergeBases(), "merge bases")
assert.Empty(t, got.UniqueAssistBackups(), "assist backups")
assert.Empty(t, got.UniqueAssistBases(), "assist bases")
assert.Empty(t, got.SnapshotAssistBases(), "snapshot assist bases")
@ -104,9 +61,7 @@ func AssertBackupBasesEqual(t *testing.T, expect, got BackupBases) {
}
if got == nil {
if len(expect.Backups()) > 0 ||
len(expect.MergeBases()) > 0 ||
len(expect.UniqueAssistBackups()) > 0 ||
if len(expect.MergeBases()) > 0 ||
len(expect.UniqueAssistBases()) > 0 ||
len(expect.SnapshotAssistBases()) > 0 {
assert.Fail(t, "got was nil but expected non-nil result %v", expect)
@ -115,11 +70,9 @@ func AssertBackupBasesEqual(t *testing.T, expect, got BackupBases) {
return
}
backupsMatch(t, expect.Backups(), got.Backups(), "merge backups")
manifestsMatch(t, expect.MergeBases(), got.MergeBases(), "merge manifests")
backupsMatch(t, expect.UniqueAssistBackups(), got.UniqueAssistBackups(), "assist backups")
manifestsMatch(t, expect.UniqueAssistBases(), got.UniqueAssistBases(), "assist manifests")
manifestsMatch(t, expect.SnapshotAssistBases(), got.SnapshotAssistBases(), "snapshot assist bases")
basesMatch(t, expect.MergeBases(), got.MergeBases(), "merge bases")
basesMatch(t, expect.UniqueAssistBases(), got.UniqueAssistBases(), "assist bases")
basesMatch(t, expect.SnapshotAssistBases(), got.SnapshotAssistBases(), "snapshot assist bases")
}
func NewMockBackupBases() *MockBackupBases {
@ -130,64 +83,13 @@ type MockBackupBases struct {
*backupBases
}
func (bb *MockBackupBases) WithBackups(b ...BackupEntry) *MockBackupBases {
bases := make([]BackupBase, 0, len(b))
for _, base := range b {
bases = append(bases, BackupBase{
Backup: base.Backup,
Reasons: base.Reasons,
})
}
bb.backupBases.mergeBases = append(bb.NewMergeBases(), bases...)
func (bb *MockBackupBases) WithMergeBases(b ...BackupBase) *MockBackupBases {
bb.backupBases.mergeBases = append(bb.MergeBases(), b...)
return bb
}
func (bb *MockBackupBases) WithMergeBases(m ...ManifestEntry) *MockBackupBases {
bases := make([]BackupBase, 0, len(m))
for _, base := range m {
bases = append(bases, BackupBase{
ItemDataSnapshot: base.Manifest,
Reasons: base.Reasons,
})
}
bb.backupBases.mergeBases = append(bb.NewMergeBases(), bases...)
return bb
}
func (bb *MockBackupBases) WithAssistBackups(b ...BackupEntry) *MockBackupBases {
bases := make([]BackupBase, 0, len(b))
for _, base := range b {
bases = append(bases, BackupBase{
Backup: base.Backup,
Reasons: base.Reasons,
})
}
bb.backupBases.assistBases = append(bb.NewUniqueAssistBases(), bases...)
return bb
}
func (bb *MockBackupBases) WithAssistBases(m ...ManifestEntry) *MockBackupBases {
bases := make([]BackupBase, 0, len(m))
for _, base := range m {
bases = append(bases, BackupBase{
ItemDataSnapshot: base.Manifest,
Reasons: base.Reasons,
})
}
bb.backupBases.assistBases = append(bb.NewUniqueAssistBases(), bases...)
return bb
}
func (bb *MockBackupBases) NewWithMergeBases(b ...BackupBase) *MockBackupBases {
bb.backupBases.mergeBases = append(bb.NewMergeBases(), b...)
func (bb *MockBackupBases) WithAssistBases(b ...BackupBase) *MockBackupBases {
bb.backupBases.assistBases = append(bb.UniqueAssistBases(), b...)
return bb
}


@ -13,7 +13,6 @@ import (
"github.com/alcionai/clues"
"github.com/kopia/kopia/fs"
"github.com/kopia/kopia/fs/virtualfs"
"github.com/kopia/kopia/repo/manifest"
"github.com/kopia/kopia/snapshot/snapshotfs"
"golang.org/x/exp/maps"
@ -1047,26 +1046,20 @@ func traverseBaseDir(
return nil
}
func logBaseInfo(ctx context.Context, m ManifestEntry) {
func logBaseInfo(ctx context.Context, b BackupBase) {
svcs := map[string]struct{}{}
cats := map[string]struct{}{}
for _, r := range m.Reasons {
for _, r := range b.Reasons {
svcs[r.Service().String()] = struct{}{}
cats[r.Category().String()] = struct{}{}
}
mbID, _ := m.GetTag(TagBackupID)
if len(mbID) == 0 {
mbID = "no_backup_id_tag"
}
// Base backup ID and base snapshot ID are already in context clues.
logger.Ctx(ctx).Infow(
"using base for backup",
"base_snapshot_id", m.ID,
"services", maps.Keys(svcs),
"categories", maps.Keys(cats),
"base_backup_id", mbID)
"categories", maps.Keys(cats))
}
const (
@ -1093,20 +1086,32 @@ const (
func inflateBaseTree(
ctx context.Context,
loader snapshotLoader,
snap ManifestEntry,
base BackupBase,
updatedPaths map[string]path.Path,
roots map[string]*treeMap,
) error {
bupID := "no_backup_id"
if base.Backup != nil && len(base.Backup.ID) > 0 {
bupID = string(base.Backup.ID)
}
ctx = clues.Add(
ctx,
"base_backup_id", bupID,
"base_snapshot_id", base.ItemDataSnapshot.ID)
// Only complete snapshots should be used to source base information.
// Snapshots for checkpoints will rely on kopia-assisted dedupe to efficiently
// handle items that were completely uploaded before Corso crashed.
if len(snap.IncompleteReason) > 0 {
if len(base.ItemDataSnapshot.IncompleteReason) > 0 {
logger.Ctx(ctx).Info("skipping incomplete snapshot")
return nil
}
ctx = clues.Add(ctx, "snapshot_base_id", snap.ID)
// Some logging to help track things.
logBaseInfo(ctx, base)
root, err := loader.SnapshotRoot(snap.Manifest)
root, err := loader.SnapshotRoot(base.ItemDataSnapshot)
if err != nil {
return clues.Wrap(err, "getting snapshot root directory").WithClues(ctx)
}
@ -1116,13 +1121,10 @@ func inflateBaseTree(
return clues.New("snapshot root is not a directory").WithClues(ctx)
}
// Some logging to help track things.
logBaseInfo(ctx, snap)
// For each subtree corresponding to the tuple
// (resource owner, service, category) merge the directories in the base with
// what has been reported in the collections we got.
for _, r := range snap.Reasons {
for _, r := range base.Reasons {
ictx := clues.Add(
ctx,
"subtree_service", r.Service().String(),
@ -1204,7 +1206,7 @@ func inflateBaseTree(
func inflateDirTree(
ctx context.Context,
loader snapshotLoader,
baseSnaps []ManifestEntry,
bases []BackupBase,
collections []data.BackupCollection,
globalExcludeSet prefixmatcher.StringSetReader,
progress *corsoProgress,
@ -1214,22 +1216,18 @@ func inflateDirTree(
return nil, clues.Wrap(err, "inflating collection tree")
}
baseIDs := make([]manifest.ID, 0, len(baseSnaps))
for _, snap := range baseSnaps {
baseIDs = append(baseIDs, snap.ID)
}
// Individual backup/snapshot IDs will be logged when merging their hierarchy.
ctx = clues.Add(ctx, "len_bases", len(bases))
ctx = clues.Add(ctx, "len_base_snapshots", len(baseSnaps), "base_snapshot_ids", baseIDs)
if len(baseIDs) > 0 {
logger.Ctx(ctx).Info("merging hierarchies from base snapshots")
if len(bases) > 0 {
logger.Ctx(ctx).Info("merging hierarchies from base backups")
} else {
logger.Ctx(ctx).Info("no base snapshots to merge")
logger.Ctx(ctx).Info("no base backups to merge")
}
for _, snap := range baseSnaps {
if err = inflateBaseTree(ctx, loader, snap, updatedPaths, roots); err != nil {
return nil, clues.Wrap(err, "inflating base snapshot tree(s)")
for _, base := range bases {
if err = inflateBaseTree(ctx, loader, base, updatedPaths, roots); err != nil {
return nil, clues.Wrap(err, "inflating base backup tree(s)")
}
}


@ -878,19 +878,19 @@ func (msw *mockSnapshotWalker) SnapshotRoot(*snapshot.Manifest) (fs.Entry, error
return msw.snapshotRoot, nil
}
func makeManifestEntry(
func makeBackupBase(
id, tenant, resourceOwner string,
service path.ServiceType,
categories ...path.CategoryType,
) ManifestEntry {
) BackupBase {
var reasons []identity.Reasoner
for _, c := range categories {
reasons = append(reasons, identity.NewReason(tenant, resourceOwner, service, c))
}
return ManifestEntry{
Manifest: &snapshot.Manifest{
return BackupBase{
ItemDataSnapshot: &snapshot.Manifest{
ID: manifest.ID(id),
},
Reasons: reasons,
@ -1201,8 +1201,8 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
dirTree, err := inflateDirTree(
ctx,
msw,
[]ManifestEntry{
makeManifestEntry("", testTenant, testUser, path.ExchangeService, path.EmailCategory),
[]BackupBase{
makeBackupBase("", testTenant, testUser, path.ExchangeService, path.EmailCategory),
},
test.inputCollections(),
pmMock.NewPrefixMap(nil),
@ -1916,8 +1916,8 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
dirTree, err := inflateDirTree(
ctx,
msw,
[]ManifestEntry{
makeManifestEntry("", testTenant, testUser, path.ExchangeService, path.EmailCategory),
[]BackupBase{
makeBackupBase("", testTenant, testUser, path.ExchangeService, path.EmailCategory),
},
test.inputCollections(t),
ie,
@ -2060,8 +2060,8 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSkipsDeletedSubtre
dirTree, err := inflateDirTree(
ctx,
msw,
[]ManifestEntry{
makeManifestEntry("", testTenant, testUser, path.ExchangeService, path.EmailCategory),
[]BackupBase{
makeBackupBase("", testTenant, testUser, path.ExchangeService, path.EmailCategory),
},
collections,
pmMock.NewPrefixMap(nil),
@ -2160,8 +2160,8 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_HandleEmptyBase()
dirTree, err := inflateDirTree(
ctx,
msw,
[]ManifestEntry{
makeManifestEntry("", testTenant, testUser, path.ExchangeService, path.EmailCategory),
[]BackupBase{
makeBackupBase("", testTenant, testUser, path.ExchangeService, path.EmailCategory),
},
collections,
pmMock.NewPrefixMap(nil),
@ -2376,9 +2376,9 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsCorrectSubt
dirTree, err := inflateDirTree(
ctx,
msw,
[]ManifestEntry{
makeManifestEntry("id1", testTenant, testUser, path.ExchangeService, path.ContactsCategory),
makeManifestEntry("id2", testTenant, testUser, path.ExchangeService, path.EmailCategory),
[]BackupBase{
makeBackupBase("id1", testTenant, testUser, path.ExchangeService, path.ContactsCategory),
makeBackupBase("id2", testTenant, testUser, path.ExchangeService, path.EmailCategory),
},
collections,
pmMock.NewPrefixMap(nil),
@ -2529,8 +2529,8 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsMigrateSubt
dirTree, err := inflateDirTree(
ctx,
msw,
[]ManifestEntry{
makeManifestEntry("id1", testTenant, testUser, path.ExchangeService, path.EmailCategory, path.ContactsCategory),
[]BackupBase{
makeBackupBase("id1", testTenant, testUser, path.ExchangeService, path.EmailCategory, path.ContactsCategory),
},
[]data.BackupCollection{mce, mcc},
pmMock.NewPrefixMap(nil),
@ -3454,8 +3454,13 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_SelectiveSubtreeP
dirTree, err := inflateDirTree(
ctx,
msw,
[]ManifestEntry{
makeManifestEntry("", tenant, user, path.OneDriveService, path.FilesCategory),
[]BackupBase{
makeBackupBase(
"id1",
tenant,
user,
path.OneDriveService,
path.FilesCategory),
},
test.inputCollections(t),
ie,


@ -179,8 +179,8 @@ func (w Wrapper) ConsumeBackupCollections(
// snapshot bases into inflateDirTree so that the new snapshot
// includes historical data.
var (
mergeBase []ManifestEntry
assistBase []ManifestEntry
mergeBase []BackupBase
assistBase []BackupBase
)
if bases != nil {
@ -230,7 +230,7 @@ func (w Wrapper) ConsumeBackupCollections(
func (w Wrapper) makeSnapshotWithRoot(
ctx context.Context,
prevSnapEntries []ManifestEntry,
prevBases []BackupBase,
root fs.Directory,
addlTags map[string]string,
progress *corsoProgress,
@ -244,17 +244,17 @@ func (w Wrapper) makeSnapshotWithRoot(
}
)
snapIDs := make([]manifest.ID, 0, len(prevSnapEntries)) // just for logging
prevSnaps := make([]*snapshot.Manifest, 0, len(prevSnapEntries))
snapIDs := make([]manifest.ID, 0, len(prevBases)) // just for logging
prevSnaps := make([]*snapshot.Manifest, 0, len(prevBases))
for _, ent := range prevSnapEntries {
prevSnaps = append(prevSnaps, ent.Manifest)
snapIDs = append(snapIDs, ent.ID)
for _, ent := range prevBases {
prevSnaps = append(prevSnaps, ent.ItemDataSnapshot)
snapIDs = append(snapIDs, ent.ItemDataSnapshot.ID)
}
ctx = clues.Add(
ctx,
"num_assist_snapshots", len(prevSnapEntries),
"num_assist_snapshots", len(prevBases),
"assist_snapshot_ids", snapIDs,
"additional_tags", addlTags)


@ -80,14 +80,14 @@ func BenchmarkHierarchyMerge(b *testing.B) {
type testCase struct {
name string
baseBackups func(base ManifestEntry) BackupBases
baseBackups func(base BackupBase) BackupBases
collections []data.BackupCollection
}
// Initial backup. All files should be considered new by kopia.
baseBackupCase := testCase{
name: "Setup",
baseBackups: func(ManifestEntry) BackupBases {
baseBackups: func(BackupBase) BackupBases {
return NewMockBackupBases()
},
collections: cols,
@ -97,8 +97,8 @@ func BenchmarkHierarchyMerge(b *testing.B) {
t tester.TestT,
ctx context.Context,
test testCase,
base ManifestEntry,
) ManifestEntry {
base BackupBase,
) BackupBase {
bbs := test.baseBackups(base)
counter := count.New()
@ -126,20 +126,20 @@ func BenchmarkHierarchyMerge(b *testing.B) {
manifest.ID(stats.SnapshotID))
require.NoError(t, err, clues.ToCore(err))
return ManifestEntry{
Manifest: snap,
return BackupBase{
ItemDataSnapshot: snap,
Reasons: reasons,
}
}
b.Logf("setting up base backup\n")
base := runAndTestBackup(b, ctx, baseBackupCase, ManifestEntry{})
base := runAndTestBackup(b, ctx, baseBackupCase, BackupBase{})
table := []testCase{
{
name: "Merge All",
baseBackups: func(base ManifestEntry) BackupBases {
baseBackups: func(base BackupBase) BackupBases {
return NewMockBackupBases().WithMergeBases(base)
},
collections: func() []data.BackupCollection {


@ -839,7 +839,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
type testCase struct {
name string
baseBackups func(base ManifestEntry) BackupBases
baseBackups func(base BackupBase) BackupBases
collections []data.BackupCollection
expectedUploadedFiles int
expectedCachedFiles int
@ -864,7 +864,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
// Initial backup. All files should be considered new by kopia.
baseBackupCase := testCase{
name: "Uncached",
baseBackups: func(ManifestEntry) BackupBases {
baseBackups: func(BackupBase) BackupBases {
return NewMockBackupBases()
},
collections: collections,
@ -875,8 +875,8 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
uploadedBytes: []int64{8000, 10000},
}
runAndTestBackup := func(test testCase, base ManifestEntry) ManifestEntry {
var res ManifestEntry
runAndTestBackup := func(test testCase, base BackupBase) BackupBase {
var man *snapshot.Manifest
suite.Run(test.name, func() {
t := suite.T()
@ -966,21 +966,22 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
manifest.ID(stats.SnapshotID))
require.NoError(t, err, clues.ToCore(err))
res = ManifestEntry{
Manifest: snap,
Reasons: reasons,
}
man = snap
})
return res
return BackupBase{
ItemDataSnapshot: man,
Reasons: reasons,
}
}
base := runAndTestBackup(baseBackupCase, ManifestEntry{})
base := runAndTestBackup(baseBackupCase, BackupBase{})
require.NotNil(suite.T(), base.ItemDataSnapshot)
table := []testCase{
{
name: "Kopia Assist And Merge All Files Changed",
baseBackups: func(base ManifestEntry) BackupBases {
baseBackups: func(base BackupBase) BackupBases {
return NewMockBackupBases().WithMergeBases(base)
},
collections: collections,
@ -994,7 +995,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
},
{
name: "Kopia Assist And Merge No Files Changed",
baseBackups: func(base ManifestEntry) BackupBases {
baseBackups: func(base BackupBase) BackupBases {
return NewMockBackupBases().WithMergeBases(base)
},
// Pass in empty collections to force a backup. Otherwise we'll skip
@ -1016,7 +1017,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
},
{
name: "Kopia Assist Only",
baseBackups: func(base ManifestEntry) BackupBases {
baseBackups: func(base BackupBase) BackupBases {
return NewMockBackupBases().WithAssistBases(base)
},
collections: collections,
@ -1029,7 +1030,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
},
{
name: "Merge Only",
baseBackups: func(base ManifestEntry) BackupBases {
baseBackups: func(base BackupBase) BackupBases {
return NewMockBackupBases().WithMergeBases(base).MockDisableAssistBases()
},
// Pass in empty collections to force a backup. Otherwise we'll skip
@ -1049,7 +1050,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
},
{
name: "Content Hash Only",
baseBackups: func(base ManifestEntry) BackupBases {
baseBackups: func(base BackupBase) BackupBases {
return NewMockBackupBases()
},
collections: collections,
@ -1265,8 +1266,8 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_NoDetailsForMeta() {
require.NoError(t, err, clues.ToCore(err))
prevSnaps.WithMergeBases(
ManifestEntry{
Manifest: snap,
BackupBase{
ItemDataSnapshot: snap,
Reasons: reasons,
})
})
@ -1777,8 +1778,8 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
suite.ctx,
[]identity.Reasoner{r},
NewMockBackupBases().WithMergeBases(
ManifestEntry{
Manifest: man,
BackupBase{
ItemDataSnapshot: man,
Reasons: []identity.Reasoner{r},
}),
test.cols(t),


@ -176,7 +176,7 @@ func verifyBackupInputs(sels selectors.Selector, cachedIDs []string) error {
func (ctrl *Controller) GetMetadataPaths(
ctx context.Context,
r kinject.RestoreProducer,
man kopia.ManifestEntry,
base kopia.BackupBase,
errs *fault.Bus,
) ([]path.RestorePaths, error) {
var (
@ -184,12 +184,12 @@ func (ctrl *Controller) GetMetadataPaths(
err error
)
for _, reason := range man.Reasons {
for _, reason := range base.Reasons {
filePaths := [][]string{}
switch true {
case reason.Service() == path.GroupsService && reason.Category() == path.LibrariesCategory:
filePaths, err = groups.MetadataFiles(ctx, reason, r, man.ID, errs)
filePaths, err = groups.MetadataFiles(ctx, reason, r, base.ItemDataSnapshot.ID, errs)
if err != nil {
return nil, err
}


@ -53,7 +53,7 @@ func (ctrl Controller) ProduceBackupCollections(
func (ctrl *Controller) GetMetadataPaths(
ctx context.Context,
r kinject.RestoreProducer,
man kopia.ManifestEntry,
base kopia.BackupBase,
errs *fault.Bus,
) ([]path.RestorePaths, error) {
return nil, clues.New("not implemented")


@ -604,7 +604,7 @@ func getNewPathRefs(
func mergeItemsFromBase(
ctx context.Context,
checkReason bool,
baseBackup kopia.BackupEntry,
baseBackup kopia.BackupBase,
detailsStore streamstore.Streamer,
dataFromBackup kopia.DetailsMergeInfoer,
deets *details.Builder,
@ -617,7 +617,7 @@ func mergeItemsFromBase(
)
// Can't be in the above block else it's counted as a redeclaration.
ctx = clues.Add(ctx, "base_backup_id", baseBackup.ID)
ctx = clues.Add(ctx, "base_backup_id", baseBackup.Backup.ID)
baseDeets, err := getDetailsFromBackup(
ctx,
@ -665,7 +665,7 @@ func mergeItemsFromBase(
dataFromBackup,
entry,
rr,
baseBackup.Version)
baseBackup.Backup.Version)
if err != nil {
return manifestAddedEntries,
clues.Wrap(err, "getting updated info for entry").WithClues(ictx)
@ -746,7 +746,7 @@ func mergeDetails(
// leaves us in a bit of a pickle if the user has run any concurrent backups
// with overlapping Reasons that turn into assist bases, but the modTime check
// in DetailsMergeInfoer should handle that.
for _, base := range bases.UniqueAssistBackups() {
for _, base := range bases.UniqueAssistBases() {
added, err := mergeItemsFromBase(
ctx,
false,
@ -771,7 +771,7 @@ func mergeDetails(
// We do want to enable matching entries based on Reasons because we
// explicitly control which subtrees from the merge base backup are grafted
// onto the hierarchy for the currently running backup.
for _, base := range bases.Backups() {
for _, base := range bases.MergeBases() {
added, err := mergeItemsFromBase(
ctx,
true,


@ -479,14 +479,16 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_ConsumeBackupDataCollections
}
bases = kopia.NewMockBackupBases().WithMergeBases(
kopia.ManifestEntry{
Manifest: manifest1,
kopia.BackupBase{
Backup: &backup.Backup{BaseModel: model.BaseModel{ID: "1"}},
ItemDataSnapshot: manifest1,
Reasons: []identity.Reasoner{
emailReason,
},
}).WithAssistBases(
kopia.ManifestEntry{
Manifest: manifest2,
kopia.BackupBase{
Backup: &backup.Backup{BaseModel: model.BaseModel{ID: "2"}},
ItemDataSnapshot: manifest2,
Reasons: []identity.Reasoner{
contactsReason,
},
@ -634,8 +636,8 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
table := []struct {
name string
populatedDetails map[string]*details.Details
inputBackups []kopia.BackupEntry
inputAssistBackups []kopia.BackupEntry
inputBackups []kopia.BackupBase
inputAssistBackups []kopia.BackupBase
mdm *mockDetailsMergeInfoer
errCheck assert.ErrorAssertionFunc
@ -662,7 +664,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res
}(),
inputBackups: []kopia.BackupEntry{
inputBackups: []kopia.BackupBase{
{
Backup: &backup.Backup{
BaseModel: model.BaseModel{
@ -686,7 +688,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res
}(),
inputBackups: []kopia.BackupEntry{
inputBackups: []kopia.BackupBase{
{
Backup: &backup1,
Reasons: []identity.Reasoner{
@ -713,7 +715,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res
}(),
inputBackups: []kopia.BackupEntry{
inputBackups: []kopia.BackupBase{
{
Backup: &backup1,
Reasons: []identity.Reasoner{
@ -769,7 +771,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res
}(),
inputBackups: []kopia.BackupEntry{
inputBackups: []kopia.BackupBase{
{
Backup: &backup1,
Reasons: []identity.Reasoner{
@ -796,7 +798,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res
}(),
inputBackups: []kopia.BackupEntry{
inputBackups: []kopia.BackupBase{
{
Backup: &backup1,
Reasons: []identity.Reasoner{
@ -826,7 +828,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res
}(),
inputBackups: []kopia.BackupEntry{
inputBackups: []kopia.BackupBase{
{
Backup: &backup1,
Reasons: []identity.Reasoner{
@ -856,7 +858,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res
}(),
inputBackups: []kopia.BackupEntry{
inputBackups: []kopia.BackupBase{
{
Backup: &backup1,
Reasons: []identity.Reasoner{
@ -886,7 +888,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res
}(),
inputBackups: []kopia.BackupEntry{
inputBackups: []kopia.BackupBase{
{
Backup: &backup1,
Reasons: []identity.Reasoner{
@ -917,7 +919,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res
}(),
inputBackups: []kopia.BackupEntry{
inputBackups: []kopia.BackupBase{
{
Backup: &backup1,
Reasons: []identity.Reasoner{
@ -948,7 +950,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res
}(),
inputBackups: []kopia.BackupEntry{
inputBackups: []kopia.BackupBase{
{
Backup: &backup1,
Reasons: []identity.Reasoner{
@ -996,7 +998,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res
}(),
inputBackups: []kopia.BackupEntry{
inputBackups: []kopia.BackupBase{
{
Backup: &backup1,
Reasons: []identity.Reasoner{
@ -1005,7 +1007,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
},
},
},
inputAssistBackups: []kopia.BackupEntry{
inputAssistBackups: []kopia.BackupBase{
{Backup: &backup2},
},
populatedDetails: map[string]*details.Details{
@ -1040,7 +1042,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res
}(),
inputBackups: []kopia.BackupEntry{
inputBackups: []kopia.BackupBase{
{
Backup: &backup1,
Reasons: []identity.Reasoner{
@ -1048,7 +1050,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
},
},
},
inputAssistBackups: []kopia.BackupEntry{
inputAssistBackups: []kopia.BackupBase{
{Backup: &backup2},
},
populatedDetails: map[string]*details.Details{
@ -1080,7 +1082,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res
}(),
inputAssistBackups: []kopia.BackupEntry{
inputAssistBackups: []kopia.BackupBase{
{Backup: &backup1},
{Backup: &backup2},
},
@ -1113,7 +1115,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res
}(),
inputAssistBackups: []kopia.BackupEntry{
inputAssistBackups: []kopia.BackupBase{
{Backup: &backup1},
{Backup: &backup2},
},
@ -1146,7 +1148,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res
}(),
inputAssistBackups: []kopia.BackupEntry{
inputAssistBackups: []kopia.BackupBase{
{Backup: &backup1},
{Backup: &backup2},
},
@ -1176,7 +1178,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
mdm: func() *mockDetailsMergeInfoer {
return newMockDetailsMergeInfoer()
}(),
inputAssistBackups: []kopia.BackupEntry{
inputAssistBackups: []kopia.BackupBase{
{Backup: &backup1},
},
populatedDetails: map[string]*details.Details{
@ -1205,8 +1207,8 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
writeStats := kopia.BackupStats{}
bb := kopia.NewMockBackupBases().
WithBackups(test.inputBackups...).
WithAssistBackups(test.inputAssistBackups...)
WithMergeBases(test.inputBackups...).
WithAssistBases(test.inputAssistBackups...)
err := mergeDetails(
ctx,
@ -1276,7 +1278,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsFolde
itemPath1.Service(),
itemPath1.Category())
backup1 = kopia.BackupEntry{
backup1 = kopia.BackupBase{
Backup: &backup.Backup{
BaseModel: model.BaseModel{
ID: "bid1",
@ -1300,7 +1302,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsFolde
// itemDetails.Exchange.Modified = now
populatedDetails := map[string]*details.Details{
backup1.DetailsID: {
backup1.Backup.DetailsID: {
DetailsModel: details.DetailsModel{
Entries: []details.Entry{*itemDetails},
},
@ -1335,7 +1337,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsFolde
err := mergeDetails(
ctx,
mds,
kopia.NewMockBackupBases().WithBackups(backup1),
kopia.NewMockBackupBases().WithMergeBases(backup1),
mdm,
&deets,
&writeStats,


@ -37,7 +37,7 @@ type (
GetMetadataPaths(
ctx context.Context,
r inject.RestoreProducer,
man kopia.ManifestEntry,
base kopia.BackupBase,
errs *fault.Bus,
) ([]path.RestorePaths, error)


@ -62,9 +62,9 @@ func (mbp *mockBackupProducer) Wait() *data.CollectionStats {
func (mbp mockBackupProducer) GetMetadataPaths(
ctx context.Context,
r kinject.RestoreProducer,
man kopia.ManifestEntry,
base kopia.BackupBase,
errs *fault.Bus,
) ([]path.RestorePaths, error) {
ctrl := m365.Controller{}
return ctrl.GetMetadataPaths(ctx, r, man, errs)
return ctrl.GetMetadataPaths(ctx, r, base, errs)
}


@ -92,8 +92,11 @@ func getManifestsAndMetadata(
return bb, nil, false, nil
}
for _, man := range bb.MergeBases() {
mctx := clues.Add(ctx, "manifest_id", man.ID)
for _, base := range bb.MergeBases() {
mctx := clues.Add(
ctx,
"base_item_data_snapshot_id", base.ItemDataSnapshot.ID,
"base_backup_id", base.Backup.ID)
// a local fault.Bus instance is used to collect metadata files here.
// we avoid the global fault.Bus because all failures here are ignorable,
@ -103,13 +106,18 @@ func getManifestsAndMetadata(
// spread around. Need to find more idiomatic handling.
fb := fault.New(true)
paths, err := bp.GetMetadataPaths(mctx, rp, man, fb)
paths, err := bp.GetMetadataPaths(mctx, rp, base, fb)
if err != nil {
LogFaultErrors(ctx, fb.Errors(), "collecting metadata paths")
return nil, nil, false, err
}
colls, err := rp.ProduceRestoreCollections(ctx, string(man.ID), paths, nil, fb)
colls, err := rp.ProduceRestoreCollections(
ctx,
string(base.ItemDataSnapshot.ID),
paths,
nil,
fb)
if err != nil {
// Restore is best-effort and we want to keep it that way since we want to
// return as much metadata as we can to reduce the work we'll need to do.


@ -3,6 +3,7 @@ package operations
import (
"bytes"
"context"
"fmt"
"io"
"testing"
@ -17,10 +18,8 @@ import (
"github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/m365"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/internal/operations/inject/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/identity"
"github.com/alcionai/corso/src/pkg/backup/metadata"
"github.com/alcionai/corso/src/pkg/fault"
@ -290,66 +289,34 @@ func (suite *OperationsManifestsUnitSuite) TestGetMetadataPaths() {
mr := mockRestoreProducer{err: test.expectErr, colls: test.preFetchCollection}
mr.buildRestoreFunc(t, test.manID, paths)
man := kopia.ManifestEntry{
Manifest: &snapshot.Manifest{ID: manifest.ID(test.manID)},
base := kopia.BackupBase{
ItemDataSnapshot: &snapshot.Manifest{ID: manifest.ID(test.manID)},
Reasons: test.reasons,
}
controller := m365.Controller{}
pths, err := controller.GetMetadataPaths(ctx, &mr, man, fault.New(true))
pths, err := controller.GetMetadataPaths(ctx, &mr, base, fault.New(true))
assert.ErrorIs(t, err, test.expectErr, clues.ToCore(err))
assert.ElementsMatch(t, test.restorePaths, pths, "restore paths")
})
}
}
func buildReasons(
tenant string,
ro string,
service path.ServiceType,
cats ...path.CategoryType,
) []identity.Reasoner {
var reasons []identity.Reasoner
for _, cat := range cats {
reasons = append(
reasons,
identity.NewReason(tenant, ro, service, cat))
}
return reasons
}
func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
const (
var (
ro = "resourceowner"
tid = "tenantid"
did = "detailsid"
emailReason = identity.NewReason(tid, ro, path.ExchangeService, path.EmailCategory)
baseBuilder = func(id int) *kopia.BackupBaseBuilder {
return kopia.NewBackupBaseBuilder("", id).
WithReasons(emailReason)
}
colID = func(id int) string {
return fmt.Sprintf("ID%d-item-data", id)
}
)
makeMan := func(id, incmpl string, cats ...path.CategoryType) kopia.ManifestEntry {
return kopia.ManifestEntry{
Manifest: &snapshot.Manifest{
ID: manifest.ID(id),
IncompleteReason: incmpl,
},
Reasons: buildReasons(tid, ro, path.ExchangeService, cats...),
}
}
makeBackup := func(snapID string, cats ...path.CategoryType) kopia.BackupEntry {
return kopia.BackupEntry{
Backup: &backup.Backup{
BaseModel: model.BaseModel{
ID: model.StableID(snapID + "bup"),
},
SnapshotID: snapID,
StreamStoreID: snapID + "store",
},
Reasons: buildReasons(tid, ro, path.ExchangeService, cats...),
}
}
table := []struct {
name string
bf *mockBackupFinder
@ -378,35 +345,28 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().
WithMergeBases(makeMan("id1", "", path.EmailCategory)).
WithBackups(makeBackup("id1", path.EmailCategory)),
WithMergeBases(baseBuilder(1).Build()),
},
},
rp: mockRestoreProducer{},
reasons: []identity.Reasoner{
identity.NewReason("", ro, path.ExchangeService, path.EmailCategory),
},
reasons: []identity.Reasoner{emailReason},
getMeta: false,
assertErr: assert.NoError,
assertB: assert.False,
expectDCS: nil,
expectMans: kopia.NewMockBackupBases().
WithMergeBases(makeMan("id1", "", path.EmailCategory)).
WithBackups(makeBackup("id1", path.EmailCategory)).
MockDisableMergeBases(),
WithMergeBases(baseBuilder(1).Build()).MockDisableMergeBases(),
},
{
name: "don't get metadata, incomplete manifest",
name: "don't get metadata, assist base",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithAssistBases(
makeMan("id1", "checkpoint", path.EmailCategory)),
ro: kopia.NewMockBackupBases().
WithAssistBases(baseBuilder(1).MarkAssistBase().Build()),
},
},
rp: mockRestoreProducer{},
reasons: []identity.Reasoner{
identity.NewReason("", ro, path.ExchangeService, path.EmailCategory),
},
reasons: []identity.Reasoner{emailReason},
getMeta: true,
assertErr: assert.NoError,
// Doesn't matter if it's true or false as merge/assist bases are
@ -414,30 +374,32 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
// flag to kopia and just pass it the bases instead.
assertB: assert.True,
expectDCS: nil,
expectMans: kopia.NewMockBackupBases().WithAssistBases(
makeMan("id1", "checkpoint", path.EmailCategory)),
expectMans: kopia.NewMockBackupBases().
WithAssistBases(baseBuilder(1).MarkAssistBase().Build()),
},
{
name: "one valid man, multiple reasons",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan("id1", "", path.EmailCategory, path.ContactsCategory)),
ro: kopia.NewMockBackupBases().
WithMergeBases(baseBuilder(1).AppendReasons(
identity.NewReason(tid, ro, path.ExchangeService, path.ContactsCategory)).
Build()),
},
},
rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{
"id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}},
colID(1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID(1)}}},
},
},
reasons: []identity.Reasoner{
identity.NewReason("", ro, path.ExchangeService, path.EmailCategory),
identity.NewReason("", ro, path.ExchangeService, path.ContactsCategory),
emailReason,
identity.NewReason(tid, ro, path.ExchangeService, path.ContactsCategory),
},
getMeta: true,
assertErr: assert.NoError,
assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}},
expectDCS: []mockColl{{id: colID(1)}},
expectPaths: func(t *testing.T, gotPaths []path.Path) {
for _, p := range gotPaths {
assert.Equal(
@ -456,60 +418,58 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
"read data category doesn't match a given reason")
}
},
expectMans: kopia.NewMockBackupBases().WithMergeBases(
makeMan("id1", "", path.EmailCategory, path.ContactsCategory)),
expectMans: kopia.NewMockBackupBases().
WithMergeBases(baseBuilder(1).AppendReasons(
identity.NewReason(tid, ro, path.ExchangeService, path.ContactsCategory)).
Build()),
},
{
name: "one valid man, extra incomplete man",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().
WithMergeBases(makeMan("id1", "", path.EmailCategory)).
WithAssistBases(makeMan("id2", "checkpoint", path.EmailCategory)),
WithMergeBases(baseBuilder(1).Build()).
WithAssistBases(baseBuilder(2).MarkAssistBase().Build()),
},
},
rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{
"id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}},
"id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id2"}}},
colID(1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID(1)}}},
colID(2): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID(2)}}},
},
},
reasons: []identity.Reasoner{
identity.NewReason("", ro, path.ExchangeService, path.EmailCategory),
},
reasons: []identity.Reasoner{emailReason},
getMeta: true,
assertErr: assert.NoError,
assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}},
expectDCS: []mockColl{{id: colID(1)}},
expectMans: kopia.NewMockBackupBases().
WithMergeBases(makeMan("id1", "", path.EmailCategory)).
WithAssistBases(makeMan("id2", "checkpoint", path.EmailCategory)),
WithMergeBases(baseBuilder(1).Build()).
WithAssistBases(baseBuilder(2).MarkAssistBase().Build()),
},
{
name: "one valid man, extra incomplete man, drop assist bases",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().
WithMergeBases(makeMan("id1", "", path.EmailCategory)).
WithAssistBases(makeMan("id2", "checkpoint", path.EmailCategory)),
WithMergeBases(baseBuilder(1).Build()).
WithAssistBases(baseBuilder(2).MarkAssistBase().Build()),
},
},
rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{
"id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}},
"id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id2"}}},
colID(1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID(1)}}},
colID(2): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID(2)}}},
},
},
reasons: []identity.Reasoner{
identity.NewReason("", ro, path.ExchangeService, path.EmailCategory),
},
reasons: []identity.Reasoner{emailReason},
getMeta: true,
dropAssist: true,
assertErr: assert.NoError,
assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}},
expectDCS: []mockColl{{id: colID(1)}},
expectMans: kopia.NewMockBackupBases().
WithMergeBases(makeMan("id1", "", path.EmailCategory)).
WithMergeBases(baseBuilder(1).Build()).
MockDisableAssistBases(),
},
{
@ -517,39 +477,44 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan("id1", "", path.EmailCategory),
makeMan("id2", "", path.EmailCategory)),
baseBuilder(1).Build(),
baseBuilder(2).
WithReasons(
identity.NewReason(tid, ro, path.ExchangeService, path.EventsCategory)).
Build()),
},
},
rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{
"id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}},
"id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id2"}}},
colID(1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID(1)}}},
colID(2): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID(2)}}},
},
},
reasons: []identity.Reasoner{
identity.NewReason("", ro, path.ExchangeService, path.EmailCategory),
emailReason,
identity.NewReason(tid, ro, path.ExchangeService, path.EventsCategory),
},
getMeta: true,
assertErr: assert.NoError,
assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}, {id: "id2"}},
expectDCS: []mockColl{{id: colID(1)}, {id: colID(2)}},
expectMans: kopia.NewMockBackupBases().WithMergeBases(
makeMan("id1", "", path.EmailCategory),
makeMan("id2", "", path.EmailCategory)),
baseBuilder(1).Build(),
baseBuilder(2).
WithReasons(
identity.NewReason(tid, ro, path.ExchangeService, path.EventsCategory)).
Build()),
},
{
name: "error collecting metadata",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().
WithMergeBases(makeMan("id1", "", path.EmailCategory)),
WithMergeBases(baseBuilder(1).Build()),
},
},
rp: mockRestoreProducer{err: assert.AnError},
reasons: []identity.Reasoner{
identity.NewReason("", ro, path.ExchangeService, path.EmailCategory),
},
reasons: []identity.Reasoner{emailReason},
getMeta: true,
assertErr: assert.Error,
assertB: assert.False,
@ -621,54 +586,36 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
}
func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_FallbackReasons() {
const (
var (
ro = "resourceowner"
fbro = "fb_resourceowner"
tid = "tenantid"
did = "detailsid"
)
makeMan := func(ro, id, incmpl string, cats ...path.CategoryType) kopia.ManifestEntry {
return kopia.ManifestEntry{
Manifest: &snapshot.Manifest{
ID: manifest.ID(id),
IncompleteReason: incmpl,
Tags: map[string]string{"tag:" + kopia.TagBackupID: id + "bup"},
},
Reasons: buildReasons(tid, ro, path.ExchangeService, cats...),
}
}
makeBackupBase := func(ro, snapID, incmpl string, cats ...path.CategoryType) kopia.BackupBase {
return kopia.BackupBase{
Backup: &backup.Backup{
BaseModel: model.BaseModel{
ID: model.StableID(snapID + "bup"),
},
SnapshotID: snapID,
StreamStoreID: snapID + "store",
},
ItemDataSnapshot: &snapshot.Manifest{
ID: manifest.ID(snapID),
IncompleteReason: incmpl,
Tags: map[string]string{"tag:" + kopia.TagBackupID: snapID + "bup"},
},
Reasons: buildReasons(tid, ro, path.ExchangeService, cats...),
}
}
emailReason := identity.NewReason(
"",
emailReason = identity.NewReason(
tid,
ro,
path.ExchangeService,
path.EmailCategory)
fbEmailReason := identity.NewReason(
"",
fbEmailReason = identity.NewReason(
tid,
fbro,
path.ExchangeService,
path.EmailCategory)
baseBuilder = func(id int) *kopia.BackupBaseBuilder {
return kopia.NewBackupBaseBuilder("", id).
WithReasons(emailReason)
}
fbBaseBuilder = func(id int) *kopia.BackupBaseBuilder {
return kopia.NewBackupBaseBuilder("fb", id).
WithReasons(fbEmailReason)
}
colID = func(prefix string, id int) string {
return fmt.Sprintf("%sID%d-item-data", prefix, id)
}
)
table := []struct {
name string
bf *mockBackupFinder
@ -687,7 +634,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
fbro: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)),
WithMergeBases(fbBaseBuilder(1).Build()),
},
},
rp: mockRestoreProducer{},
@ -697,7 +644,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
assertB: assert.False,
expectDCS: nil,
expectMans: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)).
WithMergeBases(fbBaseBuilder(1).Build()).
MockDisableMergeBases(),
},
{
@ -705,33 +652,33 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
fbro: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)),
WithMergeBases(fbBaseBuilder(1).Build()),
},
},
rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{
"fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}},
colID("fb", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 1)}}},
},
},
fallbackReasons: []identity.Reasoner{fbEmailReason},
getMeta: true,
assertErr: assert.NoError,
assertB: assert.True,
expectDCS: []mockColl{{id: "fb_id1"}},
expectDCS: []mockColl{{id: colID("fb", 1)}},
expectMans: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)),
WithMergeBases(fbBaseBuilder(1).Build()),
},
{
name: "only fallbacks, drop assist",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
fbro: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)),
WithMergeBases(fbBaseBuilder(1).Build()),
},
},
rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{
"fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}},
colID("fb", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 1)}}},
},
},
fallbackReasons: []identity.Reasoner{fbEmailReason},
@ -739,9 +686,9 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
dropAssist: true,
assertErr: assert.NoError,
assertB: assert.True,
expectDCS: []mockColl{{id: "fb_id1"}},
expectDCS: []mockColl{{id: colID("fb", 1)}},
expectMans: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)).
WithMergeBases(fbBaseBuilder(1).Build()).
MockDisableAssistBases(),
},
{
@ -749,15 +696,15 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().
WithMergeBases(makeMan(ro, "id1", "", path.EmailCategory)),
WithMergeBases(baseBuilder(1).Build()),
fbro: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)),
WithMergeBases(fbBaseBuilder(1).Build()),
},
},
rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{
"id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}},
"fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}},
colID("", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("", 1)}}},
colID("fb", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 1)}}},
},
},
reasons: []identity.Reasoner{emailReason},
@ -765,24 +712,25 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
getMeta: true,
assertErr: assert.NoError,
assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}},
expectMans: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory)),
expectDCS: []mockColl{{id: colID("", 1)}},
expectMans: kopia.NewMockBackupBases().
WithMergeBases(baseBuilder(1).Build()),
},
{
name: "incomplete mans and fallbacks",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithAssistBases(
makeMan(ro, "id2", "checkpoint", path.EmailCategory)),
fbro: kopia.NewMockBackupBases().WithAssistBases(
makeMan(fbro, "fb_id2", "checkpoint", path.EmailCategory)),
ro: kopia.NewMockBackupBases().
WithAssistBases(
baseBuilder(2).MarkAssistBase().Build()),
fbro: kopia.NewMockBackupBases().
WithAssistBases(fbBaseBuilder(2).MarkAssistBase().Build()),
},
},
rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{
"id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id2"}}},
"fb_id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id2"}}},
colID("", 2): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("", 2)}}},
colID("fb", 2): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 2)}}},
},
},
reasons: []identity.Reasoner{emailReason},
@ -791,27 +739,27 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
assertErr: assert.NoError,
assertB: assert.True,
expectDCS: nil,
expectMans: kopia.NewMockBackupBases().WithAssistBases(
makeMan(ro, "id2", "checkpoint", path.EmailCategory)),
expectMans: kopia.NewMockBackupBases().
WithAssistBases(baseBuilder(2).MarkAssistBase().Build()),
},
{
name: "complete and incomplete mans and fallbacks",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().
WithMergeBases(makeMan(ro, "id1", "", path.EmailCategory)).
WithAssistBases(makeMan(ro, "id2", "checkpoint", path.EmailCategory)),
WithMergeBases(baseBuilder(1).Build()).
WithAssistBases(baseBuilder(2).MarkAssistBase().Build()),
fbro: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)).
WithAssistBases(makeMan(fbro, "fb_id2", "checkpoint", path.EmailCategory)),
WithMergeBases(fbBaseBuilder(1).Build()).
WithAssistBases(fbBaseBuilder(2).MarkAssistBase().Build()),
},
},
rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{
"id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}},
"id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id2"}}},
"fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}},
"fb_id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id2"}}},
colID("", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("", 1)}}},
colID("", 2): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("", 2)}}},
colID("fb", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 1)}}},
colID("fb", 2): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 2)}}},
},
},
reasons: []identity.Reasoner{emailReason},
@ -819,25 +767,26 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
getMeta: true,
assertErr: assert.NoError,
assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}},
expectDCS: []mockColl{{id: colID("", 1)}},
expectMans: kopia.NewMockBackupBases().
WithMergeBases(makeMan(ro, "id1", "", path.EmailCategory)).
WithAssistBases(makeMan(ro, "id2", "checkpoint", path.EmailCategory)),
WithMergeBases(baseBuilder(1).Build()).
WithAssistBases(baseBuilder(2).MarkAssistBase().Build()),
},
{
name: "incomplete mans and complete fallbacks",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithAssistBases(
makeMan(ro, "id2", "checkpoint", path.EmailCategory)),
ro: kopia.NewMockBackupBases().
WithAssistBases(
baseBuilder(2).MarkAssistBase().Build()),
fbro: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)),
WithMergeBases(fbBaseBuilder(1).Build()),
},
},
rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{
"id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id2"}}},
"fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}},
colID("", 2): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("", 2)}}},
colID("fb", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 1)}}},
},
},
reasons: []identity.Reasoner{emailReason},
@ -845,25 +794,25 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
getMeta: true,
assertErr: assert.NoError,
assertB: assert.True,
expectDCS: []mockColl{{id: "fb_id1"}},
expectDCS: []mockColl{{id: colID("fb", 1)}},
expectMans: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)).
WithAssistBases(makeMan(ro, "id2", "checkpoint", path.EmailCategory)),
WithMergeBases(fbBaseBuilder(1).Build()).
WithAssistBases(baseBuilder(2).MarkAssistBase().Build()),
},
{
name: "incomplete mans and complete fallbacks, no assist bases",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithAssistBases(
makeMan(ro, "id2", "checkpoint", path.EmailCategory)),
ro: kopia.NewMockBackupBases().
WithAssistBases(baseBuilder(2).MarkAssistBase().Build()),
fbro: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)),
WithMergeBases(fbBaseBuilder(1).Build()),
},
},
rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{
"id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id2"}}},
"fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}},
colID("", 2): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("", 2)}}},
colID("fb", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 1)}}},
},
},
reasons: []identity.Reasoner{emailReason},
@ -872,25 +821,25 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
dropAssist: true,
assertErr: assert.NoError,
assertB: assert.True,
expectDCS: []mockColl{{id: "fb_id1"}},
expectDCS: []mockColl{{id: colID("fb", 1)}},
expectMans: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory)).
WithMergeBases(fbBaseBuilder(1).Build()).
MockDisableAssistBases(),
},
{
name: "complete mans and incomplete fallbacks",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory)),
fbro: kopia.NewMockBackupBases().WithAssistBases(
makeMan(fbro, "fb_id2", "checkpoint", path.EmailCategory)),
ro: kopia.NewMockBackupBases().
WithMergeBases(baseBuilder(1).Build()),
fbro: kopia.NewMockBackupBases().
WithAssistBases(fbBaseBuilder(2).MarkAssistBase().Build()),
},
},
rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{
"id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}},
"fb_id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id2"}}},
colID("", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("", 1)}}},
colID("fb", 2): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 2)}}},
},
},
reasons: []identity.Reasoner{emailReason},
@ -898,100 +847,114 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
getMeta: true,
assertErr: assert.NoError,
assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}},
expectMans: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory)),
expectDCS: []mockColl{{id: colID("", 1)}},
expectMans: kopia.NewMockBackupBases().
WithMergeBases(baseBuilder(1).Build()),
},
{
name: "complete mans and complete fallbacks, multiple reasons",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory, path.ContactsCategory)),
fbro: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory, path.ContactsCategory)),
baseBuilder(1).
AppendReasons(identity.NewReason(tid, ro, path.ExchangeService, path.ContactsCategory)).
Build()),
fbro: kopia.NewMockBackupBases().WithMergeBases(
fbBaseBuilder(1).
AppendReasons(identity.NewReason(tid, fbro, path.ExchangeService, path.ContactsCategory)).
Build()),
},
},
rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{
"id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}},
"fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}},
colID("", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("", 1)}}},
colID("fb", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 1)}}},
},
},
reasons: []identity.Reasoner{
emailReason,
identity.NewReason("", ro, path.ExchangeService, path.ContactsCategory),
identity.NewReason(tid, ro, path.ExchangeService, path.ContactsCategory),
},
fallbackReasons: []identity.Reasoner{
fbEmailReason,
identity.NewReason("", fbro, path.ExchangeService, path.ContactsCategory),
identity.NewReason(tid, fbro, path.ExchangeService, path.ContactsCategory),
},
getMeta: true,
assertErr: assert.NoError,
assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}},
expectDCS: []mockColl{{id: colID("", 1)}},
expectMans: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory, path.ContactsCategory)),
baseBuilder(1).
AppendReasons(identity.NewReason(tid, ro, path.ExchangeService, path.ContactsCategory)).
Build()),
},
{
name: "complete mans and complete fallbacks, distinct reasons",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory)),
fbro: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.ContactsCategory)),
ro: kopia.NewMockBackupBases().
WithMergeBases(baseBuilder(1).Build()),
fbro: kopia.NewMockBackupBases().WithMergeBases(
fbBaseBuilder(1).
WithReasons(identity.NewReason(tid, fbro, path.ExchangeService, path.ContactsCategory)).
Build()),
},
},
rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{
"id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}},
"fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}},
colID("", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("", 1)}}},
colID("fb", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 1)}}},
},
},
reasons: []identity.Reasoner{emailReason},
fallbackReasons: []identity.Reasoner{
identity.NewReason("", fbro, path.ExchangeService, path.ContactsCategory),
identity.NewReason(tid, fbro, path.ExchangeService, path.ContactsCategory),
},
getMeta: true,
assertErr: assert.NoError,
assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}, {id: "fb_id1"}},
expectMans: kopia.NewMockBackupBases().
WithMergeBases(makeMan(ro, "id1", "", path.EmailCategory)).
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.ContactsCategory)),
expectDCS: []mockColl{{id: colID("", 1)}, {id: colID("fb", 1)}},
expectMans: kopia.NewMockBackupBases().WithMergeBases(
baseBuilder(1).Build(),
fbBaseBuilder(1).
WithReasons(identity.NewReason(tid, fbro, path.ExchangeService, path.ContactsCategory)).
Build()),
},
{
name: "complete mans and complete fallbacks, fallback has superset of reasons",
bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory)),
fbro: kopia.NewMockBackupBases().
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.EmailCategory, path.ContactsCategory)),
ro: kopia.NewMockBackupBases().
WithMergeBases(baseBuilder(1).Build()),
fbro: kopia.NewMockBackupBases().WithMergeBases(
fbBaseBuilder(1).
AppendReasons(identity.NewReason(tid, fbro, path.ExchangeService, path.ContactsCategory)).
Build()),
},
},
rp: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{
"id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}},
"fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}},
colID("", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("", 1)}}},
colID("fb", 1): {data.NoFetchRestoreCollection{Collection: mockColl{id: colID("fb", 1)}}},
},
},
reasons: []identity.Reasoner{
emailReason,
identity.NewReason("", ro, path.ExchangeService, path.ContactsCategory),
identity.NewReason(tid, ro, path.ExchangeService, path.ContactsCategory),
},
fallbackReasons: []identity.Reasoner{
fbEmailReason,
identity.NewReason("", fbro, path.ExchangeService, path.ContactsCategory),
identity.NewReason(tid, fbro, path.ExchangeService, path.ContactsCategory),
},
getMeta: true,
assertErr: assert.NoError,
assertB: assert.True,
expectDCS: []mockColl{{id: "id1"}, {id: "fb_id1"}},
expectMans: kopia.NewMockBackupBases().
WithMergeBases(makeMan(ro, "id1", "", path.EmailCategory)).
NewWithMergeBases(makeBackupBase(fbro, "fb_id1", "", path.ContactsCategory)),
expectDCS: []mockColl{{id: colID("", 1)}, {id: colID("fb", 1)}},
expectMans: kopia.NewMockBackupBases().WithMergeBases(
baseBuilder(1).Build(),
fbBaseBuilder(1).
WithReasons(identity.NewReason(tid, fbro, path.ExchangeService, path.ContactsCategory)).
Build()),
},
}
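For reference, the new helpers compose roughly as follows. This is a sketch that reuses identifiers declared in this test and assumes the kopia.BackupBaseBuilder and MockBackupBases signatures inferred from the call sites above; it is not an excerpt of the mock package.

// Sketch (assumed signatures): build an assist base for the fallback owner
// and register it on a mock BackupBases next to a merge base.
assistBase := fbBaseBuilder(2).
	MarkAssistBase().
	Build()

bases := kopia.NewMockBackupBases().
	WithMergeBases(fbBaseBuilder(1).Build()).
	WithAssistBases(assistBase)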

View File

@ -274,8 +274,8 @@ func checkBackupIsInManifests(
mans := bf.FindBases(ctx, []identity.Reasoner{r}, tags)
for _, man := range mans.MergeBases() {
bID, ok := man.GetTag(kopia.TagBackupID)
if !assert.Truef(t, ok, "snapshot manifest %s missing backup ID tag", man.ID) {
bID, ok := man.GetSnapshotTag(kopia.TagBackupID)
if !assert.Truef(t, ok, "snapshot manifest %s missing backup ID tag", man.ItemDataSnapshot.ID) {
continue
}
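The GetSnapshotTag call above replaces the old GetTag lookup on the raw snapshot manifest. A plausible shape for the accessor, assuming it simply forwards to the item-data snapshot's tag map using the same "tag:" prefix the test fixtures apply (an assumption, not the actual implementation), is:

// Hypothetical sketch of the BackupBase accessor used above.
func (bb BackupBase) GetSnapshotTag(key string) (string, bool) {
	v, ok := bb.ItemDataSnapshot.Tags["tag:"+key]
	return v, ok
}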