Use prefix matcher when merging backup details (#3055)

Store all locations in the prefix matcher and then look them up when merging
details.

This is an intermediate step to get things set up for having OneDrive
locations available during merging.
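As a rough sketch of the flow this change sets up (this standalone program is illustrative only and not part of the PR; the tenant, user, and folder names are invented):

```go
package main

import (
	"fmt"

	"github.com/alcionai/corso/src/internal/kopia"
	"github.com/alcionai/corso/src/pkg/path"
)

func main() {
	// Previous RepoRef of a collection that was renamed during this backup.
	prevRef, err := path.Build(
		"a-tenant", "a-user",
		path.ExchangeService, path.EmailCategory,
		false,
		"old-folder")
	if err != nil {
		panic(err)
	}

	// 1) While inflating the collection tree, record the collection's new
	// location under its previous RepoRef.
	updatedLocations := kopia.NewLocationPrefixMatcher()
	if err := updatedLocations.Add(prevRef, path.Builder{}.Append("new-folder")); err != nil {
		panic(err)
	}

	// 2) While merging backup details, look the location back up by the old
	// RepoRef to fix up entries carried forward from the base backup.
	if loc := updatedLocations.LongestPrefix(prevRef.String()); loc != nil {
		fmt.Println("merged entries now live at:", loc.String())
	}
}
```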

---

#### Does this PR need a docs update or release note?

- [ ] Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x] No

#### Type of change

- [x] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [ ] 🧹 Tech Debt/Cleanup

#### Issue(s)

* #2486

#### Test Plan

- [x] 💪 Manual
- [x] Unit test
- [ ] 💚 E2E
Authored by ashmrtn on 2023-04-10 20:23:13 -07:00 (committed via GitHub)
parent 7c9eada5a9
commit 2ebab1a78b
11 changed files with 509 additions and 54 deletions

View File

```diff
@@ -101,8 +101,16 @@ func NewMockContactCollection(pathRepresentation path.Path, numMessagesToReturn
 	return c
 }
 
 func (medc MockExchangeDataCollection) FullPath() path.Path { return medc.fullPath }
-func (medc MockExchangeDataCollection) LocationPath() path.Path { return medc.LocPath }
+func (medc MockExchangeDataCollection) LocationPath() *path.Builder {
+	if medc.LocPath == nil {
+		return nil
+	}
+
+	return path.Builder{}.Append(medc.LocPath.Folders()...)
+}
 func (medc MockExchangeDataCollection) PreviousPath() path.Path { return medc.PrevPath }
 func (medc MockExchangeDataCollection) State() data.CollectionState { return medc.ColState }
 func (medc MockExchangeDataCollection) DoNotMergeItems() bool { return medc.DoNotMerge }
```

View File

@@ -0,0 +1,41 @@

```go
package kopia

import (
	"github.com/alcionai/clues"

	"github.com/alcionai/corso/src/internal/common/prefixmatcher"
	"github.com/alcionai/corso/src/pkg/path"
)

type LocationPrefixMatcher struct {
	m prefixmatcher.Matcher[*path.Builder]
}

func (m *LocationPrefixMatcher) Add(oldRef path.Path, newLoc *path.Builder) error {
	if _, ok := m.m.Get(oldRef.String()); ok {
		return clues.New("RepoRef already in matcher").With("repo_ref", oldRef)
	}

	m.m.Add(oldRef.String(), newLoc)

	return nil
}

func (m *LocationPrefixMatcher) LongestPrefix(oldRef string) *path.Builder {
	if m == nil {
		return nil
	}

	k, v, _ := m.m.LongestPrefix(oldRef)
	if k != oldRef {
		// For now we only want to allow exact matches because this is only enabled
		// for Exchange at the moment.
		return nil
	}

	return v
}

func NewLocationPrefixMatcher() *LocationPrefixMatcher {
	return &LocationPrefixMatcher{m: prefixmatcher.NewMatcher[*path.Builder]()}
}
```
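Two guards in `LongestPrefix` above are worth calling out: the nil-receiver check means an unpopulated matcher can be queried safely, and the exact-match check deliberately discards prefix-only hits while this feature is Exchange-only. A hedged illustration (standalone program and folder names invented for the example):

```go
package main

import (
	"fmt"

	"github.com/alcionai/corso/src/internal/kopia"
	"github.com/alcionai/corso/src/pkg/path"
)

func main() {
	// Nil-receiver safety: a matcher that was never built still answers.
	var empty *kopia.LocationPrefixMatcher
	fmt.Println(empty.LongestPrefix("any/repo/ref") == nil) // true

	parent, err := path.Build(
		"a-tenant", "a-user", path.ExchangeService, path.EmailCategory,
		false, "parent")
	if err != nil {
		panic(err)
	}

	child, err := path.Build(
		"a-tenant", "a-user", path.ExchangeService, path.EmailCategory,
		false, "parent", "child")
	if err != nil {
		panic(err)
	}

	lpm := kopia.NewLocationPrefixMatcher()
	if err := lpm.Add(parent, path.Builder{}.Append("parent")); err != nil {
		panic(err)
	}

	// Exact key: returns the stored location.
	fmt.Println(lpm.LongestPrefix(parent.String()))

	// Prefix-only key: the underlying prefix lookup would find "parent", but
	// the exact-match guard discards it, so callers see nil for now.
	fmt.Println(lpm.LongestPrefix(child.String()) == nil) // true
}
```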

View File

@@ -0,0 +1,154 @@

```go
package kopia_test

import (
	"testing"

	"github.com/alcionai/clues"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"github.com/stretchr/testify/suite"

	"github.com/alcionai/corso/src/internal/kopia"
	"github.com/alcionai/corso/src/internal/tester"
	"github.com/alcionai/corso/src/pkg/path"
)

var (
	testTenant = "a-tenant"
	testUser   = "a-user"
	service    = path.ExchangeService
	category   = path.EmailCategory
)

type LocationPrefixMatcherUnitSuite struct {
	tester.Suite
}

func makePath(
	t *testing.T,
	service path.ServiceType,
	category path.CategoryType,
	tenant, user string,
	folders []string,
) path.Path {
	p, err := path.Build(tenant, user, service, category, false, folders...)
	require.NoError(t, err, clues.ToCore(err))

	return p
}

func TestLocationPrefixMatcherUnitSuite(t *testing.T) {
	suite.Run(t, &LocationPrefixMatcherUnitSuite{Suite: tester.NewUnitSuite(t)})
}

type inputData struct {
	repoRef path.Path
	locRef  *path.Builder
}

func (suite *LocationPrefixMatcherUnitSuite) TestAdd_Twice_Fails() {
	t := suite.T()
	p := makePath(
		t,
		service,
		category,
		testTenant,
		testUser,
		[]string{"folder1"})
	loc1 := path.Builder{}.Append("folder1")
	loc2 := path.Builder{}.Append("folder2")

	lpm := kopia.NewLocationPrefixMatcher()

	err := lpm.Add(p, loc1)
	require.NoError(t, err, clues.ToCore(err))

	err = lpm.Add(p, loc2)
	assert.Error(t, err, clues.ToCore(err))
}

func (suite *LocationPrefixMatcherUnitSuite) TestAdd_And_Match() {
	p1 := makePath(
		suite.T(),
		service,
		category,
		testTenant,
		testUser,
		[]string{"folder1"})
	p1Parent, err := p1.Dir()
	require.NoError(suite.T(), err, clues.ToCore(err))

	p2 := makePath(
		suite.T(),
		service,
		category,
		testTenant,
		testUser,
		[]string{"folder2"})
	loc1 := path.Builder{}.Append("folder1")

	table := []struct {
		name      string
		inputs    []inputData
		searchKey string
		check     require.ValueAssertionFunc
		expected  *path.Builder
	}{
		{
			name: "Exact Match",
			inputs: []inputData{
				{
					repoRef: p1,
					locRef:  loc1,
				},
			},
			searchKey: p1.String(),
			check:     require.NotNil,
			expected:  loc1,
		},
		{
			name: "No Match",
			inputs: []inputData{
				{
					repoRef: p1,
					locRef:  loc1,
				},
			},
			searchKey: p2.String(),
			check:     require.Nil,
		},
		{
			name: "No Prefix Match",
			inputs: []inputData{
				{
					repoRef: p1Parent,
					locRef:  loc1,
				},
			},
			searchKey: p1.String(),
			check:     require.Nil,
		},
	}

	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()
			lpm := kopia.NewLocationPrefixMatcher()

			for _, input := range test.inputs {
				err := lpm.Add(input.repoRef, input.locRef)
				require.NoError(t, err, clues.ToCore(err))
			}

			loc := lpm.LongestPrefix(test.searchKey)
			test.check(t, loc)

			if loc == nil {
				return
			}

			assert.Equal(t, test.expected.String(), loc.String())
		})
	}
}
```

View File

```diff
@@ -711,7 +711,7 @@ func getTreeNode(roots map[string]*treeMap, pathElements []string) *treeMap {
 func inflateCollectionTree(
 	ctx context.Context,
 	collections []data.BackupCollection,
-) (map[string]*treeMap, map[string]path.Path, error) {
+) (map[string]*treeMap, map[string]path.Path, *LocationPrefixMatcher, error) {
 	roots := make(map[string]*treeMap)
 	// Contains the old path for collections that have been moved or renamed.
 	// Allows resolving what the new path should be when walking the base
@@ -720,18 +720,28 @@ func inflateCollectionTree(
 	// Temporary variable just to track the things that have been marked as
 	// changed while keeping a reference to their path.
 	changedPaths := []path.Path{}
+	// updatedLocations maps from the collection's RepoRef to the updated
+	// location path for all moved collections. New collections aren't tracked
+	// because we will have their location explicitly. This is used by the
+	// backup details merge code to update locations for items in nested
+	// folders that got moved when the top-level folder got moved. The nested
+	// folder may not generate a delta result but will need the location
+	// updated.
+	updatedLocations := NewLocationPrefixMatcher()
 
 	for _, s := range collections {
 		switch s.State() {
 		case data.DeletedState:
 			if s.PreviousPath() == nil {
-				return nil, nil, clues.New("nil previous path on deleted collection")
+				return nil, nil, nil, clues.New("nil previous path on deleted collection")
 			}
 
 			changedPaths = append(changedPaths, s.PreviousPath())
 
 			if _, ok := updatedPaths[s.PreviousPath().String()]; ok {
-				return nil, nil, clues.New("multiple previous state changes to collection").
+				return nil, nil, nil, clues.New("multiple previous state changes to collection").
 					With("collection_previous_path", s.PreviousPath())
 			}
@@ -743,26 +753,35 @@ func inflateCollectionTree(
 			changedPaths = append(changedPaths, s.PreviousPath())
 
 			if _, ok := updatedPaths[s.PreviousPath().String()]; ok {
-				return nil, nil, clues.New("multiple previous state changes to collection").
+				return nil, nil, nil, clues.New("multiple previous state changes to collection").
 					With("collection_previous_path", s.PreviousPath())
 			}
 
 			updatedPaths[s.PreviousPath().String()] = s.FullPath()
 		}
 
+		// TODO(ashmrtn): Get old location ref and add it to the prefix matcher.
+		lp, ok := s.(data.LocationPather)
+		if ok && s.PreviousPath() != nil {
+			if err := updatedLocations.Add(s.PreviousPath(), lp.LocationPath()); err != nil {
+				return nil, nil, nil, clues.Wrap(err, "building updated location set").
+					With("collection_location", lp.LocationPath())
+			}
+		}
+
 		if s.FullPath() == nil || len(s.FullPath().Elements()) == 0 {
-			return nil, nil, clues.New("no identifier for collection")
+			return nil, nil, nil, clues.New("no identifier for collection")
 		}
 
 		node := getTreeNode(roots, s.FullPath().Elements())
 		if node == nil {
-			return nil, nil, clues.New("getting tree node").With("collection_full_path", s.FullPath())
+			return nil, nil, nil, clues.New("getting tree node").With("collection_full_path", s.FullPath())
 		}
 
 		// Make sure there's only a single collection adding items for any given
 		// path in the new hierarchy.
 		if node.collection != nil {
-			return nil, nil, clues.New("multiple instances of collection").With("collection_full_path", s.FullPath())
+			return nil, nil, nil, clues.New("multiple instances of collection").With("collection_full_path", s.FullPath())
 		}
 
 		node.collection = s
@@ -780,11 +799,11 @@ func inflateCollectionTree(
 		}
 
 		if node.collection != nil && node.collection.State() == data.NotMovedState {
-			return nil, nil, clues.New("conflicting states for collection").With("changed_path", p)
+			return nil, nil, nil, clues.New("conflicting states for collection").With("changed_path", p)
 		}
 	}
 
-	return roots, updatedPaths, nil
+	return roots, updatedPaths, updatedLocations, nil
 }
 
 // traverseBaseDir is an unoptimized function that reads items in a directory
@@ -1015,10 +1034,10 @@ func inflateDirTree(
 	collections []data.BackupCollection,
 	globalExcludeSet map[string]map[string]struct{},
 	progress *corsoProgress,
-) (fs.Directory, error) {
+) (fs.Directory, *LocationPrefixMatcher, error) {
-	roots, updatedPaths, err := inflateCollectionTree(ctx, collections)
+	roots, updatedPaths, updatedLocations, err := inflateCollectionTree(ctx, collections)
 	if err != nil {
-		return nil, clues.Wrap(err, "inflating collection tree")
+		return nil, nil, clues.Wrap(err, "inflating collection tree")
 	}
 
 	baseIDs := make([]manifest.ID, 0, len(baseSnaps))
@@ -1036,12 +1055,12 @@ func inflateDirTree(
 	for _, snap := range baseSnaps {
 		if err = inflateBaseTree(ctx, loader, snap, updatedPaths, roots); err != nil {
-			return nil, clues.Wrap(err, "inflating base snapshot tree(s)")
+			return nil, nil, clues.Wrap(err, "inflating base snapshot tree(s)")
 		}
 	}
 
 	if len(roots) > 1 {
-		return nil, clues.New("multiple root directories")
+		return nil, nil, clues.New("multiple root directories")
 	}
 
 	var res fs.Directory
@@ -1049,11 +1068,11 @@ func inflateDirTree(
 	for dirName, dir := range roots {
 		tmp, err := buildKopiaDirs(dirName, dir, globalExcludeSet, progress)
 		if err != nil {
-			return nil, err
+			return nil, nil, err
 		}
 
 		res = tmp
 	}
 
-	return res, nil
+	return res, updatedLocations, nil
 }
```

View File

```diff
@@ -673,6 +673,84 @@ func TestHierarchyBuilderUnitSuite(t *testing.T) {
 	suite.Run(t, &HierarchyBuilderUnitSuite{Suite: tester.NewUnitSuite(t)})
 }
 
+func (suite *HierarchyBuilderUnitSuite) TestPopulatesPrefixMatcher() {
+	ctx, flush := tester.NewContext()
+	defer flush()
+
+	t := suite.T()
+
+	p1 := makePath(
+		t,
+		[]string{testTenant, service, testUser, category, "folder1"},
+		false)
+	p2 := makePath(
+		t,
+		[]string{testTenant, service, testUser, category, "folder2"},
+		false)
+	p3 := makePath(
+		t,
+		[]string{testTenant, service, testUser, category, "folder3"},
+		false)
+	p4 := makePath(
+		t,
+		[]string{testTenant, service, testUser, category, "folder4"},
+		false)
+
+	c1 := mockconnector.NewMockExchangeCollection(p1, p1, 1)
+	c1.PrevPath = p1
+	c1.ColState = data.NotMovedState
+
+	c2 := mockconnector.NewMockExchangeCollection(p2, p2, 1)
+	c2.PrevPath = p3
+	c2.ColState = data.MovedState
+
+	c3 := mockconnector.NewMockExchangeCollection(nil, nil, 0)
+	c3.PrevPath = p4
+	c3.ColState = data.DeletedState
+
+	cols := []data.BackupCollection{c1, c2, c3}
+
+	_, locPaths, err := inflateDirTree(ctx, nil, nil, cols, nil, nil)
+	require.NoError(t, err)
+
+	table := []struct {
+		inputPath   string
+		check       require.ValueAssertionFunc
+		expectedLoc *path.Builder
+	}{
+		{
+			inputPath:   p1.String(),
+			check:       require.NotNil,
+			expectedLoc: path.Builder{}.Append(p1.Folders()...),
+		},
+		{
+			inputPath:   p3.String(),
+			check:       require.NotNil,
+			expectedLoc: path.Builder{}.Append(p2.Folders()...),
+		},
+		{
+			inputPath:   p4.String(),
+			check:       require.Nil,
+			expectedLoc: nil,
+		},
+	}
+
+	for _, test := range table {
+		suite.Run(test.inputPath, func() {
+			t := suite.T()
+
+			loc := locPaths.LongestPrefix(test.inputPath)
+			test.check(t, loc)
+
+			if loc == nil {
+				return
+			}
+
+			assert.Equal(t, test.expectedLoc.String(), loc.String())
+		})
+	}
+}
+
 func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree() {
 	tester.LogTimeOfTest(suite.T())
 
 	ctx, flush := tester.NewContext()
@@ -723,7 +801,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree() {
 	// - emails
 	//   - Inbox
 	//     - 42 separate files
-	dirTree, err := inflateDirTree(ctx, nil, nil, collections, nil, progress)
+	dirTree, _, err := inflateDirTree(ctx, nil, nil, collections, nil, progress)
 	require.NoError(t, err, clues.ToCore(err))
 
 	assert.Equal(t, encodeAsPath(testTenant), dirTree.Name())
@@ -819,7 +897,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_MixedDirectory()
 				errs: fault.New(true),
 			}
 
-			dirTree, err := inflateDirTree(ctx, nil, nil, test.layout, nil, progress)
+			dirTree, _, err := inflateDirTree(ctx, nil, nil, test.layout, nil, progress)
 			require.NoError(t, err, clues.ToCore(err))
 
 			assert.Equal(t, encodeAsPath(testTenant), dirTree.Name())
@@ -920,7 +998,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_Fails() {
 		suite.Run(test.name, func() {
 			t := suite.T()
 
-			_, err := inflateDirTree(ctx, nil, nil, test.layout, nil, nil)
+			_, _, err := inflateDirTree(ctx, nil, nil, test.layout, nil, nil)
 			assert.Error(t, err, clues.ToCore(err))
 		})
 	}
@@ -1032,7 +1110,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeErrors() {
 				cols = append(cols, mc)
 			}
 
-			_, err := inflateDirTree(ctx, nil, nil, cols, nil, progress)
+			_, _, err := inflateDirTree(ctx, nil, nil, cols, nil, progress)
 			require.Error(t, err, clues.ToCore(err))
 		})
 	}
@@ -1307,7 +1385,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
 		snapshotRoot: getBaseSnapshot(),
 	}
 
-	dirTree, err := inflateDirTree(
+	dirTree, _, err := inflateDirTree(
 		ctx,
 		msw,
 		[]IncrementalBase{
@@ -2086,7 +2164,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
 		snapshotRoot: getBaseSnapshot(),
 	}
 
-	dirTree, err := inflateDirTree(
+	dirTree, _, err := inflateDirTree(
 		ctx,
 		msw,
 		[]IncrementalBase{
@@ -2249,7 +2327,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSkipsDeletedSubtre
 	//     - file3
 	//   - work
 	//     - file4
-	dirTree, err := inflateDirTree(
+	dirTree, _, err := inflateDirTree(
 		ctx,
 		msw,
 		[]IncrementalBase{
@@ -2353,7 +2431,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_HandleEmptyBase()
 	// - emails
 	//   - Archive
 	//     - file2
-	dirTree, err := inflateDirTree(
+	dirTree, _, err := inflateDirTree(
 		ctx,
 		msw,
 		[]IncrementalBase{
@@ -2602,7 +2680,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsCorrectSubt
 	collections := []data.BackupCollection{mc}
 
-	dirTree, err := inflateDirTree(
+	dirTree, _, err := inflateDirTree(
 		ctx,
 		msw,
 		[]IncrementalBase{
```

View File

```diff
@@ -145,16 +145,16 @@ func (w Wrapper) ConsumeBackupCollections(
 	tags map[string]string,
 	buildTreeWithBase bool,
 	errs *fault.Bus,
-) (*BackupStats, *details.Builder, map[string]PrevRefs, error) {
+) (*BackupStats, *details.Builder, map[string]PrevRefs, *LocationPrefixMatcher, error) {
 	if w.c == nil {
-		return nil, nil, nil, clues.Stack(errNotConnected).WithClues(ctx)
+		return nil, nil, nil, nil, clues.Stack(errNotConnected).WithClues(ctx)
 	}
 
 	ctx, end := diagnostics.Span(ctx, "kopia:consumeBackupCollections")
 	defer end()
 
 	if len(collections) == 0 && len(globalExcludeSet) == 0 {
-		return &BackupStats{}, &details.Builder{}, nil, nil
+		return &BackupStats{}, &details.Builder{}, nil, nil, nil
 	}
 
 	progress := &corsoProgress{
@@ -172,7 +172,7 @@ func (w Wrapper) ConsumeBackupCollections(
 		base = previousSnapshots
 	}
 
-	dirTree, err := inflateDirTree(
+	dirTree, updatedLocations, err := inflateDirTree(
 		ctx,
 		w.c,
 		base,
@@ -180,7 +180,7 @@ func (w Wrapper) ConsumeBackupCollections(
 		globalExcludeSet,
 		progress)
 	if err != nil {
-		return nil, nil, nil, clues.Wrap(err, "building kopia directories")
+		return nil, nil, nil, nil, clues.Wrap(err, "building kopia directories")
 	}
 
 	s, err := w.makeSnapshotWithRoot(
@@ -190,10 +190,10 @@ func (w Wrapper) ConsumeBackupCollections(
 		tags,
 		progress)
 	if err != nil {
-		return nil, nil, nil, err
+		return nil, nil, nil, nil, err
 	}
 
-	return s, progress.deets, progress.toMerge, progress.errs.Failure()
+	return s, progress.deets, progress.toMerge, updatedLocations, progress.errs.Failure()
 }
 
 func (w Wrapper) makeSnapshotWithRoot(
```

View File

```diff
@@ -276,7 +276,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
 		suite.Run(test.name, func() {
 			t := suite.T()
 
-			stats, deets, _, err := suite.w.ConsumeBackupCollections(
+			stats, deets, _, _, err := suite.w.ConsumeBackupCollections(
 				suite.ctx,
 				prevSnaps,
 				collections,
@@ -423,7 +423,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_NoDetailsForMeta() {
 			t := suite.T()
 			collections := test.cols()
 
-			stats, deets, prevShortRefs, err := suite.w.ConsumeBackupCollections(
+			stats, deets, prevShortRefs, _, err := suite.w.ConsumeBackupCollections(
 				suite.ctx,
 				prevSnaps,
 				collections,
@@ -525,7 +525,7 @@ func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() {
 	fp2, err := suite.storePath2.Append(dc2.Names[0], true)
 	require.NoError(t, err, clues.ToCore(err))
 
-	stats, _, _, err := w.ConsumeBackupCollections(
+	stats, _, _, _, err := w.ConsumeBackupCollections(
 		ctx,
 		nil,
 		[]data.BackupCollection{dc1, dc2},
@@ -644,7 +644,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
 		},
 	}
 
-	stats, deets, _, err := suite.w.ConsumeBackupCollections(
+	stats, deets, _, _, err := suite.w.ConsumeBackupCollections(
 		suite.ctx,
 		nil,
 		collections,
@@ -706,7 +706,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollectionsHandlesNoCollections()
 			ctx, flush := tester.NewContext()
 			defer flush()
 
-			s, d, _, err := suite.w.ConsumeBackupCollections(
+			s, d, _, _, err := suite.w.ConsumeBackupCollections(
 				ctx,
 				nil,
 				test.collections,
@@ -866,7 +866,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) SetupTest() {
 		tags[k] = ""
 	}
 
-	stats, deets, _, err := suite.w.ConsumeBackupCollections(
+	stats, deets, _, _, err := suite.w.ConsumeBackupCollections(
 		suite.ctx,
 		nil,
 		collections,
@@ -1018,7 +1018,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
 		}
 	}
 
-	stats, _, _, err := suite.w.ConsumeBackupCollections(
+	stats, _, _, _, err := suite.w.ConsumeBackupCollections(
 		suite.ctx,
 		[]IncrementalBase{
 			{
```

View File

```diff
@@ -265,7 +265,7 @@ func (op *BackupOperation) do(
 	ctx = clues.Add(ctx, "coll_count", len(cs))
 
-	writeStats, deets, toMerge, err := consumeBackupCollections(
+	writeStats, deets, toMerge, updatedLocs, err := consumeBackupCollections(
 		ctx,
 		op.kopia,
 		op.account.ID(),
@@ -288,6 +288,7 @@ func (op *BackupOperation) do(
 		detailsStore,
 		mans,
 		toMerge,
+		updatedLocs,
 		deets,
 		op.Errors)
 	if err != nil {
@@ -411,7 +412,7 @@ func consumeBackupCollections(
 	backupID model.StableID,
 	isIncremental bool,
 	errs *fault.Bus,
-) (*kopia.BackupStats, *details.Builder, map[string]kopia.PrevRefs, error) {
+) (*kopia.BackupStats, *details.Builder, map[string]kopia.PrevRefs, *kopia.LocationPrefixMatcher, error) {
 	complete, closer := observe.MessageWithCompletion(ctx, "Backing up data")
 	defer func() {
 		complete <- struct{}{}
@@ -440,7 +441,7 @@ func consumeBackupCollections(
 		for _, reason := range m.Reasons {
 			pb, err := builderFromReason(ctx, tenantID, reason)
 			if err != nil {
-				return nil, nil, nil, clues.Wrap(err, "getting subtree paths for bases")
+				return nil, nil, nil, nil, clues.Wrap(err, "getting subtree paths for bases")
 			}
 
 			paths = append(paths, pb)
@@ -476,7 +477,7 @@ func consumeBackupCollections(
 			"base_backup_id", mbID)
 	}
 
-	kopiaStats, deets, itemsSourcedFromBase, err := bc.ConsumeBackupCollections(
+	kopiaStats, deets, itemsSourcedFromBase, updatedLocs, err := bc.ConsumeBackupCollections(
 		ctx,
 		bases,
 		cs,
@@ -486,10 +487,10 @@ func consumeBackupCollections(
 		errs)
 	if err != nil {
 		if kopiaStats == nil {
-			return nil, nil, nil, err
+			return nil, nil, nil, nil, err
 		}
 
-		return nil, nil, nil, clues.Stack(err).With(
+		return nil, nil, nil, nil, clues.Stack(err).With(
 			"kopia_errors", kopiaStats.ErrorCount,
 			"kopia_ignored_errors", kopiaStats.IgnoredErrorCount)
 	}
@@ -501,7 +502,7 @@ func consumeBackupCollections(
 			"kopia_ignored_errors", kopiaStats.IgnoredErrorCount)
 	}
 
-	return kopiaStats, deets, itemsSourcedFromBase, err
+	return kopiaStats, deets, itemsSourcedFromBase, updatedLocs, err
 }
 
 func matchesReason(reasons []kopia.Reason, p path.Path) bool {
@@ -522,6 +523,7 @@ func mergeDetails(
 	detailsStore streamstore.Streamer,
 	mans []*kopia.ManifestEntry,
 	shortRefsFromPrevBackup map[string]kopia.PrevRefs,
+	updatedLocs *kopia.LocationPrefixMatcher,
 	deets *details.Builder,
 	errs *fault.Bus,
 ) error {
@@ -587,7 +589,9 @@ func mergeDetails(
 			}
 
 			newPath := prev.Repo
-			newLoc := prev.Location
+			// Locations are keyed by collection RepoRef, so drop the item
+			// element from the input before the lookup.
+			newLoc := updatedLocs.LongestPrefix(rr.ToBuilder().Dir().String())
 
 			// Fixup paths in the item.
 			item := entry.ItemInfo
```
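Because the matcher is keyed by collection RepoRef while `mergeDetails` walks individual items, the item element has to be trimmed off before the query. A minimal hedged sketch of just that step (the package and helper name are invented; `rr` stands for an item's old RepoRef):

```go
package sketch

import (
	"github.com/alcionai/corso/src/internal/kopia"
	"github.com/alcionai/corso/src/pkg/path"
)

// updatedLocationFor mirrors the lookup in mergeDetails: rr is an item's old
// RepoRef (collection RepoRef plus item ID), so dropping its last element
// recovers the collection RepoRef the matcher was keyed with during tree
// inflation. A nil result means no move was recorded, and the caller keeps
// the location from the previous backup's details.
func updatedLocationFor(updatedLocs *kopia.LocationPrefixMatcher, rr path.Path) *path.Builder {
	return updatedLocs.LongestPrefix(rr.ToBuilder().Dir().String())
}
```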

View File

```diff
@@ -102,12 +102,12 @@ func (mbu mockBackupConsumer) ConsumeBackupCollections(
 	tags map[string]string,
 	buildTreeWithBase bool,
 	errs *fault.Bus,
-) (*kopia.BackupStats, *details.Builder, map[string]kopia.PrevRefs, error) {
+) (*kopia.BackupStats, *details.Builder, map[string]kopia.PrevRefs, *kopia.LocationPrefixMatcher, error) {
 	if mbu.checkFunc != nil {
 		mbu.checkFunc(bases, cs, tags, buildTreeWithBase)
 	}
 
-	return &kopia.BackupStats{}, &details.Builder{}, nil, nil
+	return &kopia.BackupStats{}, &details.Builder{}, nil, nil, nil
 }
 
 // ----- model store for backups
@@ -674,6 +674,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 		populatedDetails             map[string]*details.Details
 		inputMans                    []*kopia.ManifestEntry
 		inputShortRefsFromPrevBackup map[string]kopia.PrevRefs
+		prefixMatcher                *kopia.LocationPrefixMatcher
 		errCheck                     assert.ErrorAssertionFunc
 
 		expectedEntries []*details.DetailsEntry
@@ -699,6 +700,17 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 					Location: locationPath1,
 				},
 			},
+			prefixMatcher: func() *kopia.LocationPrefixMatcher {
+				p := kopia.NewLocationPrefixMatcher()
+
+				rr, err := itemPath1.Dir()
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				err = p.Add(rr, locationPath1)
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				return p
+			}(),
 			inputMans: []*kopia.ManifestEntry{
 				{
 					Manifest: makeManifest(suite.T(), "foo", ""),
@@ -717,6 +729,17 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 					Location: locationPath1,
 				},
 			},
+			prefixMatcher: func() *kopia.LocationPrefixMatcher {
+				p := kopia.NewLocationPrefixMatcher()
+
+				rr, err := itemPath1.Dir()
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				err = p.Add(rr, locationPath1)
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				return p
+			}(),
 			inputMans: []*kopia.ManifestEntry{
 				{
 					Manifest: makeManifest(suite.T(), backup1.ID, ""),
@@ -747,6 +770,23 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 					Location: locationPath2,
 				},
 			},
+			prefixMatcher: func() *kopia.LocationPrefixMatcher {
+				p := kopia.NewLocationPrefixMatcher()
+
+				rr, err := itemPath1.Dir()
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				err = p.Add(rr, locationPath1)
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				rr, err = itemPath2.Dir()
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				err = p.Add(rr, locationPath2)
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				return p
+			}(),
 			inputMans: []*kopia.ManifestEntry{
 				{
 					Manifest: makeManifest(suite.T(), backup1.ID, ""),
@@ -777,6 +817,17 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 					Location: locationPath1,
 				},
 			},
+			prefixMatcher: func() *kopia.LocationPrefixMatcher {
+				p := kopia.NewLocationPrefixMatcher()
+
+				rr, err := itemPath1.Dir()
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				err = p.Add(rr, locationPath1)
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				return p
+			}(),
 			inputMans: []*kopia.ManifestEntry{
 				{
 					Manifest: makeManifest(suite.T(), backup1.ID, ""),
@@ -813,6 +864,17 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 					Location: locationPath2,
 				},
 			},
+			prefixMatcher: func() *kopia.LocationPrefixMatcher {
+				p := kopia.NewLocationPrefixMatcher()
+
+				rr, err := itemPath1.Dir()
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				err = p.Add(rr, locationPath2)
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				return p
+			}(),
 			inputMans: []*kopia.ManifestEntry{
 				{
 					Manifest: makeManifest(suite.T(), backup1.ID, ""),
@@ -872,6 +934,10 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 				),
 			},
 		},
+			prefixMatcher: func() *kopia.LocationPrefixMatcher {
+				p := kopia.NewLocationPrefixMatcher()
+				return p
+			}(),
 			inputMans: []*kopia.ManifestEntry{
 				{
 					Manifest: makeManifest(suite.T(), backup1.ID, ""),
@@ -902,6 +968,17 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 					Location: locationPath1,
 				},
 			},
+			prefixMatcher: func() *kopia.LocationPrefixMatcher {
+				p := kopia.NewLocationPrefixMatcher()
+
+				rr, err := itemPath1.Dir()
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				err = p.Add(rr, locationPath1)
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				return p
+			}(),
 			inputMans: []*kopia.ManifestEntry{
 				{
 					Manifest: makeManifest(suite.T(), backup1.ID, ""),
@@ -934,6 +1011,10 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 					Repo: itemPath1,
 				},
 			},
+			prefixMatcher: func() *kopia.LocationPrefixMatcher {
+				p := kopia.NewLocationPrefixMatcher()
+				return p
+			}(),
 			inputMans: []*kopia.ManifestEntry{
 				{
 					Manifest: makeManifest(suite.T(), backup1.ID, ""),
@@ -967,6 +1048,17 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 					Location: locationPath1,
 				},
 			},
+			prefixMatcher: func() *kopia.LocationPrefixMatcher {
+				p := kopia.NewLocationPrefixMatcher()
+
+				rr, err := itemPath1.Dir()
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				err = p.Add(rr, locationPath1)
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				return p
+			}(),
 			inputMans: []*kopia.ManifestEntry{
 				{
 					Manifest: makeManifest(suite.T(), backup1.ID, ""),
@@ -1000,6 +1092,17 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 					Location: locationPath1,
 				},
 			},
+			prefixMatcher: func() *kopia.LocationPrefixMatcher {
+				p := kopia.NewLocationPrefixMatcher()
+
+				rr, err := itemPath1.Dir()
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				err = p.Add(rr, locationPath1)
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				return p
+			}(),
 			inputMans: []*kopia.ManifestEntry{
 				{
 					Manifest: makeManifest(suite.T(), backup1.ID, ""),
@@ -1034,6 +1137,17 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 					Location: locationPath2,
 				},
 			},
+			prefixMatcher: func() *kopia.LocationPrefixMatcher {
+				p := kopia.NewLocationPrefixMatcher()
+
+				rr, err := itemPath1.Dir()
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				err = p.Add(rr, locationPath2)
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				return p
+			}(),
 			inputMans: []*kopia.ManifestEntry{
 				{
 					Manifest: makeManifest(suite.T(), backup1.ID, ""),
@@ -1071,6 +1185,23 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 					Location: locationPath3,
 				},
 			},
+			prefixMatcher: func() *kopia.LocationPrefixMatcher {
+				p := kopia.NewLocationPrefixMatcher()
+
+				rr, err := itemPath1.Dir()
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				err = p.Add(rr, locationPath1)
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				rr, err = itemPath3.Dir()
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				err = p.Add(rr, locationPath3)
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				return p
+			}(),
 			inputMans: []*kopia.ManifestEntry{
 				{
 					Manifest: makeManifest(suite.T(), backup1.ID, ""),
@@ -1122,6 +1253,17 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 					Location: locationPath1,
 				},
 			},
+			prefixMatcher: func() *kopia.LocationPrefixMatcher {
+				p := kopia.NewLocationPrefixMatcher()
+
+				rr, err := itemPath1.Dir()
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				err = p.Add(rr, locationPath1)
+				require.NoError(suite.T(), err, clues.ToCore(err))
+
+				return p
+			}(),
 			inputMans: []*kopia.ManifestEntry{
 				{
 					Manifest: makeManifest(suite.T(), backup1.ID, ""),
@@ -1181,6 +1323,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 				mds,
 				test.inputMans,
 				test.inputShortRefsFromPrevBackup,
+				test.prefixMatcher,
 				&deets,
 				fault.New(true))
 			test.errCheck(t, err, clues.ToCore(err))
@@ -1255,6 +1398,13 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsFolde
 		// later = now.Add(42 * time.Minute)
 	)
 
+	itemDir, err := itemPath1.Dir()
+	require.NoError(t, err, clues.ToCore(err))
+
+	prefixMatcher := kopia.NewLocationPrefixMatcher()
+
+	err = prefixMatcher.Add(itemDir, locPath1)
+	require.NoError(suite.T(), err, clues.ToCore(err))
+
 	itemDetails := makeDetailsEntry(t, itemPath1, locPath1, itemSize, false)
 	// itemDetails.Exchange.Modified = now
@@ -1288,12 +1438,13 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsFolde
 		deets = details.Builder{}
 	)
 
-	err := mergeDetails(
+	err = mergeDetails(
 		ctx,
 		w,
 		mds,
 		inputMans,
 		inputToMerge,
+		prefixMatcher,
 		&deets,
 		fault.New(true))
 	assert.NoError(t, err, clues.ToCore(err))
```

View File

```diff
@@ -37,7 +37,7 @@ type (
 		tags map[string]string,
 		buildTreeWithBase bool,
 		errs *fault.Bus,
-	) (*kopia.BackupStats, *details.Builder, map[string]kopia.PrevRefs, error)
+	) (*kopia.BackupStats, *details.Builder, map[string]kopia.PrevRefs, *kopia.LocationPrefixMatcher, error)
 	}
 
 	RestoreProducer interface {
```

View File

```diff
@@ -228,7 +228,7 @@ func write(
 	dbcs []data.BackupCollection,
 	errs *fault.Bus,
 ) (string, error) {
-	backupStats, _, _, err := bup.ConsumeBackupCollections(
+	backupStats, _, _, _, err := bup.ConsumeBackupCollections(
 		ctx,
 		nil,
 		dbcs,
```