Wire up LocationRef stuff to details merge (#3110)

This still doesn't populate LocationRef for
OneDrive, but gets a lot of the other logic
that will be required for that in place.

Mostly it focuses on starting to use
LocationIDer for lookups when merging
details

---

#### Does this PR need a docs update or release note?

- [ ]  Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x]  No

#### Type of change

- [x] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [ ] 🧹 Tech Debt/Cleanup

#### Issue(s)

* #2486

#### Test Plan

- [x] 💪 Manual
- [x]  Unit test
- [ ] 💚 E2E
This commit is contained in:
ashmrtn 2023-04-13 17:48:12 -07:00 committed by GitHub
parent f5b1b55971
commit a25948042a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
11 changed files with 442 additions and 320 deletions

View File

@ -104,6 +104,17 @@ type LocationPather interface {
LocationPath() *path.Builder
}
// PreviousLocationPather provides both the current location of the collection
// as well as the location of the item in the previous backup.
//
// TODO(ashmrtn): If we guarantee that we persist the location of collections in
// addition to the path of the item then we could just have a single
// *LocationPather interface with current and previous location functions.
type PreviousLocationPather interface {
	LocationPather

	// PreviousLocationPath returns an identifier for where the collection
	// lived in the previous backup.
	// NOTE(review): callers appear to treat a nil result as an error for
	// moved collections — confirm against implementations.
	PreviousLocationPath() details.LocationIDer
}
// StreamInfo is used to provide service specific
// information about the Stream
type StreamInfo interface {

View File

@ -4,24 +4,33 @@ import (
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/path"
)
type DetailsMergeInfoer interface {
// Count returns the number of items that need to be merged.
// ItemsToMerge returns the number of items that need to be merged.
ItemsToMerge() int
// GetNewRepoRef takes the path of the old location of the item and returns
// its new RepoRef if the item needs merged. If the item doesn't need merged
// returns nil.
GetNewRepoRef(oldRef *path.Builder) path.Path
// GetNewLocation takes the path of the folder containing the item and returns
// the location of the folder containing the item if it was updated. Otherwise
// returns nil.
GetNewLocation(oldRef *path.Builder) *path.Builder
// GetNewPathRefs takes the old RepoRef and old LocationRef of an item and
// returns the new RepoRef, a prefix of the old LocationRef to replace, and
// the new LocationRefPrefix of the item if the item should be merged. If the
// item shouldn't be merged nils are returned.
//
// If the returned old LocationRef prefix is equal to the old LocationRef then
// the entire LocationRef should be replaced with the returned value.
GetNewPathRefs(
oldRef *path.Builder,
oldLoc details.LocationIDer,
) (path.Path, *path.Builder, *path.Builder)
}
type prevRef struct {
repoRef path.Path
locRef *path.Builder
}
type mergeDetails struct {
repoRefs map[string]path.Path
repoRefs map[string]prevRef
locations *locationPrefixMatcher
}
@ -33,64 +42,97 @@ func (m *mergeDetails) ItemsToMerge() int {
return len(m.repoRefs)
}
func (m *mergeDetails) addRepoRef(oldRef *path.Builder, newRef path.Path) error {
func (m *mergeDetails) addRepoRef(
oldRef *path.Builder,
newRef path.Path,
newLocRef *path.Builder,
) error {
if newRef == nil {
return clues.New("nil RepoRef")
}
if _, ok := m.repoRefs[oldRef.ShortRef()]; ok {
return clues.New("duplicate RepoRef").With("repo_ref", oldRef.String())
}
m.repoRefs[oldRef.ShortRef()] = newRef
pr := prevRef{
repoRef: newRef,
locRef: newLocRef,
}
m.repoRefs[oldRef.ShortRef()] = pr
return nil
}
func (m *mergeDetails) GetNewRepoRef(oldRef *path.Builder) path.Path {
return m.repoRefs[oldRef.ShortRef()]
func (m *mergeDetails) GetNewPathRefs(
oldRef *path.Builder,
oldLoc details.LocationIDer,
) (path.Path, *path.Builder, *path.Builder) {
pr, ok := m.repoRefs[oldRef.ShortRef()]
if !ok {
return nil, nil, nil
}
// This was a location specified directly by a collection. Say the prefix is
// the whole oldLoc so other code will replace everything.
//
// TODO(ashmrtn): Should be able to remove the nil check later as we'll be
// able to ensure that old locations actually exist in backup details.
if oldLoc == nil {
return pr.repoRef, nil, pr.locRef
} else if pr.locRef != nil {
return pr.repoRef, oldLoc.InDetails(), pr.locRef
}
// This is a location that we need to do prefix matching on because we didn't
// see the new location of it in a collection. For example, it's a subfolder
// whose parent folder was moved.
prefixes := m.locations.longestPrefix(oldLoc.ID())
return pr.repoRef, prefixes.oldLoc, prefixes.newLoc
}
func (m *mergeDetails) addLocation(oldRef, newLoc *path.Builder) error {
return m.locations.add(oldRef, newLoc)
}
func (m *mergeDetails) GetNewLocation(oldRef *path.Builder) *path.Builder {
return m.locations.longestPrefix(oldRef.String())
func (m *mergeDetails) addLocation(
oldRef details.LocationIDer,
newLoc *path.Builder,
) error {
return m.locations.add(oldRef.ID(), newLoc)
}
func newMergeDetails() *mergeDetails {
return &mergeDetails{
repoRefs: map[string]path.Path{},
repoRefs: map[string]prevRef{},
locations: newLocationPrefixMatcher(),
}
}
type locRefs struct {
oldLoc *path.Builder
newLoc *path.Builder
}
type locationPrefixMatcher struct {
m prefixmatcher.Matcher[*path.Builder]
m prefixmatcher.Matcher[locRefs]
}
func (m *locationPrefixMatcher) add(oldRef, newLoc *path.Builder) error {
if _, ok := m.m.Get(oldRef.String()); ok {
key := oldRef.String()
if _, ok := m.m.Get(key); ok {
return clues.New("RepoRef already in matcher").With("repo_ref", oldRef)
}
m.m.Add(oldRef.String(), newLoc)
m.m.Add(key, locRefs{oldLoc: oldRef, newLoc: newLoc})
return nil
}
func (m *locationPrefixMatcher) longestPrefix(oldRef string) *path.Builder {
if m == nil {
return nil
}
k, v, _ := m.m.LongestPrefix(oldRef)
if k != oldRef {
// For now we only want to allow exact matches because this is only enabled
// for Exchange at the moment.
return nil
}
func (m *locationPrefixMatcher) longestPrefix(oldRef *path.Builder) locRefs {
_, v, _ := m.m.LongestPrefix(oldRef.String())
return v
}
func newLocationPrefixMatcher() *locationPrefixMatcher {
return &locationPrefixMatcher{m: prefixmatcher.NewMatcher[*path.Builder]()}
return &locationPrefixMatcher{m: prefixmatcher.NewMatcher[locRefs]()}
}

View File

@ -12,6 +12,18 @@ import (
"github.com/alcionai/corso/src/pkg/path"
)
// mockLocationIDer is a test double for details.LocationIDer that backs both
// the unique-ID form and the details (human-readable) form of a location with
// the same path.Builder.
type mockLocationIDer struct {
	pb *path.Builder
}

// ID returns the builder used as the location's unique identifier.
func (ul mockLocationIDer) ID() *path.Builder {
	return ul.pb
}

// InDetails returns the builder used as the location's details representation.
func (ul mockLocationIDer) InDetails() *path.Builder {
	return ul.pb
}
type DetailsMergeInfoerUnitSuite struct {
tester.Suite
}
@ -20,11 +32,9 @@ func TestDetailsMergeInfoerUnitSuite(t *testing.T) {
suite.Run(t, &DetailsMergeInfoerUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestRepoRefs is a basic sanity test to ensure lookups are working properly
// for stored RepoRefs.
func (suite *DetailsMergeInfoerUnitSuite) TestRepoRefs() {
func (suite *DetailsMergeInfoerUnitSuite) TestAddRepoRef_DuplicateFails() {
t := suite.T()
oldRef := makePath(
oldRef1 := makePath(
t,
[]string{
testTenant,
@ -33,29 +43,148 @@ func (suite *DetailsMergeInfoerUnitSuite) TestRepoRefs() {
category,
"folder1",
},
false).ToBuilder()
newRef := makePath(
false)
dm := newMergeDetails()
err := dm.addRepoRef(oldRef1.ToBuilder(), oldRef1, nil)
require.NoError(t, err, clues.ToCore(err))
err = dm.addRepoRef(oldRef1.ToBuilder(), oldRef1, nil)
require.Error(t, err, clues.ToCore(err))
}
// TestGetNewPathRefs is a basic sanity test to ensure lookups are working
// properly for stored RepoRefs and LocationRefs.
func (suite *DetailsMergeInfoerUnitSuite) TestGetNewPathRefs() {
t := suite.T()
oldRef1 := makePath(
t,
[]string{
testTenant,
service,
testUser,
category,
"folder1",
},
false)
oldRef2 := makePath(
t,
[]string{
testTenant,
service,
testUser,
category,
"folder1",
"folder2",
},
false)
newRef1 := makePath(
t,
[]string{
testTenant,
service,
testUser,
category,
"folder3",
},
false)
newRef2 := makePath(
t,
[]string{
testTenant,
service,
testUser,
category,
"folder3",
"folder4",
},
false)
newLoc1 := path.Builder{}.Append(newRef1.Folders()...)
oldLoc1 := path.Builder{}.Append(oldRef1.Folders()...)
oldLoc2 := path.Builder{}.Append(oldRef2.Folders()...)
searchLoc1 := mockLocationIDer{oldLoc1}
searchLoc2 := mockLocationIDer{oldLoc2}
dm := newMergeDetails()
err := dm.addRepoRef(oldRef, newRef)
err := dm.addRepoRef(oldRef1.ToBuilder(), newRef1, newLoc1)
require.NoError(t, err, clues.ToCore(err))
got := dm.GetNewRepoRef(oldRef)
require.NotNil(t, got)
assert.Equal(t, newRef.String(), got.String())
err = dm.addRepoRef(oldRef2.ToBuilder(), newRef2, nil)
require.NoError(t, err, clues.ToCore(err))
got = dm.GetNewRepoRef(newRef.ToBuilder())
assert.Nil(t, got)
// Add prefix matcher entry.
err = dm.addLocation(searchLoc1, newLoc1)
require.NoError(t, err, clues.ToCore(err))
table := []struct {
name string
searchRef *path.Builder
searchLoc mockLocationIDer
expectedRef path.Path
prefixFound bool
expectedOldPrefix *path.Builder
}{
{
name: "Exact Match With Loc",
searchRef: oldRef1.ToBuilder(),
searchLoc: searchLoc1,
expectedRef: newRef1,
prefixFound: true,
expectedOldPrefix: oldLoc1,
},
{
name: "Exact Match Without Loc",
searchRef: oldRef1.ToBuilder(),
expectedRef: newRef1,
prefixFound: true,
expectedOldPrefix: nil,
},
{
name: "Prefix Match",
searchRef: oldRef2.ToBuilder(),
searchLoc: searchLoc2,
expectedRef: newRef2,
prefixFound: true,
expectedOldPrefix: oldLoc1,
},
{
name: "Not Found",
searchRef: newRef1.ToBuilder(),
expectedRef: nil,
},
{
name: "Not Found With Loc",
searchRef: newRef1.ToBuilder(),
searchLoc: searchLoc1,
expectedRef: nil,
},
{
name: "Ref Found Loc Not",
searchRef: oldRef2.ToBuilder(),
searchLoc: mockLocationIDer{path.Builder{}.Append("foo")},
expectedRef: newRef2,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
newRef, oldPrefix, newPrefix := dm.GetNewPathRefs(test.searchRef, test.searchLoc)
assert.Equal(t, test.expectedRef, newRef, "RepoRef")
if !test.prefixFound {
assert.Nil(t, oldPrefix)
assert.Nil(t, newPrefix)
return
}
assert.Equal(t, test.expectedOldPrefix, oldPrefix, "old prefix")
assert.Equal(t, newLoc1, newPrefix, "new prefix")
})
}
}
type LocationPrefixMatcherUnitSuite struct {
@ -66,11 +195,6 @@ func TestLocationPrefixMatcherUnitSuite(t *testing.T) {
suite.Run(t, &LocationPrefixMatcherUnitSuite{Suite: tester.NewUnitSuite(t)})
}
type inputData struct {
repoRef path.Path
locRef *path.Builder
}
func (suite *LocationPrefixMatcherUnitSuite) TestAdd_Twice_Fails() {
t := suite.T()
p := makePath(
@ -96,93 +220,53 @@ func (suite *LocationPrefixMatcherUnitSuite) TestAdd_Twice_Fails() {
}
func (suite *LocationPrefixMatcherUnitSuite) TestAdd_And_Match() {
p1 := makePath(
suite.T(),
[]string{
testTenant,
service,
testUser,
category,
"folder1",
},
false)
loc1 := path.Builder{}.Append("folder1")
p1Parent, err := p1.Dir()
require.NoError(suite.T(), err, clues.ToCore(err))
loc2 := loc1.Append("folder2")
loc3 := path.Builder{}.Append("foo")
p2 := makePath(
suite.T(),
[]string{
testTenant,
service,
testUser,
category,
"folder2",
},
false)
res1 := path.Builder{}.Append("1")
lpm := newLocationPrefixMatcher()
err := lpm.add(loc1, res1)
require.NoError(suite.T(), err, clues.ToCore(err))
table := []struct {
name string
inputs []inputData
searchKey string
check require.ValueAssertionFunc
expected *path.Builder
searchKey *path.Builder
found bool
}{
{
name: "Exact Match",
inputs: []inputData{
{
repoRef: p1,
locRef: loc1,
},
},
searchKey: p1.String(),
check: require.NotNil,
expected: loc1,
name: "Exact Match",
searchKey: loc1,
found: true,
},
{
name: "No Match",
inputs: []inputData{
{
repoRef: p1,
locRef: loc1,
},
},
searchKey: p2.String(),
check: require.Nil,
name: "No Match",
searchKey: loc3,
},
{
name: "No Prefix Match",
inputs: []inputData{
{
repoRef: p1Parent,
locRef: loc1,
},
},
searchKey: p1.String(),
check: require.Nil,
name: "Prefix Match",
searchKey: loc2,
found: true,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
lpm := newLocationPrefixMatcher()
for _, input := range test.inputs {
err := lpm.add(input.repoRef.ToBuilder(), input.locRef)
require.NoError(t, err, clues.ToCore(err))
}
prefixes := lpm.longestPrefix(test.searchKey)
loc := lpm.longestPrefix(test.searchKey)
test.check(t, loc)
if !test.found {
assert.Nil(t, prefixes.oldLoc)
assert.Nil(t, prefixes.newLoc)
if loc == nil {
return
}
assert.Equal(t, test.expected.String(), loc.String())
assert.Equal(t, loc1, prefixes.oldLoc, "old prefix")
assert.Equal(t, res1, prefixes.newLoc, "new prefix")
})
}
}

View File

@ -195,7 +195,7 @@ func (cp *corsoProgress) FinishedFile(relativePath string, err error) {
cp.mu.Lock()
defer cp.mu.Unlock()
err := cp.toMerge.addRepoRef(d.prevPath.ToBuilder(), d.repoPath)
err := cp.toMerge.addRepoRef(d.prevPath.ToBuilder(), d.repoPath, d.locationPath)
if err != nil {
cp.errs.AddRecoverable(clues.Wrap(err, "adding item to merge list").
With(
@ -713,6 +713,31 @@ func getTreeNode(roots map[string]*treeMap, pathElements []string) *treeMap {
return dir
}
// addMergeLocation registers a collection's previous -> current location
// mapping in toMerge so that nested folders whose ancestor folders moved can
// later be prefix-matched to their updated locations.
//
// Collections that don't implement data.PreviousLocationPather are silently
// skipped. For collections that do implement it, both the previous and the
// current location must be non-nil.
func addMergeLocation(col data.BackupCollection, toMerge *mergeDetails) error {
	lp, ok := col.(data.PreviousLocationPather)
	if !ok {
		return nil
	}

	prevLoc := lp.PreviousLocationPath()
	newLoc := lp.LocationPath()

	if prevLoc == nil {
		return clues.New("moved collection with nil previous location")
	} else if newLoc == nil {
		return clues.New("moved collection with nil location")
	}

	// addLocation errors on duplicate previous locations.
	if err := toMerge.addLocation(prevLoc, newLoc); err != nil {
		return clues.Wrap(err, "building updated location set").
			With(
				"collection_previous_location", prevLoc,
				"collection_location", newLoc)
	}

	return nil
}
func inflateCollectionTree(
ctx context.Context,
collections []data.BackupCollection,
@ -728,17 +753,22 @@ func inflateCollectionTree(
changedPaths := []path.Path{}
for _, s := range collections {
ictx := clues.Add(
ctx,
"collection_full_path", s.FullPath(),
"collection_previous_path", s.PreviousPath())
switch s.State() {
case data.DeletedState:
if s.PreviousPath() == nil {
return nil, nil, clues.New("nil previous path on deleted collection")
return nil, nil, clues.New("nil previous path on deleted collection").WithClues(ictx)
}
changedPaths = append(changedPaths, s.PreviousPath())
if _, ok := updatedPaths[s.PreviousPath().String()]; ok {
return nil, nil, clues.New("multiple previous state changes to collection").
With("collection_previous_path", s.PreviousPath())
WithClues(ictx)
}
updatedPaths[s.PreviousPath().String()] = nil
@ -750,34 +780,34 @@ func inflateCollectionTree(
if _, ok := updatedPaths[s.PreviousPath().String()]; ok {
return nil, nil, clues.New("multiple previous state changes to collection").
With("collection_previous_path", s.PreviousPath())
WithClues(ictx)
}
updatedPaths[s.PreviousPath().String()] = s.FullPath()
}
// TODO(ashmrtn): Get old location ref and add it to the prefix matcher.
lp, ok := s.(data.LocationPather)
if ok && s.PreviousPath() != nil {
if err := toMerge.addLocation(s.PreviousPath().ToBuilder(), lp.LocationPath()); err != nil {
return nil, nil, clues.Wrap(err, "building updated location set").
With("collection_location", lp.LocationPath())
// Only safe when collections are moved since we only need prefix matching
// if a nested folder's path changed in some way that didn't generate a
// collection. For that to be the case, the nested folder's path must have
// changed via one of the ancestor folders being moved. This catches the
// ancestor folder move.
if err := addMergeLocation(s, toMerge); err != nil {
return nil, nil, clues.Wrap(err, "adding merge location").WithClues(ictx)
}
}
if s.FullPath() == nil || len(s.FullPath().Elements()) == 0 {
return nil, nil, clues.New("no identifier for collection")
return nil, nil, clues.New("no identifier for collection").WithClues(ictx)
}
node := getTreeNode(roots, s.FullPath().Elements())
if node == nil {
return nil, nil, clues.New("getting tree node").With("collection_full_path", s.FullPath())
return nil, nil, clues.New("getting tree node").WithClues(ictx)
}
// Make sure there's only a single collection adding items for any given
// path in the new hierarchy.
if node.collection != nil {
return nil, nil, clues.New("multiple instances of collection").With("collection_full_path", s.FullPath())
return nil, nil, clues.New("multiple instances of collection").WithClues(ictx)
}
node.collection = s

View File

@ -632,7 +632,7 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFileBaseItemDoesntBuildHierarch
assert.Empty(t, cp.deets)
for _, expected := range expectedToMerge {
gotRef := cp.toMerge.GetNewRepoRef(expected.oldRef)
gotRef, _, _ := cp.toMerge.GetNewPathRefs(expected.oldRef, nil)
if !assert.NotNil(t, gotRef) {
continue
}
@ -687,89 +687,6 @@ func TestHierarchyBuilderUnitSuite(t *testing.T) {
suite.Run(t, &HierarchyBuilderUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *HierarchyBuilderUnitSuite) TestPopulatesPrefixMatcher() {
ctx, flush := tester.NewContext()
defer flush()
t := suite.T()
p1 := makePath(
t,
[]string{testTenant, service, testUser, category, "folder1"},
false)
p2 := makePath(
t,
[]string{testTenant, service, testUser, category, "folder2"},
false)
p3 := makePath(
t,
[]string{testTenant, service, testUser, category, "folder3"},
false)
p4 := makePath(
t,
[]string{testTenant, service, testUser, category, "folder4"},
false)
c1 := mockconnector.NewMockExchangeCollection(p1, p1, 1)
c1.PrevPath = p1
c1.ColState = data.NotMovedState
c2 := mockconnector.NewMockExchangeCollection(p2, p2, 1)
c2.PrevPath = p3
c1.ColState = data.MovedState
c3 := mockconnector.NewMockExchangeCollection(nil, nil, 0)
c3.PrevPath = p4
c3.ColState = data.DeletedState
cols := []data.BackupCollection{c1, c2, c3}
cp := corsoProgress{
toMerge: newMergeDetails(),
errs: fault.New(true),
}
_, err := inflateDirTree(ctx, nil, nil, cols, nil, &cp)
require.NoError(t, err)
table := []struct {
inputPath *path.Builder
check require.ValueAssertionFunc
expectedLoc *path.Builder
}{
{
inputPath: p1.ToBuilder(),
check: require.NotNil,
expectedLoc: path.Builder{}.Append(p1.Folders()...),
},
{
inputPath: p3.ToBuilder(),
check: require.NotNil,
expectedLoc: path.Builder{}.Append(p2.Folders()...),
},
{
inputPath: p4.ToBuilder(),
check: require.Nil,
expectedLoc: nil,
},
}
for _, test := range table {
suite.Run(test.inputPath.String(), func() {
t := suite.T()
loc := cp.toMerge.GetNewLocation(test.inputPath)
test.check(t, loc)
if loc == nil {
return
}
assert.Equal(t, test.expectedLoc.String(), loc.String())
})
}
}
func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree() {
tester.LogTimeOfTest(suite.T())
ctx, flush := tester.NewContext()

View File

@ -516,6 +516,37 @@ func matchesReason(reasons []kopia.Reason, p path.Path) bool {
return false
}
// getNewPathRefs returns
//  1. the new RepoRef for the item if it needs to be merged
//  2. the new locationPath
//  3. whether the location was likely updated
//  4. any errors encountered
func getNewPathRefs(
	dataFromBackup kopia.DetailsMergeInfoer,
	entry *details.DetailsEntry,
	repoRef *path.Builder,
) (path.Path, *path.Builder, bool, error) {
	// Right now we can't guarantee that we have an old location in the
	// previous details entry so first try a lookup without a location to see
	// if it matches so we don't need to try parsing from the old entry.
	//
	// TODO(ashmrtn): In the future we can remove this first check as we'll be
	// able to assume we always have the location in the previous entry. We'll end
	// up doing some extra parsing, but it will simplify this code.
	newPath, _, newLocPrefix := dataFromBackup.GetNewPathRefs(repoRef, nil)
	if newPath == nil {
		// This entry doesn't need merging.
		return nil, nil, false, nil
	}

	// OneDrive doesn't return location prefixes yet; substitute an empty
	// builder so the LocationRef comparison below is nil-safe.
	if newLocPrefix == nil {
		newLocPrefix = &path.Builder{}
	}

	return newPath, newLocPrefix, newLocPrefix.String() != entry.LocationRef, nil
}
func mergeDetails(
ctx context.Context,
ms *store.Wrapper,
@ -579,40 +610,36 @@ func mergeDetails(
continue
}
pb := rr.ToBuilder()
mctx = clues.Add(mctx, "repo_ref", rr)
newPath := dataFromBackup.GetNewRepoRef(pb)
if newPath == nil {
// This entry was not sourced from a base snapshot or cached from a
// previous backup, skip it.
continue
newPath, newLoc, locUpdated, err := getNewPathRefs(
dataFromBackup,
entry,
rr.ToBuilder())
if err != nil {
return clues.Wrap(err, "getting updated info for entry").WithClues(mctx)
}
newLoc := dataFromBackup.GetNewLocation(pb.Dir())
// This entry isn't merged.
if newPath == nil {
continue
}
// Fixup paths in the item.
item := entry.ItemInfo
if err := details.UpdateItem(&item, newPath, newLoc); err != nil {
return clues.New("updating item details").WithClues(mctx)
return clues.Wrap(err, "updating merged item info").WithClues(mctx)
}
// TODO(ashmrtn): This may need to be updated if we start using this merge
// strategy for items that were cached in kopia.
var (
itemUpdated = newPath.String() != rr.String()
newLocStr string
)
if newLoc != nil {
newLocStr = newLoc.String()
itemUpdated = itemUpdated || newLocStr != entry.LocationRef
}
itemUpdated := newPath.String() != rr.String() || locUpdated
err = deets.Add(
newPath.String(),
newPath.ShortRef(),
newPath.ToBuilder().Dir().ShortRef(),
newLocStr,
newLoc.String(),
itemUpdated,
item)
if err != nil {

View File

@ -183,28 +183,36 @@ func (mbs mockBackupStorer) Update(context.Context, model.Schema, model.Model) e
// ----- model store for backups
type mockDetailsMergeInfoer struct {
repoRefs map[string]path.Path
locs map[string]*path.Builder
type locPair struct {
old *path.Builder
newL *path.Builder
}
func (m *mockDetailsMergeInfoer) add(oldRef, newRef path.Path, newLoc *path.Builder) {
type mockDetailsMergeInfoer struct {
repoRefs map[string]path.Path
locs map[string]locPair
}
func (m *mockDetailsMergeInfoer) add(oldRef, newRef path.Path, oldPrefix, newLoc *path.Builder) {
oldPB := oldRef.ToBuilder()
// Items are indexed individually.
m.repoRefs[oldPB.ShortRef()] = newRef
if newLoc != nil {
// Locations are indexed by directory.
m.locs[oldPB.Dir().ShortRef()] = newLoc
m.locs[oldPB.ShortRef()] = locPair{
old: oldPrefix,
newL: newLoc,
}
}
}
func (m *mockDetailsMergeInfoer) GetNewRepoRef(oldRef *path.Builder) path.Path {
return m.repoRefs[oldRef.ShortRef()]
}
func (m *mockDetailsMergeInfoer) GetNewLocation(oldRef *path.Builder) *path.Builder {
return m.locs[oldRef.ShortRef()]
func (m *mockDetailsMergeInfoer) GetNewPathRefs(
oldRef *path.Builder,
oldLoc details.LocationIDer,
) (path.Path, *path.Builder, *path.Builder) {
locs := m.locs[oldRef.ShortRef()]
return m.repoRefs[oldRef.ShortRef()], locs.old, locs.newL
}
func (m *mockDetailsMergeInfoer) ItemsToMerge() int {
@ -218,7 +226,7 @@ func (m *mockDetailsMergeInfoer) ItemsToMerge() int {
func newMockDetailsMergeInfoer() *mockDetailsMergeInfoer {
return &mockDetailsMergeInfoer{
repoRefs: map[string]path.Path{},
locs: map[string]*path.Builder{},
locs: map[string]locPair{},
}
}
@ -736,7 +744,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
name: "BackupIDNotFound",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(itemPath1, itemPath1, locationPath1)
res.add(itemPath1, itemPath1, locationPath1, locationPath1)
return res
}(),
@ -754,7 +762,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
name: "DetailsIDNotFound",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(itemPath1, itemPath1, locationPath1)
res.add(itemPath1, itemPath1, locationPath1, locationPath1)
return res
}(),
@ -780,8 +788,8 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
name: "BaseMissingItems",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(itemPath1, itemPath1, locationPath1)
res.add(itemPath2, itemPath2, locationPath2)
res.add(itemPath1, itemPath1, locationPath1, locationPath1)
res.add(itemPath2, itemPath2, locationPath2, locationPath2)
return res
}(),
@ -811,7 +819,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
name: "TooManyItems",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(itemPath1, itemPath1, locationPath1)
res.add(itemPath1, itemPath1, locationPath1, locationPath1)
return res
}(),
@ -847,7 +855,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
name: "BadBaseRepoRef",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(itemPath1, itemPath2, locationPath2)
res.add(itemPath1, itemPath2, locationPath1, locationPath2)
return res
}(),
@ -909,7 +917,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
true,
)
res.add(itemPath1, p, nil)
res.add(itemPath1, p, locationPath1, nil)
return res
}(),
@ -939,7 +947,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
name: "ItemMerged",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(itemPath1, itemPath1, locationPath1)
res.add(itemPath1, itemPath1, locationPath1, locationPath1)
return res
}(),
@ -968,44 +976,11 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
},
},
{
name: "ItemMergedNoLocation",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(itemPath1, itemPath1, nil)
return res
}(),
inputMans: []*kopia.ManifestEntry{
{
Manifest: makeManifest(suite.T(), backup1.ID, ""),
Reasons: []kopia.Reason{
pathReason1,
},
},
},
populatedModels: map[model.StableID]backup.Backup{
backup1.ID: backup1,
},
populatedDetails: map[string]*details.Details{
backup1.DetailsID: {
DetailsModel: details.DetailsModel{
Entries: []details.DetailsEntry{
*makeDetailsEntry(suite.T(), itemPath1, nil, 42, false),
},
},
},
},
errCheck: assert.NoError,
expectedEntries: []*details.DetailsEntry{
makeDetailsEntry(suite.T(), itemPath1, nil, 42, false),
},
},
{
name: "ItemMergedSameLocation",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(itemPath1, itemPath1, locationPath1)
res.add(itemPath1, itemPath1, locationPath1, locationPath1)
return res
}(),
@ -1038,7 +1013,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
name: "ItemMergedExtraItemsInBase",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(itemPath1, itemPath1, locationPath1)
res.add(itemPath1, itemPath1, locationPath1, locationPath1)
return res
}(),
@ -1072,7 +1047,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
name: "ItemMoved",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(itemPath1, itemPath2, locationPath2)
res.add(itemPath1, itemPath2, locationPath1, locationPath2)
return res
}(),
@ -1105,8 +1080,8 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
name: "MultipleBases",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(itemPath1, itemPath1, locationPath1)
res.add(itemPath3, itemPath3, locationPath3)
res.add(itemPath1, itemPath1, locationPath1, locationPath1)
res.add(itemPath3, itemPath3, locationPath3, locationPath3)
return res
}(),
@ -1157,7 +1132,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
name: "SomeBasesIncomplete",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(itemPath1, itemPath1, locationPath1)
res.add(itemPath1, itemPath1, locationPath1, locationPath1)
return res
}(),
@ -1288,7 +1263,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsFolde
)
mdm := newMockDetailsMergeInfoer()
mdm.add(itemPath1, itemPath1, locPath1)
mdm.add(itemPath1, itemPath1, locPath1, locPath1)
itemDetails := makeDetailsEntry(t, itemPath1, locPath1, itemSize, false)
// itemDetails.Exchange.Modified = now

View File

@ -759,13 +759,7 @@ func (i ExchangeInfo) Values() []string {
}
func (i *ExchangeInfo) UpdateParentPath(_ path.Path, locPath *path.Builder) error {
// Not all data types have this set yet.
if locPath == nil {
return nil
}
i.ParentPath = locPath.String()
return nil
}

View File

@ -240,6 +240,33 @@ func (pb Builder) LastElem() string {
return pb.elements[len(pb.elements)-1]
}
// UpdateParent updates leading elements matching prev to be cur and returns
// true if it was updated. If prev is not a prefix of this Builder changes
// nothing and returns false. If either prev or cur is nil does nothing and
// returns false.
//
// Note: prev == cur below is pointer identity, so two distinct builders with
// equal elements are still eligible for replacement.
func (pb *Builder) UpdateParent(prev, cur *Builder) bool {
	if prev == cur || prev == nil || cur == nil || len(prev.Elements()) > len(pb.Elements()) {
		return false
	}

	// Verify prev's elements form a leading prefix of this builder's elements.
	parent := true

	for i, e := range prev.Elements() {
		if pb.elements[i] != e {
			parent = false
			break
		}
	}

	if !parent {
		return false
	}

	// Splice cur in place of the matched prefix.
	// NOTE(review): append grows from cur.Elements(); if Elements() returns a
	// view of cur's backing array with spare capacity this could alias and
	// mutate cur — confirm Elements() returns a copy.
	pb.elements = append(cur.Elements(), pb.elements[len(prev.Elements()):]...)

	return true
}
// ShortRef produces a truncated hash of the builder that
// acts as a unique identifier.
func (pb Builder) ShortRef() string {

View File

@ -274,24 +274,5 @@ func (rp dataLayerResourcePath) ToBuilder() *Builder {
}
func (rp *dataLayerResourcePath) UpdateParent(prev, cur Path) bool {
if prev == cur || len(prev.Elements()) > len(rp.Elements()) {
return false
}
parent := true
for i, e := range prev.Elements() {
if rp.elements[i] != e {
parent = false
break
}
}
if !parent {
return false
}
rp.elements = append(cur.Elements(), rp.elements[len(prev.Elements()):]...)
return true
return rp.Builder.UpdateParent(prev.ToBuilder(), cur.ToBuilder())
}

View File

@ -639,3 +639,37 @@ func (suite *PopulatedDataLayerResourcePath) TestUpdateParent() {
})
}
}
// TestUpdateParent_NoopsNils verifies that UpdateParent leaves the builder
// untouched and reports false whenever either the previous or the new prefix
// is nil.
func (suite *PopulatedDataLayerResourcePath) TestUpdateParent_NoopsNils() {
	var (
		prevPrefix = path.Builder{}.Append("hello", "world")
		nextPrefix = path.Builder{}.Append("hola", "mundo")
		// Appended to a fresh builder inside each subtest so cases can't
		// interfere with each other.
		suffix = []string{"bar", "baz"}
	)

	cases := []struct {
		name string
		prev *path.Builder
		cur  *path.Builder
	}{
		{name: "Nil Prev", cur: nextPrefix},
		{name: "Nil New", prev: prevPrefix},
	}

	for _, tc := range cases {
		suite.Run(tc.name, func() {
			t := suite.T()

			target := prevPrefix.Append(suffix...)
			want := target.String()

			assert.False(t, target.UpdateParent(tc.prev, tc.cur))
			assert.Equal(t, want, target.String())
		})
	}
}