Populate and persist OneDrive LocationRef (#3111)

Leverage structs from previous PRs to
populate and persist the LocationRef
for OneDrive

Right now the source of the
LocationRef is still based off the
RepoRef path, but this is in the
OneDrive code only

Manually tested
* updating a subfolder path when parent folder was moved
* populating LocationRef when the base backup didn't have LocationRefs when nothing was changed in OneDrive
* populating LocationRef when the base backup didn't have LocationRefs when something was changed in OneDrive

---

#### Does this PR need a docs update or release note?

- [x]  Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [ ]  No

#### Type of change

- [x] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [ ] 🧹 Tech Debt/Cleanup

#### Issue(s)

* closes #2486

#### Test Plan

- [x] 💪 Manual
- [x]  Unit test
- [ ] 💚 E2E
This commit is contained in:
ashmrtn 2023-04-14 14:31:16 -07:00 committed by GitHub
parent b0869fad4d
commit 1826b66e8d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 277 additions and 266 deletions

View File

@ -9,6 +9,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added
- Permissions backup for OneDrive is now out of experimental (By default, only newly backed up items will have their permissions backed up. You will have to run a full backup to ensure all items have their permissions backed up.)
- LocationRef is now populated for all services and data types. It should be used in place of RepoRef if a location for an item is required.
### Fixed
- Fixed permissions restore in latest backup version.

View File

@ -186,7 +186,7 @@ func (c *onedriveCollection) withFile(name string, fileData []byte, perm permDat
c.items = append(c.items, metadata)
c.aux = append(c.aux, metadata)
case version.OneDrive6NameInMeta:
case version.OneDrive6NameInMeta, version.OneDrive7LocationRef:
c.items = append(c.items, onedriveItemWithData(
c.t,
name+onedrive.DataFileSuffix,
@ -213,7 +213,7 @@ func (c *onedriveCollection) withFile(name string, fileData []byte, perm permDat
func (c *onedriveCollection) withFolder(name string, perm permData) *onedriveCollection {
switch c.backupVersion {
case 0, version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName,
version.OneDrive6NameInMeta:
version.OneDrive6NameInMeta, version.OneDrive7LocationRef:
return c
case version.OneDrive1DataAndMetaFiles, 2, version.OneDrive3IsMetaMarker:

View File

@ -97,6 +97,12 @@ type Collection struct {
// moved. It will be empty on its first retrieval.
prevPath path.Path
// locPath represents the human-readable location of this collection.
locPath *path.Builder
// prevLocPath represents the human-readable location of this collection in
// the previous backup.
prevLocPath *path.Builder
// Specifies whether the collection is new, moved/renamed, or deleted.
state data.CollectionState
@ -135,6 +141,19 @@ type itemMetaReaderFunc func(
item models.DriveItemable,
) (io.ReadCloser, int, error)
// pathToLocation converts a storage path into its human-readable location,
// expressed as a builder rooted at the drive root followed by the folder
// chain. A nil input yields a nil builder with no error.
func pathToLocation(p path.Path) (*path.Builder, error) {
	if p == nil {
		return nil, nil
	}

	odp, err := path.ToOneDrivePath(p)
	if err != nil {
		return nil, err
	}

	loc := path.Builder{}.Append(odp.Root)

	return loc.Append(odp.Folders...), nil
}
// NewCollection creates a Collection
func NewCollection(
itemClient *http.Client,
@ -147,11 +166,27 @@ func NewCollection(
ctrlOpts control.Options,
colScope collectionScope,
doNotMergeItems bool,
) *Collection {
) (*Collection, error) {
// TODO(ashmrtn): If OneDrive switches to using folder IDs then this will need
// to be changed as we won't be able to extract path information from the
// storage path. In that case, we'll need to start storing the location paths
// like we do the previous path.
locPath, err := pathToLocation(folderPath)
if err != nil {
return nil, clues.Wrap(err, "getting location").With("folder_path", folderPath.String())
}
prevLocPath, err := pathToLocation(prevPath)
if err != nil {
return nil, clues.Wrap(err, "getting previous location").With("prev_path", prevPath.String())
}
c := &Collection{
itemClient: itemClient,
folderPath: folderPath,
prevPath: prevPath,
locPath: locPath,
prevLocPath: prevLocPath,
driveItems: map[string]models.DriveItemable{},
driveID: driveID,
source: source,
@ -176,7 +211,7 @@ func NewCollection(
c.itemMetaReader = oneDriveItemMetaReader
}
return c
return c, nil
}
// Adds an itemID to the collection. This will make it eligible to be
@ -229,6 +264,28 @@ func (oc *Collection) SetFullPath(curPath path.Path) {
oc.state = data.StateOf(oc.prevPath, curPath)
}
// LocationPath returns the human-readable location of this collection.
// May be nil if the collection was constructed without a current path.
func (oc Collection) LocationPath() *path.Builder {
return oc.locPath
}
// PreviousLocationPath returns a LocationIDer describing where this
// collection lived in the previous backup, keyed by drive ID. Returns nil
// when there is no previous location.
func (oc Collection) PreviousLocationPath() details.LocationIDer {
	if oc.prevLocPath == nil {
		return nil
	}

	elems := oc.prevLocPath.Elements()

	if oc.source == OneDriveSource {
		return details.NewOneDriveLocationIDer(oc.driveID, elems...)
	}

	// Any non-OneDrive source is treated as SharePoint.
	return details.NewSharePointLocationIDer(oc.driveID, elems...)
}
// State reports the collection's change state (new, moved, deleted, etc.)
// relative to the previous backup.
func (oc Collection) State() data.CollectionState {
return oc.state
}

View File

@ -205,7 +205,7 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
driveFolderPath, err := path.GetDriveFolderPath(folderPath)
require.NoError(t, err, clues.ToCore(err))
coll := NewCollection(
coll, err := NewCollection(
graph.HTTPClient(graph.NoTimeout()),
folderPath,
nil,
@ -216,6 +216,7 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
control.Options{ToggleFeatures: control.Toggles{}},
CollectionScopeFolder,
true)
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, coll)
assert.Equal(t, folderPath, coll.FullPath())
@ -344,7 +345,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadError() {
folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source)
require.NoError(t, err, clues.ToCore(err))
coll := NewCollection(
coll, err := NewCollection(
graph.HTTPClient(graph.NoTimeout()),
folderPath,
nil,
@ -355,6 +356,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadError() {
control.Options{ToggleFeatures: control.Toggles{}},
CollectionScopeFolder,
true)
require.NoError(t, err, clues.ToCore(err))
mockItem := models.NewDriveItem()
mockItem.SetId(&testItemID)
@ -433,7 +435,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadUnauthorizedErrorRetry()
folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source)
require.NoError(t, err)
coll := NewCollection(
coll, err := NewCollection(
graph.HTTPClient(graph.NoTimeout()),
folderPath,
nil,
@ -444,6 +446,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadUnauthorizedErrorRetry()
control.Options{ToggleFeatures: control.Toggles{}},
CollectionScopeFolder,
true)
require.NoError(t, err, clues.ToCore(err))
mockItem := models.NewDriveItem()
mockItem.SetId(&testItemID)
@ -532,7 +535,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionPermissionBackupLatestModTim
folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source)
require.NoError(t, err, clues.ToCore(err))
coll := NewCollection(
coll, err := NewCollection(
graph.HTTPClient(graph.NoTimeout()),
folderPath,
nil,
@ -543,6 +546,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionPermissionBackupLatestModTim
control.Options{ToggleFeatures: control.Toggles{}},
CollectionScopeFolder,
true)
require.NoError(t, err, clues.ToCore(err))
mtime := time.Now().AddDate(0, -1, 0)
mockItem := models.NewDriveItem()

View File

@ -416,7 +416,7 @@ func (c *Collections) Get(
return nil, map[string]map[string]struct{}{}, err
}
col := NewCollection(
col, err := NewCollection(
c.itemClient,
nil,
prevPath,
@ -427,6 +427,9 @@ func (c *Collections) Get(
c.ctrl,
CollectionScopeUnknown,
true)
if err != nil {
return nil, map[string]map[string]struct{}{}, clues.Wrap(err, "making collection").WithClues(ictx)
}
c.CollectionMap[driveID][fldID] = col
}
@ -578,7 +581,7 @@ func (c *Collections) handleDelete(
return nil
}
col := NewCollection(
col, err := NewCollection(
c.itemClient,
nil,
prevPath,
@ -590,6 +593,12 @@ func (c *Collections) handleDelete(
CollectionScopeUnknown,
// DoNotMerge is not checked for deleted items.
false)
if err != nil {
return clues.Wrap(err, "making collection").With(
"drive_id", driveID,
"item_id", itemID,
"path_string", prevPathStr)
}
c.CollectionMap[driveID][itemID] = col
@ -765,7 +774,7 @@ func (c *Collections) UpdateCollections(
colScope = CollectionScopePackage
}
col := NewCollection(
col, err := NewCollection(
c.itemClient,
collectionPath,
prevPath,
@ -777,6 +786,10 @@ func (c *Collections) UpdateCollections(
colScope,
invalidPrevDelta,
)
if err != nil {
return clues.Stack(err).WithClues(ictx)
}
col.driveName = driveName
c.CollectionMap[driveID][itemID] = col

View File

@ -24,9 +24,7 @@ func TestRestoreUnitSuite(t *testing.T) {
func (suite *RestoreUnitSuite) TestAugmentRestorePaths() {
// Adding a simple test here so that we can be sure that this
// function gets updated whenever we add a new version.
if version.Backup > version.OneDrive6NameInMeta {
require.Less(suite.T(), version.OneDrive6NameInMeta+1, version.Backup, "unsupported backup version")
}
require.LessOrEqual(suite.T(), version.Backup, version.OneDrive7LocationRef, "unsupported backup version")
table := []struct {
name string

View File

@ -21,7 +21,7 @@ type DetailsMergeInfoer interface {
GetNewPathRefs(
oldRef *path.Builder,
oldLoc details.LocationIDer,
) (path.Path, *path.Builder, *path.Builder)
) (path.Path, *path.Builder, error)
}
type prevRef struct {
@ -68,36 +68,37 @@ func (m *mergeDetails) addRepoRef(
func (m *mergeDetails) GetNewPathRefs(
oldRef *path.Builder,
oldLoc details.LocationIDer,
) (path.Path, *path.Builder, *path.Builder) {
) (path.Path, *path.Builder, error) {
pr, ok := m.repoRefs[oldRef.ShortRef()]
if !ok {
return nil, nil, nil
}
// This was a location specified directly by a collection. Say the prefix is
// the whole oldLoc so other code will replace everything.
//
// TODO(ashmrtn): Should be able to remove the nil check later as we'll be
// able to ensure that old locations actually exist in backup details.
if oldLoc == nil {
return pr.repoRef, nil, pr.locRef
} else if pr.locRef != nil {
return pr.repoRef, oldLoc.InDetails(), pr.locRef
// This was a location specified directly by a collection.
if pr.locRef != nil {
return pr.repoRef, pr.locRef, nil
} else if oldLoc == nil || oldLoc.ID() == nil || len(oldLoc.ID().Elements()) == 0 {
return nil, nil, clues.New("empty location key")
}
// This is a location that we need to do prefix matching on because we didn't
// see the new location of it in a collection. For example, it's a subfolder
// whose parent folder was moved.
prefixes := m.locations.longestPrefix(oldLoc.ID())
prefixes := m.locations.longestPrefix(oldLoc)
newLoc := oldLoc.InDetails()
return pr.repoRef, prefixes.oldLoc, prefixes.newLoc
// Noop if prefix or newPrefix are nil. Them being nil means that the
// LocationRef hasn't changed.
newLoc.UpdateParent(prefixes.oldLoc, prefixes.newLoc)
return pr.repoRef, newLoc, nil
}
// addLocation registers the mapping from oldRef to newLoc in the prefix
// matcher so that descendant locations can later be relocated via prefix
// matching. (Span held both old and new call lines; kept the post-change
// form taking a LocationIDer.)
func (m *mergeDetails) addLocation(
	oldRef details.LocationIDer,
	newLoc *path.Builder,
) error {
	return m.locations.add(oldRef, newLoc)
}
func newMergeDetails() *mergeDetails {
@ -116,20 +117,25 @@ type locationPrefixMatcher struct {
m prefixmatcher.Matcher[locRefs]
}
func (m *locationPrefixMatcher) add(oldRef, newLoc *path.Builder) error {
key := oldRef.String()
func (m *locationPrefixMatcher) add(
oldRef details.LocationIDer,
newLoc *path.Builder,
) error {
key := oldRef.ID().String()
if _, ok := m.m.Get(key); ok {
return clues.New("RepoRef already in matcher").With("repo_ref", oldRef)
}
m.m.Add(key, locRefs{oldLoc: oldRef, newLoc: newLoc})
m.m.Add(key, locRefs{oldLoc: oldRef.InDetails(), newLoc: newLoc})
return nil
}
func (m *locationPrefixMatcher) longestPrefix(oldRef *path.Builder) locRefs {
_, v, _ := m.m.LongestPrefix(oldRef.String())
func (m *locationPrefixMatcher) longestPrefix(
oldRef details.LocationIDer,
) locRefs {
_, v, _ := m.m.LongestPrefix(oldRef.ID().String())
return v
}

View File

@ -97,10 +97,11 @@ func (suite *DetailsMergeInfoerUnitSuite) TestGetNewPathRefs() {
testUser,
category,
"folder3",
"folder4",
"folder2",
},
false)
newLoc1 := path.Builder{}.Append(newRef1.Folders()...)
newLoc2 := path.Builder{}.Append(newRef2.Folders()...)
oldLoc1 := path.Builder{}.Append(oldRef1.Folders()...)
oldLoc2 := path.Builder{}.Append(oldRef2.Folders()...)
@ -120,69 +121,62 @@ func (suite *DetailsMergeInfoerUnitSuite) TestGetNewPathRefs() {
require.NoError(t, err, clues.ToCore(err))
table := []struct {
name string
searchRef *path.Builder
searchLoc mockLocationIDer
expectedRef path.Path
prefixFound bool
expectedOldPrefix *path.Builder
name string
searchRef *path.Builder
searchLoc mockLocationIDer
errCheck require.ErrorAssertionFunc
expectedRef path.Path
expectedLoc *path.Builder
}{
{
name: "Exact Match With Loc",
searchRef: oldRef1.ToBuilder(),
searchLoc: searchLoc1,
expectedRef: newRef1,
prefixFound: true,
expectedOldPrefix: oldLoc1,
},
{
name: "Exact Match Without Loc",
searchRef: oldRef1.ToBuilder(),
expectedRef: newRef1,
prefixFound: true,
expectedOldPrefix: nil,
},
{
name: "Prefix Match",
searchRef: oldRef2.ToBuilder(),
searchLoc: searchLoc2,
expectedRef: newRef2,
prefixFound: true,
expectedOldPrefix: oldLoc1,
},
{
name: "Not Found",
searchRef: newRef1.ToBuilder(),
expectedRef: nil,
},
{
name: "Not Found With Loc",
searchRef: newRef1.ToBuilder(),
name: "Exact Match With Loc",
searchRef: oldRef1.ToBuilder(),
searchLoc: searchLoc1,
expectedRef: nil,
errCheck: require.NoError,
expectedRef: newRef1,
expectedLoc: newLoc1,
},
{
name: "Ref Found Loc Not",
name: "Exact Match Without Loc",
searchRef: oldRef1.ToBuilder(),
errCheck: require.NoError,
expectedRef: newRef1,
expectedLoc: newLoc1,
},
{
name: "Prefix Match",
searchRef: oldRef2.ToBuilder(),
searchLoc: mockLocationIDer{path.Builder{}.Append("foo")},
searchLoc: searchLoc2,
errCheck: require.NoError,
expectedRef: newRef2,
expectedLoc: newLoc2,
},
{
name: "Would Be Prefix Match Without Old Loc Errors",
searchRef: oldRef2.ToBuilder(),
errCheck: require.Error,
},
{
name: "Not Found With Old Loc",
searchRef: newRef1.ToBuilder(),
searchLoc: searchLoc2,
errCheck: require.NoError,
},
{
name: "Not Found Without Old Loc",
searchRef: newRef1.ToBuilder(),
errCheck: require.NoError,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
newRef, oldPrefix, newPrefix := dm.GetNewPathRefs(test.searchRef, test.searchLoc)
newRef, newLoc, err := dm.GetNewPathRefs(test.searchRef, test.searchLoc)
test.errCheck(t, err, clues.ToCore(err))
assert.Equal(t, test.expectedRef, newRef, "RepoRef")
if !test.prefixFound {
assert.Nil(t, oldPrefix)
assert.Nil(t, newPrefix)
return
}
assert.Equal(t, test.expectedOldPrefix, oldPrefix, "old prefix")
assert.Equal(t, newLoc1, newPrefix, "new prefix")
assert.Equal(t, test.expectedLoc, newLoc, "LocationRef")
})
}
}
@ -197,7 +191,7 @@ func TestLocationPrefixMatcherUnitSuite(t *testing.T) {
func (suite *LocationPrefixMatcherUnitSuite) TestAdd_Twice_Fails() {
t := suite.T()
p := makePath(
p := mockLocationIDer{makePath(
t,
[]string{
testTenant,
@ -206,7 +200,7 @@ func (suite *LocationPrefixMatcherUnitSuite) TestAdd_Twice_Fails() {
category,
"folder1",
},
false).ToBuilder()
false).ToBuilder()}
loc1 := path.Builder{}.Append("folder1")
loc2 := path.Builder{}.Append("folder2")
@ -220,20 +214,20 @@ func (suite *LocationPrefixMatcherUnitSuite) TestAdd_Twice_Fails() {
}
func (suite *LocationPrefixMatcherUnitSuite) TestAdd_And_Match() {
loc1 := path.Builder{}.Append("folder1")
loc2 := loc1.Append("folder2")
loc3 := path.Builder{}.Append("foo")
loc1 := mockLocationIDer{path.Builder{}.Append("folder1")}
loc2 := mockLocationIDer{loc1.InDetails().Append("folder2")}
loc3 := mockLocationIDer{path.Builder{}.Append("foo")}
res1 := path.Builder{}.Append("1")
res1 := mockLocationIDer{path.Builder{}.Append("1")}
lpm := newLocationPrefixMatcher()
err := lpm.add(loc1, res1)
err := lpm.add(loc1, res1.InDetails())
require.NoError(suite.T(), err, clues.ToCore(err))
table := []struct {
name string
searchKey *path.Builder
searchKey mockLocationIDer
found bool
}{
{
@ -265,8 +259,8 @@ func (suite *LocationPrefixMatcherUnitSuite) TestAdd_And_Match() {
return
}
assert.Equal(t, loc1, prefixes.oldLoc, "old prefix")
assert.Equal(t, res1, prefixes.newLoc, "new prefix")
assert.Equal(t, loc1.InDetails(), prefixes.oldLoc, "old prefix")
assert.Equal(t, res1.InDetails(), prefixes.newLoc, "new prefix")
})
}
}

View File

@ -524,7 +524,8 @@ func matchesReason(reasons []kopia.Reason, p path.Path) bool {
func getNewPathRefs(
dataFromBackup kopia.DetailsMergeInfoer,
entry *details.DetailsEntry,
repoRef *path.Builder,
repoRef path.Path,
backupVersion int,
) (path.Path, *path.Builder, bool, error) {
// Right now we can't guarantee that we have an old location in the
// previous details entry so first try a lookup without a location to see
@ -533,18 +534,52 @@ func getNewPathRefs(
// TODO(ashmrtn): In the future we can remove this first check as we'll be
// able to assume we always have the location in the previous entry. We'll end
// up doing some extra parsing, but it will simplify this code.
newPath, _, newLocPrefix := dataFromBackup.GetNewPathRefs(repoRef, nil)
if newPath == nil {
// This entry doesn't need merging.
if repoRef.Service() == path.ExchangeService {
newPath, newLoc, err := dataFromBackup.GetNewPathRefs(repoRef.ToBuilder(), nil)
if err != nil {
return nil, nil, false, clues.Wrap(err, "getting new paths")
} else if newPath == nil {
// This entry doesn't need merging.
return nil, nil, false, nil
} else if newLoc == nil {
return nil, nil, false, clues.New("unable to find new exchange location")
}
// This is kind of jank cause we're in a transitionary period, but even if
// we're consesrvative here about marking something as updated the RepoRef
// comparison in the caller should catch the change. Calendars is the only
// exception, since it uses IDs for folders, but we should already be
// populating the LocationRef for them.
//
// Without this, all OneDrive items will be marked as updated the first time
// around because OneDrive hasn't been persisting LocationRef before now.
updated := len(entry.LocationRef) > 0 && newLoc.String() != entry.LocationRef
return newPath, newLoc, updated, nil
}
// We didn't have an exact entry, so retry with a location.
locRef, err := entry.ToLocationIDer(backupVersion)
if err != nil {
return nil, nil, false, clues.Wrap(err, "getting previous item location")
}
if locRef == nil {
return nil, nil, false, clues.New("entry with empty LocationRef")
}
newPath, newLoc, err := dataFromBackup.GetNewPathRefs(repoRef.ToBuilder(), locRef)
if err != nil {
return nil, nil, false, clues.Wrap(err, "getting new paths with old location")
} else if newPath == nil {
return nil, nil, false, nil
} else if newLoc == nil {
return nil, nil, false, clues.New("unable to get new paths")
}
// OneDrive doesn't return prefixes yet.
if newLocPrefix == nil {
newLocPrefix = &path.Builder{}
}
updated := len(entry.LocationRef) > 0 && newLoc.String() != entry.LocationRef
return newPath, newLocPrefix, newLocPrefix.String() != entry.LocationRef, nil
return newPath, newLoc, updated, nil
}
func mergeDetails(
@ -582,7 +617,7 @@ func mergeDetails(
mctx = clues.Add(mctx, "base_manifest_backup_id", bID)
_, baseDeets, err := getBackupAndDetailsFromID(
baseBackup, baseDeets, err := getBackupAndDetailsFromID(
mctx,
model.StableID(bID),
ms,
@ -615,7 +650,8 @@ func mergeDetails(
newPath, newLoc, locUpdated, err := getNewPathRefs(
dataFromBackup,
entry,
rr.ToBuilder())
rr,
baseBackup.Version)
if err != nil {
return clues.Wrap(err, "getting updated info for entry").WithClues(mctx)
}
@ -627,9 +663,7 @@ func mergeDetails(
// Fixup paths in the item.
item := entry.ItemInfo
if err := details.UpdateItem(&item, newPath, newLoc); err != nil {
return clues.Wrap(err, "updating merged item info").WithClues(mctx)
}
details.UpdateItem(&item, newLoc)
// TODO(ashmrtn): This may need updated if we start using this merge
// strategry for items that were cached in kopia.

View File

@ -183,36 +183,25 @@ func (mbs mockBackupStorer) Update(context.Context, model.Schema, model.Model) e
// ----- model store for backups
// mockDetailsMergeInfoer is a test double for kopia.DetailsMergeInfoer that
// serves pre-registered RepoRef and LocationRef answers.
// (Span also contained the removed locPair struct and the stale locs field
// from stripped diff markers; kept the post-change definition only.)
type mockDetailsMergeInfoer struct {
	// repoRefs maps an old RepoRef ShortRef to its new path.
	repoRefs map[string]path.Path
	// locs maps an old RepoRef ShortRef to its new location.
	locs map[string]*path.Builder
}
func (m *mockDetailsMergeInfoer) add(oldRef, newRef path.Path, oldPrefix, newLoc *path.Builder) {
func (m *mockDetailsMergeInfoer) add(oldRef, newRef path.Path, newLoc *path.Builder) {
oldPB := oldRef.ToBuilder()
// Items are indexed individually.
m.repoRefs[oldPB.ShortRef()] = newRef
if newLoc != nil {
// Locations are indexed by directory.
m.locs[oldPB.ShortRef()] = locPair{
old: oldPrefix,
newL: newLoc,
}
}
// Locations are indexed by directory.
m.locs[oldPB.ShortRef()] = newLoc
}
func (m *mockDetailsMergeInfoer) GetNewPathRefs(
oldRef *path.Builder,
oldLoc details.LocationIDer,
) (path.Path, *path.Builder, *path.Builder) {
locs := m.locs[oldRef.ShortRef()]
return m.repoRefs[oldRef.ShortRef()], locs.old, locs.newL
_ details.LocationIDer,
) (path.Path, *path.Builder, error) {
return m.repoRefs[oldRef.ShortRef()], m.locs[oldRef.ShortRef()], nil
}
func (m *mockDetailsMergeInfoer) ItemsToMerge() int {
@ -226,7 +215,7 @@ func (m *mockDetailsMergeInfoer) ItemsToMerge() int {
func newMockDetailsMergeInfoer() *mockDetailsMergeInfoer {
return &mockDetailsMergeInfoer{
repoRefs: map[string]path.Path{},
locs: map[string]locPair{},
locs: map[string]*path.Builder{},
}
}
@ -351,13 +340,13 @@ func makeDetailsEntry(
}
case path.OneDriveService:
parent, err := path.GetDriveFolderPath(p)
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, l)
res.OneDrive = &details.OneDriveInfo{
ItemType: details.OneDriveItem,
ParentPath: parent,
ParentPath: l.PopFront().String(),
Size: int64(size),
DriveID: "drive-id",
}
default:
@ -744,7 +733,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
name: "BackupIDNotFound",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(itemPath1, itemPath1, locationPath1, locationPath1)
res.add(itemPath1, itemPath1, locationPath1)
return res
}(),
@ -762,7 +751,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
name: "DetailsIDNotFound",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(itemPath1, itemPath1, locationPath1, locationPath1)
res.add(itemPath1, itemPath1, locationPath1)
return res
}(),
@ -788,8 +777,8 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
name: "BaseMissingItems",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(itemPath1, itemPath1, locationPath1, locationPath1)
res.add(itemPath2, itemPath2, locationPath2, locationPath2)
res.add(itemPath1, itemPath1, locationPath1)
res.add(itemPath2, itemPath2, locationPath2)
return res
}(),
@ -819,7 +808,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
name: "TooManyItems",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(itemPath1, itemPath1, locationPath1, locationPath1)
res.add(itemPath1, itemPath1, locationPath1)
return res
}(),
@ -855,7 +844,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
name: "BadBaseRepoRef",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(itemPath1, itemPath2, locationPath1, locationPath2)
res.add(itemPath1, itemPath2, locationPath2)
return res
}(),
@ -917,7 +906,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
true,
)
res.add(itemPath1, p, locationPath1, nil)
res.add(itemPath1, p, nil)
return res
}(),
@ -947,7 +936,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
name: "ItemMerged",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(itemPath1, itemPath1, locationPath1, locationPath1)
res.add(itemPath1, itemPath1, locationPath1)
return res
}(),
@ -980,7 +969,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
name: "ItemMergedSameLocation",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(itemPath1, itemPath1, locationPath1, locationPath1)
res.add(itemPath1, itemPath1, locationPath1)
return res
}(),
@ -1013,7 +1002,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
name: "ItemMergedExtraItemsInBase",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(itemPath1, itemPath1, locationPath1, locationPath1)
res.add(itemPath1, itemPath1, locationPath1)
return res
}(),
@ -1047,7 +1036,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
name: "ItemMoved",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(itemPath1, itemPath2, locationPath1, locationPath2)
res.add(itemPath1, itemPath2, locationPath2)
return res
}(),
@ -1080,8 +1069,8 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
name: "MultipleBases",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(itemPath1, itemPath1, locationPath1, locationPath1)
res.add(itemPath3, itemPath3, locationPath3, locationPath3)
res.add(itemPath1, itemPath1, locationPath1)
res.add(itemPath3, itemPath3, locationPath3)
return res
}(),
@ -1132,7 +1121,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
name: "SomeBasesIncomplete",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(itemPath1, itemPath1, locationPath1, locationPath1)
res.add(itemPath1, itemPath1, locationPath1)
return res
}(),
@ -1263,7 +1252,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsFolde
)
mdm := newMockDetailsMergeInfoer()
mdm.add(itemPath1, itemPath1, locPath1, locPath1)
mdm.add(itemPath1, itemPath1, locPath1)
itemDetails := makeDetailsEntry(t, itemPath1, locPath1, itemSize, false)
// itemDetails.Exchange.Modified = now

View File

@ -1,6 +1,6 @@
package version
// Backup is the current backup format version. (Both the pre- and
// post-change declarations were present from stripped diff markers — a
// redeclaration; kept the post-change value.)
const Backup = 7
// Various labels to refer to important version changes.
// Labels don't need 1:1 service:version representation. Add a new
@ -38,5 +38,5 @@ const (
// OneDriveXLocationRef provides LocationRef information for Exchange,
// OneDrive, and SharePoint libraries.
OneDriveXLocationRef = Backup + 1
OneDrive7LocationRef = 7
)

View File

@ -457,7 +457,7 @@ func (de DetailsEntry) ToLocationIDer(backupVersion int) (LocationIDer, error) {
return de.ItemInfo.uniqueLocation(baseLoc)
}
if backupVersion >= version.OneDriveXLocationRef ||
if backupVersion >= version.OneDrive7LocationRef ||
(de.ItemInfo.infoType() != OneDriveItem &&
de.ItemInfo.infoType() != SharePointLibrary) {
return nil, clues.New("no previous location for entry")
@ -578,10 +578,10 @@ const (
FolderItem ItemType = 306
)
func UpdateItem(item *ItemInfo, repoPath path.Path, locPath *path.Builder) error {
func UpdateItem(item *ItemInfo, newLocPath *path.Builder) {
// Only OneDrive and SharePoint have information about parent folders
// contained in them.
var updatePath func(repo path.Path, location *path.Builder) error
var updatePath func(newLocPath *path.Builder)
switch item.infoType() {
case ExchangeContact, ExchangeEvent, ExchangeMail:
@ -591,10 +591,10 @@ func UpdateItem(item *ItemInfo, repoPath path.Path, locPath *path.Builder) error
case OneDriveItem:
updatePath = item.OneDrive.UpdateParentPath
default:
return nil
return
}
return updatePath(repoPath, locPath)
updatePath(newLocPath)
}
// ItemInfo is a oneOf that contains service specific
@ -758,9 +758,8 @@ func (i ExchangeInfo) Values() []string {
return []string{}
}
func (i *ExchangeInfo) UpdateParentPath(_ path.Path, locPath *path.Builder) error {
i.ParentPath = locPath.String()
return nil
func (i *ExchangeInfo) UpdateParentPath(newLocPath *path.Builder) {
i.ParentPath = newLocPath.String()
}
func (i *ExchangeInfo) uniqueLocation(baseLoc *path.Builder) (LocationIDer, error) {
@ -812,15 +811,8 @@ func (i SharePointInfo) Values() []string {
}
}
func (i *SharePointInfo) UpdateParentPath(newPath path.Path, _ *path.Builder) error {
newParent, err := path.GetDriveFolderPath(newPath)
if err != nil {
return clues.Wrap(err, "making sharePoint path").With("path", newPath)
}
i.ParentPath = newParent
return nil
func (i *SharePointInfo) UpdateParentPath(newLocPath *path.Builder) {
i.ParentPath = newLocPath.PopFront().String()
}
func (i *SharePointInfo) uniqueLocation(baseLoc *path.Builder) (LocationIDer, error) {
@ -864,15 +856,8 @@ func (i OneDriveInfo) Values() []string {
}
}
func (i *OneDriveInfo) UpdateParentPath(newPath path.Path, _ *path.Builder) error {
newParent, err := path.GetDriveFolderPath(newPath)
if err != nil {
return clues.Wrap(err, "making oneDrive path").With("path", newPath)
}
i.ParentPath = newParent
return nil
func (i *OneDriveInfo) UpdateParentPath(newLocPath *path.Builder) {
i.ParentPath = newLocPath.PopFront().String()
}
func (i *OneDriveInfo) uniqueLocation(baseLoc *path.Builder) (LocationIDer, error) {

View File

@ -857,47 +857,17 @@ func makeItemPath(
func (suite *DetailsUnitSuite) TestUpdateItem() {
const (
tenant = "a-tenant"
resourceOwner = "a-user"
driveID = "abcd"
folder1 = "f1"
folder2 = "f2"
folder3 = "f3"
item = "hello.txt"
folder1 = "f1"
folder2 = "f2"
)
// Making both OneDrive paths is alright because right now they're the same as
// SharePoint path and there's no extra validation.
newOneDrivePath := makeItemPath(
suite.T(),
path.OneDriveService,
path.FilesCategory,
tenant,
resourceOwner,
[]string{
"drives",
driveID,
"root:",
folder2,
item,
},
)
newExchangePB := path.Builder{}.Append(folder3)
badOneDrivePath := makeItemPath(
suite.T(),
path.OneDriveService,
path.FilesCategory,
tenant,
resourceOwner,
[]string{item},
)
newExchangePB := path.Builder{}.Append(folder2)
newOneDrivePB := path.Builder{}.Append("root:", folder2)
table := []struct {
name string
input ItemInfo
repoPath path.Path
locPath *path.Builder
errCheck assert.ErrorAssertionFunc
expectedItem ItemInfo
}{
{
@ -908,13 +878,11 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
ParentPath: folder1,
},
},
repoPath: newOneDrivePath,
locPath: newExchangePB,
errCheck: assert.NoError,
locPath: newExchangePB,
expectedItem: ItemInfo{
Exchange: &ExchangeInfo{
ItemType: ExchangeEvent,
ParentPath: folder3,
ParentPath: folder2,
},
},
},
@ -926,13 +894,11 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
ParentPath: folder1,
},
},
repoPath: newOneDrivePath,
locPath: newExchangePB,
errCheck: assert.NoError,
locPath: newExchangePB,
expectedItem: ItemInfo{
Exchange: &ExchangeInfo{
ItemType: ExchangeContact,
ParentPath: folder3,
ParentPath: folder2,
},
},
},
@ -944,13 +910,11 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
ParentPath: folder1,
},
},
repoPath: newOneDrivePath,
locPath: newExchangePB,
errCheck: assert.NoError,
locPath: newExchangePB,
expectedItem: ItemInfo{
Exchange: &ExchangeInfo{
ItemType: ExchangeMail,
ParentPath: folder3,
ParentPath: folder2,
},
},
},
@ -962,9 +926,7 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
ParentPath: folder1,
},
},
repoPath: newOneDrivePath,
locPath: newExchangePB,
errCheck: assert.NoError,
locPath: newOneDrivePB,
expectedItem: ItemInfo{
OneDrive: &OneDriveInfo{
ItemType: OneDriveItem,
@ -980,9 +942,7 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
ParentPath: folder1,
},
},
repoPath: newOneDrivePath,
locPath: newExchangePB,
errCheck: assert.NoError,
locPath: newOneDrivePB,
expectedItem: ItemInfo{
SharePoint: &SharePointInfo{
ItemType: SharePointLibrary,
@ -990,44 +950,14 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
},
},
},
{
name: "OneDriveBadPath",
input: ItemInfo{
OneDrive: &OneDriveInfo{
ItemType: OneDriveItem,
ParentPath: folder1,
},
},
repoPath: badOneDrivePath,
locPath: newExchangePB,
errCheck: assert.Error,
},
{
name: "SharePointBadPath",
input: ItemInfo{
SharePoint: &SharePointInfo{
ItemType: SharePointLibrary,
ParentPath: folder1,
},
},
repoPath: badOneDrivePath,
locPath: newExchangePB,
errCheck: assert.Error,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
item := test.input
err := UpdateItem(&item, test.repoPath, test.locPath)
test.errCheck(t, err, clues.ToCore(err))
if err != nil {
return
}
UpdateItem(&item, test.locPath)
assert.Equal(t, test.expectedItem, item)
})
}
@ -1255,7 +1185,7 @@ func (suite *DetailsUnitSuite) TestLocationIDer_FromEntry() {
DriveID: driveID,
},
},
backupVersion: version.OneDriveXLocationRef - 1,
backupVersion: version.OneDrive7LocationRef - 1,
expectedErr: require.NoError,
expectedUniqueLoc: fmt.Sprintf(expectedUniqueLocFmt, path.FilesCategory),
},
@ -1269,7 +1199,7 @@ func (suite *DetailsUnitSuite) TestLocationIDer_FromEntry() {
DriveID: driveID,
},
},
backupVersion: version.OneDriveXLocationRef,
backupVersion: version.OneDrive7LocationRef,
hasLocRef: true,
expectedErr: require.NoError,
expectedUniqueLoc: fmt.Sprintf(expectedUniqueLocFmt, path.FilesCategory),
@ -1284,7 +1214,7 @@ func (suite *DetailsUnitSuite) TestLocationIDer_FromEntry() {
DriveID: driveID,
},
},
backupVersion: version.OneDriveXLocationRef,
backupVersion: version.OneDrive7LocationRef,
expectedErr: require.Error,
},
{
@ -1297,7 +1227,7 @@ func (suite *DetailsUnitSuite) TestLocationIDer_FromEntry() {
DriveID: driveID,
},
},
backupVersion: version.OneDriveXLocationRef - 1,
backupVersion: version.OneDrive7LocationRef - 1,
expectedErr: require.NoError,
expectedUniqueLoc: fmt.Sprintf(expectedUniqueLocFmt, path.LibrariesCategory),
},
@ -1311,7 +1241,7 @@ func (suite *DetailsUnitSuite) TestLocationIDer_FromEntry() {
DriveID: driveID,
},
},
backupVersion: version.OneDriveXLocationRef,
backupVersion: version.OneDrive7LocationRef,
hasLocRef: true,
expectedErr: require.NoError,
expectedUniqueLoc: fmt.Sprintf(expectedUniqueLocFmt, path.LibrariesCategory),
@ -1326,7 +1256,7 @@ func (suite *DetailsUnitSuite) TestLocationIDer_FromEntry() {
DriveID: driveID,
},
},
backupVersion: version.OneDriveXLocationRef,
backupVersion: version.OneDrive7LocationRef,
expectedErr: require.Error,
},
{
@ -1338,7 +1268,7 @@ func (suite *DetailsUnitSuite) TestLocationIDer_FromEntry() {
ItemType: ExchangeMail,
},
},
backupVersion: version.OneDriveXLocationRef - 1,
backupVersion: version.OneDrive7LocationRef - 1,
hasLocRef: true,
expectedErr: require.NoError,
expectedUniqueLoc: fmt.Sprintf(expectedExchangeUniqueLocFmt, path.EmailCategory),
@ -1352,7 +1282,7 @@ func (suite *DetailsUnitSuite) TestLocationIDer_FromEntry() {
ItemType: ExchangeMail,
},
},
backupVersion: version.OneDriveXLocationRef,
backupVersion: version.OneDrive7LocationRef,
hasLocRef: true,
expectedErr: require.NoError,
expectedUniqueLoc: fmt.Sprintf(expectedExchangeUniqueLocFmt, path.EmailCategory),
@ -1366,7 +1296,7 @@ func (suite *DetailsUnitSuite) TestLocationIDer_FromEntry() {
ItemType: ExchangeMail,
},
},
backupVersion: version.OneDriveXLocationRef - 1,
backupVersion: version.OneDrive7LocationRef - 1,
expectedErr: require.Error,
},
{
@ -1378,7 +1308,7 @@ func (suite *DetailsUnitSuite) TestLocationIDer_FromEntry() {
ItemType: ExchangeMail,
},
},
backupVersion: version.OneDriveXLocationRef,
backupVersion: version.OneDrive7LocationRef,
expectedErr: require.Error,
},
}