Populate and persist OneDrive LocationRef (#3111)

Leverages structs from previous PRs to populate and persist the LocationRef for OneDrive. For now the source of the LocationRef is still derived from the RepoRef path, but only in the OneDrive code.

Manually tested:
* updating a subfolder path when the parent folder was moved
* populating LocationRef when the base backup didn't have LocationRefs and nothing was changed in OneDrive
* populating LocationRef when the base backup didn't have LocationRefs and something was changed in OneDrive

---

#### Does this PR need a docs update or release note?
- [x] ✅ Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [ ] ⛔ No

#### Type of change
- [x] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [ ] 🧹 Tech Debt/Cleanup

#### Issue(s)
* closes #2486

#### Test Plan
- [x] 💪 Manual
- [x] ⚡ Unit test
- [ ] 💚 E2E
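For readers outside this PR: RepoRef is the storage path Corso keys items by, while LocationRef is the human-readable folder chain shown to users. A standalone sketch of the distinction for a OneDrive item — illustrative string handling only, not the repo's actual `path` package:

```go
package main

import (
	"fmt"
	"strings"
)

// Illustrative only: segment layout assumed from the diff's test paths
// ("drive/driveID1/root:/folderPath" style); the real path package has
// stricter parsing and escaping.
func locationFromRepoRef(repoRef string) string {
	// RepoRef: tenant/onedrive/user/files/drives/<driveID>/root:/<folders...>/<item>
	parts := strings.Split(repoRef, "/")
	for i, p := range parts {
		if p == "root:" {
			// LocationRef keeps the drive root marker plus the visible folders.
			return strings.Join(parts[i:len(parts)-1], "/")
		}
	}
	return ""
}

func main() {
	rr := "a-tenant/onedrive/a-user/files/drives/driveID1/root:/folderPath/file.txt"
	fmt.Println(locationFromRepoRef(rr)) // root:/folderPath
}
```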
This commit is contained in:
parent
b0869fad4d
commit
1826b66e8d
```diff
@@ -9,6 +9,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ### Added
 - Permissions backup for OneDrive is now out of experimental (By default, only newly backed up items will have their permissions backed up. You will have to run a full backup to ensure all items have their permissions backed up.)
+- LocationRef is now populated for all services and data types. It should be used in place of RepoRef if a location for an item is required.
 
 ### Fixed
 - Fixed permissions restore in latest backup version.
```
```diff
@@ -186,7 +186,7 @@ func (c *onedriveCollection) withFile(name string, fileData []byte, perm permDat
 		c.items = append(c.items, metadata)
 		c.aux = append(c.aux, metadata)
 
-	case version.OneDrive6NameInMeta:
+	case version.OneDrive6NameInMeta, version.OneDrive7LocationRef:
 		c.items = append(c.items, onedriveItemWithData(
 			c.t,
 			name+onedrive.DataFileSuffix,
```
```diff
@@ -213,7 +213,7 @@ func (c *onedriveCollection) withFile(name string, fileData []byte, perm permDat
 func (c *onedriveCollection) withFolder(name string, perm permData) *onedriveCollection {
 	switch c.backupVersion {
 	case 0, version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName,
-		version.OneDrive6NameInMeta:
+		version.OneDrive6NameInMeta, version.OneDrive7LocationRef:
 		return c
 
 	case version.OneDrive1DataAndMetaFiles, 2, version.OneDrive3IsMetaMarker:
```
```diff
@@ -97,6 +97,12 @@ type Collection struct {
 	// moved. It will be empty on its first retrieval.
 	prevPath path.Path
 
+	// locPath represents the human-readable location of this collection.
+	locPath *path.Builder
+	// prevLocPath represents the human-readable location of this collection in
+	// the previous backup.
+	prevLocPath *path.Builder
+
 	// Specifies if it new, moved/rename or deleted
 	state data.CollectionState
 
```
```diff
@@ -135,6 +141,19 @@ type itemMetaReaderFunc func(
 	item models.DriveItemable,
 ) (io.ReadCloser, int, error)
 
+func pathToLocation(p path.Path) (*path.Builder, error) {
+	if p == nil {
+		return nil, nil
+	}
+
+	odp, err := path.ToOneDrivePath(p)
+	if err != nil {
+		return nil, err
+	}
+
+	return path.Builder{}.Append(odp.Root).Append(odp.Folders...), nil
+}
+
 // NewCollection creates a Collection
 func NewCollection(
 	itemClient *http.Client,
```
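A standalone sketch of what the new `pathToLocation` helper yields: the drive root marker followed by the visible folders, which becomes the collection's LocationRef. Only the `Root` and `Folders` fields are taken from the hunk above; everything else here is illustrative:

```go
package main

import "fmt"

// onedrivePath mirrors the two fields the hunk relies on: the drive root
// marker and the folder chain beneath it.
type onedrivePath struct {
	Root    string
	Folders []string
}

// location mimics pathToLocation: root first, then each folder, which is
// what gets persisted as the collection's LocationRef.
func location(odp onedrivePath) []string {
	return append([]string{odp.Root}, odp.Folders...)
}

func main() {
	odp := onedrivePath{Root: "root:", Folders: []string{"a", "b"}}
	fmt.Println(location(odp)) // [root: a b]
}
```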
```diff
@@ -147,11 +166,27 @@ func NewCollection(
 	ctrlOpts control.Options,
 	colScope collectionScope,
 	doNotMergeItems bool,
-) *Collection {
+) (*Collection, error) {
+	// TODO(ashmrtn): If OneDrive switches to using folder IDs then this will need
+	// to be changed as we won't be able to extract path information from the
+	// storage path. In that case, we'll need to start storing the location paths
+	// like we do the previous path.
+	locPath, err := pathToLocation(folderPath)
+	if err != nil {
+		return nil, clues.Wrap(err, "getting location").With("folder_path", folderPath.String())
+	}
+
+	prevLocPath, err := pathToLocation(prevPath)
+	if err != nil {
+		return nil, clues.Wrap(err, "getting previous location").With("prev_path", prevPath.String())
+	}
+
 	c := &Collection{
 		itemClient:  itemClient,
 		folderPath:  folderPath,
 		prevPath:    prevPath,
+		locPath:     locPath,
+		prevLocPath: prevLocPath,
 		driveItems:  map[string]models.DriveItemable{},
 		driveID:     driveID,
 		source:      source,
```
```diff
@@ -176,7 +211,7 @@ func NewCollection(
 		c.itemMetaReader = oneDriveItemMetaReader
 	}
 
-	return c
+	return c, nil
 }
 
 // Adds an itemID to the collection. This will make it eligible to be
```
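Since `NewCollection` can now fail while deriving locations, every call site gains an error branch. A minimal sketch of the caller-side pattern with a stub constructor (names hypothetical, not the repo's signatures):

```go
package main

import (
	"errors"
	"fmt"
)

// Stub standing in for the updated constructor: it can now fail while
// deriving the human-readable location from the storage path.
func newCollection(folderPath string) (*struct{ loc string }, error) {
	if folderPath == "" {
		return nil, errors.New("getting location: empty path")
	}
	return &struct{ loc string }{loc: folderPath}, nil
}

func main() {
	// Callers that previously did `col := NewCollection(...)` now handle
	// the error, as the hunks in the collections code below do with clues.Wrap.
	col, err := newCollection("root:/folderPath")
	if err != nil {
		fmt.Println("making collection:", err)
		return
	}
	fmt.Println(col.loc)
}
```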
```diff
@@ -229,6 +264,28 @@ func (oc *Collection) SetFullPath(curPath path.Path) {
 	oc.state = data.StateOf(oc.prevPath, curPath)
 }
 
+func (oc Collection) LocationPath() *path.Builder {
+	return oc.locPath
+}
+
+func (oc Collection) PreviousLocationPath() details.LocationIDer {
+	if oc.prevLocPath == nil {
+		return nil
+	}
+
+	switch oc.source {
+	case OneDriveSource:
+		return details.NewOneDriveLocationIDer(
+			oc.driveID,
+			oc.prevLocPath.Elements()...)
+
+	default:
+		return details.NewSharePointLocationIDer(
+			oc.driveID,
+			oc.prevLocPath.Elements()...)
+	}
+}
+
 func (oc Collection) State() data.CollectionState {
 	return oc.state
 }
```
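A sketch of why `PreviousLocationPath` bakes the drive ID into the location key: two drives can contain identical folder chains, so folder names alone would collide. The types here are illustrative, not the `details` package's real ones:

```go
package main

import (
	"fmt"
	"strings"
)

// locationID pairs a drive ID with the visible folder elements, which is the
// property the NewOneDriveLocationIDer/NewSharePointLocationIDer calls above
// appear designed to guarantee.
type locationID struct {
	driveID  string
	elements []string
}

func (l locationID) ID() string {
	return l.driveID + ":" + strings.Join(l.elements, "/")
}

func main() {
	a := locationID{"drive-1", []string{"root:", "docs"}}
	b := locationID{"drive-2", []string{"root:", "docs"}}
	fmt.Println(a.ID() == b.ID()) // false, despite identical folder names
}
```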
```diff
@@ -205,7 +205,7 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
 	driveFolderPath, err := path.GetDriveFolderPath(folderPath)
 	require.NoError(t, err, clues.ToCore(err))
 
-	coll := NewCollection(
+	coll, err := NewCollection(
 		graph.HTTPClient(graph.NoTimeout()),
 		folderPath,
 		nil,
@@ -216,6 +216,7 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
 		control.Options{ToggleFeatures: control.Toggles{}},
 		CollectionScopeFolder,
 		true)
+	require.NoError(t, err, clues.ToCore(err))
 	require.NotNil(t, coll)
 	assert.Equal(t, folderPath, coll.FullPath())
 
```
```diff
@@ -344,7 +345,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadError() {
 			folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source)
 			require.NoError(t, err, clues.ToCore(err))
 
-			coll := NewCollection(
+			coll, err := NewCollection(
 				graph.HTTPClient(graph.NoTimeout()),
 				folderPath,
 				nil,
@@ -355,6 +356,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadError() {
 				control.Options{ToggleFeatures: control.Toggles{}},
 				CollectionScopeFolder,
 				true)
+			require.NoError(t, err, clues.ToCore(err))
 
 			mockItem := models.NewDriveItem()
 			mockItem.SetId(&testItemID)
```
```diff
@@ -433,7 +435,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadUnauthorizedErrorRetry()
 			folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source)
 			require.NoError(t, err)
 
-			coll := NewCollection(
+			coll, err := NewCollection(
 				graph.HTTPClient(graph.NoTimeout()),
 				folderPath,
 				nil,
@@ -444,6 +446,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadUnauthorizedErrorRetry()
 				control.Options{ToggleFeatures: control.Toggles{}},
 				CollectionScopeFolder,
 				true)
+			require.NoError(t, err, clues.ToCore(err))
 
 			mockItem := models.NewDriveItem()
 			mockItem.SetId(&testItemID)
```
```diff
@@ -532,7 +535,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionPermissionBackupLatestModTim
 			folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source)
 			require.NoError(t, err, clues.ToCore(err))
 
-			coll := NewCollection(
+			coll, err := NewCollection(
 				graph.HTTPClient(graph.NoTimeout()),
 				folderPath,
 				nil,
@@ -543,6 +546,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionPermissionBackupLatestModTim
 				control.Options{ToggleFeatures: control.Toggles{}},
 				CollectionScopeFolder,
 				true)
+			require.NoError(t, err, clues.ToCore(err))
 
 			mtime := time.Now().AddDate(0, -1, 0)
 			mockItem := models.NewDriveItem()
```
```diff
@@ -416,7 +416,7 @@ func (c *Collections) Get(
 			return nil, map[string]map[string]struct{}{}, err
 		}
 
-		col := NewCollection(
+		col, err := NewCollection(
 			c.itemClient,
 			nil,
 			prevPath,
@@ -427,6 +427,9 @@ func (c *Collections) Get(
 			c.ctrl,
 			CollectionScopeUnknown,
 			true)
+		if err != nil {
+			return nil, map[string]map[string]struct{}{}, clues.Wrap(err, "making collection").WithClues(ictx)
+		}
 
 		c.CollectionMap[driveID][fldID] = col
 	}
```
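The new error branches attach identifying context (`clues.Wrap(...).With(...)`, `.WithClues(ictx)`) before returning; those calls appear verbatim in the hunks. A stdlib-only approximation of what that context buys:

```go
package main

import (
	"errors"
	"fmt"
)

// Stdlib approximation of clues.Wrap(...).With("drive_id", id): the failure
// site records which drive/folder it was working on, so the top-level log
// line is actionable without a debugger. (Hypothetical helper, not Corso's.)
func makeCollection(driveID, folderID string) error {
	base := errors.New("invalid prev path")
	return fmt.Errorf("making collection (drive_id=%s, folder_id=%s): %w",
		driveID, folderID, base)
}

func main() {
	err := makeCollection("driveID1", "fldID")
	fmt.Println(err)
	fmt.Println(errors.Unwrap(err)) // invalid prev path
}
```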
```diff
@@ -578,7 +581,7 @@ func (c *Collections) handleDelete(
 		return nil
 	}
 
-	col := NewCollection(
+	col, err := NewCollection(
 		c.itemClient,
 		nil,
 		prevPath,
@@ -590,6 +593,12 @@ func (c *Collections) handleDelete(
 		CollectionScopeUnknown,
 		// DoNotMerge is not checked for deleted items.
 		false)
+	if err != nil {
+		return clues.Wrap(err, "making collection").With(
+			"drive_id", driveID,
+			"item_id", itemID,
+			"path_string", prevPathStr)
+	}
 
 	c.CollectionMap[driveID][itemID] = col
 
```
```diff
@@ -765,7 +774,7 @@ func (c *Collections) UpdateCollections(
 			colScope = CollectionScopePackage
 		}
 
-		col := NewCollection(
+		col, err := NewCollection(
 			c.itemClient,
 			collectionPath,
 			prevPath,
@@ -777,6 +786,10 @@ func (c *Collections) UpdateCollections(
 			colScope,
 			invalidPrevDelta,
 		)
+		if err != nil {
+			return clues.Stack(err).WithClues(ictx)
+		}
+
 		col.driveName = driveName
 
 		c.CollectionMap[driveID][itemID] = col
```
```diff
@@ -24,9 +24,7 @@ func TestRestoreUnitSuite(t *testing.T) {
 func (suite *RestoreUnitSuite) TestAugmentRestorePaths() {
 	// Adding a simple test here so that we can be sure that this
 	// function gets updated whenever we add a new version.
-	if version.Backup > version.OneDrive6NameInMeta {
-		require.Less(suite.T(), version.OneDrive6NameInMeta+1, version.Backup, "unsupported backup version")
-	}
+	require.LessOrEqual(suite.T(), version.Backup, version.OneDrive7LocationRef, "unsupported backup version")
 
 	table := []struct {
 		name string
```
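The rewritten guard turns a silent `if` into a hard assertion: bumping `version.Backup` past the newest version this test knows about now fails the suite instead of skipping cases. The pattern in isolation (constants hypothetical):

```go
package main

import (
	"testing"

	"github.com/stretchr/testify/require"
)

// Hypothetical stand-ins for version.Backup / version.OneDrive7LocationRef.
const (
	backupVersion = 7
	newestHandled = 7
)

// If someone bumps backupVersion without teaching this test about the new
// version, the suite fails loudly instead of silently skipping cases.
func TestVersionGuard(t *testing.T) {
	require.LessOrEqual(t, backupVersion, newestHandled,
		"unsupported backup version")
}
```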
```diff
@@ -21,7 +21,7 @@ type DetailsMergeInfoer interface {
 	GetNewPathRefs(
 		oldRef *path.Builder,
 		oldLoc details.LocationIDer,
-	) (path.Path, *path.Builder, *path.Builder)
+	) (path.Path, *path.Builder, error)
 }
 
 type prevRef struct {
```
```diff
@@ -68,36 +68,37 @@ func (m *mergeDetails) addRepoRef(
 func (m *mergeDetails) GetNewPathRefs(
 	oldRef *path.Builder,
 	oldLoc details.LocationIDer,
-) (path.Path, *path.Builder, *path.Builder) {
+) (path.Path, *path.Builder, error) {
 	pr, ok := m.repoRefs[oldRef.ShortRef()]
 	if !ok {
 		return nil, nil, nil
 	}
 
-	// This was a location specified directly by a collection. Say the prefix is
-	// the whole oldLoc so other code will replace everything.
-	//
-	// TODO(ashmrtn): Should be able to remove the nil check later as we'll be
-	// able to ensure that old locations actually exist in backup details.
-	if oldLoc == nil {
-		return pr.repoRef, nil, pr.locRef
-	} else if pr.locRef != nil {
-		return pr.repoRef, oldLoc.InDetails(), pr.locRef
+	// This was a location specified directly by a collection.
+	if pr.locRef != nil {
+		return pr.repoRef, pr.locRef, nil
+	} else if oldLoc == nil || oldLoc.ID() == nil || len(oldLoc.ID().Elements()) == 0 {
+		return nil, nil, clues.New("empty location key")
 	}
 
 	// This is a location that we need to do prefix matching on because we didn't
 	// see the new location of it in a collection. For example, it's a subfolder
 	// whose parent folder was moved.
-	prefixes := m.locations.longestPrefix(oldLoc.ID())
-
-	return pr.repoRef, prefixes.oldLoc, prefixes.newLoc
+	prefixes := m.locations.longestPrefix(oldLoc)
+	newLoc := oldLoc.InDetails()
+
+	// Noop if prefix or newPrefix are nil. Them being nil means that the
+	// LocationRef hasn't changed.
+	newLoc.UpdateParent(prefixes.oldLoc, prefixes.newLoc)
+
+	return pr.repoRef, newLoc, nil
 }
 
 func (m *mergeDetails) addLocation(
 	oldRef details.LocationIDer,
 	newLoc *path.Builder,
 ) error {
-	return m.locations.add(oldRef.ID(), newLoc)
+	return m.locations.add(oldRef, newLoc)
 }
 
 func newMergeDetails() *mergeDetails {
```
```diff
@@ -116,20 +117,25 @@ type locationPrefixMatcher struct {
 	m prefixmatcher.Matcher[locRefs]
 }
 
-func (m *locationPrefixMatcher) add(oldRef, newLoc *path.Builder) error {
-	key := oldRef.String()
+func (m *locationPrefixMatcher) add(
+	oldRef details.LocationIDer,
+	newLoc *path.Builder,
+) error {
+	key := oldRef.ID().String()
 
 	if _, ok := m.m.Get(key); ok {
 		return clues.New("RepoRef already in matcher").With("repo_ref", oldRef)
 	}
 
-	m.m.Add(key, locRefs{oldLoc: oldRef, newLoc: newLoc})
+	m.m.Add(key, locRefs{oldLoc: oldRef.InDetails(), newLoc: newLoc})
 
 	return nil
 }
 
-func (m *locationPrefixMatcher) longestPrefix(oldRef *path.Builder) locRefs {
-	_, v, _ := m.m.LongestPrefix(oldRef.String())
+func (m *locationPrefixMatcher) longestPrefix(
+	oldRef details.LocationIDer,
+) locRefs {
+	_, v, _ := m.m.LongestPrefix(oldRef.ID().String())
 	return v
 }
 
```
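The matcher keys entries by the location's unique ID string and answers longest-prefix queries, so one entry for a moved parent covers all of its descendants. A naive stand-in for the `prefixmatcher.Matcher` contract (linear scan instead of a prefix tree):

```go
package main

import (
	"fmt"
	"strings"
)

// longestPrefix returns the value whose key is the longest prefix of q.
// The real locationPrefixMatcher delegates to a prefix tree; a linear scan
// demonstrates the same contract.
func longestPrefix(m map[string]string, q string) (string, bool) {
	best, found := "", false
	for k := range m {
		if (q == k || strings.HasPrefix(q, k+"/")) && len(k) > len(best) {
			best, found = k, true
		}
	}
	if !found {
		return "", false
	}
	return m[best], true
}

func main() {
	m := map[string]string{
		"drive-1:root:/a": "drive-1:root:/b", // folder a moved to b
	}
	// A child of the moved folder matches its parent's entry.
	v, ok := longestPrefix(m, "drive-1:root:/a/c")
	fmt.Println(v, ok) // drive-1:root:/b true
}
```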
```diff
@@ -97,10 +97,11 @@ func (suite *DetailsMergeInfoerUnitSuite) TestGetNewPathRefs() {
 			testUser,
 			category,
 			"folder3",
-			"folder4",
+			"folder2",
 		},
 		false)
 	newLoc1 := path.Builder{}.Append(newRef1.Folders()...)
+	newLoc2 := path.Builder{}.Append(newRef2.Folders()...)
 	oldLoc1 := path.Builder{}.Append(oldRef1.Folders()...)
 	oldLoc2 := path.Builder{}.Append(oldRef2.Folders()...)
 
```
```diff
@@ -120,69 +121,62 @@ func (suite *DetailsMergeInfoerUnitSuite) TestGetNewPathRefs() {
 	require.NoError(t, err, clues.ToCore(err))
 
 	table := []struct {
 		name      string
 		searchRef *path.Builder
 		searchLoc mockLocationIDer
-		expectedRef       path.Path
-		prefixFound       bool
-		expectedOldPrefix *path.Builder
+		errCheck    require.ErrorAssertionFunc
+		expectedRef path.Path
+		expectedLoc *path.Builder
 	}{
 		{
 			name:      "Exact Match With Loc",
 			searchRef: oldRef1.ToBuilder(),
-			searchLoc: searchLoc1,
-			expectedRef: newRef1,
-			prefixFound: true,
-			expectedOldPrefix: oldLoc1,
-		},
-		{
-			name:      "Exact Match Without Loc",
-			searchRef: oldRef1.ToBuilder(),
-			expectedRef: newRef1,
-			prefixFound: true,
-			expectedOldPrefix: nil,
-		},
-		{
-			name:      "Prefix Match",
-			searchRef: oldRef2.ToBuilder(),
-			searchLoc: searchLoc2,
-			expectedRef: newRef2,
-			prefixFound: true,
-			expectedOldPrefix: oldLoc1,
-		},
-		{
-			name:      "Not Found",
-			searchRef: newRef1.ToBuilder(),
-			expectedRef: nil,
-		},
-		{
-			name:      "Not Found With Loc",
-			searchRef: newRef1.ToBuilder(),
 			searchLoc: searchLoc1,
-			expectedRef: nil,
+			errCheck:    require.NoError,
+			expectedRef: newRef1,
+			expectedLoc: newLoc1,
 		},
 		{
-			name: "Ref Found Loc Not",
+			name:        "Exact Match Without Loc",
+			searchRef:   oldRef1.ToBuilder(),
+			errCheck:    require.NoError,
+			expectedRef: newRef1,
+			expectedLoc: newLoc1,
+		},
+		{
+			name:      "Prefix Match",
 			searchRef: oldRef2.ToBuilder(),
-			searchLoc: mockLocationIDer{path.Builder{}.Append("foo")},
+			searchLoc:   searchLoc2,
+			errCheck:    require.NoError,
 			expectedRef: newRef2,
+			expectedLoc: newLoc2,
+		},
+		{
+			name:      "Would Be Prefix Match Without Old Loc Errors",
+			searchRef: oldRef2.ToBuilder(),
+			errCheck:  require.Error,
+		},
+		{
+			name:      "Not Found With Old Loc",
+			searchRef: newRef1.ToBuilder(),
+			searchLoc: searchLoc2,
+			errCheck:  require.NoError,
+		},
+		{
+			name:      "Not Found Without Old Loc",
+			searchRef: newRef1.ToBuilder(),
+			errCheck:  require.NoError,
 		},
 	}
 	for _, test := range table {
 		suite.Run(test.name, func() {
 			t := suite.T()
 
-			newRef, oldPrefix, newPrefix := dm.GetNewPathRefs(test.searchRef, test.searchLoc)
+			newRef, newLoc, err := dm.GetNewPathRefs(test.searchRef, test.searchLoc)
+			test.errCheck(t, err, clues.ToCore(err))
 
 			assert.Equal(t, test.expectedRef, newRef, "RepoRef")
-			if !test.prefixFound {
-				assert.Nil(t, oldPrefix)
-				assert.Nil(t, newPrefix)
-				return
-			}
-
-			assert.Equal(t, test.expectedOldPrefix, oldPrefix, "old prefix")
-			assert.Equal(t, newLoc1, newPrefix, "new prefix")
+			assert.Equal(t, test.expectedLoc, newLoc, "LocationRef")
 		})
 	}
 }
```
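Carrying a `require.ErrorAssertionFunc` in each table row is what lets one loop cover both the error and success cases above. The idiom in a self-contained test (the function under test is hypothetical):

```go
package main

import (
	"errors"
	"testing"

	"github.com/stretchr/testify/require"
)

func lookup(key string) (string, error) {
	if key == "" {
		return "", errors.New("empty location key")
	}
	return "value", nil
}

// Each row carries its own error expectation, so error rows don't need a
// separate test function.
func TestLookup(t *testing.T) {
	table := []struct {
		name     string
		key      string
		errCheck require.ErrorAssertionFunc
	}{
		{name: "found", key: "k", errCheck: require.NoError},
		{name: "empty key errors", key: "", errCheck: require.Error},
	}
	for _, test := range table {
		t.Run(test.name, func(t *testing.T) {
			_, err := lookup(test.key)
			test.errCheck(t, err)
		})
	}
}
```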
```diff
@@ -197,7 +191,7 @@ func TestLocationPrefixMatcherUnitSuite(t *testing.T) {
 
 func (suite *LocationPrefixMatcherUnitSuite) TestAdd_Twice_Fails() {
 	t := suite.T()
-	p := makePath(
+	p := mockLocationIDer{makePath(
 		t,
 		[]string{
 			testTenant,
@@ -206,7 +200,7 @@ func (suite *LocationPrefixMatcherUnitSuite) TestAdd_Twice_Fails() {
 			category,
 			"folder1",
 		},
-		false).ToBuilder()
+		false).ToBuilder()}
 	loc1 := path.Builder{}.Append("folder1")
 	loc2 := path.Builder{}.Append("folder2")
 
```
```diff
@@ -220,20 +214,20 @@ func (suite *LocationPrefixMatcherUnitSuite) TestAdd_Twice_Fails() {
 }
 
 func (suite *LocationPrefixMatcherUnitSuite) TestAdd_And_Match() {
-	loc1 := path.Builder{}.Append("folder1")
-	loc2 := loc1.Append("folder2")
-	loc3 := path.Builder{}.Append("foo")
+	loc1 := mockLocationIDer{path.Builder{}.Append("folder1")}
+	loc2 := mockLocationIDer{loc1.InDetails().Append("folder2")}
+	loc3 := mockLocationIDer{path.Builder{}.Append("foo")}
 
-	res1 := path.Builder{}.Append("1")
+	res1 := mockLocationIDer{path.Builder{}.Append("1")}
 
 	lpm := newLocationPrefixMatcher()
 
-	err := lpm.add(loc1, res1)
+	err := lpm.add(loc1, res1.InDetails())
 	require.NoError(suite.T(), err, clues.ToCore(err))
 
 	table := []struct {
 		name      string
-		searchKey *path.Builder
+		searchKey mockLocationIDer
 		found     bool
 	}{
 		{
```
```diff
@@ -265,8 +259,8 @@ func (suite *LocationPrefixMatcherUnitSuite) TestAdd_And_Match() {
 				return
 			}
 
-			assert.Equal(t, loc1, prefixes.oldLoc, "old prefix")
-			assert.Equal(t, res1, prefixes.newLoc, "new prefix")
+			assert.Equal(t, loc1.InDetails(), prefixes.oldLoc, "old prefix")
+			assert.Equal(t, res1.InDetails(), prefixes.newLoc, "new prefix")
 		})
 	}
 }
```
```diff
@@ -524,7 +524,8 @@ func matchesReason(reasons []kopia.Reason, p path.Path) bool {
 func getNewPathRefs(
 	dataFromBackup kopia.DetailsMergeInfoer,
 	entry *details.DetailsEntry,
-	repoRef *path.Builder,
+	repoRef path.Path,
+	backupVersion int,
 ) (path.Path, *path.Builder, bool, error) {
 	// Right now we can't guarantee that we have an old location in the
 	// previous details entry so first try a lookup without a location to see
```
```diff
@@ -533,18 +534,52 @@ func getNewPathRefs(
 	// TODO(ashmrtn): In the future we can remove this first check as we'll be
 	// able to assume we always have the location in the previous entry. We'll end
 	// up doing some extra parsing, but it will simplify this code.
-	newPath, _, newLocPrefix := dataFromBackup.GetNewPathRefs(repoRef, nil)
-	if newPath == nil {
-		// This entry doesn't need merging.
+	if repoRef.Service() == path.ExchangeService {
+		newPath, newLoc, err := dataFromBackup.GetNewPathRefs(repoRef.ToBuilder(), nil)
+		if err != nil {
+			return nil, nil, false, clues.Wrap(err, "getting new paths")
+		} else if newPath == nil {
+			// This entry doesn't need merging.
+			return nil, nil, false, nil
+		} else if newLoc == nil {
+			return nil, nil, false, clues.New("unable to find new exchange location")
+		}
+
+		// This is kind of jank cause we're in a transitionary period, but even if
+		// we're consesrvative here about marking something as updated the RepoRef
+		// comparison in the caller should catch the change. Calendars is the only
+		// exception, since it uses IDs for folders, but we should already be
+		// populating the LocationRef for them.
+		//
+		// Without this, all OneDrive items will be marked as updated the first time
+		// around because OneDrive hasn't been persisting LocationRef before now.
+		updated := len(entry.LocationRef) > 0 && newLoc.String() != entry.LocationRef
+
+		return newPath, newLoc, updated, nil
+	}
+
+	// We didn't have an exact entry, so retry with a location.
+	locRef, err := entry.ToLocationIDer(backupVersion)
+	if err != nil {
+		return nil, nil, false, clues.Wrap(err, "getting previous item location")
+	}
+
+	if locRef == nil {
+		return nil, nil, false, clues.New("entry with empty LocationRef")
+	}
+
+	newPath, newLoc, err := dataFromBackup.GetNewPathRefs(repoRef.ToBuilder(), locRef)
+	if err != nil {
+		return nil, nil, false, clues.Wrap(err, "getting new paths with old location")
+	} else if newPath == nil {
 		return nil, nil, false, nil
+	} else if newLoc == nil {
+		return nil, nil, false, clues.New("unable to get new paths")
 	}
 
-	// OneDrive doesn't return prefixes yet.
-	if newLocPrefix == nil {
-		newLocPrefix = &path.Builder{}
-	}
+	updated := len(entry.LocationRef) > 0 && newLoc.String() != entry.LocationRef
 
-	return newPath, newLocPrefix, newLocPrefix.String() != entry.LocationRef, nil
+	return newPath, newLoc, updated, nil
 }
 
 func mergeDetails(
```
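Note the asymmetry in the `updated` flag: an empty previous LocationRef means "never recorded", not "changed". Without that, every OneDrive item would be flagged as updated on the first backup after this PR, since older backups never persisted a LocationRef. The expression in isolation:

```go
package main

import "fmt"

// locationUpdated mirrors the hunk's expression:
//   updated := len(entry.LocationRef) > 0 && newLoc.String() != entry.LocationRef
// An empty previous ref means "never recorded", not "changed".
func locationUpdated(prevLocRef, newLoc string) bool {
	return len(prevLocRef) > 0 && newLoc != prevLocRef
}

func main() {
	fmt.Println(locationUpdated("", "root:/a"))        // false: first backup with LocationRefs
	fmt.Println(locationUpdated("root:/a", "root:/a")) // false: unchanged
	fmt.Println(locationUpdated("root:/a", "root:/b")) // true: moved
}
```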
```diff
@@ -582,7 +617,7 @@ func mergeDetails(
 
 		mctx = clues.Add(mctx, "base_manifest_backup_id", bID)
 
-		_, baseDeets, err := getBackupAndDetailsFromID(
+		baseBackup, baseDeets, err := getBackupAndDetailsFromID(
 			mctx,
 			model.StableID(bID),
 			ms,
```
```diff
@@ -615,7 +650,8 @@ func mergeDetails(
 			newPath, newLoc, locUpdated, err := getNewPathRefs(
 				dataFromBackup,
 				entry,
-				rr.ToBuilder())
+				rr,
+				baseBackup.Version)
 			if err != nil {
 				return clues.Wrap(err, "getting updated info for entry").WithClues(mctx)
 			}
```
```diff
@@ -627,9 +663,7 @@ func mergeDetails(
 
 			// Fixup paths in the item.
 			item := entry.ItemInfo
-			if err := details.UpdateItem(&item, newPath, newLoc); err != nil {
-				return clues.Wrap(err, "updating merged item info").WithClues(mctx)
-			}
+			details.UpdateItem(&item, newLoc)
 
 			// TODO(ashmrtn): This may need updated if we start using this merge
 			// strategry for items that were cached in kopia.
```
```diff
@@ -183,36 +183,25 @@ func (mbs mockBackupStorer) Update(context.Context, model.Schema, model.Model) e
 
 // ----- model store for backups
 
-type locPair struct {
-	old  *path.Builder
-	newL *path.Builder
-}
-
 type mockDetailsMergeInfoer struct {
 	repoRefs map[string]path.Path
-	locs     map[string]locPair
+	locs     map[string]*path.Builder
 }
 
-func (m *mockDetailsMergeInfoer) add(oldRef, newRef path.Path, oldPrefix, newLoc *path.Builder) {
+func (m *mockDetailsMergeInfoer) add(oldRef, newRef path.Path, newLoc *path.Builder) {
 	oldPB := oldRef.ToBuilder()
 	// Items are indexed individually.
 	m.repoRefs[oldPB.ShortRef()] = newRef
 
-	if newLoc != nil {
-		// Locations are indexed by directory.
-		m.locs[oldPB.ShortRef()] = locPair{
-			old:  oldPrefix,
-			newL: newLoc,
-		}
-	}
+	// Locations are indexed by directory.
+	m.locs[oldPB.ShortRef()] = newLoc
 }
 
 func (m *mockDetailsMergeInfoer) GetNewPathRefs(
 	oldRef *path.Builder,
-	oldLoc details.LocationIDer,
-) (path.Path, *path.Builder, *path.Builder) {
-	locs := m.locs[oldRef.ShortRef()]
-	return m.repoRefs[oldRef.ShortRef()], locs.old, locs.newL
+	_ details.LocationIDer,
+) (path.Path, *path.Builder, error) {
+	return m.repoRefs[oldRef.ShortRef()], m.locs[oldRef.ShortRef()], nil
 }
 
 func (m *mockDetailsMergeInfoer) ItemsToMerge() int {
```
```diff
@@ -226,7 +215,7 @@ func (m *mockDetailsMergeInfoer) ItemsToMerge() int {
 func newMockDetailsMergeInfoer() *mockDetailsMergeInfoer {
 	return &mockDetailsMergeInfoer{
 		repoRefs: map[string]path.Path{},
-		locs:     map[string]locPair{},
+		locs:     map[string]*path.Builder{},
 	}
 }
 
```
```diff
@@ -351,13 +340,13 @@ func makeDetailsEntry(
 		}
 
 	case path.OneDriveService:
-		parent, err := path.GetDriveFolderPath(p)
-		require.NoError(t, err, clues.ToCore(err))
+		require.NotNil(t, l)
 
 		res.OneDrive = &details.OneDriveInfo{
 			ItemType:   details.OneDriveItem,
-			ParentPath: parent,
+			ParentPath: l.PopFront().String(),
 			Size:       int64(size),
+			DriveID:    "drive-id",
 		}
 
 	default:
```
```diff
@@ -744,7 +733,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 			name: "BackupIDNotFound",
 			mdm: func() *mockDetailsMergeInfoer {
 				res := newMockDetailsMergeInfoer()
-				res.add(itemPath1, itemPath1, locationPath1, locationPath1)
+				res.add(itemPath1, itemPath1, locationPath1)
 
 				return res
 			}(),
@@ -762,7 +751,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 			name: "DetailsIDNotFound",
 			mdm: func() *mockDetailsMergeInfoer {
 				res := newMockDetailsMergeInfoer()
-				res.add(itemPath1, itemPath1, locationPath1, locationPath1)
+				res.add(itemPath1, itemPath1, locationPath1)
 
 				return res
 			}(),
@@ -788,8 +777,8 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 			name: "BaseMissingItems",
 			mdm: func() *mockDetailsMergeInfoer {
 				res := newMockDetailsMergeInfoer()
-				res.add(itemPath1, itemPath1, locationPath1, locationPath1)
-				res.add(itemPath2, itemPath2, locationPath2, locationPath2)
+				res.add(itemPath1, itemPath1, locationPath1)
+				res.add(itemPath2, itemPath2, locationPath2)
 
 				return res
 			}(),
@@ -819,7 +808,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 			name: "TooManyItems",
 			mdm: func() *mockDetailsMergeInfoer {
 				res := newMockDetailsMergeInfoer()
-				res.add(itemPath1, itemPath1, locationPath1, locationPath1)
+				res.add(itemPath1, itemPath1, locationPath1)
 
 				return res
 			}(),
@@ -855,7 +844,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 			name: "BadBaseRepoRef",
 			mdm: func() *mockDetailsMergeInfoer {
 				res := newMockDetailsMergeInfoer()
-				res.add(itemPath1, itemPath2, locationPath1, locationPath2)
+				res.add(itemPath1, itemPath2, locationPath2)
 
 				return res
 			}(),
@@ -917,7 +906,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 					true,
 				)
 
-				res.add(itemPath1, p, locationPath1, nil)
+				res.add(itemPath1, p, nil)
 
 				return res
 			}(),
@@ -947,7 +936,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 			name: "ItemMerged",
 			mdm: func() *mockDetailsMergeInfoer {
 				res := newMockDetailsMergeInfoer()
-				res.add(itemPath1, itemPath1, locationPath1, locationPath1)
+				res.add(itemPath1, itemPath1, locationPath1)
 
 				return res
 			}(),
@@ -980,7 +969,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 			name: "ItemMergedSameLocation",
 			mdm: func() *mockDetailsMergeInfoer {
 				res := newMockDetailsMergeInfoer()
-				res.add(itemPath1, itemPath1, locationPath1, locationPath1)
+				res.add(itemPath1, itemPath1, locationPath1)
 
 				return res
 			}(),
@@ -1013,7 +1002,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 			name: "ItemMergedExtraItemsInBase",
 			mdm: func() *mockDetailsMergeInfoer {
 				res := newMockDetailsMergeInfoer()
-				res.add(itemPath1, itemPath1, locationPath1, locationPath1)
+				res.add(itemPath1, itemPath1, locationPath1)
 
 				return res
 			}(),
@@ -1047,7 +1036,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 			name: "ItemMoved",
 			mdm: func() *mockDetailsMergeInfoer {
 				res := newMockDetailsMergeInfoer()
-				res.add(itemPath1, itemPath2, locationPath1, locationPath2)
+				res.add(itemPath1, itemPath2, locationPath2)
 
 				return res
 			}(),
@@ -1080,8 +1069,8 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 			name: "MultipleBases",
 			mdm: func() *mockDetailsMergeInfoer {
 				res := newMockDetailsMergeInfoer()
-				res.add(itemPath1, itemPath1, locationPath1, locationPath1)
-				res.add(itemPath3, itemPath3, locationPath3, locationPath3)
+				res.add(itemPath1, itemPath1, locationPath1)
+				res.add(itemPath3, itemPath3, locationPath3)
 
 				return res
 			}(),
@@ -1132,7 +1121,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 			name: "SomeBasesIncomplete",
 			mdm: func() *mockDetailsMergeInfoer {
 				res := newMockDetailsMergeInfoer()
-				res.add(itemPath1, itemPath1, locationPath1, locationPath1)
+				res.add(itemPath1, itemPath1, locationPath1)
 
 				return res
 			}(),
@@ -1263,7 +1252,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsFolde
 	)
 
 	mdm := newMockDetailsMergeInfoer()
-	mdm.add(itemPath1, itemPath1, locPath1, locPath1)
+	mdm.add(itemPath1, itemPath1, locPath1)
 
 	itemDetails := makeDetailsEntry(t, itemPath1, locPath1, itemSize, false)
 	// itemDetails.Exchange.Modified = now
```
```diff
@@ -1,6 +1,6 @@
 package version
 
-const Backup = 6
+const Backup = 7
 
 // Various labels to refer to important version changes.
 // Labels don't need 1:1 service:version representation. Add a new
```
```diff
@@ -38,5 +38,5 @@ const (
 
 	// OneDriveXLocationRef provides LocationRef information for Exchange,
 	// OneDrive, and SharePoint libraries.
-	OneDriveXLocationRef = Backup + 1
+	OneDrive7LocationRef = 7
 )
```
```diff
@@ -457,7 +457,7 @@ func (de DetailsEntry) ToLocationIDer(backupVersion int) (LocationIDer, error) {
 		return de.ItemInfo.uniqueLocation(baseLoc)
 	}
 
-	if backupVersion >= version.OneDriveXLocationRef ||
+	if backupVersion >= version.OneDrive7LocationRef ||
 		(de.ItemInfo.infoType() != OneDriveItem &&
 			de.ItemInfo.infoType() != SharePointLibrary) {
 		return nil, clues.New("no previous location for entry")
```
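`ToLocationIDer` treats the renamed constant as a cutoff: entries written at `OneDrive7LocationRef` or later must already carry a LocationRef, while older drive entries may fall back to parsing the RepoRef. A simplified sketch of that gate (helper and fallback are hypothetical):

```go
package main

import (
	"errors"
	"fmt"
)

const oneDrive7LocationRef = 7

// previousLocation sketches the ToLocationIDer gate: newer backups must have
// persisted a LocationRef; older drive-based entries get a RepoRef fallback.
func previousLocation(backupVersion int, locationRef, repoRefFallback string) (string, error) {
	if locationRef != "" {
		return locationRef, nil
	}
	if backupVersion >= oneDrive7LocationRef {
		return "", errors.New("no previous location for entry")
	}
	return repoRefFallback, nil
}

func main() {
	loc, err := previousLocation(6, "", "root:/folderPath")
	fmt.Println(loc, err) // root:/folderPath <nil>
	_, err = previousLocation(7, "", "root:/folderPath")
	fmt.Println(err) // no previous location for entry
}
```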
```diff
@@ -578,10 +578,10 @@ const (
 	FolderItem ItemType = 306
 )
 
-func UpdateItem(item *ItemInfo, repoPath path.Path, locPath *path.Builder) error {
+func UpdateItem(item *ItemInfo, newLocPath *path.Builder) {
 	// Only OneDrive and SharePoint have information about parent folders
 	// contained in them.
-	var updatePath func(repo path.Path, location *path.Builder) error
+	var updatePath func(newLocPath *path.Builder)
 
 	switch item.infoType() {
 	case ExchangeContact, ExchangeEvent, ExchangeMail:
@@ -591,10 +591,10 @@ func UpdateItem(item *ItemInfo, repoPath path.Path, locPath *path.Builder) error
 	case OneDriveItem:
 		updatePath = item.OneDrive.UpdateParentPath
 	default:
-		return nil
+		return
 	}
 
-	return updatePath(repoPath, locPath)
+	updatePath(newLocPath)
 }
 
 // ItemInfo is a oneOf that contains service specific
```
```diff
@@ -758,9 +758,8 @@ func (i ExchangeInfo) Values() []string {
 	return []string{}
 }
 
-func (i *ExchangeInfo) UpdateParentPath(_ path.Path, locPath *path.Builder) error {
-	i.ParentPath = locPath.String()
-	return nil
+func (i *ExchangeInfo) UpdateParentPath(newLocPath *path.Builder) {
+	i.ParentPath = newLocPath.String()
 }
 
 func (i *ExchangeInfo) uniqueLocation(baseLoc *path.Builder) (LocationIDer, error) {
```
```diff
@@ -812,15 +811,8 @@ func (i SharePointInfo) Values() []string {
 	}
 }
 
-func (i *SharePointInfo) UpdateParentPath(newPath path.Path, _ *path.Builder) error {
-	newParent, err := path.GetDriveFolderPath(newPath)
-	if err != nil {
-		return clues.Wrap(err, "making sharePoint path").With("path", newPath)
-	}
-
-	i.ParentPath = newParent
-
-	return nil
+func (i *SharePointInfo) UpdateParentPath(newLocPath *path.Builder) {
+	i.ParentPath = newLocPath.PopFront().String()
 }
 
 func (i *SharePointInfo) uniqueLocation(baseLoc *path.Builder) (LocationIDer, error) {
```
```diff
@@ -864,15 +856,8 @@ func (i OneDriveInfo) Values() []string {
 	}
 }
 
-func (i *OneDriveInfo) UpdateParentPath(newPath path.Path, _ *path.Builder) error {
-	newParent, err := path.GetDriveFolderPath(newPath)
-	if err != nil {
-		return clues.Wrap(err, "making oneDrive path").With("path", newPath)
-	}
-
-	i.ParentPath = newParent
-
-	return nil
+func (i *OneDriveInfo) UpdateParentPath(newLocPath *path.Builder) {
+	i.ParentPath = newLocPath.PopFront().String()
 }
 
 func (i *OneDriveInfo) uniqueLocation(baseLoc *path.Builder) (LocationIDer, error) {
```
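All three drive-flavored `UpdateParentPath` implementations now reduce to `newLocPath.PopFront().String()`: the location's first element is the drive root marker, which the displayed ParentPath omits. The slice-level equivalent:

```go
package main

import (
	"fmt"
	"strings"
)

// popFront drops the leading element, as path.Builder.PopFront does; for a
// OneDrive location that element is the "root:" marker.
func popFront(elements []string) []string {
	if len(elements) == 0 {
		return elements
	}
	return elements[1:]
}

func main() {
	loc := []string{"root:", "f2"}
	fmt.Println(strings.Join(popFront(loc), "/")) // f2
}
```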
```diff
@@ -857,47 +857,17 @@ func makeItemPath(
 
 func (suite *DetailsUnitSuite) TestUpdateItem() {
 	const (
-		tenant        = "a-tenant"
-		resourceOwner = "a-user"
-		driveID       = "abcd"
-		folder1       = "f1"
-		folder2       = "f2"
-		folder3       = "f3"
-		item          = "hello.txt"
+		folder1 = "f1"
+		folder2 = "f2"
 	)
 
-	// Making both OneDrive paths is alright because right now they're the same as
-	// SharePoint path and there's no extra validation.
-	newOneDrivePath := makeItemPath(
-		suite.T(),
-		path.OneDriveService,
-		path.FilesCategory,
-		tenant,
-		resourceOwner,
-		[]string{
-			"drives",
-			driveID,
-			"root:",
-			folder2,
-			item,
-		},
-	)
-	newExchangePB := path.Builder{}.Append(folder3)
-	badOneDrivePath := makeItemPath(
-		suite.T(),
-		path.OneDriveService,
-		path.FilesCategory,
-		tenant,
-		resourceOwner,
-		[]string{item},
-	)
+	newExchangePB := path.Builder{}.Append(folder2)
+	newOneDrivePB := path.Builder{}.Append("root:", folder2)
 
 	table := []struct {
 		name         string
 		input        ItemInfo
-		repoPath     path.Path
 		locPath      *path.Builder
-		errCheck     assert.ErrorAssertionFunc
 		expectedItem ItemInfo
 	}{
 		{
@@ -908,13 +878,11 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
 					ParentPath: folder1,
 				},
 			},
-			repoPath: newOneDrivePath,
-			locPath:  newExchangePB,
-			errCheck: assert.NoError,
+			locPath: newExchangePB,
 			expectedItem: ItemInfo{
 				Exchange: &ExchangeInfo{
 					ItemType:   ExchangeEvent,
-					ParentPath: folder3,
+					ParentPath: folder2,
 				},
 			},
 		},
@@ -926,13 +894,11 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
 					ParentPath: folder1,
 				},
 			},
-			repoPath: newOneDrivePath,
-			locPath:  newExchangePB,
-			errCheck: assert.NoError,
+			locPath: newExchangePB,
 			expectedItem: ItemInfo{
 				Exchange: &ExchangeInfo{
 					ItemType:   ExchangeContact,
-					ParentPath: folder3,
+					ParentPath: folder2,
 				},
 			},
 		},
@@ -944,13 +910,11 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
 					ParentPath: folder1,
 				},
 			},
-			repoPath: newOneDrivePath,
-			locPath:  newExchangePB,
-			errCheck: assert.NoError,
+			locPath: newExchangePB,
 			expectedItem: ItemInfo{
 				Exchange: &ExchangeInfo{
 					ItemType:   ExchangeMail,
-					ParentPath: folder3,
+					ParentPath: folder2,
 				},
 			},
 		},
@@ -962,9 +926,7 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
 					ParentPath: folder1,
 				},
 			},
-			repoPath: newOneDrivePath,
-			locPath:  newExchangePB,
-			errCheck: assert.NoError,
+			locPath: newOneDrivePB,
 			expectedItem: ItemInfo{
 				OneDrive: &OneDriveInfo{
 					ItemType: OneDriveItem,
@@ -980,9 +942,7 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
 					ParentPath: folder1,
 				},
 			},
-			repoPath: newOneDrivePath,
-			locPath:  newExchangePB,
-			errCheck: assert.NoError,
+			locPath: newOneDrivePB,
 			expectedItem: ItemInfo{
 				SharePoint: &SharePointInfo{
 					ItemType: SharePointLibrary,
@@ -990,44 +950,14 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
 				},
 			},
 		},
-		{
-			name: "OneDriveBadPath",
-			input: ItemInfo{
-				OneDrive: &OneDriveInfo{
-					ItemType:   OneDriveItem,
-					ParentPath: folder1,
-				},
-			},
-			repoPath: badOneDrivePath,
-			locPath:  newExchangePB,
-			errCheck: assert.Error,
-		},
-		{
-			name: "SharePointBadPath",
-			input: ItemInfo{
-				SharePoint: &SharePointInfo{
-					ItemType:   SharePointLibrary,
-					ParentPath: folder1,
-				},
-			},
-			repoPath: badOneDrivePath,
-			locPath:  newExchangePB,
-			errCheck: assert.Error,
-		},
 	}
 
 	for _, test := range table {
 		suite.Run(test.name, func() {
 			t := suite.T()
 
 			item := test.input
-
-			err := UpdateItem(&item, test.repoPath, test.locPath)
-			test.errCheck(t, err, clues.ToCore(err))
-
-			if err != nil {
-				return
-			}
+			UpdateItem(&item, test.locPath)
 
 			assert.Equal(t, test.expectedItem, item)
 		})
 	}
```
```diff
@@ -1255,7 +1185,7 @@ func (suite *DetailsUnitSuite) TestLocationIDer_FromEntry() {
 					DriveID: driveID,
 				},
 			},
-			backupVersion:     version.OneDriveXLocationRef - 1,
+			backupVersion:     version.OneDrive7LocationRef - 1,
 			expectedErr:       require.NoError,
 			expectedUniqueLoc: fmt.Sprintf(expectedUniqueLocFmt, path.FilesCategory),
 		},
@@ -1269,7 +1199,7 @@ func (suite *DetailsUnitSuite) TestLocationIDer_FromEntry() {
 					DriveID: driveID,
 				},
 			},
-			backupVersion:     version.OneDriveXLocationRef,
+			backupVersion:     version.OneDrive7LocationRef,
 			hasLocRef:         true,
 			expectedErr:       require.NoError,
 			expectedUniqueLoc: fmt.Sprintf(expectedUniqueLocFmt, path.FilesCategory),
@@ -1284,7 +1214,7 @@ func (suite *DetailsUnitSuite) TestLocationIDer_FromEntry() {
 					DriveID: driveID,
 				},
 			},
-			backupVersion: version.OneDriveXLocationRef,
+			backupVersion: version.OneDrive7LocationRef,
 			expectedErr:   require.Error,
 		},
 		{
@@ -1297,7 +1227,7 @@ func (suite *DetailsUnitSuite) TestLocationIDer_FromEntry() {
 					DriveID: driveID,
 				},
 			},
-			backupVersion:     version.OneDriveXLocationRef - 1,
+			backupVersion:     version.OneDrive7LocationRef - 1,
 			expectedErr:       require.NoError,
 			expectedUniqueLoc: fmt.Sprintf(expectedUniqueLocFmt, path.LibrariesCategory),
 		},
@@ -1311,7 +1241,7 @@ func (suite *DetailsUnitSuite) TestLocationIDer_FromEntry() {
 					DriveID: driveID,
 				},
 			},
-			backupVersion:     version.OneDriveXLocationRef,
+			backupVersion:     version.OneDrive7LocationRef,
 			hasLocRef:         true,
 			expectedErr:       require.NoError,
 			expectedUniqueLoc: fmt.Sprintf(expectedUniqueLocFmt, path.LibrariesCategory),
@@ -1326,7 +1256,7 @@ func (suite *DetailsUnitSuite) TestLocationIDer_FromEntry() {
 					DriveID: driveID,
 				},
 			},
-			backupVersion: version.OneDriveXLocationRef,
+			backupVersion: version.OneDrive7LocationRef,
 			expectedErr:   require.Error,
 		},
 		{
@@ -1338,7 +1268,7 @@ func (suite *DetailsUnitSuite) TestLocationIDer_FromEntry() {
 					ItemType: ExchangeMail,
 				},
 			},
-			backupVersion:     version.OneDriveXLocationRef - 1,
+			backupVersion:     version.OneDrive7LocationRef - 1,
 			hasLocRef:         true,
 			expectedErr:       require.NoError,
 			expectedUniqueLoc: fmt.Sprintf(expectedExchangeUniqueLocFmt, path.EmailCategory),
@@ -1352,7 +1282,7 @@ func (suite *DetailsUnitSuite) TestLocationIDer_FromEntry() {
 					ItemType: ExchangeMail,
 				},
 			},
-			backupVersion:     version.OneDriveXLocationRef,
+			backupVersion:     version.OneDrive7LocationRef,
 			hasLocRef:         true,
 			expectedErr:       require.NoError,
 			expectedUniqueLoc: fmt.Sprintf(expectedExchangeUniqueLocFmt, path.EmailCategory),
@@ -1366,7 +1296,7 @@ func (suite *DetailsUnitSuite) TestLocationIDer_FromEntry() {
 					ItemType: ExchangeMail,
 				},
 			},
-			backupVersion: version.OneDriveXLocationRef - 1,
+			backupVersion: version.OneDrive7LocationRef - 1,
 			expectedErr:   require.Error,
 		},
 		{
@@ -1378,7 +1308,7 @@ func (suite *DetailsUnitSuite) TestLocationIDer_FromEntry() {
 					ItemType: ExchangeMail,
 				},
 			},
-			backupVersion: version.OneDriveXLocationRef,
+			backupVersion: version.OneDrive7LocationRef,
 			expectedErr:   require.Error,
 		},
 	}
```