Change to storing files by ID instead of display name (#2496)
## Description

Change to storing OneDrive files by ID instead of their OneDrive display name. This allows delta token-based incrementals to use an exclude list to remove previously backed up items from a backup during hierarchy merging.

Also updates the following:

* selectors to match on file display name instead of kopia file name
* ShortRefs for OneDrive files to update when the file display name is updated

## Does this PR need a docs update or release note?

- [ ] ✅ Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x] ⛔ No

## Type of change

- [x] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Test
- [ ] 💻 CI/Deployment
- [ ] 🧹 Tech Debt/Cleanup

## Issue(s)

* closes #1535

## Test Plan

- [x] 💪 Manual
- [x] ⚡ Unit test
- [x] 💚 E2E
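For reviewers skimming the diff: the heart of the ShortRef change in `details.add` is to rebuild the item's path from the folder display names plus the file's display name followed by its M365 item ID, and hash that. The sketch below is a simplified, self-contained illustration of that idea only; it is not the production code, and the sha256-based `shortRef` helper stands in for corso's real `path.Builder{}.Append(...).ShortRef()`, whose implementation is not part of this diff.

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"strings"
)

// shortRef stands in for path.Builder{}.Append(...).ShortRef(); the hashing
// corso actually uses is not shown in this diff, so sha256 is illustrative.
func shortRef(elems []string) string {
	sum := sha256.Sum256([]byte(strings.Join(elems, "/")))
	return hex.EncodeToString(sum[:8])
}

// mungedShortRef mirrors the idea added to details.add for OneDrive and
// SharePoint library items: keep every folder display name, then append the
// file's display name followed by the M365 item ID (the last path element).
// Renaming the file changes the ShortRef, while a folder named after the
// item ID cannot collide with it because the ID still terminates the path.
func mungedShortRef(repoElems []string, displayName string) string {
	itemID := repoElems[len(repoElems)-1]

	elems := make([]string, 0, len(repoElems)+1)
	elems = append(elems, repoElems[:len(repoElems)-1]...)
	elems = append(elems, displayName, itemID)

	return shortRef(elems)
}

func main() {
	// After this PR, kopia paths end in the item's ID rather than its name.
	repoElems := []string{"tenant", "onedrive", "user", "files", "drive-id", "root:", "folder", "item-id"}

	fmt.Println(mungedShortRef(repoElems, "report.docx"))
	fmt.Println(mungedShortRef(repoElems, "report-renamed.docx")) // differs after a rename
}
```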
This commit is contained in:
parent eb6d6f59cb
commit 8a81624b98
src/cli/utils/testdata/opts.go (vendored): 62 changed lines
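The diff also bumps the backup format version and replaces the placeholder `OneDriveXNameInMeta` constant with `OneDrive6NameInMeta`. Roughly, restore code gates on that constant as in the sketch below; this is an assumed shape only, the real branch bodies in `RestoreCollection` are longer, and `restoreLegacyNameItem`/`restoreIDNamedItem` are placeholder names rather than functions from the corso codebase.

```go
package main

import "fmt"

const (
	// These mirror the constants in the version package shown in this diff;
	// Backup is bumped from 5 to 6.
	OneDrive5DirMetaNoName = 5
	OneDrive6NameInMeta    = 6
	Backup                 = 6
)

func restoreItem(backupVersion int) string {
	if backupVersion < OneDrive6NameInMeta {
		// Older backups stored OneDrive items under their display name.
		return restoreLegacyNameItem()
	}

	// Version 6+ backups store items under their M365 item ID, with the
	// display name carried in the item's metadata.
	return restoreIDNamedItem()
}

func restoreLegacyNameItem() string { return "restored by display name" }
func restoreIDNamedItem() string    { return "restored by item ID" }

func main() {
	fmt.Println(restoreItem(5))      // legacy layout
	fmt.Println(restoreItem(Backup)) // new ID-based layout
}
```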
@@ -387,6 +387,37 @@ var (
},
},
},
{
Name: "SingleItem",
Expected: []details.DetailsEntry{testdata.OneDriveItems[0]},
Opts: utils.OneDriveOpts{
FileNames: []string{
testdata.OneDriveItems[0].OneDrive.ItemName,
},
},
},
{
Name: "MultipleItems",
Expected: []details.DetailsEntry{
testdata.OneDriveItems[0],
testdata.OneDriveItems[1],
},
Opts: utils.OneDriveOpts{
FileNames: []string{
testdata.OneDriveItems[0].OneDrive.ItemName,
testdata.OneDriveItems[1].OneDrive.ItemName,
},
},
},
{
Name: "NoSelectRepoItemName",
Expected: []details.DetailsEntry{},
Opts: utils.OneDriveOpts{
FileNames: []string{
testdata.OneDriveItemPath1.Item(),
},
},
},
{
Name: "CreatedBefore",
Expected: []details.DetailsEntry{testdata.OneDriveItems[1]},
@@ -475,6 +506,37 @@ var (
},
},
},
{
Name: "SingleItem",
Expected: []details.DetailsEntry{testdata.SharePointLibraryItems[0]},
Opts: utils.SharePointOpts{
FileNames: []string{
testdata.SharePointLibraryItems[0].SharePoint.ItemName,
},
},
},
{
Name: "MultipleItems",
Expected: []details.DetailsEntry{
testdata.SharePointLibraryItems[0],
testdata.SharePointLibraryItems[1],
},
Opts: utils.SharePointOpts{
FileNames: []string{
testdata.SharePointLibraryItems[0].SharePoint.ItemName,
testdata.SharePointLibraryItems[1].SharePoint.ItemName,
},
},
},
{
Name: "NoSelectRepoItemName",
Expected: []details.DetailsEntry{},
Opts: utils.SharePointOpts{
FileNames: []string{
testdata.SharePointLibraryItemPath1.Item(),
},
},
},
// {
// Name: "CreatedBefore",
// Expected: []details.DetailsEntry{testdata.SharePointLibraryItems[1]},

@@ -449,7 +449,7 @@ func restoreCollection(
locationRef = itemPath.Folder(false)
}

deets.Add(
err = deets.Add(
itemPath.String(),
itemPath.ShortRef(),
"",
@@ -458,6 +458,10 @@ func restoreCollection(
details.ItemInfo{
Exchange: info,
})
if err != nil {
// Not critical enough to need to stop restore operation.
logger.Ctx(ctx).Infow("accounting for restored item", "error", err)
}

colProgress <- struct{}{}
}

@@ -754,6 +754,11 @@ func compareOneDriveItem(
}

expectedData := expected[name]

if strings.HasSuffix(name, onedrive.MetaFileSuffix) {
expectedData = expected[itemMeta.FileName]
}

if !assert.NotNil(
t,
expectedData,

@@ -80,7 +80,7 @@ func onedriveItemWithData(

func onedriveMetadata(
t *testing.T,
fileName, itemID string,
fileName, itemID, lookupKey string,
perm permData,
permUseID bool,
) itemInfo {
@@ -94,7 +94,7 @@ func onedriveMetadata(
return itemInfo{
name: itemID,
data: testMetaJSON,
lookupKey: itemID,
lookupKey: lookupKey,
}
}

@@ -205,6 +205,24 @@ func (c *onedriveCollection) withFile(name string, fileData []byte, perm permDat
c.t,
"",
name+onedrive.MetaFileSuffix,
name+onedrive.MetaFileSuffix,
perm,
c.backupVersion >= versionPermissionSwitchedToID)
c.items = append(c.items, metadata)
c.aux = append(c.aux, metadata)

case version.OneDrive6NameInMeta:
c.items = append(c.items, onedriveItemWithData(
c.t,
name+onedrive.DataFileSuffix,
name+onedrive.DataFileSuffix,
fileData))

metadata := onedriveMetadata(
c.t,
name,
name+onedrive.MetaFileSuffix,
name,
perm,
c.backupVersion >= versionPermissionSwitchedToID)
c.items = append(c.items, metadata)
@@ -219,7 +237,8 @@ func (c *onedriveCollection) withFile(name string, fileData []byte, perm permDat

func (c *onedriveCollection) withFolder(name string, perm permData) *onedriveCollection {
switch c.backupVersion {
case 0, version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName:
case 0, version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName,
version.OneDrive6NameInMeta:
return c

case version.OneDrive1DataAndMetaFiles, 2, version.OneDrive3IsMetaMarker:
@@ -229,6 +248,7 @@ func (c *onedriveCollection) withFolder(name string, perm permData) *onedriveCol
c.t,
"",
name+onedrive.DirMetaFileSuffix,
name+onedrive.DirMetaFileSuffix,
perm,
c.backupVersion >= versionPermissionSwitchedToID))

@@ -264,6 +284,7 @@ func (c *onedriveCollection) withPermissions(perm permData) *onedriveCollection
c.t,
name,
metaName+onedrive.DirMetaFileSuffix,
metaName+onedrive.DirMetaFileSuffix,
perm,
c.backupVersion >= versionPermissionSwitchedToID)

@@ -357,7 +357,7 @@ func (oc *Collection) populateItems(ctx context.Context, errs *fault.Bus) {
if isFile {
atomic.AddInt64(&itemsFound, 1)

metaFileName = itemName
metaFileName = itemID
metaSuffix = MetaFileSuffix
} else {
atomic.AddInt64(&dirsFound, 1)
@@ -443,7 +443,7 @@ func (oc *Collection) populateItems(ctx context.Context, errs *fault.Bus) {
ctx,
itemData,
observe.ItemBackupMsg,
observe.PII(itemName+dataSuffix),
observe.PII(itemID+dataSuffix),
itemSize)
go closer()

@@ -451,7 +451,7 @@ func (oc *Collection) populateItems(ctx context.Context, errs *fault.Bus) {
})

oc.data <- &Item{
id: itemName + dataSuffix,
id: itemID + dataSuffix,
data: itemReader,
info: itemInfo,
}
@@ -461,7 +461,7 @@ func (oc *Collection) populateItems(ctx context.Context, errs *fault.Bus) {
metaReader := lazy.NewLazyReadCloser(func() (io.ReadCloser, error) {
progReader, closer := observe.ItemProgress(
ctx, itemMeta, observe.ItemBackupMsg,
observe.PII(itemName+metaSuffix), int64(itemMetaSize))
observe.PII(metaFileName+metaSuffix), int64(itemMetaSize))
go closer()
return progReader, nil
})

@@ -254,9 +254,9 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
readItemInfo := readItem.(data.StreamInfo)

if test.source == OneDriveSource {
assert.Equal(t, testItemName+DataFileSuffix, readItem.UUID())
assert.Equal(t, testItemID+DataFileSuffix, readItem.UUID())
} else {
assert.Equal(t, testItemName, readItem.UUID())
assert.Equal(t, testItemID, readItem.UUID())
}

require.Implements(t, (*data.StreamModTime)(nil), readItem)
@@ -283,7 +283,7 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
if test.source == OneDriveSource {
readItemMeta := readItems[1]

assert.Equal(t, testItemName+MetaFileSuffix, readItemMeta.UUID())
assert.Equal(t, testItemID+MetaFileSuffix, readItemMeta.UUID())

readMetaData, err := io.ReadAll(readItemMeta.ToReader())
require.NoError(t, err)

@@ -229,7 +229,7 @@ func RestoreCollection(
err error
)

if backupVersion < version.OneDriveXNameInMeta {
if backupVersion < version.OneDrive6NameInMeta {
itemInfo, err = restoreV1File(
ctx,
source,
@@ -262,13 +262,18 @@ func RestoreCollection(
continue
}

deets.Add(
err = deets.Add(
itemPath.String(),
itemPath.ShortRef(),
"",
"", // TODO: implement locationRef
true,
itemInfo)
if err != nil {
// Not critical enough to need to stop restore operation.
logger.Ctx(ctx).Infow("accounting for restored item", "error", err)
}

metrics.Successes++
} else if strings.HasSuffix(name, MetaFileSuffix) {
// Just skip this for the moment since we moved the code to the above
@@ -315,13 +320,18 @@ func RestoreCollection(
continue
}

deets.Add(
err = deets.Add(
itemPath.String(),
itemPath.ShortRef(),
"",
"", // TODO: implement locationRef
true,
itemInfo)
if err != nil {
// Not critical enough to need to stop restore operation.
logger.Ctx(ctx).Infow("accounting for restored item", "error", err)
}

metrics.Successes++
}
}

@@ -21,6 +21,7 @@ import (
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
)

@@ -261,13 +262,17 @@ func RestoreListCollection(
continue
}

deets.Add(
err = deets.Add(
itemPath.String(),
itemPath.ShortRef(),
"",
"", // TODO: implement locationRef
true,
itemInfo)
if err != nil {
// Not critical enough to need to stop restore operation.
logger.Ctx(ctx).Infow("accounting for restored item", "error", err)
}

metrics.Successes++
}
@@ -344,13 +349,17 @@ func RestorePageCollection(
continue
}

deets.Add(
err = deets.Add(
itemPath.String(),
itemPath.ShortRef(),
"",
"", // TODO: implement locationRef
true,
itemInfo)
if err != nil {
// Not critical enough to need to stop restore operation.
logger.Ctx(ctx).Infow("accounting for restored item", "error", err)
}

metrics.Successes++
}

@@ -187,7 +187,8 @@ func (cp *corsoProgress) FinishedFile(relativePath string, err error) {
With(
"service", d.repoPath.Service().String(),
"category", d.repoPath.Category().String(),
))
).
Label(fault.LabelForceNoBackupCreation))

return
}
@@ -221,13 +222,23 @@ func (cp *corsoProgress) FinishedFile(relativePath string, err error) {
}
}

cp.deets.Add(
err = cp.deets.Add(
d.repoPath.String(),
d.repoPath.ShortRef(),
parent.ShortRef(),
locationFolders,
!d.cached,
*d.info)
if err != nil {
cp.errs.AddRecoverable(clues.New("adding item to details").
With(
"service", d.repoPath.Service().String(),
"category", d.repoPath.Category().String(),
).
Label(fault.LabelForceNoBackupCreation))

return
}

folders := details.FolderEntriesForPath(parent, locPB)
cp.deets.AddFoldersForItem(

@@ -587,13 +587,16 @@ func mergeDetails(
itemUpdated = itemUpdated || newLocStr != entry.LocationRef
}

deets.Add(
err = deets.Add(
newPath.String(),
newPath.ShortRef(),
newPath.ToBuilder().Dir().ShortRef(),
newLocStr,
itemUpdated,
item)
if err != nil {
return clues.Wrap(err, "adding item to details")
}

folders := details.FolderEntriesForPath(newPath.ToBuilder().Dir(), locBuilder)
deets.AddFoldersForItem(folders, item, itemUpdated)

@@ -47,12 +47,14 @@ func (suite *StreamDetailsIntegrationSuite) TestDetails() {

deetsBuilder := &details.Builder{}

deetsBuilder.Add("ref", "shortref", "parentref", "locationRef", true,
details.ItemInfo{
Exchange: &details.ExchangeInfo{
Subject: "hello world",
},
})
require.NoError(
t,
deetsBuilder.Add("ref", "shortref", "parentref", "locationRef", true,
details.ItemInfo{
Exchange: &details.ExchangeInfo{
Subject: "hello world",
},
}))

var (
deets = deetsBuilder.Details()

@@ -1,8 +1,6 @@
package version

import "math"

const Backup = 5
const Backup = 6

// Various labels to refer to important version changes.
// Labels don't need 1:1 service:version representation. Add a new
@@ -30,10 +28,8 @@ const (
// during incremental backups.
OneDrive5DirMetaNoName = 5

// OneDriveXNameInMeta points to the backup format version where we begin
// OneDrive6NameInMeta points to the backup format version where we begin
// storing files in kopia with their item ID instead of their OneDrive file
// name.
// TODO(ashmrtn): Update this to a real value when we merge the file name
// change. Set to MAXINT for now to keep the if-check using it working.
OneDriveXNameInMeta = math.MaxInt
OneDrive6NameInMeta = 6
)

@@ -152,10 +152,11 @@ func (b *Builder) Add(
repoRef, shortRef, parentRef, locationRef string,
updated bool,
info ItemInfo,
) {
) error {
b.mu.Lock()
defer b.mu.Unlock()
b.d.add(repoRef, shortRef, parentRef, locationRef, updated, info)

return b.d.add(repoRef, shortRef, parentRef, locationRef, updated, info)
}

func (b *Builder) Details() *Details {
@@ -283,15 +284,52 @@ func (d *Details) add(
repoRef, shortRef, parentRef, locationRef string,
updated bool,
info ItemInfo,
) {
d.Entries = append(d.Entries, DetailsEntry{
) error {
entry := DetailsEntry{
RepoRef: repoRef,
ShortRef: shortRef,
ParentRef: parentRef,
LocationRef: locationRef,
Updated: updated,
ItemInfo: info,
})
}

// Use the item name and the path for the ShortRef. This ensures that renames
// within a directory generate unique ShortRefs.
if info.infoType() == OneDriveItem || info.infoType() == SharePointLibrary {
p, err := path.FromDataLayerPath(repoRef, true)
if err != nil {
return clues.Wrap(err, "munging OneDrive or SharePoint ShortRef")
}

if info.OneDrive == nil && info.SharePoint == nil {
return clues.New("item is not SharePoint or OneDrive type")
}

filename := ""
if info.OneDrive != nil {
filename = info.OneDrive.ItemName
} else if info.SharePoint != nil {
filename = info.SharePoint.ItemName
}

// Make the new path contain all display names and then the M365 item ID.
// This ensures the path will be unique, thus ensuring the ShortRef will be
// unique.
//
// If we appended the file's display name to the path then it's possible
// for a folder in the parent directory to have the same display name as the
// M365 ID of this file and also have a subfolder in the folder with a
// display name that matches the file's display name. That would result in
// duplicate ShortRefs, which we can't allow.
elements := p.Elements()
elements = append(elements[:len(elements)-1], filename, p.Item())
entry.ShortRef = path.Builder{}.Append(elements...).ShortRef()
}

d.Entries = append(d.Entries, entry)

return nil
}

// addFolder adds an entry for the given folder.

@@ -365,6 +365,183 @@ func (suite *DetailsUnitSuite) TestDetailsModel_FilterMetaFiles() {
assert.Len(t, d.Entries, 3)
}

func (suite *DetailsUnitSuite) TestDetails_Add_ShortRefs() {
itemNames := []string{
"item1",
"item2",
}

table := []struct {
name string
service path.ServiceType
category path.CategoryType
itemInfoFunc func(name string) ItemInfo
expectedUniqueRefs int
}{
{
name: "OneDrive",
service: path.OneDriveService,
category: path.FilesCategory,
itemInfoFunc: func(name string) ItemInfo {
return ItemInfo{
OneDrive: &OneDriveInfo{
ItemType: OneDriveItem,
ItemName: name,
},
}
},
expectedUniqueRefs: len(itemNames),
},
{
name: "SharePoint",
service: path.SharePointService,
category: path.LibrariesCategory,
itemInfoFunc: func(name string) ItemInfo {
return ItemInfo{
SharePoint: &SharePointInfo{
ItemType: SharePointLibrary,
ItemName: name,
},
}
},
expectedUniqueRefs: len(itemNames),
},
{
name: "SharePoint List",
service: path.SharePointService,
category: path.ListsCategory,
itemInfoFunc: func(name string) ItemInfo {
return ItemInfo{
SharePoint: &SharePointInfo{
ItemType: SharePointList,
ItemName: name,
},
}
},
// Should all end up as the starting shortref.
expectedUniqueRefs: 1,
},
{
name: "Exchange no change",
service: path.ExchangeService,
category: path.EmailCategory,
itemInfoFunc: func(name string) ItemInfo {
return ItemInfo{
Exchange: &ExchangeInfo{
ItemType: ExchangeMail,
Sender: "a-person@foo.com",
Subject: name,
Recipient: []string{"another-person@bar.com"},
},
}
},
// Should all end up as the starting shortref.
expectedUniqueRefs: 1,
},
}

for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()

b := Builder{}

for _, name := range itemNames {
item := test.itemInfoFunc(name)
itemPath := makeItemPath(
suite.T(),
test.service,
test.category,
"a-tenant",
"a-user",
[]string{
"drive-id",
"root:",
"folder",
name + "-id",
},
)

require.NoError(t, b.Add(
itemPath.String(),
"deadbeef",
itemPath.ToBuilder().Dir().String(),
itemPath.String(),
false,
item,
))
}

deets := b.Details()
shortRefs := map[string]struct{}{}

for _, d := range deets.Items() {
shortRefs[d.ShortRef] = struct{}{}
}

assert.Len(t, shortRefs, test.expectedUniqueRefs, "items don't have unique ShortRefs")
})
}
}

func (suite *DetailsUnitSuite) TestDetails_Add_ShortRefs_Unique_From_Folder() {
t := suite.T()

b := Builder{}
name := "itemName"
info := ItemInfo{
OneDrive: &OneDriveInfo{
ItemType: OneDriveItem,
ItemName: name,
},
}

itemPath := makeItemPath(
t,
path.OneDriveService,
path.FilesCategory,
"a-tenant",
"a-user",
[]string{
"drive-id",
"root:",
"folder",
name + "-id",
},
)

otherItemPath := makeItemPath(
t,
path.OneDriveService,
path.FilesCategory,
"a-tenant",
"a-user",
[]string{
"drive-id",
"root:",
"folder",
name + "-id",
name,
},
)

err := b.Add(
itemPath.String(),
"deadbeef",
itemPath.ToBuilder().Dir().String(),
itemPath.String(),
false,
info)
require.NoError(t, err)

items := b.Details().Items()
require.Len(t, items, 1)

// If the ShortRefs match then it means it's possible for the user to
// construct folder names such that they'll generate a ShortRef collision.
assert.NotEqual(t, otherItemPath.ShortRef(), items[0].ShortRef, "same ShortRef as subfolder item")
}

func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
itemTime := time.Date(2022, 10, 21, 10, 0, 0, 0, time.UTC)
folderTimeOlderThanItem := time.Date(2022, 9, 21, 10, 0, 0, 0, time.UTC)

@@ -4,6 +4,8 @@ import (
"context"
"strconv"

"github.com/alcionai/clues"

"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault"
@@ -580,7 +582,10 @@ func (ec exchangeCategory) isLeaf() bool {
// Example:
// [tenantID, service, userPN, category, mailFolder, mailID]
// => {exchMailFolder: mailFolder, exchMail: mailID}
func (ec exchangeCategory) pathValues(repo path.Path, ent details.DetailsEntry) map[categorizer][]string {
func (ec exchangeCategory) pathValues(
repo path.Path,
ent details.DetailsEntry,
) (map[categorizer][]string, error) {
var folderCat, itemCat categorizer

switch ec {
@@ -594,7 +599,7 @@ func (ec exchangeCategory) pathValues(repo path.Path, ent details.DetailsEntry)
folderCat, itemCat = ExchangeMailFolder, ExchangeMail

default:
return map[categorizer][]string{}
return nil, clues.New("bad exchanageCategory").With("category", ec)
}

result := map[categorizer][]string{
@@ -606,7 +611,7 @@ func (ec exchangeCategory) pathValues(repo path.Path, ent details.DetailsEntry)
result[folderCat] = append(result[folderCat], ent.LocationRef)
}

return result
return result, nil
}

// pathKeys returns the path keys recognized by the receiver's leaf type.

@@ -764,7 +764,9 @@ func (suite *ExchangeSelectorSuite) TestExchangeScope_MatchesPath() {
scopes := setScopesToDefault(test.scope)
var aMatch bool
for _, scope := range scopes {
pvs := ExchangeMail.pathValues(repo, ent)
pvs, err := ExchangeMail.pathValues(repo, ent)
require.NoError(t, err)

if matchesPathValues(scope, ExchangeMail, pvs) {
aMatch = true
break
@@ -1345,7 +1347,8 @@ func (suite *ExchangeSelectorSuite) TestPasses() {
suite.Run(test.name, func() {
t := suite.T()

pvs := cat.pathValues(repo, ent)
pvs, err := cat.pathValues(repo, ent)
require.NoError(t, err)

result := passes(
cat,
@@ -1486,7 +1489,8 @@ func (suite *ExchangeSelectorSuite) TestExchangeCategory_PathValues() {
ShortRef: "short",
}

pvs := test.cat.pathValues(test.path, ent)
pvs, err := test.cat.pathValues(test.path, ent)
require.NoError(t, err)
assert.Equal(t, test.expect, pvs)
})
}

@@ -55,11 +55,14 @@ func (mc mockCategorizer) isLeaf() bool {
return mc == leafCatStub
}

func (mc mockCategorizer) pathValues(repo path.Path, ent details.DetailsEntry) map[categorizer][]string {
func (mc mockCategorizer) pathValues(
repo path.Path,
ent details.DetailsEntry,
) (map[categorizer][]string, error) {
return map[categorizer][]string{
rootCatStub: {"root"},
leafCatStub: {"leaf"},
}
}, nil
}

func (mc mockCategorizer) pathKeys() []categorizer {

@@ -3,6 +3,8 @@ package selectors
import (
"context"

"github.com/alcionai/clues"

"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault"
@@ -376,20 +378,27 @@ func (c oneDriveCategory) isLeaf() bool {
// Example:
// [tenantID, service, userPN, category, folder, fileID]
// => {odFolder: folder, odFileID: fileID}
func (c oneDriveCategory) pathValues(repo path.Path, ent details.DetailsEntry) map[categorizer][]string {
func (c oneDriveCategory) pathValues(
repo path.Path,
ent details.DetailsEntry,
) (map[categorizer][]string, error) {
if ent.OneDrive == nil {
return nil, clues.New("no OneDrive ItemInfo in details")
}

// Ignore `drives/<driveID>/root:` for folder comparison
rFld := path.Builder{}.Append(repo.Folders()...).PopFront().PopFront().PopFront().String()

result := map[categorizer][]string{
OneDriveFolder: {rFld},
OneDriveItem: {repo.Item(), ent.ShortRef},
OneDriveItem: {ent.OneDrive.ItemName, ent.ShortRef},
}

if len(ent.LocationRef) > 0 {
result[OneDriveFolder] = append(result[OneDriveFolder], ent.LocationRef)
}

return result
return result, nil
}

// pathKeys returns the path keys recognized by the receiver's leaf type.

@@ -175,6 +175,7 @@ func (suite *OneDriveSelectorSuite) TestOneDriveRestore_Reduce() {
ItemInfo: details.ItemInfo{
OneDrive: &details.OneDriveInfo{
ItemType: details.OneDriveItem,
ItemName: "fileName",
},
},
},
@@ -183,6 +184,7 @@ func (suite *OneDriveSelectorSuite) TestOneDriveRestore_Reduce() {
ItemInfo: details.ItemInfo{
OneDrive: &details.OneDriveInfo{
ItemType: details.OneDriveItem,
ItemName: "fileName2",
},
},
},
@@ -191,6 +193,7 @@ func (suite *OneDriveSelectorSuite) TestOneDriveRestore_Reduce() {
ItemInfo: details.ItemInfo{
OneDrive: &details.OneDriveInfo{
ItemType: details.OneDriveItem,
ItemName: "fileName3",
},
},
},
@@ -223,7 +226,7 @@ func (suite *OneDriveSelectorSuite) TestOneDriveRestore_Reduce() {
deets,
func() *OneDriveRestore {
odr := NewOneDriveRestore(Any())
odr.Include(odr.Items(Any(), []string{"file2"}))
odr.Include(odr.Items(Any(), []string{"fileName2"}))
return odr
},
arr(file2),
@@ -257,21 +260,30 @@ func (suite *OneDriveSelectorSuite) TestOneDriveRestore_Reduce() {
func (suite *OneDriveSelectorSuite) TestOneDriveCategory_PathValues() {
t := suite.T()

elems := []string{"drive", "driveID", "root:", "dir1", "dir2", "file"}
fileName := "file"
shortRef := "short"
elems := []string{"drive", "driveID", "root:", "dir1", "dir2", fileName + "-id"}

filePath, err := path.Build("tenant", "user", path.OneDriveService, path.FilesCategory, true, elems...)
require.NoError(t, err)

expected := map[categorizer][]string{
OneDriveFolder: {"dir1/dir2"},
OneDriveItem: {"file", "short"},
OneDriveItem: {fileName, shortRef},
}

ent := details.DetailsEntry{
RepoRef: filePath.String(),
ShortRef: "short",
ShortRef: shortRef,
ItemInfo: details.ItemInfo{
OneDrive: &details.OneDriveInfo{
ItemName: fileName,
},
},
}

r := OneDriveItem.pathValues(filePath, ent)
r, err := OneDriveItem.pathValues(filePath, ent)
require.NoError(t, err)
assert.Equal(t, expected, r)
}

@@ -88,7 +88,7 @@ type (
// folderCat: folder,
// itemCat: itemID,
// }
pathValues(path.Path, details.DetailsEntry) map[categorizer][]string
pathValues(path.Path, details.DetailsEntry) (map[categorizer][]string, error)

// pathKeys produces a list of categorizers that can be used as keys in the pathValues
// map. The combination of the two funcs generically interprets the context of the
@@ -328,9 +328,11 @@ func reduce[T scopeT, C categoryT](

// for each entry, compare that entry against the scopes of the same data type
for _, ent := range deets.Items() {
ictx := clues.Add(ctx, "short_ref", ent.ShortRef)

repoPath, err := path.FromDataLayerPath(ent.RepoRef, true)
if err != nil {
el.AddRecoverable(clues.Wrap(err, "transforming repoRef to path").WithClues(ctx))
el.AddRecoverable(clues.Wrap(err, "transforming repoRef to path").WithClues(ictx))
continue
}

@@ -351,7 +353,11 @@ func reduce[T scopeT, C categoryT](
continue
}

pv := dc.pathValues(repoPath, *ent)
pv, err := dc.pathValues(repoPath, *ent)
if err != nil {
el.AddRecoverable(clues.Wrap(err, "getting path values").WithClues(ictx))
continue
}

passed := passes(dc, pv, *ent, e, f, i)
if passed {

@@ -360,9 +360,11 @@ func (suite *SelectorScopesSuite) TestPasses() {
entry = details.DetailsEntry{
RepoRef: pth.String(),
}
pvs = cat.pathValues(pth, entry)
)

pvs, err := cat.pathValues(pth, entry)
require.NoError(suite.T(), err)

for _, test := range reduceTestTable {
suite.Run(test.name, func() {
t := suite.T()

@@ -3,6 +3,8 @@ package selectors
import (
"context"

"github.com/alcionai/clues"

"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault"
@@ -496,30 +498,44 @@ func (c sharePointCategory) isLeaf() bool {
// Example:
// [tenantID, service, siteID, category, folder, itemID]
// => {spFolder: folder, spItemID: itemID}
func (c sharePointCategory) pathValues(repo path.Path, ent details.DetailsEntry) map[categorizer][]string {
var folderCat, itemCat categorizer
func (c sharePointCategory) pathValues(
repo path.Path,
ent details.DetailsEntry,
) (map[categorizer][]string, error) {
var (
folderCat, itemCat categorizer
itemName = repo.Item()
)

switch c {
case SharePointLibraryFolder, SharePointLibraryItem:
if ent.SharePoint == nil {
return nil, clues.New("no SharePoint ItemInfo in details")
}

folderCat, itemCat = SharePointLibraryFolder, SharePointLibraryItem
itemName = ent.SharePoint.ItemName

case SharePointList, SharePointListItem:
folderCat, itemCat = SharePointList, SharePointListItem

case SharePointPage, SharePointPageFolder:
folderCat, itemCat = SharePointPageFolder, SharePointPage

default:
return map[categorizer][]string{}
return nil, clues.New("unrecognized sharePointCategory").With("category", c)
}

result := map[categorizer][]string{
folderCat: {repo.Folder(false)},
itemCat: {repo.Item(), ent.ShortRef},
itemCat: {itemName, ent.ShortRef},
}

if len(ent.LocationRef) > 0 {
result[folderCat] = append(result[folderCat], ent.LocationRef)
}

return result
return result, nil
}

// pathKeys returns the path keys recognized by the receiver's leaf type.

@@ -215,6 +215,7 @@ func (suite *SharePointSelectorSuite) TestSharePointRestore_Reduce() {
ItemInfo: details.ItemInfo{
SharePoint: &details.SharePointInfo{
ItemType: details.SharePointLibrary,
ItemName: "itemName",
},
},
},
@@ -223,6 +224,7 @@ func (suite *SharePointSelectorSuite) TestSharePointRestore_Reduce() {
ItemInfo: details.ItemInfo{
SharePoint: &details.SharePointInfo{
ItemType: details.SharePointLibrary,
ItemName: "itemName2",
},
},
},
@@ -231,6 +233,7 @@ func (suite *SharePointSelectorSuite) TestSharePointRestore_Reduce() {
ItemInfo: details.ItemInfo{
SharePoint: &details.SharePointInfo{
ItemType: details.SharePointLibrary,
ItemName: "itemName3",
},
},
},
@@ -239,6 +242,7 @@ func (suite *SharePointSelectorSuite) TestSharePointRestore_Reduce() {
ItemInfo: details.ItemInfo{
SharePoint: &details.SharePointInfo{
ItemType: details.SharePointPage,
ItemName: "itemName4",
},
},
},
@@ -247,6 +251,7 @@ func (suite *SharePointSelectorSuite) TestSharePointRestore_Reduce() {
ItemInfo: details.ItemInfo{
SharePoint: &details.SharePointInfo{
ItemType: details.SharePointPage,
ItemName: "itemName5",
},
},
},
@@ -279,7 +284,7 @@ func (suite *SharePointSelectorSuite) TestSharePointRestore_Reduce() {
deets: deets,
makeSelector: func() *SharePointRestore {
odr := NewSharePointRestore(Any())
odr.Include(odr.LibraryItems(Any(), []string{"item2"}))
odr.Include(odr.LibraryItems(Any(), []string{"itemName2"}))
return odr
},
expect: arr(item2),
@@ -321,6 +326,10 @@ func (suite *SharePointSelectorSuite) TestSharePointRestore_Reduce() {
}

func (suite *SharePointSelectorSuite) TestSharePointCategory_PathValues() {
itemName := "item"
shortRef := "short"
elems := []string{"dir1", "dir2", itemName + "-id"}

table := []struct {
name string
sc sharePointCategory
@@ -331,7 +340,7 @@ func (suite *SharePointSelectorSuite) TestSharePointCategory_PathValues() {
sc: SharePointLibraryItem,
expected: map[categorizer][]string{
SharePointLibraryFolder: {"dir1/dir2"},
SharePointLibraryItem: {"item", "short"},
SharePointLibraryItem: {itemName, shortRef},
},
},
{
@@ -339,7 +348,7 @@ func (suite *SharePointSelectorSuite) TestSharePointCategory_PathValues() {
sc: SharePointListItem,
expected: map[categorizer][]string{
SharePointList: {"dir1/dir2"},
SharePointListItem: {"item", "short"},
SharePointListItem: {"item-id", shortRef},
},
},
}
@@ -354,15 +363,21 @@ func (suite *SharePointSelectorSuite) TestSharePointCategory_PathValues() {
path.SharePointService,
test.sc.PathType(),
true,
"dir1", "dir2", "item")
elems...)
require.NoError(t, err)

ent := details.DetailsEntry{
RepoRef: itemPath.String(),
ShortRef: "short",
ShortRef: shortRef,
ItemInfo: details.ItemInfo{
SharePoint: &details.SharePointInfo{
ItemName: itemName,
},
},
}

pv := test.sc.pathValues(itemPath, ent)
pv, err := test.sc.pathValues(itemPath, ent)
require.NoError(t, err)
assert.Equal(t, test.expected, pv)
})
}