Compare commits

...

4 Commits

Author SHA1 Message Date
Keepers
210b543280 populate the location path values (#2430)
Adds location path values to all exchange types.
These values are only propagated if the collection
is in the event category.
2023-02-08 14:59:18 -07:00
ryanfkeepers
52add8dad6 minor cleanups to comments 2023-02-08 14:59:18 -07:00
ryanfkeepers
43c47d678e fix segfault panic 2023-02-08 14:59:18 -07:00
ryanfkeepers
1b1629e09c add locationRef to details
Adds a new reference to the details entry: locationRef.
The locationRef holds the human-readable version of the
item's location in whatever m365 service sourced the
item. Hookup is incomplete; following PRs will fill out
the functionality.

Also adds a LocationPather interface to data_collections
to pass this data back and forth between producers and
consumers.
2023-02-08 14:59:18 -07:00
24 changed files with 654 additions and 156 deletions
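
Sketch for orientation: the commits above introduce a human-readable location that travels alongside the ID-based storage path, surfaced through the new data.LocationPather interface and recorded in backup details as LocationRef. The snippet below is a minimal illustration of how a consumer can probe for the optional interface; the helper name and import paths are assumptions, while the interface and method names come from the diffs that follow.

package sketch

import (
	"github.com/alcionai/corso/src/internal/data" // assumed import path
	"github.com/alcionai/corso/src/pkg/path"      // assumed import path
)

// locationOf returns the display-name path for anything that opted into
// the LocationPather interface, and nil otherwise. In this PR only
// Exchange Calendar collections populate a location.
func locationOf(c any) path.Path {
	if lp, ok := c.(data.LocationPather); ok {
		return lp.LocationPath()
	}

	return nil
}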

View File

@ -169,10 +169,8 @@ func (c Contacts) EnumerateContainers(
continue
}
temp := graph.NewCacheFolder(fold, nil)
err = fn(temp)
if err != nil {
temp := graph.NewCacheFolder(fold, nil, nil)
if err := fn(temp); err != nil {
errs = multierror.Append(err, errs)
continue
}

View File

@ -209,10 +209,11 @@ func (c Events) EnumerateContainers(
continue
}
temp := graph.NewCacheFolder(cd, path.Builder{}.Append(*cd.GetDisplayName()))
err = fn(temp)
if err != nil {
temp := graph.NewCacheFolder(
cd,
path.Builder{}.Append(*cd.GetDisplayName()),
path.Builder{}.Append(*cd.GetDisplayName()))
if err := fn(temp); err != nil {
errs = multierror.Append(err, errs)
continue
}

View File

@ -198,8 +198,7 @@ func (c Mail) EnumerateContainers(
}
for _, v := range resp.GetValue() {
temp := graph.NewCacheFolder(v, nil)
temp := graph.NewCacheFolder(v, nil, nil)
if err := fn(temp); err != nil {
errs = multierror.Append(errs, errors.Wrap(err, "iterating mail folders delta"))
continue

View File

@ -29,8 +29,10 @@ func (cfc *contactFolderCache) populateContactRoot(
return support.ConnectorStackErrorTraceWrap(err, "fetching root folder")
}
temp := graph.NewCacheFolder(f, path.Builder{}.Append(baseContainerPath...))
temp := graph.NewCacheFolder(
f,
path.Builder{}.Append(baseContainerPath...), // storage path
path.Builder{}.Append(baseContainerPath...)) // display location
if err := cfc.addFolder(temp); err != nil {
return errors.Wrap(err, "adding resolver dir")
}

View File

@ -26,16 +26,19 @@ type mockContainer struct {
displayName *string
parentID *string
p *path.Builder
l *path.Builder
}
//nolint:revive
func (m mockContainer) GetId() *string { return m.id }
//nolint:revive
func (m mockContainer) GetParentFolderId() *string { return m.parentID }
func (m mockContainer) GetDisplayName() *string { return m.displayName }
func (m mockContainer) Path() *path.Builder { return m.p }
func (m mockContainer) SetPath(p *path.Builder) {}
func (m mockContainer) GetParentFolderId() *string { return m.parentID }
func (m mockContainer) GetDisplayName() *string { return m.displayName }
func (m mockContainer) Location() *path.Builder { return m.l }
func (m mockContainer) SetLocation(p *path.Builder) {}
func (m mockContainer) Path() *path.Builder { return m.p }
func (m mockContainer) SetPath(p *path.Builder) {}
func strPtr(s string) *string {
return &s
@ -168,7 +171,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
parentID: nil,
},
nil,
),
nil),
check: assert.Error,
},
{
@ -180,7 +183,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
parentID: nil,
},
path.Builder{}.Append("foo"),
),
path.Builder{}.Append("loc")),
check: assert.NoError,
},
{
@ -192,7 +195,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
parentID: &testParentID,
},
path.Builder{}.Append("foo"),
),
path.Builder{}.Append("loc")),
check: assert.Error,
},
{
@ -204,7 +207,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
parentID: &testParentID,
},
path.Builder{}.Append("foo"),
),
path.Builder{}.Append("loc")),
check: assert.Error,
},
{
@ -216,7 +219,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
parentID: &testParentID,
},
nil,
),
nil),
check: assert.NoError,
},
}
@ -241,31 +244,21 @@ type mockCachedContainer struct {
id string
parentID string
displayName string
l *path.Builder
p *path.Builder
expectedPath string
}
//nolint:revive
func (m mockCachedContainer) GetId() *string {
return &m.id
}
func (m mockCachedContainer) GetId() *string { return &m.id }
//nolint:revive
func (m mockCachedContainer) GetParentFolderId() *string {
return &m.parentID
}
func (m mockCachedContainer) GetDisplayName() *string {
return &m.displayName
}
func (m mockCachedContainer) Path() *path.Builder {
return m.p
}
func (m *mockCachedContainer) SetPath(newPath *path.Builder) {
m.p = newPath
}
func (m mockCachedContainer) GetParentFolderId() *string { return &m.parentID }
func (m mockCachedContainer) GetDisplayName() *string { return &m.displayName }
func (m mockCachedContainer) Location() *path.Builder { return m.l }
func (m *mockCachedContainer) SetLocation(newLoc *path.Builder) { m.l = newLoc }
func (m mockCachedContainer) Path() *path.Builder { return m.p }
func (m *mockCachedContainer) SetPath(newPath *path.Builder) { m.p = newPath }
func resolverWithContainers(numContainers int) (*containerResolver, []*mockCachedContainer) {
containers := make([]*mockCachedContainer, 0, numContainers)
@ -595,7 +588,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
for _, test := range tests {
suite.T().Run(test.name, func(t *testing.T) {
folderID, err := CreateContainerDestinaion(
folderID, err := CreateContainerDestination(
ctx,
m365,
test.pathFunc1(t),
@ -608,7 +601,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
_, err = resolver.IDToPath(ctx, folderID)
assert.NoError(t, err)
secondID, err := CreateContainerDestinaion(
secondID, err := CreateContainerDestination(
ctx,
m365,
test.pathFunc2(t),

View File

@ -44,7 +44,10 @@ func (ecc *eventCalendarCache) populateEventRoot(ctx context.Context) error {
return errors.Wrap(err, "fetching calendar "+support.ConnectorStackErrorTrace(err))
}
temp := graph.NewCacheFolder(f, path.Builder{}.Append(container))
temp := graph.NewCacheFolder(
f,
path.Builder{}.Append(container), // storage path
path.Builder{}.Append(container)) // display location
if err := ecc.addFolder(temp); err != nil {
return errors.Wrap(err, "initializing calendar resolver")
}
@ -91,7 +94,10 @@ func (ecc *eventCalendarCache) AddToCache(ctx context.Context, f graph.Container
return errors.Wrap(err, "validating container")
}
temp := graph.NewCacheFolder(f, path.Builder{}.Append(calendarOthersFolder, *f.GetDisplayName()))
temp := graph.NewCacheFolder(
f,
path.Builder{}.Append(calendarOthersFolder, *f.GetDisplayName()), // storage path
path.Builder{}.Append(calendarOthersFolder, *f.GetDisplayName())) // display location
if err := ecc.addFolder(temp); err != nil {
return errors.Wrap(err, "adding container")

View File

@ -77,6 +77,11 @@ type Collection struct {
// moved. It will be empty on its first retrieval.
prevPath path.Path
// LocationPath contains the path with human-readable display names.
// IE: "/Inbox/Important" instead of "/abcdxyz123/algha=lgkhal=t"
// Currently only implemented for Exchange Calendars.
locationPath path.Path
state data.CollectionState
// doNotMergeItems should only be true if the old delta token expired.
@ -91,7 +96,7 @@ type Collection struct {
// or notMoved (if they match).
func NewCollection(
user string,
curr, prev path.Path,
curr, prev, location path.Path,
category path.CategoryType,
items itemer,
statusUpdater support.StatusUpdater,
@ -99,18 +104,19 @@ func NewCollection(
doNotMergeItems bool,
) Collection {
collection := Collection{
added: make(map[string]struct{}, 0),
category: category,
ctrl: ctrlOpts,
data: make(chan data.Stream, collectionChannelBufferSize),
doNotMergeItems: doNotMergeItems,
fullPath: curr,
added: make(map[string]struct{}, 0),
removed: make(map[string]struct{}, 0),
items: items,
locationPath: location,
prevPath: prev,
removed: make(map[string]struct{}, 0),
state: stateOf(prev, curr),
statusUpdater: statusUpdater,
user: user,
items: items,
}
return collection
@ -144,6 +150,12 @@ func (col *Collection) FullPath() path.Path {
return col.fullPath
}
// LocationPath produces the Collection's full path, but with display names
// instead of IDs in the folders. Only populated for Calendars.
func (col *Collection) LocationPath() path.Path {
return col.locationPath
}
// TODO(ashmrtn): Fill in with previous path once GraphConnector compares old
// and new folder hierarchies.
func (col Collection) PreviousPath() path.Path {

View File

@ -127,28 +127,36 @@ func (suite *ExchangeDataCollectionSuite) TestNewCollection_state() {
Append("bar").
ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false)
require.NoError(suite.T(), err)
locP, err := path.Builder{}.
Append("human-readable").
ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false)
require.NoError(suite.T(), err)
table := []struct {
name string
prev path.Path
curr path.Path
loc path.Path
expect data.CollectionState
}{
{
name: "new",
curr: fooP,
loc: locP,
expect: data.NewState,
},
{
name: "not moved",
prev: fooP,
curr: fooP,
loc: locP,
expect: data.NotMovedState,
},
{
name: "moved",
prev: fooP,
curr: barP,
loc: locP,
expect: data.MovedState,
},
{
@ -161,12 +169,15 @@ func (suite *ExchangeDataCollectionSuite) TestNewCollection_state() {
suite.T().Run(test.name, func(t *testing.T) {
c := NewCollection(
"u",
test.curr, test.prev,
test.curr, test.prev, test.loc,
0,
&mockItemer{}, nil,
control.Options{},
false)
assert.Equal(t, test.expect, c.State())
assert.Equal(t, test.expect, c.State(), "collection state")
assert.Equal(t, test.curr, c.fullPath, "full path")
assert.Equal(t, test.prev, c.prevPath, "prev path")
assert.Equal(t, test.loc, c.locationPath, "location path")
})
}
}

View File

@ -53,7 +53,9 @@ func (mc *mailFolderCache) populateMailRoot(ctx context.Context) error {
directory = DefaultMailFolder
}
temp := graph.NewCacheFolder(f, path.Builder{}.Append(directory))
temp := graph.NewCacheFolder(f,
path.Builder{}.Append(directory), // storage path
path.Builder{}.Append(directory)) // display location
if err := mc.addFolder(temp); err != nil {
return errors.Wrap(err, "adding resolver dir")
}

View File

@ -86,44 +86,70 @@ func PopulateExchangeContainerResolver(
}
// Returns true if the container passes the scope comparison and should be included.
// Also returns the path representing the directory.
// Returns:
// - the path representing the directory as it should be stored in the repository.
// - the human-readable path using display names.
// - true if the path passes the scope comparison.
func includeContainer(
qp graph.QueryParams,
c graph.CachedContainer,
scope selectors.ExchangeScope,
) (path.Path, bool) {
) (path.Path, path.Path, bool) {
var (
category = scope.Category().PathType()
directory string
locPath path.Path
category = scope.Category().PathType()
pb = c.Path()
loc = c.Location()
)
// Clause ensures that DefaultContactFolder is inspected properly
if category == path.ContactsCategory && *c.GetDisplayName() == DefaultContactFolder {
pb = c.Path().Append(DefaultContactFolder)
pb = pb.Append(DefaultContactFolder)
if loc != nil {
loc = loc.Append(DefaultContactFolder)
}
}
dirPath, err := pb.ToDataLayerExchangePathForCategory(
qp.Credentials.AzureTenantID,
qp.ResourceOwner,
category,
false,
)
false)
// Containers without a path (e.g. Root mail folder) always err here.
if err != nil {
return nil, false
return nil, nil, false
}
directory = pb.String()
directory = dirPath.Folder()
if loc != nil {
locPath, err = loc.ToDataLayerExchangePathForCategory(
qp.Credentials.AzureTenantID,
qp.ResourceOwner,
category,
false)
// Containers without a path (e.g. Root mail folder) always err here.
if err != nil {
return nil, nil, false
}
directory = locPath.Folder()
}
var ok bool
switch category {
case path.EmailCategory:
return dirPath, scope.Matches(selectors.ExchangeMailFolder, directory)
ok = scope.Matches(selectors.ExchangeMailFolder, directory)
case path.ContactsCategory:
return dirPath, scope.Matches(selectors.ExchangeContactFolder, directory)
ok = scope.Matches(selectors.ExchangeContactFolder, directory)
case path.EventsCategory:
return dirPath, scope.Matches(selectors.ExchangeEventCalendar, directory)
ok = scope.Matches(selectors.ExchangeEventCalendar, directory)
default:
return dirPath, false
return nil, nil, false
}
return dirPath, locPath, ok
}

View File

@ -70,7 +70,7 @@ func filterContainersAndFillCollections(
cID := *c.GetId()
delete(tombstones, cID)
currPath, ok := includeContainer(qp, c, scope)
currPath, locPath, ok := includeContainer(qp, c, scope)
// Only create a collection if the path matches the scope.
if !ok {
continue
@ -110,10 +110,15 @@ func filterContainersAndFillCollections(
deltaURLs[cID] = newDelta.URL
}
if qp.Category != path.EventsCategory {
locPath = nil
}
edc := NewCollection(
qp.ResourceOwner,
currPath,
prevPath,
locPath,
scope.Category().PathType(),
ibt,
statusUpdater,
@ -167,6 +172,7 @@ func filterContainersAndFillCollections(
qp.ResourceOwner,
nil, // marks the collection as deleted
prevPath,
nil, // tombstones don't need a location
scope.Category().PathType(),
ibt,
statusUpdater,

View File

@ -344,7 +344,7 @@ func RestoreExchangeDataCollections(
userCaches = directoryCaches[userID]
}
containerID, err := CreateContainerDestinaion(
containerID, err := CreateContainerDestination(
ctx,
creds,
dc.FullPath(),
@ -447,10 +447,16 @@ func restoreCollection(
continue
}
// var locationRef string
// if category == path.ContactsCategory {
// locationRef = itemPath.Folder()
// }
deets.Add(
itemPath.String(),
itemPath.ShortRef(),
"",
"", // TODO: locationRef
true,
details.ItemInfo{
Exchange: info,
@ -461,12 +467,12 @@ func restoreCollection(
}
}
// CreateContainerDestinaion builds the destination into the container
// CreateContainerDestination builds the destination into the container
// at the provided path. As a precondition, the destination cannot
// already exist. If it does then an error is returned. The provided
// containerResolver is updated with the new destination.
// @ returns the container ID of the new destination container.
func CreateContainerDestinaion(
func CreateContainerDestination(
ctx context.Context,
creds account.M365Config,
directory path.Path,

View File

@ -12,6 +12,12 @@ import (
// reuse logic in IDToPath.
type CachedContainer interface {
Container
// Location contains either the display names for the dirs (if this is a calendar)
// or nil
Location() *path.Builder
SetLocation(*path.Builder)
// Path contains either the ids for the dirs (if this is a calendar)
// or the display names for the dirs
Path() *path.Builder
SetPath(*path.Builder)
}
@ -45,13 +51,15 @@ var _ CachedContainer = &CacheFolder{}
type CacheFolder struct {
Container
l *path.Builder
p *path.Builder
}
// NewCacheFolder public constructor for struct
func NewCacheFolder(c Container, pb *path.Builder) CacheFolder {
func NewCacheFolder(c Container, pb, lpb *path.Builder) CacheFolder {
cf := CacheFolder{
Container: c,
l: lpb,
p: pb,
}
@ -62,6 +70,14 @@ func NewCacheFolder(c Container, pb *path.Builder) CacheFolder {
// Required Functions to satisfy interfaces
// =========================================
func (cf CacheFolder) Location() *path.Builder {
return cf.l
}
func (cf *CacheFolder) SetLocation(newLocation *path.Builder) {
cf.l = newLocation
}
func (cf CacheFolder) Path() *path.Builder {
return cf.p
}

View File

@ -235,7 +235,13 @@ func RestoreCollection(
restoredIDs[trimmedName] = itemID
deets.Add(itemPath.String(), itemPath.ShortRef(), "", true, itemInfo)
deets.Add(
itemPath.String(),
itemPath.ShortRef(),
"",
"", // TODO: implement locationRef
true,
itemInfo)
// Mark it as success without processing .meta
// file if we are not restoring permissions
@ -343,7 +349,13 @@ func RestoreCollection(
continue
}
deets.Add(itemPath.String(), itemPath.ShortRef(), "", true, itemInfo)
deets.Add(
itemPath.String(),
itemPath.ShortRef(),
"",
"", // TODO: implement locationRef
true,
itemInfo)
metrics.Successes++
}
}

View File

@ -276,6 +276,7 @@ func RestoreListCollection(
itemPath.String(),
itemPath.ShortRef(),
"",
"", // TODO: implement locationRef
true,
itemInfo)
@ -355,6 +356,7 @@ func RestorePageCollection(
itemPath.String(),
itemPath.ShortRef(),
"",
"", // TODO: implement locationRef
true,
itemInfo,
)

View File

@ -92,6 +92,12 @@ type Stream interface {
Deleted() bool
}
// LocationPather provides a LocationPath describing the path with Display Names
// instead of canonical IDs
type LocationPather interface {
LocationPath() path.Path
}
// StreamInfo is used to provide service specific
// information about the Stream
type StreamInfo interface {

View File

@ -124,10 +124,11 @@ func (rw *restoreStreamReader) Read(p []byte) (n int, err error) {
}
type itemDetails struct {
info *details.ItemInfo
repoPath path.Path
prevPath path.Path
cached bool
info *details.ItemInfo
repoPath path.Path
prevPath path.Path
locationPath path.Path
cached bool
}
type corsoProgress struct {
@ -188,20 +189,29 @@ func (cp *corsoProgress) FinishedFile(relativePath string, err error) {
parent := d.repoPath.ToBuilder().Dir()
var locationFolders string
if d.locationPath != nil {
locationFolders = d.locationPath.Folder()
}
cp.deets.Add(
d.repoPath.String(),
d.repoPath.ShortRef(),
parent.ShortRef(),
locationFolders,
!d.cached,
*d.info,
)
*d.info)
folders := details.FolderEntriesForPath(parent)
var locPB *path.Builder
if d.locationPath != nil {
locPB = d.locationPath.ToBuilder()
}
folders := details.FolderEntriesForPath(parent, locPB)
cp.deets.AddFoldersForItem(
folders,
*d.info,
!d.cached,
)
!d.cached)
}
// Kopia interface function used as a callback when kopia finishes hashing a file.
@ -311,6 +321,12 @@ func collectionEntries(
continue
}
var locationPath path.Path
if lp, ok := e.(data.LocationPather); ok {
locationPath = lp.LocationPath()
}
trace.Log(ctx, "kopia:streamEntries:item", itemPath.String())
if e.Deleted() {
@ -332,7 +348,11 @@ func collectionEntries(
// previous snapshot then we should populate prevPath here and leave
// info nil.
itemInfo := ei.Info()
d := &itemDetails{info: &itemInfo, repoPath: itemPath}
d := &itemDetails{
info: &itemInfo,
repoPath: itemPath,
locationPath: locationPath,
}
progress.put(encodeAsPath(itemPath.PopFront().Elements()...), d)
}

View File

@ -581,10 +581,11 @@ func mergeDetails(
newPath.String(),
newPath.ShortRef(),
newPath.ToBuilder().Dir().ShortRef(),
"", // TODO Location Ref,
itemUpdated,
item)
folders := details.FolderEntriesForPath(newPath.ToBuilder().Dir())
folders := details.FolderEntriesForPath(newPath.ToBuilder().Dir(), nil)
deets.AddFoldersForItem(folders, item, itemUpdated)
// Track how many entries we added so that we know if we got them all when

View File

@ -44,7 +44,7 @@ func (suite *StreamStoreIntegrationSuite) TestDetails() {
deetsBuilder := &details.Builder{}
deetsBuilder.Add("ref", "shortref", "parentref", true,
deetsBuilder.Add("ref", "shortref", "parentref", "locationRef", true,
details.ItemInfo{
Exchange: &details.ExchangeInfo{
Subject: "hello world",
@ -66,6 +66,7 @@ func (suite *StreamStoreIntegrationSuite) TestDetails() {
assert.Equal(t, deets.Entries[0].ParentRef, readDeets.Entries[0].ParentRef)
assert.Equal(t, deets.Entries[0].ShortRef, readDeets.Entries[0].ShortRef)
assert.Equal(t, deets.Entries[0].RepoRef, readDeets.Entries[0].RepoRef)
assert.Equal(t, deets.Entries[0].LocationRef, readDeets.Entries[0].LocationRef)
assert.Equal(t, deets.Entries[0].Updated, readDeets.Entries[0].Updated)
assert.NotNil(t, readDeets.Entries[0].Exchange)
assert.Equal(t, *deets.Entries[0].Exchange, *readDeets.Entries[0].Exchange)

View File

@ -15,11 +15,12 @@ import (
)
type folderEntry struct {
RepoRef string
ShortRef string
ParentRef string
Updated bool
Info ItemInfo
RepoRef string
ShortRef string
ParentRef string
LocationRef string
Updated bool
Info ItemInfo
}
// --------------------------------------------------------------------------------
@ -110,10 +111,14 @@ type Builder struct {
knownFolders map[string]folderEntry `json:"-"`
}
func (b *Builder) Add(repoRef, shortRef, parentRef string, updated bool, info ItemInfo) {
func (b *Builder) Add(
repoRef, shortRef, parentRef, locationRef string,
updated bool,
info ItemInfo,
) {
b.mu.Lock()
defer b.mu.Unlock()
b.d.add(repoRef, shortRef, parentRef, updated, info)
b.d.add(repoRef, shortRef, parentRef, locationRef, updated, info)
}
func (b *Builder) Details() *Details {
@ -131,16 +136,23 @@ func (b *Builder) Details() *Details {
// TODO(ashmrtn): If we never need to pre-populate the modified time of a folder
// we should just merge this with AddFoldersForItem, have Add call
// AddFoldersForItem, and unexport AddFoldersForItem.
func FolderEntriesForPath(parent *path.Builder) []folderEntry {
func FolderEntriesForPath(parent, location *path.Builder) []folderEntry {
folders := []folderEntry{}
lfs := locationRefOf(location)
for len(parent.Elements()) > 0 {
nextParent := parent.Dir()
var lr string
if lfs != nil {
lr = lfs.String()
}
folders = append(folders, folderEntry{
RepoRef: parent.String(),
ShortRef: parent.ShortRef(),
ParentRef: nextParent.ShortRef(),
RepoRef: parent.String(),
ShortRef: parent.ShortRef(),
ParentRef: nextParent.ShortRef(),
LocationRef: lr,
Info: ItemInfo{
Folder: &FolderInfo{
ItemType: FolderItem,
@ -150,11 +162,30 @@ func FolderEntriesForPath(parent *path.Builder) []folderEntry {
})
parent = nextParent
if lfs != nil {
lfs = lfs.Dir()
}
}
return folders
}
// assumes the pb contains a path like:
// <tenant>/<service>/<owner>/<category>/<logical_containers>...
// and returns a string with only <logical_containers>/...
func locationRefOf(pb *path.Builder) *path.Builder {
if pb == nil {
return nil
}
for i := 0; i < 4; i++ {
pb = pb.PopFront()
}
return pb
}
// AddFoldersForItem adds entries for the given folders. It skips adding entries that
// have been added by previous calls.
func (b *Builder) AddFoldersForItem(folders []folderEntry, itemInfo ItemInfo, updated bool) {
@ -202,7 +233,11 @@ type Details struct {
DetailsModel
}
func (d *Details) add(repoRef, shortRef, parentRef string, updated bool, info ItemInfo) {
func (d *Details) add(
repoRef, shortRef, parentRef, locationRef string,
updated bool,
info ItemInfo,
) {
d.Entries = append(d.Entries, DetailsEntry{
RepoRef: repoRef,
ShortRef: shortRef,
@ -233,9 +268,21 @@ type DetailsEntry struct {
RepoRef string `json:"repoRef"`
ShortRef string `json:"shortRef"`
ParentRef string `json:"parentRef,omitempty"`
// LocationRef contains the logical path structure by its human-readable
// display names. IE: If an item is located at "/Inbox/Important", we
// hold that string in the LocationRef, while the actual IDs of each
// container are used for the RepoRef.
// LocationRef only holds the container values, and does not include
// the metadata prefixes (tenant, service, owner, etc) found in the
// repoRef.
// Currently only implemented for Exchange Calendars.
LocationRef string `json:"locationRef,omitempty"`
// Indicates the item was added or updated in this backup
// Always `true` for full backups
Updated bool `json:"updated"`
ItemInfo
}
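
Illustration of the locationRefOf trimming above: a location builder shaped like a repo path has its first four elements (tenant, service, owner, category) dropped, leaving only the logical containers that end up in LocationRef. The values and import path below are assumptions; the Builder methods used (Append, PopFront, Dir, String) all appear elsewhere in this compare.

package main

import (
	"fmt"

	"github.com/alcionai/corso/src/pkg/path" // assumed import path
)

func main() {
	// A location builder shaped like a repo path: four metadata elements
	// followed by the logical containers (values assumed for illustration).
	loc := path.Builder{}.Append(
		"tenant-id", "exchange", "user@example.com", "events",
		"Calendar", "Birthdays")

	// Same trimming as locationRefOf: drop tenant/service/owner/category.
	for i := 0; i < 4; i++ {
		loc = loc.PopFront()
	}

	// What remains becomes the LocationRef of the deepest folder entry;
	// prints something like: Calendar/Birthdays
	fmt.Println(loc.String())

	// FolderEntriesForPath then walks up one level at a time, so the
	// parent folder's entry would carry just "Calendar".
	fmt.Println(loc.Dir().String())
}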

View File

@ -39,8 +39,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{
name: "no info",
entry: DetailsEntry{
RepoRef: "reporef",
ShortRef: "deadbeef",
RepoRef: "reporef",
ShortRef: "deadbeef",
LocationRef: "locationref",
},
expectHs: []string{"ID"},
expectVs: []string{"deadbeef"},
@ -48,8 +49,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{
name: "exchange event info",
entry: DetailsEntry{
RepoRef: "reporef",
ShortRef: "deadbeef",
RepoRef: "reporef",
ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{
Exchange: &ExchangeInfo{
ItemType: ExchangeEvent,
@ -67,8 +69,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{
name: "exchange contact info",
entry: DetailsEntry{
RepoRef: "reporef",
ShortRef: "deadbeef",
RepoRef: "reporef",
ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{
Exchange: &ExchangeInfo{
ItemType: ExchangeContact,
@ -82,8 +85,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{
name: "exchange mail info",
entry: DetailsEntry{
RepoRef: "reporef",
ShortRef: "deadbeef",
RepoRef: "reporef",
ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{
Exchange: &ExchangeInfo{
ItemType: ExchangeMail,
@ -99,8 +103,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{
name: "sharepoint info",
entry: DetailsEntry{
RepoRef: "reporef",
ShortRef: "deadbeef",
RepoRef: "reporef",
ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{
SharePoint: &SharePointInfo{
ItemName: "itemName",
@ -128,8 +133,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{
name: "oneDrive info",
entry: DetailsEntry{
RepoRef: "reporef",
ShortRef: "deadbeef",
RepoRef: "reporef",
ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{
OneDrive: &OneDriveInfo{
ItemName: "itemName",
@ -157,37 +163,57 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
}
var pathItemsTable = []struct {
name string
ents []DetailsEntry
expectRefs []string
name string
ents []DetailsEntry
expectRepoRefs []string
expectLocationRefs []string
}{
{
name: "nil entries",
ents: nil,
expectRefs: []string{},
name: "nil entries",
ents: nil,
expectRepoRefs: []string{},
expectLocationRefs: []string{},
},
{
name: "single entry",
ents: []DetailsEntry{
{RepoRef: "abcde"},
{
RepoRef: "abcde",
LocationRef: "locationref",
},
},
expectRefs: []string{"abcde"},
expectRepoRefs: []string{"abcde"},
expectLocationRefs: []string{"locationref"},
},
{
name: "multiple entries",
ents: []DetailsEntry{
{RepoRef: "abcde"},
{RepoRef: "12345"},
{
RepoRef: "abcde",
LocationRef: "locationref",
},
{
RepoRef: "12345",
LocationRef: "locationref2",
},
},
expectRefs: []string{"abcde", "12345"},
expectRepoRefs: []string{"abcde", "12345"},
expectLocationRefs: []string{"locationref", "locationref2"},
},
{
name: "multiple entries with folder",
ents: []DetailsEntry{
{RepoRef: "abcde"},
{RepoRef: "12345"},
{
RepoRef: "deadbeef",
RepoRef: "abcde",
LocationRef: "locationref",
},
{
RepoRef: "12345",
LocationRef: "locationref2",
},
{
RepoRef: "deadbeef",
LocationRef: "locationref3",
ItemInfo: ItemInfo{
Folder: &FolderInfo{
DisplayName: "test folder",
@ -195,7 +221,8 @@ var pathItemsTable = []struct {
},
},
},
expectRefs: []string{"abcde", "12345"},
expectRepoRefs: []string{"abcde", "12345"},
expectLocationRefs: []string{"locationref", "locationref2"},
},
}
@ -207,7 +234,7 @@ func (suite *DetailsUnitSuite) TestDetailsModel_Path() {
Entries: test.ents,
},
}
assert.Equal(t, test.expectRefs, d.Paths())
assert.ElementsMatch(t, test.expectRepoRefs, d.Paths())
})
}
}
@ -222,10 +249,11 @@ func (suite *DetailsUnitSuite) TestDetailsModel_Items() {
}
ents := d.Items()
assert.Len(t, ents, len(test.expectRefs))
assert.Len(t, ents, len(test.expectRepoRefs))
for _, e := range ents {
assert.Contains(t, test.expectRefs, e.RepoRef)
assert.Contains(t, test.expectRepoRefs, e.RepoRef)
assert.Contains(t, test.expectLocationRefs, e.LocationRef)
}
})
}
@ -253,9 +281,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
name: "MultipleFolders",
folders: []folderEntry{
{
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{
Folder: &FolderInfo{
Modified: folderTimeOlderThanItem,
@ -263,9 +292,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
},
},
{
RepoRef: "rr2",
ShortRef: "sr2",
ParentRef: "pr2",
RepoRef: "rr2",
ShortRef: "sr2",
ParentRef: "pr2",
LocationRef: "lr2",
Info: ItemInfo{
Folder: &FolderInfo{
Modified: folderTimeNewerThanItem,
@ -283,9 +313,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
name: "MultipleFoldersWithRepeats",
folders: []folderEntry{
{
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{
Folder: &FolderInfo{
Modified: folderTimeOlderThanItem,
@ -293,9 +324,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
},
},
{
RepoRef: "rr2",
ShortRef: "sr2",
ParentRef: "pr2",
RepoRef: "rr2",
ShortRef: "sr2",
ParentRef: "pr2",
LocationRef: "lr2",
Info: ItemInfo{
Folder: &FolderInfo{
Modified: folderTimeOlderThanItem,
@ -303,9 +335,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
},
},
{
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{
Folder: &FolderInfo{
Modified: folderTimeOlderThanItem,
@ -313,9 +346,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
},
},
{
RepoRef: "rr3",
ShortRef: "sr3",
ParentRef: "pr3",
RepoRef: "rr3",
ShortRef: "sr3",
ParentRef: "pr3",
LocationRef: "lr3",
Info: ItemInfo{
Folder: &FolderInfo{
Modified: folderTimeNewerThanItem,
@ -363,18 +397,20 @@ func (suite *DetailsUnitSuite) TestDetails_AddFoldersUpdate() {
name: "ItemNotUpdated_NoChange",
folders: []folderEntry{
{
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{
Folder: &FolderInfo{},
},
Updated: true,
},
{
RepoRef: "rr2",
ShortRef: "sr2",
ParentRef: "pr2",
RepoRef: "rr2",
ShortRef: "sr2",
ParentRef: "pr2",
LocationRef: "lr2",
Info: ItemInfo{
Folder: &FolderInfo{},
},
@ -390,17 +426,19 @@ func (suite *DetailsUnitSuite) TestDetails_AddFoldersUpdate() {
name: "ItemUpdated",
folders: []folderEntry{
{
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{
Folder: &FolderInfo{},
},
},
{
RepoRef: "rr2",
ShortRef: "sr2",
ParentRef: "pr2",
RepoRef: "rr2",
ShortRef: "sr2",
ParentRef: "pr2",
LocationRef: "lr2",
Info: ItemInfo{
Folder: &FolderInfo{},
},
@ -482,9 +520,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFoldersDifferentServices() {
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
folder := folderEntry{
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{
Folder: &FolderInfo{},
},

View File

@ -1041,6 +1041,233 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() {
}
}
func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce_locationRef() {
var (
contact = stubRepoRef(path.ExchangeService, path.ContactsCategory, "uid", "id5/id6", "cid")
contactLocation = "conts/my_cont"
event = stubRepoRef(path.ExchangeService, path.EventsCategory, "uid", "id1/id2", "eid")
eventLocation = "cal/my_cal"
mail = stubRepoRef(path.ExchangeService, path.EmailCategory, "uid", "id3/id4", "mid")
mailLocation = "inbx/my_mail"
)
makeDeets := func(refs ...string) *details.Details {
deets := &details.Details{
DetailsModel: details.DetailsModel{
Entries: []details.DetailsEntry{},
},
}
for _, r := range refs {
var (
location string
itype = details.UnknownType
)
switch r {
case contact:
itype = details.ExchangeContact
location = contactLocation
case event:
itype = details.ExchangeEvent
location = eventLocation
case mail:
itype = details.ExchangeMail
location = mailLocation
}
deets.Entries = append(deets.Entries, details.DetailsEntry{
RepoRef: r,
LocationRef: location,
ItemInfo: details.ItemInfo{
Exchange: &details.ExchangeInfo{
ItemType: itype,
},
},
})
}
return deets
}
arr := func(s ...string) []string {
return s
}
table := []struct {
name string
deets *details.Details
makeSelector func() *ExchangeRestore
expect []string
}{
{
"no refs",
makeDeets(),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
return er
},
[]string{},
},
{
"contact only",
makeDeets(contact),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
return er
},
arr(contact),
},
{
"event only",
makeDeets(event),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
return er
},
arr(event),
},
{
"mail only",
makeDeets(mail),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
return er
},
arr(mail),
},
{
"all",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
return er
},
arr(contact, event, mail),
},
{
"only match contact",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.Contacts([]string{contactLocation}, []string{"cid"}))
return er
},
arr(contact),
},
{
"only match event",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.Events([]string{eventLocation}, []string{"eid"}))
return er
},
arr(event),
},
{
"only match mail",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.Mails([]string{mailLocation}, []string{"mid"}))
return er
},
arr(mail),
},
{
"exclude contact",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
er.Exclude(er.Contacts([]string{contactLocation}, []string{"cid"}))
return er
},
arr(event, mail),
},
{
"exclude event",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
er.Exclude(er.Events([]string{eventLocation}, []string{"eid"}))
return er
},
arr(contact, mail),
},
{
"exclude mail",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
er.Exclude(er.Mails([]string{mailLocation}, []string{"mid"}))
return er
},
arr(contact, event),
},
{
"filter on mail subject",
func() *details.Details {
ds := makeDeets(mail)
for i := range ds.Entries {
ds.Entries[i].Exchange.Subject = "has a subject"
}
return ds
}(),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
er.Filter(er.MailSubject("subj"))
return er
},
arr(mail),
},
{
"filter on mail subject multiple input categories",
func() *details.Details {
mds := makeDeets(mail)
for i := range mds.Entries {
mds.Entries[i].Exchange.Subject = "has a subject"
}
ds := makeDeets(contact, event)
ds.Entries = append(ds.Entries, mds.Entries...)
return ds
}(),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
er.Filter(er.MailSubject("subj"))
return er
},
arr(mail),
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
ctx, flush := tester.NewContext()
defer flush()
errs := mock.NewAdder()
sel := test.makeSelector()
results := sel.Reduce(ctx, test.deets, errs)
paths := results.Paths()
assert.Equal(t, test.expect, paths)
assert.Empty(t, errs.Errs)
})
}
}
func (suite *ExchangeSelectorSuite) TestScopesByCategory() {
var (
es = NewExchangeRestore(Any())

View File

@ -317,6 +317,27 @@ func reduce[T scopeT, C categoryT](
continue
}
// if the details entry has a locationRef specified, use those folders in place
// of the repoRef folders, so that scopes can match against the display names
// instead of container IDs.
if len(ent.LocationRef) > 0 {
pb, err := path.Builder{}.
Append(path.Split(ent.LocationRef)...).
Append(repoPath.Item()).
ToDataLayerPath(
repoPath.Tenant(),
repoPath.ResourceOwner(),
repoPath.Service(),
repoPath.Category(),
true)
if err != nil {
errs.Add(clues.Wrap(err, "transforming locationRef to path").WithClues(ctx))
continue
}
repoPath = pb
}
// first check, every entry needs to match the selector's resource owners.
if !matchesResourceOwner.Compare(repoPath.ResourceOwner()) {
continue
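
Illustration of the substitution above: reduce rebuilds the path it matches scopes against from the entry's LocationRef plus the repoRef's metadata and item, which is how the selector test earlier in this compare matches mail on "inbx/my_mail" even though the stored repoRef only holds container IDs. The concrete values and import path below are assumptions; the path functions used are the ones shown in the diff.

package main

import (
	"fmt"

	"github.com/alcionai/corso/src/pkg/path" // assumed import path
)

func main() {
	// An ID-based repo path, as stored in details (values assumed).
	repoPath, err := path.Builder{}.
		Append("id3", "id4", "mid").
		ToDataLayerExchangePathForCategory("tid", "uid", path.EmailCategory, true)
	if err != nil {
		panic(err)
	}

	// The matching entry's human-readable LocationRef.
	locationRef := "inbx/my_mail"

	// Rebuild the matchable path the same way reduce() does: location
	// folders, the original item, and the repoRef's tenant/owner/
	// service/category.
	matchable, err := path.Builder{}.
		Append(path.Split(locationRef)...).
		Append(repoPath.Item()).
		ToDataLayerPath(
			repoPath.Tenant(),
			repoPath.ResourceOwner(),
			repoPath.Service(),
			repoPath.Category(),
			true)
	if err != nil {
		panic(err)
	}

	// Prints something like: tid/exchange/uid/email/inbx/my_mail/mid
	fmt.Println(matchable)
}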

View File

@ -290,6 +290,50 @@ func (suite *SelectorScopesSuite) TestReduce() {
}
}
func (suite *SelectorScopesSuite) TestReduce_locationRef() {
deets := func() details.Details {
return details.Details{
DetailsModel: details.DetailsModel{
Entries: []details.DetailsEntry{
{
RepoRef: stubRepoRef(
pathServiceStub,
pathCatStub,
rootCatStub.String(),
"stub",
leafCatStub.String(),
),
LocationRef: "a/b/c//defg",
},
},
},
}
}
dataCats := map[path.CategoryType]mockCategorizer{
pathCatStub: rootCatStub,
}
for _, test := range reduceTestTable {
suite.T().Run(test.name, func(t *testing.T) {
ctx, flush := tester.NewContext()
defer flush()
errs := mock.NewAdder()
ds := deets()
result := reduce[mockScope](
ctx,
&ds,
test.sel().Selector,
dataCats,
errs)
require.NotNil(t, result)
require.Empty(t, errs.Errs, "iteration errors")
assert.Len(t, result.Entries, test.expectLen)
})
}
}
func (suite *SelectorScopesSuite) TestScopesByCategory() {
t := suite.T()
s1 := stubScope("")