Compare commits

...

4 Commits

Author SHA1 Message Date
Keepers
210b543280 populate the location path values (#2430)
Adds location path values to all exchange types.
Only propagates these values if the collection is
an event category.
2023-02-08 14:59:18 -07:00
ryanfkeepers
52add8dad6 minor cleanups to comments 2023-02-08 14:59:18 -07:00
ryanfkeepers
43c47d678e fix segfault panic 2023-02-08 14:59:18 -07:00
ryanfkeepers
1b1629e09c add locationRef to details
Adds a new reference to the details entry: location-
ref.  The location holds the human-readable
version of the item's location in whatever m365
service sourced the item.  Hookup is incomplete,
following PRs will fill out functionality.

Also adds a LocationPather interface to data_
collections to pass this data back and forth
between producers and consumers.
2023-02-08 14:59:18 -07:00
24 changed files with 654 additions and 156 deletions

View File

@ -169,10 +169,8 @@ func (c Contacts) EnumerateContainers(
continue continue
} }
temp := graph.NewCacheFolder(fold, nil) temp := graph.NewCacheFolder(fold, nil, nil)
if err := fn(temp); err != nil {
err = fn(temp)
if err != nil {
errs = multierror.Append(err, errs) errs = multierror.Append(err, errs)
continue continue
} }

View File

@ -209,10 +209,11 @@ func (c Events) EnumerateContainers(
continue continue
} }
temp := graph.NewCacheFolder(cd, path.Builder{}.Append(*cd.GetDisplayName())) temp := graph.NewCacheFolder(
cd,
err = fn(temp) path.Builder{}.Append(*cd.GetDisplayName()),
if err != nil { path.Builder{}.Append(*cd.GetDisplayName()))
if err := fn(temp); err != nil {
errs = multierror.Append(err, errs) errs = multierror.Append(err, errs)
continue continue
} }

View File

@ -198,8 +198,7 @@ func (c Mail) EnumerateContainers(
} }
for _, v := range resp.GetValue() { for _, v := range resp.GetValue() {
temp := graph.NewCacheFolder(v, nil) temp := graph.NewCacheFolder(v, nil, nil)
if err := fn(temp); err != nil { if err := fn(temp); err != nil {
errs = multierror.Append(errs, errors.Wrap(err, "iterating mail folders delta")) errs = multierror.Append(errs, errors.Wrap(err, "iterating mail folders delta"))
continue continue

View File

@ -29,8 +29,10 @@ func (cfc *contactFolderCache) populateContactRoot(
return support.ConnectorStackErrorTraceWrap(err, "fetching root folder") return support.ConnectorStackErrorTraceWrap(err, "fetching root folder")
} }
temp := graph.NewCacheFolder(f, path.Builder{}.Append(baseContainerPath...)) temp := graph.NewCacheFolder(
f,
path.Builder{}.Append(baseContainerPath...), // storage path
path.Builder{}.Append(baseContainerPath...)) // display location
if err := cfc.addFolder(temp); err != nil { if err := cfc.addFolder(temp); err != nil {
return errors.Wrap(err, "adding resolver dir") return errors.Wrap(err, "adding resolver dir")
} }

View File

@ -26,16 +26,19 @@ type mockContainer struct {
displayName *string displayName *string
parentID *string parentID *string
p *path.Builder p *path.Builder
l *path.Builder
} }
//nolint:revive //nolint:revive
func (m mockContainer) GetId() *string { return m.id } func (m mockContainer) GetId() *string { return m.id }
//nolint:revive //nolint:revive
func (m mockContainer) GetParentFolderId() *string { return m.parentID } func (m mockContainer) GetParentFolderId() *string { return m.parentID }
func (m mockContainer) GetDisplayName() *string { return m.displayName } func (m mockContainer) GetDisplayName() *string { return m.displayName }
func (m mockContainer) Path() *path.Builder { return m.p } func (m mockContainer) Location() *path.Builder { return m.l }
func (m mockContainer) SetPath(p *path.Builder) {} func (m mockContainer) SetLocation(p *path.Builder) {}
func (m mockContainer) Path() *path.Builder { return m.p }
func (m mockContainer) SetPath(p *path.Builder) {}
func strPtr(s string) *string { func strPtr(s string) *string {
return &s return &s
@ -168,7 +171,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
parentID: nil, parentID: nil,
}, },
nil, nil,
), nil),
check: assert.Error, check: assert.Error,
}, },
{ {
@ -180,7 +183,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
parentID: nil, parentID: nil,
}, },
path.Builder{}.Append("foo"), path.Builder{}.Append("foo"),
), path.Builder{}.Append("loc")),
check: assert.NoError, check: assert.NoError,
}, },
{ {
@ -192,7 +195,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
parentID: &testParentID, parentID: &testParentID,
}, },
path.Builder{}.Append("foo"), path.Builder{}.Append("foo"),
), path.Builder{}.Append("loc")),
check: assert.Error, check: assert.Error,
}, },
{ {
@ -204,7 +207,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
parentID: &testParentID, parentID: &testParentID,
}, },
path.Builder{}.Append("foo"), path.Builder{}.Append("foo"),
), path.Builder{}.Append("loc")),
check: assert.Error, check: assert.Error,
}, },
{ {
@ -216,7 +219,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
parentID: &testParentID, parentID: &testParentID,
}, },
nil, nil,
), nil),
check: assert.NoError, check: assert.NoError,
}, },
} }
@ -241,31 +244,21 @@ type mockCachedContainer struct {
id string id string
parentID string parentID string
displayName string displayName string
l *path.Builder
p *path.Builder p *path.Builder
expectedPath string expectedPath string
} }
//nolint:revive //nolint:revive
func (m mockCachedContainer) GetId() *string { func (m mockCachedContainer) GetId() *string { return &m.id }
return &m.id
}
//nolint:revive //nolint:revive
func (m mockCachedContainer) GetParentFolderId() *string { func (m mockCachedContainer) GetParentFolderId() *string { return &m.parentID }
return &m.parentID func (m mockCachedContainer) GetDisplayName() *string { return &m.displayName }
} func (m mockCachedContainer) Location() *path.Builder { return m.l }
func (m *mockCachedContainer) SetLocation(newLoc *path.Builder) { m.l = newLoc }
func (m mockCachedContainer) GetDisplayName() *string { func (m mockCachedContainer) Path() *path.Builder { return m.p }
return &m.displayName func (m *mockCachedContainer) SetPath(newPath *path.Builder) { m.p = newPath }
}
func (m mockCachedContainer) Path() *path.Builder {
return m.p
}
func (m *mockCachedContainer) SetPath(newPath *path.Builder) {
m.p = newPath
}
func resolverWithContainers(numContainers int) (*containerResolver, []*mockCachedContainer) { func resolverWithContainers(numContainers int) (*containerResolver, []*mockCachedContainer) {
containers := make([]*mockCachedContainer, 0, numContainers) containers := make([]*mockCachedContainer, 0, numContainers)
@ -595,7 +588,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
for _, test := range tests { for _, test := range tests {
suite.T().Run(test.name, func(t *testing.T) { suite.T().Run(test.name, func(t *testing.T) {
folderID, err := CreateContainerDestinaion( folderID, err := CreateContainerDestination(
ctx, ctx,
m365, m365,
test.pathFunc1(t), test.pathFunc1(t),
@ -608,7 +601,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
_, err = resolver.IDToPath(ctx, folderID) _, err = resolver.IDToPath(ctx, folderID)
assert.NoError(t, err) assert.NoError(t, err)
secondID, err := CreateContainerDestinaion( secondID, err := CreateContainerDestination(
ctx, ctx,
m365, m365,
test.pathFunc2(t), test.pathFunc2(t),

View File

@ -44,7 +44,10 @@ func (ecc *eventCalendarCache) populateEventRoot(ctx context.Context) error {
return errors.Wrap(err, "fetching calendar "+support.ConnectorStackErrorTrace(err)) return errors.Wrap(err, "fetching calendar "+support.ConnectorStackErrorTrace(err))
} }
temp := graph.NewCacheFolder(f, path.Builder{}.Append(container)) temp := graph.NewCacheFolder(
f,
path.Builder{}.Append(container), // storage path
path.Builder{}.Append(container)) // display location
if err := ecc.addFolder(temp); err != nil { if err := ecc.addFolder(temp); err != nil {
return errors.Wrap(err, "initializing calendar resolver") return errors.Wrap(err, "initializing calendar resolver")
} }
@ -91,7 +94,10 @@ func (ecc *eventCalendarCache) AddToCache(ctx context.Context, f graph.Container
return errors.Wrap(err, "validating container") return errors.Wrap(err, "validating container")
} }
temp := graph.NewCacheFolder(f, path.Builder{}.Append(calendarOthersFolder, *f.GetDisplayName())) temp := graph.NewCacheFolder(
f,
path.Builder{}.Append(calendarOthersFolder, *f.GetDisplayName()), // storage path
path.Builder{}.Append(calendarOthersFolder, *f.GetDisplayName())) // display location
if err := ecc.addFolder(temp); err != nil { if err := ecc.addFolder(temp); err != nil {
return errors.Wrap(err, "adding container") return errors.Wrap(err, "adding container")

View File

@ -77,6 +77,11 @@ type Collection struct {
// moved. It will be empty on its first retrieval. // moved. It will be empty on its first retrieval.
prevPath path.Path prevPath path.Path
// LocationPath contains the path with human-readable display names.
// IE: "/Inbox/Important" instead of "/abcdxyz123/algha=lgkhal=t"
// Currently only implemented for Exchange Calendars.
locationPath path.Path
state data.CollectionState state data.CollectionState
// doNotMergeItems should only be true if the old delta token expired. // doNotMergeItems should only be true if the old delta token expired.
@ -91,7 +96,7 @@ type Collection struct {
// or notMoved (if they match). // or notMoved (if they match).
func NewCollection( func NewCollection(
user string, user string,
curr, prev path.Path, curr, prev, location path.Path,
category path.CategoryType, category path.CategoryType,
items itemer, items itemer,
statusUpdater support.StatusUpdater, statusUpdater support.StatusUpdater,
@ -99,18 +104,19 @@ func NewCollection(
doNotMergeItems bool, doNotMergeItems bool,
) Collection { ) Collection {
collection := Collection{ collection := Collection{
added: make(map[string]struct{}, 0),
category: category, category: category,
ctrl: ctrlOpts, ctrl: ctrlOpts,
data: make(chan data.Stream, collectionChannelBufferSize), data: make(chan data.Stream, collectionChannelBufferSize),
doNotMergeItems: doNotMergeItems, doNotMergeItems: doNotMergeItems,
fullPath: curr, fullPath: curr,
added: make(map[string]struct{}, 0), items: items,
removed: make(map[string]struct{}, 0), locationPath: location,
prevPath: prev, prevPath: prev,
removed: make(map[string]struct{}, 0),
state: stateOf(prev, curr), state: stateOf(prev, curr),
statusUpdater: statusUpdater, statusUpdater: statusUpdater,
user: user, user: user,
items: items,
} }
return collection return collection
@ -144,6 +150,12 @@ func (col *Collection) FullPath() path.Path {
return col.fullPath return col.fullPath
} }
// LocationPath produces the Collection's full path, but with display names
// instead of IDs in the folders. Only populated for Calendars.
func (col *Collection) LocationPath() path.Path {
return col.locationPath
}
// TODO(ashmrtn): Fill in with previous path once GraphConnector compares old // TODO(ashmrtn): Fill in with previous path once GraphConnector compares old
// and new folder hierarchies. // and new folder hierarchies.
func (col Collection) PreviousPath() path.Path { func (col Collection) PreviousPath() path.Path {

View File

@ -127,28 +127,36 @@ func (suite *ExchangeDataCollectionSuite) TestNewCollection_state() {
Append("bar"). Append("bar").
ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false) ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false)
require.NoError(suite.T(), err) require.NoError(suite.T(), err)
locP, err := path.Builder{}.
Append("human-readable").
ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false)
require.NoError(suite.T(), err)
table := []struct { table := []struct {
name string name string
prev path.Path prev path.Path
curr path.Path curr path.Path
loc path.Path
expect data.CollectionState expect data.CollectionState
}{ }{
{ {
name: "new", name: "new",
curr: fooP, curr: fooP,
loc: locP,
expect: data.NewState, expect: data.NewState,
}, },
{ {
name: "not moved", name: "not moved",
prev: fooP, prev: fooP,
curr: fooP, curr: fooP,
loc: locP,
expect: data.NotMovedState, expect: data.NotMovedState,
}, },
{ {
name: "moved", name: "moved",
prev: fooP, prev: fooP,
curr: barP, curr: barP,
loc: locP,
expect: data.MovedState, expect: data.MovedState,
}, },
{ {
@ -161,12 +169,15 @@ func (suite *ExchangeDataCollectionSuite) TestNewCollection_state() {
suite.T().Run(test.name, func(t *testing.T) { suite.T().Run(test.name, func(t *testing.T) {
c := NewCollection( c := NewCollection(
"u", "u",
test.curr, test.prev, test.curr, test.prev, test.loc,
0, 0,
&mockItemer{}, nil, &mockItemer{}, nil,
control.Options{}, control.Options{},
false) false)
assert.Equal(t, test.expect, c.State()) assert.Equal(t, test.expect, c.State(), "collection state")
assert.Equal(t, test.curr, c.fullPath, "full path")
assert.Equal(t, test.prev, c.prevPath, "prev path")
assert.Equal(t, test.loc, c.locationPath, "location path")
}) })
} }
} }

View File

@ -53,7 +53,9 @@ func (mc *mailFolderCache) populateMailRoot(ctx context.Context) error {
directory = DefaultMailFolder directory = DefaultMailFolder
} }
temp := graph.NewCacheFolder(f, path.Builder{}.Append(directory)) temp := graph.NewCacheFolder(f,
path.Builder{}.Append(directory), // storage path
path.Builder{}.Append(directory)) // display location
if err := mc.addFolder(temp); err != nil { if err := mc.addFolder(temp); err != nil {
return errors.Wrap(err, "adding resolver dir") return errors.Wrap(err, "adding resolver dir")
} }

View File

@ -86,44 +86,70 @@ func PopulateExchangeContainerResolver(
} }
// Returns true if the container passes the scope comparison and should be included. // Returns true if the container passes the scope comparison and should be included.
// Also returns the path representing the directory. // Returns:
// - the path representing the directory as it should be stored in the repository.
// - the human-readable path using display names.
// - true if the path passes the scope comparison.
func includeContainer( func includeContainer(
qp graph.QueryParams, qp graph.QueryParams,
c graph.CachedContainer, c graph.CachedContainer,
scope selectors.ExchangeScope, scope selectors.ExchangeScope,
) (path.Path, bool) { ) (path.Path, path.Path, bool) {
var ( var (
category = scope.Category().PathType()
directory string directory string
locPath path.Path
category = scope.Category().PathType()
pb = c.Path() pb = c.Path()
loc = c.Location()
) )
// Clause ensures that DefaultContactFolder is inspected properly // Clause ensures that DefaultContactFolder is inspected properly
if category == path.ContactsCategory && *c.GetDisplayName() == DefaultContactFolder { if category == path.ContactsCategory && *c.GetDisplayName() == DefaultContactFolder {
pb = c.Path().Append(DefaultContactFolder) pb = pb.Append(DefaultContactFolder)
if loc != nil {
loc = loc.Append(DefaultContactFolder)
}
} }
dirPath, err := pb.ToDataLayerExchangePathForCategory( dirPath, err := pb.ToDataLayerExchangePathForCategory(
qp.Credentials.AzureTenantID, qp.Credentials.AzureTenantID,
qp.ResourceOwner, qp.ResourceOwner,
category, category,
false, false)
)
// Containers without a path (e.g. Root mail folder) always err here. // Containers without a path (e.g. Root mail folder) always err here.
if err != nil { if err != nil {
return nil, false return nil, nil, false
} }
directory = pb.String() directory = dirPath.Folder()
if loc != nil {
locPath, err = loc.ToDataLayerExchangePathForCategory(
qp.Credentials.AzureTenantID,
qp.ResourceOwner,
category,
false)
// Containers without a path (e.g. Root mail folder) always err here.
if err != nil {
return nil, nil, false
}
directory = locPath.Folder()
}
var ok bool
switch category { switch category {
case path.EmailCategory: case path.EmailCategory:
return dirPath, scope.Matches(selectors.ExchangeMailFolder, directory) ok = scope.Matches(selectors.ExchangeMailFolder, directory)
case path.ContactsCategory: case path.ContactsCategory:
return dirPath, scope.Matches(selectors.ExchangeContactFolder, directory) ok = scope.Matches(selectors.ExchangeContactFolder, directory)
case path.EventsCategory: case path.EventsCategory:
return dirPath, scope.Matches(selectors.ExchangeEventCalendar, directory) ok = scope.Matches(selectors.ExchangeEventCalendar, directory)
default: default:
return dirPath, false return nil, nil, false
} }
return dirPath, locPath, ok
} }

View File

@ -70,7 +70,7 @@ func filterContainersAndFillCollections(
cID := *c.GetId() cID := *c.GetId()
delete(tombstones, cID) delete(tombstones, cID)
currPath, ok := includeContainer(qp, c, scope) currPath, locPath, ok := includeContainer(qp, c, scope)
// Only create a collection if the path matches the scope. // Only create a collection if the path matches the scope.
if !ok { if !ok {
continue continue
@ -110,10 +110,15 @@ func filterContainersAndFillCollections(
deltaURLs[cID] = newDelta.URL deltaURLs[cID] = newDelta.URL
} }
if qp.Category != path.EventsCategory {
locPath = nil
}
edc := NewCollection( edc := NewCollection(
qp.ResourceOwner, qp.ResourceOwner,
currPath, currPath,
prevPath, prevPath,
locPath,
scope.Category().PathType(), scope.Category().PathType(),
ibt, ibt,
statusUpdater, statusUpdater,
@ -167,6 +172,7 @@ func filterContainersAndFillCollections(
qp.ResourceOwner, qp.ResourceOwner,
nil, // marks the collection as deleted nil, // marks the collection as deleted
prevPath, prevPath,
nil, // tombstones don't need a location
scope.Category().PathType(), scope.Category().PathType(),
ibt, ibt,
statusUpdater, statusUpdater,

View File

@ -344,7 +344,7 @@ func RestoreExchangeDataCollections(
userCaches = directoryCaches[userID] userCaches = directoryCaches[userID]
} }
containerID, err := CreateContainerDestinaion( containerID, err := CreateContainerDestination(
ctx, ctx,
creds, creds,
dc.FullPath(), dc.FullPath(),
@ -447,10 +447,16 @@ func restoreCollection(
continue continue
} }
// var locationRef string
// if category == path.ContactsCategory {
// locationRef = itemPath.Folder()
// }
deets.Add( deets.Add(
itemPath.String(), itemPath.String(),
itemPath.ShortRef(), itemPath.ShortRef(),
"", "",
"", // TODO: locationRef
true, true,
details.ItemInfo{ details.ItemInfo{
Exchange: info, Exchange: info,
@ -461,12 +467,12 @@ func restoreCollection(
} }
} }
// CreateContainerDestinaion builds the destination into the container // CreateContainerDestination builds the destination into the container
// at the provided path. As a precondition, the destination cannot // at the provided path. As a precondition, the destination cannot
// already exist. If it does then an error is returned. The provided // already exist. If it does then an error is returned. The provided
// containerResolver is updated with the new destination. // containerResolver is updated with the new destination.
// @ returns the container ID of the new destination container. // @ returns the container ID of the new destination container.
func CreateContainerDestinaion( func CreateContainerDestination(
ctx context.Context, ctx context.Context,
creds account.M365Config, creds account.M365Config,
directory path.Path, directory path.Path,

View File

@ -12,6 +12,12 @@ import (
// reuse logic in IDToPath. // reuse logic in IDToPath.
type CachedContainer interface { type CachedContainer interface {
Container Container
// Location contains either the display names for the dirs (if this is a calendar)
// or nil
Location() *path.Builder
SetLocation(*path.Builder)
// Path contains either the ids for the dirs (if this is a calendar)
// or the display names for the dirs
Path() *path.Builder Path() *path.Builder
SetPath(*path.Builder) SetPath(*path.Builder)
} }
@ -45,13 +51,15 @@ var _ CachedContainer = &CacheFolder{}
type CacheFolder struct { type CacheFolder struct {
Container Container
l *path.Builder
p *path.Builder p *path.Builder
} }
// NewCacheFolder public constructor for struct // NewCacheFolder public constructor for struct
func NewCacheFolder(c Container, pb *path.Builder) CacheFolder { func NewCacheFolder(c Container, pb, lpb *path.Builder) CacheFolder {
cf := CacheFolder{ cf := CacheFolder{
Container: c, Container: c,
l: lpb,
p: pb, p: pb,
} }
@ -62,6 +70,14 @@ func NewCacheFolder(c Container, pb *path.Builder) CacheFolder {
// Required Functions to satisfy interfaces // Required Functions to satisfy interfaces
// ========================================= // =========================================
func (cf CacheFolder) Location() *path.Builder {
return cf.l
}
func (cf *CacheFolder) SetLocation(newLocation *path.Builder) {
cf.l = newLocation
}
func (cf CacheFolder) Path() *path.Builder { func (cf CacheFolder) Path() *path.Builder {
return cf.p return cf.p
} }

View File

@ -235,7 +235,13 @@ func RestoreCollection(
restoredIDs[trimmedName] = itemID restoredIDs[trimmedName] = itemID
deets.Add(itemPath.String(), itemPath.ShortRef(), "", true, itemInfo) deets.Add(
itemPath.String(),
itemPath.ShortRef(),
"",
"", // TODO: implement locationRef
true,
itemInfo)
// Mark it as success without processing .meta // Mark it as success without processing .meta
// file if we are not restoring permissions // file if we are not restoring permissions
@ -343,7 +349,13 @@ func RestoreCollection(
continue continue
} }
deets.Add(itemPath.String(), itemPath.ShortRef(), "", true, itemInfo) deets.Add(
itemPath.String(),
itemPath.ShortRef(),
"",
"", // TODO: implement locationRef
true,
itemInfo)
metrics.Successes++ metrics.Successes++
} }
} }

View File

@ -276,6 +276,7 @@ func RestoreListCollection(
itemPath.String(), itemPath.String(),
itemPath.ShortRef(), itemPath.ShortRef(),
"", "",
"", // TODO: implement locationRef
true, true,
itemInfo) itemInfo)
@ -355,6 +356,7 @@ func RestorePageCollection(
itemPath.String(), itemPath.String(),
itemPath.ShortRef(), itemPath.ShortRef(),
"", "",
"", // TODO: implement locationRef
true, true,
itemInfo, itemInfo,
) )

View File

@ -92,6 +92,12 @@ type Stream interface {
Deleted() bool Deleted() bool
} }
// LocationPather provides a LocationPath describing the path with Display Names
// instead of canonical IDs
type LocationPather interface {
LocationPath() path.Path
}
// StreamInfo is used to provide service specific // StreamInfo is used to provide service specific
// information about the Stream // information about the Stream
type StreamInfo interface { type StreamInfo interface {

View File

@ -124,10 +124,11 @@ func (rw *restoreStreamReader) Read(p []byte) (n int, err error) {
} }
type itemDetails struct { type itemDetails struct {
info *details.ItemInfo info *details.ItemInfo
repoPath path.Path repoPath path.Path
prevPath path.Path prevPath path.Path
cached bool locationPath path.Path
cached bool
} }
type corsoProgress struct { type corsoProgress struct {
@ -188,20 +189,29 @@ func (cp *corsoProgress) FinishedFile(relativePath string, err error) {
parent := d.repoPath.ToBuilder().Dir() parent := d.repoPath.ToBuilder().Dir()
var locationFolders string
if d.locationPath != nil {
locationFolders = d.locationPath.Folder()
}
cp.deets.Add( cp.deets.Add(
d.repoPath.String(), d.repoPath.String(),
d.repoPath.ShortRef(), d.repoPath.ShortRef(),
parent.ShortRef(), parent.ShortRef(),
locationFolders,
!d.cached, !d.cached,
*d.info, *d.info)
)
folders := details.FolderEntriesForPath(parent) var locPB *path.Builder
if d.locationPath != nil {
locPB = d.locationPath.ToBuilder()
}
folders := details.FolderEntriesForPath(parent, locPB)
cp.deets.AddFoldersForItem( cp.deets.AddFoldersForItem(
folders, folders,
*d.info, *d.info,
!d.cached, !d.cached)
)
} }
// Kopia interface function used as a callback when kopia finishes hashing a file. // Kopia interface function used as a callback when kopia finishes hashing a file.
@ -311,6 +321,12 @@ func collectionEntries(
continue continue
} }
var locationPath path.Path
if lp, ok := e.(data.LocationPather); ok {
locationPath = lp.LocationPath()
}
trace.Log(ctx, "kopia:streamEntries:item", itemPath.String()) trace.Log(ctx, "kopia:streamEntries:item", itemPath.String())
if e.Deleted() { if e.Deleted() {
@ -332,7 +348,11 @@ func collectionEntries(
// previous snapshot then we should populate prevPath here and leave // previous snapshot then we should populate prevPath here and leave
// info nil. // info nil.
itemInfo := ei.Info() itemInfo := ei.Info()
d := &itemDetails{info: &itemInfo, repoPath: itemPath} d := &itemDetails{
info: &itemInfo,
repoPath: itemPath,
locationPath: locationPath,
}
progress.put(encodeAsPath(itemPath.PopFront().Elements()...), d) progress.put(encodeAsPath(itemPath.PopFront().Elements()...), d)
} }

View File

@ -581,10 +581,11 @@ func mergeDetails(
newPath.String(), newPath.String(),
newPath.ShortRef(), newPath.ShortRef(),
newPath.ToBuilder().Dir().ShortRef(), newPath.ToBuilder().Dir().ShortRef(),
"", // TODO Location Ref,
itemUpdated, itemUpdated,
item) item)
folders := details.FolderEntriesForPath(newPath.ToBuilder().Dir()) folders := details.FolderEntriesForPath(newPath.ToBuilder().Dir(), nil)
deets.AddFoldersForItem(folders, item, itemUpdated) deets.AddFoldersForItem(folders, item, itemUpdated)
// Track how many entries we added so that we know if we got them all when // Track how many entries we added so that we know if we got them all when

View File

@ -44,7 +44,7 @@ func (suite *StreamStoreIntegrationSuite) TestDetails() {
deetsBuilder := &details.Builder{} deetsBuilder := &details.Builder{}
deetsBuilder.Add("ref", "shortref", "parentref", true, deetsBuilder.Add("ref", "shortref", "parentref", "locationRef", true,
details.ItemInfo{ details.ItemInfo{
Exchange: &details.ExchangeInfo{ Exchange: &details.ExchangeInfo{
Subject: "hello world", Subject: "hello world",
@ -66,6 +66,7 @@ func (suite *StreamStoreIntegrationSuite) TestDetails() {
assert.Equal(t, deets.Entries[0].ParentRef, readDeets.Entries[0].ParentRef) assert.Equal(t, deets.Entries[0].ParentRef, readDeets.Entries[0].ParentRef)
assert.Equal(t, deets.Entries[0].ShortRef, readDeets.Entries[0].ShortRef) assert.Equal(t, deets.Entries[0].ShortRef, readDeets.Entries[0].ShortRef)
assert.Equal(t, deets.Entries[0].RepoRef, readDeets.Entries[0].RepoRef) assert.Equal(t, deets.Entries[0].RepoRef, readDeets.Entries[0].RepoRef)
assert.Equal(t, deets.Entries[0].LocationRef, readDeets.Entries[0].LocationRef)
assert.Equal(t, deets.Entries[0].Updated, readDeets.Entries[0].Updated) assert.Equal(t, deets.Entries[0].Updated, readDeets.Entries[0].Updated)
assert.NotNil(t, readDeets.Entries[0].Exchange) assert.NotNil(t, readDeets.Entries[0].Exchange)
assert.Equal(t, *deets.Entries[0].Exchange, *readDeets.Entries[0].Exchange) assert.Equal(t, *deets.Entries[0].Exchange, *readDeets.Entries[0].Exchange)

View File

@ -15,11 +15,12 @@ import (
) )
type folderEntry struct { type folderEntry struct {
RepoRef string RepoRef string
ShortRef string ShortRef string
ParentRef string ParentRef string
Updated bool LocationRef string
Info ItemInfo Updated bool
Info ItemInfo
} }
// -------------------------------------------------------------------------------- // --------------------------------------------------------------------------------
@ -110,10 +111,14 @@ type Builder struct {
knownFolders map[string]folderEntry `json:"-"` knownFolders map[string]folderEntry `json:"-"`
} }
func (b *Builder) Add(repoRef, shortRef, parentRef string, updated bool, info ItemInfo) { func (b *Builder) Add(
repoRef, shortRef, parentRef, locationRef string,
updated bool,
info ItemInfo,
) {
b.mu.Lock() b.mu.Lock()
defer b.mu.Unlock() defer b.mu.Unlock()
b.d.add(repoRef, shortRef, parentRef, updated, info) b.d.add(repoRef, shortRef, parentRef, locationRef, updated, info)
} }
func (b *Builder) Details() *Details { func (b *Builder) Details() *Details {
@ -131,16 +136,23 @@ func (b *Builder) Details() *Details {
// TODO(ashmrtn): If we never need to pre-populate the modified time of a folder // TODO(ashmrtn): If we never need to pre-populate the modified time of a folder
// we should just merge this with AddFoldersForItem, have Add call // we should just merge this with AddFoldersForItem, have Add call
// AddFoldersForItem, and unexport AddFoldersForItem. // AddFoldersForItem, and unexport AddFoldersForItem.
func FolderEntriesForPath(parent *path.Builder) []folderEntry { func FolderEntriesForPath(parent, location *path.Builder) []folderEntry {
folders := []folderEntry{} folders := []folderEntry{}
lfs := locationRefOf(location)
for len(parent.Elements()) > 0 { for len(parent.Elements()) > 0 {
nextParent := parent.Dir() nextParent := parent.Dir()
var lr string
if lfs != nil {
lr = lfs.String()
}
folders = append(folders, folderEntry{ folders = append(folders, folderEntry{
RepoRef: parent.String(), RepoRef: parent.String(),
ShortRef: parent.ShortRef(), ShortRef: parent.ShortRef(),
ParentRef: nextParent.ShortRef(), ParentRef: nextParent.ShortRef(),
LocationRef: lr,
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{ Folder: &FolderInfo{
ItemType: FolderItem, ItemType: FolderItem,
@ -150,11 +162,30 @@ func FolderEntriesForPath(parent *path.Builder) []folderEntry {
}) })
parent = nextParent parent = nextParent
if lfs != nil {
lfs = lfs.Dir()
}
} }
return folders return folders
} }
// assumes the pb contains a path like:
// <tenant>/<service>/<owner>/<category>/<logical_containers>...
// and returns a string with only <logical_containers>/...
func locationRefOf(pb *path.Builder) *path.Builder {
if pb == nil {
return nil
}
for i := 0; i < 4; i++ {
pb = pb.PopFront()
}
return pb
}
// AddFoldersForItem adds entries for the given folders. It skips adding entries that // AddFoldersForItem adds entries for the given folders. It skips adding entries that
// have been added by previous calls. // have been added by previous calls.
func (b *Builder) AddFoldersForItem(folders []folderEntry, itemInfo ItemInfo, updated bool) { func (b *Builder) AddFoldersForItem(folders []folderEntry, itemInfo ItemInfo, updated bool) {
@ -202,7 +233,11 @@ type Details struct {
DetailsModel DetailsModel
} }
func (d *Details) add(repoRef, shortRef, parentRef string, updated bool, info ItemInfo) { func (d *Details) add(
repoRef, shortRef, parentRef, locationRef string,
updated bool,
info ItemInfo,
) {
d.Entries = append(d.Entries, DetailsEntry{ d.Entries = append(d.Entries, DetailsEntry{
RepoRef: repoRef, RepoRef: repoRef,
ShortRef: shortRef, ShortRef: shortRef,
@ -233,9 +268,21 @@ type DetailsEntry struct {
RepoRef string `json:"repoRef"` RepoRef string `json:"repoRef"`
ShortRef string `json:"shortRef"` ShortRef string `json:"shortRef"`
ParentRef string `json:"parentRef,omitempty"` ParentRef string `json:"parentRef,omitempty"`
// LocationRef contains the logical path structure by its human-readable
// display names. IE: If an item is located at "/Inbox/Important", we
// hold that string in the LocationRef, while the actual IDs of each
// container are used for the RepoRef.
// LocationRef only holds the container values, and does not include
// the metadata prefixes (tenant, service, owner, etc) found in the
// repoRef.
// Currently only implemented for Exchange Calendars.
LocationRef string `json:"locationRef,omitempty"`
// Indicates the item was added or updated in this backup // Indicates the item was added or updated in this backup
// Always `true` for full backups // Always `true` for full backups
Updated bool `json:"updated"` Updated bool `json:"updated"`
ItemInfo ItemInfo
} }

View File

@ -39,8 +39,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{ {
name: "no info", name: "no info",
entry: DetailsEntry{ entry: DetailsEntry{
RepoRef: "reporef", RepoRef: "reporef",
ShortRef: "deadbeef", ShortRef: "deadbeef",
LocationRef: "locationref",
}, },
expectHs: []string{"ID"}, expectHs: []string{"ID"},
expectVs: []string{"deadbeef"}, expectVs: []string{"deadbeef"},
@ -48,8 +49,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{ {
name: "exchange event info", name: "exchange event info",
entry: DetailsEntry{ entry: DetailsEntry{
RepoRef: "reporef", RepoRef: "reporef",
ShortRef: "deadbeef", ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{ ItemInfo: ItemInfo{
Exchange: &ExchangeInfo{ Exchange: &ExchangeInfo{
ItemType: ExchangeEvent, ItemType: ExchangeEvent,
@ -67,8 +69,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{ {
name: "exchange contact info", name: "exchange contact info",
entry: DetailsEntry{ entry: DetailsEntry{
RepoRef: "reporef", RepoRef: "reporef",
ShortRef: "deadbeef", ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{ ItemInfo: ItemInfo{
Exchange: &ExchangeInfo{ Exchange: &ExchangeInfo{
ItemType: ExchangeContact, ItemType: ExchangeContact,
@ -82,8 +85,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{ {
name: "exchange mail info", name: "exchange mail info",
entry: DetailsEntry{ entry: DetailsEntry{
RepoRef: "reporef", RepoRef: "reporef",
ShortRef: "deadbeef", ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{ ItemInfo: ItemInfo{
Exchange: &ExchangeInfo{ Exchange: &ExchangeInfo{
ItemType: ExchangeMail, ItemType: ExchangeMail,
@ -99,8 +103,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{ {
name: "sharepoint info", name: "sharepoint info",
entry: DetailsEntry{ entry: DetailsEntry{
RepoRef: "reporef", RepoRef: "reporef",
ShortRef: "deadbeef", ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{ ItemInfo: ItemInfo{
SharePoint: &SharePointInfo{ SharePoint: &SharePointInfo{
ItemName: "itemName", ItemName: "itemName",
@ -128,8 +133,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{ {
name: "oneDrive info", name: "oneDrive info",
entry: DetailsEntry{ entry: DetailsEntry{
RepoRef: "reporef", RepoRef: "reporef",
ShortRef: "deadbeef", ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{ ItemInfo: ItemInfo{
OneDrive: &OneDriveInfo{ OneDrive: &OneDriveInfo{
ItemName: "itemName", ItemName: "itemName",
@ -157,37 +163,57 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
} }
var pathItemsTable = []struct { var pathItemsTable = []struct {
name string name string
ents []DetailsEntry ents []DetailsEntry
expectRefs []string expectRepoRefs []string
expectLocationRefs []string
}{ }{
{ {
name: "nil entries", name: "nil entries",
ents: nil, ents: nil,
expectRefs: []string{}, expectRepoRefs: []string{},
expectLocationRefs: []string{},
}, },
{ {
name: "single entry", name: "single entry",
ents: []DetailsEntry{ ents: []DetailsEntry{
{RepoRef: "abcde"}, {
RepoRef: "abcde",
LocationRef: "locationref",
},
}, },
expectRefs: []string{"abcde"}, expectRepoRefs: []string{"abcde"},
expectLocationRefs: []string{"locationref"},
}, },
{ {
name: "multiple entries", name: "multiple entries",
ents: []DetailsEntry{ ents: []DetailsEntry{
{RepoRef: "abcde"}, {
{RepoRef: "12345"}, RepoRef: "abcde",
LocationRef: "locationref",
},
{
RepoRef: "12345",
LocationRef: "locationref2",
},
}, },
expectRefs: []string{"abcde", "12345"}, expectRepoRefs: []string{"abcde", "12345"},
expectLocationRefs: []string{"locationref", "locationref2"},
}, },
{ {
name: "multiple entries with folder", name: "multiple entries with folder",
ents: []DetailsEntry{ ents: []DetailsEntry{
{RepoRef: "abcde"},
{RepoRef: "12345"},
{ {
RepoRef: "deadbeef", RepoRef: "abcde",
LocationRef: "locationref",
},
{
RepoRef: "12345",
LocationRef: "locationref2",
},
{
RepoRef: "deadbeef",
LocationRef: "locationref3",
ItemInfo: ItemInfo{ ItemInfo: ItemInfo{
Folder: &FolderInfo{ Folder: &FolderInfo{
DisplayName: "test folder", DisplayName: "test folder",
@ -195,7 +221,8 @@ var pathItemsTable = []struct {
}, },
}, },
}, },
expectRefs: []string{"abcde", "12345"}, expectRepoRefs: []string{"abcde", "12345"},
expectLocationRefs: []string{"locationref", "locationref2"},
}, },
} }
@ -207,7 +234,7 @@ func (suite *DetailsUnitSuite) TestDetailsModel_Path() {
Entries: test.ents, Entries: test.ents,
}, },
} }
assert.Equal(t, test.expectRefs, d.Paths()) assert.ElementsMatch(t, test.expectRepoRefs, d.Paths())
}) })
} }
} }
@ -222,10 +249,11 @@ func (suite *DetailsUnitSuite) TestDetailsModel_Items() {
} }
ents := d.Items() ents := d.Items()
assert.Len(t, ents, len(test.expectRefs)) assert.Len(t, ents, len(test.expectRepoRefs))
for _, e := range ents { for _, e := range ents {
assert.Contains(t, test.expectRefs, e.RepoRef) assert.Contains(t, test.expectRepoRefs, e.RepoRef)
assert.Contains(t, test.expectLocationRefs, e.LocationRef)
} }
}) })
} }
@ -253,9 +281,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
name: "MultipleFolders", name: "MultipleFolders",
folders: []folderEntry{ folders: []folderEntry{
{ {
RepoRef: "rr1", RepoRef: "rr1",
ShortRef: "sr1", ShortRef: "sr1",
ParentRef: "pr1", ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{ Folder: &FolderInfo{
Modified: folderTimeOlderThanItem, Modified: folderTimeOlderThanItem,
@ -263,9 +292,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
}, },
}, },
{ {
RepoRef: "rr2", RepoRef: "rr2",
ShortRef: "sr2", ShortRef: "sr2",
ParentRef: "pr2", ParentRef: "pr2",
LocationRef: "lr2",
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{ Folder: &FolderInfo{
Modified: folderTimeNewerThanItem, Modified: folderTimeNewerThanItem,
@ -283,9 +313,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
name: "MultipleFoldersWithRepeats", name: "MultipleFoldersWithRepeats",
folders: []folderEntry{ folders: []folderEntry{
{ {
RepoRef: "rr1", RepoRef: "rr1",
ShortRef: "sr1", ShortRef: "sr1",
ParentRef: "pr1", ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{ Folder: &FolderInfo{
Modified: folderTimeOlderThanItem, Modified: folderTimeOlderThanItem,
@ -293,9 +324,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
}, },
}, },
{ {
RepoRef: "rr2", RepoRef: "rr2",
ShortRef: "sr2", ShortRef: "sr2",
ParentRef: "pr2", ParentRef: "pr2",
LocationRef: "lr2",
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{ Folder: &FolderInfo{
Modified: folderTimeOlderThanItem, Modified: folderTimeOlderThanItem,
@ -303,9 +335,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
}, },
}, },
{ {
RepoRef: "rr1", RepoRef: "rr1",
ShortRef: "sr1", ShortRef: "sr1",
ParentRef: "pr1", ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{ Folder: &FolderInfo{
Modified: folderTimeOlderThanItem, Modified: folderTimeOlderThanItem,
@ -313,9 +346,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
}, },
}, },
{ {
RepoRef: "rr3", RepoRef: "rr3",
ShortRef: "sr3", ShortRef: "sr3",
ParentRef: "pr3", ParentRef: "pr3",
LocationRef: "lr3",
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{ Folder: &FolderInfo{
Modified: folderTimeNewerThanItem, Modified: folderTimeNewerThanItem,
@ -363,18 +397,20 @@ func (suite *DetailsUnitSuite) TestDetails_AddFoldersUpdate() {
name: "ItemNotUpdated_NoChange", name: "ItemNotUpdated_NoChange",
folders: []folderEntry{ folders: []folderEntry{
{ {
RepoRef: "rr1", RepoRef: "rr1",
ShortRef: "sr1", ShortRef: "sr1",
ParentRef: "pr1", ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{}, Folder: &FolderInfo{},
}, },
Updated: true, Updated: true,
}, },
{ {
RepoRef: "rr2", RepoRef: "rr2",
ShortRef: "sr2", ShortRef: "sr2",
ParentRef: "pr2", ParentRef: "pr2",
LocationRef: "lr2",
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{}, Folder: &FolderInfo{},
}, },
@ -390,17 +426,19 @@ func (suite *DetailsUnitSuite) TestDetails_AddFoldersUpdate() {
name: "ItemUpdated", name: "ItemUpdated",
folders: []folderEntry{ folders: []folderEntry{
{ {
RepoRef: "rr1", RepoRef: "rr1",
ShortRef: "sr1", ShortRef: "sr1",
ParentRef: "pr1", ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{}, Folder: &FolderInfo{},
}, },
}, },
{ {
RepoRef: "rr2", RepoRef: "rr2",
ShortRef: "sr2", ShortRef: "sr2",
ParentRef: "pr2", ParentRef: "pr2",
LocationRef: "lr2",
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{}, Folder: &FolderInfo{},
}, },
@ -482,9 +520,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFoldersDifferentServices() {
for _, test := range table { for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) { suite.T().Run(test.name, func(t *testing.T) {
folder := folderEntry{ folder := folderEntry{
RepoRef: "rr1", RepoRef: "rr1",
ShortRef: "sr1", ShortRef: "sr1",
ParentRef: "pr1", ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{}, Folder: &FolderInfo{},
}, },

View File

@ -1041,6 +1041,233 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() {
} }
} }
func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce_locationRef() {
var (
contact = stubRepoRef(path.ExchangeService, path.ContactsCategory, "uid", "id5/id6", "cid")
contactLocation = "conts/my_cont"
event = stubRepoRef(path.ExchangeService, path.EventsCategory, "uid", "id1/id2", "eid")
eventLocation = "cal/my_cal"
mail = stubRepoRef(path.ExchangeService, path.EmailCategory, "uid", "id3/id4", "mid")
mailLocation = "inbx/my_mail"
)
makeDeets := func(refs ...string) *details.Details {
deets := &details.Details{
DetailsModel: details.DetailsModel{
Entries: []details.DetailsEntry{},
},
}
for _, r := range refs {
var (
location string
itype = details.UnknownType
)
switch r {
case contact:
itype = details.ExchangeContact
location = contactLocation
case event:
itype = details.ExchangeEvent
location = eventLocation
case mail:
itype = details.ExchangeMail
location = mailLocation
}
deets.Entries = append(deets.Entries, details.DetailsEntry{
RepoRef: r,
LocationRef: location,
ItemInfo: details.ItemInfo{
Exchange: &details.ExchangeInfo{
ItemType: itype,
},
},
})
}
return deets
}
arr := func(s ...string) []string {
return s
}
table := []struct {
name string
deets *details.Details
makeSelector func() *ExchangeRestore
expect []string
}{
{
"no refs",
makeDeets(),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
return er
},
[]string{},
},
{
"contact only",
makeDeets(contact),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
return er
},
arr(contact),
},
{
"event only",
makeDeets(event),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
return er
},
arr(event),
},
{
"mail only",
makeDeets(mail),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
return er
},
arr(mail),
},
{
"all",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
return er
},
arr(contact, event, mail),
},
{
"only match contact",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.Contacts([]string{contactLocation}, []string{"cid"}))
return er
},
arr(contact),
},
{
"only match event",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.Events([]string{eventLocation}, []string{"eid"}))
return er
},
arr(event),
},
{
"only match mail",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.Mails([]string{mailLocation}, []string{"mid"}))
return er
},
arr(mail),
},
{
"exclude contact",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
er.Exclude(er.Contacts([]string{contactLocation}, []string{"cid"}))
return er
},
arr(event, mail),
},
{
"exclude event",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
er.Exclude(er.Events([]string{eventLocation}, []string{"eid"}))
return er
},
arr(contact, mail),
},
{
"exclude mail",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
er.Exclude(er.Mails([]string{mailLocation}, []string{"mid"}))
return er
},
arr(contact, event),
},
{
"filter on mail subject",
func() *details.Details {
ds := makeDeets(mail)
for i := range ds.Entries {
ds.Entries[i].Exchange.Subject = "has a subject"
}
return ds
}(),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
er.Filter(er.MailSubject("subj"))
return er
},
arr(mail),
},
{
"filter on mail subject multiple input categories",
func() *details.Details {
mds := makeDeets(mail)
for i := range mds.Entries {
mds.Entries[i].Exchange.Subject = "has a subject"
}
ds := makeDeets(contact, event)
ds.Entries = append(ds.Entries, mds.Entries...)
return ds
}(),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
er.Filter(er.MailSubject("subj"))
return er
},
arr(mail),
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
ctx, flush := tester.NewContext()
defer flush()
errs := mock.NewAdder()
sel := test.makeSelector()
results := sel.Reduce(ctx, test.deets, errs)
paths := results.Paths()
assert.Equal(t, test.expect, paths)
assert.Empty(t, errs.Errs)
})
}
}
func (suite *ExchangeSelectorSuite) TestScopesByCategory() { func (suite *ExchangeSelectorSuite) TestScopesByCategory() {
var ( var (
es = NewExchangeRestore(Any()) es = NewExchangeRestore(Any())

View File

@ -317,6 +317,27 @@ func reduce[T scopeT, C categoryT](
continue continue
} }
// if the details entry has a locationRef specified, use those folders in place
// of the repoRef folders, so that scopes can match against the display names
// instead of container IDs.
if len(ent.LocationRef) > 0 {
pb, err := path.Builder{}.
Append(path.Split(ent.LocationRef)...).
Append(repoPath.Item()).
ToDataLayerPath(
repoPath.Tenant(),
repoPath.ResourceOwner(),
repoPath.Service(),
repoPath.Category(),
true)
if err != nil {
errs.Add(clues.Wrap(err, "transforming locationRef to path").WithClues(ctx))
continue
}
repoPath = pb
}
// first check, every entry needs to match the selector's resource owners. // first check, every entry needs to match the selector's resource owners.
if !matchesResourceOwner.Compare(repoPath.ResourceOwner()) { if !matchesResourceOwner.Compare(repoPath.ResourceOwner()) {
continue continue

View File

@ -290,6 +290,50 @@ func (suite *SelectorScopesSuite) TestReduce() {
} }
} }
func (suite *SelectorScopesSuite) TestReduce_locationRef() {
deets := func() details.Details {
return details.Details{
DetailsModel: details.DetailsModel{
Entries: []details.DetailsEntry{
{
RepoRef: stubRepoRef(
pathServiceStub,
pathCatStub,
rootCatStub.String(),
"stub",
leafCatStub.String(),
),
LocationRef: "a/b/c//defg",
},
},
},
}
}
dataCats := map[path.CategoryType]mockCategorizer{
pathCatStub: rootCatStub,
}
for _, test := range reduceTestTable {
suite.T().Run(test.name, func(t *testing.T) {
ctx, flush := tester.NewContext()
defer flush()
errs := mock.NewAdder()
ds := deets()
result := reduce[mockScope](
ctx,
&ds,
test.sel().Selector,
dataCats,
errs)
require.NotNil(t, result)
require.Empty(t, errs.Errs, "iteration errors")
assert.Len(t, result.Entries, test.expectLen)
})
}
}
func (suite *SelectorScopesSuite) TestScopesByCategory() { func (suite *SelectorScopesSuite) TestScopesByCategory() {
t := suite.T() t := suite.T()
s1 := stubScope("") s1 := stubScope("")