add locationRef to details (#2427)

## Description

Adds a new reference to the details entry: locationRef. The location holds the human-readable version of the item's location in whatever m365 service sourced the item. Hookup is incomplete; follow-up PRs will fill out the functionality.
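
As a rough illustration of the two references (a hypothetical sketch; `RepoRef`/`LocationRef` names are inferred from the PR title and corso's details model, and the real field layout in `pkg/backup/details` is not shown in this diff):

```go
package details

// entryRefs is a hypothetical sketch of the two references a details
// entry carries after this PR; values mirror the example used in the
// Collection comment in the diff below.
type entryRefs struct {
	// RepoRef is the storage path; for Exchange Calendars it is now
	// built from m365 container IDs, e.g. ".../abcdxyz123/algha=lgkhal=t".
	RepoRef string
	// LocationRef is the human-readable path built from display names,
	// e.g. "/Inbox/Important".
	LocationRef string
}
```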

Also adds a LocationPather interface to the data collections, to pass this data back and forth between producers and consumers.
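
A minimal sketch of what that interface shape implies, inferred from the `LocationPath` accessor this diff adds to `Collection` (its exact placement and naming in the `data` package are assumptions):

```go
package data

import "github.com/alcionai/corso/src/pkg/path"

// LocationPather sketches the producer/consumer hand-off: any
// collection that can report a human-readable location exposes it
// alongside its storage path.
type LocationPather interface {
	// LocationPath returns the folder path built from display names
	// instead of m365 container IDs; currently only populated for
	// Exchange Calendars.
	LocationPath() path.Path
}
```

Consumers would type-assert a collection to this interface and fall back to the storage path when the assertion fails.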

Should be safe to merge into main.

## Does this PR need a docs update or release note?

- [x] 🕐 Yes, but in a later PR

## Type of change

- [x] 🌻 Feature
- [x] 🐛 Bugfix

## Issue(s)

* #2423

## Test Plan

- [x] Unit test
Keepers 2023-02-13 13:19:04 -07:00 committed by GitHub
parent 9695afbc8d
commit 52455356e5
61 changed files with 2280 additions and 1034 deletions


@@ -10,11 +10,15 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ### Fixed
 - Support for item.Attachment:Mail restore
+- Errors from duplicate names in Exchange Calendars
 ### Changed
+- When using Restore and Details on Exchange Calendars, the `--event-calendar` flag can now identify calendars by either a Display Name or a Microsoft 365 ID.
+- Exchange Calendars storage entries now construct their paths using container IDs instead of display names. This fixes cases where duplicate display names caused system failures.
 ### Known Issues
 - Nested attachments are currently not restored due to an [issue](https://github.com/microsoft/kiota-serialization-json-go/issues/61) discovered in the Graph APIs
+- Breaking changes to Exchange Calendar backups.
 ## [v0.3.0] (alpha) - 2023-2-07


@@ -137,14 +137,14 @@ var (
 			Name:     "EmailsFolderPrefixMatch",
 			Expected: testdata.ExchangeEmailItems,
 			Opts: utils.ExchangeOpts{
-				EmailFolder: []string{testdata.ExchangeEmailInboxPath.Folder()},
+				EmailFolder: []string{testdata.ExchangeEmailInboxPath.Folder(false)},
 			},
 		},
 		{
 			Name:     "EmailsFolderPrefixMatchTrailingSlash",
 			Expected: testdata.ExchangeEmailItems,
 			Opts: utils.ExchangeOpts{
-				EmailFolder: []string{testdata.ExchangeEmailInboxPath.Folder() + "/"},
+				EmailFolder: []string{testdata.ExchangeEmailInboxPath.Folder(false) + "/"},
 			},
 		},
 		{
@@ -154,7 +154,7 @@ var (
 				testdata.ExchangeEmailItems[2],
 			},
 			Opts: utils.ExchangeOpts{
-				EmailFolder: []string{testdata.ExchangeEmailBasePath2.Folder()},
+				EmailFolder: []string{testdata.ExchangeEmailBasePath2.Folder(false)},
 			},
 		},
 		{
@@ -164,7 +164,7 @@ var (
 				testdata.ExchangeEmailItems[2],
 			},
 			Opts: utils.ExchangeOpts{
-				EmailFolder: []string{testdata.ExchangeEmailBasePath2.Folder() + "/"},
+				EmailFolder: []string{testdata.ExchangeEmailBasePath2.Folder(false) + "/"},
 			},
 		},
 		{


@@ -172,7 +172,7 @@ func buildCollections(
 		return nil, err
 	}
-	mc := mockconnector.NewMockExchangeCollection(pth, len(c.items))
+	mc := mockconnector.NewMockExchangeCollection(pth, pth, len(c.items))
 	for i := 0; i < len(c.items); i++ {
 		mc.Names[i] = c.items[i].name


@@ -161,39 +161,6 @@ func (suite *ExchangeServiceSuite) TestOptionsForContacts() {
 	}
 }
-// TestGraphQueryFunctions verifies if Query functions APIs
-// through Microsoft Graph are functional
-func (suite *ExchangeServiceSuite) TestGraphQueryFunctions() {
-	ctx, flush := tester.NewContext()
-	defer flush()
-	c, err := NewClient(suite.credentials)
-	require.NoError(suite.T(), err)
-	userID := tester.M365UserID(suite.T())
-	tests := []struct {
-		name     string
-		function GraphQuery
-	}{
-		{
-			name:     "GraphQuery: Get All ContactFolders",
-			function: c.Contacts().GetAllContactFolderNamesForUser,
-		},
-		{
-			name:     "GraphQuery: Get All Calendars for User",
-			function: c.Events().GetAllCalendarNamesForUser,
-		},
-	}
-	for _, test := range tests {
-		suite.T().Run(test.name, func(t *testing.T) {
-			response, err := test.function(ctx, userID)
-			assert.NoError(t, err)
-			assert.NotNil(t, response)
-		})
-	}
-}
 //nolint:lll
 var stubHTMLContent = "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none\">\r\n<!--\r\np\r\n\t{margin-top:0;\r\n\tmargin-bottom:0}\r\n-->\r\n</style></head><body dir=\"ltr\"><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\">Happy New Year,</div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><br></div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\">In accordance with TPS report guidelines, there have been questions about how to address our activities SharePoint Cover page. Do you believe this is the best picture?&nbsp;</div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><br></div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><img class=\"FluidPluginCopy ContentPasted0 w-2070 h-1380\" size=\"5854817\" data-outlook-trace=\"F:1|T:1\" src=\"cid:85f4faa3-9851-40c7-ba0a-e63dce1185f9\" style=\"max-width:100%\"><br></div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><br></div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\">Let me know if this meets our culture requirements.</div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><br></div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\">Warm Regards,</div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><br></div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\">Dustin</div></body></html>"


@@ -80,28 +80,6 @@ func (c Contacts) GetItem(
 	return cont, ContactInfo(cont), nil
 }
-// GetAllContactFolderNamesForUser is a GraphQuery function for getting
-// ContactFolderId and display names for contacts. All other information is omitted.
-// Does not return the default Contact Folder
-func (c Contacts) GetAllContactFolderNamesForUser(
-	ctx context.Context,
-	user string,
-) (serialization.Parsable, error) {
-	options, err := optionsForContactFolders([]string{"displayName", "parentFolderId"})
-	if err != nil {
-		return nil, err
-	}
-	var resp models.ContactFolderCollectionResponseable
-	err = graph.RunWithRetry(func() error {
-		resp, err = c.stable.Client().UsersById(user).ContactFolders().Get(ctx, options)
-		return err
-	})
-	return resp, err
-}
 func (c Contacts) GetContainerByID(
 	ctx context.Context,
 	userID, dirID string,
@@ -169,10 +147,8 @@ func (c Contacts) EnumerateContainers(
 			continue
 		}
-		temp := graph.NewCacheFolder(fold, nil)
-		err = fn(temp)
-		if err != nil {
+		temp := graph.NewCacheFolder(fold, nil, nil)
+		if err := fn(temp); err != nil {
 			errs = multierror.Append(err, errs)
 			continue
 		}


@@ -144,25 +144,6 @@ func (c Events) GetItem(
 	return event, EventInfo(event), nil
 }
-func (c Client) GetAllCalendarNamesForUser(
-	ctx context.Context,
-	user string,
-) (serialization.Parsable, error) {
-	options, err := optionsForCalendars([]string{"name", "owner"})
-	if err != nil {
-		return nil, err
-	}
-	var resp models.CalendarCollectionResponseable
-	err = graph.RunWithRetry(func() error {
-		resp, err = c.stable.Client().UsersById(user).Calendars().Get(ctx, options)
-		return err
-	})
-	return resp, err
-}
 // EnumerateContainers iterates through all of the users current
 // calendars, converting each to a graph.CacheFolder, and
 // calling fn(cf) on each one. If fn(cf) errors, the error is
@@ -210,10 +191,11 @@ func (c Events) EnumerateContainers(
 			continue
 		}
-		temp := graph.NewCacheFolder(cd, path.Builder{}.Append(*cd.GetDisplayName()))
-		err = fn(temp)
-		if err != nil {
+		temp := graph.NewCacheFolder(
+			cd,
+			path.Builder{}.Append(*cd.GetId()),          // storage path
+			path.Builder{}.Append(*cd.GetDisplayName())) // display location
+		if err := fn(temp); err != nil {
 			errs = multierror.Append(err, errs)
 			continue
 		}


@@ -198,8 +198,7 @@ func (c Mail) EnumerateContainers(
 		}
 		for _, v := range resp.GetValue() {
-			temp := graph.NewCacheFolder(v, nil)
+			temp := graph.NewCacheFolder(v, nil, nil)
 			if err := fn(temp); err != nil {
 				errs = multierror.Append(errs, errors.Wrap(err, "iterating mail folders delta"))
 				continue


@@ -135,27 +135,6 @@ func optionsForCalendarsByID(moreOps []string) (
 	return options, nil
 }
-// optionsForContactFolders places allowed options for exchange.ContactFolder object
-// @return is first call in ContactFolders().GetWithRequestConfigurationAndResponseHandler
-func optionsForContactFolders(moreOps []string) (
-	*users.ItemContactFoldersRequestBuilderGetRequestConfiguration,
-	error,
-) {
-	selecting, err := buildOptions(moreOps, fieldsForFolders)
-	if err != nil {
-		return nil, err
-	}
-	requestParameters := &users.ItemContactFoldersRequestBuilderGetQueryParameters{
-		Select: selecting,
-	}
-	options := &users.ItemContactFoldersRequestBuilderGetRequestConfiguration{
-		QueryParameters: requestParameters,
-	}
-	return options, nil
-}
 func optionsForContactFolderByID(moreOps []string) (
 	*users.ItemContactFoldersContactFolderItemRequestBuilderGetRequestConfiguration,
 	error,


@@ -29,8 +29,10 @@ func (cfc *contactFolderCache) populateContactRoot(
 		return support.ConnectorStackErrorTraceWrap(err, "fetching root folder")
 	}
-	temp := graph.NewCacheFolder(f, path.Builder{}.Append(baseContainerPath...))
+	temp := graph.NewCacheFolder(
+		f,
+		path.Builder{}.Append(baseContainerPath...), // storage path
+		path.Builder{}.Append(baseContainerPath...)) // display location
 	if err := cfc.addFolder(temp); err != nil {
 		return errors.Wrap(err, "adding resolver dir")
 	}
@@ -56,7 +58,7 @@ func (cfc *contactFolderCache) Populate(
 		return errors.Wrap(err, "enumerating containers")
 	}
-	if err := cfc.populatePaths(ctx); err != nil {
+	if err := cfc.populatePaths(ctx, false); err != nil {
 		return errors.Wrap(err, "populating paths")
 	}


@@ -51,38 +51,52 @@ type containerResolver struct {
 func (cr *containerResolver) IDToPath(
 	ctx context.Context,
 	folderID string,
-) (*path.Builder, error) {
-	return cr.idToPath(ctx, folderID, 0)
+	useIDInPath bool,
+) (*path.Builder, *path.Builder, error) {
+	return cr.idToPath(ctx, folderID, 0, useIDInPath)
 }
 func (cr *containerResolver) idToPath(
 	ctx context.Context,
 	folderID string,
 	depth int,
-) (*path.Builder, error) {
+	useIDInPath bool,
+) (*path.Builder, *path.Builder, error) {
 	if depth >= maxIterations {
-		return nil, errors.New("path contains cycle or is too tall")
+		return nil, nil, errors.New("path contains cycle or is too tall")
 	}
 	c, ok := cr.cache[folderID]
 	if !ok {
-		return nil, errors.Errorf("folder %s not cached", folderID)
+		return nil, nil, errors.Errorf("folder %s not cached", folderID)
 	}
 	p := c.Path()
 	if p != nil {
-		return p, nil
+		return p, c.Location(), nil
 	}
-	parentPath, err := cr.idToPath(ctx, *c.GetParentFolderId(), depth+1)
+	parentPath, parentLoc, err := cr.idToPath(ctx, *c.GetParentFolderId(), depth+1, useIDInPath)
 	if err != nil {
-		return nil, errors.Wrap(err, "retrieving parent folder")
+		return nil, nil, errors.Wrap(err, "retrieving parent folder")
 	}
-	fullPath := parentPath.Append(*c.GetDisplayName())
+	toAppend := *c.GetDisplayName()
+	if useIDInPath {
+		toAppend = *c.GetId()
+	}
+	fullPath := parentPath.Append(toAppend)
 	c.SetPath(fullPath)
-	return fullPath, nil
+	var locPath *path.Builder
+	if parentLoc != nil {
+		locPath = parentLoc.Append(*c.GetDisplayName())
+		c.SetLocation(locPath)
+	}
+	return fullPath, locPath, nil
 }
 // PathInCache utility function to return m365ID of folder if the path.Folders
@@ -93,13 +107,13 @@ func (cr *containerResolver) PathInCache(pathString string) (string, bool) {
 		return "", false
 	}
-	for _, contain := range cr.cache {
-		if contain.Path() == nil {
+	for _, cc := range cr.cache {
+		if cc.Path() == nil {
 			continue
 		}
-		if contain.Path().String() == pathString {
-			return *contain.GetId(), true
+		if cc.Path().String() == pathString {
+			return *cc.GetId(), true
 		}
 	}
@@ -141,18 +155,21 @@ func (cr *containerResolver) Items() []graph.CachedContainer {
 // AddToCache adds container to map in field 'cache'
 // @returns error iff the required values are not accessible.
-func (cr *containerResolver) AddToCache(ctx context.Context, f graph.Container) error {
+func (cr *containerResolver) AddToCache(
+	ctx context.Context,
+	f graph.Container,
+	useIDInPath bool,
+) error {
 	temp := graph.CacheFolder{
 		Container: f,
 	}
 	if err := cr.addFolder(temp); err != nil {
 		return errors.Wrap(err, "adding cache folder")
 	}
 	// Populate the path for this entry so calls to PathInCache succeed no matter
 	// when they're made.
-	_, err := cr.IDToPath(ctx, *f.GetId())
+	_, _, err := cr.IDToPath(ctx, *f.GetId(), useIDInPath)
 	if err != nil {
 		return errors.Wrap(err, "adding cache entry")
 	}
@@ -160,12 +177,18 @@ func (cr *containerResolver) AddToCache(ctx context.Context, f graph.Container)
 	return nil
 }
-func (cr *containerResolver) populatePaths(ctx context.Context) error {
+// DestinationNameToID returns an empty string. This is only supported by exchange
+// calendars at this time.
+func (cr *containerResolver) DestinationNameToID(dest string) string {
+	return ""
+}
+func (cr *containerResolver) populatePaths(ctx context.Context, useIDInPath bool) error {
 	var errs *multierror.Error
 	// Populate all folder paths.
 	for _, f := range cr.Items() {
-		_, err := cr.IDToPath(ctx, *f.GetId())
+		_, _, err := cr.IDToPath(ctx, *f.GetId(), useIDInPath)
 		if err != nil {
 			errs = multierror.Append(errs, errors.Wrap(err, "populating path"))
 		}


@@ -1,6 +1,7 @@
 package exchange
 import (
+	"fmt"
 	stdpath "path"
 	"testing"
@@ -26,16 +27,19 @@ type mockContainer struct {
 	displayName *string
 	parentID    *string
 	p           *path.Builder
+	l           *path.Builder
 }
 //nolint:revive
 func (m mockContainer) GetId() *string { return m.id }
 //nolint:revive
 func (m mockContainer) GetParentFolderId() *string { return m.parentID }
 func (m mockContainer) GetDisplayName() *string    { return m.displayName }
+func (m mockContainer) Location() *path.Builder    { return m.l }
+func (m mockContainer) SetLocation(p *path.Builder) {}
 func (m mockContainer) Path() *path.Builder        { return m.p }
 func (m mockContainer) SetPath(p *path.Builder)    {}
 func strPtr(s string) *string {
 	return &s
@@ -168,7 +172,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
 					parentID:    nil,
 				},
 				nil,
-			),
+				nil),
 			check: assert.Error,
 		},
 		{
@@ -180,7 +184,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
 					parentID:    nil,
 				},
 				path.Builder{}.Append("foo"),
-			),
+				path.Builder{}.Append("loc")),
 			check: assert.NoError,
 		},
 		{
@@ -192,7 +196,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
 					parentID:    &testParentID,
 				},
 				path.Builder{}.Append("foo"),
-			),
+				path.Builder{}.Append("loc")),
 			check: assert.Error,
 		},
 		{
@@ -204,7 +208,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
 					parentID:    &testParentID,
 				},
 				path.Builder{}.Append("foo"),
-			),
+				path.Builder{}.Append("loc")),
 			check: assert.Error,
 		},
 		{
@@ -216,7 +220,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
 					parentID:    &testParentID,
 				},
 				nil,
-			),
+				nil),
 			check: assert.NoError,
 		},
 	}
@@ -238,52 +242,57 @@ func newMockCachedContainer(name string) *mockCachedContainer {
 }
 type mockCachedContainer struct {
 	id          string
 	parentID    string
 	displayName string
-	p            *path.Builder
-	expectedPath string
+	l                *path.Builder
+	p                *path.Builder
+	expectedPath     string
+	expectedLocation string
 }
 //nolint:revive
-func (m mockCachedContainer) GetId() *string {
-	return &m.id
-}
+func (m mockCachedContainer) GetId() *string { return &m.id }
 //nolint:revive
-func (m mockCachedContainer) GetParentFolderId() *string {
-	return &m.parentID
-}
-func (m mockCachedContainer) GetDisplayName() *string {
-	return &m.displayName
-}
-func (m mockCachedContainer) Path() *path.Builder {
-	return m.p
-}
-func (m *mockCachedContainer) SetPath(newPath *path.Builder) {
-	m.p = newPath
-}
-func resolverWithContainers(numContainers int) (*containerResolver, []*mockCachedContainer) {
+func (m mockCachedContainer) GetParentFolderId() *string { return &m.parentID }
+func (m mockCachedContainer) GetDisplayName() *string { return &m.displayName }
+func (m mockCachedContainer) Location() *path.Builder { return m.l }
+func (m *mockCachedContainer) SetLocation(newLoc *path.Builder) { m.l = newLoc }
+func (m mockCachedContainer) Path() *path.Builder { return m.p }
+func (m *mockCachedContainer) SetPath(newPath *path.Builder) { m.p = newPath }
+func resolverWithContainers(numContainers int, useIDInPath bool) (*containerResolver, []*mockCachedContainer) {
 	containers := make([]*mockCachedContainer, 0, numContainers)
 	for i := 0; i < numContainers; i++ {
-		containers = append(containers, newMockCachedContainer("a"))
+		containers = append(containers, newMockCachedContainer(fmt.Sprintf("%d", i)))
 	}
 	// Base case for the recursive lookup.
-	containers[0].p = path.Builder{}.Append(containers[0].displayName)
-	containers[0].expectedPath = containers[0].displayName
+	dn := containers[0].displayName
+	apndP := dn
+	if useIDInPath {
+		apndP = containers[0].id
+	}
+	containers[0].p = path.Builder{}.Append(apndP)
+	containers[0].expectedPath = apndP
+	containers[0].l = path.Builder{}.Append(dn)
+	containers[0].expectedLocation = dn
 	for i := 1; i < len(containers); i++ {
+		dn := containers[i].displayName
+		apndP := dn
+		if useIDInPath {
+			apndP = containers[i].id
+		}
 		containers[i].parentID = containers[i-1].id
-		containers[i].expectedPath = stdpath.Join(
-			containers[i-1].expectedPath,
-			containers[i].displayName,
-		)
+		containers[i].expectedPath = stdpath.Join(containers[i-1].expectedPath, apndP)
+		containers[i].expectedLocation = stdpath.Join(containers[i-1].expectedLocation, dn)
 	}
 	resolver := newContainerResolver()
@@ -303,13 +312,16 @@ func resolverWithContainers(numContainers int) (*containerResolver, []*mockCache
 type ConfiguredFolderCacheUnitSuite struct {
 	suite.Suite
 	fc       *containerResolver
+	fcWithID *containerResolver
 	allContainers    []*mockCachedContainer
+	containersWithID []*mockCachedContainer
 }
 func (suite *ConfiguredFolderCacheUnitSuite) SetupTest() {
-	suite.fc, suite.allContainers = resolverWithContainers(4)
+	suite.fc, suite.allContainers = resolverWithContainers(4, false)
+	suite.fcWithID, suite.containersWithID = resolverWithContainers(4, true)
 }
 func TestConfiguredFolderCacheUnitSuite(t *testing.T) {
@@ -339,8 +351,8 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestDepthLimit() {
 	for _, test := range table {
 		suite.T().Run(test.name, func(t *testing.T) {
-			resolver, containers := resolverWithContainers(test.numContainers)
-			_, err := resolver.IDToPath(ctx, containers[len(containers)-1].id)
+			resolver, containers := resolverWithContainers(test.numContainers, false)
+			_, _, err := resolver.IDToPath(ctx, containers[len(containers)-1].id, false)
 			test.check(t, err)
 		})
 	}
@@ -352,7 +364,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestPopulatePaths() {
 	t := suite.T()
-	require.NoError(t, suite.fc.populatePaths(ctx))
+	require.NoError(t, suite.fc.populatePaths(ctx, false))
 	items := suite.fc.Items()
 	gotPaths := make([]string, 0, len(items))
@@ -375,10 +387,24 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderNoPathsCached
 	for _, c := range suite.allContainers {
 		suite.T().Run(*c.GetDisplayName(), func(t *testing.T) {
-			p, err := suite.fc.IDToPath(ctx, c.id)
+			p, l, err := suite.fc.IDToPath(ctx, c.id, false)
 			require.NoError(t, err)
 			assert.Equal(t, c.expectedPath, p.String())
+			assert.Equal(t, c.expectedLocation, l.String())
+		})
+	}
+}
+func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderNoPathsCached_useID() {
+	ctx, flush := tester.NewContext()
+	defer flush()
+	for _, c := range suite.containersWithID {
+		suite.T().Run(*c.GetDisplayName(), func(t *testing.T) {
+			p, l, err := suite.fcWithID.IDToPath(ctx, c.id, true)
+			require.NoError(t, err)
+			assert.Equal(t, c.expectedPath, p.String())
+			assert.Equal(t, c.expectedLocation, l.String())
 		})
 	}
 }
@@ -390,17 +416,37 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderCachesPaths()
 	t := suite.T()
 	c := suite.allContainers[len(suite.allContainers)-1]
-	p, err := suite.fc.IDToPath(ctx, c.id)
+	p, l, err := suite.fc.IDToPath(ctx, c.id, false)
 	require.NoError(t, err)
 	assert.Equal(t, c.expectedPath, p.String())
+	assert.Equal(t, c.expectedLocation, l.String())
 	c.parentID = "foo"
-	p, err = suite.fc.IDToPath(ctx, c.id)
+	p, l, err = suite.fc.IDToPath(ctx, c.id, false)
 	require.NoError(t, err)
 	assert.Equal(t, c.expectedPath, p.String())
+	assert.Equal(t, c.expectedLocation, l.String())
+}
+func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderCachesPaths_useID() {
+	ctx, flush := tester.NewContext()
+	defer flush()
+	t := suite.T()
+	c := suite.containersWithID[len(suite.containersWithID)-1]
+	p, l, err := suite.fcWithID.IDToPath(ctx, c.id, true)
+	require.NoError(t, err)
+	assert.Equal(t, c.expectedPath, p.String())
+	assert.Equal(t, c.expectedLocation, l.String())
+	c.parentID = "foo"
+	p, l, err = suite.fcWithID.IDToPath(ctx, c.id, true)
+	require.NoError(t, err)
+	assert.Equal(t, c.expectedPath, p.String())
+	assert.Equal(t, c.expectedLocation, l.String())
 }
 func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsParentNotFound() {
@@ -413,7 +459,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsParentN
 	delete(suite.fc.cache, almostLast.id)
-	_, err := suite.fc.IDToPath(ctx, last.id)
+	_, _, err := suite.fc.IDToPath(ctx, last.id, false)
 	assert.Error(t, err)
 }
@@ -423,7 +469,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsNotFoun
 	t := suite.T()
-	_, err := suite.fc.IDToPath(ctx, "foo")
+	_, _, err := suite.fc.IDToPath(ctx, "foo", false)
 	assert.Error(t, err)
 }
@@ -431,20 +477,26 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestAddToCache() {
 	ctx, flush := tester.NewContext()
 	defer flush()
-	t := suite.T()
-	last := suite.allContainers[len(suite.allContainers)-1]
-	m := newMockCachedContainer("testAddFolder")
+	var (
+		dest = "testAddFolder"
+		t    = suite.T()
+		last = suite.allContainers[len(suite.allContainers)-1]
+		m    = newMockCachedContainer(dest)
+	)
 	m.parentID = last.id
 	m.expectedPath = stdpath.Join(last.expectedPath, m.displayName)
+	m.expectedLocation = stdpath.Join(last.expectedPath, m.displayName)
-	require.NoError(t, suite.fc.AddToCache(ctx, m))
+	require.Empty(t, suite.fc.DestinationNameToID(dest), "destination not yet added to cache")
+	require.NoError(t, suite.fc.AddToCache(ctx, m, false))
+	require.Empty(t, suite.fc.DestinationNameToID(dest),
+		"destination id from cache, still empty, because this is not a calendar")
-	p, err := suite.fc.IDToPath(ctx, m.id)
+	p, l, err := suite.fc.IDToPath(ctx, m.id, false)
 	require.NoError(t, err)
 	assert.Equal(t, m.expectedPath, p.String())
+	assert.Equal(t, m.expectedLocation, l.String())
 }
 // ---------------------------------------------------------------------------
@@ -506,32 +558,35 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
 			pathFunc2    func(t *testing.T) path.Path
 			category     path.CategoryType
 			folderPrefix string
+			useIDForPath bool
 		}{
 			{
 				name:     "Mail Cache Test",
 				category: path.EmailCategory,
 				pathFunc1: func(t *testing.T) path.Path {
-					pth, err := path.Builder{}.Append("Griffindor").
-						Append("Croix").ToDataLayerExchangePathForCategory(
-						suite.credentials.AzureTenantID,
-						user,
-						path.EmailCategory,
-						false,
-					)
+					pth, err := path.Builder{}.
+						Append("Griffindor").
+						Append("Croix").
+						ToDataLayerExchangePathForCategory(
+							suite.credentials.AzureTenantID,
+							user,
+							path.EmailCategory,
+							false)
 					require.NoError(t, err)
 					return pth
 				},
 				pathFunc2: func(t *testing.T) path.Path {
-					pth, err := path.Builder{}.Append("Griffindor").
-						Append("Felicius").ToDataLayerExchangePathForCategory(
-						suite.credentials.AzureTenantID,
-						user,
-						path.EmailCategory,
-						false,
-					)
+					pth, err := path.Builder{}.
+						Append("Griffindor").
+						Append("Felicius").
+						ToDataLayerExchangePathForCategory(
+							suite.credentials.AzureTenantID,
+							user,
+							path.EmailCategory,
+							false)
 					require.NoError(t, err)
 					return pth
 				},
 			},
@@ -539,63 +594,65 @@
 			{
 				name:     "Contact Cache Test",
 				category: path.ContactsCategory,
 				pathFunc1: func(t *testing.T) path.Path {
-					aPath, err := path.Builder{}.Append("HufflePuff").
+					aPath, err := path.Builder{}.
+						Append("HufflePuff").
 						ToDataLayerExchangePathForCategory(
 							suite.credentials.AzureTenantID,
 							user,
 							path.ContactsCategory,
-							false,
-						)
+							false)
 					require.NoError(t, err)
 					return aPath
 				},
 				pathFunc2: func(t *testing.T) path.Path {
-					aPath, err := path.Builder{}.Append("Ravenclaw").
+					aPath, err := path.Builder{}.
+						Append("Ravenclaw").
 						ToDataLayerExchangePathForCategory(
 							suite.credentials.AzureTenantID,
 							user,
							path.ContactsCategory,
-							false,
-						)
+							false)
 					require.NoError(t, err)
 					return aPath
 				},
 			},
 			{
 				name:         "Event Cache Test",
 				category:     path.EventsCategory,
+				useIDForPath: true,
 				pathFunc1: func(t *testing.T) path.Path {
-					aPath, err := path.Builder{}.Append("Durmstrang").
+					aPath, err := path.Builder{}.
+						Append("Durmstrang").
 						ToDataLayerExchangePathForCategory(
 							suite.credentials.AzureTenantID,
 							user,
 							path.EventsCategory,
-							false,
-						)
+							false)
 					require.NoError(t, err)
 					return aPath
 				},
 				pathFunc2: func(t *testing.T) path.Path {
-					aPath, err := path.Builder{}.Append("Beauxbatons").
+					aPath, err := path.Builder{}.
+						Append("Beauxbatons").
 						ToDataLayerExchangePathForCategory(
 							suite.credentials.AzureTenantID,
 							user,
 							path.EventsCategory,
-							false,
-						)
+							false)
 					require.NoError(t, err)
 					return aPath
 				},
-				folderPrefix: calendarOthersFolder,
 			},
 		}
 	)
 	for _, test := range tests {
 		suite.T().Run(test.name, func(t *testing.T) {
-			folderID, err := CreateContainerDestinaion(
+			folderID, err := CreateContainerDestination(
 				ctx,
 				m365,
 				test.pathFunc1(t),
@@ -605,21 +662,26 @@
 			resolver := directoryCaches[test.category]
-			_, err = resolver.IDToPath(ctx, folderID)
+			_, _, err = resolver.IDToPath(ctx, folderID, test.useIDForPath)
 			assert.NoError(t, err)
-			secondID, err := CreateContainerDestinaion(
+			parentContainer := folderName
+			if test.useIDForPath {
+				parentContainer = folderID
+			}
+			secondID, err := CreateContainerDestination(
 				ctx,
 				m365,
 				test.pathFunc2(t),
-				folderName,
+				parentContainer,
 				directoryCaches)
 			require.NoError(t, err)
-			_, err = resolver.IDToPath(ctx, secondID)
+			_, _, err = resolver.IDToPath(ctx, secondID, test.useIDForPath)
 			require.NoError(t, err)
-			p := stdpath.Join(test.folderPrefix, folderName)
+			p := stdpath.Join(test.folderPrefix, parentContainer)
 			_, ok := resolver.PathInCache(p)
 			require.True(t, ok, "looking for path in cache: %s", p)
 		})


@@ -9,6 +9,7 @@ import (
 	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"
+	"github.com/alcionai/corso/src/internal/connector/exchange/api"
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/connector/support"
 	"github.com/alcionai/corso/src/internal/data"
@@ -274,8 +275,8 @@ func (suite *DataCollectionsIntegrationSuite) TestMailFetch() {
 			continue
 		}
-		require.NotEmpty(t, c.FullPath().Folder())
-		folder := c.FullPath().Folder()
+		require.NotEmpty(t, c.FullPath().Folder(false))
+		folder := c.FullPath().Folder(false)
 		delete(test.folderNames, folder)
 	}
@@ -507,7 +508,7 @@ func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression
 			continue
 		}
-		assert.Equal(t, edc.FullPath().Folder(), DefaultContactFolder)
+		assert.Equal(t, edc.FullPath().Folder(false), DefaultContactFolder)
 		assert.NotZero(t, count)
 	}
@@ -527,13 +528,35 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
 	users := []string{suite.user}
+	ac, err := api.NewClient(acct)
+	require.NoError(suite.T(), err, "creating client")
+	var (
+		calID  string
+		bdayID string
+	)
+	fn := func(gcf graph.CacheFolder) error {
+		if *gcf.GetDisplayName() == DefaultCalendar {
+			calID = *gcf.GetId()
+		}
+		if *gcf.GetDisplayName() == "Birthdays" {
+			bdayID = *gcf.GetId()
+		}
+		return nil
+	}
+	require.NoError(suite.T(), ac.Events().EnumerateContainers(ctx, suite.user, DefaultCalendar, fn))
 	tests := []struct {
 		name, expected string
 		scope          selectors.ExchangeScope
 	}{
 		{
 			name:     "Default Event Calendar",
-			expected: DefaultCalendar,
+			expected: calID,
 			scope: selectors.NewExchangeBackup(users).EventCalendars(
 				[]string{DefaultCalendar},
 				selectors.PrefixMatch(),
@@ -541,9 +564,9 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
 		},
 		{
 			name:     "Birthday Calendar",
-			expected: calendarOthersFolder + "/Birthdays",
+			expected: bdayID,
 			scope: selectors.NewExchangeBackup(users).EventCalendars(
-				[]string{calendarOthersFolder + "/Birthdays"},
+				[]string{"Birthdays"},
 				selectors.PrefixMatch(),
 			)[0],
 		},
@@ -571,9 +594,9 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
 			if edc.FullPath().Service() != path.ExchangeMetadataService {
 				isMetadata = true
-				assert.Equal(t, test.expected, edc.FullPath().Folder())
+				assert.Equal(t, test.expected, edc.FullPath().Folder(false))
 			} else {
-				assert.Equal(t, "", edc.FullPath().Folder())
+				assert.Equal(t, "", edc.FullPath().Folder(false))
 			}
 			for item := range edc.Items() {


@@ -14,9 +14,10 @@ var _ graph.ContainerResolver = &eventCalendarCache{}
 type eventCalendarCache struct {
 	*containerResolver
 	enumer containersEnumerator
 	getter containerGetter
 	userID string
+	newAdditions map[string]string
 }
 // init ensures that the structure's fields are initialized.
@@ -44,7 +45,10 @@ func (ecc *eventCalendarCache) populateEventRoot(ctx context.Context) error {
 		return errors.Wrap(err, "fetching calendar "+support.ConnectorStackErrorTrace(err))
 	}
-	temp := graph.NewCacheFolder(f, path.Builder{}.Append(container))
+	temp := graph.NewCacheFolder(
+		f,
+		path.Builder{}.Append(*f.GetId()),          // storage path
+		path.Builder{}.Append(*f.GetDisplayName())) // display location
 	if err := ecc.addFolder(temp); err != nil {
 		return errors.Wrap(err, "initializing calendar resolver")
 	}
@@ -68,16 +72,12 @@ func (ecc *eventCalendarCache) Populate(
 		ctx,
 		ecc.userID,
 		"",
-		func(cf graph.CacheFolder) error {
-			cf.SetPath(path.Builder{}.Append(calendarOthersFolder, *cf.GetDisplayName()))
-			return ecc.addFolder(cf)
-		},
-	)
+		ecc.addFolder)
 	if err != nil {
 		return errors.Wrap(err, "enumerating containers")
 	}
-	if err := ecc.populatePaths(ctx); err != nil {
+	if err := ecc.populatePaths(ctx, true); err != nil {
 		return errors.Wrap(err, "establishing calendar paths")
 	}
@@ -86,23 +86,40 @@ func (ecc *eventCalendarCache) Populate(
 // AddToCache adds container to map in field 'cache'
 // @returns error iff the required values are not accessible.
-func (ecc *eventCalendarCache) AddToCache(ctx context.Context, f graph.Container) error {
+func (ecc *eventCalendarCache) AddToCache(ctx context.Context, f graph.Container, useIDInPath bool) error {
 	if err := checkIDAndName(f); err != nil {
 		return errors.Wrap(err, "validating container")
 	}
-	temp := graph.NewCacheFolder(f, path.Builder{}.Append(calendarOthersFolder, *f.GetDisplayName()))
+	temp := graph.NewCacheFolder(
+		f,
+		path.Builder{}.Append(*f.GetId()),          // storage path
+		path.Builder{}.Append(*f.GetDisplayName())) // display location
+	if len(ecc.newAdditions) == 0 {
+		ecc.newAdditions = map[string]string{}
+	}
+	ecc.newAdditions[*f.GetDisplayName()] = *f.GetId()
 	if err := ecc.addFolder(temp); err != nil {
+		delete(ecc.newAdditions, *f.GetDisplayName())
 		return errors.Wrap(err, "adding container")
 	}
 	// Populate the path for this entry so calls to PathInCache succeed no matter
 	// when they're made.
-	_, err := ecc.IDToPath(ctx, *f.GetId())
+	_, _, err := ecc.IDToPath(ctx, *f.GetId(), true)
 	if err != nil {
+		delete(ecc.newAdditions, *f.GetDisplayName())
 		return errors.Wrap(err, "setting path to container id")
 	}
 	return nil
 }
+// DestinationNameToID returns the m365 ID of the destination container with
+// the given display name, if it was added to this cache as a new destination.
+func (ecc *eventCalendarCache) DestinationNameToID(dest string) string {
+	return ecc.newAdditions[dest]
+}


@@ -77,6 +77,11 @@ type Collection struct {
 	// moved. It will be empty on its first retrieval.
 	prevPath path.Path
+	// LocationPath contains the path with human-readable display names.
+	// IE: "/Inbox/Important" instead of "/abcdxyz123/algha=lgkhal=t"
+	// Currently only implemented for Exchange Calendars.
+	locationPath path.Path
 	state data.CollectionState
 	// doNotMergeItems should only be true if the old delta token expired.
@@ -91,7 +96,7 @@ type Collection struct {
 // or notMoved (if they match).
 func NewCollection(
 	user string,
-	curr, prev path.Path,
+	curr, prev, location path.Path,
 	category path.CategoryType,
 	items itemer,
 	statusUpdater support.StatusUpdater,
@@ -99,18 +104,19 @@ func NewCollection(
 	doNotMergeItems bool,
 ) Collection {
 	collection := Collection{
+		added:           make(map[string]struct{}, 0),
 		category:        category,
 		ctrl:            ctrlOpts,
 		data:            make(chan data.Stream, collectionChannelBufferSize),
 		doNotMergeItems: doNotMergeItems,
 		fullPath:        curr,
-		added:           make(map[string]struct{}, 0),
-		removed:         make(map[string]struct{}, 0),
+		items:           items,
+		locationPath:    location,
 		prevPath:        prev,
+		removed:         make(map[string]struct{}, 0),
 		state:           data.StateOf(prev, curr),
 		statusUpdater:   statusUpdater,
 		user:            user,
-		items:           items,
 	}
 	return collection
@@ -128,6 +134,12 @@ func (col *Collection) FullPath() path.Path {
 	return col.fullPath
 }
+// LocationPath produces the Collection's full path, but with display names
+// instead of IDs in the folders. Only populated for Calendars.
+func (col *Collection) LocationPath() path.Path {
+	return col.locationPath
+}
 // TODO(ashmrtn): Fill in with previous path once GraphConnector compares old
 // and new folder hierarchies.
 func (col Collection) PreviousPath() path.Path {
@@ -172,7 +184,7 @@ func (col *Collection) streamItems(ctx context.Context) {
 		ctx,
 		col.fullPath.Category().String(),
 		observe.PII(user),
-		observe.PII(col.fullPath.Folder()))
+		observe.PII(col.fullPath.Folder(false)))
 	go closer()
@@ -331,7 +343,7 @@ func (col *Collection) finishPopulation(ctx context.Context, success int, totalB
 			TotalBytes: totalBytes,
 		},
 		errs,
-		col.fullPath.Folder())
+		col.fullPath.Folder(false))
 	logger.Ctx(ctx).Debugw("done streaming items", "status", status.String())
 	col.statusUpdater(status)
 }
} }


@@ -12,8 +12,10 @@ import (
 	"github.com/alcionai/corso/src/internal/common"
 	"github.com/alcionai/corso/src/internal/connector/graph"
+	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/backup/details"
+	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/path"
 )
@@ -116,6 +118,70 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataCollection_NewExchange
 	suite.Equal(fullPath, edc.FullPath())
 }
+func (suite *ExchangeDataCollectionSuite) TestNewCollection_state() {
+	fooP, err := path.Builder{}.
+		Append("foo").
+		ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false)
+	require.NoError(suite.T(), err)
+	barP, err := path.Builder{}.
+		Append("bar").
+		ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false)
+	require.NoError(suite.T(), err)
+	locP, err := path.Builder{}.
+		Append("human-readable").
+		ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false)
+	require.NoError(suite.T(), err)
+	table := []struct {
+		name   string
+		prev   path.Path
+		curr   path.Path
+		loc    path.Path
+		expect data.CollectionState
+	}{
+		{
+			name:   "new",
+			curr:   fooP,
+			loc:    locP,
+			expect: data.NewState,
+		},
+		{
+			name:   "not moved",
+			prev:   fooP,
+			curr:   fooP,
+			loc:    locP,
+			expect: data.NotMovedState,
+		},
+		{
+			name:   "moved",
+			prev:   fooP,
+			curr:   barP,
+			loc:    locP,
+			expect: data.MovedState,
+		},
+		{
+			name:   "deleted",
+			prev:   fooP,
+			expect: data.DeletedState,
+		},
+	}
+	for _, test := range table {
+		suite.T().Run(test.name, func(t *testing.T) {
+			c := NewCollection(
+				"u",
+				test.curr, test.prev, test.loc,
+				0,
+				&mockItemer{}, nil,
+				control.Options{},
+				false)
+			assert.Equal(t, test.expect, c.State(), "collection state")
+			assert.Equal(t, test.curr, c.fullPath, "full path")
+			assert.Equal(t, test.prev, c.prevPath, "prev path")
+			assert.Equal(t, test.loc, c.locationPath, "location path")
+		})
+	}
+}
 func (suite *ExchangeDataCollectionSuite) TestGetItemWithRetries() {
 	table := []struct {
 		name string


@@ -38,5 +38,4 @@ const (
 	rootFolderAlias      = "msgfolderroot"
 	DefaultContactFolder = "Contacts"
 	DefaultCalendar      = "Calendar"
-	calendarOthersFolder = "Other Calendars"
 )


@@ -47,6 +47,9 @@ func (suite *CacheResolverSuite) TestPopulate() {
 	ac, err := api.NewClient(suite.credentials)
 	require.NoError(suite.T(), err)
+	cal, err := ac.Events().GetContainerByID(ctx, tester.M365UserID(suite.T()), DefaultCalendar)
+	require.NoError(suite.T(), err)
 	eventFunc := func(t *testing.T) graph.ContainerResolver {
 		return &eventCalendarCache{
 			userID: tester.M365UserID(t),
@@ -64,61 +67,61 @@
 	}
 	tests := []struct {
-		name, folderName, root, basePath string
+		name, folderInCache, root, basePath string
 		resolverFunc func(t *testing.T) graph.ContainerResolver
 		canFind      assert.BoolAssertionFunc
 	}{
 		{
 			name:          "Default Event Cache",
-			folderName:    DefaultCalendar,
+			folderInCache: *cal.GetId(),
 			root:          DefaultCalendar,
 			basePath:      DefaultCalendar,
 			resolverFunc:  eventFunc,
 			canFind:       assert.True,
 		},
 		{
 			name:          "Default Event Folder Hidden",
-			root:          DefaultCalendar,
-			folderName:    DefaultContactFolder,
+			folderInCache: DefaultContactFolder,
+			root:          DefaultCalendar,
 			canFind:       assert.False,
 			resolverFunc:  eventFunc,
 		},
 		{
 			name:          "Name Not in Cache",
-			folderName:    "testFooBarWhoBar",
+			folderInCache: "testFooBarWhoBar",
 			root:          DefaultCalendar,
 			canFind:       assert.False,
 			resolverFunc:  eventFunc,
 		},
 		{
 			name:          "Default Contact Cache",
-			folderName:    DefaultContactFolder,
+			folderInCache: DefaultContactFolder,
 			root:          DefaultContactFolder,
 			basePath:      DefaultContactFolder,
 			canFind:       assert.True,
 			resolverFunc:  contactFunc,
 		},
 		{
 			name:          "Default Contact Hidden",
-			folderName:    DefaultContactFolder,
+			folderInCache: DefaultContactFolder,
 			root:          DefaultContactFolder,
 			canFind:       assert.False,
 			resolverFunc:  contactFunc,
 		},
 		{
 			name:          "Name Not in Cache",
-			folderName:    "testFooBarWhoBar",
+			folderInCache: "testFooBarWhoBar",
 			root:          DefaultContactFolder,
 			canFind:       assert.False,
 			resolverFunc:  contactFunc,
 		},
 	}
 	for _, test := range tests {
 		suite.T().Run(test.name, func(t *testing.T) {
 			resolver := test.resolverFunc(t)
 			require.NoError(t, resolver.Populate(ctx, test.root, test.basePath))
-			_, isFound := resolver.PathInCache(test.folderName)
+			_, isFound := resolver.PathInCache(test.folderInCache)
 			test.canFind(t, isFound)
 		})
 	}


@@ -53,7 +53,9 @@ func (mc *mailFolderCache) populateMailRoot(ctx context.Context) error {
 			directory = DefaultMailFolder
 		}
-		temp := graph.NewCacheFolder(f, path.Builder{}.Append(directory))
+		temp := graph.NewCacheFolder(f,
+			path.Builder{}.Append(directory), // storage path
+			path.Builder{}.Append(directory)) // display location
 		if err := mc.addFolder(temp); err != nil {
 			return errors.Wrap(err, "adding resolver dir")
 		}
@@ -81,7 +83,7 @@ func (mc *mailFolderCache) Populate(
 		return errors.Wrap(err, "enumerating containers")
 	}
-	if err := mc.populatePaths(ctx); err != nil {
+	if err := mc.populatePaths(ctx, false); err != nil {
 		return errors.Wrap(err, "populating paths")
 	}


@@ -18,9 +18,9 @@ const (
 	// top-level folders right now.
 	//nolint:lll
 	testFolderID = "AAMkAGZmNjNlYjI3LWJlZWYtNGI4Mi04YjMyLTIxYThkNGQ4NmY1MwAuAAAAAADCNgjhM9QmQYWNcI7hCpPrAQDSEBNbUIB9RL6ePDeF3FIYAABl7AqpAAA="
 	//nolint:lll
 	topFolderID = "AAMkAGZmNjNlYjI3LWJlZWYtNGI4Mi04YjMyLTIxYThkNGQ4NmY1MwAuAAAAAADCNgjhM9QmQYWNcI7hCpPrAQDSEBNbUIB9RL6ePDeF3FIYAAAAAAEIAAA="
+	//nolint:lll
 	// Full folder path for the folder above.
 	expectedFolderPath = "toplevel/subFolder/subsubfolder"
 )
@@ -94,9 +94,10 @@ func (suite *MailFolderCacheIntegrationSuite) TestDeltaFetch() {
 			require.NoError(t, mfc.Populate(ctx, test.root, test.path...))
-			p, err := mfc.IDToPath(ctx, testFolderID)
+			p, l, err := mfc.IDToPath(ctx, testFolderID, true)
 			require.NoError(t, err)
 			t.Logf("Path: %s\n", p.String())
+			t.Logf("Location: %s\n", l.String())
 			expectedPath := stdpath.Join(append(test.path, expectedFolderPath)...)
 			assert.Equal(t, expectedPath, p.String())


@@ -86,44 +86,70 @@ func PopulateExchangeContainerResolver(
 }

 // Returns true if the container passes the scope comparison and should be included.
-// Also returns the path representing the directory.
+// Returns:
+// - the path representing the directory as it should be stored in the repository.
+// - the human-readable path using display names.
+// - true if the path passes the scope comparison.
 func includeContainer(
 	qp graph.QueryParams,
 	c graph.CachedContainer,
 	scope selectors.ExchangeScope,
-) (path.Path, bool) {
+) (path.Path, path.Path, bool) {
 	var (
-		category  = scope.Category().PathType()
 		directory string
+		locPath   path.Path
+		category  = scope.Category().PathType()
 		pb        = c.Path()
+		loc       = c.Location()
 	)

 	// Clause ensures that DefaultContactFolder is inspected properly
 	if category == path.ContactsCategory && *c.GetDisplayName() == DefaultContactFolder {
-		pb = c.Path().Append(DefaultContactFolder)
+		pb = pb.Append(DefaultContactFolder)
+
+		if loc != nil {
+			loc = loc.Append(DefaultContactFolder)
+		}
 	}

 	dirPath, err := pb.ToDataLayerExchangePathForCategory(
 		qp.Credentials.AzureTenantID,
 		qp.ResourceOwner,
 		category,
-		false,
-	)
+		false)
 	// Containers without a path (e.g. Root mail folder) always err here.
 	if err != nil {
-		return nil, false
+		return nil, nil, false
 	}

-	directory = pb.String()
+	directory = dirPath.Folder(false)
+
+	if loc != nil {
+		locPath, err = loc.ToDataLayerExchangePathForCategory(
+			qp.Credentials.AzureTenantID,
+			qp.ResourceOwner,
+			category,
+			false)
+		// Containers without a path (e.g. Root mail folder) always err here.
+		if err != nil {
+			return nil, nil, false
+		}
+
+		directory = locPath.Folder(false)
+	}
+
+	var ok bool

 	switch category {
 	case path.EmailCategory:
-		return dirPath, scope.Matches(selectors.ExchangeMailFolder, directory)
+		ok = scope.Matches(selectors.ExchangeMailFolder, directory)
 	case path.ContactsCategory:
-		return dirPath, scope.Matches(selectors.ExchangeContactFolder, directory)
+		ok = scope.Matches(selectors.ExchangeContactFolder, directory)
 	case path.EventsCategory:
-		return dirPath, scope.Matches(selectors.ExchangeEventCalendar, directory)
+		ok = scope.Matches(selectors.ExchangeEventCalendar, directory)
 	default:
-		return dirPath, false
+		return nil, nil, false
 	}
+
+	return dirPath, locPath, ok
 }
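The split between the two returned paths is the heart of this change: the storage path is built from container IDs, which survive display-name edits, while the location path carries the names users actually see, and scope matching runs against the latter. Below is a minimal, self-contained sketch of that idea; everything in it (string slices instead of corso's path.Builder, a prefix check instead of selector matching) is an illustrative stand-in, not the real API.

package main

import (
	"fmt"
	"strings"
)

// container is a stand-in for graph.CachedContainer: it carries both an
// ID-based path and a display-name-based location.
type container struct {
	idPath   []string
	location []string
}

// includeContainer mirrors the shape of the real function: it returns the
// storage path, the human-readable location, and whether the scope matched.
// Matching runs against the display-name form so users can select folders
// by the names they see in Outlook.
func includeContainer(c container, scopeFolder string) (string, string, bool) {
	storage := strings.Join(c.idPath, "/")
	loc := strings.Join(c.location, "/")

	return storage, loc, strings.HasPrefix(loc, scopeFolder)
}

func main() {
	c := container{
		idPath:   []string{"id-0xA1", "id-0xB2"},
		location: []string{"Inbox", "Receipts"},
	}

	storage, loc, ok := includeContainer(c, "Inbox")
	fmt.Println(storage) // id-0xA1/id-0xB2
	fmt.Println(loc, ok) // Inbox/Receipts true
}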
@@ -70,7 +70,7 @@ func filterContainersAndFillCollections(
 	cID := *c.GetId()
 	delete(tombstones, cID)

-	currPath, ok := includeContainer(qp, c, scope)
+	currPath, locPath, ok := includeContainer(qp, c, scope)
 	// Only create a collection if the path matches the scope.
 	if !ok {
 		continue

@@ -110,10 +110,15 @@ func filterContainersAndFillCollections(
 		deltaURLs[cID] = newDelta.URL
 	}

+	if qp.Category != path.EventsCategory {
+		locPath = nil
+	}
+
 	edc := NewCollection(
 		qp.ResourceOwner,
 		currPath,
 		prevPath,
+		locPath,
 		scope.Category().PathType(),
 		ibt,
 		statusUpdater,

@@ -167,6 +172,7 @@ func filterContainersAndFillCollections(
 		qp.ResourceOwner,
 		nil, // marks the collection as deleted
 		prevPath,
+		nil, // tombstones don't need a location
 		scope.Category().PathType(),
 		ibt,
 		statusUpdater,
@@ -59,6 +59,7 @@ var _ graph.ContainerResolver = &mockResolver{}
 type (
 	mockResolver struct {
 		items []graph.CachedContainer
+		added map[string]string
 	}
 )

@@ -76,10 +77,21 @@ func (m mockResolver) Items() []graph.CachedContainer {
 	return m.items
 }

-func (m mockResolver) AddToCache(context.Context, graph.Container) error { return nil }
-func (m mockResolver) IDToPath(context.Context, string) (*path.Builder, error) { return nil, nil }
-func (m mockResolver) PathInCache(string) (string, bool) { return "", false }
-func (m mockResolver) Populate(context.Context, string, ...string) error { return nil }
+func (m mockResolver) AddToCache(ctx context.Context, gc graph.Container, b bool) error {
+	if len(m.added) == 0 {
+		m.added = map[string]string{}
+	}
+
+	m.added[*gc.GetDisplayName()] = *gc.GetId()
+
+	return nil
+}
+
+func (m mockResolver) DestinationNameToID(dest string) string { return m.added[dest] }
+
+func (m mockResolver) IDToPath(context.Context, string, bool) (*path.Builder, *path.Builder, error) {
+	return nil, nil, nil
+}
+
+func (m mockResolver) PathInCache(string) (string, bool)                  { return "", false }
+func (m mockResolver) Populate(context.Context, string, ...string) error { return nil }

 // ---------------------------------------------------------------------------
 // tests
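The added map turns the mock into a small name-to-ID registry for containers registered during restore. A self-contained sketch of that registry pattern follows; the types are stand-ins, and it deliberately uses a pointer receiver so the lazily created map survives the call (a detail worth watching with value receivers like the mock's).

package main

import "fmt"

// resolver caches display-name -> M365 ID mappings for containers created
// during a restore, mirroring the mock's added map.
type resolver struct {
	added map[string]string
}

// AddToCache lazily builds the map; the pointer receiver makes the new map
// visible to later calls.
func (r *resolver) AddToCache(displayName, id string) {
	if r.added == nil {
		r.added = map[string]string{}
	}

	r.added[displayName] = id
}

// DestinationNameToID returns "" when the name was never registered;
// lookups on a nil map are safe in Go and return the zero value.
func (r *resolver) DestinationNameToID(dest string) string { return r.added[dest] }

func main() {
	r := &resolver{}
	r.AddToCache("Corso_Restore", "AAMk-folder-id")

	fmt.Println(r.DestinationNameToID("Corso_Restore"))  // AAMk-folder-id
	fmt.Println(r.DestinationNameToID("missing") == "") // true
}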
@@ -342,7 +342,7 @@ func RestoreExchangeDataCollections(
 		userCaches = directoryCaches[userID]
 	}

-	containerID, err := CreateContainerDestinaion(
+	containerID, err := CreateContainerDestination(
 		ctx,
 		creds,
 		dc.FullPath(),

@@ -398,7 +398,7 @@ func restoreCollection(
 		ctx,
 		category.String(),
 		observe.PII(user),
-		observe.PII(directory.Folder()))
+		observe.PII(directory.Folder(false)))
 	defer closer()
 	defer close(colProgress)

@@ -445,10 +445,16 @@ func restoreCollection(
 			continue
 		}

+		var locationRef string
+		if category == path.ContactsCategory {
+			locationRef = itemPath.Folder(false)
+		}
+
 		deets.Add(
 			itemPath.String(),
 			itemPath.ShortRef(),
 			"",
+			locationRef,
 			true,
 			details.ItemInfo{
 				Exchange: info,

@@ -459,12 +465,12 @@
 	}
 }

-// CreateContainerDestinaion builds the destination into the container
+// CreateContainerDestination builds the destination into the container
 // at the provided path. As a precondition, the destination cannot
 // already exist. If it does then an error is returned. The provided
 // containerResolver is updated with the new destination.
 // @ returns the container ID of the new destination container.
-func CreateContainerDestinaion(
+func CreateContainerDestination(
 	ctx context.Context,
 	creds account.M365Config,
 	directory path.Path,

@@ -476,7 +482,6 @@ func CreateContainerDestinaion(
 	user           = directory.ResourceOwner()
 	category       = directory.Category()
 	directoryCache = caches[category]
-	newPathFolders = append([]string{destination}, directory.Folders()...)
 )

 // TODO(rkeepers): pass the api client into this func, rather than generating one.

@@ -487,6 +492,8 @@
 switch category {
 case path.EmailCategory:
+	folders := append([]string{destination}, directory.Folders()...)
+
 	if directoryCache == nil {
 		acm := ac.Mail()
 		mfc := &mailFolderCache{

@@ -503,12 +510,14 @@
 	return establishMailRestoreLocation(
 		ctx,
 		ac,
-		newPathFolders,
+		folders,
 		directoryCache,
 		user,
 		newCache)

 case path.ContactsCategory:
+	folders := append([]string{destination}, directory.Folders()...)
+
 	if directoryCache == nil {
 		acc := ac.Contacts()
 		cfc := &contactFolderCache{

@@ -524,12 +533,14 @@
 	return establishContactsRestoreLocation(
 		ctx,
 		ac,
-		newPathFolders,
+		folders,
 		directoryCache,
 		user,
 		newCache)

 case path.EventsCategory:
+	dest := destination
+
 	if directoryCache == nil {
 		ace := ac.Events()
 		ecc := &eventCalendarCache{

@@ -540,16 +551,23 @@
 		caches[category] = ecc
 		newCache = true
 		directoryCache = ecc
+	} else if did := directoryCache.DestinationNameToID(dest); len(did) > 0 {
+		// calendars are cached by ID in the resolver, not name, so once we have
+		// created the destination calendar, we need to look up its id and use
+		// that for resolver lookups instead of the display name.
+		dest = did
 	}

+	folders := append([]string{dest}, directory.Folders()...)
+
 	return establishEventsRestoreLocation(
 		ctx,
 		ac,
-		newPathFolders,
+		folders,
 		directoryCache,
 		user,
-		newCache,
-	)
+		newCache)

 default:
 	return "", fmt.Errorf("category: %s not support for exchange cache", category)
 }

@@ -602,7 +620,7 @@ func establishMailRestoreLocation(
 	}

 	// NOOP if the folder is already in the cache.
-	if err = mfc.AddToCache(ctx, temp); err != nil {
+	if err = mfc.AddToCache(ctx, temp, false); err != nil {
 		return "", errors.Wrap(err, "adding folder to cache")
 	}
 }

@@ -641,7 +659,7 @@ func establishContactsRestoreLocation(
 		return "", errors.Wrap(err, "populating contact cache")
 	}

-	if err = cfc.AddToCache(ctx, temp); err != nil {
+	if err = cfc.AddToCache(ctx, temp, false); err != nil {
 		return "", errors.Wrap(err, "adding contact folder to cache")
 	}
 }

@@ -658,10 +676,7 @@ func establishEventsRestoreLocation(
 	isNewCache bool,
 ) (string, error) {
-	// Need to prefix with the "Other Calendars" folder so lookup happens properly.
-	cached, ok := ecc.PathInCache(path.Builder{}.Append(
-		calendarOthersFolder,
-		folders[0],
-	).String())
+	cached, ok := ecc.PathInCache(folders[0])
 	if ok {
 		return cached, nil
 	}

@@ -679,7 +694,7 @@ func establishEventsRestoreLocation(
 	}

 	displayable := api.CalendarDisplayable{Calendarable: temp}
-	if err = ecc.AddToCache(ctx, displayable); err != nil {
+	if err = ecc.AddToCache(ctx, displayable, true); err != nil {
 		return "", errors.Wrap(err, "adding new calendar to cache")
 	}
 }
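One subtlety worth a sketch: calendars are cached by ID rather than display name, so the destination's name only resolves once the calendar exists and has been added to the cache; DestinationNameToID bridges the first creation and every later lookup. The condensed flow below uses hypothetical stand-in types and a fake ID scheme, not the Graph API, and only models the name-then-ID switch.

package main

import "fmt"

type calendarCache struct {
	byID   map[string]bool   // calendars cached by their M365 ID
	byName map[string]string // destination display name -> M365 ID
}

// ensureDestination returns the cache key to use for the restore folder:
// the display name on first creation, the calendar ID afterwards.
func (c *calendarCache) ensureDestination(name string) string {
	if id, ok := c.byName[name]; ok {
		// Already created: switch to ID-based lookups, matching how the
		// resolver caches calendars.
		return id
	}

	id := "cal-id-" + name // stand-in for the Graph create-calendar call
	c.byID[id] = true
	c.byName[name] = id

	return name
}

func main() {
	c := &calendarCache{byID: map[string]bool{}, byName: map[string]string{}}

	fmt.Println(c.ensureDestination("Corso_Restore")) // Corso_Restore (first call creates)
	fmt.Println(c.ensureDestination("Corso_Restore")) // cal-id-Corso_Restore (later calls use the ID)
}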
@@ -1,41 +1,84 @@
 package graph

 import (
-	"github.com/alcionai/clues"
+	"context"
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
 	"github.com/pkg/errors"

 	"github.com/alcionai/corso/src/pkg/path"
 )

+// Idable represents objects that implement msgraph-sdk-go/models.entityable
+// and have the concept of an ID.
+type Idable interface {
+	GetId() *string
+}
+
+// Descendable represents objects that implement msgraph-sdk-go/models.entityable
+// and have the concept of a "parent folder".
+type Descendable interface {
+	Idable
+	GetParentFolderId() *string
+}
+
+// Displayable represents objects that implement msgraph-sdk-go/models.entityable
+// and have the concept of a display name.
+type Displayable interface {
+	Idable
+	GetDisplayName() *string
+}
+
+type Container interface {
+	Descendable
+	Displayable
+}
+
 // CachedContainer is used for local unit tests but also makes it so that this
 // code can be broken into generic- and service-specific chunks later on to
 // reuse logic in IDToPath.
 type CachedContainer interface {
 	Container
+	// Location contains either the display names for the dirs (if this is a calendar)
+	// or nil
+	Location() *path.Builder
+	SetLocation(*path.Builder)
+	// Path contains either the ids for the dirs (if this is a calendar)
+	// or the display names for the dirs
 	Path() *path.Builder
 	SetPath(*path.Builder)
 }

-// checkRequiredValues is a helper function to ensure that
-// all the pointers are set prior to being called.
-func CheckRequiredValues(c Container) error {
-	idPtr := c.GetId()
-	if idPtr == nil || len(*idPtr) == 0 {
-		return errors.New("folder without ID")
-	}
-
-	ptr := c.GetDisplayName()
-	if ptr == nil || len(*ptr) == 0 {
-		return clues.New("folder missing display name").With("container_id", *idPtr)
-	}
-
-	ptr = c.GetParentFolderId()
-	if ptr == nil || len(*ptr) == 0 {
-		return clues.New("folder missing parent ID").With("container_parent_id", *idPtr)
-	}
-
-	return nil
+// ContainerResolver houses functions for getting information about containers
+// from remote APIs (i.e. resolve folder paths with Graph API). Resolvers may
+// cache information about containers.
+type ContainerResolver interface {
+	// IDToPath takes an m365 container ID and converts it to a hierarchical path
+	// to that container. The path has a similar format to paths on the local
+	// file system.
+	IDToPath(ctx context.Context, m365ID string, useIDInPath bool) (*path.Builder, *path.Builder, error)
+	// Populate performs initialization steps for the resolver
+	// @param ctx is necessary param for Graph API tracing
+	// @param baseFolderID represents the M365ID base that the resolver will
+	// conclude its search. Default input is "".
+	Populate(ctx context.Context, baseFolderID string, baseContainerPather ...string) error
+	// PathInCache performs a look up of a path reprensentation
+	// and returns the m365ID of directory iff the pathString
+	// matches the path of a container within the cache.
+	// @returns bool represents if m365ID was found.
+	PathInCache(pathString string) (string, bool)
+	AddToCache(ctx context.Context, m365Container Container, useIDInPath bool) error
+	// DestinationNameToID returns the ID of the destination container. Dest is
+	// assumed to be a display name. The ID is only populated if the destination
+	// was added using `AddToCache()`. Returns an empty string if not found.
+	DestinationNameToID(dest string) string
+	// Items returns the containers in the cache.
+	Items() []CachedContainer
 }
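From a caller's perspective the updated contract is: populate the resolver, then ask for both representations of a container. A hedged, self-contained sketch of the new IDToPath shape follows; string slices stand in for *path.Builder, and canned data stands in for the Graph walk, so none of these names are the real implementation.

package main

import (
	"context"
	"fmt"
)

// idToPath mirrors the updated resolver contract: given a container ID it
// returns both the storage path (IDs when useIDInPath is true) and the
// human-readable location (display names).
func idToPath(ctx context.Context, id string, useIDInPath bool) (storage, location []string, err error) {
	// Stand-in data; the real resolver walks parent-folder links via Graph.
	names := map[string][]string{"cal-1": {"Calendars", "Team Events"}}

	loc, ok := names[id]
	if !ok {
		return nil, nil, fmt.Errorf("container %s not cached", id)
	}

	if useIDInPath {
		return []string{id}, loc, nil
	}

	return loc, loc, nil
}

func main() {
	p, l, err := idToPath(context.Background(), "cal-1", true)
	fmt.Println(p, l, err) // [cal-1] [Calendars Team Events] <nil>
}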
 // ======================================

@@ -46,13 +89,15 @@ var _ CachedContainer = &CacheFolder{}
 type CacheFolder struct {
 	Container
+	l *path.Builder
 	p *path.Builder
 }

 // NewCacheFolder public constructor for struct
-func NewCacheFolder(c Container, pb *path.Builder) CacheFolder {
+func NewCacheFolder(c Container, pb, lpb *path.Builder) CacheFolder {
 	cf := CacheFolder{
 		Container: c,
+		l:         lpb,
 		p:         pb,
 	}

@@ -63,6 +108,14 @@ func NewCacheFolder(c Container, pb, lpb *path.Builder) CacheFolder {
 // Required Functions to satisfy interfaces
 // =========================================

+func (cf CacheFolder) Location() *path.Builder {
+	return cf.l
+}
+
+func (cf *CacheFolder) SetLocation(newLocation *path.Builder) {
+	cf.l = newLocation
+}
+
 func (cf CacheFolder) Path() *path.Builder {
 	return cf.p
 }

@@ -109,3 +162,28 @@ func CreateCalendarDisplayable(entry any, parentID string) *CalendarDisplayable
 		parentID: parentID,
 	}
 }

+// =========================================
+// helper funcs
+// =========================================
+
+// checkRequiredValues is a helper function to ensure that
+// all the pointers are set prior to being called.
+func CheckRequiredValues(c Container) error {
+	idPtr := c.GetId()
+	if idPtr == nil || len(*idPtr) == 0 {
+		return errors.New("folder without ID")
+	}
+
+	ptr := c.GetDisplayName()
+	if ptr == nil || len(*ptr) == 0 {
+		return errors.Errorf("folder %s without display name", *idPtr)
+	}
+
+	ptr = c.GetParentFolderId()
+	if ptr == nil || len(*ptr) == 0 {
+		return errors.Errorf("folder %s without parent ID", *idPtr)
+	}
+
+	return nil
+}
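CheckRequiredValues reduces to three nil/empty guards; the same logic in stand-alone form, with plain string pointers standing in for the msgraph models interfaces:

package main

import (
	"errors"
	"fmt"
)

// checkRequiredValues in miniature: every container needs an ID, a display
// name, and a parent ID before it can be cached.
func checkRequiredValues(id, displayName, parentID *string) error {
	if id == nil || len(*id) == 0 {
		return errors.New("folder without ID")
	}

	if displayName == nil || len(*displayName) == 0 {
		return fmt.Errorf("folder %s without display name", *id)
	}

	if parentID == nil || len(*parentID) == 0 {
		return fmt.Errorf("folder %s without parent ID", *id)
	}

	return nil
}

func main() {
	id := "folder-1"
	fmt.Println(checkRequiredValues(&id, nil, nil)) // folder folder-1 without display name
}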
@@ -151,7 +151,7 @@ func (md MetadataCollection) Items() <-chan data.Stream {
 			TotalBytes: totalBytes,
 		},
 		nil,
-		md.fullPath.Folder(),
+		md.fullPath.Folder(false),
 	)

 	md.statusUpdater(status)
@@ -1,7 +1,6 @@
 package graph

 import (
-	"context"
 	"net/http"
 	"net/http/httputil"
 	"os"

@@ -173,57 +172,6 @@ type Servicer interface {
 	Adapter() *msgraphsdk.GraphRequestAdapter
 }

-// Idable represents objects that implement msgraph-sdk-go/models.entityable
-// and have the concept of an ID.
-type Idable interface {
-	GetId() *string
-}
-
-// Descendable represents objects that implement msgraph-sdk-go/models.entityable
-// and have the concept of a "parent folder".
-type Descendable interface {
-	Idable
-	GetParentFolderId() *string
-}
-
-// Displayable represents objects that implement msgraph-sdk-go/models.entityable
-// and have the concept of a display name.
-type Displayable interface {
-	Idable
-	GetDisplayName() *string
-}
-
-type Container interface {
-	Descendable
-	Displayable
-}
-
-// ContainerResolver houses functions for getting information about containers
-// from remote APIs (i.e. resolve folder paths with Graph API). Resolvers may
-// cache information about containers.
-type ContainerResolver interface {
-	// IDToPath takes an m365 container ID and converts it to a hierarchical path
-	// to that container. The path has a similar format to paths on the local
-	// file system.
-	IDToPath(ctx context.Context, m365ID string) (*path.Builder, error)
-	// Populate performs initialization steps for the resolver
-	// @param ctx is necessary param for Graph API tracing
-	// @param baseFolderID represents the M365ID base that the resolver will
-	// conclude its search. Default input is "".
-	Populate(ctx context.Context, baseFolderID string, baseContainerPather ...string) error
-	// PathInCache performs a look up of a path reprensentation
-	// and returns the m365ID of directory iff the pathString
-	// matches the path of a container within the cache.
-	// @returns bool represents if m365ID was found.
-	PathInCache(pathString string) (string, bool)
-	AddToCache(ctx context.Context, m365Container Container) error
-	// Items returns the containers in the cache.
-	Items() []CachedContainer
-}
-
 // ---------------------------------------------------------------------------
 // Client Middleware
 // ---------------------------------------------------------------------------
@@ -1013,9 +1013,9 @@ func collectionsForInfo(
 		user,
 		info.category,
 		info.pathElements,
-		false,
-	)
+		false)

-	mc := mockconnector.NewMockExchangeCollection(pth, len(info.items))
+	mc := mockconnector.NewMockExchangeCollection(pth, pth, len(info.items))
 	baseDestPath := backupOutputPathFromRestore(t, dest, pth)

 	baseExpected := expectedData[baseDestPath.String()]

@@ -1076,7 +1076,7 @@ func collectionsForInfoVersion0(
 		info.pathElements,
 		false,
 	)
-	c := mockconnector.NewMockExchangeCollection(pth, len(info.items))
+	c := mockconnector.NewMockExchangeCollection(pth, pth, len(info.items))
 	baseDestPath := backupOutputPathFromRestore(t, dest, pth)

 	baseExpected := expectedData[baseDestPath.String()]
@@ -16,6 +16,7 @@ import (
 // MockExchangeDataCollection represents a mock exchange mailbox
 type MockExchangeDataCollection struct {
 	fullPath     path.Path
+	LocPath      path.Path
 	messageCount int
 	Data         [][]byte
 	Names        []string

@@ -35,9 +36,14 @@ var (
 // NewMockExchangeDataCollection creates an data collection that will return the specified number of
 // mock messages when iterated. Exchange type mail
-func NewMockExchangeCollection(pathRepresentation path.Path, numMessagesToReturn int) *MockExchangeDataCollection {
+func NewMockExchangeCollection(
+	storagePath path.Path,
+	locationPath path.Path,
+	numMessagesToReturn int,
+) *MockExchangeDataCollection {
 	c := &MockExchangeDataCollection{
-		fullPath:     pathRepresentation,
+		fullPath:     storagePath,
+		LocPath:      locationPath,
 		messageCount: numMessagesToReturn,
 		Data:         [][]byte{},
 		Names:        []string{},

@@ -93,21 +99,11 @@ func NewMockContactCollection(pathRepresentation path.Path, numMessagesToReturn
 	return c
 }

-func (medc *MockExchangeDataCollection) FullPath() path.Path {
-	return medc.fullPath
-}
-
-func (medc MockExchangeDataCollection) PreviousPath() path.Path {
-	return medc.PrevPath
-}
-
-func (medc MockExchangeDataCollection) State() data.CollectionState {
-	return medc.ColState
-}
-
-func (medc MockExchangeDataCollection) DoNotMergeItems() bool {
-	return medc.DoNotMerge
-}
+func (medc MockExchangeDataCollection) FullPath() path.Path          { return medc.fullPath }
+func (medc MockExchangeDataCollection) LocationPath() path.Path     { return medc.LocPath }
+func (medc MockExchangeDataCollection) PreviousPath() path.Path     { return medc.PrevPath }
+func (medc MockExchangeDataCollection) State() data.CollectionState { return medc.ColState }
+func (medc MockExchangeDataCollection) DoNotMergeItems() bool       { return medc.DoNotMerge }

 // Items returns a channel that has the next items in the collection. The
 // channel is closed when there are no more items available.
@@ -25,7 +25,7 @@ func TestMockExchangeCollectionSuite(t *testing.T) {
 }

 func (suite *MockExchangeCollectionSuite) TestMockExchangeCollection() {
-	mdc := mockconnector.NewMockExchangeCollection(nil, 2)
+	mdc := mockconnector.NewMockExchangeCollection(nil, nil, 2)

 	messagesRead := 0

@@ -40,7 +40,7 @@ func (suite *MockExchangeCollectionSuite) TestMockExchangeCollection() {
 func (suite *MockExchangeCollectionSuite) TestMockExchangeCollectionItemSize() {
 	t := suite.T()

-	mdc := mockconnector.NewMockExchangeCollection(nil, 2)
+	mdc := mockconnector.NewMockExchangeCollection(nil, nil, 2)
 	mdc.Data[1] = []byte("This is some buffer of data so that the size is different than the default")

@@ -58,7 +58,7 @@ func (suite *MockExchangeCollectionSuite) TestMockExchangeCollectionItemSize() {
 // functions by verifying no failures on (de)serializing steps using kiota serialization library
 func (suite *MockExchangeCollectionSuite) TestMockExchangeCollection_NewExchangeCollectionMail_Hydration() {
 	t := suite.T()
-	mdc := mockconnector.NewMockExchangeCollection(nil, 3)
+	mdc := mockconnector.NewMockExchangeCollection(nil, nil, 3)
 	buf := &bytes.Buffer{}

 	for stream := range mdc.Items() {
@@ -439,7 +439,7 @@ func (oc *Collection) reportAsCompleted(ctx context.Context, itemsFound, itemsRe
 			TotalBytes: byteCount, // Number of bytes read in the operation,
 		},
 		errs,
-		oc.folderPath.Folder(), // Additional details
+		oc.folderPath.Folder(false), // Additional details
 	)

 	logger.Ctx(ctx).Debugw("done streaming items", "status", status.String())
 	oc.statusUpdater(status)
@@ -213,7 +213,7 @@ func RestoreCollection(
 	trace.Log(ctx, "gc:oneDrive:restoreCollection", directory.String())
 	logger.Ctx(ctx).Infow(
 		"restoring to destination",
-		"origin", dc.FullPath().Folder(),
+		"origin", dc.FullPath().Folder(false),
 		"destination", restoreFolderElements)

 	parentPerms, colPerms, err := getParentAndCollectionPermissions(

@@ -284,7 +284,13 @@ func RestoreCollection(
 		continue
 	}

-	deets.Add(itemPath.String(), itemPath.ShortRef(), "", true, itemInfo)
+	deets.Add(
+		itemPath.String(),
+		itemPath.ShortRef(),
+		"",
+		"", // TODO: implement locationRef
+		true,
+		itemInfo)

 	// Mark it as success without processing .meta
 	// file if we are not restoring permissions

@@ -371,7 +377,13 @@ func RestoreCollection(
 		continue
 	}

-	deets.Add(itemPath.String(), itemPath.ShortRef(), "", true, itemInfo)
+	deets.Add(
+		itemPath.String(),
+		itemPath.ShortRef(),
+		"",
+		"", // TODO: implement locationRef
+		true,
+		itemInfo)
 	metrics.Successes++
 	}
 }
@@ -167,7 +167,7 @@ func (sc *Collection) finishPopulation(ctx context.Context, attempts, success in
 			TotalBytes: totalBytes,
 		},
 		errs,
-		sc.fullPath.Folder())
+		sc.fullPath.Folder(false))

 	logger.Ctx(ctx).Debug(status.String())

 	if sc.statusUpdater != nil {

@@ -191,7 +191,7 @@ func (sc *Collection) populate(ctx context.Context) {
 		ctx,
 		sc.fullPath.Category().String(),
 		observe.Safe("name"),
-		observe.PII(sc.fullPath.Folder()))
+		observe.PII(sc.fullPath.Folder(false)))

 	go closer()

 	defer func() {
@@ -276,6 +276,7 @@ func RestoreListCollection(
 		itemPath.String(),
 		itemPath.ShortRef(),
 		"",
+		"", // TODO: implement locationRef
 		true,
 		itemInfo)

@@ -355,6 +356,7 @@ func RestorePageCollection(
 		itemPath.String(),
 		itemPath.ShortRef(),
 		"",
+		"", // TODO: implement locationRef
 		true,
 		itemInfo,
 	)
@@ -96,6 +96,12 @@ type Stream interface {
 	Deleted() bool
 }

+// LocationPather provides a LocationPath describing the path with Display Names
+// instead of canonical IDs
+type LocationPather interface {
+	LocationPath() path.Path
+}
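Producers opt in by implementing LocationPath; consumers feature-detect it with a type assertion, exactly the `streamedEnts.(data.LocationPather)` pattern used in the kopia changes below. A stand-alone sketch with a string standing in for path.Path:

package main

import "fmt"

type locationPather interface {
	LocationPath() string
}

// collection is a stand-in BackupCollection that carries a display-name path.
type collection struct{ loc string }

func (c collection) LocationPath() string { return c.loc }

// describe feature-detects the optional interface, so collections that never
// implement it keep working unchanged.
func describe(v any) string {
	if lp, ok := v.(locationPather); ok {
		return "location: " + lp.LocationPath()
	}

	return "no location available"
}

func main() {
	fmt.Println(describe(collection{loc: "Inbox/Receipts"}))
	fmt.Println(describe(struct{}{}))
}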
 // StreamInfo is used to provide service specific
 // information about the Stream
 type StreamInfo interface {

@@ -124,7 +130,7 @@ func StateOf(prev, curr path.Path) CollectionState {
 		return NewState
 	}

-	if curr.Folder() != prev.Folder() {
+	if curr.Folder(false) != prev.Folder(false) {
 		return MovedState
 	}
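The branches visible in this hunk reduce to a small decision table. A condensed, stand-alone rendering, with strings standing in for path.Path and only the branches shown here modeled:

package main

import "fmt"

type state string

const (
	newState      state = "new"
	movedState    state = "moved"
	notMovedState state = "not-moved"
)

// stateOf condenses data.StateOf: no previous path means a new collection,
// a changed folder means a move, otherwise the collection hasn't moved.
func stateOf(prev, curr string) state {
	if prev == "" {
		return newState
	}

	if curr != prev {
		return movedState
	}

	return notMovedState
}

func main() {
	fmt.Println(stateOf("", "Inbox"))        // new
	fmt.Println(stateOf("Inbox", "Archive")) // moved
	fmt.Println(stateOf("Inbox", "Inbox"))   // not-moved
}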
@@ -123,10 +123,11 @@ func (rw *restoreStreamReader) Read(p []byte) (n int, err error) {
 }

 type itemDetails struct {
 	info         *details.ItemInfo
 	repoPath     path.Path
 	prevPath     path.Path
+	locationPath path.Path
 	cached       bool
 }

 type corsoProgress struct {

@@ -135,7 +136,7 @@ type corsoProgress struct {
 	deets *details.Builder
 	// toMerge represents items that we don't have in-memory item info for. The
 	// item info for these items should be sourced from a base snapshot later on.
-	toMerge    map[string]path.Path
+	toMerge    map[string]PrevRefs
 	mu         sync.RWMutex
 	totalBytes int64
 	errs       *fault.Errors

@@ -180,27 +181,45 @@ func (cp *corsoProgress) FinishedFile(relativePath string, err error) {
 		cp.mu.Lock()
 		defer cp.mu.Unlock()

-		cp.toMerge[d.prevPath.ShortRef()] = d.repoPath
+		cp.toMerge[d.prevPath.ShortRef()] = PrevRefs{
+			Repo:     d.repoPath,
+			Location: d.locationPath,
+		}

 		return
 	}

-	parent := d.repoPath.ToBuilder().Dir()
+	var (
+		locationFolders string
+		locPB           *path.Builder
+		parent          = d.repoPath.ToBuilder().Dir()
+	)
+
+	if d.locationPath != nil {
+		locationFolders = d.locationPath.Folder(true)
+		locPB = d.locationPath.ToBuilder()
+
+		// folderEntriesForPath assumes the location will
+		// not have an item element appended
+		if len(d.locationPath.Item()) > 0 {
+			locPB = locPB.Dir()
+		}
+	}

 	cp.deets.Add(
 		d.repoPath.String(),
 		d.repoPath.ShortRef(),
 		parent.ShortRef(),
+		locationFolders,
 		!d.cached,
-		*d.info,
-	)
+		*d.info)

-	folders := details.FolderEntriesForPath(parent)
+	folders := details.FolderEntriesForPath(parent, locPB)
 	cp.deets.AddFoldersForItem(
 		folders,
 		*d.info,
-		!d.cached,
-	)
+		!d.cached)
 }
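FinishedFile now records the display-name folder alongside the repo path, popping a trailing item element off the location first because folderEntriesForPath expects folders only. A stand-in sketch of that trim, with a string slice in place of path.Builder:

package main

import (
	"fmt"
	"strings"
)

// dir drops the last element, mirroring path.Builder.Dir().
func dir(elems []string) []string { return elems[:len(elems)-1] }

// locationFolders mirrors the FinishedFile logic: folderEntriesForPath
// assumes no item element, so one is popped off when present.
func locationFolders(location []string, hasItem bool) []string {
	if hasItem {
		location = dir(location)
	}

	return location
}

func main() {
	loc := []string{"Inbox", "Receipts", "mail-item-1"}
	fmt.Println(strings.Join(locationFolders(loc, true), "/")) // Inbox/Receipts
}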
 // Kopia interface function used as a callback when kopia finishes hashing a file.

@@ -263,12 +282,17 @@ func collectionEntries(
 	}

 	var (
+		locationPath path.Path
 		// Track which items have already been seen so we can skip them if we see
 		// them again in the data from the base snapshot.
 		seen  = map[string]struct{}{}
 		items = streamedEnts.Items()
 	)

+	if lp, ok := streamedEnts.(data.LocationPather); ok {
+		locationPath = lp.LocationPath()
+	}
+
 	for {
 		select {
 		case <-ctx.Done():

@@ -328,7 +352,11 @@ func collectionEntries(
 			// previous snapshot then we should populate prevPath here and leave
 			// info nil.
 			itemInfo := ei.Info()
-			d := &itemDetails{info: &itemInfo, repoPath: itemPath}
+			d := &itemDetails{
+				info:         &itemInfo,
+				repoPath:     itemPath,
+				locationPath: locationPath,
+			}
 			progress.put(encodeAsPath(itemPath.PopFront().Elements()...), d)
 		}

@@ -356,6 +384,7 @@ func streamBaseEntries(
 	cb func(context.Context, fs.Entry) error,
 	curPath path.Path,
 	prevPath path.Path,
+	locationPath path.Path,
 	dir fs.Directory,
 	encodedSeen map[string]struct{},
 	globalExcludeSet map[string]struct{},

@@ -411,7 +440,12 @@ func streamBaseEntries(
 		// All items have item info in the base backup. However, we need to make
 		// sure we have enough metadata to find those entries. To do that we add the
 		// item to progress and having progress aggregate everything for later.
-		d := &itemDetails{info: nil, repoPath: itemPath, prevPath: prevItemPath}
+		d := &itemDetails{
+			info:         nil,
+			repoPath:     itemPath,
+			prevPath:     prevItemPath,
+			locationPath: locationPath,
+		}
 		progress.put(encodeAsPath(itemPath.PopFront().Elements()...), d)

 		if err := cb(ctx, entry); err != nil {

@@ -455,6 +489,12 @@ func getStreamItemFunc(
 		}
 	}

+	var locationPath path.Path
+	if lp, ok := streamedEnts.(data.LocationPather); ok {
+		locationPath = lp.LocationPath()
+	}
+
 	seen, err := collectionEntries(ctx, cb, streamedEnts, progress)
 	if err != nil {
 		return errors.Wrap(err, "streaming collection entries")

@@ -465,6 +505,7 @@ func getStreamItemFunc(
 		cb,
 		curPath,
 		prevPath,
+		locationPath,
 		baseDir,
 		seen,
 		globalExcludeSet,

@@ -533,6 +574,7 @@ type treeMap struct {
 	// Previous path this directory may have resided at if it is sourced from a
 	// base snapshot.
 	prevPath path.Path
+
 	// Child directories of this directory.
 	childDirs map[string]*treeMap
 	// Reference to data pulled from the external service. Contains only items in
File diff suppressed because it is too large.
@@ -114,6 +114,13 @@ type IncrementalBase struct {
 	SubtreePaths []*path.Builder
 }

+// PrevRefs hold the repoRef and locationRef from the items
+// that need to be merged in from prior snapshots.
+type PrevRefs struct {
+	Repo     path.Path
+	Location path.Path
+}
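Merged items are keyed by the previous path's ShortRef, and each key now carries both refs forward. A self-contained sketch of the map's shape; the shortRef hash here is a stand-in, not corso's implementation:

package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

// prevRefs pairs the repo path with the optional human-readable location,
// mirroring kopia.PrevRefs.
type prevRefs struct {
	repo     string
	location string
}

// shortRef is a stand-in for path.Path.ShortRef(): a stable short hash.
func shortRef(p string) string {
	sum := sha256.Sum256([]byte(p))
	return hex.EncodeToString(sum[:6])
}

func main() {
	toMerge := map[string]prevRefs{}

	prevPath := "tenant/exchange/user/email/id-1/item-1"
	toMerge[shortRef(prevPath)] = prevRefs{
		repo:     "tenant/exchange/user/email/id-2/item-1", // item moved folders
		location: "Inbox/Receipts/item-1",
	}

	if refs, ok := toMerge[shortRef(prevPath)]; ok {
		fmt.Println(refs.repo, refs.location)
	}
}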
 // BackupCollections takes a set of collections and creates a kopia snapshot
 // with the data that they contain. previousSnapshots is used for incremental
 // backups and should represent the base snapshot from which metadata is sourced

@@ -128,7 +135,7 @@ func (w Wrapper) BackupCollections(
 	tags map[string]string,
 	buildTreeWithBase bool,
 	errs *fault.Errors,
-) (*BackupStats, *details.Builder, map[string]path.Path, error) {
+) (*BackupStats, *details.Builder, map[string]PrevRefs, error) {
 	if w.c == nil {
 		return nil, nil, nil, clues.Stack(errNotConnected).WithClues(ctx)
 	}

@@ -143,7 +150,7 @@ func (w Wrapper) BackupCollections(
 	progress := &corsoProgress{
 		pending: map[string]*itemDetails{},
 		deets:   &details.Builder{},
-		toMerge: map[string]path.Path{},
+		toMerge: map[string]PrevRefs{},
 		errs:    errs,
 	}
@@ -27,7 +27,9 @@ import (
 const (
 	testTenant = "a-tenant"
 	testUser   = "user1"
+	testInboxID    = "Inbox_ID"
 	testInboxDir   = "Inbox"
+	testArchiveID  = "Archive_ID"
 	testArchiveDir = "Archive"
 	testFileName   = "file1"
 	testFileName2  = "file2"

@@ -144,8 +146,10 @@ type KopiaIntegrationSuite struct {
 	ctx   context.Context
 	flush func()

-	testPath1 path.Path
-	testPath2 path.Path
+	storePath1 path.Path
+	storePath2 path.Path
+	locPath1   path.Path
+	locPath2   path.Path
 }

 func TestKopiaIntegrationSuite(t *testing.T) {

@@ -164,21 +168,21 @@ func (suite *KopiaIntegrationSuite) SetupSuite() {
 		testTenant,
 		testUser,
 		path.EmailCategory,
-		false,
-	)
+		false)
 	require.NoError(suite.T(), err)

-	suite.testPath1 = tmp
+	suite.storePath1 = tmp
+	suite.locPath1 = tmp

 	tmp, err = path.Builder{}.Append(testArchiveDir).ToDataLayerExchangePathForCategory(
 		testTenant,
 		testUser,
 		path.EmailCategory,
-		false,
-	)
+		false)
 	require.NoError(suite.T(), err)

-	suite.testPath2 = tmp
+	suite.storePath2 = tmp
+	suite.locPath2 = tmp
 }

 func (suite *KopiaIntegrationSuite) SetupTest() {

@@ -199,13 +203,13 @@ func (suite *KopiaIntegrationSuite) TearDownTest() {
 func (suite *KopiaIntegrationSuite) TestBackupCollections() {
 	collections := []data.BackupCollection{
 		mockconnector.NewMockExchangeCollection(
-			suite.testPath1,
-			5,
-		),
+			suite.storePath1,
+			suite.locPath1,
+			5),
 		mockconnector.NewMockExchangeCollection(
-			suite.testPath2,
-			42,
-		),
+			suite.storePath2,
+			suite.locPath2,
+			42),
 	}

 	// tags that are supplied by the caller. This includes basic tags to support

@@ -217,14 +221,14 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
 	reasons := []Reason{
 		{
-			ResourceOwner: suite.testPath1.ResourceOwner(),
-			Service:       suite.testPath1.Service(),
-			Category:      suite.testPath1.Category(),
+			ResourceOwner: suite.storePath1.ResourceOwner(),
+			Service:       suite.storePath1.Service(),
+			Category:      suite.storePath1.Category(),
 		},
 		{
-			ResourceOwner: suite.testPath2.ResourceOwner(),
-			Service:       suite.testPath2.Service(),
-			Category:      suite.testPath2.Category(),
+			ResourceOwner: suite.storePath2.ResourceOwner(),
+			Service:       suite.storePath2.Service(),
+			Category:      suite.storePath2.Category(),
 		},
 	}

@@ -311,7 +315,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
 	prevSnaps = append(prevSnaps, IncrementalBase{
 		Manifest: snap,
 		SubtreePaths: []*path.Builder{
-			suite.testPath1.ToBuilder().Dir(),
+			suite.storePath1.ToBuilder().Dir(),
 		},
 	})
 })

@@ -342,13 +346,13 @@ func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() {
 		tags[k] = ""
 	}

-	dc1 := mockconnector.NewMockExchangeCollection(suite.testPath1, 1)
-	dc2 := mockconnector.NewMockExchangeCollection(suite.testPath2, 1)
+	dc1 := mockconnector.NewMockExchangeCollection(suite.storePath1, suite.locPath1, 1)
+	dc2 := mockconnector.NewMockExchangeCollection(suite.storePath2, suite.locPath2, 1)

-	fp1, err := suite.testPath1.Append(dc1.Names[0], true)
+	fp1, err := suite.storePath1.Append(dc1.Names[0], true)
 	require.NoError(t, err)

-	fp2, err := suite.testPath2.Append(dc2.Names[0], true)
+	fp2, err := suite.storePath2.Append(dc2.Names[0], true)
 	require.NoError(t, err)

 	stats, _, _, err := w.BackupCollections(

@@ -434,7 +438,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
 	collections := []data.BackupCollection{
 		&mockBackupCollection{
-			path: suite.testPath1,
+			path: suite.storePath1,
 			streams: []data.Stream{
 				&mockconnector.MockExchangeData{
 					ID: testFileName,

@@ -447,7 +451,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
 			},
 		},
 		&mockBackupCollection{
-			path: suite.testPath2,
+			path: suite.storePath2,
 			streams: []data.Stream{
 				&mockconnector.MockExchangeData{
 					ID: testFileName3,

@@ -487,7 +491,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
 	// 5 file and 6 folder entries.
 	assert.Len(t, deets.Details().Entries, 5+6)

-	failedPath, err := suite.testPath2.Append(testFileName4, true)
+	failedPath, err := suite.storePath2.Append(testFileName4, true)
 	require.NoError(t, err)

 	ic := i64counter{}

@@ -792,8 +796,8 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
 	cols: func() []data.BackupCollection {
 		c := mockconnector.NewMockExchangeCollection(
 			suite.testPath1,
-			1,
-		)
+			suite.testPath1,
+			1)
 		c.ColState = data.NotMovedState

 		return []data.BackupCollection{c}
@@ -338,7 +338,7 @@ type backuper interface {
 	tags map[string]string,
 	buildTreeWithBase bool,
 	errs *fault.Errors,
-) (*kopia.BackupStats, *details.Builder, map[string]path.Path, error)
+) (*kopia.BackupStats, *details.Builder, map[string]kopia.PrevRefs, error)
 }

 func selectorToReasons(sel selectors.Selector) []kopia.Reason {

@@ -397,7 +397,7 @@ func consumeBackupDataCollections(
 	backupID model.StableID,
 	isIncremental bool,
 	errs *fault.Errors,
-) (*kopia.BackupStats, *details.Builder, map[string]path.Path, error) {
+) (*kopia.BackupStats, *details.Builder, map[string]kopia.PrevRefs, error) {
 	complete, closer := observe.MessageWithCompletion(ctx, observe.Safe("Backing up data"))
 	defer func() {
 		complete <- struct{}{}

@@ -503,7 +503,7 @@ func mergeDetails(
 	ms *store.Wrapper,
 	detailsStore detailsReader,
 	mans []*kopia.ManifestEntry,
-	shortRefsFromPrevBackup map[string]path.Path,
+	shortRefsFromPrevBackup map[string]kopia.PrevRefs,
 	deets *details.Builder,
 	errs *fault.Errors,
 ) error {

@@ -559,13 +559,16 @@ func mergeDetails(
 		continue
 	}

-	newPath := shortRefsFromPrevBackup[rr.ShortRef()]
-	if newPath == nil {
+	prev, ok := shortRefsFromPrevBackup[rr.ShortRef()]
+	if !ok {
 		// This entry was not sourced from a base snapshot or cached from a
 		// previous backup, skip it.
 		continue
 	}

+	newPath := prev.Repo
+	newLoc := prev.Location
+
 	// Fixup paths in the item.
 	item := entry.ItemInfo
 	if err := details.UpdateItem(&item, newPath); err != nil {

@@ -574,16 +577,27 @@
 	// TODO(ashmrtn): This may need updated if we start using this merge
 	// strategry for items that were cached in kopia.
-	itemUpdated := newPath.String() != rr.String()
+	var (
+		itemUpdated = newPath.String() != rr.String()
+		newLocStr   string
+		locBuilder  *path.Builder
+	)
+
+	if newLoc != nil {
+		locBuilder = newLoc.ToBuilder()
+		newLocStr = newLoc.Folder(true)
+		itemUpdated = itemUpdated || newLocStr != entry.LocationRef
+	}

 	deets.Add(
 		newPath.String(),
 		newPath.ShortRef(),
 		newPath.ToBuilder().Dir().ShortRef(),
+		newLocStr,
 		itemUpdated,
 		item)

-	folders := details.FolderEntriesForPath(newPath.ToBuilder().Dir())
+	folders := details.FolderEntriesForPath(newPath.ToBuilder().Dir(), locBuilder)
 	deets.AddFoldersForItem(folders, item, itemUpdated)

 	// Track how many entries we added so that we know if we got them all when
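With locations in play, an entry counts as updated when either ref changed: the repo path (a genuine move) or the location string (for example, a renamed ancestor folder). A condensed, stand-alone version of the check, with plain strings standing in for path.Path:

package main

import "fmt"

// updated mirrors the mergeDetails check: a moved repo path or a changed
// location both mark the entry as updated.
func updated(oldRepo, newRepo, oldLoc, newLoc string) bool {
	itemUpdated := newRepo != oldRepo

	if newLoc != "" { // location is optional; only compare when present
		itemUpdated = itemUpdated || newLoc != oldLoc
	}

	return itemUpdated
}

func main() {
	fmt.Println(updated("a/b/i", "a/b/i", "Inbox", "Inbox"))   // false
	fmt.Println(updated("a/b/i", "a/c/i", "Inbox", "Inbox"))   // true
	fmt.Println(updated("a/b/i", "a/b/i", "Inbox", "Archive")) // true
}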
@@ -402,7 +402,7 @@ func buildCollections(
 		c.pathFolders,
 		false)

-	mc := mockconnector.NewMockExchangeCollection(pth, len(c.items))
+	mc := mockconnector.NewMockExchangeCollection(pth, pth, len(c.items))

 	for i := 0; i < len(c.items); i++ {
 		mc.Names[i] = c.items[i].name

@@ -777,8 +777,8 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
 	p, err := path.FromDataLayerPath(dest.deets.Entries[0].RepoRef, true)
 	require.NoError(t, err)

-	id, ok := cr.PathInCache(p.Folder())
-	require.True(t, ok, "dir %s found in %s cache", p.Folder(), category)
+	id, ok := cr.PathInCache(p.Folder(false))
+	require.True(t, ok, "dir %s found in %s cache", p.Folder(false), category)

 	d := dataset[category].dests[destName]
 	d.containerID = id

@@ -895,8 +895,8 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
 	p, err := path.FromDataLayerPath(deets.Entries[0].RepoRef, true)
 	require.NoError(t, err)

-	id, ok := cr.PathInCache(p.Folder())
-	require.True(t, ok, "dir %s found in %s cache", p.Folder(), category)
+	id, ok := cr.PathInCache(p.Folder(false))
+	require.True(t, ok, "dir %s found in %s cache", p.Folder(false), category)

 	dataset[category].dests[container3] = contDeets{id, deets}
 }
@@ -89,8 +89,7 @@
 		bases []kopia.IncrementalBase,
 		cs []data.BackupCollection,
 		tags map[string]string,
-		buildTreeWithBase bool,
-	)
+		buildTreeWithBase bool)
 }

 func (mbu mockBackuper) BackupCollections(

@@ -101,7 +100,7 @@
 	tags map[string]string,
 	buildTreeWithBase bool,
 	errs *fault.Errors,
-) (*kopia.BackupStats, *details.Builder, map[string]path.Path, error) {
+) (*kopia.BackupStats, *details.Builder, map[string]kopia.PrevRefs, error) {
 	if mbu.checkFunc != nil {
 		mbu.checkFunc(bases, cs, tags, buildTreeWithBase)
 	}

@@ -249,9 +248,10 @@
 	t.Helper()

 	return &details.DetailsEntry{
 		RepoRef:     pb.String(),
 		ShortRef:    pb.ShortRef(),
 		ParentRef:   pb.Dir().ShortRef(),
+		LocationRef: pb.PopFront().PopFront().PopFront().PopFront().Dir().String(),
 		ItemInfo: details.ItemInfo{
 			Folder: &details.FolderInfo{
 				ItemType: details.FolderItem,

@@ -277,17 +277,24 @@
 func makeDetailsEntry(
 	t *testing.T,
 	p path.Path,
+	l path.Path,
 	size int,
 	updated bool,
 ) *details.DetailsEntry {
 	t.Helper()

+	var lr string
+	if l != nil {
+		lr = l.PopFront().PopFront().PopFront().PopFront().Dir().String()
+	}
+
 	res := &details.DetailsEntry{
 		RepoRef:     p.String(),
 		ShortRef:    p.ShortRef(),
 		ParentRef:   p.ToBuilder().Dir().ShortRef(),
-		ItemInfo:    details.ItemInfo{},
-		Updated:     updated,
+		LocationRef: lr,
+		ItemInfo:    details.ItemInfo{},
+		Updated:     updated,
 	}

 	switch p.Service() {

@@ -607,6 +614,21 @@
 		},
 		true,
 	)
+	locationPath1 = makePath(
+		suite.T(),
+		[]string{
+			tenant,
+			path.OneDriveService.String(),
+			ro,
+			path.FilesCategory.String(),
+			"drives",
+			"drive-id",
+			"root:",
+			"work-display-name",
+			"item1",
+		},
+		true,
+	)
 	itemPath2 = makePath(
 		suite.T(),
 		[]string{

@@ -622,6 +644,21 @@
 		},
 		true,
 	)
+	locationPath2 = makePath(
+		suite.T(),
+		[]string{
+			tenant,
+			path.OneDriveService.String(),
+			ro,
+			path.FilesCategory.String(),
+			"drives",
+			"drive-id",
+			"root:",
+			"personal-display-name",
+			"item2",
+		},
+		true,
+	)
 	itemPath3 = makePath(
 		suite.T(),
 		[]string{

@@ -634,6 +671,18 @@
 		},
 		true,
 	)
+	locationPath3 = makePath(
+		suite.T(),
+		[]string{
+			tenant,
+			path.ExchangeService.String(),
+			ro,
+			path.EmailCategory.String(),
+			"personal-display-name",
+			"item3",
+		},
+		true,
+	)

 	backup1 = backup.Backup{
 		BaseModel: model.BaseModel{

@@ -669,7 +718,7 @@
 		populatedModels  map[model.StableID]backup.Backup
 		populatedDetails map[string]*details.Details
 		inputMans        []*kopia.ManifestEntry
-		inputShortRefsFromPrevBackup map[string]path.Path
+		inputShortRefsFromPrevBackup map[string]kopia.PrevRefs

 		errCheck        assert.ErrorAssertionFunc
 		expectedEntries []*details.DetailsEntry

@@ -682,15 +731,18 @@
 		},
 		{
 			name: "EmptyShortRefsFromPrevBackup",
-			inputShortRefsFromPrevBackup: map[string]path.Path{},
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{},
 			errCheck: assert.NoError,
 			// Use empty slice so we don't error out on nil != empty.
 			expectedEntries: []*details.DetailsEntry{},
 		},
 		{
 			name: "BackupIDNotFound",
-			inputShortRefsFromPrevBackup: map[string]path.Path{
-				itemPath1.ShortRef(): itemPath1,
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo:     itemPath1,
+					Location: locationPath1,
+				},
 			},
 			inputMans: []*kopia.ManifestEntry{
 				{

@@ -704,8 +756,11 @@
 		},
 		{
 			name: "DetailsIDNotFound",
-			inputShortRefsFromPrevBackup: map[string]path.Path{
-				itemPath1.ShortRef(): itemPath1,
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo:     itemPath1,
+					Location: locationPath1,
+				},
 			},
 			inputMans: []*kopia.ManifestEntry{
 				{

@@ -727,9 +782,15 @@
 		},
 		{
 			name: "BaseMissingItems",
-			inputShortRefsFromPrevBackup: map[string]path.Path{
-				itemPath1.ShortRef(): itemPath1,
-				itemPath2.ShortRef(): itemPath2,
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo:     itemPath1,
+					Location: locationPath1,
+				},
+				itemPath2.ShortRef(): {
+					Repo:     itemPath2,
+					Location: locationPath2,
+				},
 			},
 			inputMans: []*kopia.ManifestEntry{
 				{

@@ -746,7 +807,7 @@
 				backup1.DetailsID: {
 					DetailsModel: details.DetailsModel{
 						Entries: []details.DetailsEntry{
-							*makeDetailsEntry(suite.T(), itemPath1, 42, false),
+							*makeDetailsEntry(suite.T(), itemPath1, itemPath1, 42, false),
 						},
 					},
 				},

@@ -755,8 +816,11 @@
 		},
 		{
 			name: "TooManyItems",
-			inputShortRefsFromPrevBackup: map[string]path.Path{
-				itemPath1.ShortRef(): itemPath1,
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo:     itemPath1,
+					Location: locationPath1,
+				},
 			},
 			inputMans: []*kopia.ManifestEntry{
 				{

@@ -779,7 +843,7 @@
 				backup1.DetailsID: {
 					DetailsModel: details.DetailsModel{
 						Entries: []details.DetailsEntry{
-							*makeDetailsEntry(suite.T(), itemPath1, 42, false),
+							*makeDetailsEntry(suite.T(), itemPath1, itemPath1, 42, false),
 						},
 					},
 				},

@@ -788,8 +852,11 @@
 		},
 		{
 			name: "BadBaseRepoRef",
-			inputShortRefsFromPrevBackup: map[string]path.Path{
-				itemPath1.ShortRef(): itemPath1,
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo:     itemPath2,
+					Location: locationPath2,
+				},
 			},
 			inputMans: []*kopia.ManifestEntry{
 				{

@@ -834,19 +901,21 @@
 		},
 		{
 			name: "BadOneDrivePath",
-			inputShortRefsFromPrevBackup: map[string]path.Path{
-				itemPath1.ShortRef(): makePath(
-					suite.T(),
-					[]string{
-						itemPath1.Tenant(),
-						path.OneDriveService.String(),
-						itemPath1.ResourceOwner(),
-						path.FilesCategory.String(),
-						"personal",
-						"item1",
-					},
-					true,
-				),
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo: makePath(
+						suite.T(),
+						[]string{
+							itemPath1.Tenant(),
+							path.OneDriveService.String(),
+							itemPath1.ResourceOwner(),
+							path.FilesCategory.String(),
+							"personal",
+							"item1",
+						},
+						true,
+					),
+				},
 			},
 			inputMans: []*kopia.ManifestEntry{
 				{

@@ -863,7 +932,7 @@
 				backup1.DetailsID: {
 					DetailsModel: details.DetailsModel{
 						Entries: []details.DetailsEntry{
-							*makeDetailsEntry(suite.T(), itemPath1, 42, false),
+							*makeDetailsEntry(suite.T(), itemPath1, itemPath1, 42, false),
 						},
 					},
 				},

@@ -872,8 +941,11 @@
 		},
 		{
 			name: "ItemMerged",
-			inputShortRefsFromPrevBackup: map[string]path.Path{
-				itemPath1.ShortRef(): itemPath1,
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo:     itemPath1,
+					Location: locationPath1,
+				},
 			},
 			inputMans: []*kopia.ManifestEntry{
 				{

@@ -890,20 +962,88 @@
 				backup1.DetailsID: {
 					DetailsModel: details.DetailsModel{
 						Entries: []details.DetailsEntry{
-							*makeDetailsEntry(suite.T(), itemPath1, 42, false),
+							*makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
 						},
 					},
 				},
 			},
 			errCheck: assert.NoError,
 			expectedEntries: []*details.DetailsEntry{
-				makeDetailsEntry(suite.T(), itemPath1, 42, false),
+				makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
+			},
+		},
+		{
+			name: "ItemMergedNoLocation",
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo: itemPath1,
+				},
+			},
+			inputMans: []*kopia.ManifestEntry{
+				{
+					Manifest: makeManifest(suite.T(), backup1.ID, ""),
+					Reasons: []kopia.Reason{
+						pathReason1,
+					},
+				},
+			},
+			populatedModels: map[model.StableID]backup.Backup{
+				backup1.ID: backup1,
+			},
+			populatedDetails: map[string]*details.Details{
+				backup1.DetailsID: {
+					DetailsModel: details.DetailsModel{
+						Entries: []details.DetailsEntry{
+							*makeDetailsEntry(suite.T(), itemPath1, nil, 42, false),
+						},
+					},
+				},
+			},
+			errCheck: assert.NoError,
+			expectedEntries: []*details.DetailsEntry{
+				makeDetailsEntry(suite.T(), itemPath1, nil, 42, false),
+			},
+		},
+		{
+			name: "ItemMergedSameLocation",
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo:     itemPath1,
+					Location: itemPath1,
+				},
+			},
+			inputMans: []*kopia.ManifestEntry{
+				{
+					Manifest: makeManifest(suite.T(), backup1.ID, ""),
+					Reasons: []kopia.Reason{
+						pathReason1,
+					},
+				},
+			},
+			populatedModels: map[model.StableID]backup.Backup{
+				backup1.ID: backup1,
+			},
+			populatedDetails: map[string]*details.Details{
+				backup1.DetailsID: {
+					DetailsModel: details.DetailsModel{
+						Entries: []details.DetailsEntry{
+							*makeDetailsEntry(suite.T(), itemPath1, itemPath1, 42, false),
+						},
+					},
+				},
+			},
+			errCheck: assert.NoError,
+			expectedEntries: []*details.DetailsEntry{
+				makeDetailsEntry(suite.T(), itemPath1, itemPath1, 42, false),
 			},
 		},
 		{
 			name: "ItemMergedExtraItemsInBase",
-			inputShortRefsFromPrevBackup: map[string]path.Path{
-				itemPath1.ShortRef(): itemPath1,
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo:     itemPath1,
+					Location: locationPath1,
+				},
 			},
 			inputMans: []*kopia.ManifestEntry{
 				{

@@ -920,21 +1060,24 @@
 				backup1.DetailsID: {
 					DetailsModel: details.DetailsModel{
 						Entries: []details.DetailsEntry{
-							*makeDetailsEntry(suite.T(), itemPath1, 42, false),
-							*makeDetailsEntry(suite.T(), itemPath2, 84, false),
+							*makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
+							*makeDetailsEntry(suite.T(), itemPath2, locationPath2, 84, false),
 						},
 					},
 				},
 			},
 			errCheck: assert.NoError,
 			expectedEntries: []*details.DetailsEntry{
-				makeDetailsEntry(suite.T(), itemPath1, 42, false),
+				makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
 			},
 		},
 		{
 			name: "ItemMoved",
-			inputShortRefsFromPrevBackup: map[string]path.Path{
-				itemPath1.ShortRef(): itemPath2,
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo:     itemPath2,
+					Location: locationPath2,
+				},
 			},
 			inputMans: []*kopia.ManifestEntry{
 				{

@@ -951,21 +1094,27 @@
 				backup1.DetailsID: {
 					DetailsModel: details.DetailsModel{
 						Entries: []details.DetailsEntry{
-							*makeDetailsEntry(suite.T(), itemPath1, 42, false),
+							*makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
 						},
 					},
 				},
 			},
 			errCheck: assert.NoError,
 			expectedEntries: []*details.DetailsEntry{
-				makeDetailsEntry(suite.T(), itemPath2, 42, true),
+				makeDetailsEntry(suite.T(), itemPath2, locationPath2, 42, true),
 			},
 		},
 		{
 			name: "MultipleBases",
-			inputShortRefsFromPrevBackup: map[string]path.Path{
-				itemPath1.ShortRef(): itemPath1,
-				itemPath3.ShortRef(): itemPath3,
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo:     itemPath1,
+					Location: locationPath1,
+				},
+				itemPath3.ShortRef(): {
+					Repo:     itemPath3,
+					Location: locationPath3,
+				},
 			},
 			inputMans: []*kopia.ManifestEntry{
 				{

@@ -989,7 +1138,7 @@
 				backup1.DetailsID: {
 					DetailsModel: details.DetailsModel{
 						Entries: []details.DetailsEntry{
-							*makeDetailsEntry(suite.T(), itemPath1, 42, false),
+							*makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
 						},
 					},
 				},

@@ -997,23 +1146,26 @@
 				DetailsModel: details.DetailsModel{
 					Entries: []details.DetailsEntry{
 						// This entry should not be picked due to a mismatch on Reasons.
-						*makeDetailsEntry(suite.T(), itemPath1, 84, false),
+						*makeDetailsEntry(suite.T(), itemPath1, locationPath1, 84, false),
 						// This item should be picked.
-						*makeDetailsEntry(suite.T(), itemPath3, 37, false),
+						*makeDetailsEntry(suite.T(), itemPath3, locationPath3, 37, false),
 					},
 				},
 			},
 			errCheck: assert.NoError,
 			expectedEntries: []*details.DetailsEntry{
-				makeDetailsEntry(suite.T(), itemPath1, 42, false),
-				makeDetailsEntry(suite.T(), itemPath3, 37, false),
+				makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
+				makeDetailsEntry(suite.T(), itemPath3, locationPath3, 37, false),
}, },
}, },
{ {
name: "SomeBasesIncomplete", name: "SomeBasesIncomplete",
inputShortRefsFromPrevBackup: map[string]path.Path{ inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
itemPath1.ShortRef(): itemPath1, itemPath1.ShortRef(): {
Repo: itemPath1,
Location: locationPath1,
},
}, },
inputMans: []*kopia.ManifestEntry{ inputMans: []*kopia.ManifestEntry{
{ {
@ -1037,7 +1189,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
backup1.DetailsID: { backup1.DetailsID: {
DetailsModel: details.DetailsModel{ DetailsModel: details.DetailsModel{
Entries: []details.DetailsEntry{ Entries: []details.DetailsEntry{
*makeDetailsEntry(suite.T(), itemPath1, 42, false), *makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
}, },
}, },
}, },
@ -1045,14 +1197,14 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
DetailsModel: details.DetailsModel{ DetailsModel: details.DetailsModel{
Entries: []details.DetailsEntry{ Entries: []details.DetailsEntry{
// This entry should not be picked due to being incomplete. // This entry should not be picked due to being incomplete.
*makeDetailsEntry(suite.T(), itemPath1, 84, false), *makeDetailsEntry(suite.T(), itemPath1, locationPath1, 84, false),
}, },
}, },
}, },
}, },
errCheck: assert.NoError, errCheck: assert.NoError,
expectedEntries: []*details.DetailsEntry{ expectedEntries: []*details.DetailsEntry{
makeDetailsEntry(suite.T(), itemPath1, 42, false), makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
}, },
}, },
} }
@ -1075,6 +1227,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
&deets, &deets,
fault.New(true)) fault.New(true))
test.errCheck(t, err) test.errCheck(t, err)
if err != nil { if err != nil {
return return
} }
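For orientation, here's a minimal, self-contained sketch of the repo/location pairing these tests exercise. The PrevRefs shape (a Repo path plus an optional Location path) matches the diff above; the stand-in types and path strings below are illustrative only.

package main

import "fmt"

// Stand-in for kopia.PrevRefs, which pairs a storage path (Repo, keyed
// by container IDs) with a human-readable location (Location, keyed by
// display names).
type PrevRefs struct {
	Repo     string
	Location string
}

func main() {
	// A prior backup maps each item's ShortRef to both path forms, so
	// merged details can carry the LocationRef forward.
	prev := map[string]PrevRefs{
		"shortref-1": {
			Repo:     "ten/exchange/user/events/calID-1/itemID-9",
			Location: "Calendar/Work",
		},
	}

	for sr, refs := range prev {
		fmt.Printf("%s -> repo=%s location=%s\n", sr, refs.Repo, refs.Location)
	}
}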
@ -1103,8 +1256,12 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsFolders()
itemPath1 = makePath( itemPath1 = makePath(
t, t,
pathElems, pathElems,
true, true)
)
locPath1 = makePath(
t,
pathElems[:len(pathElems)-1],
false)
backup1 = backup.Backup{ backup1 = backup.Backup{
BaseModel: model.BaseModel{ BaseModel: model.BaseModel{
@ -1119,8 +1276,11 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsFolders()
Category: itemPath1.Category(), Category: itemPath1.Category(),
} }
inputToMerge = map[string]path.Path{ inputToMerge = map[string]kopia.PrevRefs{
itemPath1.ShortRef(): itemPath1, itemPath1.ShortRef(): {
Repo: itemPath1,
Location: locPath1,
},
} }
inputMans = []*kopia.ManifestEntry{ inputMans = []*kopia.ManifestEntry{
@ -1137,7 +1297,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsFolders()
} }
itemSize = 42 itemSize = 42
itemDetails = makeDetailsEntry(t, itemPath1, itemSize, false) itemDetails = makeDetailsEntry(t, itemPath1, itemPath1, itemSize, false)
populatedDetails = map[string]*details.Details{ populatedDetails = map[string]*details.Details{
backup1.DetailsID: { backup1.DetailsID: {

View File

@ -44,7 +44,7 @@ func (suite *StreamStoreIntegrationSuite) TestDetails() {
deetsBuilder := &details.Builder{} deetsBuilder := &details.Builder{}
deetsBuilder.Add("ref", "shortref", "parentref", true, deetsBuilder.Add("ref", "shortref", "parentref", "locationRef", true,
details.ItemInfo{ details.ItemInfo{
Exchange: &details.ExchangeInfo{ Exchange: &details.ExchangeInfo{
Subject: "hello world", Subject: "hello world",
@ -66,6 +66,7 @@ func (suite *StreamStoreIntegrationSuite) TestDetails() {
assert.Equal(t, deets.Entries[0].ParentRef, readDeets.Entries[0].ParentRef) assert.Equal(t, deets.Entries[0].ParentRef, readDeets.Entries[0].ParentRef)
assert.Equal(t, deets.Entries[0].ShortRef, readDeets.Entries[0].ShortRef) assert.Equal(t, deets.Entries[0].ShortRef, readDeets.Entries[0].ShortRef)
assert.Equal(t, deets.Entries[0].RepoRef, readDeets.Entries[0].RepoRef) assert.Equal(t, deets.Entries[0].RepoRef, readDeets.Entries[0].RepoRef)
assert.Equal(t, deets.Entries[0].LocationRef, readDeets.Entries[0].LocationRef)
assert.Equal(t, deets.Entries[0].Updated, readDeets.Entries[0].Updated) assert.Equal(t, deets.Entries[0].Updated, readDeets.Entries[0].Updated)
assert.NotNil(t, readDeets.Entries[0].Exchange) assert.NotNil(t, readDeets.Entries[0].Exchange)
assert.Equal(t, *deets.Entries[0].Exchange, *readDeets.Entries[0].Exchange) assert.Equal(t, *deets.Entries[0].Exchange, *readDeets.Entries[0].Exchange)

View File

@ -14,7 +14,7 @@ import (
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
const Version = 1 const Version = 2
// Backup represents the result of a backup operation // Backup represents the result of a backup operation
type Backup struct { type Backup struct {

View File

@ -15,11 +15,12 @@ import (
) )
type folderEntry struct { type folderEntry struct {
RepoRef string RepoRef string
ShortRef string ShortRef string
ParentRef string ParentRef string
Updated bool LocationRef string
Info ItemInfo Updated bool
Info ItemInfo
} }
// -------------------------------------------------------------------------------- // --------------------------------------------------------------------------------
@ -110,10 +111,14 @@ type Builder struct {
knownFolders map[string]folderEntry `json:"-"` knownFolders map[string]folderEntry `json:"-"`
} }
func (b *Builder) Add(repoRef, shortRef, parentRef string, updated bool, info ItemInfo) { func (b *Builder) Add(
repoRef, shortRef, parentRef, locationRef string,
updated bool,
info ItemInfo,
) {
b.mu.Lock() b.mu.Lock()
defer b.mu.Unlock() defer b.mu.Unlock()
b.d.add(repoRef, shortRef, parentRef, updated, info) b.d.add(repoRef, shortRef, parentRef, locationRef, updated, info)
} }
func (b *Builder) Details() *Details { func (b *Builder) Details() *Details {
@ -131,30 +136,65 @@ func (b *Builder) Details() *Details {
// TODO(ashmrtn): If we never need to pre-populate the modified time of a folder // TODO(ashmrtn): If we never need to pre-populate the modified time of a folder
// we should just merge this with AddFoldersForItem, have Add call // we should just merge this with AddFoldersForItem, have Add call
// AddFoldersForItem, and unexport AddFoldersForItem. // AddFoldersForItem, and unexport AddFoldersForItem.
func FolderEntriesForPath(parent *path.Builder) []folderEntry { func FolderEntriesForPath(parent, location *path.Builder) []folderEntry {
folders := []folderEntry{} folders := []folderEntry{}
lfs := locationRefOf(location)
for len(parent.Elements()) > 0 { for len(parent.Elements()) > 0 {
nextParent := parent.Dir() var (
nextParent = parent.Dir()
lr string
dn = parent.LastElem()
)
// TODO: We may have future cases where the storage hierarchy
// doesn't match the location hierarchy.
if lfs != nil {
lr = lfs.String()
if len(lfs.Elements()) > 0 {
dn = lfs.LastElem()
}
}
folders = append(folders, folderEntry{ folders = append(folders, folderEntry{
RepoRef: parent.String(), RepoRef: parent.String(),
ShortRef: parent.ShortRef(), ShortRef: parent.ShortRef(),
ParentRef: nextParent.ShortRef(), ParentRef: nextParent.ShortRef(),
LocationRef: lr,
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{ Folder: &FolderInfo{
ItemType: FolderItem, ItemType: FolderItem,
DisplayName: parent.Elements()[len(parent.Elements())-1], DisplayName: dn,
}, },
}, },
}) })
parent = nextParent parent = nextParent
if lfs != nil {
lfs = lfs.Dir()
}
} }
return folders return folders
} }
// assumes the pb contains a path like:
// <tenant>/<service>/<owner>/<category>/<logical_containers>...
// and returns a builder holding only <logical_containers>/...
func locationRefOf(pb *path.Builder) *path.Builder {
if pb == nil {
return nil
}
for i := 0; i < 4; i++ {
pb = pb.PopFront()
}
return pb
}
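To make the trimming concrete, here is a small, runnable analogue of locationRefOf using plain string slices instead of path.Builder (assumption: exactly the four prefix segments named in the comment are dropped).

package main

import (
	"fmt"
	"strings"
)

// locationOf drops the <tenant>/<service>/<owner>/<category> prefix,
// leaving only the logical container elements.
func locationOf(elems []string) []string {
	if len(elems) <= 4 {
		return nil
	}

	return elems[4:]
}

func main() {
	full := strings.Split("ten/exchange/user/email/Inbox/Important", "/")
	fmt.Println(strings.Join(locationOf(full), "/")) // Inbox/Important
}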
// AddFoldersForItem adds entries for the given folders. It skips adding entries that // AddFoldersForItem adds entries for the given folders. It skips adding entries that
// have been added by previous calls. // have been added by previous calls.
func (b *Builder) AddFoldersForItem(folders []folderEntry, itemInfo ItemInfo, updated bool) { func (b *Builder) AddFoldersForItem(folders []folderEntry, itemInfo ItemInfo, updated bool) {
@ -202,13 +242,18 @@ type Details struct {
DetailsModel DetailsModel
} }
func (d *Details) add(repoRef, shortRef, parentRef string, updated bool, info ItemInfo) { func (d *Details) add(
repoRef, shortRef, parentRef, locationRef string,
updated bool,
info ItemInfo,
) {
d.Entries = append(d.Entries, DetailsEntry{ d.Entries = append(d.Entries, DetailsEntry{
RepoRef: repoRef, RepoRef: repoRef,
ShortRef: shortRef, ShortRef: shortRef,
ParentRef: parentRef, ParentRef: parentRef,
Updated: updated, LocationRef: locationRef,
ItemInfo: info, Updated: updated,
ItemInfo: info,
}) })
} }
@ -233,9 +278,21 @@ type DetailsEntry struct {
RepoRef string `json:"repoRef"` RepoRef string `json:"repoRef"`
ShortRef string `json:"shortRef"` ShortRef string `json:"shortRef"`
ParentRef string `json:"parentRef,omitempty"` ParentRef string `json:"parentRef,omitempty"`
// LocationRef contains the logical path structure by its human-readable
// display names. E.g.: if an item is located at "/Inbox/Important", we
// hold that string in the LocationRef, while the actual IDs of each
// container are used for the RepoRef.
// LocationRef only holds the container values, and does not include
// the metadata prefixes (tenant, service, owner, etc.) found in the
// RepoRef.
// Currently only implemented for Exchange Calendars.
LocationRef string `json:"locationRef,omitempty"`
// Indicates the item was added or updated in this backup // Indicates the item was added or updated in this backup
// Always `true` for full backups // Always `true` for full backups
Updated bool `json:"updated"` Updated bool `json:"updated"`
ItemInfo ItemInfo
} }
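A quick illustration of the two fields side by side; the struct below is a stand-in for DetailsEntry, and the path values are made up.

package main

import "fmt"

// entry mirrors the RepoRef/LocationRef split documented above:
// RepoRef holds stable, ID-based segments; LocationRef holds only the
// human-readable container names.
type entry struct {
	RepoRef     string
	LocationRef string
}

func main() {
	e := entry{
		RepoRef:     "ten/exchange/user/events/calID-123/itemID-9",
		LocationRef: "Calendar/Work",
	}
	fmt.Printf("repo=%s location=%s\n", e.RepoRef, e.LocationRef)
}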
@ -316,18 +373,21 @@ const (
FolderItem ItemType = iota + 300 FolderItem ItemType = iota + 300
) )
func UpdateItem(item *ItemInfo, newPath path.Path) error { func UpdateItem(item *ItemInfo, repoPath path.Path) error {
// Only OneDrive and SharePoint have information about parent folders // Only OneDrive and SharePoint have information about parent folders
// contained in them. // contained in them.
var updatePath func(path.Path) error
switch item.infoType() { switch item.infoType() {
case SharePointItem: case SharePointItem:
return item.SharePoint.UpdateParentPath(newPath) updatePath = item.SharePoint.UpdateParentPath
case OneDriveItem: case OneDriveItem:
return item.OneDrive.UpdateParentPath(newPath) updatePath = item.OneDrive.UpdateParentPath
default:
return nil
} }
return nil return updatePath(repoPath)
} }
// ItemInfo is a oneOf that contains service specific // ItemInfo is a oneOf that contains service specific

View File

@ -39,8 +39,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{ {
name: "no info", name: "no info",
entry: DetailsEntry{ entry: DetailsEntry{
RepoRef: "reporef", RepoRef: "reporef",
ShortRef: "deadbeef", ShortRef: "deadbeef",
LocationRef: "locationref",
}, },
expectHs: []string{"ID"}, expectHs: []string{"ID"},
expectVs: []string{"deadbeef"}, expectVs: []string{"deadbeef"},
@ -48,8 +49,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{ {
name: "exchange event info", name: "exchange event info",
entry: DetailsEntry{ entry: DetailsEntry{
RepoRef: "reporef", RepoRef: "reporef",
ShortRef: "deadbeef", ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{ ItemInfo: ItemInfo{
Exchange: &ExchangeInfo{ Exchange: &ExchangeInfo{
ItemType: ExchangeEvent, ItemType: ExchangeEvent,
@ -67,8 +69,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{ {
name: "exchange contact info", name: "exchange contact info",
entry: DetailsEntry{ entry: DetailsEntry{
RepoRef: "reporef", RepoRef: "reporef",
ShortRef: "deadbeef", ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{ ItemInfo: ItemInfo{
Exchange: &ExchangeInfo{ Exchange: &ExchangeInfo{
ItemType: ExchangeContact, ItemType: ExchangeContact,
@ -82,8 +85,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{ {
name: "exchange mail info", name: "exchange mail info",
entry: DetailsEntry{ entry: DetailsEntry{
RepoRef: "reporef", RepoRef: "reporef",
ShortRef: "deadbeef", ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{ ItemInfo: ItemInfo{
Exchange: &ExchangeInfo{ Exchange: &ExchangeInfo{
ItemType: ExchangeMail, ItemType: ExchangeMail,
@ -99,8 +103,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{ {
name: "sharepoint info", name: "sharepoint info",
entry: DetailsEntry{ entry: DetailsEntry{
RepoRef: "reporef", RepoRef: "reporef",
ShortRef: "deadbeef", ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{ ItemInfo: ItemInfo{
SharePoint: &SharePointInfo{ SharePoint: &SharePointInfo{
ItemName: "itemName", ItemName: "itemName",
@ -128,8 +133,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{ {
name: "oneDrive info", name: "oneDrive info",
entry: DetailsEntry{ entry: DetailsEntry{
RepoRef: "reporef", RepoRef: "reporef",
ShortRef: "deadbeef", ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{ ItemInfo: ItemInfo{
OneDrive: &OneDriveInfo{ OneDrive: &OneDriveInfo{
ItemName: "itemName", ItemName: "itemName",
@ -157,37 +163,57 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
} }
var pathItemsTable = []struct { var pathItemsTable = []struct {
name string name string
ents []DetailsEntry ents []DetailsEntry
expectRefs []string expectRepoRefs []string
expectLocationRefs []string
}{ }{
{ {
name: "nil entries", name: "nil entries",
ents: nil, ents: nil,
expectRefs: []string{}, expectRepoRefs: []string{},
expectLocationRefs: []string{},
}, },
{ {
name: "single entry", name: "single entry",
ents: []DetailsEntry{ ents: []DetailsEntry{
{RepoRef: "abcde"}, {
RepoRef: "abcde",
LocationRef: "locationref",
},
}, },
expectRefs: []string{"abcde"}, expectRepoRefs: []string{"abcde"},
expectLocationRefs: []string{"locationref"},
}, },
{ {
name: "multiple entries", name: "multiple entries",
ents: []DetailsEntry{ ents: []DetailsEntry{
{RepoRef: "abcde"}, {
{RepoRef: "12345"}, RepoRef: "abcde",
LocationRef: "locationref",
},
{
RepoRef: "12345",
LocationRef: "locationref2",
},
}, },
expectRefs: []string{"abcde", "12345"}, expectRepoRefs: []string{"abcde", "12345"},
expectLocationRefs: []string{"locationref", "locationref2"},
}, },
{ {
name: "multiple entries with folder", name: "multiple entries with folder",
ents: []DetailsEntry{ ents: []DetailsEntry{
{RepoRef: "abcde"},
{RepoRef: "12345"},
{ {
RepoRef: "deadbeef", RepoRef: "abcde",
LocationRef: "locationref",
},
{
RepoRef: "12345",
LocationRef: "locationref2",
},
{
RepoRef: "deadbeef",
LocationRef: "locationref3",
ItemInfo: ItemInfo{ ItemInfo: ItemInfo{
Folder: &FolderInfo{ Folder: &FolderInfo{
DisplayName: "test folder", DisplayName: "test folder",
@ -195,7 +221,8 @@ var pathItemsTable = []struct {
}, },
}, },
}, },
expectRefs: []string{"abcde", "12345"}, expectRepoRefs: []string{"abcde", "12345"},
expectLocationRefs: []string{"locationref", "locationref2"},
}, },
} }
@ -207,7 +234,7 @@ func (suite *DetailsUnitSuite) TestDetailsModel_Path() {
Entries: test.ents, Entries: test.ents,
}, },
} }
assert.Equal(t, test.expectRefs, d.Paths()) assert.ElementsMatch(t, test.expectRepoRefs, d.Paths())
}) })
} }
} }
@ -222,10 +249,11 @@ func (suite *DetailsUnitSuite) TestDetailsModel_Items() {
} }
ents := d.Items() ents := d.Items()
assert.Len(t, ents, len(test.expectRefs)) assert.Len(t, ents, len(test.expectRepoRefs))
for _, e := range ents { for _, e := range ents {
assert.Contains(t, test.expectRefs, e.RepoRef) assert.Contains(t, test.expectRepoRefs, e.RepoRef)
assert.Contains(t, test.expectLocationRefs, e.LocationRef)
} }
}) })
} }
@ -253,9 +281,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
name: "MultipleFolders", name: "MultipleFolders",
folders: []folderEntry{ folders: []folderEntry{
{ {
RepoRef: "rr1", RepoRef: "rr1",
ShortRef: "sr1", ShortRef: "sr1",
ParentRef: "pr1", ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{ Folder: &FolderInfo{
Modified: folderTimeOlderThanItem, Modified: folderTimeOlderThanItem,
@ -263,9 +292,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
}, },
}, },
{ {
RepoRef: "rr2", RepoRef: "rr2",
ShortRef: "sr2", ShortRef: "sr2",
ParentRef: "pr2", ParentRef: "pr2",
LocationRef: "lr2",
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{ Folder: &FolderInfo{
Modified: folderTimeNewerThanItem, Modified: folderTimeNewerThanItem,
@ -283,9 +313,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
name: "MultipleFoldersWithRepeats", name: "MultipleFoldersWithRepeats",
folders: []folderEntry{ folders: []folderEntry{
{ {
RepoRef: "rr1", RepoRef: "rr1",
ShortRef: "sr1", ShortRef: "sr1",
ParentRef: "pr1", ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{ Folder: &FolderInfo{
Modified: folderTimeOlderThanItem, Modified: folderTimeOlderThanItem,
@ -293,9 +324,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
}, },
}, },
{ {
RepoRef: "rr2", RepoRef: "rr2",
ShortRef: "sr2", ShortRef: "sr2",
ParentRef: "pr2", ParentRef: "pr2",
LocationRef: "lr2",
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{ Folder: &FolderInfo{
Modified: folderTimeOlderThanItem, Modified: folderTimeOlderThanItem,
@ -303,9 +335,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
}, },
}, },
{ {
RepoRef: "rr1", RepoRef: "rr1",
ShortRef: "sr1", ShortRef: "sr1",
ParentRef: "pr1", ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{ Folder: &FolderInfo{
Modified: folderTimeOlderThanItem, Modified: folderTimeOlderThanItem,
@ -313,9 +346,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
}, },
}, },
{ {
RepoRef: "rr3", RepoRef: "rr3",
ShortRef: "sr3", ShortRef: "sr3",
ParentRef: "pr3", ParentRef: "pr3",
LocationRef: "lr3",
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{ Folder: &FolderInfo{
Modified: folderTimeNewerThanItem, Modified: folderTimeNewerThanItem,
@ -363,18 +397,20 @@ func (suite *DetailsUnitSuite) TestDetails_AddFoldersUpdate() {
name: "ItemNotUpdated_NoChange", name: "ItemNotUpdated_NoChange",
folders: []folderEntry{ folders: []folderEntry{
{ {
RepoRef: "rr1", RepoRef: "rr1",
ShortRef: "sr1", ShortRef: "sr1",
ParentRef: "pr1", ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{}, Folder: &FolderInfo{},
}, },
Updated: true, Updated: true,
}, },
{ {
RepoRef: "rr2", RepoRef: "rr2",
ShortRef: "sr2", ShortRef: "sr2",
ParentRef: "pr2", ParentRef: "pr2",
LocationRef: "lr2",
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{}, Folder: &FolderInfo{},
}, },
@ -390,17 +426,19 @@ func (suite *DetailsUnitSuite) TestDetails_AddFoldersUpdate() {
name: "ItemUpdated", name: "ItemUpdated",
folders: []folderEntry{ folders: []folderEntry{
{ {
RepoRef: "rr1", RepoRef: "rr1",
ShortRef: "sr1", ShortRef: "sr1",
ParentRef: "pr1", ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{}, Folder: &FolderInfo{},
}, },
}, },
{ {
RepoRef: "rr2", RepoRef: "rr2",
ShortRef: "sr2", ShortRef: "sr2",
ParentRef: "pr2", ParentRef: "pr2",
LocationRef: "lr2",
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{}, Folder: &FolderInfo{},
}, },
@ -482,9 +520,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFoldersDifferentServices() {
for _, test := range table { for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) { suite.T().Run(test.name, func(t *testing.T) {
folder := folderEntry{ folder := folderEntry{
RepoRef: "rr1", RepoRef: "rr1",
ShortRef: "sr1", ShortRef: "sr1",
ParentRef: "pr1", ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{ Info: ItemInfo{
Folder: &FolderInfo{}, Folder: &FolderInfo{},
}, },
@ -562,7 +601,8 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
table := []struct { table := []struct {
name string name string
input ItemInfo input ItemInfo
newPath path.Path repoPath path.Path
locPath path.Path
errCheck assert.ErrorAssertionFunc errCheck assert.ErrorAssertionFunc
expectedItem ItemInfo expectedItem ItemInfo
}{ }{
@ -616,7 +656,8 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
ParentPath: folder1, ParentPath: folder1,
}, },
}, },
newPath: newOneDrivePath, repoPath: newOneDrivePath,
locPath: newOneDrivePath,
errCheck: assert.NoError, errCheck: assert.NoError,
expectedItem: ItemInfo{ expectedItem: ItemInfo{
OneDrive: &OneDriveInfo{ OneDrive: &OneDriveInfo{
@ -633,7 +674,8 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
ParentPath: folder1, ParentPath: folder1,
}, },
}, },
newPath: newOneDrivePath, repoPath: newOneDrivePath,
locPath: newOneDrivePath,
errCheck: assert.NoError, errCheck: assert.NoError,
expectedItem: ItemInfo{ expectedItem: ItemInfo{
SharePoint: &SharePointInfo{ SharePoint: &SharePointInfo{
@ -650,7 +692,8 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
ParentPath: folder1, ParentPath: folder1,
}, },
}, },
newPath: badOneDrivePath, repoPath: badOneDrivePath,
locPath: badOneDrivePath,
errCheck: assert.Error, errCheck: assert.Error,
}, },
{ {
@ -661,7 +704,8 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
ParentPath: folder1, ParentPath: folder1,
}, },
}, },
newPath: badOneDrivePath, repoPath: badOneDrivePath,
locPath: badOneDrivePath,
errCheck: assert.Error, errCheck: assert.Error,
}, },
} }
@ -669,7 +713,7 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
for _, test := range table { for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) { suite.T().Run(test.name, func(t *testing.T) {
item := test.input item := test.input
err := UpdateItem(&item, test.newPath) err := UpdateItem(&item, test.repoPath)
test.errCheck(t, err) test.errCheck(t, err)
if err != nil { if err != nil {
@ -680,3 +724,162 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
}) })
} }
} }
var (
basePath = path.Builder{}.Append("ten", "serv", "user", "type")
baseFolderEnts = []folderEntry{
{
RepoRef: basePath.String(),
ShortRef: basePath.ShortRef(),
ParentRef: basePath.Dir().ShortRef(),
LocationRef: "",
Info: ItemInfo{
Folder: &FolderInfo{
ItemType: FolderItem,
DisplayName: "type",
},
},
},
{
RepoRef: basePath.Dir().String(),
ShortRef: basePath.Dir().ShortRef(),
ParentRef: basePath.Dir().Dir().ShortRef(),
LocationRef: "",
Info: ItemInfo{
Folder: &FolderInfo{
ItemType: FolderItem,
DisplayName: "user",
},
},
},
{
RepoRef: basePath.Dir().Dir().String(),
ShortRef: basePath.Dir().Dir().ShortRef(),
ParentRef: basePath.Dir().Dir().Dir().ShortRef(),
LocationRef: "",
Info: ItemInfo{
Folder: &FolderInfo{
ItemType: FolderItem,
DisplayName: "serv",
},
},
},
{
RepoRef: basePath.Dir().Dir().Dir().String(),
ShortRef: basePath.Dir().Dir().Dir().ShortRef(),
ParentRef: "",
LocationRef: "",
Info: ItemInfo{
Folder: &FolderInfo{
ItemType: FolderItem,
DisplayName: "ten",
},
},
},
}
)
func folderEntriesFor(pathElems []string, locElems []string) []folderEntry {
p := basePath.Append(pathElems...)
l := path.Builder{}.Append(locElems...)
ents := make([]folderEntry, 0, len(pathElems)+4)
for range pathElems {
dn := p.LastElem()
if l != nil && len(l.Elements()) > 0 {
dn = l.LastElem()
}
fe := folderEntry{
RepoRef: p.String(),
ShortRef: p.ShortRef(),
ParentRef: p.Dir().ShortRef(),
LocationRef: l.String(),
Info: ItemInfo{
Folder: &FolderInfo{
ItemType: FolderItem,
DisplayName: dn,
},
},
}
l = l.Dir()
p = p.Dir()
ents = append(ents, fe)
}
return append(ents, baseFolderEnts...)
}
func (suite *DetailsUnitSuite) TestFolderEntriesForPath() {
var (
fnords = []string{"fnords"}
smarf = []string{"fnords", "smarf"}
beau = []string{"beau"}
regard = []string{"beau", "regard"}
)
table := []struct {
name string
parent *path.Builder
location *path.Builder
expect []folderEntry
}{
{
name: "base path, parent only",
parent: basePath,
expect: baseFolderEnts,
},
{
name: "base path with location",
parent: basePath,
location: basePath,
expect: baseFolderEnts,
},
{
name: "single depth parent only",
parent: basePath.Append(fnords...),
expect: folderEntriesFor(fnords, nil),
},
{
name: "single depth with location",
parent: basePath.Append(fnords...),
location: basePath.Append(beau...),
expect: folderEntriesFor(fnords, beau),
},
{
name: "two depth parent only",
parent: basePath.Append(smarf...),
expect: folderEntriesFor(smarf, nil),
},
{
name: "two depth with location",
parent: basePath.Append(smarf...),
location: basePath.Append(regard...),
expect: folderEntriesFor(smarf, regard),
},
{
name: "mismatched depth, parent longer",
parent: basePath.Append(smarf...),
location: basePath.Append(beau...),
expect: folderEntriesFor(smarf, beau),
},
	// We can't handle this right now, but we don't have any cases
	// that immediately require it, either. Keeping it in the test
	// as a reminder that this might be required at some point.
// {
// name: "mismatched depth, location longer",
// parent: basePath.Append(fnords...),
// location: basePath.Append(regard...),
// expect: folderEntriesFor(fnords, regard),
// },
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
result := FolderEntriesForPath(test.parent, test.location)
assert.ElementsMatch(t, test.expect, result)
})
}
}

View File

@ -162,8 +162,9 @@ func genLogger(level logLevel, logfile string) (*zapcore.Core, *zap.SugaredLogge
// then try to set up a logger directly // then try to set up a logger directly
var ( var (
lgr *zap.Logger lgr *zap.Logger
err error err error
opts = []zap.Option{zap.AddStacktrace(zapcore.PanicLevel)}
) )
if level != Production { if level != Production {
@ -178,12 +179,13 @@ func genLogger(level logLevel, logfile string) (*zapcore.Core, *zap.SugaredLogge
cfg.Level = zap.NewAtomicLevelAt(zapcore.FatalLevel) cfg.Level = zap.NewAtomicLevelAt(zapcore.FatalLevel)
} }
opts := []zap.Option{}
if readableOutput { if readableOutput {
opts = append(opts, zap.WithCaller(false), zap.AddStacktrace(zapcore.DPanicLevel)) opts = append(opts, zap.WithCaller(false))
cfg.EncoderConfig.EncodeTime = zapcore.TimeEncoderOfLayout("15:04:05.00") cfg.EncoderConfig.EncodeTime = zapcore.TimeEncoderOfLayout("15:04:05.00")
cfg.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder
if logfile == "stderr" || logfile == "stdout" {
cfg.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder
}
} }
cfg.OutputPaths = []string{logfile} cfg.OutputPaths = []string{logfile}
@ -191,7 +193,7 @@ func genLogger(level logLevel, logfile string) (*zapcore.Core, *zap.SugaredLogge
} else { } else {
cfg := zap.NewProductionConfig() cfg := zap.NewProductionConfig()
cfg.OutputPaths = []string{logfile} cfg.OutputPaths = []string{logfile}
lgr, err = cfg.Build() lgr, err = cfg.Build(opts...)
} }
// fall back to the core config if the default creation fails // fall back to the core config if the default creation fails
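The net effect of the logger change: stacktraces are limited to panic level, and color level encoding is applied only when writing to a terminal stream. A minimal sketch using the public zap API (the logfile value is illustrative):

package main

import (
	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

func main() {
	logfile := "stdout"

	cfg := zap.NewDevelopmentConfig()
	cfg.OutputPaths = []string{logfile}

	// Color the level text only when writing to a terminal stream.
	if logfile == "stderr" || logfile == "stdout" {
		cfg.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder
	}

	// Stacktraces only at panic level, matching the shared opts above.
	lgr, err := cfg.Build(zap.AddStacktrace(zapcore.PanicLevel))
	if err != nil {
		panic(err)
	}

	lgr.Sugar().Info("logger configured")
}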

View File

@ -20,7 +20,7 @@ func ToOneDrivePath(p Path) (*DrivePath, error) {
if len(folders) < 3 { if len(folders) < 3 {
return nil, clues. return nil, clues.
New("folder path doesn't match expected format for OneDrive items"). New("folder path doesn't match expected format for OneDrive items").
With("path_folders", p.Folder()) With("path_folders", p.Folder(false))
} }
return &DrivePath{DriveID: folders[1], Folders: folders[3:]}, nil return &DrivePath{DriveID: folders[1], Folders: folders[3:]}, nil

View File

@ -86,7 +86,7 @@ type Path interface {
Category() CategoryType Category() CategoryType
Tenant() string Tenant() string
ResourceOwner() string ResourceOwner() string
Folder() string Folder(bool) string
Folders() []string Folders() []string
Item() string Item() string
// PopFront returns a Builder object with the first element (left-side) // PopFront returns a Builder object with the first element (left-side)
@ -140,6 +140,14 @@ func (pb Builder) UnescapeAndAppend(elements ...string) (*Builder, error) {
return res, nil return res, nil
} }
// SplitUnescapeAppend takes in an escaped string representing a directory
// path, splits the string, and appends it to the current builder.
func (pb Builder) SplitUnescapeAppend(s string) (*Builder, error) {
elems := Split(TrimTrailingSlash(s))
return pb.UnescapeAndAppend(elems...)
}
// Append creates a copy of this Builder and adds the given elements to the // end of the new Builder. Elements are added in the order they are passed.
// end of the new Builder. Elements are added in the order they are passed. // end of the new Builder. Elements are added in the order they are passed.
func (pb Builder) Append(elements ...string) *Builder { func (pb Builder) Append(elements ...string) *Builder {
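A hedged usage sketch for the new SplitUnescapeAppend helper, assuming the path package import path used elsewhere in this repo; the input string is illustrative.

package main

import (
	"fmt"

	"github.com/alcionai/corso/src/pkg/path"
)

func main() {
	// The trailing slash is trimmed before the split, then each element
	// is unescaped and appended.
	pb, err := path.Builder{}.SplitUnescapeAppend("Inbox/Important/")
	if err != nil {
		panic(err)
	}

	fmt.Println(pb.Elements()) // [Inbox Important]
}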
@ -205,6 +213,14 @@ func (pb Builder) Dir() *Builder {
} }
} }
func (pb Builder) LastElem() string {
if len(pb.elements) == 0 {
return ""
}
return pb.elements[len(pb.elements)-1]
}
// String returns a string that contains all path elements joined together. // String returns a string that contains all path elements joined together.
// Elements of the path that need escaping are escaped. // Elements of the path that need escaping are escaped.
func (pb Builder) String() string { func (pb Builder) String() string {
@ -247,11 +263,6 @@ func (pb Builder) Elements() []string {
return append([]string{}, pb.elements...) return append([]string{}, pb.elements...)
} }
//nolint:unused
func (pb Builder) join(start, end int) string {
return join(pb.elements[start:end])
}
func verifyInputValues(tenant, resourceOwner string) error { func verifyInputValues(tenant, resourceOwner string) error {
if len(tenant) == 0 { if len(tenant) == 0 {
return clues.Stack(errMissingSegment, errors.New("tenant")) return clues.Stack(errMissingSegment, errors.New("tenant"))

View File

@ -480,13 +480,87 @@ func (suite *PathUnitSuite) TestFromStringErrors() {
} }
} }
func (suite *PathUnitSuite) TestFolder() {
table := []struct {
name string
p func(t *testing.T) Path
escape bool
expectFolder string
expectSplit []string
}{
{
name: "clean path",
p: func(t *testing.T) Path {
p, err := Builder{}.
Append("a", "b", "c").
ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false)
require.NoError(t, err)
return p
},
expectFolder: "a/b/c",
expectSplit: []string{"a", "b", "c"},
},
{
name: "clean path escaped",
p: func(t *testing.T) Path {
p, err := Builder{}.
Append("a", "b", "c").
ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false)
require.NoError(t, err)
return p
},
escape: true,
expectFolder: "a/b/c",
expectSplit: []string{"a", "b", "c"},
},
{
name: "escapable path",
p: func(t *testing.T) Path {
p, err := Builder{}.
Append("a/", "b", "c").
ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false)
require.NoError(t, err)
return p
},
expectFolder: "a//b/c",
expectSplit: []string{"a", "b", "c"},
},
{
name: "escapable path escaped",
p: func(t *testing.T) Path {
p, err := Builder{}.
Append("a/", "b", "c").
ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false)
require.NoError(t, err)
return p
},
escape: true,
expectFolder: "a\\//b/c",
expectSplit: []string{"a\\/", "b", "c"},
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
p := test.p(t)
result := p.Folder(test.escape)
assert.Equal(t, test.expectFolder, result)
assert.Equal(t, test.expectSplit, Split(result))
})
}
}
func (suite *PathUnitSuite) TestFromString() { func (suite *PathUnitSuite) TestFromString() {
const ( const (
testTenant = "tenant" testTenant = "tenant"
testUser = "user" testUser = "user"
testElement1 = "folder" testElement1 = "folder/"
testElement2 = "folder2" testElementTrimmed = "folder"
testElement3 = "other" testElement2 = "folder2"
testElement3 = "other"
) )
isItem := []struct { isItem := []struct {
@ -509,9 +583,13 @@ func (suite *PathUnitSuite) TestFromString() {
// Expected result for Folder() if path is marked as a folder. // Expected result for Folder() if path is marked as a folder.
expectedFolder string expectedFolder string
// Expected result for Item() if path is marked as an item. // Expected result for Item() if path is marked as an item.
expectedItem string // Expected result for Split(Folder()) if path is marked as a folder.
expectedSplit []string
expectedItem string
// Expected result for Folder() if path is marked as an item. // Expected result for Folder() if path is marked as an item.
expectedItemFolder string expectedItemFolder string
// Expected result for Split(Folder()) if path is marked as an item.
expectedItemSplit []string
}{ }{
{ {
name: "BasicPath", name: "BasicPath",
@ -525,16 +603,25 @@ func (suite *PathUnitSuite) TestFromString() {
), ),
expectedFolder: fmt.Sprintf( expectedFolder: fmt.Sprintf(
"%s/%s/%s", "%s/%s/%s",
testElement1, testElementTrimmed,
testElement2, testElement2,
testElement3, testElement3,
), ),
expectedSplit: []string{
testElementTrimmed,
testElement2,
testElement3,
},
expectedItem: testElement3, expectedItem: testElement3,
expectedItemFolder: fmt.Sprintf( expectedItemFolder: fmt.Sprintf(
"%s/%s", "%s/%s",
testElement1, testElementTrimmed,
testElement2, testElement2,
), ),
expectedItemSplit: []string{
testElementTrimmed,
testElement2,
},
}, },
{ {
name: "PathWithEmptyElements", name: "PathWithEmptyElements",
@ -542,22 +629,31 @@ func (suite *PathUnitSuite) TestFromString() {
"/%s//%%s//%s//%%s//%s///%s//%s//", "/%s//%%s//%s//%%s//%s///%s//%s//",
testTenant, testTenant,
testUser, testUser,
testElement1, testElementTrimmed,
testElement2, testElement2,
testElement3, testElement3,
), ),
expectedFolder: fmt.Sprintf( expectedFolder: fmt.Sprintf(
"%s/%s/%s", "%s/%s/%s",
testElement1, testElementTrimmed,
testElement2, testElement2,
testElement3, testElement3,
), ),
expectedSplit: []string{
testElementTrimmed,
testElement2,
testElement3,
},
expectedItem: testElement3, expectedItem: testElement3,
expectedItemFolder: fmt.Sprintf( expectedItemFolder: fmt.Sprintf(
"%s/%s", "%s/%s",
testElement1, testElementTrimmed,
testElement2, testElement2,
), ),
expectedItemSplit: []string{
testElementTrimmed,
testElement2,
},
}, },
} }
@ -572,16 +668,25 @@ func (suite *PathUnitSuite) TestFromString() {
p, err := FromDataLayerPath(testPath, item.isItem) p, err := FromDataLayerPath(testPath, item.isItem)
require.NoError(t, err) require.NoError(t, err)
assert.Equal(t, service, p.Service()) assert.Equal(t, service, p.Service(), "service")
assert.Equal(t, cat, p.Category()) assert.Equal(t, cat, p.Category(), "category")
assert.Equal(t, testTenant, p.Tenant()) assert.Equal(t, testTenant, p.Tenant(), "tenant")
assert.Equal(t, testUser, p.ResourceOwner()) assert.Equal(t, testUser, p.ResourceOwner(), "resource owner")
if !item.isItem { fld := p.Folder(false)
assert.Equal(t, test.expectedFolder, p.Folder()) escfld := p.Folder(true)
if item.isItem {
assert.Equal(t, test.expectedItemFolder, fld, "item folder")
assert.Equal(t, test.expectedItemSplit, Split(fld), "item split")
assert.Equal(t, test.expectedItemFolder, escfld, "escaped item folder")
assert.Equal(t, test.expectedItemSplit, Split(escfld), "escaped item split")
assert.Equal(t, test.expectedItem, p.Item(), "item")
} else { } else {
assert.Equal(t, test.expectedItemFolder, p.Folder()) assert.Equal(t, test.expectedFolder, fld, "dir folder")
assert.Equal(t, test.expectedItem, p.Item()) assert.Equal(t, test.expectedSplit, Split(fld), "dir split")
assert.Equal(t, test.expectedFolder, escfld, "escaped dir folder")
assert.Equal(t, test.expectedSplit, Split(escfld), "escaped dir split")
} }
}) })
} }

View File

@ -201,13 +201,20 @@ func (rp dataLayerResourcePath) lastFolderIdx() int {
} }
// Folder returns the folder segment embedded in the dataLayerResourcePath. // Folder returns the folder segment embedded in the dataLayerResourcePath.
func (rp dataLayerResourcePath) Folder() string { func (rp dataLayerResourcePath) Folder(escape bool) string {
endIdx := rp.lastFolderIdx() endIdx := rp.lastFolderIdx()
if endIdx == 4 { if endIdx == 4 {
return "" return ""
} }
return rp.Builder.join(4, endIdx) fs := rp.Folders()
if !escape {
return join(fs)
}
// builder.String() will escape all individual elements.
return Builder{}.Append(fs...).String()
} }
// Folders returns the individual folder elements embedded in the // Folders returns the individual folder elements embedded in the
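Mirroring the new TestFolder cases above, a sketch of the escaped vs. unescaped forms (same path package assumption; the folder names are illustrative).

package main

import (
	"fmt"

	"github.com/alcionai/corso/src/pkg/path"
)

func main() {
	p, err := path.Builder{}.
		Append("a/", "b").
		ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false)
	if err != nil {
		panic(err)
	}

	// Unescaped: raw display names, ambiguous to re-split.
	fmt.Println(p.Folder(false)) // a//b
	// Escaped: safe to pass back through Split().
	fmt.Println(p.Folder(true)) // a\//b
}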

View File

@ -172,7 +172,7 @@ func (suite *DataLayerResourcePath) TestMailItemNoFolder() {
) )
require.NoError(t, err) require.NoError(t, err)
assert.Empty(t, p.Folder()) assert.Empty(t, p.Folder(false))
assert.Empty(t, p.Folders()) assert.Empty(t, p.Folders())
assert.Equal(t, item, p.Item()) assert.Equal(t, item, p.Item())
}) })
@ -391,7 +391,7 @@ func (suite *DataLayerResourcePath) TestToExchangePathForCategory() {
assert.Equal(t, path.ExchangeService, p.Service()) assert.Equal(t, path.ExchangeService, p.Service())
assert.Equal(t, test.category, p.Category()) assert.Equal(t, test.category, p.Category())
assert.Equal(t, testUser, p.ResourceOwner()) assert.Equal(t, testUser, p.ResourceOwner())
assert.Equal(t, strings.Join(m.expectedFolders, "/"), p.Folder()) assert.Equal(t, strings.Join(m.expectedFolders, "/"), p.Folder(false))
assert.Equal(t, m.expectedFolders, p.Folders()) assert.Equal(t, m.expectedFolders, p.Folders())
assert.Equal(t, m.expectedItem, p.Item()) assert.Equal(t, m.expectedItem, p.Item())
}) })
@ -465,7 +465,7 @@ func (suite *PopulatedDataLayerResourcePath) TestFolder() {
assert.Equal( assert.Equal(
t, t,
strings.Join(m.expectedFolders, "/"), strings.Join(m.expectedFolders, "/"),
suite.paths[m.isItem].Folder(), suite.paths[m.isItem].Folder(false),
) )
}) })
} }
@ -525,7 +525,7 @@ func (suite *PopulatedDataLayerResourcePath) TestAppend() {
return return
} }
assert.Equal(t, test.expectedFolder, newPath.Folder()) assert.Equal(t, test.expectedFolder, newPath.Folder(false))
assert.Equal(t, test.expectedItem, newPath.Item()) assert.Equal(t, test.expectedItem, newPath.Item())
}) })
} }

View File

@ -575,12 +575,12 @@ func (ec exchangeCategory) isLeaf() bool {
return ec == ec.leafCat() return ec == ec.leafCat()
} }
// pathValues transforms a path to a map of identified properties. // pathValues transforms the two paths to maps of identified properties.
// //
// Example: // Example:
// [tenantID, service, userPN, category, mailFolder, mailID] // [tenantID, service, userPN, category, mailFolder, mailID]
// => {exchUser: userPN, exchMailFolder: mailFolder, exchMail: mailID} // => {exchMailFolder: mailFolder, exchMail: mailID}
func (ec exchangeCategory) pathValues(p path.Path) map[categorizer]string { func (ec exchangeCategory) pathValues(repo, location path.Path) (map[categorizer]string, map[categorizer]string) {
var folderCat, itemCat categorizer var folderCat, itemCat categorizer
switch ec { switch ec {
@ -594,13 +594,24 @@ func (ec exchangeCategory) pathValues(p path.Path) map[categorizer]string {
folderCat, itemCat = ExchangeMailFolder, ExchangeMail folderCat, itemCat = ExchangeMailFolder, ExchangeMail
default: default:
return map[categorizer]string{} return map[categorizer]string{}, map[categorizer]string{}
} }
return map[categorizer]string{ rv := map[categorizer]string{
folderCat: p.Folder(), folderCat: repo.Folder(false),
itemCat: p.Item(), itemCat: repo.Item(),
} }
lv := map[categorizer]string{}
if location != nil {
lv = map[categorizer]string{
folderCat: location.Folder(false),
itemCat: location.Item(),
}
}
return rv, lv
} }
// pathKeys returns the path keys recognized by the receiver's leaf type. // pathKeys returns the path keys recognized by the receiver's leaf type.
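A self-contained analogue of the two-map contract above: the repo map carries ID-based values, the location map carries display-name values, and a nil location yields an empty map (keys and values here are illustrative).

package main

import "fmt"

// location is a stand-in for the optional second path argument.
type location struct {
	Folder string
	Item   string
}

func pathValues(repoFolder, repoItem string, loc *location) (map[string]string, map[string]string) {
	rv := map[string]string{"folder": repoFolder, "item": repoItem}

	// A nil location yields an empty map, matching the diff above.
	lv := map[string]string{}
	if loc != nil {
		lv = map[string]string{"folder": loc.Folder, "item": loc.Item}
	}

	return rv, lv
}

func main() {
	rv, lv := pathValues("id1/id2", "mid", &location{"Inbox/Work", "mid"})
	fmt.Println(rv["folder"], "vs", lv["folder"]) // id1/id2 vs Inbox/Work
}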

View File

@ -705,13 +705,16 @@ func (suite *ExchangeSelectorSuite) TestExchangeScope_MatchesInfo() {
func (suite *ExchangeSelectorSuite) TestExchangeScope_MatchesPath() { func (suite *ExchangeSelectorSuite) TestExchangeScope_MatchesPath() {
const ( const (
usr = "userID" usr = "userID"
fID1 = "mf_id_1"
fld1 = "mailFolder" fld1 = "mailFolder"
fID2 = "mf_id_2"
fld2 = "subFolder" fld2 = "subFolder"
mail = "mailID" mail = "mailID"
) )
var ( var (
pth = stubPath(suite.T(), usr, []string{fld1, fld2, mail}, path.EmailCategory) repo = stubPath(suite.T(), usr, []string{fID1, fID2, mail}, path.EmailCategory)
loc = stubPath(suite.T(), usr, []string{fld1, fld2, mail}, path.EmailCategory)
short = "thisisahashofsomekind" short = "thisisahashofsomekind"
es = NewExchangeRestore(Any()) es = NewExchangeRestore(Any())
) )
@ -726,13 +729,18 @@ func (suite *ExchangeSelectorSuite) TestExchangeScope_MatchesPath() {
{"all folders", es.MailFolders(Any()), "", assert.True}, {"all folders", es.MailFolders(Any()), "", assert.True},
{"no folders", es.MailFolders(None()), "", assert.False}, {"no folders", es.MailFolders(None()), "", assert.False},
{"matching folder", es.MailFolders([]string{fld1}), "", assert.True}, {"matching folder", es.MailFolders([]string{fld1}), "", assert.True},
{"matching folder id", es.MailFolders([]string{fID1}), "", assert.True},
{"incomplete matching folder", es.MailFolders([]string{"mail"}), "", assert.False}, {"incomplete matching folder", es.MailFolders([]string{"mail"}), "", assert.False},
{"incomplete matching folder ID", es.MailFolders([]string{"mf_id"}), "", assert.False},
{"non-matching folder", es.MailFolders([]string{"smarf"}), "", assert.False}, {"non-matching folder", es.MailFolders([]string{"smarf"}), "", assert.False},
{"non-matching folder substring", es.MailFolders([]string{fld1 + "_suffix"}), "", assert.False}, {"non-matching folder substring", es.MailFolders([]string{fld1 + "_suffix"}), "", assert.False},
{"non-matching folder id substring", es.MailFolders([]string{fID1 + "_suffix"}), "", assert.False},
{"matching folder prefix", es.MailFolders([]string{fld1}, PrefixMatch()), "", assert.True}, {"matching folder prefix", es.MailFolders([]string{fld1}, PrefixMatch()), "", assert.True},
{"matching folder ID prefix", es.MailFolders([]string{fID1}, PrefixMatch()), "", assert.True},
{"incomplete folder prefix", es.MailFolders([]string{"mail"}, PrefixMatch()), "", assert.False}, {"incomplete folder prefix", es.MailFolders([]string{"mail"}, PrefixMatch()), "", assert.False},
{"matching folder substring", es.MailFolders([]string{"Folder"}), "", assert.False}, {"matching folder substring", es.MailFolders([]string{"Folder"}), "", assert.False},
{"one of multiple folders", es.MailFolders([]string{"smarf", fld2}), "", assert.True}, {"one of multiple folders", es.MailFolders([]string{"smarf", fld2}), "", assert.True},
{"one of multiple folders by ID", es.MailFolders([]string{"smarf", fID2}), "", assert.True},
{"all mail", es.Mails(Any(), Any()), "", assert.True}, {"all mail", es.Mails(Any(), Any()), "", assert.True},
{"no mail", es.Mails(Any(), None()), "", assert.False}, {"no mail", es.Mails(Any(), None()), "", assert.False},
{"matching mail", es.Mails(Any(), []string{mail}), "", assert.True}, {"matching mail", es.Mails(Any(), []string{mail}), "", assert.True},
@ -746,8 +754,12 @@ func (suite *ExchangeSelectorSuite) TestExchangeScope_MatchesPath() {
scopes := setScopesToDefault(test.scope) scopes := setScopesToDefault(test.scope)
var aMatch bool var aMatch bool
for _, scope := range scopes { for _, scope := range scopes {
pv := ExchangeMail.pathValues(pth) repoVals, locVals := ExchangeMail.pathValues(repo, loc)
if matchesPathValues(scope, ExchangeMail, pv, short) { if matchesPathValues(scope, ExchangeMail, repoVals, short) {
aMatch = true
break
}
if matchesPathValues(scope, ExchangeMail, locVals, short) {
aMatch = true aMatch = true
break break
} }
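The updated matcher loop accepts an entry when either map satisfies the scope; a compact, self-contained sketch follows (substring matching stands in for the real scope logic).

package main

import (
	"fmt"
	"strings"
)

// matchesEither mirrors the test loop above: a scope passes when either
// the ID-based repo values or the display-name location values match.
func matchesEither(want string, repoVals, locVals map[string]string) bool {
	match := func(vals map[string]string) bool {
		return strings.Contains(vals["folder"], want)
	}

	return match(repoVals) || match(locVals)
}

func main() {
	repoVals := map[string]string{"folder": "mf_id_1/mf_id_2"}
	locVals := map[string]string{"folder": "mailFolder/subFolder"}

	fmt.Println(matchesEither("mailFolder", repoVals, locVals)) // true, via location
	fmt.Println(matchesEither("mf_id_1", repoVals, locVals))    // true, via repo
}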
@ -833,6 +845,256 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() {
return deets return deets
} }
table := []struct {
name string
deets *details.Details
makeSelector func() *ExchangeRestore
expect []string
}{
{
"no refs",
makeDeets(),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
return er
},
[]string{},
},
{
"contact only",
makeDeets(contact),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
return er
},
[]string{contact},
},
{
"event only",
makeDeets(event),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
return er
},
[]string{event},
},
{
"mail only",
makeDeets(mail),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
return er
},
[]string{mail},
},
{
"all",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
return er
},
[]string{contact, event, mail},
},
{
"only match contact",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.Contacts([]string{"cfld"}, []string{"cid"}))
return er
},
[]string{contact},
},
{
"only match contactInSubFolder",
makeDeets(contactInSubFolder, contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.ContactFolders([]string{"cfld1/cfld2"}))
return er
},
[]string{contactInSubFolder},
},
{
"only match contactInSubFolder by prefix",
makeDeets(contactInSubFolder, contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.ContactFolders([]string{"cfld1/cfld2"}, PrefixMatch()))
return er
},
[]string{contactInSubFolder},
},
{
"only match contactInSubFolder by leaf folder",
makeDeets(contactInSubFolder, contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.ContactFolders([]string{"cfld2"}))
return er
},
[]string{contactInSubFolder},
},
{
"only match event",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.Events([]string{"ecld"}, []string{"eid"}))
return er
},
[]string{event},
},
{
"only match mail",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.Mails([]string{"mfld"}, []string{"mid"}))
return er
},
[]string{mail},
},
{
"exclude contact",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
er.Exclude(er.Contacts([]string{"cfld"}, []string{"cid"}))
return er
},
[]string{event, mail},
},
{
"exclude event",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
er.Exclude(er.Events([]string{"ecld"}, []string{"eid"}))
return er
},
[]string{contact, mail},
},
{
"exclude mail",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
er.Exclude(er.Mails([]string{"mfld"}, []string{"mid"}))
return er
},
[]string{contact, event},
},
{
"filter on mail subject",
func() *details.Details {
ds := makeDeets(mail)
for i := range ds.Entries {
ds.Entries[i].Exchange.Subject = "has a subject"
}
return ds
}(),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
er.Filter(er.MailSubject("subj"))
return er
},
[]string{mail},
},
{
"filter on mail subject multiple input categories",
func() *details.Details {
mds := makeDeets(mail)
for i := range mds.Entries {
mds.Entries[i].Exchange.Subject = "has a subject"
}
ds := makeDeets(contact, event)
ds.Entries = append(ds.Entries, mds.Entries...)
return ds
}(),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
er.Filter(er.MailSubject("subj"))
return er
},
[]string{mail},
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
ctx, flush := tester.NewContext()
defer flush()
sel := test.makeSelector()
results := sel.Reduce(ctx, test.deets, fault.New(true))
paths := results.Paths()
assert.Equal(t, test.expect, paths)
})
}
}
func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce_locationRef() {
var (
contact = stubRepoRef(path.ExchangeService, path.ContactsCategory, "uid", "id5/id6", "cid")
contactLocation = "conts/my_cont"
event = stubRepoRef(path.ExchangeService, path.EventsCategory, "uid", "id1/id2", "eid")
eventLocation = "cal/my_cal"
mail = stubRepoRef(path.ExchangeService, path.EmailCategory, "uid", "id3/id4", "mid")
mailLocation = "inbx/my_mail"
)
makeDeets := func(refs ...string) *details.Details {
deets := &details.Details{
DetailsModel: details.DetailsModel{
Entries: []details.DetailsEntry{},
},
}
for _, r := range refs {
var (
location string
itype = details.UnknownType
)
switch r {
case contact:
itype = details.ExchangeContact
location = contactLocation
case event:
itype = details.ExchangeEvent
location = eventLocation
case mail:
itype = details.ExchangeMail
location = mailLocation
}
deets.Entries = append(deets.Entries, details.DetailsEntry{
RepoRef: r,
LocationRef: location,
ItemInfo: details.ItemInfo{
Exchange: &details.ExchangeInfo{
ItemType: itype,
},
},
})
}
return deets
}
arr := func(s ...string) []string { arr := func(s ...string) []string {
return s return s
} }
@ -898,47 +1160,17 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() {
makeDeets(contact, event, mail), makeDeets(contact, event, mail),
func() *ExchangeRestore { func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"}) er := NewExchangeRestore([]string{"uid"})
er.Include(er.Contacts([]string{"cfld"}, []string{"cid"})) er.Include(er.Contacts([]string{contactLocation}, []string{"cid"}))
return er return er
}, },
arr(contact), arr(contact),
}, },
{
"only match contactInSubFolder",
makeDeets(contactInSubFolder, contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.ContactFolders([]string{"cfld1/cfld2"}))
return er
},
arr(contactInSubFolder),
},
{
"only match contactInSubFolder by prefix",
makeDeets(contactInSubFolder, contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.ContactFolders([]string{"cfld1/cfld2"}, PrefixMatch()))
return er
},
arr(contactInSubFolder),
},
{
"only match contactInSubFolder by leaf folder",
makeDeets(contactInSubFolder, contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.ContactFolders([]string{"cfld2"}))
return er
},
arr(contactInSubFolder),
},
{ {
"only match event", "only match event",
makeDeets(contact, event, mail), makeDeets(contact, event, mail),
func() *ExchangeRestore { func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"}) er := NewExchangeRestore([]string{"uid"})
er.Include(er.Events([]string{"ecld"}, []string{"eid"})) er.Include(er.Events([]string{eventLocation}, []string{"eid"}))
return er return er
}, },
arr(event), arr(event),
@ -948,7 +1180,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() {
makeDeets(contact, event, mail), makeDeets(contact, event, mail),
func() *ExchangeRestore { func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"}) er := NewExchangeRestore([]string{"uid"})
er.Include(er.Mails([]string{"mfld"}, []string{"mid"})) er.Include(er.Mails([]string{mailLocation}, []string{"mid"}))
return er return er
}, },
arr(mail), arr(mail),
@ -959,7 +1191,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() {
func() *ExchangeRestore { func() *ExchangeRestore {
er := NewExchangeRestore(Any()) er := NewExchangeRestore(Any())
er.Include(er.AllData()) er.Include(er.AllData())
er.Exclude(er.Contacts([]string{"cfld"}, []string{"cid"})) er.Exclude(er.Contacts([]string{contactLocation}, []string{"cid"}))
return er return er
}, },
arr(event, mail), arr(event, mail),
@ -970,7 +1202,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() {
func() *ExchangeRestore { func() *ExchangeRestore {
er := NewExchangeRestore(Any()) er := NewExchangeRestore(Any())
er.Include(er.AllData()) er.Include(er.AllData())
er.Exclude(er.Events([]string{"ecld"}, []string{"eid"})) er.Exclude(er.Events([]string{eventLocation}, []string{"eid"}))
return er return er
}, },
arr(contact, mail), arr(contact, mail),
@ -981,7 +1213,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() {
func() *ExchangeRestore { func() *ExchangeRestore {
er := NewExchangeRestore(Any()) er := NewExchangeRestore(Any())
er.Include(er.AllData()) er.Include(er.AllData())
er.Exclude(er.Mails([]string{"mfld"}, []string{"mid"})) er.Exclude(er.Mails([]string{mailLocation}, []string{"mid"}))
return er return er
}, },
arr(contact, event), arr(contact, event),
@ -1128,9 +1360,12 @@ func (suite *ExchangeSelectorSuite) TestPasses() {
} }
for _, test := range table { for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) { suite.T().Run(test.name, func(t *testing.T) {
repoVals, locVals := cat.pathValues(pth, pth)
result := passes( result := passes(
cat, cat,
cat.pathValues(pth), repoVals,
locVals,
entry, entry,
test.excludes, test.excludes,
test.filters, test.filters,
@ -1233,17 +1468,17 @@ func (suite *ExchangeSelectorSuite) TestExchangeCategory_PathValues() {
contactPath := stubPath(t, "user", []string{"cfolder", "contactitem"}, path.ContactsCategory) contactPath := stubPath(t, "user", []string{"cfolder", "contactitem"}, path.ContactsCategory)
contactMap := map[categorizer]string{ contactMap := map[categorizer]string{
ExchangeContactFolder: contactPath.Folder(), ExchangeContactFolder: contactPath.Folder(false),
ExchangeContact: contactPath.Item(), ExchangeContact: contactPath.Item(),
} }
eventPath := stubPath(t, "user", []string{"ecalendar", "eventitem"}, path.EventsCategory) eventPath := stubPath(t, "user", []string{"ecalendar", "eventitem"}, path.EventsCategory)
eventMap := map[categorizer]string{ eventMap := map[categorizer]string{
ExchangeEventCalendar: eventPath.Folder(), ExchangeEventCalendar: eventPath.Folder(false),
ExchangeEvent: eventPath.Item(), ExchangeEvent: eventPath.Item(),
} }
mailPath := stubPath(t, "user", []string{"mfolder", "mailitem"}, path.EmailCategory) mailPath := stubPath(t, "user", []string{"mfolder", "mailitem"}, path.EmailCategory)
mailMap := map[categorizer]string{ mailMap := map[categorizer]string{
ExchangeMailFolder: mailPath.Folder(), ExchangeMailFolder: mailPath.Folder(false),
ExchangeMail: mailPath.Item(), ExchangeMail: mailPath.Item(),
} }
@ -1258,7 +1493,9 @@ func (suite *ExchangeSelectorSuite) TestExchangeCategory_PathValues() {
} }
for _, test := range table { for _, test := range table {
suite.T().Run(string(test.cat), func(t *testing.T) { suite.T().Run(string(test.cat), func(t *testing.T) {
assert.Equal(t, test.cat.pathValues(test.path), test.expect) r, l := test.cat.pathValues(test.path, test.path)
assert.Equal(t, test.expect, r)
assert.Equal(t, test.expect, l)
}) })
} }
} }

View File

@ -55,11 +55,13 @@ func (mc mockCategorizer) isLeaf() bool {
return mc == leafCatStub return mc == leafCatStub
} }
func (mc mockCategorizer) pathValues(pth path.Path) map[categorizer]string { func (mc mockCategorizer) pathValues(repo, location path.Path) (map[categorizer]string, map[categorizer]string) {
return map[categorizer]string{ pv := map[categorizer]string{
rootCatStub: "root", rootCatStub: "root",
leafCatStub: "leaf", leafCatStub: "leaf",
} }
return pv, pv
} }
func (mc mockCategorizer) pathKeys() []categorizer { func (mc mockCategorizer) pathKeys() []categorizer {

View File

@ -371,19 +371,30 @@ func (c oneDriveCategory) isLeaf() bool {
return c == OneDriveItem return c == OneDriveItem
} }
// pathValues transforms a path to a map of identified properties. // pathValues transforms the two paths to maps of identified properties.
// //
// Example: // Example:
// [tenantID, service, userPN, category, folder, fileID] // [tenantID, service, userPN, category, folder, fileID]
// => {odUser: userPN, odFolder: folder, odFileID: fileID} // => {odFolder: folder, odFileID: fileID}
func (c oneDriveCategory) pathValues(p path.Path) map[categorizer]string { func (c oneDriveCategory) pathValues(repo, location path.Path) (map[categorizer]string, map[categorizer]string) {
// Ignore `drives/<driveID>/root:` for folder comparison // Ignore `drives/<driveID>/root:` for folder comparison
folder := path.Builder{}.Append(p.Folders()...).PopFront().PopFront().PopFront().String() rFld := path.Builder{}.Append(repo.Folders()...).PopFront().PopFront().PopFront().String()
rv := map[categorizer]string{
return map[categorizer]string{ OneDriveFolder: rFld,
OneDriveFolder: folder, OneDriveItem: repo.Item(),
OneDriveItem: p.Item(),
} }
lv := map[categorizer]string{}
if location != nil {
lFld := path.Builder{}.Append(location.Folders()...).PopFront().PopFront().PopFront().String()
lv = map[categorizer]string{
OneDriveFolder: lFld,
OneDriveItem: location.Item(),
}
}
return rv, lv
} }
// pathKeys returns the path keys recognized by the receiver's leaf type. // pathKeys returns the path keys recognized by the receiver's leaf type.
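For illustration, a minimal standalone sketch of the prefix-trimming above; the helper mirrors the three PopFront calls that drop `drives`, `<driveID>`, and `root:` from the folder list. The names and values here are illustrative, not part of the package:

package main

import (
	"fmt"
	"strings"
)

// trimDrivePrefix models the Builder.PopFront chain above: it drops the
// three leading elements (`drives`, the drive ID, and `root:`) so only
// the user-visible folder path takes part in scope comparison.
func trimDrivePrefix(folders []string) string {
	if len(folders) <= 3 {
		return ""
	}

	return strings.Join(folders[3:], "/")
}

func main() {
	folders := []string{"drives", "driveID", "root:", "work", "reports"}
	fmt.Println(trimDrivePrefix(folders)) // work/reports
}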

View File

@ -262,7 +262,9 @@ func (suite *OneDriveSelectorSuite) TestOneDriveCategory_PathValues() {
OneDriveItem: "file", OneDriveItem: "file",
} }
assert.Equal(t, expected, OneDriveItem.pathValues(filePath)) r, l := OneDriveItem.pathValues(filePath, filePath)
assert.Equal(t, expected, r)
assert.Equal(t, expected, l)
} }
func (suite *OneDriveSelectorSuite) TestOneDriveScope_MatchesInfo() { func (suite *OneDriveSelectorSuite) TestOneDriveScope_MatchesInfo() {

View File

@ -77,17 +77,18 @@ type (
// eg: in a resourceOwner/folder/item structure, the item is the leaf. // eg: in a resourceOwner/folder/item structure, the item is the leaf.
isLeaf() bool isLeaf() bool
// pathValues should produce a map of category:string pairs populated by extracting // pathValues takes two variants of the repoRef path: the standard repoRef, and one
// values out of the path.Path struct. // amended to substitute the locationRef directories (if available). It should produce
// two maps of category:string pairs populated by extracting the values out of
// each path.Path.
// //
// Ex: given a path builder like ["tenant", "service", "resource", "dataType", "folder", "itemID"], // Ex: given a path builder like ["tenant", "service", "resource", "dataType", "folder", "itemID"],
// the func should use the path to construct a map similar to this: // the func should use the path to construct a map similar to this:
// { // {
// rootCat: resource,
// folderCat: folder, // folderCat: folder,
// itemCat: itemID, // itemCat: itemID,
// } // }
pathValues(path.Path) map[categorizer]string pathValues(path.Path, path.Path) (map[categorizer]string, map[categorizer]string)
// pathKeys produces a list of categorizers that can be used as keys in the pathValues // pathKeys produces a list of categorizers that can be used as keys in the pathValues
// map. The combination of the two funcs generically interprets the context of the // map. The combination of the two funcs generically interprets the context of the
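As a rough sketch of the revised contract: an implementer returns one map per variant and leaves the location map empty when no locationRef path is available. demoCategory, demoFolder, and demoItem are hypothetical stand-ins; only the return structure mirrors the interface above.

// Hypothetical implementation shape, assuming demoCategory satisfies
// categoryT and demoFolder/demoItem are its categorizers.
func (c demoCategory) pathValues(repo, location path.Path) (map[categorizer]string, map[categorizer]string) {
	rv := map[categorizer]string{
		demoFolder: repo.Folder(false),
		demoItem:   repo.Item(),
	}

	// the location map stays empty when no locationRef was recorded,
	// so matching falls back to the repo values alone.
	lv := map[categorizer]string{}
	if location != nil {
		lv = map[categorizer]string{
			demoFolder: location.Folder(false),
			demoItem:   location.Item(),
		}
	}

	return rv, lv
}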
@ -317,6 +318,31 @@ func reduce[T scopeT, C categoryT](
continue continue
} }
var locationPath path.Path
// if the details entry has a locationRef specified, use those folders in place
// of the repoRef folders, so that scopes can match against the display names
// instead of container IDs.
if len(ent.LocationRef) > 0 {
pb, err := path.Builder{}.SplitUnescapeAppend(ent.LocationRef)
if err != nil {
errs.Add(clues.Wrap(err, "transforming locationRef to path").WithClues(ctx))
continue
}
locationPath, err = pb.Append(repoPath.Item()).
ToDataLayerPath(
repoPath.Tenant(),
repoPath.ResourceOwner(),
repoPath.Service(),
repoPath.Category(),
true)
if err != nil {
errs.Add(clues.Wrap(err, "transforming locationRef to path").WithClues(ctx))
continue
}
}
// first check, every entry needs to match the selector's resource owners. // first check, every entry needs to match the selector's resource owners.
if !matchesResourceOwner.Compare(repoPath.ResourceOwner()) { if !matchesResourceOwner.Compare(repoPath.ResourceOwner()) {
continue continue
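To make the substitution concrete, a sketch of the rebuild above with illustrative values; the locationRef string is hypothetical, and the error handling is elided since reduce() wraps and records failures rather than returning them:

// LocationRef holds escaped display names, e.g. "Calendars/Work Events",
// while the repoRef folders hold container IDs. The builder unescapes
// the locationRef, re-appends the item, and produces a full data-layer
// path so both variants can be compared by the same scope logic.
pb, err := path.Builder{}.SplitUnescapeAppend("Calendars/Work Events")
if err != nil {
	// reduce() wraps and records the error, then skips the entry
}

locationPath, err := pb.Append(repoPath.Item()).ToDataLayerPath(
	repoPath.Tenant(),
	repoPath.ResourceOwner(),
	repoPath.Service(),
	repoPath.Category(),
	true)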
@ -334,7 +360,9 @@ func reduce[T scopeT, C categoryT](
continue continue
} }
passed := passes(dc, dc.pathValues(repoPath), *ent, e, f, i) rv, lv := dc.pathValues(repoPath, locationPath)
passed := passes(dc, rv, lv, *ent, e, f, i)
if passed { if passed {
ents = append(ents, *ent) ents = append(ents, *ent)
} }
@ -379,7 +407,7 @@ func scopesByCategory[T scopeT, C categoryT](
// if the path is included, passes filters, and not excluded. // if the path is included, passes filters, and not excluded.
func passes[T scopeT, C categoryT]( func passes[T scopeT, C categoryT](
cat C, cat C,
pathValues map[categorizer]string, repoValues, locationValues map[categorizer]string,
entry details.DetailsEntry, entry details.DetailsEntry,
excs, filts, incs []T, excs, filts, incs []T,
) bool { ) bool {
@ -395,7 +423,7 @@ func passes[T scopeT, C categoryT](
var included bool var included bool
for _, inc := range incs { for _, inc := range incs {
if matchesEntry(inc, cat, pathValues, entry) { if matchesEntry(inc, cat, repoValues, locationValues, entry) {
included = true included = true
break break
} }
@ -408,14 +436,14 @@ func passes[T scopeT, C categoryT](
// all filters must pass // all filters must pass
for _, filt := range filts { for _, filt := range filts {
if !matchesEntry(filt, cat, pathValues, entry) { if !matchesEntry(filt, cat, repoValues, locationValues, entry) {
return false return false
} }
} }
// any matching exclusion means failure // any matching exclusion means failure
for _, exc := range excs { for _, exc := range excs {
if matchesEntry(exc, cat, pathValues, entry) { if matchesEntry(exc, cat, repoValues, locationValues, entry) {
return false return false
} }
} }
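The precedence in passes() can be summarized with a standalone model: at least one include must match, every filter must match, and no exclusion may match. This is a sketch under the assumption that zero includes means no match; matcher is an illustrative stand-in for scope comparison against an entry's path values.

package main

import "fmt"

type matcher func(values map[string]string) bool

// passesModel mirrors the ordering above: includes first, then filters,
// then exclusions.
func passesModel(values map[string]string, incs, filts, excs []matcher) bool {
	included := false
	for _, inc := range incs {
		if inc(values) {
			included = true
			break
		}
	}
	if !included {
		return false
	}

	for _, f := range filts {
		if !f(values) {
			return false
		}
	}

	for _, e := range excs {
		if e(values) {
			return false
		}
	}

	return true
}

func main() {
	vals := map[string]string{"folder": "inbox"}
	isInbox := matcher(func(v map[string]string) bool { return v["folder"] == "inbox" })

	fmt.Println(passesModel(vals, []matcher{isInbox}, nil, nil))                // true
	fmt.Println(passesModel(vals, []matcher{isInbox}, nil, []matcher{isInbox})) // false: excluded
}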
@ -428,7 +456,7 @@ func passes[T scopeT, C categoryT](
func matchesEntry[T scopeT, C categoryT]( func matchesEntry[T scopeT, C categoryT](
sc T, sc T,
cat C, cat C,
pathValues map[categorizer]string, repoValues, locationValues map[categorizer]string,
entry details.DetailsEntry, entry details.DetailsEntry,
) bool { ) bool {
// filterCategory requires matching against service-specific info values // filterCategory requires matching against service-specific info values
@ -436,7 +464,11 @@ func matchesEntry[T scopeT, C categoryT](
return sc.matchesInfo(entry.ItemInfo) return sc.matchesInfo(entry.ItemInfo)
} }
return matchesPathValues(sc, cat, pathValues, entry.ShortRef) if len(locationValues) > 0 && matchesPathValues(sc, cat, locationValues, entry.ShortRef) {
return true
}
return matchesPathValues(sc, cat, repoValues, entry.ShortRef)
} }
// matchesPathValues will check whether the pathValues have matching entries // matchesPathValues will check whether the pathValues have matching entries
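The net effect of the fallback in matchesEntry, modeled standalone: an entry stored under a container ID still matches a scope built from a display name, because the location values are consulted first. All values below are illustrative.

package main

import "fmt"

// matchesEither models the new fallback: try the display-name (location)
// values first, then the ID-based (repo) values.
func matchesEither(want string, repoVals, locVals map[string]string) bool {
	if v, ok := locVals["folder"]; ok && v == want {
		return true
	}

	return repoVals["folder"] == want
}

func main() {
	repoVals := map[string]string{"folder": "calendarID123"} // container ID
	locVals := map[string]string{"folder": "Work Events"}    // display name

	fmt.Println(matchesEither("Work Events", repoVals, locVals))   // true
	fmt.Println(matchesEither("calendarID123", repoVals, locVals)) // true
}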

View File

@ -290,6 +290,47 @@ func (suite *SelectorScopesSuite) TestReduce() {
} }
} }
func (suite *SelectorScopesSuite) TestReduce_locationRef() {
deets := func() details.Details {
return details.Details{
DetailsModel: details.DetailsModel{
Entries: []details.DetailsEntry{
{
RepoRef: stubRepoRef(
pathServiceStub,
pathCatStub,
rootCatStub.String(),
"stub",
leafCatStub.String(),
),
LocationRef: "a/b/c//defg",
},
},
},
}
}
dataCats := map[path.CategoryType]mockCategorizer{
pathCatStub: rootCatStub,
}
for _, test := range reduceTestTable {
suite.T().Run(test.name, func(t *testing.T) {
ctx, flush := tester.NewContext()
defer flush()
ds := deets()
result := reduce[mockScope](
ctx,
&ds,
test.sel().Selector,
dataCats,
fault.New(true))
require.NotNil(t, result)
assert.Len(t, result.Entries, test.expectLen)
})
}
}
func (suite *SelectorScopesSuite) TestScopesByCategory() { func (suite *SelectorScopesSuite) TestScopesByCategory() {
t := suite.T() t := suite.T()
s1 := stubScope("") s1 := stubScope("")
@ -309,7 +350,7 @@ func (suite *SelectorScopesSuite) TestScopesByCategory() {
func (suite *SelectorScopesSuite) TestPasses() { func (suite *SelectorScopesSuite) TestPasses() {
cat := rootCatStub cat := rootCatStub
pth := stubPath(suite.T(), "uid", []string{"fld"}, path.EventsCategory) pth := stubPath(suite.T(), "uid", []string{"fld"}, path.EventsCategory)
pathVals := cat.pathValues(pth) repoVals, locVals := cat.pathValues(pth, pth)
entry := details.DetailsEntry{} entry := details.DetailsEntry{}
for _, test := range reduceTestTable { for _, test := range reduceTestTable {
@ -320,7 +361,8 @@ func (suite *SelectorScopesSuite) TestPasses() {
incl := toMockScope(sel.Includes) incl := toMockScope(sel.Includes)
result := passes( result := passes(
cat, cat,
pathVals, repoVals,
locVals,
entry, entry,
excl, filt, incl) excl, filt, incl)
test.expectPasses(t, result) test.expectPasses(t, result)

View File

@ -48,7 +48,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
selFunc: func() selectors.Reducer { selFunc: func() selectors.Reducer {
sel := selectors.NewExchangeRestore(selectors.Any()) sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.MailFolders( sel.Include(sel.MailFolders(
[]string{testdata.ExchangeEmailInboxPath.Folder()}, []string{testdata.ExchangeEmailInboxPath.Folder(false)},
)) ))
return sel return sel
@ -177,7 +177,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
selFunc: func() selectors.Reducer { selFunc: func() selectors.Reducer {
sel := selectors.NewExchangeRestore(selectors.Any()) sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.MailFolders( sel.Include(sel.MailFolders(
[]string{testdata.ExchangeEmailBasePath.Folder()}, []string{testdata.ExchangeEmailBasePath.Folder(false)},
)) ))
return sel return sel
@ -192,7 +192,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
selFunc: func() selectors.Reducer { selFunc: func() selectors.Reducer {
sel := selectors.NewExchangeRestore(selectors.Any()) sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.MailFolders( sel.Include(sel.MailFolders(
[]string{testdata.ExchangeEmailBasePath.Folder()}, []string{testdata.ExchangeEmailBasePath.Folder(false)},
selectors.PrefixMatch(), // force prefix matching selectors.PrefixMatch(), // force prefix matching
)) ))
@ -205,7 +205,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
selFunc: func() selectors.Reducer { selFunc: func() selectors.Reducer {
sel := selectors.NewExchangeRestore(selectors.Any()) sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.MailFolders( sel.Include(sel.MailFolders(
[]string{testdata.ExchangeEmailInboxPath.Folder()}, []string{testdata.ExchangeEmailInboxPath.Folder(false)},
)) ))
return sel return sel
@ -217,7 +217,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
selFunc: func() selectors.Reducer { selFunc: func() selectors.Reducer {
sel := selectors.NewExchangeRestore(selectors.Any()) sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.ContactFolders( sel.Include(sel.ContactFolders(
[]string{testdata.ExchangeContactsBasePath.Folder()}, []string{testdata.ExchangeContactsBasePath.Folder(false)},
)) ))
return sel return sel
@ -229,7 +229,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
selFunc: func() selectors.Reducer { selFunc: func() selectors.Reducer {
sel := selectors.NewExchangeRestore(selectors.Any()) sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.ContactFolders( sel.Include(sel.ContactFolders(
[]string{testdata.ExchangeContactsRootPath.Folder()}, []string{testdata.ExchangeContactsRootPath.Folder(false)},
)) ))
return sel return sel
@ -242,7 +242,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
selFunc: func() selectors.Reducer { selFunc: func() selectors.Reducer {
sel := selectors.NewExchangeRestore(selectors.Any()) sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.EventCalendars( sel.Include(sel.EventCalendars(
[]string{testdata.ExchangeEventsBasePath.Folder()}, []string{testdata.ExchangeEventsBasePath.Folder(false)},
)) ))
return sel return sel
@ -254,7 +254,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
selFunc: func() selectors.Reducer { selFunc: func() selectors.Reducer {
sel := selectors.NewExchangeRestore(selectors.Any()) sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.EventCalendars( sel.Include(sel.EventCalendars(
[]string{testdata.ExchangeEventsRootPath.Folder()}, []string{testdata.ExchangeEventsRootPath.Folder(false)},
)) ))
return sel return sel

View File

@ -423,12 +423,12 @@ func (c sharePointCategory) isLeaf() bool {
return c == c.leafCat() return c == c.leafCat()
} }
// pathValues transforms a path to a map of identified properties. // pathValues transforms the two paths to maps of identified properties.
// //
// Example: // Example:
// [tenantID, service, siteID, category, folder, itemID] // [tenantID, service, siteID, category, folder, itemID]
// => {spSite: siteID, spFolder: folder, spItemID: itemID} // => {spFolder: folder, spItemID: itemID}
func (c sharePointCategory) pathValues(p path.Path) map[categorizer]string { func (c sharePointCategory) pathValues(repo, location path.Path) (map[categorizer]string, map[categorizer]string) {
var folderCat, itemCat categorizer var folderCat, itemCat categorizer
switch c { switch c {
@ -439,13 +439,24 @@ func (c sharePointCategory) pathValues(p path.Path) map[categorizer]string {
case SharePointPage, SharePointPageFolder: case SharePointPage, SharePointPageFolder:
folderCat, itemCat = SharePointPageFolder, SharePointPage folderCat, itemCat = SharePointPageFolder, SharePointPage
default: default:
return map[categorizer]string{} return map[categorizer]string{}, map[categorizer]string{}
} }
return map[categorizer]string{ rv := map[categorizer]string{
folderCat: p.Folder(), folderCat: repo.Folder(false),
itemCat: p.Item(), itemCat: repo.Item(),
} }
lv := map[categorizer]string{}
if location != nil {
lv = map[categorizer]string{
folderCat: location.Folder(false),
itemCat: location.Item(),
}
}
return rv, lv
} }
// pathKeys returns the path keys recognized by the receiver's leaf type. // pathKeys returns the path keys recognized by the receiver's leaf type.
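A hedged usage sketch of the nil-location guard above: callers with no recorded locationRef pass a nil location and receive an empty second map, so matching falls back to the repo values alone. The variable names are illustrative.

// rv carries the ID-based repo values; lv stays empty for a nil location.
rv, lv := SharePointPage.pathValues(repoPath, nil)

if len(lv) == 0 {
	// no display-name data recorded; comparisons use rv only
}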

View File

@ -346,10 +346,11 @@ func (suite *SharePointSelectorSuite) TestSharePointCategory_PathValues() {
"tenant", "tenant",
"site", "site",
test.sc.PathType(), test.sc.PathType(),
true, true)
)
require.NoError(t, err) require.NoError(t, err)
assert.Equal(t, test.expected, test.sc.pathValues(itemPath)) r, l := test.sc.pathValues(itemPath, itemPath)
assert.Equal(t, test.expected, r)
assert.Equal(t, test.expected, l)
}) })
} }
} }