add locationRef to details (#2427)

## Description

Adds a new reference to the details entry: location-ref.
The location holds the human-readable version of the
item's location in whatever M365 service sourced the
item. Hookup is incomplete; following PRs will fill out
the functionality.

Also adds a LocationPather interface to data_collections
to pass this data back and forth between producers and
consumers.

Should be safe to merge into main.

## Does this PR need a docs update or release note?

- [x] 🕐 Yes, but in a later PR

## Type of change

- [x] 🌻 Feature
- [x] 🐛 Bugfix

## Issue(s)

* #2423

## Test Plan

- [x]  Unit test
This commit is contained in:
Keepers 2023-02-13 13:19:04 -07:00 committed by GitHub
parent 9695afbc8d
commit 52455356e5
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
61 changed files with 2280 additions and 1034 deletions

View File

@ -10,11 +10,15 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Fixed
- Support for item.Attachment:Mail restore
- Errors from duplicate names in Exchange Calendars
### Changed
- When using Restore and Details on Exchange Calendars, the `--event-calendar` flag can now identify calendars by either a Display Name or a Microsoft 365 ID.
- Exchange Calendars storage entries now construct their paths using container IDs instead of display names. This fixes cases where duplicate display names caused system failures.
### Known Issues
- Nested attachments are currently not restored due to an [issue](https://github.com/microsoft/kiota-serialization-json-go/issues/61) discovered in the Graph APIs
- Breaking changes to Exchange Calendar backups.
## [v0.3.0] (alpha) - 2023-02-07

View File

@ -137,14 +137,14 @@ var (
Name: "EmailsFolderPrefixMatch",
Expected: testdata.ExchangeEmailItems,
Opts: utils.ExchangeOpts{
EmailFolder: []string{testdata.ExchangeEmailInboxPath.Folder()},
EmailFolder: []string{testdata.ExchangeEmailInboxPath.Folder(false)},
},
},
{
Name: "EmailsFolderPrefixMatchTrailingSlash",
Expected: testdata.ExchangeEmailItems,
Opts: utils.ExchangeOpts{
EmailFolder: []string{testdata.ExchangeEmailInboxPath.Folder() + "/"},
EmailFolder: []string{testdata.ExchangeEmailInboxPath.Folder(false) + "/"},
},
},
{
@ -154,7 +154,7 @@ var (
testdata.ExchangeEmailItems[2],
},
Opts: utils.ExchangeOpts{
EmailFolder: []string{testdata.ExchangeEmailBasePath2.Folder()},
EmailFolder: []string{testdata.ExchangeEmailBasePath2.Folder(false)},
},
},
{
@ -164,7 +164,7 @@ var (
testdata.ExchangeEmailItems[2],
},
Opts: utils.ExchangeOpts{
EmailFolder: []string{testdata.ExchangeEmailBasePath2.Folder() + "/"},
EmailFolder: []string{testdata.ExchangeEmailBasePath2.Folder(false) + "/"},
},
},
{

View File

@ -172,7 +172,7 @@ func buildCollections(
return nil, err
}
mc := mockconnector.NewMockExchangeCollection(pth, len(c.items))
mc := mockconnector.NewMockExchangeCollection(pth, pth, len(c.items))
for i := 0; i < len(c.items); i++ {
mc.Names[i] = c.items[i].name

View File

@ -161,39 +161,6 @@ func (suite *ExchangeServiceSuite) TestOptionsForContacts() {
}
}
// TestGraphQueryFunctions verifies if Query functions APIs
// through Microsoft Graph are functional
func (suite *ExchangeServiceSuite) TestGraphQueryFunctions() {
ctx, flush := tester.NewContext()
defer flush()
c, err := NewClient(suite.credentials)
require.NoError(suite.T(), err)
userID := tester.M365UserID(suite.T())
tests := []struct {
name string
function GraphQuery
}{
{
name: "GraphQuery: Get All ContactFolders",
function: c.Contacts().GetAllContactFolderNamesForUser,
},
{
name: "GraphQuery: Get All Calendars for User",
function: c.Events().GetAllCalendarNamesForUser,
},
}
for _, test := range tests {
suite.T().Run(test.name, func(t *testing.T) {
response, err := test.function(ctx, userID)
assert.NoError(t, err)
assert.NotNil(t, response)
})
}
}
//nolint:lll
var stubHTMLContent = "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none\">\r\n<!--\r\np\r\n\t{margin-top:0;\r\n\tmargin-bottom:0}\r\n-->\r\n</style></head><body dir=\"ltr\"><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\">Happy New Year,</div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><br></div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\">In accordance with TPS report guidelines, there have been questions about how to address our activities SharePoint Cover page. Do you believe this is the best picture?&nbsp;</div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><br></div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><img class=\"FluidPluginCopy ContentPasted0 w-2070 h-1380\" size=\"5854817\" data-outlook-trace=\"F:1|T:1\" src=\"cid:85f4faa3-9851-40c7-ba0a-e63dce1185f9\" style=\"max-width:100%\"><br></div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><br></div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\">Let me know if this meets our culture requirements.</div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><br></div><div 
class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\">Warm Regards,</div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><br></div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\">Dustin</div></body></html>"

View File

@ -80,28 +80,6 @@ func (c Contacts) GetItem(
return cont, ContactInfo(cont), nil
}
// GetAllContactFolderNamesForUser is a GraphQuery function for getting
// ContactFolderId and display names for contacts. All other information is omitted.
// Does not return the default Contact Folder
func (c Contacts) GetAllContactFolderNamesForUser(
ctx context.Context,
user string,
) (serialization.Parsable, error) {
options, err := optionsForContactFolders([]string{"displayName", "parentFolderId"})
if err != nil {
return nil, err
}
var resp models.ContactFolderCollectionResponseable
err = graph.RunWithRetry(func() error {
resp, err = c.stable.Client().UsersById(user).ContactFolders().Get(ctx, options)
return err
})
return resp, err
}
func (c Contacts) GetContainerByID(
ctx context.Context,
userID, dirID string,
@ -169,10 +147,8 @@ func (c Contacts) EnumerateContainers(
continue
}
temp := graph.NewCacheFolder(fold, nil)
err = fn(temp)
if err != nil {
temp := graph.NewCacheFolder(fold, nil, nil)
if err := fn(temp); err != nil {
errs = multierror.Append(err, errs)
continue
}

View File

@ -144,25 +144,6 @@ func (c Events) GetItem(
return event, EventInfo(event), nil
}
func (c Client) GetAllCalendarNamesForUser(
ctx context.Context,
user string,
) (serialization.Parsable, error) {
options, err := optionsForCalendars([]string{"name", "owner"})
if err != nil {
return nil, err
}
var resp models.CalendarCollectionResponseable
err = graph.RunWithRetry(func() error {
resp, err = c.stable.Client().UsersById(user).Calendars().Get(ctx, options)
return err
})
return resp, err
}
// EnumerateContainers iterates through all of the users current
// calendars, converting each to a graph.CacheFolder, and
// calling fn(cf) on each one. If fn(cf) errors, the error is
@ -210,10 +191,11 @@ func (c Events) EnumerateContainers(
continue
}
temp := graph.NewCacheFolder(cd, path.Builder{}.Append(*cd.GetDisplayName()))
err = fn(temp)
if err != nil {
temp := graph.NewCacheFolder(
cd,
path.Builder{}.Append(*cd.GetId()), // storage path
path.Builder{}.Append(*cd.GetDisplayName())) // display location
if err := fn(temp); err != nil {
errs = multierror.Append(err, errs)
continue
}

View File

@ -198,8 +198,7 @@ func (c Mail) EnumerateContainers(
}
for _, v := range resp.GetValue() {
temp := graph.NewCacheFolder(v, nil)
temp := graph.NewCacheFolder(v, nil, nil)
if err := fn(temp); err != nil {
errs = multierror.Append(errs, errors.Wrap(err, "iterating mail folders delta"))
continue

View File

@ -135,27 +135,6 @@ func optionsForCalendarsByID(moreOps []string) (
return options, nil
}
// optionsForContactFolders places allowed options for exchange.ContactFolder object
// @return is first call in ContactFolders().GetWithRequestConfigurationAndResponseHandler
func optionsForContactFolders(moreOps []string) (
*users.ItemContactFoldersRequestBuilderGetRequestConfiguration,
error,
) {
selecting, err := buildOptions(moreOps, fieldsForFolders)
if err != nil {
return nil, err
}
requestParameters := &users.ItemContactFoldersRequestBuilderGetQueryParameters{
Select: selecting,
}
options := &users.ItemContactFoldersRequestBuilderGetRequestConfiguration{
QueryParameters: requestParameters,
}
return options, nil
}
func optionsForContactFolderByID(moreOps []string) (
*users.ItemContactFoldersContactFolderItemRequestBuilderGetRequestConfiguration,
error,

View File

@ -29,8 +29,10 @@ func (cfc *contactFolderCache) populateContactRoot(
return support.ConnectorStackErrorTraceWrap(err, "fetching root folder")
}
temp := graph.NewCacheFolder(f, path.Builder{}.Append(baseContainerPath...))
temp := graph.NewCacheFolder(
f,
path.Builder{}.Append(baseContainerPath...), // storage path
path.Builder{}.Append(baseContainerPath...)) // display location
if err := cfc.addFolder(temp); err != nil {
return errors.Wrap(err, "adding resolver dir")
}
@ -56,7 +58,7 @@ func (cfc *contactFolderCache) Populate(
return errors.Wrap(err, "enumerating containers")
}
if err := cfc.populatePaths(ctx); err != nil {
if err := cfc.populatePaths(ctx, false); err != nil {
return errors.Wrap(err, "populating paths")
}

View File

@ -51,38 +51,52 @@ type containerResolver struct {
func (cr *containerResolver) IDToPath(
ctx context.Context,
folderID string,
) (*path.Builder, error) {
return cr.idToPath(ctx, folderID, 0)
useIDInPath bool,
) (*path.Builder, *path.Builder, error) {
return cr.idToPath(ctx, folderID, 0, useIDInPath)
}
func (cr *containerResolver) idToPath(
ctx context.Context,
folderID string,
depth int,
) (*path.Builder, error) {
useIDInPath bool,
) (*path.Builder, *path.Builder, error) {
if depth >= maxIterations {
return nil, errors.New("path contains cycle or is too tall")
return nil, nil, errors.New("path contains cycle or is too tall")
}
c, ok := cr.cache[folderID]
if !ok {
return nil, errors.Errorf("folder %s not cached", folderID)
return nil, nil, errors.Errorf("folder %s not cached", folderID)
}
p := c.Path()
if p != nil {
return p, nil
return p, c.Location(), nil
}
parentPath, err := cr.idToPath(ctx, *c.GetParentFolderId(), depth+1)
parentPath, parentLoc, err := cr.idToPath(ctx, *c.GetParentFolderId(), depth+1, useIDInPath)
if err != nil {
return nil, errors.Wrap(err, "retrieving parent folder")
return nil, nil, errors.Wrap(err, "retrieving parent folder")
}
fullPath := parentPath.Append(*c.GetDisplayName())
toAppend := *c.GetDisplayName()
if useIDInPath {
toAppend = *c.GetId()
}
fullPath := parentPath.Append(toAppend)
c.SetPath(fullPath)
return fullPath, nil
var locPath *path.Builder
if parentLoc != nil {
locPath = parentLoc.Append(*c.GetDisplayName())
c.SetLocation(locPath)
}
return fullPath, locPath, nil
}
// PathInCache utility function to return m365ID of folder if the path.Folders
@ -93,13 +107,13 @@ func (cr *containerResolver) PathInCache(pathString string) (string, bool) {
return "", false
}
for _, contain := range cr.cache {
if contain.Path() == nil {
for _, cc := range cr.cache {
if cc.Path() == nil {
continue
}
if contain.Path().String() == pathString {
return *contain.GetId(), true
if cc.Path().String() == pathString {
return *cc.GetId(), true
}
}
@ -141,18 +155,21 @@ func (cr *containerResolver) Items() []graph.CachedContainer {
// AddToCache adds container to map in field 'cache'
// @returns error iff the required values are not accessible.
func (cr *containerResolver) AddToCache(ctx context.Context, f graph.Container) error {
func (cr *containerResolver) AddToCache(
ctx context.Context,
f graph.Container,
useIDInPath bool,
) error {
temp := graph.CacheFolder{
Container: f,
}
if err := cr.addFolder(temp); err != nil {
return errors.Wrap(err, "adding cache folder")
}
// Populate the path for this entry so calls to PathInCache succeed no matter
// when they're made.
_, err := cr.IDToPath(ctx, *f.GetId())
_, _, err := cr.IDToPath(ctx, *f.GetId(), useIDInPath)
if err != nil {
return errors.Wrap(err, "adding cache entry")
}
@ -160,12 +177,18 @@ func (cr *containerResolver) AddToCache(ctx context.Context, f graph.Container)
return nil
}
func (cr *containerResolver) populatePaths(ctx context.Context) error {
// DestinationNameToID returns an empty string. This is only supported by exchange
// calendars at this time.
func (cr *containerResolver) DestinationNameToID(dest string) string {
return ""
}
func (cr *containerResolver) populatePaths(ctx context.Context, useIDInPath bool) error {
var errs *multierror.Error
// Populate all folder paths.
for _, f := range cr.Items() {
_, err := cr.IDToPath(ctx, *f.GetId())
_, _, err := cr.IDToPath(ctx, *f.GetId(), useIDInPath)
if err != nil {
errs = multierror.Append(errs, errors.Wrap(err, "populating path"))
}

View File

@ -1,6 +1,7 @@
package exchange
import (
"fmt"
stdpath "path"
"testing"
@ -26,6 +27,7 @@ type mockContainer struct {
displayName *string
parentID *string
p *path.Builder
l *path.Builder
}
//nolint:revive
@ -34,6 +36,8 @@ func (m mockContainer) GetId() *string { return m.id }
//nolint:revive
func (m mockContainer) GetParentFolderId() *string { return m.parentID }
func (m mockContainer) GetDisplayName() *string { return m.displayName }
func (m mockContainer) Location() *path.Builder { return m.l }
func (m mockContainer) SetLocation(p *path.Builder) {}
func (m mockContainer) Path() *path.Builder { return m.p }
func (m mockContainer) SetPath(p *path.Builder) {}
@ -168,7 +172,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
parentID: nil,
},
nil,
),
nil),
check: assert.Error,
},
{
@ -180,7 +184,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
parentID: nil,
},
path.Builder{}.Append("foo"),
),
path.Builder{}.Append("loc")),
check: assert.NoError,
},
{
@ -192,7 +196,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
parentID: &testParentID,
},
path.Builder{}.Append("foo"),
),
path.Builder{}.Append("loc")),
check: assert.Error,
},
{
@ -204,7 +208,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
parentID: &testParentID,
},
path.Builder{}.Append("foo"),
),
path.Builder{}.Append("loc")),
check: assert.Error,
},
{
@ -216,7 +220,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
parentID: &testParentID,
},
nil,
),
nil),
check: assert.NoError,
},
}
@ -241,49 +245,54 @@ type mockCachedContainer struct {
id string
parentID string
displayName string
l *path.Builder
p *path.Builder
expectedPath string
expectedLocation string
}
//nolint:revive
func (m mockCachedContainer) GetId() *string {
return &m.id
}
func (m mockCachedContainer) GetId() *string { return &m.id }
//nolint:revive
func (m mockCachedContainer) GetParentFolderId() *string {
return &m.parentID
}
func (m mockCachedContainer) GetParentFolderId() *string { return &m.parentID }
func (m mockCachedContainer) GetDisplayName() *string { return &m.displayName }
func (m mockCachedContainer) Location() *path.Builder { return m.l }
func (m *mockCachedContainer) SetLocation(newLoc *path.Builder) { m.l = newLoc }
func (m mockCachedContainer) Path() *path.Builder { return m.p }
func (m *mockCachedContainer) SetPath(newPath *path.Builder) { m.p = newPath }
func (m mockCachedContainer) GetDisplayName() *string {
return &m.displayName
}
func (m mockCachedContainer) Path() *path.Builder {
return m.p
}
func (m *mockCachedContainer) SetPath(newPath *path.Builder) {
m.p = newPath
}
func resolverWithContainers(numContainers int) (*containerResolver, []*mockCachedContainer) {
func resolverWithContainers(numContainers int, useIDInPath bool) (*containerResolver, []*mockCachedContainer) {
containers := make([]*mockCachedContainer, 0, numContainers)
for i := 0; i < numContainers; i++ {
containers = append(containers, newMockCachedContainer("a"))
containers = append(containers, newMockCachedContainer(fmt.Sprintf("%d", i)))
}
// Base case for the recursive lookup.
containers[0].p = path.Builder{}.Append(containers[0].displayName)
containers[0].expectedPath = containers[0].displayName
dn := containers[0].displayName
apndP := dn
if useIDInPath {
apndP = containers[0].id
}
containers[0].p = path.Builder{}.Append(apndP)
containers[0].expectedPath = apndP
containers[0].l = path.Builder{}.Append(dn)
containers[0].expectedLocation = dn
for i := 1; i < len(containers); i++ {
dn := containers[i].displayName
apndP := dn
if useIDInPath {
apndP = containers[i].id
}
containers[i].parentID = containers[i-1].id
containers[i].expectedPath = stdpath.Join(
containers[i-1].expectedPath,
containers[i].displayName,
)
containers[i].expectedPath = stdpath.Join(containers[i-1].expectedPath, apndP)
containers[i].expectedLocation = stdpath.Join(containers[i-1].expectedLocation, dn)
}
resolver := newContainerResolver()
@ -304,12 +313,15 @@ type ConfiguredFolderCacheUnitSuite struct {
suite.Suite
fc *containerResolver
fcWithID *containerResolver
allContainers []*mockCachedContainer
containersWithID []*mockCachedContainer
}
func (suite *ConfiguredFolderCacheUnitSuite) SetupTest() {
suite.fc, suite.allContainers = resolverWithContainers(4)
suite.fc, suite.allContainers = resolverWithContainers(4, false)
suite.fcWithID, suite.containersWithID = resolverWithContainers(4, true)
}
func TestConfiguredFolderCacheUnitSuite(t *testing.T) {
@ -339,8 +351,8 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestDepthLimit() {
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
resolver, containers := resolverWithContainers(test.numContainers)
_, err := resolver.IDToPath(ctx, containers[len(containers)-1].id)
resolver, containers := resolverWithContainers(test.numContainers, false)
_, _, err := resolver.IDToPath(ctx, containers[len(containers)-1].id, false)
test.check(t, err)
})
}
@ -352,7 +364,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestPopulatePaths() {
t := suite.T()
require.NoError(t, suite.fc.populatePaths(ctx))
require.NoError(t, suite.fc.populatePaths(ctx, false))
items := suite.fc.Items()
gotPaths := make([]string, 0, len(items))
@ -375,10 +387,24 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderNoPathsCached
for _, c := range suite.allContainers {
suite.T().Run(*c.GetDisplayName(), func(t *testing.T) {
p, err := suite.fc.IDToPath(ctx, c.id)
p, l, err := suite.fc.IDToPath(ctx, c.id, false)
require.NoError(t, err)
assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String())
})
}
}
func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderNoPathsCached_useID() {
ctx, flush := tester.NewContext()
defer flush()
for _, c := range suite.containersWithID {
suite.T().Run(*c.GetDisplayName(), func(t *testing.T) {
p, l, err := suite.fcWithID.IDToPath(ctx, c.id, true)
require.NoError(t, err)
assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String())
})
}
}
@ -390,17 +416,37 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderCachesPaths()
t := suite.T()
c := suite.allContainers[len(suite.allContainers)-1]
p, err := suite.fc.IDToPath(ctx, c.id)
p, l, err := suite.fc.IDToPath(ctx, c.id, false)
require.NoError(t, err)
assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String())
c.parentID = "foo"
p, err = suite.fc.IDToPath(ctx, c.id)
p, l, err = suite.fc.IDToPath(ctx, c.id, false)
require.NoError(t, err)
assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String())
}
func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderCachesPaths_useID() {
ctx, flush := tester.NewContext()
defer flush()
t := suite.T()
c := suite.containersWithID[len(suite.containersWithID)-1]
p, l, err := suite.fcWithID.IDToPath(ctx, c.id, true)
require.NoError(t, err)
assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String())
c.parentID = "foo"
p, l, err = suite.fcWithID.IDToPath(ctx, c.id, true)
require.NoError(t, err)
assert.Equal(t, c.expectedPath, p.String())
assert.Equal(t, c.expectedLocation, l.String())
}
func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsParentNotFound() {
@ -413,7 +459,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsParentN
delete(suite.fc.cache, almostLast.id)
_, err := suite.fc.IDToPath(ctx, last.id)
_, _, err := suite.fc.IDToPath(ctx, last.id, false)
assert.Error(t, err)
}
@ -423,7 +469,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsNotFoun
t := suite.T()
_, err := suite.fc.IDToPath(ctx, "foo")
_, _, err := suite.fc.IDToPath(ctx, "foo", false)
assert.Error(t, err)
}
@ -431,20 +477,26 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestAddToCache() {
ctx, flush := tester.NewContext()
defer flush()
t := suite.T()
last := suite.allContainers[len(suite.allContainers)-1]
m := newMockCachedContainer("testAddFolder")
var (
dest = "testAddFolder"
t = suite.T()
last = suite.allContainers[len(suite.allContainers)-1]
m = newMockCachedContainer(dest)
)
m.parentID = last.id
m.expectedPath = stdpath.Join(last.expectedPath, m.displayName)
m.expectedLocation = stdpath.Join(last.expectedPath, m.displayName)
require.NoError(t, suite.fc.AddToCache(ctx, m))
require.Empty(t, suite.fc.DestinationNameToID(dest), "destination not yet added to cache")
require.NoError(t, suite.fc.AddToCache(ctx, m, false))
require.Empty(t, suite.fc.DestinationNameToID(dest),
"destination id from cache, still empty, because this is not a calendar")
p, err := suite.fc.IDToPath(ctx, m.id)
p, l, err := suite.fc.IDToPath(ctx, m.id, false)
require.NoError(t, err)
assert.Equal(t, m.expectedPath, p.String())
assert.Equal(t, m.expectedLocation, l.String())
}
// ---------------------------------------------------------------------------
@ -506,32 +558,35 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
pathFunc2 func(t *testing.T) path.Path
category path.CategoryType
folderPrefix string
useIDForPath bool
}{
{
name: "Mail Cache Test",
category: path.EmailCategory,
pathFunc1: func(t *testing.T) path.Path {
pth, err := path.Builder{}.Append("Griffindor").
Append("Croix").ToDataLayerExchangePathForCategory(
pth, err := path.Builder{}.
Append("Griffindor").
Append("Croix").
ToDataLayerExchangePathForCategory(
suite.credentials.AzureTenantID,
user,
path.EmailCategory,
false,
)
false)
require.NoError(t, err)
return pth
},
pathFunc2: func(t *testing.T) path.Path {
pth, err := path.Builder{}.Append("Griffindor").
Append("Felicius").ToDataLayerExchangePathForCategory(
pth, err := path.Builder{}.
Append("Griffindor").
Append("Felicius").
ToDataLayerExchangePathForCategory(
suite.credentials.AzureTenantID,
user,
path.EmailCategory,
false,
)
false)
require.NoError(t, err)
return pth
},
},
@ -539,63 +594,65 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
name: "Contact Cache Test",
category: path.ContactsCategory,
pathFunc1: func(t *testing.T) path.Path {
aPath, err := path.Builder{}.Append("HufflePuff").
aPath, err := path.Builder{}.
Append("HufflePuff").
ToDataLayerExchangePathForCategory(
suite.credentials.AzureTenantID,
user,
path.ContactsCategory,
false,
)
false)
require.NoError(t, err)
return aPath
},
pathFunc2: func(t *testing.T) path.Path {
aPath, err := path.Builder{}.Append("Ravenclaw").
aPath, err := path.Builder{}.
Append("Ravenclaw").
ToDataLayerExchangePathForCategory(
suite.credentials.AzureTenantID,
user,
path.ContactsCategory,
false,
)
false)
require.NoError(t, err)
return aPath
},
},
{
name: "Event Cache Test",
category: path.EventsCategory,
useIDForPath: true,
pathFunc1: func(t *testing.T) path.Path {
aPath, err := path.Builder{}.Append("Durmstrang").
aPath, err := path.Builder{}.
Append("Durmstrang").
ToDataLayerExchangePathForCategory(
suite.credentials.AzureTenantID,
user,
path.EventsCategory,
false,
)
false)
require.NoError(t, err)
return aPath
},
pathFunc2: func(t *testing.T) path.Path {
aPath, err := path.Builder{}.Append("Beauxbatons").
aPath, err := path.Builder{}.
Append("Beauxbatons").
ToDataLayerExchangePathForCategory(
suite.credentials.AzureTenantID,
user,
path.EventsCategory,
false,
)
false)
require.NoError(t, err)
return aPath
},
folderPrefix: calendarOthersFolder,
},
}
)
for _, test := range tests {
suite.T().Run(test.name, func(t *testing.T) {
folderID, err := CreateContainerDestinaion(
folderID, err := CreateContainerDestination(
ctx,
m365,
test.pathFunc1(t),
@ -605,21 +662,26 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
resolver := directoryCaches[test.category]
_, err = resolver.IDToPath(ctx, folderID)
_, _, err = resolver.IDToPath(ctx, folderID, test.useIDForPath)
assert.NoError(t, err)
secondID, err := CreateContainerDestinaion(
parentContainer := folderName
if test.useIDForPath {
parentContainer = folderID
}
secondID, err := CreateContainerDestination(
ctx,
m365,
test.pathFunc2(t),
folderName,
parentContainer,
directoryCaches)
require.NoError(t, err)
_, err = resolver.IDToPath(ctx, secondID)
_, _, err = resolver.IDToPath(ctx, secondID, test.useIDForPath)
require.NoError(t, err)
p := stdpath.Join(test.folderPrefix, folderName)
p := stdpath.Join(test.folderPrefix, parentContainer)
_, ok := resolver.PathInCache(p)
require.True(t, ok, "looking for path in cache: %s", p)
})

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/exchange/api"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
@ -274,8 +275,8 @@ func (suite *DataCollectionsIntegrationSuite) TestMailFetch() {
continue
}
require.NotEmpty(t, c.FullPath().Folder())
folder := c.FullPath().Folder()
require.NotEmpty(t, c.FullPath().Folder(false))
folder := c.FullPath().Folder(false)
delete(test.folderNames, folder)
}
@ -507,7 +508,7 @@ func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression
continue
}
assert.Equal(t, edc.FullPath().Folder(), DefaultContactFolder)
assert.Equal(t, edc.FullPath().Folder(false), DefaultContactFolder)
assert.NotZero(t, count)
}
@ -527,13 +528,35 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
users := []string{suite.user}
ac, err := api.NewClient(acct)
require.NoError(suite.T(), err, "creating client")
var (
calID string
bdayID string
)
fn := func(gcf graph.CacheFolder) error {
if *gcf.GetDisplayName() == DefaultCalendar {
calID = *gcf.GetId()
}
if *gcf.GetDisplayName() == "Birthdays" {
bdayID = *gcf.GetId()
}
return nil
}
require.NoError(suite.T(), ac.Events().EnumerateContainers(ctx, suite.user, DefaultCalendar, fn))
tests := []struct {
name, expected string
scope selectors.ExchangeScope
}{
{
name: "Default Event Calendar",
expected: DefaultCalendar,
expected: calID,
scope: selectors.NewExchangeBackup(users).EventCalendars(
[]string{DefaultCalendar},
selectors.PrefixMatch(),
@ -541,9 +564,9 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
},
{
name: "Birthday Calendar",
expected: calendarOthersFolder + "/Birthdays",
expected: bdayID,
scope: selectors.NewExchangeBackup(users).EventCalendars(
[]string{calendarOthersFolder + "/Birthdays"},
[]string{"Birthdays"},
selectors.PrefixMatch(),
)[0],
},
@ -571,9 +594,9 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
if edc.FullPath().Service() != path.ExchangeMetadataService {
isMetadata = true
assert.Equal(t, test.expected, edc.FullPath().Folder())
assert.Equal(t, test.expected, edc.FullPath().Folder(false))
} else {
assert.Equal(t, "", edc.FullPath().Folder())
assert.Equal(t, "", edc.FullPath().Folder(false))
}
for item := range edc.Items() {

View File

@ -17,6 +17,7 @@ type eventCalendarCache struct {
enumer containersEnumerator
getter containerGetter
userID string
newAdditions map[string]string
}
// init ensures that the structure's fields are initialized.
@ -44,7 +45,10 @@ func (ecc *eventCalendarCache) populateEventRoot(ctx context.Context) error {
return errors.Wrap(err, "fetching calendar "+support.ConnectorStackErrorTrace(err))
}
temp := graph.NewCacheFolder(f, path.Builder{}.Append(container))
temp := graph.NewCacheFolder(
f,
path.Builder{}.Append(*f.GetId()), // storage path
path.Builder{}.Append(*f.GetDisplayName())) // display location
if err := ecc.addFolder(temp); err != nil {
return errors.Wrap(err, "initializing calendar resolver")
}
@ -68,16 +72,12 @@ func (ecc *eventCalendarCache) Populate(
ctx,
ecc.userID,
"",
func(cf graph.CacheFolder) error {
cf.SetPath(path.Builder{}.Append(calendarOthersFolder, *cf.GetDisplayName()))
return ecc.addFolder(cf)
},
)
ecc.addFolder)
if err != nil {
return errors.Wrap(err, "enumerating containers")
}
if err := ecc.populatePaths(ctx); err != nil {
if err := ecc.populatePaths(ctx, true); err != nil {
return errors.Wrap(err, "establishing calendar paths")
}
@ -86,23 +86,40 @@ func (ecc *eventCalendarCache) Populate(
// AddToCache adds container to map in field 'cache'
// @returns error iff the required values are not accessible.
func (ecc *eventCalendarCache) AddToCache(ctx context.Context, f graph.Container) error {
func (ecc *eventCalendarCache) AddToCache(ctx context.Context, f graph.Container, useIDInPath bool) error {
if err := checkIDAndName(f); err != nil {
return errors.Wrap(err, "validating container")
}
temp := graph.NewCacheFolder(f, path.Builder{}.Append(calendarOthersFolder, *f.GetDisplayName()))
temp := graph.NewCacheFolder(
f,
path.Builder{}.Append(*f.GetId()), // storage path
path.Builder{}.Append(*f.GetDisplayName())) // display location
if len(ecc.newAdditions) == 0 {
ecc.newAdditions = map[string]string{}
}
ecc.newAdditions[*f.GetDisplayName()] = *f.GetId()
if err := ecc.addFolder(temp); err != nil {
delete(ecc.newAdditions, *f.GetDisplayName())
return errors.Wrap(err, "adding container")
}
// Populate the path for this entry so calls to PathInCache succeed no matter
// when they're made.
_, err := ecc.IDToPath(ctx, *f.GetId())
_, _, err := ecc.IDToPath(ctx, *f.GetId(), true)
if err != nil {
delete(ecc.newAdditions, *f.GetDisplayName())
return errors.Wrap(err, "setting path to container id")
}
return nil
}
// DestinationNameToID returns an empty string. This is only supported by exchange
// calendars at this time.
func (ecc *eventCalendarCache) DestinationNameToID(dest string) string {
return ecc.newAdditions[dest]
}

View File

@ -77,6 +77,11 @@ type Collection struct {
// moved. It will be empty on its first retrieval.
prevPath path.Path
// LocationPath contains the path with human-readable display names.
// IE: "/Inbox/Important" instead of "/abcdxyz123/algha=lgkhal=t"
// Currently only implemented for Exchange Calendars.
locationPath path.Path
state data.CollectionState
// doNotMergeItems should only be true if the old delta token expired.
@ -91,7 +96,7 @@ type Collection struct {
// or notMoved (if they match).
func NewCollection(
user string,
curr, prev path.Path,
curr, prev, location path.Path,
category path.CategoryType,
items itemer,
statusUpdater support.StatusUpdater,
@ -99,18 +104,19 @@ func NewCollection(
doNotMergeItems bool,
) Collection {
collection := Collection{
added: make(map[string]struct{}, 0),
category: category,
ctrl: ctrlOpts,
data: make(chan data.Stream, collectionChannelBufferSize),
doNotMergeItems: doNotMergeItems,
fullPath: curr,
added: make(map[string]struct{}, 0),
removed: make(map[string]struct{}, 0),
items: items,
locationPath: location,
prevPath: prev,
removed: make(map[string]struct{}, 0),
state: data.StateOf(prev, curr),
statusUpdater: statusUpdater,
user: user,
items: items,
}
return collection
@ -128,6 +134,12 @@ func (col *Collection) FullPath() path.Path {
return col.fullPath
}
// LocationPath produces the Collection's full path, but with display names
// instead of IDs in the folders. Only populated for Calendars.
func (col *Collection) LocationPath() path.Path {
return col.locationPath
}
// TODO(ashmrtn): Fill in with previous path once GraphConnector compares old
// and new folder hierarchies.
func (col Collection) PreviousPath() path.Path {
@ -172,7 +184,7 @@ func (col *Collection) streamItems(ctx context.Context) {
ctx,
col.fullPath.Category().String(),
observe.PII(user),
observe.PII(col.fullPath.Folder()))
observe.PII(col.fullPath.Folder(false)))
go closer()
@ -331,7 +343,7 @@ func (col *Collection) finishPopulation(ctx context.Context, success int, totalB
TotalBytes: totalBytes,
},
errs,
col.fullPath.Folder())
col.fullPath.Folder(false))
logger.Ctx(ctx).Debugw("done streaming items", "status", status.String())
col.statusUpdater(status)
}

View File

@ -12,8 +12,10 @@ import (
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/path"
)
@ -116,6 +118,70 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataCollection_NewExchange
suite.Equal(fullPath, edc.FullPath())
}
func (suite *ExchangeDataCollectionSuite) TestNewCollection_state() {
fooP, err := path.Builder{}.
Append("foo").
ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false)
require.NoError(suite.T(), err)
barP, err := path.Builder{}.
Append("bar").
ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false)
require.NoError(suite.T(), err)
locP, err := path.Builder{}.
Append("human-readable").
ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false)
require.NoError(suite.T(), err)
table := []struct {
name string
prev path.Path
curr path.Path
loc path.Path
expect data.CollectionState
}{
{
name: "new",
curr: fooP,
loc: locP,
expect: data.NewState,
},
{
name: "not moved",
prev: fooP,
curr: fooP,
loc: locP,
expect: data.NotMovedState,
},
{
name: "moved",
prev: fooP,
curr: barP,
loc: locP,
expect: data.MovedState,
},
{
name: "deleted",
prev: fooP,
expect: data.DeletedState,
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
c := NewCollection(
"u",
test.curr, test.prev, test.loc,
0,
&mockItemer{}, nil,
control.Options{},
false)
assert.Equal(t, test.expect, c.State(), "collection state")
assert.Equal(t, test.curr, c.fullPath, "full path")
assert.Equal(t, test.prev, c.prevPath, "prev path")
assert.Equal(t, test.loc, c.locationPath, "location path")
})
}
}
func (suite *ExchangeDataCollectionSuite) TestGetItemWithRetries() {
table := []struct {
name string

View File

@ -38,5 +38,4 @@ const (
rootFolderAlias = "msgfolderroot"
DefaultContactFolder = "Contacts"
DefaultCalendar = "Calendar"
calendarOthersFolder = "Other Calendars"
)

View File

@ -47,6 +47,9 @@ func (suite *CacheResolverSuite) TestPopulate() {
ac, err := api.NewClient(suite.credentials)
require.NoError(suite.T(), err)
cal, err := ac.Events().GetContainerByID(ctx, tester.M365UserID(suite.T()), DefaultCalendar)
require.NoError(suite.T(), err)
eventFunc := func(t *testing.T) graph.ContainerResolver {
return &eventCalendarCache{
userID: tester.M365UserID(t),
@ -64,13 +67,13 @@ func (suite *CacheResolverSuite) TestPopulate() {
}
tests := []struct {
name, folderName, root, basePath string
name, folderInCache, root, basePath string
resolverFunc func(t *testing.T) graph.ContainerResolver
canFind assert.BoolAssertionFunc
}{
{
name: "Default Event Cache",
folderName: DefaultCalendar,
folderInCache: *cal.GetId(),
root: DefaultCalendar,
basePath: DefaultCalendar,
resolverFunc: eventFunc,
@ -78,21 +81,21 @@ func (suite *CacheResolverSuite) TestPopulate() {
},
{
name: "Default Event Folder Hidden",
folderInCache: DefaultContactFolder,
root: DefaultCalendar,
folderName: DefaultContactFolder,
canFind: assert.False,
resolverFunc: eventFunc,
},
{
name: "Name Not in Cache",
folderName: "testFooBarWhoBar",
folderInCache: "testFooBarWhoBar",
root: DefaultCalendar,
canFind: assert.False,
resolverFunc: eventFunc,
},
{
name: "Default Contact Cache",
folderName: DefaultContactFolder,
folderInCache: DefaultContactFolder,
root: DefaultContactFolder,
basePath: DefaultContactFolder,
canFind: assert.True,
@ -100,14 +103,14 @@ func (suite *CacheResolverSuite) TestPopulate() {
},
{
name: "Default Contact Hidden",
folderName: DefaultContactFolder,
folderInCache: DefaultContactFolder,
root: DefaultContactFolder,
canFind: assert.False,
resolverFunc: contactFunc,
},
{
name: "Name Not in Cache",
folderName: "testFooBarWhoBar",
folderInCache: "testFooBarWhoBar",
root: DefaultContactFolder,
canFind: assert.False,
resolverFunc: contactFunc,
@ -116,9 +119,9 @@ func (suite *CacheResolverSuite) TestPopulate() {
for _, test := range tests {
suite.T().Run(test.name, func(t *testing.T) {
resolver := test.resolverFunc(t)
require.NoError(t, resolver.Populate(ctx, test.root, test.basePath))
_, isFound := resolver.PathInCache(test.folderName)
_, isFound := resolver.PathInCache(test.folderInCache)
test.canFind(t, isFound)
})
}

View File

@ -53,7 +53,9 @@ func (mc *mailFolderCache) populateMailRoot(ctx context.Context) error {
directory = DefaultMailFolder
}
temp := graph.NewCacheFolder(f, path.Builder{}.Append(directory))
temp := graph.NewCacheFolder(f,
path.Builder{}.Append(directory), // storage path
path.Builder{}.Append(directory)) // display location
if err := mc.addFolder(temp); err != nil {
return errors.Wrap(err, "adding resolver dir")
}
@ -81,7 +83,7 @@ func (mc *mailFolderCache) Populate(
return errors.Wrap(err, "enumerating containers")
}
if err := mc.populatePaths(ctx); err != nil {
if err := mc.populatePaths(ctx, false); err != nil {
return errors.Wrap(err, "populating paths")
}

View File

@ -18,9 +18,9 @@ const (
// top-level folders right now.
//nolint:lll
testFolderID = "AAMkAGZmNjNlYjI3LWJlZWYtNGI4Mi04YjMyLTIxYThkNGQ4NmY1MwAuAAAAAADCNgjhM9QmQYWNcI7hCpPrAQDSEBNbUIB9RL6ePDeF3FIYAABl7AqpAAA="
//nolint:lll
topFolderID = "AAMkAGZmNjNlYjI3LWJlZWYtNGI4Mi04YjMyLTIxYThkNGQ4NmY1MwAuAAAAAADCNgjhM9QmQYWNcI7hCpPrAQDSEBNbUIB9RL6ePDeF3FIYAAAAAAEIAAA="
//nolint:lll
// Full folder path for the folder above.
expectedFolderPath = "toplevel/subFolder/subsubfolder"
)
@ -94,9 +94,10 @@ func (suite *MailFolderCacheIntegrationSuite) TestDeltaFetch() {
require.NoError(t, mfc.Populate(ctx, test.root, test.path...))
p, err := mfc.IDToPath(ctx, testFolderID)
p, l, err := mfc.IDToPath(ctx, testFolderID, true)
require.NoError(t, err)
t.Logf("Path: %s\n", p.String())
t.Logf("Location: %s\n", l.String())
expectedPath := stdpath.Join(append(test.path, expectedFolderPath)...)
assert.Equal(t, expectedPath, p.String())

View File

@ -86,44 +86,70 @@ func PopulateExchangeContainerResolver(
}
// Returns true if the container passes the scope comparison and should be included.
// Also returns the path representing the directory.
// Returns:
// - the path representing the directory as it should be stored in the repository.
// - the human-readable path using display names.
// - true if the path passes the scope comparison.
func includeContainer(
qp graph.QueryParams,
c graph.CachedContainer,
scope selectors.ExchangeScope,
) (path.Path, bool) {
) (path.Path, path.Path, bool) {
var (
category = scope.Category().PathType()
directory string
locPath path.Path
category = scope.Category().PathType()
pb = c.Path()
loc = c.Location()
)
// Clause ensures that DefaultContactFolder is inspected properly
if category == path.ContactsCategory && *c.GetDisplayName() == DefaultContactFolder {
pb = c.Path().Append(DefaultContactFolder)
pb = pb.Append(DefaultContactFolder)
if loc != nil {
loc = loc.Append(DefaultContactFolder)
}
}
dirPath, err := pb.ToDataLayerExchangePathForCategory(
qp.Credentials.AzureTenantID,
qp.ResourceOwner,
category,
false,
)
false)
// Containers without a path (e.g. Root mail folder) always err here.
if err != nil {
return nil, false
return nil, nil, false
}
directory = pb.String()
directory = dirPath.Folder(false)
if loc != nil {
locPath, err = loc.ToDataLayerExchangePathForCategory(
qp.Credentials.AzureTenantID,
qp.ResourceOwner,
category,
false)
// Containers without a path (e.g. Root mail folder) always err here.
if err != nil {
return nil, nil, false
}
directory = locPath.Folder(false)
}
var ok bool
switch category {
case path.EmailCategory:
return dirPath, scope.Matches(selectors.ExchangeMailFolder, directory)
ok = scope.Matches(selectors.ExchangeMailFolder, directory)
case path.ContactsCategory:
return dirPath, scope.Matches(selectors.ExchangeContactFolder, directory)
ok = scope.Matches(selectors.ExchangeContactFolder, directory)
case path.EventsCategory:
return dirPath, scope.Matches(selectors.ExchangeEventCalendar, directory)
ok = scope.Matches(selectors.ExchangeEventCalendar, directory)
default:
return dirPath, false
return nil, nil, false
}
return dirPath, locPath, ok
}

View File

@ -70,7 +70,7 @@ func filterContainersAndFillCollections(
cID := *c.GetId()
delete(tombstones, cID)
currPath, ok := includeContainer(qp, c, scope)
currPath, locPath, ok := includeContainer(qp, c, scope)
// Only create a collection if the path matches the scope.
if !ok {
continue
@ -110,10 +110,15 @@ func filterContainersAndFillCollections(
deltaURLs[cID] = newDelta.URL
}
if qp.Category != path.EventsCategory {
locPath = nil
}
edc := NewCollection(
qp.ResourceOwner,
currPath,
prevPath,
locPath,
scope.Category().PathType(),
ibt,
statusUpdater,
@ -167,6 +172,7 @@ func filterContainersAndFillCollections(
qp.ResourceOwner,
nil, // marks the collection as deleted
prevPath,
nil, // tombstones don't need a location
scope.Category().PathType(),
ibt,
statusUpdater,

View File

@ -59,6 +59,7 @@ var _ graph.ContainerResolver = &mockResolver{}
type (
mockResolver struct {
items []graph.CachedContainer
added map[string]string
}
)
@ -76,8 +77,19 @@ func (m mockResolver) Items() []graph.CachedContainer {
return m.items
}
func (m mockResolver) AddToCache(context.Context, graph.Container) error { return nil }
func (m mockResolver) IDToPath(context.Context, string) (*path.Builder, error) { return nil, nil }
func (m mockResolver) AddToCache(ctx context.Context, gc graph.Container, b bool) error {
if len(m.added) == 0 {
m.added = map[string]string{}
}
m.added[*gc.GetDisplayName()] = *gc.GetId()
return nil
}
func (m mockResolver) DestinationNameToID(dest string) string { return m.added[dest] }
func (m mockResolver) IDToPath(context.Context, string, bool) (*path.Builder, *path.Builder, error) {
return nil, nil, nil
}
func (m mockResolver) PathInCache(string) (string, bool) { return "", false }
func (m mockResolver) Populate(context.Context, string, ...string) error { return nil }

View File

@ -342,7 +342,7 @@ func RestoreExchangeDataCollections(
userCaches = directoryCaches[userID]
}
containerID, err := CreateContainerDestinaion(
containerID, err := CreateContainerDestination(
ctx,
creds,
dc.FullPath(),
@ -398,7 +398,7 @@ func restoreCollection(
ctx,
category.String(),
observe.PII(user),
observe.PII(directory.Folder()))
observe.PII(directory.Folder(false)))
defer closer()
defer close(colProgress)
@ -445,10 +445,16 @@ func restoreCollection(
continue
}
var locationRef string
if category == path.ContactsCategory {
locationRef = itemPath.Folder(false)
}
deets.Add(
itemPath.String(),
itemPath.ShortRef(),
"",
locationRef,
true,
details.ItemInfo{
Exchange: info,
@ -459,12 +465,12 @@ func restoreCollection(
}
}
// CreateContainerDestinaion builds the destination into the container
// CreateContainerDestination builds the destination into the container
// at the provided path. As a precondition, the destination cannot
// already exist. If it does then an error is returned. The provided
// containerResolver is updated with the new destination.
// @ returns the container ID of the new destination container.
func CreateContainerDestinaion(
func CreateContainerDestination(
ctx context.Context,
creds account.M365Config,
directory path.Path,
@ -476,7 +482,6 @@ func CreateContainerDestinaion(
user = directory.ResourceOwner()
category = directory.Category()
directoryCache = caches[category]
newPathFolders = append([]string{destination}, directory.Folders()...)
)
// TODO(rkeepers): pass the api client into this func, rather than generating one.
@ -487,6 +492,8 @@ func CreateContainerDestinaion(
switch category {
case path.EmailCategory:
folders := append([]string{destination}, directory.Folders()...)
if directoryCache == nil {
acm := ac.Mail()
mfc := &mailFolderCache{
@ -503,12 +510,14 @@ func CreateContainerDestinaion(
return establishMailRestoreLocation(
ctx,
ac,
newPathFolders,
folders,
directoryCache,
user,
newCache)
case path.ContactsCategory:
folders := append([]string{destination}, directory.Folders()...)
if directoryCache == nil {
acc := ac.Contacts()
cfc := &contactFolderCache{
@ -524,12 +533,14 @@ func CreateContainerDestinaion(
return establishContactsRestoreLocation(
ctx,
ac,
newPathFolders,
folders,
directoryCache,
user,
newCache)
case path.EventsCategory:
dest := destination
if directoryCache == nil {
ace := ac.Events()
ecc := &eventCalendarCache{
@ -540,16 +551,23 @@ func CreateContainerDestinaion(
caches[category] = ecc
newCache = true
directoryCache = ecc
} else if did := directoryCache.DestinationNameToID(dest); len(did) > 0 {
// calendars are cached by ID in the resolver, not name, so once we have
// created the destination calendar, we need to look up its id and use
// that for resolver lookups instead of the display name.
dest = did
}
folders := append([]string{dest}, directory.Folders()...)
return establishEventsRestoreLocation(
ctx,
ac,
newPathFolders,
folders,
directoryCache,
user,
newCache,
)
newCache)
default:
return "", fmt.Errorf("category: %s not support for exchange cache", category)
}
@ -602,7 +620,7 @@ func establishMailRestoreLocation(
}
// NOOP if the folder is already in the cache.
if err = mfc.AddToCache(ctx, temp); err != nil {
if err = mfc.AddToCache(ctx, temp, false); err != nil {
return "", errors.Wrap(err, "adding folder to cache")
}
}
@ -641,7 +659,7 @@ func establishContactsRestoreLocation(
return "", errors.Wrap(err, "populating contact cache")
}
if err = cfc.AddToCache(ctx, temp); err != nil {
if err = cfc.AddToCache(ctx, temp, false); err != nil {
return "", errors.Wrap(err, "adding contact folder to cache")
}
}
@ -658,10 +676,7 @@ func establishEventsRestoreLocation(
isNewCache bool,
) (string, error) {
// Calendars are cached by container ID (or by the destination's display name,
// already resolved via DestinationNameToID), so look up the destination directly
// without any folder prefix.
cached, ok := ecc.PathInCache(path.Builder{}.Append(
calendarOthersFolder,
folders[0],
).String())
cached, ok := ecc.PathInCache(folders[0])
if ok {
return cached, nil
}
@ -679,7 +694,7 @@ func establishEventsRestoreLocation(
}
displayable := api.CalendarDisplayable{Calendarable: temp}
if err = ecc.AddToCache(ctx, displayable); err != nil {
if err = ecc.AddToCache(ctx, displayable, true); err != nil {
return "", errors.Wrap(err, "adding new calendar to cache")
}
}

View File

@ -1,41 +1,84 @@
package graph
import (
"github.com/alcionai/clues"
"context"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/pkg/errors"
"github.com/alcionai/corso/src/pkg/path"
)
// Idable represents objects that implement msgraph-sdk-go/models.entityable
// and have the concept of an ID.
type Idable interface {
GetId() *string
}
// Descendable represents objects that implement msgraph-sdk-go/models.entityable
// and have the concept of a "parent folder".
type Descendable interface {
Idable
GetParentFolderId() *string
}
// Displayable represents objects that implement msgraph-sdk-go/models.entityable
// and have the concept of a display name.
type Displayable interface {
Idable
GetDisplayName() *string
}
type Container interface {
Descendable
Displayable
}
// CachedContainer is used for local unit tests but also makes it so that this
// code can be broken into generic- and service-specific chunks later on to
// reuse logic in IDToPath.
type CachedContainer interface {
Container
// Location contains either the display names for the dirs (if this is a calendar)
// or nil
Location() *path.Builder
SetLocation(*path.Builder)
// Path contains either the ids for the dirs (if this is a calendar)
// or the display names for the dirs
Path() *path.Builder
SetPath(*path.Builder)
}
// checkRequiredValues is a helper function to ensure that
// all the pointers are set prior to being called.
func CheckRequiredValues(c Container) error {
idPtr := c.GetId()
if idPtr == nil || len(*idPtr) == 0 {
return errors.New("folder without ID")
}
// ContainerResolver houses functions for getting information about containers
// from remote APIs (i.e. resolve folder paths with Graph API). Resolvers may
// cache information about containers.
type ContainerResolver interface {
// IDToPath takes an m365 container ID and converts it to a hierarchical path
// to that container. The path has a similar format to paths on the local
// file system.
IDToPath(ctx context.Context, m365ID string, useIDInPath bool) (*path.Builder, *path.Builder, error)
ptr := c.GetDisplayName()
if ptr == nil || len(*ptr) == 0 {
return clues.New("folder missing display name").With("container_id", *idPtr)
}
// Populate performs initialization steps for the resolver
// @param ctx is necessary param for Graph API tracing
// @param baseFolderID represents the M365ID base that the resolver will
// conclude its search. Default input is "".
Populate(ctx context.Context, baseFolderID string, baseContainerPather ...string) error
ptr = c.GetParentFolderId()
if ptr == nil || len(*ptr) == 0 {
return clues.New("folder missing parent ID").With("container_parent_id", *idPtr)
}
// PathInCache performs a look up of a path representation
// and returns the m365ID of directory iff the pathString
// matches the path of a container within the cache.
// @returns bool represents if m365ID was found.
PathInCache(pathString string) (string, bool)
return nil
AddToCache(ctx context.Context, m365Container Container, useIDInPath bool) error
// DestinationNameToID returns the ID of the destination container. Dest is
// assumed to be a display name. The ID is only populated if the destination
// was added using `AddToCache()`. Returns an empty string if not found.
DestinationNameToID(dest string) string
// Items returns the containers in the cache.
Items() []CachedContainer
}
// ======================================
@ -46,13 +89,15 @@ var _ CachedContainer = &CacheFolder{}
type CacheFolder struct {
Container
l *path.Builder
p *path.Builder
}
// NewCacheFolder public constructor for struct
func NewCacheFolder(c Container, pb *path.Builder) CacheFolder {
func NewCacheFolder(c Container, pb, lpb *path.Builder) CacheFolder {
cf := CacheFolder{
Container: c,
l: lpb,
p: pb,
}
@ -63,6 +108,14 @@ func NewCacheFolder(c Container, pb *path.Builder) CacheFolder {
// Required Functions to satisfy interfaces
// =========================================
func (cf CacheFolder) Location() *path.Builder {
return cf.l
}
func (cf *CacheFolder) SetLocation(newLocation *path.Builder) {
cf.l = newLocation
}
func (cf CacheFolder) Path() *path.Builder {
return cf.p
}
@ -109,3 +162,28 @@ func CreateCalendarDisplayable(entry any, parentID string) *CalendarDisplayable
parentID: parentID,
}
}
// =========================================
// helper funcs
// =========================================
// checkRequiredValues is a helper function to ensure that
// all the pointers are set prior to being called.
func CheckRequiredValues(c Container) error {
idPtr := c.GetId()
if idPtr == nil || len(*idPtr) == 0 {
return errors.New("folder without ID")
}
ptr := c.GetDisplayName()
if ptr == nil || len(*ptr) == 0 {
return errors.Errorf("folder %s without display name", *idPtr)
}
ptr = c.GetParentFolderId()
if ptr == nil || len(*ptr) == 0 {
return errors.Errorf("folder %s without parent ID", *idPtr)
}
return nil
}

View File

@ -151,7 +151,7 @@ func (md MetadataCollection) Items() <-chan data.Stream {
TotalBytes: totalBytes,
},
nil,
md.fullPath.Folder(),
md.fullPath.Folder(false),
)
md.statusUpdater(status)

View File

@ -1,7 +1,6 @@
package graph
import (
"context"
"net/http"
"net/http/httputil"
"os"
@ -173,57 +172,6 @@ type Servicer interface {
Adapter() *msgraphsdk.GraphRequestAdapter
}
// Idable represents objects that implement msgraph-sdk-go/models.entityable
// and have the concept of an ID.
type Idable interface {
GetId() *string
}
// Descendable represents objects that implement msgraph-sdk-go/models.entityable
// and have the concept of a "parent folder".
type Descendable interface {
Idable
GetParentFolderId() *string
}
// Displayable represents objects that implement msgraph-sdk-go/models.entityable
// and have the concept of a display name.
type Displayable interface {
Idable
GetDisplayName() *string
}
type Container interface {
Descendable
Displayable
}
// ContainerResolver houses functions for getting information about containers
// from remote APIs (i.e. resolve folder paths with Graph API). Resolvers may
// cache information about containers.
type ContainerResolver interface {
// IDToPath takes an m365 container ID and converts it to a hierarchical path
// to that container. The path has a similar format to paths on the local
// file system.
IDToPath(ctx context.Context, m365ID string) (*path.Builder, error)
// Populate performs initialization steps for the resolver
// @param ctx is necessary param for Graph API tracing
// @param baseFolderID represents the M365ID base that the resolver will
// conclude its search. Default input is "".
Populate(ctx context.Context, baseFolderID string, baseContainerPather ...string) error
// PathInCache performs a look up of a path reprensentation
// and returns the m365ID of directory iff the pathString
// matches the path of a container within the cache.
// @returns bool represents if m365ID was found.
PathInCache(pathString string) (string, bool)
AddToCache(ctx context.Context, m365Container Container) error
// Items returns the containers in the cache.
Items() []CachedContainer
}
// ---------------------------------------------------------------------------
// Client Middleware
// ---------------------------------------------------------------------------

View File

@ -1013,9 +1013,9 @@ func collectionsForInfo(
user,
info.category,
info.pathElements,
false,
)
mc := mockconnector.NewMockExchangeCollection(pth, len(info.items))
false)
mc := mockconnector.NewMockExchangeCollection(pth, pth, len(info.items))
baseDestPath := backupOutputPathFromRestore(t, dest, pth)
baseExpected := expectedData[baseDestPath.String()]
@ -1076,7 +1076,7 @@ func collectionsForInfoVersion0(
info.pathElements,
false,
)
c := mockconnector.NewMockExchangeCollection(pth, len(info.items))
c := mockconnector.NewMockExchangeCollection(pth, pth, len(info.items))
baseDestPath := backupOutputPathFromRestore(t, dest, pth)
baseExpected := expectedData[baseDestPath.String()]

View File

@ -16,6 +16,7 @@ import (
// MockExchangeDataCollection represents a mock exchange mailbox
type MockExchangeDataCollection struct {
fullPath path.Path
LocPath path.Path
messageCount int
Data [][]byte
Names []string
@ -35,9 +36,14 @@ var (
// NewMockExchangeDataCollection creates an data collection that will return the specified number of
// mock messages when iterated. Exchange type mail
func NewMockExchangeCollection(pathRepresentation path.Path, numMessagesToReturn int) *MockExchangeDataCollection {
func NewMockExchangeCollection(
storagePath path.Path,
locationPath path.Path,
numMessagesToReturn int,
) *MockExchangeDataCollection {
c := &MockExchangeDataCollection{
fullPath: pathRepresentation,
fullPath: storagePath,
LocPath: locationPath,
messageCount: numMessagesToReturn,
Data: [][]byte{},
Names: []string{},
@ -93,21 +99,11 @@ func NewMockContactCollection(pathRepresentation path.Path, numMessagesToReturn
return c
}
func (medc *MockExchangeDataCollection) FullPath() path.Path {
return medc.fullPath
}
func (medc MockExchangeDataCollection) PreviousPath() path.Path {
return medc.PrevPath
}
func (medc MockExchangeDataCollection) State() data.CollectionState {
return medc.ColState
}
func (medc MockExchangeDataCollection) DoNotMergeItems() bool {
return medc.DoNotMerge
}
func (medc MockExchangeDataCollection) FullPath() path.Path { return medc.fullPath }
func (medc MockExchangeDataCollection) LocationPath() path.Path { return medc.LocPath }
func (medc MockExchangeDataCollection) PreviousPath() path.Path { return medc.PrevPath }
func (medc MockExchangeDataCollection) State() data.CollectionState { return medc.ColState }
func (medc MockExchangeDataCollection) DoNotMergeItems() bool { return medc.DoNotMerge }
// Items returns a channel that has the next items in the collection. The
// channel is closed when there are no more items available.

View File

@ -25,7 +25,7 @@ func TestMockExchangeCollectionSuite(t *testing.T) {
}
func (suite *MockExchangeCollectionSuite) TestMockExchangeCollection() {
mdc := mockconnector.NewMockExchangeCollection(nil, 2)
mdc := mockconnector.NewMockExchangeCollection(nil, nil, 2)
messagesRead := 0
@ -40,7 +40,7 @@ func (suite *MockExchangeCollectionSuite) TestMockExchangeCollection() {
func (suite *MockExchangeCollectionSuite) TestMockExchangeCollectionItemSize() {
t := suite.T()
mdc := mockconnector.NewMockExchangeCollection(nil, 2)
mdc := mockconnector.NewMockExchangeCollection(nil, nil, 2)
mdc.Data[1] = []byte("This is some buffer of data so that the size is different than the default")
@ -58,7 +58,7 @@ func (suite *MockExchangeCollectionSuite) TestMockExchangeCollectionItemSize() {
// functions by verifying no failures on (de)serializing steps using kiota serialization library
func (suite *MockExchangeCollectionSuite) TestMockExchangeCollection_NewExchangeCollectionMail_Hydration() {
t := suite.T()
mdc := mockconnector.NewMockExchangeCollection(nil, 3)
mdc := mockconnector.NewMockExchangeCollection(nil, nil, 3)
buf := &bytes.Buffer{}
for stream := range mdc.Items() {

View File

@ -439,7 +439,7 @@ func (oc *Collection) reportAsCompleted(ctx context.Context, itemsFound, itemsRe
TotalBytes: byteCount, // Number of bytes read in the operation,
},
errs,
oc.folderPath.Folder(), // Additional details
oc.folderPath.Folder(false), // Additional details
)
logger.Ctx(ctx).Debugw("done streaming items", "status", status.String())
oc.statusUpdater(status)

View File

@ -213,7 +213,7 @@ func RestoreCollection(
trace.Log(ctx, "gc:oneDrive:restoreCollection", directory.String())
logger.Ctx(ctx).Infow(
"restoring to destination",
"origin", dc.FullPath().Folder(),
"origin", dc.FullPath().Folder(false),
"destination", restoreFolderElements)
parentPerms, colPerms, err := getParentAndCollectionPermissions(
@ -284,7 +284,13 @@ func RestoreCollection(
continue
}
deets.Add(itemPath.String(), itemPath.ShortRef(), "", true, itemInfo)
deets.Add(
itemPath.String(),
itemPath.ShortRef(),
"",
"", // TODO: implement locationRef
true,
itemInfo)
// Mark it as success without processing .meta
// file if we are not restoring permissions
@ -371,7 +377,13 @@ func RestoreCollection(
continue
}
deets.Add(itemPath.String(), itemPath.ShortRef(), "", true, itemInfo)
deets.Add(
itemPath.String(),
itemPath.ShortRef(),
"",
"", // TODO: implement locationRef
true,
itemInfo)
metrics.Successes++
}
}

View File

@ -167,7 +167,7 @@ func (sc *Collection) finishPopulation(ctx context.Context, attempts, success in
TotalBytes: totalBytes,
},
errs,
sc.fullPath.Folder())
sc.fullPath.Folder(false))
logger.Ctx(ctx).Debug(status.String())
if sc.statusUpdater != nil {
@ -191,7 +191,7 @@ func (sc *Collection) populate(ctx context.Context) {
ctx,
sc.fullPath.Category().String(),
observe.Safe("name"),
observe.PII(sc.fullPath.Folder()))
observe.PII(sc.fullPath.Folder(false)))
go closer()
defer func() {

View File

@ -276,6 +276,7 @@ func RestoreListCollection(
itemPath.String(),
itemPath.ShortRef(),
"",
"", // TODO: implement locationRef
true,
itemInfo)
@ -355,6 +356,7 @@ func RestorePageCollection(
itemPath.String(),
itemPath.ShortRef(),
"",
"", // TODO: implement locationRef
true,
itemInfo,
)

View File

@ -96,6 +96,12 @@ type Stream interface {
Deleted() bool
}
// LocationPather provides a LocationPath describing the path with Display Names
// instead of canonical IDs
type LocationPather interface {
LocationPath() path.Path
}
// StreamInfo is used to provide service specific
// information about the Stream
type StreamInfo interface {
@ -124,7 +130,7 @@ func StateOf(prev, curr path.Path) CollectionState {
return NewState
}
if curr.Folder() != prev.Folder() {
if curr.Folder(false) != prev.Folder(false) {
return MovedState
}

View File

@ -126,6 +126,7 @@ type itemDetails struct {
info *details.ItemInfo
repoPath path.Path
prevPath path.Path
locationPath path.Path
cached bool
}
@ -135,7 +136,7 @@ type corsoProgress struct {
deets *details.Builder
// toMerge represents items that we don't have in-memory item info for. The
// item info for these items should be sourced from a base snapshot later on.
toMerge map[string]path.Path
toMerge map[string]PrevRefs
mu sync.RWMutex
totalBytes int64
errs *fault.Errors
@ -180,27 +181,45 @@ func (cp *corsoProgress) FinishedFile(relativePath string, err error) {
cp.mu.Lock()
defer cp.mu.Unlock()
cp.toMerge[d.prevPath.ShortRef()] = d.repoPath
cp.toMerge[d.prevPath.ShortRef()] = PrevRefs{
Repo: d.repoPath,
Location: d.locationPath,
}
return
}
parent := d.repoPath.ToBuilder().Dir()
var (
locationFolders string
locPB *path.Builder
parent = d.repoPath.ToBuilder().Dir()
)
if d.locationPath != nil {
locationFolders = d.locationPath.Folder(true)
locPB = d.locationPath.ToBuilder()
// folderEntriesForPath assumes the location will
// not have an item element appended
if len(d.locationPath.Item()) > 0 {
locPB = locPB.Dir()
}
}
cp.deets.Add(
d.repoPath.String(),
d.repoPath.ShortRef(),
parent.ShortRef(),
locationFolders,
!d.cached,
*d.info,
)
*d.info)
folders := details.FolderEntriesForPath(parent)
folders := details.FolderEntriesForPath(parent, locPB)
cp.deets.AddFoldersForItem(
folders,
*d.info,
!d.cached,
)
!d.cached)
}
// Kopia interface function used as a callback when kopia finishes hashing a file.
@ -263,12 +282,17 @@ func collectionEntries(
}
var (
locationPath path.Path
// Track which items have already been seen so we can skip them if we see
// them again in the data from the base snapshot.
seen = map[string]struct{}{}
items = streamedEnts.Items()
)
if lp, ok := streamedEnts.(data.LocationPather); ok {
locationPath = lp.LocationPath()
}
for {
select {
case <-ctx.Done():
@ -328,7 +352,11 @@ func collectionEntries(
// previous snapshot then we should populate prevPath here and leave
// info nil.
itemInfo := ei.Info()
d := &itemDetails{info: &itemInfo, repoPath: itemPath}
d := &itemDetails{
info: &itemInfo,
repoPath: itemPath,
locationPath: locationPath,
}
progress.put(encodeAsPath(itemPath.PopFront().Elements()...), d)
}
@ -356,6 +384,7 @@ func streamBaseEntries(
cb func(context.Context, fs.Entry) error,
curPath path.Path,
prevPath path.Path,
locationPath path.Path,
dir fs.Directory,
encodedSeen map[string]struct{},
globalExcludeSet map[string]struct{},
@ -411,7 +440,12 @@ func streamBaseEntries(
// All items have item info in the base backup. However, we need to make
// sure we have enough metadata to find those entries. To do that we add the
// item to progress and having progress aggregate everything for later.
d := &itemDetails{info: nil, repoPath: itemPath, prevPath: prevItemPath}
d := &itemDetails{
info: nil,
repoPath: itemPath,
prevPath: prevItemPath,
locationPath: locationPath,
}
progress.put(encodeAsPath(itemPath.PopFront().Elements()...), d)
if err := cb(ctx, entry); err != nil {
@ -455,6 +489,12 @@ func getStreamItemFunc(
}
}
var locationPath path.Path
if lp, ok := streamedEnts.(data.LocationPather); ok {
locationPath = lp.LocationPath()
}
seen, err := collectionEntries(ctx, cb, streamedEnts, progress)
if err != nil {
return errors.Wrap(err, "streaming collection entries")
@ -465,6 +505,7 @@ func getStreamItemFunc(
cb,
curPath,
prevPath,
locationPath,
baseDir,
seen,
globalExcludeSet,
@ -533,6 +574,7 @@ type treeMap struct {
// Previous path this directory may have resided at if it is sourced from a
// base snapshot.
prevPath path.Path
// Child directories of this directory.
childDirs map[string]*treeMap
// Reference to data pulled from the external service. Contains only items in

File diff suppressed because it is too large Load Diff

View File

@ -114,6 +114,13 @@ type IncrementalBase struct {
SubtreePaths []*path.Builder
}
// PrevRefs hold the repoRef and locationRef from the items
// that need to be merged in from prior snapshots.
type PrevRefs struct {
// Repo is the complete storage path (repoRef) of the item in the repository.
Repo path.Path
// Location is the human-readable path (locationRef) of the item in the
// service that sourced it.  May be nil when no location data is available;
// consumers must nil-check before use.
Location path.Path
}
// BackupCollections takes a set of collections and creates a kopia snapshot
// with the data that they contain. previousSnapshots is used for incremental
// backups and should represent the base snapshot from which metadata is sourced
@ -128,7 +135,7 @@ func (w Wrapper) BackupCollections(
tags map[string]string,
buildTreeWithBase bool,
errs *fault.Errors,
) (*BackupStats, *details.Builder, map[string]path.Path, error) {
) (*BackupStats, *details.Builder, map[string]PrevRefs, error) {
if w.c == nil {
return nil, nil, nil, clues.Stack(errNotConnected).WithClues(ctx)
}
@ -143,7 +150,7 @@ func (w Wrapper) BackupCollections(
progress := &corsoProgress{
pending: map[string]*itemDetails{},
deets: &details.Builder{},
toMerge: map[string]path.Path{},
toMerge: map[string]PrevRefs{},
errs: errs,
}

View File

@ -27,7 +27,9 @@ import (
const (
testTenant = "a-tenant"
testUser = "user1"
testInboxID = "Inbox_ID"
testInboxDir = "Inbox"
testArchiveID = "Archive_ID"
testArchiveDir = "Archive"
testFileName = "file1"
testFileName2 = "file2"
@ -144,8 +146,10 @@ type KopiaIntegrationSuite struct {
ctx context.Context
flush func()
testPath1 path.Path
testPath2 path.Path
storePath1 path.Path
storePath2 path.Path
locPath1 path.Path
locPath2 path.Path
}
func TestKopiaIntegrationSuite(t *testing.T) {
@ -164,21 +168,21 @@ func (suite *KopiaIntegrationSuite) SetupSuite() {
testTenant,
testUser,
path.EmailCategory,
false,
)
false)
require.NoError(suite.T(), err)
suite.testPath1 = tmp
suite.storePath1 = tmp
suite.locPath1 = tmp
tmp, err = path.Builder{}.Append(testArchiveDir).ToDataLayerExchangePathForCategory(
testTenant,
testUser,
path.EmailCategory,
false,
)
false)
require.NoError(suite.T(), err)
suite.testPath2 = tmp
suite.storePath2 = tmp
suite.locPath2 = tmp
}
func (suite *KopiaIntegrationSuite) SetupTest() {
@ -199,13 +203,13 @@ func (suite *KopiaIntegrationSuite) TearDownTest() {
func (suite *KopiaIntegrationSuite) TestBackupCollections() {
collections := []data.BackupCollection{
mockconnector.NewMockExchangeCollection(
suite.testPath1,
5,
),
suite.storePath1,
suite.locPath1,
5),
mockconnector.NewMockExchangeCollection(
suite.testPath2,
42,
),
suite.storePath2,
suite.locPath2,
42),
}
// tags that are supplied by the caller. This includes basic tags to support
@ -217,14 +221,14 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
reasons := []Reason{
{
ResourceOwner: suite.testPath1.ResourceOwner(),
Service: suite.testPath1.Service(),
Category: suite.testPath1.Category(),
ResourceOwner: suite.storePath1.ResourceOwner(),
Service: suite.storePath1.Service(),
Category: suite.storePath1.Category(),
},
{
ResourceOwner: suite.testPath2.ResourceOwner(),
Service: suite.testPath2.Service(),
Category: suite.testPath2.Category(),
ResourceOwner: suite.storePath2.ResourceOwner(),
Service: suite.storePath2.Service(),
Category: suite.storePath2.Category(),
},
}
@ -311,7 +315,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
prevSnaps = append(prevSnaps, IncrementalBase{
Manifest: snap,
SubtreePaths: []*path.Builder{
suite.testPath1.ToBuilder().Dir(),
suite.storePath1.ToBuilder().Dir(),
},
})
})
@ -342,13 +346,13 @@ func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() {
tags[k] = ""
}
dc1 := mockconnector.NewMockExchangeCollection(suite.testPath1, 1)
dc2 := mockconnector.NewMockExchangeCollection(suite.testPath2, 1)
dc1 := mockconnector.NewMockExchangeCollection(suite.storePath1, suite.locPath1, 1)
dc2 := mockconnector.NewMockExchangeCollection(suite.storePath2, suite.locPath2, 1)
fp1, err := suite.testPath1.Append(dc1.Names[0], true)
fp1, err := suite.storePath1.Append(dc1.Names[0], true)
require.NoError(t, err)
fp2, err := suite.testPath2.Append(dc2.Names[0], true)
fp2, err := suite.storePath2.Append(dc2.Names[0], true)
require.NoError(t, err)
stats, _, _, err := w.BackupCollections(
@ -434,7 +438,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
collections := []data.BackupCollection{
&mockBackupCollection{
path: suite.testPath1,
path: suite.storePath1,
streams: []data.Stream{
&mockconnector.MockExchangeData{
ID: testFileName,
@ -447,7 +451,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
},
},
&mockBackupCollection{
path: suite.testPath2,
path: suite.storePath2,
streams: []data.Stream{
&mockconnector.MockExchangeData{
ID: testFileName3,
@ -487,7 +491,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
// 5 file and 6 folder entries.
assert.Len(t, deets.Details().Entries, 5+6)
failedPath, err := suite.testPath2.Append(testFileName4, true)
failedPath, err := suite.storePath2.Append(testFileName4, true)
require.NoError(t, err)
ic := i64counter{}
@ -792,8 +796,8 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
cols: func() []data.BackupCollection {
c := mockconnector.NewMockExchangeCollection(
suite.testPath1,
1,
)
suite.testPath1,
1)
c.ColState = data.NotMovedState
return []data.BackupCollection{c}

View File

@ -338,7 +338,7 @@ type backuper interface {
tags map[string]string,
buildTreeWithBase bool,
errs *fault.Errors,
) (*kopia.BackupStats, *details.Builder, map[string]path.Path, error)
) (*kopia.BackupStats, *details.Builder, map[string]kopia.PrevRefs, error)
}
func selectorToReasons(sel selectors.Selector) []kopia.Reason {
@ -397,7 +397,7 @@ func consumeBackupDataCollections(
backupID model.StableID,
isIncremental bool,
errs *fault.Errors,
) (*kopia.BackupStats, *details.Builder, map[string]path.Path, error) {
) (*kopia.BackupStats, *details.Builder, map[string]kopia.PrevRefs, error) {
complete, closer := observe.MessageWithCompletion(ctx, observe.Safe("Backing up data"))
defer func() {
complete <- struct{}{}
@ -503,7 +503,7 @@ func mergeDetails(
ms *store.Wrapper,
detailsStore detailsReader,
mans []*kopia.ManifestEntry,
shortRefsFromPrevBackup map[string]path.Path,
shortRefsFromPrevBackup map[string]kopia.PrevRefs,
deets *details.Builder,
errs *fault.Errors,
) error {
@ -559,13 +559,16 @@ func mergeDetails(
continue
}
newPath := shortRefsFromPrevBackup[rr.ShortRef()]
if newPath == nil {
prev, ok := shortRefsFromPrevBackup[rr.ShortRef()]
if !ok {
// This entry was not sourced from a base snapshot or cached from a
// previous backup, skip it.
continue
}
newPath := prev.Repo
newLoc := prev.Location
// Fixup paths in the item.
item := entry.ItemInfo
if err := details.UpdateItem(&item, newPath); err != nil {
@ -574,16 +577,27 @@ func mergeDetails(
// TODO(ashmrtn): This may need updated if we start using this merge
// strategry for items that were cached in kopia.
itemUpdated := newPath.String() != rr.String()
var (
itemUpdated = newPath.String() != rr.String()
newLocStr string
locBuilder *path.Builder
)
if newLoc != nil {
locBuilder = newLoc.ToBuilder()
newLocStr = newLoc.Folder(true)
itemUpdated = itemUpdated || newLocStr != entry.LocationRef
}
deets.Add(
newPath.String(),
newPath.ShortRef(),
newPath.ToBuilder().Dir().ShortRef(),
newLocStr,
itemUpdated,
item)
folders := details.FolderEntriesForPath(newPath.ToBuilder().Dir())
folders := details.FolderEntriesForPath(newPath.ToBuilder().Dir(), locBuilder)
deets.AddFoldersForItem(folders, item, itemUpdated)
// Track how many entries we added so that we know if we got them all when

View File

@ -402,7 +402,7 @@ func buildCollections(
c.pathFolders,
false)
mc := mockconnector.NewMockExchangeCollection(pth, len(c.items))
mc := mockconnector.NewMockExchangeCollection(pth, pth, len(c.items))
for i := 0; i < len(c.items); i++ {
mc.Names[i] = c.items[i].name
@ -777,8 +777,8 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
p, err := path.FromDataLayerPath(dest.deets.Entries[0].RepoRef, true)
require.NoError(t, err)
id, ok := cr.PathInCache(p.Folder())
require.True(t, ok, "dir %s found in %s cache", p.Folder(), category)
id, ok := cr.PathInCache(p.Folder(false))
require.True(t, ok, "dir %s found in %s cache", p.Folder(false), category)
d := dataset[category].dests[destName]
d.containerID = id
@ -895,8 +895,8 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
p, err := path.FromDataLayerPath(deets.Entries[0].RepoRef, true)
require.NoError(t, err)
id, ok := cr.PathInCache(p.Folder())
require.True(t, ok, "dir %s found in %s cache", p.Folder(), category)
id, ok := cr.PathInCache(p.Folder(false))
require.True(t, ok, "dir %s found in %s cache", p.Folder(false), category)
dataset[category].dests[container3] = contDeets{id, deets}
}

View File

@ -89,8 +89,7 @@ type mockBackuper struct {
bases []kopia.IncrementalBase,
cs []data.BackupCollection,
tags map[string]string,
buildTreeWithBase bool,
)
buildTreeWithBase bool)
}
func (mbu mockBackuper) BackupCollections(
@ -101,7 +100,7 @@ func (mbu mockBackuper) BackupCollections(
tags map[string]string,
buildTreeWithBase bool,
errs *fault.Errors,
) (*kopia.BackupStats, *details.Builder, map[string]path.Path, error) {
) (*kopia.BackupStats, *details.Builder, map[string]kopia.PrevRefs, error) {
if mbu.checkFunc != nil {
mbu.checkFunc(bases, cs, tags, buildTreeWithBase)
}
@ -252,6 +251,7 @@ func makeFolderEntry(
RepoRef: pb.String(),
ShortRef: pb.ShortRef(),
ParentRef: pb.Dir().ShortRef(),
LocationRef: pb.PopFront().PopFront().PopFront().PopFront().Dir().String(),
ItemInfo: details.ItemInfo{
Folder: &details.FolderInfo{
ItemType: details.FolderItem,
@ -277,15 +277,22 @@ func makePath(t *testing.T, elements []string, isItem bool) path.Path {
func makeDetailsEntry(
t *testing.T,
p path.Path,
l path.Path,
size int,
updated bool,
) *details.DetailsEntry {
t.Helper()
var lr string
if l != nil {
lr = l.PopFront().PopFront().PopFront().PopFront().Dir().String()
}
res := &details.DetailsEntry{
RepoRef: p.String(),
ShortRef: p.ShortRef(),
ParentRef: p.ToBuilder().Dir().ShortRef(),
LocationRef: lr,
ItemInfo: details.ItemInfo{},
Updated: updated,
}
@ -607,6 +614,21 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
},
true,
)
locationPath1 = makePath(
suite.T(),
[]string{
tenant,
path.OneDriveService.String(),
ro,
path.FilesCategory.String(),
"drives",
"drive-id",
"root:",
"work-display-name",
"item1",
},
true,
)
itemPath2 = makePath(
suite.T(),
[]string{
@ -622,6 +644,21 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
},
true,
)
locationPath2 = makePath(
suite.T(),
[]string{
tenant,
path.OneDriveService.String(),
ro,
path.FilesCategory.String(),
"drives",
"drive-id",
"root:",
"personal-display-name",
"item2",
},
true,
)
itemPath3 = makePath(
suite.T(),
[]string{
@ -634,6 +671,18 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
},
true,
)
locationPath3 = makePath(
suite.T(),
[]string{
tenant,
path.ExchangeService.String(),
ro,
path.EmailCategory.String(),
"personal-display-name",
"item3",
},
true,
)
backup1 = backup.Backup{
BaseModel: model.BaseModel{
@ -669,7 +718,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
populatedModels map[model.StableID]backup.Backup
populatedDetails map[string]*details.Details
inputMans []*kopia.ManifestEntry
inputShortRefsFromPrevBackup map[string]path.Path
inputShortRefsFromPrevBackup map[string]kopia.PrevRefs
errCheck assert.ErrorAssertionFunc
expectedEntries []*details.DetailsEntry
@ -682,15 +731,18 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
},
{
name: "EmptyShortRefsFromPrevBackup",
inputShortRefsFromPrevBackup: map[string]path.Path{},
inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{},
errCheck: assert.NoError,
// Use empty slice so we don't error out on nil != empty.
expectedEntries: []*details.DetailsEntry{},
},
{
name: "BackupIDNotFound",
inputShortRefsFromPrevBackup: map[string]path.Path{
itemPath1.ShortRef(): itemPath1,
inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
itemPath1.ShortRef(): {
Repo: itemPath1,
Location: locationPath1,
},
},
inputMans: []*kopia.ManifestEntry{
{
@ -704,8 +756,11 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
},
{
name: "DetailsIDNotFound",
inputShortRefsFromPrevBackup: map[string]path.Path{
itemPath1.ShortRef(): itemPath1,
inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
itemPath1.ShortRef(): {
Repo: itemPath1,
Location: locationPath1,
},
},
inputMans: []*kopia.ManifestEntry{
{
@ -727,9 +782,15 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
},
{
name: "BaseMissingItems",
inputShortRefsFromPrevBackup: map[string]path.Path{
itemPath1.ShortRef(): itemPath1,
itemPath2.ShortRef(): itemPath2,
inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
itemPath1.ShortRef(): {
Repo: itemPath1,
Location: locationPath1,
},
itemPath2.ShortRef(): {
Repo: itemPath2,
Location: locationPath2,
},
},
inputMans: []*kopia.ManifestEntry{
{
@ -746,7 +807,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
backup1.DetailsID: {
DetailsModel: details.DetailsModel{
Entries: []details.DetailsEntry{
*makeDetailsEntry(suite.T(), itemPath1, 42, false),
*makeDetailsEntry(suite.T(), itemPath1, itemPath1, 42, false),
},
},
},
@ -755,8 +816,11 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
},
{
name: "TooManyItems",
inputShortRefsFromPrevBackup: map[string]path.Path{
itemPath1.ShortRef(): itemPath1,
inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
itemPath1.ShortRef(): {
Repo: itemPath1,
Location: locationPath1,
},
},
inputMans: []*kopia.ManifestEntry{
{
@ -779,7 +843,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
backup1.DetailsID: {
DetailsModel: details.DetailsModel{
Entries: []details.DetailsEntry{
*makeDetailsEntry(suite.T(), itemPath1, 42, false),
*makeDetailsEntry(suite.T(), itemPath1, itemPath1, 42, false),
},
},
},
@ -788,8 +852,11 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
},
{
name: "BadBaseRepoRef",
inputShortRefsFromPrevBackup: map[string]path.Path{
itemPath1.ShortRef(): itemPath1,
inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
itemPath1.ShortRef(): {
Repo: itemPath2,
Location: locationPath2,
},
},
inputMans: []*kopia.ManifestEntry{
{
@ -834,8 +901,9 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
},
{
name: "BadOneDrivePath",
inputShortRefsFromPrevBackup: map[string]path.Path{
itemPath1.ShortRef(): makePath(
inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
itemPath1.ShortRef(): {
Repo: makePath(
suite.T(),
[]string{
itemPath1.Tenant(),
@ -848,6 +916,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
true,
),
},
},
inputMans: []*kopia.ManifestEntry{
{
Manifest: makeManifest(suite.T(), backup1.ID, ""),
@ -863,7 +932,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
backup1.DetailsID: {
DetailsModel: details.DetailsModel{
Entries: []details.DetailsEntry{
*makeDetailsEntry(suite.T(), itemPath1, 42, false),
*makeDetailsEntry(suite.T(), itemPath1, itemPath1, 42, false),
},
},
},
@ -872,8 +941,11 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
},
{
name: "ItemMerged",
inputShortRefsFromPrevBackup: map[string]path.Path{
itemPath1.ShortRef(): itemPath1,
inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
itemPath1.ShortRef(): {
Repo: itemPath1,
Location: locationPath1,
},
},
inputMans: []*kopia.ManifestEntry{
{
@ -890,20 +962,88 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
backup1.DetailsID: {
DetailsModel: details.DetailsModel{
Entries: []details.DetailsEntry{
*makeDetailsEntry(suite.T(), itemPath1, 42, false),
*makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
},
},
},
},
errCheck: assert.NoError,
expectedEntries: []*details.DetailsEntry{
makeDetailsEntry(suite.T(), itemPath1, 42, false),
makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
},
},
{
name: "ItemMergedNoLocation",
inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
itemPath1.ShortRef(): {
Repo: itemPath1,
},
},
inputMans: []*kopia.ManifestEntry{
{
Manifest: makeManifest(suite.T(), backup1.ID, ""),
Reasons: []kopia.Reason{
pathReason1,
},
},
},
populatedModels: map[model.StableID]backup.Backup{
backup1.ID: backup1,
},
populatedDetails: map[string]*details.Details{
backup1.DetailsID: {
DetailsModel: details.DetailsModel{
Entries: []details.DetailsEntry{
*makeDetailsEntry(suite.T(), itemPath1, nil, 42, false),
},
},
},
},
errCheck: assert.NoError,
expectedEntries: []*details.DetailsEntry{
makeDetailsEntry(suite.T(), itemPath1, nil, 42, false),
},
},
{
name: "ItemMergedSameLocation",
inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
itemPath1.ShortRef(): {
Repo: itemPath1,
Location: itemPath1,
},
},
inputMans: []*kopia.ManifestEntry{
{
Manifest: makeManifest(suite.T(), backup1.ID, ""),
Reasons: []kopia.Reason{
pathReason1,
},
},
},
populatedModels: map[model.StableID]backup.Backup{
backup1.ID: backup1,
},
populatedDetails: map[string]*details.Details{
backup1.DetailsID: {
DetailsModel: details.DetailsModel{
Entries: []details.DetailsEntry{
*makeDetailsEntry(suite.T(), itemPath1, itemPath1, 42, false),
},
},
},
},
errCheck: assert.NoError,
expectedEntries: []*details.DetailsEntry{
makeDetailsEntry(suite.T(), itemPath1, itemPath1, 42, false),
},
},
{
name: "ItemMergedExtraItemsInBase",
inputShortRefsFromPrevBackup: map[string]path.Path{
itemPath1.ShortRef(): itemPath1,
inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
itemPath1.ShortRef(): {
Repo: itemPath1,
Location: locationPath1,
},
},
inputMans: []*kopia.ManifestEntry{
{
@ -920,21 +1060,24 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
backup1.DetailsID: {
DetailsModel: details.DetailsModel{
Entries: []details.DetailsEntry{
*makeDetailsEntry(suite.T(), itemPath1, 42, false),
*makeDetailsEntry(suite.T(), itemPath2, 84, false),
*makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
*makeDetailsEntry(suite.T(), itemPath2, locationPath2, 84, false),
},
},
},
},
errCheck: assert.NoError,
expectedEntries: []*details.DetailsEntry{
makeDetailsEntry(suite.T(), itemPath1, 42, false),
makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
},
},
{
name: "ItemMoved",
inputShortRefsFromPrevBackup: map[string]path.Path{
itemPath1.ShortRef(): itemPath2,
inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
itemPath1.ShortRef(): {
Repo: itemPath2,
Location: locationPath2,
},
},
inputMans: []*kopia.ManifestEntry{
{
@ -951,21 +1094,27 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
backup1.DetailsID: {
DetailsModel: details.DetailsModel{
Entries: []details.DetailsEntry{
*makeDetailsEntry(suite.T(), itemPath1, 42, false),
*makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
},
},
},
},
errCheck: assert.NoError,
expectedEntries: []*details.DetailsEntry{
makeDetailsEntry(suite.T(), itemPath2, 42, true),
makeDetailsEntry(suite.T(), itemPath2, locationPath2, 42, true),
},
},
{
name: "MultipleBases",
inputShortRefsFromPrevBackup: map[string]path.Path{
itemPath1.ShortRef(): itemPath1,
itemPath3.ShortRef(): itemPath3,
inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
itemPath1.ShortRef(): {
Repo: itemPath1,
Location: locationPath1,
},
itemPath3.ShortRef(): {
Repo: itemPath3,
Location: locationPath3,
},
},
inputMans: []*kopia.ManifestEntry{
{
@ -989,7 +1138,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
backup1.DetailsID: {
DetailsModel: details.DetailsModel{
Entries: []details.DetailsEntry{
*makeDetailsEntry(suite.T(), itemPath1, 42, false),
*makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
},
},
},
@ -997,23 +1146,26 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
DetailsModel: details.DetailsModel{
Entries: []details.DetailsEntry{
// This entry should not be picked due to a mismatch on Reasons.
*makeDetailsEntry(suite.T(), itemPath1, 84, false),
*makeDetailsEntry(suite.T(), itemPath1, locationPath1, 84, false),
// This item should be picked.
*makeDetailsEntry(suite.T(), itemPath3, 37, false),
*makeDetailsEntry(suite.T(), itemPath3, locationPath3, 37, false),
},
},
},
},
errCheck: assert.NoError,
expectedEntries: []*details.DetailsEntry{
makeDetailsEntry(suite.T(), itemPath1, 42, false),
makeDetailsEntry(suite.T(), itemPath3, 37, false),
makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
makeDetailsEntry(suite.T(), itemPath3, locationPath3, 37, false),
},
},
{
name: "SomeBasesIncomplete",
inputShortRefsFromPrevBackup: map[string]path.Path{
itemPath1.ShortRef(): itemPath1,
inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
itemPath1.ShortRef(): {
Repo: itemPath1,
Location: locationPath1,
},
},
inputMans: []*kopia.ManifestEntry{
{
@ -1037,7 +1189,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
backup1.DetailsID: {
DetailsModel: details.DetailsModel{
Entries: []details.DetailsEntry{
*makeDetailsEntry(suite.T(), itemPath1, 42, false),
*makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
},
},
},
@ -1045,14 +1197,14 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
DetailsModel: details.DetailsModel{
Entries: []details.DetailsEntry{
// This entry should not be picked due to being incomplete.
*makeDetailsEntry(suite.T(), itemPath1, 84, false),
*makeDetailsEntry(suite.T(), itemPath1, locationPath1, 84, false),
},
},
},
},
errCheck: assert.NoError,
expectedEntries: []*details.DetailsEntry{
makeDetailsEntry(suite.T(), itemPath1, 42, false),
makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
},
},
}
@ -1075,6 +1227,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
&deets,
fault.New(true))
test.errCheck(t, err)
if err != nil {
return
}
@ -1103,8 +1256,12 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsFolders()
itemPath1 = makePath(
t,
pathElems,
true,
)
true)
locPath1 = makePath(
t,
pathElems[:len(pathElems)-1],
false)
backup1 = backup.Backup{
BaseModel: model.BaseModel{
@ -1119,8 +1276,11 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsFolders()
Category: itemPath1.Category(),
}
inputToMerge = map[string]path.Path{
itemPath1.ShortRef(): itemPath1,
inputToMerge = map[string]kopia.PrevRefs{
itemPath1.ShortRef(): {
Repo: itemPath1,
Location: locPath1,
},
}
inputMans = []*kopia.ManifestEntry{
@ -1137,7 +1297,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsFolders()
}
itemSize = 42
itemDetails = makeDetailsEntry(t, itemPath1, itemSize, false)
itemDetails = makeDetailsEntry(t, itemPath1, itemPath1, itemSize, false)
populatedDetails = map[string]*details.Details{
backup1.DetailsID: {

View File

@ -44,7 +44,7 @@ func (suite *StreamStoreIntegrationSuite) TestDetails() {
deetsBuilder := &details.Builder{}
deetsBuilder.Add("ref", "shortref", "parentref", true,
deetsBuilder.Add("ref", "shortref", "parentref", "locationRef", true,
details.ItemInfo{
Exchange: &details.ExchangeInfo{
Subject: "hello world",
@ -66,6 +66,7 @@ func (suite *StreamStoreIntegrationSuite) TestDetails() {
assert.Equal(t, deets.Entries[0].ParentRef, readDeets.Entries[0].ParentRef)
assert.Equal(t, deets.Entries[0].ShortRef, readDeets.Entries[0].ShortRef)
assert.Equal(t, deets.Entries[0].RepoRef, readDeets.Entries[0].RepoRef)
assert.Equal(t, deets.Entries[0].LocationRef, readDeets.Entries[0].LocationRef)
assert.Equal(t, deets.Entries[0].Updated, readDeets.Entries[0].Updated)
assert.NotNil(t, readDeets.Entries[0].Exchange)
assert.Equal(t, *deets.Entries[0].Exchange, *readDeets.Entries[0].Exchange)

View File

@ -14,7 +14,7 @@ import (
"github.com/alcionai/corso/src/pkg/selectors"
)
const Version = 1
const Version = 2
// Backup represents the result of a backup operation
type Backup struct {

View File

@ -18,6 +18,7 @@ type folderEntry struct {
// RepoRef is the full storage path of the folder.
RepoRef string
// ShortRef is the short hash reference for this folder's RepoRef.
ShortRef string
// ParentRef is the ShortRef of the parent folder.
ParentRef string
// LocationRef holds the human-readable container path of the folder,
// without the tenant/service/owner/category metadata prefix.
LocationRef string
// Updated is true when the folder was added or changed in this backup.
Updated bool
Info ItemInfo
}
@ -110,10 +111,14 @@ type Builder struct {
knownFolders map[string]folderEntry `json:"-"`
}
func (b *Builder) Add(repoRef, shortRef, parentRef string, updated bool, info ItemInfo) {
func (b *Builder) Add(
repoRef, shortRef, parentRef, locationRef string,
updated bool,
info ItemInfo,
) {
b.mu.Lock()
defer b.mu.Unlock()
b.d.add(repoRef, shortRef, parentRef, updated, info)
b.d.add(repoRef, shortRef, parentRef, locationRef, updated, info)
}
func (b *Builder) Details() *Details {
@ -131,30 +136,65 @@ func (b *Builder) Details() *Details {
// TODO(ashmrtn): If we never need to pre-populate the modified time of a folder
// we should just merge this with AddFoldersForItem, have Add call
// AddFoldersForItem, and unexport AddFoldersForItem.
func FolderEntriesForPath(parent *path.Builder) []folderEntry {
func FolderEntriesForPath(parent, location *path.Builder) []folderEntry {
folders := []folderEntry{}
lfs := locationRefOf(location)
for len(parent.Elements()) > 0 {
nextParent := parent.Dir()
var (
nextParent = parent.Dir()
lr string
dn = parent.LastElem()
)
// TODO: We may have future cases where the storage hierarchy
// doesn't match the location hierarchy.
if lfs != nil {
lr = lfs.String()
if len(lfs.Elements()) > 0 {
dn = lfs.LastElem()
}
}
folders = append(folders, folderEntry{
RepoRef: parent.String(),
ShortRef: parent.ShortRef(),
ParentRef: nextParent.ShortRef(),
LocationRef: lr,
Info: ItemInfo{
Folder: &FolderInfo{
ItemType: FolderItem,
DisplayName: parent.Elements()[len(parent.Elements())-1],
DisplayName: dn,
},
},
})
parent = nextParent
if lfs != nil {
lfs = lfs.Dir()
}
}
return folders
}
// locationRefOf assumes pb holds a data-layer path of the form:
// <tenant>/<service>/<owner>/<category>/<logical_containers>...
// and returns a builder containing only the <logical_containers>...
// elements.  A nil builder yields a nil result.
func locationRefOf(pb *path.Builder) *path.Builder {
	if pb == nil {
		return nil
	}

	// Strip the four metadata prefix elements:
	// tenant, service, owner, and category.
	return pb.PopFront().PopFront().PopFront().PopFront()
}
// AddFoldersForItem adds entries for the given folders. It skips adding entries that
// have been added by previous calls.
func (b *Builder) AddFoldersForItem(folders []folderEntry, itemInfo ItemInfo, updated bool) {
@ -202,11 +242,16 @@ type Details struct {
DetailsModel
}
func (d *Details) add(repoRef, shortRef, parentRef string, updated bool, info ItemInfo) {
func (d *Details) add(
repoRef, shortRef, parentRef, locationRef string,
updated bool,
info ItemInfo,
) {
d.Entries = append(d.Entries, DetailsEntry{
RepoRef: repoRef,
ShortRef: shortRef,
ParentRef: parentRef,
LocationRef: locationRef,
Updated: updated,
ItemInfo: info,
})
@ -233,9 +278,21 @@ type DetailsEntry struct {
RepoRef string `json:"repoRef"`
ShortRef string `json:"shortRef"`
ParentRef string `json:"parentRef,omitempty"`
// LocationRef contains the logical path structure by its human-readable
// display names. IE: If an item is located at "/Inbox/Important", we
// hold that string in the LocationRef, while the actual IDs of each
// container are used for the RepoRef.
// LocationRef only holds the container values, and does not include
// the metadata prefixes (tenant, service, owner, etc) found in the
// repoRef.
// Currently only implemented for Exchange Calendars.
LocationRef string `json:"locationRef,omitempty"`
// Indicates the item was added or updated in this backup
// Always `true` for full backups
Updated bool `json:"updated"`
ItemInfo
}
@ -316,18 +373,21 @@ const (
FolderItem ItemType = iota + 300
)
func UpdateItem(item *ItemInfo, newPath path.Path) error {
func UpdateItem(item *ItemInfo, repoPath path.Path) error {
// Only OneDrive and SharePoint have information about parent folders
// contained in them.
var updatePath func(path.Path) error
switch item.infoType() {
case SharePointItem:
return item.SharePoint.UpdateParentPath(newPath)
updatePath = item.SharePoint.UpdateParentPath
case OneDriveItem:
return item.OneDrive.UpdateParentPath(newPath)
updatePath = item.OneDrive.UpdateParentPath
default:
return nil
}
return nil
return updatePath(repoPath)
}
// ItemInfo is a oneOf that contains service specific

View File

@ -41,6 +41,7 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
entry: DetailsEntry{
RepoRef: "reporef",
ShortRef: "deadbeef",
LocationRef: "locationref",
},
expectHs: []string{"ID"},
expectVs: []string{"deadbeef"},
@ -50,6 +51,7 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
entry: DetailsEntry{
RepoRef: "reporef",
ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{
Exchange: &ExchangeInfo{
ItemType: ExchangeEvent,
@ -69,6 +71,7 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
entry: DetailsEntry{
RepoRef: "reporef",
ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{
Exchange: &ExchangeInfo{
ItemType: ExchangeContact,
@ -84,6 +87,7 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
entry: DetailsEntry{
RepoRef: "reporef",
ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{
Exchange: &ExchangeInfo{
ItemType: ExchangeMail,
@ -101,6 +105,7 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
entry: DetailsEntry{
RepoRef: "reporef",
ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{
SharePoint: &SharePointInfo{
ItemName: "itemName",
@ -130,6 +135,7 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
entry: DetailsEntry{
RepoRef: "reporef",
ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{
OneDrive: &OneDriveInfo{
ItemName: "itemName",
@ -159,35 +165,55 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
var pathItemsTable = []struct {
name string
ents []DetailsEntry
expectRefs []string
expectRepoRefs []string
expectLocationRefs []string
}{
{
name: "nil entries",
ents: nil,
expectRefs: []string{},
expectRepoRefs: []string{},
expectLocationRefs: []string{},
},
{
name: "single entry",
ents: []DetailsEntry{
{RepoRef: "abcde"},
{
RepoRef: "abcde",
LocationRef: "locationref",
},
expectRefs: []string{"abcde"},
},
expectRepoRefs: []string{"abcde"},
expectLocationRefs: []string{"locationref"},
},
{
name: "multiple entries",
ents: []DetailsEntry{
{RepoRef: "abcde"},
{RepoRef: "12345"},
{
RepoRef: "abcde",
LocationRef: "locationref",
},
expectRefs: []string{"abcde", "12345"},
{
RepoRef: "12345",
LocationRef: "locationref2",
},
},
expectRepoRefs: []string{"abcde", "12345"},
expectLocationRefs: []string{"locationref", "locationref2"},
},
{
name: "multiple entries with folder",
ents: []DetailsEntry{
{RepoRef: "abcde"},
{RepoRef: "12345"},
{
RepoRef: "abcde",
LocationRef: "locationref",
},
{
RepoRef: "12345",
LocationRef: "locationref2",
},
{
RepoRef: "deadbeef",
LocationRef: "locationref3",
ItemInfo: ItemInfo{
Folder: &FolderInfo{
DisplayName: "test folder",
@ -195,7 +221,8 @@ var pathItemsTable = []struct {
},
},
},
expectRefs: []string{"abcde", "12345"},
expectRepoRefs: []string{"abcde", "12345"},
expectLocationRefs: []string{"locationref", "locationref2"},
},
}
@ -207,7 +234,7 @@ func (suite *DetailsUnitSuite) TestDetailsModel_Path() {
Entries: test.ents,
},
}
assert.Equal(t, test.expectRefs, d.Paths())
assert.ElementsMatch(t, test.expectRepoRefs, d.Paths())
})
}
}
@ -222,10 +249,11 @@ func (suite *DetailsUnitSuite) TestDetailsModel_Items() {
}
ents := d.Items()
assert.Len(t, ents, len(test.expectRefs))
assert.Len(t, ents, len(test.expectRepoRefs))
for _, e := range ents {
assert.Contains(t, test.expectRefs, e.RepoRef)
assert.Contains(t, test.expectRepoRefs, e.RepoRef)
assert.Contains(t, test.expectLocationRefs, e.LocationRef)
}
})
}
@ -256,6 +284,7 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{
Folder: &FolderInfo{
Modified: folderTimeOlderThanItem,
@ -266,6 +295,7 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
RepoRef: "rr2",
ShortRef: "sr2",
ParentRef: "pr2",
LocationRef: "lr2",
Info: ItemInfo{
Folder: &FolderInfo{
Modified: folderTimeNewerThanItem,
@ -286,6 +316,7 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{
Folder: &FolderInfo{
Modified: folderTimeOlderThanItem,
@ -296,6 +327,7 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
RepoRef: "rr2",
ShortRef: "sr2",
ParentRef: "pr2",
LocationRef: "lr2",
Info: ItemInfo{
Folder: &FolderInfo{
Modified: folderTimeOlderThanItem,
@ -306,6 +338,7 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{
Folder: &FolderInfo{
Modified: folderTimeOlderThanItem,
@ -316,6 +349,7 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
RepoRef: "rr3",
ShortRef: "sr3",
ParentRef: "pr3",
LocationRef: "lr3",
Info: ItemInfo{
Folder: &FolderInfo{
Modified: folderTimeNewerThanItem,
@ -366,6 +400,7 @@ func (suite *DetailsUnitSuite) TestDetails_AddFoldersUpdate() {
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{
Folder: &FolderInfo{},
},
@ -375,6 +410,7 @@ func (suite *DetailsUnitSuite) TestDetails_AddFoldersUpdate() {
RepoRef: "rr2",
ShortRef: "sr2",
ParentRef: "pr2",
LocationRef: "lr2",
Info: ItemInfo{
Folder: &FolderInfo{},
},
@ -393,6 +429,7 @@ func (suite *DetailsUnitSuite) TestDetails_AddFoldersUpdate() {
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{
Folder: &FolderInfo{},
},
@ -401,6 +438,7 @@ func (suite *DetailsUnitSuite) TestDetails_AddFoldersUpdate() {
RepoRef: "rr2",
ShortRef: "sr2",
ParentRef: "pr2",
LocationRef: "lr2",
Info: ItemInfo{
Folder: &FolderInfo{},
},
@ -485,6 +523,7 @@ func (suite *DetailsUnitSuite) TestDetails_AddFoldersDifferentServices() {
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{
Folder: &FolderInfo{},
},
@ -562,7 +601,8 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
table := []struct {
name string
input ItemInfo
newPath path.Path
repoPath path.Path
locPath path.Path
errCheck assert.ErrorAssertionFunc
expectedItem ItemInfo
}{
@ -616,7 +656,8 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
ParentPath: folder1,
},
},
newPath: newOneDrivePath,
repoPath: newOneDrivePath,
locPath: newOneDrivePath,
errCheck: assert.NoError,
expectedItem: ItemInfo{
OneDrive: &OneDriveInfo{
@ -633,7 +674,8 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
ParentPath: folder1,
},
},
newPath: newOneDrivePath,
repoPath: newOneDrivePath,
locPath: newOneDrivePath,
errCheck: assert.NoError,
expectedItem: ItemInfo{
SharePoint: &SharePointInfo{
@ -650,7 +692,8 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
ParentPath: folder1,
},
},
newPath: badOneDrivePath,
repoPath: badOneDrivePath,
locPath: badOneDrivePath,
errCheck: assert.Error,
},
{
@ -661,7 +704,8 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
ParentPath: folder1,
},
},
newPath: badOneDrivePath,
repoPath: badOneDrivePath,
locPath: badOneDrivePath,
errCheck: assert.Error,
},
}
@ -669,7 +713,7 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
item := test.input
err := UpdateItem(&item, test.newPath)
err := UpdateItem(&item, test.repoPath)
test.errCheck(t, err)
if err != nil {
@ -680,3 +724,162 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
})
}
}
var (
	// basePath is the shared tenant/service/user/category prefix used by
	// the folder-entry fixtures and tests below.
	basePath = path.Builder{}.Append("ten", "serv", "user", "type")

	// baseFolderEnts lists the folderEntry values expected for every
	// prefix of basePath, ordered leaf-first ("type" up to "ten").
	// LocationRef is empty for all of them, and the root entry carries
	// no ParentRef.
	baseFolderEnts = []folderEntry{
		{
			RepoRef:     basePath.String(),
			ShortRef:    basePath.ShortRef(),
			ParentRef:   basePath.Dir().ShortRef(),
			LocationRef: "",
			Info: ItemInfo{
				Folder: &FolderInfo{
					ItemType:    FolderItem,
					DisplayName: "type",
				},
			},
		},
		{
			RepoRef:     basePath.Dir().String(),
			ShortRef:    basePath.Dir().ShortRef(),
			ParentRef:   basePath.Dir().Dir().ShortRef(),
			LocationRef: "",
			Info: ItemInfo{
				Folder: &FolderInfo{
					ItemType:    FolderItem,
					DisplayName: "user",
				},
			},
		},
		{
			RepoRef:     basePath.Dir().Dir().String(),
			ShortRef:    basePath.Dir().Dir().ShortRef(),
			ParentRef:   basePath.Dir().Dir().Dir().ShortRef(),
			LocationRef: "",
			Info: ItemInfo{
				Folder: &FolderInfo{
					ItemType:    FolderItem,
					DisplayName: "serv",
				},
			},
		},
		{
			// Root of the path: no parent reference.
			RepoRef:     basePath.Dir().Dir().Dir().String(),
			ShortRef:    basePath.Dir().Dir().Dir().ShortRef(),
			ParentRef:   "",
			LocationRef: "",
			Info: ItemInfo{
				Folder: &FolderInfo{
					ItemType:    FolderItem,
					DisplayName: "ten",
				},
			},
		},
	}
)
// folderEntriesFor builds the expected folderEntry set for a repo path
// rooted at basePath and extended by pathElems, optionally paired with a
// location path built from locElems.  One entry is produced per appended
// path element (display name taken from the location path when one is
// present), followed by the standard baseFolderEnts suffix.
func folderEntriesFor(pathElems []string, locElems []string) []folderEntry {
	var (
		rp   = basePath.Append(pathElems...)
		loc  = path.Builder{}.Append(locElems...)
		ents = make([]folderEntry, 0, len(pathElems)+4)
	)

	for range pathElems {
		displayName := rp.LastElem()
		// Prefer the human-readable location element when available.
		if loc != nil && len(loc.Elements()) > 0 {
			displayName = loc.LastElem()
		}

		ents = append(ents, folderEntry{
			RepoRef:     rp.String(),
			ShortRef:    rp.ShortRef(),
			ParentRef:   rp.Dir().ShortRef(),
			LocationRef: loc.String(),
			Info: ItemInfo{
				Folder: &FolderInfo{
					ItemType:    FolderItem,
					DisplayName: displayName,
				},
			},
		})

		// Step both paths up one level for the next entry.
		loc = loc.Dir()
		rp = rp.Dir()
	}

	return append(ents, baseFolderEnts...)
}
// TestFolderEntriesForPath checks FolderEntriesForPath against varying
// combinations of repo-path and location-path depth, comparing the
// produced entries with the fixtures generated by folderEntriesFor.
func (suite *DetailsUnitSuite) TestFolderEntriesForPath() {
	var (
		fnords = []string{"fnords"}
		smarf  = []string{"fnords", "smarf"}
		beau   = []string{"beau"}
		regard = []string{"beau", "regard"}
	)

	table := []struct {
		name     string
		parent   *path.Builder
		location *path.Builder
		expect   []folderEntry
	}{
		{
			name:   "base path, parent only",
			parent: basePath,
			expect: baseFolderEnts,
		},
		{
			// A location identical to the parent adds no extra entries.
			name:     "base path with location",
			parent:   basePath,
			location: basePath,
			expect:   baseFolderEnts,
		},
		{
			name:   "single depth parent only",
			parent: basePath.Append(fnords...),
			expect: folderEntriesFor(fnords, nil),
		},
		{
			name:     "single depth with location",
			parent:   basePath.Append(fnords...),
			location: basePath.Append(beau...),
			expect:   folderEntriesFor(fnords, beau),
		},
		{
			name:   "two depth parent only",
			parent: basePath.Append(smarf...),
			expect: folderEntriesFor(smarf, nil),
		},
		{
			name:     "two depth with location",
			parent:   basePath.Append(smarf...),
			location: basePath.Append(regard...),
			expect:   folderEntriesFor(smarf, regard),
		},
		{
			name:     "mismatched depth, parent longer",
			parent:   basePath.Append(smarf...),
			location: basePath.Append(beau...),
			expect:   folderEntriesFor(smarf, beau),
		},
		// We can't handle this right now.  But we don't have any cases
		// which immediately require it, either.  Keeping in the test
		// as a reminder that this might be required at some point.
		// {
		// 	name:     "mismatched depth, location longer",
		// 	parent:   basePath.Append(fnords...),
		// 	location: basePath.Append(regard...),
		// 	expect:   folderEntriesFor(fnords, regard),
		// },
	}
	for _, test := range table {
		suite.T().Run(test.name, func(t *testing.T) {
			result := FolderEntriesForPath(test.parent, test.location)
			// Order is not guaranteed, so match as a set.
			assert.ElementsMatch(t, test.expect, result)
		})
	}
}

View File

@ -164,6 +164,7 @@ func genLogger(level logLevel, logfile string) (*zapcore.Core, *zap.SugaredLogge
var (
lgr *zap.Logger
err error
opts = []zap.Option{zap.AddStacktrace(zapcore.PanicLevel)}
)
if level != Production {
@ -178,20 +179,21 @@ func genLogger(level logLevel, logfile string) (*zapcore.Core, *zap.SugaredLogge
cfg.Level = zap.NewAtomicLevelAt(zapcore.FatalLevel)
}
opts := []zap.Option{}
if readableOutput {
opts = append(opts, zap.WithCaller(false), zap.AddStacktrace(zapcore.DPanicLevel))
opts = append(opts, zap.WithCaller(false))
cfg.EncoderConfig.EncodeTime = zapcore.TimeEncoderOfLayout("15:04:05.00")
if logfile == "stderr" || logfile == "stdout" {
cfg.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder
}
}
cfg.OutputPaths = []string{logfile}
lgr, err = cfg.Build(opts...)
} else {
cfg := zap.NewProductionConfig()
cfg.OutputPaths = []string{logfile}
lgr, err = cfg.Build()
lgr, err = cfg.Build(opts...)
}
// fall back to the core config if the default creation fails

View File

@ -20,7 +20,7 @@ func ToOneDrivePath(p Path) (*DrivePath, error) {
if len(folders) < 3 {
return nil, clues.
New("folder path doesn't match expected format for OneDrive items").
With("path_folders", p.Folder())
With("path_folders", p.Folder(false))
}
return &DrivePath{DriveID: folders[1], Folders: folders[3:]}, nil

View File

@ -86,7 +86,7 @@ type Path interface {
Category() CategoryType
Tenant() string
ResourceOwner() string
Folder() string
Folder(bool) string
Folders() []string
Item() string
// PopFront returns a Builder object with the first element (left-side)
@ -140,6 +140,14 @@ func (pb Builder) UnescapeAndAppend(elements ...string) (*Builder, error) {
return res, nil
}
// SplitUnescapeAppend takes in an escaped string representing a directory
// path, splits the string, and appends it to the current builder.
func (pb Builder) SplitUnescapeAppend(s string) (*Builder, error) {
	return pb.UnescapeAndAppend(Split(TrimTrailingSlash(s))...)
}
// Append creates a copy of this Builder and adds the given elements them to the
// end of the new Builder. Elements are added in the order they are passed.
func (pb Builder) Append(elements ...string) *Builder {
@ -205,6 +213,14 @@ func (pb Builder) Dir() *Builder {
}
}
// LastElem returns the rightmost element held by the builder, or the
// empty string when the builder contains no elements.
func (pb Builder) LastElem() string {
	if n := len(pb.elements); n > 0 {
		return pb.elements[n-1]
	}

	return ""
}
// String returns a string that contains all path elements joined together.
// Elements of the path that need escaping are escaped.
func (pb Builder) String() string {
@ -247,11 +263,6 @@ func (pb Builder) Elements() []string {
return append([]string{}, pb.elements...)
}
//nolint:unused
func (pb Builder) join(start, end int) string {
return join(pb.elements[start:end])
}
func verifyInputValues(tenant, resourceOwner string) error {
if len(tenant) == 0 {
return clues.Stack(errMissingSegment, errors.New("tenant"))

View File

@ -480,11 +480,85 @@ func (suite *PathUnitSuite) TestFromStringErrors() {
}
}
// TestFolder verifies Path.Folder(escape) for both plain and
// escape-requiring paths, and that Split round-trips the result back
// into the expected elements.
func (suite *PathUnitSuite) TestFolder() {
	table := []struct {
		name string
		// p constructs the Path under test.
		p      func(t *testing.T) Path
		escape bool
		// expectFolder is the expected Folder(escape) output.
		expectFolder string
		// expectSplit is the expected result of Split(Folder(escape)).
		expectSplit []string
	}{
		{
			name: "clean path",
			p: func(t *testing.T) Path {
				p, err := Builder{}.
					Append("a", "b", "c").
					ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false)
				require.NoError(t, err)

				return p
			},
			expectFolder: "a/b/c",
			expectSplit:  []string{"a", "b", "c"},
		},
		{
			// Escaping a path with no special characters is a no-op.
			name: "clean path escaped",
			p: func(t *testing.T) Path {
				p, err := Builder{}.
					Append("a", "b", "c").
					ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false)
				require.NoError(t, err)

				return p
			},
			escape:       true,
			expectFolder: "a/b/c",
			expectSplit:  []string{"a", "b", "c"},
		},
		{
			// Unescaped output leaves the embedded '/' bare, so Split
			// sees an empty segment that it drops.
			name: "escapable path",
			p: func(t *testing.T) Path {
				p, err := Builder{}.
					Append("a/", "b", "c").
					ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false)
				require.NoError(t, err)

				return p
			},
			expectFolder: "a//b/c",
			expectSplit:  []string{"a", "b", "c"},
		},
		{
			// Escaped output backslash-escapes the embedded '/', which
			// Split preserves within the first element.
			name: "escapable path escaped",
			p: func(t *testing.T) Path {
				p, err := Builder{}.
					Append("a/", "b", "c").
					ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false)
				require.NoError(t, err)

				return p
			},
			escape:       true,
			expectFolder: "a\\//b/c",
			expectSplit:  []string{"a\\/", "b", "c"},
		},
	}
	for _, test := range table {
		suite.T().Run(test.name, func(t *testing.T) {
			p := test.p(t)

			result := p.Folder(test.escape)
			assert.Equal(t, test.expectFolder, result)
			assert.Equal(t, test.expectSplit, Split(result))
		})
	}
}
func (suite *PathUnitSuite) TestFromString() {
const (
testTenant = "tenant"
testUser = "user"
testElement1 = "folder"
testElement1 = "folder/"
testElementTrimmed = "folder"
testElement2 = "folder2"
testElement3 = "other"
)
@ -509,9 +583,13 @@ func (suite *PathUnitSuite) TestFromString() {
// Expected result for Folder() if path is marked as a folder.
expectedFolder string
// Expected result for Item() if path is marked as an item.
// Expected result for Split(Folder()) if path is marked as a folder.
expectedSplit []string
expectedItem string
// Expected result for Folder() if path is marked as an item.
expectedItemFolder string
// Expected result for Split(Folder()) if path is marked as an item.
expectedItemSplit []string
}{
{
name: "BasicPath",
@ -525,16 +603,25 @@ func (suite *PathUnitSuite) TestFromString() {
),
expectedFolder: fmt.Sprintf(
"%s/%s/%s",
testElement1,
testElementTrimmed,
testElement2,
testElement3,
),
expectedSplit: []string{
testElementTrimmed,
testElement2,
testElement3,
},
expectedItem: testElement3,
expectedItemFolder: fmt.Sprintf(
"%s/%s",
testElement1,
testElementTrimmed,
testElement2,
),
expectedItemSplit: []string{
testElementTrimmed,
testElement2,
},
},
{
name: "PathWithEmptyElements",
@ -542,22 +629,31 @@ func (suite *PathUnitSuite) TestFromString() {
"/%s//%%s//%s//%%s//%s///%s//%s//",
testTenant,
testUser,
testElement1,
testElementTrimmed,
testElement2,
testElement3,
),
expectedFolder: fmt.Sprintf(
"%s/%s/%s",
testElement1,
testElementTrimmed,
testElement2,
testElement3,
),
expectedSplit: []string{
testElementTrimmed,
testElement2,
testElement3,
},
expectedItem: testElement3,
expectedItemFolder: fmt.Sprintf(
"%s/%s",
testElement1,
testElementTrimmed,
testElement2,
),
expectedItemSplit: []string{
testElementTrimmed,
testElement2,
},
},
}
@ -572,16 +668,25 @@ func (suite *PathUnitSuite) TestFromString() {
p, err := FromDataLayerPath(testPath, item.isItem)
require.NoError(t, err)
assert.Equal(t, service, p.Service())
assert.Equal(t, cat, p.Category())
assert.Equal(t, testTenant, p.Tenant())
assert.Equal(t, testUser, p.ResourceOwner())
assert.Equal(t, service, p.Service(), "service")
assert.Equal(t, cat, p.Category(), "category")
assert.Equal(t, testTenant, p.Tenant(), "tenant")
assert.Equal(t, testUser, p.ResourceOwner(), "resource owner")
if !item.isItem {
assert.Equal(t, test.expectedFolder, p.Folder())
fld := p.Folder(false)
escfld := p.Folder(true)
if item.isItem {
assert.Equal(t, test.expectedItemFolder, fld, "item folder")
assert.Equal(t, test.expectedItemSplit, Split(fld), "item split")
assert.Equal(t, test.expectedItemFolder, escfld, "escaped item folder")
assert.Equal(t, test.expectedItemSplit, Split(escfld), "escaped item split")
assert.Equal(t, test.expectedItem, p.Item(), "item")
} else {
assert.Equal(t, test.expectedItemFolder, p.Folder())
assert.Equal(t, test.expectedItem, p.Item())
assert.Equal(t, test.expectedFolder, fld, "dir folder")
assert.Equal(t, test.expectedSplit, Split(fld), "dir split")
assert.Equal(t, test.expectedFolder, escfld, "escaped dir folder")
assert.Equal(t, test.expectedSplit, Split(escfld), "escaped dir split")
}
})
}

View File

@ -201,13 +201,20 @@ func (rp dataLayerResourcePath) lastFolderIdx() int {
}
// Folder returns the folder segment embedded in the dataLayerResourcePath.
func (rp dataLayerResourcePath) Folder() string {
func (rp dataLayerResourcePath) Folder(escape bool) string {
endIdx := rp.lastFolderIdx()
if endIdx == 4 {
return ""
}
return rp.Builder.join(4, endIdx)
fs := rp.Folders()
if !escape {
return join(fs)
}
// builder.String() will escape all individual elements.
return Builder{}.Append(fs...).String()
}
// Folders returns the individual folder elements embedded in the

View File

@ -172,7 +172,7 @@ func (suite *DataLayerResourcePath) TestMailItemNoFolder() {
)
require.NoError(t, err)
assert.Empty(t, p.Folder())
assert.Empty(t, p.Folder(false))
assert.Empty(t, p.Folders())
assert.Equal(t, item, p.Item())
})
@ -391,7 +391,7 @@ func (suite *DataLayerResourcePath) TestToExchangePathForCategory() {
assert.Equal(t, path.ExchangeService, p.Service())
assert.Equal(t, test.category, p.Category())
assert.Equal(t, testUser, p.ResourceOwner())
assert.Equal(t, strings.Join(m.expectedFolders, "/"), p.Folder())
assert.Equal(t, strings.Join(m.expectedFolders, "/"), p.Folder(false))
assert.Equal(t, m.expectedFolders, p.Folders())
assert.Equal(t, m.expectedItem, p.Item())
})
@ -465,7 +465,7 @@ func (suite *PopulatedDataLayerResourcePath) TestFolder() {
assert.Equal(
t,
strings.Join(m.expectedFolders, "/"),
suite.paths[m.isItem].Folder(),
suite.paths[m.isItem].Folder(false),
)
})
}
@ -525,7 +525,7 @@ func (suite *PopulatedDataLayerResourcePath) TestAppend() {
return
}
assert.Equal(t, test.expectedFolder, newPath.Folder())
assert.Equal(t, test.expectedFolder, newPath.Folder(false))
assert.Equal(t, test.expectedItem, newPath.Item())
})
}

View File

@ -575,12 +575,12 @@ func (ec exchangeCategory) isLeaf() bool {
return ec == ec.leafCat()
}
// pathValues transforms a path to a map of identified properties.
// pathValues transforms the two paths to maps of identified properties.
//
// Example:
// [tenantID, service, userPN, category, mailFolder, mailID]
// => {exchUser: userPN, exchMailFolder: mailFolder, exchMail: mailID}
func (ec exchangeCategory) pathValues(p path.Path) map[categorizer]string {
// => {exchMailFolder: mailFolder, exchMail: mailID}
func (ec exchangeCategory) pathValues(repo, location path.Path) (map[categorizer]string, map[categorizer]string) {
var folderCat, itemCat categorizer
switch ec {
@ -594,13 +594,24 @@ func (ec exchangeCategory) pathValues(p path.Path) map[categorizer]string {
folderCat, itemCat = ExchangeMailFolder, ExchangeMail
default:
return map[categorizer]string{}
return map[categorizer]string{}, map[categorizer]string{}
}
return map[categorizer]string{
folderCat: p.Folder(),
itemCat: p.Item(),
rv := map[categorizer]string{
folderCat: repo.Folder(false),
itemCat: repo.Item(),
}
lv := map[categorizer]string{}
if location != nil {
lv = map[categorizer]string{
folderCat: location.Folder(false),
itemCat: location.Item(),
}
}
return rv, lv
}
// pathKeys returns the path keys recognized by the receiver's leaf type.

View File

@ -705,13 +705,16 @@ func (suite *ExchangeSelectorSuite) TestExchangeScope_MatchesInfo() {
func (suite *ExchangeSelectorSuite) TestExchangeScope_MatchesPath() {
const (
usr = "userID"
fID1 = "mf_id_1"
fld1 = "mailFolder"
fID2 = "mf_id_2"
fld2 = "subFolder"
mail = "mailID"
)
var (
pth = stubPath(suite.T(), usr, []string{fld1, fld2, mail}, path.EmailCategory)
repo = stubPath(suite.T(), usr, []string{fID1, fID2, mail}, path.EmailCategory)
loc = stubPath(suite.T(), usr, []string{fld1, fld2, mail}, path.EmailCategory)
short = "thisisahashofsomekind"
es = NewExchangeRestore(Any())
)
@ -726,13 +729,18 @@ func (suite *ExchangeSelectorSuite) TestExchangeScope_MatchesPath() {
{"all folders", es.MailFolders(Any()), "", assert.True},
{"no folders", es.MailFolders(None()), "", assert.False},
{"matching folder", es.MailFolders([]string{fld1}), "", assert.True},
{"matching folder id", es.MailFolders([]string{fID1}), "", assert.True},
{"incomplete matching folder", es.MailFolders([]string{"mail"}), "", assert.False},
{"incomplete matching folder ID", es.MailFolders([]string{"mf_id"}), "", assert.False},
{"non-matching folder", es.MailFolders([]string{"smarf"}), "", assert.False},
{"non-matching folder substring", es.MailFolders([]string{fld1 + "_suffix"}), "", assert.False},
{"non-matching folder id substring", es.MailFolders([]string{fID1 + "_suffix"}), "", assert.False},
{"matching folder prefix", es.MailFolders([]string{fld1}, PrefixMatch()), "", assert.True},
{"matching folder ID prefix", es.MailFolders([]string{fID1}, PrefixMatch()), "", assert.True},
{"incomplete folder prefix", es.MailFolders([]string{"mail"}, PrefixMatch()), "", assert.False},
{"matching folder substring", es.MailFolders([]string{"Folder"}), "", assert.False},
{"one of multiple folders", es.MailFolders([]string{"smarf", fld2}), "", assert.True},
{"one of multiple folders by ID", es.MailFolders([]string{"smarf", fID2}), "", assert.True},
{"all mail", es.Mails(Any(), Any()), "", assert.True},
{"no mail", es.Mails(Any(), None()), "", assert.False},
{"matching mail", es.Mails(Any(), []string{mail}), "", assert.True},
@ -746,8 +754,12 @@ func (suite *ExchangeSelectorSuite) TestExchangeScope_MatchesPath() {
scopes := setScopesToDefault(test.scope)
var aMatch bool
for _, scope := range scopes {
pv := ExchangeMail.pathValues(pth)
if matchesPathValues(scope, ExchangeMail, pv, short) {
repoVals, locVals := ExchangeMail.pathValues(repo, loc)
if matchesPathValues(scope, ExchangeMail, repoVals, short) {
aMatch = true
break
}
if matchesPathValues(scope, ExchangeMail, locVals, short) {
aMatch = true
break
}
@ -833,6 +845,256 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() {
return deets
}
table := []struct {
name string
deets *details.Details
makeSelector func() *ExchangeRestore
expect []string
}{
{
"no refs",
makeDeets(),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
return er
},
[]string{},
},
{
"contact only",
makeDeets(contact),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
return er
},
[]string{contact},
},
{
"event only",
makeDeets(event),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
return er
},
[]string{event},
},
{
"mail only",
makeDeets(mail),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
return er
},
[]string{mail},
},
{
"all",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
return er
},
[]string{contact, event, mail},
},
{
"only match contact",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.Contacts([]string{"cfld"}, []string{"cid"}))
return er
},
[]string{contact},
},
{
"only match contactInSubFolder",
makeDeets(contactInSubFolder, contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.ContactFolders([]string{"cfld1/cfld2"}))
return er
},
[]string{contactInSubFolder},
},
{
"only match contactInSubFolder by prefix",
makeDeets(contactInSubFolder, contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.ContactFolders([]string{"cfld1/cfld2"}, PrefixMatch()))
return er
},
[]string{contactInSubFolder},
},
{
"only match contactInSubFolder by leaf folder",
makeDeets(contactInSubFolder, contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.ContactFolders([]string{"cfld2"}))
return er
},
[]string{contactInSubFolder},
},
{
"only match event",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.Events([]string{"ecld"}, []string{"eid"}))
return er
},
[]string{event},
},
{
"only match mail",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.Mails([]string{"mfld"}, []string{"mid"}))
return er
},
[]string{mail},
},
{
"exclude contact",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
er.Exclude(er.Contacts([]string{"cfld"}, []string{"cid"}))
return er
},
[]string{event, mail},
},
{
"exclude event",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
er.Exclude(er.Events([]string{"ecld"}, []string{"eid"}))
return er
},
[]string{contact, mail},
},
{
"exclude mail",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
er.Exclude(er.Mails([]string{"mfld"}, []string{"mid"}))
return er
},
[]string{contact, event},
},
{
"filter on mail subject",
func() *details.Details {
ds := makeDeets(mail)
for i := range ds.Entries {
ds.Entries[i].Exchange.Subject = "has a subject"
}
return ds
}(),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
er.Filter(er.MailSubject("subj"))
return er
},
[]string{mail},
},
{
"filter on mail subject multiple input categories",
func() *details.Details {
mds := makeDeets(mail)
for i := range mds.Entries {
mds.Entries[i].Exchange.Subject = "has a subject"
}
ds := makeDeets(contact, event)
ds.Entries = append(ds.Entries, mds.Entries...)
return ds
}(),
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
er.Filter(er.MailSubject("subj"))
return er
},
[]string{mail},
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
ctx, flush := tester.NewContext()
defer flush()
sel := test.makeSelector()
results := sel.Reduce(ctx, test.deets, fault.New(true))
paths := results.Paths()
assert.Equal(t, test.expect, paths)
})
}
}
func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce_locationRef() {
var (
contact = stubRepoRef(path.ExchangeService, path.ContactsCategory, "uid", "id5/id6", "cid")
contactLocation = "conts/my_cont"
event = stubRepoRef(path.ExchangeService, path.EventsCategory, "uid", "id1/id2", "eid")
eventLocation = "cal/my_cal"
mail = stubRepoRef(path.ExchangeService, path.EmailCategory, "uid", "id3/id4", "mid")
mailLocation = "inbx/my_mail"
)
makeDeets := func(refs ...string) *details.Details {
deets := &details.Details{
DetailsModel: details.DetailsModel{
Entries: []details.DetailsEntry{},
},
}
for _, r := range refs {
var (
location string
itype = details.UnknownType
)
switch r {
case contact:
itype = details.ExchangeContact
location = contactLocation
case event:
itype = details.ExchangeEvent
location = eventLocation
case mail:
itype = details.ExchangeMail
location = mailLocation
}
deets.Entries = append(deets.Entries, details.DetailsEntry{
RepoRef: r,
LocationRef: location,
ItemInfo: details.ItemInfo{
Exchange: &details.ExchangeInfo{
ItemType: itype,
},
},
})
}
return deets
}
arr := func(s ...string) []string {
return s
}
@ -898,47 +1160,17 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() {
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.Contacts([]string{"cfld"}, []string{"cid"}))
er.Include(er.Contacts([]string{contactLocation}, []string{"cid"}))
return er
},
arr(contact),
},
{
"only match contactInSubFolder",
makeDeets(contactInSubFolder, contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.ContactFolders([]string{"cfld1/cfld2"}))
return er
},
arr(contactInSubFolder),
},
{
"only match contactInSubFolder by prefix",
makeDeets(contactInSubFolder, contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.ContactFolders([]string{"cfld1/cfld2"}, PrefixMatch()))
return er
},
arr(contactInSubFolder),
},
{
"only match contactInSubFolder by leaf folder",
makeDeets(contactInSubFolder, contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.ContactFolders([]string{"cfld2"}))
return er
},
arr(contactInSubFolder),
},
{
"only match event",
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.Events([]string{"ecld"}, []string{"eid"}))
er.Include(er.Events([]string{eventLocation}, []string{"eid"}))
return er
},
arr(event),
@ -948,7 +1180,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() {
makeDeets(contact, event, mail),
func() *ExchangeRestore {
er := NewExchangeRestore([]string{"uid"})
er.Include(er.Mails([]string{"mfld"}, []string{"mid"}))
er.Include(er.Mails([]string{mailLocation}, []string{"mid"}))
return er
},
arr(mail),
@ -959,7 +1191,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() {
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
er.Exclude(er.Contacts([]string{"cfld"}, []string{"cid"}))
er.Exclude(er.Contacts([]string{contactLocation}, []string{"cid"}))
return er
},
arr(event, mail),
@ -970,7 +1202,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() {
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
er.Exclude(er.Events([]string{"ecld"}, []string{"eid"}))
er.Exclude(er.Events([]string{eventLocation}, []string{"eid"}))
return er
},
arr(contact, mail),
@ -981,7 +1213,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() {
func() *ExchangeRestore {
er := NewExchangeRestore(Any())
er.Include(er.AllData())
er.Exclude(er.Mails([]string{"mfld"}, []string{"mid"}))
er.Exclude(er.Mails([]string{mailLocation}, []string{"mid"}))
return er
},
arr(contact, event),
@ -1128,9 +1360,12 @@ func (suite *ExchangeSelectorSuite) TestPasses() {
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
repoVals, locVals := cat.pathValues(pth, pth)
result := passes(
cat,
cat.pathValues(pth),
repoVals,
locVals,
entry,
test.excludes,
test.filters,
@ -1233,17 +1468,17 @@ func (suite *ExchangeSelectorSuite) TestExchangeCategory_PathValues() {
contactPath := stubPath(t, "user", []string{"cfolder", "contactitem"}, path.ContactsCategory)
contactMap := map[categorizer]string{
ExchangeContactFolder: contactPath.Folder(),
ExchangeContactFolder: contactPath.Folder(false),
ExchangeContact: contactPath.Item(),
}
eventPath := stubPath(t, "user", []string{"ecalendar", "eventitem"}, path.EventsCategory)
eventMap := map[categorizer]string{
ExchangeEventCalendar: eventPath.Folder(),
ExchangeEventCalendar: eventPath.Folder(false),
ExchangeEvent: eventPath.Item(),
}
mailPath := stubPath(t, "user", []string{"mfolder", "mailitem"}, path.EmailCategory)
mailMap := map[categorizer]string{
ExchangeMailFolder: mailPath.Folder(),
ExchangeMailFolder: mailPath.Folder(false),
ExchangeMail: mailPath.Item(),
}
@ -1258,7 +1493,9 @@ func (suite *ExchangeSelectorSuite) TestExchangeCategory_PathValues() {
}
for _, test := range table {
suite.T().Run(string(test.cat), func(t *testing.T) {
assert.Equal(t, test.cat.pathValues(test.path), test.expect)
r, l := test.cat.pathValues(test.path, test.path)
assert.Equal(t, test.expect, r)
assert.Equal(t, test.expect, l)
})
}
}

View File

@ -55,11 +55,13 @@ func (mc mockCategorizer) isLeaf() bool {
return mc == leafCatStub
}
func (mc mockCategorizer) pathValues(pth path.Path) map[categorizer]string {
return map[categorizer]string{
func (mc mockCategorizer) pathValues(repo, location path.Path) (map[categorizer]string, map[categorizer]string) {
pv := map[categorizer]string{
rootCatStub: "root",
leafCatStub: "leaf",
}
return pv, pv
}
func (mc mockCategorizer) pathKeys() []categorizer {

View File

@ -371,19 +371,30 @@ func (c oneDriveCategory) isLeaf() bool {
return c == OneDriveItem
}
// pathValues transforms a path to a map of identified properties.
// pathValues transforms the two paths to maps of identified properties.
//
// Example:
// [tenantID, service, userPN, category, folder, fileID]
// => {odUser: userPN, odFolder: folder, odFileID: fileID}
func (c oneDriveCategory) pathValues(p path.Path) map[categorizer]string {
// => {odFolder: folder, odFileID: fileID}
func (c oneDriveCategory) pathValues(repo, location path.Path) (map[categorizer]string, map[categorizer]string) {
// Ignore `drives/<driveID>/root:` for folder comparison
folder := path.Builder{}.Append(p.Folders()...).PopFront().PopFront().PopFront().String()
return map[categorizer]string{
OneDriveFolder: folder,
OneDriveItem: p.Item(),
rFld := path.Builder{}.Append(repo.Folders()...).PopFront().PopFront().PopFront().String()
rv := map[categorizer]string{
OneDriveFolder: rFld,
OneDriveItem: repo.Item(),
}
lv := map[categorizer]string{}
if location != nil {
lFld := path.Builder{}.Append(location.Folders()...).PopFront().PopFront().PopFront().String()
lv = map[categorizer]string{
OneDriveFolder: lFld,
OneDriveItem: location.Item(),
}
}
return rv, lv
}
// pathKeys returns the path keys recognized by the receiver's leaf type.

View File

@ -262,7 +262,9 @@ func (suite *OneDriveSelectorSuite) TestOneDriveCategory_PathValues() {
OneDriveItem: "file",
}
assert.Equal(t, expected, OneDriveItem.pathValues(filePath))
r, l := OneDriveItem.pathValues(filePath, filePath)
assert.Equal(t, expected, r)
assert.Equal(t, expected, l)
}
func (suite *OneDriveSelectorSuite) TestOneDriveScope_MatchesInfo() {

View File

@ -77,17 +77,18 @@ type (
// eg: in a resourceOwner/folder/item structure, the item is the leaf.
isLeaf() bool
// pathValues should produce a map of category:string pairs populated by extracting
// values out of the path.Path struct.
// pathValues takes in two paths, both variants of the repoRef, one containing the standard
// repoRef, and the other amended to include the locationRef directories (if available). It
// should produce two maps of category:string pairs populated by extracting the values out of
// each path.Path.
//
// Ex: given a path builder like ["tenant", "service", "resource", "dataType", "folder", "itemID"],
// the func should use the path to construct a map similar to this:
// {
// rootCat: resource,
// folderCat: folder,
// itemCat: itemID,
// }
pathValues(path.Path) map[categorizer]string
pathValues(path.Path, path.Path) (map[categorizer]string, map[categorizer]string)
// pathKeys produces a list of categorizers that can be used as keys in the pathValues
// map. The combination of the two funcs generically interprets the context of the
@ -317,6 +318,31 @@ func reduce[T scopeT, C categoryT](
continue
}
var locationPath path.Path
// if the details entry has a locationRef specified, use those folders in place
// of the repoRef folders, so that scopes can match against the display names
// instead of container IDs.
if len(ent.LocationRef) > 0 {
pb, err := path.Builder{}.SplitUnescapeAppend(ent.LocationRef)
if err != nil {
errs.Add(clues.Wrap(err, "transforming locationRef to path").WithClues(ctx))
continue
}
locationPath, err = pb.Append(repoPath.Item()).
ToDataLayerPath(
repoPath.Tenant(),
repoPath.ResourceOwner(),
repoPath.Service(),
repoPath.Category(),
true)
if err != nil {
errs.Add(clues.Wrap(err, "transforming locationRef to path").WithClues(ctx))
continue
}
}
// first check, every entry needs to match the selector's resource owners.
if !matchesResourceOwner.Compare(repoPath.ResourceOwner()) {
continue
@ -334,7 +360,9 @@ func reduce[T scopeT, C categoryT](
continue
}
passed := passes(dc, dc.pathValues(repoPath), *ent, e, f, i)
rv, lv := dc.pathValues(repoPath, locationPath)
passed := passes(dc, rv, lv, *ent, e, f, i)
if passed {
ents = append(ents, *ent)
}
@ -379,7 +407,7 @@ func scopesByCategory[T scopeT, C categoryT](
// if the path is included, passes filters, and not excluded.
func passes[T scopeT, C categoryT](
cat C,
pathValues map[categorizer]string,
repoValues, locationValues map[categorizer]string,
entry details.DetailsEntry,
excs, filts, incs []T,
) bool {
@ -395,7 +423,7 @@ func passes[T scopeT, C categoryT](
var included bool
for _, inc := range incs {
if matchesEntry(inc, cat, pathValues, entry) {
if matchesEntry(inc, cat, repoValues, locationValues, entry) {
included = true
break
}
@ -408,14 +436,14 @@ func passes[T scopeT, C categoryT](
// all filters must pass
for _, filt := range filts {
if !matchesEntry(filt, cat, pathValues, entry) {
if !matchesEntry(filt, cat, repoValues, locationValues, entry) {
return false
}
}
// any matching exclusion means failure
for _, exc := range excs {
if matchesEntry(exc, cat, pathValues, entry) {
if matchesEntry(exc, cat, repoValues, locationValues, entry) {
return false
}
}
@ -428,7 +456,7 @@ func passes[T scopeT, C categoryT](
func matchesEntry[T scopeT, C categoryT](
sc T,
cat C,
pathValues map[categorizer]string,
repoValues, locationValues map[categorizer]string,
entry details.DetailsEntry,
) bool {
// filterCategory requires matching against service-specific info values
@ -436,7 +464,11 @@ func matchesEntry[T scopeT, C categoryT](
return sc.matchesInfo(entry.ItemInfo)
}
return matchesPathValues(sc, cat, pathValues, entry.ShortRef)
if len(locationValues) > 0 && matchesPathValues(sc, cat, locationValues, entry.ShortRef) {
return true
}
return matchesPathValues(sc, cat, repoValues, entry.ShortRef)
}
// matchesPathValues will check whether the pathValues have matching entries

View File

@ -290,6 +290,47 @@ func (suite *SelectorScopesSuite) TestReduce() {
}
}
func (suite *SelectorScopesSuite) TestReduce_locationRef() {
deets := func() details.Details {
return details.Details{
DetailsModel: details.DetailsModel{
Entries: []details.DetailsEntry{
{
RepoRef: stubRepoRef(
pathServiceStub,
pathCatStub,
rootCatStub.String(),
"stub",
leafCatStub.String(),
),
LocationRef: "a/b/c//defg",
},
},
},
}
}
dataCats := map[path.CategoryType]mockCategorizer{
pathCatStub: rootCatStub,
}
for _, test := range reduceTestTable {
suite.T().Run(test.name, func(t *testing.T) {
ctx, flush := tester.NewContext()
defer flush()
ds := deets()
result := reduce[mockScope](
ctx,
&ds,
test.sel().Selector,
dataCats,
fault.New(true))
require.NotNil(t, result)
assert.Len(t, result.Entries, test.expectLen)
})
}
}
func (suite *SelectorScopesSuite) TestScopesByCategory() {
t := suite.T()
s1 := stubScope("")
@ -309,7 +350,7 @@ func (suite *SelectorScopesSuite) TestScopesByCategory() {
func (suite *SelectorScopesSuite) TestPasses() {
cat := rootCatStub
pth := stubPath(suite.T(), "uid", []string{"fld"}, path.EventsCategory)
pathVals := cat.pathValues(pth)
repoVals, locVals := cat.pathValues(pth, pth)
entry := details.DetailsEntry{}
for _, test := range reduceTestTable {
@ -320,7 +361,8 @@ func (suite *SelectorScopesSuite) TestPasses() {
incl := toMockScope(sel.Includes)
result := passes(
cat,
pathVals,
repoVals,
locVals,
entry,
excl, filt, incl)
test.expectPasses(t, result)

View File

@ -48,7 +48,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
selFunc: func() selectors.Reducer {
sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.MailFolders(
[]string{testdata.ExchangeEmailInboxPath.Folder()},
[]string{testdata.ExchangeEmailInboxPath.Folder(false)},
))
return sel
@ -177,7 +177,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
selFunc: func() selectors.Reducer {
sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.MailFolders(
[]string{testdata.ExchangeEmailBasePath.Folder()},
[]string{testdata.ExchangeEmailBasePath.Folder(false)},
))
return sel
@ -192,7 +192,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
selFunc: func() selectors.Reducer {
sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.MailFolders(
[]string{testdata.ExchangeEmailBasePath.Folder()},
[]string{testdata.ExchangeEmailBasePath.Folder(false)},
selectors.PrefixMatch(), // force prefix matching
))
@ -205,7 +205,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
selFunc: func() selectors.Reducer {
sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.MailFolders(
[]string{testdata.ExchangeEmailInboxPath.Folder()},
[]string{testdata.ExchangeEmailInboxPath.Folder(false)},
))
return sel
@ -217,7 +217,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
selFunc: func() selectors.Reducer {
sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.ContactFolders(
[]string{testdata.ExchangeContactsBasePath.Folder()},
[]string{testdata.ExchangeContactsBasePath.Folder(false)},
))
return sel
@ -229,7 +229,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
selFunc: func() selectors.Reducer {
sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.ContactFolders(
[]string{testdata.ExchangeContactsRootPath.Folder()},
[]string{testdata.ExchangeContactsRootPath.Folder(false)},
))
return sel
@ -242,7 +242,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
selFunc: func() selectors.Reducer {
sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.EventCalendars(
[]string{testdata.ExchangeEventsBasePath.Folder()},
[]string{testdata.ExchangeEventsBasePath.Folder(false)},
))
return sel
@ -254,7 +254,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
selFunc: func() selectors.Reducer {
sel := selectors.NewExchangeRestore(selectors.Any())
sel.Include(sel.EventCalendars(
[]string{testdata.ExchangeEventsRootPath.Folder()},
[]string{testdata.ExchangeEventsRootPath.Folder(false)},
))
return sel

View File

@ -423,12 +423,12 @@ func (c sharePointCategory) isLeaf() bool {
return c == c.leafCat()
}
// pathValues transforms a path to a map of identified properties.
// pathValues transforms the two paths to maps of identified properties.
//
// Example:
// [tenantID, service, siteID, category, folder, itemID]
// => {spSite: siteID, spFolder: folder, spItemID: itemID}
func (c sharePointCategory) pathValues(p path.Path) map[categorizer]string {
// => {spFolder: folder, spItemID: itemID}
func (c sharePointCategory) pathValues(repo, location path.Path) (map[categorizer]string, map[categorizer]string) {
var folderCat, itemCat categorizer
switch c {
@ -439,13 +439,24 @@ func (c sharePointCategory) pathValues(p path.Path) map[categorizer]string {
case SharePointPage, SharePointPageFolder:
folderCat, itemCat = SharePointPageFolder, SharePointPage
default:
return map[categorizer]string{}
return map[categorizer]string{}, map[categorizer]string{}
}
return map[categorizer]string{
folderCat: p.Folder(),
itemCat: p.Item(),
rv := map[categorizer]string{
folderCat: repo.Folder(false),
itemCat: repo.Item(),
}
lv := map[categorizer]string{}
if location != nil {
lv = map[categorizer]string{
folderCat: location.Folder(false),
itemCat: location.Item(),
}
}
return rv, lv
}
// pathKeys returns the path keys recognized by the receiver's leaf type.

View File

@ -346,10 +346,11 @@ func (suite *SharePointSelectorSuite) TestSharePointCategory_PathValues() {
"tenant",
"site",
test.sc.PathType(),
true,
)
true)
require.NoError(t, err)
assert.Equal(t, test.expected, test.sc.pathValues(itemPath))
r, l := test.sc.pathValues(itemPath, itemPath)
assert.Equal(t, test.expected, r)
assert.Equal(t, test.expected, l)
})
}
}