add move folder delta test (#1979)

## Description

Adds a delta test case for Exchange incremental
backups that moves one folder into another.
Also sets up container-ID retrieval in the test,
in preparation for additional test controls.

## Does this PR need a docs update or release note?

- [x] No

## Type of change

- [x] 🤖 Test

## Issue(s)

* #1966

## Test Plan

- [x] 💚 E2E
Keepers 2023-01-02 11:00:51 -07:00, committed by GitHub
parent 5239ff97e3
commit 35e0415a75
6 changed files with 88 additions and 32 deletions


@@ -62,9 +62,9 @@ func (cr *containerResolver) idToPath(
 	return fullPath, nil
 }
 
-// PathInCache utility function to return m365ID of folder if the pathString
-// matches the path of a container within the cache. A boolean function
-// accompanies the call to indicate whether the lookup was successful.
+// PathInCache utility function to return m365ID of folder if the path.Folders
+// matches the directory of a container within the cache. A boolean result
+// is provided to indicate whether the lookup was successful.
 func (cr *containerResolver) PathInCache(pathString string) (string, bool) {
 	if len(pathString) == 0 || cr == nil {
 		return "", false

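For orientation, the updated `PathInCache` contract is a cache lookup keyed by a folder-path string, returning the container ID and an ok flag. A minimal usage sketch, assuming a populated `graph.ContainerResolver` and a `path.Path` (the wrapper function name and error text are illustrative, not part of the diff; imports from the corso repo are elided):

```go
// containerIDForPath is a hypothetical helper: it resolves a folder's
// M365 container ID from the resolver's cache, mirroring the
// cr.PathInCache(p.Folder()) call sites added in the test below.
func containerIDForPath(cr graph.ContainerResolver, p path.Path) (string, error) {
	id, ok := cr.PathInCache(p.Folder())
	if !ok {
		return "", fmt.Errorf("no cached container for %q", p.Folder())
	}

	return id, nil
}
```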

@@ -236,7 +236,7 @@ func createCollections(
 	defer closer()
 	defer close(foldersComplete)
 
-	resolver, err := populateExchangeContainerResolver(ctx, qp)
+	resolver, err := PopulateExchangeContainerResolver(ctx, qp)
 	if err != nil {
 		return nil, errors.Wrap(err, "getting folder cache")
 	}


@@ -560,26 +560,24 @@ func (suite *ExchangeServiceSuite) TestGetContainerIDFromCache() {
 	for _, test := range tests {
 		suite.T().Run(test.name, func(t *testing.T) {
-			folderID, err := GetContainerIDFromCache(
+			folderID, err := CreateContainerDestinaion(
 				ctx,
 				connector,
 				test.pathFunc1(t),
 				folderName,
-				directoryCaches,
-			)
+				directoryCaches)
 			require.NoError(t, err)
 
 			resolver := directoryCaches[test.category]
 
 			_, err = resolver.IDToPath(ctx, folderID)
 			assert.NoError(t, err)
 
-			secondID, err := GetContainerIDFromCache(
+			secondID, err := CreateContainerDestinaion(
 				ctx,
 				connector,
 				test.pathFunc2(t),
 				folderName,
-				directoryCaches,
-			)
+				directoryCaches)
 			require.NoError(t, err)
 
 			_, err = resolver.IDToPath(ctx, secondID)


@@ -125,11 +125,11 @@ func DeleteContactFolder(ctx context.Context, gs graph.Servicer, user, folderID
 	return gs.Client().UsersById(user).ContactFoldersById(folderID).Delete(ctx, nil)
 }
 
-// populateExchangeContainerResolver gets a folder resolver if one is available for
+// PopulateExchangeContainerResolver gets a folder resolver if one is available for
 // this category of data. If one is not available, returns nil so that other
 // logic in the caller can complete as long as they check if the resolver is not
 // nil. If an error occurs populating the resolver, returns an error.
-func populateExchangeContainerResolver(
+func PopulateExchangeContainerResolver(
 	ctx context.Context,
 	qp graph.QueryParams,
 ) (graph.ContainerResolver, error) {

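Now that `PopulateExchangeContainerResolver` is exported, callers outside the package build a `graph.QueryParams` and must tolerate a nil resolver, per the doc comment above. A condensed sketch based on its call sites in this diff (`user` and `m365` stand in for the test suite's values; the nil-handling is illustrative):

```go
qp := graph.QueryParams{
	Category:      path.EmailCategory,
	ResourceOwner: user,
	Credentials:   m365,
}

resolver, err := exchange.PopulateExchangeContainerResolver(ctx, qp)
if err != nil {
	return errors.Wrap(err, "getting folder cache")
}

// resolver may be nil for categories that have no folder resolver;
// callers are expected to nil-check before use.
if resolver == nil {
	return nil
}
```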

@@ -312,7 +312,7 @@ func RestoreExchangeDataCollections(
 			userCaches = directoryCaches[userID]
 		}
 
-		containerID, err := GetContainerIDFromCache(
+		containerID, err := CreateContainerDestinaion(
 			ctx,
 			gs,
 			dc.FullPath(),
@@ -425,10 +425,12 @@ func restoreCollection(
 	}
 }
 
-// generateRestoreContainerFunc utility function that holds logic for creating
-// Root Directory or necessary functions based on path.CategoryType
-// Assumption: collisionPolicy == COPY
-func GetContainerIDFromCache(
+// CreateContainerDestinaion builds the destination into the container
+// at the provided path. As a precondition, the destination cannot
+// already exist. If it does then an error is returned. The provided
+// containerResolver is updated with the new destination.
+// @ returns the container ID of the new destination container.
+func CreateContainerDestinaion(
 	ctx context.Context,
 	gs graph.Servicer,
 	directory path.Path,

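The hunk truncates the parameter list of `CreateContainerDestinaion`. Piecing the shape together from the call sites in the test and restore hunks above, a call appears to take the servicer, destination path, folder name, and per-category resolver caches; the comments mark what is inferred rather than shown:

```go
// Sketch of a call site; the last two parameters are inferred from the
// TestGetContainerIDFromCache and RestoreExchangeDataCollections hunks,
// since this hunk cuts the signature off after `directory path.Path`.
containerID, err := CreateContainerDestinaion(
	ctx,
	gs,              // graph.Servicer
	dc.FullPath(),   // path.Path of the destination to create
	folderName,      // inferred: display name for the new container
	directoryCaches) // inferred: per-category container resolvers
if err != nil {
	return err
}

// containerID identifies the newly created destination container.
```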

@@ -7,6 +7,7 @@ import (
 	"time"
 
 	"github.com/google/uuid"
+	msuser "github.com/microsoftgraph/msgraph-sdk-go/users"
 	"github.com/pkg/errors"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
@@ -16,6 +17,7 @@ import (
 	"github.com/alcionai/corso/src/internal/common"
 	"github.com/alcionai/corso/src/internal/connector"
 	"github.com/alcionai/corso/src/internal/connector/exchange"
+	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/connector/mockconnector"
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/events"
@@ -631,6 +633,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
 		}
 		folder1 = fmt.Sprintf("%s%d_%s", incrementalsDestFolderPrefix, 1, now)
 		folder2 = fmt.Sprintf("%s%d_%s", incrementalsDestFolderPrefix, 2, now)
+		folder3 = fmt.Sprintf("%s%d_%s", incrementalsDestFolderPrefix, 3, now)
 	)
 
 	m365, err := acct.M365Config()
@@ -639,14 +642,20 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
 	gc, err := connector.NewGraphConnector(ctx, acct, connector.Users)
 	require.NoError(t, err)
 
-	// generate 2 new folders with two items each.
+	// generate 3 new folders with two items each.
+	// Only the first two folders will be part of the initial backup and
+	// incrementals. The third folder will be introduced partway through
+	// the changes.
 	// This should be enough to cover most delta actions, since moving one
 	// folder into another generates a delta for both addition and deletion.
-	// TODO: get the folder IDs somehow, so that we can call mutations on
-	// the folders by ID.
+	type contDeets struct {
+		containerID string
+		deets       *details.Details
+	}
+
 	dataset := map[path.CategoryType]struct {
 		dbf   dataBuilderFunc
-		dests map[string]*details.Details
+		dests map[string]contDeets
 	}{
 		path.EmailCategory: {
 			dbf: func(id, timeStamp, subject, body string) []byte {
@@ -657,9 +666,10 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
 					subject, body, body,
 					now, now, now, now)
 			},
-			dests: map[string]*details.Details{
-				folder1: nil,
-				folder2: nil,
+			dests: map[string]contDeets{
+				folder1: {},
+				folder2: {},
+				folder3: {},
 			},
 		},
 		path.ContactsCategory: {
@@ -673,25 +683,50 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
 					"123-456-7890",
 				)
 			},
-			dests: map[string]*details.Details{
-				folder1: nil,
-				folder2: nil,
+			dests: map[string]contDeets{
+				folder1: {},
+				folder2: {},
+				folder3: {},
 			},
 		},
 	}
 
 	for category, gen := range dataset {
-		for dest := range gen.dests {
-			dataset[category].dests[dest] = generateContainerOfItems(
+		for destName := range gen.dests {
+			deets := generateContainerOfItems(
 				t,
 				ctx,
 				gc,
 				path.ExchangeService,
 				category,
 				selectors.NewExchangeRestore(users).Selector,
-				m365.AzureTenantID, suite.user, dest,
+				m365.AzureTenantID, suite.user, destName,
 				2,
 				gen.dbf)
+
+			dataset[category].dests[destName] = contDeets{"", deets}
+		}
+	}
+
+	for category, gen := range dataset {
+		qp := graph.QueryParams{
+			Category:      category,
+			ResourceOwner: suite.user,
+			Credentials:   m365,
+		}
+
+		cr, err := exchange.PopulateExchangeContainerResolver(ctx, qp)
+		require.NoError(t, err, "populating %s container resolver", category)
+
+		for destName, dest := range gen.dests {
+			p, err := path.FromDataLayerPath(dest.deets.Entries[0].RepoRef, true)
+			require.NoError(t, err)
+
+			id, ok := cr.PathInCache(p.Folder())
+			require.True(t, ok, "dir %s found in %s cache", p.Folder(), category)
+
+			d := dataset[category].dests[destName]
+			d.containerID = id
+			dataset[category].dests[destName] = d
 		}
 	}
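Caching each destination's container ID is the groundwork the description calls "additional test controls": later table cases can mutate generated folders directly by ID, as the new move case below does. It should also enable the still-unchecked rename and remove cases; a sketch of a removal, assuming the mail-folder builder mirrors the `ContactFoldersById(...).Delete` call shown earlier in this PR (this call is an assumption, not part of the diff):

```go
// Hypothetical follow-up case: delete a generated folder by the ID
// captured in dataset[...].dests[...].containerID. The MailFoldersById
// Delete call is assumed by analogy with DeleteContactFolder above.
id := dataset[path.EmailCategory].dests[folder3].containerID
err := gc.Service.
	Client().
	UsersById(suite.user).
	MailFoldersById(id).
	Delete(ctx, nil)
require.NoError(t, err)
```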
@@ -713,7 +748,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
 	// [ ] remove an item from an existing folder
 	// [ ] add a new folder
 	// [ ] rename a folder
-	// [ ] relocate one folder into another
+	// [x] relocate one folder into another
 	// [ ] remove a folder
 
 	table := []struct {
@@ -729,6 +764,27 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
 			itemsRead:    0,
 			itemsWritten: 0,
 		},
+		{
+			name: "move an email folder to a subfolder",
+			updateUserData: func(t *testing.T) {
+				// contacts cannot be subfoldered; this is an email-only change
+				toFolder := dataset[path.EmailCategory].dests[folder1].containerID
+				fromFolder := dataset[path.EmailCategory].dests[folder2].containerID
+
+				body := msuser.NewItemMailFoldersItemMovePostRequestBody()
+				body.SetDestinationId(&toFolder)
+
+				_, err := gc.Service.
+					Client().
+					UsersById(suite.user).
+					MailFoldersById(fromFolder).
+					Move().
+					Post(ctx, body, nil)
+				require.NoError(t, err)
+			},
+			itemsRead:    0, // zero because we don't count container reads
+			itemsWritten: 2,
+		},
 	}
 
 	for _, test := range table {
 		suite.T().Run(test.name, func(t *testing.T) {
@@ -753,7 +809,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
 	)
 
 	// do some additional checks to ensure the incremental dealt with fewer items.
-	// +4 on read/writes to account for metadata
+	// +4 on read/writes to account for metadata: 1 delta and 1 path for each type.
 	assert.Equal(t, test.itemsWritten+4, incBO.Results.ItemsWritten, "incremental items written")
 	assert.Equal(t, test.itemsRead+4, incBO.Results.ItemsRead, "incremental items read")
 	assert.NoError(t, incBO.Results.ReadErrors, "incremental read errors")
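Worked through for the new move case: the relocated folder carries two items, so the incremental expects 2 + 4 = 6 items written and 0 + 4 = 4 items read, where the 4 is one delta token and one path metadata file for each of the two categories (email and contacts, presumably) exercised by the test.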
assert.NoError(t, incBO.Results.ReadErrors, "incremental read errors") assert.NoError(t, incBO.Results.ReadErrors, "incremental read errors")