set up integration test framework (#1975)
## Description

Adds the base test framework for handling incremental-backups integration testing. Future changes will add more tests to the table.

## Does this PR need a docs update or release note?

- [x] ⛔ No

## Type of change

- [x] 🤖 Test

## Issue(s)

* #1966

## Test Plan

- [x] 💚 E2E
This commit is contained in:
parent
673ee10752
commit
6419a59636
2
.github/workflows/ci_test_cleanup.yml
vendored
2
.github/workflows/ci_test_cleanup.yml
vendored
@ -11,7 +11,7 @@ jobs:
|
||||
continue-on-error: true
|
||||
strategy:
|
||||
matrix:
|
||||
folder: [Corso_Restore_, TestRestore, testfolder, '']
|
||||
folder: [Corso_Restore_, TestRestore, testfolder, incrementals_ci_, '']
|
||||
user: [CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, EXT_SDK_TEST_USER_ID]
|
||||
|
||||
steps:
|
||||
|
||||
@ -2,13 +2,21 @@ package operations
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
"golang.org/x/exp/maps"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/common"
|
||||
"github.com/alcionai/corso/src/internal/connector"
|
||||
"github.com/alcionai/corso/src/internal/connector/exchange"
|
||||
"github.com/alcionai/corso/src/internal/connector/mockconnector"
|
||||
"github.com/alcionai/corso/src/internal/data"
|
||||
"github.com/alcionai/corso/src/internal/events"
|
||||
evmock "github.com/alcionai/corso/src/internal/events/mock"
|
||||
@ -17,22 +25,25 @@ import (
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/pkg/account"
|
||||
"github.com/alcionai/corso/src/pkg/backup"
|
||||
"github.com/alcionai/corso/src/pkg/backup/details"
|
||||
"github.com/alcionai/corso/src/pkg/control"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
"github.com/alcionai/corso/src/pkg/store"
|
||||
)
|
||||
|
||||
const incrementalsDestFolderPrefix = "incrementals_ci_"
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// prepNewBackupOp generates all clients required to run a backup operation,
|
||||
// prepNewTestBackupOp generates all clients required to run a backup operation,
|
||||
// returning both a backup operation created with those clients, as well as
|
||||
// the clients themselves.
|
||||
//
|
||||
//revive:disable:context-as-argument
|
||||
func prepNewBackupOp(
|
||||
func prepNewTestBackupOp(
|
||||
t *testing.T,
|
||||
ctx context.Context,
|
||||
bus events.Eventer,
|
||||
@ -75,18 +86,18 @@ func prepNewBackupOp(
|
||||
ms.Close(ctx)
|
||||
}
|
||||
|
||||
bo := newBackupOp(t, ctx, kw, ms, acct, sel, bus, featureFlags, closer)
|
||||
bo := newTestBackupOp(t, ctx, kw, ms, acct, sel, bus, featureFlags, closer)
|
||||
|
||||
return bo, acct, kw, ms, closer
|
||||
}
|
||||
|
||||
// newBackupOp accepts the clients required to compose a backup operation, plus
|
||||
// newTestBackupOp accepts the clients required to compose a backup operation, plus
|
||||
// any other metadata, and uses them to generate a new backup operation. This
|
||||
// allows backup chains to utilize the same temp directory and configuration
|
||||
// details.
|
||||
//
|
||||
//revive:disable:context-as-argument
|
||||
func newBackupOp(
|
||||
func newTestBackupOp(
|
||||
t *testing.T,
|
||||
ctx context.Context,
|
||||
kw *kopia.Wrapper,
|
||||
@ -155,9 +166,11 @@ func checkBackupIsInManifests(
|
||||
bo *BackupOperation,
|
||||
sel selectors.Selector,
|
||||
resourceOwner string,
|
||||
category path.CategoryType,
|
||||
categories ...path.CategoryType,
|
||||
) {
|
||||
//revive:enable:context-as-argument
|
||||
for _, category := range categories {
|
||||
t.Run(category.String(), func(t *testing.T) {
|
||||
var (
|
||||
sck, scv = kopia.MakeServiceCat(sel.PathService(), category)
|
||||
oc = &kopia.OwnersCats{
|
||||
@ -180,6 +193,8 @@ func checkBackupIsInManifests(
|
||||
}
|
||||
|
||||
assert.True(t, found, "backup retrieved by previous snapshot manifest")
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
//revive:disable:context-as-argument
|
||||
@ -191,10 +206,11 @@ func checkMetadataFilesExist(
|
||||
ms *kopia.ModelStore,
|
||||
tenant, user string,
|
||||
service path.ServiceType,
|
||||
category path.CategoryType,
|
||||
files []string,
|
||||
filesByCat map[path.CategoryType][]string,
|
||||
) {
|
||||
//revive:enable:context-as-argument
|
||||
for category, files := range filesByCat {
|
||||
t.Run(category.String(), func(t *testing.T) {
|
||||
bup := &backup.Backup{}
|
||||
|
||||
err := ms.Get(ctx, model.BackupSchema, backupID, bup)
|
||||
@ -252,6 +268,149 @@ func checkMetadataFilesExist(
|
||||
col.FullPath(),
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Incremental Item Generators
|
||||
// TODO: this is ripped from factory.go, which is ripped from other tests.
|
||||
// At this point, three variation of the sameish code in three locations
|
||||
// feels like something we can clean up. But, it's not a strong need, so
|
||||
// this gets to stay for now.
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// the params here are what generateContainerOfItems passes into the func.
|
||||
// the callback provider can use them, or not, as wanted.
|
||||
type dataBuilderFunc func(id, timeStamp, subject, body string) []byte
|
||||
|
||||
//revive:disable:context-as-argument
|
||||
func generateContainerOfItems(
|
||||
t *testing.T,
|
||||
ctx context.Context,
|
||||
gc *connector.GraphConnector,
|
||||
service path.ServiceType,
|
||||
cat path.CategoryType,
|
||||
sel selectors.Selector,
|
||||
tenantID, userID, destFldr string,
|
||||
howManyItems int,
|
||||
dbf dataBuilderFunc,
|
||||
) *details.Details {
|
||||
//revive:enable:context-as-argument
|
||||
t.Helper()
|
||||
|
||||
items := make([]incrementalItem, 0, howManyItems)
|
||||
|
||||
for i := 0; i < howManyItems; i++ {
|
||||
var (
|
||||
now = common.Now()
|
||||
nowLegacy = common.FormatLegacyTime(time.Now())
|
||||
id = uuid.NewString()
|
||||
subject = "incr_test " + now[:16] + " - " + id[:8]
|
||||
body = "incr_test " + cat.String() + " generation for " + userID + " at " + now + " - " + id
|
||||
)
|
||||
|
||||
items = append(items, incrementalItem{
|
||||
name: id,
|
||||
data: dbf(id, nowLegacy, subject, body),
|
||||
})
|
||||
}
|
||||
|
||||
collections := []incrementalCollection{{
|
||||
pathFolders: []string{destFldr},
|
||||
category: cat,
|
||||
items: items,
|
||||
}}
|
||||
|
||||
dest := control.DefaultRestoreDestination(common.SimpleTimeTesting)
|
||||
dest.ContainerName = destFldr
|
||||
|
||||
dataColls := buildCollections(
|
||||
t,
|
||||
service,
|
||||
tenantID, userID,
|
||||
dest,
|
||||
collections)
|
||||
|
||||
deets, err := gc.RestoreDataCollections(ctx, sel, dest, dataColls)
|
||||
require.NoError(t, err)
|
||||
|
||||
return deets
|
||||
}
|
||||
|
||||
type incrementalItem struct {
|
||||
name string
|
||||
data []byte
|
||||
}
|
||||
|
||||
type incrementalCollection struct {
|
||||
pathFolders []string
|
||||
category path.CategoryType
|
||||
items []incrementalItem
|
||||
}
|
||||
|
||||
func buildCollections(
|
||||
t *testing.T,
|
||||
service path.ServiceType,
|
||||
tenant, user string,
|
||||
dest control.RestoreDestination,
|
||||
colls []incrementalCollection,
|
||||
) []data.Collection {
|
||||
t.Helper()
|
||||
|
||||
collections := make([]data.Collection, 0, len(colls))
|
||||
|
||||
for _, c := range colls {
|
||||
pth := toDataLayerPath(
|
||||
t,
|
||||
service,
|
||||
tenant,
|
||||
user,
|
||||
c.category,
|
||||
c.pathFolders,
|
||||
false)
|
||||
|
||||
mc := mockconnector.NewMockExchangeCollection(pth, len(c.items))
|
||||
|
||||
for i := 0; i < len(c.items); i++ {
|
||||
mc.Names[i] = c.items[i].name
|
||||
mc.Data[i] = c.items[i].data
|
||||
}
|
||||
|
||||
collections = append(collections, mc)
|
||||
}
|
||||
|
||||
return collections
|
||||
}
|
||||
|
||||
func toDataLayerPath(
|
||||
t *testing.T,
|
||||
service path.ServiceType,
|
||||
tenant, user string,
|
||||
category path.CategoryType,
|
||||
elements []string,
|
||||
isItem bool,
|
||||
) path.Path {
|
||||
t.Helper()
|
||||
|
||||
var (
|
||||
pb = path.Builder{}.Append(elements...)
|
||||
p path.Path
|
||||
err error
|
||||
)
|
||||
|
||||
switch service {
|
||||
case path.ExchangeService:
|
||||
p, err = pb.ToDataLayerExchangePathForCategory(tenant, user, category, isItem)
|
||||
case path.OneDriveService:
|
||||
p, err = pb.ToDataLayerOneDrivePath(tenant, user, isItem)
|
||||
default:
|
||||
err = errors.Errorf("unknown service %s", service.String())
|
||||
}
|
||||
|
||||
require.NoError(t, err)
|
||||
|
||||
return p
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
@ -321,6 +480,10 @@ func (suite *BackupOpIntegrationSuite) TestNewBackupOperation() {
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Exchange
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// TestBackup_Run ensures that Integration Testing works
|
||||
// for the following scopes: Contacts, Events, and Mail
|
||||
func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchange() {
|
||||
@ -385,7 +548,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchange() {
|
||||
ffs = control.FeatureFlags{ExchangeIncrementals: test.runIncremental}
|
||||
)
|
||||
|
||||
bo, acct, kw, ms, closer := prepNewBackupOp(t, ctx, mb, sel, ffs)
|
||||
bo, acct, kw, ms, closer := prepNewTestBackupOp(t, ctx, mb, sel, ffs)
|
||||
defer closer()
|
||||
|
||||
m365, err := acct.M365Config()
|
||||
@ -403,8 +566,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchange() {
|
||||
m365.AzureTenantID,
|
||||
test.resourceOwner,
|
||||
path.ExchangeService,
|
||||
test.category,
|
||||
test.metadataFiles,
|
||||
map[path.CategoryType][]string{test.category: test.metadataFiles},
|
||||
)
|
||||
|
||||
if !test.runIncremental {
|
||||
@ -416,7 +578,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchange() {
|
||||
// produces fewer results than the last backup.
|
||||
var (
|
||||
incMB = evmock.NewBus()
|
||||
incBO = newBackupOp(t, ctx, kw, ms, acct, sel, incMB, ffs, closer)
|
||||
incBO = newTestBackupOp(t, ctx, kw, ms, acct, sel, incMB, ffs, closer)
|
||||
)
|
||||
|
||||
runAndCheckBackup(t, ctx, &incBO, incMB)
|
||||
@ -430,8 +592,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchange() {
|
||||
m365.AzureTenantID,
|
||||
test.resourceOwner,
|
||||
path.ExchangeService,
|
||||
test.category,
|
||||
test.metadataFiles,
|
||||
map[path.CategoryType][]string{test.category: test.metadataFiles},
|
||||
)
|
||||
|
||||
// do some additional checks to ensure the incremental dealt with fewer items.
|
||||
@ -451,6 +612,165 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchange() {
|
||||
}
|
||||
}
|
||||
|
||||
// TestBackup_Run ensures that Integration Testing works
|
||||
// for the following scopes: Contacts, Events, and Mail
|
||||
func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
var (
|
||||
t = suite.T()
|
||||
acct = tester.NewM365Account(t)
|
||||
ffs = control.FeatureFlags{ExchangeIncrementals: true}
|
||||
mb = evmock.NewBus()
|
||||
now = common.Now()
|
||||
users = []string{suite.user}
|
||||
categories = map[path.CategoryType][]string{
|
||||
path.EmailCategory: exchange.MetadataFileNames(path.EmailCategory),
|
||||
path.ContactsCategory: exchange.MetadataFileNames(path.ContactsCategory),
|
||||
}
|
||||
folder1 = fmt.Sprintf("%s%d_%s", incrementalsDestFolderPrefix, 1, now)
|
||||
folder2 = fmt.Sprintf("%s%d_%s", incrementalsDestFolderPrefix, 2, now)
|
||||
)
|
||||
|
||||
m365, err := acct.M365Config()
|
||||
require.NoError(t, err)
|
||||
|
||||
gc, err := connector.NewGraphConnector(ctx, acct, connector.Users)
|
||||
require.NoError(t, err)
|
||||
|
||||
// generate 2 new folders with two items each.
|
||||
// This should be enough to cover most delta actions, since moving one
|
||||
// folder into another generates a delta for both addition and deletion.
|
||||
// TODO: get the folder IDs somehow, so that we can call mutations on
|
||||
// the folders by ID.
|
||||
dataset := map[path.CategoryType]struct {
|
||||
dbf dataBuilderFunc
|
||||
dests map[string]*details.Details
|
||||
}{
|
||||
path.EmailCategory: {
|
||||
dbf: func(id, timeStamp, subject, body string) []byte {
|
||||
user := suite.user
|
||||
|
||||
return mockconnector.GetMockMessageWith(
|
||||
user, user, user,
|
||||
subject, body, body,
|
||||
now, now, now, now)
|
||||
},
|
||||
dests: map[string]*details.Details{
|
||||
folder1: nil,
|
||||
folder2: nil,
|
||||
},
|
||||
},
|
||||
path.ContactsCategory: {
|
||||
dbf: func(id, timeStamp, subject, body string) []byte {
|
||||
given, mid, sur := id[:8], id[9:13], id[len(id)-12:]
|
||||
|
||||
return mockconnector.GetMockContactBytesWith(
|
||||
given+" "+sur,
|
||||
sur+", "+given,
|
||||
given, mid, sur,
|
||||
"123-456-7890",
|
||||
)
|
||||
},
|
||||
dests: map[string]*details.Details{
|
||||
folder1: nil,
|
||||
folder2: nil,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for category, gen := range dataset {
|
||||
for dest := range gen.dests {
|
||||
dataset[category].dests[dest] = generateContainerOfItems(
|
||||
t,
|
||||
ctx,
|
||||
gc,
|
||||
path.ExchangeService,
|
||||
category,
|
||||
selectors.NewExchangeRestore(users).Selector,
|
||||
m365.AzureTenantID, suite.user, dest,
|
||||
2,
|
||||
gen.dbf)
|
||||
}
|
||||
}
|
||||
|
||||
// use the new folders here
|
||||
sel := selectors.NewExchangeBackup(users)
|
||||
sel.Include(
|
||||
sel.MailFolders(users, []string{folder1, folder2}, selectors.PrefixMatch()),
|
||||
sel.ContactFolders(users, []string{folder1, folder2}, selectors.PrefixMatch()),
|
||||
)
|
||||
|
||||
bo, _, kw, ms, closer := prepNewTestBackupOp(t, ctx, mb, sel.Selector, ffs)
|
||||
defer closer()
|
||||
|
||||
// run the initial backup
|
||||
runAndCheckBackup(t, ctx, &bo, mb)
|
||||
|
||||
// incrementals changes to make (all changes apply to both email and contacts)
|
||||
// [ ] add a new item to an existing folder
|
||||
// [ ] remove an item from an existing folder
|
||||
// [ ] add a new folder
|
||||
// [ ] rename a folder
|
||||
// [ ] relocate one folder into another
|
||||
// [ ] remove a folder
|
||||
|
||||
table := []struct {
|
||||
name string
|
||||
// performs the incremental update required for the test.
|
||||
updateUserData func(t *testing.T)
|
||||
itemsRead int
|
||||
itemsWritten int
|
||||
}{
|
||||
{
|
||||
name: "clean incremental, no changes",
|
||||
updateUserData: func(t *testing.T) {},
|
||||
itemsRead: 0,
|
||||
itemsWritten: 0,
|
||||
},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.T().Run(test.name, func(t *testing.T) {
|
||||
var (
|
||||
incMB = evmock.NewBus()
|
||||
incBO = newTestBackupOp(t, ctx, kw, ms, acct, sel.Selector, incMB, ffs, closer)
|
||||
)
|
||||
|
||||
test.updateUserData(t)
|
||||
require.NoError(t, incBO.Run(ctx))
|
||||
checkBackupIsInManifests(t, ctx, kw, &incBO, sel.Selector, suite.user, maps.Keys(categories)...)
|
||||
checkMetadataFilesExist(
|
||||
t,
|
||||
ctx,
|
||||
incBO.Results.BackupID,
|
||||
kw,
|
||||
ms,
|
||||
m365.AzureTenantID,
|
||||
suite.user,
|
||||
path.ExchangeService,
|
||||
categories,
|
||||
)
|
||||
|
||||
// do some additional checks to ensure the incremental dealt with fewer items.
|
||||
// +4 on read/writes to account for metadata
|
||||
assert.Equal(t, test.itemsWritten+4, incBO.Results.ItemsWritten, "incremental items written")
|
||||
assert.Equal(t, test.itemsRead+4, incBO.Results.ItemsRead, "incremental items read")
|
||||
assert.NoError(t, incBO.Results.ReadErrors, "incremental read errors")
|
||||
assert.NoError(t, incBO.Results.WriteErrors, "incremental write errors")
|
||||
assert.Equal(t, 1, incMB.TimesCalled[events.BackupStart], "incremental backup-start events")
|
||||
assert.Equal(t, 1, incMB.TimesCalled[events.BackupEnd], "incremental backup-end events")
|
||||
assert.Equal(t,
|
||||
incMB.CalledWith[events.BackupStart][0][events.BackupID],
|
||||
incBO.Results.BackupID, "incremental backupID pre-declaration")
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// OneDrive
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDrive() {
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
@ -464,12 +784,16 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDrive() {
|
||||
|
||||
sel.Include(sel.Users([]string{m365UserID}))
|
||||
|
||||
bo, _, _, _, closer := prepNewBackupOp(t, ctx, mb, sel.Selector, control.FeatureFlags{})
|
||||
bo, _, _, _, closer := prepNewTestBackupOp(t, ctx, mb, sel.Selector, control.FeatureFlags{})
|
||||
defer closer()
|
||||
|
||||
runAndCheckBackup(t, ctx, &bo, mb)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// SharePoint
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
func (suite *BackupOpIntegrationSuite) TestBackup_Run_sharePoint() {
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
@ -482,7 +806,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_sharePoint() {
|
||||
|
||||
sel.Include(sel.Sites([]string{suite.site}))
|
||||
|
||||
bo, _, _, _, closer := prepNewBackupOp(t, ctx, mb, sel.Selector, control.FeatureFlags{})
|
||||
bo, _, _, _, closer := prepNewTestBackupOp(t, ctx, mb, sel.Selector, control.FeatureFlags{})
|
||||
defer closer()
|
||||
|
||||
runAndCheckBackup(t, ctx, &bo, mb)
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user