refactor onedrive test data creation (#3343)

<!-- PR description-->

Currently, the OneDrive test data creation code is duplicated in two places:
- the cli package
- the test files

This PR moves the common code into the connector package so that both the cli package and the test files use the same implementation; a usage sketch follows below.
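
A minimal, hypothetical sketch of the resulting call pattern (the wrapper function, file names, and contents below are invented for illustration; the exported helpers `connector.OnedriveColInfo`, `connector.DataForInfo`, and `connector.GetCollectionsAndExpected` are the ones introduced by this PR):

```go
package example

import (
	"github.com/alcionai/corso/src/internal/connector"
	"github.com/alcionai/corso/src/internal/data"
	"github.com/alcionai/corso/src/internal/tester"
	"github.com/alcionai/corso/src/internal/version"
	"github.com/alcionai/corso/src/pkg/account"
	"github.com/alcionai/corso/src/pkg/control"
	"github.com/alcionai/corso/src/pkg/path"
)

// buildTestDriveCollections is a hypothetical caller (e.g. the CLI factory or
// an integration test) that describes OneDrive test data declaratively and
// expands it through the shared connector helpers.
func buildTestDriveCollections(
	acct account.Account,
	opts control.Options,
	driveID, tenantID, resourceOwner, secondaryUserName, secondaryUserID string,
) ([]data.RestoreCollection, error) {
	cols := []connector.OnedriveColInfo{
		{
			// A single file at the drive root with write permissions granted
			// to a secondary user. Names and contents are made up.
			PathElements: []string{"drives", driveID, "root:"},
			Files: []connector.ItemData{
				{
					Name: "example-file",
					Data: []byte("example file body"),
					Perms: connector.PermData{
						User:     secondaryUserName,
						EntityID: secondaryUserID,
						Roles:    []string{"write"},
					},
				},
			},
		},
	}

	// Expand the declarative description into version-aware collection info.
	input, err := connector.DataForInfo(path.OneDriveService, cols, version.Backup)
	if err != nil {
		return nil, err
	}

	// Build restorable collections; item counts and expected backup data are
	// ignored in this sketch.
	_, _, collections, _, err := connector.GetCollectionsAndExpected(
		connector.ConfigInfo{
			Acct:           acct,
			Opts:           opts,
			Resource:       connector.Users,
			Service:        path.OneDriveService,
			Tenant:         tenantID,
			ResourceOwners: []string{resourceOwner},
			Dest:           tester.DefaultTestRestoreDestination(""),
		},
		input,
		version.Backup)

	return collections, err
}
```

The same declarative `OnedriveColInfo` description now drives both the CLI's data generation and the connector tests, which is what removes the duplication.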

#### Does this PR need a docs update or release note?

- [ ]  No

#### Type of change

<!--- Please check the type of change your PR introduces: --->

- [ ] 🤖 Supportability/Tests


#### Issue(s)

<!-- Can reference multiple issues. Use one of the following "magic words" - "closes, fixes" to auto-close the Github issue. -->
* https://github.com/alcionai/corso/issues/3183

#### Test Plan

<!-- How will this be tested prior to merging.-->
- [ ] 💪 Manual

---

neha_gupta authored on 2023-05-16 15:05:26 +05:30; committed by GitHub
parent 0bd524f86d
commit 694abfab68
6 changed files with 933 additions and 1170 deletions


@@ -1,11 +1,8 @@
package impl package impl
import ( import (
"bytes"
"context" "context"
"encoding/json"
"fmt" "fmt"
"io"
"os" "os"
"strings" "strings"
"time" "time"
@@ -20,7 +17,6 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector" "github.com/alcionai/corso/src/internal/connector"
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock" exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/internal/version"
@@ -83,7 +79,7 @@ func generateAndRestoreItems(
} }
collections := []collection{{ collections := []collection{{
pathElements: []string{destFldr}, PathElements: []string{destFldr},
category: cat, category: cat,
items: items, items: items,
}} }}
@@ -160,7 +156,7 @@ type collection struct {
// only contain elements after the prefix that corso uses for the path. For // only contain elements after the prefix that corso uses for the path. For
// example, a collection for the Inbox folder in exchange mail would just be // example, a collection for the Inbox folder in exchange mail would just be
// "Inbox". // "Inbox".
pathElements []string PathElements []string
category path.CategoryType category path.CategoryType
items []item items []item
} }
@@ -180,7 +176,7 @@ func buildCollections(
service, service,
c.category, c.category,
false, false,
c.pathElements...) c.PathElements...)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -198,45 +194,6 @@
return collections, nil return collections, nil
} }
type permData struct {
user string // user is only for older versions
entityID string
roles []string
sharingMode metadata.SharingMode
}
type itemData struct {
name string
data []byte
perms permData
}
type itemInfo struct {
// lookupKey is a string that can be used to find this data from a set of
// other data in the same collection. This key should be something that will
// be the same before and after restoring the item in M365 and may not be
// the M365 ID. When restoring items out of place, the item is assigned a
// new ID making it unsuitable for a lookup key.
lookupKey string
name string
data []byte
}
type onedriveCollection struct {
service path.ServiceType
pathElements []string
items []itemInfo
aux []itemInfo
backupVersion int
}
type onedriveColInfo struct {
pathElements []string
perms permData
files []itemData
folders []itemData
}
var ( var (
folderAName = "folder-a" folderAName = "folder-a"
folderBName = "b" folderBName = "b"
@@ -292,7 +249,7 @@ func generateAndRestoreDriveItems(
} }
var ( var (
cols []onedriveColInfo cols []connector.OnedriveColInfo
rootPath = []string{"drives", driveID, "root:"} rootPath = []string{"drives", driveID, "root:"}
folderAPath = []string{"drives", driveID, "root:", folderAName} folderAPath = []string{"drives", driveID, "root:", folderAName}
@@ -306,43 +263,43 @@
) )
for i := 0; i < count; i++ { for i := 0; i < count; i++ {
col := []onedriveColInfo{ col := []connector.OnedriveColInfo{
// basic folder and file creation // basic folder and file creation
{ {
pathElements: rootPath, PathElements: rootPath,
files: []itemData{ Files: []connector.ItemData{
{ {
name: fmt.Sprintf("file-1st-count-%d-at-%s", i, currentTime), Name: fmt.Sprintf("file-1st-count-%d-at-%s", i, currentTime),
data: fileAData, Data: fileAData,
perms: permData{ Perms: connector.PermData{
user: secondaryUserName, User: secondaryUserName,
entityID: secondaryUserID, EntityID: secondaryUserID,
roles: writePerm, Roles: writePerm,
}, },
}, },
{ {
name: fmt.Sprintf("file-2nd-count-%d-at-%s", i, currentTime), Name: fmt.Sprintf("file-2nd-count-%d-at-%s", i, currentTime),
data: fileBData, Data: fileBData,
}, },
}, },
folders: []itemData{ Folders: []connector.ItemData{
{ {
name: folderBName, Name: folderBName,
}, },
{ {
name: folderAName, Name: folderAName,
perms: permData{ Perms: connector.PermData{
user: secondaryUserName, User: secondaryUserName,
entityID: secondaryUserID, EntityID: secondaryUserID,
roles: readPerm, Roles: readPerm,
}, },
}, },
{ {
name: folderCName, Name: folderCName,
perms: permData{ Perms: connector.PermData{
user: secondaryUserName, User: secondaryUserName,
entityID: secondaryUserID, EntityID: secondaryUserID,
roles: readPerm, Roles: readPerm,
}, },
}, },
}, },
@@ -350,62 +307,62 @@ func generateAndRestoreDriveItems(
{ {
// a folder that has permissions with an item in the folder with // a folder that has permissions with an item in the folder with
// the different permissions. // the different permissions.
pathElements: folderAPath, PathElements: folderAPath,
files: []itemData{ Files: []connector.ItemData{
{ {
name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime), Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
data: fileEData, Data: fileEData,
perms: permData{ Perms: connector.PermData{
user: secondaryUserName, User: secondaryUserName,
entityID: secondaryUserID, EntityID: secondaryUserID,
roles: writePerm, Roles: writePerm,
}, },
}, },
}, },
perms: permData{ Perms: connector.PermData{
user: secondaryUserName, User: secondaryUserName,
entityID: secondaryUserID, EntityID: secondaryUserID,
roles: readPerm, Roles: readPerm,
}, },
}, },
{ {
// a folder that has permissions with an item in the folder with // a folder that has permissions with an item in the folder with
// no permissions. // no permissions.
pathElements: folderCPath, PathElements: folderCPath,
files: []itemData{ Files: []connector.ItemData{
{ {
name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime), Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
data: fileAData, Data: fileAData,
}, },
}, },
perms: permData{ Perms: connector.PermData{
user: secondaryUserName, User: secondaryUserName,
entityID: secondaryUserID, EntityID: secondaryUserID,
roles: readPerm, Roles: readPerm,
}, },
}, },
{ {
pathElements: folderBPath, PathElements: folderBPath,
files: []itemData{ Files: []connector.ItemData{
{ {
// restoring a file in a non-root folder that doesn't inherit // restoring a file in a non-root folder that doesn't inherit
// permissions. // permissions.
name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime), Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
data: fileBData, Data: fileBData,
perms: permData{ Perms: connector.PermData{
user: secondaryUserName, User: secondaryUserName,
entityID: secondaryUserID, EntityID: secondaryUserID,
roles: writePerm, Roles: writePerm,
}, },
}, },
}, },
folders: []itemData{ Folders: []connector.ItemData{
{ {
name: folderAName, Name: folderAName,
perms: permData{ Perms: connector.PermData{
user: secondaryUserName, User: secondaryUserName,
entityID: secondaryUserID, EntityID: secondaryUserID,
roles: readPerm, Roles: readPerm,
}, },
}, },
}, },
@@ -415,302 +372,40 @@ func generateAndRestoreDriveItems(
cols = append(cols, col...) cols = append(cols, col...)
} }
input := dataForInfo(service, cols, version.Backup) input, err := connector.DataForInfo(service, cols, version.Backup)
if err != nil {
return nil, err
}
collections := getCollections( // collections := getCollections(
service, // service,
tenantID, // tenantID,
[]string{resourceOwner}, // []string{resourceOwner},
input, // input,
version.Backup) // version.Backup)
opts := control.Options{ opts := control.Options{
RestorePermissions: true, RestorePermissions: true,
ToggleFeatures: control.Toggles{}, ToggleFeatures: control.Toggles{},
} }
config := connector.ConfigInfo{
Acct: acct,
Opts: opts,
Resource: connector.Users,
Service: service,
Tenant: tenantID,
ResourceOwners: []string{resourceOwner},
Dest: tester.DefaultTestRestoreDestination(""),
}
_, _, collections, _, err := connector.GetCollectionsAndExpected(
config,
input,
version.Backup)
if err != nil {
return nil, err
}
return gc.ConsumeRestoreCollections(ctx, version.Backup, acct, sel, dest, opts, collections, errs) return gc.ConsumeRestoreCollections(ctx, version.Backup, acct, sel, dest, opts, collections, errs)
} }
func getCollections(
service path.ServiceType,
tenant string,
resourceOwners []string,
testCollections []colInfo,
backupVersion int,
) []data.RestoreCollection {
var collections []data.RestoreCollection
for _, owner := range resourceOwners {
ownerCollections := collectionsForInfo(
service,
tenant,
owner,
testCollections,
backupVersion,
)
collections = append(collections, ownerCollections...)
}
return collections
}
type mockRestoreCollection struct {
data.Collection
auxItems map[string]data.Stream
}
func (rc mockRestoreCollection) Fetch(
ctx context.Context,
name string,
) (data.Stream, error) {
res := rc.auxItems[name]
if res == nil {
return nil, data.ErrNotFound
}
return res, nil
}
func collectionsForInfo(
service path.ServiceType,
tenant, user string,
allInfo []colInfo,
backupVersion int,
) []data.RestoreCollection {
collections := make([]data.RestoreCollection, 0, len(allInfo))
for _, info := range allInfo {
pth := mustToDataLayerPath(
service,
tenant,
user,
info.category,
info.pathElements,
false)
mc := exchMock.NewCollection(pth, pth, len(info.items))
for i := 0; i < len(info.items); i++ {
mc.Names[i] = info.items[i].name
mc.Data[i] = info.items[i].data
// We do not count metadata files against item count
if backupVersion > 0 && metadata.HasMetaSuffix(info.items[i].name) &&
(service == path.OneDriveService || service == path.SharePointService) {
continue
}
}
c := mockRestoreCollection{Collection: mc, auxItems: map[string]data.Stream{}}
for _, aux := range info.auxItems {
c.auxItems[aux.name] = &exchMock.Data{
ID: aux.name,
Reader: io.NopCloser(bytes.NewReader(aux.data)),
}
}
collections = append(collections, c)
}
return collections
}
func mustToDataLayerPath(
service path.ServiceType,
tenant, resourceOwner string,
category path.CategoryType,
elements []string,
isItem bool,
) path.Path {
res, err := path.Build(tenant, resourceOwner, service, category, isItem, elements...)
if err != nil {
fmt.Println("building path", clues.ToCore(err))
}
return res
}
type colInfo struct {
// Elements (in order) for the path representing this collection. Should
// only contain elements after the prefix that corso uses for the path. For
// example, a collection for the Inbox folder in exchange mail would just be
// "Inbox".
pathElements []string
category path.CategoryType
items []itemInfo
// auxItems are items that can be retrieved with Fetch but won't be returned
// by Items().
auxItems []itemInfo
}
func newOneDriveCollection(
service path.ServiceType,
pathElements []string,
backupVersion int,
) *onedriveCollection {
return &onedriveCollection{
service: service,
pathElements: pathElements,
backupVersion: backupVersion,
}
}
func dataForInfo(
service path.ServiceType,
cols []onedriveColInfo,
backupVersion int,
) []colInfo {
var res []colInfo
for _, c := range cols {
onedriveCol := newOneDriveCollection(service, c.pathElements, backupVersion)
for _, f := range c.files {
onedriveCol.withFile(f.name, f.data, f.perms)
}
onedriveCol.withPermissions(c.perms)
res = append(res, onedriveCol.collection())
}
return res
}
func (c onedriveCollection) collection() colInfo {
cat := path.FilesCategory
if c.service == path.SharePointService {
cat = path.LibrariesCategory
}
return colInfo{
pathElements: c.pathElements,
category: cat,
items: c.items,
auxItems: c.aux,
}
}
func (c *onedriveCollection) withFile(name string, fileData []byte, perm permData) *onedriveCollection {
c.items = append(c.items, onedriveItemWithData(
name+metadata.DataFileSuffix,
name+metadata.DataFileSuffix,
fileData))
md := onedriveMetadata(
name,
name+metadata.MetaFileSuffix,
name,
perm,
true)
c.items = append(c.items, md)
c.aux = append(c.aux, md)
return c
}
// withPermissions adds permissions to the folder represented by this
// onedriveCollection.
func (c *onedriveCollection) withPermissions(perm permData) *onedriveCollection {
if c.backupVersion < version.OneDrive4DirIncludesPermissions {
return c
}
name := c.pathElements[len(c.pathElements)-1]
metaName := name
if c.backupVersion >= version.OneDrive5DirMetaNoName {
// We switched to just .dirmeta for metadata file names.
metaName = ""
}
if name == "root:" {
return c
}
md := onedriveMetadata(
name,
metaName+metadata.DirMetaFileSuffix,
metaName+metadata.DirMetaFileSuffix,
perm,
true)
c.items = append(c.items, md)
c.aux = append(c.aux, md)
return c
}
type oneDriveData struct {
FileName string `json:"fileName,omitempty"`
Data []byte `json:"data,omitempty"`
}
func onedriveItemWithData(
name, lookupKey string,
fileData []byte,
) itemInfo {
content := oneDriveData{
FileName: lookupKey,
Data: fileData,
}
serialized, _ := json.Marshal(content)
return itemInfo{
name: name,
data: serialized,
lookupKey: lookupKey,
}
}
func onedriveMetadata(
fileName, itemID, lookupKey string,
perm permData,
permUseID bool,
) itemInfo {
meta := getMetadata(fileName, perm, permUseID)
metaJSON, err := json.Marshal(meta)
if err != nil {
fmt.Println("marshalling metadata", clues.ToCore(err))
}
return itemInfo{
name: itemID,
data: metaJSON,
lookupKey: lookupKey,
}
}
func getMetadata(fileName string, perm permData, permUseID bool) metadata.Metadata {
if len(perm.user) == 0 || len(perm.roles) == 0 ||
perm.sharingMode != metadata.SharingModeCustom {
return metadata.Metadata{
FileName: fileName,
SharingMode: perm.sharingMode,
}
}
// In case of permissions, the id will usually be same for same
// user/role combo unless deleted and readded, but we have to do
// this as we only have two users of which one is already taken.
id := uuid.NewString()
uperm := metadata.Permission{ID: id, Roles: perm.roles}
if permUseID {
uperm.EntityID = perm.entityID
} else {
uperm.Email = perm.user
}
meta := metadata.Metadata{
FileName: fileName,
Permissions: []metadata.Permission{uperm},
}
return meta
}


@@ -1,7 +1,6 @@
package connector package connector
import ( import (
"bytes"
"context" "context"
"encoding/json" "encoding/json"
"io" "io"
@@ -17,33 +16,16 @@ import (
"golang.org/x/exp/slices" "golang.org/x/exp/slices"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
"github.com/alcionai/corso/src/internal/connector/onedrive" "github.com/alcionai/corso/src/internal/connector/onedrive"
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata" "github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
func mustToDataLayerPath(
t *testing.T,
service path.ServiceType,
tenant, resourceOwner string,
category path.CategoryType,
elements []string,
isItem bool,
) path.Path {
res, err := path.Build(tenant, resourceOwner, service, category, isItem, elements...)
require.NoError(t, err, clues.ToCore(err))
return res
}
func testElementsMatch[T any]( func testElementsMatch[T any](
t *testing.T, t *testing.T,
expected []T, expected []T,
@@ -116,52 +98,17 @@ func testElementsMatch[T any](
unexpected) unexpected)
} }
type configInfo struct {
acct account.Account
opts control.Options
resource Resource
service path.ServiceType
tenant string
resourceOwners []string
dest control.RestoreDestination
}
type itemInfo struct {
// lookupKey is a string that can be used to find this data from a set of
// other data in the same collection. This key should be something that will
// be the same before and after restoring the item in M365 and may not be
// the M365 ID. When restoring items out of place, the item is assigned a
// new ID making it unsuitable for a lookup key.
lookupKey string
name string
data []byte
}
type colInfo struct {
// Elements (in order) for the path representing this collection. Should
// only contain elements after the prefix that corso uses for the path. For
// example, a collection for the Inbox folder in exchange mail would just be
// "Inbox".
pathElements []string
category path.CategoryType
items []itemInfo
// auxItems are items that can be retrieved with Fetch but won't be returned
// by Items(). These files do not directly participate in comparisons at the
// end of a test.
auxItems []itemInfo
}
type restoreBackupInfo struct { type restoreBackupInfo struct {
name string name string
service path.ServiceType service path.ServiceType
collections []colInfo collections []ColInfo
resource Resource resource Resource
} }
type restoreBackupInfoMultiVersion struct { type restoreBackupInfoMultiVersion struct {
service path.ServiceType service path.ServiceType
collectionsLatest []colInfo collectionsLatest []ColInfo
collectionsPrevious []colInfo collectionsPrevious []ColInfo
resource Resource resource Resource
backupVersion int backupVersion int
} }
@@ -734,7 +681,7 @@ func compareDriveItem(
t *testing.T, t *testing.T,
expected map[string][]byte, expected map[string][]byte,
item data.Stream, item data.Stream,
config configInfo, config ConfigInfo,
rootDir bool, rootDir bool,
) bool { ) bool {
// Skip Drive permissions in the folder that used to be the root. We don't // Skip Drive permissions in the folder that used to be the root. We don't
@@ -814,7 +761,7 @@ func compareDriveItem(
assert.Equal(t, expectedMeta.FileName, itemMeta.FileName) assert.Equal(t, expectedMeta.FileName, itemMeta.FileName)
} }
if !config.opts.RestorePermissions { if !config.Opts.RestorePermissions {
assert.Equal(t, 0, len(itemMeta.Permissions)) assert.Equal(t, 0, len(itemMeta.Permissions))
return true return true
} }
@@ -835,7 +782,7 @@ func compareDriveItem(
// sharepoint retrieves a superset of permissions // sharepoint retrieves a superset of permissions
// (all site admins, site groups, built in by default) // (all site admins, site groups, built in by default)
// relative to the permissions changed by the test. // relative to the permissions changed by the test.
config.service == path.SharePointService, config.Service == path.SharePointService,
permissionEqual) permissionEqual)
return true return true
@@ -877,7 +824,7 @@ func compareItem(
service path.ServiceType, service path.ServiceType,
category path.CategoryType, category path.CategoryType,
item data.Stream, item data.Stream,
config configInfo, config ConfigInfo,
rootDir bool, rootDir bool,
) bool { ) bool {
if mt, ok := item.(data.StreamModTime); ok { if mt, ok := item.(data.StreamModTime); ok {
@@ -971,7 +918,7 @@ func checkCollections(
expectedItems int, expectedItems int,
expected map[string]map[string][]byte, expected map[string]map[string][]byte,
got []data.BackupCollection, got []data.BackupCollection,
config configInfo, config ConfigInfo,
) int { ) int {
collectionsWithItems := []data.BackupCollection{} collectionsWithItems := []data.BackupCollection{}
@@ -985,7 +932,7 @@ func checkCollections(
category = returned.FullPath().Category() category = returned.FullPath().Category()
expectedColData = expected[returned.FullPath().String()] expectedColData = expected[returned.FullPath().String()]
folders = returned.FullPath().Elements() folders = returned.FullPath().Elements()
rootDir = folders[len(folders)-1] == config.dest.ContainerName rootDir = folders[len(folders)-1] == config.Dest.ContainerName
) )
// Need to iterate through all items even if we don't expect to find a match // Need to iterate through all items even if we don't expect to find a match
@@ -1166,127 +1113,6 @@ func backupSelectorForExpected(
return selectors.Selector{} return selectors.Selector{}
} }
// backupOutputPathFromRestore returns a path.Path denoting the location in
// kopia the data will be placed at. The location is a data-type specific
// combination of the location the data was recently restored to and where the
// data was originally in the hierarchy.
func backupOutputPathFromRestore(
t *testing.T,
restoreDest control.RestoreDestination,
inputPath path.Path,
) path.Path {
base := []string{restoreDest.ContainerName}
// OneDrive has leading information like the drive ID.
if inputPath.Service() == path.OneDriveService || inputPath.Service() == path.SharePointService {
folders := inputPath.Folders()
base = append(append([]string{}, folders[:3]...), restoreDest.ContainerName)
if len(folders) > 3 {
base = append(base, folders[3:]...)
}
}
if inputPath.Service() == path.ExchangeService && inputPath.Category() == path.EmailCategory {
base = append(base, inputPath.Folders()...)
}
return mustToDataLayerPath(
t,
inputPath.Service(),
inputPath.Tenant(),
inputPath.ResourceOwner(),
inputPath.Category(),
base,
false,
)
}
// TODO(ashmrtn): Make this an actual mock class that can be used in other
// packages.
type mockRestoreCollection struct {
data.Collection
auxItems map[string]data.Stream
}
func (rc mockRestoreCollection) Fetch(
ctx context.Context,
name string,
) (data.Stream, error) {
res := rc.auxItems[name]
if res == nil {
return nil, data.ErrNotFound
}
return res, nil
}
func collectionsForInfo(
t *testing.T,
service path.ServiceType,
tenant, user string,
dest control.RestoreDestination,
allInfo []colInfo,
backupVersion int,
) (int, int, []data.RestoreCollection, map[string]map[string][]byte) {
var (
collections = make([]data.RestoreCollection, 0, len(allInfo))
expectedData = make(map[string]map[string][]byte, len(allInfo))
totalItems = 0
kopiaEntries = 0
)
for _, info := range allInfo {
pth := mustToDataLayerPath(
t,
service,
tenant,
user,
info.category,
info.pathElements,
false)
mc := exchMock.NewCollection(pth, pth, len(info.items))
baseDestPath := backupOutputPathFromRestore(t, dest, pth)
baseExpected := expectedData[baseDestPath.String()]
if len(baseExpected) == 0 {
expectedData[baseDestPath.String()] = make(map[string][]byte, len(info.items))
baseExpected = expectedData[baseDestPath.String()]
}
for i := 0; i < len(info.items); i++ {
mc.Names[i] = info.items[i].name
mc.Data[i] = info.items[i].data
baseExpected[info.items[i].lookupKey] = info.items[i].data
// We do not count metadata files against item count
if backupVersion > 0 &&
(service == path.OneDriveService || service == path.SharePointService) &&
metadata.HasMetaSuffix(info.items[i].name) {
continue
}
totalItems++
}
c := mockRestoreCollection{Collection: mc, auxItems: map[string]data.Stream{}}
for _, aux := range info.auxItems {
c.auxItems[aux.name] = &exchMock.Data{
ID: aux.name,
Reader: io.NopCloser(bytes.NewReader(aux.data)),
}
}
collections = append(collections, c)
kopiaEntries += len(info.items)
}
return totalItems, kopiaEntries, collections, expectedData
}
func getSelectorWith( func getSelectorWith(
t *testing.T, t *testing.T,
service path.ServiceType, service path.ServiceType,


@@ -2,13 +2,11 @@ package connector
import ( import (
"context" "context"
"encoding/json"
"fmt" "fmt"
"strings" "strings"
"testing" "testing"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/google/uuid"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@@ -25,86 +23,6 @@ import (
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
) )
// For any version post this(inclusive), we expect to be using IDs for
// permission instead of email
const versionPermissionSwitchedToID = version.OneDrive4DirIncludesPermissions
func getMetadata(fileName string, perm permData, permUseID bool) metadata.Metadata {
if len(perm.user) == 0 || len(perm.roles) == 0 ||
perm.sharingMode != metadata.SharingModeCustom {
return metadata.Metadata{
FileName: fileName,
SharingMode: perm.sharingMode,
}
}
// In case of permissions, the id will usually be same for same
// user/role combo unless deleted and readded, but we have to do
// this as we only have two users of which one is already taken.
id := uuid.NewString()
uperm := metadata.Permission{ID: id, Roles: perm.roles}
if permUseID {
uperm.EntityID = perm.entityID
} else {
uperm.Email = perm.user
}
testMeta := metadata.Metadata{
FileName: fileName,
Permissions: []metadata.Permission{uperm},
}
return testMeta
}
type testOneDriveData struct {
FileName string `json:"fileName,omitempty"`
Data []byte `json:"data,omitempty"`
}
func onedriveItemWithData(
t *testing.T,
name, lookupKey string,
fileData []byte,
) itemInfo {
t.Helper()
content := testOneDriveData{
FileName: lookupKey,
Data: fileData,
}
serialized, err := json.Marshal(content)
require.NoError(t, err, clues.ToCore(err))
return itemInfo{
name: name,
data: serialized,
lookupKey: lookupKey,
}
}
func onedriveMetadata(
t *testing.T,
fileName, itemID, lookupKey string,
perm permData,
permUseID bool,
) itemInfo {
t.Helper()
testMeta := getMetadata(fileName, perm, permUseID)
testMetaJSON, err := json.Marshal(testMeta)
require.NoError(t, err, "marshalling metadata", clues.ToCore(err))
return itemInfo{
name: itemID,
data: testMetaJSON,
lookupKey: lookupKey,
}
}
var ( var (
fileName = "test-file.txt" fileName = "test-file.txt"
folderAName = "folder-a" folderAName = "folder-a"
@@ -122,204 +40,6 @@ var (
readPerm = []string{"read"} readPerm = []string{"read"}
) )
func newOneDriveCollection(
t *testing.T,
service path.ServiceType,
pathElements []string,
backupVersion int,
) *onedriveCollection {
return &onedriveCollection{
service: service,
pathElements: pathElements,
backupVersion: backupVersion,
t: t,
}
}
type onedriveCollection struct {
service path.ServiceType
pathElements []string
items []itemInfo
aux []itemInfo
backupVersion int
t *testing.T
}
func (c onedriveCollection) collection() colInfo {
cat := path.FilesCategory
if c.service == path.SharePointService {
cat = path.LibrariesCategory
}
return colInfo{
pathElements: c.pathElements,
category: cat,
items: c.items,
auxItems: c.aux,
}
}
func (c *onedriveCollection) withFile(name string, fileData []byte, perm permData) *onedriveCollection {
switch c.backupVersion {
case 0:
// Lookups will occur using the most recent version of things so we need
// the embedded file name to match that.
c.items = append(c.items, onedriveItemWithData(
c.t,
name,
name+metadata.DataFileSuffix,
fileData))
// v1-5, early metadata design
case version.OneDrive1DataAndMetaFiles, 2, version.OneDrive3IsMetaMarker,
version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName:
c.items = append(c.items, onedriveItemWithData(
c.t,
name+metadata.DataFileSuffix,
name+metadata.DataFileSuffix,
fileData))
md := onedriveMetadata(
c.t,
"",
name+metadata.MetaFileSuffix,
name+metadata.MetaFileSuffix,
perm,
c.backupVersion >= versionPermissionSwitchedToID)
c.items = append(c.items, md)
c.aux = append(c.aux, md)
// v6+ current metadata design
case version.OneDrive6NameInMeta, version.OneDrive7LocationRef, version.All8MigrateUserPNToID:
c.items = append(c.items, onedriveItemWithData(
c.t,
name+metadata.DataFileSuffix,
name+metadata.DataFileSuffix,
fileData))
md := onedriveMetadata(
c.t,
name,
name+metadata.MetaFileSuffix,
name,
perm,
c.backupVersion >= versionPermissionSwitchedToID)
c.items = append(c.items, md)
c.aux = append(c.aux, md)
default:
assert.FailNowf(c.t, "bad backup version", "version %d", c.backupVersion)
}
return c
}
func (c *onedriveCollection) withFolder(name string, perm permData) *onedriveCollection {
switch c.backupVersion {
case 0, version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName,
version.OneDrive6NameInMeta, version.OneDrive7LocationRef, version.All8MigrateUserPNToID:
return c
case version.OneDrive1DataAndMetaFiles, 2, version.OneDrive3IsMetaMarker:
c.items = append(
c.items,
onedriveMetadata(
c.t,
"",
name+metadata.DirMetaFileSuffix,
name+metadata.DirMetaFileSuffix,
perm,
c.backupVersion >= versionPermissionSwitchedToID))
default:
assert.FailNowf(c.t, "bad backup version", "version %d", c.backupVersion)
}
return c
}
// withPermissions adds permissions to the folder represented by this
// onedriveCollection.
func (c *onedriveCollection) withPermissions(perm permData) *onedriveCollection {
// These versions didn't store permissions for the folder or didn't store them
// in the folder's collection.
if c.backupVersion < version.OneDrive4DirIncludesPermissions {
return c
}
name := c.pathElements[len(c.pathElements)-1]
metaName := name
if c.backupVersion >= version.OneDrive5DirMetaNoName {
// We switched to just .dirmeta for metadata file names.
metaName = ""
}
if name == odConsts.RootPathDir {
return c
}
md := onedriveMetadata(
c.t,
name,
metaName+metadata.DirMetaFileSuffix,
metaName+metadata.DirMetaFileSuffix,
perm,
c.backupVersion >= versionPermissionSwitchedToID)
c.items = append(c.items, md)
c.aux = append(c.aux, md)
return c
}
type permData struct {
user string // user is only for older versions
entityID string
roles []string
sharingMode metadata.SharingMode
}
type itemData struct {
name string
data []byte
perms permData
}
type driveColInfo struct {
pathElements []string
perms permData
files []itemData
folders []itemData
}
func testDataForInfo(
t *testing.T,
service path.ServiceType,
cols []driveColInfo,
backupVersion int,
) []colInfo {
var res []colInfo
for _, c := range cols {
onedriveCol := newOneDriveCollection(t, service, c.pathElements, backupVersion)
for _, f := range c.files {
onedriveCol.withFile(f.name, f.data, f.perms)
}
for _, d := range c.folders {
onedriveCol.withFolder(d.name, d.perms)
}
onedriveCol.withPermissions(c.perms)
res = append(res, onedriveCol.collection())
}
return res
}
func mustGetDefaultDriveID( func mustGetDefaultDriveID(
t *testing.T, t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument ctx context.Context, //revive:disable-line:context-as-argument
@@ -675,78 +395,80 @@ func testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(
folderBName, folderBName,
} }
cols := []driveColInfo{ cols := []OnedriveColInfo{
{ {
pathElements: rootPath, PathElements: rootPath,
files: []itemData{ Files: []ItemData{
{ {
name: fileName, Name: fileName,
data: fileAData, Data: fileAData,
}, },
}, },
folders: []itemData{ Folders: []ItemData{
{ {
name: folderAName, Name: folderAName,
}, },
{ {
name: folderBName, Name: folderBName,
}, },
}, },
}, },
{ {
pathElements: folderAPath, PathElements: folderAPath,
files: []itemData{ Files: []ItemData{
{ {
name: fileName, Name: fileName,
data: fileBData, Data: fileBData,
}, },
}, },
folders: []itemData{ Folders: []ItemData{
{ {
name: folderBName, Name: folderBName,
}, },
}, },
}, },
{ {
pathElements: subfolderBPath, PathElements: subfolderBPath,
files: []itemData{ Files: []ItemData{
{ {
name: fileName, Name: fileName,
data: fileCData, Data: fileCData,
}, },
}, },
folders: []itemData{ Folders: []ItemData{
{ {
name: folderAName, Name: folderAName,
}, },
}, },
}, },
{ {
pathElements: subfolderAPath, PathElements: subfolderAPath,
files: []itemData{ Files: []ItemData{
{ {
name: fileName, Name: fileName,
data: fileDData, Data: fileDData,
}, },
}, },
}, },
{ {
pathElements: folderBPath, PathElements: folderBPath,
files: []itemData{ Files: []ItemData{
{ {
name: fileName, Name: fileName,
data: fileEData, Data: fileEData,
}, },
}, },
}, },
} }
expected := testDataForInfo(suite.T(), suite.BackupService(), cols, version.Backup) expected, err := DataForInfo(suite.BackupService(), cols, version.Backup)
require.NoError(suite.T(), err)
for vn := startVersion; vn <= version.Backup; vn++ { for vn := startVersion; vn <= version.Backup; vn++ {
suite.Run(fmt.Sprintf("Version%d", vn), func() { suite.Run(fmt.Sprintf("Version%d", vn), func() {
t := suite.T() t := suite.T()
input := testDataForInfo(t, suite.BackupService(), cols, vn) input, err := DataForInfo(suite.BackupService(), cols, vn)
require.NoError(suite.T(), err)
testData := restoreBackupInfoMultiVersion{ testData := restoreBackupInfoMultiVersion{
service: suite.BackupService(), service: suite.BackupService(),
@@ -819,71 +541,71 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) {
folderCName, folderCName,
} }
cols := []driveColInfo{ cols := []OnedriveColInfo{
{ {
pathElements: rootPath, PathElements: rootPath,
files: []itemData{ Files: []ItemData{
{ {
// Test restoring a file that doesn't inherit permissions. // Test restoring a file that doesn't inherit permissions.
name: fileName, Name: fileName,
data: fileAData, Data: fileAData,
perms: permData{ Perms: PermData{
user: secondaryUserName, User: secondaryUserName,
entityID: secondaryUserID, EntityID: secondaryUserID,
roles: writePerm, Roles: writePerm,
}, },
}, },
{ {
// Test restoring a file that doesn't inherit permissions and has // Test restoring a file that doesn't inherit permissions and has
// no permissions. // no permissions.
name: fileName2, Name: fileName2,
data: fileBData, Data: fileBData,
}, },
}, },
folders: []itemData{ Folders: []ItemData{
{ {
name: folderBName, Name: folderBName,
}, },
{ {
name: folderAName, Name: folderAName,
perms: permData{ Perms: PermData{
user: secondaryUserName, User: secondaryUserName,
entityID: secondaryUserID, EntityID: secondaryUserID,
roles: readPerm, Roles: readPerm,
}, },
}, },
{ {
name: folderCName, Name: folderCName,
perms: permData{ Perms: PermData{
user: secondaryUserName, User: secondaryUserName,
entityID: secondaryUserID, EntityID: secondaryUserID,
roles: readPerm, Roles: readPerm,
}, },
}, },
}, },
}, },
{ {
pathElements: folderBPath, PathElements: folderBPath,
files: []itemData{ Files: []ItemData{
{ {
// Test restoring a file in a non-root folder that doesn't inherit // Test restoring a file in a non-root folder that doesn't inherit
// permissions. // permissions.
name: fileName, Name: fileName,
data: fileBData, Data: fileBData,
perms: permData{ Perms: PermData{
user: secondaryUserName, User: secondaryUserName,
entityID: secondaryUserID, EntityID: secondaryUserID,
roles: writePerm, Roles: writePerm,
}, },
}, },
}, },
folders: []itemData{ Folders: []ItemData{
{ {
name: folderAName, Name: folderAName,
perms: permData{ Perms: PermData{
user: secondaryUserName, User: secondaryUserName,
entityID: secondaryUserID, EntityID: secondaryUserID,
roles: readPerm, Roles: readPerm,
}, },
}, },
}, },
@@ -905,52 +627,53 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) {
// }, // },
// }, // },
// }, // },
// perms: permData{ // Perms: PermData{
// user: secondaryUserName, // User: secondaryUserName,
// entityID: secondaryUserID, // EntityID: secondaryUserID,
// roles: readPerm, // Roles: readPerm,
// }, // },
// }, // },
{ {
// Tests a folder that has permissions with an item in the folder with // Tests a folder that has permissions with an item in the folder with
// the different permissions. // the different permissions.
pathElements: folderAPath, PathElements: folderAPath,
files: []itemData{ Files: []ItemData{
{ {
name: fileName, Name: fileName,
data: fileEData, Data: fileEData,
perms: permData{ Perms: PermData{
user: secondaryUserName, User: secondaryUserName,
entityID: secondaryUserID, EntityID: secondaryUserID,
roles: writePerm, Roles: writePerm,
}, },
}, },
}, },
perms: permData{ Perms: PermData{
user: secondaryUserName, User: secondaryUserName,
entityID: secondaryUserID, EntityID: secondaryUserID,
roles: readPerm, Roles: readPerm,
}, },
}, },
{ {
// Tests a folder that has permissions with an item in the folder with // Tests a folder that has permissions with an item in the folder with
// no permissions. // no permissions.
pathElements: folderCPath, PathElements: folderCPath,
files: []itemData{ Files: []ItemData{
{ {
name: fileName, Name: fileName,
data: fileAData, Data: fileAData,
}, },
}, },
perms: permData{ Perms: PermData{
user: secondaryUserName, User: secondaryUserName,
entityID: secondaryUserID, EntityID: secondaryUserID,
roles: readPerm, Roles: readPerm,
}, },
}, },
} }
expected := testDataForInfo(suite.T(), suite.BackupService(), cols, version.Backup) expected, err := DataForInfo(suite.BackupService(), cols, version.Backup)
require.NoError(suite.T(), err)
bss := suite.BackupService().String() bss := suite.BackupService().String()
for vn := startVersion; vn <= version.Backup; vn++ { for vn := startVersion; vn <= version.Backup; vn++ {
@@ -959,7 +682,8 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) {
// Ideally this can always be true or false and still // Ideally this can always be true or false and still
// work, but limiting older versions to use emails so as // work, but limiting older versions to use emails so as
// to validate that flow as well. // to validate that flow as well.
input := testDataForInfo(t, suite.BackupService(), cols, vn) input, err := DataForInfo(suite.BackupService(), cols, vn)
require.NoError(suite.T(), err)
testData := restoreBackupInfoMultiVersion{ testData := restoreBackupInfoMultiVersion{
service: suite.BackupService(), service: suite.BackupService(),
@@ -997,51 +721,53 @@ func testPermissionsBackupAndNoRestore(suite oneDriveSuite, startVersion int) {
suite.Service(), suite.Service(),
suite.BackupResourceOwner()) suite.BackupResourceOwner())
inputCols := []driveColInfo{ inputCols := []OnedriveColInfo{
{ {
pathElements: []string{ PathElements: []string{
odConsts.DrivesPathDir, odConsts.DrivesPathDir,
driveID, driveID,
odConsts.RootPathDir, odConsts.RootPathDir,
}, },
files: []itemData{ Files: []ItemData{
{ {
name: fileName, Name: fileName,
data: fileAData, Data: fileAData,
perms: permData{ Perms: PermData{
user: secondaryUserName, User: secondaryUserName,
entityID: secondaryUserID, EntityID: secondaryUserID,
roles: writePerm, Roles: writePerm,
}, },
}, },
}, },
}, },
} }
expectedCols := []driveColInfo{ expectedCols := []OnedriveColInfo{
{ {
pathElements: []string{ PathElements: []string{
odConsts.DrivesPathDir, odConsts.DrivesPathDir,
driveID, driveID,
odConsts.RootPathDir, odConsts.RootPathDir,
}, },
files: []itemData{ Files: []ItemData{
{ {
// No permissions on the output since they weren't restored. // No permissions on the output since they weren't restored.
name: fileName, Name: fileName,
data: fileAData, Data: fileAData,
}, },
}, },
}, },
} }
expected := testDataForInfo(suite.T(), suite.BackupService(), expectedCols, version.Backup) expected, err := DataForInfo(suite.BackupService(), expectedCols, version.Backup)
require.NoError(suite.T(), err)
bss := suite.BackupService().String() bss := suite.BackupService().String()
for vn := startVersion; vn <= version.Backup; vn++ { for vn := startVersion; vn <= version.Backup; vn++ {
suite.Run(fmt.Sprintf("%s-Version%d", bss, vn), func() { suite.Run(fmt.Sprintf("%s-Version%d", bss, vn), func() {
t := suite.T() t := suite.T()
input := testDataForInfo(t, suite.BackupService(), inputCols, vn) input, err := DataForInfo(suite.BackupService(), inputCols, vn)
require.NoError(suite.T(), err)
testData := restoreBackupInfoMultiVersion{ testData := restoreBackupInfoMultiVersion{
service: suite.BackupService(), service: suite.BackupService(),
@@ -1119,29 +845,29 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio
folderCName, folderCName,
} }
fileSet := []itemData{ fileSet := []ItemData{
{ {
name: "file-custom", Name: "file-custom",
data: fileAData, Data: fileAData,
perms: permData{ Perms: PermData{
user: secondaryUserName, User: secondaryUserName,
entityID: secondaryUserID, EntityID: secondaryUserID,
roles: writePerm, Roles: writePerm,
sharingMode: metadata.SharingModeCustom, SharingMode: metadata.SharingModeCustom,
}, },
}, },
{ {
name: "file-inherited", Name: "file-inherited",
data: fileAData, Data: fileAData,
perms: permData{ Perms: PermData{
sharingMode: metadata.SharingModeInherited, SharingMode: metadata.SharingModeInherited,
}, },
}, },
{ {
name: "file-empty", Name: "file-empty",
data: fileAData, Data: fileAData,
perms: permData{ Perms: PermData{
sharingMode: metadata.SharingModeCustom, SharingMode: metadata.SharingModeCustom,
}, },
}, },
} }
@@ -1164,55 +890,56 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio
// - inherted-permission-file // - inherted-permission-file
// - empty-permission-file (empty/empty might have interesting behavior) // - empty-permission-file (empty/empty might have interesting behavior)
cols := []driveColInfo{ cols := []OnedriveColInfo{
{ {
pathElements: rootPath, PathElements: rootPath,
files: []itemData{}, Files: []ItemData{},
folders: []itemData{ Folders: []ItemData{
{name: folderAName}, {Name: folderAName},
}, },
}, },
{ {
pathElements: folderAPath, PathElements: folderAPath,
files: fileSet, Files: fileSet,
folders: []itemData{ Folders: []ItemData{
{name: folderAName}, {Name: folderAName},
{name: folderBName}, {Name: folderBName},
{name: folderCName}, {Name: folderCName},
}, },
perms: permData{ Perms: PermData{
user: tertiaryUserName, User: tertiaryUserName,
entityID: tertiaryUserID, EntityID: tertiaryUserID,
roles: readPerm, Roles: readPerm,
}, },
}, },
{ {
pathElements: subfolderAAPath, PathElements: subfolderAAPath,
files: fileSet, Files: fileSet,
perms: permData{ Perms: PermData{
user: tertiaryUserName, User: tertiaryUserName,
entityID: tertiaryUserID, EntityID: tertiaryUserID,
roles: writePerm, Roles: writePerm,
sharingMode: metadata.SharingModeCustom, SharingMode: metadata.SharingModeCustom,
}, },
}, },
{ {
pathElements: subfolderABPath, PathElements: subfolderABPath,
files: fileSet, Files: fileSet,
perms: permData{ Perms: PermData{
sharingMode: metadata.SharingModeInherited, SharingMode: metadata.SharingModeInherited,
}, },
}, },
{ {
pathElements: subfolderACPath, PathElements: subfolderACPath,
files: fileSet, Files: fileSet,
perms: permData{ Perms: PermData{
sharingMode: metadata.SharingModeCustom, SharingMode: metadata.SharingModeCustom,
}, },
}, },
} }
expected := testDataForInfo(suite.T(), suite.BackupService(), cols, version.Backup) expected, err := DataForInfo(suite.BackupService(), cols, version.Backup)
require.NoError(suite.T(), err)
bss := suite.BackupService().String() bss := suite.BackupService().String()
for vn := startVersion; vn <= version.Backup; vn++ { for vn := startVersion; vn <= version.Backup; vn++ {
@@ -1221,7 +948,8 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio
// Ideally this can always be true or false and still // Ideally this can always be true or false and still
// work, but limiting older versions to use emails so as // work, but limiting older versions to use emails so as
// to validate that flow as well. // to validate that flow as well.
input := testDataForInfo(t, suite.BackupService(), cols, vn) input, err := DataForInfo(suite.BackupService(), cols, vn)
require.NoError(suite.T(), err)
testData := restoreBackupInfoMultiVersion{ testData := restoreBackupInfoMultiVersion{
service: suite.BackupService(), service: suite.BackupService(),
@@ -1279,61 +1007,63 @@ func testRestoreFolderNamedFolderRegression(
folderBName, folderBName,
} }
cols := []driveColInfo{ cols := []OnedriveColInfo{
{ {
pathElements: rootPath, PathElements: rootPath,
files: []itemData{ Files: []ItemData{
{ {
name: fileName, Name: fileName,
data: fileAData, Data: fileAData,
}, },
}, },
folders: []itemData{ Folders: []ItemData{
{ {
name: folderNamedFolder, Name: folderNamedFolder,
}, },
{ {
name: folderBName, Name: folderBName,
}, },
}, },
}, },
{ {
pathElements: folderFolderPath, PathElements: folderFolderPath,
files: []itemData{ Files: []ItemData{
{ {
name: fileName, Name: fileName,
data: fileBData, Data: fileBData,
}, },
}, },
folders: []itemData{ Folders: []ItemData{
{ {
name: folderBName, Name: folderBName,
}, },
}, },
}, },
{ {
pathElements: subfolderPath, PathElements: subfolderPath,
files: []itemData{ Files: []ItemData{
{ {
name: fileName, Name: fileName,
data: fileCData, Data: fileCData,
}, },
}, },
folders: []itemData{ Folders: []ItemData{
{ {
name: folderNamedFolder, Name: folderNamedFolder,
}, },
}, },
}, },
} }
expected := testDataForInfo(suite.T(), suite.BackupService(), cols, version.Backup) expected, err := DataForInfo(suite.BackupService(), cols, version.Backup)
require.NoError(suite.T(), err)
bss := suite.BackupService().String() bss := suite.BackupService().String()
for vn := startVersion; vn <= version.Backup; vn++ { for vn := startVersion; vn <= version.Backup; vn++ {
suite.Run(fmt.Sprintf("%s-Version%d", bss, vn), func() { suite.Run(fmt.Sprintf("%s-Version%d", bss, vn), func() {
t := suite.T() t := suite.T()
input := testDataForInfo(t, suite.BackupService(), cols, vn) input, err := DataForInfo(suite.BackupService(), cols, vn)
require.NoError(suite.T(), err)
testData := restoreBackupInfoMultiVersion{ testData := restoreBackupInfoMultiVersion{
service: suite.BackupService(), service: suite.BackupService(),


@@ -0,0 +1,358 @@
package connector
import (
"encoding/json"
"fmt"
"github.com/alcionai/clues"
"github.com/google/uuid"
"golang.org/x/exp/maps"
odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/path"
)
// For any version post this(inclusive), we expect to be using IDs for
// permission instead of email
const versionPermissionSwitchedToID = version.OneDrive4DirIncludesPermissions
func getMetadata(fileName string, perm PermData, permUseID bool) metadata.Metadata {
if len(perm.User) == 0 || len(perm.Roles) == 0 ||
perm.SharingMode != metadata.SharingModeCustom {
return metadata.Metadata{
FileName: fileName,
SharingMode: perm.SharingMode,
}
}
// In case of permissions, the id will usually be same for same
// user/role combo unless deleted and readded, but we have to do
// this as we only have two users of which one is already taken.
id := uuid.NewString()
uperm := metadata.Permission{ID: id, Roles: perm.Roles}
if permUseID {
uperm.EntityID = perm.EntityID
} else {
uperm.Email = perm.User
}
testMeta := metadata.Metadata{
FileName: fileName,
Permissions: []metadata.Permission{uperm},
}
return testMeta
}
type PermData struct {
User string // user is only for older versions
EntityID string
Roles []string
SharingMode metadata.SharingMode
}
type ItemData struct {
Name string
Data []byte
Perms PermData
}
type OnedriveColInfo struct {
PathElements []string
Perms PermData
Files []ItemData
Folders []ItemData
}
type onedriveCollection struct {
service path.ServiceType
PathElements []string
items []ItemInfo
aux []ItemInfo
backupVersion int
}
func (c onedriveCollection) collection() ColInfo {
cat := path.FilesCategory
if c.service == path.SharePointService {
cat = path.LibrariesCategory
}
return ColInfo{
PathElements: c.PathElements,
Category: cat,
Items: c.items,
AuxItems: c.aux,
}
}
func NewOneDriveCollection(
service path.ServiceType,
PathElements []string,
backupVersion int,
) *onedriveCollection {
return &onedriveCollection{
service: service,
PathElements: PathElements,
backupVersion: backupVersion,
}
}
func DataForInfo(
service path.ServiceType,
cols []OnedriveColInfo,
backupVersion int,
) ([]ColInfo, error) {
var (
res []ColInfo
err error
)
for _, c := range cols {
onedriveCol := NewOneDriveCollection(service, c.PathElements, backupVersion)
for _, f := range c.Files {
_, err = onedriveCol.withFile(f.Name, f.Data, f.Perms)
if err != nil {
return res, err
}
}
for _, d := range c.Folders {
_, err = onedriveCol.withFolder(d.Name, d.Perms)
if err != nil {
return res, err
}
}
_, err = onedriveCol.withPermissions(c.Perms)
if err != nil {
return res, err
}
res = append(res, onedriveCol.collection())
}
return res, nil
}
func (c *onedriveCollection) withFile(name string, fileData []byte, perm PermData) (*onedriveCollection, error) {
switch c.backupVersion {
case 0:
// Lookups will occur using the most recent version of things so we need
// the embedded file name to match that.
item, err := onedriveItemWithData(
name,
name+metadata.DataFileSuffix,
fileData)
if err != nil {
return c, err
}
c.items = append(c.items, item)
// v1-5, early metadata design
case version.OneDrive1DataAndMetaFiles, 2, version.OneDrive3IsMetaMarker,
version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName:
items, err := onedriveItemWithData(
name+metadata.DataFileSuffix,
name+metadata.DataFileSuffix,
fileData)
if err != nil {
return c, err
}
c.items = append(c.items, items)
md, err := onedriveMetadata(
"",
name+metadata.MetaFileSuffix,
name+metadata.MetaFileSuffix,
perm,
c.backupVersion >= versionPermissionSwitchedToID)
if err != nil {
return c, err
}
c.items = append(c.items, md)
c.aux = append(c.aux, md)
// v6+ current metadata design
case version.OneDrive6NameInMeta, version.OneDrive7LocationRef, version.All8MigrateUserPNToID:
item, err := onedriveItemWithData(
name+metadata.DataFileSuffix,
name+metadata.DataFileSuffix,
fileData)
if err != nil {
return c, err
}
c.items = append(c.items, item)
md, err := onedriveMetadata(
name,
name+metadata.MetaFileSuffix,
name,
perm,
c.backupVersion >= versionPermissionSwitchedToID)
if err != nil {
return c, err
}
c.items = append(c.items, md)
c.aux = append(c.aux, md)
default:
return c, clues.New(fmt.Sprintf("bad backup version. version %d", c.backupVersion))
}
return c, nil
}
func (c *onedriveCollection) withFolder(name string, perm PermData) (*onedriveCollection, error) {
switch c.backupVersion {
case 0, version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName,
version.OneDrive6NameInMeta, version.OneDrive7LocationRef, version.All8MigrateUserPNToID:
return c, nil
case version.OneDrive1DataAndMetaFiles, 2, version.OneDrive3IsMetaMarker:
item, err := onedriveMetadata(
"",
name+metadata.DirMetaFileSuffix,
name+metadata.DirMetaFileSuffix,
perm,
c.backupVersion >= versionPermissionSwitchedToID)
c.items = append(c.items, item)
if err != nil {
return c, err
}
default:
return c, clues.New(fmt.Sprintf("bad backup version.version %d", c.backupVersion))
}
return c, nil
}
// withPermissions adds permissions to the folder represented by this
// onedriveCollection.
func (c *onedriveCollection) withPermissions(perm PermData) (*onedriveCollection, error) {
// These versions didn't store permissions for the folder or didn't store them
// in the folder's collection.
if c.backupVersion < version.OneDrive4DirIncludesPermissions {
return c, nil
}
name := c.PathElements[len(c.PathElements)-1]
metaName := name
if c.backupVersion >= version.OneDrive5DirMetaNoName {
// We switched to just .dirmeta for metadata file names.
metaName = ""
}
if name == odConsts.RootPathDir {
return c, nil
}
md, err := onedriveMetadata(
name,
metaName+metadata.DirMetaFileSuffix,
metaName+metadata.DirMetaFileSuffix,
perm,
c.backupVersion >= versionPermissionSwitchedToID)
if err != nil {
return c, err
}
c.items = append(c.items, md)
c.aux = append(c.aux, md)
return c, err
}
type testOneDriveData struct {
FileName string `json:"fileName,omitempty"`
Data []byte `json:"data,omitempty"`
}
func onedriveItemWithData(
name, lookupKey string,
fileData []byte,
) (ItemInfo, error) {
content := testOneDriveData{
FileName: lookupKey,
Data: fileData,
}
serialized, err := json.Marshal(content)
if err != nil {
return ItemInfo{}, clues.Stack(err)
}
return ItemInfo{
name: name,
data: serialized,
lookupKey: lookupKey,
}, nil
}
func onedriveMetadata(
fileName, itemID, lookupKey string,
perm PermData,
permUseID bool,
) (ItemInfo, error) {
testMeta := getMetadata(fileName, perm, permUseID)
testMetaJSON, err := json.Marshal(testMeta)
if err != nil {
return ItemInfo{}, clues.Wrap(err, "marshalling metadata")
}
return ItemInfo{
name: itemID,
data: testMetaJSON,
lookupKey: lookupKey,
}, nil
}
func GetCollectionsAndExpected(
config ConfigInfo,
testCollections []ColInfo,
backupVersion int,
) (int, int, []data.RestoreCollection, map[string]map[string][]byte, error) {
var (
collections []data.RestoreCollection
expectedData = map[string]map[string][]byte{}
totalItems = 0
totalKopiaItems = 0
)
for _, owner := range config.ResourceOwners {
numItems, kopiaItems, ownerCollections, userExpectedData, err := collectionsForInfo(
config.Service,
config.Tenant,
owner,
config.Dest,
testCollections,
backupVersion,
)
if err != nil {
return totalItems, totalKopiaItems, collections, expectedData, err
}
collections = append(collections, ownerCollections...)
totalItems += numItems
totalKopiaItems += kopiaItems
maps.Copy(expectedData, userExpectedData)
}
return totalItems, totalKopiaItems, collections, expectedData, nil
}


@@ -11,7 +11,6 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"golang.org/x/exp/maps"
inMock "github.com/alcionai/corso/src/internal/common/idname/mock" inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock" exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
@@ -407,65 +406,30 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
// Exchange Functions // Exchange Functions
//------------------------------------------------------------- //-------------------------------------------------------------
func getCollectionsAndExpected(
t *testing.T,
config configInfo,
testCollections []colInfo,
backupVersion int,
) (int, int, []data.RestoreCollection, map[string]map[string][]byte) {
t.Helper()
var (
collections []data.RestoreCollection
expectedData = map[string]map[string][]byte{}
totalItems = 0
totalKopiaItems = 0
)
for _, owner := range config.resourceOwners {
numItems, kopiaItems, ownerCollections, userExpectedData := collectionsForInfo(
t,
config.service,
config.tenant,
owner,
config.dest,
testCollections,
backupVersion)
collections = append(collections, ownerCollections...)
totalItems += numItems
totalKopiaItems += kopiaItems
maps.Copy(expectedData, userExpectedData)
}
return totalItems, totalKopiaItems, collections, expectedData
}
func runRestore( func runRestore(
t *testing.T, t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument ctx context.Context, //revive:disable-line:context-as-argument
config configInfo, config ConfigInfo,
backupVersion int, backupVersion int,
collections []data.RestoreCollection, collections []data.RestoreCollection,
numRestoreItems int, numRestoreItems int,
) { ) {
t.Logf( t.Logf(
"Restoring collections to %s for resourceOwners(s) %v\n", "Restoring collections to %s for resourceOwners(s) %v\n",
config.dest.ContainerName, config.Dest.ContainerName,
config.resourceOwners) config.ResourceOwners)
start := time.Now() start := time.Now()
restoreGC := loadConnector(ctx, t, config.resource) restoreGC := loadConnector(ctx, t, config.Resource)
restoreSel := getSelectorWith(t, config.service, config.resourceOwners, true) restoreSel := getSelectorWith(t, config.Service, config.ResourceOwners, true)
deets, err := restoreGC.ConsumeRestoreCollections( deets, err := restoreGC.ConsumeRestoreCollections(
ctx, ctx,
backupVersion, backupVersion,
config.acct, config.Acct,
restoreSel, restoreSel,
config.dest, config.Dest,
config.opts, config.Opts,
collections, collections,
fault.New(true)) fault.New(true))
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -489,30 +453,30 @@ func runRestore(
func runBackupAndCompare( func runBackupAndCompare(
t *testing.T, t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument ctx context.Context, //revive:disable-line:context-as-argument
config configInfo, config ConfigInfo,
expectedData map[string]map[string][]byte, expectedData map[string]map[string][]byte,
totalItems int, totalItems int,
totalKopiaItems int, totalKopiaItems int,
inputCollections []colInfo, inputCollections []ColInfo,
) { ) {
t.Helper() t.Helper()
// Run a backup and compare its output with what we put in. // Run a backup and compare its output with what we put in.
cats := make(map[path.CategoryType]struct{}, len(inputCollections)) cats := make(map[path.CategoryType]struct{}, len(inputCollections))
for _, c := range inputCollections { for _, c := range inputCollections {
cats[c.category] = struct{}{} cats[c.Category] = struct{}{}
} }
var ( var (
expectedDests = make([]destAndCats, 0, len(config.resourceOwners)) expectedDests = make([]destAndCats, 0, len(config.ResourceOwners))
idToName = map[string]string{} idToName = map[string]string{}
nameToID = map[string]string{} nameToID = map[string]string{}
) )
for _, ro := range config.resourceOwners { for _, ro := range config.ResourceOwners {
expectedDests = append(expectedDests, destAndCats{ expectedDests = append(expectedDests, destAndCats{
resourceOwner: ro, resourceOwner: ro,
dest: config.dest.ContainerName, dest: config.Dest.ContainerName,
cats: cats, cats: cats,
}) })
@ -520,10 +484,10 @@ func runBackupAndCompare(
nameToID[ro] = ro nameToID[ro] = ro
} }
backupGC := loadConnector(ctx, t, config.resource) backupGC := loadConnector(ctx, t, config.Resource)
backupGC.IDNameLookup = inMock.NewCache(idToName, nameToID) backupGC.IDNameLookup = inMock.NewCache(idToName, nameToID)
backupSel := backupSelectorForExpected(t, config.service, expectedDests) backupSel := backupSelectorForExpected(t, config.Service, expectedDests)
t.Logf("Selective backup of %s\n", backupSel) t.Logf("Selective backup of %s\n", backupSel)
start := time.Now() start := time.Now()
@ -533,7 +497,7 @@ func runBackupAndCompare(
backupSel, backupSel,
nil, nil,
version.NoBackup, version.NoBackup,
config.opts, config.Opts,
fault.New(true)) fault.New(true))
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
// No excludes yet because this isn't an incremental backup. // No excludes yet because this isn't an incremental backup.
@ -570,22 +534,23 @@ func runRestoreBackupTest(
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
config := configInfo{ config := ConfigInfo{
acct: acct, Acct: acct,
opts: opts, Opts: opts,
resource: test.resource, Resource: test.resource,
service: test.service, Service: test.service,
tenant: tenant, Tenant: tenant,
resourceOwners: resourceOwners, ResourceOwners: resourceOwners,
dest: tester.DefaultTestRestoreDestination(""), Dest: tester.DefaultTestRestoreDestination(""),
} }
totalItems, totalKopiaItems, collections, expectedData := getCollectionsAndExpected( totalItems, totalKopiaItems, collections, expectedData, err := GetCollectionsAndExpected(
t,
config, config,
test.collections, test.collections,
version.Backup) version.Backup)
require.NoError(t, err)
runRestore( runRestore(
t, t,
ctx, ctx,
@ -616,21 +581,21 @@ func runRestoreTestWithVerion(
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
config := configInfo{ config := ConfigInfo{
acct: acct, Acct: acct,
opts: opts, Opts: opts,
resource: test.resource, Resource: test.resource,
service: test.service, Service: test.service,
tenant: tenant, Tenant: tenant,
resourceOwners: resourceOwners, ResourceOwners: resourceOwners,
dest: tester.DefaultTestRestoreDestination(""), Dest: tester.DefaultTestRestoreDestination(""),
} }
totalItems, _, collections, _ := getCollectionsAndExpected( totalItems, _, collections, _, err := GetCollectionsAndExpected(
t,
config, config,
test.collectionsPrevious, test.collectionsPrevious,
test.backupVersion) test.backupVersion)
require.NoError(t, err)
runRestore( runRestore(
t, t,
@ -655,21 +620,21 @@ func runRestoreBackupTestVersions(
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
config := configInfo{ config := ConfigInfo{
acct: acct, Acct: acct,
opts: opts, Opts: opts,
resource: test.resource, Resource: test.resource,
service: test.service, Service: test.service,
tenant: tenant, Tenant: tenant,
resourceOwners: resourceOwners, ResourceOwners: resourceOwners,
dest: tester.DefaultTestRestoreDestination(""), Dest: tester.DefaultTestRestoreDestination(""),
} }
totalItems, _, collections, _ := getCollectionsAndExpected( totalItems, _, collections, _, err := GetCollectionsAndExpected(
t,
config, config,
test.collectionsPrevious, test.collectionsPrevious,
test.backupVersion) test.backupVersion)
require.NoError(t, err)
runRestore( runRestore(
t, t,
@ -680,11 +645,11 @@ func runRestoreBackupTestVersions(
totalItems) totalItems)
// Get expected output for new version. // Get expected output for new version.
totalItems, totalKopiaItems, _, expectedData := getCollectionsAndExpected( totalItems, totalKopiaItems, _, expectedData, err := GetCollectionsAndExpected(
t,
config, config,
test.collectionsLatest, test.collectionsLatest,
version.Backup) version.Backup)
require.NoError(t, err)
runBackupAndCompare( runBackupAndCompare(
t, t,
@ -705,11 +670,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
name: "EmailsWithAttachments", name: "EmailsWithAttachments",
service: path.ExchangeService, service: path.ExchangeService,
resource: Users, resource: Users,
collections: []colInfo{ collections: []ColInfo{
{ {
pathElements: []string{"Inbox"}, PathElements: []string{"Inbox"},
category: path.EmailCategory, Category: path.EmailCategory,
items: []itemInfo{ Items: []ItemInfo{
{ {
name: "someencodeditemID", name: "someencodeditemID",
data: exchMock.MessageWithDirectAttachment( data: exchMock.MessageWithDirectAttachment(
@ -732,11 +697,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
name: "MultipleEmailsMultipleFolders", name: "MultipleEmailsMultipleFolders",
service: path.ExchangeService, service: path.ExchangeService,
resource: Users, resource: Users,
collections: []colInfo{ collections: []ColInfo{
{ {
pathElements: []string{"Inbox"}, PathElements: []string{"Inbox"},
category: path.EmailCategory, Category: path.EmailCategory,
items: []itemInfo{ Items: []ItemInfo{
{ {
name: "someencodeditemID", name: "someencodeditemID",
data: exchMock.MessageWithBodyBytes( data: exchMock.MessageWithBodyBytes(
@ -749,9 +714,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
}, },
}, },
{ {
pathElements: []string{"Work"}, PathElements: []string{"Work"},
category: path.EmailCategory, Category: path.EmailCategory,
items: []itemInfo{ Items: []ItemInfo{
{ {
name: "someencodeditemID2", name: "someencodeditemID2",
data: exchMock.MessageWithBodyBytes( data: exchMock.MessageWithBodyBytes(
@ -773,9 +738,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
}, },
}, },
{ {
pathElements: []string{"Work", "Inbox"}, PathElements: []string{"Work", "Inbox"},
category: path.EmailCategory, Category: path.EmailCategory,
items: []itemInfo{ Items: []ItemInfo{
{ {
name: "someencodeditemID4", name: "someencodeditemID4",
data: exchMock.MessageWithBodyBytes( data: exchMock.MessageWithBodyBytes(
@ -788,9 +753,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
}, },
}, },
{ {
pathElements: []string{"Work", "Inbox", "Work"}, PathElements: []string{"Work", "Inbox", "Work"},
category: path.EmailCategory, Category: path.EmailCategory,
items: []itemInfo{ Items: []ItemInfo{
{ {
name: "someencodeditemID5", name: "someencodeditemID5",
data: exchMock.MessageWithBodyBytes( data: exchMock.MessageWithBodyBytes(
@ -808,11 +773,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
name: "MultipleContactsSingleFolder", name: "MultipleContactsSingleFolder",
service: path.ExchangeService, service: path.ExchangeService,
resource: Users, resource: Users,
collections: []colInfo{ collections: []ColInfo{
{ {
pathElements: []string{"Contacts"}, PathElements: []string{"Contacts"},
category: path.ContactsCategory, Category: path.ContactsCategory,
items: []itemInfo{ Items: []ItemInfo{
{ {
name: "someencodeditemID", name: "someencodeditemID",
data: exchMock.ContactBytes("Ghimley"), data: exchMock.ContactBytes("Ghimley"),
@ -836,11 +801,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
name: "MultipleContactsMultipleFolders", name: "MultipleContactsMultipleFolders",
service: path.ExchangeService, service: path.ExchangeService,
resource: Users, resource: Users,
collections: []colInfo{ collections: []ColInfo{
{ {
pathElements: []string{"Work"}, PathElements: []string{"Work"},
category: path.ContactsCategory, Category: path.ContactsCategory,
items: []itemInfo{ Items: []ItemInfo{
{ {
name: "someencodeditemID", name: "someencodeditemID",
data: exchMock.ContactBytes("Ghimley"), data: exchMock.ContactBytes("Ghimley"),
@ -859,9 +824,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
}, },
}, },
{ {
pathElements: []string{"Personal"}, PathElements: []string{"Personal"},
category: path.ContactsCategory, Category: path.ContactsCategory,
items: []itemInfo{ Items: []ItemInfo{
{ {
name: "someencodeditemID4", name: "someencodeditemID4",
data: exchMock.ContactBytes("Argon"), data: exchMock.ContactBytes("Argon"),
@ -971,11 +936,11 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
name: "Contacts", name: "Contacts",
service: path.ExchangeService, service: path.ExchangeService,
resource: Users, resource: Users,
collections: []colInfo{ collections: []ColInfo{
{ {
pathElements: []string{"Work"}, PathElements: []string{"Work"},
category: path.ContactsCategory, Category: path.ContactsCategory,
items: []itemInfo{ Items: []ItemInfo{
{ {
name: "someencodeditemID", name: "someencodeditemID",
data: exchMock.ContactBytes("Ghimley"), data: exchMock.ContactBytes("Ghimley"),
@ -984,9 +949,9 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
}, },
}, },
{ {
pathElements: []string{"Personal"}, PathElements: []string{"Personal"},
category: path.ContactsCategory, Category: path.ContactsCategory,
items: []itemInfo{ Items: []ItemInfo{
{ {
name: "someencodeditemID2", name: "someencodeditemID2",
data: exchMock.ContactBytes("Irgot"), data: exchMock.ContactBytes("Irgot"),
@ -1012,9 +977,9 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
// }, // },
// }, // },
// { // {
// pathElements: []string{"Personal"}, // PathElements: []string{"Personal"},
// category: path.EventsCategory, // Category: path.EventsCategory,
// items: []itemInfo{ // Items: []ItemInfo{
// { // {
// name: "someencodeditemID2", // name: "someencodeditemID2",
// data: exchMock.EventWithSubjectBytes("Irgot"), // data: exchMock.EventWithSubjectBytes("Irgot"),
@ -1045,19 +1010,20 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
resourceOwner: suite.user, resourceOwner: suite.user,
dest: dest.ContainerName, dest: dest.ContainerName,
cats: map[path.CategoryType]struct{}{ cats: map[path.CategoryType]struct{}{
collection.category: {}, collection.Category: {},
}, },
}) })
totalItems, _, collections, expectedData := collectionsForInfo( totalItems, _, collections, expectedData, err := collectionsForInfo(
t,
test.service, test.service,
suite.connector.tenant, suite.connector.tenant,
suite.user, suite.user,
dest, dest,
[]colInfo{collection}, []ColInfo{collection},
version.Backup, version.Backup,
) )
require.NoError(t, err)
allItems += totalItems allItems += totalItems
for k, v := range expectedData { for k, v := range expectedData {
@ -1123,10 +1089,10 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
t.Log("Backup enumeration complete") t.Log("Backup enumeration complete")
ci := configInfo{ ci := ConfigInfo{
opts: control.Options{RestorePermissions: true}, Opts: control.Options{RestorePermissions: true},
// Alright to be empty, needed for OneDrive. // Alright to be empty, needed for OneDrive.
dest: control.RestoreDestination{}, Dest: control.RestoreDestination{},
} }
// Pull the data prior to waiting for the status as otherwise it will // Pull the data prior to waiting for the status as otherwise it will
@ -1149,11 +1115,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup_largeMailAttac
name: "EmailsWithLargeAttachments", name: "EmailsWithLargeAttachments",
service: path.ExchangeService, service: path.ExchangeService,
resource: Users, resource: Users,
collections: []colInfo{ collections: []ColInfo{
{ {
pathElements: []string{"Inbox"}, PathElements: []string{"Inbox"},
category: path.EmailCategory, Category: path.EmailCategory,
items: []itemInfo{ Items: []ItemInfo{
{ {
name: "35mbAttachment", name: "35mbAttachment",
data: exchMock.MessageWithSizedAttachment(subjectText, 35), data: exchMock.MessageWithSizedAttachment(subjectText, 35),

View File

@ -0,0 +1,188 @@
package connector
import (
"bytes"
"context"
"io"
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/path"
)
type ColInfo struct {
// Elements (in order) for the path representing this collection. Should
// only contain elements after the prefix that corso uses for the path. For
// example, a collection for the Inbox folder in exchange mail would just be
// "Inbox".
PathElements []string
Category path.CategoryType
Items []ItemInfo
// AuxItems are items that can be retrieved with Fetch but won't be returned
// by Items(). These files do not directly participate in comparisons at the
// end of a test.
AuxItems []ItemInfo
}
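
// For illustration only (a sketch, not drawn from this commit's test tables):
// a OneDrive-style ColInfo could pair a regular item with an aux item that is
// reachable only through Fetch, e.g.
//
//	ColInfo{
//		PathElements: []string{"folder-a"},
//		Category:     path.FilesCategory,
//		Items:        []ItemInfo{{name: "file.txt.data" /* ... */}},
//		AuxItems:     []ItemInfo{{name: "file.txt.meta" /* ... */}},
//	}
//
// The aux entry never appears in the collection's Items() stream, so it stays
// out of the item counts and byte comparisons; the ".data"/".meta" names are
// assumptions about typical OneDrive item naming, not values from this change.
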
type ItemInfo struct {
// lookupKey is a string that can be used to find this data from a set of
// other data in the same collection. This key should be something that will
// be the same before and after restoring the item in M365 and may not be
// the M365 ID. When restoring items out of place, the item is assigned a
// new ID making it unsuitable for a lookup key.
lookupKey string
name string
data []byte
}
type ConfigInfo struct {
Acct account.Account
Opts control.Options
Resource Resource
Service path.ServiceType
Tenant string
ResourceOwners []string
Dest control.RestoreDestination
}
func mustToDataLayerPath(
service path.ServiceType,
tenant, resourceOwner string,
category path.CategoryType,
elements []string,
isItem bool,
) (path.Path, error) {
res, err := path.Build(tenant, resourceOwner, service, category, isItem, elements...)
if err != nil {
return nil, err
}
return res, err
}
// backupOutputPathFromRestore returns a path.Path denoting the location in
// kopia where the data will be placed. The location is a data-type-specific
// combination of the location the data was recently restored to and where the
// data was originally in the hierarchy.
func backupOutputPathFromRestore(
restoreDest control.RestoreDestination,
inputPath path.Path,
) (path.Path, error) {
base := []string{restoreDest.ContainerName}
// OneDrive has leading information like the drive ID.
if inputPath.Service() == path.OneDriveService || inputPath.Service() == path.SharePointService {
folders := inputPath.Folders()
base = append(append([]string{}, folders[:3]...), restoreDest.ContainerName)
if len(folders) > 3 {
base = append(base, folders[3:]...)
}
}
if inputPath.Service() == path.ExchangeService && inputPath.Category() == path.EmailCategory {
base = append(base, inputPath.Folders()...)
}
return mustToDataLayerPath(
inputPath.Service(),
inputPath.Tenant(),
inputPath.ResourceOwner(),
inputPath.Category(),
base,
false,
)
}
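
// A worked illustration of the rearrangement above, assuming a restore
// container named "corso_restore" (the folder layouts are assumptions about
// typical corso paths, not values from this change):
//
//   - OneDrive/SharePoint folders ["drives", "<driveID>", "root:", "a", "b"]
//     map to ["drives", "<driveID>", "root:", "corso_restore", "a", "b"]; the
//     drive prefix stays ahead of the restore container.
//   - Exchange email folders ["Inbox", "Work"] map to
//     ["corso_restore", "Inbox", "Work"].
//   - Every other category collapses to just ["corso_restore"].
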
// TODO(ashmrtn): Make this an actual mock class that can be used in other
// packages.
type mockRestoreCollection struct {
data.Collection
auxItems map[string]data.Stream
}
func (rc mockRestoreCollection) Fetch(
ctx context.Context,
name string,
) (data.Stream, error) {
res := rc.auxItems[name]
if res == nil {
return nil, data.ErrNotFound
}
return res, nil
}
func collectionsForInfo(
service path.ServiceType,
tenant, user string,
dest control.RestoreDestination,
allInfo []ColInfo,
backupVersion int,
) (int, int, []data.RestoreCollection, map[string]map[string][]byte, error) {
var (
collections = make([]data.RestoreCollection, 0, len(allInfo))
expectedData = make(map[string]map[string][]byte, len(allInfo))
totalItems = 0
kopiaEntries = 0
)
for _, info := range allInfo {
pth, err := mustToDataLayerPath(
service,
tenant,
user,
info.Category,
info.PathElements,
false)
if err != nil {
return totalItems, kopiaEntries, collections, expectedData, err
}
mc := exchMock.NewCollection(pth, pth, len(info.Items))
baseDestPath, err := backupOutputPathFromRestore(dest, pth)
if err != nil {
return totalItems, kopiaEntries, collections, expectedData, err
}
baseExpected := expectedData[baseDestPath.String()]
if baseExpected == nil {
expectedData[baseDestPath.String()] = make(map[string][]byte, len(info.Items))
baseExpected = expectedData[baseDestPath.String()]
}
for i := 0; i < len(info.Items); i++ {
mc.Names[i] = info.Items[i].name
mc.Data[i] = info.Items[i].data
baseExpected[info.Items[i].lookupKey] = info.Items[i].data
// We do not count metadata files against item count
if backupVersion > 0 &&
(service == path.OneDriveService || service == path.SharePointService) &&
metadata.HasMetaSuffix(info.Items[i].name) {
continue
}
totalItems++
}
c := mockRestoreCollection{Collection: mc, auxItems: map[string]data.Stream{}}
for _, aux := range info.AuxItems {
c.auxItems[aux.name] = &exchMock.Data{
ID: aux.name,
Reader: io.NopCloser(bytes.NewReader(aux.data)),
}
}
collections = append(collections, c)
kopiaEntries += len(info.Items)
}
return totalItems, kopiaEntries, collections, expectedData, nil
}
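
The metadata filter above is why the function reports two counts: every item still becomes a kopia entry, but for post-zero backup versions of OneDrive and SharePoint only non-metadata items count as restorable items. A rough sketch (the tenant, user, and dest values stand in for the surrounding test, and the ".meta" name is assumed to satisfy metadata.HasMetaSuffix):

	cols := []ColInfo{{
		PathElements: []string{"folder-a"},
		Category:     path.FilesCategory,
		Items: []ItemInfo{
			{name: "file.txt.data", lookupKey: "file.txt.data", data: []byte("hello")},
			{name: "file.txt.meta", lookupKey: "file.txt.meta", data: []byte("{}")},
		},
	}}

	total, kopiaEntries, _, _, err := collectionsForInfo(
		path.OneDriveService, tenant, user, dest, cols, version.Backup)
	require.NoError(t, err)
	// Under the assumptions above: total == 1 (the .meta item is skipped by the
	// suffix check) while kopiaEntries == 2 (both items land in the mock
	// collection and in the kopia entry count).
	assert.Equal(t, 1, total)
	assert.Equal(t, 2, kopiaEntries)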