Compare commits
4 commits: main...moveOneDri

| Author | SHA1 | Date |
|---|---|---|
| | 8938a94eec | |
| | 16cb69b8a9 | |
| | 341bfd4616 | |
| | cde2ea8492 | |
@ -1,13 +1,11 @@
|
||||
package impl
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
@ -20,8 +18,6 @@ import (
|
||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||
"github.com/alcionai/corso/src/internal/connector"
|
||||
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
|
||||
"github.com/alcionai/corso/src/internal/connector/onedrive"
|
||||
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
|
||||
"github.com/alcionai/corso/src/internal/data"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/internal/version"
|
||||
@ -200,44 +196,44 @@ func buildCollections(
|
||||
return collections, nil
|
||||
}
|
||||
|
||||
type permData struct {
|
||||
user string // user is only for older versions
|
||||
entityID string
|
||||
roles []string
|
||||
sharingMode onedrive.SharingMode
|
||||
}
|
||||
// type permData struct {
|
||||
// user string // user is only for older versions
|
||||
// EntityID string
|
||||
// Roles []string
|
||||
// sharingMode onedrive.SharingMode
|
||||
// }
|
||||
|
||||
type itemData struct {
|
||||
name string
|
||||
data []byte
|
||||
perms permData
|
||||
}
|
||||
// type connector.ItemData struct {
|
||||
// name string
|
||||
// data []byte
|
||||
// Perms connector.PermData
|
||||
// }
|
||||
|
||||
type itemInfo struct {
|
||||
// lookupKey is a string that can be used to find this data from a set of
|
||||
// other data in the same collection. This key should be something that will
|
||||
// be the same before and after restoring the item in M365 and may not be
|
||||
// the M365 ID. When restoring items out of place, the item is assigned a
|
||||
// new ID making it unsuitable for a lookup key.
|
||||
lookupKey string
|
||||
name string
|
||||
data []byte
|
||||
}
|
||||
// type itemInfo struct {
|
||||
// // lookupKey is a string that can be used to find this data from a set of
|
||||
// // other data in the same collection. This key should be something that will
|
||||
// // be the same before and after restoring the item in M365 and may not be
|
||||
// // the M365 ID. When restoring items out of place, the item is assigned a
|
||||
// // new ID making it unsuitable for a lookup key.
|
||||
// lookupKey string
|
||||
// name string
|
||||
// data []byte
|
||||
// }
|
||||
|
||||
type onedriveCollection struct {
|
||||
service path.ServiceType
|
||||
pathElements []string
|
||||
items []itemInfo
|
||||
aux []itemInfo
|
||||
backupVersion int
|
||||
}
|
||||
// type onedriveCollection struct {
|
||||
// service path.ServiceType
|
||||
// pathElements []string
|
||||
// items []itemInfo
|
||||
// aux []itemInfo
|
||||
// backupVersion int
|
||||
// }
|
||||
|
||||
type onedriveColInfo struct {
|
||||
pathElements []string
|
||||
perms permData
|
||||
files []itemData
|
||||
folders []itemData
|
||||
}
|
||||
// type onedriveColInfo struct {
|
||||
// pathElements []string
|
||||
// Perms connector.PermData
|
||||
// files []connector.ItemData
|
||||
// folders []connector.ItemData
|
||||
// }
|
||||
|
||||
var (
|
||||
folderAName = "folder-a"
|
||||
@ -278,7 +274,7 @@ func generateAndRestoreOnedriveItems(
|
||||
driveID := ptr.Val(d.GetId())
|
||||
|
||||
var (
|
||||
cols []onedriveColInfo
|
||||
cols []connector.OnedriveColInfo
|
||||
|
||||
rootPath = []string{"drives", driveID, "root:"}
|
||||
folderAPath = []string{"drives", driveID, "root:", folderAName}
|
||||
@ -292,43 +288,43 @@ func generateAndRestoreOnedriveItems(
|
||||
)
|
||||
|
||||
for i := 0; i < count; i++ {
|
||||
col := []onedriveColInfo{
|
||||
col := []connector.OnedriveColInfo{
|
||||
// basic folder and file creation
|
||||
{
|
||||
pathElements: rootPath,
|
||||
files: []itemData{
|
||||
PathElements: rootPath,
|
||||
Files: []connector.ItemData{
|
||||
{
|
||||
name: fmt.Sprintf("file-1st-count-%d-at-%s", i, currentTime),
|
||||
data: fileAData,
|
||||
perms: permData{
|
||||
user: secondaryUserName,
|
||||
entityID: secondaryUserID,
|
||||
roles: writePerm,
|
||||
Name: fmt.Sprintf("file-1st-count-%d-at-%s", i, currentTime),
|
||||
Data: fileAData,
|
||||
Perms: connector.PermData{
|
||||
User: secondaryUserName,
|
||||
EntityID: secondaryUserID,
|
||||
Roles: writePerm,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: fmt.Sprintf("file-2nd-count-%d-at-%s", i, currentTime),
|
||||
data: fileBData,
|
||||
Name: fmt.Sprintf("file-2nd-count-%d-at-%s", i, currentTime),
|
||||
Data: fileBData,
|
||||
},
|
||||
},
|
||||
folders: []itemData{
|
||||
Folders: []connector.ItemData{
|
||||
{
|
||||
name: folderBName,
|
||||
Name: folderBName,
|
||||
},
|
||||
{
|
||||
name: folderAName,
|
||||
perms: permData{
|
||||
user: secondaryUserName,
|
||||
entityID: secondaryUserID,
|
||||
roles: readPerm,
|
||||
Name: folderAName,
|
||||
Perms: connector.PermData{
|
||||
User: secondaryUserName,
|
||||
EntityID: secondaryUserID,
|
||||
Roles: readPerm,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: folderCName,
|
||||
perms: permData{
|
||||
user: secondaryUserName,
|
||||
entityID: secondaryUserID,
|
||||
roles: readPerm,
|
||||
Name: folderCName,
|
||||
Perms: connector.PermData{
|
||||
User: secondaryUserName,
|
||||
EntityID: secondaryUserID,
|
||||
Roles: readPerm,
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -336,62 +332,62 @@ func generateAndRestoreOnedriveItems(
|
||||
{
|
||||
// a folder that has permissions with an item in the folder with
|
||||
// the different permissions.
|
||||
pathElements: folderAPath,
|
||||
files: []itemData{
|
||||
PathElements: folderAPath,
|
||||
Files: []connector.ItemData{
|
||||
{
|
||||
name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
|
||||
data: fileEData,
|
||||
perms: permData{
|
||||
user: secondaryUserName,
|
||||
entityID: secondaryUserID,
|
||||
roles: writePerm,
|
||||
Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
|
||||
Data: fileEData,
|
||||
Perms: connector.PermData{
|
||||
User: secondaryUserName,
|
||||
EntityID: secondaryUserID,
|
||||
Roles: writePerm,
|
||||
},
|
||||
},
|
||||
},
|
||||
perms: permData{
|
||||
user: secondaryUserName,
|
||||
entityID: secondaryUserID,
|
||||
roles: readPerm,
|
||||
Perms: connector.PermData{
|
||||
User: secondaryUserName,
|
||||
EntityID: secondaryUserID,
|
||||
Roles: readPerm,
|
||||
},
|
||||
},
|
||||
{
|
||||
// a folder that has permissions with an item in the folder with
|
||||
// no permissions.
|
||||
pathElements: folderCPath,
|
||||
files: []itemData{
|
||||
PathElements: folderCPath,
|
||||
Files: []connector.ItemData{
|
||||
{
|
||||
name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
|
||||
data: fileAData,
|
||||
Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
|
||||
Data: fileAData,
|
||||
},
|
||||
},
|
||||
perms: permData{
|
||||
user: secondaryUserName,
|
||||
entityID: secondaryUserID,
|
||||
roles: readPerm,
|
||||
Perms: connector.PermData{
|
||||
User: secondaryUserName,
|
||||
EntityID: secondaryUserID,
|
||||
Roles: readPerm,
|
||||
},
|
||||
},
|
||||
{
|
||||
pathElements: folderBPath,
|
||||
files: []itemData{
|
||||
PathElements: folderBPath,
|
||||
Files: []connector.ItemData{
|
||||
{
|
||||
// restoring a file in a non-root folder that doesn't inherit
|
||||
// permissions.
|
||||
name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
|
||||
data: fileBData,
|
||||
perms: permData{
|
||||
user: secondaryUserName,
|
||||
entityID: secondaryUserID,
|
||||
roles: writePerm,
|
||||
Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
|
||||
Data: fileBData,
|
||||
Perms: connector.PermData{
|
||||
User: secondaryUserName,
|
||||
EntityID: secondaryUserID,
|
||||
Roles: writePerm,
|
||||
},
|
||||
},
|
||||
},
|
||||
folders: []itemData{
|
||||
Folders: []connector.ItemData{
|
||||
{
|
||||
name: folderAName,
|
||||
perms: permData{
|
||||
user: secondaryUserName,
|
||||
entityID: secondaryUserID,
|
||||
roles: readPerm,
|
||||
Name: folderAName,
|
||||
Perms: connector.PermData{
|
||||
User: secondaryUserName,
|
||||
EntityID: secondaryUserID,
|
||||
Roles: readPerm,
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -401,302 +397,316 @@ func generateAndRestoreOnedriveItems(
|
||||
cols = append(cols, col...)
|
||||
}
|
||||
|
||||
input := dataForInfo(service, cols, version.Backup)
|
||||
|
||||
collections := getCollections(
|
||||
service,
|
||||
tenantID,
|
||||
[]string{resourceOwner},
|
||||
input,
|
||||
version.Backup)
|
||||
// TODO Neha: work on this
|
||||
t := testing.T{}
|
||||
input := connector.DataForInfo(&t, service, cols, version.Backup)
|
||||
|
||||
opts := control.Options{
|
||||
RestorePermissions: true,
|
||||
ToggleFeatures: control.Toggles{},
|
||||
}
|
||||
|
||||
config := connector.ConfigInfo{
|
||||
Acct: acct,
|
||||
Opts: opts,
|
||||
Resource: connector.Users,
|
||||
Service: service,
|
||||
Tenant: tenantID,
|
||||
ResourceOwners: []string{resourceOwner},
|
||||
Dest: tester.DefaultTestRestoreDestination(),
|
||||
}
|
||||
|
||||
_, _, collections, _ := connector.GetCollectionsAndExpected(
|
||||
&t,
|
||||
config,
|
||||
// service,
|
||||
// tenantID,
|
||||
// []string{resourceOwner},
|
||||
input,
|
||||
version.Backup)
|
||||
|
||||
return gc.ConsumeRestoreCollections(ctx, version.Backup, acct, sel, dest, opts, collections, errs)
|
||||
}
|
||||
|
||||
func getCollections(
|
||||
service path.ServiceType,
|
||||
tenant string,
|
||||
resourceOwners []string,
|
||||
testCollections []colInfo,
|
||||
backupVersion int,
|
||||
) []data.RestoreCollection {
|
||||
var collections []data.RestoreCollection
|
||||
// func getCollections(
|
||||
// service path.ServiceType,
|
||||
// tenant string,
|
||||
// resourceOwners []string,
|
||||
// testCollections []connector.ColInfo,
|
||||
// backupVersion int,
|
||||
// ) []data.RestoreCollection {
|
||||
// var collections []data.RestoreCollection
|
||||
|
||||
for _, owner := range resourceOwners {
|
||||
ownerCollections := collectionsForInfo(
|
||||
service,
|
||||
tenant,
|
||||
owner,
|
||||
testCollections,
|
||||
backupVersion,
|
||||
)
|
||||
// for _, owner := range resourceOwners {
|
||||
// ownerCollections := collectionsForInfo(
|
||||
// service,
|
||||
// tenant,
|
||||
// owner,
|
||||
// testCollections,
|
||||
// backupVersion,
|
||||
// )
|
||||
|
||||
collections = append(collections, ownerCollections...)
|
||||
}
|
||||
// collections = append(collections, ownerCollections...)
|
||||
// }
|
||||
|
||||
return collections
|
||||
}
|
||||
// return collections
|
||||
// }
|
||||
|
||||
type mockRestoreCollection struct {
|
||||
data.Collection
|
||||
auxItems map[string]data.Stream
|
||||
}
|
||||
// type mockRestoreCollection struct {
|
||||
// data.Collection
|
||||
// auxItems map[string]data.Stream
|
||||
// }
|
||||
|
||||
func (rc mockRestoreCollection) Fetch(
|
||||
ctx context.Context,
|
||||
name string,
|
||||
) (data.Stream, error) {
|
||||
res := rc.auxItems[name]
|
||||
if res == nil {
|
||||
return nil, data.ErrNotFound
|
||||
}
|
||||
// func (rc mockRestoreCollection) Fetch(
|
||||
// ctx context.Context,
|
||||
// name string,
|
||||
// ) (data.Stream, error) {
|
||||
// res := rc.auxItems[name]
|
||||
// if res == nil {
|
||||
// return nil, data.ErrNotFound
|
||||
// }
|
||||
|
||||
return res, nil
|
||||
}
|
||||
// return res, nil
|
||||
// }
|
||||
|
||||
func collectionsForInfo(
|
||||
service path.ServiceType,
|
||||
tenant, user string,
|
||||
allInfo []colInfo,
|
||||
backupVersion int,
|
||||
) []data.RestoreCollection {
|
||||
collections := make([]data.RestoreCollection, 0, len(allInfo))
|
||||
// func collectionsForInfo(
|
||||
// service path.ServiceType,
|
||||
// tenant, user string,
|
||||
// allInfo []colInfo,
|
||||
// backupVersion int,
|
||||
// ) []data.RestoreCollection {
|
||||
// collections := make([]data.RestoreCollection, 0, len(allInfo))
|
||||
|
||||
for _, info := range allInfo {
|
||||
pth := mustToDataLayerPath(
|
||||
service,
|
||||
tenant,
|
||||
user,
|
||||
info.category,
|
||||
info.pathElements,
|
||||
false)
|
||||
// for _, info := range allInfo {
|
||||
// pth := mustToDataLayerPath(
|
||||
// service,
|
||||
// tenant,
|
||||
// user,
|
||||
// info.category,
|
||||
// info.pathElements,
|
||||
// false)
|
||||
|
||||
mc := exchMock.NewCollection(pth, pth, len(info.items))
|
||||
// mc := exchMock.NewCollection(pth, pth, len(info.items))
|
||||
|
||||
for i := 0; i < len(info.items); i++ {
|
||||
mc.Names[i] = info.items[i].name
|
||||
mc.Data[i] = info.items[i].data
|
||||
// for i := 0; i < len(info.items); i++ {
|
||||
// mc.Names[i] = info.items[i].name
|
||||
// mc.Data[i] = info.items[i].data
|
||||
|
||||
// We do not count metadata files against item count
|
||||
if backupVersion > 0 && metadata.HasMetaSuffix(info.items[i].name) &&
|
||||
(service == path.OneDriveService || service == path.SharePointService) {
|
||||
continue
|
||||
}
|
||||
}
|
||||
// // We do not count metadata files against item count
|
||||
// if backupVersion > 0 && metadata.HasMetaSuffix(info.items[i].name) &&
|
||||
// (service == path.OneDriveService || service == path.SharePointService) {
|
||||
// continue
|
||||
// }
|
||||
// }
|
||||
|
||||
c := mockRestoreCollection{Collection: mc, auxItems: map[string]data.Stream{}}
|
||||
// c := mockRestoreCollection{Collection: mc, auxItems: map[string]data.Stream{}}
|
||||
|
||||
for _, aux := range info.auxItems {
|
||||
c.auxItems[aux.name] = &exchMock.Data{
|
||||
ID: aux.name,
|
||||
Reader: io.NopCloser(bytes.NewReader(aux.data)),
|
||||
}
|
||||
}
|
||||
// for _, aux := range info.auxItems {
|
||||
// c.auxItems[aux.name] = &exchMock.Data{
|
||||
// ID: aux.name,
|
||||
// Reader: io.NopCloser(bytes.NewReader(aux.data)),
|
||||
// }
|
||||
// }
|
||||
|
||||
collections = append(collections, c)
|
||||
}
|
||||
// collections = append(collections, c)
|
||||
// }
|
||||
|
||||
return collections
|
||||
}
|
||||
// return collections
|
||||
// }
|
||||
|
||||
func mustToDataLayerPath(
|
||||
service path.ServiceType,
|
||||
tenant, resourceOwner string,
|
||||
category path.CategoryType,
|
||||
elements []string,
|
||||
isItem bool,
|
||||
) path.Path {
|
||||
res, err := path.Build(tenant, resourceOwner, service, category, isItem, elements...)
|
||||
if err != nil {
|
||||
fmt.Println("building path", clues.ToCore(err))
|
||||
}
|
||||
// func mustToDataLayerPath(
|
||||
// service path.ServiceType,
|
||||
// tenant, resourceOwner string,
|
||||
// category path.CategoryType,
|
||||
// elements []string,
|
||||
// isItem bool,
|
||||
// ) path.Path {
|
||||
// res, err := path.Build(tenant, resourceOwner, service, category, isItem, elements...)
|
||||
// if err != nil {
|
||||
// fmt.Println("building path", clues.ToCore(err))
|
||||
// }
|
||||
|
||||
return res
|
||||
}
|
||||
// return res
|
||||
// }
|
||||
|
||||
type colInfo struct {
|
||||
// Elements (in order) for the path representing this collection. Should
|
||||
// only contain elements after the prefix that corso uses for the path. For
|
||||
// example, a collection for the Inbox folder in exchange mail would just be
|
||||
// "Inbox".
|
||||
pathElements []string
|
||||
category path.CategoryType
|
||||
items []itemInfo
|
||||
// auxItems are items that can be retrieved with Fetch but won't be returned
|
||||
// by Items().
|
||||
auxItems []itemInfo
|
||||
}
|
||||
// type colInfo struct {
|
||||
// // Elements (in order) for the path representing this collection. Should
|
||||
// // only contain elements after the prefix that corso uses for the path. For
|
||||
// // example, a collection for the Inbox folder in exchange mail would just be
|
||||
// // "Inbox".
|
||||
// pathElements []string
|
||||
// category path.CategoryType
|
||||
// items []itemInfo
|
||||
// // auxItems are items that can be retrieved with Fetch but won't be returned
|
||||
// // by Items().
|
||||
// auxItems []itemInfo
|
||||
// }
|
||||
|
||||
func newOneDriveCollection(
|
||||
service path.ServiceType,
|
||||
pathElements []string,
|
||||
backupVersion int,
|
||||
) *onedriveCollection {
|
||||
return &onedriveCollection{
|
||||
service: service,
|
||||
pathElements: pathElements,
|
||||
backupVersion: backupVersion,
|
||||
}
|
||||
}
|
||||
// func newOneDriveCollection(
|
||||
// service path.ServiceType,
|
||||
// pathElements []string,
|
||||
// backupVersion int,
|
||||
// ) *onedriveCollection {
|
||||
// return &onedriveCollection{
|
||||
// service: service,
|
||||
// pathElements: pathElements,
|
||||
// backupVersion: backupVersion,
|
||||
// }
|
||||
// }
|
||||
|
||||
func dataForInfo(
|
||||
service path.ServiceType,
|
||||
cols []onedriveColInfo,
|
||||
backupVersion int,
|
||||
) []colInfo {
|
||||
var res []colInfo
|
||||
// func dataForInfo(
|
||||
// service path.ServiceType,
|
||||
// cols []onedriveColInfo,
|
||||
// backupVersion int,
|
||||
// ) []colInfo {
|
||||
// var res []colInfo
|
||||
|
||||
for _, c := range cols {
|
||||
onedriveCol := newOneDriveCollection(service, c.pathElements, backupVersion)
|
||||
// for _, c := range cols {
|
||||
// onedriveCol := newOneDriveCollection(service, c.pathElements, backupVersion)
|
||||
|
||||
for _, f := range c.files {
|
||||
onedriveCol.withFile(f.name, f.data, f.perms)
|
||||
}
|
||||
// for _, f := range c.files {
|
||||
// onedriveCol.withFile(f.name, f.data, f.Perms)
|
||||
// }
|
||||
|
||||
onedriveCol.withPermissions(c.perms)
|
||||
// onedriveCol.withPermissions(c.Perms)
|
||||
|
||||
res = append(res, onedriveCol.collection())
|
||||
}
|
||||
// res = append(res, onedriveCol.collection())
|
||||
// }
|
||||
|
||||
return res
|
||||
}
|
||||
// return res
|
||||
// }
|
||||
|
||||
func (c onedriveCollection) collection() colInfo {
|
||||
cat := path.FilesCategory
|
||||
if c.service == path.SharePointService {
|
||||
cat = path.LibrariesCategory
|
||||
}
|
||||
// func (c onedriveCollection) collection() colInfo {
|
||||
// cat := path.FilesCategory
|
||||
// if c.service == path.SharePointService {
|
||||
// cat = path.LibrariesCategory
|
||||
// }
|
||||
|
||||
return colInfo{
|
||||
pathElements: c.pathElements,
|
||||
category: cat,
|
||||
items: c.items,
|
||||
auxItems: c.aux,
|
||||
}
|
||||
}
|
||||
// return colInfo{
|
||||
// pathElements: c.pathElements,
|
||||
// category: cat,
|
||||
// items: c.items,
|
||||
// auxItems: c.aux,
|
||||
// }
|
||||
// }
|
||||
|
||||
func (c *onedriveCollection) withFile(name string, fileData []byte, perm permData) *onedriveCollection {
|
||||
c.items = append(c.items, onedriveItemWithData(
|
||||
name+metadata.DataFileSuffix,
|
||||
name+metadata.DataFileSuffix,
|
||||
fileData))
|
||||
// func (c *onedriveCollection) withFile(name string, fileData []byte, perm connector.PermData) *onedriveCollection {
|
||||
// c.items = append(c.items, onedriveItemWithData(
|
||||
// name+metadata.DataFileSuffix,
|
||||
// name+metadata.DataFileSuffix,
|
||||
// fileData))
|
||||
|
||||
md := onedriveMetadata(
|
||||
name,
|
||||
name+metadata.MetaFileSuffix,
|
||||
name,
|
||||
perm,
|
||||
true)
|
||||
c.items = append(c.items, md)
|
||||
c.aux = append(c.aux, md)
|
||||
// md := onedriveMetadata(
|
||||
// name,
|
||||
// name+metadata.MetaFileSuffix,
|
||||
// name,
|
||||
// perm,
|
||||
// true)
|
||||
// c.items = append(c.items, md)
|
||||
// c.aux = append(c.aux, md)
|
||||
|
||||
return c
|
||||
}
|
||||
// return c
|
||||
// }
|
||||
|
||||
// withPermissions adds permissions to the folder represented by this
|
||||
// onedriveCollection.
|
||||
func (c *onedriveCollection) withPermissions(perm permData) *onedriveCollection {
|
||||
if c.backupVersion < version.OneDrive4DirIncludesPermissions {
|
||||
return c
|
||||
}
|
||||
// // withPermissions adds permissions to the folder represented by this
|
||||
// // onedriveCollection.
|
||||
// func (c *onedriveCollection) withPermissions(perm connector.PermData) *onedriveCollection {
|
||||
// if c.backupVersion < version.OneDrive4DirIncludesPermissions {
|
||||
// return c
|
||||
// }
|
||||
|
||||
name := c.pathElements[len(c.pathElements)-1]
|
||||
metaName := name
|
||||
// name := c.pathElements[len(c.pathElements)-1]
|
||||
// metaName := name
|
||||
|
||||
if c.backupVersion >= version.OneDrive5DirMetaNoName {
|
||||
// We switched to just .dirmeta for metadata file names.
|
||||
metaName = ""
|
||||
}
|
||||
// if c.backupVersion >= version.OneDrive5DirMetaNoName {
|
||||
// // We switched to just .dirmeta for metadata file names.
|
||||
// metaName = ""
|
||||
// }
|
||||
|
||||
if name == "root:" {
|
||||
return c
|
||||
}
|
||||
// if name == "root:" {
|
||||
// return c
|
||||
// }
|
||||
|
||||
md := onedriveMetadata(
|
||||
name,
|
||||
metaName+metadata.DirMetaFileSuffix,
|
||||
metaName+metadata.DirMetaFileSuffix,
|
||||
perm,
|
||||
true)
|
||||
// md := onedriveMetadata(
|
||||
// name,
|
||||
// metaName+metadata.DirMetaFileSuffix,
|
||||
// metaName+metadata.DirMetaFileSuffix,
|
||||
// perm,
|
||||
// true)
|
||||
|
||||
c.items = append(c.items, md)
|
||||
c.aux = append(c.aux, md)
|
||||
// c.items = append(c.items, md)
|
||||
// c.aux = append(c.aux, md)
|
||||
|
||||
return c
|
||||
}
|
||||
// return c
|
||||
// }
|
||||
|
||||
type oneDriveData struct {
|
||||
FileName string `json:"fileName,omitempty"`
|
||||
Data []byte `json:"data,omitempty"`
|
||||
}
|
||||
// type oneDriveData struct {
|
||||
// FileName string `json:"fileName,omitempty"`
|
||||
// Data []byte `json:"data,omitempty"`
|
||||
// }
|
||||
|
||||
func onedriveItemWithData(
|
||||
name, lookupKey string,
|
||||
fileData []byte,
|
||||
) itemInfo {
|
||||
content := oneDriveData{
|
||||
FileName: lookupKey,
|
||||
Data: fileData,
|
||||
}
|
||||
// func onedriveItemWithData(
|
||||
// name, lookupKey string,
|
||||
// fileData []byte,
|
||||
// ) itemInfo {
|
||||
// content := oneDriveData{
|
||||
// FileName: lookupKey,
|
||||
// Data: fileData,
|
||||
// }
|
||||
|
||||
serialized, _ := json.Marshal(content)
|
||||
// serialized, _ := json.Marshal(content)
|
||||
|
||||
return itemInfo{
|
||||
name: name,
|
||||
data: serialized,
|
||||
lookupKey: lookupKey,
|
||||
}
|
||||
}
|
||||
// return itemInfo{
|
||||
// name: name,
|
||||
// Data: serialized,
|
||||
// lookupKey: lookupKey,
|
||||
// }
|
||||
// }
|
||||
|
||||
func onedriveMetadata(
|
||||
fileName, itemID, lookupKey string,
|
||||
perm permData,
|
||||
permUseID bool,
|
||||
) itemInfo {
|
||||
meta := getMetadata(fileName, perm, permUseID)
|
||||
// func onedriveMetadata(
|
||||
// fileName, itemID, lookupKey string,
|
||||
// perm connector.PermData,
|
||||
// permUseID bool,
|
||||
// ) itemInfo {
|
||||
// meta := getMetadata(fileName, perm, permUseID)
|
||||
|
||||
metaJSON, err := json.Marshal(meta)
|
||||
if err != nil {
|
||||
fmt.Println("marshalling metadata", clues.ToCore(err))
|
||||
}
|
||||
// metaJSON, err := json.Marshal(meta)
|
||||
// if err != nil {
|
||||
// fmt.Println("marshalling metadata", clues.ToCore(err))
|
||||
// }
|
||||
|
||||
return itemInfo{
|
||||
name: itemID,
|
||||
data: metaJSON,
|
||||
lookupKey: lookupKey,
|
||||
}
|
||||
}
|
||||
// return itemInfo{
|
||||
// name: itemID,
|
||||
// Data: metaJSON,
|
||||
// lookupKey: lookupKey,
|
||||
// }
|
||||
// }
|
||||
|
||||
func getMetadata(fileName string, perm permData, permUseID bool) onedrive.Metadata {
|
||||
if len(perm.user) == 0 || len(perm.roles) == 0 ||
|
||||
perm.sharingMode != onedrive.SharingModeCustom {
|
||||
return onedrive.Metadata{
|
||||
FileName: fileName,
|
||||
SharingMode: perm.sharingMode,
|
||||
}
|
||||
}
|
||||
// func getMetadata(fileName string, perm connector.PermData, permUseID bool) onedrive.Metadata {
|
||||
// if len(perm.user) == 0 || len(perm.Roles) == 0 ||
|
||||
// perm.sharingMode != onedrive.SharingModeCustom {
|
||||
// return onedrive.Metadata{
|
||||
// FileName: fileName,
|
||||
// SharingMode: perm.sharingMode,
|
||||
// }
|
||||
// }
|
||||
|
||||
// For permissions, the ID is usually the same for a given user/role combo
|
||||
// unless the permission is deleted and re-added, but we generate a fresh one
|
||||
// here because only two test users are available and one is already taken.
|
||||
id := uuid.NewString()
|
||||
uperm := onedrive.UserPermission{ID: id, Roles: perm.roles}
|
||||
// // In case of permissions, the id will usually be same for same
|
||||
// // user/role combo unless deleted and readded, but we have to do
|
||||
// // this as we only have two users of which one is already taken.
|
||||
// id := uuid.NewString()
|
||||
// uperm := onedrive.UserPermission{ID: id, Roles: perm.Roles}
|
||||
|
||||
if permUseID {
|
||||
uperm.EntityID = perm.entityID
|
||||
} else {
|
||||
uperm.Email = perm.user
|
||||
}
|
||||
// if permUseID {
|
||||
// uperm.EntityID = perm.EntityID
|
||||
// } else {
|
||||
// uperm.Email = perm.user
|
||||
// }
|
||||
|
||||
meta := onedrive.Metadata{
|
||||
FileName: fileName,
|
||||
Permissions: []onedrive.UserPermission{uperm},
|
||||
}
|
||||
// meta := onedrive.Metadata{
|
||||
// FileName: fileName,
|
||||
// Permissions: []onedrive.UserPermission{uperm},
|
||||
// }
|
||||
|
||||
return meta
|
||||
}
|
||||
// return meta
|
||||
// }
|
||||
|
||||
@ -134,6 +134,11 @@ func (c Mail) GetItem(
|
||||
immutableIDs bool,
|
||||
errs *fault.Bus,
|
||||
) (serialization.Parsable, *details.ExchangeInfo, error) {
|
||||
var (
|
||||
size int64
|
||||
attachSize int32
|
||||
mailBody models.ItemBodyable
|
||||
)
|
||||
// Will need adjusting if attachments start allowing paging.
|
||||
headers := buildPreferHeaders(false, immutableIDs)
|
||||
itemOpts := &users.ItemMessagesMessageItemRequestBuilderGetRequestConfiguration{
|
||||
@ -145,8 +150,16 @@ func (c Mail) GetItem(
|
||||
return nil, nil, graph.Stack(ctx, err)
|
||||
}
|
||||
|
||||
if !ptr.Val(mail.GetHasAttachments()) && !HasAttachments(mail.GetBody()) {
|
||||
return mail, MailInfo(mail), nil
|
||||
mailBody = mail.GetBody()
|
||||
if !ptr.Val(mail.GetHasAttachments()) && !HasAttachments(mailBody) {
|
||||
return mail, MailInfo(mail, 0), nil
|
||||
}
|
||||
|
||||
if mailBody != nil {
|
||||
bodySize := ptr.Val(mailBody.GetContent())
|
||||
if bodySize != "" {
|
||||
size = int64(len(bodySize))
|
||||
}
|
||||
}
|
||||
|
||||
options := &users.ItemMessagesItemAttachmentsRequestBuilderGetRequestConfiguration{
|
||||
@ -163,8 +176,14 @@ func (c Mail) GetItem(
|
||||
Attachments().
|
||||
Get(ctx, options)
|
||||
if err == nil {
|
||||
for _, a := range attached.GetValue() {
|
||||
attachSize = ptr.Val(a.GetSize())
|
||||
size = size + int64(attachSize)
|
||||
}
|
||||
|
||||
mail.SetAttachments(attached.GetValue())
|
||||
return mail, MailInfo(mail), nil
|
||||
|
||||
return mail, MailInfo(mail, size), nil
|
||||
}
|
||||
|
||||
// A failure can be caused by having a lot of attachments as
|
||||
@ -214,11 +233,14 @@ func (c Mail) GetItem(
|
||||
}
|
||||
|
||||
atts = append(atts, att)
|
||||
|
||||
attachSize = ptr.Val(a.GetSize())
|
||||
size = size + int64(attachSize)
|
||||
}
|
||||
|
||||
mail.SetAttachments(atts)
|
||||
|
||||
return mail, MailInfo(mail), nil
|
||||
return mail, MailInfo(mail, size), nil
|
||||
}
|
||||
|
||||
// EnumerateContainers iterates through all of the users current
|
||||
@ -419,7 +441,7 @@ func (c Mail) Serialize(
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
func MailInfo(msg models.Messageable) *details.ExchangeInfo {
|
||||
func MailInfo(msg models.Messageable, size int64) *details.ExchangeInfo {
|
||||
var (
|
||||
sender = UnwrapEmailAddress(msg.GetSender())
|
||||
subject = ptr.Val(msg.GetSubject())
|
||||
@ -444,6 +466,7 @@ func MailInfo(msg models.Messageable) *details.ExchangeInfo {
|
||||
Recipient: recipients,
|
||||
Subject: subject,
|
||||
Received: received,
|
||||
Size: size,
|
||||
Created: created,
|
||||
Modified: ptr.OrNow(msg.GetLastModifiedDateTime()),
|
||||
}
|
||||
|
||||
@ -152,7 +152,7 @@ func (suite *MailAPIUnitSuite) TestMailInfo() {
|
||||
for _, tt := range tests {
|
||||
suite.Run(tt.name, func() {
|
||||
msg, expected := tt.msgAndRP()
|
||||
assert.Equal(suite.T(), expected, api.MailInfo(msg))
|
||||
assert.Equal(suite.T(), expected, api.MailInfo(msg, 0))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@ -260,7 +260,12 @@ func (col *Collection) streamItems(ctx context.Context, errs *fault.Bus) {
|
||||
return
|
||||
}
|
||||
|
||||
info.Size = int64(len(data))
|
||||
// For mail, the size is calculated as the size of the body content plus the size of the attachments;
|
||||
// in all other cases the size is the item's total serialized size.
|
||||
if info.Size <= 0 {
|
||||
info.Size = int64(len(data))
|
||||
}
|
||||
|
||||
info.ParentPath = col.locationPath.String()
|
||||
|
||||
col.data <- &Stream{
|
||||
|
||||
@ -218,8 +218,7 @@ func RestoreMailMessage(
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info := api.MailInfo(clone)
|
||||
info.Size = int64(len(bits))
|
||||
info := api.MailInfo(clone, int64(len(bits)))
|
||||
|
||||
return info, nil
|
||||
}
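
Taken together, the mail hunks above change how an email's size is reported: GetItem sums the body content length and every attachment's size and passes the total into MailInfo, streamItems only falls back to the serialized length when no size was set upstream, and RestoreMailMessage passes the restored byte count directly. Below is a minimal, standalone sketch of that accounting; the struct types are simplified stand-ins for the msgraph-sdk models, not the real API.

```go
package main

import "fmt"

// Simplified stand-ins for the graph SDK message/attachment models used in
// the diff above; field names here are illustrative only.
type attachment struct{ size int32 }

type message struct {
	body        string
	attachments []attachment
}

// totalSize mirrors the accounting added to GetItem: the length of the body
// content plus the size of every attachment, carried as an int64.
func totalSize(m message) int64 {
	size := int64(len(m.body))
	for _, a := range m.attachments {
		size += int64(a.size)
	}

	return size
}

func main() {
	m := message{
		body:        "hello world",
		attachments: []attachment{{size: 1024}, {size: 2048}},
	}

	size := totalSize(m)

	// streamItems only overrides the size with the serialized payload length
	// when nothing was set upstream, matching the `if info.Size <= 0` guard.
	serializedLen := int64(4096) // hypothetical serialized item length
	if size <= 0 {
		size = serializedLen
	}

	fmt.Println("reported size:", size) // 11 + 1024 + 2048 = 3083
}
```
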
|
||||
|
||||
@ -1,7 +1,6 @@
|
||||
package connector
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"io"
|
||||
@ -17,33 +16,17 @@ import (
|
||||
"golang.org/x/exp/slices"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
|
||||
"github.com/alcionai/corso/src/internal/connector/onedrive"
|
||||
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
|
||||
"github.com/alcionai/corso/src/internal/connector/support"
|
||||
"github.com/alcionai/corso/src/internal/data"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/pkg/account"
|
||||
"github.com/alcionai/corso/src/pkg/control"
|
||||
"github.com/alcionai/corso/src/pkg/fault"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
)
|
||||
|
||||
func mustToDataLayerPath(
|
||||
t *testing.T,
|
||||
service path.ServiceType,
|
||||
tenant, resourceOwner string,
|
||||
category path.CategoryType,
|
||||
elements []string,
|
||||
isItem bool,
|
||||
) path.Path {
|
||||
res, err := path.Build(tenant, resourceOwner, service, category, isItem, elements...)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
func testElementsMatch[T any](
|
||||
t *testing.T,
|
||||
expected []T,
|
||||
@ -108,52 +91,52 @@ func testElementsMatch[T any](
|
||||
)
|
||||
}
|
||||
|
||||
type configInfo struct {
|
||||
acct account.Account
|
||||
opts control.Options
|
||||
resource Resource
|
||||
service path.ServiceType
|
||||
tenant string
|
||||
resourceOwners []string
|
||||
dest control.RestoreDestination
|
||||
}
|
||||
// type configInfo struct {
|
||||
// acct account.Account
|
||||
// opts control.Options
|
||||
// resource Resource
|
||||
// service path.ServiceType
|
||||
// tenant string
|
||||
// resourceOwners []string
|
||||
// dest control.RestoreDestination
|
||||
// }
|
||||
|
||||
type itemInfo struct {
|
||||
// lookupKey is a string that can be used to find this data from a set of
|
||||
// other data in the same collection. This key should be something that will
|
||||
// be the same before and after restoring the item in M365 and may not be
|
||||
// the M365 ID. When restoring items out of place, the item is assigned a
|
||||
// new ID making it unsuitable for a lookup key.
|
||||
lookupKey string
|
||||
name string
|
||||
data []byte
|
||||
}
|
||||
// type itemInfo struct {
|
||||
// // lookupKey is a string that can be used to find this data from a set of
|
||||
// // other data in the same collection. This key should be something that will
|
||||
// // be the same before and after restoring the item in M365 and may not be
|
||||
// // the M365 ID. When restoring items out of place, the item is assigned a
|
||||
// // new ID making it unsuitable for a lookup key.
|
||||
// lookupKey string
|
||||
// name string
|
||||
// data []byte
|
||||
// }
|
||||
|
||||
type colInfo struct {
|
||||
// Elements (in order) for the path representing this collection. Should
|
||||
// only contain elements after the prefix that corso uses for the path. For
|
||||
// example, a collection for the Inbox folder in exchange mail would just be
|
||||
// "Inbox".
|
||||
pathElements []string
|
||||
category path.CategoryType
|
||||
items []itemInfo
|
||||
// auxItems are items that can be retrieved with Fetch but won't be returned
|
||||
// by Items(). These files do not directly participate in comparisons at the
|
||||
// end of a test.
|
||||
auxItems []itemInfo
|
||||
}
|
||||
// type colInfo struct {
|
||||
// // Elements (in order) for the path representing this collection. Should
|
||||
// // only contain elements after the prefix that corso uses for the path. For
|
||||
// // example, a collection for the Inbox folder in exchange mail would just be
|
||||
// // "Inbox".
|
||||
// pathElements []string
|
||||
// category path.CategoryType
|
||||
// items []itemInfo
|
||||
// // auxItems are items that can be retrieved with Fetch but won't be returned
|
||||
// // by Items(). These files do not directly participate in comparisons at the
|
||||
// // end of a test.
|
||||
// auxItems []itemInfo
|
||||
// }
|
||||
|
||||
type restoreBackupInfo struct {
|
||||
name string
|
||||
service path.ServiceType
|
||||
collections []colInfo
|
||||
collections []ColInfo
|
||||
resource Resource
|
||||
}
|
||||
|
||||
type restoreBackupInfoMultiVersion struct {
|
||||
service path.ServiceType
|
||||
collectionsLatest []colInfo
|
||||
collectionsPrevious []colInfo
|
||||
collectionsLatest []ColInfo
|
||||
collectionsPrevious []ColInfo
|
||||
resource Resource
|
||||
backupVersion int
|
||||
}
|
||||
@ -1127,127 +1110,6 @@ func backupSelectorForExpected(
|
||||
return selectors.Selector{}
|
||||
}
|
||||
|
||||
// backupOutputPathFromRestore returns a path.Path denoting the location in
|
||||
// kopia the data will be placed at. The location is a data-type specific
|
||||
// combination of the location the data was recently restored to and where the
|
||||
// data was originally in the hierarchy.
|
||||
func backupOutputPathFromRestore(
|
||||
t *testing.T,
|
||||
restoreDest control.RestoreDestination,
|
||||
inputPath path.Path,
|
||||
) path.Path {
|
||||
base := []string{restoreDest.ContainerName}
|
||||
|
||||
// OneDrive has leading information like the drive ID.
|
||||
if inputPath.Service() == path.OneDriveService || inputPath.Service() == path.SharePointService {
|
||||
folders := inputPath.Folders()
|
||||
base = append(append([]string{}, folders[:3]...), restoreDest.ContainerName)
|
||||
|
||||
if len(folders) > 3 {
|
||||
base = append(base, folders[3:]...)
|
||||
}
|
||||
}
|
||||
|
||||
if inputPath.Service() == path.ExchangeService && inputPath.Category() == path.EmailCategory {
|
||||
base = append(base, inputPath.Folders()...)
|
||||
}
|
||||
|
||||
return mustToDataLayerPath(
|
||||
t,
|
||||
inputPath.Service(),
|
||||
inputPath.Tenant(),
|
||||
inputPath.ResourceOwner(),
|
||||
inputPath.Category(),
|
||||
base,
|
||||
false,
|
||||
)
|
||||
}
|
||||
|
||||
// TODO(ashmrtn): Make this an actual mock class that can be used in other
|
||||
// packages.
|
||||
type mockRestoreCollection struct {
|
||||
data.Collection
|
||||
auxItems map[string]data.Stream
|
||||
}
|
||||
|
||||
func (rc mockRestoreCollection) Fetch(
|
||||
ctx context.Context,
|
||||
name string,
|
||||
) (data.Stream, error) {
|
||||
res := rc.auxItems[name]
|
||||
if res == nil {
|
||||
return nil, data.ErrNotFound
|
||||
}
|
||||
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func collectionsForInfo(
|
||||
t *testing.T,
|
||||
service path.ServiceType,
|
||||
tenant, user string,
|
||||
dest control.RestoreDestination,
|
||||
allInfo []colInfo,
|
||||
backupVersion int,
|
||||
) (int, int, []data.RestoreCollection, map[string]map[string][]byte) {
|
||||
var (
|
||||
collections = make([]data.RestoreCollection, 0, len(allInfo))
|
||||
expectedData = make(map[string]map[string][]byte, len(allInfo))
|
||||
totalItems = 0
|
||||
kopiaEntries = 0
|
||||
)
|
||||
|
||||
for _, info := range allInfo {
|
||||
pth := mustToDataLayerPath(
|
||||
t,
|
||||
service,
|
||||
tenant,
|
||||
user,
|
||||
info.category,
|
||||
info.pathElements,
|
||||
false)
|
||||
|
||||
mc := exchMock.NewCollection(pth, pth, len(info.items))
|
||||
baseDestPath := backupOutputPathFromRestore(t, dest, pth)
|
||||
|
||||
baseExpected := expectedData[baseDestPath.String()]
|
||||
if baseExpected == nil {
|
||||
expectedData[baseDestPath.String()] = make(map[string][]byte, len(info.items))
|
||||
baseExpected = expectedData[baseDestPath.String()]
|
||||
}
|
||||
|
||||
for i := 0; i < len(info.items); i++ {
|
||||
mc.Names[i] = info.items[i].name
|
||||
mc.Data[i] = info.items[i].data
|
||||
|
||||
baseExpected[info.items[i].lookupKey] = info.items[i].data
|
||||
|
||||
// We do not count metadata files against item count
|
||||
if backupVersion > 0 &&
|
||||
(service == path.OneDriveService || service == path.SharePointService) &&
|
||||
metadata.HasMetaSuffix(info.items[i].name) {
|
||||
continue
|
||||
}
|
||||
|
||||
totalItems++
|
||||
}
|
||||
|
||||
c := mockRestoreCollection{Collection: mc, auxItems: map[string]data.Stream{}}
|
||||
|
||||
for _, aux := range info.auxItems {
|
||||
c.auxItems[aux.name] = &exchMock.Data{
|
||||
ID: aux.name,
|
||||
Reader: io.NopCloser(bytes.NewReader(aux.data)),
|
||||
}
|
||||
}
|
||||
|
||||
collections = append(collections, c)
|
||||
kopiaEntries += len(info.items)
|
||||
}
|
||||
|
||||
return totalItems, kopiaEntries, collections, expectedData
|
||||
}
|
||||
|
||||
func getSelectorWith(
|
||||
t *testing.T,
|
||||
service path.ServiceType,
|
||||
|
||||
(File diff suppressed because it is too large.)

src/internal/connector/graph_connector_onedrive_test_helper.go (new file, 361 lines)
@ -0,0 +1,361 @@
|
||||
package connector
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"testing"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/alcionai/corso/src/internal/connector/onedrive"
|
||||
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
|
||||
"github.com/alcionai/corso/src/internal/data"
|
||||
"github.com/alcionai/corso/src/internal/version"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/google/uuid"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"golang.org/x/exp/maps"
|
||||
)
|
||||
|
||||
// For this version and any later one (inclusive), we expect permissions
|
||||
// to use IDs instead of emails.
|
||||
const versionPermissionSwitchedToID = version.OneDrive4DirIncludesPermissions
|
||||
|
||||
var rootFolder = "root:"
|
||||
|
||||
type PermData struct {
|
||||
User string // user is only for older versions
|
||||
EntityID string
|
||||
Roles []string
|
||||
SharingMode onedrive.SharingMode
|
||||
}
|
||||
|
||||
type ItemData struct {
|
||||
Name string
|
||||
Data []byte
|
||||
Perms PermData
|
||||
}
|
||||
|
||||
type OnedriveColInfo struct {
|
||||
PathElements []string
|
||||
Perms PermData
|
||||
Files []ItemData
|
||||
Folders []ItemData
|
||||
}
|
||||
|
||||
type ColInfo struct {
|
||||
// Elements (in order) for the path representing this collection. Should
|
||||
// only contain elements after the prefix that corso uses for the path. For
|
||||
// example, a collection for the Inbox folder in exchange mail would just be
|
||||
// "Inbox".
|
||||
PathElements []string
|
||||
Category path.CategoryType
|
||||
Items []itemInfo
|
||||
// auxItems are items that can be retrieved with Fetch but won't be returned
|
||||
// by Items(). These files do not directly participate in comparisons at the
|
||||
// end of a test.
|
||||
AuxItems []itemInfo
|
||||
}
|
||||
|
||||
type itemInfo struct {
|
||||
// lookupKey is a string that can be used to find this data from a set of
|
||||
// other data in the same collection. This key should be something that will
|
||||
// be the same before and after restoring the item in M365 and may not be
|
||||
// the M365 ID. When restoring items out of place, the item is assigned a
|
||||
// new ID making it unsuitable for a lookup key.
|
||||
lookupKey string
|
||||
name string
|
||||
data []byte
|
||||
}
|
||||
|
||||
type onedriveCollection struct {
|
||||
service path.ServiceType
|
||||
pathElements []string
|
||||
items []itemInfo
|
||||
aux []itemInfo
|
||||
backupVersion int
|
||||
t *testing.T
|
||||
}
|
||||
|
||||
type testOneDriveData struct {
|
||||
FileName string `json:"fileName,omitempty"`
|
||||
Data []byte `json:"data,omitempty"`
|
||||
}
|
||||
|
||||
func (c onedriveCollection) collection() ColInfo {
|
||||
cat := path.FilesCategory
|
||||
if c.service == path.SharePointService {
|
||||
cat = path.LibrariesCategory
|
||||
}
|
||||
|
||||
return ColInfo{
|
||||
PathElements: c.pathElements,
|
||||
Category: cat,
|
||||
Items: c.items,
|
||||
AuxItems: c.aux,
|
||||
}
|
||||
}
|
||||
|
||||
func getMetadata(fileName string, perm PermData, permUseID bool) onedrive.Metadata {
|
||||
if len(perm.User) == 0 || len(perm.Roles) == 0 ||
|
||||
perm.SharingMode != onedrive.SharingModeCustom {
|
||||
return onedrive.Metadata{
|
||||
FileName: fileName,
|
||||
SharingMode: perm.SharingMode,
|
||||
}
|
||||
}
|
||||
|
||||
// For permissions, the ID is usually the same for a given user/role combo
|
||||
// unless the permission is deleted and re-added, but we generate a fresh one
|
||||
// here because only two test users are available and one is already taken.
|
||||
id := uuid.NewString()
|
||||
uperm := onedrive.UserPermission{ID: id, Roles: perm.Roles}
|
||||
|
||||
if permUseID {
|
||||
uperm.EntityID = perm.EntityID
|
||||
} else {
|
||||
uperm.Email = perm.User
|
||||
}
|
||||
|
||||
testMeta := onedrive.Metadata{
|
||||
FileName: fileName,
|
||||
Permissions: []onedrive.UserPermission{uperm},
|
||||
}
|
||||
|
||||
return testMeta
|
||||
}
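
As a quick reference, getMetadata above produces one of two shapes: when no user or roles are given (or the sharing mode is not custom) the metadata carries no permission entry, and otherwise a single permission is emitted with either the entity ID (newer backup versions) or the email (older ones). A small standalone illustration of that branching follows; the structs are simplified stand-ins for onedrive.Metadata and onedrive.UserPermission, not the real types.

```go
package main

import "fmt"

// Simplified stand-ins for onedrive.UserPermission / onedrive.Metadata.
type userPermission struct {
	EntityID string
	Email    string
	Roles    []string
}

type fileMetadata struct {
	FileName    string
	Permissions []userPermission
}

// buildMeta mirrors the branch in getMetadata: no user or roles means no
// permission entry; otherwise either the ID or the email is populated,
// depending on which the backup version supports.
func buildMeta(fileName, user, entityID string, roles []string, useID bool) fileMetadata {
	if user == "" || len(roles) == 0 {
		return fileMetadata{FileName: fileName}
	}

	p := userPermission{Roles: roles}
	if useID {
		p.EntityID = entityID
	} else {
		p.Email = user
	}

	return fileMetadata{FileName: fileName, Permissions: []userPermission{p}}
}

func main() {
	// Newer versions reference the permission holder by ID.
	fmt.Printf("%+v\n", buildMeta("a.txt", "user@example.com", "id-123", []string{"write"}, true))
	// Older versions fall back to the email address.
	fmt.Printf("%+v\n", buildMeta("a.txt", "user@example.com", "id-123", []string{"write"}, false))
	// No custom permissions at all.
	fmt.Printf("%+v\n", buildMeta("a.txt", "", "", nil, true))
}
```
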
|
||||
|
||||
func onedriveItemWithData(
|
||||
t *testing.T,
|
||||
name, lookupKey string,
|
||||
fileData []byte,
|
||||
) itemInfo {
|
||||
t.Helper()
|
||||
|
||||
content := testOneDriveData{
|
||||
FileName: lookupKey,
|
||||
Data: fileData,
|
||||
}
|
||||
|
||||
serialized, err := json.Marshal(content)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
return itemInfo{
|
||||
name: name,
|
||||
data: serialized,
|
||||
lookupKey: lookupKey,
|
||||
}
|
||||
}
|
||||
|
||||
func onedriveMetadata(
|
||||
t *testing.T,
|
||||
fileName, itemID, lookupKey string,
|
||||
perm PermData,
|
||||
permUseID bool,
|
||||
) itemInfo {
|
||||
t.Helper()
|
||||
|
||||
testMeta := getMetadata(fileName, perm, permUseID)
|
||||
|
||||
testMetaJSON, err := json.Marshal(testMeta)
|
||||
require.NoError(t, err, "marshalling metadata", clues.ToCore(err))
|
||||
|
||||
return itemInfo{
|
||||
name: itemID,
|
||||
data: testMetaJSON,
|
||||
lookupKey: lookupKey,
|
||||
}
|
||||
}
|
||||
|
||||
func (c *onedriveCollection) withFile(name string, fileData []byte, perm PermData) *onedriveCollection {
|
||||
switch c.backupVersion {
|
||||
case 0:
|
||||
// Lookups will occur using the most recent version of things so we need
|
||||
// the embedded file name to match that.
|
||||
c.items = append(c.items, onedriveItemWithData(
|
||||
c.t,
|
||||
name,
|
||||
name+metadata.DataFileSuffix,
|
||||
fileData))
|
||||
|
||||
// v1-5, early metadata design
|
||||
case version.OneDrive1DataAndMetaFiles, 2, version.OneDrive3IsMetaMarker,
|
||||
version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName:
|
||||
c.items = append(c.items, onedriveItemWithData(
|
||||
c.t,
|
||||
name+metadata.DataFileSuffix,
|
||||
name+metadata.DataFileSuffix,
|
||||
fileData))
|
||||
|
||||
md := onedriveMetadata(
|
||||
c.t,
|
||||
"",
|
||||
name+metadata.MetaFileSuffix,
|
||||
name+metadata.MetaFileSuffix,
|
||||
perm,
|
||||
c.backupVersion >= versionPermissionSwitchedToID)
|
||||
c.items = append(c.items, md)
|
||||
c.aux = append(c.aux, md)
|
||||
|
||||
// v6+ current metadata design
|
||||
case version.OneDrive6NameInMeta, version.OneDrive7LocationRef, version.All8MigrateUserPNToID:
|
||||
c.items = append(c.items, onedriveItemWithData(
|
||||
c.t,
|
||||
name+metadata.DataFileSuffix,
|
||||
name+metadata.DataFileSuffix,
|
||||
fileData))
|
||||
|
||||
md := onedriveMetadata(
|
||||
c.t,
|
||||
name,
|
||||
name+metadata.MetaFileSuffix,
|
||||
name,
|
||||
perm,
|
||||
c.backupVersion >= versionPermissionSwitchedToID)
|
||||
c.items = append(c.items, md)
|
||||
c.aux = append(c.aux, md)
|
||||
|
||||
default:
|
||||
assert.FailNowf(c.t, "bad backup version", "version %d", c.backupVersion)
|
||||
}
|
||||
|
||||
return c
|
||||
}
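
withFile above emits a different item layout per backup-format version: version 0 stores a single item under the bare file name, the early metadata versions (roughly 1 through 5) add a separate metadata item whose lookup key is the metadata file name, and version 6 and later keep the bare name as the lookup key because the real name now lives inside the metadata. The sketch below maps a file name onto those layouts; the ".data"/".meta" suffix strings and the plain-int version cutoffs are assumptions for illustration, since the real constants come from the metadata and version packages.

```go
package main

import "fmt"

// Assumed suffix values for illustration; the real constants live in the
// onedrive metadata package referenced by the diff.
const (
	dataSuffix = ".data"
	metaSuffix = ".meta"
)

// layout lists the item names a single file produces for a given
// backup-format version, mirroring the switch in withFile.
type layout struct {
	dataItemName  string
	metaItemName  string
	metaLookupKey string
}

func layoutFor(backupVersion int, name string) layout {
	switch {
	case backupVersion == 0:
		// Original format: one item, no separate metadata file.
		return layout{dataItemName: name}

	case backupVersion <= 5:
		// Early metadata design: lookups key off the .meta item name because
		// the metadata does not carry the original file name.
		return layout{
			dataItemName:  name + dataSuffix,
			metaItemName:  name + metaSuffix,
			metaLookupKey: name + metaSuffix,
		}

	default:
		// v6+: the original name is embedded in the metadata, so the bare
		// name becomes the lookup key.
		return layout{
			dataItemName:  name + dataSuffix,
			metaItemName:  name + metaSuffix,
			metaLookupKey: name,
		}
	}
}

func main() {
	for _, v := range []int{0, 3, 6} {
		fmt.Printf("version %d: %+v\n", v, layoutFor(v, "report.docx"))
	}
}
```
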
|
||||
|
||||
func (c *onedriveCollection) withFolder(name string, perm PermData) *onedriveCollection {
|
||||
switch c.backupVersion {
|
||||
case 0, version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName,
|
||||
version.OneDrive6NameInMeta, version.OneDrive7LocationRef, version.All8MigrateUserPNToID:
|
||||
return c
|
||||
|
||||
case version.OneDrive1DataAndMetaFiles, 2, version.OneDrive3IsMetaMarker:
|
||||
c.items = append(
|
||||
c.items,
|
||||
onedriveMetadata(
|
||||
c.t,
|
||||
"",
|
||||
name+metadata.DirMetaFileSuffix,
|
||||
name+metadata.DirMetaFileSuffix,
|
||||
perm,
|
||||
c.backupVersion >= versionPermissionSwitchedToID))
|
||||
|
||||
default:
|
||||
assert.FailNowf(c.t, "bad backup version", "version %d", c.backupVersion)
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
// withPermissions adds permissions to the folder represented by this
|
||||
// onedriveCollection.
|
||||
func (c *onedriveCollection) withPermissions(perm PermData) *onedriveCollection {
|
||||
// These versions didn't store permissions for the folder or didn't store them
|
||||
// in the folder's collection.
|
||||
if c.backupVersion < version.OneDrive4DirIncludesPermissions {
|
||||
return c
|
||||
}
|
||||
|
||||
name := c.pathElements[len(c.pathElements)-1]
|
||||
metaName := name
|
||||
|
||||
if c.backupVersion >= version.OneDrive5DirMetaNoName {
|
||||
// We switched to just .dirmeta for metadata file names.
|
||||
metaName = ""
|
||||
}
|
||||
|
||||
if name == rootFolder {
|
||||
return c
|
||||
}
|
||||
|
||||
md := onedriveMetadata(
|
||||
c.t,
|
||||
name,
|
||||
metaName+metadata.DirMetaFileSuffix,
|
||||
metaName+metadata.DirMetaFileSuffix,
|
||||
perm,
|
||||
c.backupVersion >= versionPermissionSwitchedToID)
|
||||
|
||||
c.items = append(c.items, md)
|
||||
c.aux = append(c.aux, md)
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
func newOneDriveCollection(
|
||||
t *testing.T,
|
||||
service path.ServiceType,
|
||||
pathElements []string,
|
||||
backupVersion int,
|
||||
) *onedriveCollection {
|
||||
return &onedriveCollection{
|
||||
service: service,
|
||||
pathElements: pathElements,
|
||||
backupVersion: backupVersion,
|
||||
}
|
||||
}
|
||||
|
||||
func DataForInfo(
|
||||
t *testing.T,
|
||||
service path.ServiceType,
|
||||
cols []OnedriveColInfo,
|
||||
backupVersion int,
|
||||
) []ColInfo {
|
||||
var res []ColInfo
|
||||
|
||||
for _, c := range cols {
|
||||
onedriveCol := newOneDriveCollection(t, service, c.PathElements, backupVersion)
|
||||
|
||||
for _, f := range c.Files {
|
||||
onedriveCol.withFile(f.Name, f.Data, f.Perms)
|
||||
}
|
||||
|
||||
for _, d := range c.Folders {
|
||||
onedriveCol.withFolder(d.Name, d.Perms)
|
||||
}
|
||||
|
||||
onedriveCol.withPermissions(c.Perms)
|
||||
|
||||
res = append(res, onedriveCol.collection())
|
||||
}
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
//-------------------------------------------------------------
|
||||
// Exchange Functions
|
||||
//-------------------------------------------------------------
|
||||
|
||||
func GetCollectionsAndExpected(
|
||||
t *testing.T,
|
||||
config ConfigInfo,
|
||||
testCollections []ColInfo,
|
||||
backupVersion int,
|
||||
) (int, int, []data.RestoreCollection, map[string]map[string][]byte) {
|
||||
t.Helper()
|
||||
|
||||
var (
|
||||
collections []data.RestoreCollection
|
||||
expectedData = map[string]map[string][]byte{}
|
||||
totalItems = 0
|
||||
totalKopiaItems = 0
|
||||
)
|
||||
|
||||
for _, owner := range config.ResourceOwners {
|
||||
numItems, kopiaItems, ownerCollections, userExpectedData := collectionsForInfo(
|
||||
t,
|
||||
config.Service,
|
||||
config.Tenant,
|
||||
owner,
|
||||
config.Dest,
|
||||
testCollections,
|
||||
backupVersion,
|
||||
)
|
||||
|
||||
collections = append(collections, ownerCollections...)
|
||||
totalItems += numItems
|
||||
totalKopiaItems += kopiaItems
|
||||
|
||||
maps.Copy(expectedData, userExpectedData)
|
||||
}
|
||||
|
||||
return totalItems, totalKopiaItems, collections, expectedData
|
||||
}
|
||||
@ -11,7 +11,6 @@ import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
"golang.org/x/exp/maps"
|
||||
|
||||
inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
|
||||
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
|
||||
@ -403,70 +402,30 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
|
||||
}
|
||||
}
|
||||
|
||||
//-------------------------------------------------------------
|
||||
// Exchange Functions
|
||||
//-------------------------------------------------------------
|
||||
|
||||
func getCollectionsAndExpected(
|
||||
t *testing.T,
|
||||
config configInfo,
|
||||
testCollections []colInfo,
|
||||
backupVersion int,
|
||||
) (int, int, []data.RestoreCollection, map[string]map[string][]byte) {
|
||||
t.Helper()
|
||||
|
||||
var (
|
||||
collections []data.RestoreCollection
|
||||
expectedData = map[string]map[string][]byte{}
|
||||
totalItems = 0
|
||||
totalKopiaItems = 0
|
||||
)
|
||||
|
||||
for _, owner := range config.resourceOwners {
|
||||
numItems, kopiaItems, ownerCollections, userExpectedData := collectionsForInfo(
|
||||
t,
|
||||
config.service,
|
||||
config.tenant,
|
||||
owner,
|
||||
config.dest,
|
||||
testCollections,
|
||||
backupVersion,
|
||||
)
|
||||
|
||||
collections = append(collections, ownerCollections...)
|
||||
totalItems += numItems
|
||||
totalKopiaItems += kopiaItems
|
||||
|
||||
maps.Copy(expectedData, userExpectedData)
|
||||
}
|
||||
|
||||
return totalItems, totalKopiaItems, collections, expectedData
|
||||
}
|
||||
|
||||
func runRestore(
|
||||
t *testing.T,
|
||||
ctx context.Context, //revive:disable-line:context-as-argument
|
||||
config configInfo,
|
||||
config ConfigInfo,
|
||||
backupVersion int,
|
||||
collections []data.RestoreCollection,
|
||||
numRestoreItems int,
|
||||
) {
|
||||
t.Logf(
|
||||
"Restoring collections to %s for resourceOwners(s) %v\n",
|
||||
config.dest.ContainerName,
|
||||
config.resourceOwners)
|
||||
config.Dest.ContainerName,
|
||||
config.ResourceOwners)
|
||||
|
||||
start := time.Now()
|
||||
|
||||
restoreGC := loadConnector(ctx, t, config.resource)
|
||||
restoreSel := getSelectorWith(t, config.service, config.resourceOwners, true)
|
||||
restoreGC := loadConnector(ctx, t, config.Resource)
|
||||
restoreSel := getSelectorWith(t, config.Service, config.ResourceOwners, true)
|
||||
deets, err := restoreGC.ConsumeRestoreCollections(
|
||||
ctx,
|
||||
backupVersion,
|
||||
config.acct,
|
||||
config.Acct,
|
||||
restoreSel,
|
||||
config.dest,
|
||||
config.opts,
|
||||
config.Dest,
|
||||
config.Opts,
|
||||
collections,
|
||||
fault.New(true))
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
@ -490,30 +449,30 @@ func runRestore(
|
||||
func runBackupAndCompare(
|
||||
t *testing.T,
|
||||
ctx context.Context, //revive:disable-line:context-as-argument
|
||||
config configInfo,
|
||||
config ConfigInfo,
|
||||
expectedData map[string]map[string][]byte,
|
||||
totalItems int,
|
||||
totalKopiaItems int,
|
||||
inputCollections []colInfo,
|
||||
inputCollections []ColInfo,
|
||||
) {
|
||||
t.Helper()
|
||||
|
||||
// Run a backup and compare its output with what we put in.
|
||||
cats := make(map[path.CategoryType]struct{}, len(inputCollections))
|
||||
for _, c := range inputCollections {
|
||||
cats[c.category] = struct{}{}
|
||||
cats[c.Category] = struct{}{}
|
||||
}
|
||||
|
||||
var (
|
||||
expectedDests = make([]destAndCats, 0, len(config.resourceOwners))
|
||||
expectedDests = make([]destAndCats, 0, len(config.ResourceOwners))
|
||||
idToName = map[string]string{}
|
||||
nameToID = map[string]string{}
|
||||
)
|
||||
|
||||
for _, ro := range config.resourceOwners {
|
||||
for _, ro := range config.ResourceOwners {
|
||||
expectedDests = append(expectedDests, destAndCats{
|
||||
resourceOwner: ro,
|
||||
dest: config.dest.ContainerName,
|
||||
dest: config.Dest.ContainerName,
|
||||
cats: cats,
|
||||
})
|
||||
|
||||
@ -521,10 +480,10 @@ func runBackupAndCompare(
|
||||
nameToID[ro] = ro
|
||||
}
|
||||
|
||||
backupGC := loadConnector(ctx, t, config.resource)
|
||||
backupGC := loadConnector(ctx, t, config.Resource)
|
||||
backupGC.IDNameLookup = inMock.NewCache(idToName, nameToID)
|
||||
|
||||
backupSel := backupSelectorForExpected(t, config.service, expectedDests)
|
||||
backupSel := backupSelectorForExpected(t, config.Service, expectedDests)
|
||||
t.Logf("Selective backup of %s\n", backupSel)
|
||||
|
||||
start := time.Now()
|
||||
@ -534,7 +493,7 @@ func runBackupAndCompare(
|
||||
backupSel,
|
||||
nil,
|
||||
version.NoBackup,
|
||||
config.opts,
|
||||
config.Opts,
|
||||
fault.New(true))
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
// No excludes yet because this isn't an incremental backup.
|
||||
@ -550,8 +509,8 @@ func runBackupAndCompare(
|
||||
totalKopiaItems,
|
||||
expectedData,
|
||||
dcs,
|
||||
config.dest,
|
||||
config.opts.RestorePermissions)
|
||||
config.Dest,
|
||||
config.Opts.RestorePermissions)
|
||||
|
||||
status := backupGC.Wait()
|
||||
|
||||
@ -572,17 +531,17 @@ func runRestoreBackupTest(
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
config := configInfo{
|
||||
acct: acct,
|
||||
opts: opts,
|
||||
resource: test.resource,
|
||||
service: test.service,
|
||||
tenant: tenant,
|
||||
resourceOwners: resourceOwners,
|
||||
dest: tester.DefaultTestRestoreDestination(),
|
||||
config := ConfigInfo{
|
||||
Acct: acct,
|
||||
Opts: opts,
|
||||
Resource: test.resource,
|
||||
Service: test.service,
|
||||
Tenant: tenant,
|
||||
ResourceOwners: resourceOwners,
|
||||
Dest: tester.DefaultTestRestoreDestination(),
|
||||
}
|
||||
|
||||
totalItems, totalKopiaItems, collections, expectedData := getCollectionsAndExpected(
|
||||
totalItems, totalKopiaItems, collections, expectedData := GetCollectionsAndExpected(
|
||||
t,
|
||||
config,
|
||||
test.collections,
|
||||
@ -618,17 +577,17 @@ func runRestoreTestWithVerion(
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
config := configInfo{
|
||||
acct: acct,
|
||||
opts: opts,
|
||||
resource: test.resource,
|
||||
service: test.service,
|
||||
tenant: tenant,
|
||||
resourceOwners: resourceOwners,
|
||||
dest: tester.DefaultTestRestoreDestination(),
|
||||
config := ConfigInfo{
|
||||
Acct: acct,
|
||||
Opts: opts,
|
||||
Resource: test.resource,
|
||||
Service: test.service,
|
||||
Tenant: tenant,
|
||||
ResourceOwners: resourceOwners,
|
||||
Dest: tester.DefaultTestRestoreDestination(),
|
||||
}
|
||||
|
||||
totalItems, _, collections, _ := getCollectionsAndExpected(
|
||||
totalItems, _, collections, _ := GetCollectionsAndExpected(
|
||||
t,
|
||||
config,
|
||||
test.collectionsPrevious,
|
||||
@ -657,17 +616,17 @@ func runRestoreBackupTestVersions(
ctx, flush := tester.NewContext()
defer flush()

config := configInfo{
acct: acct,
opts: opts,
resource: test.resource,
service: test.service,
tenant: tenant,
resourceOwners: resourceOwners,
dest: tester.DefaultTestRestoreDestination(),
config := ConfigInfo{
Acct: acct,
Opts: opts,
Resource: test.resource,
Service: test.service,
Tenant: tenant,
ResourceOwners: resourceOwners,
Dest: tester.DefaultTestRestoreDestination(),
}

totalItems, _, collections, _ := getCollectionsAndExpected(
totalItems, _, collections, _ := GetCollectionsAndExpected(
t,
config,
test.collectionsPrevious,
@ -682,7 +641,7 @@ func runRestoreBackupTestVersions(
totalItems)

// Get expected output for new version.
totalItems, totalKopiaItems, _, expectedData := getCollectionsAndExpected(
totalItems, totalKopiaItems, _, expectedData := GetCollectionsAndExpected(
t,
config,
test.collectionsLatest,
@ -707,11 +666,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
name: "EmailsWithAttachments",
service: path.ExchangeService,
resource: Users,
collections: []colInfo{
collections: []ColInfo{
{
pathElements: []string{"Inbox"},
category: path.EmailCategory,
items: []itemInfo{
PathElements: []string{"Inbox"},
Category: path.EmailCategory,
Items: []itemInfo{
{
name: "someencodeditemID",
data: exchMock.MessageWithDirectAttachment(
@ -734,11 +693,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
name: "MultipleEmailsMultipleFolders",
service: path.ExchangeService,
resource: Users,
collections: []colInfo{
collections: []ColInfo{
{
pathElements: []string{"Inbox"},
category: path.EmailCategory,
items: []itemInfo{
PathElements: []string{"Inbox"},
Category: path.EmailCategory,
Items: []itemInfo{
{
name: "someencodeditemID",
data: exchMock.MessageWithBodyBytes(
@ -751,9 +710,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
},
},
{
pathElements: []string{"Work"},
category: path.EmailCategory,
items: []itemInfo{
PathElements: []string{"Work"},
Category: path.EmailCategory,
Items: []itemInfo{
{
name: "someencodeditemID2",
data: exchMock.MessageWithBodyBytes(
@ -775,9 +734,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
},
},
{
pathElements: []string{"Work", "Inbox"},
category: path.EmailCategory,
items: []itemInfo{
PathElements: []string{"Work", "Inbox"},
Category: path.EmailCategory,
Items: []itemInfo{
{
name: "someencodeditemID4",
data: exchMock.MessageWithBodyBytes(
@ -790,9 +749,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
},
},
{
pathElements: []string{"Work", "Inbox", "Work"},
category: path.EmailCategory,
items: []itemInfo{
PathElements: []string{"Work", "Inbox", "Work"},
Category: path.EmailCategory,
Items: []itemInfo{
{
name: "someencodeditemID5",
data: exchMock.MessageWithBodyBytes(
@ -810,11 +769,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
name: "MultipleContactsSingleFolder",
service: path.ExchangeService,
resource: Users,
collections: []colInfo{
collections: []ColInfo{
{
pathElements: []string{"Contacts"},
category: path.ContactsCategory,
items: []itemInfo{
PathElements: []string{"Contacts"},
Category: path.ContactsCategory,
Items: []itemInfo{
{
name: "someencodeditemID",
data: exchMock.ContactBytes("Ghimley"),
@ -838,11 +797,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
name: "MultipleContactsMultipleFolders",
service: path.ExchangeService,
resource: Users,
collections: []colInfo{
collections: []ColInfo{
{
pathElements: []string{"Work"},
category: path.ContactsCategory,
items: []itemInfo{
PathElements: []string{"Work"},
Category: path.ContactsCategory,
Items: []itemInfo{
{
name: "someencodeditemID",
data: exchMock.ContactBytes("Ghimley"),
@ -861,9 +820,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
},
},
{
pathElements: []string{"Personal"},
category: path.ContactsCategory,
items: []itemInfo{
PathElements: []string{"Personal"},
Category: path.ContactsCategory,
Items: []itemInfo{
{
name: "someencodeditemID4",
data: exchMock.ContactBytes("Argon"),
@ -881,11 +840,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
// {
// name: "MultipleEventsSingleCalendar",
// service: path.ExchangeService,
// collections: []colInfo{
// collections: []ColInfo{
// {
// pathElements: []string{"Work"},
// category: path.EventsCategory,
// items: []itemInfo{
// PathElements: []string{"Work"},
// Category: path.EventsCategory,
// Items: []itemInfo{
// {
// name: "someencodeditemID",
// data: exchMock.EventWithSubjectBytes("Ghimley"),
@ -908,11 +867,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
// {
// name: "MultipleEventsMultipleCalendars",
// service: path.ExchangeService,
// collections: []colInfo{
// collections: []ColInfo{
// {
// pathElements: []string{"Work"},
// category: path.EventsCategory,
// items: []itemInfo{
// PathElements: []string{"Work"},
// Category: path.EventsCategory,
// Items: []itemInfo{
// {
// name: "someencodeditemID",
// data: exchMock.EventWithSubjectBytes("Ghimley"),
@ -931,9 +890,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
// },
// },
// {
// pathElements: []string{"Personal"},
// category: path.EventsCategory,
// items: []itemInfo{
// PathElements: []string{"Personal"},
// Category: path.EventsCategory,
// Items: []itemInfo{
// {
// name: "someencodeditemID4",
// data: exchMock.EventWithSubjectBytes("Argon"),
@ -973,11 +932,11 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
name: "Contacts",
service: path.ExchangeService,
resource: Users,
collections: []colInfo{
collections: []ColInfo{
{
pathElements: []string{"Work"},
category: path.ContactsCategory,
items: []itemInfo{
PathElements: []string{"Work"},
Category: path.ContactsCategory,
Items: []itemInfo{
{
name: "someencodeditemID",
data: exchMock.ContactBytes("Ghimley"),
@ -986,9 +945,9 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
},
},
{
pathElements: []string{"Personal"},
category: path.ContactsCategory,
items: []itemInfo{
PathElements: []string{"Personal"},
Category: path.ContactsCategory,
Items: []itemInfo{
{
name: "someencodeditemID2",
data: exchMock.ContactBytes("Irgot"),
@ -1001,11 +960,11 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
// {
// name: "Events",
// service: path.ExchangeService,
// collections: []colInfo{
// collections: []ColInfo{
// {
// pathElements: []string{"Work"},
// category: path.EventsCategory,
// items: []itemInfo{
// PathElements: []string{"Work"},
// Category: path.EventsCategory,
// Items: []itemInfo{
// {
// name: "someencodeditemID",
// data: exchMock.EventWithSubjectBytes("Ghimley"),
@ -1014,9 +973,9 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
// },
// },
// {
// pathElements: []string{"Personal"},
// category: path.EventsCategory,
// items: []itemInfo{
// PathElements: []string{"Personal"},
// Category: path.EventsCategory,
// Items: []itemInfo{
// {
// name: "someencodeditemID2",
// data: exchMock.EventWithSubjectBytes("Irgot"),
@ -1047,7 +1006,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
resourceOwner: suite.user,
dest: dest.ContainerName,
cats: map[path.CategoryType]struct{}{
collection.category: {},
collection.Category: {},
},
})

@ -1057,7 +1016,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
suite.connector.tenant,
suite.user,
dest,
[]colInfo{collection},
[]ColInfo{collection},
version.Backup,
)
allItems += totalItems
@ -1153,11 +1112,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup_largeMailAttac
name: "EmailsWithLargeAttachments",
service: path.ExchangeService,
resource: Users,
collections: []colInfo{
collections: []ColInfo{
{
pathElements: []string{"Inbox"},
category: path.EmailCategory,
items: []itemInfo{
PathElements: []string{"Inbox"},
Category: path.EmailCategory,
Items: []itemInfo{
{
name: "35mbAttachment",
data: exchMock.MessageWithSizedAttachment(subjectText, 35),

162 src/internal/connector/graph_connector_test_helpers.go Normal file
@ -0,0 +1,162 @@
package connector

import (
"bytes"
"context"
"io"
"testing"

"github.com/alcionai/clues"
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/path"
"github.com/stretchr/testify/require"
)

type ConfigInfo struct {
Acct account.Account
Opts control.Options
Resource Resource
Service path.ServiceType
Tenant string
ResourceOwners []string
Dest control.RestoreDestination
}

func mustToDataLayerPath(
t *testing.T,
service path.ServiceType,
tenant, resourceOwner string,
category path.CategoryType,
elements []string,
isItem bool,
) path.Path {
res, err := path.Build(tenant, resourceOwner, service, category, isItem, elements...)
require.NoError(t, err, clues.ToCore(err))

return res
}

// backupOutputPathFromRestore returns a path.Path denoting the location in
// kopia the data will be placed at. The location is a data-type specific
// combination of the location the data was recently restored to and where the
// data was originally in the hierarchy.
func backupOutputPathFromRestore(
t *testing.T,
restoreDest control.RestoreDestination,
inputPath path.Path,
) path.Path {
base := []string{restoreDest.ContainerName}

// OneDrive has leading information like the drive ID.
if inputPath.Service() == path.OneDriveService || inputPath.Service() == path.SharePointService {
folders := inputPath.Folders()
base = append(append([]string{}, folders[:3]...), restoreDest.ContainerName)

if len(folders) > 3 {
base = append(base, folders[3:]...)
}
}

if inputPath.Service() == path.ExchangeService && inputPath.Category() == path.EmailCategory {
base = append(base, inputPath.Folders()...)
}

return mustToDataLayerPath(
t,
inputPath.Service(),
inputPath.Tenant(),
inputPath.ResourceOwner(),
inputPath.Category(),
base,
false,
)
}
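// Illustrative sketch (not part of the original file; values are hypothetical):
// given the logic above, an Exchange email path with folders ["Inbox"] and a
// restore container named "Corso_Restore_X" would map to the backup location
// ["Corso_Restore_X", "Inbox"], while a OneDrive path with folders
// ["drives", "<driveID>", "root:", "folder-a"] would keep its three leading
// drive elements and become
// ["drives", "<driveID>", "root:", "Corso_Restore_X", "folder-a"].
// The "<driveID>" placeholder stands in for whatever drive the data was
// restored into.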

// TODO(ashmrtn): Make this an actual mock class that can be used in other
// packages.
type mockRestoreCollection struct {
data.Collection
auxItems map[string]data.Stream
}

func (rc mockRestoreCollection) Fetch(
ctx context.Context,
name string,
) (data.Stream, error) {
res := rc.auxItems[name]
if res == nil {
return nil, data.ErrNotFound
}

return res, nil
}
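// Illustrative sketch (hypothetical names, not part of the original file):
// Fetch only resolves items registered in auxItems, so callers can tell a
// genuinely missing item apart from other failures.
//
//	rc := mockRestoreCollection{auxItems: map[string]data.Stream{}}
//	if _, err := rc.Fetch(ctx, "file.data.meta"); errors.Is(err, data.ErrNotFound) {
//		// the name was never registered, which is the expected outcome here
//	}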

func collectionsForInfo(
t *testing.T,
service path.ServiceType,
tenant, user string,
dest control.RestoreDestination,
allInfo []ColInfo,
backupVersion int,
) (int, int, []data.RestoreCollection, map[string]map[string][]byte) {
var (
collections = make([]data.RestoreCollection, 0, len(allInfo))
expectedData = make(map[string]map[string][]byte, len(allInfo))
totalItems = 0
kopiaEntries = 0
)

for _, info := range allInfo {
pth := mustToDataLayerPath(
t,
service,
tenant,
user,
info.Category,
info.PathElements,
false)

mc := exchMock.NewCollection(pth, pth, len(info.Items))
baseDestPath := backupOutputPathFromRestore(t, dest, pth)

baseExpected := expectedData[baseDestPath.String()]
if baseExpected == nil {
expectedData[baseDestPath.String()] = make(map[string][]byte, len(info.Items))
baseExpected = expectedData[baseDestPath.String()]
}

for i := 0; i < len(info.Items); i++ {
mc.Names[i] = info.Items[i].name
mc.Data[i] = info.Items[i].data

baseExpected[info.Items[i].lookupKey] = info.Items[i].data

// We do not count metadata files against item count
if backupVersion > 0 &&
(service == path.OneDriveService || service == path.SharePointService) &&
metadata.HasMetaSuffix(info.Items[i].name) {
continue
}

totalItems++
}

c := mockRestoreCollection{Collection: mc, auxItems: map[string]data.Stream{}}

for _, aux := range info.AuxItems {
c.auxItems[aux.name] = &exchMock.Data{
ID: aux.name,
Reader: io.NopCloser(bytes.NewReader(aux.data)),
}
}

collections = append(collections, c)
kopiaEntries += len(info.Items)
}

return totalItems, kopiaEntries, collections, expectedData
}
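// Illustrative usage sketch (hypothetical values, not part of the original
// file): a test in this package could describe one Exchange inbox item and let
// collectionsForInfo produce both the restore collections and the data it
// expects to find again after backup.
//
//	dest := tester.DefaultTestRestoreDestination()
//	cols := []ColInfo{{
//		PathElements: []string{"Inbox"},
//		Category:     path.EmailCategory,
//		Items: []itemInfo{{
//			name:      "someencodeditemID",
//			data:      []byte("raw message bytes"),
//			lookupKey: "someencodeditemID",
//		}},
//	}}
//	totalItems, kopiaItems, collections, expected := collectionsForInfo(
//		t, path.ExchangeService, tenant, user, dest, cols, version.Backup)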