Compare commits
4 Commits
main
...
moveOneDri
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
8938a94eec | ||
|
|
16cb69b8a9 | ||
|
|
341bfd4616 | ||
|
|
cde2ea8492 |
@ -1,13 +1,11 @@
|
|||||||
package impl
|
package impl
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
|
||||||
"context"
|
"context"
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
|
||||||
"os"
|
"os"
|
||||||
"strings"
|
"strings"
|
||||||
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/alcionai/clues"
|
"github.com/alcionai/clues"
|
||||||
@ -20,8 +18,6 @@ import (
|
|||||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||||
"github.com/alcionai/corso/src/internal/connector"
|
"github.com/alcionai/corso/src/internal/connector"
|
||||||
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
|
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
|
||||||
"github.com/alcionai/corso/src/internal/connector/onedrive"
|
|
||||||
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
|
|
||||||
"github.com/alcionai/corso/src/internal/data"
|
"github.com/alcionai/corso/src/internal/data"
|
||||||
"github.com/alcionai/corso/src/internal/tester"
|
"github.com/alcionai/corso/src/internal/tester"
|
||||||
"github.com/alcionai/corso/src/internal/version"
|
"github.com/alcionai/corso/src/internal/version"
|
||||||
@ -200,44 +196,44 @@ func buildCollections(
|
|||||||
return collections, nil
|
return collections, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
type permData struct {
|
// type permData struct {
|
||||||
user string // user is only for older versions
|
// user string // user is only for older versions
|
||||||
entityID string
|
// EntityID string
|
||||||
roles []string
|
// Roles []string
|
||||||
sharingMode onedrive.SharingMode
|
// sharingMode onedrive.SharingMode
|
||||||
}
|
// }
|
||||||
|
|
||||||
type itemData struct {
|
// type connector.ItemData struct {
|
||||||
name string
|
// name string
|
||||||
data []byte
|
// data []byte
|
||||||
perms permData
|
// Perms connector.PermData
|
||||||
}
|
// }
|
||||||
|
|
||||||
type itemInfo struct {
|
// type itemInfo struct {
|
||||||
// lookupKey is a string that can be used to find this data from a set of
|
// // lookupKey is a string that can be used to find this data from a set of
|
||||||
// other data in the same collection. This key should be something that will
|
// // other data in the same collection. This key should be something that will
|
||||||
// be the same before and after restoring the item in M365 and may not be
|
// // be the same before and after restoring the item in M365 and may not be
|
||||||
// the M365 ID. When restoring items out of place, the item is assigned a
|
// // the M365 ID. When restoring items out of place, the item is assigned a
|
||||||
// new ID making it unsuitable for a lookup key.
|
// // new ID making it unsuitable for a lookup key.
|
||||||
lookupKey string
|
// lookupKey string
|
||||||
name string
|
// name string
|
||||||
data []byte
|
// data []byte
|
||||||
}
|
// }
|
||||||
|
|
||||||
type onedriveCollection struct {
|
// type onedriveCollection struct {
|
||||||
service path.ServiceType
|
// service path.ServiceType
|
||||||
pathElements []string
|
// pathElements []string
|
||||||
items []itemInfo
|
// items []itemInfo
|
||||||
aux []itemInfo
|
// aux []itemInfo
|
||||||
backupVersion int
|
// backupVersion int
|
||||||
}
|
// }
|
||||||
|
|
||||||
type onedriveColInfo struct {
|
// type onedriveColInfo struct {
|
||||||
pathElements []string
|
// pathElements []string
|
||||||
perms permData
|
// Perms connector.PermData
|
||||||
files []itemData
|
// files []DataForInfconnector.ItemDatao
|
||||||
folders []itemData
|
// folders []DataForInfconnector.ItemDatao
|
||||||
}
|
// }
|
||||||
|
|
||||||
var (
|
var (
|
||||||
folderAName = "folder-a"
|
folderAName = "folder-a"
|
||||||
@ -278,7 +274,7 @@ func generateAndRestoreOnedriveItems(
|
|||||||
driveID := ptr.Val(d.GetId())
|
driveID := ptr.Val(d.GetId())
|
||||||
|
|
||||||
var (
|
var (
|
||||||
cols []onedriveColInfo
|
cols []connector.OnedriveColInfo
|
||||||
|
|
||||||
rootPath = []string{"drives", driveID, "root:"}
|
rootPath = []string{"drives", driveID, "root:"}
|
||||||
folderAPath = []string{"drives", driveID, "root:", folderAName}
|
folderAPath = []string{"drives", driveID, "root:", folderAName}
|
||||||
@ -292,43 +288,43 @@ func generateAndRestoreOnedriveItems(
|
|||||||
)
|
)
|
||||||
|
|
||||||
for i := 0; i < count; i++ {
|
for i := 0; i < count; i++ {
|
||||||
col := []onedriveColInfo{
|
col := []connector.OnedriveColInfo{
|
||||||
// basic folder and file creation
|
// basic folder and file creation
|
||||||
{
|
{
|
||||||
pathElements: rootPath,
|
PathElements: rootPath,
|
||||||
files: []itemData{
|
Files: []connector.ItemData{
|
||||||
{
|
{
|
||||||
name: fmt.Sprintf("file-1st-count-%d-at-%s", i, currentTime),
|
Name: fmt.Sprintf("file-1st-count-%d-at-%s", i, currentTime),
|
||||||
data: fileAData,
|
Data: fileAData,
|
||||||
perms: permData{
|
Perms: connector.PermData{
|
||||||
user: secondaryUserName,
|
User: secondaryUserName,
|
||||||
entityID: secondaryUserID,
|
EntityID: secondaryUserID,
|
||||||
roles: writePerm,
|
Roles: writePerm,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: fmt.Sprintf("file-2nd-count-%d-at-%s", i, currentTime),
|
Name: fmt.Sprintf("file-2nd-count-%d-at-%s", i, currentTime),
|
||||||
data: fileBData,
|
Data: fileBData,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
folders: []itemData{
|
Folders: []connector.ItemData{
|
||||||
{
|
{
|
||||||
name: folderBName,
|
Name: folderBName,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: folderAName,
|
Name: folderAName,
|
||||||
perms: permData{
|
Perms: connector.PermData{
|
||||||
user: secondaryUserName,
|
User: secondaryUserName,
|
||||||
entityID: secondaryUserID,
|
EntityID: secondaryUserID,
|
||||||
roles: readPerm,
|
Roles: readPerm,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: folderCName,
|
Name: folderCName,
|
||||||
perms: permData{
|
Perms: connector.PermData{
|
||||||
user: secondaryUserName,
|
User: secondaryUserName,
|
||||||
entityID: secondaryUserID,
|
EntityID: secondaryUserID,
|
||||||
roles: readPerm,
|
Roles: readPerm,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@ -336,62 +332,62 @@ func generateAndRestoreOnedriveItems(
|
|||||||
{
|
{
|
||||||
// a folder that has permissions with an item in the folder with
|
// a folder that has permissions with an item in the folder with
|
||||||
// the different permissions.
|
// the different permissions.
|
||||||
pathElements: folderAPath,
|
PathElements: folderAPath,
|
||||||
files: []itemData{
|
Files: []connector.ItemData{
|
||||||
{
|
{
|
||||||
name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
|
Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
|
||||||
data: fileEData,
|
Data: fileEData,
|
||||||
perms: permData{
|
Perms: connector.PermData{
|
||||||
user: secondaryUserName,
|
User: secondaryUserName,
|
||||||
entityID: secondaryUserID,
|
EntityID: secondaryUserID,
|
||||||
roles: writePerm,
|
Roles: writePerm,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
perms: permData{
|
Perms: connector.PermData{
|
||||||
user: secondaryUserName,
|
User: secondaryUserName,
|
||||||
entityID: secondaryUserID,
|
EntityID: secondaryUserID,
|
||||||
roles: readPerm,
|
Roles: readPerm,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
// a folder that has permissions with an item in the folder with
|
// a folder that has permissions with an item in the folder with
|
||||||
// no permissions.
|
// no permissions.
|
||||||
pathElements: folderCPath,
|
PathElements: folderCPath,
|
||||||
files: []itemData{
|
Files: []connector.ItemData{
|
||||||
{
|
{
|
||||||
name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
|
Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
|
||||||
data: fileAData,
|
Data: fileAData,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
perms: permData{
|
Perms: connector.PermData{
|
||||||
user: secondaryUserName,
|
User: secondaryUserName,
|
||||||
entityID: secondaryUserID,
|
EntityID: secondaryUserID,
|
||||||
roles: readPerm,
|
Roles: readPerm,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
pathElements: folderBPath,
|
PathElements: folderBPath,
|
||||||
files: []itemData{
|
Files: []connector.ItemData{
|
||||||
{
|
{
|
||||||
// restoring a file in a non-root folder that doesn't inherit
|
// restoring a file in a non-root folder that doesn't inherit
|
||||||
// permissions.
|
// permissions.
|
||||||
name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
|
Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
|
||||||
data: fileBData,
|
Data: fileBData,
|
||||||
perms: permData{
|
Perms: connector.PermData{
|
||||||
user: secondaryUserName,
|
User: secondaryUserName,
|
||||||
entityID: secondaryUserID,
|
EntityID: secondaryUserID,
|
||||||
roles: writePerm,
|
Roles: writePerm,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
folders: []itemData{
|
Folders: []connector.ItemData{
|
||||||
{
|
{
|
||||||
name: folderAName,
|
Name: folderAName,
|
||||||
perms: permData{
|
Perms: connector.PermData{
|
||||||
user: secondaryUserName,
|
User: secondaryUserName,
|
||||||
entityID: secondaryUserID,
|
EntityID: secondaryUserID,
|
||||||
roles: readPerm,
|
Roles: readPerm,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@ -401,302 +397,316 @@ func generateAndRestoreOnedriveItems(
|
|||||||
cols = append(cols, col...)
|
cols = append(cols, col...)
|
||||||
}
|
}
|
||||||
|
|
||||||
input := dataForInfo(service, cols, version.Backup)
|
// TODO Neha: work on this
|
||||||
|
t := testing.T{}
|
||||||
collections := getCollections(
|
input := connector.DataForInfo(&t, service, cols, version.Backup)
|
||||||
service,
|
|
||||||
tenantID,
|
|
||||||
[]string{resourceOwner},
|
|
||||||
input,
|
|
||||||
version.Backup)
|
|
||||||
|
|
||||||
opts := control.Options{
|
opts := control.Options{
|
||||||
RestorePermissions: true,
|
RestorePermissions: true,
|
||||||
ToggleFeatures: control.Toggles{},
|
ToggleFeatures: control.Toggles{},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
config := connector.ConfigInfo{
|
||||||
|
Acct: acct,
|
||||||
|
Opts: opts,
|
||||||
|
Resource: connector.Users,
|
||||||
|
Service: service,
|
||||||
|
Tenant: tenantID,
|
||||||
|
ResourceOwners: []string{resourceOwner},
|
||||||
|
Dest: tester.DefaultTestRestoreDestination(),
|
||||||
|
}
|
||||||
|
|
||||||
|
_, _, collections, _ := connector.GetCollectionsAndExpected(
|
||||||
|
&t,
|
||||||
|
config,
|
||||||
|
// service,
|
||||||
|
// tenantID,
|
||||||
|
// []string{resourceOwner},
|
||||||
|
input,
|
||||||
|
version.Backup)
|
||||||
|
|
||||||
return gc.ConsumeRestoreCollections(ctx, version.Backup, acct, sel, dest, opts, collections, errs)
|
return gc.ConsumeRestoreCollections(ctx, version.Backup, acct, sel, dest, opts, collections, errs)
|
||||||
}
|
}
|
||||||
|
|
||||||
func getCollections(
|
// func getCollections(
|
||||||
service path.ServiceType,
|
// service path.ServiceType,
|
||||||
tenant string,
|
// tenant string,
|
||||||
resourceOwners []string,
|
// resourceOwners []string,
|
||||||
testCollections []colInfo,
|
// testCollections []connector.ColInfo,
|
||||||
backupVersion int,
|
// backupVersion int,
|
||||||
) []data.RestoreCollection {
|
// ) []data.RestoreCollection {
|
||||||
var collections []data.RestoreCollection
|
// var collections []data.RestoreCollection
|
||||||
|
|
||||||
for _, owner := range resourceOwners {
|
// for _, owner := range resourceOwners {
|
||||||
ownerCollections := collectionsForInfo(
|
// ownerCollections := collectionsForInfo(
|
||||||
service,
|
// service,
|
||||||
tenant,
|
// tenant,
|
||||||
owner,
|
// owner,
|
||||||
testCollections,
|
// testCollections,
|
||||||
backupVersion,
|
// backupVersion,
|
||||||
)
|
// )
|
||||||
|
|
||||||
collections = append(collections, ownerCollections...)
|
// collections = append(collections, ownerCollections...)
|
||||||
}
|
// }
|
||||||
|
|
||||||
return collections
|
// return collections
|
||||||
}
|
// }
|
||||||
|
|
||||||
type mockRestoreCollection struct {
|
// type mockRestoreCollection struct {
|
||||||
data.Collection
|
// data.Collection
|
||||||
auxItems map[string]data.Stream
|
// auxItems map[string]data.Stream
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (rc mockRestoreCollection) Fetch(
|
// func (rc mockRestoreCollection) Fetch(
|
||||||
ctx context.Context,
|
// ctx context.Context,
|
||||||
name string,
|
// name string,
|
||||||
) (data.Stream, error) {
|
// ) (data.Stream, error) {
|
||||||
res := rc.auxItems[name]
|
// res := rc.auxItems[name]
|
||||||
if res == nil {
|
// if res == nil {
|
||||||
return nil, data.ErrNotFound
|
// return nil, data.ErrNotFound
|
||||||
}
|
// }
|
||||||
|
|
||||||
return res, nil
|
// return res, nil
|
||||||
}
|
// }
|
||||||
|
|
||||||
func collectionsForInfo(
|
// func collectionsForInfo(
|
||||||
service path.ServiceType,
|
// service path.ServiceType,
|
||||||
tenant, user string,
|
// tenant, user string,
|
||||||
allInfo []colInfo,
|
// allInfo []colInfo,
|
||||||
backupVersion int,
|
// backupVersion int,
|
||||||
) []data.RestoreCollection {
|
// ) []data.RestoreCollection {
|
||||||
collections := make([]data.RestoreCollection, 0, len(allInfo))
|
// collections := make([]data.RestoreCollection, 0, len(allInfo))
|
||||||
|
|
||||||
for _, info := range allInfo {
|
// for _, info := range allInfo {
|
||||||
pth := mustToDataLayerPath(
|
// pth := mustToDataLayerPath(
|
||||||
service,
|
// service,
|
||||||
tenant,
|
// tenant,
|
||||||
user,
|
// user,
|
||||||
info.category,
|
// info.category,
|
||||||
info.pathElements,
|
// info.pathElements,
|
||||||
false)
|
// false)
|
||||||
|
|
||||||
mc := exchMock.NewCollection(pth, pth, len(info.items))
|
// mc := exchMock.NewCollection(pth, pth, len(info.items))
|
||||||
|
|
||||||
for i := 0; i < len(info.items); i++ {
|
// for i := 0; i < len(info.items); i++ {
|
||||||
mc.Names[i] = info.items[i].name
|
// mc.Names[i] = info.items[i].name
|
||||||
mc.Data[i] = info.items[i].data
|
// mc.Data[i] = info.items[i].data
|
||||||
|
|
||||||
// We do not count metadata files against item count
|
// // We do not count metadata files against item count
|
||||||
if backupVersion > 0 && metadata.HasMetaSuffix(info.items[i].name) &&
|
// if backupVersion > 0 && metadata.HasMetaSuffix(info.items[i].name) &&
|
||||||
(service == path.OneDriveService || service == path.SharePointService) {
|
// (service == path.OneDriveService || service == path.SharePointService) {
|
||||||
continue
|
// continue
|
||||||
}
|
// }
|
||||||
}
|
// }
|
||||||
|
|
||||||
c := mockRestoreCollection{Collection: mc, auxItems: map[string]data.Stream{}}
|
// c := mockRestoreCollection{Collection: mc, auxItems: map[string]data.Stream{}}
|
||||||
|
|
||||||
for _, aux := range info.auxItems {
|
// for _, aux := range info.auxItems {
|
||||||
c.auxItems[aux.name] = &exchMock.Data{
|
// c.auxItems[aux.name] = &exchMock.Data{
|
||||||
ID: aux.name,
|
// ID: aux.name,
|
||||||
Reader: io.NopCloser(bytes.NewReader(aux.data)),
|
// Reader: io.NopCloser(bytes.NewReader(aux.data)),
|
||||||
}
|
// }
|
||||||
}
|
// }
|
||||||
|
|
||||||
collections = append(collections, c)
|
// collections = append(collections, c)
|
||||||
}
|
// }
|
||||||
|
|
||||||
return collections
|
// return collections
|
||||||
}
|
// }
|
||||||
|
|
||||||
func mustToDataLayerPath(
|
// func mustToDataLayerPath(
|
||||||
service path.ServiceType,
|
// service path.ServiceType,
|
||||||
tenant, resourceOwner string,
|
// tenant, resourceOwner string,
|
||||||
category path.CategoryType,
|
// category path.CategoryType,
|
||||||
elements []string,
|
// elements []string,
|
||||||
isItem bool,
|
// isItem bool,
|
||||||
) path.Path {
|
// ) path.Path {
|
||||||
res, err := path.Build(tenant, resourceOwner, service, category, isItem, elements...)
|
// res, err := path.Build(tenant, resourceOwner, service, category, isItem, elements...)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
fmt.Println("building path", clues.ToCore(err))
|
// fmt.Println("building path", clues.ToCore(err))
|
||||||
}
|
// }
|
||||||
|
|
||||||
return res
|
// return res
|
||||||
}
|
// }
|
||||||
|
|
||||||
type colInfo struct {
|
// type colInfo struct {
|
||||||
// Elements (in order) for the path representing this collection. Should
|
// // Elements (in order) for the path representing this collection. Should
|
||||||
// only contain elements after the prefix that corso uses for the path. For
|
// // only contain elements after the prefix that corso uses for the path. For
|
||||||
// example, a collection for the Inbox folder in exchange mail would just be
|
// // example, a collection for the Inbox folder in exchange mail would just be
|
||||||
// "Inbox".
|
// // "Inbox".
|
||||||
pathElements []string
|
// pathElements []string
|
||||||
category path.CategoryType
|
// category path.CategoryType
|
||||||
items []itemInfo
|
// items []itemInfo
|
||||||
// auxItems are items that can be retrieved with Fetch but won't be returned
|
// // auxItems are items that can be retrieved with Fetch but won't be returned
|
||||||
// by Items().
|
// // by Items().
|
||||||
auxItems []itemInfo
|
// auxItems []itemInfo
|
||||||
}
|
// }
|
||||||
|
|
||||||
func newOneDriveCollection(
|
// func newOneDriveCollection(
|
||||||
service path.ServiceType,
|
// service path.ServiceType,
|
||||||
pathElements []string,
|
// pathElements []string,
|
||||||
backupVersion int,
|
// backupVersion int,
|
||||||
) *onedriveCollection {
|
// ) *onedriveCollection {
|
||||||
return &onedriveCollection{
|
// return &onedriveCollection{
|
||||||
service: service,
|
// service: service,
|
||||||
pathElements: pathElements,
|
// pathElements: pathElements,
|
||||||
backupVersion: backupVersion,
|
// backupVersion: backupVersion,
|
||||||
}
|
// }
|
||||||
}
|
// }
|
||||||
|
|
||||||
func dataForInfo(
|
// func dataForInfo(
|
||||||
service path.ServiceType,
|
// service path.ServiceType,
|
||||||
cols []onedriveColInfo,
|
// cols []onedriveColInfo,
|
||||||
backupVersion int,
|
// backupVersion int,
|
||||||
) []colInfo {
|
// ) []colInfo {
|
||||||
var res []colInfo
|
// var res []colInfo
|
||||||
|
|
||||||
for _, c := range cols {
|
// for _, c := range cols {
|
||||||
onedriveCol := newOneDriveCollection(service, c.pathElements, backupVersion)
|
// onedriveCol := newOneDriveCollection(service, c.pathElements, backupVersion)
|
||||||
|
|
||||||
for _, f := range c.files {
|
// for _, f := range c.files {
|
||||||
onedriveCol.withFile(f.name, f.data, f.perms)
|
// onedriveCol.withFile(f.name, f.data, f.Perms)
|
||||||
}
|
// }
|
||||||
|
|
||||||
onedriveCol.withPermissions(c.perms)
|
// onedriveCol.withPermissions(c.Perms)
|
||||||
|
|
||||||
res = append(res, onedriveCol.collection())
|
// res = append(res, onedriveCol.collection())
|
||||||
}
|
// }
|
||||||
|
|
||||||
return res
|
// return res
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (c onedriveCollection) collection() colInfo {
|
// func (c onedriveCollection) collection() colInfo {
|
||||||
cat := path.FilesCategory
|
// cat := path.FilesCategory
|
||||||
if c.service == path.SharePointService {
|
// if c.service == path.SharePointService {
|
||||||
cat = path.LibrariesCategory
|
// cat = path.LibrariesCategory
|
||||||
}
|
// }
|
||||||
|
|
||||||
return colInfo{
|
// return colInfo{
|
||||||
pathElements: c.pathElements,
|
// pathElements: c.pathElements,
|
||||||
category: cat,
|
// category: cat,
|
||||||
items: c.items,
|
// items: c.items,
|
||||||
auxItems: c.aux,
|
// auxItems: c.aux,
|
||||||
}
|
// }
|
||||||
}
|
// }
|
||||||
|
|
||||||
func (c *onedriveCollection) withFile(name string, fileData []byte, perm permData) *onedriveCollection {
|
// func (c *onedriveCollection) withFile(name string, fileData []byte, perm connector.PermData) *onedriveCollection {
|
||||||
c.items = append(c.items, onedriveItemWithData(
|
// c.items = append(c.items, onedriveItemWithData(
|
||||||
name+metadata.DataFileSuffix,
|
// name+metadata.DataFileSuffix,
|
||||||
name+metadata.DataFileSuffix,
|
// name+metadata.DataFileSuffix,
|
||||||
fileData))
|
// fileData))
|
||||||
|
|
||||||
md := onedriveMetadata(
|
// md := onedriveMetadata(
|
||||||
name,
|
// name,
|
||||||
name+metadata.MetaFileSuffix,
|
// name+metadata.MetaFileSuffix,
|
||||||
name,
|
// name,
|
||||||
perm,
|
// perm,
|
||||||
true)
|
// true)
|
||||||
c.items = append(c.items, md)
|
// c.items = append(c.items, md)
|
||||||
c.aux = append(c.aux, md)
|
// c.aux = append(c.aux, md)
|
||||||
|
|
||||||
return c
|
// return c
|
||||||
}
|
// }
|
||||||
|
|
||||||
// withPermissions adds permissions to the folder represented by this
|
// // withPermissions adds permissions to the folder represented by this
|
||||||
// onedriveCollection.
|
// // onedriveCollection.
|
||||||
func (c *onedriveCollection) withPermissions(perm permData) *onedriveCollection {
|
// func (c *onedriveCollection) withPermissions(perm connector.PermData) *onedriveCollection {
|
||||||
if c.backupVersion < version.OneDrive4DirIncludesPermissions {
|
// if c.backupVersion < version.OneDrive4DirIncludesPermissions {
|
||||||
return c
|
// return c
|
||||||
}
|
// }
|
||||||
|
|
||||||
name := c.pathElements[len(c.pathElements)-1]
|
// name := c.pathElements[len(c.pathElements)-1]
|
||||||
metaName := name
|
// metaName := name
|
||||||
|
|
||||||
if c.backupVersion >= version.OneDrive5DirMetaNoName {
|
// if c.backupVersion >= version.OneDrive5DirMetaNoName {
|
||||||
// We switched to just .dirmeta for metadata file names.
|
// // We switched to just .dirmeta for metadata file names.
|
||||||
metaName = ""
|
// metaName = ""
|
||||||
}
|
// }
|
||||||
|
|
||||||
if name == "root:" {
|
// if name == "root:" {
|
||||||
return c
|
// return c
|
||||||
}
|
// }
|
||||||
|
|
||||||
md := onedriveMetadata(
|
// md := onedriveMetadata(
|
||||||
name,
|
// name,
|
||||||
metaName+metadata.DirMetaFileSuffix,
|
// metaName+metadata.DirMetaFileSuffix,
|
||||||
metaName+metadata.DirMetaFileSuffix,
|
// metaName+metadata.DirMetaFileSuffix,
|
||||||
perm,
|
// perm,
|
||||||
true)
|
// true)
|
||||||
|
|
||||||
c.items = append(c.items, md)
|
// c.items = append(c.items, md)
|
||||||
c.aux = append(c.aux, md)
|
// c.aux = append(c.aux, md)
|
||||||
|
|
||||||
return c
|
// return c
|
||||||
}
|
// }
|
||||||
|
|
||||||
type oneDriveData struct {
|
// type oneDriveData struct {
|
||||||
FileName string `json:"fileName,omitempty"`
|
// FileName string `json:"fileName,omitempty"`
|
||||||
Data []byte `json:"data,omitempty"`
|
// Data []byte `json:"data,omitempty"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
func onedriveItemWithData(
|
// func onedriveItemWithData(
|
||||||
name, lookupKey string,
|
// name, lookupKey string,
|
||||||
fileData []byte,
|
// fileData []byte,
|
||||||
) itemInfo {
|
// ) itemInfo {
|
||||||
content := oneDriveData{
|
// content := oneDriveData{
|
||||||
FileName: lookupKey,
|
// FileName: lookupKey,
|
||||||
Data: fileData,
|
// Data: fileData,
|
||||||
}
|
// }
|
||||||
|
|
||||||
serialized, _ := json.Marshal(content)
|
// serialized, _ := json.Marshal(content)
|
||||||
|
|
||||||
return itemInfo{
|
// return itemInfo{
|
||||||
name: name,
|
// name: name,
|
||||||
data: serialized,
|
// Data: serialized,
|
||||||
lookupKey: lookupKey,
|
// lookupKey: lookupKey,
|
||||||
}
|
// }
|
||||||
}
|
// }
|
||||||
|
|
||||||
func onedriveMetadata(
|
// func onedriveMetadata(
|
||||||
fileName, itemID, lookupKey string,
|
// fileName, itemID, lookupKey string,
|
||||||
perm permData,
|
// perm connector.PermData,
|
||||||
permUseID bool,
|
// permUseID bool,
|
||||||
) itemInfo {
|
// ) itemInfo {
|
||||||
meta := getMetadata(fileName, perm, permUseID)
|
// meta := getMetadata(fileName, perm, permUseID)
|
||||||
|
|
||||||
metaJSON, err := json.Marshal(meta)
|
// metaJSON, err := json.Marshal(meta)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
fmt.Println("marshalling metadata", clues.ToCore(err))
|
// fmt.Println("marshalling metadata", clues.ToCore(err))
|
||||||
}
|
// }
|
||||||
|
|
||||||
return itemInfo{
|
// return itemInfo{
|
||||||
name: itemID,
|
// name: itemID,
|
||||||
data: metaJSON,
|
// Data: metaJSON,
|
||||||
lookupKey: lookupKey,
|
// lookupKey: lookupKey,
|
||||||
}
|
// }
|
||||||
}
|
// }
|
||||||
|
|
||||||
func getMetadata(fileName string, perm permData, permUseID bool) onedrive.Metadata {
|
// func getMetadata(fileName string, perm connector.PermData, permUseID bool) onedrive.Metadata {
|
||||||
if len(perm.user) == 0 || len(perm.roles) == 0 ||
|
// if len(perm.user) == 0 || len(perm.Roles) == 0 ||
|
||||||
perm.sharingMode != onedrive.SharingModeCustom {
|
// perm.sharingMode != onedrive.SharingModeCustom {
|
||||||
return onedrive.Metadata{
|
// return onedrive.Metadata{
|
||||||
FileName: fileName,
|
// FileName: fileName,
|
||||||
SharingMode: perm.sharingMode,
|
// SharingMode: perm.sharingMode,
|
||||||
}
|
// }
|
||||||
}
|
// }
|
||||||
|
|
||||||
// In case of permissions, the id will usually be same for same
|
// // In case of permissions, the id will usually be same for same
|
||||||
// user/role combo unless deleted and readded, but we have to do
|
// // user/role combo unless deleted and readded, but we have to do
|
||||||
// this as we only have two users of which one is already taken.
|
// // this as we only have two users of which one is already taken.
|
||||||
id := uuid.NewString()
|
// id := uuid.NewString()
|
||||||
uperm := onedrive.UserPermission{ID: id, Roles: perm.roles}
|
// uperm := onedrive.UserPermission{ID: id, Roles: perm.Roles}
|
||||||
|
|
||||||
if permUseID {
|
// if permUseID {
|
||||||
uperm.EntityID = perm.entityID
|
// uperm.EntityID = perm.EntityID
|
||||||
} else {
|
// } else {
|
||||||
uperm.Email = perm.user
|
// uperm.Email = perm.user
|
||||||
}
|
// }
|
||||||
|
|
||||||
meta := onedrive.Metadata{
|
// meta := onedrive.Metadata{
|
||||||
FileName: fileName,
|
// FileName: fileName,
|
||||||
Permissions: []onedrive.UserPermission{uperm},
|
// Permissions: []onedrive.UserPermission{uperm},
|
||||||
}
|
// }
|
||||||
|
|
||||||
return meta
|
// return meta
|
||||||
}
|
// }
|
||||||
|
|||||||
@ -134,6 +134,11 @@ func (c Mail) GetItem(
|
|||||||
immutableIDs bool,
|
immutableIDs bool,
|
||||||
errs *fault.Bus,
|
errs *fault.Bus,
|
||||||
) (serialization.Parsable, *details.ExchangeInfo, error) {
|
) (serialization.Parsable, *details.ExchangeInfo, error) {
|
||||||
|
var (
|
||||||
|
size int64
|
||||||
|
attachSize int32
|
||||||
|
mailBody models.ItemBodyable
|
||||||
|
)
|
||||||
// Will need adjusted if attachments start allowing paging.
|
// Will need adjusted if attachments start allowing paging.
|
||||||
headers := buildPreferHeaders(false, immutableIDs)
|
headers := buildPreferHeaders(false, immutableIDs)
|
||||||
itemOpts := &users.ItemMessagesMessageItemRequestBuilderGetRequestConfiguration{
|
itemOpts := &users.ItemMessagesMessageItemRequestBuilderGetRequestConfiguration{
|
||||||
@ -145,8 +150,16 @@ func (c Mail) GetItem(
|
|||||||
return nil, nil, graph.Stack(ctx, err)
|
return nil, nil, graph.Stack(ctx, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if !ptr.Val(mail.GetHasAttachments()) && !HasAttachments(mail.GetBody()) {
|
mailBody = mail.GetBody()
|
||||||
return mail, MailInfo(mail), nil
|
if !ptr.Val(mail.GetHasAttachments()) && !HasAttachments(mailBody) {
|
||||||
|
return mail, MailInfo(mail, 0), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if mailBody != nil {
|
||||||
|
bodySize := ptr.Val(mailBody.GetContent())
|
||||||
|
if bodySize != "" {
|
||||||
|
size = int64(len(bodySize))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
options := &users.ItemMessagesItemAttachmentsRequestBuilderGetRequestConfiguration{
|
options := &users.ItemMessagesItemAttachmentsRequestBuilderGetRequestConfiguration{
|
||||||
@ -163,8 +176,14 @@ func (c Mail) GetItem(
|
|||||||
Attachments().
|
Attachments().
|
||||||
Get(ctx, options)
|
Get(ctx, options)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
|
for _, a := range attached.GetValue() {
|
||||||
|
attachSize = ptr.Val(a.GetSize())
|
||||||
|
size = size + int64(attachSize)
|
||||||
|
}
|
||||||
|
|
||||||
mail.SetAttachments(attached.GetValue())
|
mail.SetAttachments(attached.GetValue())
|
||||||
return mail, MailInfo(mail), nil
|
|
||||||
|
return mail, MailInfo(mail, size), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// A failure can be caused by having a lot of attachments as
|
// A failure can be caused by having a lot of attachments as
|
||||||
@ -214,11 +233,14 @@ func (c Mail) GetItem(
|
|||||||
}
|
}
|
||||||
|
|
||||||
atts = append(atts, att)
|
atts = append(atts, att)
|
||||||
|
|
||||||
|
attachSize = ptr.Val(a.GetSize())
|
||||||
|
size = size + int64(attachSize)
|
||||||
}
|
}
|
||||||
|
|
||||||
mail.SetAttachments(atts)
|
mail.SetAttachments(atts)
|
||||||
|
|
||||||
return mail, MailInfo(mail), nil
|
return mail, MailInfo(mail, size), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// EnumerateContainers iterates through all of the users current
|
// EnumerateContainers iterates through all of the users current
|
||||||
@ -419,7 +441,7 @@ func (c Mail) Serialize(
|
|||||||
// Helpers
|
// Helpers
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
func MailInfo(msg models.Messageable) *details.ExchangeInfo {
|
func MailInfo(msg models.Messageable, size int64) *details.ExchangeInfo {
|
||||||
var (
|
var (
|
||||||
sender = UnwrapEmailAddress(msg.GetSender())
|
sender = UnwrapEmailAddress(msg.GetSender())
|
||||||
subject = ptr.Val(msg.GetSubject())
|
subject = ptr.Val(msg.GetSubject())
|
||||||
@ -444,6 +466,7 @@ func MailInfo(msg models.Messageable) *details.ExchangeInfo {
|
|||||||
Recipient: recipients,
|
Recipient: recipients,
|
||||||
Subject: subject,
|
Subject: subject,
|
||||||
Received: received,
|
Received: received,
|
||||||
|
Size: size,
|
||||||
Created: created,
|
Created: created,
|
||||||
Modified: ptr.OrNow(msg.GetLastModifiedDateTime()),
|
Modified: ptr.OrNow(msg.GetLastModifiedDateTime()),
|
||||||
}
|
}
|
||||||
|
|||||||
@ -152,7 +152,7 @@ func (suite *MailAPIUnitSuite) TestMailInfo() {
|
|||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
suite.Run(tt.name, func() {
|
suite.Run(tt.name, func() {
|
||||||
msg, expected := tt.msgAndRP()
|
msg, expected := tt.msgAndRP()
|
||||||
assert.Equal(suite.T(), expected, api.MailInfo(msg))
|
assert.Equal(suite.T(), expected, api.MailInfo(msg, 0))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -260,7 +260,12 @@ func (col *Collection) streamItems(ctx context.Context, errs *fault.Bus) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
info.Size = int64(len(data))
|
// In case of mail the size of data is calc as- size of body content+size of attachment
|
||||||
|
// in all other case the size is - total item's serialized size
|
||||||
|
if info.Size <= 0 {
|
||||||
|
info.Size = int64(len(data))
|
||||||
|
}
|
||||||
|
|
||||||
info.ParentPath = col.locationPath.String()
|
info.ParentPath = col.locationPath.String()
|
||||||
|
|
||||||
col.data <- &Stream{
|
col.data <- &Stream{
|
||||||
|
|||||||
@ -218,8 +218,7 @@ func RestoreMailMessage(
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
info := api.MailInfo(clone)
|
info := api.MailInfo(clone, int64(len(bits)))
|
||||||
info.Size = int64(len(bits))
|
|
||||||
|
|
||||||
return info, nil
|
return info, nil
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,7 +1,6 @@
|
|||||||
package connector
|
package connector
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
|
||||||
"context"
|
"context"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"io"
|
"io"
|
||||||
@ -17,33 +16,17 @@ import (
|
|||||||
"golang.org/x/exp/slices"
|
"golang.org/x/exp/slices"
|
||||||
|
|
||||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||||
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
|
|
||||||
"github.com/alcionai/corso/src/internal/connector/onedrive"
|
"github.com/alcionai/corso/src/internal/connector/onedrive"
|
||||||
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
|
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
|
||||||
"github.com/alcionai/corso/src/internal/connector/support"
|
"github.com/alcionai/corso/src/internal/connector/support"
|
||||||
"github.com/alcionai/corso/src/internal/data"
|
"github.com/alcionai/corso/src/internal/data"
|
||||||
"github.com/alcionai/corso/src/internal/tester"
|
"github.com/alcionai/corso/src/internal/tester"
|
||||||
"github.com/alcionai/corso/src/pkg/account"
|
|
||||||
"github.com/alcionai/corso/src/pkg/control"
|
"github.com/alcionai/corso/src/pkg/control"
|
||||||
"github.com/alcionai/corso/src/pkg/fault"
|
"github.com/alcionai/corso/src/pkg/fault"
|
||||||
"github.com/alcionai/corso/src/pkg/path"
|
"github.com/alcionai/corso/src/pkg/path"
|
||||||
"github.com/alcionai/corso/src/pkg/selectors"
|
"github.com/alcionai/corso/src/pkg/selectors"
|
||||||
)
|
)
|
||||||
|
|
||||||
func mustToDataLayerPath(
|
|
||||||
t *testing.T,
|
|
||||||
service path.ServiceType,
|
|
||||||
tenant, resourceOwner string,
|
|
||||||
category path.CategoryType,
|
|
||||||
elements []string,
|
|
||||||
isItem bool,
|
|
||||||
) path.Path {
|
|
||||||
res, err := path.Build(tenant, resourceOwner, service, category, isItem, elements...)
|
|
||||||
require.NoError(t, err, clues.ToCore(err))
|
|
||||||
|
|
||||||
return res
|
|
||||||
}
|
|
||||||
|
|
||||||
func testElementsMatch[T any](
|
func testElementsMatch[T any](
|
||||||
t *testing.T,
|
t *testing.T,
|
||||||
expected []T,
|
expected []T,
|
||||||
@ -108,52 +91,52 @@ func testElementsMatch[T any](
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
type configInfo struct {
|
// type configInfo struct {
|
||||||
acct account.Account
|
// acct account.Account
|
||||||
opts control.Options
|
// opts control.Options
|
||||||
resource Resource
|
// resource Resource
|
||||||
service path.ServiceType
|
// service path.ServiceType
|
||||||
tenant string
|
// tenant string
|
||||||
resourceOwners []string
|
// resourceOwners []string
|
||||||
dest control.RestoreDestination
|
// dest control.RestoreDestination
|
||||||
}
|
// }
|
||||||
|
|
||||||
type itemInfo struct {
|
// type itemInfo struct {
|
||||||
// lookupKey is a string that can be used to find this data from a set of
|
// // lookupKey is a string that can be used to find this data from a set of
|
||||||
// other data in the same collection. This key should be something that will
|
// // other data in the same collection. This key should be something that will
|
||||||
// be the same before and after restoring the item in M365 and may not be
|
// // be the same before and after restoring the item in M365 and may not be
|
||||||
// the M365 ID. When restoring items out of place, the item is assigned a
|
// // the M365 ID. When restoring items out of place, the item is assigned a
|
||||||
// new ID making it unsuitable for a lookup key.
|
// // new ID making it unsuitable for a lookup key.
|
||||||
lookupKey string
|
// lookupKey string
|
||||||
name string
|
// name string
|
||||||
data []byte
|
// data []byte
|
||||||
}
|
// }
|
||||||
|
|
||||||
type colInfo struct {
|
// type colInfo struct {
|
||||||
// Elements (in order) for the path representing this collection. Should
|
// // Elements (in order) for the path representing this collection. Should
|
||||||
// only contain elements after the prefix that corso uses for the path. For
|
// // only contain elements after the prefix that corso uses for the path. For
|
||||||
// example, a collection for the Inbox folder in exchange mail would just be
|
// // example, a collection for the Inbox folder in exchange mail would just be
|
||||||
// "Inbox".
|
// // "Inbox".
|
||||||
pathElements []string
|
// pathElements []string
|
||||||
category path.CategoryType
|
// category path.CategoryType
|
||||||
items []itemInfo
|
// items []itemInfo
|
||||||
// auxItems are items that can be retrieved with Fetch but won't be returned
|
// // auxItems are items that can be retrieved with Fetch but won't be returned
|
||||||
// by Items(). These files do not directly participate in comparisosn at the
|
// // by Items(). These files do not directly participate in comparisosn at the
|
||||||
// end of a test.
|
// // end of a test.
|
||||||
auxItems []itemInfo
|
// auxItems []itemInfo
|
||||||
}
|
// }
|
||||||
|
|
||||||
type restoreBackupInfo struct {
|
type restoreBackupInfo struct {
|
||||||
name string
|
name string
|
||||||
service path.ServiceType
|
service path.ServiceType
|
||||||
collections []colInfo
|
collections []ColInfo
|
||||||
resource Resource
|
resource Resource
|
||||||
}
|
}
|
||||||
|
|
||||||
type restoreBackupInfoMultiVersion struct {
|
type restoreBackupInfoMultiVersion struct {
|
||||||
service path.ServiceType
|
service path.ServiceType
|
||||||
collectionsLatest []colInfo
|
collectionsLatest []ColInfo
|
||||||
collectionsPrevious []colInfo
|
collectionsPrevious []ColInfo
|
||||||
resource Resource
|
resource Resource
|
||||||
backupVersion int
|
backupVersion int
|
||||||
}
|
}
|
||||||
@ -1127,127 +1110,6 @@ func backupSelectorForExpected(
|
|||||||
return selectors.Selector{}
|
return selectors.Selector{}
|
||||||
}
|
}
|
||||||
|
|
||||||
// backupOutputPathFromRestore returns a path.Path denoting the location in
|
|
||||||
// kopia the data will be placed at. The location is a data-type specific
|
|
||||||
// combination of the location the data was recently restored to and where the
|
|
||||||
// data was originally in the hierarchy.
|
|
||||||
func backupOutputPathFromRestore(
|
|
||||||
t *testing.T,
|
|
||||||
restoreDest control.RestoreDestination,
|
|
||||||
inputPath path.Path,
|
|
||||||
) path.Path {
|
|
||||||
base := []string{restoreDest.ContainerName}
|
|
||||||
|
|
||||||
// OneDrive has leading information like the drive ID.
|
|
||||||
if inputPath.Service() == path.OneDriveService || inputPath.Service() == path.SharePointService {
|
|
||||||
folders := inputPath.Folders()
|
|
||||||
base = append(append([]string{}, folders[:3]...), restoreDest.ContainerName)
|
|
||||||
|
|
||||||
if len(folders) > 3 {
|
|
||||||
base = append(base, folders[3:]...)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if inputPath.Service() == path.ExchangeService && inputPath.Category() == path.EmailCategory {
|
|
||||||
base = append(base, inputPath.Folders()...)
|
|
||||||
}
|
|
||||||
|
|
||||||
return mustToDataLayerPath(
|
|
||||||
t,
|
|
||||||
inputPath.Service(),
|
|
||||||
inputPath.Tenant(),
|
|
||||||
inputPath.ResourceOwner(),
|
|
||||||
inputPath.Category(),
|
|
||||||
base,
|
|
||||||
false,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO(ashmrtn): Make this an actual mock class that can be used in other
|
|
||||||
// packages.
|
|
||||||
type mockRestoreCollection struct {
|
|
||||||
data.Collection
|
|
||||||
auxItems map[string]data.Stream
|
|
||||||
}
|
|
||||||
|
|
||||||
func (rc mockRestoreCollection) Fetch(
|
|
||||||
ctx context.Context,
|
|
||||||
name string,
|
|
||||||
) (data.Stream, error) {
|
|
||||||
res := rc.auxItems[name]
|
|
||||||
if res == nil {
|
|
||||||
return nil, data.ErrNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
return res, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func collectionsForInfo(
|
|
||||||
t *testing.T,
|
|
||||||
service path.ServiceType,
|
|
||||||
tenant, user string,
|
|
||||||
dest control.RestoreDestination,
|
|
||||||
allInfo []colInfo,
|
|
||||||
backupVersion int,
|
|
||||||
) (int, int, []data.RestoreCollection, map[string]map[string][]byte) {
|
|
||||||
var (
|
|
||||||
collections = make([]data.RestoreCollection, 0, len(allInfo))
|
|
||||||
expectedData = make(map[string]map[string][]byte, len(allInfo))
|
|
||||||
totalItems = 0
|
|
||||||
kopiaEntries = 0
|
|
||||||
)
|
|
||||||
|
|
||||||
for _, info := range allInfo {
|
|
||||||
pth := mustToDataLayerPath(
|
|
||||||
t,
|
|
||||||
service,
|
|
||||||
tenant,
|
|
||||||
user,
|
|
||||||
info.category,
|
|
||||||
info.pathElements,
|
|
||||||
false)
|
|
||||||
|
|
||||||
mc := exchMock.NewCollection(pth, pth, len(info.items))
|
|
||||||
baseDestPath := backupOutputPathFromRestore(t, dest, pth)
|
|
||||||
|
|
||||||
baseExpected := expectedData[baseDestPath.String()]
|
|
||||||
if baseExpected == nil {
|
|
||||||
expectedData[baseDestPath.String()] = make(map[string][]byte, len(info.items))
|
|
||||||
baseExpected = expectedData[baseDestPath.String()]
|
|
||||||
}
|
|
||||||
|
|
||||||
for i := 0; i < len(info.items); i++ {
|
|
||||||
mc.Names[i] = info.items[i].name
|
|
||||||
mc.Data[i] = info.items[i].data
|
|
||||||
|
|
||||||
baseExpected[info.items[i].lookupKey] = info.items[i].data
|
|
||||||
|
|
||||||
// We do not count metadata files against item count
|
|
||||||
if backupVersion > 0 &&
|
|
||||||
(service == path.OneDriveService || service == path.SharePointService) &&
|
|
||||||
metadata.HasMetaSuffix(info.items[i].name) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
totalItems++
|
|
||||||
}
|
|
||||||
|
|
||||||
c := mockRestoreCollection{Collection: mc, auxItems: map[string]data.Stream{}}
|
|
||||||
|
|
||||||
for _, aux := range info.auxItems {
|
|
||||||
c.auxItems[aux.name] = &exchMock.Data{
|
|
||||||
ID: aux.name,
|
|
||||||
Reader: io.NopCloser(bytes.NewReader(aux.data)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
collections = append(collections, c)
|
|
||||||
kopiaEntries += len(info.items)
|
|
||||||
}
|
|
||||||
|
|
||||||
return totalItems, kopiaEntries, collections, expectedData
|
|
||||||
}
|
|
||||||
|
|
||||||
func getSelectorWith(
|
func getSelectorWith(
|
||||||
t *testing.T,
|
t *testing.T,
|
||||||
service path.ServiceType,
|
service path.ServiceType,
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
361
src/internal/connector/graph_connector_onedrive_test_helper.go
Normal file
361
src/internal/connector/graph_connector_onedrive_test_helper.go
Normal file
@ -0,0 +1,361 @@
|
|||||||
|
package connector
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/alcionai/clues"
|
||||||
|
"github.com/alcionai/corso/src/internal/connector/onedrive"
|
||||||
|
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
|
||||||
|
"github.com/alcionai/corso/src/internal/data"
|
||||||
|
"github.com/alcionai/corso/src/internal/version"
|
||||||
|
"github.com/alcionai/corso/src/pkg/path"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
"golang.org/x/exp/maps"
|
||||||
|
)
|
||||||
|
|
||||||
|
// For any version post this(inclusive), we expect to be using IDs for
|
||||||
|
// permission instead of email
|
||||||
|
const versionPermissionSwitchedToID = version.OneDrive4DirIncludesPermissions
|
||||||
|
|
||||||
|
var rootFolder = "root:"
|
||||||
|
|
||||||
|
type PermData struct {
|
||||||
|
User string // user is only for older versions
|
||||||
|
EntityID string
|
||||||
|
Roles []string
|
||||||
|
SharingMode onedrive.SharingMode
|
||||||
|
}
|
||||||
|
|
||||||
|
type ItemData struct {
|
||||||
|
Name string
|
||||||
|
Data []byte
|
||||||
|
Perms PermData
|
||||||
|
}
|
||||||
|
|
||||||
|
type OnedriveColInfo struct {
|
||||||
|
PathElements []string
|
||||||
|
Perms PermData
|
||||||
|
Files []ItemData
|
||||||
|
Folders []ItemData
|
||||||
|
}
|
||||||
|
|
||||||
|
type ColInfo struct {
|
||||||
|
// Elements (in order) for the path representing this collection. Should
|
||||||
|
// only contain elements after the prefix that corso uses for the path. For
|
||||||
|
// example, a collection for the Inbox folder in exchange mail would just be
|
||||||
|
// "Inbox".
|
||||||
|
PathElements []string
|
||||||
|
Category path.CategoryType
|
||||||
|
Items []itemInfo
|
||||||
|
// auxItems are items that can be retrieved with Fetch but won't be returned
|
||||||
|
// by Items(). These files do not directly participate in comparisosn at the
|
||||||
|
// end of a test.
|
||||||
|
AuxItems []itemInfo
|
||||||
|
}
|
||||||
|
|
||||||
|
type itemInfo struct {
|
||||||
|
// lookupKey is a string that can be used to find this data from a set of
|
||||||
|
// other data in the same collection. This key should be something that will
|
||||||
|
// be the same before and after restoring the item in M365 and may not be
|
||||||
|
// the M365 ID. When restoring items out of place, the item is assigned a
|
||||||
|
// new ID making it unsuitable for a lookup key.
|
||||||
|
lookupKey string
|
||||||
|
name string
|
||||||
|
data []byte
|
||||||
|
}
|
||||||
|
|
||||||
|
type onedriveCollection struct {
|
||||||
|
service path.ServiceType
|
||||||
|
pathElements []string
|
||||||
|
items []itemInfo
|
||||||
|
aux []itemInfo
|
||||||
|
backupVersion int
|
||||||
|
t *testing.T
|
||||||
|
}
|
||||||
|
|
||||||
|
type testOneDriveData struct {
|
||||||
|
FileName string `json:"fileName,omitempty"`
|
||||||
|
Data []byte `json:"data,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c onedriveCollection) collection() ColInfo {
|
||||||
|
cat := path.FilesCategory
|
||||||
|
if c.service == path.SharePointService {
|
||||||
|
cat = path.LibrariesCategory
|
||||||
|
}
|
||||||
|
|
||||||
|
return ColInfo{
|
||||||
|
PathElements: c.pathElements,
|
||||||
|
Category: cat,
|
||||||
|
Items: c.items,
|
||||||
|
AuxItems: c.aux,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func getMetadata(fileName string, perm PermData, permUseID bool) onedrive.Metadata {
|
||||||
|
if len(perm.User) == 0 || len(perm.Roles) == 0 ||
|
||||||
|
perm.SharingMode != onedrive.SharingModeCustom {
|
||||||
|
return onedrive.Metadata{
|
||||||
|
FileName: fileName,
|
||||||
|
SharingMode: perm.SharingMode,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// In case of permissions, the id will usually be same for same
|
||||||
|
// user/role combo unless deleted and readded, but we have to do
|
||||||
|
// this as we only have two users of which one is already taken.
|
||||||
|
id := uuid.NewString()
|
||||||
|
uperm := onedrive.UserPermission{ID: id, Roles: perm.Roles}
|
||||||
|
|
||||||
|
if permUseID {
|
||||||
|
uperm.EntityID = perm.EntityID
|
||||||
|
} else {
|
||||||
|
uperm.Email = perm.User
|
||||||
|
}
|
||||||
|
|
||||||
|
testMeta := onedrive.Metadata{
|
||||||
|
FileName: fileName,
|
||||||
|
Permissions: []onedrive.UserPermission{uperm},
|
||||||
|
}
|
||||||
|
|
||||||
|
return testMeta
|
||||||
|
}
|
||||||
|
|
||||||
|
func onedriveItemWithData(
|
||||||
|
t *testing.T,
|
||||||
|
name, lookupKey string,
|
||||||
|
fileData []byte,
|
||||||
|
) itemInfo {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
content := testOneDriveData{
|
||||||
|
FileName: lookupKey,
|
||||||
|
Data: fileData,
|
||||||
|
}
|
||||||
|
|
||||||
|
serialized, err := json.Marshal(content)
|
||||||
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
|
|
||||||
|
return itemInfo{
|
||||||
|
name: name,
|
||||||
|
data: serialized,
|
||||||
|
lookupKey: lookupKey,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func onedriveMetadata(
|
||||||
|
t *testing.T,
|
||||||
|
fileName, itemID, lookupKey string,
|
||||||
|
perm PermData,
|
||||||
|
permUseID bool,
|
||||||
|
) itemInfo {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
testMeta := getMetadata(fileName, perm, permUseID)
|
||||||
|
|
||||||
|
testMetaJSON, err := json.Marshal(testMeta)
|
||||||
|
require.NoError(t, err, "marshalling metadata", clues.ToCore(err))
|
||||||
|
|
||||||
|
return itemInfo{
|
||||||
|
name: itemID,
|
||||||
|
data: testMetaJSON,
|
||||||
|
lookupKey: lookupKey,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *onedriveCollection) withFile(name string, fileData []byte, perm PermData) *onedriveCollection {
|
||||||
|
switch c.backupVersion {
|
||||||
|
case 0:
|
||||||
|
// Lookups will occur using the most recent version of things so we need
|
||||||
|
// the embedded file name to match that.
|
||||||
|
c.items = append(c.items, onedriveItemWithData(
|
||||||
|
c.t,
|
||||||
|
name,
|
||||||
|
name+metadata.DataFileSuffix,
|
||||||
|
fileData))
|
||||||
|
|
||||||
|
// v1-5, early metadata design
|
||||||
|
case version.OneDrive1DataAndMetaFiles, 2, version.OneDrive3IsMetaMarker,
|
||||||
|
version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName:
|
||||||
|
c.items = append(c.items, onedriveItemWithData(
|
||||||
|
c.t,
|
||||||
|
name+metadata.DataFileSuffix,
|
||||||
|
name+metadata.DataFileSuffix,
|
||||||
|
fileData))
|
||||||
|
|
||||||
|
md := onedriveMetadata(
|
||||||
|
c.t,
|
||||||
|
"",
|
||||||
|
name+metadata.MetaFileSuffix,
|
||||||
|
name+metadata.MetaFileSuffix,
|
||||||
|
perm,
|
||||||
|
c.backupVersion >= versionPermissionSwitchedToID)
|
||||||
|
c.items = append(c.items, md)
|
||||||
|
c.aux = append(c.aux, md)
|
||||||
|
|
||||||
|
// v6+ current metadata design
|
||||||
|
case version.OneDrive6NameInMeta, version.OneDrive7LocationRef, version.All8MigrateUserPNToID:
|
||||||
|
c.items = append(c.items, onedriveItemWithData(
|
||||||
|
c.t,
|
||||||
|
name+metadata.DataFileSuffix,
|
||||||
|
name+metadata.DataFileSuffix,
|
||||||
|
fileData))
|
||||||
|
|
||||||
|
md := onedriveMetadata(
|
||||||
|
c.t,
|
||||||
|
name,
|
||||||
|
name+metadata.MetaFileSuffix,
|
||||||
|
name,
|
||||||
|
perm,
|
||||||
|
c.backupVersion >= versionPermissionSwitchedToID)
|
||||||
|
c.items = append(c.items, md)
|
||||||
|
c.aux = append(c.aux, md)
|
||||||
|
|
||||||
|
default:
|
||||||
|
assert.FailNowf(c.t, "bad backup version", "version %d", c.backupVersion)
|
||||||
|
}
|
||||||
|
|
||||||
|
return c
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *onedriveCollection) withFolder(name string, perm PermData) *onedriveCollection {
|
||||||
|
switch c.backupVersion {
|
||||||
|
case 0, version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName,
|
||||||
|
version.OneDrive6NameInMeta, version.OneDrive7LocationRef, version.All8MigrateUserPNToID:
|
||||||
|
return c
|
||||||
|
|
||||||
|
case version.OneDrive1DataAndMetaFiles, 2, version.OneDrive3IsMetaMarker:
|
||||||
|
c.items = append(
|
||||||
|
c.items,
|
||||||
|
onedriveMetadata(
|
||||||
|
c.t,
|
||||||
|
"",
|
||||||
|
name+metadata.DirMetaFileSuffix,
|
||||||
|
name+metadata.DirMetaFileSuffix,
|
||||||
|
perm,
|
||||||
|
c.backupVersion >= versionPermissionSwitchedToID))
|
||||||
|
|
||||||
|
default:
|
||||||
|
assert.FailNowf(c.t, "bad backup version", "version %d", c.backupVersion)
|
||||||
|
}
|
||||||
|
|
||||||
|
return c
|
||||||
|
}
|
||||||
|
|
||||||
|
// withPermissions adds permissions to the folder represented by this
|
||||||
|
// onedriveCollection.
|
||||||
|
func (c *onedriveCollection) withPermissions(perm PermData) *onedriveCollection {
|
||||||
|
// These versions didn't store permissions for the folder or didn't store them
|
||||||
|
// in the folder's collection.
|
||||||
|
if c.backupVersion < version.OneDrive4DirIncludesPermissions {
|
||||||
|
return c
|
||||||
|
}
|
||||||
|
|
||||||
|
name := c.pathElements[len(c.pathElements)-1]
|
||||||
|
metaName := name
|
||||||
|
|
||||||
|
if c.backupVersion >= version.OneDrive5DirMetaNoName {
|
||||||
|
// We switched to just .dirmeta for metadata file names.
|
||||||
|
metaName = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
if name == rootFolder {
|
||||||
|
return c
|
||||||
|
}
|
||||||
|
|
||||||
|
md := onedriveMetadata(
|
||||||
|
c.t,
|
||||||
|
name,
|
||||||
|
metaName+metadata.DirMetaFileSuffix,
|
||||||
|
metaName+metadata.DirMetaFileSuffix,
|
||||||
|
perm,
|
||||||
|
c.backupVersion >= versionPermissionSwitchedToID)
|
||||||
|
|
||||||
|
c.items = append(c.items, md)
|
||||||
|
c.aux = append(c.aux, md)
|
||||||
|
|
||||||
|
return c
|
||||||
|
}
|
||||||
|
|
||||||
|
func newOneDriveCollection(
|
||||||
|
t *testing.T,
|
||||||
|
service path.ServiceType,
|
||||||
|
pathElements []string,
|
||||||
|
backupVersion int,
|
||||||
|
) *onedriveCollection {
|
||||||
|
return &onedriveCollection{
|
||||||
|
service: service,
|
||||||
|
pathElements: pathElements,
|
||||||
|
backupVersion: backupVersion,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func DataForInfo(
|
||||||
|
t *testing.T,
|
||||||
|
service path.ServiceType,
|
||||||
|
cols []OnedriveColInfo,
|
||||||
|
backupVersion int,
|
||||||
|
) []ColInfo {
|
||||||
|
var res []ColInfo
|
||||||
|
|
||||||
|
for _, c := range cols {
|
||||||
|
onedriveCol := newOneDriveCollection(t, service, c.PathElements, backupVersion)
|
||||||
|
|
||||||
|
for _, f := range c.Files {
|
||||||
|
onedriveCol.withFile(f.Name, f.Data, f.Perms)
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, d := range c.Folders {
|
||||||
|
onedriveCol.withFolder(d.Name, d.Perms)
|
||||||
|
}
|
||||||
|
|
||||||
|
onedriveCol.withPermissions(c.Perms)
|
||||||
|
|
||||||
|
res = append(res, onedriveCol.collection())
|
||||||
|
}
|
||||||
|
|
||||||
|
return res
|
||||||
|
}
|
||||||
|
|
||||||
|
//-------------------------------------------------------------
|
||||||
|
// Exchange Functions
|
||||||
|
//-------------------------------------------------------------
|
||||||
|
|
||||||
|
func GetCollectionsAndExpected(
|
||||||
|
t *testing.T,
|
||||||
|
config ConfigInfo,
|
||||||
|
testCollections []ColInfo,
|
||||||
|
backupVersion int,
|
||||||
|
) (int, int, []data.RestoreCollection, map[string]map[string][]byte) {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
var (
|
||||||
|
collections []data.RestoreCollection
|
||||||
|
expectedData = map[string]map[string][]byte{}
|
||||||
|
totalItems = 0
|
||||||
|
totalKopiaItems = 0
|
||||||
|
)
|
||||||
|
|
||||||
|
for _, owner := range config.ResourceOwners {
|
||||||
|
numItems, kopiaItems, ownerCollections, userExpectedData := collectionsForInfo(
|
||||||
|
t,
|
||||||
|
config.Service,
|
||||||
|
config.Tenant,
|
||||||
|
owner,
|
||||||
|
config.Dest,
|
||||||
|
testCollections,
|
||||||
|
backupVersion,
|
||||||
|
)
|
||||||
|
|
||||||
|
collections = append(collections, ownerCollections...)
|
||||||
|
totalItems += numItems
|
||||||
|
totalKopiaItems += kopiaItems
|
||||||
|
|
||||||
|
maps.Copy(expectedData, userExpectedData)
|
||||||
|
}
|
||||||
|
|
||||||
|
return totalItems, totalKopiaItems, collections, expectedData
|
||||||
|
}
|
||||||
@ -11,7 +11,6 @@ import (
|
|||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
"github.com/stretchr/testify/suite"
|
"github.com/stretchr/testify/suite"
|
||||||
"golang.org/x/exp/maps"
|
|
||||||
|
|
||||||
inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
|
inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
|
||||||
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
|
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
|
||||||
@@ -403,70 +402,30 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
 	}
 }
 
-//-------------------------------------------------------------
-// Exchange Functions
-//-------------------------------------------------------------
-
-func getCollectionsAndExpected(
-	t *testing.T,
-	config configInfo,
-	testCollections []colInfo,
-	backupVersion int,
-) (int, int, []data.RestoreCollection, map[string]map[string][]byte) {
-	t.Helper()
-
-	var (
-		collections     []data.RestoreCollection
-		expectedData    = map[string]map[string][]byte{}
-		totalItems      = 0
-		totalKopiaItems = 0
-	)
-
-	for _, owner := range config.resourceOwners {
-		numItems, kopiaItems, ownerCollections, userExpectedData := collectionsForInfo(
-			t,
-			config.service,
-			config.tenant,
-			owner,
-			config.dest,
-			testCollections,
-			backupVersion,
-		)
-
-		collections = append(collections, ownerCollections...)
-		totalItems += numItems
-		totalKopiaItems += kopiaItems
-
-		maps.Copy(expectedData, userExpectedData)
-	}
-
-	return totalItems, totalKopiaItems, collections, expectedData
-}
-
 func runRestore(
 	t *testing.T,
 	ctx context.Context, //revive:disable-line:context-as-argument
-	config configInfo,
+	config ConfigInfo,
 	backupVersion int,
 	collections []data.RestoreCollection,
 	numRestoreItems int,
 ) {
 	t.Logf(
 		"Restoring collections to %s for resourceOwners(s) %v\n",
-		config.dest.ContainerName,
-		config.resourceOwners)
+		config.Dest.ContainerName,
+		config.ResourceOwners)
 
 	start := time.Now()
 
-	restoreGC := loadConnector(ctx, t, config.resource)
-	restoreSel := getSelectorWith(t, config.service, config.resourceOwners, true)
+	restoreGC := loadConnector(ctx, t, config.Resource)
+	restoreSel := getSelectorWith(t, config.Service, config.ResourceOwners, true)
 	deets, err := restoreGC.ConsumeRestoreCollections(
 		ctx,
 		backupVersion,
-		config.acct,
+		config.Acct,
 		restoreSel,
-		config.dest,
-		config.opts,
+		config.Dest,
+		config.Opts,
 		collections,
 		fault.New(true))
 	require.NoError(t, err, clues.ToCore(err))
@@ -490,30 +449,30 @@ func runRestore(
 func runBackupAndCompare(
 	t *testing.T,
 	ctx context.Context, //revive:disable-line:context-as-argument
-	config configInfo,
+	config ConfigInfo,
 	expectedData map[string]map[string][]byte,
 	totalItems int,
 	totalKopiaItems int,
-	inputCollections []colInfo,
+	inputCollections []ColInfo,
 ) {
 	t.Helper()
 
 	// Run a backup and compare its output with what we put in.
 	cats := make(map[path.CategoryType]struct{}, len(inputCollections))
 	for _, c := range inputCollections {
-		cats[c.category] = struct{}{}
+		cats[c.Category] = struct{}{}
 	}
 
 	var (
-		expectedDests = make([]destAndCats, 0, len(config.resourceOwners))
+		expectedDests = make([]destAndCats, 0, len(config.ResourceOwners))
 		idToName      = map[string]string{}
 		nameToID      = map[string]string{}
 	)
 
-	for _, ro := range config.resourceOwners {
+	for _, ro := range config.ResourceOwners {
 		expectedDests = append(expectedDests, destAndCats{
 			resourceOwner: ro,
-			dest:          config.dest.ContainerName,
+			dest:          config.Dest.ContainerName,
 			cats:          cats,
 		})
 
@@ -521,10 +480,10 @@ func runBackupAndCompare(
 		nameToID[ro] = ro
 	}
 
-	backupGC := loadConnector(ctx, t, config.resource)
+	backupGC := loadConnector(ctx, t, config.Resource)
 	backupGC.IDNameLookup = inMock.NewCache(idToName, nameToID)
 
-	backupSel := backupSelectorForExpected(t, config.service, expectedDests)
+	backupSel := backupSelectorForExpected(t, config.Service, expectedDests)
 	t.Logf("Selective backup of %s\n", backupSel)
 
 	start := time.Now()
@@ -534,7 +493,7 @@ func runBackupAndCompare(
 		backupSel,
 		nil,
 		version.NoBackup,
-		config.opts,
+		config.Opts,
 		fault.New(true))
 	require.NoError(t, err, clues.ToCore(err))
 	// No excludes yet because this isn't an incremental backup.
@@ -550,8 +509,8 @@ func runBackupAndCompare(
 		totalKopiaItems,
 		expectedData,
 		dcs,
-		config.dest,
-		config.opts.RestorePermissions)
+		config.Dest,
+		config.Opts.RestorePermissions)
 
 	status := backupGC.Wait()
 
@@ -572,17 +531,17 @@ func runRestoreBackupTest(
 	ctx, flush := tester.NewContext()
 	defer flush()
 
-	config := configInfo{
-		acct:           acct,
-		opts:           opts,
-		resource:       test.resource,
-		service:        test.service,
-		tenant:         tenant,
-		resourceOwners: resourceOwners,
-		dest:           tester.DefaultTestRestoreDestination(),
+	config := ConfigInfo{
+		Acct:           acct,
+		Opts:           opts,
+		Resource:       test.resource,
+		Service:        test.service,
+		Tenant:         tenant,
+		ResourceOwners: resourceOwners,
+		Dest:           tester.DefaultTestRestoreDestination(),
 	}
 
-	totalItems, totalKopiaItems, collections, expectedData := getCollectionsAndExpected(
+	totalItems, totalKopiaItems, collections, expectedData := GetCollectionsAndExpected(
 		t,
 		config,
 		test.collections,
@@ -618,17 +577,17 @@ func runRestoreTestWithVerion(
 	ctx, flush := tester.NewContext()
 	defer flush()
 
-	config := configInfo{
-		acct:           acct,
-		opts:           opts,
-		resource:       test.resource,
-		service:        test.service,
-		tenant:         tenant,
-		resourceOwners: resourceOwners,
-		dest:           tester.DefaultTestRestoreDestination(),
+	config := ConfigInfo{
+		Acct:           acct,
+		Opts:           opts,
+		Resource:       test.resource,
+		Service:        test.service,
+		Tenant:         tenant,
+		ResourceOwners: resourceOwners,
+		Dest:           tester.DefaultTestRestoreDestination(),
 	}
 
-	totalItems, _, collections, _ := getCollectionsAndExpected(
+	totalItems, _, collections, _ := GetCollectionsAndExpected(
 		t,
 		config,
 		test.collectionsPrevious,
@@ -657,17 +616,17 @@ func runRestoreBackupTestVersions(
 	ctx, flush := tester.NewContext()
 	defer flush()
 
-	config := configInfo{
-		acct:           acct,
-		opts:           opts,
-		resource:       test.resource,
-		service:        test.service,
-		tenant:         tenant,
-		resourceOwners: resourceOwners,
-		dest:           tester.DefaultTestRestoreDestination(),
+	config := ConfigInfo{
+		Acct:           acct,
+		Opts:           opts,
+		Resource:       test.resource,
+		Service:        test.service,
+		Tenant:         tenant,
+		ResourceOwners: resourceOwners,
+		Dest:           tester.DefaultTestRestoreDestination(),
 	}
 
-	totalItems, _, collections, _ := getCollectionsAndExpected(
+	totalItems, _, collections, _ := GetCollectionsAndExpected(
 		t,
 		config,
 		test.collectionsPrevious,
@@ -682,7 +641,7 @@ func runRestoreBackupTestVersions(
 		totalItems)
 
 	// Get expected output for new version.
-	totalItems, totalKopiaItems, _, expectedData := getCollectionsAndExpected(
+	totalItems, totalKopiaItems, _, expectedData := GetCollectionsAndExpected(
 		t,
 		config,
 		test.collectionsLatest,
@@ -707,11 +666,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
 			name:     "EmailsWithAttachments",
 			service:  path.ExchangeService,
 			resource: Users,
-			collections: []colInfo{
+			collections: []ColInfo{
 				{
-					pathElements: []string{"Inbox"},
-					category:     path.EmailCategory,
-					items: []itemInfo{
+					PathElements: []string{"Inbox"},
+					Category:     path.EmailCategory,
+					Items: []itemInfo{
 						{
 							name: "someencodeditemID",
 							data: exchMock.MessageWithDirectAttachment(
@@ -734,11 +693,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
 			name:     "MultipleEmailsMultipleFolders",
 			service:  path.ExchangeService,
 			resource: Users,
-			collections: []colInfo{
+			collections: []ColInfo{
 				{
-					pathElements: []string{"Inbox"},
-					category:     path.EmailCategory,
-					items: []itemInfo{
+					PathElements: []string{"Inbox"},
+					Category:     path.EmailCategory,
+					Items: []itemInfo{
 						{
 							name: "someencodeditemID",
 							data: exchMock.MessageWithBodyBytes(
@@ -751,9 +710,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
 					},
 				},
 				{
-					pathElements: []string{"Work"},
-					category:     path.EmailCategory,
-					items: []itemInfo{
+					PathElements: []string{"Work"},
+					Category:     path.EmailCategory,
+					Items: []itemInfo{
 						{
 							name: "someencodeditemID2",
 							data: exchMock.MessageWithBodyBytes(
@@ -775,9 +734,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
 					},
 				},
 				{
-					pathElements: []string{"Work", "Inbox"},
-					category:     path.EmailCategory,
-					items: []itemInfo{
+					PathElements: []string{"Work", "Inbox"},
+					Category:     path.EmailCategory,
+					Items: []itemInfo{
 						{
 							name: "someencodeditemID4",
 							data: exchMock.MessageWithBodyBytes(
@@ -790,9 +749,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
 					},
 				},
 				{
-					pathElements: []string{"Work", "Inbox", "Work"},
-					category:     path.EmailCategory,
-					items: []itemInfo{
+					PathElements: []string{"Work", "Inbox", "Work"},
+					Category:     path.EmailCategory,
+					Items: []itemInfo{
 						{
 							name: "someencodeditemID5",
 							data: exchMock.MessageWithBodyBytes(
@@ -810,11 +769,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
 			name:     "MultipleContactsSingleFolder",
 			service:  path.ExchangeService,
 			resource: Users,
-			collections: []colInfo{
+			collections: []ColInfo{
 				{
-					pathElements: []string{"Contacts"},
-					category:     path.ContactsCategory,
-					items: []itemInfo{
+					PathElements: []string{"Contacts"},
+					Category:     path.ContactsCategory,
+					Items: []itemInfo{
 						{
 							name: "someencodeditemID",
 							data: exchMock.ContactBytes("Ghimley"),
@@ -838,11 +797,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
 			name:     "MultipleContactsMultipleFolders",
 			service:  path.ExchangeService,
 			resource: Users,
-			collections: []colInfo{
+			collections: []ColInfo{
 				{
-					pathElements: []string{"Work"},
-					category:     path.ContactsCategory,
-					items: []itemInfo{
+					PathElements: []string{"Work"},
+					Category:     path.ContactsCategory,
+					Items: []itemInfo{
 						{
 							name: "someencodeditemID",
 							data: exchMock.ContactBytes("Ghimley"),
@@ -861,9 +820,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
 					},
 				},
 				{
-					pathElements: []string{"Personal"},
-					category:     path.ContactsCategory,
-					items: []itemInfo{
+					PathElements: []string{"Personal"},
+					Category:     path.ContactsCategory,
+					Items: []itemInfo{
 						{
 							name: "someencodeditemID4",
 							data: exchMock.ContactBytes("Argon"),
@@ -881,11 +840,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
 		// {
 		// 	name: "MultipleEventsSingleCalendar",
 		// 	service: path.ExchangeService,
-		// 	collections: []colInfo{
+		// 	collections: []ColInfo{
 		// 		{
-		// 			pathElements: []string{"Work"},
-		// 			category: path.EventsCategory,
-		// 			items: []itemInfo{
+		// 			PathElements: []string{"Work"},
+		// 			Category: path.EventsCategory,
+		// 			Items: []itemInfo{
 		// 				{
 		// 					name: "someencodeditemID",
 		// 					data: exchMock.EventWithSubjectBytes("Ghimley"),
@@ -908,11 +867,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
 		// {
 		// 	name: "MultipleEventsMultipleCalendars",
 		// 	service: path.ExchangeService,
-		// 	collections: []colInfo{
+		// 	collections: []ColInfo{
 		// 		{
-		// 			pathElements: []string{"Work"},
-		// 			category: path.EventsCategory,
-		// 			items: []itemInfo{
+		// 			PathElements: []string{"Work"},
+		// 			Category: path.EventsCategory,
+		// 			Items: []itemInfo{
 		// 				{
 		// 					name: "someencodeditemID",
 		// 					data: exchMock.EventWithSubjectBytes("Ghimley"),
@@ -931,9 +890,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
 		// 			},
 		// 		},
 		// 		{
-		// 			pathElements: []string{"Personal"},
-		// 			category: path.EventsCategory,
-		// 			items: []itemInfo{
+		// 			PathElements: []string{"Personal"},
+		// 			Category: path.EventsCategory,
+		// 			Items: []itemInfo{
 		// 				{
 		// 					name: "someencodeditemID4",
 		// 					data: exchMock.EventWithSubjectBytes("Argon"),
@@ -973,11 +932,11 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
 			name:     "Contacts",
 			service:  path.ExchangeService,
 			resource: Users,
-			collections: []colInfo{
+			collections: []ColInfo{
 				{
-					pathElements: []string{"Work"},
-					category:     path.ContactsCategory,
-					items: []itemInfo{
+					PathElements: []string{"Work"},
+					Category:     path.ContactsCategory,
+					Items: []itemInfo{
 						{
 							name: "someencodeditemID",
 							data: exchMock.ContactBytes("Ghimley"),
@@ -986,9 +945,9 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
 					},
 				},
 				{
-					pathElements: []string{"Personal"},
-					category:     path.ContactsCategory,
-					items: []itemInfo{
+					PathElements: []string{"Personal"},
+					Category:     path.ContactsCategory,
+					Items: []itemInfo{
 						{
 							name: "someencodeditemID2",
 							data: exchMock.ContactBytes("Irgot"),
@@ -1001,11 +960,11 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
 		// {
 		// 	name: "Events",
 		// 	service: path.ExchangeService,
-		// 	collections: []colInfo{
+		// 	collections: []ColInfo{
 		// 		{
-		// 			pathElements: []string{"Work"},
-		// 			category: path.EventsCategory,
-		// 			items: []itemInfo{
+		// 			PathElements: []string{"Work"},
+		// 			Category: path.EventsCategory,
+		// 			Items: []itemInfo{
 		// 				{
 		// 					name: "someencodeditemID",
 		// 					data: exchMock.EventWithSubjectBytes("Ghimley"),
@@ -1014,9 +973,9 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
 		// 			},
 		// 		},
 		// 		{
-		// 			pathElements: []string{"Personal"},
-		// 			category: path.EventsCategory,
-		// 			items: []itemInfo{
+		// 			PathElements: []string{"Personal"},
+		// 			Category: path.EventsCategory,
+		// 			Items: []itemInfo{
 		// 				{
 		// 					name: "someencodeditemID2",
 		// 					data: exchMock.EventWithSubjectBytes("Irgot"),
@@ -1047,7 +1006,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
 			resourceOwner: suite.user,
 			dest:          dest.ContainerName,
 			cats: map[path.CategoryType]struct{}{
-				collection.category: {},
+				collection.Category: {},
 			},
 		})
 
@@ -1057,7 +1016,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
 			suite.connector.tenant,
 			suite.user,
 			dest,
-			[]colInfo{collection},
+			[]ColInfo{collection},
 			version.Backup,
 		)
 		allItems += totalItems
@@ -1153,11 +1112,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup_largeMailAttac
 			name:     "EmailsWithLargeAttachments",
 			service:  path.ExchangeService,
 			resource: Users,
-			collections: []colInfo{
+			collections: []ColInfo{
 				{
-					pathElements: []string{"Inbox"},
-					category:     path.EmailCategory,
-					items: []itemInfo{
+					PathElements: []string{"Inbox"},
+					Category:     path.EmailCategory,
+					Items: []itemInfo{
 						{
 							name: "35mbAttachment",
 							data: exchMock.MessageWithSizedAttachment(subjectText, 35),
src/internal/connector/graph_connector_test_helpers.go (new file, 162 lines)
@@ -0,0 +1,162 @@
package connector

import (
	"bytes"
	"context"
	"io"
	"testing"

	"github.com/alcionai/clues"
	exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
	"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
	"github.com/alcionai/corso/src/internal/data"
	"github.com/alcionai/corso/src/pkg/account"
	"github.com/alcionai/corso/src/pkg/control"
	"github.com/alcionai/corso/src/pkg/path"
	"github.com/stretchr/testify/require"
)

type ConfigInfo struct {
	Acct           account.Account
	Opts           control.Options
	Resource       Resource
	Service        path.ServiceType
	Tenant         string
	ResourceOwners []string
	Dest           control.RestoreDestination
}

func mustToDataLayerPath(
	t *testing.T,
	service path.ServiceType,
	tenant, resourceOwner string,
	category path.CategoryType,
	elements []string,
	isItem bool,
) path.Path {
	res, err := path.Build(tenant, resourceOwner, service, category, isItem, elements...)
	require.NoError(t, err, clues.ToCore(err))

	return res
}

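A quick illustration of the helper above: mustToDataLayerPath is path.Build with the error asserted away, so building, say, an Exchange Inbox folder path looks like the call below. The tenant and owner literals are placeholders, and t is the test's *testing.T.

// Sketch only: argument order follows mustToDataLayerPath above; the literal
// tenant/owner values are placeholders.
inboxPath := mustToDataLayerPath(
	t,
	path.ExchangeService,
	"tenant-id", "user@example.com",
	path.EmailCategory,
	[]string{"Inbox"},
	false)
_ = inboxPath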
// backupOutputPathFromRestore returns a path.Path denoting the location in
// kopia the data will be placed at. The location is a data-type specific
// combination of the location the data was recently restored to and where the
// data was originally in the hierarchy.
func backupOutputPathFromRestore(
	t *testing.T,
	restoreDest control.RestoreDestination,
	inputPath path.Path,
) path.Path {
	base := []string{restoreDest.ContainerName}

	// OneDrive has leading information like the drive ID.
	if inputPath.Service() == path.OneDriveService || inputPath.Service() == path.SharePointService {
		folders := inputPath.Folders()
		base = append(append([]string{}, folders[:3]...), restoreDest.ContainerName)

		if len(folders) > 3 {
			base = append(base, folders[3:]...)
		}
	}

	if inputPath.Service() == path.ExchangeService && inputPath.Category() == path.EmailCategory {
		base = append(base, inputPath.Folders()...)
	}

	return mustToDataLayerPath(
		t,
		inputPath.Service(),
		inputPath.Tenant(),
		inputPath.ResourceOwner(),
		inputPath.Category(),
		base,
		false,
	)
}

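To make the OneDrive branch above concrete, here is a self-contained sketch of just the slice handling: the first three folder elements (the drive prefix) are kept, the restore container is spliced in, and any remaining subfolders follow. The element values are made up for illustration.

package main

import "fmt"

func main() {
	// Hypothetical OneDrive folder elements for an item's original path.
	folders := []string{"drives", "drive-id-1234", "root:", "folderA"}
	containerName := "restore-container"

	// Same manipulation as backupOutputPathFromRestore's OneDrive branch:
	// keep the drive prefix, splice in the restore container, then the rest.
	base := append(append([]string{}, folders[:3]...), containerName)
	if len(folders) > 3 {
		base = append(base, folders[3:]...)
	}

	fmt.Println(base) // [drives drive-id-1234 root: restore-container folderA]
}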
// TODO(ashmrtn): Make this an actual mock class that can be used in other
// packages.
type mockRestoreCollection struct {
	data.Collection
	auxItems map[string]data.Stream
}

func (rc mockRestoreCollection) Fetch(
	ctx context.Context,
	name string,
) (data.Stream, error) {
	res := rc.auxItems[name]
	if res == nil {
		return nil, data.ErrNotFound
	}

	return res, nil
}

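For context, Fetch is how restore code pulls auxiliary streams (for example OneDrive metadata) out of the mock by name, and misses surface data.ErrNotFound rather than a nil stream. A hedged sketch, assuming this package's imports plus the standard errors package; the item name is hypothetical.

// Sketch only: the collection value is assumed to come from collectionsForInfo below.
func fetchAuxSketch(ctx context.Context, rc mockRestoreCollection) (data.Stream, error) {
	item, err := rc.Fetch(ctx, "someitem.meta") // hypothetical aux item name
	if errors.Is(err, data.ErrNotFound) {
		// Names never registered in auxItems report ErrNotFound.
		return nil, err
	}

	return item, err
}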
func collectionsForInfo(
	t *testing.T,
	service path.ServiceType,
	tenant, user string,
	dest control.RestoreDestination,
	allInfo []ColInfo,
	backupVersion int,
) (int, int, []data.RestoreCollection, map[string]map[string][]byte) {
	var (
		collections  = make([]data.RestoreCollection, 0, len(allInfo))
		expectedData = make(map[string]map[string][]byte, len(allInfo))
		totalItems   = 0
		kopiaEntries = 0
	)

	for _, info := range allInfo {
		pth := mustToDataLayerPath(
			t,
			service,
			tenant,
			user,
			info.Category,
			info.PathElements,
			false)

		mc := exchMock.NewCollection(pth, pth, len(info.Items))
		baseDestPath := backupOutputPathFromRestore(t, dest, pth)

		baseExpected := expectedData[baseDestPath.String()]
		if baseExpected == nil {
			expectedData[baseDestPath.String()] = make(map[string][]byte, len(info.Items))
			baseExpected = expectedData[baseDestPath.String()]
		}

		for i := 0; i < len(info.Items); i++ {
			mc.Names[i] = info.Items[i].name
			mc.Data[i] = info.Items[i].data

			baseExpected[info.Items[i].lookupKey] = info.Items[i].data

			// We do not count metadata files against item count
			if backupVersion > 0 &&
				(service == path.OneDriveService || service == path.SharePointService) &&
				metadata.HasMetaSuffix(info.Items[i].name) {
				continue
			}

			totalItems++
		}

		c := mockRestoreCollection{Collection: mc, auxItems: map[string]data.Stream{}}

		for _, aux := range info.AuxItems {
			c.auxItems[aux.name] = &exchMock.Data{
				ID:     aux.name,
				Reader: io.NopCloser(bytes.NewReader(aux.data)),
			}
		}

		collections = append(collections, c)
		kopiaEntries += len(info.Items)
	}

	return totalItems, kopiaEntries, collections, expectedData
}
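One behavior worth noting in collectionsForInfo: for OneDrive and SharePoint backups at versions above 0, items whose names carry a metadata suffix are stored as kopia entries but are not counted as restorable items. A hedged illustration of inputs that exercise the rule; path.FilesCategory and the ".meta" name are assumptions about the OneDrive category constant and about what metadata.HasMetaSuffix matches.

// Sketch only: field names follow this branch's ColInfo/itemInfo.
func metadataCountingSketch(
	t *testing.T,
	tenant, user string,
	dest control.RestoreDestination,
) {
	col := ColInfo{
		PathElements: []string{"folderA"},
		Category:     path.FilesCategory, // assumed OneDrive category constant
		Items: []itemInfo{
			{name: "file.txt.data", data: []byte("file contents")},
			{name: "file.txt.meta", data: []byte(`{"permissions":[]}`)}, // assumed metadata item name
		},
	}

	// Any backupVersion > 0 triggers the metadata-skipping branch.
	totalItems, kopiaEntries, _, _ := collectionsForInfo(
		t, path.OneDriveService, tenant, user, dest, []ColInfo{col}, 1)

	// Expected under the rule above: kopiaEntries == 2, totalItems == 1.
	_ = totalItems
	_ = kopiaEntries
}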