diff --git a/src/cmd/factory/impl/common.go b/src/cmd/factory/impl/common.go index f0ab0696c..b07ea4a08 100644 --- a/src/cmd/factory/impl/common.go +++ b/src/cmd/factory/impl/common.go @@ -1,13 +1,11 @@ package impl import ( - "bytes" "context" - "encoding/json" "fmt" - "io" "os" "strings" + "testing" "time" "github.com/alcionai/clues" @@ -20,7 +18,6 @@ import ( "github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/connector" exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock" - "github.com/alcionai/corso/src/internal/connector/onedrive/metadata" "github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/version" @@ -82,7 +79,7 @@ func generateAndRestoreItems( } collections := []collection{{ - pathElements: []string{destFldr}, + PathElements: []string{destFldr}, category: cat, items: items, }} @@ -161,7 +158,7 @@ type collection struct { // only contain elements after the prefix that corso uses for the path. For // example, a collection for the Inbox folder in exchange mail would just be // "Inbox". - pathElements []string + PathElements []string category path.CategoryType items []item } @@ -181,7 +178,7 @@ func buildCollections( service, c.category, false, - c.pathElements...) + c.PathElements...) if err != nil { return nil, err } @@ -199,44 +196,44 @@ func buildCollections( return collections, nil } -type permData struct { - user string // user is only for older versions - entityID string - roles []string - sharingMode metadata.SharingMode -} +// type connector.PermData struct { +// user string // user is only for older versions +// entityID string +// roles []string +// sharingMode metadata.SharingMode +// } -type itemData struct { - name string - data []byte - perms permData -} +// type connector.ItemData struct { +// name string +// data []byte +// perms connector.PermData +// } -type itemInfo struct { - // lookupKey is a string that can be used to find this data from a set of - // other data in the same collection. This key should be something that will - // be the same before and after restoring the item in M365 and may not be - // the M365 ID. When restoring items out of place, the item is assigned a - // new ID making it unsuitable for a lookup key. - lookupKey string - name string - data []byte -} +// type itemInfo struct { +// // lookupKey is a string that can be used to find this data from a set of +// // other data in the same collection. This key should be something that will +// // be the same before and after restoring the item in M365 and may not be +// // the M365 ID. When restoring items out of place, the item is assigned a +// // new ID making it unsuitable for a lookup key. 
+// lookupKey string +// name string +// data []byte +// } -type onedriveCollection struct { - service path.ServiceType - pathElements []string - items []itemInfo - aux []itemInfo - backupVersion int -} +// type onedriveCollection struct { +// service path.ServiceType +// PathElements []string +// items []itemInfo +// aux []itemInfo +// backupVersion int +// } -type onedriveColInfo struct { - pathElements []string - perms permData - files []itemData - folders []itemData -} +// type connector.OnedriveColInfo struct { +// PathElements []string +// perms connector.PermData +// Files []connector.ItemData +// folders []connector.ItemData +// } var ( folderAName = "folder-a" @@ -277,7 +274,7 @@ func generateAndRestoreOnedriveItems( driveID := ptr.Val(d.GetId()) var ( - cols []onedriveColInfo + cols []connector.OnedriveColInfo rootPath = []string{"drives", driveID, "root:"} folderAPath = []string{"drives", driveID, "root:", folderAName} @@ -291,43 +288,43 @@ func generateAndRestoreOnedriveItems( ) for i := 0; i < count; i++ { - col := []onedriveColInfo{ + col := []connector.OnedriveColInfo{ // basic folder and file creation { - pathElements: rootPath, - files: []itemData{ + PathElements: rootPath, + Files: []connector.ItemData{ { - name: fmt.Sprintf("file-1st-count-%d-at-%s", i, currentTime), - data: fileAData, - perms: permData{ - user: secondaryUserName, - entityID: secondaryUserID, - roles: writePerm, + Name: fmt.Sprintf("file-1st-count-%d-at-%s", i, currentTime), + Data: fileAData, + Perms: connector.PermData{ + User: secondaryUserName, + EntityID: secondaryUserID, + Roles: writePerm, }, }, { - name: fmt.Sprintf("file-2nd-count-%d-at-%s", i, currentTime), - data: fileBData, + Name: fmt.Sprintf("file-2nd-count-%d-at-%s", i, currentTime), + Data: fileBData, }, }, - folders: []itemData{ + Folders: []connector.ItemData{ { - name: folderBName, + Name: folderBName, }, { - name: folderAName, - perms: permData{ - user: secondaryUserName, - entityID: secondaryUserID, - roles: readPerm, + Name: folderAName, + Perms: connector.PermData{ + User: secondaryUserName, + EntityID: secondaryUserID, + Roles: readPerm, }, }, { - name: folderCName, - perms: permData{ - user: secondaryUserName, - entityID: secondaryUserID, - roles: readPerm, + Name: folderCName, + Perms: connector.PermData{ + User: secondaryUserName, + EntityID: secondaryUserID, + Roles: readPerm, }, }, }, @@ -335,62 +332,62 @@ func generateAndRestoreOnedriveItems( { // a folder that has permissions with an item in the folder with // the different permissions. - pathElements: folderAPath, - files: []itemData{ + PathElements: folderAPath, + Files: []connector.ItemData{ { - name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime), - data: fileEData, - perms: permData{ - user: secondaryUserName, - entityID: secondaryUserID, - roles: writePerm, + Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime), + Data: fileEData, + Perms: connector.PermData{ + User: secondaryUserName, + EntityID: secondaryUserID, + Roles: writePerm, }, }, }, - perms: permData{ - user: secondaryUserName, - entityID: secondaryUserID, - roles: readPerm, + Perms: connector.PermData{ + User: secondaryUserName, + EntityID: secondaryUserID, + Roles: readPerm, }, }, { // a folder that has permissions with an item in the folder with // no permissions. 
- pathElements: folderCPath, - files: []itemData{ + PathElements: folderCPath, + Files: []connector.ItemData{ { - name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime), - data: fileAData, + Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime), + Data: fileAData, }, }, - perms: permData{ - user: secondaryUserName, - entityID: secondaryUserID, - roles: readPerm, + Perms: connector.PermData{ + User: secondaryUserName, + EntityID: secondaryUserID, + Roles: readPerm, }, }, { - pathElements: folderBPath, - files: []itemData{ + PathElements: folderBPath, + Files: []connector.ItemData{ { // restoring a file in a non-root folder that doesn't inherit // permissions. - name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime), - data: fileBData, - perms: permData{ - user: secondaryUserName, - entityID: secondaryUserID, - roles: writePerm, + Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime), + Data: fileBData, + Perms: connector.PermData{ + User: secondaryUserName, + EntityID: secondaryUserID, + Roles: writePerm, }, }, }, - folders: []itemData{ + Folders: []connector.ItemData{ { - name: folderAName, - perms: permData{ - user: secondaryUserName, - entityID: secondaryUserID, - roles: readPerm, + Name: folderAName, + Perms: connector.PermData{ + User: secondaryUserName, + EntityID: secondaryUserID, + Roles: readPerm, }, }, }, @@ -400,302 +397,323 @@ func generateAndRestoreOnedriveItems( cols = append(cols, col...) } - input := dataForInfo(service, cols, version.Backup) + // TODO Neha: work on this + t := testing.T{} + input := connector.DataForInfo(&t, service, cols, version.Backup) - collections := getCollections( - service, - tenantID, - []string{resourceOwner}, - input, - version.Backup) + // collections := getCollections( + // service, + // tenantID, + // []string{resourceOwner}, + // input, + // version.Backup) opts := control.Options{ RestorePermissions: true, ToggleFeatures: control.Toggles{}, } + config := connector.ConfigInfo{ + Acct: acct, + Opts: opts, + Resource: connector.Users, + Service: service, + Tenant: tenantID, + ResourceOwners: []string{resourceOwner}, + Dest: tester.DefaultTestRestoreDestination(), + } + + _, _, collections, _ := connector.GetCollectionsAndExpected( + &t, + config, + // service, + // tenantID, + // []string{resourceOwner}, + input, + version.Backup) + return gc.ConsumeRestoreCollections(ctx, version.Backup, acct, sel, dest, opts, collections, errs) } -func getCollections( - service path.ServiceType, - tenant string, - resourceOwners []string, - testCollections []colInfo, - backupVersion int, -) []data.RestoreCollection { - var collections []data.RestoreCollection +// func getCollections( +// service path.ServiceType, +// tenant string, +// resourceOwners []string, +// testCollections []colInfo, +// backupVersion int, +// ) []data.RestoreCollection { +// var collections []data.RestoreCollection - for _, owner := range resourceOwners { - ownerCollections := collectionsForInfo( - service, - tenant, - owner, - testCollections, - backupVersion, - ) +// for _, owner := range resourceOwners { +// ownerCollections := collectionsForInfo( +// service, +// tenant, +// owner, +// testCollections, +// backupVersion, +// ) - collections = append(collections, ownerCollections...) - } +// collections = append(collections, ownerCollections...) 
+// } - return collections -} +// return collections +// } -type mockRestoreCollection struct { - data.Collection - auxItems map[string]data.Stream -} +// type mockRestoreCollection struct { +// data.Collection +// auxItems map[string]data.Stream +// } -func (rc mockRestoreCollection) Fetch( - ctx context.Context, - name string, -) (data.Stream, error) { - res := rc.auxItems[name] - if res == nil { - return nil, data.ErrNotFound - } +// func (rc mockRestoreCollection) Fetch( +// ctx context.Context, +// name string, +// ) (data.Stream, error) { +// res := rc.auxItems[name] +// if res == nil { +// return nil, data.ErrNotFound +// } - return res, nil -} +// return res, nil +// } -func collectionsForInfo( - service path.ServiceType, - tenant, user string, - allInfo []colInfo, - backupVersion int, -) []data.RestoreCollection { - collections := make([]data.RestoreCollection, 0, len(allInfo)) +// func collectionsForInfo( +// service path.ServiceType, +// tenant, user string, +// allInfo []colInfo, +// backupVersion int, +// ) []data.RestoreCollection { +// collections := make([]data.RestoreCollection, 0, len(allInfo)) - for _, info := range allInfo { - pth := mustToDataLayerPath( - service, - tenant, - user, - info.category, - info.pathElements, - false) +// for _, info := range allInfo { +// pth := mustToDataLayerPath( +// service, +// tenant, +// user, +// info.category, +// info.PathElements, +// false) - mc := exchMock.NewCollection(pth, pth, len(info.items)) +// mc := exchMock.NewCollection(pth, pth, len(info.items)) - for i := 0; i < len(info.items); i++ { - mc.Names[i] = info.items[i].name - mc.Data[i] = info.items[i].data +// for i := 0; i < len(info.items); i++ { +// mc.Names[i] = info.items[i].name +// mc.Data[i] = info.items[i].data - // We do not count metadata files against item count - if backupVersion > 0 && metadata.HasMetaSuffix(info.items[i].name) && - (service == path.OneDriveService || service == path.SharePointService) { - continue - } - } +// // We do not count metadata Files against item count +// if backupVersion > 0 && metadata.HasMetaSuffix(info.items[i].name) && +// (service == path.OneDriveService || service == path.SharePointService) { +// continue +// } +// } - c := mockRestoreCollection{Collection: mc, auxItems: map[string]data.Stream{}} +// c := mockRestoreCollection{Collection: mc, auxItems: map[string]data.Stream{}} - for _, aux := range info.auxItems { - c.auxItems[aux.name] = &exchMock.Data{ - ID: aux.name, - Reader: io.NopCloser(bytes.NewReader(aux.data)), - } - } +// for _, aux := range info.auxItems { +// c.auxItems[aux.name] = &exchMock.Data{ +// ID: aux.name, +// Reader: io.NopCloser(bytes.NewReader(aux.data)), +// } +// } - collections = append(collections, c) - } +// collections = append(collections, c) +// } - return collections -} +// return collections +// } -func mustToDataLayerPath( - service path.ServiceType, - tenant, resourceOwner string, - category path.CategoryType, - elements []string, - isItem bool, -) path.Path { - res, err := path.Build(tenant, resourceOwner, service, category, isItem, elements...) - if err != nil { - fmt.Println("building path", clues.ToCore(err)) - } +// func mustToDataLayerPath( +// service path.ServiceType, +// tenant, resourceOwner string, +// category path.CategoryType, +// elements []string, +// isItem bool, +// ) path.Path { +// res, err := path.Build(tenant, resourceOwner, service, category, isItem, elements...) 
+// if err != nil { +// fmt.Println("building path", clues.ToCore(err)) +// } - return res -} +// return res +// } -type colInfo struct { - // Elements (in order) for the path representing this collection. Should - // only contain elements after the prefix that corso uses for the path. For - // example, a collection for the Inbox folder in exchange mail would just be - // "Inbox". - pathElements []string - category path.CategoryType - items []itemInfo - // auxItems are items that can be retrieved with Fetch but won't be returned - // by Items(). - auxItems []itemInfo -} +// type colInfo struct { +// // Elements (in order) for the path representing this collection. Should +// // only contain elements after the prefix that corso uses for the path. For +// // example, a collection for the Inbox folder in exchange mail would just be +// // "Inbox". +// PathElements []string +// category path.CategoryType +// items []itemInfo +// // auxItems are items that can be retrieved with Fetch but won't be returned +// // by Items(). +// auxItems []itemInfo +// } -func newOneDriveCollection( - service path.ServiceType, - pathElements []string, - backupVersion int, -) *onedriveCollection { - return &onedriveCollection{ - service: service, - pathElements: pathElements, - backupVersion: backupVersion, - } -} +// func newOneDriveCollection( +// service path.ServiceType, +// PathElements []string, +// backupVersion int, +// ) *onedriveCollection { +// return &onedriveCollection{ +// service: service, +// PathElements: PathElements, +// backupVersion: backupVersion, +// } +// } -func dataForInfo( - service path.ServiceType, - cols []onedriveColInfo, - backupVersion int, -) []colInfo { - var res []colInfo +// func dataForInfo( +// service path.ServiceType, +// cols []connector.OnedriveColInfo, +// backupVersion int, +// ) []colInfo { +// var res []colInfo - for _, c := range cols { - onedriveCol := newOneDriveCollection(service, c.pathElements, backupVersion) +// for _, c := range cols { +// onedriveCol := newOneDriveCollection(service, c.PathElements, backupVersion) - for _, f := range c.files { - onedriveCol.withFile(f.name, f.data, f.perms) - } +// for _, f := range c.Files { +// onedriveCol.withFile(f.Name, f.Data, f.Perms) +// } - onedriveCol.withPermissions(c.perms) +// onedriveCol.withPermissions(c.Perms) - res = append(res, onedriveCol.collection()) - } +// res = append(res, onedriveCol.collection()) +// } - return res -} +// return res +// } -func (c onedriveCollection) collection() colInfo { - cat := path.FilesCategory - if c.service == path.SharePointService { - cat = path.LibrariesCategory - } +// func (c onedriveCollection) collection() colInfo { +// cat := path.FilesCategory +// if c.service == path.SharePointService { +// cat = path.LibrariesCategory +// } - return colInfo{ - pathElements: c.pathElements, - category: cat, - items: c.items, - auxItems: c.aux, - } -} +// return colInfo{ +// PathElements: c.PathElements, +// category: cat, +// items: c.items, +// auxItems: c.aux, +// } +// } -func (c *onedriveCollection) withFile(name string, fileData []byte, perm permData) *onedriveCollection { - c.items = append(c.items, onedriveItemWithData( - name+metadata.DataFileSuffix, - name+metadata.DataFileSuffix, - fileData)) +// func (c *onedriveCollection) withFile(name string, fileData []byte, perm connector.PermData) *onedriveCollection { +// c.items = append(c.items, onedriveItemWithData( +// name+metadata.DataFileSuffix, +// name+metadata.DataFileSuffix, +// fileData)) - md := onedriveMetadata( - 
name, - name+metadata.MetaFileSuffix, - name, - perm, - true) - c.items = append(c.items, md) - c.aux = append(c.aux, md) +// md := onedriveMetadata( +// name, +// name+metadata.MetaFileSuffix, +// name, +// perm, +// true) +// c.items = append(c.items, md) +// c.aux = append(c.aux, md) - return c -} +// return c +// } -// withPermissions adds permissions to the folder represented by this -// onedriveCollection. -func (c *onedriveCollection) withPermissions(perm permData) *onedriveCollection { - if c.backupVersion < version.OneDrive4DirIncludesPermissions { - return c - } +// // withPermissions adds permissions to the folder represented by this +// // onedriveCollection. +// func (c *onedriveCollection) withPermissions(perm connector.PermData) *onedriveCollection { +// if c.backupVersion < version.OneDrive4DirIncludesPermissions { +// return c +// } - name := c.pathElements[len(c.pathElements)-1] - metaName := name +// name := c.PathElements[len(c.PathElements)-1] +// metaName := name - if c.backupVersion >= version.OneDrive5DirMetaNoName { - // We switched to just .dirmeta for metadata file names. - metaName = "" - } +// if c.backupVersion >= version.OneDrive5DirMetaNoName { +// // We switched to just .dirmeta for metadata file names. +// metaName = "" +// } - if name == "root:" { - return c - } +// if name == "root:" { +// return c +// } - md := onedriveMetadata( - name, - metaName+metadata.DirMetaFileSuffix, - metaName+metadata.DirMetaFileSuffix, - perm, - true) +// md := onedriveMetadata( +// name, +// metaName+metadata.DirMetaFileSuffix, +// metaName+metadata.DirMetaFileSuffix, +// perm, +// true) - c.items = append(c.items, md) - c.aux = append(c.aux, md) +// c.items = append(c.items, md) +// c.aux = append(c.aux, md) - return c -} +// return c +// } -type oneDriveData struct { - FileName string `json:"fileName,omitempty"` - Data []byte `json:"data,omitempty"` -} +// type oneDriveData struct { +// FileName string `json:"fileName,omitempty"` +// Data []byte `json:"data,omitempty"` +// } -func onedriveItemWithData( - name, lookupKey string, - fileData []byte, -) itemInfo { - content := oneDriveData{ - FileName: lookupKey, - Data: fileData, - } +// func onedriveItemWithData( +// name, lookupKey string, +// fileData []byte, +// ) itemInfo { +// content := oneDriveData{ +// FileName: lookupKey, +// Data: fileData, +// } - serialized, _ := json.Marshal(content) +// serialized, _ := json.Marshal(content) - return itemInfo{ - name: name, - data: serialized, - lookupKey: lookupKey, - } -} +// return itemInfo{ +// name: name, +// data: serialized, +// lookupKey: lookupKey, +// } +// } -func onedriveMetadata( - fileName, itemID, lookupKey string, - perm permData, - permUseID bool, -) itemInfo { - meta := getMetadata(fileName, perm, permUseID) +// func onedriveMetadata( +// fileName, itemID, lookupKey string, +// perm connector.PermData, +// permUseID bool, +// ) itemInfo { +// meta := getMetadata(fileName, perm, permUseID) - metaJSON, err := json.Marshal(meta) - if err != nil { - fmt.Println("marshalling metadata", clues.ToCore(err)) - } +// metaJSON, err := json.Marshal(meta) +// if err != nil { +// fmt.Println("marshalling metadata", clues.ToCore(err)) +// } - return itemInfo{ - name: itemID, - data: metaJSON, - lookupKey: lookupKey, - } -} +// return itemInfo{ +// name: itemID, +// data: metaJSON, +// lookupKey: lookupKey, +// } +// } -func getMetadata(fileName string, perm permData, permUseID bool) metadata.Metadata { - if len(perm.user) == 0 || len(perm.roles) == 0 || - perm.sharingMode != 
metadata.SharingModeCustom { - return metadata.Metadata{ - FileName: fileName, - SharingMode: perm.sharingMode, - } - } +// func getMetadata(fileName string, perm connector.PermData, permUseID bool) metadata.Metadata { +// if len(perm.User) == 0 || len(perm.Roles) == 0 || +// perm.SharingMode != metadata.SharingModeCustom { +// return metadata.Metadata{ +// FileName: fileName, +// SharingMode: perm.SharingMode, +// } +// } - // In case of permissions, the id will usually be same for same - // user/role combo unless deleted and readded, but we have to do - // this as we only have two users of which one is already taken. - id := uuid.NewString() - uperm := metadata.Permission{ID: id, Roles: perm.roles} +// // In case of permissions, the id will usually be same for same +// // user/role combo unless deleted and readded, but we have to do +// // this as we only have two users of which one is already taken. +// id := uuid.NewString() +// uperm := metadata.Permission{ID: id, Roles: perm.Roles} - if permUseID { - uperm.EntityID = perm.entityID - } else { - uperm.Email = perm.user - } +// if permUseID { +// uperm.EntityID = perm.EntityID +// } else { +// uperm.Email = perm.User +// } - meta := metadata.Metadata{ - FileName: fileName, - Permissions: []metadata.Permission{uperm}, - } +// meta := metadata.Metadata{ +// FileName: fileName, +// Permissions: []metadata.Permission{uperm}, +// } - return meta -} +// return meta +// } diff --git a/src/internal/connector/graph_connector_helper_test.go b/src/internal/connector/graph_connector_helper_test.go index 6934162ab..8f33b0247 100644 --- a/src/internal/connector/graph_connector_helper_test.go +++ b/src/internal/connector/graph_connector_helper_test.go @@ -1,7 +1,6 @@ package connector import ( - "bytes" "context" "encoding/json" "io" @@ -17,33 +16,17 @@ import ( "golang.org/x/exp/slices" "github.com/alcionai/corso/src/internal/common/ptr" - exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock" "github.com/alcionai/corso/src/internal/connector/onedrive" "github.com/alcionai/corso/src/internal/connector/onedrive/metadata" "github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/tester" - "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/selectors" ) -func mustToDataLayerPath( - t *testing.T, - service path.ServiceType, - tenant, resourceOwner string, - category path.CategoryType, - elements []string, - isItem bool, -) path.Path { - res, err := path.Build(tenant, resourceOwner, service, category, isItem, elements...) - require.NoError(t, err, clues.ToCore(err)) - - return res -} - func testElementsMatch[T any]( t *testing.T, expected []T, @@ -100,7 +83,7 @@ func testElementsMatch[T any]( assert.Failf( t, "contain different elements", - "missing items: (%T)%v\nunexpected items: (%T)%v\n", + "missing Items: (%T)%v\nunexpected Items: (%T)%v\n", expected, missing, got, @@ -108,52 +91,17 @@ func testElementsMatch[T any]( ) } -type configInfo struct { - acct account.Account - opts control.Options - resource Resource - service path.ServiceType - tenant string - resourceOwners []string - dest control.RestoreDestination -} - -type itemInfo struct { - // lookupKey is a string that can be used to find this data from a set of - // other data in the same collection. 
This key should be something that will - // be the same before and after restoring the item in M365 and may not be - // the M365 ID. When restoring items out of place, the item is assigned a - // new ID making it unsuitable for a lookup key. - lookupKey string - name string - data []byte -} - -type colInfo struct { - // Elements (in order) for the path representing this collection. Should - // only contain elements after the prefix that corso uses for the path. For - // example, a collection for the Inbox folder in exchange mail would just be - // "Inbox". - pathElements []string - category path.CategoryType - items []itemInfo - // auxItems are items that can be retrieved with Fetch but won't be returned - // by Items(). These files do not directly participate in comparisosn at the - // end of a test. - auxItems []itemInfo -} - type restoreBackupInfo struct { name string service path.ServiceType - collections []colInfo + collections []ColInfo resource Resource } type restoreBackupInfoMultiVersion struct { service path.ServiceType - collectionsLatest []colInfo - collectionsPrevious []colInfo + collectionsLatest []ColInfo + collectionsPrevious []ColInfo resource Resource backupVersion int } @@ -1099,7 +1047,7 @@ func makeSharePointBackupSel( } // backupSelectorForExpected creates a selector that can be used to backup the -// given items in expected based on the item paths. Fails the test if items from +// given Items in expected based on the item paths. Fails the test if items from // multiple services are in expected. func backupSelectorForExpected( t *testing.T, @@ -1126,127 +1074,6 @@ func backupSelectorForExpected( return selectors.Selector{} } -// backupOutputPathFromRestore returns a path.Path denoting the location in -// kopia the data will be placed at. The location is a data-type specific -// combination of the location the data was recently restored to and where the -// data was originally in the hierarchy. -func backupOutputPathFromRestore( - t *testing.T, - restoreDest control.RestoreDestination, - inputPath path.Path, -) path.Path { - base := []string{restoreDest.ContainerName} - - // OneDrive has leading information like the drive ID. - if inputPath.Service() == path.OneDriveService || inputPath.Service() == path.SharePointService { - folders := inputPath.Folders() - base = append(append([]string{}, folders[:3]...), restoreDest.ContainerName) - - if len(folders) > 3 { - base = append(base, folders[3:]...) - } - } - - if inputPath.Service() == path.ExchangeService && inputPath.Category() == path.EmailCategory { - base = append(base, inputPath.Folders()...) - } - - return mustToDataLayerPath( - t, - inputPath.Service(), - inputPath.Tenant(), - inputPath.ResourceOwner(), - inputPath.Category(), - base, - false, - ) -} - -// TODO(ashmrtn): Make this an actual mock class that can be used in other -// packages. 
-type mockRestoreCollection struct { - data.Collection - auxItems map[string]data.Stream -} - -func (rc mockRestoreCollection) Fetch( - ctx context.Context, - name string, -) (data.Stream, error) { - res := rc.auxItems[name] - if res == nil { - return nil, data.ErrNotFound - } - - return res, nil -} - -func collectionsForInfo( - t *testing.T, - service path.ServiceType, - tenant, user string, - dest control.RestoreDestination, - allInfo []colInfo, - backupVersion int, -) (int, int, []data.RestoreCollection, map[string]map[string][]byte) { - var ( - collections = make([]data.RestoreCollection, 0, len(allInfo)) - expectedData = make(map[string]map[string][]byte, len(allInfo)) - totalItems = 0 - kopiaEntries = 0 - ) - - for _, info := range allInfo { - pth := mustToDataLayerPath( - t, - service, - tenant, - user, - info.category, - info.pathElements, - false) - - mc := exchMock.NewCollection(pth, pth, len(info.items)) - baseDestPath := backupOutputPathFromRestore(t, dest, pth) - - baseExpected := expectedData[baseDestPath.String()] - if baseExpected == nil { - expectedData[baseDestPath.String()] = make(map[string][]byte, len(info.items)) - baseExpected = expectedData[baseDestPath.String()] - } - - for i := 0; i < len(info.items); i++ { - mc.Names[i] = info.items[i].name - mc.Data[i] = info.items[i].data - - baseExpected[info.items[i].lookupKey] = info.items[i].data - - // We do not count metadata files against item count - if backupVersion > 0 && - (service == path.OneDriveService || service == path.SharePointService) && - metadata.HasMetaSuffix(info.items[i].name) { - continue - } - - totalItems++ - } - - c := mockRestoreCollection{Collection: mc, auxItems: map[string]data.Stream{}} - - for _, aux := range info.auxItems { - c.auxItems[aux.name] = &exchMock.Data{ - ID: aux.name, - Reader: io.NopCloser(bytes.NewReader(aux.data)), - } - } - - collections = append(collections, c) - kopiaEntries += len(info.items) - } - - return totalItems, kopiaEntries, collections, expectedData -} - func getSelectorWith( t *testing.T, service path.ServiceType, diff --git a/src/internal/connector/graph_connector_onedrive_test.go b/src/internal/connector/graph_connector_onedrive_test.go index 0c4c40a47..99e0fbbe1 100644 --- a/src/internal/connector/graph_connector_onedrive_test.go +++ b/src/internal/connector/graph_connector_onedrive_test.go @@ -2,13 +2,11 @@ package connector import ( "context" - "encoding/json" "fmt" "strings" "testing" "github.com/alcionai/clues" - "github.com/google/uuid" "github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -24,92 +22,12 @@ import ( "github.com/alcionai/corso/src/pkg/path" ) -// For any version post this(inclusive), we expect to be using IDs for -// permission instead of email -const versionPermissionSwitchedToID = version.OneDrive4DirIncludesPermissions - -func getMetadata(fileName string, perm permData, permUseID bool) metadata.Metadata { - if len(perm.user) == 0 || len(perm.roles) == 0 || - perm.sharingMode != metadata.SharingModeCustom { - return metadata.Metadata{ - FileName: fileName, - SharingMode: perm.sharingMode, - } - } - - // In case of permissions, the id will usually be same for same - // user/role combo unless deleted and readded, but we have to do - // this as we only have two users of which one is already taken. 
- id := uuid.NewString() - uperm := metadata.Permission{ID: id, Roles: perm.roles} - - if permUseID { - uperm.EntityID = perm.entityID - } else { - uperm.Email = perm.user - } - - testMeta := metadata.Metadata{ - FileName: fileName, - Permissions: []metadata.Permission{uperm}, - } - - return testMeta -} - -type testOneDriveData struct { - FileName string `json:"fileName,omitempty"` - Data []byte `json:"data,omitempty"` -} - -func onedriveItemWithData( - t *testing.T, - name, lookupKey string, - fileData []byte, -) itemInfo { - t.Helper() - - content := testOneDriveData{ - FileName: lookupKey, - Data: fileData, - } - - serialized, err := json.Marshal(content) - require.NoError(t, err, clues.ToCore(err)) - - return itemInfo{ - name: name, - data: serialized, - lookupKey: lookupKey, - } -} - -func onedriveMetadata( - t *testing.T, - fileName, itemID, lookupKey string, - perm permData, - permUseID bool, -) itemInfo { - t.Helper() - - testMeta := getMetadata(fileName, perm, permUseID) - - testMetaJSON, err := json.Marshal(testMeta) - require.NoError(t, err, "marshalling metadata", clues.ToCore(err)) - - return itemInfo{ - name: itemID, - data: testMetaJSON, - lookupKey: lookupKey, - } -} - var ( fileName = "test-file.txt" folderAName = "folder-a" folderBName = "b" folderNamedFolder = "folder" - rootFolder = "root:" + // rootFolder = "root:" fileAData = []byte(strings.Repeat("a", 33)) fileBData = []byte(strings.Repeat("b", 65)) @@ -122,204 +40,6 @@ var ( readPerm = []string{"read"} ) -func newOneDriveCollection( - t *testing.T, - service path.ServiceType, - pathElements []string, - backupVersion int, -) *onedriveCollection { - return &onedriveCollection{ - service: service, - pathElements: pathElements, - backupVersion: backupVersion, - t: t, - } -} - -type onedriveCollection struct { - service path.ServiceType - pathElements []string - items []itemInfo - aux []itemInfo - backupVersion int - t *testing.T -} - -func (c onedriveCollection) collection() colInfo { - cat := path.FilesCategory - if c.service == path.SharePointService { - cat = path.LibrariesCategory - } - - return colInfo{ - pathElements: c.pathElements, - category: cat, - items: c.items, - auxItems: c.aux, - } -} - -func (c *onedriveCollection) withFile(name string, fileData []byte, perm permData) *onedriveCollection { - switch c.backupVersion { - case 0: - // Lookups will occur using the most recent version of things so we need - // the embedded file name to match that. 
- c.items = append(c.items, onedriveItemWithData( - c.t, - name, - name+metadata.DataFileSuffix, - fileData)) - - // v1-5, early metadata design - case version.OneDrive1DataAndMetaFiles, 2, version.OneDrive3IsMetaMarker, - version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName: - c.items = append(c.items, onedriveItemWithData( - c.t, - name+metadata.DataFileSuffix, - name+metadata.DataFileSuffix, - fileData)) - - md := onedriveMetadata( - c.t, - "", - name+metadata.MetaFileSuffix, - name+metadata.MetaFileSuffix, - perm, - c.backupVersion >= versionPermissionSwitchedToID) - c.items = append(c.items, md) - c.aux = append(c.aux, md) - - // v6+ current metadata design - case version.OneDrive6NameInMeta, version.OneDrive7LocationRef, version.All8MigrateUserPNToID: - c.items = append(c.items, onedriveItemWithData( - c.t, - name+metadata.DataFileSuffix, - name+metadata.DataFileSuffix, - fileData)) - - md := onedriveMetadata( - c.t, - name, - name+metadata.MetaFileSuffix, - name, - perm, - c.backupVersion >= versionPermissionSwitchedToID) - c.items = append(c.items, md) - c.aux = append(c.aux, md) - - default: - assert.FailNowf(c.t, "bad backup version", "version %d", c.backupVersion) - } - - return c -} - -func (c *onedriveCollection) withFolder(name string, perm permData) *onedriveCollection { - switch c.backupVersion { - case 0, version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName, - version.OneDrive6NameInMeta, version.OneDrive7LocationRef, version.All8MigrateUserPNToID: - return c - - case version.OneDrive1DataAndMetaFiles, 2, version.OneDrive3IsMetaMarker: - c.items = append( - c.items, - onedriveMetadata( - c.t, - "", - name+metadata.DirMetaFileSuffix, - name+metadata.DirMetaFileSuffix, - perm, - c.backupVersion >= versionPermissionSwitchedToID)) - - default: - assert.FailNowf(c.t, "bad backup version", "version %d", c.backupVersion) - } - - return c -} - -// withPermissions adds permissions to the folder represented by this -// onedriveCollection. -func (c *onedriveCollection) withPermissions(perm permData) *onedriveCollection { - // These versions didn't store permissions for the folder or didn't store them - // in the folder's collection. - if c.backupVersion < version.OneDrive4DirIncludesPermissions { - return c - } - - name := c.pathElements[len(c.pathElements)-1] - metaName := name - - if c.backupVersion >= version.OneDrive5DirMetaNoName { - // We switched to just .dirmeta for metadata file names. 
- metaName = "" - } - - if name == rootFolder { - return c - } - - md := onedriveMetadata( - c.t, - name, - metaName+metadata.DirMetaFileSuffix, - metaName+metadata.DirMetaFileSuffix, - perm, - c.backupVersion >= versionPermissionSwitchedToID) - - c.items = append(c.items, md) - c.aux = append(c.aux, md) - - return c -} - -type permData struct { - user string // user is only for older versions - entityID string - roles []string - sharingMode metadata.SharingMode -} - -type itemData struct { - name string - data []byte - perms permData -} - -type onedriveColInfo struct { - pathElements []string - perms permData - files []itemData - folders []itemData -} - -func testDataForInfo( - t *testing.T, - service path.ServiceType, - cols []onedriveColInfo, - backupVersion int, -) []colInfo { - var res []colInfo - - for _, c := range cols { - onedriveCol := newOneDriveCollection(t, service, c.pathElements, backupVersion) - - for _, f := range c.files { - onedriveCol.withFile(f.name, f.data, f.perms) - } - - for _, d := range c.folders { - onedriveCol.withFolder(d.name, d.perms) - } - - onedriveCol.withPermissions(c.perms) - - res = append(res, onedriveCol.collection()) - } - - return res -} - func mustGetDefaultDriveID( t *testing.T, ctx context.Context, //revive:disable-line:context-as-argument @@ -663,78 +383,78 @@ func testRestoreAndBackupMultipleFilesAndFoldersNoPermissions( folderBName, } - cols := []onedriveColInfo{ + cols := []OnedriveColInfo{ { - pathElements: rootPath, - files: []itemData{ + PathElements: rootPath, + Files: []ItemData{ { - name: fileName, - data: fileAData, + Name: fileName, + Data: fileAData, }, }, - folders: []itemData{ + Folders: []ItemData{ { - name: folderAName, + Name: folderAName, }, { - name: folderBName, + Name: folderBName, }, }, }, { - pathElements: folderAPath, - files: []itemData{ + PathElements: folderAPath, + Files: []ItemData{ { - name: fileName, - data: fileBData, + Name: fileName, + Data: fileBData, }, }, - folders: []itemData{ + Folders: []ItemData{ { - name: folderBName, + Name: folderBName, }, }, }, { - pathElements: subfolderBPath, - files: []itemData{ + PathElements: subfolderBPath, + Files: []ItemData{ { - name: fileName, - data: fileCData, + Name: fileName, + Data: fileCData, }, }, - folders: []itemData{ + Folders: []ItemData{ { - name: folderAName, + Name: folderAName, }, }, }, { - pathElements: subfolderAPath, - files: []itemData{ + PathElements: subfolderAPath, + Files: []ItemData{ { - name: fileName, - data: fileDData, + Name: fileName, + Data: fileDData, }, }, }, { - pathElements: folderBPath, - files: []itemData{ + PathElements: folderBPath, + Files: []ItemData{ { - name: fileName, - data: fileEData, + Name: fileName, + Data: fileEData, }, }, }, } - expected := testDataForInfo(suite.T(), suite.BackupService(), cols, version.Backup) + expected := DataForInfo(suite.T(), suite.BackupService(), cols, version.Backup) for vn := startVersion; vn <= version.Backup; vn++ { suite.Run(fmt.Sprintf("Version%d", vn), func() { t := suite.T() - input := testDataForInfo(t, suite.BackupService(), cols, vn) + input := DataForInfo(t, suite.BackupService(), cols, vn) testData := restoreBackupInfoMultiVersion{ service: suite.BackupService(), @@ -807,71 +527,71 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) { folderCName, } - cols := []onedriveColInfo{ + cols := []OnedriveColInfo{ { - pathElements: rootPath, - files: []itemData{ + PathElements: rootPath, + Files: []ItemData{ { // Test restoring a file that doesn't inherit permissions. 
- name: fileName, - data: fileAData, - perms: permData{ - user: secondaryUserName, - entityID: secondaryUserID, - roles: writePerm, + Name: fileName, + Data: fileAData, + Perms: PermData{ + User: secondaryUserName, + EntityID: secondaryUserID, + Roles: writePerm, }, }, { // Test restoring a file that doesn't inherit permissions and has // no permissions. - name: fileName2, - data: fileBData, + Name: fileName2, + Data: fileBData, }, }, - folders: []itemData{ + Folders: []ItemData{ { - name: folderBName, + Name: folderBName, }, { - name: folderAName, - perms: permData{ - user: secondaryUserName, - entityID: secondaryUserID, - roles: readPerm, + Name: folderAName, + Perms: PermData{ + User: secondaryUserName, + EntityID: secondaryUserID, + Roles: readPerm, }, }, { - name: folderCName, - perms: permData{ - user: secondaryUserName, - entityID: secondaryUserID, - roles: readPerm, + Name: folderCName, + Perms: PermData{ + User: secondaryUserName, + EntityID: secondaryUserID, + Roles: readPerm, }, }, }, }, { - pathElements: folderBPath, - files: []itemData{ + PathElements: folderBPath, + Files: []ItemData{ { // Test restoring a file in a non-root folder that doesn't inherit // permissions. - name: fileName, - data: fileBData, - perms: permData{ - user: secondaryUserName, - entityID: secondaryUserID, - roles: writePerm, + Name: fileName, + Data: fileBData, + Perms: PermData{ + User: secondaryUserName, + EntityID: secondaryUserID, + Roles: writePerm, }, }, }, - folders: []itemData{ + Folders: []ItemData{ { - name: folderAName, - perms: permData{ - user: secondaryUserName, - entityID: secondaryUserID, - roles: readPerm, + Name: folderAName, + Perms: PermData{ + User: secondaryUserName, + EntityID: secondaryUserID, + Roles: readPerm, }, }, }, @@ -893,52 +613,52 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) { // }, // }, // }, - // perms: permData{ - // user: secondaryUserName, - // entityID: secondaryUserID, - // roles: readPerm, + // Perms: PermData{ + // User: secondaryUserName, + // EntityID: secondaryUserID, + // Roles: readPerm, // }, // }, { // Tests a folder that has permissions with an item in the folder with // the different permissions. - pathElements: folderAPath, - files: []itemData{ + PathElements: folderAPath, + Files: []ItemData{ { - name: fileName, - data: fileEData, - perms: permData{ - user: secondaryUserName, - entityID: secondaryUserID, - roles: writePerm, + Name: fileName, + Data: fileEData, + Perms: PermData{ + User: secondaryUserName, + EntityID: secondaryUserID, + Roles: writePerm, }, }, }, - perms: permData{ - user: secondaryUserName, - entityID: secondaryUserID, - roles: readPerm, + Perms: PermData{ + User: secondaryUserName, + EntityID: secondaryUserID, + Roles: readPerm, }, }, { // Tests a folder that has permissions with an item in the folder with // no permissions. 
- pathElements: folderCPath, - files: []itemData{ + PathElements: folderCPath, + Files: []ItemData{ { - name: fileName, - data: fileAData, + Name: fileName, + Data: fileAData, }, }, - perms: permData{ - user: secondaryUserName, - entityID: secondaryUserID, - roles: readPerm, + Perms: PermData{ + User: secondaryUserName, + EntityID: secondaryUserID, + Roles: readPerm, }, }, } - expected := testDataForInfo(suite.T(), suite.BackupService(), cols, version.Backup) + expected := DataForInfo(suite.T(), suite.BackupService(), cols, version.Backup) for vn := startVersion; vn <= version.Backup; vn++ { suite.Run(fmt.Sprintf("Version%d", vn), func() { @@ -946,7 +666,7 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) { // Ideally this can always be true or false and still // work, but limiting older versions to use emails so as // to validate that flow as well. - input := testDataForInfo(t, suite.BackupService(), cols, vn) + input := DataForInfo(t, suite.BackupService(), cols, vn) testData := restoreBackupInfoMultiVersion{ service: suite.BackupService(), @@ -984,50 +704,50 @@ func testPermissionsBackupAndNoRestore(suite oneDriveSuite, startVersion int) { suite.Service(), suite.BackupResourceOwner()) - inputCols := []onedriveColInfo{ + inputCols := []OnedriveColInfo{ { - pathElements: []string{ + PathElements: []string{ "drives", driveID, rootFolder, }, - files: []itemData{ + Files: []ItemData{ { - name: fileName, - data: fileAData, - perms: permData{ - user: secondaryUserName, - entityID: secondaryUserID, - roles: writePerm, + Name: fileName, + Data: fileAData, + Perms: PermData{ + User: secondaryUserName, + EntityID: secondaryUserID, + Roles: writePerm, }, }, }, }, } - expectedCols := []onedriveColInfo{ + expectedCols := []OnedriveColInfo{ { - pathElements: []string{ + PathElements: []string{ "drives", driveID, rootFolder, }, - files: []itemData{ + Files: []ItemData{ { // No permissions on the output since they weren't restored. 
- name: fileName, - data: fileAData, + Name: fileName, + Data: fileAData, }, }, }, } - expected := testDataForInfo(suite.T(), suite.BackupService(), expectedCols, version.Backup) + expected := DataForInfo(suite.T(), suite.BackupService(), expectedCols, version.Backup) for vn := startVersion; vn <= version.Backup; vn++ { suite.Run(fmt.Sprintf("Version%d", vn), func() { t := suite.T() - input := testDataForInfo(t, suite.BackupService(), inputCols, vn) + input := DataForInfo(t, suite.BackupService(), inputCols, vn) testData := restoreBackupInfoMultiVersion{ service: suite.BackupService(), @@ -1105,29 +825,29 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio folderCName, } - fileSet := []itemData{ + fileSet := []ItemData{ { - name: "file-custom", - data: fileAData, - perms: permData{ - user: secondaryUserName, - entityID: secondaryUserID, - roles: writePerm, - sharingMode: metadata.SharingModeCustom, + Name: "file-custom", + Data: fileAData, + Perms: PermData{ + User: secondaryUserName, + EntityID: secondaryUserID, + Roles: writePerm, + SharingMode: metadata.SharingModeCustom, }, }, { - name: "file-inherited", - data: fileAData, - perms: permData{ - sharingMode: metadata.SharingModeInherited, + Name: "file-inherited", + Data: fileAData, + Perms: PermData{ + SharingMode: metadata.SharingModeInherited, }, }, { - name: "file-empty", - data: fileAData, - perms: permData{ - sharingMode: metadata.SharingModeCustom, + Name: "file-empty", + Data: fileAData, + Perms: PermData{ + SharingMode: metadata.SharingModeCustom, }, }, } @@ -1150,55 +870,55 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio // - inherted-permission-file // - empty-permission-file (empty/empty might have interesting behavior) - cols := []onedriveColInfo{ + cols := []OnedriveColInfo{ { - pathElements: rootPath, - files: []itemData{}, - folders: []itemData{ - {name: folderAName}, + PathElements: rootPath, + Files: []ItemData{}, + Folders: []ItemData{ + {Name: folderAName}, }, }, { - pathElements: folderAPath, - files: fileSet, - folders: []itemData{ - {name: folderAName}, - {name: folderBName}, - {name: folderCName}, + PathElements: folderAPath, + Files: fileSet, + Folders: []ItemData{ + {Name: folderAName}, + {Name: folderBName}, + {Name: folderCName}, }, - perms: permData{ - user: tertiaryUserName, - entityID: tertiaryUserID, - roles: readPerm, + Perms: PermData{ + User: tertiaryUserName, + EntityID: tertiaryUserID, + Roles: readPerm, }, }, { - pathElements: subfolderAAPath, - files: fileSet, - perms: permData{ - user: tertiaryUserName, - entityID: tertiaryUserID, - roles: writePerm, - sharingMode: metadata.SharingModeCustom, + PathElements: subfolderAAPath, + Files: fileSet, + Perms: PermData{ + User: tertiaryUserName, + EntityID: tertiaryUserID, + Roles: writePerm, + SharingMode: metadata.SharingModeCustom, }, }, { - pathElements: subfolderABPath, - files: fileSet, - perms: permData{ - sharingMode: metadata.SharingModeInherited, + PathElements: subfolderABPath, + Files: fileSet, + Perms: PermData{ + SharingMode: metadata.SharingModeInherited, }, }, { - pathElements: subfolderACPath, - files: fileSet, - perms: permData{ - sharingMode: metadata.SharingModeCustom, + PathElements: subfolderACPath, + Files: fileSet, + Perms: PermData{ + SharingMode: metadata.SharingModeCustom, }, }, } - expected := testDataForInfo(suite.T(), suite.BackupService(), cols, version.Backup) + expected := DataForInfo(suite.T(), suite.BackupService(), cols, version.Backup) for vn := 
startVersion; vn <= version.Backup; vn++ { suite.Run(fmt.Sprintf("Version%d", vn), func() { @@ -1206,7 +926,7 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio // Ideally this can always be true or false and still // work, but limiting older versions to use emails so as // to validate that flow as well. - input := testDataForInfo(t, suite.BackupService(), cols, vn) + input := DataForInfo(t, suite.BackupService(), cols, vn) testData := restoreBackupInfoMultiVersion{ service: suite.BackupService(), @@ -1264,60 +984,60 @@ func testRestoreFolderNamedFolderRegression( folderBName, } - cols := []onedriveColInfo{ + cols := []OnedriveColInfo{ { - pathElements: rootPath, - files: []itemData{ + PathElements: rootPath, + Files: []ItemData{ { - name: fileName, - data: fileAData, + Name: fileName, + Data: fileAData, }, }, - folders: []itemData{ + Folders: []ItemData{ { - name: folderNamedFolder, + Name: folderNamedFolder, }, { - name: folderBName, + Name: folderBName, }, }, }, { - pathElements: folderFolderPath, - files: []itemData{ + PathElements: folderFolderPath, + Files: []ItemData{ { - name: fileName, - data: fileBData, + Name: fileName, + Data: fileBData, }, }, - folders: []itemData{ + Folders: []ItemData{ { - name: folderBName, + Name: folderBName, }, }, }, { - pathElements: subfolderPath, - files: []itemData{ + PathElements: subfolderPath, + Files: []ItemData{ { - name: fileName, - data: fileCData, + Name: fileName, + Data: fileCData, }, }, - folders: []itemData{ + Folders: []ItemData{ { - name: folderNamedFolder, + Name: folderNamedFolder, }, }, }, } - expected := testDataForInfo(suite.T(), suite.BackupService(), cols, version.Backup) + expected := DataForInfo(suite.T(), suite.BackupService(), cols, version.Backup) for vn := startVersion; vn <= version.Backup; vn++ { suite.Run(fmt.Sprintf("Version%d", vn), func() { t := suite.T() - input := testDataForInfo(t, suite.BackupService(), cols, vn) + input := DataForInfo(t, suite.BackupService(), cols, vn) testData := restoreBackupInfoMultiVersion{ service: suite.BackupService(), diff --git a/src/internal/connector/graph_connector_onedrive_test_helper.go b/src/internal/connector/graph_connector_onedrive_test_helper.go new file mode 100644 index 000000000..26d71bfe8 --- /dev/null +++ b/src/internal/connector/graph_connector_onedrive_test_helper.go @@ -0,0 +1,332 @@ +package connector + +import ( + "encoding/json" + "testing" + + "github.com/alcionai/clues" + "github.com/alcionai/corso/src/internal/connector/onedrive/metadata" + "github.com/alcionai/corso/src/internal/data" + "github.com/alcionai/corso/src/internal/version" + "github.com/alcionai/corso/src/pkg/path" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "golang.org/x/exp/maps" +) + +// For any version post this(inclusive), we expect to be using IDs for +// permission instead of email +const versionPermissionSwitchedToID = version.OneDrive4DirIncludesPermissions + +var rootFolder = "root:" + +func getMetadata(fileName string, perm PermData, permUseID bool) metadata.Metadata { + if len(perm.User) == 0 || len(perm.Roles) == 0 || + perm.SharingMode != metadata.SharingModeCustom { + return metadata.Metadata{ + FileName: fileName, + SharingMode: perm.SharingMode, + } + } + + // In case of permissions, the id will usually be same for same + // user/role combo unless deleted and readded, but we have to do + // this as we only have two users of which one is already taken. 
+ id := uuid.NewString() + uperm := metadata.Permission{ID: id, Roles: perm.Roles} + + if permUseID { + uperm.EntityID = perm.EntityID + } else { + uperm.Email = perm.User + } + + testMeta := metadata.Metadata{ + FileName: fileName, + Permissions: []metadata.Permission{uperm}, + } + + return testMeta +} + +type PermData struct { + User string // user is only for older versions + EntityID string + Roles []string + SharingMode metadata.SharingMode +} + +type ItemData struct { + Name string + Data []byte + Perms PermData +} + +type OnedriveColInfo struct { + PathElements []string + Perms PermData + Files []ItemData + Folders []ItemData +} + +type onedriveCollection struct { + service path.ServiceType + PathElements []string + items []ItemInfo + aux []ItemInfo + backupVersion int + t *testing.T +} + +func (c onedriveCollection) collection() ColInfo { + cat := path.FilesCategory + if c.service == path.SharePointService { + cat = path.LibrariesCategory + } + + return ColInfo{ + PathElements: c.PathElements, + Category: cat, + Items: c.items, + AuxItems: c.aux, + } +} + +func NewOneDriveCollection( + t *testing.T, + service path.ServiceType, + PathElements []string, + backupVersion int, +) *onedriveCollection { + return &onedriveCollection{ + service: service, + PathElements: PathElements, + backupVersion: backupVersion, + t: t, + } +} + +func DataForInfo( + t *testing.T, + service path.ServiceType, + cols []OnedriveColInfo, + backupVersion int, +) []ColInfo { + var res []ColInfo + + for _, c := range cols { + onedriveCol := NewOneDriveCollection(t, service, c.PathElements, backupVersion) + + for _, f := range c.Files { + onedriveCol.withFile(f.Name, f.Data, f.Perms) + } + + for _, d := range c.Folders { + onedriveCol.withFolder(d.Name, d.Perms) + } + + onedriveCol.withPermissions(c.Perms) + + res = append(res, onedriveCol.collection()) + } + + return res +} + +func (c *onedriveCollection) withFile(name string, fileData []byte, perm PermData) *onedriveCollection { + switch c.backupVersion { + case 0: + // Lookups will occur using the most recent version of things so we need + // the embedded file name to match that. 
+ c.items = append(c.items, onedriveItemWithData( + c.t, + name, + name+metadata.DataFileSuffix, + fileData)) + + // v1-5, early metadata design + case version.OneDrive1DataAndMetaFiles, 2, version.OneDrive3IsMetaMarker, + version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName: + c.items = append(c.items, onedriveItemWithData( + c.t, + name+metadata.DataFileSuffix, + name+metadata.DataFileSuffix, + fileData)) + + md := onedriveMetadata( + c.t, + "", + name+metadata.MetaFileSuffix, + name+metadata.MetaFileSuffix, + perm, + c.backupVersion >= versionPermissionSwitchedToID) + c.items = append(c.items, md) + c.aux = append(c.aux, md) + + // v6+ current metadata design + case version.OneDrive6NameInMeta, version.OneDrive7LocationRef, version.All8MigrateUserPNToID: + c.items = append(c.items, onedriveItemWithData( + c.t, + name+metadata.DataFileSuffix, + name+metadata.DataFileSuffix, + fileData)) + + md := onedriveMetadata( + c.t, + name, + name+metadata.MetaFileSuffix, + name, + perm, + c.backupVersion >= versionPermissionSwitchedToID) + c.items = append(c.items, md) + c.aux = append(c.aux, md) + + default: + assert.FailNowf(c.t, "bad backup version", "version %d", c.backupVersion) + } + + return c +} + +func (c *onedriveCollection) withFolder(name string, perm PermData) *onedriveCollection { + switch c.backupVersion { + case 0, version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName, + version.OneDrive6NameInMeta, version.OneDrive7LocationRef, version.All8MigrateUserPNToID: + return c + + case version.OneDrive1DataAndMetaFiles, 2, version.OneDrive3IsMetaMarker: + c.items = append( + c.items, + onedriveMetadata( + c.t, + "", + name+metadata.DirMetaFileSuffix, + name+metadata.DirMetaFileSuffix, + perm, + c.backupVersion >= versionPermissionSwitchedToID)) + + default: + assert.FailNowf(c.t, "bad backup version", "version %d", c.backupVersion) + } + + return c +} + +// withPermissions adds permissions to the folder represented by this +// onedriveCollection. +func (c *onedriveCollection) withPermissions(perm PermData) *onedriveCollection { + // These versions didn't store permissions for the folder or didn't store them + // in the folder's collection. + if c.backupVersion < version.OneDrive4DirIncludesPermissions { + return c + } + + name := c.PathElements[len(c.PathElements)-1] + metaName := name + + if c.backupVersion >= version.OneDrive5DirMetaNoName { + // We switched to just .dirmeta for metadata file names. 
+ metaName = "" + } + + if name == rootFolder { + return c + } + + md := onedriveMetadata( + c.t, + name, + metaName+metadata.DirMetaFileSuffix, + metaName+metadata.DirMetaFileSuffix, + perm, + c.backupVersion >= versionPermissionSwitchedToID) + + c.items = append(c.items, md) + c.aux = append(c.aux, md) + + return c +} + +type testOneDriveData struct { + FileName string `json:"fileName,omitempty"` + Data []byte `json:"data,omitempty"` +} + +func onedriveItemWithData( + t *testing.T, + name, lookupKey string, + fileData []byte, +) ItemInfo { + t.Helper() + + content := testOneDriveData{ + FileName: lookupKey, + Data: fileData, + } + + serialized, err := json.Marshal(content) + require.NoError(t, err, clues.ToCore(err)) + + return ItemInfo{ + name: name, + data: serialized, + lookupKey: lookupKey, + } +} + +func onedriveMetadata( + t *testing.T, + fileName, itemID, lookupKey string, + perm PermData, + permUseID bool, +) ItemInfo { + t.Helper() + + testMeta := getMetadata(fileName, perm, permUseID) + + testMetaJSON, err := json.Marshal(testMeta) + require.NoError(t, err, "marshalling metadata", clues.ToCore(err)) + + return ItemInfo{ + name: itemID, + data: testMetaJSON, + lookupKey: lookupKey, + } +} + +func GetCollectionsAndExpected( + t *testing.T, + config ConfigInfo, + testCollections []ColInfo, + backupVersion int, +) (int, int, []data.RestoreCollection, map[string]map[string][]byte) { + t.Helper() + + var ( + collections []data.RestoreCollection + expectedData = map[string]map[string][]byte{} + totalItems = 0 + totalKopiaItems = 0 + ) + + for _, owner := range config.ResourceOwners { + numItems, kopiaItems, ownerCollections, userExpectedData := collectionsForInfo( + t, + config.Service, + config.Tenant, + owner, + config.Dest, + testCollections, + backupVersion, + ) + + collections = append(collections, ownerCollections...) + totalItems += numItems + totalKopiaItems += kopiaItems + + maps.Copy(expectedData, userExpectedData) + } + + return totalItems, totalKopiaItems, collections, expectedData +} diff --git a/src/internal/connector/graph_connector_test.go b/src/internal/connector/graph_connector_test.go index 00731b93e..7fcacf255 100644 --- a/src/internal/connector/graph_connector_test.go +++ b/src/internal/connector/graph_connector_test.go @@ -11,7 +11,6 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" - "golang.org/x/exp/maps" inMock "github.com/alcionai/corso/src/internal/common/idname/mock" exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock" @@ -407,66 +406,30 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() { // Exchange Functions //------------------------------------------------------------- -func getCollectionsAndExpected( - t *testing.T, - config configInfo, - testCollections []colInfo, - backupVersion int, -) (int, int, []data.RestoreCollection, map[string]map[string][]byte) { - t.Helper() - - var ( - collections []data.RestoreCollection - expectedData = map[string]map[string][]byte{} - totalItems = 0 - totalKopiaItems = 0 - ) - - for _, owner := range config.resourceOwners { - numItems, kopiaItems, ownerCollections, userExpectedData := collectionsForInfo( - t, - config.service, - config.tenant, - owner, - config.dest, - testCollections, - backupVersion, - ) - - collections = append(collections, ownerCollections...) 
-    totalItems += numItems
-    totalKopiaItems += kopiaItems
-
-    maps.Copy(expectedData, userExpectedData)
-  }
-
-  return totalItems, totalKopiaItems, collections, expectedData
-}
-
 func runRestore(
   t *testing.T,
   ctx context.Context, //revive:disable-line:context-as-argument
-  config configInfo,
+  config ConfigInfo,
   backupVersion int,
   collections []data.RestoreCollection,
   numRestoreItems int,
 ) {
   t.Logf(
     "Restoring collections to %s for resourceOwners(s) %v\n",
-    config.dest.ContainerName,
-    config.resourceOwners)
+    config.Dest.ContainerName,
+    config.ResourceOwners)

   start := time.Now()
-  restoreGC := loadConnector(ctx, t, config.resource)
-  restoreSel := getSelectorWith(t, config.service, config.resourceOwners, true)
+  restoreGC := loadConnector(ctx, t, config.Resource)
+  restoreSel := getSelectorWith(t, config.Service, config.ResourceOwners, true)
   deets, err := restoreGC.ConsumeRestoreCollections(
     ctx,
     backupVersion,
-    config.acct,
+    config.Acct,
     restoreSel,
-    config.dest,
-    config.opts,
+    config.Dest,
+    config.Opts,
     collections,
     fault.New(true))
   require.NoError(t, err, clues.ToCore(err))
@@ -490,30 +453,30 @@ func runRestore(
 func runBackupAndCompare(
   t *testing.T,
   ctx context.Context, //revive:disable-line:context-as-argument
-  config configInfo,
+  config ConfigInfo,
   expectedData map[string]map[string][]byte,
   totalItems int,
   totalKopiaItems int,
-  inputCollections []colInfo,
+  inputCollections []ColInfo,
 ) {
   t.Helper()

   // Run a backup and compare its output with what we put in.
   cats := make(map[path.CategoryType]struct{}, len(inputCollections))
   for _, c := range inputCollections {
-    cats[c.category] = struct{}{}
+    cats[c.Category] = struct{}{}
   }

   var (
-    expectedDests = make([]destAndCats, 0, len(config.resourceOwners))
+    expectedDests = make([]destAndCats, 0, len(config.ResourceOwners))
     idToName = map[string]string{}
     nameToID = map[string]string{}
   )

-  for _, ro := range config.resourceOwners {
+  for _, ro := range config.ResourceOwners {
     expectedDests = append(expectedDests, destAndCats{
       resourceOwner: ro,
-      dest: config.dest.ContainerName,
+      dest: config.Dest.ContainerName,
       cats: cats,
     })

@@ -521,10 +484,10 @@ func runBackupAndCompare(
     nameToID[ro] = ro
   }

-  backupGC := loadConnector(ctx, t, config.resource)
+  backupGC := loadConnector(ctx, t, config.Resource)
   backupGC.IDNameLookup = inMock.NewCache(idToName, nameToID)

-  backupSel := backupSelectorForExpected(t, config.service, expectedDests)
+  backupSel := backupSelectorForExpected(t, config.Service, expectedDests)
   t.Logf("Selective backup of %s\n", backupSel)

   start := time.Now()
@@ -534,7 +497,7 @@ func runBackupAndCompare(
     backupSel,
     nil,
     version.NoBackup,
-    config.opts,
+    config.Opts,
     fault.New(true))
   require.NoError(t, err, clues.ToCore(err))
   // No excludes yet because this isn't an incremental backup.
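For orientation, the exported GetCollectionsAndExpected keeps the same shape as the unexported helper it replaces: it returns the restorable item count, the number of entries handed to kopia (OneDrive/SharePoint metadata files are only counted in the second number), the restore collections, and an expected-data map keyed first by each collection's post-restore kopia path and then by each item's lookup key. A minimal sketch of walking that result from inside the connector package, assuming a *testing.T t, a ConfigInfo cfg, and a []ColInfo testCols are already in scope (the variable names are illustrative, not taken from this change):

  totalItems, kopiaItems, cols, expected := GetCollectionsAndExpected(t, cfg, testCols, version.Backup)
  _ = cols

  for destPath, itemsByLookupKey := range expected {
    for lookupKey, body := range itemsByLookupKey {
      t.Logf("expect %q under %s (%d bytes)", lookupKey, destPath, len(body))
    }
  }

  t.Logf("restorable items: %d, kopia entries: %d", totalItems, kopiaItems)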
@@ -550,8 +513,8 @@ func runBackupAndCompare(
     totalKopiaItems,
     expectedData,
     dcs,
-    config.dest,
-    config.opts.RestorePermissions)
+    config.Dest,
+    config.Opts.RestorePermissions)

   status := backupGC.Wait()

@@ -572,17 +535,17 @@ func runRestoreBackupTest(
   ctx, flush := tester.NewContext()
   defer flush()

-  config := configInfo{
-    acct: acct,
-    opts: opts,
-    resource: test.resource,
-    service: test.service,
-    tenant: tenant,
-    resourceOwners: resourceOwners,
-    dest: tester.DefaultTestRestoreDestination(),
+  config := ConfigInfo{
+    Acct: acct,
+    Opts: opts,
+    Resource: test.resource,
+    Service: test.service,
+    Tenant: tenant,
+    ResourceOwners: resourceOwners,
+    Dest: tester.DefaultTestRestoreDestination(),
   }

-  totalItems, totalKopiaItems, collections, expectedData := getCollectionsAndExpected(
+  totalItems, totalKopiaItems, collections, expectedData := GetCollectionsAndExpected(
     t,
     config,
     test.collections,
@@ -618,17 +581,17 @@ func runRestoreTestWithVerion(
   ctx, flush := tester.NewContext()
   defer flush()

-  config := configInfo{
-    acct: acct,
-    opts: opts,
-    resource: test.resource,
-    service: test.service,
-    tenant: tenant,
-    resourceOwners: resourceOwners,
-    dest: tester.DefaultTestRestoreDestination(),
+  config := ConfigInfo{
+    Acct: acct,
+    Opts: opts,
+    Resource: test.resource,
+    Service: test.service,
+    Tenant: tenant,
+    ResourceOwners: resourceOwners,
+    Dest: tester.DefaultTestRestoreDestination(),
   }

-  totalItems, _, collections, _ := getCollectionsAndExpected(
+  totalItems, _, collections, _ := GetCollectionsAndExpected(
     t,
     config,
     test.collectionsPrevious,
@@ -657,17 +620,17 @@ func runRestoreBackupTestVersions(
   ctx, flush := tester.NewContext()
   defer flush()

-  config := configInfo{
-    acct: acct,
-    opts: opts,
-    resource: test.resource,
-    service: test.service,
-    tenant: tenant,
-    resourceOwners: resourceOwners,
-    dest: tester.DefaultTestRestoreDestination(),
+  config := ConfigInfo{
+    Acct: acct,
+    Opts: opts,
+    Resource: test.resource,
+    Service: test.service,
+    Tenant: tenant,
+    ResourceOwners: resourceOwners,
+    Dest: tester.DefaultTestRestoreDestination(),
   }

-  totalItems, _, collections, _ := getCollectionsAndExpected(
+  totalItems, _, collections, _ := GetCollectionsAndExpected(
     t,
     config,
     test.collectionsPrevious,
@@ -682,7 +645,7 @@ func runRestoreBackupTestVersions(
     totalItems)

   // Get expected output for new version.
-  totalItems, totalKopiaItems, _, expectedData := getCollectionsAndExpected(
+  totalItems, totalKopiaItems, _, expectedData := GetCollectionsAndExpected(
     t,
     config,
     test.collectionsLatest,
@@ -707,11 +670,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
       name: "EmailsWithAttachments",
       service: path.ExchangeService,
       resource: Users,
-      collections: []colInfo{
+      collections: []ColInfo{
         {
-          pathElements: []string{"Inbox"},
-          category: path.EmailCategory,
-          items: []itemInfo{
+          PathElements: []string{"Inbox"},
+          Category: path.EmailCategory,
+          Items: []ItemInfo{
            {
              name: "someencodeditemID",
              data: exchMock.MessageWithDirectAttachment(
@@ -734,11 +697,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
       name: "MultipleEmailsMultipleFolders",
       service: path.ExchangeService,
       resource: Users,
-      collections: []colInfo{
+      collections: []ColInfo{
         {
-          pathElements: []string{"Inbox"},
-          category: path.EmailCategory,
-          items: []itemInfo{
+          PathElements: []string{"Inbox"},
+          Category: path.EmailCategory,
+          Items: []ItemInfo{
            {
              name: "someencodeditemID",
              data: exchMock.MessageWithBodyBytes(
@@ -751,9 +714,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
           },
         },
         {
-          pathElements: []string{"Work"},
-          category: path.EmailCategory,
-          items: []itemInfo{
+          PathElements: []string{"Work"},
+          Category: path.EmailCategory,
+          Items: []ItemInfo{
            {
              name: "someencodeditemID2",
              data: exchMock.MessageWithBodyBytes(
@@ -775,9 +738,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
           },
         },
         {
-          pathElements: []string{"Work", "Inbox"},
-          category: path.EmailCategory,
-          items: []itemInfo{
+          PathElements: []string{"Work", "Inbox"},
+          Category: path.EmailCategory,
+          Items: []ItemInfo{
            {
              name: "someencodeditemID4",
              data: exchMock.MessageWithBodyBytes(
@@ -790,9 +753,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
           },
         },
         {
-          pathElements: []string{"Work", "Inbox", "Work"},
-          category: path.EmailCategory,
-          items: []itemInfo{
+          PathElements: []string{"Work", "Inbox", "Work"},
+          Category: path.EmailCategory,
+          Items: []ItemInfo{
            {
              name: "someencodeditemID5",
              data: exchMock.MessageWithBodyBytes(
@@ -810,11 +773,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
       name: "MultipleContactsSingleFolder",
       service: path.ExchangeService,
       resource: Users,
-      collections: []colInfo{
+      collections: []ColInfo{
         {
-          pathElements: []string{"Contacts"},
-          category: path.ContactsCategory,
-          items: []itemInfo{
+          PathElements: []string{"Contacts"},
+          Category: path.ContactsCategory,
+          Items: []ItemInfo{
            {
              name: "someencodeditemID",
              data: exchMock.ContactBytes("Ghimley"),
@@ -838,11 +801,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
       name: "MultipleContactsMultipleFolders",
       service: path.ExchangeService,
       resource: Users,
-      collections: []colInfo{
+      collections: []ColInfo{
         {
-          pathElements: []string{"Work"},
-          category: path.ContactsCategory,
-          items: []itemInfo{
+          PathElements: []string{"Work"},
+          Category: path.ContactsCategory,
+          Items: []ItemInfo{
            {
              name: "someencodeditemID",
              data: exchMock.ContactBytes("Ghimley"),
@@ -861,9 +824,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
           },
         },
         {
-          pathElements: []string{"Personal"},
-          category: path.ContactsCategory,
-          items: []itemInfo{
+          PathElements: []string{"Personal"},
+          Category: path.ContactsCategory,
+          Items: []ItemInfo{
            {
              name: "someencodeditemID4",
              data: exchMock.ContactBytes("Argon"),
@@ -973,11 +936,11 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
       name: "Contacts",
       service: path.ExchangeService,
       resource: Users,
-      collections: []colInfo{
+      collections: []ColInfo{
         {
-          pathElements: []string{"Work"},
-          category: path.ContactsCategory,
-          items: []itemInfo{
+          PathElements: []string{"Work"},
+          Category: path.ContactsCategory,
+          Items: []ItemInfo{
            {
              name: "someencodeditemID",
              data: exchMock.ContactBytes("Ghimley"),
@@ -986,9 +949,9 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
           },
         },
         {
-          pathElements: []string{"Personal"},
-          category: path.ContactsCategory,
-          items: []itemInfo{
+          PathElements: []string{"Personal"},
+          Category: path.ContactsCategory,
+          Items: []ItemInfo{
            {
              name: "someencodeditemID2",
              data: exchMock.ContactBytes("Irgot"),
@@ -1014,9 +977,9 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
       // },
       // },
       // {
-      // pathElements: []string{"Personal"},
-      // category: path.EventsCategory,
-      // items: []itemInfo{
+      // PathElements: []string{"Personal"},
+      // Category: path.EventsCategory,
+      // Items: []ItemInfo{
       // {
       // name: "someencodeditemID2",
       // data: exchMock.EventWithSubjectBytes("Irgot"),
@@ -1047,7 +1010,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
       resourceOwner: suite.user,
       dest: dest.ContainerName,
       cats: map[path.CategoryType]struct{}{
-        collection.category: {},
+        collection.Category: {},
       },
     })

@@ -1057,7 +1020,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
       suite.connector.tenant,
       suite.user,
       dest,
-      []colInfo{collection},
+      []ColInfo{collection},
       version.Backup,
     )
     allItems += totalItems
@@ -1153,11 +1116,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup_largeMailAttac
       name: "EmailsWithLargeAttachments",
       service: path.ExchangeService,
       resource: Users,
-      collections: []colInfo{
+      collections: []ColInfo{
         {
-          pathElements: []string{"Inbox"},
-          category: path.EmailCategory,
-          items: []itemInfo{
+          PathElements: []string{"Inbox"},
+          Category: path.EmailCategory,
+          Items: []ItemInfo{
            {
              name: "35mbAttachment",
              data: exchMock.MessageWithSizedAttachment(subjectText, 35),
diff --git a/src/internal/connector/graph_connector_test_helper.go b/src/internal/connector/graph_connector_test_helper.go
new file mode 100644
index 000000000..2fb77675b
--- /dev/null
+++ b/src/internal/connector/graph_connector_test_helper.go
@@ -0,0 +1,187 @@
+package connector
+
+import (
+  "bytes"
+  "context"
+  "io"
+  "testing"
+
+  "github.com/alcionai/clues"
+  exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
+  "github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
+  "github.com/alcionai/corso/src/internal/data"
+  "github.com/alcionai/corso/src/pkg/account"
+  "github.com/alcionai/corso/src/pkg/control"
+  "github.com/alcionai/corso/src/pkg/path"
+  "github.com/stretchr/testify/require"
+)
+
+type ColInfo struct {
+  // Elements (in order) for the path representing this collection. Should
+  // only contain elements after the prefix that corso uses for the path. For
+  // example, a collection for the Inbox folder in exchange mail would just be
+  // "Inbox".
+  PathElements []string
+  Category path.CategoryType
+  Items []ItemInfo
+  // AuxItems are items that can be retrieved with Fetch but won't be returned
+  // by Items(). These files do not directly participate in comparisons at the
+  // end of a test.
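+  // For OneDrive-style collections these are typically the .meta and .dirmeta
+  // permission entries, which tests serve back through the collection's Fetch
+  // method rather than through the regular item stream.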
+  AuxItems []ItemInfo
+}
+
+type ItemInfo struct {
+  // lookupKey is a string that can be used to find this data from a set of
+  // other data in the same collection. This key should be something that will
+  // be the same before and after restoring the item in M365 and may not be
+  // the M365 ID. When restoring items out of place, the item is assigned a
+  // new ID making it unsuitable for a lookup key.
+  lookupKey string
+  name string
+  data []byte
+}
+
+type ConfigInfo struct {
+  Acct account.Account
+  Opts control.Options
+  Resource Resource
+  Service path.ServiceType
+  Tenant string
+  ResourceOwners []string
+  Dest control.RestoreDestination
+}
+
+func mustToDataLayerPath(
+  t *testing.T,
+  service path.ServiceType,
+  tenant, resourceOwner string,
+  category path.CategoryType,
+  elements []string,
+  isItem bool,
+) path.Path {
+  res, err := path.Build(tenant, resourceOwner, service, category, isItem, elements...)
+  require.NoError(t, err, clues.ToCore(err))
+
+  return res
+}
+
+// backupOutputPathFromRestore returns a path.Path denoting the location in
+// kopia the data will be placed at. The location is a data-type specific
+// combination of the location the data was recently restored to and where the
+// data was originally in the hierarchy.
+func backupOutputPathFromRestore(
+  t *testing.T,
+  restoreDest control.RestoreDestination,
+  inputPath path.Path,
+) path.Path {
+  base := []string{restoreDest.ContainerName}
+
+  // OneDrive has leading information like the drive ID.
+  if inputPath.Service() == path.OneDriveService || inputPath.Service() == path.SharePointService {
+    folders := inputPath.Folders()
+    base = append(append([]string{}, folders[:3]...), restoreDest.ContainerName)
+
+    if len(folders) > 3 {
+      base = append(base, folders[3:]...)
+    }
+  }
+
+  if inputPath.Service() == path.ExchangeService && inputPath.Category() == path.EmailCategory {
+    base = append(base, inputPath.Folders()...)
+  }
+
+  return mustToDataLayerPath(
+    t,
+    inputPath.Service(),
+    inputPath.Tenant(),
+    inputPath.ResourceOwner(),
+    inputPath.Category(),
+    base,
+    false,
+  )
+}
+
+// TODO(ashmrtn): Make this an actual mock class that can be used in other
+// packages.
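+// mockRestoreCollection wraps a data.Collection and layers a Fetch
+// implementation over a static map of aux items, letting tests hand back
+// metadata files by name; unknown names return data.ErrNotFound.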
+type mockRestoreCollection struct {
+  data.Collection
+  auxItems map[string]data.Stream
+}
+
+func (rc mockRestoreCollection) Fetch(
+  ctx context.Context,
+  name string,
+) (data.Stream, error) {
+  res := rc.auxItems[name]
+  if res == nil {
+    return nil, data.ErrNotFound
+  }
+
+  return res, nil
+}
+
+func collectionsForInfo(
+  t *testing.T,
+  service path.ServiceType,
+  tenant, user string,
+  dest control.RestoreDestination,
+  allInfo []ColInfo,
+  backupVersion int,
+) (int, int, []data.RestoreCollection, map[string]map[string][]byte) {
+  var (
+    collections = make([]data.RestoreCollection, 0, len(allInfo))
+    expectedData = make(map[string]map[string][]byte, len(allInfo))
+    totalItems = 0
+    kopiaEntries = 0
+  )
+
+  for _, info := range allInfo {
+    pth := mustToDataLayerPath(
+      t,
+      service,
+      tenant,
+      user,
+      info.Category,
+      info.PathElements,
+      false)
+
+    mc := exchMock.NewCollection(pth, pth, len(info.Items))
+    baseDestPath := backupOutputPathFromRestore(t, dest, pth)
+
+    baseExpected := expectedData[baseDestPath.String()]
+    if baseExpected == nil {
+      expectedData[baseDestPath.String()] = make(map[string][]byte, len(info.Items))
+      baseExpected = expectedData[baseDestPath.String()]
+    }
+
+    for i := 0; i < len(info.Items); i++ {
+      mc.Names[i] = info.Items[i].name
+      mc.Data[i] = info.Items[i].data
+
+      baseExpected[info.Items[i].lookupKey] = info.Items[i].data
+
+      // We do not count metadata files against item count
+      if backupVersion > 0 &&
+        (service == path.OneDriveService || service == path.SharePointService) &&
+        metadata.HasMetaSuffix(info.Items[i].name) {
+        continue
+      }
+
+      totalItems++
+    }
+
+    c := mockRestoreCollection{Collection: mc, auxItems: map[string]data.Stream{}}
+
+    for _, aux := range info.AuxItems {
+      c.auxItems[aux.name] = &exchMock.Data{
+        ID: aux.name,
+        Reader: io.NopCloser(bytes.NewReader(aux.data)),
+      }
+    }
+
+    collections = append(collections, c)
+    kopiaEntries += len(info.Items)
+  }
+
+  return totalItems, kopiaEntries, collections, expectedData
+}
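To make the path mapping concrete, here is a small sketch of how an Exchange email folder restored into the test container maps back to the kopia location that the comparison expects. It assumes it sits in the connector package next to the tests (so the unexported helpers and the tester import are available); the function name and the tenant and user strings are placeholders, not part of this change:

  // sketchBackupOutputPath is illustrative only.
  func sketchBackupOutputPath(t *testing.T) {
    dest := tester.DefaultTestRestoreDestination()

    inbox := mustToDataLayerPath(
      t,
      path.ExchangeService,
      "tenant-id",
      "user-id",
      path.EmailCategory,
      []string{"Inbox", "Work"},
      false)

    out := backupOutputPathFromRestore(t, dest, inbox)

    // For Exchange email the original folder chain is appended after the
    // restore container, e.g. [<ContainerName> Inbox Work].
    t.Log(out.Folders())
  }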