remove testing object

neha-Gupta1 2023-05-15 12:40:01 +05:30
parent bcf184bbaa
commit 944887268d
5 changed files with 229 additions and 111 deletions
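All five diffs below apply one pattern: test helpers that previously took a *testing.T and aborted via require/assert now return an error, so non-test callers (such as generateAndRestoreOnedriveItems) can handle failures themselves. The following is a minimal, self-contained sketch of that shape; ItemInfo is a simplified stand-in for the helper type in the diff, and fmt.Errorf stands in for the clues wrapping used in the real code.

package main

import (
	"encoding/json"
	"fmt"
)

// ItemInfo is a simplified stand-in for the connector test helper type.
type ItemInfo struct {
	Name      string
	Data      []byte
	LookupKey string
}

// Before (sketch): the helper required a *testing.T and failed the test itself:
//
//	func onedriveItemWithData(t *testing.T, name, lookupKey string, fileData []byte) ItemInfo {
//		serialized, err := json.Marshal(content)
//		require.NoError(t, err, clues.ToCore(err))
//		...
//	}
//
// After (sketch): the helper returns the error and the caller decides how to fail.
func onedriveItemWithData(name, lookupKey string, fileData []byte) (ItemInfo, error) {
	serialized, err := json.Marshal(map[string]string{
		"fileName": lookupKey,
		"data":     string(fileData),
	})
	if err != nil {
		return ItemInfo{}, fmt.Errorf("marshalling item content: %w", err)
	}

	return ItemInfo{Name: name, Data: serialized, LookupKey: lookupKey}, nil
}

func main() {
	item, err := onedriveItemWithData("file.txt", "file.txt.data", []byte("hello"))
	if err != nil {
		fmt.Println("building test item:", err)
		return
	}

	fmt.Println(item.Name, len(item.Data))
}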

View File

@@ -5,7 +5,6 @@ import (
     "fmt"
     "os"
     "strings"
-    "testing"
     "time"

     "github.com/alcionai/clues"
@@ -397,9 +396,10 @@ func generateAndRestoreOnedriveItems(
         cols = append(cols, col...)
     }

-    // TODO Neha: work on this
-    t := testing.T{}
-    input := connector.DataForInfo(&t, service, cols, version.Backup)
+    input, err := connector.DataForInfo(service, cols, version.Backup)
+    if err != nil {
+        return nil, err
+    }

     // collections := getCollections(
     // 	service,
@@ -423,8 +423,8 @@ func generateAndRestoreOnedriveItems(
         Dest: tester.DefaultTestRestoreDestination(""),
     }

-    _, _, collections, _ := connector.GetCollectionsAndExpected(
-        &t,
+    _, _, collections, _, err := connector.GetCollectionsAndExpected(
+        // &t,
         config,
         // service,
         // tenantID,
@@ -432,6 +432,10 @@ func generateAndRestoreOnedriveItems(
         input,
         version.Backup)
+    if err != nil {
+        return nil, err
+    }

     return gc.ConsumeRestoreCollections(ctx, version.Backup, acct, sel, dest, opts, collections, errs)
 }

View File

@@ -449,12 +449,18 @@ func testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(
         },
     }

-    expected := DataForInfo(suite.T(), suite.BackupService(), cols, version.Backup)
+    expected, err := DataForInfo(suite.BackupService(), cols, version.Backup)
+    if err != nil {
+        assert.FailNow(suite.T(), err.Error())
+    }

     for vn := startVersion; vn <= version.Backup; vn++ {
         suite.Run(fmt.Sprintf("Version%d", vn), func() {
             t := suite.T()
-            input := DataForInfo(t, suite.BackupService(), cols, vn)
+            input, err := DataForInfo(suite.BackupService(), cols, vn)
+            if err != nil {
+                assert.FailNow(suite.T(), err.Error())
+            }

             testData := restoreBackupInfoMultiVersion{
                 service: suite.BackupService(),
@@ -658,7 +664,10 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) {
         },
     }

-    expected := DataForInfo(suite.T(), suite.BackupService(), cols, version.Backup)
+    expected, err := DataForInfo(suite.BackupService(), cols, version.Backup)
+    if err != nil {
+        assert.FailNow(suite.T(), err.Error())
+    }

     for vn := startVersion; vn <= version.Backup; vn++ {
         suite.Run(fmt.Sprintf("Version%d", vn), func() {
@@ -666,7 +675,10 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) {
             // Ideally this can always be true or false and still
             // work, but limiting older versions to use emails so as
             // to validate that flow as well.
-            input := DataForInfo(t, suite.BackupService(), cols, vn)
+            input, err := DataForInfo(suite.BackupService(), cols, vn)
+            if err != nil {
+                assert.FailNow(suite.T(), err.Error())
+            }

             testData := restoreBackupInfoMultiVersion{
                 service: suite.BackupService(),
@@ -742,12 +754,18 @@ func testPermissionsBackupAndNoRestore(suite oneDriveSuite, startVersion int) {
         },
     }

-    expected := DataForInfo(suite.T(), suite.BackupService(), expectedCols, version.Backup)
+    expected, err := DataForInfo(suite.BackupService(), expectedCols, version.Backup)
+    if err != nil {
+        assert.FailNow(suite.T(), err.Error())
+    }

     for vn := startVersion; vn <= version.Backup; vn++ {
         suite.Run(fmt.Sprintf("Version%d", vn), func() {
             t := suite.T()
-            input := DataForInfo(t, suite.BackupService(), inputCols, vn)
+            input, err := DataForInfo(suite.BackupService(), inputCols, vn)
+            if err != nil {
+                assert.FailNow(suite.T(), err.Error())
+            }

             testData := restoreBackupInfoMultiVersion{
                 service: suite.BackupService(),
@@ -918,7 +936,10 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio
         },
     }

-    expected := DataForInfo(suite.T(), suite.BackupService(), cols, version.Backup)
+    expected, err := DataForInfo(suite.BackupService(), cols, version.Backup)
+    if err != nil {
+        assert.FailNow(suite.T(), err.Error())
+    }

     for vn := startVersion; vn <= version.Backup; vn++ {
         suite.Run(fmt.Sprintf("Version%d", vn), func() {
@@ -926,7 +947,10 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio
             // Ideally this can always be true or false and still
             // work, but limiting older versions to use emails so as
             // to validate that flow as well.
-            input := DataForInfo(t, suite.BackupService(), cols, vn)
+            input, err := DataForInfo(suite.BackupService(), cols, vn)
+            if err != nil {
+                assert.FailNow(suite.T(), err.Error())
+            }

             testData := restoreBackupInfoMultiVersion{
                 service: suite.BackupService(),
@@ -1032,12 +1056,18 @@ func testRestoreFolderNamedFolderRegression(
         },
     }

-    expected := DataForInfo(suite.T(), suite.BackupService(), cols, version.Backup)
+    expected, err := DataForInfo(suite.BackupService(), cols, version.Backup)
+    if err != nil {
+        assert.FailNow(suite.T(), err.Error())
+    }

     for vn := startVersion; vn <= version.Backup; vn++ {
         suite.Run(fmt.Sprintf("Version%d", vn), func() {
             t := suite.T()
-            input := DataForInfo(t, suite.BackupService(), cols, vn)
+            input, err := DataForInfo(suite.BackupService(), cols, vn)
+            if err != nil {
+                assert.FailNow(suite.T(), err.Error())
+            }

             testData := restoreBackupInfoMultiVersion{
                 service: suite.BackupService(),

View File

@@ -2,12 +2,10 @@ package connector

 import (
     "encoding/json"
-    "testing"
+    "fmt"

     "github.com/alcionai/clues"
     "github.com/google/uuid"
-    "github.com/stretchr/testify/assert"
-    "github.com/stretchr/testify/require"
     "golang.org/x/exp/maps"

     odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
@@ -76,7 +74,7 @@ type onedriveCollection struct {
     items []ItemInfo
     aux []ItemInfo
     backupVersion int
-    t *testing.T
+    // t *testing.T
 }

 func (c onedriveCollection) collection() ColInfo {
@@ -94,7 +92,7 @@ func (c onedriveCollection) collection() ColInfo {
 }

 func NewOneDriveCollection(
-    t *testing.T,
+    // t *testing.T,
     service path.ServiceType,
     PathElements []string,
     backupVersion int,
@@ -103,123 +101,170 @@ func NewOneDriveCollection(
         service: service,
         PathElements: PathElements,
         backupVersion: backupVersion,
-        t: t,
+        // t: t,
     }
 }

 func DataForInfo(
-    t *testing.T,
+    // t *testing.T,
     service path.ServiceType,
     cols []OnedriveColInfo,
     backupVersion int,
-) []ColInfo {
-    var res []ColInfo
+) ([]ColInfo, error) {
+    var (
+        res []ColInfo
+        err error
+    )

     for _, c := range cols {
-        onedriveCol := NewOneDriveCollection(t, service, c.PathElements, backupVersion)
+        onedriveCol := NewOneDriveCollection(service, c.PathElements, backupVersion)

         for _, f := range c.Files {
-            onedriveCol.withFile(f.Name, f.Data, f.Perms)
+            _, err = onedriveCol.withFile(f.Name, f.Data, f.Perms)
+            if err != nil {
+                return res, err
+            }
         }

         for _, d := range c.Folders {
-            onedriveCol.withFolder(d.Name, d.Perms)
+            _, err = onedriveCol.withFolder(d.Name, d.Perms)
+            if err != nil {
+                return res, err
+            }
         }

-        onedriveCol.withPermissions(c.Perms)
+        _, err = onedriveCol.withPermissions(c.Perms)
+        if err != nil {
+            return res, err
+        }

         res = append(res, onedriveCol.collection())
     }

-    return res
+    return res, nil
 }

-func (c *onedriveCollection) withFile(name string, fileData []byte, perm PermData) *onedriveCollection {
+func (c *onedriveCollection) withFile(name string, fileData []byte, perm PermData) (*onedriveCollection, error) {
     switch c.backupVersion {
     case 0:
         // Lookups will occur using the most recent version of things so we need
         // the embedded file name to match that.
-        c.items = append(c.items, onedriveItemWithData(
-            c.t,
+        item, err := onedriveItemWithData(
+            // c.t,
             name,
             name+metadata.DataFileSuffix,
-            fileData))
+            fileData)
+        if err != nil {
+            return c, err
+        }
+
+        c.items = append(c.items, item)

     // v1-5, early metadata design
     case version.OneDrive1DataAndMetaFiles, 2, version.OneDrive3IsMetaMarker,
         version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName:
-        c.items = append(c.items, onedriveItemWithData(
-            c.t,
-            name+metadata.DataFileSuffix,
-            name+metadata.DataFileSuffix,
-            fileData))
-
-        md := onedriveMetadata(
-            c.t,
+        items, err := onedriveItemWithData(
+            // c.t,
+            name+metadata.DataFileSuffix,
+            name+metadata.DataFileSuffix,
+            fileData)
+        if err != nil {
+            return c, err
+        }
+
+        c.items = append(c.items, items)
+
+        md, err := onedriveMetadata(
+            // c.t,
             "",
             name+metadata.MetaFileSuffix,
             name+metadata.MetaFileSuffix,
             perm,
             c.backupVersion >= versionPermissionSwitchedToID)
+        if err != nil {
+            return c, err
+        }

         c.items = append(c.items, md)
         c.aux = append(c.aux, md)

     // v6+ current metadata design
     case version.OneDrive6NameInMeta, version.OneDrive7LocationRef, version.All8MigrateUserPNToID:
-        c.items = append(c.items, onedriveItemWithData(
-            c.t,
+        item, err := onedriveItemWithData(
+            // c.t,
             name+metadata.DataFileSuffix,
             name+metadata.DataFileSuffix,
-            fileData))
-
-        md := onedriveMetadata(
-            c.t,
+            fileData)
+        if err != nil {
+            return c, err
+        }
+
+        c.items = append(c.items, item)
+
+        md, err := onedriveMetadata(
+            // c.t,
             name,
             name+metadata.MetaFileSuffix,
             name,
             perm,
             c.backupVersion >= versionPermissionSwitchedToID)
+        if err != nil {
+            return c, err
+        }

         c.items = append(c.items, md)
         c.aux = append(c.aux, md)

     default:
-        assert.FailNowf(c.t, "bad backup version", "version %d", c.backupVersion)
+        return c, clues.New(fmt.Sprintf("bad backup version. version %d", c.backupVersion))
+        // assert.FailNowf(c.t, "bad backup version", "version %d", c.backupVersion)
     }

-    return c
+    return c, nil
 }

-func (c *onedriveCollection) withFolder(name string, perm PermData) *onedriveCollection {
+func (c *onedriveCollection) withFolder(name string, perm PermData) (*onedriveCollection, error) {
     switch c.backupVersion {
     case 0, version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName,
         version.OneDrive6NameInMeta, version.OneDrive7LocationRef, version.All8MigrateUserPNToID:
-        return c
+        return c, nil

     case version.OneDrive1DataAndMetaFiles, 2, version.OneDrive3IsMetaMarker:
-        c.items = append(
-            c.items,
-            onedriveMetadata(
-                c.t,
+        item, err := onedriveMetadata(
+            // c.t,
             "",
             name+metadata.DirMetaFileSuffix,
             name+metadata.DirMetaFileSuffix,
             perm,
-            c.backupVersion >= versionPermissionSwitchedToID))
-
-    default:
-        assert.FailNowf(c.t, "bad backup version", "version %d", c.backupVersion)
-    }
-
-    return c
+            c.backupVersion >= versionPermissionSwitchedToID)
+        c.items = append(c.items, item)
+
+        if err != nil {
+            return c, err
+        }
+
+    default:
+        return c, clues.New(fmt.Sprintf("bad backup version.version %d", c.backupVersion))
+        // assert.FailNowf(c.t, "bad backup version", "version %d", c.backupVersion)
+    }
+
+    return c, nil
 }

 // withPermissions adds permissions to the folder represented by this
 // onedriveCollection.
-func (c *onedriveCollection) withPermissions(perm PermData) *onedriveCollection {
+func (c *onedriveCollection) withPermissions(perm PermData) (*onedriveCollection, error) {
     // These versions didn't store permissions for the folder or didn't store them
     // in the folder's collection.
     if c.backupVersion < version.OneDrive4DirIncludesPermissions {
-        return c
+        return c, nil
     }

     name := c.PathElements[len(c.PathElements)-1]
@@ -231,21 +276,24 @@ func (c *onedriveCollection) withPermissions(perm PermData) *onedriveCollection
     }

     if name == odConsts.RootPathDir {
-        return c
+        return c, nil
     }

-    md := onedriveMetadata(
-        c.t,
+    md, err := onedriveMetadata(
+        // c.t,
         name,
         metaName+metadata.DirMetaFileSuffix,
         metaName+metadata.DirMetaFileSuffix,
         perm,
         c.backupVersion >= versionPermissionSwitchedToID)
+    if err != nil {
+        return c, err
+    }

     c.items = append(c.items, md)
     c.aux = append(c.aux, md)

-    return c
+    return c, err
 }

 type testOneDriveData struct {
@@ -254,11 +302,11 @@ type testOneDriveData struct {
 }

 func onedriveItemWithData(
-    t *testing.T,
+    // t *testing.T,
     name, lookupKey string,
     fileData []byte,
-) ItemInfo {
-    t.Helper()
+) (ItemInfo, error) {
+    // t.Helper()

     content := testOneDriveData{
         FileName: lookupKey,
@@ -266,42 +314,48 @@ func onedriveItemWithData(
     }

     serialized, err := json.Marshal(content)
-    require.NoError(t, err, clues.ToCore(err))
+    if err != nil {
+        return ItemInfo{}, clues.Stack(err)
+    }
+    // require.NoError(t, err, clues.ToCore(err))

     return ItemInfo{
         name: name,
         data: serialized,
         lookupKey: lookupKey,
-    }
+    }, nil
 }

 func onedriveMetadata(
-    t *testing.T,
+    // t *testing.T,
     fileName, itemID, lookupKey string,
     perm PermData,
     permUseID bool,
-) ItemInfo {
-    t.Helper()
+) (ItemInfo, error) {
+    // t.Helper()

     testMeta := getMetadata(fileName, perm, permUseID)

     testMetaJSON, err := json.Marshal(testMeta)
-    require.NoError(t, err, "marshalling metadata", clues.ToCore(err))
+    if err != nil {
+        return ItemInfo{}, clues.Wrap(err, "marshalling metadata")
+    }
+    // require.NoError(t, err, "marshalling metadata", clues.ToCore(err))

     return ItemInfo{
         name: itemID,
         data: testMetaJSON,
         lookupKey: lookupKey,
-    }
+    }, nil
 }

 func GetCollectionsAndExpected(
-    t *testing.T,
+    // t *testing.T,
     config ConfigInfo,
     testCollections []ColInfo,
     backupVersion int,
-) (int, int, []data.RestoreCollection, map[string]map[string][]byte) {
-    t.Helper()
+) (int, int, []data.RestoreCollection, map[string]map[string][]byte, error) {
+    // t.Helper()

     var (
         collections []data.RestoreCollection
@@ -311,8 +365,8 @@ func GetCollectionsAndExpected(
     )

     for _, owner := range config.ResourceOwners {
-        numItems, kopiaItems, ownerCollections, userExpectedData := collectionsForInfo(
-            t,
+        numItems, kopiaItems, ownerCollections, userExpectedData, err := collectionsForInfo(
+            // t,
             config.Service,
             config.Tenant,
             owner,
@@ -320,6 +374,9 @@ func GetCollectionsAndExpected(
             testCollections,
             backupVersion,
         )
+        if err != nil {
+            return totalItems, totalKopiaItems, collections, expectedData, err
+        }

         collections = append(collections, ownerCollections...)
         totalItems += numItems
@@ -328,5 +385,5 @@ func GetCollectionsAndExpected(
         maps.Copy(expectedData, userExpectedData)
     }

-    return totalItems, totalKopiaItems, collections, expectedData
+    return totalItems, totalKopiaItems, collections, expectedData, nil
 }
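With require and assert removed from this file, failures are now reported through the clues error package instead (clues.Stack, clues.Wrap, and clues.New all appear in the hunks above). Below is a small, self-contained sketch of that reporting style; it assumes the clues calls behave as they are used in this diff, and the two helper functions are illustrative stand-ins rather than code from the repository.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/alcionai/clues"
)

// marshalMetadata mirrors the onedriveMetadata error path: wrap the cause
// with a short description instead of calling require.NoError.
func marshalMetadata(meta any) ([]byte, error) {
	bs, err := json.Marshal(meta)
	if err != nil {
		return nil, clues.Wrap(err, "marshalling metadata")
	}

	return bs, nil
}

// checkBackupVersion mirrors the default switch branches above: build a new
// error rather than failing a *testing.T that is no longer available.
func checkBackupVersion(v int) error {
	if v < 0 {
		return clues.New(fmt.Sprintf("bad backup version. version %d", v))
	}

	return nil
}

func main() {
	if _, err := marshalMetadata(map[string]string{"sharing": "read"}); err != nil {
		fmt.Println(err)
	}

	if err := checkBackupVersion(-1); err != nil {
		fmt.Println(err)
	}
}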

View File

@@ -545,12 +545,16 @@ func runRestoreBackupTest(
         Dest: tester.DefaultTestRestoreDestination(""),
     }

-    totalItems, totalKopiaItems, collections, expectedData := GetCollectionsAndExpected(
-        t,
+    totalItems, totalKopiaItems, collections, expectedData, err := GetCollectionsAndExpected(
+        // t,
         config,
         test.collections,
         version.Backup)
+    if err != nil {
+        assert.FailNow(t, "failed with error", err)
+    }

     runRestore(
         t,
         ctx,
@@ -591,12 +595,16 @@ func runRestoreTestWithVerion(
         Dest: tester.DefaultTestRestoreDestination(""),
     }

-    totalItems, _, collections, _ := GetCollectionsAndExpected(
-        t,
+    totalItems, _, collections, _, err := GetCollectionsAndExpected(
+        // t,
         config,
         test.collectionsPrevious,
         test.backupVersion)
+    if err != nil {
+        assert.FailNow(t, "failed with error", err)
+    }

     runRestore(
         t,
         ctx,
@@ -630,12 +638,16 @@ func runRestoreBackupTestVersions(
         Dest: tester.DefaultTestRestoreDestination(""),
     }

-    totalItems, _, collections, _ := GetCollectionsAndExpected(
-        t,
+    totalItems, _, collections, _, err := GetCollectionsAndExpected(
+        // t,
         config,
         test.collectionsPrevious,
         test.backupVersion)
+    if err != nil {
+        assert.FailNow(t, "failed with error", err)
+    }

     runRestore(
         t,
         ctx,
@@ -645,12 +657,16 @@ func runRestoreBackupTestVersions(
         totalItems)

     // Get expected output for new version.
-    totalItems, totalKopiaItems, _, expectedData := GetCollectionsAndExpected(
-        t,
+    totalItems, totalKopiaItems, _, expectedData, err := GetCollectionsAndExpected(
+        // t,
         config,
         test.collectionsLatest,
         version.Backup)
+    if err != nil {
+        assert.FailNow(t, "failed with error", err)
+    }

     runBackupAndCompare(
         t,
         ctx,
@@ -1014,8 +1030,8 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
         },
     })

-    totalItems, _, collections, expectedData := collectionsForInfo(
-        t,
+    totalItems, _, collections, expectedData, err := collectionsForInfo(
+        // t,
         test.service,
         suite.connector.tenant,
         suite.user,
@@ -1023,6 +1039,11 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
         []ColInfo{collection},
         version.Backup,
     )
+    if err != nil {
+        assert.FailNow(t, "failed with error", err)
+    }

     allItems += totalItems

     for k, v := range expectedData {

View File

@@ -4,10 +4,6 @@ import (
     "bytes"
     "context"
     "io"
-    "testing"
-
-    "github.com/alcionai/clues"
-    "github.com/stretchr/testify/require"

     exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
     "github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
@@ -53,17 +49,20 @@ type ConfigInfo struct {
 }

 func mustToDataLayerPath(
-    t *testing.T,
+    // t *testing.T,
     service path.ServiceType,
     tenant, resourceOwner string,
     category path.CategoryType,
     elements []string,
     isItem bool,
-) path.Path {
+) (path.Path, error) {
     res, err := path.Build(tenant, resourceOwner, service, category, isItem, elements...)
-    require.NoError(t, err, clues.ToCore(err))
+    if err != nil {
+        return nil, err
+    }
+    // require.NoError(t, err, clues.ToCore(err))

-    return res
+    return res, err
 }

 // backupOutputPathFromRestore returns a path.Path denoting the location in
@@ -71,10 +70,10 @@ func mustToDataLayerPath(
 // combination of the location the data was recently restored to and where the
 // data was originally in the hierarchy.
 func backupOutputPathFromRestore(
-    t *testing.T,
+    // t *testing.T,
     restoreDest control.RestoreDestination,
     inputPath path.Path,
-) path.Path {
+) (path.Path, error) {
     base := []string{restoreDest.ContainerName}

     // OneDrive has leading information like the drive ID.
@@ -91,8 +90,8 @@ func backupOutputPathFromRestore(
         base = append(base, inputPath.Folders()...)
     }

-    return mustToDataLayerPath(
-        t,
+    path, err := mustToDataLayerPath(
+        // t,
         inputPath.Service(),
         inputPath.Tenant(),
         inputPath.ResourceOwner(),
@@ -100,6 +99,7 @@ func backupOutputPathFromRestore(
         base,
         false,
     )
+    return path, err
 }

 // TODO(ashmrtn): Make this an actual mock class that can be used in other
@@ -122,13 +122,13 @@ func (rc mockRestoreCollection) Fetch(
 }

 func collectionsForInfo(
-    t *testing.T,
+    // t *testing.T,
     service path.ServiceType,
     tenant, user string,
     dest control.RestoreDestination,
     allInfo []ColInfo,
     backupVersion int,
-) (int, int, []data.RestoreCollection, map[string]map[string][]byte) {
+) (int, int, []data.RestoreCollection, map[string]map[string][]byte, error) {
     var (
         collections = make([]data.RestoreCollection, 0, len(allInfo))
         expectedData = make(map[string]map[string][]byte, len(allInfo))
@@ -137,17 +137,23 @@ func collectionsForInfo(
     )

     for _, info := range allInfo {
-        pth := mustToDataLayerPath(
-            t,
+        pth, err := mustToDataLayerPath(
+            // t,
             service,
             tenant,
             user,
             info.Category,
             info.PathElements,
             false)
+        if err != nil {
+            return totalItems, kopiaEntries, collections, expectedData, err
+        }

         mc := exchMock.NewCollection(pth, pth, len(info.Items))

-        baseDestPath := backupOutputPathFromRestore(t, dest, pth)
+        baseDestPath, err := backupOutputPathFromRestore(dest, pth)
+        if err != nil {
+            return totalItems, kopiaEntries, collections, expectedData, err
+        }

         baseExpected := expectedData[baseDestPath.String()]
         if baseExpected == nil {
@@ -184,5 +190,5 @@ func collectionsForInfo(
         kopiaEntries += len(info.Items)
     }

-    return totalItems, kopiaEntries, collections, expectedData
+    return totalItems, kopiaEntries, collections, expectedData, nil
 }