clean up drive unit test helpers (#4805)
Cleans up the drive unit test helpers. Goals:

1. Remove the idx() and namex() funcs; replace them with id() and name().
2. Minimize factory duplication in helper_test.go.
3. Improve readability of id() and name() calls by adding file and folder variants to handle the 99% of cases where we use them.

No logic changes in this PR; only test func updates.

---

#### Does this PR need a docs update or release note?

- [x] ⛔ No

#### Type of change

- [x] 🤖 Supportability/Tests

#### Issue(s)

* #4689

#### Test Plan

- [x] ⚡ Unit test
parent 3f1f9588f2
commit dc3cfd1ec3
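Reviewer note: the renamed helpers live in helper_test.go, whose diff is suppressed below, so this view only shows their call sites. As a minimal sketch of the shapes those call sites imply (every signature here is inferred from usage such as `fileID("f1")` and `folderName("parent")`, not copied from the source):

```go
// Hypothetical sketch, not the actual helper_test.go contents.
// id() and name() (the replacements for idx()/namex()) are assumed to
// build suffixed identifiers for a named element, e.g.
// id(file, "f1") -> "file-id-f1". The new variants pin the element
// for the two cases that dominate these tests:
func fileID(suffixes ...any) string     { return id(file, suffixes...) }
func fileName(suffixes ...any) string   { return name(file, suffixes...) }
func folderID(suffixes ...any) string   { return id(folder, suffixes...) }
func folderName(suffixes ...any) string { return name(folder, suffixes...) }
```

In the hunks below, `fileID()` reads as "the default file ID", while `fileID("d")` and `folderID("parent")` are suffixed variants.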
@@ -23,7 +23,6 @@ import (
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
 	metaTD "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata/testdata"
-	"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
 	odTD "github.com/alcionai/corso/src/internal/m365/service/onedrive/testdata"
 	"github.com/alcionai/corso/src/internal/m365/support"
 	"github.com/alcionai/corso/src/internal/tester"
@@ -108,7 +107,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
 			name:         "oneDrive, no duplicates",
 			numInstances: 1,
 			service:      path.OneDriveService,
-			itemDeets:    nst{stubItemName, defaultItemSize, now},
+			itemDeets:    nst{stubItemName, defaultFileSize, now},
 			itemInfo:     details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: stubItemName, Modified: now}},
 			getBody:      io.NopCloser(bytes.NewReader(stubItemContent)),
 			getErr:       nil,
@@ -118,7 +117,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
 			name:         "oneDrive, duplicates",
 			numInstances: 3,
 			service:      path.OneDriveService,
-			itemDeets:    nst{stubItemName, defaultItemSize, now},
+			itemDeets:    nst{stubItemName, defaultFileSize, now},
 			getBody:      io.NopCloser(bytes.NewReader(stubItemContent)),
 			getErr:       nil,
 			itemInfo:     details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: stubItemName, Modified: now}},
@@ -128,7 +127,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
 			name:         "oneDrive, malware",
 			numInstances: 3,
 			service:      path.OneDriveService,
-			itemDeets:    nst{stubItemName, defaultItemSize, now},
+			itemDeets:    nst{stubItemName, defaultFileSize, now},
 			itemInfo:     details.ItemInfo{},
 			getBody:      nil,
 			getErr:       clues.New("test malware").Label(graph.LabelsMalware),
@@ -139,7 +138,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
 			name:         "oneDrive, not found",
 			numInstances: 3,
 			service:      path.OneDriveService,
-			itemDeets:    nst{stubItemName, defaultItemSize, now},
+			itemDeets:    nst{stubItemName, defaultFileSize, now},
 			itemInfo:     details.ItemInfo{},
 			getBody:      nil,
 			getErr:       clues.New("test not found").Label(graph.LabelStatus(http.StatusNotFound)),
@@ -150,7 +149,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
 			name:         "sharePoint, no duplicates",
 			numInstances: 1,
 			service:      path.SharePointService,
-			itemDeets:    nst{stubItemName, defaultItemSize, now},
+			itemDeets:    nst{stubItemName, defaultFileSize, now},
 			itemInfo:     details.ItemInfo{SharePoint: &details.SharePointInfo{ItemName: stubItemName, Modified: now}},
 			getBody:      io.NopCloser(bytes.NewReader(stubItemContent)),
 			getErr:       nil,
@@ -160,7 +159,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
 			name:         "sharePoint, duplicates",
 			numInstances: 3,
 			service:      path.SharePointService,
-			itemDeets:    nst{stubItemName, defaultItemSize, now},
+			itemDeets:    nst{stubItemName, defaultFileSize, now},
 			itemInfo:     details.ItemInfo{SharePoint: &details.SharePointInfo{ItemName: stubItemName, Modified: now}},
 			getBody:      io.NopCloser(bytes.NewReader(stubItemContent)),
 			getErr:       nil,
@@ -185,9 +184,9 @@ func (suite *CollectionUnitSuite) TestCollection() {
 			folderPath, err := pb.ToDataLayerOneDrivePath("tenant", "owner", false)
 			require.NoError(t, err, clues.ToCore(err))
 
-			mbh := mock.DefaultOneDriveBH("a-user")
+			mbh := defaultOneDriveBH("a-user")
 			if test.service == path.SharePointService {
-				mbh = mock.DefaultSharePointBH("a-site")
+				mbh = defaultSharePointBH("a-site")
 				mbh.ItemInfo.SharePoint.Modified = now
 				mbh.ItemInfo.SharePoint.ItemName = stubItemName
 			} else {
@@ -202,10 +201,10 @@ func (suite *CollectionUnitSuite) TestCollection() {
 				},
 			}
 			mbh.GetErrs = []error{test.getErr}
-			mbh.GI = mock.GetsItem{Err: assert.AnError}
+			mbh.GI = getsItem{Err: assert.AnError}
 
 			pcr := metaTD.NewStubPermissionResponse(metadata.GV2User, stubMetaID, stubMetaEntityID, stubMetaRoles)
-			mbh.GIP = mock.GetsItemPermission{Perm: pcr}
+			mbh.GIP = getsItemPermission{Perm: pcr}
 
 			coll, err := NewCollection(
 				mbh,
@@ -305,7 +304,7 @@ func (suite *CollectionUnitSuite) TestCollectionReadError() {
 		collStatus = support.ControllerOperationStatus{}
 		wg         = sync.WaitGroup{}
 		name       = "name"
-		size       = defaultItemSize
+		size       = defaultFileSize
 		now        = time.Now()
 	)
 
@@ -318,9 +317,9 @@ func (suite *CollectionUnitSuite) TestCollectionReadError() {
 	folderPath, err := pb.ToDataLayerOneDrivePath("a-tenant", "a-user", false)
 	require.NoError(t, err, clues.ToCore(err))
 
-	mbh := mock.DefaultOneDriveBH("a-user")
-	mbh.GI = mock.GetsItem{Err: assert.AnError}
-	mbh.GIP = mock.GetsItemPermission{Perm: models.NewPermissionCollectionResponse()}
+	mbh := defaultOneDriveBH("a-user")
+	mbh.GI = getsItem{Err: assert.AnError}
+	mbh.GIP = getsItemPermission{Perm: models.NewPermissionCollectionResponse()}
 	mbh.GetResps = []*http.Response{
 		nil,
 		{StatusCode: http.StatusOK, Body: io.NopCloser(strings.NewReader("test"))},
@@ -375,7 +374,7 @@ func (suite *CollectionUnitSuite) TestCollectionReadUnauthorizedErrorRetry() {
 		collStatus = support.ControllerOperationStatus{}
 		wg         = sync.WaitGroup{}
 		name       = "name"
-		size       = defaultItemSize
+		size       = defaultFileSize
 		now        = time.Now()
 	)
 
@@ -397,9 +396,9 @@ func (suite *CollectionUnitSuite) TestCollectionReadUnauthorizedErrorRetry() {
 	folderPath, err := pb.ToDataLayerOneDrivePath("a-tenant", "a-user", false)
 	require.NoError(t, err)
 
-	mbh := mock.DefaultOneDriveBH("a-user")
-	mbh.GI = mock.GetsItem{Item: stubItem}
-	mbh.GIP = mock.GetsItemPermission{Perm: models.NewPermissionCollectionResponse()}
+	mbh := defaultOneDriveBH("a-user")
+	mbh.GI = getsItem{Item: stubItem}
+	mbh.GIP = getsItemPermission{Perm: models.NewPermissionCollectionResponse()}
 	mbh.GetResps = []*http.Response{
 		nil,
 		{StatusCode: http.StatusOK, Body: io.NopCloser(strings.NewReader("test"))},
@@ -457,9 +456,9 @@ func (suite *CollectionUnitSuite) TestCollectionPermissionBackupLatestModTime()
 	folderPath, err := pb.ToDataLayerOneDrivePath("a-tenant", "a-user", false)
 	require.NoError(t, err, clues.ToCore(err))
 
-	mbh := mock.DefaultOneDriveBH("a-user")
+	mbh := defaultOneDriveBH("a-user")
 	mbh.ItemInfo = details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: "fakeName", Modified: time.Now()}}
-	mbh.GIP = mock.GetsItemPermission{Perm: models.NewPermissionCollectionResponse()}
+	mbh.GIP = getsItemPermission{Perm: models.NewPermissionCollectionResponse()}
 	mbh.GetResps = []*http.Response{{
 		StatusCode: http.StatusOK,
 		Body:       io.NopCloser(strings.NewReader("Fake Data!")),
@@ -635,8 +634,8 @@ func (suite *GetDriveItemUnitTestSuite) TestGetDriveItem_error() {
 
 			stubItem.GetFile().SetMimeType(&test.itemMimeType)
 
-			mbh := mock.DefaultOneDriveBH("a-user")
-			mbh.GI = mock.GetsItem{Item: stubItem}
+			mbh := defaultOneDriveBH("a-user")
+			mbh.GI = getsItem{Item: stubItem}
 			mbh.GetResps = []*http.Response{{StatusCode: http.StatusOK}}
 			mbh.GetErrs = []error{test.err}
 
@@ -692,7 +691,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 
 	table := []struct {
 		name     string
-		mgi      mock.GetsItem
+		mgi      getsItem
 		itemInfo details.ItemInfo
 		respBody []io.ReadCloser
 		getErr   []error
@@ -711,7 +710,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 		},
 		{
 			name:     "expired url redownloads",
-			mgi:      mock.GetsItem{Item: itemWID, Err: nil},
+			mgi:      getsItem{Item: itemWID, Err: nil},
 			itemInfo: details.ItemInfo{},
 			respBody: []io.ReadCloser{nil, iorc},
 			getErr:   []error{errUnauth, nil},
@@ -731,14 +730,14 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 			name:      "re-fetching the item fails",
 			itemInfo:  details.ItemInfo{},
 			getErr:    []error{errUnauth},
-			mgi:       mock.GetsItem{Item: nil, Err: assert.AnError},
+			mgi:       getsItem{Item: nil, Err: assert.AnError},
 			expectErr: require.Error,
 			expect:    require.Nil,
 			muc:       m,
 		},
 		{
 			name:     "expired url fails redownload",
-			mgi:      mock.GetsItem{Item: itemWID, Err: nil},
+			mgi:      getsItem{Item: itemWID, Err: nil},
 			itemInfo: details.ItemInfo{},
 			respBody: []io.ReadCloser{nil, nil},
 			getErr:   []error{errUnauth, assert.AnError},
@@ -748,7 +747,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 		},
 		{
 			name:     "url refreshed from cache",
-			mgi:      mock.GetsItem{Item: itemWID, Err: nil},
+			mgi:      getsItem{Item: itemWID, Err: nil},
 			itemInfo: details.ItemInfo{},
 			respBody: []io.ReadCloser{nil, iorc},
 			getErr:   []error{errUnauth, nil},
@@ -766,7 +765,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 		},
 		{
 			name:     "url refreshed from cache but item deleted",
-			mgi:      mock.GetsItem{Item: itemWID, Err: graph.ErrDeletedInFlight},
+			mgi:      getsItem{Item: itemWID, Err: graph.ErrDeletedInFlight},
 			itemInfo: details.ItemInfo{},
 			respBody: []io.ReadCloser{nil, nil, nil},
 			getErr:   []error{errUnauth, graph.ErrDeletedInFlight, graph.ErrDeletedInFlight},
@@ -784,7 +783,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 		},
 		{
 			name:     "fallback to item fetch on any cache error",
-			mgi:      mock.GetsItem{Item: itemWID, Err: nil},
+			mgi:      getsItem{Item: itemWID, Err: nil},
 			itemInfo: details.ItemInfo{},
 			respBody: []io.ReadCloser{nil, iorc},
 			getErr:   []error{errUnauth, nil},
@@ -814,7 +813,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 				}
 			}
 
-			mbh := mock.DefaultOneDriveBH("a-user")
+			mbh := defaultOneDriveBH("a-user")
 			mbh.GI = test.mgi
 			mbh.ItemInfo = test.itemInfo
 			mbh.GetResps = resps
@@ -980,9 +979,9 @@ func (suite *CollectionUnitSuite) TestItemExtensions() {
 
 		wg.Add(1)
 
-		mbh := mock.DefaultOneDriveBH("a-user")
-		mbh.GI = mock.GetsItem{Err: assert.AnError}
-		mbh.GIP = mock.GetsItemPermission{Perm: models.NewPermissionCollectionResponse()}
+		mbh := defaultOneDriveBH("a-user")
+		mbh.GI = getsItem{Err: assert.AnError}
+		mbh.GIP = getsItemPermission{Perm: models.NewPermissionCollectionResponse()}
 		mbh.GetResps = []*http.Response{
 			{
 				StatusCode: http.StatusOK,
[File diff suppressed because it is too large]
@@ -629,7 +629,7 @@ func (c *Collections) addFileToTree(
 		return nil, nil
 	}
 
-	_, alreadySeen := tree.fileIDToParentID[fileID]
+	alreadySeen := tree.hasFile(fileID)
 	parentNode, parentNotNil := tree.folderIDToNode[parentID]
 
 	if parentNotNil && !alreadySeen {
@@ -686,25 +686,10 @@ func (c *Collections) makeDriveTombstones(
 			continue
 		}
 
-		// TODO: call NewTombstoneCollection
-		coll, err := NewCollection(
-			c.handler,
-			c.protectedResource,
-			nil, // delete the drive
+		coll := data.NewTombstoneCollection(
 			prevDrivePath,
 			driveID,
-			c.statusUpdater,
-			c.ctrl,
-			false,
-			true,
-			nil,
 			c.counter.Local())
-		if err != nil {
-			err = clues.WrapWC(ctx, err, "making drive tombstone")
-			el.AddRecoverable(ctx, err)
-
-			continue
-		}
 
 		colls = append(colls, coll)
 	}
[File diff suppressed because it is too large]
@@ -309,6 +309,11 @@ func (face *folderyMcFolderFace) setPreviousPath(
 // file handling
 // ---------------------------------------------------------------------------
 
+func (face *folderyMcFolderFace) hasFile(id string) bool {
+	_, exists := face.fileIDToParentID[id]
+	return exists
+}
+
 // addFile places the file in the correct parent node. If the
 // file was already added to the tree and is getting relocated,
 // this func will update and/or clean up all the old references.
@@ -9,7 +9,6 @@ import (
 	"github.com/stretchr/testify/suite"
 	"golang.org/x/exp/maps"
 
-	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/services/m365/custom"
@@ -68,7 +67,7 @@ func (suite *DeltaTreeUnitSuite) TestNewNodeyMcNodeFace() {
 func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder() {
 	table := []struct {
 		tname     string
-		tree      func(t *testing.T) *folderyMcFolderFace
+		tree      func(t *testing.T, d *deltaDrive) *folderyMcFolderFace
 		parentID  string
 		id        string
 		name      string
@@ -94,16 +93,16 @@
 			tname:     "add folder",
 			tree:      treeWithRoot,
 			parentID:  rootID,
-			id:        id(folder),
-			name:      name(folder),
+			id:        folderID(),
+			name:      folderName(),
 			expectErr: assert.NoError,
 		},
 		{
 			tname:     "add package",
 			tree:      treeWithRoot,
 			parentID:  rootID,
-			id:        id(folder),
-			name:      name(folder),
+			id:        folderID(),
+			name:      folderName(),
 			isPackage: true,
 			expectErr: assert.NoError,
 		},
@@ -111,7 +110,7 @@
 			tname:     "missing ID",
 			tree:      treeWithRoot,
 			parentID:  rootID,
-			name:      name(folder),
+			name:      folderName(),
 			isPackage: true,
 			expectErr: assert.Error,
 		},
@@ -119,15 +118,15 @@
 			tname:     "missing name",
 			tree:      treeWithRoot,
 			parentID:  rootID,
-			id:        id(folder),
+			id:        folderID(),
 			isPackage: true,
 			expectErr: assert.Error,
 		},
 		{
 			tname:     "missing parentID",
 			tree:      treeWithRoot,
-			id:        id(folder),
-			name:      name(folder),
+			id:        folderID(),
+			name:      folderName(),
 			isPackage: true,
 			expectErr: assert.Error,
 		},
@@ -135,29 +134,29 @@
 			tname:     "already tombstoned",
 			tree:      treeWithTombstone,
 			parentID:  rootID,
-			id:        id(folder),
-			name:      name(folder),
+			id:        folderID(),
+			name:      folderName(),
 			expectErr: assert.NoError,
 		},
 		{
 			tname: "add folder before parent",
-			tree: func(t *testing.T) *folderyMcFolderFace {
+			tree: func(t *testing.T, d *deltaDrive) *folderyMcFolderFace {
 				return &folderyMcFolderFace{
 					folderIDToNode: map[string]*nodeyMcNodeFace{},
 				}
 			},
 			parentID:  rootID,
-			id:        id(folder),
-			name:      name(folder),
+			id:        folderID(),
+			name:      folderName(),
 			isPackage: true,
 			expectErr: assert.Error,
 		},
 		{
 			tname: "folder already exists",
 			tree:  treeWithFolders,
-			parentID:  idx(folder, "parent"),
-			id:        id(folder),
-			name:      name(folder),
+			parentID:  folderID("parent"),
+			id:        folderID(),
+			name:      folderName(),
 			expectErr: assert.NoError,
 		},
 	}
@@ -168,7 +167,7 @@
 			ctx, flush := tester.NewContext(t)
 			defer flush()
 
-			tree := test.tree(t)
+			tree := test.tree(t, drive())
 
 			err := tree.setFolder(
 				ctx,
@@ -203,18 +202,18 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddTombstone() {
 	table := []struct {
 		name      string
 		id        string
-		tree      func(t *testing.T) *folderyMcFolderFace
+		tree      func(t *testing.T, d *deltaDrive) *folderyMcFolderFace
 		expectErr assert.ErrorAssertionFunc
 	}{
 		{
 			name:      "add tombstone",
-			id:        id(folder),
+			id:        folderID(),
 			tree:      newTree,
 			expectErr: assert.NoError,
 		},
 		{
 			name:      "duplicate tombstone",
-			id:        id(folder),
+			id:        folderID(),
 			tree:      treeWithTombstone,
 			expectErr: assert.NoError,
 		},
@@ -224,14 +223,8 @@
 			expectErr: assert.Error,
 		},
 		{
-			name:      "conflict: folder alive",
-			id:        id(folder),
-			tree:      treeWithTombstone,
-			expectErr: assert.NoError,
-		},
-		{
-			name:      "already tombstoned",
-			id:        id(folder),
+			name:      "folder exists and is alive",
+			id:        folderID(),
 			tree:      treeWithTombstone,
 			expectErr: assert.NoError,
 		},
@@ -243,7 +236,7 @@
 			ctx, flush := tester.NewContext(t)
 			defer flush()
 
-			tree := test.tree(t)
+			tree := test.tree(t, drive())
 
 			err := tree.setTombstone(ctx, test.id)
 			test.expectErr(t, err, clues.ToCore(err))
@@ -270,14 +263,14 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetPreviousPath() {
 		name            string
 		id              string
 		prev            path.Path
-		tree            func(t *testing.T) *folderyMcFolderFace
+		tree            func(t *testing.T, d *deltaDrive) *folderyMcFolderFace
 		expectErr       assert.ErrorAssertionFunc
 		expectLive      bool
 		expectTombstone bool
 	}{
 		{
 			name:      "no changes become a no-op",
-			id:        id(folder),
+			id:        folderID(),
 			prev:      pathWith(defaultLoc()),
 			tree:      newTree,
 			expectErr: assert.NoError,
@@ -295,7 +288,7 @@
 		},
 		{
 			name:      "create tombstone after reset",
-			id:        id(folder),
+			id:        folderID(),
 			prev:      pathWith(defaultLoc()),
 			tree:      treeAfterReset,
 			expectErr: assert.NoError,
@@ -312,7 +305,7 @@
 		},
 		{
 			name:       "missing prev",
-			id:         id(folder),
+			id:         folderID(),
 			tree:       newTree,
 			expectErr:  assert.Error,
 			expectLive: false,
@@ -320,7 +313,7 @@
 		},
 		{
 			name:      "update live folder",
-			id:        id(folder),
+			id:        folderID(),
 			prev:      pathWith(defaultLoc()),
 			tree:      treeWithFolders,
 			expectErr: assert.NoError,
@@ -329,7 +322,7 @@
 		},
 		{
 			name:      "update tombstone",
-			id:        id(folder),
+			id:        folderID(),
 			prev:      pathWith(defaultLoc()),
 			tree:      treeWithTombstone,
 			expectErr: assert.NoError,
@@ -340,7 +333,7 @@
 	for _, test := range table {
 		suite.Run(test.name, func() {
 			t := suite.T()
-			tree := test.tree(t)
+			tree := test.tree(t, drive())
 
 			err := tree.setPreviousPath(test.id, test.prev)
 			test.expectErr(t, err, clues.ToCore(err))
@@ -478,7 +471,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder_correctTree()
 	ctx, flush := tester.NewContext(t)
 	defer flush()
 
-	tree := treeWithRoot(t)
+	tree := treeWithRoot(t, drive())
 
 	set := func(
 		parentID, fid, fname string,
@@ -564,7 +557,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder_correctTombst
 	ctx, flush := tester.NewContext(t)
 	defer flush()
 
-	tree := treeWithRoot(t)
+	tree := treeWithRoot(t, drive())
 
 	set := func(
 		parentID, fid, fname string,
@@ -739,7 +732,8 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder_correctTombst
 func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() {
 	table := []struct {
 		tname       string
-		tree        func(t *testing.T) *folderyMcFolderFace
+		tree        func(t *testing.T, d *deltaDrive) *folderyMcFolderFace
+		id          string
 		oldParentID string
 		parentID    string
 		contentSize int64
@@ -749,69 +743,87 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() {
 		{
 			tname:       "add file to root",
 			tree:        treeWithRoot,
+			id:          fileID(),
 			oldParentID: "",
 			parentID:    rootID,
-			contentSize: 42,
+			contentSize: defaultFileSize,
 			expectErr:   assert.NoError,
-			expectFiles: map[string]string{id(file): rootID},
+			expectFiles: map[string]string{fileID(): rootID},
 		},
 		{
 			tname:       "add file to folder",
 			tree:        treeWithFolders,
+			id:          fileID(),
 			oldParentID: "",
-			parentID:    id(folder),
+			parentID:    folderID(),
 			contentSize: 24,
 			expectErr:   assert.NoError,
-			expectFiles: map[string]string{id(file): id(folder)},
+			expectFiles: map[string]string{fileID(): folderID()},
 		},
 		{
 			tname:       "re-add file at the same location",
 			tree:        treeWithFileAtRoot,
+			id:          fileID(),
 			oldParentID: rootID,
 			parentID:    rootID,
 			contentSize: 84,
 			expectErr:   assert.NoError,
-			expectFiles: map[string]string{id(file): rootID},
+			expectFiles: map[string]string{fileID(): rootID},
 		},
 		{
 			tname:       "move file from folder to root",
 			tree:        treeWithFileInFolder,
-			oldParentID: id(folder),
+			id:          fileID(),
+			oldParentID: folderID(),
 			parentID:    rootID,
 			contentSize: 48,
 			expectErr:   assert.NoError,
-			expectFiles: map[string]string{id(file): rootID},
+			expectFiles: map[string]string{fileID(): rootID},
 		},
 		{
 			tname:       "move file from tombstone to root",
 			tree:        treeWithFileInTombstone,
-			oldParentID: id(folder),
+			id:          fileID(),
+			oldParentID: folderID(),
 			parentID:    rootID,
 			contentSize: 2,
 			expectErr:   assert.NoError,
-			expectFiles: map[string]string{id(file): rootID},
+			expectFiles: map[string]string{fileID(): rootID},
 		},
 		{
+			tname:       "adding file with no ID",
+			tree:        treeWithTombstone,
+			id:          "",
+			oldParentID: "",
+			parentID:    folderID(),
+			contentSize: 4,
+			expectErr:   assert.Error,
+			expectFiles: map[string]string{},
+		},
+		{
 			tname:       "error adding file to tombstone",
 			tree:        treeWithTombstone,
+			id:          fileID(),
 			oldParentID: "",
-			parentID:    id(folder),
-			contentSize: 4,
+			parentID:    folderID(),
+			contentSize: 8,
 			expectErr:   assert.Error,
 			expectFiles: map[string]string{},
 		},
 		{
 			tname:       "error adding file before parent",
 			tree:        treeWithTombstone,
+			id:          fileID(),
 			oldParentID: "",
-			parentID:    idx(folder, 1),
-			contentSize: 8,
+			parentID:    folderID("not-in-tree"),
+			contentSize: 16,
 			expectErr:   assert.Error,
 			expectFiles: map[string]string{},
 		},
 		{
 			tname:       "error adding file without parent id",
 			tree:        treeWithTombstone,
+			id:          fileID(),
 			oldParentID: "",
 			parentID:    "",
 			contentSize: 16,
@@ -822,15 +834,13 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() {
 	for _, test := range table {
 		suite.Run(test.tname, func() {
 			t := suite.T()
-			tree := test.tree(t)
-
-			df := driveFile(file, parentDir(), test.parentID)
-			df.SetSize(ptr.To(test.contentSize))
+			d := drive()
+			tree := test.tree(t, d)
 
 			err := tree.addFile(
 				test.parentID,
-				id(file),
-				custom.ToCustomDriveItem(df))
+				test.id,
+				custom.ToCustomDriveItem(d.fileWSizeAt(test.contentSize, test.parentID)))
 			test.expectErr(t, err, clues.ToCore(err))
 			assert.Equal(t, test.expectFiles, tree.fileIDToParentID)
 
@@ -841,7 +851,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() {
 				parent := tree.getNode(test.parentID)
 
 				require.NotNil(t, parent)
-				assert.Contains(t, parent.files, id(file))
+				assert.Contains(t, parent.files, fileID())
 
 				countSize := tree.countLiveFilesAndSizes()
 				assert.Equal(t, 1, countSize.numFiles, "should have one file in the tree")
@@ -851,7 +861,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() {
 				old := tree.getNode(test.oldParentID)
 
 				require.NotNil(t, old)
-				assert.NotContains(t, old.files, id(file))
+				assert.NotContains(t, old.files, fileID())
 			}
 		})
 	}
@@ -860,7 +870,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() {
 func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_DeleteFile() {
 	table := []struct {
 		tname    string
-		tree     func(t *testing.T) *folderyMcFolderFace
+		tree     func(t *testing.T, d *deltaDrive) *folderyMcFolderFace
 		parentID string
 	}{
 		{
@@ -876,34 +886,35 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_DeleteFile() {
 		{
 			tname:    "delete file from folder",
 			tree:     treeWithFileInFolder,
-			parentID: id(folder),
+			parentID: folderID(),
 		},
 		{
 			tname:    "delete file from tombstone",
 			tree:     treeWithFileInTombstone,
-			parentID: id(folder),
+			parentID: folderID(),
 		},
 	}
 	for _, test := range table {
 		suite.Run(test.tname, func() {
 			t := suite.T()
-			tree := test.tree(t)
+			tree := test.tree(t, drive())
 
-			tree.deleteFile(id(file))
+			tree.deleteFile(fileID())
 
 			parent := tree.getNode(test.parentID)
 
 			require.NotNil(t, parent)
-			assert.NotContains(t, parent.files, id(file))
-			assert.NotContains(t, tree.fileIDToParentID, id(file))
-			assert.Contains(t, tree.deletedFileIDs, id(file))
+			assert.NotContains(t, parent.files, fileID())
+			assert.NotContains(t, tree.fileIDToParentID, fileID())
+			assert.Contains(t, tree.deletedFileIDs, fileID())
 		})
 	}
 }
 
 func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_addAndDeleteFile() {
 	t := suite.T()
-	tree := treeWithRoot(t)
+	d := drive()
+	tree := treeWithRoot(t, d)
 	fID := id(file)
 
 	require.Len(t, tree.fileIDToParentID, 0)
@@ -916,7 +927,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_addAndDeleteFile() {
 	assert.Len(t, tree.deletedFileIDs, 1)
 	assert.Contains(t, tree.deletedFileIDs, fID)
 
-	err := tree.addFile(rootID, fID, custom.ToCustomDriveItem(fileAtRoot()))
+	err := tree.addFile(rootID, fID, custom.ToCustomDriveItem(d.fileAtRoot()))
	require.NoError(t, err, clues.ToCore(err))
 
 	assert.Len(t, tree.fileIDToParentID, 1)
@@ -935,7 +946,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_addAndDeleteFile() {
 func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateExcludeItemIDs() {
 	table := []struct {
 		name   string
-		tree   func(t *testing.T) *folderyMcFolderFace
+		tree   func(t *testing.T, d *deltaDrive) *folderyMcFolderFace
 		expect map[string]struct{}
 	}{
 		{
@@ -946,7 +957,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateExcludeItemIDs(
 		{
 			name:   "one file in a folder",
 			tree:   treeWithFileInFolder,
-			expect: makeExcludeMap(id(file)),
+			expect: makeExcludeMap(fileID()),
 		},
 		{
 			name: "one file in a tombstone",
@@ -956,22 +967,22 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateExcludeItemIDs(
 		{
 			name:   "one deleted file",
 			tree:   treeWithDeletedFile,
-			expect: makeExcludeMap(idx(file, "d")),
+			expect: makeExcludeMap(fileID("d")),
 		},
 		{
 			name: "files in folders and tombstones",
 			tree: fullTree,
 			expect: makeExcludeMap(
-				id(file),
-				idx(file, "r"),
-				idx(file, "p"),
-				idx(file, "d")),
+				fileID(),
+				fileID("r"),
+				fileID("p"),
+				fileID("d")),
 		},
 	}
 	for _, test := range table {
 		suite.Run(test.name, func() {
 			t := suite.T()
-			tree := test.tree(t)
+			tree := test.tree(t, drive())
 
 			result := tree.generateExcludeItemIDs()
 			assert.Equal(t, test.expect, result)
@@ -985,10 +996,11 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateExcludeItemIDs(
 
 func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables() {
 	t := suite.T()
+	d := drive()
 
 	table := []struct {
 		name      string
-		tree      func(t *testing.T) *folderyMcFolderFace
+		tree      func(t *testing.T, d *deltaDrive) *folderyMcFolderFace
 		prevPaths map[string]string
 		expectErr require.ErrorAssertionFunc
 		expect    map[string]collectable
@@ -1005,7 +1017,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables()
 			expectErr: require.NoError,
 			expect: map[string]collectable{
 				rootID: {
-					currPath:                  fullPathPath(t),
+					currPath:                  d.fullPath(t),
 					files:                     map[string]*custom.DriveItem{},
 					folderID:                  rootID,
 					isPackageOrChildOfPackage: false,
@@ -1019,9 +1031,9 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables()
 			expectErr: require.NoError,
 			expect: map[string]collectable{
 				rootID: {
-					currPath: fullPathPath(t),
+					currPath: d.fullPath(t),
 					files: map[string]*custom.DriveItem{
-						id(file): custom.ToCustomDriveItem(fileAtRoot()),
+						fileID(): custom.ToCustomDriveItem(d.fileAtRoot()),
 					},
 					folderID:                  rootID,
 					isPackageOrChildOfPackage: false,
@@ -1035,41 +1047,41 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables()
 			expectErr: require.NoError,
 			expect: map[string]collectable{
 				rootID: {
-					currPath:                  fullPathPath(t),
+					currPath:                  d.fullPath(t),
 					files:                     map[string]*custom.DriveItem{},
 					folderID:                  rootID,
 					isPackageOrChildOfPackage: false,
 					loc:                       path.Elements{},
 				},
-				idx(folder, "parent"): {
-					currPath:                  fullPathPath(t, namex(folder, "parent")),
+				folderID("parent"): {
+					currPath:                  d.fullPath(t, folderName("parent")),
 					files:                     map[string]*custom.DriveItem{},
-					folderID:                  idx(folder, "parent"),
+					folderID:                  folderID("parent"),
 					isPackageOrChildOfPackage: false,
 					loc:                       path.Elements{rootName},
 				},
-				id(folder): {
-					currPath: fullPathPath(t, namex(folder, "parent"), name(folder)),
+				folderID(): {
+					currPath: d.fullPath(t, folderName("parent"), folderName()),
 					files: map[string]*custom.DriveItem{
-						id(file): custom.ToCustomDriveItem(fileAt("parent")),
+						fileID(): custom.ToCustomDriveItem(d.fileAt("parent")),
 					},
-					folderID:                  id(folder),
+					folderID:                  folderID(),
 					isPackageOrChildOfPackage: false,
-					loc:                       path.Elements{rootName, namex(folder, "parent")},
+					loc:                       path.Elements{rootName, folderName("parent")},
 				},
 			},
 		},
 		{
 			name: "package in hierarchy",
-			tree: func(t *testing.T) *folderyMcFolderFace {
+			tree: func(t *testing.T, d *deltaDrive) *folderyMcFolderFace {
 				ctx, flush := tester.NewContext(t)
 				defer flush()
 
-				tree := treeWithRoot(t)
+				tree := treeWithRoot(t, d)
 				err := tree.setFolder(ctx, rootID, id(pkg), name(pkg), true)
 				require.NoError(t, err, clues.ToCore(err))
 
-				err = tree.setFolder(ctx, id(pkg), id(folder), name(folder), false)
+				err = tree.setFolder(ctx, id(pkg), folderID(), folderName(), false)
 				require.NoError(t, err, clues.ToCore(err))
 
 				return tree
@@ -1077,23 +1089,23 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables()
 			expectErr: require.NoError,
 			expect: map[string]collectable{
 				rootID: {
-					currPath:                  fullPathPath(t),
+					currPath:                  d.fullPath(t),
 					files:                     map[string]*custom.DriveItem{},
 					folderID:                  rootID,
 					isPackageOrChildOfPackage: false,
 					loc:                       path.Elements{},
 				},
 				id(pkg): {
-					currPath:                  fullPathPath(t, name(pkg)),
+					currPath:                  d.fullPath(t, name(pkg)),
 					files:                     map[string]*custom.DriveItem{},
 					folderID:                  id(pkg),
 					isPackageOrChildOfPackage: true,
 					loc:                       path.Elements{rootName},
 				},
-				id(folder): {
-					currPath:                  fullPathPath(t, name(pkg), name(folder)),
+				folderID(): {
+					currPath:                  d.fullPath(t, name(pkg), folderName()),
 					files:                     map[string]*custom.DriveItem{},
-					folderID:                  id(folder),
+					folderID:                  folderID(),
 					isPackageOrChildOfPackage: true,
 					loc:                       path.Elements{rootName, name(pkg)},
 				},
@@ -1104,36 +1116,36 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables()
 			tree:      treeWithFileInFolder,
 			expectErr: require.NoError,
 			prevPaths: map[string]string{
-				rootID:                fullPath(),
-				idx(folder, "parent"): fullPath(namex(folder, "parent-prev")),
-				id(folder):            fullPath(namex(folder, "parent-prev"), name(folder)),
+				rootID:             d.strPath(),
+				folderID("parent"): d.strPath(folderName("parent-prev")),
+				folderID():         d.strPath(folderName("parent-prev"), folderName()),
 			},
 			expect: map[string]collectable{
 				rootID: {
-					currPath:                  fullPathPath(t),
+					currPath:                  d.fullPath(t),
 					files:                     map[string]*custom.DriveItem{},
 					folderID:                  rootID,
 					isPackageOrChildOfPackage: false,
 					loc:                       path.Elements{},
-					prevPath:                  fullPathPath(t),
+					prevPath:                  d.fullPath(t),
 				},
-				idx(folder, "parent"): {
-					currPath:                  fullPathPath(t, namex(folder, "parent")),
+				folderID("parent"): {
+					currPath:                  d.fullPath(t, folderName("parent")),
 					files:                     map[string]*custom.DriveItem{},
-					folderID:                  idx(folder, "parent"),
+					folderID:                  folderID("parent"),
 					isPackageOrChildOfPackage: false,
 					loc:                       path.Elements{rootName},
-					prevPath:                  fullPathPath(t, namex(folder, "parent-prev")),
+					prevPath:                  d.fullPath(t, folderName("parent-prev")),
 				},
-				id(folder): {
-					currPath: fullPathPath(t, namex(folder, "parent"), name(folder)),
-					folderID: id(folder),
+				folderID(): {
+					currPath: d.fullPath(t, folderName("parent"), folderName()),
+					folderID: folderID(),
 					isPackageOrChildOfPackage: false,
 					files: map[string]*custom.DriveItem{
-						id(file): custom.ToCustomDriveItem(fileAt("parent")),
+						fileID(): custom.ToCustomDriveItem(d.fileAt("parent")),
 					},
-					loc:      path.Elements{rootName, namex(folder, "parent")},
-					prevPath: fullPathPath(t, namex(folder, "parent-prev"), name(folder)),
+					loc:      path.Elements{rootName, folderName("parent")},
+					prevPath: d.fullPath(t, folderName("parent-prev"), folderName()),
 				},
 			},
 		},
@@ -1141,24 +1153,24 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables()
 			name: "root and tombstones",
 			tree: treeWithFileInTombstone,
 			prevPaths: map[string]string{
-				rootID:     fullPath(),
-				id(folder): fullPath(name(folder)),
+				rootID:     d.strPath(),
+				folderID(): d.strPath(folderName()),
 			},
 			expectErr: require.NoError,
 			expect: map[string]collectable{
 				rootID: {
-					currPath:                  fullPathPath(t),
+					currPath:                  d.fullPath(t),
 					files:                     map[string]*custom.DriveItem{},
 					folderID:                  rootID,
 					isPackageOrChildOfPackage: false,
 					loc:                       path.Elements{},
-					prevPath:                  fullPathPath(t),
+					prevPath:                  d.fullPath(t),
 				},
-				id(folder): {
+				folderID(): {
 					files:                     map[string]*custom.DriveItem{},
-					folderID:                  id(folder),
+					folderID:                  folderID(),
 					isPackageOrChildOfPackage: false,
-					prevPath:                  fullPathPath(t, name(folder)),
+					prevPath:                  d.fullPath(t, folderName()),
 				},
 			},
 		},
@@ -1166,7 +1178,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables()
 	for _, test := range table {
 		suite.Run(test.name, func() {
 			t := suite.T()
-			tree := test.tree(t)
+			tree := test.tree(t, d)
 
 			if len(test.prevPaths) > 0 {
 				for id, ps := range test.prevPaths {
[File diff suppressed because it is too large]
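Reviewer note: the limiter hunks below build their fixtures through a consolidated `deltaDrive` type (goal 2 of this PR). Since helper_test.go isn't shown, the following is only a rough, hypothetical sketch of the fixture shape the call sites suggest; `driveItem`, `fileID`, `fileName`, `rootID`, and `isFile` appear in the hunks themselves, while the composition and signatures here are assumed:

```go
// Hypothetical sketch of the deltaDrive fixture implied by call sites
// like drive(), drive(2), d1.fileAtRoot("f1"), and d1.strPath(folderName()).
// Assumes "fmt" and the msgraph models package are imported.
type deltaDrive struct {
	id   string
	name string
}

// drive builds a stub drive; an optional suffix (e.g. drive(2))
// distinguishes d1 from d2 in multi-drive tests.
func drive(suffixes ...any) *deltaDrive {
	sfx := ""
	for _, s := range suffixes {
		sfx += fmt.Sprintf("-%v", s)
	}

	return &deltaDrive{id: "drive-id" + sfx, name: "drive-name" + sfx}
}

// fileAtRoot stubs a file item in this drive's root; only the
// composition is guessed here, the building blocks are in the diff.
func (d *deltaDrive) fileAtRoot(suffixes ...any) models.DriveItemable {
	return driveItem(fileID(suffixes...), fileName(suffixes...), d.dir(), rootID, isFile)
}
```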
@ -12,14 +12,11 @@ import (
|
||||
"golang.org/x/exp/maps"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
|
||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||
"github.com/alcionai/corso/src/internal/data"
|
||||
"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/pkg/control"
|
||||
"github.com/alcionai/corso/src/pkg/fault"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
|
||||
)
|
||||
|
||||
type LimiterUnitSuite struct {
|
||||
@ -33,23 +30,14 @@ func TestLimiterUnitSuite(t *testing.T) {
|
||||
type backupLimitTest struct {
|
||||
name string
|
||||
limits control.PreviewItemLimits
|
||||
drives []models.Driveable
|
||||
enumerator mock.EnumerateDriveItemsDelta
|
||||
enumerator enumerateDriveItemsDelta
|
||||
// Collection name -> set of item IDs. We can't check item data because
|
||||
// that's not mocked out. Metadata is checked separately.
|
||||
expectedItemIDsInCollection map[string][]string
|
||||
}
|
||||
|
||||
func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) {
|
||||
drive1 := models.NewDrive()
|
||||
drive1.SetId(ptr.To(id(drive)))
|
||||
drive1.SetName(ptr.To(name(drive)))
|
||||
|
||||
drive2 := models.NewDrive()
|
||||
drive2.SetId(ptr.To(idx(drive, 2)))
|
||||
drive2.SetName(ptr.To(namex(drive, 2)))
|
||||
|
||||
tbl := []backupLimitTest{
|
||||
func backupLimitTable(d1, d2 *deltaDrive) []backupLimitTest {
|
||||
return []backupLimitTest{
|
||||
{
|
||||
name: "OneDrive SinglePage ExcludeItemsOverMaxSize",
|
||||
limits: control.PreviewItemLimits{
|
||||
@ -60,15 +48,14 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
|
||||
MaxBytes: 5,
|
||||
MaxPages: 999,
|
||||
},
|
||||
drives: []models.Driveable{drive1},
|
||||
enumerator: mock.DriveEnumerator(
|
||||
mock.Drive(id(drive)).With(
|
||||
mock.Delta(id(delta), nil).With(aPage(
|
||||
filexWSizeAtRoot(1, 7),
|
||||
filexWSizeAtRoot(2, 1),
|
||||
filexWSizeAtRoot(3, 1))))),
|
||||
enumerator: driveEnumerator(
|
||||
d1.newEnumer().with(
|
||||
delta(id(deltaURL), nil).with(aPage(
|
||||
d1.fileWSizeAtRoot(7, "f1"),
|
||||
d1.fileWSizeAtRoot(1, "f2"),
|
||||
d1.fileWSizeAtRoot(1, "f3"))))),
|
||||
expectedItemIDsInCollection: map[string][]string{
|
||||
fullPath(): {idx(file, 2), idx(file, 3)},
|
||||
d1.strPath(): {fileID("f2"), fileID("f3")},
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -81,15 +68,14 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
|
||||
MaxBytes: 3,
|
||||
MaxPages: 999,
|
||||
},
|
||||
drives: []models.Driveable{drive1},
|
||||
enumerator: mock.DriveEnumerator(
|
||||
mock.Drive(id(drive)).With(
|
||||
mock.Delta(id(delta), nil).With(aPage(
|
||||
filexWSizeAtRoot(1, 1),
|
||||
filexWSizeAtRoot(2, 2),
|
||||
filexWSizeAtRoot(3, 1))))),
|
||||
enumerator: driveEnumerator(
|
||||
d1.newEnumer().with(
|
||||
delta(id(deltaURL), nil).with(aPage(
|
||||
d1.fileWSizeAtRoot(1, "f1"),
|
||||
d1.fileWSizeAtRoot(2, "f2"),
|
||||
d1.fileWSizeAtRoot(1, "f3"))))),
|
||||
expectedItemIDsInCollection: map[string][]string{
|
||||
fullPath(): {idx(file, 1), idx(file, 2)},
|
||||
d1.strPath(): {fileID("f1"), fileID("f2")},
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -102,17 +88,16 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
|
||||
MaxBytes: 3,
|
||||
MaxPages: 999,
|
||||
},
|
||||
drives: []models.Driveable{drive1},
|
||||
enumerator: mock.DriveEnumerator(
|
||||
mock.Drive(id(drive)).With(
|
||||
mock.Delta(id(delta), nil).With(aPage(
|
||||
filexWSizeAtRoot(1, 1),
|
||||
folderxAtRoot(1),
|
||||
filexWSizeAt(2, 1, 2),
|
||||
filexWSizeAt(3, 1, 1))))),
|
||||
enumerator: driveEnumerator(
|
||||
d1.newEnumer().with(
|
||||
delta(id(deltaURL), nil).with(aPage(
|
||||
d1.fileWSizeAtRoot(1, "f1"),
|
||||
d1.folderAtRoot(),
|
||||
d1.fileWSizeAt(2, folder, "f2"),
|
||||
d1.fileWSizeAt(1, folder, "f3"))))),
|
||||
expectedItemIDsInCollection: map[string][]string{
|
||||
fullPath(): {idx(file, 1)},
|
||||
fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 2)},
|
||||
d1.strPath(): {fileID("f1")},
|
||||
d1.strPath(folderName()): {folderID(), fileID("f2")},
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -125,18 +110,17 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
|
||||
MaxBytes: 999999,
|
||||
MaxPages: 999,
|
||||
},
|
||||
drives: []models.Driveable{drive1},
|
||||
enumerator: mock.DriveEnumerator(
|
||||
mock.Drive(id(drive)).With(
|
||||
mock.Delta(id(delta), nil).With(aPage(
|
||||
filexAtRoot(1),
|
||||
filexAtRoot(2),
|
||||
filexAtRoot(3),
|
||||
filexAtRoot(4),
|
||||
filexAtRoot(5),
|
||||
filexAtRoot(6))))),
|
||||
enumerator: driveEnumerator(
|
||||
d1.newEnumer().with(
|
||||
delta(id(deltaURL), nil).with(aPage(
|
||||
d1.fileAtRoot("f1"),
|
||||
d1.fileAtRoot("f2"),
|
||||
d1.fileAtRoot("f3"),
|
||||
d1.fileAtRoot("f4"),
|
||||
d1.fileAtRoot("f5"),
|
||||
d1.fileAtRoot("f6"))))),
|
||||
expectedItemIDsInCollection: map[string][]string{
|
||||
fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)},
|
||||
d1.strPath(): {fileID("f1"), fileID("f2"), fileID("f3")},
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -149,24 +133,23 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
|
||||
MaxBytes: 999999,
|
||||
MaxPages: 999,
|
||||
},
|
||||
drives: []models.Driveable{drive1},
|
||||
enumerator: mock.DriveEnumerator(
|
||||
mock.Drive(id(drive)).With(
|
||||
mock.Delta(id(delta), nil).With(
|
||||
enumerator: driveEnumerator(
|
||||
d1.newEnumer().with(
|
||||
delta(id(deltaURL), nil).with(
|
||||
aPage(
|
||||
filexAtRoot(1),
|
||||
filexAtRoot(2)),
|
||||
d1.fileAtRoot("f1"),
|
||||
d1.fileAtRoot("f2")),
|
||||
aPage(
|
||||
// Repeated items shouldn't count against the limit.
|
||||
filexAtRoot(1),
|
||||
folderxAtRoot(1),
|
||||
filexAt(3, 1),
|
||||
filexAt(4, 1),
|
||||
filexAt(5, 1),
|
||||
filexAt(6, 1))))),
|
||||
d1.fileAtRoot("f1"),
|
||||
d1.folderAtRoot(),
|
||||
d1.fileAt(folder, "f3"),
|
||||
d1.fileAt(folder, "f4"),
|
||||
d1.fileAt(folder, "f5"),
|
||||
d1.fileAt(folder, "f6"))))),
|
||||
expectedItemIDsInCollection: map[string][]string{
|
||||
fullPath(): {idx(file, 1), idx(file, 2)},
|
||||
fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 3)},
|
||||
d1.strPath(): {fileID("f1"), fileID("f2")},
|
||||
d1.strPath(folderName()): {folderID(), fileID("f3")},
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -179,21 +162,20 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
|
||||
MaxBytes: 999999,
|
||||
MaxPages: 1,
|
||||
},
|
||||
drives: []models.Driveable{drive1},
|
||||
enumerator: mock.DriveEnumerator(
|
||||
mock.Drive(id(drive)).With(
|
||||
mock.Delta(id(delta), nil).With(
|
||||
enumerator: driveEnumerator(
|
||||
d1.newEnumer().with(
|
||||
delta(id(deltaURL), nil).with(
|
||||
aPage(
|
||||
filexAtRoot(1),
|
||||
filexAtRoot(2)),
|
||||
d1.fileAtRoot("f1"),
|
||||
d1.fileAtRoot("f2")),
|
||||
aPage(
|
||||
folderxAtRoot(1),
|
||||
filexAt(3, 1),
|
||||
filexAt(4, 1),
|
||||
filexAt(5, 1),
|
||||
filexAt(6, 1))))),
|
||||
d1.folderAtRoot(),
|
||||
d1.fileAt(folder, "f3"),
|
||||
d1.fileAt(folder, "f4"),
|
||||
d1.fileAt(folder, "f5"),
|
||||
d1.fileAt(folder, "f6"))))),
|
||||
expectedItemIDsInCollection: map[string][]string{
|
||||
fullPath(): {idx(file, 1), idx(file, 2)},
|
||||
d1.strPath(): {fileID("f1"), fileID("f2")},
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -206,23 +188,22 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
|
||||
MaxBytes: 999999,
|
||||
MaxPages: 999,
|
||||
},
|
||||
drives: []models.Driveable{drive1},
|
||||
enumerator: mock.DriveEnumerator(
|
||||
mock.Drive(id(drive)).With(
|
||||
mock.Delta(id(delta), nil).With(
|
||||
enumerator: driveEnumerator(
|
||||
d1.newEnumer().with(
|
||||
delta(id(deltaURL), nil).with(
|
||||
aPage(
|
||||
filexAtRoot(1),
|
||||
filexAtRoot(2),
|
||||
filexAtRoot(3)),
|
||||
d1.fileAtRoot("f1"),
|
||||
d1.fileAtRoot("f2"),
|
||||
d1.fileAtRoot("f3")),
|
||||
aPage(
|
||||
folderxAtRoot(1),
|
||||
filexAt(4, 1),
|
||||
filexAt(5, 1))))),
|
||||
d1.folderAtRoot(),
|
||||
d1.fileAt(folder, "f4"),
|
||||
d1.fileAt(folder, "f5"))))),
|
||||
expectedItemIDsInCollection: map[string][]string{
|
||||
// Root has an additional item. It's hard to fix that in the code
|
||||
// though.
|
||||
fullPath(): {idx(file, 1), idx(file, 2)},
|
||||
fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4)},
|
||||
d1.strPath(): {fileID("f1"), fileID("f2")},
|
||||
d1.strPath(folderName()): {folderID(), fileID("f4")},
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -235,23 +216,22 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
|
||||
MaxBytes: 999999,
|
||||
MaxPages: 999,
|
||||
},
|
||||
drives: []models.Driveable{drive1},
|
||||
enumerator: mock.DriveEnumerator(
|
||||
mock.Drive(id(drive)).With(
|
||||
mock.Delta(id(delta), nil).With(
|
||||
enumerator: driveEnumerator(
|
||||
d1.newEnumer().with(
|
||||
delta(id(deltaURL), nil).with(
|
||||
aPage(
|
||||
folderAtRoot(),
|
||||
filexAt(1, folder),
|
||||
filexAt(2, folder)),
|
||||
d1.folderAtRoot(),
|
||||
d1.fileAt(folder, "f1"),
|
||||
d1.fileAt(folder, "f2")),
|
||||
aPage(
|
||||
folderAtRoot(),
|
||||
d1.folderAtRoot(),
|
||||
// Updated item that shouldn't count against the limit a second time.
|
||||
filexAt(2, folder),
|
||||
filexAt(3, folder),
|
||||
filexAt(4, folder))))),
|
||||
d1.fileAt(folder, "f2"),
|
||||
d1.fileAt(folder, "f3"),
|
||||
d1.fileAt(folder, "f4"))))),
|
||||
expectedItemIDsInCollection: map[string][]string{
|
||||
fullPath(): {},
|
||||
fullPath(name(folder)): {id(folder), idx(file, 1), idx(file, 2), idx(file, 3)},
|
||||
d1.strPath(): {},
|
||||
d1.strPath(folderName()): {folderID(), fileID("f1"), fileID("f2"), fileID("f3")},
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -264,24 +244,23 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
|
||||
MaxBytes: 999999,
|
||||
MaxPages: 999,
|
||||
},
|
||||
drives: []models.Driveable{drive1},
|
||||
enumerator: mock.DriveEnumerator(
|
||||
mock.Drive(id(drive)).With(
|
||||
mock.Delta(id(delta), nil).With(
|
||||
enumerator: driveEnumerator(
|
||||
d1.newEnumer().with(
|
||||
delta(id(deltaURL), nil).with(
|
||||
aPage(
|
||||
filexAtRoot(1),
|
||||
filexAtRoot(2),
|
||||
d1.fileAtRoot("f1"),
|
||||
d1.fileAtRoot("f2"),
|
||||
// Put folder 0 at limit.
|
||||
folderAtRoot(),
|
||||
filexAt(3, folder),
|
||||
filexAt(4, folder)),
|
||||
d1.folderAtRoot(),
|
||||
d1.fileAt(folder, "f3"),
|
||||
d1.fileAt(folder, "f4")),
|
||||
aPage(
|
||||
folderAtRoot(),
|
||||
d1.folderAtRoot(),
|
||||
// Try to move item from root to folder 0 which is already at the limit.
|
||||
filexAt(1, folder))))),
|
||||
d1.fileAt(folder, "f1"))))),
|
||||
expectedItemIDsInCollection: map[string][]string{
|
||||
fullPath(): {idx(file, 1), idx(file, 2)},
|
||||
fullPath(name(folder)): {id(folder), idx(file, 3), idx(file, 4)},
|
||||
d1.strPath(): {fileID("f1"), fileID("f2")},
|
||||
d1.strPath(folderName()): {folderID(), fileID("f3"), fileID("f4")},
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -294,23 +273,22 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
|
||||
MaxBytes: 999999,
|
||||
MaxPages: 999,
|
||||
},
|
||||
drives: []models.Driveable{drive1},
|
||||
enumerator: mock.DriveEnumerator(
|
||||
mock.Drive(id(drive)).With(
|
||||
mock.Delta(id(delta), nil).With(
|
||||
enumerator: driveEnumerator(
|
||||
d1.newEnumer().with(
|
||||
delta(id(deltaURL), nil).with(
|
||||
aPage(
|
||||
filexAtRoot(1),
|
||||
filexAtRoot(2),
|
||||
filexAtRoot(3)),
|
||||
d1.fileAtRoot("f1"),
|
||||
d1.fileAtRoot("f2"),
|
||||
d1.fileAtRoot("f3")),
|
||||
aPage(
|
||||
folderxAtRoot(1),
|
||||
filexAt(4, 1)),
|
||||
d1.folderAtRoot(),
|
||||
d1.fileAt(folder, "f4")),
|
||||
aPage(
|
||||
folderxAtRoot(1),
|
||||
filexAt(5, 1))))),
|
||||
d1.folderAtRoot(),
|
||||
d1.fileAt(folder, "f5"))))),
|
||||
expectedItemIDsInCollection: map[string][]string{
|
||||
fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)},
|
||||
fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)},
|
||||
d1.strPath(): {fileID("f1"), fileID("f2"), fileID("f3")},
|
||||
d1.strPath(folderName()): {folderID(), fileID("f4"), fileID("f5")},
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -323,26 +301,25 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
|
||||
MaxBytes: 999999,
|
||||
MaxPages: 999,
|
||||
},
|
||||
drives: []models.Driveable{drive1},
|
||||
enumerator: mock.DriveEnumerator(
|
||||
mock.Drive(id(drive)).With(
|
||||
mock.Delta(id(delta), nil).With(
|
||||
enumerator: driveEnumerator(
|
||||
d1.newEnumer().with(
|
||||
delta(id(deltaURL), nil).with(
|
||||
aPage(
|
||||
filexAtRoot(1),
|
||||
filexAtRoot(2),
|
||||
filexAtRoot(3)),
|
||||
d1.fileAtRoot("f1"),
|
||||
d1.fileAtRoot("f2"),
|
||||
d1.fileAtRoot("f3")),
|
||||
aPage(
|
||||
folderxAtRoot(1),
|
||||
filexAt(4, 1),
|
||||
filexAt(5, 1),
|
||||
d1.folderAtRoot(),
|
||||
d1.fileAt(folder, "f4"),
|
||||
d1.fileAt(folder, "f5"),
|
||||
// This container shouldn't be returned.
|
||||
folderxAtRoot(2),
|
||||
filexAt(7, 2),
|
||||
filexAt(8, 2),
|
||||
filexAt(9, 2))))),
|
||||
d1.folderAtRoot(2),
|
||||
d1.fileAt(2, "f7"),
|
||||
d1.fileAt(2, "f8"),
|
||||
d1.fileAt(2, "f9"))))),
|
||||
expectedItemIDsInCollection: map[string][]string{
|
||||
fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)},
|
||||
fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)},
|
||||
d1.strPath(): {fileID("f1"), fileID("f2"), fileID("f3")},
|
||||
d1.strPath(folderName()): {folderID(), fileID("f4"), fileID("f5")},
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -355,27 +332,26 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
|
||||
MaxBytes: 999999,
|
||||
MaxPages: 999,
|
||||
},
|
||||
drives: []models.Driveable{drive1},
|
||||
enumerator: mock.DriveEnumerator(
|
||||
mock.Drive(id(drive)).With(
|
||||
mock.Delta(id(delta), nil).With(
|
||||
enumerator: driveEnumerator(
|
||||
d1.newEnumer().with(
|
||||
delta(id(deltaURL), nil).with(
|
||||
aPage(
|
||||
filexAtRoot(1),
|
||||
filexAtRoot(2),
|
||||
filexAtRoot(3)),
|
||||
d1.fileAtRoot("f1"),
|
||||
d1.fileAtRoot("f2"),
|
||||
d1.fileAtRoot("f3")),
|
||||
aPage(
|
||||
folderxAtRoot(1),
|
||||
filexAt(4, 1),
|
||||
filexAt(5, 1)),
|
||||
d1.folderAtRoot(),
|
||||
d1.fileAt(folder, "f4"),
|
||||
d1.fileAt(folder, "f5")),
|
||||
aPage(
|
||||
// This container shouldn't be returned.
|
||||
folderxAtRoot(2),
|
||||
filexAt(7, 2),
|
||||
filexAt(8, 2),
|
||||
filexAt(9, 2))))),
|
||||
d1.folderAtRoot(2),
|
||||
d1.fileAt(2, "f7"),
|
||||
d1.fileAt(2, "f8"),
|
||||
d1.fileAt(2, "f9"))))),
|
||||
expectedItemIDsInCollection: map[string][]string{
|
||||
fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)},
|
||||
fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)},
|
||||
d1.strPath(): {fileID("f1"), fileID("f2"), fileID("f3")},
|
||||
d1.strPath(folderName()): {folderID(), fileID("f4"), fileID("f5")},
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -388,25 +364,24 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
|
||||
MaxBytes: 999999,
|
||||
MaxPages: 999,
|
||||
},
|
||||
drives: []models.Driveable{drive1, drive2},
|
||||
enumerator: mock.DriveEnumerator(
|
||||
mock.Drive(id(drive)).With(
|
||||
mock.Delta(id(delta), nil).With(aPage(
|
||||
filexAtRoot(1),
|
||||
filexAtRoot(2),
|
||||
filexAtRoot(3),
|
||||
filexAtRoot(4),
|
||||
filexAtRoot(5)))),
|
||||
mock.Drive(idx(drive, 2)).With(
|
||||
mock.Delta(id(delta), nil).With(aPage(
|
||||
filexAtRoot(1),
|
||||
filexAtRoot(2),
|
||||
filexAtRoot(3),
|
||||
filexAtRoot(4),
|
||||
filexAtRoot(5))))),
|
||||
enumerator: driveEnumerator(
|
||||
d1.newEnumer().with(
|
||||
delta(id(deltaURL), nil).with(aPage(
|
||||
d1.fileAtRoot("f1"),
|
||||
d1.fileAtRoot("f2"),
|
||||
d1.fileAtRoot("f3"),
|
||||
d1.fileAtRoot("f4"),
|
||||
d1.fileAtRoot("f5")))),
|
||||
d2.newEnumer().with(
|
||||
delta(id(deltaURL), nil).with(aPage(
|
||||
d2.fileAtRoot("f1"),
|
||||
d2.fileAtRoot("f2"),
|
||||
d2.fileAtRoot("f3"),
|
||||
d2.fileAtRoot("f4"),
|
||||
d2.fileAtRoot("f5"))))),
|
||||
expectedItemIDsInCollection: map[string][]string{
|
||||
fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)},
|
||||
driveFullPath(2): {idx(file, 1), idx(file, 2), idx(file, 3)},
|
||||
d1.strPath(): {fileID("f1"), fileID("f2"), fileID("f3")},
|
||||
d2.strPath(): {fileID("f1"), fileID("f2"), fileID("f3")},
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -418,28 +393,25 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
|
||||
MaxBytes: 1,
|
||||
MaxPages: 1,
|
||||
},
|
||||
drives: []models.Driveable{drive1},
|
||||
enumerator: mock.DriveEnumerator(
|
||||
mock.Drive(id(drive)).With(
|
||||
mock.Delta(id(delta), nil).With(
|
||||
enumerator: driveEnumerator(
|
||||
d1.newEnumer().with(
|
||||
delta(id(deltaURL), nil).with(
|
||||
aPage(
|
||||
filexAtRoot(1),
|
||||
filexAtRoot(2),
|
||||
filexAtRoot(3)),
|
||||
d1.fileAtRoot("f1"),
|
||||
d1.fileAtRoot("f2"),
|
||||
d1.fileAtRoot("f3")),
|
||||
aPage(
|
||||
folderxAtRoot(1),
|
||||
filexAt(4, 1)),
|
||||
d1.folderAtRoot(),
|
||||
d1.fileAt(folder, "f4")),
|
||||
aPage(
|
||||
folderxAtRoot(1),
|
||||
filexAt(5, 1))))),
|
||||
d1.folderAtRoot(),
|
||||
d1.fileAt(folder, "f5"))))),
|
||||
expectedItemIDsInCollection: map[string][]string{
|
||||
fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)},
|
||||
fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)},
|
||||
d1.strPath(): {fileID("f1"), fileID("f2"), fileID("f3")},
|
||||
d1.strPath(folderName()): {folderID(), fileID("f4"), fileID("f5")},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
return drive1, drive2, tbl
|
||||
}

// TestGet_PreviewLimits checks that the limits set for preview backups in
@ -447,16 +419,7 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
// checks that don't examine metadata, collection states, etc. They really just
// check the expected items appear.
func (suite *LimiterUnitSuite) TestGet_PreviewLimits_noTree() {
_, _, tbl := backupLimitTable()

for _, test := range tbl {
suite.Run(test.name, func() {
runGetPreviewLimits(
suite.T(),
test,
control.DefaultOptions())
})
}
iterGetPreviewLimitsTests(suite, control.DefaultOptions())
}

// TestGet_PreviewLimits checks that the limits set for preview backups in
@ -469,13 +432,21 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits_tree() {
opts := control.DefaultOptions()
opts.ToggleFeatures.UseDeltaTree = true

_, _, tbl := backupLimitTable()
iterGetPreviewLimitsTests(suite, opts)
}

for _, test := range tbl {
func iterGetPreviewLimitsTests(
suite *LimiterUnitSuite,
opts control.Options,
) {
d1, d2 := drive(), drive(2)

for _, test := range backupLimitTable(d1, d2) {
suite.Run(test.name, func() {
runGetPreviewLimits(
suite.T(),
test,
d1, d2,
opts)
})
}
@ -484,6 +455,7 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits_tree() {
func runGetPreviewLimits(
t *testing.T,
test backupLimitTest,
drive1, drive2 *deltaDrive,
opts control.Options,
) {
ctx, flush := tester.NewContext(t)
@ -500,12 +472,7 @@ func runGetPreviewLimits(
opts.PreviewLimits = test.limits

var (
mockDrivePager = &apiMock.Pager[models.Driveable]{
ToReturn: []apiMock.PagerResult[models.Driveable]{
{Values: test.drives},
},
}
mbh = mock.DefaultDriveBHWith(user, mockDrivePager, test.enumerator)
mbh = defaultDriveBHWith(user, test.enumerator)
c = collWithMBHAndOpts(mbh, opts)
errs = fault.New(true)
delList = prefixmatcher.NewStringSetBuilder()
@ -558,7 +525,7 @@ func runGetPreviewLimits(
t,
test.expectedItemIDsInCollection[folderPath],
itemIDs,
"item IDs in collection with path %q",
"item IDs in collection with path:\n\t%q",
folderPath)
}

@ -741,20 +708,17 @@ func runGetPreviewLimitsDefaults(
false)
require.NoError(t, err, "making metadata path", clues.ToCore(err))

drv := models.NewDrive()
drv.SetId(ptr.To(id(drive)))
drv.SetName(ptr.To(name(drive)))

pages := make([]mock.NextPage, 0, test.numContainers)
d := drive()
pages := make([]nextPage, 0, test.numContainers)

for containerIdx := 0; containerIdx < test.numContainers; containerIdx++ {
page := mock.NextPage{
page := nextPage{
Items: []models.DriveItemable{
driveRootItem(),
driveRootFolder(),
driveItem(
idx(folder, containerIdx),
namex(folder, containerIdx),
parentDir(),
folderID(containerIdx),
folderName(containerIdx),
d.dir(),
rootID,
isFolder),
},
@ -763,11 +727,11 @@ func runGetPreviewLimitsDefaults(
for itemIdx := 0; itemIdx < test.numItemsPerContainer; itemIdx++ {
itemSuffix := fmt.Sprintf("%d-%d", containerIdx, itemIdx)

page.Items = append(page.Items, driveItemWithSize(
idx(file, itemSuffix),
namex(file, itemSuffix),
parentDir(namex(folder, containerIdx)),
idx(folder, containerIdx),
page.Items = append(page.Items, driveItemWSize(
fileID(itemSuffix),
fileName(itemSuffix),
d.dir(folderName(containerIdx)),
folderID(containerIdx),
test.itemSize,
isFile))
}
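
To make the generated shape concrete, here is an illustrative expansion of a single loop iteration (containerIdx == 0, numItemsPerContainer == 2), using the same helpers the diff introduces; this is a sketch for the reader, not part of the change:

page := nextPage{
Items: []models.DriveItemable{
driveRootFolder(), // each delta page in this test leads with the root
driveItem(folderID(0), folderName(0), d.dir(), rootID, isFolder),
driveItemWSize(fileID("0-0"), fileName("0-0"), d.dir(folderName(0)), folderID(0), test.itemSize, isFile),
driveItemWSize(fileID("0-1"), fileName("0-1"), d.dir(folderName(0)), folderID(0), test.itemSize, isFile),
},
}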
@ -778,15 +742,10 @@ func runGetPreviewLimitsDefaults(
opts.PreviewLimits = test.limits

var (
mockDrivePager = &apiMock.Pager[models.Driveable]{
ToReturn: []apiMock.PagerResult[models.Driveable]{
{Values: []models.Driveable{drv}},
},
}
mockEnumerator = mock.DriveEnumerator(
mock.Drive(id(drive)).With(
mock.Delta(id(delta), nil).With(pages...)))
mbh = mock.DefaultDriveBHWith(user, mockDrivePager, mockEnumerator)
mockEnumerator = driveEnumerator(
d.newEnumer().with(
delta(id(deltaURL), nil).with(pages...)))
mbh = defaultDriveBHWith(user, mockEnumerator)
c = collWithMBHAndOpts(mbh, opts)
errs = fault.New(true)
delList = prefixmatcher.NewStringSetBuilder()

@ -15,7 +15,7 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
dataMock "github.com/alcionai/corso/src/internal/data/mock"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
odMock "github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
odStub "github.com/alcionai/corso/src/internal/m365/service/onedrive/stub"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/tester"
@ -53,7 +53,7 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
onCollision control.CollisionPolicy
deleteErr error
expectSkipped assert.BoolAssertionFunc
expectMock func(*testing.T, *odMock.RestoreHandler)
expectMock func(*testing.T, *mockRestoreHandler)
expectCounts counts
}{
{
@ -61,7 +61,7 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
collisionKeys: map[string]api.DriveItemIDType{},
onCollision: control.Copy,
expectSkipped: assert.False,
expectMock: func(t *testing.T, rh *odMock.RestoreHandler) {
expectMock: func(t *testing.T, rh *mockRestoreHandler) {
assert.True(t, rh.CalledPostItem, "new item posted")
assert.False(t, rh.CalledDeleteItem, "new item deleted")
},
@ -72,7 +72,7 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
collisionKeys: map[string]api.DriveItemIDType{},
onCollision: control.Replace,
expectSkipped: assert.False,
expectMock: func(t *testing.T, rh *odMock.RestoreHandler) {
expectMock: func(t *testing.T, rh *mockRestoreHandler) {
assert.True(t, rh.CalledPostItem, "new item posted")
assert.False(t, rh.CalledDeleteItem, "new item deleted")
},
@ -83,7 +83,7 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
collisionKeys: map[string]api.DriveItemIDType{},
onCollision: control.Skip,
expectSkipped: assert.False,
expectMock: func(t *testing.T, rh *odMock.RestoreHandler) {
expectMock: func(t *testing.T, rh *mockRestoreHandler) {
assert.True(t, rh.CalledPostItem, "new item posted")
assert.False(t, rh.CalledDeleteItem, "new item deleted")
},
@ -92,11 +92,11 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
{
name: "collision, copy",
collisionKeys: map[string]api.DriveItemIDType{
odMock.DriveItemFileName: {ItemID: mndiID},
mock.DriveItemFileName: {ItemID: mndiID},
},
onCollision: control.Copy,
expectSkipped: assert.False,
expectMock: func(t *testing.T, rh *odMock.RestoreHandler) {
expectMock: func(t *testing.T, rh *mockRestoreHandler) {
assert.True(t, rh.CalledPostItem, "new item posted")
assert.False(t, rh.CalledDeleteItem, "new item deleted")
},
@ -105,11 +105,11 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
{
name: "collision, replace",
collisionKeys: map[string]api.DriveItemIDType{
odMock.DriveItemFileName: {ItemID: mndiID},
mock.DriveItemFileName: {ItemID: mndiID},
},
onCollision: control.Replace,
expectSkipped: assert.False,
expectMock: func(t *testing.T, rh *odMock.RestoreHandler) {
expectMock: func(t *testing.T, rh *mockRestoreHandler) {
assert.True(t, rh.CalledPostItem, "new item posted")
assert.True(t, rh.CalledDeleteItem, "new item deleted")
assert.Equal(t, mndiID, rh.CalledDeleteItemOn, "deleted the correct item")
@ -119,12 +119,12 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
{
name: "collision, replace - err already deleted",
collisionKeys: map[string]api.DriveItemIDType{
odMock.DriveItemFileName: {ItemID: "smarf"},
mock.DriveItemFileName: {ItemID: "smarf"},
},
onCollision: control.Replace,
deleteErr: graph.ErrDeletedInFlight,
expectSkipped: assert.False,
expectMock: func(t *testing.T, rh *odMock.RestoreHandler) {
expectMock: func(t *testing.T, rh *mockRestoreHandler) {
assert.True(t, rh.CalledPostItem, "new item posted")
assert.True(t, rh.CalledDeleteItem, "new item deleted")
},
@ -133,11 +133,11 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
{
name: "collision, skip",
collisionKeys: map[string]api.DriveItemIDType{
odMock.DriveItemFileName: {ItemID: mndiID},
mock.DriveItemFileName: {ItemID: mndiID},
},
onCollision: control.Skip,
expectSkipped: assert.True,
expectMock: func(t *testing.T, rh *odMock.RestoreHandler) {
expectMock: func(t *testing.T, rh *mockRestoreHandler) {
assert.False(t, rh.CalledPostItem, "new item posted")
assert.False(t, rh.CalledDeleteItem, "new item deleted")
},
@ -146,14 +146,14 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
{
name: "file-folder collision, copy",
collisionKeys: map[string]api.DriveItemIDType{
odMock.DriveItemFileName: {
mock.DriveItemFileName: {
ItemID: mndiID,
IsFolder: true,
},
},
onCollision: control.Copy,
expectSkipped: assert.False,
expectMock: func(t *testing.T, rh *odMock.RestoreHandler) {
expectMock: func(t *testing.T, rh *mockRestoreHandler) {
assert.True(t, rh.CalledPostItem, "new item posted")
assert.False(t, rh.CalledDeleteItem, "new item deleted")
},
@ -162,14 +162,14 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
{
name: "file-folder collision, replace",
collisionKeys: map[string]api.DriveItemIDType{
odMock.DriveItemFileName: {
mock.DriveItemFileName: {
ItemID: mndiID,
IsFolder: true,
},
},
onCollision: control.Replace,
expectSkipped: assert.False,
expectMock: func(t *testing.T, rh *odMock.RestoreHandler) {
expectMock: func(t *testing.T, rh *mockRestoreHandler) {
assert.True(t, rh.CalledPostItem, "new item posted")
assert.False(t, rh.CalledDeleteItem, "new item deleted")
},
@ -178,14 +178,14 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
{
name: "file-folder collision, skip",
collisionKeys: map[string]api.DriveItemIDType{
odMock.DriveItemFileName: {
mock.DriveItemFileName: {
ItemID: mndiID,
IsFolder: true,
},
},
onCollision: control.Skip,
expectSkipped: assert.True,
expectMock: func(t *testing.T, rh *odMock.RestoreHandler) {
expectMock: func(t *testing.T, rh *mockRestoreHandler) {
assert.False(t, rh.CalledPostItem, "new item posted")
assert.False(t, rh.CalledDeleteItem, "new item deleted")
},
@ -204,7 +204,7 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {

var (
caches = NewRestoreCaches(nil)
rh = &odMock.RestoreHandler{
rh = &mockRestoreHandler{
PostItemResp: models.NewDriveItem(),
DeleteItemErr: test.deleteErr,
}
@ -232,9 +232,9 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
ctx,
rh,
rcc,
odMock.FetchItemByName{
mock.FetchItemByName{
Item: &dataMock.Item{
Reader: odMock.FileRespReadCloser(odMock.DriveFileMetaData),
Reader: mock.FileRespReadCloser(mock.DriveFileMetaData),
ItemInfo: odStub.DriveItemInfo(),
},
},
@ -244,7 +244,7 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
caches,
&dataMock.Item{
ItemID: uuid.NewString(),
Reader: odMock.FileRespReadCloser(odMock.DriveFilePayloadData),
Reader: mock.FileRespReadCloser(mock.DriveFilePayloadData),
ItemInfo: odStub.DriveItemInfo(),
},
nil,

@ -18,7 +18,6 @@ import (

"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/control"
@ -214,12 +213,15 @@ func TestURLCacheUnitSuite(t *testing.T) {
}

func (suite *URLCacheUnitSuite) TestGetItemProperties() {
deltaString := "delta"
driveID := "drive1"
d := drive()

aURL := func(n int) string {
return fmt.Sprintf("https://dummy%d.com", n)
}

table := []struct {
name string
pages []mock.NextPage
pages []nextPage
pagerErr error
expectedItemProps map[string]itemProps
expectErr assert.ErrorAssertionFunc
@ -227,14 +229,12 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
}{
{
name: "single item in cache",
pages: []mock.NextPage{
{Items: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
}},
pages: []nextPage{
aPage(d.fileWURLAtRoot(aURL(1), false, 1)),
},
expectedItemProps: map[string]itemProps{
"1": {
downloadURL: "https://dummy1.com",
fileID(1): {
downloadURL: aURL(1),
isDeleted: false,
},
},
@ -247,34 +247,33 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
},
{
name: "multiple items in cache",
pages: []mock.NextPage{
{Items: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
fileItem("4", "file4", "root", "root", "https://dummy4.com", false),
fileItem("5", "file5", "root", "root", "https://dummy5.com", false),
}},
pages: []nextPage{
aPage(
d.fileWURLAtRoot(aURL(1), false, 1),
d.fileWURLAtRoot(aURL(2), false, 2),
d.fileWURLAtRoot(aURL(3), false, 3),
d.fileWURLAtRoot(aURL(4), false, 4),
d.fileWURLAtRoot(aURL(5), false, 5)),
},
expectedItemProps: map[string]itemProps{
"1": {
downloadURL: "https://dummy1.com",
fileID(1): {
downloadURL: aURL(1),
isDeleted: false,
},
"2": {
downloadURL: "https://dummy2.com",
fileID(2): {
downloadURL: aURL(2),
isDeleted: false,
},
"3": {
downloadURL: "https://dummy3.com",
fileID(3): {
downloadURL: aURL(3),
isDeleted: false,
},
"4": {
downloadURL: "https://dummy4.com",
fileID(4): {
downloadURL: aURL(4),
isDeleted: false,
},
"5": {
downloadURL: "https://dummy5.com",
fileID(5): {
downloadURL: aURL(5),
isDeleted: false,
},
},
@ -287,36 +286,34 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
},
{
name: "multiple pages",
pages: []mock.NextPage{
{Items: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
}},
{Items: []models.DriveItemable{
fileItem("4", "file4", "root", "root", "https://dummy4.com", false),
fileItem("5", "file5", "root", "root", "https://dummy5.com", false),
}},
pages: []nextPage{
aPage(
d.fileWURLAtRoot(aURL(1), false, 1),
d.fileWURLAtRoot(aURL(2), false, 2),
d.fileWURLAtRoot(aURL(3), false, 3)),
aPage(
d.fileWURLAtRoot(aURL(4), false, 4),
d.fileWURLAtRoot(aURL(5), false, 5)),
},
expectedItemProps: map[string]itemProps{
"1": {
downloadURL: "https://dummy1.com",
fileID(1): {
downloadURL: aURL(1),
isDeleted: false,
},
"2": {
downloadURL: "https://dummy2.com",
fileID(2): {
downloadURL: aURL(2),
isDeleted: false,
},
"3": {
downloadURL: "https://dummy3.com",
fileID(3): {
downloadURL: aURL(3),
isDeleted: false,
},
"4": {
downloadURL: "https://dummy4.com",
fileID(4): {
downloadURL: aURL(4),
isDeleted: false,
},
"5": {
downloadURL: "https://dummy5.com",
fileID(5): {
downloadURL: aURL(5),
isDeleted: false,
},
},
@ -329,53 +326,41 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
},
{
name: "multiple pages with resets",
pages: []mock.NextPage{
{
Items: []models.DriveItemable{
fileItem("-1", "file-1", "root", "root", "https://dummy-1.com", false),
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
},
},
{
Items: []models.DriveItemable{},
Reset: true,
},
{
Items: []models.DriveItemable{
fileItem("0", "file1", "root", "root", "https://dummy0.com", false),
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
},
},
{
Items: []models.DriveItemable{
fileItem("4", "file4", "root", "root", "https://dummy4.com", false),
fileItem("5", "file5", "root", "root", "https://dummy5.com", false),
},
},
pages: []nextPage{
aPage(
d.fileWURLAtRoot(aURL(-1), false, -1),
d.fileWURLAtRoot(aURL(1), false, 1),
d.fileWURLAtRoot(aURL(2), false, 2),
d.fileWURLAtRoot(aURL(3), false, 3)),
aReset(),
aPage(
d.fileWURLAtRoot(aURL(0), false, 0),
d.fileWURLAtRoot(aURL(1), false, 1),
d.fileWURLAtRoot(aURL(2), false, 2),
d.fileWURLAtRoot(aURL(3), false, 3)),
aPage(
d.fileWURLAtRoot(aURL(4), false, 4),
d.fileWURLAtRoot(aURL(5), false, 5)),
},
expectedItemProps: map[string]itemProps{
"1": {
downloadURL: "https://dummy1.com",
fileID(1): {
downloadURL: aURL(1),
isDeleted: false,
},
"2": {
downloadURL: "https://dummy2.com",
fileID(2): {
downloadURL: aURL(2),
isDeleted: false,
},
"3": {
downloadURL: "https://dummy3.com",
fileID(3): {
downloadURL: aURL(3),
isDeleted: false,
},
"4": {
downloadURL: "https://dummy4.com",
fileID(4): {
downloadURL: aURL(4),
isDeleted: false,
},
"5": {
downloadURL: "https://dummy5.com",
fileID(5): {
downloadURL: aURL(5),
isDeleted: false,
},
},
@ -388,48 +373,39 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
},
{
name: "multiple pages with resets and combo reset+items in page",
pages: []mock.NextPage{
{
Items: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
},
},
{
Items: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
},
Reset: true,
},
{
Items: []models.DriveItemable{
fileItem("4", "file4", "root", "root", "https://dummy4.com", false),
fileItem("5", "file5", "root", "root", "https://dummy5.com", false),
},
},
pages: []nextPage{
aPage(
d.fileWURLAtRoot(aURL(0), false, 0),
d.fileWURLAtRoot(aURL(1), false, 1),
d.fileWURLAtRoot(aURL(2), false, 2),
d.fileWURLAtRoot(aURL(3), false, 3)),
aPageWReset(
d.fileWURLAtRoot(aURL(1), false, 1),
d.fileWURLAtRoot(aURL(2), false, 2),
d.fileWURLAtRoot(aURL(3), false, 3)),
aPage(
d.fileWURLAtRoot(aURL(4), false, 4),
d.fileWURLAtRoot(aURL(5), false, 5)),
},
expectedItemProps: map[string]itemProps{
"1": {
downloadURL: "https://dummy1.com",
fileID(1): {
downloadURL: aURL(1),
isDeleted: false,
},
"2": {
downloadURL: "https://dummy2.com",
fileID(2): {
downloadURL: aURL(2),
isDeleted: false,
},
"3": {
downloadURL: "https://dummy3.com",
fileID(3): {
downloadURL: aURL(3),
isDeleted: false,
},
"4": {
downloadURL: "https://dummy4.com",
fileID(4): {
downloadURL: aURL(4),
isDeleted: false,
},
"5": {
downloadURL: "https://dummy5.com",
fileID(5): {
downloadURL: aURL(5),
isDeleted: false,
},
},
@ -442,26 +418,25 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
},
{
name: "duplicate items with potentially new urls",
pages: []mock.NextPage{
{Items: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
fileItem("1", "file1", "root", "root", "https://test1.com", false),
fileItem("2", "file2", "root", "root", "https://test2.com", false),
}},
pages: []nextPage{
aPage(
d.fileWURLAtRoot(aURL(1), false, 1),
d.fileWURLAtRoot(aURL(2), false, 2),
d.fileWURLAtRoot(aURL(3), false, 3),
d.fileWURLAtRoot(aURL(100), false, 1),
d.fileWURLAtRoot(aURL(200), false, 2)),
},
expectedItemProps: map[string]itemProps{
"1": {
downloadURL: "https://test1.com",
fileID(1): {
downloadURL: aURL(100),
isDeleted: false,
},
"2": {
downloadURL: "https://test2.com",
fileID(2): {
downloadURL: aURL(200),
isDeleted: false,
},
"3": {
downloadURL: "https://dummy3.com",
fileID(3): {
downloadURL: aURL(3),
isDeleted: false,
},
},
@ -474,20 +449,19 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
},
{
name: "deleted items",
pages: []mock.NextPage{
{Items: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
fileItem("1", "file1", "root", "root", "https://dummy1.com", true),
}},
pages: []nextPage{
aPage(
d.fileWURLAtRoot(aURL(1), false, 1),
d.fileWURLAtRoot(aURL(2), false, 2),
d.fileWURLAtRoot(aURL(1), true, 1)),
},
expectedItemProps: map[string]itemProps{
"1": {
fileID(1): {
downloadURL: "",
isDeleted: true,
},
"2": {
downloadURL: "https://dummy2.com",
fileID(2): {
downloadURL: aURL(2),
isDeleted: false,
},
},
@ -500,13 +474,11 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
},
{
name: "item not found in cache",
pages: []mock.NextPage{
{Items: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
}},
pages: []nextPage{
aPage(d.fileWURLAtRoot(aURL(1), false, 1)),
},
expectedItemProps: map[string]itemProps{
"2": {},
fileID(2): {},
},
expectErr: assert.Error,
expect: func(t *testing.T, uc *urlCache, startTime time.Time) {
@ -517,13 +489,13 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
},
{
name: "delta query error",
pages: []mock.NextPage{
{Items: []models.DriveItemable{}},
pages: []nextPage{
aPage(),
},
pagerErr: errors.New("delta query error"),
expectedItemProps: map[string]itemProps{
"1": {},
"2": {},
fileID(1): {},
fileID(2): {},
},
expectErr: assert.Error,
expect: func(t *testing.T, uc *urlCache, startTime time.Time) {
@ -534,14 +506,13 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
},
{
name: "folder item",
pages: []mock.NextPage{
{Items: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
driveItem("2", "folder2", "root", "root", isFolder),
}},
pages: []nextPage{
aPage(
d.fileWURLAtRoot(aURL(1), false, 1),
d.folderAtRoot(2)),
},
expectedItemProps: map[string]itemProps{
"2": {},
fileID(2): {},
},
expectErr: assert.Error,
expect: func(t *testing.T, uc *urlCache, startTime time.Time) {
@ -562,14 +533,17 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
ctx, flush := tester.NewContext(t)
defer flush()

driveEnumer := mock.DriveEnumerator(
mock.Drive(driveID).
WithErr(test.pagerErr).
With(mock.Delta(deltaString, test.pagerErr).
With(test.pages...)))
drive := drive()

driveEnumer := driveEnumerator(
drive.newEnumer().
withErr(test.pagerErr).
with(
delta(deltaURL, test.pagerErr).
with(test.pages...)))

cache, err := newURLCache(
driveID,
drive.id,
"",
1*time.Hour,
driveEnumer,
@ -609,18 +583,17 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
func (suite *URLCacheUnitSuite) TestNeedsRefresh() {
var (
t = suite.T()
driveID = "drive1"
refreshInterval = 1 * time.Second
drv = drive()
)

cache, err := newURLCache(
driveID,
drv.id,
"",
refreshInterval,
&mock.EnumerateDriveItemsDelta{},
&enumerateDriveItemsDelta{},
count.New(),
fault.New(true))

require.NoError(t, err, clues.ToCore(err))

// cache is empty
@ -641,6 +614,8 @@ func (suite *URLCacheUnitSuite) TestNeedsRefresh() {
}

func (suite *URLCacheUnitSuite) TestNewURLCache() {
drv := drive()

table := []struct {
name string
driveID string
@ -653,21 +628,21 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
name: "invalid driveID",
driveID: "",
refreshInt: 1 * time.Hour,
itemPager: &mock.EnumerateDriveItemsDelta{},
itemPager: &enumerateDriveItemsDelta{},
errors: fault.New(true),
expectErr: require.Error,
},
{
name: "invalid refresh interval",
driveID: "drive1",
driveID: drv.id,
refreshInt: 100 * time.Millisecond,
itemPager: &mock.EnumerateDriveItemsDelta{},
itemPager: &enumerateDriveItemsDelta{},
errors: fault.New(true),
expectErr: require.Error,
},
{
name: "invalid item enumerator",
driveID: "drive1",
driveID: drv.id,
refreshInt: 1 * time.Hour,
itemPager: nil,
errors: fault.New(true),
@ -675,9 +650,9 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
},
{
name: "valid",
driveID: "drive1",
driveID: drv.id,
refreshInt: 1 * time.Hour,
itemPager: &mock.EnumerateDriveItemsDelta{},
itemPager: &enumerateDriveItemsDelta{},
errors: fault.New(true),
expectErr: require.NoError,
},

@ -1,5 +1,16 @@
package mock

// ---------------------------------------------------------------------------
// >>> TODO <<<
// https://github.com/alcionai/corso/issues/4846
// This file's functions are duplicated into /drive/helper_test.go, which
// should act as the clear primary owner of that functionality. However,
// packages outside of /drive (such as sharepoint) depend on these helpers
// for test functionality. We'll want to unify the two at some point.
// In the meantime, make sure you're referencing and updating the correct
// set of helpers (prefer the /drive version over this one).
// ---------------------------------------------------------------------------

import (
"context"
"fmt"
@ -50,7 +61,6 @@ type BackupHandler[T any] struct {
Service path.ServiceType
Category path.CategoryType

DrivePagerV pagers.NonDeltaHandler[models.Driveable]
// driveID -> itemPager
ItemPagerV map[string]pagers.DeltaHandler[models.DriveItemable]

@ -126,11 +136,9 @@ func DefaultSharePointBH(resourceOwner string) *BackupHandler[models.DriveItemab

func DefaultDriveBHWith(
resource string,
drivePager *apiMock.Pager[models.Driveable],
enumerator EnumerateDriveItemsDelta,
) *BackupHandler[models.DriveItemable] {
mbh := DefaultOneDriveBH(resource)
mbh.DrivePagerV = drivePager
mbh.DriveItemEnumeration = enumerator

return mbh
@ -168,7 +176,7 @@ func (h BackupHandler[T]) ServiceCat() (path.ServiceType, path.CategoryType) {
}

func (h BackupHandler[T]) NewDrivePager(string, []string) pagers.NonDeltaHandler[models.Driveable] {
return h.DrivePagerV
return h.DriveItemEnumeration.DrivePager()
}

func (h BackupHandler[T]) FormatDisplayPath(_ string, pb *path.Builder) string {
@ -333,18 +341,18 @@ type NextPage struct {
}

type EnumerateDriveItemsDelta struct {
DrivePagers map[string]*DriveDeltaEnumerator
DrivePagers map[string]*DeltaDriveEnumerator
}

func DriveEnumerator(
ds ...*DriveDeltaEnumerator,
ds ...*DeltaDriveEnumerator,
) EnumerateDriveItemsDelta {
enumerator := EnumerateDriveItemsDelta{
DrivePagers: map[string]*DriveDeltaEnumerator{},
DrivePagers: map[string]*DeltaDriveEnumerator{},
}

for _, drive := range ds {
enumerator.DrivePagers[drive.DriveID] = drive
enumerator.DrivePagers[drive.Drive.ID] = drive
}

return enumerator
@ -359,29 +367,75 @@ func (en EnumerateDriveItemsDelta) EnumerateDriveItemsDelta(
return iterator.nextDelta()
}

type DriveDeltaEnumerator struct {
DriveID string
func (en EnumerateDriveItemsDelta) DrivePager() *apiMock.Pager[models.Driveable] {
ds := []models.Driveable{}

for _, dp := range en.DrivePagers {
ds = append(ds, dp.Drive.Able)
}

return &apiMock.Pager[models.Driveable]{
ToReturn: []apiMock.PagerResult[models.Driveable]{
{Values: ds},
},
}
}
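
Since the enumerator now records every mocked drive, DrivePager() lets the backup handler derive its drive pager instead of requiring callers to assemble an apiMock.Pager by hand. A hedged usage sketch, based on the signatures in this diff (the "resource-owner" string and the empty NextPage are placeholders):

enum := DriveEnumerator(
Drive().NewEnumer().With(
Delta(id("delta"), nil).With(NextPage{})))

mbh := DefaultDriveBHWith("resource-owner", enum)

// NewDrivePager proxies to enum.DrivePager(), which serves a single
// page containing every drive registered on the enumerator.
pager := mbh.NewDrivePager("", nil)
_ = pager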

func (en EnumerateDriveItemsDelta) Drives() []*DeltaDrive {
ds := []*DeltaDrive{}

for _, dp := range en.DrivePagers {
ds = append(ds, dp.Drive)
}

return ds
}

type DeltaDrive struct {
ID string
Able models.Driveable
}

func Drive(driveSuffix ...any) *DeltaDrive {
driveID := id("drive", driveSuffix...)

able := models.NewDrive()
able.SetId(ptr.To(driveID))
able.SetName(ptr.To(name("drive", driveSuffix...)))

return &DeltaDrive{
ID: driveID,
Able: able,
}
}
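
Concretely, given the id() and name() helpers added at the bottom of this file, the factory yields:

d := Drive() // ID "id_drive", display name "n_drive"
d2 := Drive(2) // ID "id_drive_2", display name "n_drive_2"
_, _ = d, d2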

func (dd *DeltaDrive) NewEnumer() *DeltaDriveEnumerator {
cp := &DeltaDrive{}

*cp = *dd

return &DeltaDriveEnumerator{Drive: cp}
}

type DeltaDriveEnumerator struct {
Drive *DeltaDrive
idx int
DeltaQueries []*DeltaQuery
Err error
}

func Drive(driveID string) *DriveDeltaEnumerator {
return &DriveDeltaEnumerator{DriveID: driveID}
}

func (dde *DriveDeltaEnumerator) With(ds ...*DeltaQuery) *DriveDeltaEnumerator {
func (dde *DeltaDriveEnumerator) With(ds ...*DeltaQuery) *DeltaDriveEnumerator {
dde.DeltaQueries = ds
return dde
}

// WithErr adds an error that is always returned in the last delta index.
func (dde *DriveDeltaEnumerator) WithErr(err error) *DriveDeltaEnumerator {
func (dde *DeltaDriveEnumerator) WithErr(err error) *DeltaDriveEnumerator {
dde.Err = err
return dde
}

func (dde *DriveDeltaEnumerator) nextDelta() *DeltaQuery {
func (dde *DeltaDriveEnumerator) nextDelta() *DeltaQuery {
if dde.idx == len(dde.DeltaQueries) {
// at the end of the enumeration, return an empty page with no items,
// not even the root. This is what graph api would do to signify an absence
@ -455,7 +509,7 @@ func (dq *DeltaQuery) NextPage() ([]models.DriveItemable, bool, bool) {
}

np := dq.Pages[dq.idx]
dq.idx = dq.idx + 1
dq.idx++

return np.Items, np.Reset, false
}
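
A hypothetical consumer loop for the pager above; it assumes, per the bounds check earlier in this method, that the third return value flips to true once all pages have been served:

for {
items, reset, done := dq.NextPage()
if done {
break
}
if reset {
// delta reset: discard state accumulated from earlier pages
}
_ = items // process the page
}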
@ -598,3 +652,49 @@ func (h *RestoreHandler) GetRootFolder(
) (models.DriveItemable, error) {
return models.NewDriveItem(), nil
}

// assumption is only one suffix per id. Mostly using
// the variadic as an "optional" extension.
func id(v string, suffixes ...any) string {
id := fmt.Sprintf("id_%s", v)

// a bit weird, but acts as a quality of life
// that allows some funcs to take in the `file`
// or `folder` or etc monikers as the suffix
// without producing weird outputs.
if len(suffixes) == 1 {
sfx0, ok := suffixes[0].(string)
if ok && sfx0 == v {
return id
}
}

for _, sfx := range suffixes {
id = fmt.Sprintf("%s_%v", id, sfx)
}

return id
}

// assumption is only one suffix per name. Mostly using
// the variadic as an "optional" extension.
func name(v string, suffixes ...any) string {
name := fmt.Sprintf("n_%s", v)

// a bit weird, but acts as a quality of life
// that allows some funcs to take in the `file`
// or `folder` or etc monikers as the suffix
// without producing weird outputs.
if len(suffixes) == 1 {
sfx0, ok := suffixes[0].(string)
if ok && sfx0 == v {
return name
}
}

for _, sfx := range suffixes {
name = fmt.Sprintf("%s_%v", name, sfx)
}

return name
}
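
A few concrete outputs for reference; the single-suffix shortcut is the "moniker" quality-of-life case described in the comments above:

id("file") // "id_file"
id("file", 1) // "id_file_1"
id("file", "0-1") // "id_file_0-1"
id("file", "file") // "id_file" (moniker passed as suffix)
name("folder", 2) // "n_folder_2"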

@ -28,7 +28,7 @@ import (

var testBaseDrivePath = path.Builder{}.Append(
odConsts.DrivesPathDir,
"driveID1",
"id_drive",
odConsts.RootPathDir)

// ---------------------------------------------------------------------------
@ -44,12 +44,14 @@ func TestLibrariesBackupUnitSuite(t *testing.T) {
}

func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
anyFolder := (&selectors.SharePointBackup{}).LibraryFolders(selectors.Any())[0]
var (
anyFolder = (&selectors.SharePointBackup{}).LibraryFolders(selectors.Any())[0]
drv = mock.Drive()
)

const (
tenantID = "tenant"
siteID = "site"
driveID = "driveID1"
)

pb := path.Builder{}.Append(testBaseDrivePath.Elements()...)
@ -96,13 +98,13 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
paths = map[string]string{}
excluded = map[string]struct{}{}
collMap = map[string]map[string]*drive.Collection{
driveID: {},
drv.ID: {},
}
topLevelPackages = map[string]struct{}{}
)

mbh.DriveItemEnumeration = mock.DriveEnumerator(
mock.Drive(driveID).With(
drv.NewEnumer().With(
mock.Delta("notempty", nil).With(mock.NextPage{Items: test.items})))

c := drive.NewCollections(
@ -117,7 +119,7 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {

_, _, err := c.PopulateDriveCollections(
ctx,
driveID,
drv.ID,
"General",
paths,
excluded,
@ -134,10 +136,10 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
assert.Empty(t, topLevelPackages, "should not find package type folders")

for _, collPath := range test.expectedCollectionIDs {
assert.Contains(t, c.CollectionMap[driveID], collPath)
assert.Contains(t, c.CollectionMap[drv.ID], collPath)
}

for _, col := range c.CollectionMap[driveID] {
for _, col := range c.CollectionMap[drv.ID] {
assert.Contains(t, test.expectedCollectionPaths, col.FullPath().String())
}
})