clean up drive unit test helpers (#4805)
Adds some cleanups to the drive unit test helpers. Goals were as follows:

1. remove idx() and namex() funcs, replace with id() and name()
2. minimize factory duplication in helper_test.go
3. improve readability in id() and name() calls by adding file and folder variants to handle the 99% of cases we use them in.

No logic changes in this PR. Only test func updates.

---

#### Does this PR need a docs update or release note?

- [x] ⛔ No

#### Type of change

- [x] 🤖 Supportability/Tests

#### Issue(s)

* #4689

#### Test Plan

- [x] ⚡ Unit test
parent 3f1f9588f2
commit dc3cfd1ec3
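Before the diffs, a minimal sketch of the helper shape this PR moves to (goals 1 and 3). The real definitions live in helper_test.go, whose diff is suppressed below as too large, so everything here is inferred from call sites such as `folderID("parent")` and `fileID("d")`; the suffix-joining behavior and the `file`/`folder`/`pkg` constant values are assumptions, not the actual implementation.

```go
package drive

import "strings"

// Hypothetical reconstruction of the test ID/name helpers, inferred from
// call sites in this diff; the real definitions are in helper_test.go.
const (
	file   = "file"    // assumed value
	folder = "folder"  // assumed value
	pkg    = "package" // assumed value
)

func id(v string, suffixes ...string) string {
	return strings.Join(append([]string{"id", v}, suffixes...), "_")
}

func name(v string, suffixes ...string) string {
	return strings.Join(append([]string{"name", v}, suffixes...), "_")
}

// Goal 3: file and folder variants cover the overwhelmingly common case,
// so call sites read folderID("parent") instead of idx(folder, "parent").
func fileID(suffixes ...string) string     { return id(file, suffixes...) }
func folderID(suffixes ...string) string   { return id(folder, suffixes...) }
func folderName(suffixes ...string) string { return name(folder, suffixes...) }
```

With variadic suffixes, `folderID()` replaces `id(folder)` and `folderID("parent")` replaces `idx(folder, "parent")`, which is exactly the substitution the hunks below perform.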
@@ -23,7 +23,6 @@ import (
   "github.com/alcionai/corso/src/internal/data"
   "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
   metaTD "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata/testdata"
-  "github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
   odTD "github.com/alcionai/corso/src/internal/m365/service/onedrive/testdata"
   "github.com/alcionai/corso/src/internal/m365/support"
   "github.com/alcionai/corso/src/internal/tester"
@@ -108,7 +107,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
       name: "oneDrive, no duplicates",
       numInstances: 1,
       service: path.OneDriveService,
-      itemDeets: nst{stubItemName, defaultItemSize, now},
+      itemDeets: nst{stubItemName, defaultFileSize, now},
       itemInfo: details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: stubItemName, Modified: now}},
       getBody: io.NopCloser(bytes.NewReader(stubItemContent)),
       getErr: nil,
@@ -118,7 +117,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
       name: "oneDrive, duplicates",
       numInstances: 3,
       service: path.OneDriveService,
-      itemDeets: nst{stubItemName, defaultItemSize, now},
+      itemDeets: nst{stubItemName, defaultFileSize, now},
       getBody: io.NopCloser(bytes.NewReader(stubItemContent)),
       getErr: nil,
       itemInfo: details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: stubItemName, Modified: now}},
@@ -128,7 +127,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
       name: "oneDrive, malware",
       numInstances: 3,
       service: path.OneDriveService,
-      itemDeets: nst{stubItemName, defaultItemSize, now},
+      itemDeets: nst{stubItemName, defaultFileSize, now},
       itemInfo: details.ItemInfo{},
       getBody: nil,
       getErr: clues.New("test malware").Label(graph.LabelsMalware),
@@ -139,7 +138,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
       name: "oneDrive, not found",
       numInstances: 3,
       service: path.OneDriveService,
-      itemDeets: nst{stubItemName, defaultItemSize, now},
+      itemDeets: nst{stubItemName, defaultFileSize, now},
       itemInfo: details.ItemInfo{},
       getBody: nil,
       getErr: clues.New("test not found").Label(graph.LabelStatus(http.StatusNotFound)),
@@ -150,7 +149,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
       name: "sharePoint, no duplicates",
       numInstances: 1,
       service: path.SharePointService,
-      itemDeets: nst{stubItemName, defaultItemSize, now},
+      itemDeets: nst{stubItemName, defaultFileSize, now},
       itemInfo: details.ItemInfo{SharePoint: &details.SharePointInfo{ItemName: stubItemName, Modified: now}},
       getBody: io.NopCloser(bytes.NewReader(stubItemContent)),
       getErr: nil,
@@ -160,7 +159,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
       name: "sharePoint, duplicates",
       numInstances: 3,
       service: path.SharePointService,
-      itemDeets: nst{stubItemName, defaultItemSize, now},
+      itemDeets: nst{stubItemName, defaultFileSize, now},
       itemInfo: details.ItemInfo{SharePoint: &details.SharePointInfo{ItemName: stubItemName, Modified: now}},
       getBody: io.NopCloser(bytes.NewReader(stubItemContent)),
       getErr: nil,
@@ -185,9 +184,9 @@ func (suite *CollectionUnitSuite) TestCollection() {
       folderPath, err := pb.ToDataLayerOneDrivePath("tenant", "owner", false)
       require.NoError(t, err, clues.ToCore(err))

-      mbh := mock.DefaultOneDriveBH("a-user")
+      mbh := defaultOneDriveBH("a-user")
       if test.service == path.SharePointService {
-        mbh = mock.DefaultSharePointBH("a-site")
+        mbh = defaultSharePointBH("a-site")
         mbh.ItemInfo.SharePoint.Modified = now
         mbh.ItemInfo.SharePoint.ItemName = stubItemName
       } else {
@@ -202,10 +201,10 @@ func (suite *CollectionUnitSuite) TestCollection() {
        },
      }
      mbh.GetErrs = []error{test.getErr}
-     mbh.GI = mock.GetsItem{Err: assert.AnError}
+     mbh.GI = getsItem{Err: assert.AnError}

      pcr := metaTD.NewStubPermissionResponse(metadata.GV2User, stubMetaID, stubMetaEntityID, stubMetaRoles)
-     mbh.GIP = mock.GetsItemPermission{Perm: pcr}
+     mbh.GIP = getsItemPermission{Perm: pcr}

      coll, err := NewCollection(
        mbh,
@@ -305,7 +304,7 @@ func (suite *CollectionUnitSuite) TestCollectionReadError() {
     collStatus = support.ControllerOperationStatus{}
     wg = sync.WaitGroup{}
     name = "name"
-    size = defaultItemSize
+    size = defaultFileSize
     now = time.Now()
   )

@@ -318,9 +317,9 @@ func (suite *CollectionUnitSuite) TestCollectionReadError() {
   folderPath, err := pb.ToDataLayerOneDrivePath("a-tenant", "a-user", false)
   require.NoError(t, err, clues.ToCore(err))

-  mbh := mock.DefaultOneDriveBH("a-user")
-  mbh.GI = mock.GetsItem{Err: assert.AnError}
-  mbh.GIP = mock.GetsItemPermission{Perm: models.NewPermissionCollectionResponse()}
+  mbh := defaultOneDriveBH("a-user")
+  mbh.GI = getsItem{Err: assert.AnError}
+  mbh.GIP = getsItemPermission{Perm: models.NewPermissionCollectionResponse()}
   mbh.GetResps = []*http.Response{
     nil,
     {StatusCode: http.StatusOK, Body: io.NopCloser(strings.NewReader("test"))},
@@ -375,7 +374,7 @@ func (suite *CollectionUnitSuite) TestCollectionReadUnauthorizedErrorRetry() {
     collStatus = support.ControllerOperationStatus{}
     wg = sync.WaitGroup{}
     name = "name"
-    size = defaultItemSize
+    size = defaultFileSize
     now = time.Now()
   )

@@ -397,9 +396,9 @@ func (suite *CollectionUnitSuite) TestCollectionReadUnauthorizedErrorRetry() {
   folderPath, err := pb.ToDataLayerOneDrivePath("a-tenant", "a-user", false)
   require.NoError(t, err)

-  mbh := mock.DefaultOneDriveBH("a-user")
-  mbh.GI = mock.GetsItem{Item: stubItem}
-  mbh.GIP = mock.GetsItemPermission{Perm: models.NewPermissionCollectionResponse()}
+  mbh := defaultOneDriveBH("a-user")
+  mbh.GI = getsItem{Item: stubItem}
+  mbh.GIP = getsItemPermission{Perm: models.NewPermissionCollectionResponse()}
   mbh.GetResps = []*http.Response{
     nil,
     {StatusCode: http.StatusOK, Body: io.NopCloser(strings.NewReader("test"))},
@@ -457,9 +456,9 @@ func (suite *CollectionUnitSuite) TestCollectionPermissionBackupLatestModTime()
   folderPath, err := pb.ToDataLayerOneDrivePath("a-tenant", "a-user", false)
   require.NoError(t, err, clues.ToCore(err))

-  mbh := mock.DefaultOneDriveBH("a-user")
+  mbh := defaultOneDriveBH("a-user")
   mbh.ItemInfo = details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: "fakeName", Modified: time.Now()}}
-  mbh.GIP = mock.GetsItemPermission{Perm: models.NewPermissionCollectionResponse()}
+  mbh.GIP = getsItemPermission{Perm: models.NewPermissionCollectionResponse()}
   mbh.GetResps = []*http.Response{{
     StatusCode: http.StatusOK,
     Body: io.NopCloser(strings.NewReader("Fake Data!")),
@@ -635,8 +634,8 @@ func (suite *GetDriveItemUnitTestSuite) TestGetDriveItem_error() {

       stubItem.GetFile().SetMimeType(&test.itemMimeType)

-      mbh := mock.DefaultOneDriveBH("a-user")
-      mbh.GI = mock.GetsItem{Item: stubItem}
+      mbh := defaultOneDriveBH("a-user")
+      mbh.GI = getsItem{Item: stubItem}
       mbh.GetResps = []*http.Response{{StatusCode: http.StatusOK}}
       mbh.GetErrs = []error{test.err}

@@ -692,7 +691,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {

   table := []struct {
     name string
-    mgi mock.GetsItem
+    mgi getsItem
     itemInfo details.ItemInfo
     respBody []io.ReadCloser
     getErr []error
@@ -711,7 +710,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
     },
     {
       name: "expired url redownloads",
-      mgi: mock.GetsItem{Item: itemWID, Err: nil},
+      mgi: getsItem{Item: itemWID, Err: nil},
       itemInfo: details.ItemInfo{},
       respBody: []io.ReadCloser{nil, iorc},
       getErr: []error{errUnauth, nil},
@@ -731,14 +730,14 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
       name: "re-fetching the item fails",
       itemInfo: details.ItemInfo{},
       getErr: []error{errUnauth},
-      mgi: mock.GetsItem{Item: nil, Err: assert.AnError},
+      mgi: getsItem{Item: nil, Err: assert.AnError},
       expectErr: require.Error,
       expect: require.Nil,
       muc: m,
     },
     {
       name: "expired url fails redownload",
-      mgi: mock.GetsItem{Item: itemWID, Err: nil},
+      mgi: getsItem{Item: itemWID, Err: nil},
       itemInfo: details.ItemInfo{},
       respBody: []io.ReadCloser{nil, nil},
       getErr: []error{errUnauth, assert.AnError},
@@ -748,7 +747,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
     },
     {
       name: "url refreshed from cache",
-      mgi: mock.GetsItem{Item: itemWID, Err: nil},
+      mgi: getsItem{Item: itemWID, Err: nil},
       itemInfo: details.ItemInfo{},
       respBody: []io.ReadCloser{nil, iorc},
       getErr: []error{errUnauth, nil},
@@ -766,7 +765,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
     },
     {
       name: "url refreshed from cache but item deleted",
-      mgi: mock.GetsItem{Item: itemWID, Err: graph.ErrDeletedInFlight},
+      mgi: getsItem{Item: itemWID, Err: graph.ErrDeletedInFlight},
       itemInfo: details.ItemInfo{},
       respBody: []io.ReadCloser{nil, nil, nil},
       getErr: []error{errUnauth, graph.ErrDeletedInFlight, graph.ErrDeletedInFlight},
@@ -784,7 +783,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
     },
     {
       name: "fallback to item fetch on any cache error",
-      mgi: mock.GetsItem{Item: itemWID, Err: nil},
+      mgi: getsItem{Item: itemWID, Err: nil},
       itemInfo: details.ItemInfo{},
       respBody: []io.ReadCloser{nil, iorc},
       getErr: []error{errUnauth, nil},
@@ -814,7 +813,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
         }
       }

-      mbh := mock.DefaultOneDriveBH("a-user")
+      mbh := defaultOneDriveBH("a-user")
       mbh.GI = test.mgi
       mbh.ItemInfo = test.itemInfo
       mbh.GetResps = resps
@@ -980,9 +979,9 @@ func (suite *CollectionUnitSuite) TestItemExtensions() {

       wg.Add(1)

-      mbh := mock.DefaultOneDriveBH("a-user")
-      mbh.GI = mock.GetsItem{Err: assert.AnError}
-      mbh.GIP = mock.GetsItemPermission{Perm: models.NewPermissionCollectionResponse()}
+      mbh := defaultOneDriveBH("a-user")
+      mbh.GI = getsItem{Err: assert.AnError}
+      mbh.GIP = getsItemPermission{Perm: models.NewPermissionCollectionResponse()}
       mbh.GetResps = []*http.Response{
         {
           StatusCode: http.StatusOK,
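The other substitution running through these hunks is `mock.DefaultOneDriveBH` → `defaultOneDriveBH` and `mock.GetsItem`/`mock.GetsItemPermission` → `getsItem`/`getsItemPermission` (goal 2). A hypothetical sketch of what the local `getsItem` stand-in could look like; the struct fields mirror the literals above (`getsItem{Item: ..., Err: ...}`), but the method name and signature are invented for illustration and are not the real interface:

```go
package drive

import "context"

// getsItem is a tiny test double: it returns a canned item and/or error.
// Field names match the literals used in the tests above; the Get method
// shown here is a hypothetical signature, not the real one.
type getsItem struct {
	Item any // stand-in for the drive item model type
	Err  error
}

func (gi getsItem) Get(ctx context.Context, itemID string) (any, error) {
	return gi.Item, gi.Err
}
```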
File diff suppressed because it is too large
@@ -629,7 +629,7 @@ func (c *Collections) addFileToTree(
     return nil, nil
   }

-  _, alreadySeen := tree.fileIDToParentID[fileID]
+  alreadySeen := tree.hasFile(fileID)
   parentNode, parentNotNil := tree.folderIDToNode[parentID]

   if parentNotNil && !alreadySeen {
@@ -686,25 +686,10 @@ func (c *Collections) makeDriveTombstones(
       continue
     }

-    // TODO: call NewTombstoneCollection
-    coll, err := NewCollection(
-      c.handler,
-      c.protectedResource,
-      nil, // delete the drive
+    coll := data.NewTombstoneCollection(
       prevDrivePath,
-      driveID,
-      c.statusUpdater,
       c.ctrl,
-      false,
-      true,
-      nil,
       c.counter.Local())
-    if err != nil {
-      err = clues.WrapWC(ctx, err, "making drive tombstone")
-      el.AddRecoverable(ctx, err)
-
-      continue
-    }

     colls = append(colls, coll)
   }
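The second hunk above also resolves the deleted TODO comment: a twelve-argument `NewCollection` call plus recoverable-error plumbing collapses into `data.NewTombstoneCollection(prevDrivePath, c.ctrl, c.counter.Local())`, which, per the diff, returns no error. A toy stand-in to make the shape of that simplification concrete; every name below is hypothetical, only the idea (a tombstone needs little more than the old path, so construction cannot fail) comes from the diff:

```go
package main

import "fmt"

// Toy model of a tombstone collection: it marks a previously seen drive
// path as deleted. All names here are illustrative stand-ins.
type tombstoneCollection struct {
	prevPath string
}

func newTombstoneCollection(prevPath string) tombstoneCollection {
	return tombstoneCollection{prevPath: prevPath}
}

func main() {
	// No error branch needed, which is what lets the hunk above delete
	// the clues.WrapWC / AddRecoverable / continue block.
	coll := newTombstoneCollection("tenant/user/drives/driveID/root:")
	fmt.Printf("tombstoned: %s\n", coll.prevPath)
}
```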
File diff suppressed because it is too large
@@ -309,6 +309,11 @@ func (face *folderyMcFolderFace) setPreviousPath(
 // file handling
 // ---------------------------------------------------------------------------

+func (face *folderyMcFolderFace) hasFile(id string) bool {
+  _, exists := face.fileIDToParentID[id]
+  return exists
+}
+
 // addFile places the file in the correct parent node. If the
 // file was already added to the tree and is getting relocated,
 // this func will update and/or clean up all the old references.
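The `hasFile` helper above pairs with the `addFileToTree` change earlier (`_, alreadySeen := tree.fileIDToParentID[fileID]` becomes `alreadySeen := tree.hasFile(fileID)`). A self-contained rendering of just that mechanic, with the struct trimmed to the one field the helper reads:

```go
package main

import "fmt"

// folderyMcFolderFace, reduced to the file-index map that hasFile reads.
type folderyMcFolderFace struct {
	fileIDToParentID map[string]string
}

// hasFile, as added in the hunk above: a named wrapper around the
// two-value map lookup it replaces at the call site.
func (face *folderyMcFolderFace) hasFile(id string) bool {
	_, exists := face.fileIDToParentID[id]
	return exists
}

func main() {
	tree := &folderyMcFolderFace{
		fileIDToParentID: map[string]string{"id_file": "root"},
	}

	fmt.Println(tree.hasFile("id_file"))    // true
	fmt.Println(tree.hasFile("id_missing")) // false
}
```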
@@ -9,7 +9,6 @@ import (
   "github.com/stretchr/testify/suite"
   "golang.org/x/exp/maps"

-  "github.com/alcionai/corso/src/internal/common/ptr"
   "github.com/alcionai/corso/src/internal/tester"
   "github.com/alcionai/corso/src/pkg/path"
   "github.com/alcionai/corso/src/pkg/services/m365/custom"
@@ -68,7 +67,7 @@ func (suite *DeltaTreeUnitSuite) TestNewNodeyMcNodeFace() {
 func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder() {
   table := []struct {
     tname string
-    tree func(t *testing.T) *folderyMcFolderFace
+    tree func(t *testing.T, d *deltaDrive) *folderyMcFolderFace
     parentID string
     id string
     name string
@@ -94,16 +93,16 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder() {
       tname: "add folder",
       tree: treeWithRoot,
       parentID: rootID,
-      id: id(folder),
-      name: name(folder),
+      id: folderID(),
+      name: folderName(),
       expectErr: assert.NoError,
     },
     {
       tname: "add package",
       tree: treeWithRoot,
       parentID: rootID,
-      id: id(folder),
-      name: name(folder),
+      id: folderID(),
+      name: folderName(),
       isPackage: true,
       expectErr: assert.NoError,
     },
@@ -111,7 +110,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder() {
       tname: "missing ID",
       tree: treeWithRoot,
       parentID: rootID,
-      name: name(folder),
+      name: folderName(),
       isPackage: true,
       expectErr: assert.Error,
     },
@@ -119,15 +118,15 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder() {
       tname: "missing name",
       tree: treeWithRoot,
       parentID: rootID,
-      id: id(folder),
+      id: folderID(),
       isPackage: true,
       expectErr: assert.Error,
     },
     {
       tname: "missing parentID",
       tree: treeWithRoot,
-      id: id(folder),
-      name: name(folder),
+      id: folderID(),
+      name: folderName(),
       isPackage: true,
       expectErr: assert.Error,
     },
@@ -135,29 +134,29 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder() {
       tname: "already tombstoned",
       tree: treeWithTombstone,
       parentID: rootID,
-      id: id(folder),
-      name: name(folder),
+      id: folderID(),
+      name: folderName(),
       expectErr: assert.NoError,
     },
     {
       tname: "add folder before parent",
-      tree: func(t *testing.T) *folderyMcFolderFace {
+      tree: func(t *testing.T, d *deltaDrive) *folderyMcFolderFace {
         return &folderyMcFolderFace{
           folderIDToNode: map[string]*nodeyMcNodeFace{},
         }
       },
       parentID: rootID,
-      id: id(folder),
-      name: name(folder),
+      id: folderID(),
+      name: folderName(),
       isPackage: true,
       expectErr: assert.Error,
     },
     {
       tname: "folder already exists",
       tree: treeWithFolders,
-      parentID: idx(folder, "parent"),
-      id: id(folder),
-      name: name(folder),
+      parentID: folderID("parent"),
+      id: folderID(),
+      name: folderName(),
       expectErr: assert.NoError,
     },
   }
@@ -168,7 +167,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder() {
       ctx, flush := tester.NewContext(t)
       defer flush()

-      tree := test.tree(t)
+      tree := test.tree(t, drive())

       err := tree.setFolder(
         ctx,
@@ -203,18 +202,18 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddTombstone() {
   table := []struct {
     name string
     id string
-    tree func(t *testing.T) *folderyMcFolderFace
+    tree func(t *testing.T, d *deltaDrive) *folderyMcFolderFace
     expectErr assert.ErrorAssertionFunc
   }{
     {
       name: "add tombstone",
-      id: id(folder),
+      id: folderID(),
       tree: newTree,
       expectErr: assert.NoError,
     },
     {
       name: "duplicate tombstone",
-      id: id(folder),
+      id: folderID(),
       tree: treeWithTombstone,
       expectErr: assert.NoError,
     },
@@ -224,14 +223,8 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddTombstone() {
       expectErr: assert.Error,
     },
     {
-      name: "conflict: folder alive",
-      id: id(folder),
-      tree: treeWithTombstone,
-      expectErr: assert.NoError,
-    },
-    {
-      name: "already tombstoned",
-      id: id(folder),
+      name: "folder exists and is alive",
+      id: folderID(),
       tree: treeWithTombstone,
       expectErr: assert.NoError,
     },
@@ -243,7 +236,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddTombstone() {
       ctx, flush := tester.NewContext(t)
       defer flush()

-      tree := test.tree(t)
+      tree := test.tree(t, drive())

       err := tree.setTombstone(ctx, test.id)
       test.expectErr(t, err, clues.ToCore(err))
@@ -270,14 +263,14 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetPreviousPath() {
     name string
     id string
     prev path.Path
-    tree func(t *testing.T) *folderyMcFolderFace
+    tree func(t *testing.T, d *deltaDrive) *folderyMcFolderFace
     expectErr assert.ErrorAssertionFunc
     expectLive bool
     expectTombstone bool
   }{
     {
       name: "no changes become a no-op",
-      id: id(folder),
+      id: folderID(),
       prev: pathWith(defaultLoc()),
       tree: newTree,
       expectErr: assert.NoError,
@@ -295,7 +288,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetPreviousPath() {
     },
     {
       name: "create tombstone after reset",
-      id: id(folder),
+      id: folderID(),
       prev: pathWith(defaultLoc()),
       tree: treeAfterReset,
       expectErr: assert.NoError,
@@ -312,7 +305,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetPreviousPath() {
     },
     {
       name: "missing prev",
-      id: id(folder),
+      id: folderID(),
       tree: newTree,
       expectErr: assert.Error,
       expectLive: false,
@@ -320,7 +313,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetPreviousPath() {
     },
     {
       name: "update live folder",
-      id: id(folder),
+      id: folderID(),
       prev: pathWith(defaultLoc()),
       tree: treeWithFolders,
       expectErr: assert.NoError,
@@ -329,7 +322,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetPreviousPath() {
     },
     {
       name: "update tombstone",
-      id: id(folder),
+      id: folderID(),
       prev: pathWith(defaultLoc()),
       tree: treeWithTombstone,
       expectErr: assert.NoError,
@@ -340,7 +333,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetPreviousPath() {
   for _, test := range table {
     suite.Run(test.name, func() {
       t := suite.T()
-      tree := test.tree(t)
+      tree := test.tree(t, drive())

       err := tree.setPreviousPath(test.id, test.prev)
       test.expectErr(t, err, clues.ToCore(err))
@@ -478,7 +471,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder_correctTree()
   ctx, flush := tester.NewContext(t)
   defer flush()

-  tree := treeWithRoot(t)
+  tree := treeWithRoot(t, drive())

   set := func(
     parentID, fid, fname string,
@@ -564,7 +557,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder_correctTombst
   ctx, flush := tester.NewContext(t)
   defer flush()

-  tree := treeWithRoot(t)
+  tree := treeWithRoot(t, drive())

   set := func(
     parentID, fid, fname string,
@@ -739,7 +732,8 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() {
 func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() {
   table := []struct {
     tname string
-    tree func(t *testing.T) *folderyMcFolderFace
+    tree func(t *testing.T, d *deltaDrive) *folderyMcFolderFace
+    id string
     oldParentID string
     parentID string
     contentSize int64
@@ -749,69 +743,87 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() {
     {
       tname: "add file to root",
       tree: treeWithRoot,
+      id: fileID(),
       oldParentID: "",
       parentID: rootID,
-      contentSize: 42,
+      contentSize: defaultFileSize,
       expectErr: assert.NoError,
-      expectFiles: map[string]string{id(file): rootID},
+      expectFiles: map[string]string{fileID(): rootID},
     },
     {
       tname: "add file to folder",
       tree: treeWithFolders,
+      id: fileID(),
       oldParentID: "",
-      parentID: id(folder),
+      parentID: folderID(),
       contentSize: 24,
       expectErr: assert.NoError,
-      expectFiles: map[string]string{id(file): id(folder)},
+      expectFiles: map[string]string{fileID(): folderID()},
     },
     {
       tname: "re-add file at the same location",
       tree: treeWithFileAtRoot,
+      id: fileID(),
       oldParentID: rootID,
       parentID: rootID,
       contentSize: 84,
       expectErr: assert.NoError,
-      expectFiles: map[string]string{id(file): rootID},
+      expectFiles: map[string]string{fileID(): rootID},
     },
     {
       tname: "move file from folder to root",
       tree: treeWithFileInFolder,
-      oldParentID: id(folder),
+      id: fileID(),
+      oldParentID: folderID(),
       parentID: rootID,
       contentSize: 48,
       expectErr: assert.NoError,
-      expectFiles: map[string]string{id(file): rootID},
+      expectFiles: map[string]string{fileID(): rootID},
     },
     {
       tname: "move file from tombstone to root",
       tree: treeWithFileInTombstone,
-      oldParentID: id(folder),
+      id: fileID(),
+      oldParentID: folderID(),
       parentID: rootID,
       contentSize: 2,
       expectErr: assert.NoError,
-      expectFiles: map[string]string{id(file): rootID},
+      expectFiles: map[string]string{fileID(): rootID},
+    },
+    {
+      tname: "adding file with no ID",
+      tree: treeWithTombstone,
+      id: "",
+      oldParentID: "",
+      parentID: folderID(),
+      contentSize: 4,
+      expectErr: assert.Error,
+      expectFiles: map[string]string{},
     },
     {
       tname: "error adding file to tombstone",
       tree: treeWithTombstone,
+      id: fileID(),
       oldParentID: "",
-      parentID: id(folder),
-      contentSize: 4,
+      parentID: folderID(),
+      contentSize: 8,
       expectErr: assert.Error,
       expectFiles: map[string]string{},
     },
     {
       tname: "error adding file before parent",
       tree: treeWithTombstone,
+      id: fileID(),
       oldParentID: "",
-      parentID: idx(folder, 1),
-      contentSize: 8,
+      parentID: folderID("not-in-tree"),
+      contentSize: 16,
       expectErr: assert.Error,
       expectFiles: map[string]string{},
     },
     {
       tname: "error adding file without parent id",
       tree: treeWithTombstone,
+      id: fileID(),
       oldParentID: "",
       parentID: "",
       contentSize: 16,
@@ -822,15 +834,13 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() {
   for _, test := range table {
     suite.Run(test.tname, func() {
       t := suite.T()
-      tree := test.tree(t)
-      df := driveFile(file, parentDir(), test.parentID)
-      df.SetSize(ptr.To(test.contentSize))
+      d := drive()
+      tree := test.tree(t, d)

       err := tree.addFile(
         test.parentID,
-        id(file),
-        custom.ToCustomDriveItem(df))
+        test.id,
+        custom.ToCustomDriveItem(d.fileWSizeAt(test.contentSize, test.parentID)))
       test.expectErr(t, err, clues.ToCore(err))
       assert.Equal(t, test.expectFiles, tree.fileIDToParentID)

@@ -841,7 +851,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() {
       parent := tree.getNode(test.parentID)

       require.NotNil(t, parent)
-      assert.Contains(t, parent.files, id(file))
+      assert.Contains(t, parent.files, fileID())

       countSize := tree.countLiveFilesAndSizes()
       assert.Equal(t, 1, countSize.numFiles, "should have one file in the tree")
@@ -851,7 +861,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() {
       old := tree.getNode(test.oldParentID)

       require.NotNil(t, old)
-      assert.NotContains(t, old.files, id(file))
+      assert.NotContains(t, old.files, fileID())
       }
     })
   }
@@ -860,7 +870,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() {
 func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_DeleteFile() {
   table := []struct {
     tname string
-    tree func(t *testing.T) *folderyMcFolderFace
+    tree func(t *testing.T, d *deltaDrive) *folderyMcFolderFace
     parentID string
   }{
     {
@@ -876,34 +886,35 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_DeleteFile() {
     {
       tname: "delete file from folder",
       tree: treeWithFileInFolder,
-      parentID: id(folder),
+      parentID: folderID(),
     },
     {
       tname: "delete file from tombstone",
       tree: treeWithFileInTombstone,
-      parentID: id(folder),
+      parentID: folderID(),
     },
   }
   for _, test := range table {
     suite.Run(test.tname, func() {
       t := suite.T()
-      tree := test.tree(t)
+      tree := test.tree(t, drive())

-      tree.deleteFile(id(file))
+      tree.deleteFile(fileID())

       parent := tree.getNode(test.parentID)

       require.NotNil(t, parent)
-      assert.NotContains(t, parent.files, id(file))
-      assert.NotContains(t, tree.fileIDToParentID, id(file))
-      assert.Contains(t, tree.deletedFileIDs, id(file))
+      assert.NotContains(t, parent.files, fileID())
+      assert.NotContains(t, tree.fileIDToParentID, fileID())
+      assert.Contains(t, tree.deletedFileIDs, fileID())
     })
   }
 }

 func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_addAndDeleteFile() {
   t := suite.T()
-  tree := treeWithRoot(t)
+  d := drive()
+  tree := treeWithRoot(t, d)
   fID := id(file)

   require.Len(t, tree.fileIDToParentID, 0)
@@ -916,7 +927,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_addAndDeleteFile() {
   assert.Len(t, tree.deletedFileIDs, 1)
   assert.Contains(t, tree.deletedFileIDs, fID)

-  err := tree.addFile(rootID, fID, custom.ToCustomDriveItem(fileAtRoot()))
+  err := tree.addFile(rootID, fID, custom.ToCustomDriveItem(d.fileAtRoot()))
   require.NoError(t, err, clues.ToCore(err))

   assert.Len(t, tree.fileIDToParentID, 1)
@@ -935,7 +946,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateExcludeItemIDs(
 func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateExcludeItemIDs() {
   table := []struct {
     name string
-    tree func(t *testing.T) *folderyMcFolderFace
+    tree func(t *testing.T, d *deltaDrive) *folderyMcFolderFace
     expect map[string]struct{}
   }{
     {
@@ -946,7 +957,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateExcludeItemIDs(
     {
       name: "one file in a folder",
       tree: treeWithFileInFolder,
-      expect: makeExcludeMap(id(file)),
+      expect: makeExcludeMap(fileID()),
     },
     {
       name: "one file in a tombstone",
@@ -956,22 +967,22 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateExcludeItemIDs(
     {
       name: "one deleted file",
       tree: treeWithDeletedFile,
-      expect: makeExcludeMap(idx(file, "d")),
+      expect: makeExcludeMap(fileID("d")),
     },
     {
       name: "files in folders and tombstones",
       tree: fullTree,
       expect: makeExcludeMap(
-        id(file),
-        idx(file, "r"),
-        idx(file, "p"),
-        idx(file, "d")),
+        fileID(),
+        fileID("r"),
+        fileID("p"),
+        fileID("d")),
     },
   }
   for _, test := range table {
     suite.Run(test.name, func() {
       t := suite.T()
-      tree := test.tree(t)
+      tree := test.tree(t, drive())

       result := tree.generateExcludeItemIDs()
       assert.Equal(t, test.expect, result)
@@ -985,10 +996,11 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateExcludeItemIDs(

 func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables() {
   t := suite.T()
+  d := drive()

   table := []struct {
     name string
-    tree func(t *testing.T) *folderyMcFolderFace
+    tree func(t *testing.T, d *deltaDrive) *folderyMcFolderFace
     prevPaths map[string]string
     expectErr require.ErrorAssertionFunc
     expect map[string]collectable
@@ -1005,7 +1017,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables()
       expectErr: require.NoError,
       expect: map[string]collectable{
         rootID: {
-          currPath: fullPathPath(t),
+          currPath: d.fullPath(t),
           files: map[string]*custom.DriveItem{},
           folderID: rootID,
           isPackageOrChildOfPackage: false,
@@ -1019,9 +1031,9 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables()
       expectErr: require.NoError,
       expect: map[string]collectable{
         rootID: {
-          currPath: fullPathPath(t),
+          currPath: d.fullPath(t),
           files: map[string]*custom.DriveItem{
-            id(file): custom.ToCustomDriveItem(fileAtRoot()),
+            fileID(): custom.ToCustomDriveItem(d.fileAtRoot()),
           },
           folderID: rootID,
           isPackageOrChildOfPackage: false,
@@ -1035,41 +1047,41 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables()
       expectErr: require.NoError,
       expect: map[string]collectable{
         rootID: {
-          currPath: fullPathPath(t),
+          currPath: d.fullPath(t),
           files: map[string]*custom.DriveItem{},
           folderID: rootID,
           isPackageOrChildOfPackage: false,
           loc: path.Elements{},
         },
-        idx(folder, "parent"): {
-          currPath: fullPathPath(t, namex(folder, "parent")),
+        folderID("parent"): {
+          currPath: d.fullPath(t, folderName("parent")),
           files: map[string]*custom.DriveItem{},
-          folderID: idx(folder, "parent"),
+          folderID: folderID("parent"),
           isPackageOrChildOfPackage: false,
           loc: path.Elements{rootName},
         },
-        id(folder): {
-          currPath: fullPathPath(t, namex(folder, "parent"), name(folder)),
+        folderID(): {
+          currPath: d.fullPath(t, folderName("parent"), folderName()),
           files: map[string]*custom.DriveItem{
-            id(file): custom.ToCustomDriveItem(fileAt("parent")),
+            fileID(): custom.ToCustomDriveItem(d.fileAt("parent")),
           },
-          folderID: id(folder),
+          folderID: folderID(),
           isPackageOrChildOfPackage: false,
-          loc: path.Elements{rootName, namex(folder, "parent")},
+          loc: path.Elements{rootName, folderName("parent")},
         },
       },
     },
     {
       name: "package in hierarchy",
-      tree: func(t *testing.T) *folderyMcFolderFace {
+      tree: func(t *testing.T, d *deltaDrive) *folderyMcFolderFace {
         ctx, flush := tester.NewContext(t)
         defer flush()

-        tree := treeWithRoot(t)
+        tree := treeWithRoot(t, d)
         err := tree.setFolder(ctx, rootID, id(pkg), name(pkg), true)
         require.NoError(t, err, clues.ToCore(err))

-        err = tree.setFolder(ctx, id(pkg), id(folder), name(folder), false)
+        err = tree.setFolder(ctx, id(pkg), folderID(), folderName(), false)
         require.NoError(t, err, clues.ToCore(err))

         return tree
@@ -1077,23 +1089,23 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables()
       expectErr: require.NoError,
       expect: map[string]collectable{
         rootID: {
-          currPath: fullPathPath(t),
+          currPath: d.fullPath(t),
           files: map[string]*custom.DriveItem{},
           folderID: rootID,
           isPackageOrChildOfPackage: false,
           loc: path.Elements{},
         },
         id(pkg): {
-          currPath: fullPathPath(t, name(pkg)),
+          currPath: d.fullPath(t, name(pkg)),
           files: map[string]*custom.DriveItem{},
           folderID: id(pkg),
           isPackageOrChildOfPackage: true,
           loc: path.Elements{rootName},
         },
-        id(folder): {
-          currPath: fullPathPath(t, name(pkg), name(folder)),
+        folderID(): {
+          currPath: d.fullPath(t, name(pkg), folderName()),
           files: map[string]*custom.DriveItem{},
-          folderID: id(folder),
+          folderID: folderID(),
           isPackageOrChildOfPackage: true,
           loc: path.Elements{rootName, name(pkg)},
         },
@@ -1104,36 +1116,36 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables()
       tree: treeWithFileInFolder,
       expectErr: require.NoError,
       prevPaths: map[string]string{
-        rootID: fullPath(),
-        idx(folder, "parent"): fullPath(namex(folder, "parent-prev")),
-        id(folder): fullPath(namex(folder, "parent-prev"), name(folder)),
+        rootID: d.strPath(),
+        folderID("parent"): d.strPath(folderName("parent-prev")),
+        folderID(): d.strPath(folderName("parent-prev"), folderName()),
       },
       expect: map[string]collectable{
         rootID: {
-          currPath: fullPathPath(t),
+          currPath: d.fullPath(t),
           files: map[string]*custom.DriveItem{},
           folderID: rootID,
           isPackageOrChildOfPackage: false,
           loc: path.Elements{},
-          prevPath: fullPathPath(t),
+          prevPath: d.fullPath(t),
         },
-        idx(folder, "parent"): {
-          currPath: fullPathPath(t, namex(folder, "parent")),
+        folderID("parent"): {
+          currPath: d.fullPath(t, folderName("parent")),
           files: map[string]*custom.DriveItem{},
-          folderID: idx(folder, "parent"),
+          folderID: folderID("parent"),
           isPackageOrChildOfPackage: false,
           loc: path.Elements{rootName},
-          prevPath: fullPathPath(t, namex(folder, "parent-prev")),
+          prevPath: d.fullPath(t, folderName("parent-prev")),
         },
-        id(folder): {
-          currPath: fullPathPath(t, namex(folder, "parent"), name(folder)),
-          folderID: id(folder),
+        folderID(): {
+          currPath: d.fullPath(t, folderName("parent"), folderName()),
+          folderID: folderID(),
           isPackageOrChildOfPackage: false,
           files: map[string]*custom.DriveItem{
-            id(file): custom.ToCustomDriveItem(fileAt("parent")),
+            fileID(): custom.ToCustomDriveItem(d.fileAt("parent")),
           },
-          loc: path.Elements{rootName, namex(folder, "parent")},
-          prevPath: fullPathPath(t, namex(folder, "parent-prev"), name(folder)),
+          loc: path.Elements{rootName, folderName("parent")},
+          prevPath: d.fullPath(t, folderName("parent-prev"), folderName()),
         },
       },
     },
@@ -1141,24 +1153,24 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables()
       name: "root and tombstones",
       tree: treeWithFileInTombstone,
       prevPaths: map[string]string{
-        rootID: fullPath(),
-        id(folder): fullPath(name(folder)),
+        rootID: d.strPath(),
+        folderID(): d.strPath(folderName()),
       },
       expectErr: require.NoError,
       expect: map[string]collectable{
         rootID: {
-          currPath: fullPathPath(t),
+          currPath: d.fullPath(t),
           files: map[string]*custom.DriveItem{},
           folderID: rootID,
           isPackageOrChildOfPackage: false,
           loc: path.Elements{},
-          prevPath: fullPathPath(t),
+          prevPath: d.fullPath(t),
         },
-        id(folder): {
+        folderID(): {
           files: map[string]*custom.DriveItem{},
-          folderID: id(folder),
+          folderID: folderID(),
           isPackageOrChildOfPackage: false,
-          prevPath: fullPathPath(t, name(folder)),
+          prevPath: d.fullPath(t, folderName()),
         },
       },
     },
@@ -1166,7 +1178,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables()
   for _, test := range table {
     suite.Run(test.name, func() {
       t := suite.T()
-      tree := test.tree(t)
+      tree := test.tree(t, d)

       if len(test.prevPaths) > 0 {
         for id, ps := range test.prevPaths {
File diff suppressed because it is too large
@@ -12,14 +12,11 @@ import (
   "golang.org/x/exp/maps"

   "github.com/alcionai/corso/src/internal/common/prefixmatcher"
-  "github.com/alcionai/corso/src/internal/common/ptr"
   "github.com/alcionai/corso/src/internal/data"
-  "github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
   "github.com/alcionai/corso/src/internal/tester"
   "github.com/alcionai/corso/src/pkg/control"
   "github.com/alcionai/corso/src/pkg/fault"
   "github.com/alcionai/corso/src/pkg/path"
-  apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
 )

 type LimiterUnitSuite struct {
@@ -33,23 +30,14 @@ func TestLimiterUnitSuite(t *testing.T) {
 type backupLimitTest struct {
   name string
   limits control.PreviewItemLimits
-  drives []models.Driveable
-  enumerator mock.EnumerateDriveItemsDelta
+  enumerator enumerateDriveItemsDelta
   // Collection name -> set of item IDs. We can't check item data because
   // that's not mocked out. Metadata is checked separately.
   expectedItemIDsInCollection map[string][]string
 }

-func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) {
-  drive1 := models.NewDrive()
-  drive1.SetId(ptr.To(id(drive)))
-  drive1.SetName(ptr.To(name(drive)))
-
-  drive2 := models.NewDrive()
-  drive2.SetId(ptr.To(idx(drive, 2)))
-  drive2.SetName(ptr.To(namex(drive, 2)))
-
-  tbl := []backupLimitTest{
+func backupLimitTable(d1, d2 *deltaDrive) []backupLimitTest {
+  return []backupLimitTest{
     {
       name: "OneDrive SinglePage ExcludeItemsOverMaxSize",
       limits: control.PreviewItemLimits{
@@ -60,15 +48,14 @@
         MaxBytes: 5,
         MaxPages: 999,
       },
-      drives: []models.Driveable{drive1},
-      enumerator: mock.DriveEnumerator(
-        mock.Drive(id(drive)).With(
-          mock.Delta(id(delta), nil).With(aPage(
-            filexWSizeAtRoot(1, 7),
-            filexWSizeAtRoot(2, 1),
-            filexWSizeAtRoot(3, 1))))),
+      enumerator: driveEnumerator(
+        d1.newEnumer().with(
+          delta(id(deltaURL), nil).with(aPage(
+            d1.fileWSizeAtRoot(7, "f1"),
+            d1.fileWSizeAtRoot(1, "f2"),
+            d1.fileWSizeAtRoot(1, "f3"))))),
       expectedItemIDsInCollection: map[string][]string{
-        fullPath(): {idx(file, 2), idx(file, 3)},
+        d1.strPath(): {fileID("f2"), fileID("f3")},
       },
     },
     {
@ -81,15 +68,14 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
|
|||||||
MaxBytes: 3,
|
MaxBytes: 3,
|
||||||
MaxPages: 999,
|
MaxPages: 999,
|
||||||
},
|
},
|
||||||
drives: []models.Driveable{drive1},
|
enumerator: driveEnumerator(
|
||||||
enumerator: mock.DriveEnumerator(
|
d1.newEnumer().with(
|
||||||
mock.Drive(id(drive)).With(
|
delta(id(deltaURL), nil).with(aPage(
|
||||||
mock.Delta(id(delta), nil).With(aPage(
|
d1.fileWSizeAtRoot(1, "f1"),
|
||||||
filexWSizeAtRoot(1, 1),
|
d1.fileWSizeAtRoot(2, "f2"),
|
||||||
filexWSizeAtRoot(2, 2),
|
d1.fileWSizeAtRoot(1, "f3"))))),
|
||||||
filexWSizeAtRoot(3, 1))))),
|
|
||||||
expectedItemIDsInCollection: map[string][]string{
|
expectedItemIDsInCollection: map[string][]string{
|
||||||
fullPath(): {idx(file, 1), idx(file, 2)},
|
d1.strPath(): {fileID("f1"), fileID("f2")},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@ -102,17 +88,16 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
|
|||||||
MaxBytes: 3,
|
MaxBytes: 3,
|
||||||
MaxPages: 999,
|
MaxPages: 999,
|
||||||
},
|
},
|
||||||
drives: []models.Driveable{drive1},
|
enumerator: driveEnumerator(
|
||||||
enumerator: mock.DriveEnumerator(
|
d1.newEnumer().with(
|
||||||
mock.Drive(id(drive)).With(
|
delta(id(deltaURL), nil).with(aPage(
|
||||||
mock.Delta(id(delta), nil).With(aPage(
|
d1.fileWSizeAtRoot(1, "f1"),
|
||||||
filexWSizeAtRoot(1, 1),
|
d1.folderAtRoot(),
|
||||||
folderxAtRoot(1),
|
d1.fileWSizeAt(2, folder, "f2"),
|
||||||
filexWSizeAt(2, 1, 2),
|
d1.fileWSizeAt(1, folder, "f3"))))),
|
||||||
filexWSizeAt(3, 1, 1))))),
|
|
||||||
expectedItemIDsInCollection: map[string][]string{
|
expectedItemIDsInCollection: map[string][]string{
|
||||||
fullPath(): {idx(file, 1)},
|
d1.strPath(): {fileID("f1")},
|
||||||
fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 2)},
|
d1.strPath(folderName()): {folderID(), fileID("f2")},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@ -125,18 +110,17 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
|
|||||||
MaxBytes: 999999,
|
MaxBytes: 999999,
|
||||||
MaxPages: 999,
|
MaxPages: 999,
|
||||||
},
|
},
|
||||||
drives: []models.Driveable{drive1},
|
enumerator: driveEnumerator(
|
||||||
enumerator: mock.DriveEnumerator(
|
d1.newEnumer().with(
|
||||||
mock.Drive(id(drive)).With(
|
delta(id(deltaURL), nil).with(aPage(
|
||||||
mock.Delta(id(delta), nil).With(aPage(
|
d1.fileAtRoot("f1"),
|
||||||
filexAtRoot(1),
|
d1.fileAtRoot("f2"),
|
||||||
filexAtRoot(2),
|
d1.fileAtRoot("f3"),
|
||||||
filexAtRoot(3),
|
d1.fileAtRoot("f4"),
|
||||||
filexAtRoot(4),
|
d1.fileAtRoot("f5"),
|
||||||
filexAtRoot(5),
|
d1.fileAtRoot("f6"))))),
|
||||||
filexAtRoot(6))))),
|
|
||||||
expectedItemIDsInCollection: map[string][]string{
|
expectedItemIDsInCollection: map[string][]string{
|
||||||
fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)},
|
d1.strPath(): {fileID("f1"), fileID("f2"), fileID("f3")},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -149,24 +133,23 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
 				MaxBytes: 999999,
 				MaxPages: 999,
 			},
-			drives: []models.Driveable{drive1},
-			enumerator: mock.DriveEnumerator(
-				mock.Drive(id(drive)).With(
-					mock.Delta(id(delta), nil).With(
+			enumerator: driveEnumerator(
+				d1.newEnumer().with(
+					delta(id(deltaURL), nil).with(
 						aPage(
-							filexAtRoot(1),
-							filexAtRoot(2)),
+							d1.fileAtRoot("f1"),
+							d1.fileAtRoot("f2")),
 						aPage(
 							// Repeated items shouldn't count against the limit.
-							filexAtRoot(1),
-							folderxAtRoot(1),
-							filexAt(3, 1),
-							filexAt(4, 1),
-							filexAt(5, 1),
-							filexAt(6, 1))))),
+							d1.fileAtRoot("f1"),
+							d1.folderAtRoot(),
+							d1.fileAt(folder, "f3"),
+							d1.fileAt(folder, "f4"),
+							d1.fileAt(folder, "f5"),
+							d1.fileAt(folder, "f6"))))),
 			expectedItemIDsInCollection: map[string][]string{
-				fullPath(): {idx(file, 1), idx(file, 2)},
-				fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 3)},
+				d1.strPath(): {fileID("f1"), fileID("f2")},
+				d1.strPath(folderName()): {folderID(), fileID("f3")},
 			},
 		},
 		{
@@ -179,21 +162,20 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
 				MaxBytes: 999999,
 				MaxPages: 1,
 			},
-			drives: []models.Driveable{drive1},
-			enumerator: mock.DriveEnumerator(
-				mock.Drive(id(drive)).With(
-					mock.Delta(id(delta), nil).With(
+			enumerator: driveEnumerator(
+				d1.newEnumer().with(
+					delta(id(deltaURL), nil).with(
 						aPage(
-							filexAtRoot(1),
-							filexAtRoot(2)),
+							d1.fileAtRoot("f1"),
+							d1.fileAtRoot("f2")),
 						aPage(
-							folderxAtRoot(1),
-							filexAt(3, 1),
-							filexAt(4, 1),
-							filexAt(5, 1),
-							filexAt(6, 1))))),
+							d1.folderAtRoot(),
+							d1.fileAt(folder, "f3"),
+							d1.fileAt(folder, "f4"),
+							d1.fileAt(folder, "f5"),
+							d1.fileAt(folder, "f6"))))),
 			expectedItemIDsInCollection: map[string][]string{
-				fullPath(): {idx(file, 1), idx(file, 2)},
+				d1.strPath(): {fileID("f1"), fileID("f2")},
 			},
 		},
 		{
@@ -206,23 +188,22 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
 				MaxBytes: 999999,
 				MaxPages: 999,
 			},
-			drives: []models.Driveable{drive1},
-			enumerator: mock.DriveEnumerator(
-				mock.Drive(id(drive)).With(
-					mock.Delta(id(delta), nil).With(
+			enumerator: driveEnumerator(
+				d1.newEnumer().with(
+					delta(id(deltaURL), nil).with(
 						aPage(
-							filexAtRoot(1),
-							filexAtRoot(2),
-							filexAtRoot(3)),
+							d1.fileAtRoot("f1"),
+							d1.fileAtRoot("f2"),
+							d1.fileAtRoot("f3")),
 						aPage(
-							folderxAtRoot(1),
-							filexAt(4, 1),
-							filexAt(5, 1))))),
+							d1.folderAtRoot(),
+							d1.fileAt(folder, "f4"),
+							d1.fileAt(folder, "f5"))))),
 			expectedItemIDsInCollection: map[string][]string{
 				// Root has an additional item. It's hard to fix that in the code
 				// though.
-				fullPath(): {idx(file, 1), idx(file, 2)},
-				fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4)},
+				d1.strPath(): {fileID("f1"), fileID("f2")},
+				d1.strPath(folderName()): {folderID(), fileID("f4")},
 			},
 		},
 		{
@@ -235,23 +216,22 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
 				MaxBytes: 999999,
 				MaxPages: 999,
 			},
-			drives: []models.Driveable{drive1},
-			enumerator: mock.DriveEnumerator(
-				mock.Drive(id(drive)).With(
-					mock.Delta(id(delta), nil).With(
+			enumerator: driveEnumerator(
+				d1.newEnumer().with(
+					delta(id(deltaURL), nil).with(
 						aPage(
-							folderAtRoot(),
-							filexAt(1, folder),
-							filexAt(2, folder)),
+							d1.folderAtRoot(),
+							d1.fileAt(folder, "f1"),
+							d1.fileAt(folder, "f2")),
 						aPage(
-							folderAtRoot(),
+							d1.folderAtRoot(),
 							// Updated item that shouldn't count against the limit a second time.
-							filexAt(2, folder),
-							filexAt(3, folder),
-							filexAt(4, folder))))),
+							d1.fileAt(folder, "f2"),
+							d1.fileAt(folder, "f3"),
+							d1.fileAt(folder, "f4"))))),
 			expectedItemIDsInCollection: map[string][]string{
-				fullPath(): {},
-				fullPath(name(folder)): {id(folder), idx(file, 1), idx(file, 2), idx(file, 3)},
+				d1.strPath(): {},
+				d1.strPath(folderName()): {folderID(), fileID("f1"), fileID("f2"), fileID("f3")},
 			},
 		},
 		{
@@ -264,24 +244,23 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
 				MaxBytes: 999999,
 				MaxPages: 999,
 			},
-			drives: []models.Driveable{drive1},
-			enumerator: mock.DriveEnumerator(
-				mock.Drive(id(drive)).With(
-					mock.Delta(id(delta), nil).With(
+			enumerator: driveEnumerator(
+				d1.newEnumer().with(
+					delta(id(deltaURL), nil).with(
 						aPage(
-							filexAtRoot(1),
-							filexAtRoot(2),
+							d1.fileAtRoot("f1"),
+							d1.fileAtRoot("f2"),
 							// Put folder 0 at limit.
-							folderAtRoot(),
-							filexAt(3, folder),
-							filexAt(4, folder)),
+							d1.folderAtRoot(),
+							d1.fileAt(folder, "f3"),
+							d1.fileAt(folder, "f4")),
 						aPage(
-							folderAtRoot(),
+							d1.folderAtRoot(),
 							// Try to move item from root to folder 0 which is already at the limit.
-							filexAt(1, folder))))),
+							d1.fileAt(folder, "f1"))))),
 			expectedItemIDsInCollection: map[string][]string{
-				fullPath(): {idx(file, 1), idx(file, 2)},
-				fullPath(name(folder)): {id(folder), idx(file, 3), idx(file, 4)},
+				d1.strPath(): {fileID("f1"), fileID("f2")},
+				d1.strPath(folderName()): {folderID(), fileID("f3"), fileID("f4")},
 			},
 		},
 		{
@@ -294,23 +273,22 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
 				MaxBytes: 999999,
 				MaxPages: 999,
 			},
-			drives: []models.Driveable{drive1},
-			enumerator: mock.DriveEnumerator(
-				mock.Drive(id(drive)).With(
-					mock.Delta(id(delta), nil).With(
+			enumerator: driveEnumerator(
+				d1.newEnumer().with(
+					delta(id(deltaURL), nil).with(
 						aPage(
-							filexAtRoot(1),
-							filexAtRoot(2),
-							filexAtRoot(3)),
+							d1.fileAtRoot("f1"),
+							d1.fileAtRoot("f2"),
+							d1.fileAtRoot("f3")),
 						aPage(
-							folderxAtRoot(1),
-							filexAt(4, 1)),
+							d1.folderAtRoot(),
+							d1.fileAt(folder, "f4")),
 						aPage(
-							folderxAtRoot(1),
-							filexAt(5, 1))))),
+							d1.folderAtRoot(),
+							d1.fileAt(folder, "f5"))))),
 			expectedItemIDsInCollection: map[string][]string{
-				fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)},
-				fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)},
+				d1.strPath(): {fileID("f1"), fileID("f2"), fileID("f3")},
+				d1.strPath(folderName()): {folderID(), fileID("f4"), fileID("f5")},
 			},
 		},
 		{
@@ -323,26 +301,25 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
 				MaxBytes: 999999,
 				MaxPages: 999,
 			},
-			drives: []models.Driveable{drive1},
-			enumerator: mock.DriveEnumerator(
-				mock.Drive(id(drive)).With(
-					mock.Delta(id(delta), nil).With(
+			enumerator: driveEnumerator(
+				d1.newEnumer().with(
+					delta(id(deltaURL), nil).with(
 						aPage(
-							filexAtRoot(1),
-							filexAtRoot(2),
-							filexAtRoot(3)),
+							d1.fileAtRoot("f1"),
+							d1.fileAtRoot("f2"),
+							d1.fileAtRoot("f3")),
 						aPage(
-							folderxAtRoot(1),
-							filexAt(4, 1),
-							filexAt(5, 1),
+							d1.folderAtRoot(),
+							d1.fileAt(folder, "f4"),
+							d1.fileAt(folder, "f5"),
 							// This container shouldn't be returned.
-							folderxAtRoot(2),
-							filexAt(7, 2),
-							filexAt(8, 2),
-							filexAt(9, 2))))),
+							d1.folderAtRoot(2),
+							d1.fileAt(2, "f7"),
+							d1.fileAt(2, "f8"),
+							d1.fileAt(2, "f9"))))),
 			expectedItemIDsInCollection: map[string][]string{
-				fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)},
-				fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)},
+				d1.strPath(): {fileID("f1"), fileID("f2"), fileID("f3")},
+				d1.strPath(folderName()): {folderID(), fileID("f4"), fileID("f5")},
 			},
 		},
 		{
@@ -355,27 +332,26 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
 				MaxBytes: 999999,
 				MaxPages: 999,
 			},
-			drives: []models.Driveable{drive1},
-			enumerator: mock.DriveEnumerator(
-				mock.Drive(id(drive)).With(
-					mock.Delta(id(delta), nil).With(
+			enumerator: driveEnumerator(
+				d1.newEnumer().with(
+					delta(id(deltaURL), nil).with(
 						aPage(
-							filexAtRoot(1),
-							filexAtRoot(2),
-							filexAtRoot(3)),
+							d1.fileAtRoot("f1"),
+							d1.fileAtRoot("f2"),
+							d1.fileAtRoot("f3")),
 						aPage(
-							folderxAtRoot(1),
-							filexAt(4, 1),
-							filexAt(5, 1)),
+							d1.folderAtRoot(),
+							d1.fileAt(folder, "f4"),
+							d1.fileAt(folder, "f5")),
 						aPage(
 							// This container shouldn't be returned.
-							folderxAtRoot(2),
-							filexAt(7, 2),
-							filexAt(8, 2),
-							filexAt(9, 2))))),
+							d1.folderAtRoot(2),
+							d1.fileAt(2, "f7"),
+							d1.fileAt(2, "f8"),
+							d1.fileAt(2, "f9"))))),
 			expectedItemIDsInCollection: map[string][]string{
-				fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)},
-				fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)},
+				d1.strPath(): {fileID("f1"), fileID("f2"), fileID("f3")},
+				d1.strPath(folderName()): {folderID(), fileID("f4"), fileID("f5")},
 			},
 		},
 		{
@@ -388,25 +364,24 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
 				MaxBytes: 999999,
 				MaxPages: 999,
 			},
-			drives: []models.Driveable{drive1, drive2},
-			enumerator: mock.DriveEnumerator(
-				mock.Drive(id(drive)).With(
-					mock.Delta(id(delta), nil).With(aPage(
-						filexAtRoot(1),
-						filexAtRoot(2),
-						filexAtRoot(3),
-						filexAtRoot(4),
-						filexAtRoot(5)))),
-				mock.Drive(idx(drive, 2)).With(
-					mock.Delta(id(delta), nil).With(aPage(
-						filexAtRoot(1),
-						filexAtRoot(2),
-						filexAtRoot(3),
-						filexAtRoot(4),
-						filexAtRoot(5))))),
+			enumerator: driveEnumerator(
+				d1.newEnumer().with(
+					delta(id(deltaURL), nil).with(aPage(
+						d1.fileAtRoot("f1"),
+						d1.fileAtRoot("f2"),
+						d1.fileAtRoot("f3"),
+						d1.fileAtRoot("f4"),
+						d1.fileAtRoot("f5")))),
+				d2.newEnumer().with(
+					delta(id(deltaURL), nil).with(aPage(
+						d2.fileAtRoot("f1"),
+						d2.fileAtRoot("f2"),
+						d2.fileAtRoot("f3"),
+						d2.fileAtRoot("f4"),
+						d2.fileAtRoot("f5"))))),
 			expectedItemIDsInCollection: map[string][]string{
-				fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)},
-				driveFullPath(2): {idx(file, 1), idx(file, 2), idx(file, 3)},
+				d1.strPath(): {fileID("f1"), fileID("f2"), fileID("f3")},
+				d2.strPath(): {fileID("f1"), fileID("f2"), fileID("f3")},
 			},
 		},
 		{
@@ -418,28 +393,25 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
 				MaxBytes: 1,
 				MaxPages: 1,
 			},
-			drives: []models.Driveable{drive1},
-			enumerator: mock.DriveEnumerator(
-				mock.Drive(id(drive)).With(
-					mock.Delta(id(delta), nil).With(
+			enumerator: driveEnumerator(
+				d1.newEnumer().with(
+					delta(id(deltaURL), nil).with(
 						aPage(
-							filexAtRoot(1),
-							filexAtRoot(2),
-							filexAtRoot(3)),
+							d1.fileAtRoot("f1"),
+							d1.fileAtRoot("f2"),
+							d1.fileAtRoot("f3")),
 						aPage(
-							folderxAtRoot(1),
-							filexAt(4, 1)),
+							d1.folderAtRoot(),
+							d1.fileAt(folder, "f4")),
 						aPage(
-							folderxAtRoot(1),
-							filexAt(5, 1))))),
+							d1.folderAtRoot(),
+							d1.fileAt(folder, "f5"))))),
 			expectedItemIDsInCollection: map[string][]string{
-				fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)},
-				fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)},
+				d1.strPath(): {fileID("f1"), fileID("f2"), fileID("f3")},
+				d1.strPath(folderName()): {folderID(), fileID("f4"), fileID("f5")},
 			},
 		},
 	}
-
-	return drive1, drive2, tbl
 }

 // TestGet_PreviewLimits checks that the limits set for preview backups in
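Every entry above builds its input the same way. A minimal sketch of how the refactored helpers compose, using only names that appear in this diff; the inline comments are editorial assumptions about intent:

	enumerator: driveEnumerator(
		d1.newEnumer().with(               // enumerator bound to deltaDrive d1
			delta(id(deltaURL), nil).with( // one delta query, no error
				aPage(                     // one page of drive items
					d1.fileAtRoot("f1"),        // file f1 in the drive root
					d1.folderAtRoot(),          // the default folder
					d1.fileAt(folder, "f2"))))) // file f2 inside that folder

	// Expected collections are then keyed by drive-relative path:
	//   d1.strPath()             -> items in the drive root
	//   d1.strPath(folderName()) -> items in the default folder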
@@ -447,16 +419,7 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest)
 // checks that don't examine metadata, collection states, etc. They really just
 // check the expected items appear.
 func (suite *LimiterUnitSuite) TestGet_PreviewLimits_noTree() {
-	_, _, tbl := backupLimitTable()
-
-	for _, test := range tbl {
-		suite.Run(test.name, func() {
-			runGetPreviewLimits(
-				suite.T(),
-				test,
-				control.DefaultOptions())
-		})
-	}
+	iterGetPreviewLimitsTests(suite, control.DefaultOptions())
 }

 // TestGet_PreviewLimits checks that the limits set for preview backups in
@@ -469,13 +432,21 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits_tree() {
 	opts := control.DefaultOptions()
 	opts.ToggleFeatures.UseDeltaTree = true

-	_, _, tbl := backupLimitTable()
+	iterGetPreviewLimitsTests(suite, opts)
+}

-	for _, test := range tbl {
+func iterGetPreviewLimitsTests(
+	suite *LimiterUnitSuite,
+	opts control.Options,
+) {
+	d1, d2 := drive(), drive(2)
+
+	for _, test := range backupLimitTable(d1, d2) {
 		suite.Run(test.name, func() {
 			runGetPreviewLimits(
 				suite.T(),
 				test,
+				d1, d2,
 				opts)
 		})
 	}
@@ -484,6 +455,7 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits_tree() {
 func runGetPreviewLimits(
 	t *testing.T,
 	test backupLimitTest,
+	drive1, drive2 *deltaDrive,
 	opts control.Options,
 ) {
 	ctx, flush := tester.NewContext(t)
@@ -500,12 +472,7 @@ func runGetPreviewLimits(
 	opts.PreviewLimits = test.limits

 	var (
-		mockDrivePager = &apiMock.Pager[models.Driveable]{
-			ToReturn: []apiMock.PagerResult[models.Driveable]{
-				{Values: test.drives},
-			},
-		}
-		mbh     = mock.DefaultDriveBHWith(user, mockDrivePager, test.enumerator)
+		mbh     = defaultDriveBHWith(user, test.enumerator)
 		c       = collWithMBHAndOpts(mbh, opts)
 		errs    = fault.New(true)
 		delList = prefixmatcher.NewStringSetBuilder()
@@ -558,7 +525,7 @@ func runGetPreviewLimits(
 			t,
 			test.expectedItemIDsInCollection[folderPath],
 			itemIDs,
-			"item IDs in collection with path %q",
+			"item IDs in collection with path:\n\t%q",
 			folderPath)
 	}

@@ -741,20 +708,17 @@ func runGetPreviewLimitsDefaults(
 		false)
 	require.NoError(t, err, "making metadata path", clues.ToCore(err))

-	drv := models.NewDrive()
-	drv.SetId(ptr.To(id(drive)))
-	drv.SetName(ptr.To(name(drive)))
-
-	pages := make([]mock.NextPage, 0, test.numContainers)
+	d := drive()
+	pages := make([]nextPage, 0, test.numContainers)

 	for containerIdx := 0; containerIdx < test.numContainers; containerIdx++ {
-		page := mock.NextPage{
+		page := nextPage{
 			Items: []models.DriveItemable{
-				driveRootItem(),
+				driveRootFolder(),
 				driveItem(
-					idx(folder, containerIdx),
-					namex(folder, containerIdx),
-					parentDir(),
+					folderID(containerIdx),
+					folderName(containerIdx),
+					d.dir(),
 					rootID,
 					isFolder),
 			},
@@ -763,11 +727,11 @@ func runGetPreviewLimitsDefaults(
 		for itemIdx := 0; itemIdx < test.numItemsPerContainer; itemIdx++ {
 			itemSuffix := fmt.Sprintf("%d-%d", containerIdx, itemIdx)

-			page.Items = append(page.Items, driveItemWithSize(
-				idx(file, itemSuffix),
-				namex(file, itemSuffix),
-				parentDir(namex(folder, containerIdx)),
-				idx(folder, containerIdx),
+			page.Items = append(page.Items, driveItemWSize(
+				fileID(itemSuffix),
+				fileName(itemSuffix),
+				d.dir(folderName(containerIdx)),
+				folderID(containerIdx),
 				test.itemSize,
 				isFile))
 		}
@@ -778,15 +742,10 @@ func runGetPreviewLimitsDefaults(
 	opts.PreviewLimits = test.limits

 	var (
-		mockDrivePager = &apiMock.Pager[models.Driveable]{
-			ToReturn: []apiMock.PagerResult[models.Driveable]{
-				{Values: []models.Driveable{drv}},
-			},
-		}
-		mockEnumerator = mock.DriveEnumerator(
-			mock.Drive(id(drive)).With(
-				mock.Delta(id(delta), nil).With(pages...)))
-		mbh     = mock.DefaultDriveBHWith(user, mockDrivePager, mockEnumerator)
+		mockEnumerator = driveEnumerator(
+			d.newEnumer().with(
+				delta(id(deltaURL), nil).with(pages...)))
+		mbh     = defaultDriveBHWith(user, mockEnumerator)
 		c       = collWithMBHAndOpts(mbh, opts)
 		errs    = fault.New(true)
 		delList = prefixmatcher.NewStringSetBuilder()
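Unlike the table-driven cases, the defaults test builds raw pages in a loop. A hedged sketch of what one iteration produces for containerIdx 0 with a single item, assuming nextPage carries an Items slice the way the old mock.NextPage did (all helper names come from the diff):

	page := nextPage{
		Items: []models.DriveItemable{
			driveRootFolder(),      // each page begins with the drive root
			driveItem(              // container folder 0 under the root
				folderID(0),
				folderName(0),
				d.dir(),
				rootID,
				isFolder),
			driveItemWSize(         // file "0-0" inside folder 0
				fileID("0-0"),
				fileName("0-0"),
				d.dir(folderName(0)),
				folderID(0),
				test.itemSize,
				isFile),
		},
	}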
@@ -15,7 +15,7 @@ import (
 	"github.com/alcionai/corso/src/internal/common/ptr"
 	dataMock "github.com/alcionai/corso/src/internal/data/mock"
 	odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
-	odMock "github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
+	"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
 	odStub "github.com/alcionai/corso/src/internal/m365/service/onedrive/stub"
 	"github.com/alcionai/corso/src/internal/operations/inject"
 	"github.com/alcionai/corso/src/internal/tester"
@@ -53,7 +53,7 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
 		onCollision   control.CollisionPolicy
 		deleteErr     error
 		expectSkipped assert.BoolAssertionFunc
-		expectMock    func(*testing.T, *odMock.RestoreHandler)
+		expectMock    func(*testing.T, *mockRestoreHandler)
 		expectCounts  counts
 	}{
 		{
@@ -61,7 +61,7 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
 			collisionKeys: map[string]api.DriveItemIDType{},
 			onCollision:   control.Copy,
 			expectSkipped: assert.False,
-			expectMock: func(t *testing.T, rh *odMock.RestoreHandler) {
+			expectMock: func(t *testing.T, rh *mockRestoreHandler) {
 				assert.True(t, rh.CalledPostItem, "new item posted")
 				assert.False(t, rh.CalledDeleteItem, "new item deleted")
 			},
@@ -72,7 +72,7 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
 			collisionKeys: map[string]api.DriveItemIDType{},
 			onCollision:   control.Replace,
 			expectSkipped: assert.False,
-			expectMock: func(t *testing.T, rh *odMock.RestoreHandler) {
+			expectMock: func(t *testing.T, rh *mockRestoreHandler) {
 				assert.True(t, rh.CalledPostItem, "new item posted")
 				assert.False(t, rh.CalledDeleteItem, "new item deleted")
 			},
@@ -83,7 +83,7 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
 			collisionKeys: map[string]api.DriveItemIDType{},
 			onCollision:   control.Skip,
 			expectSkipped: assert.False,
-			expectMock: func(t *testing.T, rh *odMock.RestoreHandler) {
+			expectMock: func(t *testing.T, rh *mockRestoreHandler) {
 				assert.True(t, rh.CalledPostItem, "new item posted")
 				assert.False(t, rh.CalledDeleteItem, "new item deleted")
 			},
@@ -92,11 +92,11 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
 		{
 			name: "collision, copy",
 			collisionKeys: map[string]api.DriveItemIDType{
-				odMock.DriveItemFileName: {ItemID: mndiID},
+				mock.DriveItemFileName: {ItemID: mndiID},
 			},
 			onCollision:   control.Copy,
 			expectSkipped: assert.False,
-			expectMock: func(t *testing.T, rh *odMock.RestoreHandler) {
+			expectMock: func(t *testing.T, rh *mockRestoreHandler) {
 				assert.True(t, rh.CalledPostItem, "new item posted")
 				assert.False(t, rh.CalledDeleteItem, "new item deleted")
 			},
@@ -105,11 +105,11 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
 		{
 			name: "collision, replace",
 			collisionKeys: map[string]api.DriveItemIDType{
-				odMock.DriveItemFileName: {ItemID: mndiID},
+				mock.DriveItemFileName: {ItemID: mndiID},
 			},
 			onCollision:   control.Replace,
 			expectSkipped: assert.False,
-			expectMock: func(t *testing.T, rh *odMock.RestoreHandler) {
+			expectMock: func(t *testing.T, rh *mockRestoreHandler) {
 				assert.True(t, rh.CalledPostItem, "new item posted")
 				assert.True(t, rh.CalledDeleteItem, "new item deleted")
 				assert.Equal(t, mndiID, rh.CalledDeleteItemOn, "deleted the correct item")
@@ -119,12 +119,12 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
 		{
 			name: "collision, replace - err already deleted",
 			collisionKeys: map[string]api.DriveItemIDType{
-				odMock.DriveItemFileName: {ItemID: "smarf"},
+				mock.DriveItemFileName: {ItemID: "smarf"},
 			},
 			onCollision:   control.Replace,
 			deleteErr:     graph.ErrDeletedInFlight,
 			expectSkipped: assert.False,
-			expectMock: func(t *testing.T, rh *odMock.RestoreHandler) {
+			expectMock: func(t *testing.T, rh *mockRestoreHandler) {
 				assert.True(t, rh.CalledPostItem, "new item posted")
 				assert.True(t, rh.CalledDeleteItem, "new item deleted")
 			},
@@ -133,11 +133,11 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
 		{
 			name: "collision, skip",
 			collisionKeys: map[string]api.DriveItemIDType{
-				odMock.DriveItemFileName: {ItemID: mndiID},
+				mock.DriveItemFileName: {ItemID: mndiID},
 			},
 			onCollision:   control.Skip,
 			expectSkipped: assert.True,
-			expectMock: func(t *testing.T, rh *odMock.RestoreHandler) {
+			expectMock: func(t *testing.T, rh *mockRestoreHandler) {
 				assert.False(t, rh.CalledPostItem, "new item posted")
 				assert.False(t, rh.CalledDeleteItem, "new item deleted")
 			},
@@ -146,14 +146,14 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
 		{
 			name: "file-folder collision, copy",
 			collisionKeys: map[string]api.DriveItemIDType{
-				odMock.DriveItemFileName: {
+				mock.DriveItemFileName: {
 					ItemID:   mndiID,
 					IsFolder: true,
 				},
 			},
 			onCollision:   control.Copy,
 			expectSkipped: assert.False,
-			expectMock: func(t *testing.T, rh *odMock.RestoreHandler) {
+			expectMock: func(t *testing.T, rh *mockRestoreHandler) {
 				assert.True(t, rh.CalledPostItem, "new item posted")
 				assert.False(t, rh.CalledDeleteItem, "new item deleted")
 			},
@@ -162,14 +162,14 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
 		{
 			name: "file-folder collision, replace",
 			collisionKeys: map[string]api.DriveItemIDType{
-				odMock.DriveItemFileName: {
+				mock.DriveItemFileName: {
 					ItemID:   mndiID,
 					IsFolder: true,
 				},
 			},
 			onCollision:   control.Replace,
 			expectSkipped: assert.False,
-			expectMock: func(t *testing.T, rh *odMock.RestoreHandler) {
+			expectMock: func(t *testing.T, rh *mockRestoreHandler) {
 				assert.True(t, rh.CalledPostItem, "new item posted")
 				assert.False(t, rh.CalledDeleteItem, "new item deleted")
 			},
@@ -178,14 +178,14 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
 		{
 			name: "file-folder collision, skip",
 			collisionKeys: map[string]api.DriveItemIDType{
-				odMock.DriveItemFileName: {
+				mock.DriveItemFileName: {
 					ItemID:   mndiID,
 					IsFolder: true,
 				},
 			},
 			onCollision:   control.Skip,
 			expectSkipped: assert.True,
-			expectMock: func(t *testing.T, rh *odMock.RestoreHandler) {
+			expectMock: func(t *testing.T, rh *mockRestoreHandler) {
 				assert.False(t, rh.CalledPostItem, "new item posted")
 				assert.False(t, rh.CalledDeleteItem, "new item deleted")
 			},
@@ -204,7 +204,7 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {

 			var (
 				caches = NewRestoreCaches(nil)
-				rh     = &odMock.RestoreHandler{
+				rh     = &mockRestoreHandler{
 					PostItemResp:  models.NewDriveItem(),
 					DeleteItemErr: test.deleteErr,
 				}
@@ -232,9 +232,9 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
 				ctx,
 				rh,
 				rcc,
-				odMock.FetchItemByName{
+				mock.FetchItemByName{
 					Item: &dataMock.Item{
-						Reader:   odMock.FileRespReadCloser(odMock.DriveFileMetaData),
+						Reader:   mock.FileRespReadCloser(mock.DriveFileMetaData),
 						ItemInfo: odStub.DriveItemInfo(),
 					},
 				},
@@ -244,7 +244,7 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
 				caches,
 				&dataMock.Item{
 					ItemID:   uuid.NewString(),
-					Reader:   odMock.FileRespReadCloser(odMock.DriveFilePayloadData),
+					Reader:   mock.FileRespReadCloser(mock.DriveFilePayloadData),
 					ItemInfo: odStub.DriveItemInfo(),
 				},
 				nil,
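The collision table above now drives a package-local mockRestoreHandler instead of the shared odMock.RestoreHandler. A minimal sketch of the check pattern, assuming mockRestoreHandler keeps the same fields this diff exercises (PostItemResp, DeleteItemErr, CalledPostItem, CalledDeleteItem):

	rh := &mockRestoreHandler{
		PostItemResp:  models.NewDriveItem(),
		DeleteItemErr: test.deleteErr, // e.g. graph.ErrDeletedInFlight
	}
	// ...restore a single item under test.onCollision...
	assert.True(t, rh.CalledPostItem, "new item posted")
	assert.False(t, rh.CalledDeleteItem, "new item deleted")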
@@ -18,7 +18,6 @@ import (

 	"github.com/alcionai/corso/src/internal/common/dttm"
 	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/internal/tester/tconfig"
 	"github.com/alcionai/corso/src/pkg/control"
@@ -214,12 +213,15 @@ func TestURLCacheUnitSuite(t *testing.T) {
 }

 func (suite *URLCacheUnitSuite) TestGetItemProperties() {
-	deltaString := "delta"
-	driveID := "drive1"
+	d := drive()
+
+	aURL := func(n int) string {
+		return fmt.Sprintf("https://dummy%d.com", n)
+	}

 	table := []struct {
 		name              string
-		pages             []mock.NextPage
+		pages             []nextPage
 		pagerErr          error
 		expectedItemProps map[string]itemProps
 		expectErr         assert.ErrorAssertionFunc
@@ -227,14 +229,12 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 	}{
 		{
 			name: "single item in cache",
-			pages: []mock.NextPage{
-				{Items: []models.DriveItemable{
-					fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-				}},
+			pages: []nextPage{
+				aPage(d.fileWURLAtRoot(aURL(1), false, 1)),
 			},
 			expectedItemProps: map[string]itemProps{
-				"1": {
-					downloadURL: "https://dummy1.com",
+				fileID(1): {
+					downloadURL: aURL(1),
 					isDeleted:   false,
 				},
 			},
@@ -247,34 +247,33 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 		},
 		{
 			name: "multiple items in cache",
-			pages: []mock.NextPage{
-				{Items: []models.DriveItemable{
-					fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-					fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
-					fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
-					fileItem("4", "file4", "root", "root", "https://dummy4.com", false),
-					fileItem("5", "file5", "root", "root", "https://dummy5.com", false),
-				}},
+			pages: []nextPage{
+				aPage(
+					d.fileWURLAtRoot(aURL(1), false, 1),
+					d.fileWURLAtRoot(aURL(2), false, 2),
+					d.fileWURLAtRoot(aURL(3), false, 3),
+					d.fileWURLAtRoot(aURL(4), false, 4),
+					d.fileWURLAtRoot(aURL(5), false, 5)),
 			},
 			expectedItemProps: map[string]itemProps{
-				"1": {
-					downloadURL: "https://dummy1.com",
+				fileID(1): {
+					downloadURL: aURL(1),
 					isDeleted:   false,
 				},
-				"2": {
-					downloadURL: "https://dummy2.com",
+				fileID(2): {
+					downloadURL: aURL(2),
 					isDeleted:   false,
 				},
-				"3": {
-					downloadURL: "https://dummy3.com",
+				fileID(3): {
+					downloadURL: aURL(3),
 					isDeleted:   false,
 				},
-				"4": {
-					downloadURL: "https://dummy4.com",
+				fileID(4): {
+					downloadURL: aURL(4),
 					isDeleted:   false,
 				},
-				"5": {
-					downloadURL: "https://dummy5.com",
+				fileID(5): {
+					downloadURL: aURL(5),
 					isDeleted:   false,
 				},
 			},
@@ -287,36 +286,34 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 		},
 		{
 			name: "multiple pages",
-			pages: []mock.NextPage{
-				{Items: []models.DriveItemable{
-					fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-					fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
-					fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
-				}},
-				{Items: []models.DriveItemable{
-					fileItem("4", "file4", "root", "root", "https://dummy4.com", false),
-					fileItem("5", "file5", "root", "root", "https://dummy5.com", false),
-				}},
+			pages: []nextPage{
+				aPage(
+					d.fileWURLAtRoot(aURL(1), false, 1),
+					d.fileWURLAtRoot(aURL(2), false, 2),
+					d.fileWURLAtRoot(aURL(3), false, 3)),
+				aPage(
+					d.fileWURLAtRoot(aURL(4), false, 4),
+					d.fileWURLAtRoot(aURL(5), false, 5)),
 			},
 			expectedItemProps: map[string]itemProps{
-				"1": {
-					downloadURL: "https://dummy1.com",
+				fileID(1): {
+					downloadURL: aURL(1),
 					isDeleted:   false,
 				},
-				"2": {
-					downloadURL: "https://dummy2.com",
+				fileID(2): {
+					downloadURL: aURL(2),
 					isDeleted:   false,
 				},
-				"3": {
-					downloadURL: "https://dummy3.com",
+				fileID(3): {
+					downloadURL: aURL(3),
 					isDeleted:   false,
 				},
-				"4": {
-					downloadURL: "https://dummy4.com",
+				fileID(4): {
+					downloadURL: aURL(4),
 					isDeleted:   false,
 				},
-				"5": {
-					downloadURL: "https://dummy5.com",
+				fileID(5): {
+					downloadURL: aURL(5),
 					isDeleted:   false,
 				},
 			},
@@ -329,53 +326,41 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 		},
 		{
 			name: "multiple pages with resets",
-			pages: []mock.NextPage{
-				{
-					Items: []models.DriveItemable{
-						fileItem("-1", "file-1", "root", "root", "https://dummy-1.com", false),
-						fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-						fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
-						fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
-					},
-				},
-				{
-					Items: []models.DriveItemable{},
-					Reset: true,
-				},
-				{
-					Items: []models.DriveItemable{
-						fileItem("0", "file1", "root", "root", "https://dummy0.com", false),
-						fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-						fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
-						fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
-					},
-				},
-				{
-					Items: []models.DriveItemable{
-						fileItem("4", "file4", "root", "root", "https://dummy4.com", false),
-						fileItem("5", "file5", "root", "root", "https://dummy5.com", false),
-					},
-				},
+			pages: []nextPage{
+				aPage(
+					d.fileWURLAtRoot(aURL(-1), false, -1),
+					d.fileWURLAtRoot(aURL(1), false, 1),
+					d.fileWURLAtRoot(aURL(2), false, 2),
+					d.fileWURLAtRoot(aURL(3), false, 3)),
+				aReset(),
+				aPage(
+					d.fileWURLAtRoot(aURL(0), false, 0),
+					d.fileWURLAtRoot(aURL(1), false, 1),
+					d.fileWURLAtRoot(aURL(2), false, 2),
+					d.fileWURLAtRoot(aURL(3), false, 3)),
+				aPage(
+					d.fileWURLAtRoot(aURL(4), false, 4),
+					d.fileWURLAtRoot(aURL(5), false, 5)),
 			},
 			expectedItemProps: map[string]itemProps{
-				"1": {
-					downloadURL: "https://dummy1.com",
+				fileID(1): {
+					downloadURL: aURL(1),
 					isDeleted:   false,
 				},
-				"2": {
-					downloadURL: "https://dummy2.com",
+				fileID(2): {
+					downloadURL: aURL(2),
 					isDeleted:   false,
 				},
-				"3": {
-					downloadURL: "https://dummy3.com",
+				fileID(3): {
+					downloadURL: aURL(3),
 					isDeleted:   false,
 				},
-				"4": {
-					downloadURL: "https://dummy4.com",
+				fileID(4): {
+					downloadURL: aURL(4),
 					isDeleted:   false,
 				},
-				"5": {
-					downloadURL: "https://dummy5.com",
+				fileID(5): {
+					downloadURL: aURL(5),
 					isDeleted:   false,
 				},
 			},
@@ -388,48 +373,39 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 		},
 		{
 			name: "multiple pages with resets and combo reset+items in page",
-			pages: []mock.NextPage{
-				{
-					Items: []models.DriveItemable{
-						fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-						fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
-						fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
-					},
-				},
-				{
-					Items: []models.DriveItemable{
-						fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-						fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
-						fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
-					},
-					Reset: true,
-				},
-				{
-					Items: []models.DriveItemable{
-						fileItem("4", "file4", "root", "root", "https://dummy4.com", false),
-						fileItem("5", "file5", "root", "root", "https://dummy5.com", false),
-					},
-				},
+			pages: []nextPage{
+				aPage(
+					d.fileWURLAtRoot(aURL(0), false, 0),
+					d.fileWURLAtRoot(aURL(1), false, 1),
+					d.fileWURLAtRoot(aURL(2), false, 2),
+					d.fileWURLAtRoot(aURL(3), false, 3)),
+				aPageWReset(
+					d.fileWURLAtRoot(aURL(1), false, 1),
+					d.fileWURLAtRoot(aURL(2), false, 2),
+					d.fileWURLAtRoot(aURL(3), false, 3)),
+				aPage(
+					d.fileWURLAtRoot(aURL(4), false, 4),
+					d.fileWURLAtRoot(aURL(5), false, 5)),
 			},
 			expectedItemProps: map[string]itemProps{
-				"1": {
-					downloadURL: "https://dummy1.com",
+				fileID(1): {
+					downloadURL: aURL(1),
 					isDeleted:   false,
 				},
-				"2": {
-					downloadURL: "https://dummy2.com",
+				fileID(2): {
+					downloadURL: aURL(2),
 					isDeleted:   false,
 				},
-				"3": {
-					downloadURL: "https://dummy3.com",
+				fileID(3): {
+					downloadURL: aURL(3),
 					isDeleted:   false,
 				},
-				"4": {
-					downloadURL: "https://dummy4.com",
+				fileID(4): {
+					downloadURL: aURL(4),
 					isDeleted:   false,
 				},
-				"5": {
-					downloadURL: "https://dummy5.com",
+				fileID(5): {
+					downloadURL: aURL(5),
 					isDeleted:   false,
 				},
 			},
@@ -442,26 +418,25 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 		},
 		{
 			name: "duplicate items with potentially new urls",
-			pages: []mock.NextPage{
-				{Items: []models.DriveItemable{
-					fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-					fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
-					fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
-					fileItem("1", "file1", "root", "root", "https://test1.com", false),
-					fileItem("2", "file2", "root", "root", "https://test2.com", false),
-				}},
+			pages: []nextPage{
+				aPage(
+					d.fileWURLAtRoot(aURL(1), false, 1),
+					d.fileWURLAtRoot(aURL(2), false, 2),
+					d.fileWURLAtRoot(aURL(3), false, 3),
+					d.fileWURLAtRoot(aURL(100), false, 1),
+					d.fileWURLAtRoot(aURL(200), false, 2)),
 			},
 			expectedItemProps: map[string]itemProps{
-				"1": {
-					downloadURL: "https://test1.com",
+				fileID(1): {
+					downloadURL: aURL(100),
 					isDeleted:   false,
 				},
-				"2": {
-					downloadURL: "https://test2.com",
+				fileID(2): {
+					downloadURL: aURL(200),
 					isDeleted:   false,
 				},
-				"3": {
-					downloadURL: "https://dummy3.com",
+				fileID(3): {
+					downloadURL: aURL(3),
 					isDeleted:   false,
 				},
 			},
@@ -474,20 +449,19 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 		},
 		{
 			name: "deleted items",
-			pages: []mock.NextPage{
-				{Items: []models.DriveItemable{
-					fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-					fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
-					fileItem("1", "file1", "root", "root", "https://dummy1.com", true),
-				}},
+			pages: []nextPage{
+				aPage(
+					d.fileWURLAtRoot(aURL(1), false, 1),
+					d.fileWURLAtRoot(aURL(2), false, 2),
+					d.fileWURLAtRoot(aURL(1), true, 1)),
 			},
 			expectedItemProps: map[string]itemProps{
-				"1": {
+				fileID(1): {
 					downloadURL: "",
 					isDeleted:   true,
 				},
-				"2": {
-					downloadURL: "https://dummy2.com",
+				fileID(2): {
+					downloadURL: aURL(2),
 					isDeleted:   false,
 				},
 			},
@@ -500,13 +474,11 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 		},
 		{
 			name: "item not found in cache",
-			pages: []mock.NextPage{
-				{Items: []models.DriveItemable{
-					fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-				}},
+			pages: []nextPage{
+				aPage(d.fileWURLAtRoot(aURL(1), false, 1)),
 			},
 			expectedItemProps: map[string]itemProps{
-				"2": {},
+				fileID(2): {},
 			},
 			expectErr: assert.Error,
 			expect: func(t *testing.T, uc *urlCache, startTime time.Time) {
@@ -517,13 +489,13 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 		},
 		{
 			name: "delta query error",
-			pages: []mock.NextPage{
-				{Items: []models.DriveItemable{}},
+			pages: []nextPage{
+				aPage(),
 			},
 			pagerErr: errors.New("delta query error"),
 			expectedItemProps: map[string]itemProps{
-				"1": {},
-				"2": {},
+				fileID(1): {},
+				fileID(2): {},
 			},
 			expectErr: assert.Error,
 			expect: func(t *testing.T, uc *urlCache, startTime time.Time) {
@@ -534,14 +506,13 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 		},
 		{
 			name: "folder item",
-			pages: []mock.NextPage{
-				{Items: []models.DriveItemable{
-					fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-					driveItem("2", "folder2", "root", "root", isFolder),
-				}},
+			pages: []nextPage{
+				aPage(
+					d.fileWURLAtRoot(aURL(1), false, 1),
+					d.folderAtRoot(2)),
 			},
 			expectedItemProps: map[string]itemProps{
-				"2": {},
+				fileID(2): {},
 			},
 			expectErr: assert.Error,
 			expect: func(t *testing.T, uc *urlCache, startTime time.Time) {
@@ -562,14 +533,17 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 			ctx, flush := tester.NewContext(t)
 			defer flush()

-			driveEnumer := mock.DriveEnumerator(
-				mock.Drive(driveID).
-					WithErr(test.pagerErr).
-					With(mock.Delta(deltaString, test.pagerErr).
-						With(test.pages...)))
+			drive := drive()
+
+			driveEnumer := driveEnumerator(
+				drive.newEnumer().
+					withErr(test.pagerErr).
+					with(
+						delta(deltaURL, test.pagerErr).
+							with(test.pages...)))

 			cache, err := newURLCache(
-				driveID,
+				drive.id,
 				"",
 				1*time.Hour,
 				driveEnumer,
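Error injection also moves onto the builder chain. A hedged sketch of the two failure paths, using only calls visible in this diff; exactly where each error surfaces during enumeration is an assumption:

	// fail the drive-level enumeration:
	driveEnumerator(
		drv.newEnumer().withErr(errors.New("enumeration failed")))

	// fail the delta query while still declaring its pages:
	driveEnumerator(
		drv.newEnumer().with(
			delta(deltaURL, errors.New("delta query error")).with(aPage())))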
@@ -609,18 +583,17 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 func (suite *URLCacheUnitSuite) TestNeedsRefresh() {
 	var (
 		t               = suite.T()
-		driveID         = "drive1"
 		refreshInterval = 1 * time.Second
+		drv             = drive()
 	)

 	cache, err := newURLCache(
-		driveID,
+		drv.id,
 		"",
 		refreshInterval,
-		&mock.EnumerateDriveItemsDelta{},
+		&enumerateDriveItemsDelta{},
 		count.New(),
 		fault.New(true))

 	require.NoError(t, err, clues.ToCore(err))

 	// cache is empty
@@ -641,6 +614,8 @@ func (suite *URLCacheUnitSuite) TestNeedsRefresh() {
 }

 func (suite *URLCacheUnitSuite) TestNewURLCache() {
+	drv := drive()
+
 	table := []struct {
 		name    string
 		driveID string
@@ -653,21 +628,21 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
 			name:       "invalid driveID",
 			driveID:    "",
 			refreshInt: 1 * time.Hour,
-			itemPager:  &mock.EnumerateDriveItemsDelta{},
+			itemPager:  &enumerateDriveItemsDelta{},
 			errors:     fault.New(true),
 			expectErr:  require.Error,
 		},
 		{
 			name:       "invalid refresh interval",
-			driveID:    "drive1",
+			driveID:    drv.id,
 			refreshInt: 100 * time.Millisecond,
-			itemPager:  &mock.EnumerateDriveItemsDelta{},
+			itemPager:  &enumerateDriveItemsDelta{},
 			errors:     fault.New(true),
 			expectErr:  require.Error,
 		},
 		{
 			name:       "invalid item enumerator",
-			driveID:    "drive1",
+			driveID:    drv.id,
 			refreshInt: 1 * time.Hour,
 			itemPager:  nil,
 			errors:     fault.New(true),
@@ -675,9 +650,9 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
 		},
 		{
 			name:       "valid",
-			driveID:    "drive1",
+			driveID:    drv.id,
 			refreshInt: 1 * time.Hour,
-			itemPager:  &mock.EnumerateDriveItemsDelta{},
+			itemPager:  &enumerateDriveItemsDelta{},
 			errors:     fault.New(true),
 			expectErr:  require.NoError,
 		},
@@ -1,5 +1,16 @@
 package mock

+// ---------------------------------------------------------------------------
+// >>> TODO <<<
+// https://github.com/alcionai/corso/issues/4846
+// This file's functions are duplicated into /drive/helper_test.go, which
+// should act as the clear primary owner of that functionality. However,
+// packages outside of /drive (such as sharepoint) depend on these helpers
+// for test functionality. We'll want to unify the two at some point.
+// In the meantime, make sure you're referencing and updating the correct
+// set of helpers (prefer the /drive version over this one).
+// ---------------------------------------------------------------------------
+
 import (
 	"context"
 	"fmt"
@@ -50,7 +61,6 @@ type BackupHandler[T any] struct {
 	Service  path.ServiceType
 	Category path.CategoryType
 
-	DrivePagerV pagers.NonDeltaHandler[models.Driveable]
 	// driveID -> itemPager
 	ItemPagerV map[string]pagers.DeltaHandler[models.DriveItemable]
 
@@ -126,11 +136,9 @@ func DefaultSharePointBH(resourceOwner string) *BackupHandler[models.DriveItemab
 
 func DefaultDriveBHWith(
 	resource string,
-	drivePager *apiMock.Pager[models.Driveable],
 	enumerator EnumerateDriveItemsDelta,
 ) *BackupHandler[models.DriveItemable] {
 	mbh := DefaultOneDriveBH(resource)
-	mbh.DrivePagerV = drivePager
 	mbh.DriveItemEnumeration = enumerator
 
 	return mbh
@@ -168,7 +176,7 @@ func (h BackupHandler[T]) ServiceCat() (path.ServiceType, path.CategoryType) {
 }
 
 func (h BackupHandler[T]) NewDrivePager(string, []string) pagers.NonDeltaHandler[models.Driveable] {
-	return h.DrivePagerV
+	return h.DriveItemEnumeration.DrivePager()
 }
 
 func (h BackupHandler[T]) FormatDisplayPath(_ string, pb *path.Builder) string {
@@ -333,18 +341,18 @@ type NextPage struct {
 }
 
 type EnumerateDriveItemsDelta struct {
-	DrivePagers map[string]*DriveDeltaEnumerator
+	DrivePagers map[string]*DeltaDriveEnumerator
 }
 
 func DriveEnumerator(
-	ds ...*DriveDeltaEnumerator,
+	ds ...*DeltaDriveEnumerator,
 ) EnumerateDriveItemsDelta {
 	enumerator := EnumerateDriveItemsDelta{
-		DrivePagers: map[string]*DriveDeltaEnumerator{},
+		DrivePagers: map[string]*DeltaDriveEnumerator{},
 	}
 
 	for _, drive := range ds {
-		enumerator.DrivePagers[drive.DriveID] = drive
+		enumerator.DrivePagers[drive.Drive.ID] = drive
 	}
 
 	return enumerator
@@ -359,29 +367,75 @@ func (en EnumerateDriveItemsDelta) EnumerateDriveItemsDelta(
 	return iterator.nextDelta()
 }
 
-type DriveDeltaEnumerator struct {
-	DriveID string
+func (en EnumerateDriveItemsDelta) DrivePager() *apiMock.Pager[models.Driveable] {
+	ds := []models.Driveable{}
+
+	for _, dp := range en.DrivePagers {
+		ds = append(ds, dp.Drive.Able)
+	}
+
+	return &apiMock.Pager[models.Driveable]{
+		ToReturn: []apiMock.PagerResult[models.Driveable]{
+			{Values: ds},
+		},
+	}
+}
+
+func (en EnumerateDriveItemsDelta) Drives() []*DeltaDrive {
+	ds := []*DeltaDrive{}
+
+	for _, dp := range en.DrivePagers {
+		ds = append(ds, dp.Drive)
+	}
+
+	return ds
+}
+
+type DeltaDrive struct {
+	ID   string
+	Able models.Driveable
+}
+
+func Drive(driveSuffix ...any) *DeltaDrive {
+	driveID := id("drive", driveSuffix...)
+
+	able := models.NewDrive()
+	able.SetId(ptr.To(driveID))
+	able.SetName(ptr.To(name("drive", driveSuffix...)))
+
+	return &DeltaDrive{
+		ID:   driveID,
+		Able: able,
+	}
+}
+
+func (dd *DeltaDrive) NewEnumer() *DeltaDriveEnumerator {
+	cp := &DeltaDrive{}
+
+	*cp = *dd
+
+	return &DeltaDriveEnumerator{Drive: cp}
+}
+
+type DeltaDriveEnumerator struct {
+	Drive        *DeltaDrive
 	idx          int
 	DeltaQueries []*DeltaQuery
 	Err          error
 }
 
-func Drive(driveID string) *DriveDeltaEnumerator {
-	return &DriveDeltaEnumerator{DriveID: driveID}
-}
-
-func (dde *DriveDeltaEnumerator) With(ds ...*DeltaQuery) *DriveDeltaEnumerator {
+func (dde *DeltaDriveEnumerator) With(ds ...*DeltaQuery) *DeltaDriveEnumerator {
 	dde.DeltaQueries = ds
 	return dde
 }
 
 // WithErr adds an error that is always returned in the last delta index.
-func (dde *DriveDeltaEnumerator) WithErr(err error) *DriveDeltaEnumerator {
+func (dde *DeltaDriveEnumerator) WithErr(err error) *DeltaDriveEnumerator {
 	dde.Err = err
 	return dde
 }
 
-func (dde *DriveDeltaEnumerator) nextDelta() *DeltaQuery {
+func (dde *DeltaDriveEnumerator) nextDelta() *DeltaQuery {
 	if dde.idx == len(dde.DeltaQueries) {
 		// at the end of the enumeration, return an empty page with no items,
 		// not even the root. This is what graph api would do to signify an absence
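
For orientation, the renamed types compose into a fluent builder. A minimal sketch of the intended wiring, assembled from call sites elsewhere in this diff (Delta and NextPage are pre-existing mock helpers whose full signatures aren't shown here; items stands in for a []models.DriveItemable):

	// sketch: one mock drive serving a single delta page
	drv := Drive() // yields id_drive / n_drive via id() and name()
	enumerator := DriveEnumerator(
		drv.NewEnumer().With(
			Delta("notempty", nil).With(
				NextPage{Items: items})))

The handler's NewDrivePager then serves the same drives through enumerator.DrivePager(), which is why the standalone DrivePagerV field could be dropped.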
@@ -455,7 +509,7 @@ func (dq *DeltaQuery) NextPage() ([]models.DriveItemable, bool, bool) {
 	}
 
 	np := dq.Pages[dq.idx]
-	dq.idx = dq.idx + 1
+	dq.idx++
 
 	return np.Items, np.Reset, false
 }
@@ -598,3 +652,49 @@ func (h *RestoreHandler) GetRootFolder(
 ) (models.DriveItemable, error) {
 	return models.NewDriveItem(), nil
 }
+
+// assumption is only one suffix per id. Mostly using
+// the variadic as an "optional" extension.
+func id(v string, suffixes ...any) string {
+	id := fmt.Sprintf("id_%s", v)
+
+	// a bit weird, but acts as a quality of life
+	// that allows some funcs to take in the `file`
+	// or `folder` or etc monikers as the suffix
+	// without producing weird outputs.
+	if len(suffixes) == 1 {
+		sfx0, ok := suffixes[0].(string)
+		if ok && sfx0 == v {
+			return id
+		}
+	}
+
+	for _, sfx := range suffixes {
+		id = fmt.Sprintf("%s_%v", id, sfx)
+	}
+
+	return id
+}
+
+// assumption is only one suffix per name. Mostly using
+// the variadic as an "optional" extension.
+func name(v string, suffixes ...any) string {
+	name := fmt.Sprintf("n_%s", v)
+
+	// a bit weird, but acts as a quality of life
+	// that allows some funcs to take in the `file`
+	// or `folder` or etc monikers as the suffix
+	// without producing weird outputs.
+	if len(suffixes) == 1 {
+		sfx0, ok := suffixes[0].(string)
+		if ok && sfx0 == v {
+			return name
+		}
+	}
+
+	for _, sfx := range suffixes {
+		name = fmt.Sprintf("%s_%v", name, sfx)
+	}
+
+	return name
+}
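
As a quick reference, the suffix handling above yields deterministic values; a few hedged examples, derived by reading the helpers rather than from test output:

	id("drive")        // "id_drive" (matches the testBaseDrivePath update below)
	id("drive", 1)     // "id_drive_1"
	id("file", "file") // "id_file"; a lone suffix equal to the moniker is dropped
	name("drive")      // "n_drive"
	name("folder", 2)  // "n_folder_2"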
@@ -28,7 +28,7 @@ import (
 
 var testBaseDrivePath = path.Builder{}.Append(
 	odConsts.DrivesPathDir,
-	"driveID1",
+	"id_drive",
 	odConsts.RootPathDir)
 
 // ---------------------------------------------------------------------------
@@ -44,12 +44,14 @@ func TestLibrariesBackupUnitSuite(t *testing.T) {
 }
 
 func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
-	anyFolder := (&selectors.SharePointBackup{}).LibraryFolders(selectors.Any())[0]
+	var (
+		anyFolder = (&selectors.SharePointBackup{}).LibraryFolders(selectors.Any())[0]
+		drv       = mock.Drive()
+	)
 
 	const (
 		tenantID = "tenant"
 		siteID   = "site"
-		driveID  = "driveID1"
 	)
 
 	pb := path.Builder{}.Append(testBaseDrivePath.Elements()...)
@@ -96,13 +98,13 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
 				paths            = map[string]string{}
 				excluded         = map[string]struct{}{}
 				collMap          = map[string]map[string]*drive.Collection{
-					driveID: {},
+					drv.ID: {},
 				}
 				topLevelPackages = map[string]struct{}{}
 			)
 
 			mbh.DriveItemEnumeration = mock.DriveEnumerator(
-				mock.Drive(driveID).With(
+				drv.NewEnumer().With(
 					mock.Delta("notempty", nil).With(mock.NextPage{Items: test.items})))
 
 			c := drive.NewCollections(
@@ -117,7 +119,7 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
 
 			_, _, err := c.PopulateDriveCollections(
 				ctx,
-				driveID,
+				drv.ID,
 				"General",
 				paths,
 				excluded,
@@ -134,10 +136,10 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
 			assert.Empty(t, topLevelPackages, "should not find package type folders")
 
 			for _, collPath := range test.expectedCollectionIDs {
-				assert.Contains(t, c.CollectionMap[driveID], collPath)
+				assert.Contains(t, c.CollectionMap[drv.ID], collPath)
 			}
 
-			for _, col := range c.CollectionMap[driveID] {
+			for _, col := range c.CollectionMap[drv.ID] {
 				assert.Contains(t, test.expectedCollectionPaths, col.FullPath().String())
 			}
 		})