diff --git a/src/internal/m365/collection/drive/collection_test.go b/src/internal/m365/collection/drive/collection_test.go
index ef6a282e8..8a831c724 100644
--- a/src/internal/m365/collection/drive/collection_test.go
+++ b/src/internal/m365/collection/drive/collection_test.go
@@ -23,7 +23,6 @@ import (
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
 	metaTD "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata/testdata"
-	"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
 	odTD "github.com/alcionai/corso/src/internal/m365/service/onedrive/testdata"
 	"github.com/alcionai/corso/src/internal/m365/support"
 	"github.com/alcionai/corso/src/internal/tester"
@@ -108,7 +107,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
 			name: "oneDrive, no duplicates",
 			numInstances: 1,
 			service: path.OneDriveService,
-			itemDeets: nst{stubItemName, defaultItemSize, now},
+			itemDeets: nst{stubItemName, defaultFileSize, now},
 			itemInfo: details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: stubItemName, Modified: now}},
 			getBody: io.NopCloser(bytes.NewReader(stubItemContent)),
 			getErr: nil,
@@ -118,7 +117,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
 			name: "oneDrive, duplicates",
 			numInstances: 3,
 			service: path.OneDriveService,
-			itemDeets: nst{stubItemName, defaultItemSize, now},
+			itemDeets: nst{stubItemName, defaultFileSize, now},
 			getBody: io.NopCloser(bytes.NewReader(stubItemContent)),
 			getErr: nil,
 			itemInfo: details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: stubItemName, Modified: now}},
@@ -128,7 +127,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
 			name: "oneDrive, malware",
 			numInstances: 3,
 			service: path.OneDriveService,
-			itemDeets: nst{stubItemName, defaultItemSize, now},
+			itemDeets: nst{stubItemName, defaultFileSize, now},
 			itemInfo: details.ItemInfo{},
 			getBody: nil,
 			getErr: clues.New("test malware").Label(graph.LabelsMalware),
@@ -139,7 +138,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
 			name: "oneDrive, not found",
 			numInstances: 3,
 			service: path.OneDriveService,
-			itemDeets: nst{stubItemName, defaultItemSize, now},
+			itemDeets: nst{stubItemName, defaultFileSize, now},
 			itemInfo: details.ItemInfo{},
 			getBody: nil,
 			getErr: clues.New("test not found").Label(graph.LabelStatus(http.StatusNotFound)),
@@ -150,7 +149,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
 			name: "sharePoint, no duplicates",
 			numInstances: 1,
 			service: path.SharePointService,
-			itemDeets: nst{stubItemName, defaultItemSize, now},
+			itemDeets: nst{stubItemName, defaultFileSize, now},
 			itemInfo: details.ItemInfo{SharePoint: &details.SharePointInfo{ItemName: stubItemName, Modified: now}},
 			getBody: io.NopCloser(bytes.NewReader(stubItemContent)),
 			getErr: nil,
@@ -160,7 +159,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
 			name: "sharePoint, duplicates",
 			numInstances: 3,
 			service: path.SharePointService,
-			itemDeets: nst{stubItemName, defaultItemSize, now},
+			itemDeets: nst{stubItemName, defaultFileSize, now},
 			itemInfo: details.ItemInfo{SharePoint: &details.SharePointInfo{ItemName: stubItemName, Modified: now}},
 			getBody: io.NopCloser(bytes.NewReader(stubItemContent)),
 			getErr: nil,
@@ -185,9 +184,9 @@ func (suite *CollectionUnitSuite) TestCollection() {
 			folderPath, err := pb.ToDataLayerOneDrivePath("tenant", "owner", false)
 			require.NoError(t, err, clues.ToCore(err))

-			mbh := mock.DefaultOneDriveBH("a-user")
+			mbh := defaultOneDriveBH("a-user")
 			if test.service == path.SharePointService {
-				mbh = mock.DefaultSharePointBH("a-site")
+				mbh = defaultSharePointBH("a-site")
 				mbh.ItemInfo.SharePoint.Modified = now
 				mbh.ItemInfo.SharePoint.ItemName = stubItemName
 			} else {
@@ -202,10 +201,10 @@ func (suite *CollectionUnitSuite) TestCollection() {
 				},
 			}
 			mbh.GetErrs = []error{test.getErr}
-			mbh.GI = mock.GetsItem{Err: assert.AnError}
+			mbh.GI = getsItem{Err: assert.AnError}

 			pcr := metaTD.NewStubPermissionResponse(metadata.GV2User, stubMetaID, stubMetaEntityID, stubMetaRoles)
-			mbh.GIP = mock.GetsItemPermission{Perm: pcr}
+			mbh.GIP = getsItemPermission{Perm: pcr}

 			coll, err := NewCollection(
 				mbh,
@@ -305,7 +304,7 @@ func (suite *CollectionUnitSuite) TestCollectionReadError() {
 		collStatus = support.ControllerOperationStatus{}
 		wg = sync.WaitGroup{}
 		name = "name"
-		size = defaultItemSize
+		size = defaultFileSize
 		now = time.Now()
 	)

@@ -318,9 +317,9 @@ func (suite *CollectionUnitSuite) TestCollectionReadError() {
 	folderPath, err := pb.ToDataLayerOneDrivePath("a-tenant", "a-user", false)
 	require.NoError(t, err, clues.ToCore(err))

-	mbh := mock.DefaultOneDriveBH("a-user")
-	mbh.GI = mock.GetsItem{Err: assert.AnError}
-	mbh.GIP = mock.GetsItemPermission{Perm: models.NewPermissionCollectionResponse()}
+	mbh := defaultOneDriveBH("a-user")
+	mbh.GI = getsItem{Err: assert.AnError}
+	mbh.GIP = getsItemPermission{Perm: models.NewPermissionCollectionResponse()}
 	mbh.GetResps = []*http.Response{
 		nil,
 		{StatusCode: http.StatusOK, Body: io.NopCloser(strings.NewReader("test"))},
@@ -375,7 +374,7 @@ func (suite *CollectionUnitSuite) TestCollectionReadUnauthorizedErrorRetry() {
 		collStatus = support.ControllerOperationStatus{}
 		wg = sync.WaitGroup{}
 		name = "name"
-		size = defaultItemSize
+		size = defaultFileSize
 		now = time.Now()
 	)

@@ -397,9 +396,9 @@ func (suite *CollectionUnitSuite) TestCollectionReadUnauthorizedErrorRetry() {
 	folderPath, err := pb.ToDataLayerOneDrivePath("a-tenant", "a-user", false)
 	require.NoError(t, err)

-	mbh := mock.DefaultOneDriveBH("a-user")
-	mbh.GI = mock.GetsItem{Item: stubItem}
-	mbh.GIP = mock.GetsItemPermission{Perm: models.NewPermissionCollectionResponse()}
+	mbh := defaultOneDriveBH("a-user")
+	mbh.GI = getsItem{Item: stubItem}
+	mbh.GIP = getsItemPermission{Perm: models.NewPermissionCollectionResponse()}
 	mbh.GetResps = []*http.Response{
 		nil,
 		{StatusCode: http.StatusOK, Body: io.NopCloser(strings.NewReader("test"))},
@@ -457,9 +456,9 @@ func (suite *CollectionUnitSuite) TestCollectionPermissionBackupLatestModTime()
 	folderPath, err := pb.ToDataLayerOneDrivePath("a-tenant", "a-user", false)
 	require.NoError(t, err, clues.ToCore(err))

-	mbh := mock.DefaultOneDriveBH("a-user")
+	mbh := defaultOneDriveBH("a-user")
 	mbh.ItemInfo = details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: "fakeName", Modified: time.Now()}}
-	mbh.GIP = mock.GetsItemPermission{Perm: models.NewPermissionCollectionResponse()}
+	mbh.GIP = getsItemPermission{Perm: models.NewPermissionCollectionResponse()}
 	mbh.GetResps = []*http.Response{{
 		StatusCode: http.StatusOK,
 		Body: io.NopCloser(strings.NewReader("Fake Data!")),
@@ -635,8 +634,8 @@ func (suite *GetDriveItemUnitTestSuite) TestGetDriveItem_error() {
 			stubItem.GetFile().SetMimeType(&test.itemMimeType)

-			mbh := mock.DefaultOneDriveBH("a-user")
-			mbh.GI = mock.GetsItem{Item: stubItem}
+			mbh := defaultOneDriveBH("a-user")
+			mbh.GI = getsItem{Item: stubItem}
 			mbh.GetResps = []*http.Response{{StatusCode: http.StatusOK}}
 			mbh.GetErrs = []error{test.err}
@@ -692,7 +691,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 	table := []struct {
 		name string
-		mgi mock.GetsItem
+		mgi getsItem
 		itemInfo details.ItemInfo
 		respBody []io.ReadCloser
 		getErr []error
@@ -711,7 +710,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 		},
 		{
 			name: "expired url redownloads",
-			mgi: mock.GetsItem{Item: itemWID, Err: nil},
+			mgi: getsItem{Item: itemWID, Err: nil},
 			itemInfo: details.ItemInfo{},
 			respBody: []io.ReadCloser{nil, iorc},
 			getErr: []error{errUnauth, nil},
@@ -731,14 +730,14 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 			name: "re-fetching the item fails",
 			itemInfo: details.ItemInfo{},
 			getErr: []error{errUnauth},
-			mgi: mock.GetsItem{Item: nil, Err: assert.AnError},
+			mgi: getsItem{Item: nil, Err: assert.AnError},
 			expectErr: require.Error,
 			expect: require.Nil,
 			muc: m,
 		},
 		{
 			name: "expired url fails redownload",
-			mgi: mock.GetsItem{Item: itemWID, Err: nil},
+			mgi: getsItem{Item: itemWID, Err: nil},
 			itemInfo: details.ItemInfo{},
 			respBody: []io.ReadCloser{nil, nil},
 			getErr: []error{errUnauth, assert.AnError},
@@ -748,7 +747,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 		},
 		{
 			name: "url refreshed from cache",
-			mgi: mock.GetsItem{Item: itemWID, Err: nil},
+			mgi: getsItem{Item: itemWID, Err: nil},
 			itemInfo: details.ItemInfo{},
 			respBody: []io.ReadCloser{nil, iorc},
 			getErr: []error{errUnauth, nil},
@@ -766,7 +765,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 		},
 		{
 			name: "url refreshed from cache but item deleted",
-			mgi: mock.GetsItem{Item: itemWID, Err: graph.ErrDeletedInFlight},
+			mgi: getsItem{Item: itemWID, Err: graph.ErrDeletedInFlight},
 			itemInfo: details.ItemInfo{},
 			respBody: []io.ReadCloser{nil, nil, nil},
 			getErr: []error{errUnauth, graph.ErrDeletedInFlight, graph.ErrDeletedInFlight},
@@ -784,7 +783,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 		},
 		{
 			name: "fallback to item fetch on any cache error",
-			mgi: mock.GetsItem{Item: itemWID, Err: nil},
+			mgi: getsItem{Item: itemWID, Err: nil},
 			itemInfo: details.ItemInfo{},
 			respBody: []io.ReadCloser{nil, iorc},
 			getErr: []error{errUnauth, nil},
@@ -814,7 +813,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 				}
 			}

-			mbh := mock.DefaultOneDriveBH("a-user")
+			mbh := defaultOneDriveBH("a-user")
 			mbh.GI = test.mgi
 			mbh.ItemInfo = test.itemInfo
 			mbh.GetResps = resps
@@ -980,9 +979,9 @@ func (suite *CollectionUnitSuite) TestItemExtensions() {
 			wg.Add(1)

-			mbh := mock.DefaultOneDriveBH("a-user")
-			mbh.GI = mock.GetsItem{Err: assert.AnError}
-			mbh.GIP = mock.GetsItemPermission{Perm: models.NewPermissionCollectionResponse()}
+			mbh := defaultOneDriveBH("a-user")
+			mbh.GI = getsItem{Err: assert.AnError}
+			mbh.GIP = getsItemPermission{Perm: models.NewPermissionCollectionResponse()}
 			mbh.GetResps = []*http.Response{
 				{
 					StatusCode: http.StatusOK,
diff --git a/src/internal/m365/collection/drive/collections_test.go b/src/internal/m365/collection/drive/collections_test.go
index 36fdfe24a..379c012c8 100644
--- a/src/internal/m365/collection/drive/collections_test.go
+++ b/src/internal/m365/collection/drive/collections_test.go
@@ -13,10 +13,8 @@ import (
 	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/common/prefixmatcher"
 	pmMock "github.com/alcionai/corso/src/internal/common/prefixmatcher/mock"
-	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/data"
 	dataMock "github.com/alcionai/corso/src/internal/data/mock"
-	"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
 	"github.com/alcionai/corso/src/internal/m365/support"
 	"github.com/alcionai/corso/src/internal/tester"
 	bupMD "github.com/alcionai/corso/src/pkg/backup/metadata"
@@ -26,7 +24,6 @@ import (
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/selectors"
 	"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
-	apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
 )

 // ---------------------------------------------------------------------------
@@ -43,6 +40,7 @@ func TestCollectionsUnitSuite(t *testing.T) {
 func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 	t := suite.T()
+	d := drive()

 	tests := []struct {
 		name string
@@ -64,19 +62,19 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 		{
 			name: "Invalid item",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				driveItem(id(item), name(item), driveParentDir(drive), rootID, -1),
+				driveRootFolder(),
+				driveItem(id(item), name(item), d.dir(), rootID, -1),
 			},
 			previousPaths: map[string]string{},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.Error,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
+				rootID: asNotMoved(t, d.strPath()),
 			},
 			expectedContainerCount: 1,
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
+				rootID: d.strPath(),
 			},
 			expectedExcludes: map[string]struct{}{},
 			expectedTopLevelPackages: map[string]struct{}{},
@@ -84,43 +82,43 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 		{
 			name: "Single File",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				driveItem(id(file), name(file), driveParentDir(drive), rootID, isFile),
+				driveRootFolder(),
+				driveFile(d.dir(), rootID),
 			},
 			previousPaths: map[string]string{},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
+				rootID: asNotMoved(t, d.strPath()),
 			},
 			expectedItemCount: 1,
 			expectedFileCount: 1,
 			expectedContainerCount: 1,
 			// Root folder is skipped since it's always present.
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
+				rootID: d.strPath(),
 			},
-			expectedExcludes: makeExcludeMap(id(file)),
+			expectedExcludes: makeExcludeMap(fileID()),
 			expectedTopLevelPackages: map[string]struct{}{},
 		},
 		{
 			name: "Single Folder",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder),
+				driveRootFolder(),
+				driveFolder(d.dir(), rootID),
 			},
 			previousPaths: map[string]string{},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
-				id(folder): asNew(t, driveFullPath(drive, name(folder))),
+				rootID: asNotMoved(t, d.strPath()),
+				folderID(): asNew(t, d.strPath(folderName())),
 			},
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
-				id(folder): driveFullPath(drive, name(folder)),
+				rootID: d.strPath(),
+				folderID(): d.strPath(folderName()),
 			},
 			expectedItemCount: 1,
 			expectedContainerCount: 2,
@@ -130,21 +128,21 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 		{
 			name: "Single Folder created twice", // deleted a created with same name in between a backup
 			items: []models.DriveItemable{
-				driveRootItem(),
-				driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder),
-				driveItem(idx(folder, 2), name(folder), driveParentDir(drive), rootID, isFolder),
+				driveRootFolder(),
+				driveFolder(d.dir(), rootID),
+				driveItem(folderID(2), folderName(), d.dir(), rootID, isFolder),
 			},
 			previousPaths: map[string]string{},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
-				idx(folder, 2): asNew(t, driveFullPath(drive, name(folder))),
+				rootID: asNotMoved(t, d.strPath()),
+				folderID(2): asNew(t, d.strPath(folderName())),
 			},
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
-				idx(folder, 2): driveFullPath(drive, name(folder)),
+				rootID: d.strPath(),
+				folderID(2): d.strPath(folderName()),
 			},
 			expectedItemCount: 1,
 			expectedContainerCount: 2,
@@ -154,115 +152,115 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 		{
 			name: "Single Package",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				driveItem(id(pkg), name(pkg), driveParentDir(drive), rootID, isPackage),
+				driveRootFolder(),
+				driveItem(id(pkg), name(pkg), d.dir(), rootID, isPackage),
 			},
 			previousPaths: map[string]string{},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
-				id(pkg): asNew(t, driveFullPath(drive, name(pkg))),
+				rootID: asNotMoved(t, d.strPath()),
+				id(pkg): asNew(t, d.strPath(name(pkg))),
 			},
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
-				id(pkg): driveFullPath(drive, name(pkg)),
+				rootID: d.strPath(),
+				id(pkg): d.strPath(name(pkg)),
 			},
 			expectedItemCount: 1,
 			expectedContainerCount: 2,
 			expectedExcludes: map[string]struct{}{},
 			expectedTopLevelPackages: map[string]struct{}{
-				driveFullPath(drive, name(pkg)): {},
+				d.strPath(name(pkg)): {},
 			},
 			expectedCountPackages: 1,
 		},
 		{
 			name: "Single Package with subfolder",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				driveItem(id(pkg), name(pkg), driveParentDir(drive), rootID, isPackage),
-				driveItem(id(folder), name(folder), driveParentDir(drive, name(pkg)), id(pkg), isFolder),
-				driveItem(id(subfolder), name(subfolder), driveParentDir(drive, name(pkg)), id(pkg), isFolder),
+				driveRootFolder(),
+				driveItem(id(pkg), name(pkg), d.dir(), rootID, isPackage),
+				driveItem(folderID(), folderName(), d.dir(name(pkg)), id(pkg), isFolder),
+				driveItem(id(subfolder), name(subfolder), d.dir(name(pkg)), id(pkg), isFolder),
 			},
 			previousPaths: map[string]string{},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
-				id(pkg): asNew(t, driveFullPath(drive, name(pkg))),
-				id(folder): asNew(t, driveFullPath(drive, name(pkg), name(folder))),
-				id(subfolder): asNew(t, driveFullPath(drive, name(pkg), name(subfolder))),
+				rootID: asNotMoved(t, d.strPath()),
+				id(pkg): asNew(t, d.strPath(name(pkg))),
+				folderID(): asNew(t, d.strPath(name(pkg), folderName())),
+				id(subfolder): asNew(t, d.strPath(name(pkg), name(subfolder))),
 			},
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
-				id(pkg): driveFullPath(drive, name(pkg)),
-				id(folder): driveFullPath(drive, name(pkg), name(folder)),
-				id(subfolder): driveFullPath(drive, name(pkg), name(subfolder)),
+				rootID: d.strPath(),
+				id(pkg): d.strPath(name(pkg)),
+				folderID(): d.strPath(name(pkg), folderName()),
+				id(subfolder): d.strPath(name(pkg), name(subfolder)),
 			},
 			expectedItemCount: 3,
 			expectedContainerCount: 4,
 			expectedExcludes: map[string]struct{}{},
 			expectedTopLevelPackages: map[string]struct{}{
-				driveFullPath(drive, name(pkg)): {},
+				d.strPath(name(pkg)): {},
 			},
 			expectedCountPackages: 3,
 		},
 		{
 			name: "1 root file, 1 folder, 1 package, 2 files, 3 collections",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				driveItem(idx(file, "inRoot"), namex(file, "inRoot"), driveParentDir(drive), rootID, isFile),
-				driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder),
-				driveItem(id(pkg), name(pkg), driveParentDir(drive), rootID, isPackage),
-				driveItem(idx(file, "inFolder"), namex(file, "inFolder"), driveParentDir(drive, name(folder)), id(folder), isFile),
-				driveItem(idx(file, "inPackage"), namex(file, "inPackage"), driveParentDir(drive, name(pkg)), id(pkg), isFile),
+				driveRootFolder(),
+				driveFile(d.dir(), rootID, "inRoot"),
+				driveFolder(d.dir(), rootID),
+				driveItem(id(pkg), name(pkg), d.dir(), rootID, isPackage),
+				driveFile(d.dir(folderName()), folderID(), "inFolder"),
+				driveFile(d.dir(name(pkg)), id(pkg), "inPackage"),
 			},
 			previousPaths: map[string]string{},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
-				id(folder): asNew(t, driveFullPath(drive, name(folder))),
-				id(pkg): asNew(t, driveFullPath(drive, name(pkg))),
+				rootID: asNotMoved(t, d.strPath()),
+				folderID(): asNew(t, d.strPath(folderName())),
+				id(pkg): asNew(t, d.strPath(name(pkg))),
 			},
 			expectedItemCount: 5,
 			expectedFileCount: 3,
 			expectedContainerCount: 3,
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
-				id(folder): driveFullPath(drive, name(folder)),
-				id(pkg): driveFullPath(drive, name(pkg)),
+				rootID: d.strPath(),
+				folderID(): d.strPath(folderName()),
+				id(pkg): d.strPath(name(pkg)),
 			},
 			expectedTopLevelPackages: map[string]struct{}{
-				driveFullPath(drive, name(pkg)): {},
+				d.strPath(name(pkg)): {},
 			},
 			expectedCountPackages: 1,
-			expectedExcludes: makeExcludeMap(idx(file, "inRoot"), idx(file, "inFolder"), idx(file, "inPackage")),
+			expectedExcludes: makeExcludeMap(fileID("inRoot"), fileID("inFolder"), fileID("inPackage")),
 		},
 		{
 			name: "contains folder selector",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				driveItem(idx(file, "inRoot"), namex(file, "inRoot"), driveParentDir(drive), rootID, isFile),
-				driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder),
-				driveItem(id(subfolder), name(subfolder), driveParentDir(drive, name(folder)), id(folder), isFolder),
-				driveItem(idx(folder, 2), name(folder), driveParentDir(drive, name(folder), name(subfolder)), id(subfolder), isFolder),
-				driveItem(id(pkg), name(pkg), driveParentDir(drive), rootID, isPackage),
-				driveItem(idx(file, "inFolder"), idx(file, "inFolder"), driveParentDir(drive, name(folder)), id(folder), isFile),
-				driveItem(idx(file, "inFolder2"), namex(file, "inFolder2"), driveParentDir(drive, name(folder), name(subfolder), name(folder)), idx(folder, 2), isFile),
-				driveItem(idx(file, "inFolderPackage"), namex(file, "inPackage"), driveParentDir(drive, name(pkg)), id(pkg), isFile),
+				driveRootFolder(),
+				driveFile(d.dir(), rootID, "inRoot"),
+				driveFolder(d.dir(), rootID),
+				driveItem(id(subfolder), name(subfolder), d.dir(folderName()), folderID(), isFolder),
+				driveItem(folderID(2), folderName(), d.dir(folderName(), name(subfolder)), id(subfolder), isFolder),
+				driveItem(id(pkg), name(pkg), d.dir(), rootID, isPackage),
+				driveItem(fileID("inFolder"), fileID("inFolder"), d.dir(folderName()), folderID(), isFile),
+				driveItem(fileID("inFolder2"), fileName("inFolder2"), d.dir(folderName(), name(subfolder), folderName()), folderID(2), isFile),
+				driveItem(fileID("inFolderPackage"), fileName("inPackage"), d.dir(name(pkg)), id(pkg), isFile),
 			},
 			previousPaths: map[string]string{},
-			scope: (&selectors.OneDriveBackup{}).Folders([]string{name(folder)})[0],
+			scope: (&selectors.OneDriveBackup{}).Folders([]string{folderName()})[0],
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				id(folder): asNew(t, driveFullPath(drive, name(folder))),
-				id(subfolder): asNew(t, driveFullPath(drive, name(folder), name(subfolder))),
-				idx(folder, 2): asNew(t, driveFullPath(drive, name(folder), name(subfolder), name(folder))),
+				folderID(): asNew(t, d.strPath(folderName())),
+				id(subfolder): asNew(t, d.strPath(folderName(), name(subfolder))),
+				folderID(2): asNew(t, d.strPath(folderName(), name(subfolder), folderName())),
 			},
 			expectedItemCount: 5,
 			expectedFileCount: 2,
@@ -270,99 +268,99 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 			// just "folder" isn't added here because the include check is done on the
 			// parent path since we only check later if something is a folder or not.
 			expectedPrevPaths: map[string]string{
-				id(folder): driveFullPath(drive, name(folder)),
-				id(subfolder): driveFullPath(drive, name(folder), name(subfolder)),
-				idx(folder, 2): driveFullPath(drive, name(folder), name(subfolder), name(folder)),
+				folderID(): d.strPath(folderName()),
+				id(subfolder): d.strPath(folderName(), name(subfolder)),
+				folderID(2): d.strPath(folderName(), name(subfolder), folderName()),
 			},
 			expectedTopLevelPackages: map[string]struct{}{},
-			expectedExcludes: makeExcludeMap(idx(file, "inFolder"), idx(file, "inFolder2")),
+			expectedExcludes: makeExcludeMap(fileID("inFolder"), fileID("inFolder2")),
 		},
 		{
 			name: "prefix subfolder selector",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				driveItem(idx(file, "inRoot"), namex(file, "inRoot"), driveParentDir(drive), rootID, isFile),
-				driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder),
-				driveItem(id(subfolder), name(subfolder), driveParentDir(drive, name(folder)), id(folder), isFolder),
-				driveItem(idx(folder, 2), name(folder), driveParentDir(drive, name(folder), name(subfolder)), id(subfolder), isFolder),
-				driveItem(id(pkg), name(pkg), driveParentDir(drive), rootID, isPackage),
-				driveItem(idx(file, "inFolder"), idx(file, "inFolder"), driveParentDir(drive, name(folder)), id(folder), isFile),
-				driveItem(idx(file, "inFolder2"), namex(file, "inFolder2"), driveParentDir(drive, name(folder), name(subfolder), name(folder)), idx(folder, 2), isFile),
-				driveItem(idx(file, "inFolderPackage"), namex(file, "inPackage"), driveParentDir(drive, name(pkg)), id(pkg), isFile),
+				driveRootFolder(),
+				driveFile(d.dir(), rootID, "inRoot"),
+				driveFolder(d.dir(), rootID),
+				driveItem(id(subfolder), name(subfolder), d.dir(folderName()), folderID(), isFolder),
+				driveItem(folderID(2), folderName(), d.dir(folderName(), name(subfolder)), id(subfolder), isFolder),
+				driveItem(id(pkg), name(pkg), d.dir(), rootID, isPackage),
+				driveItem(fileID("inFolder"), fileID("inFolder"), d.dir(folderName()), folderID(), isFile),
+				driveItem(fileID("inFolder2"), fileName("inFolder2"), d.dir(folderName(), name(subfolder), folderName()), folderID(2), isFile),
+				driveItem(fileID("inFolderPackage"), fileName("inPackage"), d.dir(name(pkg)), id(pkg), isFile),
 			},
 			previousPaths: map[string]string{},
 			scope: (&selectors.OneDriveBackup{}).Folders(
-				[]string{toPath(name(folder), name(subfolder))},
+				[]string{toPath(folderName(), name(subfolder))},
 				selectors.PrefixMatch())[0],
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				id(subfolder): asNew(t, driveFullPath(drive, name(folder), name(subfolder))),
-				idx(folder, 2): asNew(t, driveFullPath(drive, name(folder), name(subfolder), name(folder))),
+				id(subfolder): asNew(t, d.strPath(folderName(), name(subfolder))),
+				folderID(2): asNew(t, d.strPath(folderName(), name(subfolder), folderName())),
 			},
 			expectedItemCount: 3,
 			expectedFileCount: 1,
 			expectedContainerCount: 2,
 			expectedPrevPaths: map[string]string{
-				id(subfolder): driveFullPath(drive, name(folder), name(subfolder)),
-				idx(folder, 2): driveFullPath(drive, name(folder), name(subfolder), name(folder)),
+				id(subfolder): d.strPath(folderName(), name(subfolder)),
+				folderID(2): d.strPath(folderName(), name(subfolder), folderName()),
 			},
 			expectedTopLevelPackages: map[string]struct{}{},
-			expectedExcludes: makeExcludeMap(idx(file, "inFolder2")),
+			expectedExcludes: makeExcludeMap(fileID("inFolder2")),
 		},
 		{
 			name: "match subfolder selector",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				driveItem(id(file), name(file), driveParentDir(drive), rootID, isFile),
-				driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder),
-				driveItem(id(subfolder), name(subfolder), driveParentDir(drive, name(folder)), id(folder), isFolder),
-				driveItem(id(pkg), name(pkg), driveParentDir(drive), rootID, isPackage),
-				driveItem(idx(file, 1), namex(file, 1), driveParentDir(drive, name(folder)), id(folder), isFile),
-				driveItem(idx(file, "inSubfolder"), namex(file, "inSubfolder"), driveParentDir(drive, name(folder), name(subfolder)), id(subfolder), isFile),
-				driveItem(idx(file, 9), namex(file, 9), driveParentDir(drive, name(pkg)), id(pkg), isFile),
+				driveRootFolder(),
+				driveFile(d.dir(), rootID),
+				driveFolder(d.dir(), rootID),
+				driveItem(id(subfolder), name(subfolder), d.dir(folderName()), folderID(), isFolder),
+				driveItem(id(pkg), name(pkg), d.dir(), rootID, isPackage),
+				driveItem(fileID(1), fileName(1), d.dir(folderName()), folderID(), isFile),
+				driveItem(fileID("inSubfolder"), fileName("inSubfolder"), d.dir(folderName(), name(subfolder)), id(subfolder), isFile),
+				driveItem(fileID(9), fileName(9), d.dir(name(pkg)), id(pkg), isFile),
 			},
 			previousPaths: map[string]string{},
-			scope: (&selectors.OneDriveBackup{}).Folders([]string{toPath(name(folder), name(subfolder))})[0],
+			scope: (&selectors.OneDriveBackup{}).Folders([]string{toPath(folderName(), name(subfolder))})[0],
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				id(subfolder): asNew(t, driveFullPath(drive, name(folder), name(subfolder))),
+				id(subfolder): asNew(t, d.strPath(folderName(), name(subfolder))),
 			},
 			expectedItemCount: 2,
 			expectedFileCount: 1,
 			expectedContainerCount: 1,
 			// No child folders for subfolder so nothing here.
 			expectedPrevPaths: map[string]string{
-				id(subfolder): driveFullPath(drive, name(folder), name(subfolder)),
+				id(subfolder): d.strPath(folderName(), name(subfolder)),
 			},
 			expectedTopLevelPackages: map[string]struct{}{},
-			expectedExcludes: makeExcludeMap(idx(file, "inSubfolder")),
+			expectedExcludes: makeExcludeMap(fileID("inSubfolder")),
 		},
 		{
 			name: "not moved folder tree",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder),
+				driveRootFolder(),
+				driveFolder(d.dir(), rootID),
 			},
 			previousPaths: map[string]string{
-				id(folder): driveFullPath(drive, name(folder)),
-				id(subfolder): driveFullPath(drive, name(folder), name(subfolder)),
+				folderID(): d.strPath(folderName()),
+				id(subfolder): d.strPath(folderName(), name(subfolder)),
 			},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
-				id(folder): asNotMoved(t, driveFullPath(drive, name(folder))),
+				rootID: asNotMoved(t, d.strPath()),
+				folderID(): asNotMoved(t, d.strPath(folderName())),
 			},
 			expectedItemCount: 1,
 			expectedFileCount: 0,
 			expectedContainerCount: 2,
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
-				id(folder): driveFullPath(drive, name(folder)),
-				id(subfolder): driveFullPath(drive, name(folder), name(subfolder)),
+				rootID: d.strPath(),
+				folderID(): d.strPath(folderName()),
+				id(subfolder): d.strPath(folderName(), name(subfolder)),
 			},
 			expectedTopLevelPackages: map[string]struct{}{},
 			expectedExcludes: map[string]struct{}{},
@@ -370,27 +368,27 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 		{
 			name: "moved folder tree",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder),
+				driveRootFolder(),
+				driveFolder(d.dir(), rootID),
 			},
 			previousPaths: map[string]string{
-				id(folder): driveFullPath(drive, namex(folder, "a")),
-				id(subfolder): driveFullPath(drive, namex(folder, "a"), name(subfolder)),
+				folderID(): d.strPath(folderName("a")),
+				id(subfolder): d.strPath(folderName("a"), name(subfolder)),
 			},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
-				id(folder): asMoved(t, driveFullPath(drive, namex(folder, "a")), driveFullPath(drive, name(folder))),
+				rootID: asNotMoved(t, d.strPath()),
+				folderID(): asMoved(t, d.strPath(folderName("a")), d.strPath(folderName())),
 			},
 			expectedItemCount: 1,
 			expectedFileCount: 0,
 			expectedContainerCount: 2,
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
-				id(folder): driveFullPath(drive, name(folder)),
-				id(subfolder): driveFullPath(drive, name(folder), name(subfolder)),
+				rootID: d.strPath(),
+				folderID(): d.strPath(folderName()),
+				id(subfolder): d.strPath(folderName(), name(subfolder)),
 			},
 			expectedTopLevelPackages: map[string]struct{}{},
 			expectedExcludes: map[string]struct{}{},
@@ -398,28 +396,28 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 		{
 			name: "moved folder tree twice within backup",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				driveItem(idx(folder, 1), name(folder), driveParentDir(drive), rootID, isFolder),
-				driveItem(idx(folder, 2), name(folder), driveParentDir(drive), rootID, isFolder),
+				driveRootFolder(),
+				driveItem(folderID(1), folderName(), d.dir(), rootID, isFolder),
+				driveItem(folderID(2), folderName(), d.dir(), rootID, isFolder),
 			},
 			previousPaths: map[string]string{
-				idx(folder, 1): driveFullPath(drive, namex(folder, "a")),
-				id(subfolder): driveFullPath(drive, namex(folder, "a"), name(subfolder)),
+				folderID(1): d.strPath(folderName("a")),
+				id(subfolder): d.strPath(folderName("a"), name(subfolder)),
 			},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
-				idx(folder, 2): asNew(t, driveFullPath(drive, name(folder))),
+				rootID: asNotMoved(t, d.strPath()),
+				folderID(2): asNew(t, d.strPath(folderName())),
 			},
 			expectedItemCount: 1,
 			expectedFileCount: 0,
 			expectedContainerCount: 2,
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
-				idx(folder, 2): driveFullPath(drive, name(folder)),
-				id(subfolder): driveFullPath(drive, name(folder), name(subfolder)),
+				rootID: d.strPath(),
+				folderID(2): d.strPath(folderName()),
+				id(subfolder): d.strPath(folderName(), name(subfolder)),
 			},
 			expectedTopLevelPackages: map[string]struct{}{},
 			expectedExcludes: map[string]struct{}{},
@@ -427,28 +425,28 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 		{
 			name: "deleted folder tree twice within backup",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				delItem(id(folder), rootID, isFolder),
-				driveItem(id(folder), name(drive), driveParentDir(drive), rootID, isFolder),
-				delItem(id(folder), rootID, isFolder),
+				driveRootFolder(),
+				delItem(folderID(), rootID, isFolder),
+				driveItem(folderID(), name(drivePfx), d.dir(), rootID, isFolder),
+				delItem(folderID(), rootID, isFolder),
 			},
 			previousPaths: map[string]string{
-				id(folder): driveFullPath(drive),
-				id(subfolder): driveFullPath(drive, namex(folder, "a"), name(subfolder)),
+				folderID(): d.strPath(),
+				id(subfolder): d.strPath(folderName("a"), name(subfolder)),
 			},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
-				id(folder): asDeleted(t, driveFullPath(drive, "")),
+				rootID: asNotMoved(t, d.strPath()),
+				folderID(): asDeleted(t, d.strPath("")),
 			},
 			expectedItemCount: 0,
 			expectedFileCount: 0,
 			expectedContainerCount: 1,
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
-				id(subfolder): driveFullPath(drive, namex(folder, "a"), name(subfolder)),
+				rootID: d.strPath(),
+				id(subfolder): d.strPath(folderName("a"), name(subfolder)),
 			},
 			expectedTopLevelPackages: map[string]struct{}{},
 			expectedExcludes: map[string]struct{}{},
@@ -456,29 +454,29 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 		{
 			name: "moved folder tree twice within backup including delete",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder),
-				delItem(id(folder), rootID, isFolder),
-				driveItem(idx(folder, 2), name(folder), driveParentDir(drive), rootID, isFolder),
+				driveRootFolder(),
+				driveFolder(d.dir(), rootID),
+				delItem(folderID(), rootID, isFolder),
+				driveItem(folderID(2), folderName(), d.dir(), rootID, isFolder),
 			},
 			previousPaths: map[string]string{
-				id(folder): driveFullPath(drive, namex(folder, "a")),
-				id(subfolder): driveFullPath(drive, namex(folder, "a"), name(subfolder)),
+				folderID(): d.strPath(folderName("a")),
+				id(subfolder): d.strPath(folderName("a"), name(subfolder)),
 			},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
-				idx(folder, 2): asNew(t, driveFullPath(drive, name(folder))),
+				rootID: asNotMoved(t, d.strPath()),
+				folderID(2): asNew(t, d.strPath(folderName())),
 			},
 			expectedItemCount: 1,
 			expectedFileCount: 0,
 			expectedContainerCount: 2,
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
-				idx(folder, 2): driveFullPath(drive, name(folder)),
-				id(subfolder): driveFullPath(drive, name(folder), name(subfolder)),
+				rootID: d.strPath(),
+				folderID(2): d.strPath(folderName()),
+				id(subfolder): d.strPath(folderName(), name(subfolder)),
 			},
 			expectedTopLevelPackages: map[string]struct{}{},
 			expectedExcludes: map[string]struct{}{},
@@ -486,28 +484,28 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 		{
 			name: "deleted folder tree twice within backup with addition",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				driveItem(idx(folder, 1), name(folder), driveParentDir(drive), rootID, isFolder),
-				delItem(idx(folder, 1), rootID, isFolder),
-				driveItem(idx(folder, 2), name(folder), driveParentDir(drive), rootID, isFolder),
-				delItem(idx(folder, 2), rootID, isFolder),
+				driveRootFolder(),
+				driveItem(folderID(1), folderName(), d.dir(), rootID, isFolder),
+				delItem(folderID(1), rootID, isFolder),
+				driveItem(folderID(2), folderName(), d.dir(), rootID, isFolder),
+				delItem(folderID(2), rootID, isFolder),
 			},
 			previousPaths: map[string]string{
-				idx(folder, 1): driveFullPath(drive, namex(folder, "a")),
-				id(subfolder): driveFullPath(drive, namex(folder, "a"), name(subfolder)),
+				folderID(1): d.strPath(folderName("a")),
+				id(subfolder): d.strPath(folderName("a"), name(subfolder)),
 			},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
+				rootID: asNotMoved(t, d.strPath()),
 			},
 			expectedItemCount: 1,
 			expectedFileCount: 0,
 			expectedContainerCount: 2,
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
-				id(subfolder): driveFullPath(drive, name(folder), name(subfolder)),
+				rootID: d.strPath(),
+				id(subfolder): d.strPath(folderName(), name(subfolder)),
 			},
 			expectedTopLevelPackages: map[string]struct{}{},
 			expectedExcludes: map[string]struct{}{},
@@ -515,80 +513,80 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 		{
 			name: "moved folder tree with file no previous",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder),
-				driveItem(id(file), name(file), driveParentDir(drive, name(folder)), id(folder), isFile),
-				driveItem(id(folder), namex(folder, 2), driveParentDir(drive), rootID, isFolder),
+				driveRootFolder(),
+				driveFolder(d.dir(), rootID),
+				driveItem(fileID(), fileName(), d.dir(folderName()), folderID(), isFile),
+				driveItem(folderID(), folderName(2), d.dir(), rootID, isFolder),
 			},
 			previousPaths: map[string]string{},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
-				id(folder): asNew(t, driveFullPath(drive, namex(folder, 2))),
+				rootID: asNotMoved(t, d.strPath()),
+				folderID(): asNew(t, d.strPath(folderName(2))),
 			},
 			expectedItemCount: 2,
 			expectedFileCount: 1,
 			expectedContainerCount: 2,
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
-				id(folder): driveFullPath(drive, namex(folder, 2)),
+				rootID: d.strPath(),
+				folderID(): d.strPath(folderName(2)),
 			},
 			expectedTopLevelPackages: map[string]struct{}{},
-			expectedExcludes: makeExcludeMap(id(file)),
+			expectedExcludes: makeExcludeMap(fileID()),
 		},
 		{
 			name: "moved folder tree with file no previous 1",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder),
-				driveItem(id(file), name(file), driveParentDir(drive, name(folder)), id(folder), isFile),
+				driveRootFolder(),
+				driveFolder(d.dir(), rootID),
+				driveItem(fileID(), fileName(), d.dir(folderName()), folderID(), isFile),
 			},
 			previousPaths: map[string]string{},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
-				id(folder): asNew(t, driveFullPath(drive, name(folder))),
+				rootID: asNotMoved(t, d.strPath()),
+				folderID(): asNew(t, d.strPath(folderName())),
 			},
 			expectedItemCount: 2,
 			expectedFileCount: 1,
 			expectedContainerCount: 2,
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
-				id(folder): driveFullPath(drive, name(folder)),
+				rootID: d.strPath(),
+				folderID(): d.strPath(folderName()),
 			},
 			expectedTopLevelPackages: map[string]struct{}{},
-			expectedExcludes: makeExcludeMap(id(file)),
+			expectedExcludes: makeExcludeMap(fileID()),
 		},
 		{
 			name: "moved folder tree and subfolder 1",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder),
-				driveItem(id(subfolder), name(subfolder), driveParentDir(drive), rootID, isFolder),
+				driveRootFolder(),
+				driveFolder(d.dir(), rootID),
+				driveItem(id(subfolder), name(subfolder), d.dir(), rootID, isFolder),
 			},
 			previousPaths: map[string]string{
-				id(folder): driveFullPath(drive, namex(folder, "a")),
-				id(subfolder): driveFullPath(drive, namex(folder, "a"), name(subfolder)),
+				folderID(): d.strPath(folderName("a")),
+				id(subfolder): d.strPath(folderName("a"), name(subfolder)),
 			},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
-				id(folder): asMoved(t, driveFullPath(drive, namex(folder, "a")), driveFullPath(drive, name(folder))),
-				id(subfolder): asMoved(t, driveFullPath(drive, namex(folder, "a"), name(subfolder)), driveFullPath(drive, name(subfolder))),
+				rootID: asNotMoved(t, d.strPath()),
+				folderID(): asMoved(t, d.strPath(folderName("a")), d.strPath(folderName())),
+				id(subfolder): asMoved(t, d.strPath(folderName("a"), name(subfolder)), d.strPath(name(subfolder))),
 			},
 			expectedItemCount: 2,
 			expectedFileCount: 0,
 			expectedContainerCount: 3,
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
-				id(folder): driveFullPath(drive, name(folder)),
-				id(subfolder): driveFullPath(drive, name(subfolder)),
+				rootID: d.strPath(),
+				folderID(): d.strPath(folderName()),
+				id(subfolder): d.strPath(name(subfolder)),
 			},
 			expectedTopLevelPackages: map[string]struct{}{},
 			expectedExcludes: map[string]struct{}{},
@@ -596,29 +594,29 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 		{
 			name: "moved folder tree and subfolder 2",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				driveItem(id(subfolder), name(subfolder), driveParentDir(drive), rootID, isFolder),
-				driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder),
+				driveRootFolder(),
+				driveItem(id(subfolder), name(subfolder), d.dir(), rootID, isFolder),
+				driveFolder(d.dir(), rootID),
 			},
 			previousPaths: map[string]string{
-				id(folder): driveFullPath(drive, namex(folder, "a")),
-				id(subfolder): driveFullPath(drive, namex(folder, "a"), name(subfolder)),
+				folderID(): d.strPath(folderName("a")),
+				id(subfolder): d.strPath(folderName("a"), name(subfolder)),
 			},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
-				id(folder): asMoved(t, driveFullPath(drive, namex(folder, "a")), driveFullPath(drive, name(folder))),
-				id(subfolder): asMoved(t, driveFullPath(drive, namex(folder, "a"), name(subfolder)), driveFullPath(drive, name(subfolder))),
+				rootID: asNotMoved(t, d.strPath()),
+				folderID(): asMoved(t, d.strPath(folderName("a")), d.strPath(folderName())),
+				id(subfolder): asMoved(t, d.strPath(folderName("a"), name(subfolder)), d.strPath(name(subfolder))),
 			},
 			expectedItemCount: 2,
 			expectedFileCount: 0,
 			expectedContainerCount: 3,
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
-				id(folder): driveFullPath(drive, name(folder)),
-				id(subfolder): driveFullPath(drive, name(subfolder)),
+				rootID: d.strPath(),
+				folderID(): d.strPath(folderName()),
+				id(subfolder): d.strPath(name(subfolder)),
 			},
 			expectedTopLevelPackages: map[string]struct{}{},
 			expectedExcludes: map[string]struct{}{},
@@ -626,96 +624,96 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 		{
 			name: "move subfolder when moving parent",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				driveItem(idx(folder, 2), namex(folder, 2), driveParentDir(drive), rootID, isFolder),
-				driveItem(id(item), name(item), driveParentDir(drive, namex(folder, 2)), idx(folder, 2), isFile),
+				driveRootFolder(),
+				driveItem(folderID(2), folderName(2), d.dir(), rootID, isFolder),
+				driveItem(id(item), name(item), d.dir(folderName(2)), folderID(2), isFile),
 				// Need to see the parent folder first (expected since that's what Graph
 				// consistently returns).
-				driveItem(id(folder), namex(folder, "a"), driveParentDir(drive), rootID, isFolder),
-				driveItem(id(subfolder), name(subfolder), driveParentDir(drive, namex(folder, "a")), id(folder), isFolder),
-				driveItem(idx(item, 2), namex(item, 2), driveParentDir(drive, namex(folder, "a"), name(subfolder)), id(subfolder), isFile),
-				driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder),
+				driveItem(folderID(), folderName("a"), d.dir(), rootID, isFolder),
+				driveItem(id(subfolder), name(subfolder), d.dir(folderName("a")), folderID(), isFolder),
+				driveItem(id(item, 2), name(item, 2), d.dir(folderName("a"), name(subfolder)), id(subfolder), isFile),
+				driveFolder(d.dir(), rootID),
 			},
 			previousPaths: map[string]string{
-				id(folder): driveFullPath(drive, namex(folder, "a")),
-				id(subfolder): driveFullPath(drive, namex(folder, "a"), name(subfolder)),
+				folderID(): d.strPath(folderName("a")),
+				id(subfolder): d.strPath(folderName("a"), name(subfolder)),
 			},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
-				idx(folder, 2): asNew(t, driveFullPath(drive, namex(folder, 2))),
-				id(folder): asMoved(t, driveFullPath(drive, namex(folder, "a")), driveFullPath(drive, name(folder))),
-				id(subfolder): asMoved(t, driveFullPath(drive, namex(folder, "a"), name(subfolder)), driveFullPath(drive, name(folder), name(subfolder))),
+				rootID: asNotMoved(t, d.strPath()),
+				folderID(2): asNew(t, d.strPath(folderName(2))),
+				folderID(): asMoved(t, d.strPath(folderName("a")), d.strPath(folderName())),
+				id(subfolder): asMoved(t, d.strPath(folderName("a"), name(subfolder)), d.strPath(folderName(), name(subfolder))),
 			},
 			expectedItemCount: 5,
 			expectedFileCount: 2,
 			expectedContainerCount: 4,
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
-				id(folder): driveFullPath(drive, name(folder)),
-				idx(folder, 2): driveFullPath(drive, namex(folder, 2)),
-				id(subfolder): driveFullPath(drive, name(folder), name(subfolder)),
+				rootID: d.strPath(),
+				folderID(): d.strPath(folderName()),
+				folderID(2): d.strPath(folderName(2)),
+				id(subfolder): d.strPath(folderName(), name(subfolder)),
 			},
 			expectedTopLevelPackages: map[string]struct{}{},
-			expectedExcludes: makeExcludeMap(id(item), idx(item, 2)),
+			expectedExcludes: makeExcludeMap(id(item), id(item, 2)),
 		},
 		{
 			name: "moved folder tree multiple times",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder),
-				driveItem(id(file), name(file), driveParentDir(drive, name(folder)), id(folder), isFile),
-				driveItem(id(folder), namex(folder, 2), driveParentDir(drive), rootID, isFolder),
+				driveRootFolder(),
+				driveFolder(d.dir(), rootID),
+				driveItem(fileID(), fileName(), d.dir(folderName()), folderID(), isFile),
+				driveItem(folderID(), folderName(2), d.dir(), rootID, isFolder),
 			},
 			previousPaths: map[string]string{
-				id(folder): driveFullPath(drive, namex(folder, "a")),
-				id(subfolder): driveFullPath(drive, namex(folder, "a"), name(subfolder)),
+				folderID(): d.strPath(folderName("a")),
+				id(subfolder): d.strPath(folderName("a"), name(subfolder)),
 			},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
-				id(folder): asMoved(t, driveFullPath(drive, namex(folder, "a")), driveFullPath(drive, namex(folder, 2))),
+				rootID: asNotMoved(t, d.strPath()),
+				folderID(): asMoved(t, d.strPath(folderName("a")), d.strPath(folderName(2))),
 			},
 			expectedItemCount: 2,
 			expectedFileCount: 1,
 			expectedContainerCount: 2,
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
-				id(folder): driveFullPath(drive, namex(folder, 2)),
-				id(subfolder): driveFullPath(drive, namex(folder, 2), name(subfolder)),
+				rootID: d.strPath(),
+				folderID(): d.strPath(folderName(2)),
+				id(subfolder): d.strPath(folderName(2), name(subfolder)),
 			},
 			expectedTopLevelPackages: map[string]struct{}{},
-			expectedExcludes: makeExcludeMap(id(file)),
+			expectedExcludes: makeExcludeMap(fileID()),
 		},
 		{
 			name: "deleted folder and package",
 			items: []models.DriveItemable{
-				driveRootItem(), // root is always present, but not necessary here
-				delItem(id(folder), rootID, isFolder),
+				driveRootFolder(), // root is always present, but not necessary here
+				delItem(folderID(), rootID, isFolder),
 				delItem(id(pkg), rootID, isPackage),
 			},
 			previousPaths: map[string]string{
-				rootID: driveFullPath(drive),
-				id(folder): driveFullPath(drive, name(folder)),
-				id(pkg): driveFullPath(drive, name(pkg)),
+				rootID: d.strPath(),
+				folderID(): d.strPath(folderName()),
+				id(pkg): d.strPath(name(pkg)),
 			},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
-				id(folder): asDeleted(t, driveFullPath(drive, name(folder))),
-				id(pkg): asDeleted(t, driveFullPath(drive, name(pkg))),
+				rootID: asNotMoved(t, d.strPath()),
+				folderID(): asDeleted(t, d.strPath(folderName())),
+				id(pkg): asDeleted(t, d.strPath(name(pkg))),
 			},
 			expectedItemCount: 0,
 			expectedFileCount: 0,
 			expectedContainerCount: 1,
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
+				rootID: d.strPath(),
 			},
 			expectedTopLevelPackages: map[string]struct{}{},
 			expectedExcludes: map[string]struct{}{},
@@ -723,23 +721,23 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 		{
 			name: "delete folder without previous",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				delItem(id(folder), rootID, isFolder),
+				driveRootFolder(),
+				delItem(folderID(), rootID, isFolder),
 			},
 			previousPaths: map[string]string{
-				rootID: driveFullPath(drive),
+				rootID: d.strPath(),
 			},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
+				rootID: asNotMoved(t, d.strPath()),
 			},
 			expectedItemCount: 0,
 			expectedFileCount: 0,
 			expectedContainerCount: 1,
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
+				rootID: d.strPath(),
 			},
 			expectedTopLevelPackages: map[string]struct{}{},
 			expectedExcludes: map[string]struct{}{},
@@ -747,29 +745,29 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 		{
 			name: "delete folder tree move subfolder",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				delItem(id(folder), rootID, isFolder),
-				driveItem(id(subfolder), name(subfolder), driveParentDir(drive), rootID, isFolder),
+				driveRootFolder(),
+				delItem(folderID(), rootID, isFolder),
+				driveItem(id(subfolder), name(subfolder), d.dir(), rootID, isFolder),
 			},
 			previousPaths: map[string]string{
-				rootID: driveFullPath(drive),
-				id(folder): driveFullPath(drive, name(folder)),
-				id(subfolder): driveFullPath(drive, name(folder), name(subfolder)),
+				rootID: d.strPath(),
+				folderID(): d.strPath(folderName()),
+				id(subfolder): d.strPath(folderName(), name(subfolder)),
 			},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
-				id(folder): asDeleted(t, driveFullPath(drive, name(folder))),
-				id(subfolder): asMoved(t, driveFullPath(drive, name(folder), name(subfolder)), driveFullPath(drive, name(subfolder))),
+				rootID: asNotMoved(t, d.strPath()),
+				folderID(): asDeleted(t, d.strPath(folderName())),
+				id(subfolder): asMoved(t, d.strPath(folderName(), name(subfolder)), d.strPath(name(subfolder))),
 			},
 			expectedItemCount: 1,
 			expectedFileCount: 0,
 			expectedContainerCount: 2,
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
-				id(subfolder): driveFullPath(drive, name(subfolder)),
+				rootID: d.strPath(),
+				id(subfolder): d.strPath(name(subfolder)),
 			},
 			expectedTopLevelPackages: map[string]struct{}{},
 			expectedExcludes: map[string]struct{}{},
@@ -777,23 +775,23 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 		{
 			name: "delete file",
 			items: []models.DriveItemable{
-				driveRootItem(),
+				driveRootFolder(),
 				delItem(id(item), rootID, isFile),
 			},
 			previousPaths: map[string]string{
-				rootID: driveFullPath(drive),
+				rootID: d.strPath(),
 			},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
+				rootID: asNotMoved(t, d.strPath()),
 			},
 			expectedItemCount: 1,
 			expectedFileCount: 1,
 			expectedContainerCount: 1,
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
+				rootID: d.strPath(),
 			},
 			expectedTopLevelPackages: map[string]struct{}{},
 			expectedExcludes: makeExcludeMap(id(item)),
@@ -801,22 +799,22 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 		{
 			name: "item before parent errors",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				driveItem(id(file), name(file), driveParentDir(drive, name(folder)), id(folder), isFile),
-				driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder),
+				driveRootFolder(),
+				driveItem(fileID(), fileName(), d.dir(folderName()), folderID(), isFile),
+				driveFolder(d.dir(), rootID),
 			},
 			previousPaths: map[string]string{},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.Error,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
+				rootID: asNotMoved(t, d.strPath()),
 			},
 			expectedItemCount: 0,
 			expectedFileCount: 0,
 			expectedContainerCount: 1,
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
+				rootID: d.strPath(),
 			},
 			expectedTopLevelPackages: map[string]struct{}{},
 			expectedExcludes: map[string]struct{}{},
@@ -824,36 +822,36 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 		{
 			name: "1 root file, 1 folder, 1 package, 1 good file, 1 malware",
 			items: []models.DriveItemable{
-				driveRootItem(),
-				driveItem(id(file), id(file), driveParentDir(drive), rootID, isFile),
-				driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder),
-				driveItem(id(pkg), name(pkg), driveParentDir(drive), rootID, isPackage),
-				driveItem(idx(file, "good"), namex(file, "good"), driveParentDir(drive, name(folder)), id(folder), isFile),
-				malwareItem(id(malware), name(malware), driveParentDir(drive, name(folder)), id(folder), isFile),
+				driveRootFolder(),
+				driveItem(fileID(), fileID(), d.dir(), rootID, isFile),
+				driveFolder(d.dir(), rootID),
+				driveItem(id(pkg), name(pkg), d.dir(), rootID, isPackage),
+				driveItem(fileID("good"), fileName("good"), d.dir(folderName()), folderID(), isFile),
+				malwareItem(id(malware), name(malware), d.dir(folderName()), folderID(), isFile),
 			},
 			previousPaths: map[string]string{},
 			scope: anyFolderScope,
 			topLevelPackages: map[string]struct{}{},
 			expect: assert.NoError,
 			expectedCollectionIDs: map[string]statePath{
-				rootID: asNotMoved(t, driveFullPath(drive)),
-				id(folder): asNew(t, driveFullPath(drive, name(folder))),
-				id(pkg): asNew(t, driveFullPath(drive, name(pkg))),
+				rootID: asNotMoved(t, d.strPath()),
+				folderID(): asNew(t, d.strPath(folderName())),
+				id(pkg): asNew(t, d.strPath(name(pkg))),
 			},
 			expectedItemCount: 4,
 			expectedFileCount: 2,
 			expectedContainerCount: 3,
 			expectedSkippedCount: 1,
 			expectedPrevPaths: map[string]string{
-				rootID: driveFullPath(drive),
-				id(folder): driveFullPath(drive, name(folder)),
-				id(pkg): driveFullPath(drive, name(pkg)),
+				rootID: d.strPath(),
+				folderID(): d.strPath(folderName()),
+				id(pkg): d.strPath(name(pkg)),
 			},
 			expectedTopLevelPackages: map[string]struct{}{
-				driveFullPath(drive, name(pkg)): {},
+				d.strPath(name(pkg)): {},
 			},
 			expectedCountPackages: 1,
-			expectedExcludes: makeExcludeMap(id(file), idx(file, "good")),
+			expectedExcludes: makeExcludeMap(fileID(), fileID("good")),
 		},
 	}
@@ -865,15 +863,15 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 			defer flush()

 			var (
-				driveID = idx(drive, drive)
-				mbh = mock.DefaultOneDriveBH(user)
+				drive = drive()
+				mbh = defaultOneDriveBH(user)
 				excludes = map[string]struct{}{}
 				errs = fault.New(true)
 			)

-			mbh.DriveItemEnumeration = mock.DriveEnumerator(
-				mock.Drive(driveID).With(
-					mock.Delta("notempty", nil).With(
+			mbh.DriveItemEnumeration = driveEnumerator(
+				drive.newEnumer().with(
+					delta("notempty", nil).with(
 						aPage(test.items...))))

 			sel := selectors.NewOneDriveBackup([]string{user})
@@ -889,11 +887,11 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 				control.Options{ToggleFeatures: control.Toggles{}},
 				count.New())

-			c.CollectionMap[driveID] = map[string]*Collection{}
+			c.CollectionMap[drive.id] = map[string]*Collection{}

 			_, newPrevPaths, err := c.PopulateDriveCollections(
 				ctx,
-				driveID,
+				drive.id,
 				"General",
 				test.previousPaths,
 				excludes,
@@ -905,7 +903,7 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 			assert.ElementsMatch(
 				t,
 				maps.Keys(test.expectedCollectionIDs),
-				maps.Keys(c.CollectionMap[driveID]),
+				maps.Keys(c.CollectionMap[drive.id]),
 				"expected collection IDs")
 			assert.Equal(t, test.expectedItemCount, c.NumItems, "item count")
 			assert.Equal(t, test.expectedFileCount, c.NumFiles, "file count")
@@ -913,14 +911,14 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 			assert.Equal(t, test.expectedSkippedCount, len(errs.Skipped()), "skipped item count")

 			for id, sp := range test.expectedCollectionIDs {
-				if !assert.Containsf(t, c.CollectionMap[driveID], id, "missing collection with id %s", id) {
+				if !assert.Containsf(t, c.CollectionMap[drive.id], id, "missing collection with id %s", id) {
 					// Skip collections we don't find so we don't get an NPE.
 					continue
 				}

-				assert.Equalf(t, sp.state, c.CollectionMap[driveID][id].State(), "state for collection %s", id)
-				assert.Equalf(t, sp.currPath, c.CollectionMap[driveID][id].FullPath(), "current path for collection %s", id)
-				assert.Equalf(t, sp.prevPath, c.CollectionMap[driveID][id].PreviousPath(), "prev path for collection %s", id)
+				assert.Equalf(t, sp.state, c.CollectionMap[drive.id][id].State(), "state for collection %s", id)
+				assert.Equalf(t, sp.currPath, c.CollectionMap[drive.id][id].FullPath(), "current path for collection %s", id)
+				assert.Equalf(t, sp.prevPath, c.CollectionMap[drive.id][id].PreviousPath(), "prev path for collection %s", id)
 			}

 			assert.Equal(t, test.expectedPrevPaths, newPrevPaths, "previous paths")
@@ -943,6 +941,9 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 }

 func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
+	d := drive()
+	d2 := drive(2)
+
 	table := []struct {
 		name string
 		// Each function returns the set of files for a single data.Collection.
@@ -960,23 +961,23 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
 				return []graph.MetadataCollectionEntry{
 					graph.NewMetadataEntry(
 						bupMD.DeltaURLsFileName,
-						map[string]string{id(drive): id(delta)}),
+						map[string]string{d.id: id(deltaURL)}),
 					graph.NewMetadataEntry(
 						bupMD.PreviousPathFileName,
 						map[string]map[string]string{
-							id(drive): {
-								idx(folder, 1): driveFullPath(1),
+							d.id: {
+								folderID(1): d.strPath(),
 							},
 						}),
 				}
 			},
 		},
 		expectedDeltas: map[string]string{
-			id(drive): id(delta),
+			d.id: id(deltaURL),
 		},
 		expectedPaths: map[string]map[string]string{
-			id(drive): {
-				idx(folder, 1): driveFullPath(1),
+			d.id: {
+				folderID(1): d.strPath(),
 			},
 		},
 		canUsePreviousBackup: true,
@@ -989,7 +990,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
 				return []graph.MetadataCollectionEntry{
 					graph.NewMetadataEntry(
 						bupMD.DeltaURLsFileName,
-						map[string]string{id(drive): id(delta)}),
+						map[string]string{d.id: id(deltaURL)}),
 				}
 			},
 		},
@@ -1006,8 +1007,8 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
 					graph.NewMetadataEntry(
 						bupMD.PreviousPathFileName,
 						map[string]map[string]string{
-							id(drive): {
-								idx(folder, 1): driveFullPath(1),
+							d.id: {
+								folderID(1): d.strPath(),
 							},
 						}),
 				}
@@ -1015,8 +1016,8 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
 		},
 		expectedDeltas: map[string]string{},
 		expectedPaths: map[string]map[string]string{
-			id(drive): {
-				idx(folder, 1): driveFullPath(1),
+			d.id: {
+				folderID(1): d.strPath(),
 			},
 		},
 		canUsePreviousBackup: true,
@@ -1032,17 +1033,17 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
 				return []graph.MetadataCollectionEntry{
 					graph.NewMetadataEntry(
 						bupMD.DeltaURLsFileName,
-						map[string]string{id(drive): id(delta)}),
+						map[string]string{d.id: id(deltaURL)}),
 					graph.NewMetadataEntry(
 						bupMD.PreviousPathFileName,
 						map[string]map[string]string{
-							id(drive): {},
+							d.id: {},
 						}),
 				}
 			},
 		},
 		expectedDeltas: map[string]string{},
-		expectedPaths: map[string]map[string]string{id(drive): {}},
+		expectedPaths: map[string]map[string]string{d.id: {}},
 		canUsePreviousBackup: true,
 		errCheck: assert.NoError,
 	},
@@ -1057,22 +1058,22 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
 					graph.NewMetadataEntry(
 						bupMD.DeltaURLsFileName,
 						map[string]string{
-							id(drive): "",
+							d.id: "",
 						}),
 					graph.NewMetadataEntry(
 						bupMD.PreviousPathFileName,
 						map[string]map[string]string{
-							id(drive): {
-								idx(folder, 1): driveFullPath(1),
+							d.id: {
+								folderID(1): d.strPath(),
 							},
 						}),
 				}
 			},
 		},
-		expectedDeltas: map[string]string{id(drive): ""},
+		expectedDeltas: map[string]string{d.id: ""},
 		expectedPaths: map[string]map[string]string{
-			id(drive): {
-				idx(folder, 1): driveFullPath(1),
+			d.id: {
+				folderID(1): d.strPath(),
 			},
 		},
 		canUsePreviousBackup: true,
@@ -1085,12 +1086,12 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
 				return []graph.MetadataCollectionEntry{
 					graph.NewMetadataEntry(
 						bupMD.DeltaURLsFileName,
-						map[string]string{id(drive): id(delta)}),
+						map[string]string{d.id: id(deltaURL)}),
 					graph.NewMetadataEntry(
 						bupMD.PreviousPathFileName,
 						map[string]map[string]string{
-							id(drive): {
-								idx(folder, 1): driveFullPath(1),
+							d.id: {
+								folderID(1): d.strPath(),
 							},
 						}),
 				}
@@ -1099,27 +1100,27 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
 				return []graph.MetadataCollectionEntry{
 					graph.NewMetadataEntry(
 						bupMD.DeltaURLsFileName,
-						map[string]string{idx(drive, 2): idx(delta, 2)}),
+						map[string]string{d2.id: id(deltaURL, 2)}),
 					graph.NewMetadataEntry(
 						bupMD.PreviousPathFileName,
 						map[string]map[string]string{
-							idx(drive, 2): {
-								idx(folder, 2): driveFullPath(2),
+							d2.id: {
+								folderID(2): d2.strPath(),
 							},
 						}),
 				}
 			},
 		},
 		expectedDeltas: map[string]string{
-			id(drive): id(delta),
-			idx(drive, 2): idx(delta, 2),
+			d.id: id(deltaURL),
+			d2.id: id(deltaURL, 2),
 		},
 		expectedPaths: map[string]map[string]string{
-			id(drive): {
-				idx(folder, 1): driveFullPath(1),
+			d.id: {
+				folderID(1): d.strPath(),
 			},
-			idx(drive, 2): {
-				idx(folder, 2): driveFullPath(2),
+			d2.id: {
+				folderID(2): d2.strPath(),
 			},
 		},
 		canUsePreviousBackup: true,
@@ -1136,7 +1137,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
 				return []graph.MetadataCollectionEntry{
 					graph.NewMetadataEntry(
 						bupMD.PreviousPathFileName,
-						map[string]string{id(drive): id(delta)}),
+						map[string]string{d.id: id(deltaURL)}),
 				}
 			},
 		},
@@ -1152,26 +1153,26 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
 				return []graph.MetadataCollectionEntry{
 					graph.NewMetadataEntry(
 						bupMD.DeltaURLsFileName,
-						map[string]string{id(drive): id(delta)}),
+						map[string]string{d.id: id(deltaURL)}),
 					graph.NewMetadataEntry(
 						bupMD.PreviousPathFileName,
 						map[string]map[string]string{
-							id(drive): {
-								idx(folder, 1): driveFullPath(1),
+							d.id: {
+								folderID(1): d.strPath(),
 							},
 						}),
 					graph.NewMetadataEntry(
 						"foo",
-						map[string]string{id(drive): id(delta)}),
+						map[string]string{d.id: id(deltaURL)}),
 				}
 			},
 		},
 		expectedDeltas: map[string]string{
-			id(drive): id(delta),
+			d.id: id(deltaURL),
 		},
 		expectedPaths: map[string]map[string]string{
-			id(drive): {
-				idx(folder, 1): driveFullPath(1),
+			d.id: {
+				folderID(1): d.strPath(),
 			},
 		},
 		canUsePreviousBackup: true,
@@ -1184,12 +1185,12 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
 				return []graph.MetadataCollectionEntry{
 					graph.NewMetadataEntry(
 						bupMD.DeltaURLsFileName,
-						map[string]string{id(drive): id(delta)}),
+						map[string]string{d.id: id(deltaURL)}),
 					graph.NewMetadataEntry(
 						bupMD.PreviousPathFileName,
 						map[string]map[string]string{
-							id(drive): {
-								idx(folder, 1): driveFullPath(1),
+							d.id: {
+								folderID(1): d.strPath(),
 							},
 						}),
 				}
@@ -1199,8 +1200,8 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
 					graph.NewMetadataEntry(
 						bupMD.PreviousPathFileName,
 						map[string]map[string]string{
-							id(drive): {
-								idx(folder, 2): driveFullPath(2),
+							d.id: {
+								folderID(2): d2.strPath(),
 							},
 						}),
 				}
@@ -1218,12 +1219,12 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
 				return []graph.MetadataCollectionEntry{
 					graph.NewMetadataEntry(
 						bupMD.DeltaURLsFileName,
-
map[string]string{id(drive): id(delta)}), + map[string]string{d.id: id(deltaURL)}), graph.NewMetadataEntry( bupMD.PreviousPathFileName, map[string]map[string]string{ - id(drive): { - idx(folder, 1): driveFullPath(1), + d.id: { + folderID(1): d.strPath(), }, }), } @@ -1232,7 +1233,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { return []graph.MetadataCollectionEntry{ graph.NewMetadataEntry( bupMD.DeltaURLsFileName, - map[string]string{id(drive): idx(delta, 2)}), + map[string]string{d.id: id(deltaURL, 2)}), } }, }, @@ -1248,25 +1249,25 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { return []graph.MetadataCollectionEntry{ graph.NewMetadataEntry( bupMD.DeltaURLsFileName, - map[string]string{id(drive): id(delta)}), + map[string]string{d.id: id(deltaURL)}), graph.NewMetadataEntry( bupMD.PreviousPathFileName, map[string]map[string]string{ - id(drive): { - idx(folder, 1): driveFullPath(1), - idx(folder, 2): driveFullPath(1), + d.id: { + folderID(1): d.strPath(), + folderID(2): d.strPath(), }, }), } }, }, expectedDeltas: map[string]string{ - id(drive): id(delta), + d.id: id(deltaURL), }, expectedPaths: map[string]map[string]string{ - id(drive): { - idx(folder, 1): driveFullPath(1), - idx(folder, 2): driveFullPath(1), + d.id: { + folderID(1): d.strPath(), + folderID(2): d.strPath(), }, }, expectedAlerts: []string{fault.AlertPreviousPathCollision}, @@ -1281,14 +1282,14 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { graph.NewMetadataEntry( bupMD.DeltaURLsFileName, map[string]string{ - id(drive): id(delta), + d.id: id(deltaURL), }), graph.NewMetadataEntry( bupMD.PreviousPathFileName, map[string]map[string]string{ - id(drive): { - idx(folder, 1): driveFullPath(1), - idx(folder, 2): driveFullPath(1), + d.id: { + folderID(1): d.strPath(), + folderID(2): d.strPath(), }, }), } @@ -1297,28 +1298,28 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { return []graph.MetadataCollectionEntry{ graph.NewMetadataEntry( bupMD.DeltaURLsFileName, - map[string]string{idx(drive, 2): idx(delta, 2)}), + map[string]string{d2.id: id(deltaURL, 2)}), graph.NewMetadataEntry( bupMD.PreviousPathFileName, map[string]map[string]string{ - idx(drive, 2): { - idx(folder, 1): driveFullPath(1), + d2.id: { + folderID(1): d.strPath(), }, }), } }, }, expectedDeltas: map[string]string{ - id(drive): id(delta), - idx(drive, 2): idx(delta, 2), + d.id: id(deltaURL), + d2.id: id(deltaURL, 2), }, expectedPaths: map[string]map[string]string{ - id(drive): { - idx(folder, 1): driveFullPath(1), - idx(folder, 2): driveFullPath(1), + d.id: { + folderID(1): d.strPath(), + folderID(2): d.strPath(), }, - idx(drive, 2): { - idx(folder, 1): driveFullPath(1), + d2.id: { + folderID(1): d.strPath(), }, }, expectedAlerts: []string{fault.AlertPreviousPathCollision}, @@ -1402,19 +1403,15 @@ func (suite *CollectionsUnitSuite) TestGet_treeCannotBeUsedWhileIncomplete() { ctx, flush := tester.NewContext(t) defer flush() - drv := models.NewDrive() - drv.SetId(ptr.To(id(drive))) - drv.SetName(ptr.To(name(drive))) - - mbh := mock.DefaultOneDriveBH(user) + mbh := defaultOneDriveBH(user) opts := control.DefaultOptions() opts.ToggleFeatures.UseDeltaTree = true - mbh.DrivePagerV = pagerForDrives(drv) - mbh.DriveItemEnumeration = mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( - aPage(delItem(id(file), rootID, isFile))))) + mbh.DriveItemEnumeration = driveEnumerator( + drive().newEnumer().with( + delta(id(deltaURL), nil).with( + aPage( + delItem(fileID(), rootID, 
isFile))))) c := collWithMBH(mbh) c.ctrl = opts @@ -1432,18 +1429,12 @@ func (suite *CollectionsUnitSuite) TestGet() { false) require.NoError(suite.T(), err, "making metadata path", clues.ToCore(err)) - drive1 := models.NewDrive() - drive1.SetId(ptr.To(idx(drive, 1))) - drive1.SetName(ptr.To(namex(drive, 1))) - - drive2 := models.NewDrive() - drive2.SetId(ptr.To(idx(drive, 2))) - drive2.SetName(ptr.To(namex(drive, 2))) + d := drive(1) + d2 := drive(2) table := []struct { name string - drives []models.Driveable - enumerator mock.EnumerateDriveItemsDelta + enumerator enumerateDriveItemsDelta canUsePreviousBackup bool errCheck assert.ErrorAssertionFunc previousPaths map[string]map[string]string @@ -1460,373 +1451,364 @@ func (suite *CollectionsUnitSuite) TestGet() { doNotMergeItems map[string]bool }{ { - name: "OneDrive_OneItemPage_DelFileOnly_NoFolders_NoErrors", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.Delta(id(delta), nil).With(aPage( - delItem(id(file), rootID, isFile))))), + name: "OneDrive_OneItemPage_DelFileOnly_NoFolders_NoErrors", + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( + aPage( + delItem(fileID(), rootID, isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): {rootID: driveFullPath(1)}, + id(drivePfx, 1): {rootID: d.strPath()}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NotMovedState: {}}, + d.strPath(): {data.NotMovedState: {}}, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), + id(drivePfx, 1): id(deltaURL), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): {rootID: driveFullPath(1)}, + id(drivePfx, 1): {rootID: d.strPath()}, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{ - driveFullPath(1): makeExcludeMap(id(file)), + d.strPath(): makeExcludeMap(fileID()), }), }, { - name: "OneDrive_OneItemPage_NoFolderDeltas_NoErrors", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.Delta(id(delta), nil).With(aPage( - driveItem(id(file), name(file), driveParentDir(1), rootID, isFile))))), + name: "OneDrive_OneItemPage_NoFolderDeltas_NoErrors", + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( + aPage( + driveFile(d.dir(), rootID))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): {rootID: driveFullPath(1)}, + id(drivePfx, 1): {rootID: d.strPath()}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NotMovedState: {id(file)}}, + d.strPath(): {data.NotMovedState: {fileID()}}, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), + id(drivePfx, 1): id(deltaURL), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): {rootID: driveFullPath(1)}, + id(drivePfx, 1): {rootID: d.strPath()}, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{ - driveFullPath(1): makeExcludeMap(id(file)), + d.strPath(): makeExcludeMap(fileID()), }), }, { - name: "OneDrive_OneItemPage_NoErrors", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.DeltaWReset(id(delta), nil).With(aPage( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), 
name(file), driveParentDir(1, name(folder)), id(folder), isFile))))), + name: "OneDrive_OneItemPage_NoErrors", + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL), nil).with( + aPage( + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID()))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{}, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, - driveFullPath(1, name(folder)): {data.NewState: {id(folder), id(file)}}, + d.strPath(): {data.NewState: {}}, + d.strPath(folderName()): {data.NewState: {folderID(), fileID()}}, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), + id(drivePfx, 1): id(deltaURL), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(): d.strPath(folderName()), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(1): true, - driveFullPath(1, name(folder)): true, + d.strPath(): true, + d.strPath(folderName()): true, }, }, { - name: "OneDrive_OneItemPage_NoErrors_FileRenamedMultiple", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.DeltaWReset(id(delta), nil).With(aPage( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - driveItem(id(file), namex(file, 2), driveParentDir(1, name(folder)), id(folder), isFile))))), + name: "OneDrive_OneItemPage_NoErrors_FileRenamedMultiple", + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL), nil).with( + aPage( + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID()), + driveItem(fileID(), fileName(2), d.dir(folderName()), folderID(), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{}, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, - driveFullPath(1, name(folder)): {data.NewState: {id(folder), id(file)}}, + d.strPath(): {data.NewState: {}}, + d.strPath(folderName()): {data.NewState: {folderID(), fileID()}}, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), + id(drivePfx, 1): id(deltaURL), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(): d.strPath(folderName()), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(1): true, - driveFullPath(1, name(folder)): true, + d.strPath(): true, + d.strPath(folderName()): true, }, }, { - name: "OneDrive_OneItemPage_NoErrors_FileMovedMultiple", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.Delta(id(delta), nil).With(aPage( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - driveItem(id(file), namex(file, 2), driveParentDir(1), rootID, isFile))))), + name: "OneDrive_OneItemPage_NoErrors_FileMovedMultiple", + enumerator: driveEnumerator( 
+ d.newEnumer().with( + delta(id(deltaURL), nil).with( + aPage( + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID()), + driveItem(fileID(), fileName(2), d.dir(), rootID, isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), + id(drivePfx, 1): { + rootID: d.strPath(), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NotMovedState: {id(file)}}, - driveFullPath(1, name(folder)): {data.NewState: {id(folder)}}, + d.strPath(): {data.NotMovedState: {fileID()}}, + d.strPath(folderName()): {data.NewState: {folderID()}}, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), + id(drivePfx, 1): id(deltaURL), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(): d.strPath(folderName()), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{ - driveFullPath(1): makeExcludeMap(id(file)), + d.strPath(): makeExcludeMap(fileID()), }), }, { - name: "OneDrive_TwoItemPages_NoErrors", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.DeltaWReset(id(delta), nil).With( + name: "OneDrive_TwoItemPages_NoErrors", + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL), nil).with( aPage( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)), + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID())), aPage( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, name(folder)), id(folder), isFile))))), + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID(), 2))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): {}, + id(drivePfx, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, - driveFullPath(1, name(folder)): {data.NewState: {id(folder), id(file), idx(file, 2)}}, + d.strPath(): {data.NewState: {}}, + d.strPath(folderName()): {data.NewState: {folderID(), fileID(), fileID(2)}}, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), + id(drivePfx, 1): id(deltaURL), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(): d.strPath(folderName()), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(1): true, - driveFullPath(1, name(folder)): true, + d.strPath(): true, + d.strPath(folderName()): true, }, }, { - name: "OneDrive_TwoItemPages_WithReset", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.DeltaWReset(id(delta), nil).With( + name: "OneDrive_TwoItemPages_WithReset", + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL), nil).with( aPage( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), 
id(folder), isFile), - driveItem(idx(file, 3), namex(file, 3), driveParentDir(1, name(folder)), id(folder), isFile)), + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID()), + driveItem(fileID(3), fileName(3), d.dir(folderName()), folderID(), isFile)), aReset(), aPage( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)), + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID())), aPage( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, name(folder)), id(folder), isFile))))), + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID(), 2))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): {}, + id(drivePfx, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, - driveFullPath(1, name(folder)): {data.NewState: {id(folder), id(file), idx(file, 2)}}, + d.strPath(): {data.NewState: {}}, + d.strPath(folderName()): {data.NewState: {folderID(), fileID(), fileID(2)}}, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), + id(drivePfx, 1): id(deltaURL), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(): d.strPath(folderName()), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(1): true, - driveFullPath(1, name(folder)): true, + d.strPath(): true, + d.strPath(folderName()): true, }, }, { - name: "OneDrive_TwoItemPages_WithResetCombinedWithItems", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.DeltaWReset(id(delta), nil).With( + name: "OneDrive_TwoItemPages_WithResetCombinedWithItems", + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL), nil).with( aPage( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)), + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID())), aPageWReset( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)), + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID())), aPage( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, name(folder)), id(folder), isFile))))), + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID(), 2))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): {}, + id(drivePfx, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, - driveFullPath(1, name(folder)): {data.NewState: {id(folder), id(file), idx(file, 2)}}, + d.strPath(): {data.NewState: {}}, + d.strPath(folderName()): {data.NewState: {folderID(), fileID(), fileID(2)}}, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), + id(drivePfx, 1): id(deltaURL), }, 
expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(): d.strPath(folderName()), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(1): true, - driveFullPath(1, name(folder)): true, + d.strPath(): true, + d.strPath(folderName()): true, }, }, { name: "TwoDrives_OneItemPageEach_NoErrors", - drives: []models.Driveable{ - drive1, - drive2, - }, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.DeltaWReset(id(delta), nil).With(aPage( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)))), - mock.Drive(idx(drive, 2)).With( - mock.DeltaWReset(idx(delta, 2), nil).With(aPage( - driveItem(idx(folder, 2), name(folder), driveParentDir(2), rootID, isFolder), - driveItem(idx(file, 2), name(file), driveParentDir(2, name(folder)), idx(folder, 2), isFile))))), + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL), nil).with( + aPage( + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID())))), + d2.newEnumer().with( + deltaWReset(id(deltaURL, 2), nil).with(aPage( + driveItem(folderID(2), folderName(), d2.dir(), rootID, isFolder), + driveItem(fileID(2), fileName(), d2.dir(folderName()), folderID(2), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): {}, - idx(drive, 2): {}, + id(drivePfx, 1): {}, + d2.id: {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, - driveFullPath(1, name(folder)): {data.NewState: {id(folder), id(file)}}, - driveFullPath(2): {data.NewState: {}}, - driveFullPath(2, name(folder)): {data.NewState: {idx(folder, 2), idx(file, 2)}}, + d.strPath(): {data.NewState: {}}, + d.strPath(folderName()): {data.NewState: {folderID(), fileID()}}, + d2.strPath(): {data.NewState: {}}, + d2.strPath(folderName()): {data.NewState: {folderID(2), fileID(2)}}, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), - idx(drive, 2): idx(delta, 2), + id(drivePfx, 1): id(deltaURL), + d2.id: id(deltaURL, 2), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(): d.strPath(folderName()), }, - idx(drive, 2): { - rootID: driveFullPath(2), - idx(folder, 2): driveFullPath(2, name(folder)), + d2.id: { + rootID: d2.strPath(), + folderID(2): d2.strPath(folderName()), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(1): true, - driveFullPath(1, name(folder)): true, - driveFullPath(2): true, - driveFullPath(2, name(folder)): true, + d.strPath(): true, + d.strPath(folderName()): true, + d2.strPath(): true, + d2.strPath(folderName()): true, }, }, { name: "TwoDrives_DuplicateIDs_OneItemPageEach_NoErrors", - drives: []models.Driveable{ - drive1, - drive2, - }, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.DeltaWReset(id(delta), nil).With(aPage( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)))), - 
mock.Drive(idx(drive, 2)).With( - mock.DeltaWReset(idx(delta, 2), nil).With(aPage( - driveItem(id(folder), name(folder), driveParentDir(2), rootID, isFolder), - driveItem(idx(file, 2), name(file), driveParentDir(2, name(folder)), id(folder), isFile))))), + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL), nil).with( + aPage( + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID())))), + d2.newEnumer().with( + deltaWReset(id(deltaURL, 2), nil).with( + aPage( + driveFolder(d2.dir(), rootID), + driveItem(fileID(2), fileName(), d2.dir(folderName()), folderID(), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): {}, - idx(drive, 2): {}, + id(drivePfx, 1): {}, + d2.id: {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, - driveFullPath(1, name(folder)): {data.NewState: {id(folder), id(file)}}, - driveFullPath(2): {data.NewState: {}}, - driveFullPath(2, name(folder)): {data.NewState: {id(folder), idx(file, 2)}}, + d.strPath(): {data.NewState: {}}, + d.strPath(folderName()): {data.NewState: {folderID(), fileID()}}, + d2.strPath(): {data.NewState: {}}, + d2.strPath(folderName()): {data.NewState: {folderID(), fileID(2)}}, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), - idx(drive, 2): idx(delta, 2), + id(drivePfx, 1): id(deltaURL), + d2.id: id(deltaURL, 2), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(): d.strPath(folderName()), }, - idx(drive, 2): { - rootID: driveFullPath(2), - id(folder): driveFullPath(2, name(folder)), + d2.id: { + rootID: d2.strPath(), + folderID(): d2.strPath(folderName()), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(1): true, - driveFullPath(1, name(folder)): true, - driveFullPath(2): true, - driveFullPath(2, name(folder)): true, + d.strPath(): true, + d.strPath(folderName()): true, + d2.strPath(): true, + d2.strPath(folderName()): true, }, }, { - name: "OneDrive_OneItemPage_Errors", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.Delta("", assert.AnError))), + name: "OneDrive_OneItemPage_Errors", + enumerator: driveEnumerator( + d.newEnumer().with( + delta("", assert.AnError))), canUsePreviousBackup: false, errCheck: assert.Error, previousPaths: map[string]map[string]string{ - idx(drive, 1): {}, + id(drivePfx, 1): {}, }, expectedCollections: nil, expectedDeltaURLs: nil, @@ -1834,883 +1816,826 @@ func (suite *CollectionsUnitSuite) TestGet() { expectedDelList: nil, }, { - name: "OneDrive_OneItemPage_InvalidPrevDelta_DeleteNonExistentFolder", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.DeltaWReset(id(delta), nil).With( + name: "OneDrive_OneItemPage_InvalidPrevDelta_DeleteNonExistentFolder", + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL), nil).with( aReset(), aPage( - driveItem(idx(folder, 2), namex(folder, 2), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, namex(folder, 2)), idx(folder, 2), isFile))))), + driveFolder(d.dir(), rootID, 2), + driveFile(d.dir(folderName(2)), folderID(2)))))), canUsePreviousBackup: true, 
errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(): d.strPath(folderName()), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, - driveFullPath(1, name(folder)): {data.DeletedState: {}}, - driveFullPath(1, namex(folder, 2)): {data.NewState: {idx(folder, 2), id(file)}}, + d.strPath(): {data.NewState: {}}, + d.strPath(folderName()): {data.DeletedState: {}}, + d.strPath(folderName(2)): {data.NewState: {folderID(2), fileID()}}, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), + id(drivePfx, 1): id(deltaURL), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - idx(folder, 2): driveFullPath(1, namex(folder, 2)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(2): d.strPath(folderName(2)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(1): true, - driveFullPath(1, name(folder)): true, - driveFullPath(1, namex(folder, 2)): true, + d.strPath(): true, + d.strPath(folderName()): true, + d.strPath(folderName(2)): true, }, }, { - name: "OneDrive_OneItemPage_InvalidPrevDeltaCombinedWithItems_DeleteNonExistentFolder", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.DeltaWReset(id(delta), nil).With( + name: "OneDrive_OneItemPage_InvalidPrevDeltaCombinedWithItems_DeleteNonExistentFolder", + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL), nil).with( aReset(), aPage( - driveItem(idx(folder, 2), namex(folder, 2), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, namex(folder, 2)), idx(folder, 2), isFile))))), + driveFolder(d.dir(), rootID, 2), + driveFile(d.dir(folderName(2)), folderID(2)))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(): d.strPath(folderName()), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, - driveFullPath(1, name(folder)): {data.DeletedState: {}}, - driveFullPath(1, namex(folder, 2)): {data.NewState: {idx(folder, 2), id(file)}}, + d.strPath(): {data.NewState: {}}, + d.strPath(folderName()): {data.DeletedState: {}}, + d.strPath(folderName(2)): {data.NewState: {folderID(2), fileID()}}, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), + id(drivePfx, 1): id(deltaURL), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - idx(folder, 2): driveFullPath(1, namex(folder, 2)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(2): d.strPath(folderName(2)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(1): true, - driveFullPath(1, name(folder)): true, - driveFullPath(1, namex(folder, 2)): true, + d.strPath(): true, + d.strPath(folderName()): true, + d.strPath(folderName(2)): true, }, }, { - name: "OneDrive_OneItemPage_InvalidPrevDelta_AnotherFolderAtDeletedLocation", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - 
mock.Drive(idx(drive, 1)).With( - mock.DeltaWReset(id(delta), nil).With( + name: "OneDrive_OneItemPage_InvalidPrevDelta_AnotherFolderAtDeletedLocation", + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL), nil).with( aPage( - driveItem(idx(folder, 2), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), idx(folder, 2), isFile)), + driveItem(folderID(2), folderName(), d.dir(), rootID, isFolder), + driveFile(d.dir(folderName()), folderID(2))), aReset(), aPage( - driveItem(idx(folder, 2), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), idx(folder, 2), isFile))))), + driveItem(folderID(2), folderName(), d.dir(), rootID, isFolder), + driveFile(d.dir(folderName()), folderID(2)))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(): d.strPath(folderName()), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, - driveFullPath(1, name(folder)): { + d.strPath(): {data.NewState: {}}, + d.strPath(folderName()): { // Old folder path should be marked as deleted since it should compare // by ID. data.DeletedState: {}, - data.NewState: {idx(folder, 2), id(file)}, + data.NewState: {folderID(2), fileID()}, }, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), + id(drivePfx, 1): id(deltaURL), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - idx(folder, 2): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(2): d.strPath(folderName()), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(1): true, - driveFullPath(1, name(folder)): true, + d.strPath(): true, + d.strPath(folderName()): true, }, }, { - name: "OneDrive_OneItemPage_InvalidPrevDelta_AnotherFolderAtExistingLocation", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.DeltaWReset(id(delta), nil).With( + name: "OneDrive_OneItemPage_InvalidPrevDelta_AnotherFolderAtExistingLocation", + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL), nil).with( aPage( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)), + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID())), aReset(), aPage( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile))))), + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID()))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(): d.strPath(folderName()), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, - driveFullPath(1, name(folder)): { - data.NewState: {id(folder), id(file)}, + d.strPath(): {data.NewState: {}}, + 
d.strPath(folderName()): { + data.NewState: {folderID(), fileID()}, }, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), + id(drivePfx, 1): id(deltaURL), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(): d.strPath(folderName()), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(1): true, - driveFullPath(1, name(folder)): true, + d.strPath(): true, + d.strPath(folderName()): true, }, }, { - name: "OneDrive_OneItemPage_ImmediateInvalidPrevDelta_MoveFolderToPreviouslyExistingPath", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.DeltaWReset(id(delta), nil).With( + name: "OneDrive_OneItemPage_ImmediateInvalidPrevDelta_MoveFolderToPreviouslyExistingPath", + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL), nil).with( aReset(), aPage( - driveItem(idx(folder, 2), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(idx(file, 2), name(file), driveParentDir(1, name(folder)), idx(folder, 2), isFile))))), + driveItem(folderID(2), folderName(), d.dir(), rootID, isFolder), + driveItem(fileID(2), fileName(), d.dir(folderName()), folderID(2), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(): d.strPath(folderName()), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, - driveFullPath(1, name(folder)): { + d.strPath(): {data.NewState: {}}, + d.strPath(folderName()): { data.DeletedState: {}, - data.NewState: {idx(folder, 2), idx(file, 2)}, + data.NewState: {folderID(2), fileID(2)}, }, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), + id(drivePfx, 1): id(deltaURL), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - idx(folder, 2): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(2): d.strPath(folderName()), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(1): true, - driveFullPath(1, name(folder)): true, + d.strPath(): true, + d.strPath(folderName()): true, }, }, { - name: "OneDrive_OneItemPage_InvalidPrevDelta_AnotherFolderAtDeletedLocation", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.DeltaWReset(id(delta), nil).With( + name: "OneDrive_OneItemPage_ImmediateInvalidPrevDelta_AnotherFolderAtDeletedLocation", + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL), nil).with( aReset(), aPage( - driveItem(idx(folder, 2), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), idx(folder, 2), isFile))))), + driveItem(folderID(2), folderName(), d.dir(), rootID, isFolder), + driveFile(d.dir(folderName()), folderID(2)))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + 
folderID(): d.strPath(folderName()), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, - driveFullPath(1, name(folder)): { + d.strPath(): {data.NewState: {}}, + d.strPath(folderName()): { // Old folder path should be marked as deleted since it should compare // by ID. data.DeletedState: {}, - data.NewState: {idx(folder, 2), id(file)}, + data.NewState: {folderID(2), fileID()}, }, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), + id(drivePfx, 1): id(deltaURL), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - idx(folder, 2): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(2): d.strPath(folderName()), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(1): true, - driveFullPath(1, name(folder)): true, + d.strPath(): true, + d.strPath(folderName()): true, }, }, { - name: "OneDrive Two Item Pages with Malware", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.DeltaWReset(id(delta), nil).With( + name: "OneDrive Two Item Pages with Malware", + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL), nil).with( aPage( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - malwareItem(id(malware), name(malware), driveParentDir(1, name(folder)), id(folder), isFile)), + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID()), + malwareItem(id(malware), name(malware), d.dir(folderName()), folderID(), isFile)), aPage( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, name(folder)), id(folder), isFile), - malwareItem(idx(malware, 2), namex(malware, 2), driveParentDir(1, name(folder)), id(folder), isFile))))), + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID(), 2), + malwareItem(id(malware, 2), name(malware, 2), d.dir(folderName()), folderID(), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): {}, + id(drivePfx, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, - driveFullPath(1, name(folder)): {data.NewState: {id(folder), id(file), idx(file, 2)}}, + d.strPath(): {data.NewState: {}}, + d.strPath(folderName()): {data.NewState: {folderID(), fileID(), fileID(2)}}, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), + id(drivePfx, 1): id(deltaURL), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(): d.strPath(folderName()), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(1): true, - driveFullPath(1, name(folder)): true, + d.strPath(): true, + d.strPath(folderName()): true, }, expectedSkippedCount: 2, }, { - name: "One Drive Deleted Folder In New Results With Invalid Delta", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.DeltaWReset(idx(delta, 2), nil).With( + name: "One Drive 
Deleted Folder In New Results With Invalid Delta", + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL, 2), nil).with( aPage( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - driveItem(idx(folder, 2), namex(folder, 2), driveParentDir(1), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, namex(folder, 2)), idx(folder, 2), isFile)), + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID()), + driveFolder(d.dir(), rootID, 2), + driveFile(d.dir(folderName(2)), folderID(2), 2)), aReset(), aPage( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - delItem(idx(folder, 2), rootID, isFolder), - delItem(namex(file, 2), rootID, isFile))))), + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID()), + delItem(folderID(2), rootID, isFolder), + delItem(fileID(2), rootID, isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), - idx(folder, 2): driveFullPath(1, namex(folder, 2)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(): d.strPath(folderName()), + folderID(2): d.strPath(folderName(2)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, - driveFullPath(1, name(folder)): {data.NewState: {id(folder), id(file)}}, - driveFullPath(1, namex(folder, 2)): {data.DeletedState: {}}, + d.strPath(): {data.NewState: {}}, + d.strPath(folderName()): {data.NewState: {folderID(), fileID()}}, + d.strPath(folderName(2)): {data.DeletedState: {}}, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): idx(delta, 2), + id(drivePfx, 1): id(deltaURL, 2), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(): d.strPath(folderName()), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(1): true, - driveFullPath(1, name(folder)): true, - driveFullPath(1, namex(folder, 2)): true, + d.strPath(): true, + d.strPath(folderName()): true, + d.strPath(folderName(2)): true, }, }, { - name: "One Drive Folder Delete After Invalid Delta", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.DeltaWReset(id(delta), nil).With(aPageWReset( - delItem(id(folder), rootID, isFolder))))), + name: "One Drive Folder Delete After Invalid Delta", + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL), nil).with( + aPageWReset( + delItem(folderID(), rootID, isFolder))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(): d.strPath(folderName()), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, - driveFullPath(1, name(folder)): {data.DeletedState: {}}, + d.strPath(): {data.NewState: {}}, + d.strPath(folderName()): {data.DeletedState: {}}, },
expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), + id(drivePfx, 1): id(deltaURL), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), + id(drivePfx, 1): { + rootID: d.strPath(), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(1): true, - driveFullPath(1, name(folder)): true, + d.strPath(): true, + d.strPath(folderName()): true, }, }, { - name: "One Drive Item Delete After Invalid Delta", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.DeltaWReset(id(delta), nil).With(aPageWReset( - delItem(id(file), rootID, isFile))))), + name: "One Drive Item Delete After Invalid Delta", + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL), nil).with( + aPageWReset( + delItem(fileID(), rootID, isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), + id(drivePfx, 1): { + rootID: d.strPath(), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, + d.strPath(): {data.NewState: {}}, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), + id(drivePfx, 1): id(deltaURL), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), + id(drivePfx, 1): { + rootID: d.strPath(), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(1): true, + d.strPath(): true, }, }, { - name: "One Drive Folder Made And Deleted", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.DeltaWReset(idx(delta, 2), nil).With( + name: "One Drive Folder Made And Deleted", + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL, 2), nil).with( aPage( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)), + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID())), aPage( - delItem(id(folder), rootID, isFolder), - delItem(id(file), rootID, isFile))))), + delItem(folderID(), rootID, isFolder), + delItem(fileID(), rootID, isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): {}, + id(drivePfx, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, + d.strPath(): {data.NewState: {}}, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): idx(delta, 2), + id(drivePfx, 1): id(deltaURL, 2), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), + id(drivePfx, 1): { + rootID: d.strPath(), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(1): true, + d.strPath(): true, }, }, { - name: "One Drive Folder Created -> Deleted -> Created", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.DeltaWReset(idx(delta, 2), nil).With( + name: "One Drive Folder Created -> Deleted -> Created", + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL, 2), nil).with( aPage( - 
driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)), + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID())), aPage( - delItem(id(folder), rootID, isFolder), - delItem(id(file), rootID, isFile)), + delItem(folderID(), rootID, isFolder), + delItem(fileID(), rootID, isFile)), aPage( - driveItem(idx(folder, 1), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(idx(file, 1), name(file), driveParentDir(1, name(folder)), idx(folder, 1), isFile))))), + driveItem(folderID(1), folderName(), d.dir(), rootID, isFolder), + driveItem(fileID(1), fileName(), d.dir(folderName()), folderID(1), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): {}, + id(drivePfx, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, - driveFullPath(1, name(folder)): {data.NewState: {idx(folder, 1), idx(file, 1)}}, + d.strPath(): {data.NewState: {}}, + d.strPath(folderName()): {data.NewState: {folderID(1), fileID(1)}}, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): idx(delta, 2), + id(drivePfx, 1): id(deltaURL, 2), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - idx(folder, 1): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(1): d.strPath(folderName()), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(1): true, - driveFullPath(1, name(folder)): true, + d.strPath(): true, + d.strPath(folderName()): true, }, }, { - name: "One Drive Folder Deleted -> Created -> Deleted", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.DeltaWReset(idx(delta, 2), nil).With( + name: "One Drive Folder Deleted -> Created -> Deleted", + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL, 2), nil).with( aPage( - delItem(id(folder), rootID, isFolder), - delItem(id(file), rootID, isFile)), + delItem(folderID(), rootID, isFolder), + delItem(fileID(), rootID, isFile)), aPage( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)), + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID())), aPage( - delItem(id(folder), rootID, isFolder), - delItem(id(file), rootID, isFile))))), + delItem(folderID(), rootID, isFolder), + delItem(fileID(), rootID, isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(): d.strPath(folderName()), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NotMovedState: {}}, - driveFullPath(1, name(folder)): {data.DeletedState: {}}, + d.strPath(): {data.NotMovedState: {}}, + d.strPath(folderName()): {data.DeletedState: {}}, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): idx(delta, 2), + id(drivePfx, 1): id(deltaURL, 2), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), + id(drivePfx, 1): { + rootID: d.strPath(), }, }, expectedDelList: 
pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{}, }, { - name: "One Drive Folder Created -> Deleted -> Created with prev", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.DeltaWReset(idx(delta, 2), nil).With( + name: "One Drive Folder Created -> Deleted -> Created with prev", + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL, 2), nil).with( aPage( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)), + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID())), aPage( - delItem(id(folder), rootID, isFolder), - delItem(id(file), rootID, isFile)), + delItem(folderID(), rootID, isFolder), + delItem(fileID(), rootID, isFile)), aPage( - driveItem(idx(folder, 1), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(idx(file, 1), name(file), driveParentDir(1, name(folder)), idx(folder, 1), isFile))))), + driveItem(folderID(1), folderName(), d.dir(), rootID, isFolder), + driveItem(fileID(1), fileName(), d.dir(folderName()), folderID(1), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(): d.strPath(folderName()), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, - driveFullPath(1, name(folder)): {data.DeletedState: {}, data.NewState: {idx(folder, 1), idx(file, 1)}}, + d.strPath(): {data.NewState: {}}, + d.strPath(folderName()): {data.DeletedState: {}, data.NewState: {folderID(1), fileID(1)}}, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): idx(delta, 2), + id(drivePfx, 1): id(deltaURL, 2), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - idx(folder, 1): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(1): d.strPath(folderName()), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(1): false, - driveFullPath(1, name(folder)): true, + d.strPath(): false, + d.strPath(folderName()): true, }, }, { - name: "One Drive Item Made And Deleted", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.DeltaWReset(id(delta), nil).With( + name: "One Drive Item Made And Deleted", + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL), nil).with( aPage( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)), - aPage(delItem(id(file), rootID, isFile))))), + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID())), + aPage(delItem(fileID(), rootID, isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): {}, + id(drivePfx, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, - driveFullPath(1, name(folder)): {data.NewState: {id(folder)}}, + d.strPath(): {data.NewState: {}}, + d.strPath(folderName()): {data.NewState: {folderID()}}, }, expectedDeltaURLs: 
map[string]string{ - idx(drive, 1): id(delta), + id(drivePfx, 1): id(deltaURL), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(): d.strPath(folderName()), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(1): true, - driveFullPath(1, name(folder)): true, + d.strPath(): true, + d.strPath(folderName()): true, }, }, { - name: "One Drive Random Folder Delete", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.DeltaWReset(id(delta), nil).With(aPage( - delItem(id(folder), rootID, isFolder))))), + name: "One Drive Random Folder Delete", + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL), nil).with( + aPage( + delItem(folderID(), rootID, isFolder))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): {}, + id(drivePfx, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, + d.strPath(): {data.NewState: {}}, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), + id(drivePfx, 1): id(deltaURL), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), + id(drivePfx, 1): { + rootID: d.strPath(), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(1): true, + d.strPath(): true, }, }, { - name: "One Drive Random Item Delete", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.Delta(id(delta), nil).With(aPage( - delItem(id(file), rootID, isFile))))), + name: "One Drive Random Item Delete", + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( + aPage( + delItem(fileID(), rootID, isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): {}, + id(drivePfx, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, + d.strPath(): {data.NewState: {}}, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), + id(drivePfx, 1): id(deltaURL), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), + id(drivePfx, 1): { + rootID: d.strPath(), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(1): true, + d.strPath(): true, }, }, { - name: "TwoPriorDrives_OneTombstoned", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.Delta(id(delta), nil).With(aPage()))), // root only + name: "TwoPriorDrives_OneTombstoned", + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with(aPage()))), // root only canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): {rootID: driveFullPath(1)}, - idx(drive, 2): {rootID: driveFullPath(2)}, + id(drivePfx, 1): {rootID: d.strPath()}, + d2.id: {rootID: d2.strPath()}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NotMovedState: {}}, - 
driveFullPath(2): {data.DeletedState: {}}, + d.strPath(): {data.NotMovedState: {}}, + d2.strPath(): {data.DeletedState: {}}, }, - expectedDeltaURLs: map[string]string{idx(drive, 1): id(delta)}, + expectedDeltaURLs: map[string]string{id(drivePfx, 1): id(deltaURL)}, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): {rootID: driveFullPath(1)}, + id(drivePfx, 1): {rootID: d.strPath()}, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - driveFullPath(2): true, + d2.strPath(): true, }, }, { - name: "duplicate previous paths in metadata", - drives: []models.Driveable{drive1, drive2}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.Delta(id(delta), nil).With(aPage( - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - driveItem(idx(folder, 2), namex(folder, 2), driveParentDir(1), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, namex(folder, 2)), idx(folder, 2), isFile)))), - mock.Drive(idx(drive, 2)).With( - mock.Delta(idx(delta, 2), nil).With(aPage( - driveItem(id(folder), name(folder), driveParentDir(2), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(2, name(folder)), id(folder), isFile), - driveItem(idx(folder, 2), namex(folder, 2), driveParentDir(2), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), driveParentDir(2, namex(folder, 2)), idx(folder, 2), isFile))))), + name: "duplicate previous paths in metadata", + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( + aPage( + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID()), + driveFolder(d.dir(), rootID, 2), + driveFile(d.dir(folderName(2)), folderID(2), 2)))), + d2.newEnumer().with( + delta(id(deltaURL, 2), nil).with( + aPage( + driveFolder(d2.dir(), rootID), + driveFile(d2.dir(folderName()), folderID()), + driveFolder(d2.dir(), rootID, 2), + driveFile(d2.dir(folderName(2)), folderID(2), 2))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), - idx(folder, 2): driveFullPath(1, name(folder)), - idx(folder, 3): driveFullPath(1, name(folder)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(): d.strPath(folderName()), + folderID(2): d.strPath(folderName()), + folderID(3): d.strPath(folderName()), }, - idx(drive, 2): { - rootID: driveFullPath(2), - id(folder): driveFullPath(2, name(folder)), - idx(folder, 2): driveFullPath(2, namex(folder, 2)), + d2.id: { + rootID: d2.strPath(), + folderID(): d2.strPath(folderName()), + folderID(2): d2.strPath(folderName(2)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): { - data.NewState: {id(folder), idx(folder, 2)}, + d.strPath(): { + data.NewState: {folderID(), folderID(2)}, }, - driveFullPath(1, name(folder)): { - data.NotMovedState: {id(folder), id(file)}, + d.strPath(folderName()): { + data.NotMovedState: {folderID(), fileID()}, }, - driveFullPath(1, namex(folder, 2)): { - data.MovedState: {idx(folder, 2), idx(file, 2)}, + d.strPath(folderName(2)): { + data.MovedState: {folderID(2), fileID(2)}, }, - driveFullPath(2): { - data.NewState: {id(folder), idx(folder, 2)}, + d2.strPath(): { + data.NewState: {folderID(), folderID(2)}, }, - driveFullPath(2, name(folder)): { - 
data.NotMovedState: {id(folder), id(file)}, + d2.strPath(folderName()): { + data.NotMovedState: {folderID(), fileID()}, }, - driveFullPath(2, namex(folder, 2)): { - data.NotMovedState: {idx(folder, 2), idx(file, 2)}, + d2.strPath(folderName(2)): { + data.NotMovedState: {folderID(2), fileID(2)}, }, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), - idx(drive, 2): idx(delta, 2), + id(drivePfx, 1): id(deltaURL), + d2.id: id(deltaURL, 2), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, namex(folder, 2)), // note: this is a bug, but is currently expected - idx(folder, 2): driveFullPath(1, namex(folder, 2)), - idx(folder, 3): driveFullPath(1, namex(folder, 2)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(): d.strPath(folderName(2)), // note: this is a bug, but is currently expected + folderID(2): d.strPath(folderName(2)), + folderID(3): d.strPath(folderName(2)), }, - idx(drive, 2): { - rootID: driveFullPath(2), - id(folder): driveFullPath(2, name(folder)), - idx(folder, 2): driveFullPath(2, namex(folder, 2)), + d2.id: { + rootID: d2.strPath(), + folderID(): d2.strPath(folderName()), + folderID(2): d2.strPath(folderName(2)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{ - driveFullPath(1): makeExcludeMap(id(file), idx(file, 2)), - driveFullPath(2): makeExcludeMap(id(file), idx(file, 2)), + d.strPath(): makeExcludeMap(fileID(), fileID(2)), + d2.strPath(): makeExcludeMap(fileID(), fileID(2)), }), doNotMergeItems: map[string]bool{}, }, { - name: "out of order item enumeration causes prev path collisions", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.Delta(id(delta), nil).With(aPage( - driveItem(idx(fanny, 2), name(fanny), driveParentDir(1), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, name(fanny)), idx(fanny, 2), isFile), - driveItem(id(nav), name(nav), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(nav)), id(nav), isFile))))), + name: "out of order item enumeration causes prev path collisions", + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( + aPage( + driveItem(folderID(fanny, 2), folderName(fanny), d.dir(), rootID, isFolder), + driveFile(d.dir(folderName(fanny)), folderID(fanny, 2), 2), + driveFolder(d.dir(), rootID, nav), + driveFile(d.dir(folderName(nav)), folderID(nav)))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(nav): driveFullPath(1, name(fanny)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(nav): d.strPath(folderName(fanny)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): { - data.NewState: {idx(fanny, 2)}, + d.strPath(): { + data.NewState: {folderID(fanny, 2)}, }, - driveFullPath(1, name(nav)): { - data.MovedState: {id(nav), id(file)}, + d.strPath(folderName(nav)): { + data.MovedState: {folderID(nav), fileID()}, }, - driveFullPath(1, name(fanny)): { - data.NewState: {idx(fanny, 2), idx(file, 2)}, + d.strPath(folderName(fanny)): { + data.NewState: {folderID(fanny, 2), fileID(2)}, }, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), + id(drivePfx, 1): id(deltaURL), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(nav): 
driveFullPath(1, name(nav)), - idx(fanny, 2): driveFullPath(1, name(nav)), // note: this is a bug, but currently expected + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(nav): d.strPath(folderName(nav)), + folderID(fanny, 2): d.strPath(folderName(nav)), // note: this is a bug, but currently expected }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{ - driveFullPath(1): makeExcludeMap(id(file), idx(file, 2)), + d.strPath(): makeExcludeMap(fileID(), fileID(2)), }), doNotMergeItems: map[string]bool{}, }, { - name: "out of order item enumeration causes prev path collisions", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.Delta(id(delta), nil).With(aPage( - driveItem(idx(fanny, 2), name(fanny), driveParentDir(1), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, name(fanny)), idx(fanny, 2), isFile), - driveItem(id(nav), name(nav), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(nav)), id(nav), isFile))))), + name: "out of order item enumeration causes opposite prev path collisions", + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( + aPage( + driveFile(d.dir(), rootID, 1), + driveFolder(d.dir(), rootID, fanny), + driveFolder(d.dir(), rootID, nav), + driveFolder(d.dir(folderName(fanny)), folderID(fanny), foo), + driveItem(folderID(bar), folderName(foo), d.dir(folderName(nav)), folderID(nav), isFolder))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(nav): driveFullPath(1, name(fanny)), + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(nav): d.strPath(folderName(nav)), + folderID(fanny): d.strPath(folderName(fanny)), + folderID(foo): d.strPath(folderName(nav), folderName(foo)), + folderID(bar): d.strPath(folderName(fanny), folderName(foo)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): { - data.NewState: {idx(fanny, 2)}, + d.strPath(): { + data.NotMovedState: {fileID(1)}, }, - driveFullPath(1, name(nav)): { - data.MovedState: {id(nav), id(file)}, + d.strPath(folderName(nav)): { + data.NotMovedState: {folderID(nav)}, }, - driveFullPath(1, name(fanny)): { - data.NewState: {idx(fanny, 2), idx(file, 2)}, + d.strPath(folderName(nav), folderName(foo)): { + data.MovedState: {folderID(bar)}, + }, + d.strPath(folderName(fanny)): { + data.NotMovedState: {folderID(fanny)}, + }, + d.strPath(folderName(fanny), folderName(foo)): { + data.MovedState: {folderID(foo)}, }, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), + id(drivePfx, 1): id(deltaURL), }, expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(nav): driveFullPath(1, name(nav)), - idx(fanny, 2): driveFullPath(1, name(nav)), // note: this is a bug, but currently expected + id(drivePfx, 1): { + rootID: d.strPath(), + folderID(nav): d.strPath(folderName(nav)), + folderID(fanny): d.strPath(folderName(fanny)), + folderID(foo): d.strPath(folderName(nav), folderName(foo)), // note: this is a bug, but currently expected + folderID(bar): d.strPath(folderName(nav), folderName(foo)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{ - driveFullPath(1): makeExcludeMap(id(file), idx(file, 2)), - }), - doNotMergeItems: map[string]bool{}, - }, - { - name: "out of order item enumeration causes opposite prev 
path collisions", - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.Delta(id(delta), nil).With(aPage( - driveItem(idx(file, 1), namex(file, 1), driveParentDir(1), rootID, isFile), - driveItem(id(fanny), name(fanny), driveParentDir(1), rootID, isFolder), - driveItem(id(nav), name(nav), driveParentDir(1), rootID, isFolder), - driveItem(id(foo), name(foo), driveParentDir(1, name(fanny)), id(fanny), isFolder), - driveItem(id(bar), name(foo), driveParentDir(1, name(nav)), id(nav), isFolder))))), - canUsePreviousBackup: true, - errCheck: assert.NoError, - previousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(nav): driveFullPath(1, name(nav)), - id(fanny): driveFullPath(1, name(fanny)), - id(foo): driveFullPath(1, name(nav), name(foo)), - id(bar): driveFullPath(1, name(fanny), name(foo)), - }, - }, - expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): { - data.NotMovedState: {idx(file, 1)}, - }, - driveFullPath(1, name(nav)): { - data.NotMovedState: {id(nav)}, - }, - driveFullPath(1, name(nav), name(foo)): { - data.MovedState: {id(bar)}, - }, - driveFullPath(1, name(fanny)): { - data.NotMovedState: {id(fanny)}, - }, - driveFullPath(1, name(fanny), name(foo)): { - data.MovedState: {id(foo)}, - }, - }, - expectedDeltaURLs: map[string]string{ - idx(drive, 1): id(delta), - }, - expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(nav): driveFullPath(1, name(nav)), - id(fanny): driveFullPath(1, name(fanny)), - id(foo): driveFullPath(1, name(nav), name(foo)), // note: this is a bug, but currently expected - id(bar): driveFullPath(1, name(nav), name(foo)), - }, - }, - expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{ - driveFullPath(1): makeExcludeMap(idx(file, 1)), + d.strPath(): makeExcludeMap(fileID(1)), }), doNotMergeItems: map[string]bool{}, }, @@ -2722,14 +2647,7 @@ func (suite *CollectionsUnitSuite) TestGet() { ctx, flush := tester.NewContext(t) defer flush() - mockDrivePager := &apiMock.Pager[models.Driveable]{ - ToReturn: []apiMock.PagerResult[models.Driveable]{ - {Values: test.drives}, - }, - } - - mbh := mock.DefaultOneDriveBH(user) - mbh.DrivePagerV = mockDrivePager + mbh := defaultOneDriveBH(user) mbh.DriveItemEnumeration = test.enumerator c := NewCollections( @@ -2751,8 +2669,8 @@ func (suite *CollectionsUnitSuite) TestGet() { graph.NewMetadataEntry( bupMD.DeltaURLsFileName, map[string]string{ - idx(drive, 1): prevDelta, - idx(drive, 2): prevDelta, + id(drivePfx, 1): prevDelta, + d2.id: prevDelta, }), graph.NewMetadataEntry( bupMD.PreviousPathFileName, @@ -2860,35 +2778,27 @@ func (suite *CollectionsUnitSuite) TestGet() { } func (suite *CollectionsUnitSuite) TestAddURLCacheToDriveCollections() { - drive1 := models.NewDrive() - drive1.SetId(ptr.To(idx(drive, 1))) - drive1.SetName(ptr.To(namex(drive, 1))) - - drive2 := models.NewDrive() - drive2.SetId(ptr.To(idx(drive, 2))) - drive2.SetName(ptr.To(namex(drive, 2))) + d := drive(1) + d2 := drive(2) table := []struct { name string - drives []models.Driveable - enumerator mock.EnumerateDriveItemsDelta + enumerator enumerateDriveItemsDelta errCheck assert.ErrorAssertionFunc }{ { name: "Two drives with unique url cache instances", - drives: []models.Driveable{ - drive1, - drive2, - }, - enumerator: mock.DriveEnumerator( - mock.Drive(idx(drive, 1)).With( - mock.Delta(id(delta), nil).With(aPage( - driveItem(id(folder), name(folder), 
driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)))), - mock.Drive(idx(drive, 2)).With( - mock.Delta(idx(delta, 2), nil).With(aPage( - driveItem(idx(folder, 2), name(folder), driveParentDir(2), rootID, isFolder), - driveItem(idx(file, 2), name(file), driveParentDir(2, name(folder)), idx(folder, 2), isFile))))), + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( + aPage( + driveFolder(d.dir(), rootID), + driveFile(d.dir(folderName()), folderID())))), + d2.newEnumer().with( + delta(id(deltaURL, 2), nil).with( + aPage( + driveItem(folderID(2), folderName(), d2.dir(), rootID, isFolder), + driveItem(fileID(2), fileName(), d2.dir(folderName()), folderID(2), isFile))))), errCheck: assert.NoError, }, // TODO(pandeyabs): Add a test case to check that the cache is not attached @@ -2903,14 +2813,7 @@ func (suite *CollectionsUnitSuite) TestAddURLCacheToDriveCollections() { ctx, flush := tester.NewContext(t) defer flush() - mockDrivePager := &apiMock.Pager[models.Driveable]{ - ToReturn: []apiMock.PagerResult[models.Driveable]{ - {Values: test.drives}, - }, - } - - mbh := mock.DefaultOneDriveBH(user) - mbh.DrivePagerV = mockDrivePager + mbh := defaultOneDriveBH(user) mbh.DriveItemEnumeration = test.enumerator c := NewCollections( @@ -2968,7 +2871,7 @@ func (suite *CollectionsUnitSuite) TestAddURLCacheToDriveCollections() { // Check that we have the expected number of caches. One per drive. require.Equal( t, - len(test.drives), + len(test.enumerator.getDrives()), len(caches), "expected one cache per drive") }) diff --git a/src/internal/m365/collection/drive/collections_tree.go b/src/internal/m365/collection/drive/collections_tree.go index ae6800389..1c083432b 100644 --- a/src/internal/m365/collection/drive/collections_tree.go +++ b/src/internal/m365/collection/drive/collections_tree.go @@ -629,7 +629,7 @@ func (c *Collections) addFileToTree( return nil, nil } - _, alreadySeen := tree.fileIDToParentID[fileID] + alreadySeen := tree.hasFile(fileID) parentNode, parentNotNil := tree.folderIDToNode[parentID] if parentNotNil && !alreadySeen { @@ -686,25 +686,10 @@ func (c *Collections) makeDriveTombstones( continue } - // TODO: call NewTombstoneCollection - coll, err := NewCollection( - c.handler, - c.protectedResource, - nil, // delete the drive + coll := data.NewTombstoneCollection( prevDrivePath, - driveID, - c.statusUpdater, c.ctrl, - false, - true, - nil, c.counter.Local()) - if err != nil { - err = clues.WrapWC(ctx, err, "making drive tombstone") - el.AddRecoverable(ctx, err) - - continue - } colls = append(colls, coll) } diff --git a/src/internal/m365/collection/drive/collections_tree_test.go b/src/internal/m365/collection/drive/collections_tree_test.go index d1bc1b355..c23e8104c 100644 --- a/src/internal/m365/collection/drive/collections_tree_test.go +++ b/src/internal/m365/collection/drive/collections_tree_test.go @@ -13,13 +13,11 @@ import ( "github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/data" odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts" - "github.com/alcionai/corso/src/internal/m365/service/onedrive/mock" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/count" countTD "github.com/alcionai/corso/src/pkg/count/testdata" "github.com/alcionai/corso/src/pkg/fault" - apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock" 
"github.com/alcionai/corso/src/pkg/services/m365/api/pagers" "github.com/alcionai/corso/src/pkg/services/m365/custom" ) @@ -33,7 +31,7 @@ func TestCollectionsTreeUnitSuite(t *testing.T) { } func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveTombstones() { - badPfxMBH := mock.DefaultOneDriveBH(user) + badPfxMBH := defaultOneDriveBH(user) badPfxMBH.PathPrefixErr = assert.AnError twostones := map[string]struct{}{ @@ -51,21 +49,21 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveTombstones() { { name: "nil", tombstones: nil, - c: collWithMBH(mock.DefaultOneDriveBH(user)), + c: collWithMBH(defaultOneDriveBH(user)), expectErr: assert.NoError, expect: assert.Empty, }, { name: "none", tombstones: map[string]struct{}{}, - c: collWithMBH(mock.DefaultOneDriveBH(user)), + c: collWithMBH(defaultOneDriveBH(user)), expectErr: assert.NoError, expect: assert.Empty, }, { name: "some tombstones", tombstones: twostones, - c: collWithMBH(mock.DefaultOneDriveBH(user)), + c: collWithMBH(defaultOneDriveBH(user)), expectErr: assert.NoError, expect: assert.NotEmpty, }, @@ -96,7 +94,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveTombstones() { } func (suite *CollectionsTreeUnitSuite) TestCollections_MakeMetadataCollections() { - badMetaPfxMBH := mock.DefaultOneDriveBH(user) + badMetaPfxMBH := defaultOneDriveBH(user) badMetaPfxMBH.MetadataPathPrefixErr = assert.AnError table := []struct { @@ -106,7 +104,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeMetadataCollections() }{ { name: "no errors", - c: collWithMBH(mock.DefaultOneDriveBH(user)), + c: collWithMBH(defaultOneDriveBH(user)), expect: assert.NotEmpty, }, { @@ -146,10 +144,6 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_GetTree() { // path.FilesCategory, // false) // require.NoError(suite.T(), err, "making metadata path", clues.ToCore(err)) - drv := models.NewDrive() - drv.SetId(ptr.To(id(drive))) - drv.SetName(ptr.To(name(drive))) - type expected struct { canUsePrevBackup assert.BoolAssertionFunc counts countTD.Expected @@ -160,19 +154,17 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_GetTree() { table := []struct { name string - drivePager *apiMock.Pager[models.Driveable] - enumerator mock.EnumerateDriveItemsDelta + enumerator enumerateDriveItemsDelta previousPaths map[string]map[string]string metadata []data.RestoreCollection expect expected }{ { - name: "not yet implemented", - drivePager: pagerForDrives(drv), - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( + name: "not yet implemented", + enumerator: driveEnumerator( + drive().newEnumer().with( + delta(id(deltaURL), nil).with( aPage()))), expect: expected{ canUsePrevBackup: assert.False, @@ -193,7 +185,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_GetTree() { defer flush() var ( - mbh = mock.DefaultDriveBHWith(user, test.drivePager, test.enumerator) + mbh = defaultDriveBHWith(user, test.enumerator) c = collWithMBH(mbh) prevMetadata = makePrevMetadataColls(t, mbh, test.previousPaths) globalExcludes = prefixmatcher.NewStringSetBuilder() @@ -244,23 +236,21 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_GetTree() { // at lower levels are better for verifing fine-grained concerns. This test only needs // to ensure we stitch the parts together correctly. 
func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() { - drv := models.NewDrive() - drv.SetId(ptr.To(id(drive))) - drv.SetName(ptr.To(name(drive))) + d := drive() table := []struct { name string - drive models.Driveable - enumerator mock.EnumerateDriveItemsDelta + drive *deltaDrive + enumerator enumerateDriveItemsDelta prevPaths map[string]string expectCounts countTD.Expected }{ { name: "only root in delta, no prev paths", - drive: drv, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( + drive: d, + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( aPage()))), prevPaths: map[string]string{}, expectCounts: countTD.Expected{ @@ -269,13 +259,13 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() { }, { name: "only root in delta, with prev paths", - drive: drv, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( + drive: d, + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( aPage()))), prevPaths: map[string]string{ - id(folder): fullPath(name(folder)), + folderID(): d.strPath(folderName()), }, expectCounts: countTD.Expected{ count.PrevPaths: 1, @@ -283,11 +273,11 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() { }, { name: "some items, no prev paths", - drive: drv, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( - aPage(folderAtRoot(), fileAt(folder))))), + drive: d, + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( + aPage(d.folderAtRoot(), d.fileAt(folder))))), prevPaths: map[string]string{}, expectCounts: countTD.Expected{ count.PrevPaths: 0, @@ -295,13 +285,13 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() { }, { name: "some items, with prev paths", - drive: drv, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( - aPage(folderAtRoot(), fileAt(folder))))), + drive: d, + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( + aPage(d.folderAtRoot(), d.fileAt(folder))))), prevPaths: map[string]string{ - id(folder): fullPath(name(folder)), + folderID(): d.strPath(folderName()), }, expectCounts: countTD.Expected{ count.PrevPaths: 1, @@ -309,10 +299,10 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() { }, { name: "tree had delta reset, only root after, no prev paths", - drive: drv, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.DeltaWReset(id(delta), nil).With( + drive: d, + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL), nil).with( aReset(), aPage()))), prevPaths: map[string]string{}, @@ -322,14 +312,14 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() { }, { name: "tree had delta reset, only root after, with prev paths", - drive: drv, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.DeltaWReset(id(delta), nil).With( + drive: d, + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL), nil).with( aReset(), aPage()))), prevPaths: map[string]string{ - id(folder): fullPath(name(folder)), + folderID(): d.strPath(folderName()), }, expectCounts: countTD.Expected{ count.PrevPaths: 1, @@ -337,12 +327,12 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() { }, 
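
A note on the reset cases in this table: they hinge on deltaWReset and aReset, which only appear as call sites in this diff. A plausible, hedged reading is that aReset marks a page as a delta-token reset, while deltaWReset flags the resulting DeltaUpdate so the collector discards previously-known paths before replaying the pages that follow. A sketch under those assumptions:

// Assumed shapes; the Reset fields on nextPage and pagers.DeltaUpdate are
// inferred from the tests' expected behavior, not confirmed signatures.
func aReset(items ...models.DriveItemable) nextPage {
	return nextPage{Items: items, Reset: true}
}

func deltaWReset(url string, err error) *deltaQuery {
	// err handling is omitted here, as in the delta() sketch above.
	return &deltaQuery{DeltaUpdate: pagers.DeltaUpdate{URL: url, Reset: true}}
}
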
{ name: "tree had delta reset, enumerate items after, no prev paths", - drive: drv, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.DeltaWReset(id(delta), nil).With( + drive: d, + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL), nil).with( aReset(), - aPage(folderAtRoot(), fileAt(folder))))), + aPage(d.folderAtRoot(), d.fileAt(folder))))), prevPaths: map[string]string{}, expectCounts: countTD.Expected{ count.PrevPaths: 0, @@ -350,14 +340,14 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() { }, { name: "tree had delta reset, enumerate items after, with prev paths", - drive: drv, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.DeltaWReset(id(delta), nil).With( + drive: d, + enumerator: driveEnumerator( + d.newEnumer().with( + deltaWReset(id(deltaURL), nil).with( aReset(), - aPage(folderAtRoot(), fileAt(folder))))), + aPage(d.folderAtRoot(), d.fileAt(folder))))), prevPaths: map[string]string{ - id(folder): fullPath(name(folder)), + folderID(): d.strPath(folderName()), }, expectCounts: countTD.Expected{ count.PrevPaths: 1, @@ -371,17 +361,16 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() { ctx, flush := tester.NewContext(t) defer flush() - mbh := mock.DefaultOneDriveBH(user) - mbh.DrivePagerV = pagerForDrives(drv) + mbh := defaultOneDriveBH(user) mbh.DriveItemEnumeration = test.enumerator c := collWithMBH(mbh) _, _, _, err := c.makeDriveCollections( ctx, - test.drive, + test.drive.able, test.prevPaths, - idx(delta, "prev"), + id(deltaURL, "prev"), newPagerLimiter(control.DefaultOptions()), prefixmatcher.NewStringSetBuilder(), c.counter, @@ -394,9 +383,11 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() { } func (suite *CollectionsTreeUnitSuite) TestCollections_AddPrevPathsToTree_errors() { + d := drive() + table := []struct { name string - tree func(t *testing.T) *folderyMcFolderFace + tree func(t *testing.T, d *deltaDrive) *folderyMcFolderFace prevPaths map[string]string expectErr require.ErrorAssertionFunc }{ @@ -404,8 +395,8 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddPrevPathsToTree_errors name: "no error - normal usage", tree: treeWithFolders, prevPaths: map[string]string{ - idx(folder, "parent"): fullPath(namex(folder, "parent")), - id(folder): fullPath(namex(folder, "parent"), name(folder)), + folderID("parent"): d.strPath(folderName("parent")), + folderID(): d.strPath(folderName("parent"), folderName()), }, expectErr: require.NoError, }, @@ -419,7 +410,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddPrevPathsToTree_errors name: "no error - folder not visited in this delta", tree: treeWithFolders, prevPaths: map[string]string{ - id("santa"): fullPath(name("santa")), + id("santa"): d.strPath(name("santa")), }, expectErr: require.NoError, }, @@ -427,7 +418,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddPrevPathsToTree_errors name: "empty key in previous paths", tree: treeWithFolders, prevPaths: map[string]string{ - "": fullPath(namex(folder, "parent")), + "": d.strPath(folderName("parent")), }, expectErr: require.Error, }, @@ -435,7 +426,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddPrevPathsToTree_errors name: "empty value in previous paths", tree: treeWithFolders, prevPaths: map[string]string{ - id(folder): "", + folderID(): "", }, expectErr: require.Error, }, @@ -443,7 +434,7 @@ func (suite *CollectionsTreeUnitSuite) 
TestCollections_AddPrevPathsToTree_errors name: "malformed value in previous paths", tree: treeWithFolders, prevPaths: map[string]string{ - id(folder): "not a path", + folderID(): "not a path", }, expectErr: require.Error, }, @@ -455,7 +446,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddPrevPathsToTree_errors ctx, flush := tester.NewContext(t) defer flush() - tree := test.tree(t) + tree := test.tree(t, d) err := addPrevPathsToTree( ctx, @@ -468,15 +459,17 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddPrevPathsToTree_errors } func (suite *CollectionsTreeUnitSuite) TestCollections_TurnTreeIntoCollections() { + d := drive() + type expected struct { prevPaths map[string]string - collections func(t *testing.T) expectedCollections + collections func(t *testing.T, d *deltaDrive) expectedCollections globalExcludedFileIDs map[string]struct{} } table := []struct { name string - tree func(t *testing.T) *folderyMcFolderFace + tree func(t *testing.T, d *deltaDrive) *folderyMcFolderFace prevPaths map[string]string enableURLCache bool expect expected @@ -488,32 +481,32 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_TurnTreeIntoCollections() enableURLCache: true, expect: expected{ prevPaths: map[string]string{ - rootID: fullPath(), - idx(folder, "parent"): fullPath(namex(folder, "parent")), - id(folder): fullPath(namex(folder, "parent"), name(folder)), + rootID: d.strPath(), + folderID("parent"): d.strPath(folderName("parent")), + folderID(): d.strPath(folderName("parent"), folderName()), }, - collections: func(t *testing.T) expectedCollections { + collections: func(t *testing.T, d *deltaDrive) expectedCollections { return expectCollections( false, true, aColl( - fullPathPath(t), + d.fullPath(t), nil, - idx(file, "r")), + fileID("r")), aColl( - fullPathPath(t, namex(folder, "parent")), + d.fullPath(t, folderName("parent")), nil, - idx(file, "p")), + fileID("p")), aColl( - fullPathPath(t, namex(folder, "parent"), name(folder)), + d.fullPath(t, folderName("parent"), folderName()), nil, - id(file))) + fileID())) }, globalExcludedFileIDs: makeExcludeMap( - idx(file, "r"), - idx(file, "p"), - idx(file, "d"), - id(file)), + fileID("r"), + fileID("p"), + fileID("d"), + fileID()), }, }, { @@ -521,40 +514,40 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_TurnTreeIntoCollections() tree: fullTree, enableURLCache: true, prevPaths: map[string]string{ - rootID: fullPath(), - idx(folder, "parent"): fullPath(namex(folder, "parent-prev")), - id(folder): fullPath(namex(folder, "parent-prev"), name(folder)), - idx(folder, "tombstone"): fullPath(namex(folder, "tombstone-prev")), + rootID: d.strPath(), + folderID("parent"): d.strPath(folderName("parent-prev")), + folderID(): d.strPath(folderName("parent-prev"), folderName()), + folderID("tombstone"): d.strPath(folderName("tombstone-prev")), }, expect: expected{ prevPaths: map[string]string{ - rootID: fullPath(), - idx(folder, "parent"): fullPath(namex(folder, "parent")), - id(folder): fullPath(namex(folder, "parent"), name(folder)), + rootID: d.strPath(), + folderID("parent"): d.strPath(folderName("parent")), + folderID(): d.strPath(folderName("parent"), folderName()), }, - collections: func(t *testing.T) expectedCollections { + collections: func(t *testing.T, d *deltaDrive) expectedCollections { return expectCollections( false, true, aColl( - fullPathPath(t), - fullPathPath(t), - idx(file, "r")), + d.fullPath(t), + d.fullPath(t), + fileID("r")), aColl( - fullPathPath(t, namex(folder, "parent")), - fullPathPath(t, 
namex(folder, "parent-prev")), - idx(file, "p")), + d.fullPath(t, folderName("parent")), + d.fullPath(t, folderName("parent-prev")), + fileID("p")), aColl( - fullPathPath(t, namex(folder, "parent"), name(folder)), - fullPathPath(t, namex(folder, "parent-prev"), name(folder)), - id(file)), - aColl(nil, fullPathPath(t, namex(folder, "tombstone-prev")))) + d.fullPath(t, folderName("parent"), folderName()), + d.fullPath(t, folderName("parent-prev"), folderName()), + fileID()), + aColl(nil, d.fullPath(t, folderName("tombstone-prev")))) }, globalExcludedFileIDs: makeExcludeMap( - idx(file, "r"), - idx(file, "p"), - idx(file, "d"), - id(file)), + fileID("r"), + fileID("p"), + fileID("d"), + fileID()), }, }, { @@ -562,40 +555,40 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_TurnTreeIntoCollections() tree: fullTreeWithNames("parent", "tombstone"), enableURLCache: true, prevPaths: map[string]string{ - rootID: fullPath(), - idx(folder, "parent"): fullPath(namex(folder, "parent-prev")), - id(folder): fullPath(namex(folder, "parent-prev"), name(folder)), - idx(folder, "tombstone"): fullPath(namex(folder, "tombstone-prev")), + rootID: d.strPath(), + folderID("parent"): d.strPath(folderName("parent-prev")), + folderID(): d.strPath(folderName("parent-prev"), folderName()), + folderID("tombstone"): d.strPath(folderName("tombstone-prev")), }, expect: expected{ prevPaths: map[string]string{ - rootID: fullPath(), - idx(folder, "parent"): fullPath(namex(folder, "parent")), - id(folder): fullPath(namex(folder, "parent"), name(folder)), + rootID: d.strPath(), + folderID("parent"): d.strPath(folderName("parent")), + folderID(): d.strPath(folderName("parent"), folderName()), }, - collections: func(t *testing.T) expectedCollections { + collections: func(t *testing.T, d *deltaDrive) expectedCollections { return expectCollections( false, true, aColl( - fullPathPath(t), - fullPathPath(t), - idx(file, "r")), + d.fullPath(t), + d.fullPath(t), + fileID("r")), aColl( - fullPathPath(t, namex(folder, "parent")), - fullPathPath(t, namex(folder, "parent-prev")), - idx(file, "p")), + d.fullPath(t, folderName("parent")), + d.fullPath(t, folderName("parent-prev")), + fileID("p")), aColl( - fullPathPath(t, namex(folder, "parent"), name(folder)), - fullPathPath(t, namex(folder, "parent-prev"), name(folder)), - id(file)), - aColl(nil, fullPathPath(t, namex(folder, "tombstone-prev")))) + d.fullPath(t, folderName("parent"), folderName()), + d.fullPath(t, folderName("parent-prev"), folderName()), + fileID()), + aColl(nil, d.fullPath(t, folderName("tombstone-prev")))) }, globalExcludedFileIDs: makeExcludeMap( - idx(file, "r"), - idx(file, "p"), - idx(file, "d"), - id(file)), + fileID("r"), + fileID("p"), + fileID("d"), + fileID()), }, }, { @@ -603,40 +596,40 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_TurnTreeIntoCollections() tree: fullTree, enableURLCache: true, prevPaths: map[string]string{ - rootID: fullPath(), - idx(folder, "parent"): fullPath(namex(folder, "parent")), - id(folder): fullPath(namex(folder, "parent"), name(folder)), - idx(folder, "tombstone"): fullPath(namex(folder, "tombstone")), + rootID: d.strPath(), + folderID("parent"): d.strPath(folderName("parent")), + folderID(): d.strPath(folderName("parent"), folderName()), + folderID("tombstone"): d.strPath(folderName("tombstone")), }, expect: expected{ prevPaths: map[string]string{ - rootID: fullPath(), - idx(folder, "parent"): fullPath(namex(folder, "parent")), - id(folder): fullPath(namex(folder, "parent"), name(folder)), + rootID: 
d.strPath(), + folderID("parent"): d.strPath(folderName("parent")), + folderID(): d.strPath(folderName("parent"), folderName()), }, - collections: func(t *testing.T) expectedCollections { + collections: func(t *testing.T, d *deltaDrive) expectedCollections { return expectCollections( false, true, aColl( - fullPathPath(t), - fullPathPath(t), - idx(file, "r")), + d.fullPath(t), + d.fullPath(t), + fileID("r")), aColl( - fullPathPath(t, namex(folder, "parent")), - fullPathPath(t, namex(folder, "parent")), - idx(file, "p")), + d.fullPath(t, folderName("parent")), + d.fullPath(t, folderName("parent")), + fileID("p")), aColl( - fullPathPath(t, namex(folder, "parent"), name(folder)), - fullPathPath(t, namex(folder, "parent"), name(folder)), - id(file)), - aColl(nil, fullPathPath(t, namex(folder, "tombstone")))) + d.fullPath(t, folderName("parent"), folderName()), + d.fullPath(t, folderName("parent"), folderName()), + fileID()), + aColl(nil, d.fullPath(t, folderName("tombstone")))) }, globalExcludedFileIDs: makeExcludeMap( - idx(file, "r"), - idx(file, "p"), - idx(file, "d"), - id(file)), + fileID("r"), + fileID("p"), + fileID("d"), + fileID()), }, }, } @@ -647,12 +640,12 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_TurnTreeIntoCollections() ctx, flush := tester.NewContext(t) defer flush() - tree := test.tree(t) + tree := test.tree(t, d) err := addPrevPathsToTree(ctx, tree, test.prevPaths, fault.New(true)) require.NoError(t, err, clues.ToCore(err)) - c := collWithMBH(mock.DefaultOneDriveBH(user)) + c := collWithMBH(defaultOneDriveBH(user)) countPages := 9001 if test.enableURLCache { @@ -662,14 +655,14 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_TurnTreeIntoCollections() colls, newPrevPaths, excluded, err := c.turnTreeIntoCollections( ctx, tree, - id(drive), - delta, + d.id, + deltaURL, countPages, fault.New(true)) require.NoError(t, err, clues.ToCore(err)) assert.Equal(t, test.expect.prevPaths, newPrevPaths, "new previous paths") - expectColls := test.expect.collections(t) + expectColls := test.expect.collections(t, d) expectColls.compare(t, colls) expectColls.requireNoUnseenCollections(t) @@ -692,8 +685,8 @@ type populateTreeExpected struct { type populateTreeTest struct { name string - enumerator mock.EnumerateDriveItemsDelta - tree func(t *testing.T) *folderyMcFolderFace + enumerator enumerateDriveItemsDelta + tree func(t *testing.T, d *deltaDrive) *folderyMcFolderFace limiter *pagerLimiter expect populateTreeExpected } @@ -701,9 +694,7 @@ type populateTreeTest struct { // this test focuses on the population of a tree using a single delta's enumeration data. // It is not concerned with unifying previous paths or post-processing collections. func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta() { - drv := models.NewDrive() - drv.SetId(ptr.To(id(drive))) - drv.SetName(ptr.To(name(drive))) + d := drive() table := []populateTreeTest{ { @@ -711,13 +702,13 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta( tree: newTree, // special case enumerator to generate a null page. // otherwise all enumerators should be DriveEnumerator()s. 
- enumerator: mock.EnumerateDriveItemsDelta{ - DrivePagers: map[string]*mock.DriveDeltaEnumerator{ - id(drive): { - DriveID: id(drive), - DeltaQueries: []*mock.DeltaQuery{{ + enumerator: enumerateDriveItemsDelta{ + DrivePagers: map[string]*DeltaDriveEnumerator{ + d.id: { + Drive: d, + DeltaQueries: []*deltaQuery{{ Pages: nil, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(deltaURL)}, }}, }, }, @@ -737,9 +728,9 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta( { name: "root only", tree: newTree, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( aPage()))), limiter: newPagerLimiter(control.DefaultOptions()), expect: populateTreeExpected{ @@ -762,9 +753,9 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta( { name: "root only on two pages", tree: newTree, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( aPage(), aPage()))), limiter: newPagerLimiter(control.DefaultOptions()), @@ -788,14 +779,14 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta( { name: "many folders in a hierarchy across multiple pages", tree: newTree, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( - aPage(folderAtRoot()), - aPage(folderxAtRoot("sib")), + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( + aPage(d.folderAtRoot()), + aPage(d.folderAtRoot("sib")), aPage( - folderAtRoot(), - folderxAt("chld", folder))))), + d.folderAtRoot(), + d.folderAt(folder, "chld"))))), limiter: newPagerLimiter(control.DefaultOptions()), expect: populateTreeExpected{ counts: countTD.Expected{ @@ -809,9 +800,9 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta( sizeBytes: 0, treeContainsFolderIDs: []string{ rootID, - id(folder), - idx(folder, "sib"), - idx(folder, "chld"), + folderID(), + folderID("sib"), + folderID("chld"), }, treeContainsTombstoneIDs: []string{}, treeContainsFileIDsWithParent: map[string]string{}, @@ -820,19 +811,19 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta( { name: "many folders with files", tree: newTree, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( aPage( - folderAtRoot(), - fileAt(folder)), + d.folderAtRoot(), + d.fileAt(folder)), aPage( - folderxAtRoot("sib"), - filexAt("fsib", "sib")), + d.folderAtRoot("sib"), + d.fileAt("sib", "fsib")), aPage( - folderAtRoot(), - folderxAt("chld", folder), - filexAt("fchld", "chld"))))), + d.folderAtRoot(), + d.folderAt(folder, "chld"), + d.fileAt("chld", "fchld"))))), limiter: newPagerLimiter(control.DefaultOptions()), expect: populateTreeExpected{ counts: countTD.Expected{ @@ -843,73 +834,92 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta( err: require.NoError, numLiveFiles: 3, numLiveFolders: 4, - sizeBytes: 3 * defaultItemSize, + sizeBytes: 3 * defaultFileSize, treeContainsFolderIDs: []string{ rootID, - id(folder), - idx(folder, "sib"), - idx(folder, "chld"), + folderID(), + folderID("sib"), + folderID("chld"), }, 
treeContainsTombstoneIDs: []string{}, treeContainsFileIDsWithParent: map[string]string{ - id(file): id(folder), - idx(file, "fsib"): idx(folder, "sib"), - idx(file, "fchld"): idx(folder, "chld"), + fileID(): folderID(), + fileID("fsib"): folderID("sib"), + fileID("fchld"): folderID("chld"), }, }, }, { - name: "many folders with files across multiple deltas", + name: "tombstone with unpopulated tree", tree: newTree, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With(aPage( - folderAtRoot(), - fileAt(folder))), - mock.Delta(id(delta), nil).With(aPage( - folderxAtRoot("sib"), - filexAt("fsib", "sib"))), - mock.Delta(id(delta), nil).With(aPage( - folderAtRoot(), - folderxAt("chld", folder), - filexAt("fchld", "chld"))))), + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( + aPage(delItem(folderID(), folderID("parent"), isFolder))))), limiter: newPagerLimiter(control.DefaultOptions()), expect: populateTreeExpected{ counts: countTD.Expected{ - count.TotalFoldersProcessed: 7, - count.TotalFilesProcessed: 3, - count.TotalPagesEnumerated: 4, + count.TotalFoldersProcessed: 1, + count.TotalDeleteFoldersProcessed: 1, + count.TotalFilesProcessed: 0, + count.TotalPagesEnumerated: 2, }, err: require.NoError, - numLiveFiles: 3, - numLiveFolders: 4, - sizeBytes: 3 * 42, + numLiveFiles: 0, + numLiveFolders: 1, + sizeBytes: 0, treeContainsFolderIDs: []string{ rootID, - id(folder), - idx(folder, "sib"), - idx(folder, "chld"), }, - treeContainsTombstoneIDs: []string{}, + treeContainsTombstoneIDs: []string{ + folderID(), + }, + treeContainsFileIDsWithParent: map[string]string{}, + }, + }, + { + name: "tombstone with populated tree", + tree: treeWithFileInFolder, + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with(aPage( + delItem(folderID(), folderID("parent"), isFolder))))), + limiter: newPagerLimiter(control.DefaultOptions()), + expect: populateTreeExpected{ + counts: countTD.Expected{ + count.TotalFoldersProcessed: 1, + count.TotalDeleteFoldersProcessed: 1, + count.TotalFilesProcessed: 0, + count.TotalPagesEnumerated: 2, + }, + err: require.NoError, + numLiveFiles: 0, + numLiveFolders: 2, + sizeBytes: 0, + treeContainsFolderIDs: []string{ + rootID, + folderID("parent"), + }, + treeContainsTombstoneIDs: []string{ + folderID(), + }, treeContainsFileIDsWithParent: map[string]string{ - id(file): id(folder), - idx(file, "fsib"): idx(folder, "sib"), - idx(file, "fchld"): idx(folder, "chld"), + fileID(): folderID(), }, }, }, { // technically you won't see this behavior from graph deltas, since deletes always // precede creates/updates. But it's worth checking that we can handle it anyways. 
- name: "create, delete on next page", + name: "create->delete folder", tree: newTree, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( aPage( - folderAtRoot(), - fileAt(folder)), - aPage(delItem(id(folder), rootID, isFolder))))), + d.folderAtRoot(), + d.fileAt(folder)), + aPage(delItem(folderID(), rootID, isFolder))))), limiter: newPagerLimiter(control.DefaultOptions()), expect: populateTreeExpected{ counts: countTD.Expected{ @@ -927,7 +937,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta( }, treeContainsTombstoneIDs: []string{}, treeContainsFileIDsWithParent: map[string]string{ - id(file): id(folder), + fileID(): folderID(), }, }, }, @@ -936,14 +946,14 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta( // precede creates/updates. But it's worth checking that we can handle it anyways. name: "move->delete folder with populated tree", tree: treeWithFolders, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( aPage( - folderxAtRoot("parent"), - driveItem(id(folder), namex(folder, "moved"), parentDir(), idx(folder, "parent"), isFolder), - fileAtDeep(parentDir(namex(folder, "parent"), name(folder)), id(folder))), - aPage(delItem(id(folder), idx(folder, "parent"), isFolder))))), + d.folderAtRoot("parent"), + driveItem(folderID(), folderName("moved"), d.dir(), folderID("parent"), isFolder), + driveFile(d.dir(folderName("parent"), folderName()), folderID())), + aPage(delItem(folderID(), folderID("parent"), isFolder))))), limiter: newPagerLimiter(control.DefaultOptions()), expect: populateTreeExpected{ counts: countTD.Expected{ @@ -958,32 +968,96 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta( sizeBytes: 0, treeContainsFolderIDs: []string{ rootID, - idx(folder, "parent"), + folderID("parent"), }, treeContainsTombstoneIDs: []string{ - id(folder), + folderID(), }, treeContainsFileIDsWithParent: map[string]string{ - id(file): id(folder), + fileID(): folderID(), + }, + }, + }, + { + name: "delete->create folder with previous path", + tree: newTree, + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( + aPage(delItem(folderID(), rootID, isFolder)), + aPage( + d.folderAtRoot(), + d.fileAt(folder))))), + limiter: newPagerLimiter(control.DefaultOptions()), + expect: populateTreeExpected{ + counts: countTD.Expected{ + count.TotalDeleteFoldersProcessed: 1, + count.TotalFoldersProcessed: 3, + count.TotalFilesProcessed: 1, + count.TotalPagesEnumerated: 3, + }, + err: require.NoError, + numLiveFiles: 1, + numLiveFolders: 2, + sizeBytes: defaultFileSize, + treeContainsFolderIDs: []string{ + rootID, + folderID(), + }, + treeContainsTombstoneIDs: []string{}, + treeContainsFileIDsWithParent: map[string]string{ + fileID(): folderID(), + }, + }, + }, + { + name: "delete->create folder without previous path", + tree: treeWithRoot, + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( + aPage(delItem(folderID(), rootID, isFolder)), + aPage( + d.folderAtRoot(), + d.fileAt(folder))))), + limiter: newPagerLimiter(control.DefaultOptions()), + expect: populateTreeExpected{ + counts: countTD.Expected{ + count.TotalDeleteFoldersProcessed: 1, + count.TotalFoldersProcessed: 3, + 
count.TotalFilesProcessed: 1, + count.TotalPagesEnumerated: 3, + }, + err: require.NoError, + numLiveFiles: 1, + numLiveFolders: 2, + sizeBytes: defaultFileSize, + treeContainsFolderIDs: []string{ + rootID, + folderID(), + }, + treeContainsTombstoneIDs: []string{}, + treeContainsFileIDsWithParent: map[string]string{ + fileID(): folderID(), }, }, }, { name: "at folder limit before enumeration", tree: treeWithFileAtRoot, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( aPage( - folderAtRoot(), - fileAt(folder)), + d.folderAtRoot(), + d.fileAt(folder)), aPage( - folderxAtRoot("sib"), - filexAt("fsib", "sib")), + d.folderAtRoot("sib"), + d.fileAt("sib", "fsib")), aPage( - folderAtRoot(), - folderxAt("chld", folder), - filexAt("fchld", "chld"))))), + d.folderAtRoot(), + d.folderAt(folder, "chld"), + d.fileAt("chld", "fchld"))))), limiter: newPagerLimiter(minimumLimitOpts()), expect: populateTreeExpected{ counts: countTD.Expected{ @@ -996,7 +1070,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta( shouldHitLimit: true, numLiveFiles: 1, numLiveFolders: 1, - sizeBytes: defaultItemSize, + sizeBytes: defaultFileSize, treeContainsFolderIDs: []string{ rootID, }, @@ -1007,19 +1081,19 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta( { name: "hit folder limit during enumeration", tree: newTree, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( aPage( - folderAtRoot(), - fileAt(folder)), + d.folderAtRoot(), + d.fileAt(folder)), aPage( - folderxAtRoot("sib"), - filexAt("fsib", "sib")), + d.folderAtRoot("sib"), + d.fileAt("sib", "fsib")), aPage( - folderAtRoot(), - folderxAt("chld", folder), - filexAt("fchld", "chld"))))), + d.folderAtRoot(), + d.folderAt(folder, "chld"), + d.fileAt("chld", "fchld"))))), limiter: newPagerLimiter(minimumLimitOpts()), expect: populateTreeExpected{ counts: countTD.Expected{ @@ -1043,7 +1117,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta( } for _, test := range table { suite.Run(test.name, func() { - runPopulateTreeTest(suite.T(), drv, test) + runPopulateTreeTest(suite.T(), d.able, test) }) } } @@ -1052,29 +1126,27 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta( // multiple delta enumerations. // It is not concerned with unifying previous paths or post-processing collections. func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_multiDelta() { - drv := models.NewDrive() - drv.SetId(ptr.To(id(drive))) - drv.SetName(ptr.To(name(drive))) + d := drive() table := []populateTreeTest{ { name: "sanity case: normal enumeration split across multiple deltas", tree: newTree, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( - aPage( - folderAtRoot(), - fileAt(folder))), - mock.Delta(id(delta), nil).With( - aPage( - folderxAtRoot("sib"), - filexAt("fsib", "sib"))), - mock.Delta(id(delta), nil).With( - aPage( - folderAtRoot(), - folderxAt("chld", folder), - filexAt("fchld", "chld"))))), + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil). + with(aPage( + d.folderAtRoot(), + d.fileAt(folder))), + delta(id(deltaURL), nil). 
+ with(aPage( + d.folderAtRoot("sib"), + d.fileAt("sib", "fsib"))), + delta(id(deltaURL), nil). + with(aPage( + d.folderAtRoot(), + d.folderAt(folder, "chld"), + d.fileAt("chld", "fchld"))))), limiter: newPagerLimiter(control.DefaultOptions()), expect: populateTreeExpected{ counts: countTD.Expected{ @@ -1088,39 +1160,39 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_multiDelta() err: require.NoError, numLiveFiles: 3, numLiveFolders: 4, - sizeBytes: 3 * 42, + sizeBytes: 3 * defaultFileSize, treeContainsFolderIDs: []string{ rootID, - id(folder), - idx(folder, "sib"), - idx(folder, "chld"), + folderID(), + folderID("sib"), + folderID("chld"), }, treeContainsTombstoneIDs: []string{}, treeContainsFileIDsWithParent: map[string]string{ - id(file): id(folder), - idx(file, "fsib"): idx(folder, "sib"), - idx(file, "fchld"): idx(folder, "chld"), + fileID(): folderID(), + fileID("fsib"): folderID("sib"), + fileID("fchld"): folderID("chld"), }, }, }, { name: "create->delete,create", tree: newTree, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( aPage( - folderAtRoot(), - fileAt(folder))), + d.folderAtRoot(), + d.fileAt(folder))), // a (delete,create) pair in the same delta can occur when // a user deletes and restores an item in-between deltas. - mock.Delta(id(delta), nil).With( + delta(id(deltaURL), nil).with( aPage( - delItem(id(folder), rootID, isFolder), - delItem(id(file), id(folder), isFile)), + delItem(folderID(), rootID, isFolder), + delItem(fileID(), folderID(), isFile)), aPage( - folderAtRoot(), - fileAt(folder))))), + d.folderAtRoot(), + d.fileAt(folder))))), limiter: newPagerLimiter(control.DefaultOptions()), expect: populateTreeExpected{ counts: countTD.Expected{ @@ -1134,10 +1206,10 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_multiDelta() err: require.NoError, numLiveFiles: 1, numLiveFolders: 2, - sizeBytes: 42, + sizeBytes: defaultFileSize, treeContainsFolderIDs: []string{ rootID, - id(folder), + folderID(), }, treeContainsTombstoneIDs: []string{}, treeContainsFileIDsWithParent: map[string]string{}, @@ -1146,16 +1218,16 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_multiDelta() { name: "visit->rename", tree: newTree, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( aPage( - folderAtRoot(), - fileAt(folder))), - mock.Delta(id(delta), nil).With( + d.folderAtRoot(), + d.fileAt(folder))), + delta(id(deltaURL), nil).with( aPage( - driveItem(id(folder), namex(folder, "rename"), parentDir(), rootID, isFolder), - driveItem(id(file), namex(file, "rename"), parentDir(namex(folder, "rename")), id(folder), isFile))))), + driveItem(folderID(), folderName("rename"), d.dir(), rootID, isFolder), + driveItem(fileID(), fileName("rename"), d.dir(folderName("rename")), folderID(), isFile))))), limiter: newPagerLimiter(control.DefaultOptions()), expect: populateTreeExpected{ counts: countTD.Expected{ @@ -1169,27 +1241,27 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_multiDelta() err: require.NoError, numLiveFiles: 1, numLiveFolders: 2, - sizeBytes: 42, + sizeBytes: defaultFileSize, treeContainsFolderIDs: []string{ rootID, - id(folder), + folderID(), }, treeContainsTombstoneIDs: []string{}, treeContainsFileIDsWithParent: 
map[string]string{ - id(file): id(folder), + fileID(): folderID(), }, }, }, { name: "duplicate folder name from deferred delete marker", tree: newTree, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( + enumerator: driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with( // first page: create /root/folder and /root/folder/file aPage( - folderAtRoot(), - fileAt(folder)), + d.folderAtRoot(), + d.fileAt(folder)), // assume the user makes changes at this point: // * create a new /root/folder // * move /root/folder/file from old to new folder (same file ID) @@ -1197,12 +1269,12 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_multiDelta() // in drive deltas, this will show up as another folder creation sharing // the same dirname, but we won't see the delete until... aPage( - driveItem(idx(folder, 2), name(folder), parentDir(), rootID, isFolder), - driveItem(id(file), name(file), parentDir(name(folder)), idx(folder, 2), isFile))), + driveItem(folderID(2), folderName(), d.dir(), rootID, isFolder), + driveItem(fileID(), fileName(), d.dir(folderName()), folderID(2), isFile))), // the next delta, containing the delete marker for the original /root/folder - mock.Delta(id(delta), nil).With( + delta(id(deltaURL), nil).with( aPage( - delItem(id(folder), rootID, isFolder))))), + delItem(folderID(), rootID, isFolder))))), limiter: newPagerLimiter(control.DefaultOptions()), expect: populateTreeExpected{ counts: countTD.Expected{ @@ -1216,45 +1288,45 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_multiDelta() err: require.NoError, numLiveFiles: 1, numLiveFolders: 2, - sizeBytes: 42, + sizeBytes: defaultFileSize, treeContainsFolderIDs: []string{ rootID, - idx(folder, 2), + folderID(2), }, treeContainsTombstoneIDs: []string{}, treeContainsFileIDsWithParent: map[string]string{ - id(file): idx(folder, 2), + fileID(): folderID(2), }, }, }, } for _, test := range table { suite.Run(test.name, func() { - runPopulateTreeTest(suite.T(), drv, test) + runPopulateTreeTest(suite.T(), d.able, test) }) } } func runPopulateTreeTest( t *testing.T, - drv models.Driveable, + d models.Driveable, test populateTreeTest, ) { ctx, flush := tester.NewContext(t) defer flush() var ( - mbh = mock.DefaultDriveBHWith(user, pagerForDrives(drv), test.enumerator) + mbh = defaultDriveBHWith(user, test.enumerator) c = collWithMBH(mbh) counter = count.New() - tree = test.tree(t) + tree = test.tree(t, drive()) ) _, _, err := c.populateTree( ctx, tree, - drv, - id(delta), + d, + id(deltaURL), test.limiter, counter, fault.New(true)) @@ -1301,9 +1373,7 @@ func runPopulateTreeTest( // This test focuses on folder assertions when enumerating a page of items. // File-specific assertions are focused in the _files test variant.
func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_folders() { - drv := models.NewDrive() - drv.SetId(ptr.To(id(drive))) - drv.SetName(ptr.To(name(drive))) + d := drive() type expected struct { counts countTD.Expected @@ -1316,15 +1386,15 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold table := []struct { name string - tree func(t *testing.T) *folderyMcFolderFace - page mock.NextPage + tree func(t *testing.T, d *deltaDrive) *folderyMcFolderFace + page nextPage limiter *pagerLimiter expect expected }{ { name: "nil page", tree: treeWithRoot, - page: mock.NextPage{}, + page: nextPage{}, limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{}, @@ -1339,7 +1409,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold { name: "empty page", tree: treeWithRoot, - page: mock.NextPage{Items: []models.DriveItemable{}}, + page: nextPage{Items: []models.DriveItemable{}}, limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{}, @@ -1372,9 +1442,9 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold name: "many folders in a hierarchy", tree: treeWithRoot, page: aPage( - folderAtRoot(), - folderxAtRoot("sib"), - folderxAt("chld", folder)), + d.folderAtRoot(), + d.folderAtRoot("sib"), + d.folderAt(folder, "chld")), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1384,9 +1454,9 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold treeSize: 4, treeContainsFolderIDs: []string{ rootID, - id(folder), - idx(folder, "sib"), - idx(folder, "chld"), + folderID(), + folderID("sib"), + folderID("chld"), }, treeContainsTombstoneIDs: []string{}, }, @@ -1395,8 +1465,8 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold name: "create->delete", tree: treeWithRoot, page: aPage( - folderAtRoot(), - delItem(id(folder), rootID, isFolder)), + d.folderAtRoot(), + delItem(folderID(), rootID, isFolder)), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1415,9 +1485,9 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold name: "move->delete", tree: treeWithFolders, page: aPage( - folderxAtRoot("parent"), - driveItem(id(folder), namex(folder, "moved"), parentDir(namex(folder, "parent")), idx(folder, "parent"), isFolder), - delItem(id(folder), idx(folder, "parent"), isFolder)), + d.folderAtRoot("parent"), + driveItem(folderID(), folderName("moved"), d.dir(folderName("parent")), folderID("parent"), isFolder), + delItem(folderID(), folderID("parent"), isFolder)), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1428,10 +1498,10 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold treeSize: 3, treeContainsFolderIDs: []string{ rootID, - idx(folder, "parent"), + folderID("parent"), }, treeContainsTombstoneIDs: []string{ - id(folder), + folderID(), }, }, }, @@ -1439,8 +1509,8 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold name: "delete->create with previous path", tree: treeWithRoot, page: aPage( - delItem(id(folder), rootID, isFolder), - folderAtRoot()), + delItem(folderID(), rootID, isFolder), + d.folderAtRoot()), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1451,7 +1521,7 @@ 
func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold treeSize: 2, treeContainsFolderIDs: []string{ rootID, - id(folder), + folderID(), }, treeContainsTombstoneIDs: []string{}, }, @@ -1460,8 +1530,8 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold name: "delete->create without previous path", tree: treeWithRoot, page: aPage( - delItem(id(folder), rootID, isFolder), - folderAtRoot()), + delItem(folderID(), rootID, isFolder), + d.folderAtRoot()), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1472,7 +1542,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold treeSize: 2, treeContainsFolderIDs: []string{ rootID, - id(folder), + folderID(), }, treeContainsTombstoneIDs: []string{}, }, @@ -1486,15 +1556,15 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold defer flush() var ( - c = collWithMBH(mock.DefaultOneDriveBH(user)) + c = collWithMBH(defaultOneDriveBH(user)) counter = count.New() - tree = test.tree(t) + tree = test.tree(t, d) ) err := c.enumeratePageOfItems( ctx, tree, - drv, + d.able, test.page.Items, test.limiter, counter, @@ -1524,17 +1594,13 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold } func (suite *CollectionsTreeUnitSuite) TestCollections_AddFolderToTree() { - drv := models.NewDrive() - drv.SetId(ptr.To(id(drive))) - drv.SetName(ptr.To(name(drive))) - var ( - fld = custom.ToCustomDriveItem(folderAtRoot()) - subFld = custom.ToCustomDriveItem(folderAtDeep(driveParentDir(drv, namex(folder, "parent")), idx(folder, "parent"))) - pack = custom.ToCustomDriveItem(driveItem(id(pkg), name(pkg), parentDir(), rootID, isPackage)) - del = custom.ToCustomDriveItem(delItem(id(folder), rootID, isFolder)) - mal = custom.ToCustomDriveItem( - malwareItem(idx(folder, "mal"), namex(folder, "mal"), parentDir(), rootID, isFolder)) + d = drive() + fld = custom.ToCustomDriveItem(d.folderAtRoot()) + subFld = custom.ToCustomDriveItem(driveFolder(d.dir(folderName("parent")), folderID("parent"))) + pack = custom.ToCustomDriveItem(driveItem(id(pkg), name(pkg), d.dir(), rootID, isPackage)) + del = custom.ToCustomDriveItem(delItem(folderID(), rootID, isFolder)) + mal = custom.ToCustomDriveItem(malwareItem(folderID("mal"), folderName("mal"), d.dir(), rootID, isFolder)) ) type expected struct { @@ -1549,7 +1615,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFolderToTree() { table := []struct { name string - tree func(t *testing.T) *folderyMcFolderFace + tree func(t *testing.T, d *deltaDrive) *folderyMcFolderFace folder *custom.DriveItem limiter *pagerLimiter expect expected @@ -1755,15 +1821,15 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFolderToTree() { defer flush() var ( - c = collWithMBH(mock.DefaultOneDriveBH(user)) + c = collWithMBH(defaultOneDriveBH(user)) counter = count.New() - tree = test.tree(t) + tree = test.tree(t, d) ) skipped, err := c.addFolderToTree( ctx, tree, - drv, + d.able, test.folder, test.limiter, counter) @@ -1788,10 +1854,13 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFolderToTree() { } func (suite *CollectionsTreeUnitSuite) TestCollections_MakeFolderCollectionPath() { - basePath, err := odConsts.DriveFolderPrefixBuilder(id(drive)).ToDataLayerOneDrivePath(tenant, user, false) + d := drive() + + basePath, err := odConsts.DriveFolderPrefixBuilder(d.id). 
+ ToDataLayerOneDrivePath(tenant, user, false) require.NoError(suite.T(), err, clues.ToCore(err)) - folderPath, err := basePath.Append(false, name(folder)) + folderPath, err := basePath.Append(false, folderName()) require.NoError(suite.T(), err, clues.ToCore(err)) table := []struct { @@ -1802,13 +1871,13 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeFolderCollectionPath( }{ { name: "root", - folder: driveRootItem(), + folder: driveRootFolder(), expect: basePath.String(), expectErr: require.NoError, }, { name: "folder", - folder: folderAtRoot(), + folder: d.folderAtRoot(), expect: folderPath.String(), expectErr: require.NoError, }, @@ -1822,15 +1891,14 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeFolderCollectionPath( for _, test := range table { suite.Run(test.name, func() { t := suite.T() + c := collWithMBH(defaultOneDriveBH(user)) ctx, flush := tester.NewContext(t) defer flush() - c := collWithMBH(mock.DefaultOneDriveBH(user)) - p, err := c.makeFolderCollectionPath( ctx, - id(drive), + d.id, custom.ToCustomDriveItem(test.folder)) test.expectErr(t, err, clues.ToCore(err)) @@ -1848,9 +1916,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeFolderCollectionPath( // this test focuses on file assertions when enumerating a page of items // folder-specific assertions are in the _folders test variant func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_files() { - drv := models.NewDrive() - drv.SetId(ptr.To(id(drive))) - drv.SetName(ptr.To(name(drive))) + d := drive() type expected struct { counts countTD.Expected @@ -1862,14 +1928,14 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file table := []struct { name string - tree func(t *testing.T) *folderyMcFolderFace - page mock.NextPage + tree func(t *testing.T, d *deltaDrive) *folderyMcFolderFace + page nextPage expect expected }{ { name: "one file at root", tree: treeWithRoot, - page: aPage(fileAtRoot()), + page: aPage(d.fileAtRoot()), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 0, @@ -1878,39 +1944,19 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file }, err: require.NoError, treeContainsFileIDsWithParent: map[string]string{ - id(file): rootID, + fileID(): rootID, }, countLiveFiles: 1, - countTotalBytes: defaultItemSize, - }, - }, - { - name: "one file in a folder", - tree: newTree, - page: aPage( - folderAtRoot(), - fileAt(folder)), - expect: expected{ - counts: countTD.Expected{ - count.TotalDeleteFilesProcessed: 0, - count.TotalFoldersProcessed: 2, - count.TotalFilesProcessed: 1, - }, - err: require.NoError, - treeContainsFileIDsWithParent: map[string]string{ - id(file): id(folder), - }, - countLiveFiles: 1, - countTotalBytes: defaultItemSize + countTotalBytes: defaultFileSize, }, }, { name: "many files in a hierarchy", tree: treeWithRoot, page: aPage( - fileAtRoot(), - folderAtRoot(), - filexAt("chld", folder)), + d.fileAtRoot(), + d.folderAtRoot(), + d.fileAt(folder, "fchld")), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 0, @@ -1919,20 +1965,20 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file }, err: require.NoError, treeContainsFileIDsWithParent: map[string]string{ - id(file): rootID, - idx(file, "chld"): id(folder), + fileID(): rootID, + fileID("fchld"): folderID(), }, countLiveFiles: 2, - countTotalBytes: defaultItemSize * 2, + countTotalBytes: defaultFileSize * 2, }, }, { name: "many updates to the same file", tree:
treeWithRoot, page: aPage( - fileAtRoot(), - driveItem(id(file), namex(file, 1), parentDir(), rootID, isFile), - driveItem(id(file), namex(file, 2), parentDir(), rootID, isFile)), + d.fileAtRoot(), + driveItem(fileID(), fileName(1), d.dir(), rootID, isFile), + driveItem(fileID(), fileName(2), d.dir(), rootID, isFile)), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 0, @@ -1941,16 +1987,16 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file }, err: require.NoError, treeContainsFileIDsWithParent: map[string]string{ - id(file): rootID, + fileID(): rootID, }, countLiveFiles: 1, - countTotalBytes: defaultItemSize, + countTotalBytes: defaultFileSize, }, }, { name: "delete an existing file", tree: treeWithFileAtRoot, - page: aPage(delItem(id(file), rootID, isFile)), + page: aPage(delItem(fileID(), rootID, isFile)), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 1, @@ -1967,8 +2013,8 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file name: "delete the same file twice", tree: treeWithFileAtRoot, page: aPage( - delItem(id(file), rootID, isFile), - delItem(id(file), rootID, isFile)), + delItem(fileID(), rootID, isFile), + delItem(fileID(), rootID, isFile)), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 2, @@ -1985,8 +2031,8 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file name: "create->delete", tree: treeWithRoot, page: aPage( - fileAtRoot(), - delItem(id(file), rootID, isFile)), + d.fileAtRoot(), + delItem(fileID(), rootID, isFile)), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 1, @@ -2003,9 +2049,9 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file name: "move->delete", tree: treeWithFileAtRoot, page: aPage( - folderAtRoot(), - fileAt(folder), - delItem(id(file), id(folder), isFile)), + d.folderAtRoot(), + d.fileAt(folder), + delItem(fileID(), folderID(), isFile)), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 1, @@ -2019,11 +2065,11 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file }, }, { - name: "delete->create an existing file", + name: "file already in tree: delete->restore", tree: treeWithFileAtRoot, page: aPage( - delItem(id(file), rootID, isFile), - fileAtRoot()), + delItem(fileID(), rootID, isFile), + d.fileAtRoot()), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 1, @@ -2032,18 +2078,18 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file }, err: require.NoError, treeContainsFileIDsWithParent: map[string]string{ - id(file): rootID, + fileID(): rootID, }, countLiveFiles: 1, - countTotalBytes: defaultItemSize, + countTotalBytes: defaultFileSize, }, }, { - name: "delete->create a non-existing file", + name: "file not in tree: delete->restore", tree: treeWithRoot, page: aPage( - delItem(id(file), rootID, isFile), - fileAtRoot()), + delItem(fileID(), rootID, isFile), + d.fileAtRoot()), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 1, @@ -2052,10 +2098,10 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file }, err: require.NoError, treeContainsFileIDsWithParent: map[string]string{ - id(file): rootID, + fileID(): rootID, }, countLiveFiles: 1, - countTotalBytes: defaultItemSize, + countTotalBytes: defaultFileSize, }, }, } @@ -2067,15 +2113,15 @@ func (suite 
*CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file defer flush() var ( - c = collWithMBH(mock.DefaultOneDriveBH(user)) + c = collWithMBH(defaultOneDriveBH(user)) counter = count.New() - tree = test.tree(t) + tree = test.tree(t, d) ) err := c.enumeratePageOfItems( ctx, tree, - drv, + d.able, test.page.Items, newPagerLimiter(control.DefaultOptions()), counter, @@ -2092,9 +2138,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file } func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { - drv := models.NewDrive() - drv.SetId(ptr.To(id(drive))) - drv.SetName(ptr.To(name(drive))) + d := drive() type expected struct { counts countTD.Expected @@ -2108,7 +2152,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { table := []struct { name string - tree func(t *testing.T) *folderyMcFolderFace + tree func(t *testing.T, d *deltaDrive) *folderyMcFolderFace file models.DriveItemable limiter *pagerLimiter expect expected @@ -2116,7 +2160,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { { name: "add new file", tree: treeWithRoot, - file: fileAtRoot(), + file: d.fileAtRoot(), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -2125,16 +2169,16 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { err: require.NoError, skipped: assert.Nil, treeContainsFileIDsWithParent: map[string]string{ - id(file): rootID, + fileID(): rootID, }, countLiveFiles: 1, - countTotalBytes: defaultItemSize, + countTotalBytes: defaultFileSize, }, }, { name: "duplicate file", tree: treeWithFileAtRoot, - file: fileAtRoot(), + file: d.fileAtRoot(), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -2143,16 +2187,16 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { err: require.NoError, skipped: assert.Nil, treeContainsFileIDsWithParent: map[string]string{ - id(file): rootID, + fileID(): rootID, }, countLiveFiles: 1, - countTotalBytes: defaultItemSize, + countTotalBytes: defaultFileSize, }, }, { name: "error file seen before parent", tree: treeWithRoot, - file: fileAt(folder), + file: d.fileAt(folder), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -2168,7 +2212,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { { name: "malware file", tree: treeWithRoot, - file: malwareItem(id(file), name(file), parentDir(name(folder)), rootID, isFile), + file: malwareItem(fileID(), fileName(), d.dir(folderName()), rootID, isFile), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -2184,7 +2228,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { { name: "delete non-existing file", tree: treeWithRoot, - file: delItem(id(file), id(folder), isFile), + file: delItem(fileID(), folderID(), isFile), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -2200,7 +2244,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { { name: "delete existing file", tree: treeWithFileAtRoot, - file: delItem(id(file), rootID, isFile), + file: delItem(fileID(), rootID, isFile), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -2216,7 +2260,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { { name: "already at container 
file limit", tree: treeWithFileAtRoot, - file: filexAtRoot(2), + file: d.fileAtRoot(2), limiter: newPagerLimiter(minimumLimitOpts()), expect: expected{ counts: countTD.Expected{ @@ -2226,16 +2270,16 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { shouldHitLimit: true, skipped: assert.Nil, treeContainsFileIDsWithParent: map[string]string{ - id(file): rootID, + fileID(): rootID, }, countLiveFiles: 1, - countTotalBytes: defaultItemSize, + countTotalBytes: defaultFileSize, }, }, { name: "goes over total byte limit", tree: treeWithRoot, - file: fileAtRoot(), + file: d.fileAtRoot(), limiter: newPagerLimiter(minimumLimitOpts()), expect: expected{ counts: countTD.Expected{ @@ -2258,15 +2302,15 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { defer flush() var ( - c = collWithMBH(mock.DefaultOneDriveBH(user)) + c = collWithMBH(defaultOneDriveBH(user)) counter = count.New() - tree = test.tree(t) + tree = test.tree(t, d) ) skipped, err := c.addFileToTree( ctx, tree, - drv, + d.able, custom.ToCustomDriveItem(test.file), test.limiter, counter) diff --git a/src/internal/m365/collection/drive/delta_tree.go b/src/internal/m365/collection/drive/delta_tree.go index 45012bb53..be54a80cb 100644 --- a/src/internal/m365/collection/drive/delta_tree.go +++ b/src/internal/m365/collection/drive/delta_tree.go @@ -309,6 +309,11 @@ func (face *folderyMcFolderFace) setPreviousPath( // file handling // --------------------------------------------------------------------------- +func (face *folderyMcFolderFace) hasFile(id string) bool { + _, exists := face.fileIDToParentID[id] + return exists +} + // addFile places the file in the correct parent node. If the // file was already added to the tree and is getting relocated, // this func will update and/or clean up all the old references. 
diff --git a/src/internal/m365/collection/drive/delta_tree_test.go b/src/internal/m365/collection/drive/delta_tree_test.go index 9495ddf14..b7cf4af1d 100644 --- a/src/internal/m365/collection/drive/delta_tree_test.go +++ b/src/internal/m365/collection/drive/delta_tree_test.go @@ -9,7 +9,6 @@ import ( "github.com/stretchr/testify/suite" "golang.org/x/exp/maps" - "github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/services/m365/custom" @@ -68,7 +67,7 @@ func (suite *DeltaTreeUnitSuite) TestNewNodeyMcNodeFace() { func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder() { table := []struct { tname string - tree func(t *testing.T) *folderyMcFolderFace + tree func(t *testing.T, d *deltaDrive) *folderyMcFolderFace parentID string id string name string @@ -94,16 +93,16 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder() { tname: "add folder", tree: treeWithRoot, parentID: rootID, - id: id(folder), - name: name(folder), + id: folderID(), + name: folderName(), expectErr: assert.NoError, }, { tname: "add package", tree: treeWithRoot, parentID: rootID, - id: id(folder), - name: name(folder), + id: folderID(), + name: folderName(), isPackage: true, expectErr: assert.NoError, }, @@ -111,7 +110,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder() { tname: "missing ID", tree: treeWithRoot, parentID: rootID, - name: name(folder), + name: folderName(), isPackage: true, expectErr: assert.Error, }, @@ -119,15 +118,15 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder() { tname: "missing name", tree: treeWithRoot, parentID: rootID, - id: id(folder), + id: folderID(), isPackage: true, expectErr: assert.Error, }, { tname: "missing parentID", tree: treeWithRoot, - id: id(folder), - name: name(folder), + id: folderID(), + name: folderName(), isPackage: true, expectErr: assert.Error, }, @@ -135,29 +134,29 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder() { tname: "already tombstoned", tree: treeWithTombstone, parentID: rootID, - id: id(folder), - name: name(folder), + id: folderID(), + name: folderName(), expectErr: assert.NoError, }, { tname: "add folder before parent", - tree: func(t *testing.T) *folderyMcFolderFace { + tree: func(t *testing.T, d *deltaDrive) *folderyMcFolderFace { return &folderyMcFolderFace{ folderIDToNode: map[string]*nodeyMcNodeFace{}, } }, parentID: rootID, - id: id(folder), - name: name(folder), + id: folderID(), + name: folderName(), isPackage: true, expectErr: assert.Error, }, { tname: "folder already exists", tree: treeWithFolders, - parentID: idx(folder, "parent"), - id: id(folder), - name: name(folder), + parentID: folderID("parent"), + id: folderID(), + name: folderName(), expectErr: assert.NoError, }, } @@ -168,7 +167,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder() { ctx, flush := tester.NewContext(t) defer flush() - tree := test.tree(t) + tree := test.tree(t, drive()) err := tree.setFolder( ctx, @@ -203,18 +202,18 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddTombstone() { table := []struct { name string id string - tree func(t *testing.T) *folderyMcFolderFace + tree func(t *testing.T, d *deltaDrive) *folderyMcFolderFace expectErr assert.ErrorAssertionFunc }{ { name: "add tombstone", - id: id(folder), + id: folderID(), tree: newTree, expectErr: assert.NoError, }, { name: "duplicate tombstone", - id: id(folder), + id: 
folderID(), tree: treeWithTombstone, expectErr: assert.NoError, }, @@ -224,14 +223,8 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddTombstone() { expectErr: assert.Error, }, { - name: "conflict: folder alive", - id: id(folder), - tree: treeWithTombstone, - expectErr: assert.NoError, - }, - { - name: "already tombstoned", - id: id(folder), + name: "folder exists and is alive", + id: folderID(), tree: treeWithTombstone, expectErr: assert.NoError, }, @@ -243,7 +236,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddTombstone() { ctx, flush := tester.NewContext(t) defer flush() - tree := test.tree(t) + tree := test.tree(t, drive()) err := tree.setTombstone(ctx, test.id) test.expectErr(t, err, clues.ToCore(err)) @@ -270,14 +263,14 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetPreviousPath() { name string id string prev path.Path - tree func(t *testing.T) *folderyMcFolderFace + tree func(t *testing.T, d *deltaDrive) *folderyMcFolderFace expectErr assert.ErrorAssertionFunc expectLive bool expectTombstone bool }{ { name: "no changes become a no-op", - id: id(folder), + id: folderID(), prev: pathWith(defaultLoc()), tree: newTree, expectErr: assert.NoError, @@ -295,7 +288,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetPreviousPath() { }, { name: "create tombstone after reset", - id: id(folder), + id: folderID(), prev: pathWith(defaultLoc()), tree: treeAfterReset, expectErr: assert.NoError, @@ -312,7 +305,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetPreviousPath() { }, { name: "missing prev", - id: id(folder), + id: folderID(), tree: newTree, expectErr: assert.Error, expectLive: false, @@ -320,7 +313,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetPreviousPath() { }, { name: "update live folder", - id: id(folder), + id: folderID(), prev: pathWith(defaultLoc()), tree: treeWithFolders, expectErr: assert.NoError, @@ -329,7 +322,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetPreviousPath() { }, { name: "update tombstone", - id: id(folder), + id: folderID(), prev: pathWith(defaultLoc()), tree: treeWithTombstone, expectErr: assert.NoError, @@ -340,7 +333,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetPreviousPath() { for _, test := range table { suite.Run(test.name, func() { t := suite.T() - tree := test.tree(t) + tree := test.tree(t, drive()) err := tree.setPreviousPath(test.id, test.prev) test.expectErr(t, err, clues.ToCore(err)) @@ -478,7 +471,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder_correctTree() ctx, flush := tester.NewContext(t) defer flush() - tree := treeWithRoot(t) + tree := treeWithRoot(t, drive()) set := func( parentID, fid, fname string, @@ -564,7 +557,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder_correctTombst ctx, flush := tester.NewContext(t) defer flush() - tree := treeWithRoot(t) + tree := treeWithRoot(t, drive()) set := func( parentID, fid, fname string, @@ -739,7 +732,8 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder_correctTombst func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() { table := []struct { tname string - tree func(t *testing.T) *folderyMcFolderFace + tree func(t *testing.T, d *deltaDrive) *folderyMcFolderFace + id string oldParentID string parentID string contentSize int64 @@ -749,69 +743,87 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() { { tname: "add file to root", tree: treeWithRoot, + id: fileID(), oldParentID: "", 
parentID: rootID, - contentSize: 42, + contentSize: defaultFileSize, expectErr: assert.NoError, - expectFiles: map[string]string{id(file): rootID}, + expectFiles: map[string]string{fileID(): rootID}, }, { tname: "add file to folder", tree: treeWithFolders, + id: fileID(), oldParentID: "", - parentID: id(folder), + parentID: folderID(), contentSize: 24, expectErr: assert.NoError, - expectFiles: map[string]string{id(file): id(folder)}, + expectFiles: map[string]string{fileID(): folderID()}, }, { tname: "re-add file at the same location", tree: treeWithFileAtRoot, + id: fileID(), oldParentID: rootID, parentID: rootID, contentSize: 84, expectErr: assert.NoError, - expectFiles: map[string]string{id(file): rootID}, + expectFiles: map[string]string{fileID(): rootID}, }, { tname: "move file from folder to root", tree: treeWithFileInFolder, - oldParentID: id(folder), + id: fileID(), + oldParentID: folderID(), parentID: rootID, contentSize: 48, expectErr: assert.NoError, - expectFiles: map[string]string{id(file): rootID}, + expectFiles: map[string]string{fileID(): rootID}, }, { tname: "move file from tombstone to root", tree: treeWithFileInTombstone, - oldParentID: id(folder), + id: fileID(), + oldParentID: folderID(), parentID: rootID, contentSize: 2, expectErr: assert.NoError, - expectFiles: map[string]string{id(file): rootID}, + expectFiles: map[string]string{fileID(): rootID}, + }, + { + tname: "adding file with no ID", + tree: treeWithTombstone, + id: "", + oldParentID: "", + parentID: folderID(), + contentSize: 4, + expectErr: assert.Error, + expectFiles: map[string]string{}, }, { tname: "error adding file to tombstone", tree: treeWithTombstone, + id: fileID(), oldParentID: "", - parentID: id(folder), - contentSize: 4, + parentID: folderID(), + contentSize: 8, expectErr: assert.Error, expectFiles: map[string]string{}, }, { tname: "error adding file before parent", tree: treeWithTombstone, + id: fileID(), oldParentID: "", - parentID: idx(folder, 1), - contentSize: 8, + parentID: folderID("not-in-tree"), + contentSize: 16, expectErr: assert.Error, expectFiles: map[string]string{}, }, { tname: "error adding file without parent id", tree: treeWithTombstone, + id: fileID(), oldParentID: "", parentID: "", contentSize: 16, @@ -822,15 +834,13 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() { for _, test := range table { suite.Run(test.tname, func() { t := suite.T() - tree := test.tree(t) - - df := driveFile(file, parentDir(), test.parentID) - df.SetSize(ptr.To(test.contentSize)) + d := drive() + tree := test.tree(t, d) err := tree.addFile( test.parentID, - id(file), - custom.ToCustomDriveItem(df)) + test.id, + custom.ToCustomDriveItem(d.fileWSizeAt(test.contentSize, test.parentID))) test.expectErr(t, err, clues.ToCore(err)) assert.Equal(t, test.expectFiles, tree.fileIDToParentID) @@ -841,7 +851,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() { parent := tree.getNode(test.parentID) require.NotNil(t, parent) - assert.Contains(t, parent.files, id(file)) + assert.Contains(t, parent.files, fileID()) countSize := tree.countLiveFilesAndSizes() assert.Equal(t, 1, countSize.numFiles, "should have one file in the tree") @@ -851,7 +861,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() { old := tree.getNode(test.oldParentID) require.NotNil(t, old) - assert.NotContains(t, old.files, id(file)) + assert.NotContains(t, old.files, fileID()) } }) } @@ -860,7 +870,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddFile() { func (suite 
*DeltaTreeUnitSuite) TestFolderyMcFolderFace_DeleteFile() { table := []struct { tname string - tree func(t *testing.T) *folderyMcFolderFace + tree func(t *testing.T, d *deltaDrive) *folderyMcFolderFace parentID string }{ { @@ -876,34 +886,35 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_DeleteFile() { { tname: "delete file from folder", tree: treeWithFileInFolder, - parentID: id(folder), + parentID: folderID(), }, { tname: "delete file from tombstone", tree: treeWithFileInTombstone, - parentID: id(folder), + parentID: folderID(), }, } for _, test := range table { suite.Run(test.tname, func() { t := suite.T() - tree := test.tree(t) + tree := test.tree(t, drive()) - tree.deleteFile(id(file)) + tree.deleteFile(fileID()) parent := tree.getNode(test.parentID) require.NotNil(t, parent) - assert.NotContains(t, parent.files, id(file)) - assert.NotContains(t, tree.fileIDToParentID, id(file)) - assert.Contains(t, tree.deletedFileIDs, id(file)) + assert.NotContains(t, parent.files, fileID()) + assert.NotContains(t, tree.fileIDToParentID, fileID()) + assert.Contains(t, tree.deletedFileIDs, fileID()) }) } } func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_addAndDeleteFile() { t := suite.T() - tree := treeWithRoot(t) + d := drive() + tree := treeWithRoot(t, d) fID := id(file) require.Len(t, tree.fileIDToParentID, 0) @@ -916,7 +927,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_addAndDeleteFile() { assert.Len(t, tree.deletedFileIDs, 1) assert.Contains(t, tree.deletedFileIDs, fID) - err := tree.addFile(rootID, fID, custom.ToCustomDriveItem(fileAtRoot())) + err := tree.addFile(rootID, fID, custom.ToCustomDriveItem(d.fileAtRoot())) require.NoError(t, err, clues.ToCore(err)) assert.Len(t, tree.fileIDToParentID, 1) @@ -935,7 +946,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_addAndDeleteFile() { func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateExcludeItemIDs() { table := []struct { name string - tree func(t *testing.T) *folderyMcFolderFace + tree func(t *testing.T, d *deltaDrive) *folderyMcFolderFace expect map[string]struct{} }{ { @@ -946,7 +957,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateExcludeItemIDs( { name: "one file in a folder", tree: treeWithFileInFolder, - expect: makeExcludeMap(id(file)), + expect: makeExcludeMap(fileID()), }, { name: "one file in a tombstone", @@ -956,22 +967,22 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateExcludeItemIDs( { name: "one deleted file", tree: treeWithDeletedFile, - expect: makeExcludeMap(idx(file, "d")), + expect: makeExcludeMap(fileID("d")), }, { name: "files in folders and tombstones", tree: fullTree, expect: makeExcludeMap( - id(file), - idx(file, "r"), - idx(file, "p"), - idx(file, "d")), + fileID(), + fileID("r"), + fileID("p"), + fileID("d")), }, } for _, test := range table { suite.Run(test.name, func() { t := suite.T() - tree := test.tree(t) + tree := test.tree(t, drive()) result := tree.generateExcludeItemIDs() assert.Equal(t, test.expect, result) @@ -985,10 +996,11 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateExcludeItemIDs( func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables() { t := suite.T() + d := drive() table := []struct { name string - tree func(t *testing.T) *folderyMcFolderFace + tree func(t *testing.T, d *deltaDrive) *folderyMcFolderFace prevPaths map[string]string expectErr require.ErrorAssertionFunc expect map[string]collectable @@ -1005,7 +1017,7 @@ func (suite *DeltaTreeUnitSuite) 
TestFolderyMcFolderFace_GenerateCollectables() expectErr: require.NoError, expect: map[string]collectable{ rootID: { - currPath: fullPathPath(t), + currPath: d.fullPath(t), files: map[string]*custom.DriveItem{}, folderID: rootID, isPackageOrChildOfPackage: false, @@ -1019,9 +1031,9 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables() expectErr: require.NoError, expect: map[string]collectable{ rootID: { - currPath: fullPathPath(t), + currPath: d.fullPath(t), files: map[string]*custom.DriveItem{ - id(file): custom.ToCustomDriveItem(fileAtRoot()), + fileID(): custom.ToCustomDriveItem(d.fileAtRoot()), }, folderID: rootID, isPackageOrChildOfPackage: false, @@ -1035,41 +1047,41 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables() expectErr: require.NoError, expect: map[string]collectable{ rootID: { - currPath: fullPathPath(t), + currPath: d.fullPath(t), files: map[string]*custom.DriveItem{}, folderID: rootID, isPackageOrChildOfPackage: false, loc: path.Elements{}, }, - idx(folder, "parent"): { - currPath: fullPathPath(t, namex(folder, "parent")), + folderID("parent"): { + currPath: d.fullPath(t, folderName("parent")), files: map[string]*custom.DriveItem{}, - folderID: idx(folder, "parent"), + folderID: folderID("parent"), isPackageOrChildOfPackage: false, loc: path.Elements{rootName}, }, - id(folder): { - currPath: fullPathPath(t, namex(folder, "parent"), name(folder)), + folderID(): { + currPath: d.fullPath(t, folderName("parent"), folderName()), files: map[string]*custom.DriveItem{ - id(file): custom.ToCustomDriveItem(fileAt("parent")), + fileID(): custom.ToCustomDriveItem(d.fileAt("parent")), }, - folderID: id(folder), + folderID: folderID(), isPackageOrChildOfPackage: false, - loc: path.Elements{rootName, namex(folder, "parent")}, + loc: path.Elements{rootName, folderName("parent")}, }, }, }, { name: "package in hierarchy", - tree: func(t *testing.T) *folderyMcFolderFace { + tree: func(t *testing.T, d *deltaDrive) *folderyMcFolderFace { ctx, flush := tester.NewContext(t) defer flush() - tree := treeWithRoot(t) + tree := treeWithRoot(t, d) err := tree.setFolder(ctx, rootID, id(pkg), name(pkg), true) require.NoError(t, err, clues.ToCore(err)) - err = tree.setFolder(ctx, id(pkg), id(folder), name(folder), false) + err = tree.setFolder(ctx, id(pkg), folderID(), folderName(), false) require.NoError(t, err, clues.ToCore(err)) return tree @@ -1077,23 +1089,23 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables() expectErr: require.NoError, expect: map[string]collectable{ rootID: { - currPath: fullPathPath(t), + currPath: d.fullPath(t), files: map[string]*custom.DriveItem{}, folderID: rootID, isPackageOrChildOfPackage: false, loc: path.Elements{}, }, id(pkg): { - currPath: fullPathPath(t, name(pkg)), + currPath: d.fullPath(t, name(pkg)), files: map[string]*custom.DriveItem{}, folderID: id(pkg), isPackageOrChildOfPackage: true, loc: path.Elements{rootName}, }, - id(folder): { - currPath: fullPathPath(t, name(pkg), name(folder)), + folderID(): { + currPath: d.fullPath(t, name(pkg), folderName()), files: map[string]*custom.DriveItem{}, - folderID: id(folder), + folderID: folderID(), isPackageOrChildOfPackage: true, loc: path.Elements{rootName, name(pkg)}, }, @@ -1104,36 +1116,36 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables() tree: treeWithFileInFolder, expectErr: require.NoError, prevPaths: map[string]string{ - rootID: fullPath(), - idx(folder, "parent"): fullPath(namex(folder, 
"parent-prev")), - id(folder): fullPath(namex(folder, "parent-prev"), name(folder)), + rootID: d.strPath(), + folderID("parent"): d.strPath(folderName("parent-prev")), + folderID(): d.strPath(folderName("parent-prev"), folderName()), }, expect: map[string]collectable{ rootID: { - currPath: fullPathPath(t), + currPath: d.fullPath(t), files: map[string]*custom.DriveItem{}, folderID: rootID, isPackageOrChildOfPackage: false, loc: path.Elements{}, - prevPath: fullPathPath(t), + prevPath: d.fullPath(t), }, - idx(folder, "parent"): { - currPath: fullPathPath(t, namex(folder, "parent")), + folderID("parent"): { + currPath: d.fullPath(t, folderName("parent")), files: map[string]*custom.DriveItem{}, - folderID: idx(folder, "parent"), + folderID: folderID("parent"), isPackageOrChildOfPackage: false, loc: path.Elements{rootName}, - prevPath: fullPathPath(t, namex(folder, "parent-prev")), + prevPath: d.fullPath(t, folderName("parent-prev")), }, - id(folder): { - currPath: fullPathPath(t, namex(folder, "parent"), name(folder)), - folderID: id(folder), + folderID(): { + currPath: d.fullPath(t, folderName("parent"), folderName()), + folderID: folderID(), isPackageOrChildOfPackage: false, files: map[string]*custom.DriveItem{ - id(file): custom.ToCustomDriveItem(fileAt("parent")), + fileID(): custom.ToCustomDriveItem(d.fileAt("parent")), }, - loc: path.Elements{rootName, namex(folder, "parent")}, - prevPath: fullPathPath(t, namex(folder, "parent-prev"), name(folder)), + loc: path.Elements{rootName, folderName("parent")}, + prevPath: d.fullPath(t, folderName("parent-prev"), folderName()), }, }, }, @@ -1141,24 +1153,24 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables() name: "root and tombstones", tree: treeWithFileInTombstone, prevPaths: map[string]string{ - rootID: fullPath(), - id(folder): fullPath(name(folder)), + rootID: d.strPath(), + folderID(): d.strPath(folderName()), }, expectErr: require.NoError, expect: map[string]collectable{ rootID: { - currPath: fullPathPath(t), + currPath: d.fullPath(t), files: map[string]*custom.DriveItem{}, folderID: rootID, isPackageOrChildOfPackage: false, loc: path.Elements{}, - prevPath: fullPathPath(t), + prevPath: d.fullPath(t), }, - id(folder): { + folderID(): { files: map[string]*custom.DriveItem{}, - folderID: id(folder), + folderID: folderID(), isPackageOrChildOfPackage: false, - prevPath: fullPathPath(t, name(folder)), + prevPath: d.fullPath(t, folderName()), }, }, }, @@ -1166,7 +1178,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_GenerateCollectables() for _, test := range table { suite.Run(test.name, func() { t := suite.T() - tree := test.tree(t) + tree := test.tree(t, d) if len(test.prevPaths) > 0 { for id, ps := range test.prevPaths { diff --git a/src/internal/m365/collection/drive/helper_test.go b/src/internal/m365/collection/drive/helper_test.go index c60208083..1a08cbb40 100644 --- a/src/internal/m365/collection/drive/helper_test.go +++ b/src/internal/m365/collection/drive/helper_test.go @@ -3,10 +3,12 @@ package drive import ( "context" "fmt" + "net/http" "testing" "time" "github.com/alcionai/clues" + "github.com/microsoftgraph/msgraph-sdk-go/drives" "github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -17,11 +19,11 @@ import ( dataMock "github.com/alcionai/corso/src/internal/data/mock" "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata" odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts" 
- "github.com/alcionai/corso/src/internal/m365/service/onedrive/mock" "github.com/alcionai/corso/src/internal/m365/support" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester/tconfig" "github.com/alcionai/corso/src/pkg/account" + "github.com/alcionai/corso/src/pkg/backup/details" bupMD "github.com/alcionai/corso/src/pkg/backup/metadata" "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/count" @@ -31,10 +33,11 @@ import ( "github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api/graph" apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock" + "github.com/alcionai/corso/src/pkg/services/m365/api/pagers" "github.com/alcionai/corso/src/pkg/services/m365/custom" ) -const defaultItemSize int64 = 42 +const defaultFileSize int64 = 42 // TODO(ashmrtn): Merge with similar structs in graph and exchange packages. type oneDriveService struct { @@ -43,7 +46,7 @@ type oneDriveService struct { ac api.Client } -func NewOneDriveService(credentials account.M365Config) (*oneDriveService, error) { +func newOneDriveService(credentials account.M365Config) (*oneDriveService, error) { ac, err := api.NewClient( credentials, control.DefaultOptions(), @@ -74,7 +77,7 @@ func loadTestService(t *testing.T) *oneDriveService { creds, err := a.M365Config() require.NoError(t, err, clues.ToCore(err)) - service, err := NewOneDriveService(creds) + service, err := newOneDriveService(creds) require.NoError(t, err, clues.ToCore(err)) return service @@ -136,383 +139,9 @@ func asNotMoved(t *testing.T, p string) statePath { } // --------------------------------------------------------------------------- -// stub drive items +// misc helpers // --------------------------------------------------------------------------- -type itemType int - -const ( - isFile itemType = 1 - isFolder itemType = 2 - isPackage itemType = 3 -) - -func coreItem( - id, name, parentPath, parentID string, - it itemType, -) *models.DriveItem { - item := models.NewDriveItem() - item.SetName(&name) - item.SetId(&id) - item.SetLastModifiedDateTime(ptr.To(time.Now())) - - parentReference := models.NewItemReference() - parentReference.SetPath(&parentPath) - parentReference.SetId(&parentID) - item.SetParentReference(parentReference) - - switch it { - case isFile: - item.SetSize(ptr.To[int64](42)) - item.SetFile(models.NewFile()) - case isFolder: - item.SetFolder(models.NewFolder()) - case isPackage: - item.SetPackageEscaped(models.NewPackageEscaped()) - } - - return item -} - -func driveItem( - id, name, parentPath, parentID string, - it itemType, -) models.DriveItemable { - return coreItem(id, name, parentPath, parentID, it) -} - -func driveFile( - idX any, - parentPath, parentID string, -) models.DriveItemable { - i := id(file) - n := name(file) - - if idX != file { - i = idx(file, idX) - n = namex(file, idX) - } - - return driveItem(i, n, parentPath, parentID, isFile) -} - -func fileAtRoot() models.DriveItemable { - return driveItem(id(file), name(file), parentDir(), rootID, isFile) -} - -func fileAt( - parentX any, -) models.DriveItemable { - pd := parentDir(namex(folder, parentX)) - pid := idx(folder, parentX) - - if parentX == folder { - pd = parentDir(name(folder)) - pid = id(folder) - } - - return driveItem( - id(file), - name(file), - pd, - pid, - isFile) -} - -func fileAtDeep( - parentDir, parentID string, -) models.DriveItemable { - return driveItem( - id(file), - name(file), - parentDir, - parentID, - isFile) -} - -func 
filexAtRoot( - x any, -) models.DriveItemable { - return driveItem( - idx(file, x), - namex(file, x), - parentDir(), - rootID, - isFile) -} - -func filexAt( - x, parentX any, -) models.DriveItemable { - pd := parentDir(namex(folder, parentX)) - pid := idx(folder, parentX) - - if parentX == folder { - pd = parentDir(name(folder)) - pid = id(folder) - } - - return driveItem( - idx(file, x), - namex(file, x), - pd, - pid, - isFile) -} - -func filexWSizeAtRoot( - x any, - size int64, -) models.DriveItemable { - return driveItemWithSize( - idx(file, x), - namex(file, x), - parentDir(), - rootID, - size, - isFile) -} - -func filexWSizeAt( - x, parentX any, - size int64, -) models.DriveItemable { - pd := parentDir(namex(folder, parentX)) - pid := idx(folder, parentX) - - if parentX == folder { - pd = parentDir(name(folder)) - pid = id(folder) - } - - return driveItemWithSize( - idx(file, x), - namex(file, x), - pd, - pid, - size, - isFile) -} - -func folderAtRoot() models.DriveItemable { - return driveItem(id(folder), name(folder), parentDir(), rootID, isFolder) -} - -func folderAtDeep( - parentDir, parentID string, -) models.DriveItemable { - return driveItem( - id(folder), - name(folder), - parentDir, - parentID, - isFolder) -} - -func folderxAt( - x, parentX any, -) models.DriveItemable { - pd := parentDir(namex(folder, parentX)) - pid := idx(folder, parentX) - - if parentX == folder { - pd = parentDir(name(folder)) - pid = id(folder) - } - - return driveItem( - idx(folder, x), - namex(folder, x), - pd, - pid, - isFolder) -} - -func folderxAtRoot( - x any, -) models.DriveItemable { - return driveItem( - idx(folder, x), - namex(folder, x), - parentDir(), - rootID, - isFolder) -} - -func driveItemWithSize( - id, name, parentPath, parentID string, - size int64, - it itemType, -) models.DriveItemable { - res := coreItem(id, name, parentPath, parentID, it) - res.SetSize(ptr.To(size)) - - return res -} - -func fileItem( - id, name, parentPath, parentID, url string, - deleted bool, -) models.DriveItemable { - di := driveItem(id, name, parentPath, parentID, isFile) - di.SetAdditionalData(map[string]any{ - "@microsoft.graph.downloadUrl": url, - }) - - if deleted { - di.SetDeleted(models.NewDeleted()) - } - - return di -} - -func malwareItem( - id, name, parentPath, parentID string, - it itemType, -) models.DriveItemable { - c := coreItem(id, name, parentPath, parentID, it) - - mal := models.NewMalware() - malStr := "test malware" - mal.SetDescription(&malStr) - - c.SetMalware(mal) - - return c -} - -func driveRootItem() models.DriveItemable { - item := models.NewDriveItem() - item.SetName(ptr.To(rootName)) - item.SetId(ptr.To(rootID)) - item.SetRoot(models.NewRoot()) - item.SetFolder(models.NewFolder()) - - return item -} - -// delItem creates a DriveItemable that is marked as deleted. path must be set -// to the base drive path. 
-func delItem( - id string, - parentID string, - it itemType, -) models.DriveItemable { - item := models.NewDriveItem() - item.SetId(&id) - item.SetDeleted(models.NewDeleted()) - - parentReference := models.NewItemReference() - parentReference.SetId(&parentID) - item.SetParentReference(parentReference) - - switch it { - case isFile: - item.SetFile(models.NewFile()) - case isFolder: - item.SetFolder(models.NewFolder()) - case isPackage: - item.SetPackageEscaped(models.NewPackageEscaped()) - } - - return item -} - -func id(v string) string { - return fmt.Sprintf("id_%s_0", v) -} - -func idx(v string, sfx any) string { - return fmt.Sprintf("id_%s_%v", v, sfx) -} - -func name(v string) string { - return fmt.Sprintf("n_%s_0", v) -} - -func namex(v string, sfx any) string { - return fmt.Sprintf("n_%s_%v", v, sfx) -} - -func toPath(elems ...string) string { - es := []string{} - for _, elem := range elems { - es = append(es, path.Split(elem)...) - } - - switch len(es) { - case 0: - return "" - case 1: - return es[0] - default: - return path.Builder{}.Append(es...).String() - } -} - -func fullPath(elems ...string) string { - return toPath(append( - []string{ - tenant, - path.OneDriveService.String(), - user, - path.FilesCategory.String(), - odConsts.DriveFolderPrefixBuilder(id(drive)).String(), - }, - elems...)...) -} - -func fullPathPath(t *testing.T, elems ...string) path.Path { - p, err := path.FromDataLayerPath(fullPath(elems...), false) - require.NoError(t, err, clues.ToCore(err)) - - return p -} - -func driveFullPath(driveID any, elems ...string) string { - return toPath(append( - []string{ - tenant, - path.OneDriveService.String(), - user, - path.FilesCategory.String(), - odConsts.DriveFolderPrefixBuilder(idx(drive, driveID)).String(), - }, - elems...)...) -} - -func parentDir(elems ...string) string { - return toPath(append( - []string{odConsts.DriveFolderPrefixBuilder(id(drive)).String()}, - elems...)...) -} - -func driveParentDir(driveID any, elems ...string) string { - return toPath(append( - []string{odConsts.DriveFolderPrefixBuilder(idx(drive, driveID)).String()}, - elems...)...) 
-} - -// common item names -const ( - bar = "bar" - delta = "delta_url" - drive = "drive" - fanny = "fanny" - file = "file" - folder = "folder" - foo = "foo" - item = "item" - malware = "malware" - nav = "nav" - pkg = "package" - rootID = odConsts.RootID - rootName = odConsts.RootPathDir - subfolder = "subfolder" - tenant = "t" - user = "u" -) - var anyFolderScope = (&selectors.OneDriveBackup{}).Folders(selectors.Any())[0] type failingColl struct{} @@ -583,29 +212,21 @@ func collWithMBHAndOpts( count.New()) } -func pagerForDrives(drives ...models.Driveable) *apiMock.Pager[models.Driveable] { - return &apiMock.Pager[models.Driveable]{ - ToReturn: []apiMock.PagerResult[models.Driveable]{ - {Values: drives}, - }, +func aPage(items ...models.DriveItemable) nextPage { + return nextPage{ + Items: append([]models.DriveItemable{driveRootFolder()}, items...), } } -func aPage(items ...models.DriveItemable) mock.NextPage { - return mock.NextPage{ - Items: append([]models.DriveItemable{driveRootItem()}, items...), - } -} - -func aPageWReset(items ...models.DriveItemable) mock.NextPage { - return mock.NextPage{ - Items: append([]models.DriveItemable{driveRootItem()}, items...), +func aPageWReset(items ...models.DriveItemable) nextPage { + return nextPage{ + Items: append([]models.DriveItemable{driveRootFolder()}, items...), Reset: true, } } -func aReset(items ...models.DriveItemable) mock.NextPage { - return mock.NextPage{ +func aReset(items ...models.DriveItemable) nextPage { + return nextPage{ Items: []models.DriveItemable{}, Reset: true, } @@ -626,7 +247,7 @@ func makePrevMetadataColls( prevDeltas := map[string]string{} for driveID := range previousPaths { - prevDeltas[driveID] = idx(delta, "prev") + prevDeltas[driveID] = id(deltaURL, "prev") } mdColl, err := graph.MakeMetadataCollection( @@ -818,8 +439,8 @@ func (ecs expectedCollections) requireNoUnseenCollections(t *testing.T) { // delta trees // --------------------------------------------------------------------------- -func defaultTreePfx(t *testing.T) path.Path { - fpb := fullPathPath(t).ToBuilder() +func defaultTreePfx(t *testing.T, d *deltaDrive) path.Path { + fpb := d.fullPath(t).ToBuilder() fpe := fpb.Elements() fpe = fpe[:len(fpe)-1] fpb = path.Builder{}.Append(fpe...) 
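The removed fullPath, fullPathPath, and parentDir helpers above hard-coded the package-level id(drive); the deltaDrive methods used throughout the rewritten tests (d.strPath, d.fullPath, d.dir) thread the fixture drive's own ID through instead. Their definitions land elsewhere in this change; a plausible shape, mirroring the deleted helpers and assuming a toPath-style joiner like the one removed above:

func (d *deltaDrive) strPath(elems ...string) string {
	// same layout as the old fullPath, but keyed on this fixture's drive ID
	return toPath(append(
		[]string{
			tenant,
			path.OneDriveService.String(),
			user,
			path.FilesCategory.String(),
			odConsts.DriveFolderPrefixBuilder(d.id).String(),
		},
		elems...)...)
}

func (d *deltaDrive) fullPath(t *testing.T, elems ...string) path.Path {
	// parsed counterpart of strPath, like the old fullPathPath
	p, err := path.FromDataLayerPath(d.strPath(elems...), false)
	require.NoError(t, err, clues.ToCore(err))

	return p
}

func (d *deltaDrive) dir(elems ...string) string {
	// parent-directory string, like the old parentDir
	return toPath(append(
		[]string{odConsts.DriveFolderPrefixBuilder(d.id).String()},
		elems...)...)
}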
@@ -839,160 +460,1152 @@ func defaultLoc() path.Elements { return path.NewElements("root:/foo/bar/baz/qux/fnords/smarf/voi/zumba/bangles/howdyhowdyhowdy") } -func newTree(t *testing.T) *folderyMcFolderFace { - return newFolderyMcFolderFace(defaultTreePfx(t), rootID) +func newTree(t *testing.T, d *deltaDrive) *folderyMcFolderFace { + return newFolderyMcFolderFace(defaultTreePfx(t, d), rootID) } -func treeWithRoot(t *testing.T) *folderyMcFolderFace { - tree := newFolderyMcFolderFace(defaultTreePfx(t), rootID) - rootey := newNodeyMcNodeFace(nil, rootID, rootName, false) - tree.root = rootey - tree.folderIDToNode[rootID] = rootey +func treeWithRoot(t *testing.T, d *deltaDrive) *folderyMcFolderFace { + tree := newFolderyMcFolderFace(defaultTreePfx(t, d), rootID) + + //nolint:forbidigo + err := tree.setFolder(context.Background(), "", rootID, rootName, false) + require.NoError(t, err, clues.ToCore(err)) return tree } -func treeAfterReset(t *testing.T) *folderyMcFolderFace { - tree := newFolderyMcFolderFace(defaultTreePfx(t), rootID) +func treeAfterReset(t *testing.T, d *deltaDrive) *folderyMcFolderFace { + tree := newFolderyMcFolderFace(defaultTreePfx(t, d), rootID) tree.reset() return tree } -func treeWithFoldersAfterReset(t *testing.T) *folderyMcFolderFace { - tree := treeWithFolders(t) +func treeWithFoldersAfterReset(t *testing.T, d *deltaDrive) *folderyMcFolderFace { + tree := treeWithFolders(t, d) tree.hadReset = true return tree } -func treeWithTombstone(t *testing.T) *folderyMcFolderFace { - tree := treeWithRoot(t) - tree.tombstones[id(folder)] = newNodeyMcNodeFace(nil, id(folder), "", false) +func treeWithTombstone(t *testing.T, d *deltaDrive) *folderyMcFolderFace { + tree := treeWithRoot(t, d) + + //nolint:forbidigo + err := tree.setTombstone(context.Background(), folderID()) + require.NoError(t, err, clues.ToCore(err)) return tree } -func treeWithFolders(t *testing.T) *folderyMcFolderFace { - tree := treeWithRoot(t) +func treeWithFolders(t *testing.T, d *deltaDrive) *folderyMcFolderFace { + tree := treeWithRoot(t, d) - parent := newNodeyMcNodeFace(tree.root, idx(folder, "parent"), namex(folder, "parent"), true) - tree.folderIDToNode[parent.id] = parent - tree.root.children[parent.id] = parent + //nolint:forbidigo + err := tree.setFolder(context.Background(), rootID, folderID("parent"), folderName("parent"), true) + require.NoError(t, err, clues.ToCore(err)) - f := newNodeyMcNodeFace(parent, id(folder), name(folder), false) - tree.folderIDToNode[f.id] = f - parent.children[f.id] = f + //nolint:forbidigo + err = tree.setFolder(context.Background(), folderID("parent"), folderID(), folderName(), false) + require.NoError(t, err, clues.ToCore(err)) return tree } -func treeWithFileAtRoot(t *testing.T) *folderyMcFolderFace { - tree := treeWithRoot(t) - tree.root.files[id(file)] = custom.ToCustomDriveItem(fileAtRoot()) - tree.fileIDToParentID[id(file)] = rootID +func treeWithFileAtRoot(t *testing.T, d *deltaDrive) *folderyMcFolderFace { + tree := treeWithRoot(t, d) + + err := tree.addFile(rootID, fileID(), custom.ToCustomDriveItem(d.fileAtRoot())) + require.NoError(t, err, clues.ToCore(err)) return tree } -func treeWithDeletedFile(t *testing.T) *folderyMcFolderFace { - tree := treeWithRoot(t) - tree.deleteFile(idx(file, "d")) +func treeWithDeletedFile(t *testing.T, d *deltaDrive) *folderyMcFolderFace { + tree := treeWithRoot(t, d) + tree.deleteFile(fileID("d")) return tree } -func treeWithFileInFolder(t *testing.T) *folderyMcFolderFace { - tree := treeWithFolders(t) - 
tree.folderIDToNode[id(folder)].files[id(file)] = custom.ToCustomDriveItem(fileAt(folder)) - tree.fileIDToParentID[id(file)] = id(folder) +func treeWithFileInFolder(t *testing.T, d *deltaDrive) *folderyMcFolderFace { + tree := treeWithFolders(t, d) + + err := tree.addFile(folderID(), fileID(), custom.ToCustomDriveItem(d.fileAt(folder))) + require.NoError(t, err, clues.ToCore(err)) return tree } -func treeWithFileInTombstone(t *testing.T) *folderyMcFolderFace { - tree := treeWithTombstone(t) - tree.tombstones[id(folder)].files[id(file)] = custom.ToCustomDriveItem(fileAt("tombstone")) - tree.fileIDToParentID[id(file)] = id(folder) +func treeWithFileInTombstone(t *testing.T, d *deltaDrive) *folderyMcFolderFace { + tree := treeWithTombstone(t, d) + + // setting these directly, instead of using addFile(), + // because we can't add files to tombstones. + tree.tombstones[folderID()].files[fileID()] = custom.ToCustomDriveItem(d.fileAt("tombstone")) + tree.fileIDToParentID[fileID()] = folderID() return tree } -// root -> idx(folder, parent) -> id(folder) +// root -> idx(folder, parent) -> folderID() // one item at each dir // one tombstone: idx(folder, tombstone) // one item in the tombstone // one deleted item -func fullTree(t *testing.T) *folderyMcFolderFace { - return fullTreeWithNames("parent", "tombstone")(t) +func fullTree(t *testing.T, d *deltaDrive) *folderyMcFolderFace { + return fullTreeWithNames("parent", "tombstone")(t, d) } func fullTreeWithNames( parentFolderX, tombstoneX any, -) func(t *testing.T) *folderyMcFolderFace { - return func(t *testing.T) *folderyMcFolderFace { +) func(t *testing.T, d *deltaDrive) *folderyMcFolderFace { + return func(t *testing.T, d *deltaDrive) *folderyMcFolderFace { ctx, flush := tester.NewContext(t) defer flush() - tree := treeWithRoot(t) + tree := treeWithRoot(t, d) // file in root - df := driveFile("r", parentDir(), rootID) + df := driveFile(d.dir(), rootID, "r") err := tree.addFile( rootID, - idx(file, "r"), + fileID("r"), custom.ToCustomDriveItem(df)) require.NoError(t, err, clues.ToCore(err)) - // root -> idx(folder, parent) - err = tree.setFolder(ctx, rootID, idx(folder, parentFolderX), namex(folder, parentFolderX), false) + // root -> folderID(parentX) + err = tree.setFolder(ctx, rootID, folderID(parentFolderX), folderName(parentFolderX), false) require.NoError(t, err, clues.ToCore(err)) - // file in idx(folder, parent) - df = driveFile("p", parentDir(namex(folder, parentFolderX)), idx(folder, parentFolderX)) + // file in folderID(parentX) + df = driveFile(d.dir(folderName(parentFolderX)), folderID(parentFolderX), "p") err = tree.addFile( - idx(folder, parentFolderX), - idx(file, "p"), + folderID(parentFolderX), + fileID("p"), custom.ToCustomDriveItem(df)) require.NoError(t, err, clues.ToCore(err)) - // idx(folder, parent) -> id(folder) - err = tree.setFolder(ctx, idx(folder, parentFolderX), id(folder), name(folder), false) + // folderID(parentX) -> folderID() + err = tree.setFolder(ctx, folderID(parentFolderX), folderID(), folderName(), false) require.NoError(t, err, clues.ToCore(err)) - // file in id(folder) - df = driveFile(file, parentDir(name(folder)), id(folder)) + // file in folderID() + df = driveFile(d.dir(folderName()), folderID()) err = tree.addFile( - id(folder), - id(file), + folderID(), + fileID(), custom.ToCustomDriveItem(df)) require.NoError(t, err, clues.ToCore(err)) - // tombstone - have to set a non-tombstone folder first, then add the item, then tombstone the folder - err = tree.setFolder(ctx, rootID, idx(folder, tombstoneX), 
namex(folder, tombstoneX), false) + // tombstone - have to set a non-tombstone folder first, + // then add the item, + // then tombstone the folder + err = tree.setFolder(ctx, rootID, folderID(tombstoneX), folderName(tombstoneX), false) require.NoError(t, err, clues.ToCore(err)) // file in tombstone - df = driveFile("t", parentDir(namex(folder, tombstoneX)), idx(folder, tombstoneX)) + df = driveFile(d.dir(folderName(tombstoneX)), folderID(tombstoneX), "t") err = tree.addFile( - idx(folder, tombstoneX), - idx(file, "t"), + folderID(tombstoneX), + fileID("t"), custom.ToCustomDriveItem(df)) require.NoError(t, err, clues.ToCore(err)) - err = tree.setTombstone(ctx, idx(folder, tombstoneX)) + err = tree.setTombstone(ctx, folderID(tombstoneX)) require.NoError(t, err, clues.ToCore(err)) // deleted file - tree.deleteFile(idx(file, "d")) + tree.deleteFile(fileID("d")) return tree } } +// --------------------------------------------------------------------------- +// Backup Handler +// --------------------------------------------------------------------------- + +type mockBackupHandler[T any] struct { + ItemInfo details.ItemInfo + // FIXME: this is a hacky solution. Better to use an interface + // and plug in the selector scope there. + Sel selectors.Selector + + DriveItemEnumeration enumerateDriveItemsDelta + + GI getsItem + GIP getsItemPermission + + PathPrefixFn pathPrefixer + PathPrefixErr error + + MetadataPathPrefixFn metadataPathPrefixer + MetadataPathPrefixErr error + + CanonPathFn canonPather + CanonPathErr error + + ProtectedResource idname.Provider + Service path.ServiceType + Category path.CategoryType + + // driveID -> itemPager + ItemPagerV map[string]pagers.DeltaHandler[models.DriveItemable] + + LocationIDFn locationIDer + + getCall int + GetResps []*http.Response + GetErrs []error + + RootFolder models.DriveItemable +} + +func stubRootFolder() models.DriveItemable { + item := models.NewDriveItem() + item.SetName(ptr.To(odConsts.RootPathDir)) + item.SetId(ptr.To(odConsts.RootID)) + item.SetRoot(models.NewRoot()) + item.SetFolder(models.NewFolder()) + + return item +} + +func defaultOneDriveBH(resourceOwner string) *mockBackupHandler[models.DriveItemable] { + sel := selectors.NewOneDriveBackup([]string{resourceOwner}) + sel.Include(sel.AllData()) + + return &mockBackupHandler[models.DriveItemable]{ + ItemInfo: details.ItemInfo{ + OneDrive: &details.OneDriveInfo{}, + Extension: &details.ExtensionData{}, + }, + Sel: sel.Selector, + DriveItemEnumeration: enumerateDriveItemsDelta{}, + GI: getsItem{Err: clues.New("not defined")}, + GIP: getsItemPermission{Err: clues.New("not defined")}, + PathPrefixFn: defaultOneDrivePathPrefixer, + MetadataPathPrefixFn: defaultOneDriveMetadataPathPrefixer, + CanonPathFn: defaultOneDriveCanonPather, + ProtectedResource: idname.NewProvider(resourceOwner, resourceOwner), + Service: path.OneDriveService, + Category: path.FilesCategory, + LocationIDFn: defaultOneDriveLocationIDer, + GetResps: []*http.Response{nil}, + GetErrs: []error{clues.New("not defined")}, + RootFolder: stubRootFolder(), + } +} + +func defaultSharePointBH(resourceOwner string) *mockBackupHandler[models.DriveItemable] { + sel := selectors.NewOneDriveBackup([]string{resourceOwner}) + sel.Include(sel.AllData()) + + return &mockBackupHandler[models.DriveItemable]{ + ItemInfo: details.ItemInfo{ + SharePoint: &details.SharePointInfo{}, + Extension: &details.ExtensionData{}, + }, + Sel: sel.Selector, + GI: getsItem{Err: clues.New("not defined")}, + GIP: getsItemPermission{Err: clues.New("not 
defined")}, + PathPrefixFn: defaultSharePointPathPrefixer, + MetadataPathPrefixFn: defaultSharePointMetadataPathPrefixer, + CanonPathFn: defaultSharePointCanonPather, + ProtectedResource: idname.NewProvider(resourceOwner, resourceOwner), + Service: path.SharePointService, + Category: path.LibrariesCategory, + LocationIDFn: defaultSharePointLocationIDer, + GetResps: []*http.Response{nil}, + GetErrs: []error{clues.New("not defined")}, + RootFolder: stubRootFolder(), + } +} + +func defaultDriveBHWith( + resource string, + enumerator enumerateDriveItemsDelta, +) *mockBackupHandler[models.DriveItemable] { + mbh := defaultOneDriveBH(resource) + mbh.DriveItemEnumeration = enumerator + + return mbh +} + +func (h mockBackupHandler[T]) PathPrefix(tID, driveID string) (path.Path, error) { + pp, err := h.PathPrefixFn(tID, h.ProtectedResource.ID(), driveID) + if err != nil { + return nil, err + } + + return pp, h.PathPrefixErr +} + +func (h mockBackupHandler[T]) MetadataPathPrefix(tID string) (path.Path, error) { + pp, err := h.MetadataPathPrefixFn(tID, h.ProtectedResource.ID()) + if err != nil { + return nil, err + } + + return pp, h.MetadataPathPrefixErr +} + +func (h mockBackupHandler[T]) CanonicalPath(pb *path.Builder, tID string) (path.Path, error) { + cp, err := h.CanonPathFn(pb, tID, h.ProtectedResource.ID()) + if err != nil { + return nil, err + } + + return cp, h.CanonPathErr +} + +func (h mockBackupHandler[T]) ServiceCat() (path.ServiceType, path.CategoryType) { + return h.Service, h.Category +} + +func (h mockBackupHandler[T]) NewDrivePager(string, []string) pagers.NonDeltaHandler[models.Driveable] { + return h.DriveItemEnumeration.drivePager() +} + +func (h mockBackupHandler[T]) FormatDisplayPath(_ string, pb *path.Builder) string { + return "/" + pb.String() +} + +func (h mockBackupHandler[T]) NewLocationIDer(driveID string, elems ...string) details.LocationIDer { + return h.LocationIDFn(driveID, elems...) 
+} + +func (h mockBackupHandler[T]) AugmentItemInfo( + details.ItemInfo, + idname.Provider, + *custom.DriveItem, + int64, + *path.Builder, +) details.ItemInfo { + return h.ItemInfo +} + +func (h *mockBackupHandler[T]) Get(context.Context, string, map[string]string) (*http.Response, error) { + c := h.getCall + h.getCall++ + + // allows mockers to only populate the errors slice + if h.GetErrs[c] != nil { + return nil, h.GetErrs[c] + } + + return h.GetResps[c], h.GetErrs[c] +} + +func (h mockBackupHandler[T]) EnumerateDriveItemsDelta( + ctx context.Context, + driveID, prevDeltaLink string, + cc api.CallConfig, +) pagers.NextPageResulter[models.DriveItemable] { + return h.DriveItemEnumeration.EnumerateDriveItemsDelta( + ctx, + driveID, + prevDeltaLink, + cc) +} + +func (h mockBackupHandler[T]) GetItem(ctx context.Context, _, _ string) (models.DriveItemable, error) { + return h.GI.GetItem(ctx, "", "") +} + +func (h mockBackupHandler[T]) GetItemPermission( + ctx context.Context, + _, _ string, +) (models.PermissionCollectionResponseable, error) { + return h.GIP.GetItemPermission(ctx, "", "") +} + +type canonPather func(*path.Builder, string, string) (path.Path, error) + +var defaultOneDriveCanonPather = func(pb *path.Builder, tID, ro string) (path.Path, error) { + return pb.ToDataLayerOneDrivePath(tID, ro, false) +} + +var defaultSharePointCanonPather = func(pb *path.Builder, tID, ro string) (path.Path, error) { + return pb.ToDataLayerSharePointPath(tID, ro, path.LibrariesCategory, false) +} + +type ( + pathPrefixer func(tID, ro, driveID string) (path.Path, error) + metadataPathPrefixer func(tID, ro string) (path.Path, error) +) + +var defaultOneDrivePathPrefixer = func(tID, ro, driveID string) (path.Path, error) { + return path.Build( + tID, + ro, + path.OneDriveService, + path.FilesCategory, + false, + odConsts.DrivesPathDir, + driveID, + odConsts.RootPathDir) +} + +var defaultOneDriveMetadataPathPrefixer = func(tID, ro string) (path.Path, error) { + return path.BuildMetadata( + tID, + ro, + path.OneDriveService, + path.FilesCategory, + false) +} + +var defaultSharePointPathPrefixer = func(tID, ro, driveID string) (path.Path, error) { + return path.Build( + tID, + ro, + path.SharePointService, + path.LibrariesCategory, + false, + odConsts.DrivesPathDir, + driveID, + odConsts.RootPathDir) +} + +var defaultSharePointMetadataPathPrefixer = func(tID, ro string) (path.Path, error) { + return path.BuildMetadata( + tID, + ro, + path.SharePointService, + path.LibrariesCategory, + false) +} + +type locationIDer func(string, ...string) details.LocationIDer + +var defaultOneDriveLocationIDer = func(driveID string, elems ...string) details.LocationIDer { + return details.NewOneDriveLocationIDer(driveID, elems...) +} + +var defaultSharePointLocationIDer = func(driveID string, elems ...string) details.LocationIDer { + return details.NewSharePointLocationIDer(driveID, elems...) 
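These paired defaults are what let one generic mock serve both services: `defaultOneDriveBH` and `defaultSharePointBH` differ only in which prefixers, canon-pather, and location-IDer they wire in. Note that `defaultSharePointBH` still builds a OneDrive selector; `IsAllPass` and `IncludesDir` below tolerate that by probing both scope types. A sketch of re-skinning the OneDrive mock for SharePoint by hand (illustrative; `defaultSharePointBH` already does this, plus the `ItemInfo` swap):

```go
mbh := defaultOneDriveBH("a-site")
mbh.Service = path.SharePointService
mbh.Category = path.LibrariesCategory
mbh.PathPrefixFn = defaultSharePointPathPrefixer
mbh.MetadataPathPrefixFn = defaultSharePointMetadataPathPrefixer
mbh.CanonPathFn = defaultSharePointCanonPather
mbh.LocationIDFn = defaultSharePointLocationIDer
```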
+}
+
+func (h mockBackupHandler[T]) IsAllPass() bool {
+	scope := h.Sel.Includes[0]
+	return selectors.IsAnyTarget(selectors.SharePointScope(scope), selectors.SharePointLibraryFolder) ||
+		selectors.IsAnyTarget(selectors.OneDriveScope(scope), selectors.OneDriveFolder)
+}
+
+func (h mockBackupHandler[T]) IncludesDir(dir string) bool {
+	scope := h.Sel.Includes[0]
+	return selectors.SharePointScope(scope).Matches(selectors.SharePointLibraryFolder, dir) ||
+		selectors.OneDriveScope(scope).Matches(selectors.OneDriveFolder, dir)
+}
+
+func (h mockBackupHandler[T]) GetRootFolder(context.Context, string) (models.DriveItemable, error) {
+	return h.RootFolder, nil
+}
+
+// ---------------------------------------------------------------------------
+// Get Itemer
+// ---------------------------------------------------------------------------
+
+type getsItem struct {
+	Item models.DriveItemable
+	Err  error
+}
+
+func (m getsItem) GetItem(
+	_ context.Context,
+	_, _ string,
+) (models.DriveItemable, error) {
+	return m.Item, m.Err
+}
+
+// ---------------------------------------------------------------------------
+// Drive Item Enumerator
+// ---------------------------------------------------------------------------
+
+type nextPage struct {
+	Items []models.DriveItemable
+	Reset bool
+}
+
+type enumerateDriveItemsDelta struct {
+	DrivePagers map[string]*DeltaDriveEnumerator
+}
+
+func driveEnumerator(
+	ds ...*DeltaDriveEnumerator,
+) enumerateDriveItemsDelta {
+	enumerator := enumerateDriveItemsDelta{
+		DrivePagers: map[string]*DeltaDriveEnumerator{},
+	}
+
+	for _, drive := range ds {
+		enumerator.DrivePagers[drive.Drive.id] = drive
+	}
+
+	return enumerator
+}
+
+func (en enumerateDriveItemsDelta) EnumerateDriveItemsDelta(
+	_ context.Context,
+	driveID, _ string,
+	_ api.CallConfig,
+) pagers.NextPageResulter[models.DriveItemable] {
+	iterator := en.DrivePagers[driveID]
+	return iterator.nextDelta()
+}
+
+func (en enumerateDriveItemsDelta) drivePager() *apiMock.Pager[models.Driveable] {
+	dvs := []models.Driveable{}
+
+	for _, dp := range en.DrivePagers {
+		dvs = append(dvs, dp.Drive.able)
+	}
+
+	return &apiMock.Pager[models.Driveable]{
+		ToReturn: []apiMock.PagerResult[models.Driveable]{
+			{Values: dvs},
+		},
+	}
+}
+
+func (en enumerateDriveItemsDelta) getDrives() []*deltaDrive {
+	dvs := []*deltaDrive{}
+
+	for _, dp := range en.DrivePagers {
+		dvs = append(dvs, dp.Drive)
+	}
+
+	return dvs
+}
+
+type deltaDrive struct {
+	id   string
+	able models.Driveable
+}
+
+func drive(driveSuffix ...any) *deltaDrive {
+	driveID := id(drivePfx, driveSuffix...)
+
+	able := models.NewDrive()
+	able.SetId(ptr.To(driveID))
+	able.SetName(ptr.To(name(drivePfx, driveSuffix...)))
+
+	return &deltaDrive{
+		id:   driveID,
+		able: able,
+	}
+}
+
+func (dd *deltaDrive) newEnumer() *DeltaDriveEnumerator {
+	clone := &deltaDrive{}
+	*clone = *dd
+
+	return &DeltaDriveEnumerator{Drive: clone}
+}
+
+type DeltaDriveEnumerator struct {
+	Drive        *deltaDrive
+	idx          int
+	DeltaQueries []*deltaQuery
+	Err          error
+}
+
+func (dde *DeltaDriveEnumerator) with(ds ...*deltaQuery) *DeltaDriveEnumerator {
+	dde.DeltaQueries = ds
+	return dde
+}
+
+// withErr adds an error that is always returned in the last delta index.
+func (dde *DeltaDriveEnumerator) withErr(err error) *DeltaDriveEnumerator {
+	dde.Err = err
+	return dde
+}
+
+func (dde *DeltaDriveEnumerator) nextDelta() *deltaQuery {
+	if dde.idx == len(dde.DeltaQueries) {
+		// at the end of the enumeration, return an empty page with no items,
+		// not even the root.
This is what graph api would do to signify an absence + // of changes in the delta. + lastDU := dde.DeltaQueries[dde.idx-1].DeltaUpdate + + return &deltaQuery{ + DeltaUpdate: lastDU, + Pages: []nextPage{{ + Items: []models.DriveItemable{}, + }}, + Err: dde.Err, + } + } + + if dde.idx > len(dde.DeltaQueries) { + // a panic isn't optimal here, but since this mechanism is internal to testing, + // it's an acceptable way to have the tests ensure we don't over-enumerate deltas. + panic(fmt.Sprintf("delta index %d larger than count of delta iterations in mock", dde.idx)) + } + + pages := dde.DeltaQueries[dde.idx] + + dde.idx++ + + return pages +} + +var _ pagers.NextPageResulter[models.DriveItemable] = &deltaQuery{} + +type deltaQuery struct { + idx int + Pages []nextPage + DeltaUpdate pagers.DeltaUpdate + Err error +} + +func delta( + resultDeltaID string, + err error, +) *deltaQuery { + return &deltaQuery{ + DeltaUpdate: pagers.DeltaUpdate{URL: resultDeltaID}, + Err: err, + } +} + +func deltaWReset( + resultDeltaID string, + err error, +) *deltaQuery { + return &deltaQuery{ + DeltaUpdate: pagers.DeltaUpdate{ + URL: resultDeltaID, + Reset: true, + }, + Err: err, + } +} + +func (dq *deltaQuery) with( + pages ...nextPage, +) *deltaQuery { + dq.Pages = pages + return dq +} + +func (dq *deltaQuery) NextPage() ([]models.DriveItemable, bool, bool) { + if dq.idx >= len(dq.Pages) { + return nil, false, true + } + + np := dq.Pages[dq.idx] + dq.idx++ + + return np.Items, np.Reset, false +} + +func (dq *deltaQuery) Cancel() {} + +func (dq *deltaQuery) Results() (pagers.DeltaUpdate, error) { + return dq.DeltaUpdate, dq.Err +} + +// --------------------------------------------------------------------------- +// Get Item Permissioner +// --------------------------------------------------------------------------- + +type getsItemPermission struct { + Perm models.PermissionCollectionResponseable + Err error +} + +func (m getsItemPermission) GetItemPermission( + _ context.Context, + _, _ string, +) (models.PermissionCollectionResponseable, error) { + return m.Perm, m.Err +} + +// --------------------------------------------------------------------------- +// Restore Handler +// -------------------------------------------------------------------------- + +type mockRestoreHandler struct { + ItemInfo details.ItemInfo + + CollisionKeyMap map[string]api.DriveItemIDType + + CalledDeleteItem bool + CalledDeleteItemOn string + DeleteItemErr error + + CalledPostItem bool + PostItemResp models.DriveItemable + PostItemErr error + + DrivePagerV pagers.NonDeltaHandler[models.Driveable] + + PostDriveResp models.Driveable + PostDriveErr error + + UploadSessionErr error +} + +func (h mockRestoreHandler) PostDrive( + ctx context.Context, + protectedResourceID, driveName string, +) (models.Driveable, error) { + return h.PostDriveResp, h.PostDriveErr +} + +func (h mockRestoreHandler) NewDrivePager(string, []string) pagers.NonDeltaHandler[models.Driveable] { + return h.DrivePagerV +} + +func (h *mockRestoreHandler) AugmentItemInfo( + details.ItemInfo, + idname.Provider, + *custom.DriveItem, + int64, + *path.Builder, +) details.ItemInfo { + return h.ItemInfo +} + +func (h *mockRestoreHandler) GetItemsInContainerByCollisionKey( + context.Context, + string, string, +) (map[string]api.DriveItemIDType, error) { + return h.CollisionKeyMap, nil +} + +func (h *mockRestoreHandler) DeleteItem( + _ context.Context, + _, itemID string, +) error { + h.CalledDeleteItem = true + h.CalledDeleteItemOn = itemID + + return h.DeleteItemErr +} + +func 
(h *mockRestoreHandler) DeleteItemPermission( + context.Context, + string, string, string, +) error { + return nil +} + +func (h *mockRestoreHandler) NewItemContentUpload( + context.Context, + string, string, +) (models.UploadSessionable, error) { + return models.NewUploadSession(), h.UploadSessionErr +} + +func (h *mockRestoreHandler) PostItemPermissionUpdate( + context.Context, + string, string, + *drives.ItemItemsItemInvitePostRequestBody, +) (drives.ItemItemsItemInviteResponseable, error) { + return drives.NewItemItemsItemInviteResponse(), nil +} + +func (h *mockRestoreHandler) PostItemLinkShareUpdate( + ctx context.Context, + driveID, itemID string, + body *drives.ItemItemsItemCreateLinkPostRequestBody, +) (models.Permissionable, error) { + return nil, clues.New("not implemented") +} + +func (h *mockRestoreHandler) PostItemInContainer( + context.Context, + string, string, + models.DriveItemable, + control.CollisionPolicy, +) (models.DriveItemable, error) { + h.CalledPostItem = true + return h.PostItemResp, h.PostItemErr +} + +func (h *mockRestoreHandler) GetFolderByName( + context.Context, + string, string, string, +) (models.DriveItemable, error) { + return models.NewDriveItem(), nil +} + +func (h *mockRestoreHandler) GetRootFolder( + context.Context, + string, +) (models.DriveItemable, error) { + return models.NewDriveItem(), nil +} + +// --------------------------------------------------------------------------- +// stub drive item factories +// --------------------------------------------------------------------------- + +type itemType int + +const ( + isFile itemType = 1 + isFolder itemType = 2 + isPackage itemType = 3 +) + +func coreItem( + id, name, parentPath, parentID string, + it itemType, +) *models.DriveItem { + item := models.NewDriveItem() + item.SetName(&name) + item.SetId(&id) + item.SetLastModifiedDateTime(ptr.To(time.Now())) + + parentReference := models.NewItemReference() + parentReference.SetPath(&parentPath) + parentReference.SetId(&parentID) + item.SetParentReference(parentReference) + + switch it { + case isFile: + item.SetSize(ptr.To[int64](42)) + item.SetFile(models.NewFile()) + case isFolder: + item.SetFolder(models.NewFolder()) + case isPackage: + item.SetPackageEscaped(models.NewPackageEscaped()) + } + + return item +} + +func driveItem( + id, name, parentPath, parentID string, + it itemType, +) models.DriveItemable { + return coreItem(id, name, parentPath, parentID, it) +} + +func driveItemWSize( + id, name, parentPath, parentID string, + size int64, + it itemType, +) models.DriveItemable { + res := coreItem(id, name, parentPath, parentID, it) + res.SetSize(ptr.To(size)) + + return res +} + +func malwareItem( + id, name, parentPath, parentID string, + it itemType, +) models.DriveItemable { + c := coreItem(id, name, parentPath, parentID, it) + + mal := models.NewMalware() + malStr := "test malware" + mal.SetDescription(&malStr) + + c.SetMalware(mal) + + return c +} + +// delItem creates a DriveItemable that is marked as deleted. path must be set +// to the base drive path. 
+func delItem( + id string, + parentID string, + it itemType, +) models.DriveItemable { + item := models.NewDriveItem() + item.SetId(&id) + item.SetDeleted(models.NewDeleted()) + + parentReference := models.NewItemReference() + parentReference.SetId(&parentID) + item.SetParentReference(parentReference) + + switch it { + case isFile: + item.SetFile(models.NewFile()) + case isFolder: + item.SetFolder(models.NewFolder()) + case isPackage: + item.SetPackageEscaped(models.NewPackageEscaped()) + } + + return item +} + +// --------------------------------------------------------------------------- +// file factories +// --------------------------------------------------------------------------- + +func fileID(fileSuffixes ...any) string { + return id(file, fileSuffixes...) +} + +func fileName(fileSuffixes ...any) string { + return name(file, fileSuffixes...) +} + +func driveFile( + parentPath, parentID string, + fileSuffixes ...any, +) models.DriveItemable { + return driveItem( + fileID(fileSuffixes...), + fileName(fileSuffixes...), + parentPath, + parentID, + isFile) +} + +func (dd *deltaDrive) fileAt( + parentSuffix any, + fileSuffixes ...any, +) models.DriveItemable { + return driveItem( + fileID(fileSuffixes...), + fileName(fileSuffixes...), + dd.dir(folderName(parentSuffix)), + folderID(parentSuffix), + isFile) +} + +func (dd *deltaDrive) fileAtRoot( + fileSuffixes ...any, +) models.DriveItemable { + return driveItem( + fileID(fileSuffixes...), + fileName(fileSuffixes...), + dd.dir(), + rootID, + isFile) +} + +func (dd *deltaDrive) fileWURLAtRoot( + url string, + isDeleted bool, + fileSuffixes ...any, +) models.DriveItemable { + di := driveFile(dd.dir(), rootID, fileSuffixes...) + di.SetAdditionalData(map[string]any{ + "@microsoft.graph.downloadUrl": url, + }) + + if isDeleted { + di.SetDeleted(models.NewDeleted()) + } + + return di +} + +func (dd *deltaDrive) fileWSizeAtRoot( + size int64, + fileSuffixes ...any, +) models.DriveItemable { + return driveItemWSize( + fileID(fileSuffixes...), + fileName(fileSuffixes...), + dd.dir(), + rootID, + size, + isFile) +} + +func (dd *deltaDrive) fileWSizeAt( + size int64, + parentSuffix any, + fileSuffixes ...any, +) models.DriveItemable { + return driveItemWSize( + fileID(fileSuffixes...), + fileName(fileSuffixes...), + dd.dir(folderName(parentSuffix)), + folderID(parentSuffix), + size, + isFile) +} + +// --------------------------------------------------------------------------- +// folder factories +// --------------------------------------------------------------------------- + +func folderID(folderSuffixes ...any) string { + return id(folder, folderSuffixes...) +} + +func folderName(folderSuffixes ...any) string { + return name(folder, folderSuffixes...) 
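Since every expectation in these tests leans on them, the suffix factories are worth spelling out: they delegate to the `id` and `name` builders in the next section, which prefix `id_` or `n_` and append each suffix with `%v` formatting. A few concrete expansions, derived from those builders:

```go
_ = fileID()           // "id_file"
_ = fileID("f2")       // "id_file_f2"
_ = fileID(1)          // "id_file_1" (non-string suffixes are %v-formatted)
_ = folderID("parent") // "id_folder_parent"
_ = folderName()       // "n_folder"
_ = folderID(folder)   // "id_folder" (a lone suffix equal to the moniker is dropped)
```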
+} + +func driveFolder( + parentPath, parentID string, + folderSuffixes ...any, +) models.DriveItemable { + return driveItem( + folderID(folderSuffixes...), + folderName(folderSuffixes...), + parentPath, + parentID, + isFolder) +} + +func driveRootFolder() models.DriveItemable { + rootFolder := models.NewDriveItem() + rootFolder.SetName(ptr.To(rootName)) + rootFolder.SetId(ptr.To(rootID)) + rootFolder.SetRoot(models.NewRoot()) + rootFolder.SetFolder(models.NewFolder()) + + return rootFolder +} + +func (dd *deltaDrive) folderAtRoot( + folderSuffixes ...any, +) models.DriveItemable { + return driveItem( + folderID(folderSuffixes...), + folderName(folderSuffixes...), + dd.dir(), + rootID, + isFolder) +} + +func (dd *deltaDrive) folderAt( + parentSuffix any, + folderSuffixes ...any, +) models.DriveItemable { + return driveItem( + folderID(folderSuffixes...), + folderName(folderSuffixes...), + dd.dir(folderName(parentSuffix)), + folderID(parentSuffix), + isFolder) +} + +// --------------------------------------------------------------------------- +// id, name, path factories +// --------------------------------------------------------------------------- + +// assumption is only one suffix per id. Mostly using +// the variadic as an "optional" extension. +func id(v string, suffixes ...any) string { + id := fmt.Sprintf("id_%s", v) + + // a bit weird, but acts as a quality of life + // that allows some funcs to take in the `file` + // or `folder` or etc monikers as the suffix + // without producing weird outputs. + if len(suffixes) == 1 { + sfx0, ok := suffixes[0].(string) + if ok && sfx0 == v { + return id + } + } + + for _, sfx := range suffixes { + id = fmt.Sprintf("%s_%v", id, sfx) + } + + return id +} + +// assumption is only one suffix per name. Mostly using +// the variadic as an "optional" extension. +func name(v string, suffixes ...any) string { + name := fmt.Sprintf("n_%s", v) + + // a bit weird, but acts as a quality of life + // that allows some funcs to take in the `file` + // or `folder` or etc monikers as the suffix + // without producing weird outputs. + if len(suffixes) == 1 { + sfx0, ok := suffixes[0].(string) + if ok && sfx0 == v { + return name + } + } + + for _, sfx := range suffixes { + name = fmt.Sprintf("%s_%v", name, sfx) + } + + return name +} + +func toPath(elems ...string) string { + es := []string{} + for _, elem := range elems { + es = append(es, path.Split(elem)...) + } + + switch len(es) { + case 0: + return "" + case 1: + return es[0] + default: + return path.Builder{}.Append(es...).String() + } +} + +// produces the full path for the provided drive +func (dd *deltaDrive) strPath(elems ...string) string { + return toPath(append( + []string{ + tenant, + path.OneDriveService.String(), + user, + path.FilesCategory.String(), + odConsts.DriveFolderPrefixBuilder(dd.id).String(), + }, + elems...)...) +} + +func (dd *deltaDrive) fullPath(t *testing.T, elems ...string) path.Path { + p, err := path.FromDataLayerPath(dd.strPath(elems...), false) + require.NoError(t, err, clues.ToCore(err)) + + return p +} + +// produces a complete path prefix up to the drive root folder with any +// elements passed in appended to the generated prefix. +func (dd *deltaDrive) dir(elems ...string) string { + return toPath(append( + []string{odConsts.DriveFolderPrefixBuilder(dd.id).String()}, + elems...)...) 
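`dir` and `strPath` are the path-side counterparts of the item factories above: `dir` yields the parent-path string rooted at the drive's folder prefix, and `strPath` extends that to the full data-layer path used as a collection key. A short in-package sketch of how the two line up (the concrete string renderings come from `path` and `odConsts`, so they are not restated here):

```go
d := drive()

// Expectation keys: full data-layer paths, one per collection.
atRoot := d.strPath()               // the drive root collection
inFolder := d.strPath(folderName()) // the folderID() collection

// Factory inputs: the parentPath handed to driveItem and friends.
rootParent := d.dir()               // for items created at the drive root
folderParent := d.dir(folderName()) // for items created inside the folder

_, _, _, _ = atRoot, inFolder, rootParent, folderParent
```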
+} + +// common item names +const ( + bar = "bar" + deltaURL = "delta_url" + drivePfx = "drive" + fanny = "fanny" + file = "file" + folder = "folder" + foo = "foo" + item = "item" + malware = "malware" + nav = "nav" + pkg = "package" + rootID = odConsts.RootID + rootName = odConsts.RootPathDir + subfolder = "subfolder" + tenant = "t" + user = "u" +) + // --------------------------------------------------------------------------- // misc // --------------------------------------------------------------------------- + func expectFullOrPrev(ca *collectionAssertion) path.Path { var p path.Path diff --git a/src/internal/m365/collection/drive/limiter_test.go b/src/internal/m365/collection/drive/limiter_test.go index f71463b1b..87c4a62e1 100644 --- a/src/internal/m365/collection/drive/limiter_test.go +++ b/src/internal/m365/collection/drive/limiter_test.go @@ -12,14 +12,11 @@ import ( "golang.org/x/exp/maps" "github.com/alcionai/corso/src/internal/common/prefixmatcher" - "github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/data" - "github.com/alcionai/corso/src/internal/m365/service/onedrive/mock" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/path" - apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock" ) type LimiterUnitSuite struct { @@ -33,23 +30,14 @@ func TestLimiterUnitSuite(t *testing.T) { type backupLimitTest struct { name string limits control.PreviewItemLimits - drives []models.Driveable - enumerator mock.EnumerateDriveItemsDelta + enumerator enumerateDriveItemsDelta // Collection name -> set of item IDs. We can't check item data because // that's not mocked out. Metadata is checked separately. 
expectedItemIDsInCollection map[string][]string } -func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) { - drive1 := models.NewDrive() - drive1.SetId(ptr.To(id(drive))) - drive1.SetName(ptr.To(name(drive))) - - drive2 := models.NewDrive() - drive2.SetId(ptr.To(idx(drive, 2))) - drive2.SetName(ptr.To(namex(drive, 2))) - - tbl := []backupLimitTest{ +func backupLimitTable(d1, d2 *deltaDrive) []backupLimitTest { + return []backupLimitTest{ { name: "OneDrive SinglePage ExcludeItemsOverMaxSize", limits: control.PreviewItemLimits{ @@ -60,15 +48,14 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxBytes: 5, MaxPages: 999, }, - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With(aPage( - filexWSizeAtRoot(1, 7), - filexWSizeAtRoot(2, 1), - filexWSizeAtRoot(3, 1))))), + enumerator: driveEnumerator( + d1.newEnumer().with( + delta(id(deltaURL), nil).with(aPage( + d1.fileWSizeAtRoot(7, "f1"), + d1.fileWSizeAtRoot(1, "f2"), + d1.fileWSizeAtRoot(1, "f3"))))), expectedItemIDsInCollection: map[string][]string{ - fullPath(): {idx(file, 2), idx(file, 3)}, + d1.strPath(): {fileID("f2"), fileID("f3")}, }, }, { @@ -81,15 +68,14 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxBytes: 3, MaxPages: 999, }, - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With(aPage( - filexWSizeAtRoot(1, 1), - filexWSizeAtRoot(2, 2), - filexWSizeAtRoot(3, 1))))), + enumerator: driveEnumerator( + d1.newEnumer().with( + delta(id(deltaURL), nil).with(aPage( + d1.fileWSizeAtRoot(1, "f1"), + d1.fileWSizeAtRoot(2, "f2"), + d1.fileWSizeAtRoot(1, "f3"))))), expectedItemIDsInCollection: map[string][]string{ - fullPath(): {idx(file, 1), idx(file, 2)}, + d1.strPath(): {fileID("f1"), fileID("f2")}, }, }, { @@ -102,17 +88,16 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxBytes: 3, MaxPages: 999, }, - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With(aPage( - filexWSizeAtRoot(1, 1), - folderxAtRoot(1), - filexWSizeAt(2, 1, 2), - filexWSizeAt(3, 1, 1))))), + enumerator: driveEnumerator( + d1.newEnumer().with( + delta(id(deltaURL), nil).with(aPage( + d1.fileWSizeAtRoot(1, "f1"), + d1.folderAtRoot(), + d1.fileWSizeAt(2, folder, "f2"), + d1.fileWSizeAt(1, folder, "f3"))))), expectedItemIDsInCollection: map[string][]string{ - fullPath(): {idx(file, 1)}, - fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 2)}, + d1.strPath(): {fileID("f1")}, + d1.strPath(folderName()): {folderID(), fileID("f2")}, }, }, { @@ -125,18 +110,17 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxBytes: 999999, MaxPages: 999, }, - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With(aPage( - filexAtRoot(1), - filexAtRoot(2), - filexAtRoot(3), - filexAtRoot(4), - filexAtRoot(5), - filexAtRoot(6))))), + enumerator: driveEnumerator( + d1.newEnumer().with( + delta(id(deltaURL), nil).with(aPage( + d1.fileAtRoot("f1"), + d1.fileAtRoot("f2"), + d1.fileAtRoot("f3"), + d1.fileAtRoot("f4"), + d1.fileAtRoot("f5"), + d1.fileAtRoot("f6"))))), expectedItemIDsInCollection: map[string][]string{ - fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)}, + d1.strPath(): {fileID("f1"), 
fileID("f2"), fileID("f3")}, }, }, { @@ -149,24 +133,23 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxBytes: 999999, MaxPages: 999, }, - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( + enumerator: driveEnumerator( + d1.newEnumer().with( + delta(id(deltaURL), nil).with( aPage( - filexAtRoot(1), - filexAtRoot(2)), + d1.fileAtRoot("f1"), + d1.fileAtRoot("f2")), aPage( // Repeated items shouldn't count against the limit. - filexAtRoot(1), - folderxAtRoot(1), - filexAt(3, 1), - filexAt(4, 1), - filexAt(5, 1), - filexAt(6, 1))))), + d1.fileAtRoot("f1"), + d1.folderAtRoot(), + d1.fileAt(folder, "f3"), + d1.fileAt(folder, "f4"), + d1.fileAt(folder, "f5"), + d1.fileAt(folder, "f6"))))), expectedItemIDsInCollection: map[string][]string{ - fullPath(): {idx(file, 1), idx(file, 2)}, - fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 3)}, + d1.strPath(): {fileID("f1"), fileID("f2")}, + d1.strPath(folderName()): {folderID(), fileID("f3")}, }, }, { @@ -179,21 +162,20 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxBytes: 999999, MaxPages: 1, }, - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( + enumerator: driveEnumerator( + d1.newEnumer().with( + delta(id(deltaURL), nil).with( aPage( - filexAtRoot(1), - filexAtRoot(2)), + d1.fileAtRoot("f1"), + d1.fileAtRoot("f2")), aPage( - folderxAtRoot(1), - filexAt(3, 1), - filexAt(4, 1), - filexAt(5, 1), - filexAt(6, 1))))), + d1.folderAtRoot(), + d1.fileAt(folder, "f3"), + d1.fileAt(folder, "f4"), + d1.fileAt(folder, "f5"), + d1.fileAt(folder, "f6"))))), expectedItemIDsInCollection: map[string][]string{ - fullPath(): {idx(file, 1), idx(file, 2)}, + d1.strPath(): {fileID("f1"), fileID("f2")}, }, }, { @@ -206,23 +188,22 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxBytes: 999999, MaxPages: 999, }, - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( + enumerator: driveEnumerator( + d1.newEnumer().with( + delta(id(deltaURL), nil).with( aPage( - filexAtRoot(1), - filexAtRoot(2), - filexAtRoot(3)), + d1.fileAtRoot("f1"), + d1.fileAtRoot("f2"), + d1.fileAtRoot("f3")), aPage( - folderxAtRoot(1), - filexAt(4, 1), - filexAt(5, 1))))), + d1.folderAtRoot(), + d1.fileAt(folder, "f4"), + d1.fileAt(folder, "f5"))))), expectedItemIDsInCollection: map[string][]string{ // Root has an additional item. It's hard to fix that in the code // though. - fullPath(): {idx(file, 1), idx(file, 2)}, - fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4)}, + d1.strPath(): {fileID("f1"), fileID("f2")}, + d1.strPath(folderName()): {folderID(), fileID("f4")}, }, }, { @@ -235,23 +216,22 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxBytes: 999999, MaxPages: 999, }, - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( + enumerator: driveEnumerator( + d1.newEnumer().with( + delta(id(deltaURL), nil).with( aPage( - folderAtRoot(), - filexAt(1, folder), - filexAt(2, folder)), + d1.folderAtRoot(), + d1.fileAt(folder, "f1"), + d1.fileAt(folder, "f2")), aPage( - folderAtRoot(), + d1.folderAtRoot(), // Updated item that shouldn't count against the limit a second time. 
- filexAt(2, folder), - filexAt(3, folder), - filexAt(4, folder))))), + d1.fileAt(folder, "f2"), + d1.fileAt(folder, "f3"), + d1.fileAt(folder, "f4"))))), expectedItemIDsInCollection: map[string][]string{ - fullPath(): {}, - fullPath(name(folder)): {id(folder), idx(file, 1), idx(file, 2), idx(file, 3)}, + d1.strPath(): {}, + d1.strPath(folderName()): {folderID(), fileID("f1"), fileID("f2"), fileID("f3")}, }, }, { @@ -264,24 +244,23 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxBytes: 999999, MaxPages: 999, }, - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( + enumerator: driveEnumerator( + d1.newEnumer().with( + delta(id(deltaURL), nil).with( aPage( - filexAtRoot(1), - filexAtRoot(2), + d1.fileAtRoot("f1"), + d1.fileAtRoot("f2"), // Put folder 0 at limit. - folderAtRoot(), - filexAt(3, folder), - filexAt(4, folder)), + d1.folderAtRoot(), + d1.fileAt(folder, "f3"), + d1.fileAt(folder, "f4")), aPage( - folderAtRoot(), + d1.folderAtRoot(), // Try to move item from root to folder 0 which is already at the limit. - filexAt(1, folder))))), + d1.fileAt(folder, "f1"))))), expectedItemIDsInCollection: map[string][]string{ - fullPath(): {idx(file, 1), idx(file, 2)}, - fullPath(name(folder)): {id(folder), idx(file, 3), idx(file, 4)}, + d1.strPath(): {fileID("f1"), fileID("f2")}, + d1.strPath(folderName()): {folderID(), fileID("f3"), fileID("f4")}, }, }, { @@ -294,23 +273,22 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxBytes: 999999, MaxPages: 999, }, - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( + enumerator: driveEnumerator( + d1.newEnumer().with( + delta(id(deltaURL), nil).with( aPage( - filexAtRoot(1), - filexAtRoot(2), - filexAtRoot(3)), + d1.fileAtRoot("f1"), + d1.fileAtRoot("f2"), + d1.fileAtRoot("f3")), aPage( - folderxAtRoot(1), - filexAt(4, 1)), + d1.folderAtRoot(), + d1.fileAt(folder, "f4")), aPage( - folderxAtRoot(1), - filexAt(5, 1))))), + d1.folderAtRoot(), + d1.fileAt(folder, "f5"))))), expectedItemIDsInCollection: map[string][]string{ - fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)}, - fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)}, + d1.strPath(): {fileID("f1"), fileID("f2"), fileID("f3")}, + d1.strPath(folderName()): {folderID(), fileID("f4"), fileID("f5")}, }, }, { @@ -323,26 +301,25 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxBytes: 999999, MaxPages: 999, }, - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( + enumerator: driveEnumerator( + d1.newEnumer().with( + delta(id(deltaURL), nil).with( aPage( - filexAtRoot(1), - filexAtRoot(2), - filexAtRoot(3)), + d1.fileAtRoot("f1"), + d1.fileAtRoot("f2"), + d1.fileAtRoot("f3")), aPage( - folderxAtRoot(1), - filexAt(4, 1), - filexAt(5, 1), + d1.folderAtRoot(), + d1.fileAt(folder, "f4"), + d1.fileAt(folder, "f5"), // This container shouldn't be returned. 
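The same idea applies to containers: once the container cap is hit, newly discovered folders are dropped wholesale, while folders admitted earlier keep accepting items (this entry keeps the root and one folder and drops the second). A standalone sketch of that admission rule, again only illustrative:

```go
admitted := map[string]bool{}
maxContainers := 2 // e.g. the root plus one folder

admit := func(containerID string) bool {
	if admitted[containerID] {
		return true // already admitted; items may still be added
	}

	if len(admitted) >= maxContainers {
		return false // over the cap; drop the whole container
	}

	admitted[containerID] = true

	return true
}

_ = admit
```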
- folderxAtRoot(2), - filexAt(7, 2), - filexAt(8, 2), - filexAt(9, 2))))), + d1.folderAtRoot(2), + d1.fileAt(2, "f7"), + d1.fileAt(2, "f8"), + d1.fileAt(2, "f9"))))), expectedItemIDsInCollection: map[string][]string{ - fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)}, - fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)}, + d1.strPath(): {fileID("f1"), fileID("f2"), fileID("f3")}, + d1.strPath(folderName()): {folderID(), fileID("f4"), fileID("f5")}, }, }, { @@ -355,27 +332,26 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxBytes: 999999, MaxPages: 999, }, - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( + enumerator: driveEnumerator( + d1.newEnumer().with( + delta(id(deltaURL), nil).with( aPage( - filexAtRoot(1), - filexAtRoot(2), - filexAtRoot(3)), + d1.fileAtRoot("f1"), + d1.fileAtRoot("f2"), + d1.fileAtRoot("f3")), aPage( - folderxAtRoot(1), - filexAt(4, 1), - filexAt(5, 1)), + d1.folderAtRoot(), + d1.fileAt(folder, "f4"), + d1.fileAt(folder, "f5")), aPage( // This container shouldn't be returned. - folderxAtRoot(2), - filexAt(7, 2), - filexAt(8, 2), - filexAt(9, 2))))), + d1.folderAtRoot(2), + d1.fileAt(2, "f7"), + d1.fileAt(2, "f8"), + d1.fileAt(2, "f9"))))), expectedItemIDsInCollection: map[string][]string{ - fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)}, - fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)}, + d1.strPath(): {fileID("f1"), fileID("f2"), fileID("f3")}, + d1.strPath(folderName()): {folderID(), fileID("f4"), fileID("f5")}, }, }, { @@ -388,25 +364,24 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxBytes: 999999, MaxPages: 999, }, - drives: []models.Driveable{drive1, drive2}, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With(aPage( - filexAtRoot(1), - filexAtRoot(2), - filexAtRoot(3), - filexAtRoot(4), - filexAtRoot(5)))), - mock.Drive(idx(drive, 2)).With( - mock.Delta(id(delta), nil).With(aPage( - filexAtRoot(1), - filexAtRoot(2), - filexAtRoot(3), - filexAtRoot(4), - filexAtRoot(5))))), + enumerator: driveEnumerator( + d1.newEnumer().with( + delta(id(deltaURL), nil).with(aPage( + d1.fileAtRoot("f1"), + d1.fileAtRoot("f2"), + d1.fileAtRoot("f3"), + d1.fileAtRoot("f4"), + d1.fileAtRoot("f5")))), + d2.newEnumer().with( + delta(id(deltaURL), nil).with(aPage( + d2.fileAtRoot("f1"), + d2.fileAtRoot("f2"), + d2.fileAtRoot("f3"), + d2.fileAtRoot("f4"), + d2.fileAtRoot("f5"))))), expectedItemIDsInCollection: map[string][]string{ - fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)}, - driveFullPath(2): {idx(file, 1), idx(file, 2), idx(file, 3)}, + d1.strPath(): {fileID("f1"), fileID("f2"), fileID("f3")}, + d2.strPath(): {fileID("f1"), fileID("f2"), fileID("f3")}, }, }, { @@ -418,28 +393,25 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxBytes: 1, MaxPages: 1, }, - drives: []models.Driveable{drive1}, - enumerator: mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With( + enumerator: driveEnumerator( + d1.newEnumer().with( + delta(id(deltaURL), nil).with( aPage( - filexAtRoot(1), - filexAtRoot(2), - filexAtRoot(3)), + d1.fileAtRoot("f1"), + d1.fileAtRoot("f2"), + d1.fileAtRoot("f3")), aPage( - folderxAtRoot(1), - filexAt(4, 1)), + d1.folderAtRoot(), + d1.fileAt(folder, "f4")), aPage( - folderxAtRoot(1), - filexAt(5, 1))))), + 
d1.folderAtRoot(), + d1.fileAt(folder, "f5"))))), expectedItemIDsInCollection: map[string][]string{ - fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)}, - fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)}, + d1.strPath(): {fileID("f1"), fileID("f2"), fileID("f3")}, + d1.strPath(folderName()): {folderID(), fileID("f4"), fileID("f5")}, }, }, } - - return drive1, drive2, tbl } // TestGet_PreviewLimits checks that the limits set for preview backups in @@ -447,16 +419,7 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) // checks that don't examine metadata, collection states, etc. They really just // check the expected items appear. func (suite *LimiterUnitSuite) TestGet_PreviewLimits_noTree() { - _, _, tbl := backupLimitTable() - - for _, test := range tbl { - suite.Run(test.name, func() { - runGetPreviewLimits( - suite.T(), - test, - control.DefaultOptions()) - }) - } + iterGetPreviewLimitsTests(suite, control.DefaultOptions()) } // TestGet_PreviewLimits checks that the limits set for preview backups in @@ -469,13 +432,21 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits_tree() { opts := control.DefaultOptions() opts.ToggleFeatures.UseDeltaTree = true - _, _, tbl := backupLimitTable() + iterGetPreviewLimitsTests(suite, opts) +} - for _, test := range tbl { +func iterGetPreviewLimitsTests( + suite *LimiterUnitSuite, + opts control.Options, +) { + d1, d2 := drive(), drive(2) + + for _, test := range backupLimitTable(d1, d2) { suite.Run(test.name, func() { runGetPreviewLimits( suite.T(), test, + d1, d2, opts) }) } @@ -484,6 +455,7 @@ func (suite *LimiterUnitSuite) TestGet_PreviewLimits_tree() { func runGetPreviewLimits( t *testing.T, test backupLimitTest, + drive1, drive2 *deltaDrive, opts control.Options, ) { ctx, flush := tester.NewContext(t) @@ -500,12 +472,7 @@ func runGetPreviewLimits( opts.PreviewLimits = test.limits var ( - mockDrivePager = &apiMock.Pager[models.Driveable]{ - ToReturn: []apiMock.PagerResult[models.Driveable]{ - {Values: test.drives}, - }, - } - mbh = mock.DefaultDriveBHWith(user, mockDrivePager, test.enumerator) + mbh = defaultDriveBHWith(user, test.enumerator) c = collWithMBHAndOpts(mbh, opts) errs = fault.New(true) delList = prefixmatcher.NewStringSetBuilder() @@ -558,7 +525,7 @@ func runGetPreviewLimits( t, test.expectedItemIDsInCollection[folderPath], itemIDs, - "item IDs in collection with path %q", + "item IDs in collection with path:\n\t%q", folderPath) } @@ -741,20 +708,17 @@ func runGetPreviewLimitsDefaults( false) require.NoError(t, err, "making metadata path", clues.ToCore(err)) - drv := models.NewDrive() - drv.SetId(ptr.To(id(drive))) - drv.SetName(ptr.To(name(drive))) - - pages := make([]mock.NextPage, 0, test.numContainers) + d := drive() + pages := make([]nextPage, 0, test.numContainers) for containerIdx := 0; containerIdx < test.numContainers; containerIdx++ { - page := mock.NextPage{ + page := nextPage{ Items: []models.DriveItemable{ - driveRootItem(), + driveRootFolder(), driveItem( - idx(folder, containerIdx), - namex(folder, containerIdx), - parentDir(), + folderID(containerIdx), + folderName(containerIdx), + d.dir(), rootID, isFolder), }, @@ -763,11 +727,11 @@ func runGetPreviewLimitsDefaults( for itemIdx := 0; itemIdx < test.numItemsPerContainer; itemIdx++ { itemSuffix := fmt.Sprintf("%d-%d", containerIdx, itemIdx) - page.Items = append(page.Items, driveItemWithSize( - idx(file, itemSuffix), - namex(file, itemSuffix), - parentDir(namex(folder, containerIdx)), - idx(folder, 
containerIdx), + page.Items = append(page.Items, driveItemWSize( + fileID(itemSuffix), + fileName(itemSuffix), + d.dir(folderName(containerIdx)), + folderID(containerIdx), test.itemSize, isFile)) } @@ -778,15 +742,10 @@ func runGetPreviewLimitsDefaults( opts.PreviewLimits = test.limits var ( - mockDrivePager = &apiMock.Pager[models.Driveable]{ - ToReturn: []apiMock.PagerResult[models.Driveable]{ - {Values: []models.Driveable{drv}}, - }, - } - mockEnumerator = mock.DriveEnumerator( - mock.Drive(id(drive)).With( - mock.Delta(id(delta), nil).With(pages...))) - mbh = mock.DefaultDriveBHWith(user, mockDrivePager, mockEnumerator) + mockEnumerator = driveEnumerator( + d.newEnumer().with( + delta(id(deltaURL), nil).with(pages...))) + mbh = defaultDriveBHWith(user, mockEnumerator) c = collWithMBHAndOpts(mbh, opts) errs = fault.New(true) delList = prefixmatcher.NewStringSetBuilder() diff --git a/src/internal/m365/collection/drive/restore_test.go b/src/internal/m365/collection/drive/restore_test.go index 5e0fded00..c186ef483 100644 --- a/src/internal/m365/collection/drive/restore_test.go +++ b/src/internal/m365/collection/drive/restore_test.go @@ -15,7 +15,7 @@ import ( "github.com/alcionai/corso/src/internal/common/ptr" dataMock "github.com/alcionai/corso/src/internal/data/mock" odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts" - odMock "github.com/alcionai/corso/src/internal/m365/service/onedrive/mock" + "github.com/alcionai/corso/src/internal/m365/service/onedrive/mock" odStub "github.com/alcionai/corso/src/internal/m365/service/onedrive/stub" "github.com/alcionai/corso/src/internal/operations/inject" "github.com/alcionai/corso/src/internal/tester" @@ -53,7 +53,7 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() { onCollision control.CollisionPolicy deleteErr error expectSkipped assert.BoolAssertionFunc - expectMock func(*testing.T, *odMock.RestoreHandler) + expectMock func(*testing.T, *mockRestoreHandler) expectCounts counts }{ { @@ -61,7 +61,7 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() { collisionKeys: map[string]api.DriveItemIDType{}, onCollision: control.Copy, expectSkipped: assert.False, - expectMock: func(t *testing.T, rh *odMock.RestoreHandler) { + expectMock: func(t *testing.T, rh *mockRestoreHandler) { assert.True(t, rh.CalledPostItem, "new item posted") assert.False(t, rh.CalledDeleteItem, "new item deleted") }, @@ -72,7 +72,7 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() { collisionKeys: map[string]api.DriveItemIDType{}, onCollision: control.Replace, expectSkipped: assert.False, - expectMock: func(t *testing.T, rh *odMock.RestoreHandler) { + expectMock: func(t *testing.T, rh *mockRestoreHandler) { assert.True(t, rh.CalledPostItem, "new item posted") assert.False(t, rh.CalledDeleteItem, "new item deleted") }, @@ -83,7 +83,7 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() { collisionKeys: map[string]api.DriveItemIDType{}, onCollision: control.Skip, expectSkipped: assert.False, - expectMock: func(t *testing.T, rh *odMock.RestoreHandler) { + expectMock: func(t *testing.T, rh *mockRestoreHandler) { assert.True(t, rh.CalledPostItem, "new item posted") assert.False(t, rh.CalledDeleteItem, "new item deleted") }, @@ -92,11 +92,11 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() { { name: "collision, copy", collisionKeys: map[string]api.DriveItemIDType{ - odMock.DriveItemFileName: {ItemID: mndiID}, + mock.DriveItemFileName: {ItemID: mndiID}, }, 
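Taken together, these entries pin down the collision matrix that `expectMock` asserts: what restore does when the incoming file's collision key already exists. A condensed standalone summary for plain file collisions; per the later entries, a file-versus-folder collision is always handled as a copy, and only an exact file match can be skipped:

```go
// Which mock calls should fire per policy on a plain file collision.
type outcome struct{ postNew, deleteOld, skipped bool }

collisionOutcomes := map[control.CollisionPolicy]outcome{
	control.Copy:    {postNew: true},                  // keep both items
	control.Replace: {postNew: true, deleteOld: true}, // delete old, post new
	control.Skip:    {skipped: true},                  // no post, no delete
}

_ = collisionOutcomes
```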
onCollision: control.Copy, expectSkipped: assert.False, - expectMock: func(t *testing.T, rh *odMock.RestoreHandler) { + expectMock: func(t *testing.T, rh *mockRestoreHandler) { assert.True(t, rh.CalledPostItem, "new item posted") assert.False(t, rh.CalledDeleteItem, "new item deleted") }, @@ -105,11 +105,11 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() { { name: "collision, replace", collisionKeys: map[string]api.DriveItemIDType{ - odMock.DriveItemFileName: {ItemID: mndiID}, + mock.DriveItemFileName: {ItemID: mndiID}, }, onCollision: control.Replace, expectSkipped: assert.False, - expectMock: func(t *testing.T, rh *odMock.RestoreHandler) { + expectMock: func(t *testing.T, rh *mockRestoreHandler) { assert.True(t, rh.CalledPostItem, "new item posted") assert.True(t, rh.CalledDeleteItem, "new item deleted") assert.Equal(t, mndiID, rh.CalledDeleteItemOn, "deleted the correct item") @@ -119,12 +119,12 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() { { name: "collision, replace - err already deleted", collisionKeys: map[string]api.DriveItemIDType{ - odMock.DriveItemFileName: {ItemID: "smarf"}, + mock.DriveItemFileName: {ItemID: "smarf"}, }, onCollision: control.Replace, deleteErr: graph.ErrDeletedInFlight, expectSkipped: assert.False, - expectMock: func(t *testing.T, rh *odMock.RestoreHandler) { + expectMock: func(t *testing.T, rh *mockRestoreHandler) { assert.True(t, rh.CalledPostItem, "new item posted") assert.True(t, rh.CalledDeleteItem, "new item deleted") }, @@ -133,11 +133,11 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() { { name: "collision, skip", collisionKeys: map[string]api.DriveItemIDType{ - odMock.DriveItemFileName: {ItemID: mndiID}, + mock.DriveItemFileName: {ItemID: mndiID}, }, onCollision: control.Skip, expectSkipped: assert.True, - expectMock: func(t *testing.T, rh *odMock.RestoreHandler) { + expectMock: func(t *testing.T, rh *mockRestoreHandler) { assert.False(t, rh.CalledPostItem, "new item posted") assert.False(t, rh.CalledDeleteItem, "new item deleted") }, @@ -146,14 +146,14 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() { { name: "file-folder collision, copy", collisionKeys: map[string]api.DriveItemIDType{ - odMock.DriveItemFileName: { + mock.DriveItemFileName: { ItemID: mndiID, IsFolder: true, }, }, onCollision: control.Copy, expectSkipped: assert.False, - expectMock: func(t *testing.T, rh *odMock.RestoreHandler) { + expectMock: func(t *testing.T, rh *mockRestoreHandler) { assert.True(t, rh.CalledPostItem, "new item posted") assert.False(t, rh.CalledDeleteItem, "new item deleted") }, @@ -162,14 +162,14 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() { { name: "file-folder collision, replace", collisionKeys: map[string]api.DriveItemIDType{ - odMock.DriveItemFileName: { + mock.DriveItemFileName: { ItemID: mndiID, IsFolder: true, }, }, onCollision: control.Replace, expectSkipped: assert.False, - expectMock: func(t *testing.T, rh *odMock.RestoreHandler) { + expectMock: func(t *testing.T, rh *mockRestoreHandler) { assert.True(t, rh.CalledPostItem, "new item posted") assert.False(t, rh.CalledDeleteItem, "new item deleted") }, @@ -178,14 +178,14 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() { { name: "file-folder collision, skip", collisionKeys: map[string]api.DriveItemIDType{ - odMock.DriveItemFileName: { + mock.DriveItemFileName: { ItemID: mndiID, IsFolder: true, }, }, onCollision: control.Skip, expectSkipped: assert.True, - expectMock: 
func(t *testing.T, rh *odMock.RestoreHandler) { + expectMock: func(t *testing.T, rh *mockRestoreHandler) { assert.False(t, rh.CalledPostItem, "new item posted") assert.False(t, rh.CalledDeleteItem, "new item deleted") }, @@ -204,7 +204,7 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() { var ( caches = NewRestoreCaches(nil) - rh = &odMock.RestoreHandler{ + rh = &mockRestoreHandler{ PostItemResp: models.NewDriveItem(), DeleteItemErr: test.deleteErr, } @@ -232,9 +232,9 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() { ctx, rh, rcc, - odMock.FetchItemByName{ + mock.FetchItemByName{ Item: &dataMock.Item{ - Reader: odMock.FileRespReadCloser(odMock.DriveFileMetaData), + Reader: mock.FileRespReadCloser(mock.DriveFileMetaData), ItemInfo: odStub.DriveItemInfo(), }, }, @@ -244,7 +244,7 @@ func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() { caches, &dataMock.Item{ ItemID: uuid.NewString(), - Reader: odMock.FileRespReadCloser(odMock.DriveFilePayloadData), + Reader: mock.FileRespReadCloser(mock.DriveFilePayloadData), ItemInfo: odStub.DriveItemInfo(), }, nil, diff --git a/src/internal/m365/collection/drive/url_cache_test.go b/src/internal/m365/collection/drive/url_cache_test.go index 8901a9db6..14494610c 100644 --- a/src/internal/m365/collection/drive/url_cache_test.go +++ b/src/internal/m365/collection/drive/url_cache_test.go @@ -18,7 +18,6 @@ import ( "github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/m365/service/onedrive/mock" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester/tconfig" "github.com/alcionai/corso/src/pkg/control" @@ -214,12 +213,15 @@ func TestURLCacheUnitSuite(t *testing.T) { } func (suite *URLCacheUnitSuite) TestGetItemProperties() { - deltaString := "delta" - driveID := "drive1" + d := drive() + + aURL := func(n int) string { + return fmt.Sprintf("https://dummy%d.com", n) + } table := []struct { name string - pages []mock.NextPage + pages []nextPage pagerErr error expectedItemProps map[string]itemProps expectErr assert.ErrorAssertionFunc @@ -227,14 +229,12 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() { }{ { name: "single item in cache", - pages: []mock.NextPage{ - {Items: []models.DriveItemable{ - fileItem("1", "file1", "root", "root", "https://dummy1.com", false), - }}, + pages: []nextPage{ + aPage(d.fileWURLAtRoot(aURL(1), false, 1)), }, expectedItemProps: map[string]itemProps{ - "1": { - downloadURL: "https://dummy1.com", + fileID(1): { + downloadURL: aURL(1), isDeleted: false, }, }, @@ -247,34 +247,33 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() { }, { name: "multiple items in cache", - pages: []mock.NextPage{ - {Items: []models.DriveItemable{ - fileItem("1", "file1", "root", "root", "https://dummy1.com", false), - fileItem("2", "file2", "root", "root", "https://dummy2.com", false), - fileItem("3", "file3", "root", "root", "https://dummy3.com", false), - fileItem("4", "file4", "root", "root", "https://dummy4.com", false), - fileItem("5", "file5", "root", "root", "https://dummy5.com", false), - }}, + pages: []nextPage{ + aPage( + d.fileWURLAtRoot(aURL(1), false, 1), + d.fileWURLAtRoot(aURL(2), false, 2), + d.fileWURLAtRoot(aURL(3), false, 3), + d.fileWURLAtRoot(aURL(4), false, 4), + d.fileWURLAtRoot(aURL(5), false, 5)), }, expectedItemProps: map[string]itemProps{ - "1": { - downloadURL: "https://dummy1.com", + fileID(1): { + downloadURL: aURL(1), 
diff --git a/src/internal/m365/collection/drive/url_cache_test.go b/src/internal/m365/collection/drive/url_cache_test.go
index 8901a9db6..14494610c 100644
--- a/src/internal/m365/collection/drive/url_cache_test.go
+++ b/src/internal/m365/collection/drive/url_cache_test.go
@@ -18,7 +18,6 @@ import (
 	"github.com/alcionai/corso/src/internal/common/dttm"
 	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/internal/tester/tconfig"
 	"github.com/alcionai/corso/src/pkg/control"
@@ -214,12 +213,15 @@ func TestURLCacheUnitSuite(t *testing.T) {
 }
 
 func (suite *URLCacheUnitSuite) TestGetItemProperties() {
-	deltaString := "delta"
-	driveID := "drive1"
+	d := drive()
+
+	aURL := func(n int) string {
+		return fmt.Sprintf("https://dummy%d.com", n)
+	}
 
 	table := []struct {
 		name              string
-		pages             []mock.NextPage
+		pages             []nextPage
 		pagerErr          error
 		expectedItemProps map[string]itemProps
 		expectErr         assert.ErrorAssertionFunc
@@ -227,14 +229,12 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 	}{
 		{
 			name: "single item in cache",
-			pages: []mock.NextPage{
-				{Items: []models.DriveItemable{
-					fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-				}},
+			pages: []nextPage{
+				aPage(d.fileWURLAtRoot(aURL(1), false, 1)),
 			},
 			expectedItemProps: map[string]itemProps{
-				"1": {
-					downloadURL: "https://dummy1.com",
+				fileID(1): {
+					downloadURL: aURL(1),
 					isDeleted:   false,
 				},
 			},
@@ -247,34 +247,33 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 		},
 		{
 			name: "multiple items in cache",
-			pages: []mock.NextPage{
-				{Items: []models.DriveItemable{
-					fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-					fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
-					fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
-					fileItem("4", "file4", "root", "root", "https://dummy4.com", false),
-					fileItem("5", "file5", "root", "root", "https://dummy5.com", false),
-				}},
+			pages: []nextPage{
+				aPage(
+					d.fileWURLAtRoot(aURL(1), false, 1),
+					d.fileWURLAtRoot(aURL(2), false, 2),
+					d.fileWURLAtRoot(aURL(3), false, 3),
+					d.fileWURLAtRoot(aURL(4), false, 4),
+					d.fileWURLAtRoot(aURL(5), false, 5)),
 			},
 			expectedItemProps: map[string]itemProps{
-				"1": {
-					downloadURL: "https://dummy1.com",
+				fileID(1): {
+					downloadURL: aURL(1),
 					isDeleted:   false,
 				},
-				"2": {
-					downloadURL: "https://dummy2.com",
+				fileID(2): {
+					downloadURL: aURL(2),
 					isDeleted:   false,
 				},
-				"3": {
-					downloadURL: "https://dummy3.com",
+				fileID(3): {
+					downloadURL: aURL(3),
 					isDeleted:   false,
 				},
-				"4": {
-					downloadURL: "https://dummy4.com",
+				fileID(4): {
+					downloadURL: aURL(4),
 					isDeleted:   false,
 				},
-				"5": {
-					downloadURL: "https://dummy5.com",
+				fileID(5): {
+					downloadURL: aURL(5),
 					isDeleted:   false,
 				},
 			},
@@ -287,36 +286,34 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 		},
 		{
 			name: "multiple pages",
-			pages: []mock.NextPage{
-				{Items: []models.DriveItemable{
-					fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-					fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
-					fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
-				}},
-				{Items: []models.DriveItemable{
-					fileItem("4", "file4", "root", "root", "https://dummy4.com", false),
-					fileItem("5", "file5", "root", "root", "https://dummy5.com", false),
-				}},
+			pages: []nextPage{
+				aPage(
+					d.fileWURLAtRoot(aURL(1), false, 1),
+					d.fileWURLAtRoot(aURL(2), false, 2),
+					d.fileWURLAtRoot(aURL(3), false, 3)),
+				aPage(
+					d.fileWURLAtRoot(aURL(4), false, 4),
+					d.fileWURLAtRoot(aURL(5), false, 5)),
 			},
 			expectedItemProps: map[string]itemProps{
-				"1": {
-					downloadURL: "https://dummy1.com",
+				fileID(1): {
+					downloadURL: aURL(1),
 					isDeleted:   false,
 				},
-				"2": {
-					downloadURL: "https://dummy2.com",
+				fileID(2): {
+					downloadURL: aURL(2),
 					isDeleted:   false,
 				},
-				"3": {
-					downloadURL: "https://dummy3.com",
+				fileID(3): {
+					downloadURL: aURL(3),
 					isDeleted:   false,
 				},
-				"4": {
-					downloadURL: "https://dummy4.com",
+				fileID(4): {
+					downloadURL: aURL(4),
 					isDeleted:   false,
 				},
-				"5": {
-					downloadURL: "https://dummy5.com",
+				fileID(5): {
+					downloadURL: aURL(5),
 					isDeleted:   false,
 				},
 			},
@@ -329,53 +326,41 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 		},
 		{
 			name: "multiple pages with resets",
-			pages: []mock.NextPage{
-				{
-					Items: []models.DriveItemable{
-						fileItem("-1", "file-1", "root", "root", "https://dummy-1.com", false),
-						fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-						fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
-						fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
-					},
-				},
-				{
-					Items: []models.DriveItemable{},
-					Reset: true,
-				},
-				{
-					Items: []models.DriveItemable{
-						fileItem("0", "file1", "root", "root", "https://dummy0.com", false),
-						fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-						fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
-						fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
-					},
-				},
-				{
-					Items: []models.DriveItemable{
-						fileItem("4", "file4", "root", "root", "https://dummy4.com", false),
-						fileItem("5", "file5", "root", "root", "https://dummy5.com", false),
-					},
-				},
+			pages: []nextPage{
+				aPage(
+					d.fileWURLAtRoot(aURL(-1), false, -1),
+					d.fileWURLAtRoot(aURL(1), false, 1),
+					d.fileWURLAtRoot(aURL(2), false, 2),
+					d.fileWURLAtRoot(aURL(3), false, 3)),
+				aReset(),
+				aPage(
+					d.fileWURLAtRoot(aURL(0), false, 0),
+					d.fileWURLAtRoot(aURL(1), false, 1),
+					d.fileWURLAtRoot(aURL(2), false, 2),
+					d.fileWURLAtRoot(aURL(3), false, 3)),
+				aPage(
+					d.fileWURLAtRoot(aURL(4), false, 4),
+					d.fileWURLAtRoot(aURL(5), false, 5)),
 			},
 			expectedItemProps: map[string]itemProps{
-				"1": {
-					downloadURL: "https://dummy1.com",
+				fileID(1): {
+					downloadURL: aURL(1),
 					isDeleted:   false,
 				},
-				"2": {
-					downloadURL: "https://dummy2.com",
+				fileID(2): {
+					downloadURL: aURL(2),
 					isDeleted:   false,
 				},
-				"3": {
-					downloadURL: "https://dummy3.com",
+				fileID(3): {
+					downloadURL: aURL(3),
 					isDeleted:   false,
 				},
-				"4": {
-					downloadURL: "https://dummy4.com",
+				fileID(4): {
+					downloadURL: aURL(4),
 					isDeleted:   false,
 				},
-				"5": {
-					downloadURL: "https://dummy5.com",
+				fileID(5): {
+					downloadURL: aURL(5),
 					isDeleted:   false,
 				},
 			},
@@ -388,48 +373,39 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 		},
 		{
 			name: "multiple pages with resets and combo reset+items in page",
-			pages: []mock.NextPage{
-				{
-					Items: []models.DriveItemable{
-						fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-						fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
-						fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
-					},
-				},
-				{
-					Items: []models.DriveItemable{
-						fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-						fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
-						fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
-					},
-					Reset: true,
-				},
-				{
-					Items: []models.DriveItemable{
-						fileItem("4", "file4", "root", "root", "https://dummy4.com", false),
-						fileItem("5", "file5", "root", "root", "https://dummy5.com", false),
-					},
-				},
+			pages: []nextPage{
+				aPage(
+					d.fileWURLAtRoot(aURL(0), false, 0),
+					d.fileWURLAtRoot(aURL(1), false, 1),
+					d.fileWURLAtRoot(aURL(2), false, 2),
+					d.fileWURLAtRoot(aURL(3), false, 3)),
+				aPageWReset(
+					d.fileWURLAtRoot(aURL(1), false, 1),
+					d.fileWURLAtRoot(aURL(2), false, 2),
+					d.fileWURLAtRoot(aURL(3), false, 3)),
+				aPage(
+					d.fileWURLAtRoot(aURL(4), false, 4),
+					d.fileWURLAtRoot(aURL(5), false, 5)),
 			},
 			expectedItemProps: map[string]itemProps{
-				"1": {
-					downloadURL: "https://dummy1.com",
+				fileID(1): {
+					downloadURL: aURL(1),
 					isDeleted:   false,
 				},
-				"2": {
-					downloadURL: "https://dummy2.com",
+				fileID(2): {
+					downloadURL: aURL(2),
 					isDeleted:   false,
 				},
-				"3": {
-					downloadURL: "https://dummy3.com",
+				fileID(3): {
+					downloadURL: aURL(3),
 					isDeleted:   false,
 				},
-				"4": {
-					downloadURL: "https://dummy4.com",
+				fileID(4): {
+					downloadURL: aURL(4),
 					isDeleted:   false,
 				},
-				"5": {
-					downloadURL: "https://dummy5.com",
+				fileID(5): {
+					downloadURL: aURL(5),
 					isDeleted:   false,
 				},
 			},
@@ -442,26 +418,25 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 		},
 		{
 			name: "duplicate items with potentially new urls",
-			pages: []mock.NextPage{
-				{Items: []models.DriveItemable{
-					fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-					fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
-					fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
-					fileItem("1", "file1", "root", "root", "https://test1.com", false),
-					fileItem("2", "file2", "root", "root", "https://test2.com", false),
-				}},
+			pages: []nextPage{
+				aPage(
+					d.fileWURLAtRoot(aURL(1), false, 1),
+					d.fileWURLAtRoot(aURL(2), false, 2),
+					d.fileWURLAtRoot(aURL(3), false, 3),
+					d.fileWURLAtRoot(aURL(100), false, 1),
+					d.fileWURLAtRoot(aURL(200), false, 2)),
 			},
 			expectedItemProps: map[string]itemProps{
-				"1": {
-					downloadURL: "https://test1.com",
+				fileID(1): {
+					downloadURL: aURL(100),
 					isDeleted:   false,
 				},
-				"2": {
-					downloadURL: "https://test2.com",
+				fileID(2): {
+					downloadURL: aURL(200),
 					isDeleted:   false,
 				},
-				"3": {
-					downloadURL: "https://dummy3.com",
+				fileID(3): {
+					downloadURL: aURL(3),
 					isDeleted:   false,
 				},
 			},
@@ -474,20 +449,19 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 		},
 		{
 			name: "deleted items",
-			pages: []mock.NextPage{
-				{Items: []models.DriveItemable{
-					fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-					fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
-					fileItem("1", "file1", "root", "root", "https://dummy1.com", true),
-				}},
+			pages: []nextPage{
+				aPage(
+					d.fileWURLAtRoot(aURL(1), false, 1),
+					d.fileWURLAtRoot(aURL(2), false, 2),
+					d.fileWURLAtRoot(aURL(1), true, 1)),
 			},
 			expectedItemProps: map[string]itemProps{
-				"1": {
+				fileID(1): {
 					downloadURL: "",
 					isDeleted:   true,
 				},
-				"2": {
-					downloadURL: "https://dummy2.com",
+				fileID(2): {
+					downloadURL: aURL(2),
 					isDeleted:   false,
 				},
 			},
@@ -500,13 +474,11 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 		},
 		{
 			name: "item not found in cache",
-			pages: []mock.NextPage{
-				{Items: []models.DriveItemable{
-					fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-				}},
+			pages: []nextPage{
+				aPage(d.fileWURLAtRoot(aURL(1), false, 1)),
 			},
 			expectedItemProps: map[string]itemProps{
-				"2": {},
+				fileID(2): {},
 			},
 			expectErr: assert.Error,
 			expect: func(t *testing.T, uc *urlCache, startTime time.Time) {
@@ -517,13 +489,13 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 		},
 		{
 			name: "delta query error",
-			pages: []mock.NextPage{
-				{Items: []models.DriveItemable{}},
+			pages: []nextPage{
+				aPage(),
 			},
 			pagerErr: errors.New("delta query error"),
 			expectedItemProps: map[string]itemProps{
-				"1": {},
-				"2": {},
+				fileID(1): {},
+				fileID(2): {},
 			},
 			expectErr: assert.Error,
 			expect: func(t *testing.T, uc *urlCache, startTime time.Time) {
@@ -534,14 +506,13 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 		},
 		{
 			name: "folder item",
-			pages: []mock.NextPage{
-				{Items: []models.DriveItemable{
-					fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-					driveItem("2", "folder2", "root", "root", isFolder),
-				}},
+			pages: []nextPage{
+				aPage(
+					d.fileWURLAtRoot(aURL(1), false, 1),
+					d.folderAtRoot(2)),
 			},
 			expectedItemProps: map[string]itemProps{
-				"2": {},
+				fileID(2): {},
 			},
 			expectErr: assert.Error,
 			expect: func(t *testing.T, uc *urlCache, startTime time.Time) {
@@ -562,14 +533,17 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 			ctx, flush := tester.NewContext(t)
 			defer flush()
 
-			driveEnumer := mock.DriveEnumerator(
-				mock.Drive(driveID).
-					WithErr(test.pagerErr).
-					With(mock.Delta(deltaString, test.pagerErr).
-						With(test.pages...)))
+			drive := drive()
+
+			driveEnumer := driveEnumerator(
+				drive.newEnumer().
+					withErr(test.pagerErr).
+					with(
+						delta(deltaURL, test.pagerErr).
+							with(test.pages...)))
 
 			cache, err := newURLCache(
-				driveID,
+				drive.id,
 				"",
 				1*time.Hour,
 				driveEnumer,
@@ -609,18 +583,17 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 func (suite *URLCacheUnitSuite) TestNeedsRefresh() {
 	var (
 		t               = suite.T()
-		driveID         = "drive1"
 		refreshInterval = 1 * time.Second
+		drv             = drive()
 	)
 
 	cache, err := newURLCache(
-		driveID,
+		drv.id,
 		"",
 		refreshInterval,
-		&mock.EnumerateDriveItemsDelta{},
+		&enumerateDriveItemsDelta{},
 		count.New(),
 		fault.New(true))
-
 	require.NoError(t, err, clues.ToCore(err))
 
 	// cache is empty
@@ -641,6 +614,8 @@ func (suite *URLCacheUnitSuite) TestNeedsRefresh() {
 }
 
 func (suite *URLCacheUnitSuite) TestNewURLCache() {
+	drv := drive()
+
 	table := []struct {
 		name       string
 		driveID    string
@@ -653,21 +628,21 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
 		{
 			name:       "invalid driveID",
 			driveID:    "",
 			refreshInt: 1 * time.Hour,
-			itemPager:  &mock.EnumerateDriveItemsDelta{},
+			itemPager:  &enumerateDriveItemsDelta{},
 			errors:     fault.New(true),
 			expectErr:  require.Error,
 		},
 		{
 			name:       "invalid refresh interval",
-			driveID:    "drive1",
+			driveID:    drv.id,
 			refreshInt: 100 * time.Millisecond,
-			itemPager:  &mock.EnumerateDriveItemsDelta{},
+			itemPager:  &enumerateDriveItemsDelta{},
 			errors:     fault.New(true),
 			expectErr:  require.Error,
 		},
 		{
 			name:       "invalid item enumerator",
-			driveID:    "drive1",
+			driveID:    drv.id,
 			refreshInt: 1 * time.Hour,
 			itemPager:  nil,
 			errors:     fault.New(true),
@@ -675,9 +650,9 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
 		},
 		{
 			name:       "valid",
-			driveID:    "drive1",
+			driveID:    drv.id,
 			refreshInt: 1 * time.Hour,
-			itemPager:  &mock.EnumerateDriveItemsDelta{},
+			itemPager:  &enumerateDriveItemsDelta{},
 			errors:     fault.New(true),
 			expectErr:  require.NoError,
 		},
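The rewritten url_cache cases lean on package-local fixtures (drive, nextPage, aPage, aReset, aPageWReset, fileID, fileWURLAtRoot, folderAtRoot, deltaURL) from the drive package's helper_test.go instead of the shared mock package. Their definitions are not part of this diff; roughly, the call sites above assume shapes like the following (a sketch only, and the real helpers may differ):

// Hypothetical sketch of the helper_test.go page builders; nextPage is
// assumed to mirror the shared mock.NextPage struct (Items plus a Reset flag).
func aPage(items ...models.DriveItemable) nextPage {
	return nextPage{Items: items}
}

// aReset marks a page that tells the consumer to drop accumulated state,
// mimicking a delta-token reset from graph.
func aReset() nextPage {
	return nextPage{Items: []models.DriveItemable{}, Reset: true}
}

// aPageWReset carries both items and the reset flag in one page.
func aPageWReset(items ...models.DriveItemable) nextPage {
	return nextPage{Items: items, Reset: true}
}

fileID(n) presumably wraps the id("file", n) generator added to handlers.go below, which is also why the sharepoint path constant further down becomes "id_drive".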
diff --git a/src/internal/m365/service/onedrive/mock/handlers.go b/src/internal/m365/service/onedrive/mock/handlers.go
index 28a07ebc9..13be790d0 100644
--- a/src/internal/m365/service/onedrive/mock/handlers.go
+++ b/src/internal/m365/service/onedrive/mock/handlers.go
@@ -1,5 +1,16 @@
 package mock
 
+// ---------------------------------------------------------------------------
+// >>> TODO <<<
+// https://github.com/alcionai/corso/issues/4846
+// This file's functions are duplicated in /drive/helper_test.go, which
+// should act as the clear primary owner of that functionality. However,
+// packages outside of /drive (such as sharepoint) still depend on these
+// helpers in their tests. We'll want to unify the two at some point.
+// In the meantime, make sure you reference and update the correct set
+// of helpers (prefer the /drive version over this one).
+// ---------------------------------------------------------------------------
+
 import (
 	"context"
 	"fmt"
@@ -50,7 +61,6 @@ type BackupHandler[T any] struct {
 	Service  path.ServiceType
 	Category path.CategoryType
 
-	DrivePagerV pagers.NonDeltaHandler[models.Driveable]
 	// driveID -> itemPager
 	ItemPagerV map[string]pagers.DeltaHandler[models.DriveItemable]
@@ -126,11 +136,9 @@ func DefaultSharePointBH(resourceOwner string) *BackupHandler[models.DriveItemab
 func DefaultDriveBHWith(
 	resource string,
-	drivePager *apiMock.Pager[models.Driveable],
 	enumerator EnumerateDriveItemsDelta,
 ) *BackupHandler[models.DriveItemable] {
 	mbh := DefaultOneDriveBH(resource)
-	mbh.DrivePagerV = drivePager
 	mbh.DriveItemEnumeration = enumerator
 
 	return mbh
@@ -168,7 +176,7 @@ func (h BackupHandler[T]) ServiceCat() (path.ServiceType, path.CategoryType) {
 }
 
 func (h BackupHandler[T]) NewDrivePager(string, []string) pagers.NonDeltaHandler[models.Driveable] {
-	return h.DrivePagerV
+	return h.DriveItemEnumeration.DrivePager()
 }
 
 func (h BackupHandler[T]) FormatDisplayPath(_ string, pb *path.Builder) string {
@@ -333,18 +341,18 @@ type NextPage struct {
 }
 
 type EnumerateDriveItemsDelta struct {
-	DrivePagers map[string]*DriveDeltaEnumerator
+	DrivePagers map[string]*DeltaDriveEnumerator
 }
 
 func DriveEnumerator(
-	ds ...*DriveDeltaEnumerator,
+	ds ...*DeltaDriveEnumerator,
 ) EnumerateDriveItemsDelta {
 	enumerator := EnumerateDriveItemsDelta{
-		DrivePagers: map[string]*DriveDeltaEnumerator{},
+		DrivePagers: map[string]*DeltaDriveEnumerator{},
 	}
 
 	for _, drive := range ds {
-		enumerator.DrivePagers[drive.DriveID] = drive
+		enumerator.DrivePagers[drive.Drive.ID] = drive
 	}
 
 	return enumerator
@@ -359,29 +367,75 @@ func (en EnumerateDriveItemsDelta) EnumerateDriveItemsDelta(
 	return iterator.nextDelta()
 }
 
-type DriveDeltaEnumerator struct {
-	DriveID string
+func (en EnumerateDriveItemsDelta) DrivePager() *apiMock.Pager[models.Driveable] {
+	ds := []models.Driveable{}
+
+	for _, dp := range en.DrivePagers {
+		ds = append(ds, dp.Drive.Able)
+	}
+
+	return &apiMock.Pager[models.Driveable]{
+		ToReturn: []apiMock.PagerResult[models.Driveable]{
+			{Values: ds},
+		},
+	}
+}
+
+func (en EnumerateDriveItemsDelta) Drives() []*DeltaDrive {
+	ds := []*DeltaDrive{}
+
+	for _, dp := range en.DrivePagers {
+		ds = append(ds, dp.Drive)
+	}
+
+	return ds
+}
+
+type DeltaDrive struct {
+	ID   string
+	Able models.Driveable
+}
+
+func Drive(driveSuffix ...any) *DeltaDrive {
+	driveID := id("drive", driveSuffix...)
+
+	able := models.NewDrive()
+	able.SetId(ptr.To(driveID))
+	able.SetName(ptr.To(name("drive", driveSuffix...)))
+
+	return &DeltaDrive{
+		ID:   driveID,
+		Able: able,
+	}
+}
+
+func (dd *DeltaDrive) NewEnumer() *DeltaDriveEnumerator {
+	cp := &DeltaDrive{}
+
+	*cp = *dd
+
+	return &DeltaDriveEnumerator{Drive: cp}
+}
+
+type DeltaDriveEnumerator struct {
+	Drive        *DeltaDrive
 	idx          int
 	DeltaQueries []*DeltaQuery
 	Err          error
 }
 
-func Drive(driveID string) *DriveDeltaEnumerator {
-	return &DriveDeltaEnumerator{DriveID: driveID}
-}
-
-func (dde *DriveDeltaEnumerator) With(ds ...*DeltaQuery) *DriveDeltaEnumerator {
+func (dde *DeltaDriveEnumerator) With(ds ...*DeltaQuery) *DeltaDriveEnumerator {
 	dde.DeltaQueries = ds
 	return dde
 }
 
 // WithErr adds an error that is always returned in the last delta index.
-func (dde *DriveDeltaEnumerator) WithErr(err error) *DriveDeltaEnumerator {
+func (dde *DeltaDriveEnumerator) WithErr(err error) *DeltaDriveEnumerator {
 	dde.Err = err
 	return dde
 }
 
-func (dde *DriveDeltaEnumerator) nextDelta() *DeltaQuery {
+func (dde *DeltaDriveEnumerator) nextDelta() *DeltaQuery {
 	if dde.idx == len(dde.DeltaQueries) {
 		// at the end of the enumeration, return an empty page with no items,
 		// not even the root. This is what graph api would do to signify an absence
@@ -455,7 +509,7 @@ func (dq *DeltaQuery) NextPage() ([]models.DriveItemable, bool, bool) {
 	}
 
 	np := dq.Pages[dq.idx]
-	dq.idx = dq.idx + 1
+	dq.idx++
 
 	return np.Items, np.Reset, false
 }
@@ -598,3 +652,49 @@ func (h *RestoreHandler) GetRootFolder(
 ) (models.DriveItemable, error) {
 	return models.NewDriveItem(), nil
 }
+
+// assumption is only one suffix per id. Mostly using
+// the variadic as an "optional" extension.
+func id(v string, suffixes ...any) string {
+	id := fmt.Sprintf("id_%s", v)
+
+	// a bit weird, but acts as a quality-of-life allowance
+	// so that some funcs can take in the `file`, `folder`,
+	// etc. monikers as the suffix without producing weird
+	// outputs.
+	if len(suffixes) == 1 {
+		sfx0, ok := suffixes[0].(string)
+		if ok && sfx0 == v {
+			return id
+		}
+	}
+
+	for _, sfx := range suffixes {
+		id = fmt.Sprintf("%s_%v", id, sfx)
+	}
+
+	return id
+}
+
+// assumption is only one suffix per name. Mostly using
+// the variadic as an "optional" extension.
+func name(v string, suffixes ...any) string {
+	name := fmt.Sprintf("n_%s", v)
+
+	// a bit weird, but acts as a quality-of-life allowance
+	// so that some funcs can take in the `file`, `folder`,
+	// etc. monikers as the suffix without producing weird
+	// outputs.
+	if len(suffixes) == 1 {
+		sfx0, ok := suffixes[0].(string)
+		if ok && sfx0 == v {
+			return name
+		}
+	}
+
+	for _, sfx := range suffixes {
+		name = fmt.Sprintf("%s_%v", name, sfx)
+	}
+
+	return name
+}
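For reference, the id and name generators above compose as follows, and the builder chain is how a single-drive enumerator gets wired up (the worked outputs follow directly from the code; `items` in the sketch is a caller-supplied stand-in):

// id/name outputs, derived from the helpers above:
//   id("drive")        -> "id_drive"
//   id("drive", 2)     -> "id_drive_2"
//   id("file", "file") -> "id_file" (the moniker suffix is dropped)
//   name("drive")      -> "n_drive"
//
// Building a single-drive enumerator, mirroring the sharepoint test below:
drv := Drive() // DeltaDrive{ID: "id_drive", Able: drive named "n_drive"}

enumerator := DriveEnumerator(
	drv.NewEnumer().With(
		Delta("notempty", nil).With(
			NextPage{Items: items})))

pager := enumerator.DrivePager() // non-delta pager over the same drives

Note that NewEnumer copies the DeltaDrive before handing it to the enumerator, so later mutation of the original drive value cannot leak into an already-constructed enumeration.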
diff --git a/src/internal/m365/service/sharepoint/backup_test.go b/src/internal/m365/service/sharepoint/backup_test.go
index acf57ff46..d87cb934f 100644
--- a/src/internal/m365/service/sharepoint/backup_test.go
+++ b/src/internal/m365/service/sharepoint/backup_test.go
@@ -28,7 +28,7 @@ import (
 
 var testBaseDrivePath = path.Builder{}.Append(
 	odConsts.DrivesPathDir,
-	"driveID1",
+	"id_drive",
 	odConsts.RootPathDir)
 
 // ---------------------------------------------------------------------------
@@ -44,12 +44,14 @@ func TestLibrariesBackupUnitSuite(t *testing.T) {
 }
 
 func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
-	anyFolder := (&selectors.SharePointBackup{}).LibraryFolders(selectors.Any())[0]
+	var (
+		anyFolder = (&selectors.SharePointBackup{}).LibraryFolders(selectors.Any())[0]
+		drv       = mock.Drive()
+	)
 
 	const (
 		tenantID = "tenant"
 		siteID   = "site"
-		driveID  = "driveID1"
 	)
 
 	pb := path.Builder{}.Append(testBaseDrivePath.Elements()...)
@@ -96,13 +98,13 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
 				paths            = map[string]string{}
 				excluded         = map[string]struct{}{}
 				collMap          = map[string]map[string]*drive.Collection{
-					driveID: {},
+					drv.ID: {},
 				}
 				topLevelPackages = map[string]struct{}{}
 			)
 
 			mbh.DriveItemEnumeration = mock.DriveEnumerator(
-				mock.Drive(driveID).With(
+				drv.NewEnumer().With(
 					mock.Delta("notempty", nil).With(mock.NextPage{Items: test.items})))
 
 			c := drive.NewCollections(
@@ -117,7 +119,7 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
 
 			_, _, err := c.PopulateDriveCollections(
 				ctx,
-				driveID,
+				drv.ID,
 				"General",
 				paths,
 				excluded,
@@ -134,10 +136,10 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
 			assert.Empty(t, topLevelPackages, "should not find package type folders")
 
 			for _, collPath := range test.expectedCollectionIDs {
-				assert.Contains(t, c.CollectionMap[driveID], collPath)
+				assert.Contains(t, c.CollectionMap[drv.ID], collPath)
 			}
 
-			for _, col := range c.CollectionMap[driveID] {
+			for _, col := range c.CollectionMap[drv.ID] {
 				assert.Contains(t, test.expectedCollectionPaths, col.FullPath().String())
 			}
 		})
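With DrivePagerV removed, the non-delta drive pager is always derived from the enumerator, so callers of DefaultDriveBHWith drop the separate pager argument. A minimal before/after sketch ("a-user" and the empty NextPage are illustrative only):

// before: the drive pager was wired separately and could drift from the
// drives registered on the enumerator.
//   mbh := mock.DefaultDriveBHWith("a-user", drivePager, enumerator)

// after: one enumerator feeds both the delta enumeration and the
// non-delta drive pager, so the two can never disagree.
mbh := mock.DefaultDriveBHWith("a-user", mock.DriveEnumerator(
	mock.Drive().NewEnumer().With(
		mock.Delta("notempty", nil).With(mock.NextPage{}))))

pager := mbh.NewDrivePager("", nil) // backed by the enumerator's registered drives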