From 1f756ce34f2b154e3952d7e2e74635fdea03c7d3 Mon Sep 17 00:00:00 2001
From: Keepers
Date: Fri, 10 Nov 2023 09:19:16 -0700
Subject: [PATCH] support debug reading out delta files (#4629)

#### Does this PR need a docs update or release note?

- [x] :no_entry: No

#### Type of change

- [x] :sunflower: Feature

#### Test Plan

- [x] :muscle: Manual
---
 CHANGELOG.md                                   |    1 +
 src/.golangci.yml                              |    3 +
 src/cli/cli.go                                 |    2 +
 src/cli/debug/debug.go                         |   27 +-
 src/cli/debug/exchange.go                      |   10 +-
 src/cli/debug/groups.go                        |   10 +-
 src/cli/debug/onedrive.go                      |   10 +-
 src/cli/debug/sharepoint.go                    |   10 +-
 src/cli/print/print.go                         |   22 +
 src/internal/kopia/base_finder.go              |    8 +
 src/internal/kopia/wrapper.go                  |    6 +-
 src/internal/m365/backup.go                    |    7 +-
 .../m365/collection/drive/collections.go       |  131 +-
 .../m365/collection/drive/collections_test.go  | 2758 +++++++++--------
 src/internal/m365/collection/drive/debug.go    |   32 +
 .../m365/collection/drive/url_cache_test.go    |    2 +-
 .../m365/collection/exchange/debug.go          |   17 +
 src/internal/m365/collection/groups/debug.go   |   17 +
 src/internal/m365/debug.go                     |   39 +
 src/internal/m365/mock/connector.go            |    3 +-
 src/internal/operations/inject/inject.go       |   11 +-
 src/internal/operations/inject/mock/inject.go  |    3 +-
 src/internal/operations/manifests.go           |    2 +-
 src/pkg/fault/alert.go                         |    4 +
 src/pkg/fault/fault.go                         |    1 -
 src/pkg/repository/data_providers.go           |   12 +
 src/pkg/repository/debug.go                    |   86 +
 src/pkg/repository/repository.go               |    1 +
 src/pkg/store/metadata.go                      |   12 +
 29 files changed, 1963 insertions(+), 1284 deletions(-)
 create mode 100644 src/internal/m365/collection/drive/debug.go
 create mode 100644 src/internal/m365/collection/exchange/debug.go
 create mode 100644 src/internal/m365/collection/groups/debug.go
 create mode 100644 src/internal/m365/debug.go
 create mode 100644 src/pkg/repository/debug.go
 create mode 100644 src/pkg/store/metadata.go

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0ff1ca07a..ddf9ecde0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -12,6 +12,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ### Fixed
 - Handle OneDrive folders being deleted and recreated midway through a backup
+- Automatically re-run a full delta query on incremental backups if the prior backup is found to have malformed prior-state information.
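The changelog entry above corresponds to the metadata validation this patch adds in `src/internal/m365/collection/drive/collections.go` (`deserializeAndValidateMetadata`): a drive's delta token is kept only when every previously-known folder path recorded for that drive is non-empty; otherwise the token is dropped so the next backup enumerates that drive in full. Below is a minimal, standalone sketch of that rule, not the patch's actual code; `dropInvalidDeltas` and the sample drive/folder IDs are illustrative names only.

```go
package main

import "fmt"

// dropInvalidDeltas removes the delta token for any drive whose previous-path
// state looks malformed: no recorded paths at all, or an empty path for some
// folder. Without a token, the next incremental falls back to a full delta
// enumeration for that drive. Sketch only; the patch's implementation also
// logs the drop and raises alerts on previous-path collisions.
func dropInvalidDeltas(
	deltas map[string]string, // driveID -> delta token
	prevPaths map[string]map[string]string, // driveID -> folderID -> previous path
) {
	for driveID := range deltas {
		paths := prevPaths[driveID]
		if len(paths) == 0 {
			delete(deltas, driveID)
			continue
		}

		for _, p := range paths {
			if len(p) == 0 {
				// one bad folder path invalidates the whole drive's token
				delete(deltas, driveID)
				break
			}
		}
	}
}

func main() {
	deltas := map[string]string{"drive1": "tokenA", "drive2": "tokenB"}
	prevPaths := map[string]map[string]string{
		"drive1": {"folder1": "/root/folder1"},
		"drive2": {"folder2": ""}, // malformed previous state
	}

	dropInvalidDeltas(deltas, prevPaths)
	fmt.Println(deltas) // map[drive1:tokenA] -- drive2 will re-enumerate in full
}
```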
## [v0.15.0] (beta) - 2023-10-31 diff --git a/src/.golangci.yml b/src/.golangci.yml index da54f6217..7cb697ba9 100644 --- a/src/.golangci.yml +++ b/src/.golangci.yml @@ -125,6 +125,9 @@ issues: linters: - forbidigo text: "context.(Background|TODO)" + - path: internal/m365/collection/drive/collections_test.go + linters: + - lll - path: internal/m365/graph/betasdk linters: - wsl diff --git a/src/cli/cli.go b/src/cli/cli.go index 9b6eae05c..3ca7f0cbb 100644 --- a/src/cli/cli.go +++ b/src/cli/cli.go @@ -11,6 +11,7 @@ import ( "github.com/alcionai/corso/src/cli/backup" "github.com/alcionai/corso/src/cli/config" + "github.com/alcionai/corso/src/cli/debug" "github.com/alcionai/corso/src/cli/export" "github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/help" @@ -125,6 +126,7 @@ func BuildCommandTree(cmd *cobra.Command) { backup.AddCommands(cmd) restore.AddCommands(cmd) export.AddCommands(cmd) + debug.AddCommands(cmd) help.AddCommands(cmd) } diff --git a/src/cli/debug/debug.go b/src/cli/debug/debug.go index 4546b55fc..437683e9d 100644 --- a/src/cli/debug/debug.go +++ b/src/cli/debug/debug.go @@ -3,11 +3,13 @@ package debug import ( "context" + "github.com/alcionai/clues" "github.com/spf13/cobra" "github.com/alcionai/corso/src/cli/flags" . "github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/utils" + "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/selectors" ) @@ -31,9 +33,9 @@ func AddCommands(cmd *cobra.Command) { utils.AddCommand(debugC, subCommand, utils.MarkDebugCommand()) for _, addTo := range debugCommands { - addTo(subCommand) - flags.AddAllProviderFlags(subCommand) - flags.AddAllStorageFlags(subCommand) + servCmd := addTo(subCommand) + flags.AddAllProviderFlags(servCmd) + flags.AddAllStorageFlags(servCmd) } } } @@ -85,13 +87,15 @@ func handleMetadataFilesCmd(cmd *cobra.Command, args []string) error { // runners // --------------------------------------------------------------------------- -func runMetadataFiles( +func genericMetadataFiles( ctx context.Context, cmd *cobra.Command, args []string, sel selectors.Selector, - debugID, serviceName string, + backupID string, ) error { + ctx = clues.Add(ctx, "backup_id", backupID) + r, _, err := utils.GetAccountAndConnect(ctx, cmd, sel.PathService()) if err != nil { return Only(ctx, err) @@ -99,7 +103,18 @@ func runMetadataFiles( defer utils.CloseRepo(ctx, r) - // TODO: read and print out all metadata files in the debug + // read metadata + files, err := r.GetBackupMetadata(ctx, sel, backupID, fault.New(true)) + if err != nil { + return Only(ctx, clues.Wrap(err, "retrieving metadata files")) + } + + for _, file := range files { + Infof(ctx, "\n------------------------------") + Info(ctx, file.Name) + Info(ctx, file.Path) + Pretty(ctx, file.Data) + } return nil } diff --git a/src/cli/debug/exchange.go b/src/cli/debug/exchange.go index 379d62b8e..adc6c19df 100644 --- a/src/cli/debug/exchange.go +++ b/src/cli/debug/exchange.go @@ -59,13 +59,17 @@ func metadataFilesExchangeCmd(cmd *cobra.Command, args []string) error { return nil } - // opts := utils.MakeExchangeOpts(cmd) - if flags.RunModeFV == flags.RunModeFlagTest { return nil } sel := selectors.NewExchangeBackup([]string{"unused-placeholder"}) + sel.Include(sel.AllData()) - return runMetadataFiles(ctx, cmd, args, sel.Selector, flags.BackupIDFV, "Exchange") + return genericMetadataFiles( + ctx, + cmd, + args, + sel.Selector, + flags.BackupIDFV) } diff --git a/src/cli/debug/groups.go b/src/cli/debug/groups.go index 
a76c12930..3335e9e0c 100644 --- a/src/cli/debug/groups.go +++ b/src/cli/debug/groups.go @@ -60,13 +60,17 @@ func metadataFilesGroupsCmd(cmd *cobra.Command, args []string) error { return nil } - // opts := utils.MakeGroupsOpts(cmd) - if flags.RunModeFV == flags.RunModeFlagTest { return nil } sel := selectors.NewGroupsBackup([]string{"unused-placeholder"}) + sel.Include(sel.AllData()) - return runMetadataFiles(ctx, cmd, args, sel.Selector, flags.BackupIDFV, "Groups") + return genericMetadataFiles( + ctx, + cmd, + args, + sel.Selector, + flags.BackupIDFV) } diff --git a/src/cli/debug/onedrive.go b/src/cli/debug/onedrive.go index 48b7b2a51..902a1f748 100644 --- a/src/cli/debug/onedrive.go +++ b/src/cli/debug/onedrive.go @@ -59,13 +59,17 @@ func metadataFilesOneDriveCmd(cmd *cobra.Command, args []string) error { return nil } - // opts := utils.MakeOneDriveOpts(cmd) - if flags.RunModeFV == flags.RunModeFlagTest { return nil } sel := selectors.NewOneDriveBackup([]string{"unused-placeholder"}) + sel.Include(sel.AllData()) - return runMetadataFiles(ctx, cmd, args, sel.Selector, flags.BackupIDFV, "OneDrive") + return genericMetadataFiles( + ctx, + cmd, + args, + sel.Selector, + flags.BackupIDFV) } diff --git a/src/cli/debug/sharepoint.go b/src/cli/debug/sharepoint.go index 8aa67122f..dd6a18383 100644 --- a/src/cli/debug/sharepoint.go +++ b/src/cli/debug/sharepoint.go @@ -59,13 +59,17 @@ func metadataFilesSharePointCmd(cmd *cobra.Command, args []string) error { return nil } - // opts := utils.MakeSharePointOpts(cmd) - if flags.RunModeFV == flags.RunModeFlagTest { return nil } sel := selectors.NewSharePointBackup([]string{"unused-placeholder"}) + sel.Include(sel.LibraryFolders(selectors.Any())) - return runMetadataFiles(ctx, cmd, args, sel.Selector, flags.BackupIDFV, "SharePoint") + return genericMetadataFiles( + ctx, + cmd, + args, + sel.Selector, + flags.BackupIDFV) } diff --git a/src/cli/print/print.go b/src/cli/print/print.go index 9c456b65d..dfe8a4806 100644 --- a/src/cli/print/print.go +++ b/src/cli/print/print.go @@ -119,10 +119,21 @@ func Infof(ctx context.Context, t string, s ...any) { outf(ctx, getRootCmd(ctx).ErrOrStderr(), t, s...) } +// Pretty prettifies and prints the value. +func Pretty(ctx context.Context, a any) { + if a == nil { + Err(ctx, "") + return + } + + printPrettyJSON(getRootCmd(ctx).ErrOrStderr(), a) +} + // PrettyJSON prettifies and prints the value. func PrettyJSON(ctx context.Context, p minimumPrintabler) { if p == nil { Err(ctx, "") + return } outputJSON(getRootCmd(ctx).ErrOrStderr(), p, outputAsJSONDebug) @@ -281,3 +292,14 @@ func printJSON(w io.Writer, a any) { fmt.Fprintln(w, string(pretty.Pretty(bs))) } + +// output to stdout the list of printable structs as prettified json. 
+func printPrettyJSON(w io.Writer, a any) { + bs, err := json.MarshalIndent(a, "", " ") + if err != nil { + fmt.Fprintf(w, "error formatting results to json: %v\n", err) + return + } + + fmt.Fprintln(w, string(pretty.Pretty(bs))) +} diff --git a/src/internal/kopia/base_finder.go b/src/internal/kopia/base_finder.go index c058524cf..d2f2b1388 100644 --- a/src/internal/kopia/base_finder.go +++ b/src/internal/kopia/base_finder.go @@ -55,6 +55,14 @@ type BackupBase struct { Reasons []identity.Reasoner } +func (bb BackupBase) GetReasons() []identity.Reasoner { + return bb.Reasons +} + +func (bb BackupBase) GetSnapshotID() manifest.ID { + return bb.ItemDataSnapshot.ID +} + func (bb BackupBase) GetSnapshotTag(key string) (string, bool) { k, _ := makeTagKV(key) v, ok := bb.ItemDataSnapshot.Tags[k] diff --git a/src/internal/kopia/wrapper.go b/src/internal/kopia/wrapper.go index d7d728762..38bd5682f 100644 --- a/src/internal/kopia/wrapper.go +++ b/src/internal/kopia/wrapper.go @@ -376,11 +376,15 @@ func getDir( return nil, clues.Wrap(ErrNoRestorePath, "getting directory").WithClues(ctx) } + toGet := dirPath.PopFront() + + ctx = clues.Add(ctx, "entry_path", toGet) + // GetNestedEntry handles nil properly. e, err := snapshotfs.GetNestedEntry( ctx, snapshotRoot, - encodeElements(dirPath.PopFront().Elements()...)) + encodeElements(toGet.Elements()...)) if err != nil { if isErrEntryNotFound(err) { err = clues.Stack(data.ErrNotFound, err).WithClues(ctx) diff --git a/src/internal/m365/backup.go b/src/internal/m365/backup.go index 204232fb0..02ecd2579 100644 --- a/src/internal/m365/backup.go +++ b/src/internal/m365/backup.go @@ -8,7 +8,6 @@ import ( "github.com/alcionai/corso/src/internal/common/prefixmatcher" "github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/diagnostics" - "github.com/alcionai/corso/src/internal/kopia" kinject "github.com/alcionai/corso/src/internal/kopia/inject" "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/internal/m365/service/exchange" @@ -179,7 +178,7 @@ func verifyBackupInputs(sels selectors.Selector, cachedIDs []string) error { func (ctrl *Controller) GetMetadataPaths( ctx context.Context, r kinject.RestoreProducer, - base kopia.BackupBase, + base inject.ReasonAndSnapshotIDer, errs *fault.Bus, ) ([]path.RestorePaths, error) { var ( @@ -187,12 +186,12 @@ func (ctrl *Controller) GetMetadataPaths( err error ) - for _, reason := range base.Reasons { + for _, reason := range base.GetReasons() { filePaths := [][]string{} switch true { case reason.Service() == path.GroupsService && reason.Category() == path.LibrariesCategory: - filePaths, err = groups.MetadataFiles(ctx, reason, r, base.ItemDataSnapshot.ID, errs) + filePaths, err = groups.MetadataFiles(ctx, reason, r, base.GetSnapshotID(), errs) if err != nil { return nil, err } diff --git a/src/internal/m365/collection/drive/collections.go b/src/internal/m365/collection/drive/collections.go index 6e61e897f..4469ded21 100644 --- a/src/internal/m365/collection/drive/collections.go +++ b/src/internal/m365/collection/drive/collections.go @@ -72,7 +72,88 @@ func NewCollections( } } -func deserializeMetadata( +func deserializeAndValidateMetadata( + ctx context.Context, + cols []data.RestoreCollection, + fb *fault.Bus, +) (map[string]string, map[string]map[string]string, bool, error) { + deltas, prevs, canUse, err := DeserializeMetadata(ctx, cols) + if err != nil || !canUse { + return deltas, prevs, false, clues.Stack(err).OrNil() + } + + // Go through and remove delta tokens 
if we didn't have any paths for them + // or one or more paths are empty (incorrect somehow). This will ensure we + // don't accidentally try to pull in delta results when we should have + // enumerated everything instead. + // + // Loop over the set of previous deltas because it's alright to have paths + // without a delta but not to have a delta without paths. This way ensures + // we check at least all the path sets for the deltas we have. + for drive := range deltas { + ictx := clues.Add(ctx, "drive_id", drive) + + paths := prevs[drive] + if len(paths) == 0 { + logger.Ctx(ictx).Info("dropping drive delta due to 0 prev paths") + delete(deltas, drive) + } + + // Drives have only a single delta token. If we find any folder that + // seems like the path is bad we need to drop the entire token and start + // fresh. Since we know the token will be gone we can also stop checking + // for other possibly incorrect folder paths. + for _, prevPath := range paths { + if len(prevPath) == 0 { + logger.Ctx(ictx).Info("dropping drive delta due to 0 len path") + delete(deltas, drive) + + break + } + } + } + + alertIfPrevPathsHaveCollisions(ctx, prevs, fb) + + return deltas, prevs, canUse, nil +} + +func alertIfPrevPathsHaveCollisions( + ctx context.Context, + prevs map[string]map[string]string, + fb *fault.Bus, +) { + for driveID, folders := range prevs { + prevPathCollisions := map[string]string{} + + for fid, prev := range folders { + if otherID, collision := prevPathCollisions[prev]; collision { + ctx = clues.Add( + ctx, + "collision_folder_id_1", fid, + "collision_folder_id_2", otherID, + "collision_drive_id", driveID, + "collision_prev_path", path.LoggableDir(prev)) + + fb.AddAlert(ctx, fault.NewAlert( + fault.AlertPreviousPathCollision, + "", // no namespace + "", // no item id + "previousPaths", + map[string]any{ + "collision_folder_id_1": fid, + "collision_folder_id_2": otherID, + "collision_drive_id": driveID, + "collision_prev_path": prev, + })) + } + + prevPathCollisions[prev] = fid + } + } +} + +func DeserializeMetadata( ctx context.Context, cols []data.RestoreCollection, ) (map[string]string, map[string]map[string]string, bool, error) { @@ -96,7 +177,7 @@ func deserializeMetadata( for breakLoop := false; !breakLoop; { select { case <-ctx.Done(): - return nil, nil, false, clues.Wrap(ctx.Err(), "deserialzing previous backup metadata").WithClues(ctx) + return nil, nil, false, clues.Wrap(ctx.Err(), "deserializing previous backup metadata").WithClues(ctx) case item, ok := <-items: if !ok { @@ -137,32 +218,6 @@ func deserializeMetadata( } } } - - // Go through and remove delta tokens if we didn't have any paths for them - // or one or more paths are empty (incorrect somehow). This will ensure we - // don't accidentally try to pull in delta results when we should have - // enumerated everything instead. - // - // Loop over the set of previous deltas because it's alright to have paths - // without a delta but not to have a delta without paths. This way ensures - // we check at least all the path sets for the deltas we have. - for drive := range prevDeltas { - paths := prevFolders[drive] - if len(paths) == 0 { - delete(prevDeltas, drive) - } - - // Drives have only a single delta token. If we find any folder that - // seems like the path is bad we need to drop the entire token and start - // fresh. Since we know the token will be gone we can also stop checking - // for other possibly incorrect folder paths. 
- for _, prevPath := range paths { - if len(prevPath) == 0 { - delete(prevDeltas, drive) - break - } - } - } } // if reads from items failed, return empty but no error @@ -215,7 +270,7 @@ func (c *Collections) Get( ssmb *prefixmatcher.StringSetMatchBuilder, errs *fault.Bus, ) ([]data.BackupCollection, bool, error) { - prevDriveIDToDelta, oldPrevPathsByDriveID, canUsePrevBackup, err := deserializeMetadata(ctx, prevMetadata) + deltasByDriveID, prevPathsByDriveID, canUsePrevBackup, err := deserializeAndValidateMetadata(ctx, prevMetadata, errs) if err != nil { return nil, false, err } @@ -224,7 +279,7 @@ func (c *Collections) Get( driveTombstones := map[string]struct{}{} - for driveID := range oldPrevPathsByDriveID { + for driveID := range prevPathsByDriveID { driveTombstones[driveID] = struct{}{} } @@ -257,8 +312,8 @@ func (c *Collections) Get( "drive_name", clues.Hide(driveName)) excludedItemIDs = map[string]struct{}{} - oldPrevPaths = oldPrevPathsByDriveID[driveID] - prevDeltaLink = prevDriveIDToDelta[driveID] + oldPrevPaths = prevPathsByDriveID[driveID] + prevDeltaLink = deltasByDriveID[driveID] // packagePaths is keyed by folder paths to a parent directory // which is marked as a package by its driveItem GetPackage @@ -437,6 +492,8 @@ func (c *Collections) Get( collections = append(collections, coll) } + alertIfPrevPathsHaveCollisions(ctx, driveIDToPrevPaths, errs) + // add metadata collections pathPrefix, err := c.handler.MetadataPathPrefix(c.tenantID) if err != nil { @@ -996,13 +1053,13 @@ func includePath(ctx context.Context, dsc dirScopeChecker, folderPath path.Path) } func updatePath(paths map[string]string, id, newPath string) { - oldPath := paths[id] - if len(oldPath) == 0 { + currPath := paths[id] + if len(currPath) == 0 { paths[id] = newPath return } - if oldPath == newPath { + if currPath == newPath { return } @@ -1011,10 +1068,10 @@ func updatePath(paths map[string]string, id, newPath string) { // other components should take care of that. We do need to ensure that the // resulting map contains all folders though so we know the next time around. 
for folderID, p := range paths { - if !strings.HasPrefix(p, oldPath) { + if !strings.HasPrefix(p, currPath) { continue } - paths[folderID] = strings.Replace(p, oldPath, newPath, 1) + paths[folderID] = strings.Replace(p, currPath, newPath, 1) } } diff --git a/src/internal/m365/collection/drive/collections_test.go b/src/internal/m365/collection/drive/collections_test.go index 2467e100d..2ca7f4e47 100644 --- a/src/internal/m365/collection/drive/collections_test.go +++ b/src/internal/m365/collection/drive/collections_test.go @@ -2,10 +2,10 @@ package drive import ( "context" + "fmt" "testing" "github.com/alcionai/clues" - "github.com/google/uuid" "github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -15,6 +15,7 @@ import ( "github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/prefixmatcher" pmMock "github.com/alcionai/corso/src/internal/common/prefixmatcher/mock" + "github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/data" dataMock "github.com/alcionai/corso/src/internal/data/mock" "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata" @@ -32,82 +33,256 @@ import ( "github.com/alcionai/corso/src/pkg/services/m365/api/pagers" ) +// --------------------------------------------------------------------------- +// helpers +// --------------------------------------------------------------------------- + type statePath struct { state data.CollectionState - curPath path.Path + currPath path.Path prevPath path.Path } -func getExpectedStatePathGenerator( - t *testing.T, - bh BackupHandler, - tenant, base string, -) func(data.CollectionState, ...string) statePath { - return func(state data.CollectionState, pths ...string) statePath { - var ( - p1 path.Path - p2 path.Path - pp path.Path - cp path.Path - err error - ) +func toODPath(t *testing.T, s string) path.Path { + spl := path.Split(s) + p, err := path.Builder{}. + Append(spl[4:]...). + ToDataLayerPath( + spl[0], + spl[2], + path.OneDriveService, + path.FilesCategory, + false) + require.NoError(t, err, clues.ToCore(err)) - if state != data.MovedState { - require.Len(t, pths, 1, "invalid number of paths to getExpectedStatePathGenerator") - } else { - require.Len(t, pths, 2, "invalid number of paths to getExpectedStatePathGenerator") - pb := path.Builder{}.Append(path.Split(base + pths[1])...) - p2, err = bh.CanonicalPath(pb, tenant) - require.NoError(t, err, clues.ToCore(err)) - } + return p +} - pb := path.Builder{}.Append(path.Split(base + pths[0])...) - p1, err = bh.CanonicalPath(pb, tenant) - require.NoError(t, err, clues.ToCore(err)) - - switch state { - case data.NewState: - cp = p1 - case data.NotMovedState: - cp = p1 - pp = p1 - case data.DeletedState: - pp = p1 - case data.MovedState: - pp = p2 - cp = p1 - } - - return statePath{ - state: state, - curPath: cp, - prevPath: pp, - } +func asDeleted(t *testing.T, prev string) statePath { + return statePath{ + state: data.DeletedState, + prevPath: toODPath(t, prev), } } -func getExpectedPathGenerator( - t *testing.T, - bh BackupHandler, - tenant, base string, -) func(string) string { - return func(p string) string { - pb := path.Builder{}.Append(path.Split(base + p)...) 
- cp, err := bh.CanonicalPath(pb, tenant) - require.NoError(t, err, clues.ToCore(err)) - - return cp.String() +func asMoved(t *testing.T, prev, curr string) statePath { + return statePath{ + state: data.MovedState, + prevPath: toODPath(t, prev), + currPath: toODPath(t, curr), } } -type OneDriveCollectionsUnitSuite struct { - tester.Suite +func asNew(t *testing.T, curr string) statePath { + return statePath{ + state: data.NewState, + currPath: toODPath(t, curr), + } } -func TestOneDriveCollectionsUnitSuite(t *testing.T) { - suite.Run(t, &OneDriveCollectionsUnitSuite{Suite: tester.NewUnitSuite(t)}) +func asNotMoved(t *testing.T, p string) statePath { + return statePath{ + state: data.NotMovedState, + prevPath: toODPath(t, p), + currPath: toODPath(t, p), + } } +type itemType int + +const ( + isFile itemType = 1 + isFolder itemType = 2 + isPackage itemType = 3 +) + +func coreItem( + id, name, parentPath, parentID string, + it itemType, +) *models.DriveItem { + item := models.NewDriveItem() + item.SetName(&name) + item.SetId(&id) + + parentReference := models.NewItemReference() + parentReference.SetPath(&parentPath) + parentReference.SetId(&parentID) + item.SetParentReference(parentReference) + + switch it { + case isFile: + item.SetFile(models.NewFile()) + case isFolder: + item.SetFolder(models.NewFolder()) + case isPackage: + item.SetPackageEscaped(models.NewPackageEscaped()) + } + + return item +} + +func driveItem( + id, name, parentPath, parentID string, + it itemType, +) models.DriveItemable { + return coreItem(id, name, parentPath, parentID, it) +} + +func fileItem( + id, name, parentPath, parentID, url string, + deleted bool, +) models.DriveItemable { + di := driveItem(id, name, parentPath, parentID, isFile) + di.SetAdditionalData(map[string]any{ + "@microsoft.graph.downloadUrl": url, + }) + + if deleted { + di.SetDeleted(models.NewDeleted()) + } + + return di +} + +func malwareItem( + id, name, parentPath, parentID string, + it itemType, +) models.DriveItemable { + c := coreItem(id, name, parentPath, parentID, it) + + mal := models.NewMalware() + malStr := "test malware" + mal.SetDescription(&malStr) + + c.SetMalware(mal) + + return c +} + +func driveRootItem(id string) models.DriveItemable { + name := "root" + item := models.NewDriveItem() + item.SetName(&name) + item.SetId(&id) + item.SetRoot(models.NewRoot()) + item.SetFolder(models.NewFolder()) + + return item +} + +// delItem creates a DriveItemable that is marked as deleted. path must be set +// to the base drive path. +func delItem( + id string, + parentPath string, + parentID string, + it itemType, +) models.DriveItemable { + item := models.NewDriveItem() + item.SetId(&id) + item.SetDeleted(models.NewDeleted()) + + parentReference := models.NewItemReference() + parentReference.SetId(&parentID) + item.SetParentReference(parentReference) + + switch it { + case isFile: + item.SetFile(models.NewFile()) + case isFolder: + item.SetFolder(models.NewFolder()) + case isPackage: + item.SetPackageEscaped(models.NewPackageEscaped()) + } + + return item +} + +func id(v string) string { + return fmt.Sprintf("id_%s_0", v) +} + +func idx(v string, sfx any) string { + return fmt.Sprintf("id_%s_%v", v, sfx) +} + +func name(v string) string { + return fmt.Sprintf("n_%s_0", v) +} + +func namex(v string, sfx any) string { + return fmt.Sprintf("n_%s_%v", v, sfx) +} + +func toPath(elems ...string) string { + es := []string{} + for _, elem := range elems { + es = append(es, path.Split(elem)...) 
+ } + + switch len(es) { + case 0: + return "" + case 1: + return es[0] + default: + return path.Builder{}.Append(es...).String() + } +} + +func fullPath(driveID any, elems ...string) string { + return toPath(append( + []string{ + tenant, + path.OneDriveService.String(), + user, + path.FilesCategory.String(), + odConsts.DriveFolderPrefixBuilder(idx(drive, driveID)).String(), + }, + elems...)...) +} + +func parent(driveID any, elems ...string) string { + return toPath(append( + []string{odConsts.DriveFolderPrefixBuilder(idx(drive, driveID)).String()}, + elems...)...) +} + +// common item names +const ( + bar = "bar" + delta = "delta_url" + drive = "drive" + fanny = "fanny" + file = "file" + folder = "folder" + foo = "foo" + item = "item" + malware = "malware" + nav = "nav" + pkg = "package" + rootName = "root" + rootID = "root_id" + subfolder = "subfolder" + tenant = "t" + user = "u" +) + +var anyFolderScope = (&selectors.OneDriveBackup{}).Folders(selectors.Any())[0] + +type failingColl struct{} + +func (f failingColl) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item { + ic := make(chan data.Item) + defer close(ic) + + errs.AddRecoverable(ctx, assert.AnError) + + return ic +} +func (f failingColl) FullPath() path.Path { return nil } +func (f failingColl) FetchItemByName(context.Context, string) (data.Item, error) { return nil, nil } + func makeExcludeMap(files ...string) map[string]struct{} { delList := map[string]struct{}{} for _, file := range files { @@ -118,27 +293,25 @@ func makeExcludeMap(files ...string) map[string]struct{} { return delList } -func (suite *OneDriveCollectionsUnitSuite) TestPopulateDriveCollections() { - anyFolder := (&selectors.OneDriveBackup{}).Folders(selectors.Any())[0] +// --------------------------------------------------------------------------- +// tests +// --------------------------------------------------------------------------- - const ( - driveID = "driveID1" - tenant = "tenant" - user = "user" - folder = "/folder" - subFolder = "/subfolder" - pkg = "/package" - ) +type CollectionsUnitSuite struct { + tester.Suite +} - bh := userDriveBackupHandler{userID: user} - testBaseDrivePath := odConsts.DriveFolderPrefixBuilder("driveID1").String() - expectedPath := getExpectedPathGenerator(suite.T(), bh, tenant, testBaseDrivePath) - expectedStatePath := getExpectedStatePathGenerator(suite.T(), bh, tenant, testBaseDrivePath) +func TestCollectionsUnitSuite(t *testing.T) { + suite.Run(t, &CollectionsUnitSuite{Suite: tester.NewUnitSuite(t)}) +} + +func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { + t := suite.T() tests := []struct { name string items []models.DriveItemable - inputFolderMap map[string]string + previousPaths map[string]string topLevelPackages map[string]struct{} scope selectors.OneDriveScope expect assert.ErrorAssertionFunc @@ -155,19 +328,19 @@ func (suite *OneDriveCollectionsUnitSuite) TestPopulateDriveCollections() { { name: "Invalid item", items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("item", "item", testBaseDrivePath, "root", false, false, false), + driveRootItem(rootID), + driveItem(id(item), name(item), parent(drive), rootID, -1), }, - inputFolderMap: map[string]string{}, - scope: anyFolder, + previousPaths: map[string]string{}, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.Error, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), + rootID: asNotMoved(t, fullPath(drive)), }, expectedContainerCount: 1, 
expectedPrevPaths: map[string]string{ - "root": expectedPath(""), + rootID: fullPath(drive), }, expectedExcludes: map[string]struct{}{}, expectedTopLevelPackages: map[string]struct{}{}, @@ -175,43 +348,43 @@ func (suite *OneDriveCollectionsUnitSuite) TestPopulateDriveCollections() { { name: "Single File", items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("file", "file", testBaseDrivePath, "root", true, false, false), + driveRootItem(rootID), + driveItem(id(file), name(file), parent(drive), rootID, isFile), }, - inputFolderMap: map[string]string{}, - scope: anyFolder, + previousPaths: map[string]string{}, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), + rootID: asNotMoved(t, fullPath(drive)), }, expectedItemCount: 1, expectedFileCount: 1, expectedContainerCount: 1, // Root folder is skipped since it's always present. expectedPrevPaths: map[string]string{ - "root": expectedPath(""), + rootID: fullPath(drive), }, - expectedExcludes: makeExcludeMap("file"), + expectedExcludes: makeExcludeMap(id(file)), expectedTopLevelPackages: map[string]struct{}{}, }, { name: "Single Folder", items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), }, - inputFolderMap: map[string]string{}, - scope: anyFolder, + previousPaths: map[string]string{}, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), - "folder": expectedStatePath(data.NewState, folder), + rootID: asNotMoved(t, fullPath(drive)), + id(folder): asNew(t, fullPath(drive, name(folder))), }, expectedPrevPaths: map[string]string{ - "root": expectedPath(""), - "folder": expectedPath("/folder"), + rootID: fullPath(drive), + id(folder): fullPath(drive, name(folder)), }, expectedItemCount: 1, expectedContainerCount: 2, @@ -221,21 +394,21 @@ func (suite *OneDriveCollectionsUnitSuite) TestPopulateDriveCollections() { { name: "Single Folder created twice", // deleted a created with same name in between a backup items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("id1", "folder", testBaseDrivePath, "root", false, true, false), - driveItem("id2", "folder", testBaseDrivePath, "root", false, true, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), + driveItem(idx(folder, 2), name(folder), parent(drive), rootID, isFolder), }, - inputFolderMap: map[string]string{}, - scope: anyFolder, + previousPaths: map[string]string{}, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), - "id2": expectedStatePath(data.NewState, folder), + rootID: asNotMoved(t, fullPath(drive)), + idx(folder, 2): asNew(t, fullPath(drive, name(folder))), }, expectedPrevPaths: map[string]string{ - "root": expectedPath(""), - "id2": expectedPath("/folder"), + rootID: fullPath(drive), + idx(folder, 2): fullPath(drive, name(folder)), }, expectedItemCount: 1, expectedContainerCount: 2, @@ -245,122 +418,115 @@ func (suite *OneDriveCollectionsUnitSuite) TestPopulateDriveCollections() { { name: "Single Package", items: 
[]models.DriveItemable{ - driveRootItem("root"), - driveItem("package", "package", testBaseDrivePath, "root", false, false, true), + driveRootItem(rootID), + driveItem(id(pkg), name(pkg), parent(drive), rootID, isPackage), }, - inputFolderMap: map[string]string{}, - scope: anyFolder, + previousPaths: map[string]string{}, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), - "package": expectedStatePath(data.NewState, pkg), + rootID: asNotMoved(t, fullPath(drive)), + id(pkg): asNew(t, fullPath(drive, name(pkg))), }, expectedPrevPaths: map[string]string{ - "root": expectedPath(""), - "package": expectedPath("/package"), + rootID: fullPath(drive), + id(pkg): fullPath(drive, name(pkg)), }, expectedItemCount: 1, expectedContainerCount: 2, expectedExcludes: map[string]struct{}{}, expectedTopLevelPackages: map[string]struct{}{ - expectedPath("/package"): {}, + fullPath(drive, name(pkg)): {}, }, expectedCountPackages: 1, }, { name: "Single Package with subfolder", items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("package", "package", testBaseDrivePath, "root", false, false, true), - driveItem("folder", "folder", testBaseDrivePath+pkg, "package", false, true, false), - driveItem("subfolder", "subfolder", testBaseDrivePath+pkg, "package", false, true, false), + driveRootItem(rootID), + driveItem(id(pkg), name(pkg), parent(drive), rootID, isPackage), + driveItem(id(folder), name(folder), parent(drive, name(pkg)), id(pkg), isFolder), + driveItem(id(subfolder), name(subfolder), parent(drive, name(pkg)), id(pkg), isFolder), }, - inputFolderMap: map[string]string{}, - scope: anyFolder, + previousPaths: map[string]string{}, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), - "package": expectedStatePath(data.NewState, pkg), - "folder": expectedStatePath(data.NewState, pkg+folder), - "subfolder": expectedStatePath(data.NewState, pkg+subFolder), + rootID: asNotMoved(t, fullPath(drive)), + id(pkg): asNew(t, fullPath(drive, name(pkg))), + id(folder): asNew(t, fullPath(drive, name(pkg), name(folder))), + id(subfolder): asNew(t, fullPath(drive, name(pkg), name(subfolder))), }, expectedPrevPaths: map[string]string{ - "root": expectedPath(""), - "package": expectedPath(pkg), - "folder": expectedPath(pkg + folder), - "subfolder": expectedPath(pkg + subFolder), + rootID: fullPath(drive), + id(pkg): fullPath(drive, name(pkg)), + id(folder): fullPath(drive, name(pkg), name(folder)), + id(subfolder): fullPath(drive, name(pkg), name(subfolder)), }, expectedItemCount: 3, expectedContainerCount: 4, expectedExcludes: map[string]struct{}{}, expectedTopLevelPackages: map[string]struct{}{ - expectedPath(pkg): {}, + fullPath(drive, name(pkg)): {}, }, expectedCountPackages: 3, }, { name: "1 root file, 1 folder, 1 package, 2 files, 3 collections", items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false), - driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), - driveItem("package", "package", testBaseDrivePath, "root", false, false, true), - driveItem("fileInFolder", "fileInFolder", testBaseDrivePath+folder, "folder", true, false, false), - driveItem("fileInPackage", "fileInPackage", testBaseDrivePath+pkg, "package", true, false, 
false), + driveRootItem(rootID), + driveItem(idx(file, "inRoot"), namex(file, "inRoot"), parent(drive), rootID, isFile), + driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), + driveItem(id(pkg), name(pkg), parent(drive), rootID, isPackage), + driveItem(idx(file, "inFolder"), namex(file, "inFolder"), parent(drive, name(folder)), id(folder), isFile), + driveItem(idx(file, "inPackage"), namex(file, "inPackage"), parent(drive, name(pkg)), id(pkg), isFile), }, - inputFolderMap: map[string]string{}, - scope: anyFolder, + previousPaths: map[string]string{}, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), - "folder": expectedStatePath(data.NewState, folder), - "package": expectedStatePath(data.NewState, pkg), + rootID: asNotMoved(t, fullPath(drive)), + id(folder): asNew(t, fullPath(drive, name(folder))), + id(pkg): asNew(t, fullPath(drive, name(pkg))), }, expectedItemCount: 5, expectedFileCount: 3, expectedContainerCount: 3, expectedPrevPaths: map[string]string{ - "root": expectedPath(""), - "folder": expectedPath("/folder"), - "package": expectedPath("/package"), + rootID: fullPath(drive), + id(folder): fullPath(drive, name(folder)), + id(pkg): fullPath(drive, name(pkg)), }, expectedTopLevelPackages: map[string]struct{}{ - expectedPath("/package"): {}, + fullPath(drive, name(pkg)): {}, }, expectedCountPackages: 1, - expectedExcludes: makeExcludeMap("fileInRoot", "fileInFolder", "fileInPackage"), + expectedExcludes: makeExcludeMap(idx(file, "inRoot"), idx(file, "inFolder"), idx(file, "inPackage")), }, { name: "contains folder selector", items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false), - driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), - driveItem("subfolder", "subfolder", testBaseDrivePath+folder, "folder", false, true, false), - driveItem("folder2", "folder", testBaseDrivePath+folder+subFolder, "subfolder", false, true, false), - driveItem("package", "package", testBaseDrivePath, "root", false, false, true), - driveItem("fileInFolder", "fileInFolder", testBaseDrivePath+folder, "folder", true, false, false), - driveItem( - "fileInFolder2", - "fileInFolder2", - testBaseDrivePath+folder+subFolder+folder, - "folder2", - true, - false, - false), - driveItem("fileInFolderPackage", "fileInPackage", testBaseDrivePath+pkg, "package", true, false, false), + driveRootItem(rootID), + driveItem(idx(file, "inRoot"), namex(file, "inRoot"), parent(drive), rootID, isFile), + driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), + driveItem(id(subfolder), name(subfolder), parent(drive, name(folder)), id(folder), isFolder), + driveItem(idx(folder, 2), name(folder), parent(drive, name(folder), name(subfolder)), id(subfolder), isFolder), + driveItem(id(pkg), name(pkg), parent(drive), rootID, isPackage), + driveItem(idx(file, "inFolder"), idx(file, "inFolder"), parent(drive, name(folder)), id(folder), isFile), + driveItem(idx(file, "inFolder2"), namex(file, "inFolder2"), parent(drive, name(folder), name(subfolder), name(folder)), idx(folder, 2), isFile), + driveItem(idx(file, "inFolderPackage"), namex(file, "inPackage"), parent(drive, name(pkg)), id(pkg), isFile), }, - inputFolderMap: map[string]string{}, - scope: (&selectors.OneDriveBackup{}).Folders([]string{"folder"})[0], + previousPaths: map[string]string{}, + scope: 
(&selectors.OneDriveBackup{}).Folders([]string{name(folder)})[0], topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "folder": expectedStatePath(data.NewState, folder), - "subfolder": expectedStatePath(data.NewState, folder+subFolder), - "folder2": expectedStatePath(data.NewState, folder+subFolder+folder), + id(folder): asNew(t, fullPath(drive, name(folder))), + id(subfolder): asNew(t, fullPath(drive, name(folder), name(subfolder))), + idx(folder, 2): asNew(t, fullPath(drive, name(folder), name(subfolder), name(folder))), }, expectedItemCount: 5, expectedFileCount: 2, @@ -368,111 +534,99 @@ func (suite *OneDriveCollectionsUnitSuite) TestPopulateDriveCollections() { // just "folder" isn't added here because the include check is done on the // parent path since we only check later if something is a folder or not. expectedPrevPaths: map[string]string{ - "folder": expectedPath(folder), - "subfolder": expectedPath(folder + subFolder), - "folder2": expectedPath(folder + subFolder + folder), + id(folder): fullPath(drive, name(folder)), + id(subfolder): fullPath(drive, name(folder), name(subfolder)), + idx(folder, 2): fullPath(drive, name(folder), name(subfolder), name(folder)), }, expectedTopLevelPackages: map[string]struct{}{}, - expectedExcludes: makeExcludeMap("fileInFolder", "fileInFolder2"), + expectedExcludes: makeExcludeMap(idx(file, "inFolder"), idx(file, "inFolder2")), }, { name: "prefix subfolder selector", items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false), - driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), - driveItem("subfolder", "subfolder", testBaseDrivePath+folder, "folder", false, true, false), - driveItem("folder2", "folder", testBaseDrivePath+folder+subFolder, "subfolder", false, true, false), - driveItem("package", "package", testBaseDrivePath, "root", false, false, true), - driveItem("fileInFolder", "fileInFolder", testBaseDrivePath+folder, "folder", true, false, false), - driveItem( - "fileInFolder2", - "fileInFolder2", - testBaseDrivePath+folder+subFolder+folder, - "folder2", - true, - false, - false), - driveItem("fileInPackage", "fileInPackage", testBaseDrivePath+pkg, "package", true, false, false), + driveRootItem(rootID), + driveItem(idx(file, "inRoot"), namex(file, "inRoot"), parent(drive), rootID, isFile), + driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), + driveItem(id(subfolder), name(subfolder), parent(drive, name(folder)), id(folder), isFolder), + driveItem(idx(folder, 2), name(folder), parent(drive, name(folder), name(subfolder)), id(subfolder), isFolder), + driveItem(id(pkg), name(pkg), parent(drive), rootID, isPackage), + driveItem(idx(file, "inFolder"), idx(file, "inFolder"), parent(drive, name(folder)), id(folder), isFile), + driveItem(idx(file, "inFolder2"), namex(file, "inFolder2"), parent(drive, name(folder), name(subfolder), name(folder)), idx(folder, 2), isFile), + driveItem(idx(file, "inFolderPackage"), namex(file, "inPackage"), parent(drive, name(pkg)), id(pkg), isFile), }, - inputFolderMap: map[string]string{}, - scope: (&selectors.OneDriveBackup{}).Folders([]string{"/folder/subfolder"}, selectors.PrefixMatch())[0], + previousPaths: map[string]string{}, + scope: (&selectors.OneDriveBackup{}).Folders( + []string{toPath(name(folder), name(subfolder))}, + selectors.PrefixMatch())[0], topLevelPackages: map[string]struct{}{}, expect: assert.NoError, 
expectedCollectionIDs: map[string]statePath{ - "subfolder": expectedStatePath(data.NewState, folder+subFolder), - "folder2": expectedStatePath(data.NewState, folder+subFolder+folder), + id(subfolder): asNew(t, fullPath(drive, name(folder), name(subfolder))), + idx(folder, 2): asNew(t, fullPath(drive, name(folder), name(subfolder), name(folder))), }, expectedItemCount: 3, expectedFileCount: 1, expectedContainerCount: 2, expectedPrevPaths: map[string]string{ - "subfolder": expectedPath(folder + subFolder), - "folder2": expectedPath(folder + subFolder + folder), + id(subfolder): fullPath(drive, name(folder), name(subfolder)), + idx(folder, 2): fullPath(drive, name(folder), name(subfolder), name(folder)), }, expectedTopLevelPackages: map[string]struct{}{}, - expectedExcludes: makeExcludeMap("fileInFolder2"), + expectedExcludes: makeExcludeMap(idx(file, "inFolder2")), }, { name: "match subfolder selector", items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false), - driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), - driveItem("subfolder", "subfolder", testBaseDrivePath+folder, "folder", false, true, false), - driveItem("package", "package", testBaseDrivePath, "root", false, false, true), - driveItem("fileInFolder", "fileInFolder", testBaseDrivePath+folder, "folder", true, false, false), - driveItem( - "fileInSubfolder", - "fileInSubfolder", - testBaseDrivePath+folder+subFolder, - "subfolder", - true, - false, - false), - driveItem("fileInPackage", "fileInPackage", testBaseDrivePath+pkg, "package", true, false, false), + driveRootItem(rootID), + driveItem(id(file), name(file), parent(drive), rootID, isFile), + driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), + driveItem(id(subfolder), name(subfolder), parent(drive, name(folder)), id(folder), isFolder), + driveItem(id(pkg), name(pkg), parent(drive), rootID, isPackage), + driveItem(idx(file, 1), namex(file, 1), parent(drive, name(folder)), id(folder), isFile), + driveItem(idx(file, "inSubfolder"), namex(file, "inSubfolder"), parent(drive, name(folder), name(subfolder)), id(subfolder), isFile), + driveItem(idx(file, 9), namex(file, 9), parent(drive, name(pkg)), id(pkg), isFile), }, - inputFolderMap: map[string]string{}, - scope: (&selectors.OneDriveBackup{}).Folders([]string{"folder/subfolder"})[0], + previousPaths: map[string]string{}, + scope: (&selectors.OneDriveBackup{}).Folders([]string{toPath(name(folder), name(subfolder))})[0], topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "subfolder": expectedStatePath(data.NewState, folder+subFolder), + id(subfolder): asNew(t, fullPath(drive, name(folder), name(subfolder))), }, expectedItemCount: 2, expectedFileCount: 1, expectedContainerCount: 1, // No child folders for subfolder so nothing here. 
expectedPrevPaths: map[string]string{ - "subfolder": expectedPath(folder + subFolder), + id(subfolder): fullPath(drive, name(folder), name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, - expectedExcludes: makeExcludeMap("fileInSubfolder"), + expectedExcludes: makeExcludeMap(idx(file, "inSubfolder")), }, { name: "not moved folder tree", items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), }, - inputFolderMap: map[string]string{ - "folder": expectedPath(folder), - "subfolder": expectedPath(folder + subFolder), + previousPaths: map[string]string{ + id(folder): fullPath(drive, name(folder)), + id(subfolder): fullPath(drive, name(folder), name(subfolder)), }, - scope: anyFolder, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), - "folder": expectedStatePath(data.NotMovedState, folder), + rootID: asNotMoved(t, fullPath(drive)), + id(folder): asNotMoved(t, fullPath(drive, name(folder))), }, expectedItemCount: 1, expectedFileCount: 0, expectedContainerCount: 2, expectedPrevPaths: map[string]string{ - "root": expectedPath(""), - "folder": expectedPath(folder), - "subfolder": expectedPath(folder + subFolder), + rootID: fullPath(drive), + id(folder): fullPath(drive, name(folder)), + id(subfolder): fullPath(drive, name(folder), name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: map[string]struct{}{}, @@ -480,27 +634,27 @@ func (suite *OneDriveCollectionsUnitSuite) TestPopulateDriveCollections() { { name: "moved folder tree", items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), }, - inputFolderMap: map[string]string{ - "folder": expectedPath("/a-folder"), - "subfolder": expectedPath("/a-folder/subfolder"), + previousPaths: map[string]string{ + id(folder): fullPath(drive, namex(folder, "a")), + id(subfolder): fullPath(drive, namex(folder, "a"), name(subfolder)), }, - scope: anyFolder, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), - "folder": expectedStatePath(data.MovedState, folder, "/a-folder"), + rootID: asNotMoved(t, fullPath(drive)), + id(folder): asMoved(t, fullPath(drive, namex(folder, "a")), fullPath(drive, name(folder))), }, expectedItemCount: 1, expectedFileCount: 0, expectedContainerCount: 2, expectedPrevPaths: map[string]string{ - "root": expectedPath(""), - "folder": expectedPath(folder), - "subfolder": expectedPath(folder + subFolder), + rootID: fullPath(drive), + id(folder): fullPath(drive, name(folder)), + id(subfolder): fullPath(drive, name(folder), name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: map[string]struct{}{}, @@ -508,28 +662,28 @@ func (suite *OneDriveCollectionsUnitSuite) TestPopulateDriveCollections() { { name: "moved folder tree twice within backup", items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("id1", "folder", testBaseDrivePath, "root", false, true, false), - driveItem("id2", "folder", testBaseDrivePath, "root", false, true, 
false), + driveRootItem(rootID), + driveItem(idx(folder, 1), name(folder), parent(drive), rootID, isFolder), + driveItem(idx(folder, 2), name(folder), parent(drive), rootID, isFolder), }, - inputFolderMap: map[string]string{ - "id1": expectedPath("/a-folder"), - "subfolder": expectedPath("/a-folder/subfolder"), + previousPaths: map[string]string{ + idx(folder, 1): fullPath(drive, namex(folder, "a")), + id(subfolder): fullPath(drive, namex(folder, "a"), name(subfolder)), }, - scope: anyFolder, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), - "id2": expectedStatePath(data.NewState, folder), + rootID: asNotMoved(t, fullPath(drive)), + idx(folder, 2): asNew(t, fullPath(drive, name(folder))), }, expectedItemCount: 1, expectedFileCount: 0, expectedContainerCount: 2, expectedPrevPaths: map[string]string{ - "root": expectedPath(""), - "id2": expectedPath(folder), - "subfolder": expectedPath(folder + subFolder), + rootID: fullPath(drive), + idx(folder, 2): fullPath(drive, name(folder)), + id(subfolder): fullPath(drive, name(folder), name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: map[string]struct{}{}, @@ -537,28 +691,28 @@ func (suite *OneDriveCollectionsUnitSuite) TestPopulateDriveCollections() { { name: "deleted folder tree twice within backup", items: []models.DriveItemable{ - driveRootItem("root"), - delItem("id1", testBaseDrivePath, "root", false, true, false), - driveItem("id1", "folder", testBaseDrivePath, "root", false, true, false), - delItem("id1", testBaseDrivePath, "root", false, true, false), + driveRootItem(rootID), + delItem(id(folder), parent(drive), rootID, isFolder), + driveItem(id(folder), name(drive), parent(drive), rootID, isFolder), + delItem(id(folder), parent(drive), rootID, isFolder), }, - inputFolderMap: map[string]string{ - "id1": expectedPath(""), - "subfolder": expectedPath("/a-folder/subfolder"), + previousPaths: map[string]string{ + id(folder): fullPath(drive), + id(subfolder): fullPath(drive, namex(folder, "a"), name(subfolder)), }, - scope: anyFolder, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), - "id1": expectedStatePath(data.DeletedState, ""), + rootID: asNotMoved(t, fullPath(drive)), + id(folder): asDeleted(t, fullPath(drive, "")), }, expectedItemCount: 0, expectedFileCount: 0, expectedContainerCount: 1, expectedPrevPaths: map[string]string{ - "root": expectedPath(""), - "subfolder": expectedPath("/a-folder" + subFolder), + rootID: fullPath(drive), + id(subfolder): fullPath(drive, namex(folder, "a"), name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: map[string]struct{}{}, @@ -566,29 +720,29 @@ func (suite *OneDriveCollectionsUnitSuite) TestPopulateDriveCollections() { { name: "moved folder tree twice within backup including delete", items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("id1", "folder", testBaseDrivePath, "root", false, true, false), - delItem("id1", testBaseDrivePath, "root", false, true, false), - driveItem("id2", "folder", testBaseDrivePath, "root", false, true, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), + delItem(id(folder), parent(drive), rootID, isFolder), + driveItem(idx(folder, 2), name(folder), 
parent(drive), rootID, isFolder), }, - inputFolderMap: map[string]string{ - "id1": expectedPath("/a-folder"), - "subfolder": expectedPath("/a-folder/subfolder"), + previousPaths: map[string]string{ + id(folder): fullPath(drive, namex(folder, "a")), + id(subfolder): fullPath(drive, namex(folder, "a"), name(subfolder)), }, - scope: anyFolder, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), - "id2": expectedStatePath(data.NewState, folder), + rootID: asNotMoved(t, fullPath(drive)), + idx(folder, 2): asNew(t, fullPath(drive, name(folder))), }, expectedItemCount: 1, expectedFileCount: 0, expectedContainerCount: 2, expectedPrevPaths: map[string]string{ - "root": expectedPath(""), - "id2": expectedPath(folder), - "subfolder": expectedPath(folder + subFolder), + rootID: fullPath(drive), + idx(folder, 2): fullPath(drive, name(folder)), + id(subfolder): fullPath(drive, name(folder), name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: map[string]struct{}{}, @@ -596,28 +750,28 @@ func (suite *OneDriveCollectionsUnitSuite) TestPopulateDriveCollections() { { name: "deleted folder tree twice within backup with addition", items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("id1", "folder", testBaseDrivePath, "root", false, true, false), - delItem("id1", testBaseDrivePath, "root", false, true, false), - driveItem("id2", "folder", testBaseDrivePath, "root", false, true, false), - delItem("id2", testBaseDrivePath, "root", false, true, false), + driveRootItem(rootID), + driveItem(idx(folder, 1), name(folder), parent(drive), rootID, isFolder), + delItem(idx(folder, 1), parent(drive), rootID, isFolder), + driveItem(idx(folder, 2), name(folder), parent(drive), rootID, isFolder), + delItem(idx(folder, 2), parent(drive), rootID, isFolder), }, - inputFolderMap: map[string]string{ - "id1": expectedPath("/a-folder"), - "subfolder": expectedPath("/a-folder/subfolder"), + previousPaths: map[string]string{ + idx(folder, 1): fullPath(drive, namex(folder, "a")), + id(subfolder): fullPath(drive, namex(folder, "a"), name(subfolder)), }, - scope: anyFolder, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), + rootID: asNotMoved(t, fullPath(drive)), }, expectedItemCount: 1, expectedFileCount: 0, expectedContainerCount: 2, expectedPrevPaths: map[string]string{ - "root": expectedPath(""), - "subfolder": expectedPath(folder + subFolder), + rootID: fullPath(drive), + id(subfolder): fullPath(drive, name(folder), name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: map[string]struct{}{}, @@ -625,80 +779,80 @@ func (suite *OneDriveCollectionsUnitSuite) TestPopulateDriveCollections() { { name: "moved folder tree with file no previous", items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), - driveItem("file", "file", testBaseDrivePath+"/folder", "folder", true, false, false), - driveItem("folder", "folder2", testBaseDrivePath, "root", false, true, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), + driveItem(id(file), name(file), parent(drive, name(folder)), id(folder), isFile), + driveItem(id(folder), namex(folder, 2), parent(drive), rootID, 
isFolder), }, - inputFolderMap: map[string]string{}, - scope: anyFolder, + previousPaths: map[string]string{}, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), - "folder": expectedStatePath(data.NewState, "/folder2"), + rootID: asNotMoved(t, fullPath(drive)), + id(folder): asNew(t, fullPath(drive, namex(folder, 2))), }, expectedItemCount: 2, expectedFileCount: 1, expectedContainerCount: 2, expectedPrevPaths: map[string]string{ - "root": expectedPath(""), - "folder": expectedPath("/folder2"), + rootID: fullPath(drive), + id(folder): fullPath(drive, namex(folder, 2)), }, expectedTopLevelPackages: map[string]struct{}{}, - expectedExcludes: makeExcludeMap("file"), + expectedExcludes: makeExcludeMap(id(file)), }, { name: "moved folder tree with file no previous 1", items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), - driveItem("file", "file", testBaseDrivePath+"/folder", "folder", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), + driveItem(id(file), name(file), parent(drive, name(folder)), id(folder), isFile), }, - inputFolderMap: map[string]string{}, - scope: anyFolder, + previousPaths: map[string]string{}, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), - "folder": expectedStatePath(data.NewState, folder), + rootID: asNotMoved(t, fullPath(drive)), + id(folder): asNew(t, fullPath(drive, name(folder))), }, expectedItemCount: 2, expectedFileCount: 1, expectedContainerCount: 2, expectedPrevPaths: map[string]string{ - "root": expectedPath(""), - "folder": expectedPath(folder), + rootID: fullPath(drive), + id(folder): fullPath(drive, name(folder)), }, expectedTopLevelPackages: map[string]struct{}{}, - expectedExcludes: makeExcludeMap("file"), + expectedExcludes: makeExcludeMap(id(file)), }, { name: "moved folder tree and subfolder 1", items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), - driveItem("subfolder", "subfolder", testBaseDrivePath, "root", false, true, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), + driveItem(id(subfolder), name(subfolder), parent(drive), rootID, isFolder), }, - inputFolderMap: map[string]string{ - "folder": expectedPath("/a-folder"), - "subfolder": expectedPath("/a-folder/subfolder"), + previousPaths: map[string]string{ + id(folder): fullPath(drive, namex(folder, "a")), + id(subfolder): fullPath(drive, namex(folder, "a"), name(subfolder)), }, - scope: anyFolder, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), - "folder": expectedStatePath(data.MovedState, folder, "/a-folder"), - "subfolder": expectedStatePath(data.MovedState, "/subfolder", "/a-folder/subfolder"), + rootID: asNotMoved(t, fullPath(drive)), + id(folder): asMoved(t, fullPath(drive, namex(folder, "a")), fullPath(drive, name(folder))), + id(subfolder): asMoved(t, fullPath(drive, namex(folder, "a"), name(subfolder)), fullPath(drive, name(subfolder))), }, expectedItemCount: 2, expectedFileCount: 0, 
expectedContainerCount: 3, expectedPrevPaths: map[string]string{ - "root": expectedPath(""), - "folder": expectedPath(folder), - "subfolder": expectedPath("/subfolder"), + rootID: fullPath(drive), + id(folder): fullPath(drive, name(folder)), + id(subfolder): fullPath(drive, name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: map[string]struct{}{}, @@ -706,29 +860,29 @@ func (suite *OneDriveCollectionsUnitSuite) TestPopulateDriveCollections() { { name: "moved folder tree and subfolder 2", items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("subfolder", "subfolder", testBaseDrivePath, "root", false, true, false), - driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), + driveRootItem(rootID), + driveItem(id(subfolder), name(subfolder), parent(drive), rootID, isFolder), + driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), }, - inputFolderMap: map[string]string{ - "folder": expectedPath("/a-folder"), - "subfolder": expectedPath("/a-folder/subfolder"), + previousPaths: map[string]string{ + id(folder): fullPath(drive, namex(folder, "a")), + id(subfolder): fullPath(drive, namex(folder, "a"), name(subfolder)), }, - scope: anyFolder, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), - "folder": expectedStatePath(data.MovedState, folder, "/a-folder"), - "subfolder": expectedStatePath(data.MovedState, "/subfolder", "/a-folder/subfolder"), + rootID: asNotMoved(t, fullPath(drive)), + id(folder): asMoved(t, fullPath(drive, namex(folder, "a")), fullPath(drive, name(folder))), + id(subfolder): asMoved(t, fullPath(drive, namex(folder, "a"), name(subfolder)), fullPath(drive, name(subfolder))), }, expectedItemCount: 2, expectedFileCount: 0, expectedContainerCount: 3, expectedPrevPaths: map[string]string{ - "root": expectedPath(""), - "folder": expectedPath(folder), - "subfolder": expectedPath("/subfolder"), + rootID: fullPath(drive), + id(folder): fullPath(drive, name(folder)), + id(subfolder): fullPath(drive, name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: map[string]struct{}{}, @@ -736,103 +890,96 @@ func (suite *OneDriveCollectionsUnitSuite) TestPopulateDriveCollections() { { name: "move subfolder when moving parent", items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder2", "folder2", testBaseDrivePath, "root", false, true, false), - driveItem("itemInFolder2", "itemInFolder2", testBaseDrivePath+"/folder2", "folder2", true, false, false), + driveRootItem(rootID), + driveItem(idx(folder, 2), namex(folder, 2), parent(drive), rootID, isFolder), + driveItem(id(item), name(item), parent(drive, namex(folder, 2)), idx(folder, 2), isFile), // Need to see the parent folder first (expected since that's what Graph // consistently returns). 
- driveItem("folder", "a-folder", testBaseDrivePath, "root", false, true, false), - driveItem("subfolder", "subfolder", testBaseDrivePath+"/a-folder", "folder", false, true, false), - driveItem( - "itemInSubfolder", - "itemInSubfolder", - testBaseDrivePath+"/a-folder/subfolder", - "subfolder", - true, - false, - false), - driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), + driveItem(id(folder), namex(folder, "a"), parent(drive), rootID, isFolder), + driveItem(id(subfolder), name(subfolder), parent(drive, namex(folder, "a")), id(folder), isFolder), + driveItem(idx(item, 2), namex(item, 2), parent(drive, namex(folder, "a"), name(subfolder)), id(subfolder), isFile), + driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), }, - inputFolderMap: map[string]string{ - "folder": expectedPath("/a-folder"), - "subfolder": expectedPath("/a-folder/subfolder"), + previousPaths: map[string]string{ + id(folder): fullPath(drive, namex(folder, "a")), + id(subfolder): fullPath(drive, namex(folder, "a"), name(subfolder)), }, - scope: anyFolder, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), - "folder": expectedStatePath(data.MovedState, folder, "/a-folder"), - "folder2": expectedStatePath(data.NewState, "/folder2"), - "subfolder": expectedStatePath(data.MovedState, folder+subFolder, "/a-folder/subfolder"), + rootID: asNotMoved(t, fullPath(drive)), + idx(folder, 2): asNew(t, fullPath(drive, namex(folder, 2))), + id(folder): asMoved(t, fullPath(drive, namex(folder, "a")), fullPath(drive, name(folder))), + id(subfolder): asMoved(t, fullPath(drive, namex(folder, "a"), name(subfolder)), fullPath(drive, name(folder), name(subfolder))), }, expectedItemCount: 5, expectedFileCount: 2, expectedContainerCount: 4, expectedPrevPaths: map[string]string{ - "root": expectedPath(""), - "folder": expectedPath("/folder"), - "folder2": expectedPath("/folder2"), - "subfolder": expectedPath("/folder/subfolder"), + rootID: fullPath(drive), + id(folder): fullPath(drive, name(folder)), + idx(folder, 2): fullPath(drive, namex(folder, 2)), + id(subfolder): fullPath(drive, name(folder), name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, - expectedExcludes: makeExcludeMap("itemInSubfolder", "itemInFolder2"), + expectedExcludes: makeExcludeMap(id(item), idx(item, 2)), }, { name: "moved folder tree multiple times", items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), - driveItem("file", "file", testBaseDrivePath+"/folder", "folder", true, false, false), - driveItem("folder", "folder2", testBaseDrivePath, "root", false, true, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), + driveItem(id(file), name(file), parent(drive, name(folder)), id(folder), isFile), + driveItem(id(folder), namex(folder, 2), parent(drive), rootID, isFolder), }, - inputFolderMap: map[string]string{ - "folder": expectedPath("/a-folder"), - "subfolder": expectedPath("/a-folder/subfolder"), + previousPaths: map[string]string{ + id(folder): fullPath(drive, namex(folder, "a")), + id(subfolder): fullPath(drive, namex(folder, "a"), name(subfolder)), }, - scope: anyFolder, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "root": 
expectedStatePath(data.NotMovedState, ""), - "folder": expectedStatePath(data.MovedState, "/folder2", "/a-folder"), + rootID: asNotMoved(t, fullPath(drive)), + id(folder): asMoved(t, fullPath(drive, namex(folder, "a")), fullPath(drive, namex(folder, 2))), }, expectedItemCount: 2, expectedFileCount: 1, expectedContainerCount: 2, expectedPrevPaths: map[string]string{ - "root": expectedPath(""), - "folder": expectedPath("/folder2"), - "subfolder": expectedPath("/folder2/subfolder"), + rootID: fullPath(drive), + id(folder): fullPath(drive, namex(folder, 2)), + id(subfolder): fullPath(drive, namex(folder, 2), name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, - expectedExcludes: makeExcludeMap("file"), + expectedExcludes: makeExcludeMap(id(file)), }, { name: "deleted folder and package", items: []models.DriveItemable{ - driveRootItem("root"), // root is always present, but not necessary here - delItem("folder", testBaseDrivePath, "root", false, true, false), - delItem("package", testBaseDrivePath, "root", false, false, true), + driveRootItem(rootID), // root is always present, but not necessary here + delItem(id(folder), parent(drive), rootID, isFolder), + delItem(id(pkg), parent(drive), rootID, isPackage), }, - inputFolderMap: map[string]string{ - "root": expectedPath(""), - "folder": expectedPath("/folder"), - "package": expectedPath("/package"), + previousPaths: map[string]string{ + rootID: fullPath(drive), + id(folder): fullPath(drive, name(folder)), + id(pkg): fullPath(drive, name(pkg)), }, - scope: anyFolder, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), - "folder": expectedStatePath(data.DeletedState, folder), - "package": expectedStatePath(data.DeletedState, pkg), + rootID: asNotMoved(t, fullPath(drive)), + id(folder): asDeleted(t, fullPath(drive, name(folder))), + id(pkg): asDeleted(t, fullPath(drive, name(pkg))), }, expectedItemCount: 0, expectedFileCount: 0, expectedContainerCount: 1, expectedPrevPaths: map[string]string{ - "root": expectedPath(""), + rootID: fullPath(drive), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: map[string]struct{}{}, @@ -840,23 +987,23 @@ func (suite *OneDriveCollectionsUnitSuite) TestPopulateDriveCollections() { { name: "delete folder without previous", items: []models.DriveItemable{ - driveRootItem("root"), - delItem("folder", testBaseDrivePath, "root", false, true, false), + driveRootItem(rootID), + delItem(id(folder), parent(drive), rootID, isFolder), }, - inputFolderMap: map[string]string{ - "root": expectedPath(""), + previousPaths: map[string]string{ + rootID: fullPath(drive), }, - scope: anyFolder, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), + rootID: asNotMoved(t, fullPath(drive)), }, expectedItemCount: 0, expectedFileCount: 0, expectedContainerCount: 1, expectedPrevPaths: map[string]string{ - "root": expectedPath(""), + rootID: fullPath(drive), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: map[string]struct{}{}, @@ -864,29 +1011,29 @@ func (suite *OneDriveCollectionsUnitSuite) TestPopulateDriveCollections() { { name: "delete folder tree move subfolder", items: []models.DriveItemable{ - driveRootItem("root"), - delItem("folder", testBaseDrivePath, "root", false, true, false), - driveItem("subfolder", 
"subfolder", testBaseDrivePath, "root", false, true, false), + driveRootItem(rootID), + delItem(id(folder), parent(drive), rootID, isFolder), + driveItem(id(subfolder), name(subfolder), parent(drive), rootID, isFolder), }, - inputFolderMap: map[string]string{ - "root": expectedPath(""), - "folder": expectedPath("/folder"), - "subfolder": expectedPath("/folder/subfolder"), + previousPaths: map[string]string{ + rootID: fullPath(drive), + id(folder): fullPath(drive, name(folder)), + id(subfolder): fullPath(drive, name(folder), name(subfolder)), }, - scope: anyFolder, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), - "folder": expectedStatePath(data.DeletedState, folder), - "subfolder": expectedStatePath(data.MovedState, "/subfolder", folder+subFolder), + rootID: asNotMoved(t, fullPath(drive)), + id(folder): asDeleted(t, fullPath(drive, name(folder))), + id(subfolder): asMoved(t, fullPath(drive, name(folder), name(subfolder)), fullPath(drive, name(subfolder))), }, expectedItemCount: 1, expectedFileCount: 0, expectedContainerCount: 2, expectedPrevPaths: map[string]string{ - "root": expectedPath(""), - "subfolder": expectedPath("/subfolder"), + rootID: fullPath(drive), + id(subfolder): fullPath(drive, name(subfolder)), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: map[string]struct{}{}, @@ -894,46 +1041,46 @@ func (suite *OneDriveCollectionsUnitSuite) TestPopulateDriveCollections() { { name: "delete file", items: []models.DriveItemable{ - driveRootItem("root"), - delItem("item", testBaseDrivePath, "root", true, false, false), + driveRootItem(rootID), + delItem(id(item), parent(drive), rootID, isFile), }, - inputFolderMap: map[string]string{ - "root": expectedPath(""), + previousPaths: map[string]string{ + rootID: fullPath(drive), }, - scope: anyFolder, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), + rootID: asNotMoved(t, fullPath(drive)), }, expectedItemCount: 1, expectedFileCount: 1, expectedContainerCount: 1, expectedPrevPaths: map[string]string{ - "root": expectedPath(""), + rootID: fullPath(drive), }, expectedTopLevelPackages: map[string]struct{}{}, - expectedExcludes: makeExcludeMap("item"), + expectedExcludes: makeExcludeMap(id(item)), }, { name: "item before parent errors", items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("file", "file", testBaseDrivePath+"/folder", "folder", true, false, false), - driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), + driveRootItem(rootID), + driveItem(id(file), name(file), parent(drive, name(folder)), id(folder), isFile), + driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), }, - inputFolderMap: map[string]string{}, - scope: anyFolder, + previousPaths: map[string]string{}, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.Error, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), + rootID: asNotMoved(t, fullPath(drive)), }, expectedItemCount: 0, expectedFileCount: 0, expectedContainerCount: 1, expectedPrevPaths: map[string]string{ - "root": expectedPath(""), + rootID: fullPath(drive), }, expectedTopLevelPackages: map[string]struct{}{}, expectedExcludes: map[string]struct{}{}, @@ -941,36 +1088,36 @@ func (suite 
*OneDriveCollectionsUnitSuite) TestPopulateDriveCollections() { { name: "1 root file, 1 folder, 1 package, 1 good file, 1 malware", items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false), - driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), - driveItem("package", "package", testBaseDrivePath, "root", false, false, true), - driveItem("goodFile", "goodFile", testBaseDrivePath+folder, "folder", true, false, false), - malwareItem("malwareFile", "malwareFile", testBaseDrivePath+folder, "folder", true, false, false), + driveRootItem(rootID), + driveItem(id(file), id(file), parent(drive), rootID, isFile), + driveItem(id(folder), name(folder), parent(drive), rootID, isFolder), + driveItem(id(pkg), name(pkg), parent(drive), rootID, isPackage), + driveItem(idx(file, "good"), namex(file, "good"), parent(drive, name(folder)), id(folder), isFile), + malwareItem(id(malware), name(malware), parent(drive, name(folder)), id(folder), isFile), }, - inputFolderMap: map[string]string{}, - scope: anyFolder, + previousPaths: map[string]string{}, + scope: anyFolderScope, topLevelPackages: map[string]struct{}{}, expect: assert.NoError, expectedCollectionIDs: map[string]statePath{ - "root": expectedStatePath(data.NotMovedState, ""), - "folder": expectedStatePath(data.NewState, folder), - "package": expectedStatePath(data.NewState, pkg), + rootID: asNotMoved(t, fullPath(drive)), + id(folder): asNew(t, fullPath(drive, name(folder))), + id(pkg): asNew(t, fullPath(drive, name(pkg))), }, expectedItemCount: 4, expectedFileCount: 2, expectedContainerCount: 3, expectedSkippedCount: 1, expectedPrevPaths: map[string]string{ - "root": expectedPath(""), - "folder": expectedPath("/folder"), - "package": expectedPath("/package"), + rootID: fullPath(drive), + id(folder): fullPath(drive, name(folder)), + id(pkg): fullPath(drive, name(pkg)), }, expectedTopLevelPackages: map[string]struct{}{ - expectedPath("/package"): {}, + fullPath(drive, name(pkg)): {}, }, expectedCountPackages: 1, - expectedExcludes: makeExcludeMap("fileInRoot", "goodFile"), + expectedExcludes: makeExcludeMap(id(file), idx(file, "good")), }, } @@ -982,8 +1129,9 @@ func (suite *OneDriveCollectionsUnitSuite) TestPopulateDriveCollections() { defer flush() var ( - mbh = mock.DefaultOneDriveBH(user) - du = pagers.DeltaUpdate{ + driveID = idx(drive, drive) + mbh = mock.DefaultOneDriveBH(user) + du = pagers.DeltaUpdate{ URL: "notempty", Reset: false, } @@ -1018,7 +1166,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestPopulateDriveCollections() { ctx, driveID, "General", - test.inputFolderMap, + test.previousPaths, excludes, test.topLevelPackages, "prevdelta", @@ -1041,7 +1189,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestPopulateDriveCollections() { } assert.Equalf(t, sp.state, c.CollectionMap[driveID][id].State(), "state for collection %s", id) - assert.Equalf(t, sp.curPath, c.CollectionMap[driveID][id].FullPath(), "current path for collection %s", id) + assert.Equalf(t, sp.currPath, c.CollectionMap[driveID][id].FullPath(), "current path for collection %s", id) assert.Equalf(t, sp.prevPath, c.CollectionMap[driveID][id].PreviousPath(), "prev path for collection %s", id) } @@ -1064,24 +1212,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestPopulateDriveCollections() { } } -func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() { - tenant := "a-tenant" - user := "a-user" - driveID1 := "1" - driveID2 := "2" - deltaURL1 := "url/1" - 
deltaURL2 := "url/2" - folderID1 := "folder1" - folderID2 := "folder2" - path1 := "folder1/path" - path2 := "folder2/path" - +func (suite *CollectionsUnitSuite) TestDeserializeMetadata() { table := []struct { name string // Each function returns the set of files for a single data.Collection. cols []func() []graph.MetadataCollectionEntry expectedDeltas map[string]string expectedPaths map[string]map[string]string + expectedAlerts []string canUsePreviousBackup bool errCheck assert.ErrorAssertionFunc }{ @@ -1092,23 +1230,23 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() { return []graph.MetadataCollectionEntry{ graph.NewMetadataEntry( bupMD.DeltaURLsFileName, - map[string]string{driveID1: deltaURL1}), + map[string]string{id(drive): id(delta)}), graph.NewMetadataEntry( bupMD.PreviousPathFileName, map[string]map[string]string{ - driveID1: { - folderID1: path1, + id(drive): { + idx(folder, 1): fullPath(1), }, }), } }, }, expectedDeltas: map[string]string{ - driveID1: deltaURL1, + id(drive): id(delta), }, expectedPaths: map[string]map[string]string{ - driveID1: { - folderID1: path1, + id(drive): { + idx(folder, 1): fullPath(1), }, }, canUsePreviousBackup: true, @@ -1121,7 +1259,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() { return []graph.MetadataCollectionEntry{ graph.NewMetadataEntry( bupMD.DeltaURLsFileName, - map[string]string{driveID1: deltaURL1}), + map[string]string{id(drive): id(delta)}), } }, }, @@ -1138,8 +1276,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() { graph.NewMetadataEntry( bupMD.PreviousPathFileName, map[string]map[string]string{ - driveID1: { - folderID1: path1, + id(drive): { + idx(folder, 1): fullPath(1), }, }), } @@ -1147,8 +1285,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() { }, expectedDeltas: map[string]string{}, expectedPaths: map[string]map[string]string{ - driveID1: { - folderID1: path1, + id(drive): { + idx(folder, 1): fullPath(1), }, }, canUsePreviousBackup: true, @@ -1164,17 +1302,17 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() { return []graph.MetadataCollectionEntry{ graph.NewMetadataEntry( bupMD.DeltaURLsFileName, - map[string]string{driveID1: deltaURL1}), + map[string]string{id(drive): id(delta)}), graph.NewMetadataEntry( bupMD.PreviousPathFileName, map[string]map[string]string{ - driveID1: {}, + id(drive): {}, }), } }, }, expectedDeltas: map[string]string{}, - expectedPaths: map[string]map[string]string{driveID1: {}}, + expectedPaths: map[string]map[string]string{id(drive): {}}, canUsePreviousBackup: true, errCheck: assert.NoError, }, @@ -1189,22 +1327,22 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() { graph.NewMetadataEntry( bupMD.DeltaURLsFileName, map[string]string{ - driveID1: "", + id(drive): "", }), graph.NewMetadataEntry( bupMD.PreviousPathFileName, map[string]map[string]string{ - driveID1: { - folderID1: path1, + id(drive): { + idx(folder, 1): fullPath(1), }, }), } }, }, - expectedDeltas: map[string]string{driveID1: ""}, + expectedDeltas: map[string]string{id(drive): ""}, expectedPaths: map[string]map[string]string{ - driveID1: { - folderID1: path1, + id(drive): { + idx(folder, 1): fullPath(1), }, }, canUsePreviousBackup: true, @@ -1217,12 +1355,12 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() { return []graph.MetadataCollectionEntry{ graph.NewMetadataEntry( bupMD.DeltaURLsFileName, - map[string]string{driveID1: deltaURL1}), + map[string]string{id(drive): id(delta)}), 
graph.NewMetadataEntry( bupMD.PreviousPathFileName, map[string]map[string]string{ - driveID1: { - folderID1: path1, + id(drive): { + idx(folder, 1): fullPath(1), }, }), } @@ -1231,27 +1369,27 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() { return []graph.MetadataCollectionEntry{ graph.NewMetadataEntry( bupMD.DeltaURLsFileName, - map[string]string{driveID2: deltaURL2}), + map[string]string{idx(drive, 2): idx(delta, 2)}), graph.NewMetadataEntry( bupMD.PreviousPathFileName, map[string]map[string]string{ - driveID2: { - folderID2: path2, + idx(drive, 2): { + idx(folder, 2): fullPath(2), }, }), } }, }, expectedDeltas: map[string]string{ - driveID1: deltaURL1, - driveID2: deltaURL2, + id(drive): id(delta), + idx(drive, 2): idx(delta, 2), }, expectedPaths: map[string]map[string]string{ - driveID1: { - folderID1: path1, + id(drive): { + idx(folder, 1): fullPath(1), }, - driveID2: { - folderID2: path2, + idx(drive, 2): { + idx(folder, 2): fullPath(2), }, }, canUsePreviousBackup: true, @@ -1268,7 +1406,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() { return []graph.MetadataCollectionEntry{ graph.NewMetadataEntry( bupMD.PreviousPathFileName, - map[string]string{driveID1: deltaURL1}), + map[string]string{id(drive): id(delta)}), } }, }, @@ -1284,26 +1422,26 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() { return []graph.MetadataCollectionEntry{ graph.NewMetadataEntry( bupMD.DeltaURLsFileName, - map[string]string{driveID1: deltaURL1}), + map[string]string{id(drive): id(delta)}), graph.NewMetadataEntry( bupMD.PreviousPathFileName, map[string]map[string]string{ - driveID1: { - folderID1: path1, + id(drive): { + idx(folder, 1): fullPath(1), }, }), graph.NewMetadataEntry( "foo", - map[string]string{driveID1: deltaURL1}), + map[string]string{id(drive): id(delta)}), } }, }, expectedDeltas: map[string]string{ - driveID1: deltaURL1, + id(drive): id(delta), }, expectedPaths: map[string]map[string]string{ - driveID1: { - folderID1: path1, + id(drive): { + idx(folder, 1): fullPath(1), }, }, canUsePreviousBackup: true, @@ -1316,12 +1454,12 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() { return []graph.MetadataCollectionEntry{ graph.NewMetadataEntry( bupMD.DeltaURLsFileName, - map[string]string{driveID1: deltaURL1}), + map[string]string{id(drive): id(delta)}), graph.NewMetadataEntry( bupMD.PreviousPathFileName, map[string]map[string]string{ - driveID1: { - folderID1: path1, + id(drive): { + idx(folder, 1): fullPath(1), }, }), } @@ -1331,8 +1469,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() { graph.NewMetadataEntry( bupMD.PreviousPathFileName, map[string]map[string]string{ - driveID1: { - folderID2: path2, + id(drive): { + idx(folder, 2): fullPath(2), }, }), } @@ -1350,12 +1488,12 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() { return []graph.MetadataCollectionEntry{ graph.NewMetadataEntry( bupMD.DeltaURLsFileName, - map[string]string{driveID1: deltaURL1}), + map[string]string{id(drive): id(delta)}), graph.NewMetadataEntry( bupMD.PreviousPathFileName, map[string]map[string]string{ - driveID1: { - folderID1: path1, + id(drive): { + idx(folder, 1): fullPath(1), }, }), } @@ -1364,7 +1502,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() { return []graph.MetadataCollectionEntry{ graph.NewMetadataEntry( bupMD.DeltaURLsFileName, - map[string]string{driveID1: deltaURL2}), + map[string]string{id(drive): idx(delta, 2)}), } }, }, @@ -1373,6 
+1511,90 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
 			canUsePreviousBackup: false,
 			errCheck: assert.NoError,
 		},
+		{
+			name: "DuplicatePreviousPaths",
+			cols: []func() []graph.MetadataCollectionEntry{
+				func() []graph.MetadataCollectionEntry {
+					return []graph.MetadataCollectionEntry{
+						graph.NewMetadataEntry(
+							bupMD.DeltaURLsFileName,
+							map[string]string{id(drive): id(delta)}),
+						graph.NewMetadataEntry(
+							bupMD.PreviousPathFileName,
+							map[string]map[string]string{
+								id(drive): {
+									idx(folder, 1): fullPath(1),
+									idx(folder, 2): fullPath(1),
+								},
+							}),
+					}
+				},
+			},
+			expectedDeltas: map[string]string{
+				id(drive): id(delta),
+			},
+			expectedPaths: map[string]map[string]string{
+				id(drive): {
+					idx(folder, 1): fullPath(1),
+					idx(folder, 2): fullPath(1),
+				},
+			},
+			expectedAlerts: []string{fault.AlertPreviousPathCollision},
+			canUsePreviousBackup: true,
+			errCheck: assert.NoError,
+		},
+		{
+			name: "DuplicatePreviousPaths_separateDrives",
+			cols: []func() []graph.MetadataCollectionEntry{
+				func() []graph.MetadataCollectionEntry {
+					return []graph.MetadataCollectionEntry{
+						graph.NewMetadataEntry(
+							bupMD.DeltaURLsFileName,
+							map[string]string{
+								id(drive): id(delta),
+							}),
+						graph.NewMetadataEntry(
+							bupMD.PreviousPathFileName,
+							map[string]map[string]string{
+								id(drive): {
+									idx(folder, 1): fullPath(1),
+									idx(folder, 2): fullPath(1),
+								},
+							}),
+					}
+				},
+				func() []graph.MetadataCollectionEntry {
+					return []graph.MetadataCollectionEntry{
+						graph.NewMetadataEntry(
+							bupMD.DeltaURLsFileName,
+							map[string]string{idx(drive, 2): idx(delta, 2)}),
+						graph.NewMetadataEntry(
+							bupMD.PreviousPathFileName,
+							map[string]map[string]string{
+								idx(drive, 2): {
+									idx(folder, 1): fullPath(1),
+								},
+							}),
+					}
+				},
+			},
+			expectedDeltas: map[string]string{
+				id(drive): id(delta),
+				idx(drive, 2): idx(delta, 2),
+			},
+			expectedPaths: map[string]map[string]string{
+				id(drive): {
+					idx(folder, 1): fullPath(1),
+					idx(folder, 2): fullPath(1),
+				},
+				idx(drive, 2): {
+					idx(folder, 1): fullPath(1),
+				},
+			},
+			expectedAlerts: []string{fault.AlertPreviousPathCollision},
+			canUsePreviousBackup: true,
+			errCheck: assert.NoError,
+		},
+	}
 	for _, test := range table {
@@ -1404,32 +1626,29 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
 					data.NoFetchRestoreCollection{Collection: mc}))
 			}
-			deltas, paths, canUsePreviousBackup, err := deserializeMetadata(ctx, cols)
+			fb := fault.New(true)
+
+			deltas, paths, canUsePreviousBackup, err := deserializeAndValidateMetadata(ctx, cols, fb)
 			test.errCheck(t, err)
 			assert.Equal(t, test.canUsePreviousBackup, canUsePreviousBackup, "can use previous backup")
 			assert.Equal(t, test.expectedDeltas, deltas, "deltas")
 			assert.Equal(t, test.expectedPaths, paths, "paths")
+
+			alertMsgs := []string{}
+
+			for _, alert := range fb.Alerts() {
+				alertMsgs = append(alertMsgs, alert.Message)
+			}
+
+			assert.ElementsMatch(t, test.expectedAlerts, alertMsgs, "alert messages")
 		})
 	}
 }
-type failingColl struct{}
-
-func (f failingColl) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item {
-	ic := make(chan data.Item)
-	defer close(ic)
-
-	errs.AddRecoverable(ctx, assert.AnError)
-
-	return ic
-}
-func (f failingColl) FullPath() path.Path { return nil }
-func (f failingColl) FetchItemByName(context.Context, string) (data.Item, error) { return nil, nil }
-
 // This check is to ensure that we don't error out, but still return
 // canUsePreviousBackup as false on read errors
-func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata_ReadFailure() {
+func (suite *CollectionsUnitSuite) TestDeserializeMetadata_ReadFailure() {
 	t := suite.T()
 	ctx, flush := tester.NewContext(t)
@@ -1437,20 +1656,12 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata_ReadFailure()
 	fc := failingColl{}
-	_, _, canUsePreviousBackup, err := deserializeMetadata(ctx, []data.RestoreCollection{fc})
+	_, _, canUsePreviousBackup, err := deserializeAndValidateMetadata(ctx, []data.RestoreCollection{fc}, fault.New(true))
 	require.NoError(t, err)
 	require.False(t, canUsePreviousBackup)
 }
-func (suite *OneDriveCollectionsUnitSuite) TestGet() {
-	var (
-		tenant = "a-tenant"
-		user = "a-user"
-		empty = ""
-		delta = "delta1"
-		delta2 = "delta2"
-	)
-
+func (suite *CollectionsUnitSuite) TestGet() {
 	metadataPath, err := path.BuildMetadata(
 		tenant,
 		user,
@@ -1459,31 +1670,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 		false)
 	require.NoError(suite.T(), err, "making metadata path", clues.ToCore(err))
-	driveID1 := "drive-1-" + uuid.NewString()
 	drive1 := models.NewDrive()
-	drive1.SetId(&driveID1)
-	drive1.SetName(&driveID1)
+	drive1.SetId(ptr.To(idx(drive, 1)))
+	drive1.SetName(ptr.To(namex(drive, 1)))
-	driveID2 := "drive-2-" + uuid.NewString()
 	drive2 := models.NewDrive()
-	drive2.SetId(&driveID2)
-	drive2.SetName(&driveID2)
-
-	var (
-		bh = userDriveBackupHandler{userID: user}
-
-		driveBasePath1 = odConsts.DriveFolderPrefixBuilder(driveID1).String()
-		driveBasePath2 = odConsts.DriveFolderPrefixBuilder(driveID2).String()
-
-		expectedPath1 = getExpectedPathGenerator(suite.T(), bh, tenant, driveBasePath1)
-		expectedPath2 = getExpectedPathGenerator(suite.T(), bh, tenant, driveBasePath2)
-
-		rootFolderPath1 = expectedPath1("")
-		folderPath1 = expectedPath1("/folder")
-
-		rootFolderPath2 = expectedPath2("")
-		folderPath2 = expectedPath2("/folder")
-	)
+	drive2.SetId(ptr.To(idx(drive, 2)))
+	drive2.SetName(ptr.To(namex(drive, 2)))
 	table := []struct {
 		name string
 		drives []models.Driveable
 		enumerator mock.EnumerateItemsDeltaByDrive
 		canUsePreviousBackup bool
 		errCheck assert.ErrorAssertionFunc
-		prevFolderPaths map[string]map[string]string
+		previousPaths map[string]map[string]string
 		// Collection name -> set of item IDs. We can't check item data because
 		// that's not mocked out. Metadata is checked separately.
-		expectedCollections map[string]map[data.CollectionState][]string
-		expectedDeltaURLs map[string]string
-		expectedFolderPaths map[string]map[string]string
+		expectedCollections map[string]map[data.CollectionState][]string
+		expectedDeltaURLs map[string]string
+		expectedPreviousPaths map[string]map[string]string
 		// Items that should be excluded from the base. Only populated if the delta
 		// was valid and there was at least 1 previous folder path.
expectedDelList *pmMock.PrefixMap @@ -1509,33 +1702,33 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{{ Items: []models.DriveItemable{ - driveRootItem("root"), // will be present, not needed - delItem("file", driveBasePath1, "root", true, false, false), + driveRootItem(rootID), // will be present, not needed + delItem(id(file), parent(1), rootID, isFile), }, }}, - DeltaUpdate: pagers.DeltaUpdate{URL: delta}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: {"root": rootFolderPath1}, + previousPaths: map[string]map[string]string{ + idx(drive, 1): {rootID: fullPath(1)}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NotMovedState: {}}, + fullPath(1): {data.NotMovedState: {}}, }, expectedDeltaURLs: map[string]string{ - driveID1: delta, + idx(drive, 1): id(delta), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: {"root": rootFolderPath1}, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): {rootID: fullPath(1)}, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{ - rootFolderPath1: makeExcludeMap("file"), + fullPath(1): makeExcludeMap(id(file)), }), }, { @@ -1543,33 +1736,33 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{{ Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("file", "file", driveBasePath1, "root", true, false, false), + driveRootItem(rootID), + driveItem(id(file), name(file), parent(1), rootID, isFile), }, }}, - DeltaUpdate: pagers.DeltaUpdate{URL: delta}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: {"root": rootFolderPath1}, + previousPaths: map[string]map[string]string{ + idx(drive, 1): {rootID: fullPath(1)}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NotMovedState: {"file"}}, + fullPath(1): {data.NotMovedState: {id(file)}}, }, expectedDeltaURLs: map[string]string{ - driveID1: delta, + idx(drive, 1): id(delta), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: {"root": rootFolderPath1}, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): {rootID: fullPath(1)}, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{ - rootFolderPath1: makeExcludeMap("file"), + fullPath(1): makeExcludeMap(id(file)), }), }, { @@ -1577,38 +1770,38 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{{ Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), 
name(file), parent(1, name(folder)), id(folder), isFile), }, }}, - DeltaUpdate: pagers.DeltaUpdate{URL: delta, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{}, + previousPaths: map[string]map[string]string{}, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NewState: {}}, - folderPath1: {data.NewState: {"folder", "file"}}, + fullPath(1): {data.NewState: {}}, + fullPath(1, name(folder)): {data.NewState: {id(folder), id(file)}}, }, expectedDeltaURLs: map[string]string{ - driveID1: delta, + idx(drive, 1): id(delta), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder": folderPath1, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(folder): fullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - rootFolderPath1: true, - folderPath1: true, + fullPath(1): true, + fullPath(1, name(folder)): true, }, }, { @@ -1616,39 +1809,39 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{{ Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), - driveItem("file", "file2", driveBasePath1+"/folder", "folder", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), + driveItem(id(file), namex(file, 2), parent(1, name(folder)), id(folder), isFile), }, }}, - DeltaUpdate: pagers.DeltaUpdate{URL: delta, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{}, + previousPaths: map[string]map[string]string{}, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NewState: {}}, - folderPath1: {data.NewState: {"folder", "file"}}, + fullPath(1): {data.NewState: {}}, + fullPath(1, name(folder)): {data.NewState: {id(folder), id(file)}}, }, expectedDeltaURLs: map[string]string{ - driveID1: delta, + idx(drive, 1): id(delta), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder": folderPath1, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(folder): fullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - rootFolderPath1: true, - folderPath1: true, + fullPath(1): true, + fullPath(1, name(folder)): true, }, }, { @@ -1656,39 +1849,39 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - 
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), - driveItem("file", "file2", driveBasePath1, "root", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), + driveItem(id(file), namex(file, 2), parent(1), rootID, isFile), }}}, - DeltaUpdate: pagers.DeltaUpdate{URL: delta}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, + previousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NotMovedState: {"file"}}, - folderPath1: {data.NewState: {"folder"}}, + fullPath(1): {data.NotMovedState: {id(file)}}, + fullPath(1, name(folder)): {data.NewState: {id(folder)}}, }, expectedDeltaURLs: map[string]string{ - driveID1: delta, + idx(drive, 1): id(delta), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder": folderPath1, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(folder): fullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{ - rootFolderPath1: makeExcludeMap("file"), + fullPath(1): makeExcludeMap(id(file)), }), }, { @@ -1696,36 +1889,36 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), }}}, - DeltaUpdate: pagers.DeltaUpdate{URL: empty, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: "", Reset: true}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: {}, + previousPaths: map[string]map[string]string{ + idx(drive, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NewState: {}}, - folderPath1: {data.NewState: {"folder", "file"}}, + fullPath(1): {data.NewState: {}}, + fullPath(1, name(folder)): {data.NewState: {id(folder), id(file)}}, }, expectedDeltaURLs: map[string]string{}, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder": folderPath1, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(folder): fullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - rootFolderPath1: true, - folderPath1: true, + fullPath(1): true, + fullPath(1, name(folder)): true, }, }, { @@ -1733,49 +1926,49 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: 
[]mock.NextPage{ { Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - driveItem("file2", "file2", driveBasePath1+"/folder", "folder", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), parent(1, name(folder)), id(folder), isFile), }, }, }, - DeltaUpdate: pagers.DeltaUpdate{URL: delta, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: {}, + previousPaths: map[string]map[string]string{ + idx(drive, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NewState: {}}, - folderPath1: {data.NewState: {"folder", "file", "file2"}}, + fullPath(1): {data.NewState: {}}, + fullPath(1, name(folder)): {data.NewState: {id(folder), id(file), idx(file, 2)}}, }, expectedDeltaURLs: map[string]string{ - driveID1: delta, + idx(drive, 1): id(delta), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder": folderPath1, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(folder): fullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - rootFolderPath1: true, - folderPath1: true, + fullPath(1): true, + fullPath(1, name(folder)): true, }, }, { @@ -1783,14 +1976,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{ { Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), - driveItem("file3", "file3", driveBasePath1+"/folder", "folder", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), + driveItem(idx(file, 3), namex(file, 3), parent(1, name(folder)), id(folder), isFile), }, }, { @@ -1799,45 +1992,45 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { }, { Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - driveItem("file2", "file2", driveBasePath1+"/folder", "folder", true, false, 
false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), parent(1, name(folder)), id(folder), isFile), }, }, }, - DeltaUpdate: pagers.DeltaUpdate{URL: delta, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: {}, + previousPaths: map[string]map[string]string{ + idx(drive, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NewState: {}}, - folderPath1: {data.NewState: {"folder", "file", "file2"}}, + fullPath(1): {data.NewState: {}}, + fullPath(1, name(folder)): {data.NewState: {id(folder), id(file), idx(file, 2)}}, }, expectedDeltaURLs: map[string]string{ - driveID1: delta, + idx(drive, 1): id(delta), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder": folderPath1, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(folder): fullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - rootFolderPath1: true, - folderPath1: true, + fullPath(1): true, + fullPath(1, name(folder)): true, }, }, { @@ -1845,57 +2038,57 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{ { Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), }, Reset: true, }, { Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - driveItem("file2", "file2", driveBasePath1+"/folder", "folder", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), parent(1, name(folder)), id(folder), isFile), }, }, }, - DeltaUpdate: pagers.DeltaUpdate{URL: delta, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: {}, + previousPaths: map[string]map[string]string{ + idx(drive, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NewState: {}}, - folderPath1: {data.NewState: {"folder", "file", "file2"}}, + fullPath(1): {data.NewState: {}}, + fullPath(1, name(folder)): {data.NewState: {id(folder), id(file), idx(file, 2)}}, }, expectedDeltaURLs: map[string]string{ - driveID1: delta, + idx(drive, 
1): id(delta), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder": folderPath1, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(folder): fullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - rootFolderPath1: true, - folderPath1: true, + fullPath(1): true, + fullPath(1, name(folder)): true, }, }, { @@ -1906,56 +2099,56 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { }, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), }}}, - DeltaUpdate: pagers.DeltaUpdate{URL: delta, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, }, - driveID2: { + idx(drive, 2): { Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem("root2"), - driveItem("folder2", "folder", driveBasePath2, "root2", false, true, false), - driveItem("file2", "file", driveBasePath2+"/folder", "folder2", true, false, false), + driveRootItem(idx("root", 2)), + driveItem(idx(folder, 2), name(folder), parent(2), idx("root", 2), isFolder), + driveItem(idx(file, 2), name(file), parent(2, name(folder)), idx(folder, 2), isFile), }}}, - DeltaUpdate: pagers.DeltaUpdate{URL: delta2, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: idx(delta, 2), Reset: true}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: {}, - driveID2: {}, + previousPaths: map[string]map[string]string{ + idx(drive, 1): {}, + idx(drive, 2): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NewState: {}}, - folderPath1: {data.NewState: {"folder", "file"}}, - rootFolderPath2: {data.NewState: {}}, - folderPath2: {data.NewState: {"folder2", "file2"}}, + fullPath(1): {data.NewState: {}}, + fullPath(1, name(folder)): {data.NewState: {id(folder), id(file)}}, + fullPath(2): {data.NewState: {}}, + fullPath(2, name(folder)): {data.NewState: {idx(folder, 2), idx(file, 2)}}, }, expectedDeltaURLs: map[string]string{ - driveID1: delta, - driveID2: delta2, + idx(drive, 1): id(delta), + idx(drive, 2): idx(delta, 2), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder": folderPath1, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(folder): fullPath(1, name(folder)), }, - driveID2: { - "root2": rootFolderPath2, - "folder2": folderPath2, + idx(drive, 2): { + idx("root", 2): fullPath(2), + idx(folder, 2): fullPath(2, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - rootFolderPath1: true, - folderPath1: true, - rootFolderPath2: true, - folderPath2: true, + fullPath(1): true, + fullPath(1, name(folder)): true, + fullPath(2): true, + fullPath(2, name(folder)): true, }, }, { @@ -1966,56 +2159,56 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { }, 
enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), }}}, - DeltaUpdate: pagers.DeltaUpdate{URL: delta, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, }, - driveID2: { + idx(drive, 2): { Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath2, "root", false, true, false), - driveItem("file2", "file", driveBasePath2+"/folder", "folder", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(2), rootID, isFolder), + driveItem(idx(file, 2), name(file), parent(2, name(folder)), id(folder), isFile), }}}, - DeltaUpdate: pagers.DeltaUpdate{URL: delta2, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: idx(delta, 2), Reset: true}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: {}, - driveID2: {}, + previousPaths: map[string]map[string]string{ + idx(drive, 1): {}, + idx(drive, 2): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NewState: {}}, - folderPath1: {data.NewState: {"folder", "file"}}, - rootFolderPath2: {data.NewState: {}}, - folderPath2: {data.NewState: {"folder", "file2"}}, + fullPath(1): {data.NewState: {}}, + fullPath(1, name(folder)): {data.NewState: {id(folder), id(file)}}, + fullPath(2): {data.NewState: {}}, + fullPath(2, name(folder)): {data.NewState: {id(folder), idx(file, 2)}}, }, expectedDeltaURLs: map[string]string{ - driveID1: delta, - driveID2: delta2, + idx(drive, 1): id(delta), + idx(drive, 2): idx(delta, 2), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder": folderPath1, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(folder): fullPath(1, name(folder)), }, - driveID2: { - "root": rootFolderPath2, - "folder": folderPath2, + idx(drive, 2): { + rootID: fullPath(2), + id(folder): fullPath(2, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - rootFolderPath1: true, - folderPath1: true, - rootFolderPath2: true, - folderPath2: true, + fullPath(1): true, + fullPath(1, name(folder)): true, + fullPath(2): true, + fullPath(2, name(folder)): true, }, }, { @@ -2023,7 +2216,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{{Items: []models.DriveItemable{}}}, DeltaUpdate: pagers.DeltaUpdate{}, Err: assert.AnError, @@ -2032,20 +2225,20 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { }, canUsePreviousBackup: false, errCheck: assert.Error, - prevFolderPaths: map[string]map[string]string{ - driveID1: {}, + previousPaths: map[string]map[string]string{ + idx(drive, 1): {}, }, - expectedCollections: nil, - expectedDeltaURLs: nil, - expectedFolderPaths: nil, 
- expectedDelList: nil, + expectedCollections: nil, + expectedDeltaURLs: nil, + expectedPreviousPaths: nil, + expectedDelList: nil, }, { name: "OneDrive_OneItemPage_InvalidPrevDelta_DeleteNonExistentFolder", drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{ { Items: []models.DriveItemable{}, @@ -2053,43 +2246,43 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { }, { Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder2", "folder2", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder2", "folder2", true, false, false), + driveRootItem(rootID), + driveItem(idx(folder, 2), namex(folder, 2), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, namex(folder, 2)), idx(folder, 2), isFile), }, }, }, - DeltaUpdate: pagers.DeltaUpdate{URL: delta, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder": folderPath1, + previousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(folder): fullPath(1, name(folder)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NewState: {}}, - expectedPath1("/folder"): {data.DeletedState: {}}, - expectedPath1("/folder2"): {data.NewState: {"folder2", "file"}}, + fullPath(1): {data.NewState: {}}, + fullPath(1, name(folder)): {data.DeletedState: {}}, + fullPath(1, namex(folder, 2)): {data.NewState: {idx(folder, 2), id(file)}}, }, expectedDeltaURLs: map[string]string{ - driveID1: delta, + idx(drive, 1): id(delta), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder2": expectedPath1("/folder2"), + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + idx(folder, 2): fullPath(1, namex(folder, 2)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - rootFolderPath1: true, - folderPath1: true, - expectedPath1("/folder2"): true, + fullPath(1): true, + fullPath(1, name(folder)): true, + fullPath(1, namex(folder, 2)): true, }, }, { @@ -2097,7 +2290,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{ { Items: []models.DriveItemable{}, @@ -2105,43 +2298,43 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { }, { Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder2", "folder2", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder2", "folder2", true, false, false), + driveRootItem(rootID), + driveItem(idx(folder, 2), namex(folder, 2), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, namex(folder, 2)), idx(folder, 2), isFile), }, }, }, - DeltaUpdate: pagers.DeltaUpdate{URL: delta, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder": folderPath1, + 
previousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(folder): fullPath(1, name(folder)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NewState: {}}, - expectedPath1("/folder"): {data.DeletedState: {}}, - expectedPath1("/folder2"): {data.NewState: {"folder2", "file"}}, + fullPath(1): {data.NewState: {}}, + fullPath(1, name(folder)): {data.DeletedState: {}}, + fullPath(1, namex(folder, 2)): {data.NewState: {idx(folder, 2), id(file)}}, }, expectedDeltaURLs: map[string]string{ - driveID1: delta, + idx(drive, 1): id(delta), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder2": expectedPath1("/folder2"), + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + idx(folder, 2): fullPath(1, namex(folder, 2)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - rootFolderPath1: true, - folderPath1: true, - expectedPath1("/folder2"): true, + fullPath(1): true, + fullPath(1, name(folder)): true, + fullPath(1, namex(folder, 2)): true, }, }, { @@ -2149,15 +2342,15 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{ { // on the first page, if this is the total data, we'd expect both folder and folder2 // since new previousPaths merge with the old previousPaths. Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder2", "folder", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder", "folder2", true, false, false), + driveRootItem(rootID), + driveItem(idx(folder, 2), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(folder)), idx(folder, 2), isFile), }, }, { @@ -2168,46 +2361,46 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { // but after a delta reset, we treat this as the total end set of folders, which means // we don't expect folder to exist any longer. Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder2", "folder", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder", "folder2", true, false, false), + driveRootItem(rootID), + driveItem(idx(folder, 2), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(folder)), idx(folder, 2), isFile), }, }, }, - DeltaUpdate: pagers.DeltaUpdate{URL: delta, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder": folderPath1, + previousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(folder): fullPath(1, name(folder)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NewState: {}}, - expectedPath1("/folder"): { + fullPath(1): {data.NewState: {}}, + fullPath(1, name(folder)): { // Old folder path should be marked as deleted since it should compare // by ID. 
data.DeletedState: {}, - data.NewState: {"folder2", "file"}, + data.NewState: {idx(folder, 2), id(file)}, }, }, expectedDeltaURLs: map[string]string{ - driveID1: delta, + idx(drive, 1): id(delta), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder2": expectedPath1("/folder"), + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + idx(folder, 2): fullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - rootFolderPath1: true, - folderPath1: true, + fullPath(1): true, + fullPath(1, name(folder)): true, }, }, { @@ -2215,7 +2408,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{ { Items: []models.DriveItemable{}, @@ -2223,46 +2416,46 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { }, { Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder2", "folder", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder", "folder2", true, false, false), + driveRootItem(rootID), + driveItem(idx(folder, 2), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(folder)), idx(folder, 2), isFile), }, }, }, - DeltaUpdate: pagers.DeltaUpdate{URL: delta, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder": folderPath1, + previousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(folder): fullPath(1, name(folder)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NewState: {}}, - expectedPath1("/folder"): { + fullPath(1): {data.NewState: {}}, + fullPath(1, name(folder)): { // Old folder path should be marked as deleted since it should compare // by ID. 
data.DeletedState: {}, - data.NewState: {"folder2", "file"}, + data.NewState: {idx(folder, 2), id(file)}, }, }, expectedDeltaURLs: map[string]string{ - driveID1: delta, + idx(drive, 1): id(delta), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder2": expectedPath1("/folder"), + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + idx(folder, 2): fullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - rootFolderPath1: true, - folderPath1: true, + fullPath(1): true, + fullPath(1, name(folder)): true, }, }, { @@ -2270,51 +2463,51 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{ { Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), - malwareItem("malware", "malware", driveBasePath1+"/folder", "folder", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), + malwareItem(id(malware), name(malware), parent(1, name(folder)), id(folder), isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - driveItem("file2", "file2", driveBasePath1+"/folder", "folder", true, false, false), - malwareItem("malware2", "malware2", driveBasePath1+"/folder", "folder", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), parent(1, name(folder)), id(folder), isFile), + malwareItem(idx(malware, 2), namex(malware, 2), parent(1, name(folder)), id(folder), isFile), }, }, }, - DeltaUpdate: pagers.DeltaUpdate{URL: delta, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: {}, + previousPaths: map[string]map[string]string{ + idx(drive, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NewState: {}}, - folderPath1: {data.NewState: {"folder", "file", "file2"}}, + fullPath(1): {data.NewState: {}}, + fullPath(1, name(folder)): {data.NewState: {id(folder), id(file), idx(file, 2)}}, }, expectedDeltaURLs: map[string]string{ - driveID1: delta, + idx(drive, 1): id(delta), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder": folderPath1, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(folder): fullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - rootFolderPath1: true, - folderPath1: true, + fullPath(1): true, + fullPath(1, name(folder)): true, }, expectedSkippedCount: 2, }, @@ -2323,15 +2516,15 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: 
map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{ { Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), - driveItem("folder2", "folder2", driveBasePath1, "root", false, true, false), - driveItem("file2", "file2", driveBasePath1+"/folder2", "folder2", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), + driveItem(idx(folder, 2), namex(folder, 2), parent(1), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), parent(1, namex(folder, 2)), idx(folder, 2), isFile), }, }, { @@ -2339,46 +2532,46 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { }, { Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), - delItem("folder2", driveBasePath1, "root", false, true, false), - delItem("file2", driveBasePath1, "root", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), + delItem(idx(folder, 2), parent(1), rootID, isFolder), + delItem(namex(file, 2), parent(1), rootID, isFile), }, }, }, - DeltaUpdate: pagers.DeltaUpdate{URL: delta2, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: idx(delta, 2), Reset: true}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder": folderPath1, - "folder2": expectedPath1("/folder2"), + previousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(folder): fullPath(1, name(folder)), + idx(folder, 2): fullPath(1, namex(folder, 2)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NewState: {}}, - folderPath1: {data.NotMovedState: {"folder", "file"}}, - expectedPath1("/folder2"): {data.DeletedState: {}}, + fullPath(1): {data.NewState: {}}, + fullPath(1, name(folder)): {data.NotMovedState: {id(folder), id(file)}}, + fullPath(1, namex(folder, 2)): {data.DeletedState: {}}, }, expectedDeltaURLs: map[string]string{ - driveID1: delta2, + idx(drive, 1): idx(delta, 2), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder": folderPath1, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(folder): fullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - rootFolderPath1: true, - folderPath1: true, - expectedPath1("/folder2"): true, + fullPath(1): true, + fullPath(1, name(folder)): true, + fullPath(1, namex(folder, 2)): true, }, }, { @@ -2386,42 +2579,42 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{{ Items: []models.DriveItemable{ - driveRootItem("root"), - delItem("folder", driveBasePath1, "root", false, true, false), + driveRootItem(rootID), + 
delItem(id(folder), parent(1), rootID, isFolder), }, Reset: true, }}, - DeltaUpdate: pagers.DeltaUpdate{URL: delta, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder": folderPath1, + previousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(folder): fullPath(1, name(folder)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NewState: {}}, - folderPath1: {data.DeletedState: {}}, + fullPath(1): {data.NewState: {}}, + fullPath(1, name(folder)): {data.DeletedState: {}}, }, expectedDeltaURLs: map[string]string{ - driveID1: delta, + idx(drive, 1): id(delta), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - rootFolderPath1: true, - folderPath1: true, + fullPath(1): true, + fullPath(1, name(folder)): true, }, }, { @@ -2429,41 +2622,41 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{ { Items: []models.DriveItemable{ - driveRootItem("root"), - delItem("file", driveBasePath1, "root", true, false, false), + driveRootItem(rootID), + delItem(id(file), parent(1), rootID, isFile), }, Reset: true, }, }, - DeltaUpdate: pagers.DeltaUpdate{URL: delta, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, + previousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NewState: {}}, + fullPath(1): {data.NewState: {}}, }, expectedDeltaURLs: map[string]string{ - driveID1: delta, + idx(drive, 1): id(delta), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - rootFolderPath1: true, + fullPath(1): true, }, }, { @@ -2471,46 +2664,46 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{ { Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem("root"), - delItem("folder", driveBasePath1, "root", false, true, false), - delItem("file", driveBasePath1, "root", true, false, false), + 
driveRootItem(rootID), + delItem(id(folder), parent(1), rootID, isFolder), + delItem(id(file), parent(1), rootID, isFile), }, }, }, - DeltaUpdate: pagers.DeltaUpdate{URL: delta2, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: idx(delta, 2), Reset: true}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: {}, + previousPaths: map[string]map[string]string{ + idx(drive, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NewState: {}}, + fullPath(1): {data.NewState: {}}, }, expectedDeltaURLs: map[string]string{ - driveID1: delta2, + idx(drive, 1): idx(delta, 2), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - rootFolderPath1: true, + fullPath(1): true, }, }, { @@ -2518,56 +2711,56 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{ { Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem("root"), - delItem("folder", driveBasePath1, "root", false, true, false), - delItem("file", driveBasePath1, "root", true, false, false), + driveRootItem(rootID), + delItem(id(folder), parent(1), rootID, isFolder), + delItem(id(file), parent(1), rootID, isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder1", "folder", driveBasePath1, "root", false, true, false), - driveItem("file1", "file", driveBasePath1+"/folder", "folder1", true, false, false), + driveRootItem(rootID), + driveItem(idx(folder, 1), name(folder), parent(1), rootID, isFolder), + driveItem(idx(file, 1), name(file), parent(1, name(folder)), idx(folder, 1), isFile), }, }, }, - DeltaUpdate: pagers.DeltaUpdate{URL: delta2, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: idx(delta, 2), Reset: true}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: {}, + previousPaths: map[string]map[string]string{ + idx(drive, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NewState: {}}, - folderPath1: {data.NewState: {"folder1", "file1"}}, + fullPath(1): {data.NewState: {}}, + fullPath(1, name(folder)): {data.NewState: {idx(folder, 1), idx(file, 1)}}, }, expectedDeltaURLs: map[string]string{ - driveID1: delta2, + idx(drive, 1): idx(delta, 2), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder1": folderPath1, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + idx(folder, 1): fullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - rootFolderPath1: 
true, - folderPath1: true, + fullPath(1): true, + fullPath(1, name(folder)): true, }, }, { @@ -2575,52 +2768,52 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{ { Items: []models.DriveItemable{ - driveRootItem("root"), - delItem("folder", driveBasePath1, "root", false, true, false), - delItem("file", driveBasePath1+"/folder", "root", true, false, false), + driveRootItem(rootID), + delItem(id(folder), parent(1), rootID, isFolder), + delItem(id(file), parent(1, name(folder)), rootID, isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem("root"), - delItem("folder", driveBasePath1, "root", false, true, false), - delItem("file", driveBasePath1+"/folder", "root", true, false, false), + driveRootItem(rootID), + delItem(id(folder), parent(1), rootID, isFolder), + delItem(id(file), parent(1, name(folder)), rootID, isFile), }, }, }, - DeltaUpdate: pagers.DeltaUpdate{URL: delta2, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: idx(delta, 2), Reset: true}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder": folderPath1, + previousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(folder): fullPath(1, name(folder)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NotMovedState: {}}, - folderPath1: {data.DeletedState: {}}, + fullPath(1): {data.NotMovedState: {}}, + fullPath(1, name(folder)): {data.DeletedState: {}}, }, expectedDeltaURLs: map[string]string{ - driveID1: delta2, + idx(drive, 1): idx(delta, 2), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), @@ -2631,59 +2824,59 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{ { Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem("root"), - delItem("folder", driveBasePath1, "root", false, true, false), - delItem("file", driveBasePath1, "root", true, false, false), + driveRootItem(rootID), + delItem(id(folder), parent(1), rootID, isFolder), + delItem(id(file), parent(1), rootID, isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem("root"), - 
driveItem("folder1", "folder", driveBasePath1, "root", false, true, false), - driveItem("file1", "file", driveBasePath1+"/folder", "folder1", true, false, false), + driveRootItem(rootID), + driveItem(idx(folder, 1), name(folder), parent(1), rootID, isFolder), + driveItem(idx(file, 1), name(file), parent(1, name(folder)), idx(folder, 1), isFile), }, }, }, - DeltaUpdate: pagers.DeltaUpdate{URL: delta2, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: idx(delta, 2), Reset: true}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder": folderPath1, + previousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(folder): fullPath(1, name(folder)), }, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NewState: {}}, - folderPath1: {data.DeletedState: {}, data.NewState: {"folder1", "file1"}}, + fullPath(1): {data.NewState: {}}, + fullPath(1, name(folder)): {data.DeletedState: {}, data.NewState: {idx(folder, 1), idx(file, 1)}}, }, expectedDeltaURLs: map[string]string{ - driveID1: delta2, + idx(drive, 1): idx(delta, 2), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder1": folderPath1, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + idx(folder, 1): fullPath(1, name(folder)), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - rootFolderPath1: false, - folderPath1: true, + fullPath(1): false, + fullPath(1, name(folder)): true, }, }, { @@ -2691,48 +2884,48 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{ { Items: []models.DriveItemable{ - driveRootItem("root"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), }, }, { Items: []models.DriveItemable{ - driveRootItem("root"), - delItem("file", driveBasePath1, "root", true, false, false), + driveRootItem(rootID), + delItem(id(file), parent(1), rootID, isFile), }, }, }, - DeltaUpdate: pagers.DeltaUpdate{URL: delta, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: {}, + previousPaths: map[string]map[string]string{ + idx(drive, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NewState: {}}, - folderPath1: {data.NewState: {"folder"}}, + fullPath(1): {data.NewState: {}}, + fullPath(1, name(folder)): {data.NewState: {id(folder)}}, }, expectedDeltaURLs: map[string]string{ - driveID1: delta, + idx(drive, 1): id(delta), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, - "folder": folderPath1, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(folder): fullPath(1, name(folder)), }, }, expectedDelList: 
pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - rootFolderPath1: true, - folderPath1: true, + fullPath(1): true, + fullPath(1, name(folder)): true, }, }, { @@ -2740,34 +2933,34 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem("root"), - delItem("folder", driveBasePath1, "root", false, true, false), + driveRootItem(rootID), + delItem(id(folder), parent(1), rootID, isFolder), }}}, - DeltaUpdate: pagers.DeltaUpdate{URL: delta, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: {}, + previousPaths: map[string]map[string]string{ + idx(drive, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NewState: {}}, + fullPath(1): {data.NewState: {}}, }, expectedDeltaURLs: map[string]string{ - driveID1: delta, + idx(drive, 1): id(delta), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - rootFolderPath1: true, + fullPath(1): true, }, }, { @@ -2775,34 +2968,34 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem("root"), - delItem("file", driveBasePath1, "root", true, false, false), + driveRootItem(rootID), + delItem(id(file), parent(1), rootID, isFile), }}}, - DeltaUpdate: pagers.DeltaUpdate{URL: delta, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: map[string]map[string]string{ - driveID1: {}, + previousPaths: map[string]map[string]string{ + idx(drive, 1): {}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NewState: {}}, + fullPath(1): {data.NewState: {}}, }, expectedDeltaURLs: map[string]string{ - driveID1: delta, + idx(drive, 1): id(delta), }, - expectedFolderPaths: map[string]map[string]string{ - driveID1: { - "root": rootFolderPath1, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), }, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - rootFolderPath1: true, + fullPath(1): true, }, }, { @@ -2810,33 +3003,295 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { drives: []models.Driveable{drive1}, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem("root"), // will be present + driveRootItem(rootID), // will be present }}}, - DeltaUpdate: pagers.DeltaUpdate{URL: delta}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, }, }, }, canUsePreviousBackup: true, errCheck: assert.NoError, - prevFolderPaths: 
map[string]map[string]string{ - driveID1: {"root": rootFolderPath1}, - driveID2: {"root": rootFolderPath2}, + previousPaths: map[string]map[string]string{ + idx(drive, 1): {rootID: fullPath(1)}, + idx(drive, 2): {rootID: fullPath(2)}, }, expectedCollections: map[string]map[data.CollectionState][]string{ - rootFolderPath1: {data.NotMovedState: {}}, - rootFolderPath2: {data.DeletedState: {}}, + fullPath(1): {data.NotMovedState: {}}, + fullPath(2): {data.DeletedState: {}}, }, - expectedDeltaURLs: map[string]string{driveID1: delta}, - expectedFolderPaths: map[string]map[string]string{ - driveID1: {"root": rootFolderPath1}, + expectedDeltaURLs: map[string]string{idx(drive, 1): id(delta)}, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): {rootID: fullPath(1)}, }, expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), doNotMergeItems: map[string]bool{ - rootFolderPath2: true, + fullPath(2): true, }, }, + { + name: "duplicate previous paths in metadata", + drives: []models.Driveable{drive1, drive2}, + enumerator: mock.EnumerateItemsDeltaByDrive{ + DrivePagers: map[string]*mock.DriveItemsDeltaPager{ + // contains duplicates in previousPath + idx(drive, 1): { + Pages: []mock.NextPage{{ + Items: []models.DriveItemable{ + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), + driveItem(idx(folder, 2), namex(folder, 2), parent(1), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), parent(1, namex(folder, 2)), idx(folder, 2), isFile), + }, + }}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, + }, + // does not contain duplicates + idx(drive, 2): { + Pages: []mock.NextPage{{ + Items: []models.DriveItemable{ + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(2), rootID, isFolder), + driveItem(id(file), name(file), parent(2, name(folder)), id(folder), isFile), + driveItem(idx(folder, 2), namex(folder, 2), parent(2), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), parent(2, namex(folder, 2)), idx(folder, 2), isFile), + }, + }}, + DeltaUpdate: pagers.DeltaUpdate{URL: idx(delta, 2)}, + }, + }, + }, + canUsePreviousBackup: true, + errCheck: assert.NoError, + previousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(folder): fullPath(1, name(folder)), + idx(folder, 2): fullPath(1, name(folder)), + idx(folder, 3): fullPath(1, name(folder)), + }, + idx(drive, 2): { + rootID: fullPath(2), + id(folder): fullPath(2, name(folder)), + idx(folder, 2): fullPath(2, namex(folder, 2)), + }, + }, + expectedCollections: map[string]map[data.CollectionState][]string{ + fullPath(1): { + data.NewState: {id(folder), idx(folder, 2)}, + }, + fullPath(1, name(folder)): { + data.NotMovedState: {id(folder), id(file)}, + }, + fullPath(1, namex(folder, 2)): { + data.MovedState: {idx(folder, 2), idx(file, 2)}, + }, + fullPath(2): { + data.NewState: {id(folder), idx(folder, 2)}, + }, + fullPath(2, name(folder)): { + data.NotMovedState: {id(folder), id(file)}, + }, + fullPath(2, namex(folder, 2)): { + data.NotMovedState: {idx(folder, 2), idx(file, 2)}, + }, + }, + expectedDeltaURLs: map[string]string{ + idx(drive, 1): id(delta), + idx(drive, 2): idx(delta, 2), + }, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(folder): fullPath(1, namex(folder, 2)), // note: this is a bug, but is currently expected + idx(folder, 2): fullPath(1, namex(folder, 2)), + 
idx(folder, 3): fullPath(1, namex(folder, 2)), + }, + idx(drive, 2): { + rootID: fullPath(2), + id(folder): fullPath(2, name(folder)), + idx(folder, 2): fullPath(2, namex(folder, 2)), + }, + }, + expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{ + fullPath(1): makeExcludeMap(id(file), idx(file, 2)), + fullPath(2): makeExcludeMap(id(file), idx(file, 2)), + }), + doNotMergeItems: map[string]bool{}, + }, + { + name: "out of order item enumeration causes prev path collisions", + drives: []models.Driveable{drive1}, + enumerator: mock.EnumerateItemsDeltaByDrive{ + DrivePagers: map[string]*mock.DriveItemsDeltaPager{ + idx(drive, 1): { + Pages: []mock.NextPage{{ + Items: []models.DriveItemable{ + driveRootItem(rootID), + driveItem(idx(fanny, 2), name(fanny), parent(1), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), parent(1, name(fanny)), idx(fanny, 2), isFile), + driveItem(id(nav), name(nav), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(nav)), id(nav), isFile), + }, + }}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, + }, + }, + }, + canUsePreviousBackup: true, + errCheck: assert.NoError, + previousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(nav): fullPath(1, name(fanny)), + }, + }, + expectedCollections: map[string]map[data.CollectionState][]string{ + fullPath(1): { + data.NewState: {idx(fanny, 2)}, + }, + fullPath(1, name(nav)): { + data.MovedState: {id(nav), id(file)}, + }, + fullPath(1, name(fanny)): { + data.NewState: {idx(fanny, 2), idx(file, 2)}, + }, + }, + expectedDeltaURLs: map[string]string{ + idx(drive, 1): id(delta), + }, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(nav): fullPath(1, name(nav)), + idx(fanny, 2): fullPath(1, name(nav)), // note: this is a bug, but currently expected + }, + }, + expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{ + fullPath(1): makeExcludeMap(id(file), idx(file, 2)), + }), + doNotMergeItems: map[string]bool{}, + }, + { + name: "out of order item enumeration causes prev path collisions", + drives: []models.Driveable{drive1}, + enumerator: mock.EnumerateItemsDeltaByDrive{ + DrivePagers: map[string]*mock.DriveItemsDeltaPager{ + idx(drive, 1): { + Pages: []mock.NextPage{{ + Items: []models.DriveItemable{ + driveRootItem(rootID), + driveItem(idx(fanny, 2), name(fanny), parent(1), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), parent(1, name(fanny)), idx(fanny, 2), isFile), + driveItem(id(nav), name(nav), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(nav)), id(nav), isFile), + }, + }}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, + }, + }, + }, + canUsePreviousBackup: true, + errCheck: assert.NoError, + previousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(nav): fullPath(1, name(fanny)), + }, + }, + expectedCollections: map[string]map[data.CollectionState][]string{ + fullPath(1): { + data.NewState: {idx(fanny, 2)}, + }, + fullPath(1, name(nav)): { + data.MovedState: {id(nav), id(file)}, + }, + fullPath(1, name(fanny)): { + data.NewState: {idx(fanny, 2), idx(file, 2)}, + }, + }, + expectedDeltaURLs: map[string]string{ + idx(drive, 1): id(delta), + }, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(nav): fullPath(1, name(nav)), + idx(fanny, 2): fullPath(1, name(nav)), // note: this is a bug, but currently expected + }, + }, + 
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{ + fullPath(1): makeExcludeMap(id(file), idx(file, 2)), + }), + doNotMergeItems: map[string]bool{}, + }, + { + name: "out of order item enumeration causes opposite prev path collisions", + drives: []models.Driveable{drive1}, + enumerator: mock.EnumerateItemsDeltaByDrive{ + DrivePagers: map[string]*mock.DriveItemsDeltaPager{ + idx(drive, 1): { + Pages: []mock.NextPage{{ + Items: []models.DriveItemable{ + driveRootItem(rootID), + driveItem(idx(file, 1), namex(file, 1), parent(1), rootID, isFile), + driveItem(id(fanny), name(fanny), parent(1), rootID, isFolder), + driveItem(id(nav), name(nav), parent(1), rootID, isFolder), + driveItem(id(foo), name(foo), parent(1, name(fanny)), id(fanny), isFolder), + driveItem(id(bar), name(foo), parent(1, name(nav)), id(nav), isFolder), + }, + }}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, + }, + }, + }, + canUsePreviousBackup: true, + errCheck: assert.NoError, + previousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(nav): fullPath(1, name(nav)), + id(fanny): fullPath(1, name(fanny)), + id(foo): fullPath(1, name(nav), name(foo)), + id(bar): fullPath(1, name(fanny), name(foo)), + }, + }, + expectedCollections: map[string]map[data.CollectionState][]string{ + fullPath(1): { + data.NotMovedState: {idx(file, 1)}, + }, + fullPath(1, name(nav)): { + data.NotMovedState: {id(nav)}, + }, + fullPath(1, name(nav), name(foo)): { + data.MovedState: {id(bar)}, + }, + fullPath(1, name(fanny)): { + data.NotMovedState: {id(fanny)}, + }, + fullPath(1, name(fanny), name(foo)): { + data.MovedState: {id(foo)}, + }, + }, + expectedDeltaURLs: map[string]string{ + idx(drive, 1): id(delta), + }, + expectedPreviousPaths: map[string]map[string]string{ + idx(drive, 1): { + rootID: fullPath(1), + id(nav): fullPath(1, name(nav)), + id(fanny): fullPath(1, name(fanny)), + id(foo): fullPath(1, name(nav), name(foo)), // note: this is a bug, but currently expected + id(bar): fullPath(1, name(nav), name(foo)), + }, + }, + expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{ + fullPath(1): makeExcludeMap(idx(file, 1)), + }), + doNotMergeItems: map[string]bool{}, + }, } for _, test := range table { suite.Run(test.name, func() { @@ -2851,7 +3306,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { }, } - mbh := mock.DefaultOneDriveBH("a-user") + mbh := mock.DefaultOneDriveBH(user) mbh.DrivePagerV = mockDrivePager mbh.DriveItemEnumeration = test.enumerator @@ -2873,12 +3328,12 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { graph.NewMetadataEntry( bupMD.DeltaURLsFileName, map[string]string{ - driveID1: prevDelta, - driveID2: prevDelta, + idx(drive, 1): prevDelta, + idx(drive, 2): prevDelta, }), graph.NewMetadataEntry( bupMD.PreviousPathFileName, - test.prevFolderPaths), + test.previousPaths), }, func(*support.ControllerOperationStatus) {}) assert.NoError(t, err, "creating metadata collection", clues.ToCore(err)) @@ -2891,7 +3346,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { delList := prefixmatcher.NewStringSetBuilder() cols, canUsePreviousBackup, err := c.Get(ctx, prevMetadata, delList, errs) - test.errCheck(t, err) + test.errCheck(t, err, clues.ToCore(err)) assert.Equal(t, test.canUsePreviousBackup, canUsePreviousBackup, "can use previous backup") assert.Equal(t, test.expectedSkippedCount, len(errs.Skipped())) @@ -2910,19 +3365,20 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { } if folderPath == metadataPath.String() { - 
deltas, paths, _, err := deserializeMetadata( + deltas, prevs, _, err := deserializeAndValidateMetadata( ctx, []data.RestoreCollection{ dataMock.NewUnversionedRestoreCollection( t, data.NoFetchRestoreCollection{Collection: baseCol}), - }) + }, + errs) if !assert.NoError(t, err, "deserializing metadata", clues.ToCore(err)) { continue } assert.Equal(t, test.expectedDeltaURLs, deltas, "delta urls") - assert.Equal(t, test.expectedFolderPaths, paths, "folder paths") + assert.Equal(t, test.expectedPreviousPaths, prevs, "previous paths") continue } @@ -2978,142 +3434,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { } } -func coreItem( - id string, - name string, - parentPath string, - parentID string, - isFile, isFolder, isPackage bool, -) *models.DriveItem { - item := models.NewDriveItem() - item.SetName(&name) - item.SetId(&id) - - parentReference := models.NewItemReference() - parentReference.SetPath(&parentPath) - parentReference.SetId(&parentID) - item.SetParentReference(parentReference) - - switch { - case isFile: - item.SetFile(models.NewFile()) - case isFolder: - item.SetFolder(models.NewFolder()) - case isPackage: - item.SetPackageEscaped(models.NewPackageEscaped()) - } - - return item -} - -func driveItem( - id string, - name string, - parentPath string, - parentID string, - isFile, isFolder, isPackage bool, -) models.DriveItemable { - return coreItem(id, name, parentPath, parentID, isFile, isFolder, isPackage) -} - -func fileItem( - id, name, parentPath, parentID, url string, - deleted bool, -) models.DriveItemable { - di := driveItem(id, name, parentPath, parentID, true, false, false) - di.SetAdditionalData(map[string]any{ - "@microsoft.graph.downloadUrl": url, - }) - - if deleted { - di.SetDeleted(models.NewDeleted()) - } - - return di -} - -func malwareItem( - id string, - name string, - parentPath string, - parentID string, - isFile, isFolder, isPackage bool, -) models.DriveItemable { - c := coreItem(id, name, parentPath, parentID, isFile, isFolder, isPackage) - - mal := models.NewMalware() - malStr := "test malware" - mal.SetDescription(&malStr) - - c.SetMalware(mal) - - return c -} - -func driveRootItem(id string) models.DriveItemable { - name := "root" - item := models.NewDriveItem() - item.SetName(&name) - item.SetId(&id) - item.SetRoot(models.NewRoot()) - item.SetFolder(models.NewFolder()) - - return item -} - -// delItem creates a DriveItemable that is marked as deleted. path must be set -// to the base drive path. -func delItem( - id string, - parentPath string, - parentID string, - isFile, isFolder, isPackage bool, -) models.DriveItemable { - item := models.NewDriveItem() - item.SetId(&id) - item.SetDeleted(models.NewDeleted()) - - parentReference := models.NewItemReference() - parentReference.SetId(&parentID) - item.SetParentReference(parentReference) - - switch { - case isFile: - item.SetFile(models.NewFile()) - case isFolder: - item.SetFolder(models.NewFolder()) - case isPackage: - item.SetPackageEscaped(models.NewPackageEscaped()) - } - - return item -} - -// TestURLCacheAttach tests for 2 things: -// 1. All collections belong to the same drive share the url cache instance -// 2. 
Each drive has its own unique url cache instance -func (suite *OneDriveCollectionsUnitSuite) TestURLCacheAttach() { - var ( - tenant = "a-tenant" - user = "a-user" - delta = "delta1" - delta2 = "delta2" - ) - - driveID1 := "drive-1-" + uuid.NewString() +func (suite *CollectionsUnitSuite) TestAddURLCacheToDriveCollections() { drive1 := models.NewDrive() - drive1.SetId(&driveID1) - drive1.SetName(&driveID1) + drive1.SetId(ptr.To(idx(drive, 1))) + drive1.SetName(ptr.To(namex(drive, 1))) - driveID2 := "drive-2-" + uuid.NewString() drive2 := models.NewDrive() - drive2.SetId(&driveID2) - drive2.SetName(&driveID2) - - var ( - driveBasePath1 = odConsts.DriveFolderPrefixBuilder(driveID1).String() - driveBasePath2 = odConsts.DriveFolderPrefixBuilder(driveID2).String() - ) + drive2.SetId(ptr.To(idx(drive, 2))) + drive2.SetName(ptr.To(namex(drive, 2))) table := []struct { name string @@ -3129,21 +3457,21 @@ func (suite *OneDriveCollectionsUnitSuite) TestURLCacheAttach() { }, enumerator: mock.EnumerateItemsDeltaByDrive{ DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID1: { + idx(drive, 1): { Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem("root1"), - driveItem("folder", "folder", driveBasePath1, "root", false, true, false), - driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), + driveRootItem(rootID), + driveItem(id(folder), name(folder), parent(1), rootID, isFolder), + driveItem(id(file), name(file), parent(1, name(folder)), id(folder), isFile), }}}, - DeltaUpdate: pagers.DeltaUpdate{URL: delta, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, }, - driveID2: { + idx(drive, 2): { Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem("root2"), - driveItem("folder2", "folder", driveBasePath2, "root2", false, true, false), - driveItem("file2", "file", driveBasePath2+"/folder", "folder2", true, false, false), + driveRootItem(rootID), + driveItem(idx(folder, 2), name(folder), parent(2), rootID, isFolder), + driveItem(idx(file, 2), name(file), parent(2, name(folder)), idx(folder, 2), isFile), }}}, - DeltaUpdate: pagers.DeltaUpdate{URL: delta2, Reset: true}, + DeltaUpdate: pagers.DeltaUpdate{URL: idx(delta, 2), Reset: true}, }, }, }, @@ -3167,7 +3495,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestURLCacheAttach() { }, } - mbh := mock.DefaultOneDriveBH("a-user") + mbh := mock.DefaultOneDriveBH(user) mbh.DrivePagerV = mockDrivePager mbh.DriveItemEnumeration = test.enumerator diff --git a/src/internal/m365/collection/drive/debug.go b/src/internal/m365/collection/drive/debug.go new file mode 100644 index 000000000..7a4f5c46c --- /dev/null +++ b/src/internal/m365/collection/drive/debug.go @@ -0,0 +1,32 @@ +package drive + +import ( + "context" + + "github.com/alcionai/clues" + + "github.com/alcionai/corso/src/internal/data" + bupMD "github.com/alcionai/corso/src/pkg/backup/metadata" + "github.com/alcionai/corso/src/pkg/fault" + "github.com/alcionai/corso/src/pkg/store" +) + +func DeserializeMetadataFiles( + ctx context.Context, + colls []data.RestoreCollection, +) ([]store.MetadataFile, error) { + deltas, prevs, _, err := deserializeAndValidateMetadata(ctx, colls, fault.New(true)) + + files := []store.MetadataFile{ + { + Name: bupMD.PreviousPathFileName, + Data: prevs, + }, + { + Name: bupMD.DeltaURLsFileName, + Data: deltas, + }, + } + + return files, clues.Stack(err).OrNil() +} diff --git a/src/internal/m365/collection/drive/url_cache_test.go b/src/internal/m365/collection/drive/url_cache_test.go 
index 6697eac00..f5338890d 100644 --- a/src/internal/m365/collection/drive/url_cache_test.go +++ b/src/internal/m365/collection/drive/url_cache_test.go @@ -531,7 +531,7 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() { pages: []mock.NextPage{ {Items: []models.DriveItemable{ fileItem("1", "file1", "root", "root", "https://dummy1.com", false), - driveItem("2", "folder2", "root", "root", false, true, false), + driveItem("2", "folder2", "root", "root", isFolder), }}, }, expectedItemProps: map[string]itemProps{ diff --git a/src/internal/m365/collection/exchange/debug.go b/src/internal/m365/collection/exchange/debug.go new file mode 100644 index 000000000..c5787d647 --- /dev/null +++ b/src/internal/m365/collection/exchange/debug.go @@ -0,0 +1,17 @@ +package exchange + +import ( + "context" + + "github.com/alcionai/clues" + + "github.com/alcionai/corso/src/internal/data" + "github.com/alcionai/corso/src/pkg/store" +) + +func DeserializeMetadataFiles( + ctx context.Context, + colls []data.RestoreCollection, +) ([]store.MetadataFile, error) { + return nil, clues.New("TODO: needs implementation") +} diff --git a/src/internal/m365/collection/groups/debug.go b/src/internal/m365/collection/groups/debug.go new file mode 100644 index 000000000..edb3c300b --- /dev/null +++ b/src/internal/m365/collection/groups/debug.go @@ -0,0 +1,17 @@ +package groups + +import ( + "context" + + "github.com/alcionai/clues" + + "github.com/alcionai/corso/src/internal/data" + "github.com/alcionai/corso/src/pkg/store" +) + +func DeserializeMetadataFiles( + ctx context.Context, + colls []data.RestoreCollection, +) ([]store.MetadataFile, error) { + return nil, clues.New("TODO: needs implementation") +} diff --git a/src/internal/m365/debug.go b/src/internal/m365/debug.go new file mode 100644 index 000000000..6164e1076 --- /dev/null +++ b/src/internal/m365/debug.go @@ -0,0 +1,39 @@ +package m365 + +import ( + "context" + + "github.com/alcionai/clues" + + "github.com/alcionai/corso/src/internal/data" + "github.com/alcionai/corso/src/internal/m365/collection/drive" + "github.com/alcionai/corso/src/internal/m365/collection/exchange" + "github.com/alcionai/corso/src/internal/m365/collection/groups" + "github.com/alcionai/corso/src/pkg/path" + "github.com/alcionai/corso/src/pkg/store" +) + +func (ctrl *Controller) DeserializeMetadataFiles( + ctx context.Context, + colls []data.RestoreCollection, +) ([]store.MetadataFile, error) { + if len(colls) == 0 { + return []store.MetadataFile{}, nil + } + + // assume all collections refer to the same service + service := colls[0].FullPath().Service() + + switch service { + case path.ExchangeService, path.ExchangeMetadataService: + return exchange.DeserializeMetadataFiles(ctx, colls) + case path.OneDriveService, path.OneDriveMetadataService: + return drive.DeserializeMetadataFiles(ctx, colls) + case path.SharePointService, path.SharePointMetadataService: + return drive.DeserializeMetadataFiles(ctx, colls) + case path.GroupsService, path.GroupsMetadataService: + return groups.DeserializeMetadataFiles(ctx, colls) + default: + return nil, clues.New("unrecognized service").With("service", service).WithClues(ctx) + } +} diff --git a/src/internal/m365/mock/connector.go b/src/internal/m365/mock/connector.go index a45530bae..a131fed01 100644 --- a/src/internal/m365/mock/connector.go +++ b/src/internal/m365/mock/connector.go @@ -8,7 +8,6 @@ import ( "github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/prefixmatcher" 
"github.com/alcionai/corso/src/internal/data" - "github.com/alcionai/corso/src/internal/kopia" kinject "github.com/alcionai/corso/src/internal/kopia/inject" "github.com/alcionai/corso/src/internal/operations/inject" "github.com/alcionai/corso/src/pkg/backup/details" @@ -53,7 +52,7 @@ func (ctrl Controller) ProduceBackupCollections( func (ctrl *Controller) GetMetadataPaths( ctx context.Context, r kinject.RestoreProducer, - base kopia.BackupBase, + base inject.ReasonAndSnapshotIDer, errs *fault.Bus, ) ([]path.RestorePaths, error) { return nil, clues.New("not implemented") diff --git a/src/internal/operations/inject/inject.go b/src/internal/operations/inject/inject.go index 0f449bf81..28713e15a 100644 --- a/src/internal/operations/inject/inject.go +++ b/src/internal/operations/inject/inject.go @@ -3,12 +3,14 @@ package inject import ( "context" + "github.com/kopia/kopia/repo/manifest" + "github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/prefixmatcher" "github.com/alcionai/corso/src/internal/data" - "github.com/alcionai/corso/src/internal/kopia" "github.com/alcionai/corso/src/internal/kopia/inject" "github.com/alcionai/corso/src/pkg/backup/details" + "github.com/alcionai/corso/src/pkg/backup/identity" "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control/repository" "github.com/alcionai/corso/src/pkg/count" @@ -37,7 +39,7 @@ type ( GetMetadataPaths( ctx context.Context, r inject.RestoreProducer, - base kopia.BackupBase, + base ReasonAndSnapshotIDer, errs *fault.Bus, ) ([]path.RestorePaths, error) @@ -125,4 +127,9 @@ type ( service path.ServiceType, ) (ServiceHandler, error) } + + ReasonAndSnapshotIDer interface { + GetReasons() []identity.Reasoner + GetSnapshotID() manifest.ID + } ) diff --git a/src/internal/operations/inject/mock/inject.go b/src/internal/operations/inject/mock/inject.go index a6f931a88..b558c322d 100644 --- a/src/internal/operations/inject/mock/inject.go +++ b/src/internal/operations/inject/mock/inject.go @@ -7,7 +7,6 @@ import ( "github.com/alcionai/corso/src/internal/common/prefixmatcher" "github.com/alcionai/corso/src/internal/data" - "github.com/alcionai/corso/src/internal/kopia" kinject "github.com/alcionai/corso/src/internal/kopia/inject" "github.com/alcionai/corso/src/internal/m365" "github.com/alcionai/corso/src/internal/operations/inject" @@ -64,7 +63,7 @@ func (mbp *mockBackupProducer) Wait() *data.CollectionStats { func (mbp mockBackupProducer) GetMetadataPaths( ctx context.Context, r kinject.RestoreProducer, - base kopia.BackupBase, + base inject.ReasonAndSnapshotIDer, errs *fault.Bus, ) ([]path.RestorePaths, error) { ctrl := m365.Controller{} diff --git a/src/internal/operations/manifests.go b/src/internal/operations/manifests.go index 37bc70d5c..ecabe6d97 100644 --- a/src/internal/operations/manifests.go +++ b/src/internal/operations/manifests.go @@ -53,7 +53,7 @@ func produceManifestsAndMetadata( } // getManifestsAndMetadata calls kopia to retrieve prior backup manifests, -// metadata collections to supply backup heuristics. +// metadata collections to supply backup information. 
func getManifestsAndMetadata( ctx context.Context, bf inject.BaseFinder, diff --git a/src/pkg/fault/alert.go b/src/pkg/fault/alert.go index 5d4c97cea..d7b207f8f 100644 --- a/src/pkg/fault/alert.go +++ b/src/pkg/fault/alert.go @@ -4,6 +4,10 @@ import ( "github.com/alcionai/corso/src/cli/print" ) +const ( + AlertPreviousPathCollision = "previous_path_collision" +) + var _ print.Printable = &Alert{} // Alerts are informational-only notifications. The purpose of alerts is to diff --git a/src/pkg/fault/fault.go b/src/pkg/fault/fault.go index e6ea1bcd9..97816405b 100644 --- a/src/pkg/fault/fault.go +++ b/src/pkg/fault/fault.go @@ -208,7 +208,6 @@ func (e *Bus) AddAlert(ctx context.Context, a *Alert) { e.logAndAddAlert(ctx, a, 1) } -// logs the error and adds an alert. func (e *Bus) logAndAddAlert(ctx context.Context, a *Alert, trace int) { logger.CtxStack(ctx, trace+1). With("alert", a). diff --git a/src/pkg/repository/data_providers.go b/src/pkg/repository/data_providers.go index 93a141fed..0d9d663d8 100644 --- a/src/pkg/repository/data_providers.go +++ b/src/pkg/repository/data_providers.go @@ -6,11 +6,13 @@ import ( "github.com/alcionai/clues" + "github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/m365" "github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/internal/operations/inject" "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/path" + "github.com/alcionai/corso/src/pkg/store" ) type DataProvider interface { @@ -20,6 +22,10 @@ type DataProvider interface { inject.ToServiceHandler VerifyAccess(ctx context.Context) error + DeserializeMetadataFiles( + ctx context.Context, + colls []data.RestoreCollection, + ) ([]store.MetadataFile, error) } type DataProviderConnector interface { @@ -30,6 +36,12 @@ type DataProviderConnector interface { ctx context.Context, pst path.ServiceType, ) error + // DataProvider retrieves the data provider. + DataProvider() DataProvider +} + +func (r *repository) DataProvider() DataProvider { + return r.Provider } func (r *repository) ConnectDataProvider( diff --git a/src/pkg/repository/debug.go b/src/pkg/repository/debug.go new file mode 100644 index 000000000..d255a1d67 --- /dev/null +++ b/src/pkg/repository/debug.go @@ -0,0 +1,86 @@ +package repository + +import ( + "context" + + "github.com/alcionai/clues" + "github.com/kopia/kopia/repo/manifest" + + "github.com/alcionai/corso/src/pkg/backup/identity" + "github.com/alcionai/corso/src/pkg/fault" + "github.com/alcionai/corso/src/pkg/selectors" + "github.com/alcionai/corso/src/pkg/store" +) + +type base struct { + snapshotID manifest.ID + reasons []identity.Reasoner +} + +func (b base) GetReasons() []identity.Reasoner { + return b.reasons +} + +func (b base) GetSnapshotID() manifest.ID { + return b.snapshotID +} + +// should probably turn into a NewDebug interface like we're +// doing with the other interfaces +type Debugger interface { + GetBackupMetadata( + ctx context.Context, + sel selectors.Selector, + backupID string, + errs *fault.Bus, + ) ([]store.MetadataFile, error) +} + +// Backups lists backups by ID. Returns as many backups as possible with +// errors for the backups it was unable to retrieve. 
+func (r repository) GetBackupMetadata( + ctx context.Context, + sel selectors.Selector, + backupID string, + errs *fault.Bus, +) ([]store.MetadataFile, error) { + bup, err := r.Backup(ctx, backupID) + if err != nil { + return nil, clues.Wrap(err, "looking up backup") + } + + sel = sel.SetDiscreteOwnerIDName(bup.ResourceOwnerID, bup.ResourceOwnerName) + + reasons, err := sel.Reasons(r.Account.ID(), false) + if err != nil { + return nil, clues.Wrap(err, "constructing lookup parameters") + } + + var ( + rp = r.dataLayer + dp = r.DataProvider() + ) + + paths, err := dp.GetMetadataPaths( + ctx, + rp, + &base{manifest.ID(bup.SnapshotID), reasons}, + fault.New(true)) + if err != nil { + return nil, clues.Wrap(err, "retrieving metadata files") + } + + colls, err := rp.ProduceRestoreCollections( + ctx, + bup.SnapshotID, + paths, + nil, + fault.New(true)) + if err != nil { + return nil, clues.Wrap(err, "looking up metadata file content") + } + + files, err := dp.DeserializeMetadataFiles(ctx, colls) + + return files, clues.Wrap(err, "deserializing metadata file content").OrNil() +} diff --git a/src/pkg/repository/repository.go b/src/pkg/repository/repository.go index 522cd2ca6..7a1cad4e9 100644 --- a/src/pkg/repository/repository.go +++ b/src/pkg/repository/repository.go @@ -37,6 +37,7 @@ type Repositoryer interface { BackupGetter Restorer Exporter + Debugger DataProviderConnector Initialize( diff --git a/src/pkg/store/metadata.go b/src/pkg/store/metadata.go new file mode 100644 index 000000000..455e087a2 --- /dev/null +++ b/src/pkg/store/metadata.go @@ -0,0 +1,12 @@ +package store + +// MetadataFile holds a standard representation of a +// metadata file. Primarily used for debugging purposes. +type MetadataFile struct { + Name string `json:"name"` + Path string `json:"path"` + Data any `json:"data"` +} + +// TODO: printable support +// var _ print.Printable = &MetadataFile{}
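The debug read path added above ends in a []store.MetadataFile, where Data is an untyped payload (previous-path maps, delta URL maps, and so on) and printable support is still a TODO. A JSON dump is therefore the simplest way to inspect the result today. The helper below is a minimal sketch under that assumption and is not part of the patch; the sample Name values and data are placeholders standing in for the bupMD file-name constants and the real maps the drive implementation packages.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/alcionai/corso/src/pkg/store"
)

// renderMetadataFiles marshals the files to indented JSON for inspection.
// It relies only on the MetadataFile struct and its JSON tags as defined
// in store/metadata.go.
func renderMetadataFiles(files []store.MetadataFile) (string, error) {
	bs, err := json.MarshalIndent(files, "", "  ")
	if err != nil {
		return "", err
	}

	return string(bs), nil
}

func main() {
	// Placeholder sample mirroring how the drive implementation packages
	// its metadata: one entry per kind, with Data holding deserialized maps.
	files := []store.MetadataFile{
		{
			Name: "previous-paths", // placeholder; the patch uses bupMD.PreviousPathFileName
			Data: map[string]map[string]string{
				"drive-id": {"root-id": "/some/full/path"},
			},
		},
		{
			Name: "delta-urls", // placeholder; the patch uses bupMD.DeltaURLsFileName
			Data: map[string]string{"drive-id": "delta-url"},
		},
	}

	out, err := renderMetadataFiles(files)
	if err != nil {
		fmt.Println("render error:", err)
		return
	}

	fmt.Println(out)
}

Keeping Data as an `any` lets a single struct cover every service's metadata shape, at the cost of pushing formatting decisions to the caller, which is presumably why the struct already carries JSON tags.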
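The collections_test.go refactor above replaces literal IDs, names, and paths with small fixture helpers (id, idx, name, namex, parent, fullPath) and an item-type flag (isFile, isFolder) in place of the old bool triple. Those helpers are defined elsewhere in the test file and are not shown in this hunk; the sketch below is only a hypothetical reconstruction of their shape, offered to make the call sites above easier to read, and the real definitions may differ in naming scheme and path construction.

package drive

import (
	"fmt"
	"strings"
)

// itemType stands in for the old (isFile, isFolder, isPackage) bool triple
// in the driveItem/delItem helper signatures. Hypothetical reconstruction.
type itemType int

const (
	isFile itemType = iota + 1
	isFolder
	isPackage
)

// Fixture vocabulary assumed by this sketch; values are placeholders.
const (
	drive  = "drive"
	folder = "folder"
	file   = "file"
	user   = "user"
	rootID = "root"
)

// id returns a deterministic identifier for a fixture kind, e.g. id(file).
func id(kind string) string { return kind + "-id" }

// idx returns an identifier with a numeric suffix, e.g. idx(file, 2).
func idx(kind string, n int) string { return fmt.Sprintf("%s-id-%d", kind, n) }

// name returns a deterministic display name, e.g. name(folder).
func name(kind string) string { return kind + "-name" }

// namex returns a display name with a numeric suffix, e.g. namex(folder, 2).
func namex(kind string, n int) string { return fmt.Sprintf("%s-name-%d", kind, n) }

// parent builds an item's parentReference path inside drive n, optionally
// nested under the given folder names.
func parent(n int, folders ...string) string {
	base := fmt.Sprintf("/drives/%s/root:", idx(drive, n)) // placeholder drive base path
	return strings.Join(append([]string{base}, folders...), "/")
}

// fullPath builds the expected collection path for drive n, optionally
// nested under the given folder names.
func fullPath(n int, folders ...string) string {
	base := fmt.Sprintf("tenant/onedrive/%s/files/drives/%s/root:", user, idx(drive, n)) // placeholder
	return strings.Join(append([]string{base}, folders...), "/")
}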