Split services and collections for OneDrive & SharePoint (#4002)

Only code movement, no code changes.

Moved services to `/internal/m365/services/{onedrive,sharepoint,exchange}`
Moved collections to `/internal/m365/collection/{drive,site}`

---

#### Does this PR need a docs update or release note?

- [ ]  Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x]  No

#### Type of change

<!--- Please check the type of change your PR introduces: --->
- [ ] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [x] 🧹 Tech Debt/Cleanup

#### Issue(s)

<!-- Can reference multiple issues. Use one of the following "magic words" - "closes, fixes" to auto-close the Github issue. -->
* #<issue>

#### Test Plan

<!-- How will this be tested prior to merging.-->
- [ ] 💪 Manual
- [ ]  Unit test
- [ ] 💚 E2E
This commit is contained in:
Abin Simon 2023-08-10 11:38:34 +05:30 committed by GitHub
parent bffaebd351
commit 8c939c0f0d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
134 changed files with 1132 additions and 918 deletions

View File

@ -17,9 +17,9 @@ import (
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365"
exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
odStub "github.com/alcionai/corso/src/internal/m365/onedrive/stub"
"github.com/alcionai/corso/src/internal/m365/resource"
exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
odStub "github.com/alcionai/corso/src/internal/m365/service/onedrive/stub"
m365Stub "github.com/alcionai/corso/src/internal/m365/stub"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/tester"

View File

@ -5,8 +5,8 @@ import (
. "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/m365/resource"
exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/fault"

View File

@ -14,7 +14,7 @@ import (
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/data"
exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"

View File

@ -13,7 +13,7 @@ import (
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"

View File

@ -21,7 +21,7 @@ import (
pmMock "github.com/alcionai/corso/src/internal/common/prefixmatcher/mock"
"github.com/alcionai/corso/src/internal/data"
exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/backup/identity"

View File

@ -25,8 +25,8 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/data/mock"
exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/backup/identity"

View File

@ -8,10 +8,10 @@ import (
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/m365/exchange"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/onedrive"
"github.com/alcionai/corso/src/internal/m365/sharepoint"
"github.com/alcionai/corso/src/internal/m365/service/exchange"
"github.com/alcionai/corso/src/internal/m365/service/onedrive"
"github.com/alcionai/corso/src/internal/m365/service/sharepoint"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/filters"

View File

@ -11,9 +11,9 @@ import (
"github.com/stretchr/testify/suite"
inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
"github.com/alcionai/corso/src/internal/m365/exchange"
"github.com/alcionai/corso/src/internal/m365/resource"
"github.com/alcionai/corso/src/internal/m365/sharepoint"
"github.com/alcionai/corso/src/internal/m365/service/exchange"
"github.com/alcionai/corso/src/internal/m365/service/sharepoint"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"

View File

@ -1,5 +1,5 @@
// Package onedrive provides support for retrieving M365 OneDrive objects
package onedrive
// Package drive provides support for retrieving M365 Drive objects
package drive
import (
"context"
@ -15,8 +15,8 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/backup/details"

View File

@ -1,4 +1,4 @@
package onedrive
package drive
import (
"bytes"
@ -20,11 +20,11 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
metaTD "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata/testdata"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
metaTD "github.com/alcionai/corso/src/internal/m365/onedrive/metadata/testdata"
"github.com/alcionai/corso/src/internal/m365/onedrive/mock"
odTD "github.com/alcionai/corso/src/internal/m365/onedrive/testdata"
"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
odTD "github.com/alcionai/corso/src/internal/m365/service/onedrive/testdata"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details"

View File

@ -1,4 +1,4 @@
package onedrive
package drive
import (
"context"
@ -14,9 +14,9 @@ import (
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/m365/graph"
odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts"
"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/control"

View File

@ -1,4 +1,4 @@
package onedrive
package drive
import (
"context"
@ -17,10 +17,10 @@ import (
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
pmMock "github.com/alcionai/corso/src/internal/common/prefixmatcher/mock"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/m365/graph"
odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts"
"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
"github.com/alcionai/corso/src/internal/m365/onedrive/mock"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/control"

View File

@ -1,4 +1,4 @@
package onedrive
package drive
import (
"github.com/microsoftgraph/msgraph-sdk-go/models"

View File

@ -1,4 +1,4 @@
package onedrive
package drive
import (
"context"

View File

@ -1,4 +1,4 @@
package onedrive
package drive
import (
"testing"

View File

@ -1,4 +1,4 @@
package onedrive
package drive
import (
"bytes"
@ -13,8 +13,8 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/readers"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)

View File

@ -1,4 +1,4 @@
package onedrive
package drive
import (
"context"

View File

@ -1,4 +1,4 @@
package onedrive
package drive
import (
"context"

View File

@ -1,4 +1,4 @@
package onedrive
package drive
import (
"context"
@ -10,7 +10,7 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr"
odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/path"
@ -29,6 +29,10 @@ type itemBackupHandler struct {
scope selectors.OneDriveScope
}
// NewItemBackupHandler produces an itemBackupHandler for the given drives
// API client and OneDrive selector scope.
func NewItemBackupHandler(ac api.Drives, scope selectors.OneDriveScope) *itemBackupHandler {
	h := itemBackupHandler{ac, scope}

	return &h
}
func (h itemBackupHandler) Get(
ctx context.Context,
url string,

View File

@ -1,4 +1,4 @@
package onedrive
package drive
import (
"testing"

View File

@ -1,4 +1,4 @@
package onedrive
package drive
import (
"bytes"

View File

@ -1,4 +1,4 @@
package sharepoint
package drive
import (
"context"
@ -9,8 +9,7 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/onedrive"
odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/path"
@ -18,13 +17,17 @@ import (
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
var _ onedrive.BackupHandler = &libraryBackupHandler{}
var _ BackupHandler = &libraryBackupHandler{}
// libraryBackupHandler backs up SharePoint document libraries through the
// drives API, filtered by a SharePoint selector scope.  It satisfies the
// BackupHandler interface (see the compile-time assertion above).
type libraryBackupHandler struct {
	ac    api.Drives
	scope selectors.SharePointScope
}
// NewLibraryBackupHandler produces a libraryBackupHandler for the given
// drives API client and SharePoint selector scope.
func NewLibraryBackupHandler(ac api.Drives, scope selectors.SharePointScope) libraryBackupHandler {
	h := libraryBackupHandler{
		ac:    ac,
		scope: scope,
	}

	return h
}
func (h libraryBackupHandler) Get(
ctx context.Context,
url string,
@ -78,7 +81,7 @@ func (h libraryBackupHandler) AugmentItemInfo(
size int64,
parentPath *path.Builder,
) details.ItemInfo {
return augmentItemInfo(dii, item, size, parentPath)
return augmentLibraryItemInfo(dii, item, size, parentPath)
}
// constructWebURL is a helper function for recreating the webURL
@ -154,12 +157,16 @@ func (h libraryBackupHandler) IncludesDir(dir string) bool {
// Restore
// ---------------------------------------------------------------------------
var _ onedrive.RestoreHandler = &libraryRestoreHandler{}
var _ RestoreHandler = &libraryRestoreHandler{}
// libraryRestoreHandler restores SharePoint document library content via the
// full m365 API client.  It satisfies the RestoreHandler interface (see the
// compile-time assertion above).
type libraryRestoreHandler struct {
	ac api.Client
}
// NewLibraryRestoreHandler produces a libraryRestoreHandler backed by the
// given m365 API client.
func NewLibraryRestoreHandler(ac api.Client) libraryRestoreHandler {
	h := libraryRestoreHandler{ac: ac}

	return h
}
func (h libraryRestoreHandler) PostDrive(
ctx context.Context,
siteID, driveName string,
@ -167,10 +174,6 @@ func (h libraryRestoreHandler) PostDrive(
return h.ac.Lists().PostDrive(ctx, siteID, driveName)
}
// NewRestoreHandler produces a *libraryRestoreHandler backed by the given
// m365 API client.
func NewRestoreHandler(ac api.Client) *libraryRestoreHandler {
	h := libraryRestoreHandler{ac}

	return &h
}
func (h libraryRestoreHandler) NewDrivePager(
resourceOwner string,
fields []string,
@ -184,7 +187,7 @@ func (h libraryRestoreHandler) AugmentItemInfo(
size int64,
parentPath *path.Builder,
) details.ItemInfo {
return augmentItemInfo(dii, item, size, parentPath)
return augmentLibraryItemInfo(dii, item, size, parentPath)
}
func (h libraryRestoreHandler) DeleteItem(
@ -263,7 +266,7 @@ func (h libraryRestoreHandler) GetRootFolder(
// Common
// ---------------------------------------------------------------------------
func augmentItemInfo(
func augmentLibraryItemInfo(
dii details.ItemInfo,
item models.DriveItemable,
size int64,

View File

@ -1,4 +1,4 @@
package sharepoint
package drive
import (
"testing"

View File

@ -6,7 +6,7 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/assert"
"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
)
func AssertMetadataEqual(t *testing.T, expect, got metadata.Metadata) {

View File

@ -1,4 +1,4 @@
package onedrive
package drive
import (
"context"
@ -11,7 +11,7 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
@ -76,7 +76,7 @@ func getCollectionMetadata(
metaName = metadata.DirMetaFileSuffix
}
meta, err := fetchAndReadMetadata(ctx, dc, metaName)
meta, err := FetchAndReadMetadata(ctx, dc, metaName)
if err != nil {
return metadata.Metadata{}, clues.Wrap(err, "collection metadata")
}

View File

@ -1,4 +1,4 @@
package onedrive
package drive
import (
"strings"
@ -9,8 +9,8 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts"
"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path"
)

View File

@ -1,4 +1,4 @@
package onedrive
package drive
import (
"context"
@ -6,7 +6,6 @@ import (
"fmt"
"io"
"runtime/trace"
"sort"
"strings"
"sync"
"sync/atomic"
@ -15,12 +14,11 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/internal/operations/inject"
@ -39,81 +37,6 @@ const (
maxUploadRetries = 3
)
// ConsumeRestoreCollections will restore the specified data collections into
// OneDrive.  Collections are restored parent-directory-first (a requirement
// for applying folder permissions); per-collection failures are accumulated
// as recoverable errors on errs, while a context cancellation aborts the
// whole run.  Returns the aggregated operation status for the restore.
func ConsumeRestoreCollections(
	ctx context.Context,
	rh RestoreHandler,
	rcc inject.RestoreConsumerConfig,
	backupDriveIDNames idname.Cacher,
	dcs []data.RestoreCollection,
	deets *details.Builder,
	errs *fault.Bus,
	ctr *count.Bus,
) (*support.ControllerOperationStatus, error) {
	var (
		restoreMetrics support.CollectionMetrics
		el             = errs.Local()
		caches         = NewRestoreCaches(backupDriveIDNames)
		// used when a drive's original name can't be resolved from the caches
		// — presumably; confirm against RestoreCollection's handling.
		fallbackDriveName = rcc.RestoreConfig.Location
	)

	ctx = clues.Add(ctx, "backup_version", rcc.BackupVersion)

	err := caches.Populate(ctx, rh, rcc.ProtectedResource.ID())
	if err != nil {
		return nil, clues.Wrap(err, "initializing restore caches")
	}

	// Reorder collections so that the parents directories are created
	// before the child directories; a requirement for permissions.
	data.SortRestoreCollections(dcs)

	// Iterate through the data collections and restore the contents of each
	for _, dc := range dcs {
		if el.Failure() != nil {
			break
		}

		var (
			err     error
			metrics support.CollectionMetrics
			ictx    = clues.Add(
				ctx,
				"category", dc.FullPath().Category(),
				"full_path", dc.FullPath())
		)

		metrics, err = RestoreCollection(
			ictx,
			rh,
			rcc,
			dc,
			caches,
			deets,
			fallbackDriveName,
			errs,
			ctr.Local())
		if err != nil {
			el.AddRecoverable(ctx, err)
		}

		restoreMetrics = support.CombineMetrics(restoreMetrics, metrics)

		// a cancelled context aborts the remaining collections, not just
		// this one.
		if errors.Is(err, context.Canceled) {
			break
		}
	}

	status := support.CreateStatus(
		ctx,
		support.Restore,
		len(dcs),
		restoreMetrics,
		rcc.RestoreConfig.Location)

	return status, el.Failure()
}
// RestoreCollection handles restoration of an individual collection.
// returns:
// - the collection's item and byte count metrics
@ -518,7 +441,7 @@ func restoreV1File(
// Fetch item permissions from the collection and restore them.
metaName := trimmedName + metadata.MetaFileSuffix
meta, err := fetchAndReadMetadata(ctx, fibn, metaName)
meta, err := FetchAndReadMetadata(ctx, fibn, metaName)
if err != nil {
return details.ItemInfo{}, clues.Wrap(err, "restoring file")
}
@ -556,7 +479,7 @@ func restoreV6File(
// Get metadata file so we can determine the file name.
metaName := trimmedName + metadata.MetaFileSuffix
meta, err := fetchAndReadMetadata(ctx, fibn, metaName)
meta, err := FetchAndReadMetadata(ctx, fibn, metaName)
if err != nil {
return details.ItemInfo{}, clues.Wrap(err, "restoring file")
}
@ -932,7 +855,7 @@ func restoreFile(
return ptr.Val(newItem.GetId()), dii, nil
}
func fetchAndReadMetadata(
func FetchAndReadMetadata(
ctx context.Context,
fibn data.FetchItemByNamer,
metaName string,
@ -974,132 +897,6 @@ func getMetadata(metar io.ReadCloser) (metadata.Metadata, error) {
return meta, nil
}
// AugmentRestorePaths augments the given restore paths to add the extra
// (meta) files needed for restore, and orders the result so that metadata
// files can be restored in the correct sequence.
//
// Only accepts StoragePath/RestorePath pairs where the RestorePath is
// at least as long as the StoragePath. If the RestorePath is longer than the
// StoragePath then the first few (closest to the root) directories will use
// default permissions during restore.
func AugmentRestorePaths(
	backupVersion int,
	paths []path.RestorePaths,
) ([]path.RestorePaths, error) {
	// Keyed by each value's StoragePath.String() which corresponds to the RepoRef
	// of the directory.
	colPaths := map[string]path.RestorePaths{}

	// Walk each path upward, recording every ancestor directory (inside the
	// drive) so a collection exists for all of them.
	for _, p := range paths {
		first := true

		for {
			sp, err := p.StoragePath.Dir()
			if err != nil {
				return nil, err
			}

			drivePath, err := path.ToDrivePath(sp)
			if err != nil {
				return nil, err
			}

			// reached the drive root; stop climbing.
			if len(drivePath.Folders) == 0 {
				break
			}

			if len(p.RestorePath.Elements()) < len(sp.Elements()) {
				return nil, clues.New("restorePath shorter than storagePath").
					With("restore_path", p.RestorePath, "storage_path", sp)
			}

			rp := p.RestorePath

			// Make sure the RestorePath always points to the level of the current
			// collection. We need to track if it's the first iteration because the
			// RestorePath starts out at the collection level to begin with.
			if !first {
				rp, err = p.RestorePath.Dir()
				if err != nil {
					return nil, err
				}
			}

			paths := path.RestorePaths{
				StoragePath: sp,
				RestorePath: rp,
			}

			colPaths[sp.String()] = paths
			p = paths
			first = false
		}
	}

	// Adds dirmeta files as we need to make sure collections for all
	// directories involved are created and not just the final one. No
	// need to add `.meta` files (metadata for files) as they will
	// anyways be looked up automatically.
	// TODO: Stop populating .dirmeta for newer versions once we can
	// get files from parent directory via `Fetch` in a collection.
	// As of now look up metadata for parent directories from a
	// collection.
	for _, p := range colPaths {
		el := p.StoragePath.Elements()

		// the .dirmeta file's name and location differ by backup version.
		if backupVersion >= version.OneDrive6NameInMeta {
			mPath, err := p.StoragePath.AppendItem(".dirmeta")
			if err != nil {
				return nil, err
			}

			paths = append(
				paths,
				path.RestorePaths{StoragePath: mPath, RestorePath: p.RestorePath})
		} else if backupVersion >= version.OneDrive4DirIncludesPermissions {
			mPath, err := p.StoragePath.AppendItem(el.Last() + ".dirmeta")
			if err != nil {
				return nil, err
			}

			paths = append(
				paths,
				path.RestorePaths{StoragePath: mPath, RestorePath: p.RestorePath})
		} else if backupVersion >= version.OneDrive1DataAndMetaFiles {
			pp, err := p.StoragePath.Dir()
			if err != nil {
				return nil, err
			}

			mPath, err := pp.AppendItem(el.Last() + ".dirmeta")
			if err != nil {
				return nil, err
			}

			prp, err := p.RestorePath.Dir()
			if err != nil {
				return nil, err
			}

			paths = append(
				paths,
				path.RestorePaths{StoragePath: mPath, RestorePath: prp})
		}
	}

	// This sort is done primarily to order `.meta` files after `.data`
	// files. This is only a necessity for OneDrive as we are storing
	// metadata for files/folders in separate meta files and we need the
	// data to be restored before we can restore the metadata.
	//
	// This sorting assumes stuff in the same StoragePath directory end up in the
	// same RestorePath collection.
	sort.Slice(paths, func(i, j int) bool {
		return paths[i].StoragePath.String() < paths[j].StoragePath.String()
	})

	return paths, nil
}
type PostDriveAndGetRootFolderer interface {
PostDriver
GetRootFolderer

View File

@ -1,4 +1,4 @@
package onedrive
package drive
import (
"context"
@ -10,8 +10,8 @@ import (
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)

View File

@ -1,4 +1,4 @@
package onedrive
package drive
import (
"context"
@ -14,8 +14,8 @@ import (
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph"
odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts"
"github.com/alcionai/corso/src/internal/m365/onedrive/mock"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/version"
@ -34,301 +34,6 @@ func TestRestoreUnitSuite(t *testing.T) {
suite.Run(t, &RestoreUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestAugmentRestorePaths checks AugmentRestorePaths across backup versions:
// the expected .dirmeta entries are added (name/location varies by version)
// and the output ordering places directory metadata before the files in that
// directory.
func (suite *RestoreUnitSuite) TestAugmentRestorePaths() {
	// Adding a simple test here so that we can be sure that this
	// function gets updated whenever we add a new version.
	require.LessOrEqual(suite.T(), version.Backup, version.All8MigrateUserPNToID, "unsupported backup version")

	table := []struct {
		name    string
		version int
		input   []string
		output  []string
	}{
		{
			name:    "no change v0",
			version: 0,
			input: []string{
				"file.txt.data",
				"file.txt", // v0 does not have `.data`
			},
			output: []string{
				"file.txt", // ordering artifact of sorting
				"file.txt.data",
			},
		},
		{
			name:    "one folder v0",
			version: 0,
			input: []string{
				"folder/file.txt.data",
				"folder/file.txt",
			},
			output: []string{
				"folder/file.txt",
				"folder/file.txt.data",
			},
		},
		{
			name:    "no change v1",
			version: version.OneDrive1DataAndMetaFiles,
			input: []string{
				"file.txt.data",
			},
			output: []string{
				"file.txt.data",
			},
		},
		{
			name:    "one folder v1",
			version: version.OneDrive1DataAndMetaFiles,
			input: []string{
				"folder/file.txt.data",
			},
			output: []string{
				"folder.dirmeta",
				"folder/file.txt.data",
			},
		},
		{
			name:    "nested folders v1",
			version: version.OneDrive1DataAndMetaFiles,
			input: []string{
				"folder/file.txt.data",
				"folder/folder2/file.txt.data",
			},
			output: []string{
				"folder.dirmeta",
				"folder/file.txt.data",
				"folder/folder2.dirmeta",
				"folder/folder2/file.txt.data",
			},
		},
		{
			name:    "no change v4",
			version: version.OneDrive4DirIncludesPermissions,
			input: []string{
				"file.txt.data",
			},
			output: []string{
				"file.txt.data",
			},
		},
		{
			name:    "one folder v4",
			version: version.OneDrive4DirIncludesPermissions,
			input: []string{
				"folder/file.txt.data",
			},
			output: []string{
				"folder/file.txt.data",
				"folder/folder.dirmeta",
			},
		},
		{
			name:    "nested folders v4",
			version: version.OneDrive4DirIncludesPermissions,
			input: []string{
				"folder/file.txt.data",
				"folder/folder2/file.txt.data",
			},
			output: []string{
				"folder/file.txt.data",
				"folder/folder.dirmeta",
				"folder/folder2/file.txt.data",
				"folder/folder2/folder2.dirmeta",
			},
		},
		{
			name:    "no change v6",
			version: version.OneDrive6NameInMeta,
			input: []string{
				"file.txt.data",
			},
			output: []string{
				"file.txt.data",
			},
		},
		{
			name:    "one folder v6",
			version: version.OneDrive6NameInMeta,
			input: []string{
				"folder/file.txt.data",
			},
			output: []string{
				"folder/.dirmeta",
				"folder/file.txt.data",
			},
		},
		{
			name:    "nested folders v6",
			version: version.OneDrive6NameInMeta,
			input: []string{
				"folder/file.txt.data",
				"folder/folder2/file.txt.data",
			},
			output: []string{
				"folder/.dirmeta",
				"folder/file.txt.data",
				"folder/folder2/.dirmeta",
				"folder/folder2/file.txt.data",
			},
		},
	}

	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			_, flush := tester.NewContext(t)
			defer flush()

			// data-layer prefix shared by every path in the table.
			base := "id/onedrive/user/files/drives/driveID/root:/"

			// RestorePath for each pair is the parent dir of the StoragePath.
			inPaths := []path.RestorePaths{}
			for _, ps := range test.input {
				p, err := path.FromDataLayerPath(base+ps, true)
				require.NoError(t, err, "creating path", clues.ToCore(err))

				pd, err := p.Dir()
				require.NoError(t, err, "creating collection path", clues.ToCore(err))

				inPaths = append(
					inPaths,
					path.RestorePaths{StoragePath: p, RestorePath: pd})
			}

			outPaths := []path.RestorePaths{}
			for _, ps := range test.output {
				p, err := path.FromDataLayerPath(base+ps, true)
				require.NoError(t, err, "creating path", clues.ToCore(err))

				pd, err := p.Dir()
				require.NoError(t, err, "creating collection path", clues.ToCore(err))

				outPaths = append(
					outPaths,
					path.RestorePaths{StoragePath: p, RestorePath: pd})
			}

			actual, err := AugmentRestorePaths(test.version, inPaths)
			require.NoError(t, err, "augmenting paths", clues.ToCore(err))

			// Ordering of paths matter here as we need dirmeta files
			// to show up before file in dir
			assert.Equal(t, outPaths, actual, "augmented paths")
		})
	}
}
// TestAugmentRestorePaths_DifferentRestorePath tests that RestorePath
// substitution works properly. Since it's only possible for future backup
// versions to need restore path substitution (i.e. due to storing folders by
// ID instead of name) this is only tested against the most recent backup
// version at the moment.
func (suite *RestoreUnitSuite) TestAugmentRestorePaths_DifferentRestorePath() {
	// Adding a simple test here so that we can be sure that this
	// function gets updated whenever we add a new version.
	require.LessOrEqual(suite.T(), version.Backup, version.All8MigrateUserPNToID, "unsupported backup version")

	// pathPair joins a storage-side path with the (possibly different)
	// restore-side path it should map to.
	type pathPair struct {
		storage string
		restore string
	}

	table := []struct {
		name     string
		version  int
		input    []pathPair
		output   []pathPair
		errCheck assert.ErrorAssertionFunc
	}{
		{
			name:    "nested folders",
			version: version.Backup,
			input: []pathPair{
				{storage: "folder-id/file.txt.data", restore: "folder"},
				{storage: "folder-id/folder2-id/file.txt.data", restore: "folder/folder2"},
			},
			output: []pathPair{
				{storage: "folder-id/.dirmeta", restore: "folder"},
				{storage: "folder-id/file.txt.data", restore: "folder"},
				{storage: "folder-id/folder2-id/.dirmeta", restore: "folder/folder2"},
				{storage: "folder-id/folder2-id/file.txt.data", restore: "folder/folder2"},
			},
			errCheck: assert.NoError,
		},
		{
			name:    "restore path longer one folder",
			version: version.Backup,
			input: []pathPair{
				{storage: "folder-id/file.txt.data", restore: "corso_restore/folder"},
			},
			output: []pathPair{
				{storage: "folder-id/.dirmeta", restore: "corso_restore/folder"},
				{storage: "folder-id/file.txt.data", restore: "corso_restore/folder"},
			},
			errCheck: assert.NoError,
		},
		{
			// a RestorePath shorter than the StoragePath is invalid input.
			name:    "restore path shorter one folder",
			version: version.Backup,
			input: []pathPair{
				{storage: "folder-id/file.txt.data", restore: ""},
			},
			errCheck: assert.Error,
		},
	}

	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			_, flush := tester.NewContext(t)
			defer flush()

			// data-layer prefix shared by every path in the table.
			base := "id/onedrive/user/files/drives/driveID/root:/"

			inPaths := []path.RestorePaths{}
			for _, ps := range test.input {
				p, err := path.FromDataLayerPath(base+ps.storage, true)
				require.NoError(t, err, "creating path", clues.ToCore(err))

				r, err := path.FromDataLayerPath(base+ps.restore, false)
				require.NoError(t, err, "creating path", clues.ToCore(err))

				inPaths = append(
					inPaths,
					path.RestorePaths{StoragePath: p, RestorePath: r})
			}

			outPaths := []path.RestorePaths{}
			for _, ps := range test.output {
				p, err := path.FromDataLayerPath(base+ps.storage, true)
				require.NoError(t, err, "creating path", clues.ToCore(err))

				r, err := path.FromDataLayerPath(base+ps.restore, false)
				require.NoError(t, err, "creating path", clues.ToCore(err))

				outPaths = append(
					outPaths,
					path.RestorePaths{StoragePath: p, RestorePath: r})
			}

			actual, err := AugmentRestorePaths(test.version, inPaths)
			test.errCheck(t, err, "augmenting paths", clues.ToCore(err))

			if err != nil {
				return
			}

			// Ordering of paths matter here as we need dirmeta files
			// to show up before file in dir
			assert.Equal(t, outPaths, actual, "augmented paths")
		})
	}
}
func (suite *RestoreUnitSuite) TestRestoreItem_collisionHandling() {
const mndiID = "mndi-id"

View File

@ -1,4 +1,4 @@
package onedrive
package drive
import (
"context"

View File

@ -1,4 +1,4 @@
package onedrive
package drive
import (
"context"

View File

@ -1,4 +1,4 @@
package sharepoint
package site
import (
"context"
@ -7,11 +7,10 @@ import (
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/drive"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/onedrive"
betaAPI "github.com/alcionai/corso/src/internal/m365/sharepoint/api"
betaAPI "github.com/alcionai/corso/src/internal/m365/service/sharepoint/api"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault"
@ -21,173 +20,9 @@ import (
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// ProduceBackupCollections builds the BackupCollections for a single
// SharePoint site, covering whichever of the lists, libraries, and pages
// categories appear in the backup selector's scopes.  Returns the
// collections, a prefix matcher of excluded item sets, and whether
// previous-backup metadata may be used (lists and pages always report true
// since they don't consume previous metadata; see TODOs below).
func ProduceBackupCollections(
	ctx context.Context,
	bpc inject.BackupProducerConfig,
	ac api.Client,
	creds account.M365Config,
	su support.StatusUpdater,
	errs *fault.Bus,
) ([]data.BackupCollection, *prefixmatcher.StringSetMatcher, bool, error) {
	b, err := bpc.Selector.ToSharePointBackup()
	if err != nil {
		return nil, nil, false, clues.Wrap(err, "sharePointDataCollection: parsing selector")
	}

	var (
		el          = errs.Local()
		collections = []data.BackupCollection{}
		categories  = map[path.CategoryType]struct{}{}
		ssmb        = prefixmatcher.NewStringSetBuilder()

		canUsePreviousBackup bool
	)

	ctx = clues.Add(
		ctx,
		"site_id", clues.Hide(bpc.ProtectedResource.ID()),
		"site_url", clues.Hide(bpc.ProtectedResource.Name()))

	for _, scope := range b.Scopes() {
		if el.Failure() != nil {
			break
		}

		progressBar := observe.MessageWithCompletion(
			ctx,
			observe.Bulletf("%s", scope.Category().PathType()))
		// NOTE(review): deferred inside the loop, so progress channels close
		// only when the function returns, not per iteration — confirm intended.
		defer close(progressBar)

		var spcs []data.BackupCollection

		switch scope.Category().PathType() {
		case path.ListsCategory:
			spcs, err = collectLists(
				ctx,
				bpc,
				ac,
				creds.AzureTenantID,
				su,
				errs)
			if err != nil {
				el.AddRecoverable(ctx, err)
				continue
			}

			// Lists don't make use of previous metadata
			// TODO: Revisit when we add support of lists
			canUsePreviousBackup = true

		case path.LibrariesCategory:
			spcs, canUsePreviousBackup, err = collectLibraries(
				ctx,
				bpc,
				ac.Drives(),
				creds.AzureTenantID,
				ssmb,
				scope,
				su,
				errs)
			if err != nil {
				el.AddRecoverable(ctx, err)
				continue
			}

		case path.PagesCategory:
			spcs, err = collectPages(
				ctx,
				bpc,
				creds,
				ac,
				su,
				errs)
			if err != nil {
				el.AddRecoverable(ctx, err)
				continue
			}

			// Pages don't make use of previous metadata
			// TODO: Revisit when we add support of pages
			canUsePreviousBackup = true
		}

		collections = append(collections, spcs...)
		categories[scope.Category().PathType()] = struct{}{}
	}

	// append the base (metadata) collections only when real collections exist.
	if len(collections) > 0 {
		baseCols, err := graph.BaseCollections(
			ctx,
			collections,
			creds.AzureTenantID,
			bpc.ProtectedResource.ID(),
			path.SharePointService,
			categories,
			su,
			errs)
		if err != nil {
			return nil, nil, false, err
		}

		collections = append(collections, baseCols...)
	}

	return collections, ssmb.ToReader(), canUsePreviousBackup, el.Failure()
}
// collectLists constructs one BackupCollection per list found on the
// site.  Each collection is seeded with the list's ID as a job; item
// retrieval happens lazily when the collection is drained.
//
// Returns early on a prefetch failure; per-list path-construction errors
// are recorded as recoverable and the offending list is skipped.
func collectLists(
	ctx context.Context,
	bpc inject.BackupProducerConfig,
	ac api.Client,
	tenantID string,
	su support.StatusUpdater,
	errs *fault.Bus,
) ([]data.BackupCollection, error) {
	logger.Ctx(ctx).Debug("Creating SharePoint List Collections")

	var (
		el   = errs.Local()
		spcs = make([]data.BackupCollection, 0)
	)

	lists, err := preFetchLists(ctx, ac.Stable, bpc.ProtectedResource.ID())
	if err != nil {
		return nil, err
	}

	for _, tuple := range lists {
		if el.Failure() != nil {
			break
		}

		dir, err := path.Build(
			tenantID,
			bpc.ProtectedResource.ID(),
			path.SharePointService,
			path.ListsCategory,
			false,
			tuple.name)
		if err != nil {
			el.AddRecoverable(ctx, clues.Wrap(err, "creating list collection path").WithClues(ctx))
			// BUGFIX: dir is nil when path.Build fails; previously we fell
			// through and built a collection on the nil path anyway.  Skip
			// this list instead.
			continue
		}

		collection := NewCollection(
			dir,
			ac,
			List,
			su,
			bpc.Options)
		collection.AddJob(tuple.id)

		spcs = append(spcs, collection)
	}

	return spcs, el.Failure()
}
// collectLibraries constructs a onedrive Collections struct and Get()s
// CollectLibraries constructs a onedrive Collections struct and Get()s
// all the drives associated with the site.
func collectLibraries(
func CollectLibraries(
ctx context.Context,
bpc inject.BackupProducerConfig,
ad api.Drives,
@ -201,8 +36,8 @@ func collectLibraries(
var (
collections = []data.BackupCollection{}
colls = onedrive.NewCollections(
&libraryBackupHandler{ad, scope},
colls = drive.NewCollections(
drive.NewLibraryBackupHandler(ad, scope),
tenantID,
bpc.ProtectedResource.ID(),
su,
@ -217,9 +52,9 @@ func collectLibraries(
return append(collections, odcs...), canUsePreviousBackup, nil
}
// collectPages constructs a sharepoint Collections struct and Get()s the associated
// CollectPages constructs a sharepoint Collections struct and Get()s the associated
// M365 IDs for the associated Pages.
func collectPages(
func CollectPages(
ctx context.Context,
bpc inject.BackupProducerConfig,
creds account.M365Config,
@ -273,7 +108,57 @@ func collectPages(
Pages,
su,
bpc.Options)
collection.betaService = betaService
collection.SetBetaService(betaService)
collection.AddJob(tuple.ID)
spcs = append(spcs, collection)
}
return spcs, el.Failure()
}
func CollectLists(
ctx context.Context,
bpc inject.BackupProducerConfig,
ac api.Client,
tenantID string,
su support.StatusUpdater,
errs *fault.Bus,
) ([]data.BackupCollection, error) {
logger.Ctx(ctx).Debug("Creating SharePoint List Collections")
var (
el = errs.Local()
spcs = make([]data.BackupCollection, 0)
)
lists, err := PreFetchLists(ctx, ac.Stable, bpc.ProtectedResource.ID())
if err != nil {
return nil, err
}
for _, tuple := range lists {
if el.Failure() != nil {
break
}
dir, err := path.Build(
tenantID,
bpc.ProtectedResource.ID(),
path.SharePointService,
path.ListsCategory,
false,
tuple.Name)
if err != nil {
el.AddRecoverable(ctx, clues.Wrap(err, "creating list collection path").WithClues(ctx))
}
collection := NewCollection(
dir,
ac,
List,
su,
bpc.Options)
collection.AddJob(tuple.ID)
spcs = append(spcs, collection)

View File

@ -0,0 +1,73 @@
package site
import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/idname/mock"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// SharePointPagesSuite hosts integration tests for SharePoint page
// collection, which relies on the beta graph API.
type SharePointPagesSuite struct {
	tester.Suite
}
// TestSharePointPagesSuite wires SharePointPagesSuite into the go test
// runner as an integration suite; it is skipped unless M365 account
// credential env vars are present.
func TestSharePointPagesSuite(t *testing.T) {
	suite.Run(t, &SharePointPagesSuite{
		Suite: tester.NewIntegrationSuite(
			t,
			[][]string{tconfig.M365AcctCredEnvs}),
	})
}
// SetupSuite initializes the graph concurrency limiter once for the
// suite, capping parallel graph requests at 4.
func (suite *SharePointPagesSuite) SetupSuite() {
	ctx, flush := tester.NewContext(suite.T())
	defer flush()

	graph.InitializeConcurrencyLimiter(ctx, false, 4)
}
// TestCollectPages runs CollectPages against a live M365 test site.
// It only asserts that collection succeeds and produces a non-empty
// result; page contents are not validated here.
func (suite *SharePointPagesSuite) TestCollectPages() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	var (
		siteID = tconfig.M365SiteID(t)
		a      = tconfig.NewM365Account(t)
	)

	creds, err := a.M365Config()
	require.NoError(t, err, clues.ToCore(err))

	ac, err := api.NewClient(creds, control.DefaultOptions())
	require.NoError(t, err, clues.ToCore(err))

	// No prior backup: pages are always fully enumerated.
	bpc := inject.BackupProducerConfig{
		LastBackupVersion: version.NoBackup,
		Options:           control.DefaultOptions(),
		ProtectedResource: mock.NewProvider(siteID, siteID),
	}

	col, err := CollectPages(
		ctx,
		bpc,
		creds,
		ac,
		(&MockGraphService{}).UpdateStatus,
		fault.New(true))
	assert.NoError(t, err, clues.ToCore(err))
	assert.NotEmpty(t, col)
}

View File

@ -1,4 +1,4 @@
package sharepoint
package site
import (
"bytes"
@ -13,7 +13,7 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/graph"
betaAPI "github.com/alcionai/corso/src/internal/m365/sharepoint/api"
betaAPI "github.com/alcionai/corso/src/internal/m365/service/sharepoint/api"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/backup/details"
@ -81,6 +81,10 @@ func NewCollection(
return c
}
// SetBetaService sets the beta-API client used by the collection for
// requests not served by the stable graph SDK (e.g. site pages).
// Exported so that callers outside this package can configure the
// collection after construction.
func (sc *Collection) SetBetaService(betaService *betaAPI.BetaService) {
	sc.betaService = betaService
}
// AddJob appends additional objectID to job field
func (sc *Collection) AddJob(objID string) {
sc.jobs = append(sc.jobs, objID)
@ -254,7 +258,7 @@ func (sc *Collection) retrieveLists(
sc.data <- &Item{
id: ptr.Val(lst.GetId()),
data: io.NopCloser(bytes.NewReader(byteArray)),
info: listToSPInfo(lst, size),
info: ListToSPInfo(lst, size),
modTime: t,
}

View File

@ -1,4 +1,4 @@
package sharepoint
package site
import (
"bytes"
@ -14,8 +14,8 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
betaAPI "github.com/alcionai/corso/src/internal/m365/sharepoint/api"
spMock "github.com/alcionai/corso/src/internal/m365/sharepoint/mock"
betaAPI "github.com/alcionai/corso/src/internal/m365/service/sharepoint/api"
spMock "github.com/alcionai/corso/src/internal/m365/service/sharepoint/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
@ -118,7 +118,7 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
data := &Item{
id: name,
data: io.NopCloser(bytes.NewReader(byteArray)),
info: listToSPInfo(listing, int64(len(byteArray))),
info: ListToSPInfo(listing, int64(len(byteArray))),
}
return data
@ -207,7 +207,7 @@ func (suite *SharePointCollectionSuite) TestListCollection_Restore() {
listData := &Item{
id: testName,
data: io.NopCloser(bytes.NewReader(byteArray)),
info: listToSPInfo(listing, int64(len(byteArray))),
info: ListToSPInfo(listing, int64(len(byteArray))),
}
destName := testdata.DefaultRestoreConfig("").Location

View File

@ -1,6 +1,6 @@
// Code generated by "stringer -type=DataCategory"; DO NOT EDIT.
package sharepoint
package site
import "strconv"

View File

@ -1,4 +1,4 @@
package sharepoint
package site
import (
"testing"
@ -43,7 +43,7 @@ func (ms *MockGraphService) UpdateStatus(*support.ControllerOperationStatus) {
}
// ---------------------------------------------------------------------------
// Helper Functions
// Helper functions
// ---------------------------------------------------------------------------
func createTestService(t *testing.T, credentials account.M365Config) *graph.Service {

View File

@ -1,4 +1,4 @@
package sharepoint
package site
import (
"context"
@ -14,9 +14,9 @@ import (
"github.com/alcionai/corso/src/pkg/fault"
)
// listToSPInfo translates models.Listable metadata into searchable content
// ListToSPInfo translates models.Listable metadata into searchable content
// List Details: https://learn.microsoft.com/en-us/graph/api/resources/list?view=graph-rest-1.0
func listToSPInfo(lst models.Listable, size int64) *details.SharePointInfo {
func ListToSPInfo(lst models.Listable, size int64) *details.SharePointInfo {
var (
name = ptr.Val(lst.GetDisplayName())
webURL = ptr.Val(lst.GetWebUrl())
@ -34,9 +34,9 @@ func listToSPInfo(lst models.Listable, size int64) *details.SharePointInfo {
}
}
type listTuple struct {
name string
id string
type ListTuple struct {
ID string
Name string
}
func preFetchListOptions() *sites.ItemListsRequestBuilderGetRequestConfiguration {
@ -51,15 +51,15 @@ func preFetchListOptions() *sites.ItemListsRequestBuilderGetRequestConfiguration
return options
}
func preFetchLists(
func PreFetchLists(
ctx context.Context,
gs graph.Servicer,
siteID string,
) ([]listTuple, error) {
) ([]ListTuple, error) {
var (
builder = gs.Client().Sites().BySiteId(siteID).Lists()
options = preFetchListOptions()
listTuples = make([]listTuple, 0)
listTuples = make([]ListTuple, 0)
)
for {
@ -72,11 +72,11 @@ func preFetchLists(
var (
id = ptr.Val(entry.GetId())
name = ptr.Val(entry.GetDisplayName())
temp = listTuple{id: id, name: name}
temp = ListTuple{ID: id, Name: name}
)
if len(name) == 0 {
temp.name = id
temp.Name = id
}
listTuples = append(listTuples, temp)

View File

@ -1,4 +1,4 @@
package sharepoint
package site
import (
"testing"
@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
@ -28,6 +29,11 @@ func (suite *ListsUnitSuite) SetupSuite() {
require.NoError(t, err, clues.ToCore(err))
suite.creds = m365
ctx, flush := tester.NewContext(suite.T())
defer flush()
graph.InitializeConcurrencyLimiter(ctx, false, 4)
}
func TestListsUnitSuite(t *testing.T) {
@ -57,10 +63,10 @@ func (suite *ListsUnitSuite) TestLoadList() {
defer flush()
service := createTestService(t, suite.creds)
tuples, err := preFetchLists(ctx, service, "root")
tuples, err := PreFetchLists(ctx, service, "root")
require.NoError(t, err, clues.ToCore(err))
job := []string{tuples[0].id}
job := []string{tuples[0].ID}
lists, err := loadSiteLists(ctx, service, "root", job, fault.New(true))
assert.NoError(t, err, clues.ToCore(err))
assert.Greater(t, len(lists), 0)
@ -98,7 +104,7 @@ func (suite *ListsUnitSuite) TestSharePointInfo() {
t := suite.T()
list, expected := test.listAndDeets()
info := listToSPInfo(list, 10)
info := ListToSPInfo(list, 10)
assert.Equal(t, expected.ItemType, info.ItemType)
assert.Equal(t, expected.ItemName, info.ItemName)
assert.Equal(t, expected.WebURL, info.WebURL)

View File

@ -1,4 +1,4 @@
package sharepoint
package site
import (
"time"

View File

@ -1,4 +1,4 @@
package sharepoint
package site
import (
"testing"

View File

@ -1,4 +1,4 @@
package sharepoint
package site
import (
"context"
@ -15,9 +15,9 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/m365/collection/drive"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/onedrive"
betaAPI "github.com/alcionai/corso/src/internal/m365/sharepoint/api"
betaAPI "github.com/alcionai/corso/src/internal/m365/service/sharepoint/api"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/pkg/backup/details"
@ -41,9 +41,9 @@ func ConsumeRestoreCollections(
ctr *count.Bus,
) (*support.ControllerOperationStatus, error) {
var (
lrh = libraryRestoreHandler{ac}
lrh = drive.NewLibraryRestoreHandler(ac)
restoreMetrics support.CollectionMetrics
caches = onedrive.NewRestoreCaches(backupDriveIDNames)
caches = drive.NewRestoreCaches(backupDriveIDNames)
el = errs.Local()
)
@ -75,7 +75,7 @@ func ConsumeRestoreCollections(
switch dc.FullPath().Category() {
case path.LibrariesCategory:
metrics, err = onedrive.RestoreCollection(
metrics, err = drive.RestoreCollection(
ictx,
lrh,
rcc,
@ -200,7 +200,7 @@ func restoreListItem(
}
}
dii.SharePoint = listToSPInfo(restoredList, int64(len(byteArray)))
dii.SharePoint = ListToSPInfo(restoredList, int64(len(byteArray)))
return dii, nil
}

View File

@ -17,10 +17,10 @@ import (
inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
"github.com/alcionai/corso/src/internal/data"
dataMock "github.com/alcionai/corso/src/internal/data/mock"
exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/mock"
"github.com/alcionai/corso/src/internal/m365/resource"
exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
"github.com/alcionai/corso/src/internal/m365/stub"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject"

View File

@ -8,7 +8,7 @@ import (
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/onedrive"
"github.com/alcionai/corso/src/internal/m365/service/onedrive"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"

View File

@ -1,7 +1,7 @@
package metadata
import (
"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/pkg/path"
)

View File

@ -9,8 +9,8 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
odmetadata "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/m365/graph/metadata"
odmetadata "github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path"
)

View File

@ -17,10 +17,10 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/onedrive"
"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
odStub "github.com/alcionai/corso/src/internal/m365/onedrive/stub"
"github.com/alcionai/corso/src/internal/m365/collection/drive"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/m365/resource"
odStub "github.com/alcionai/corso/src/internal/m365/service/onedrive/stub"
m365Stub "github.com/alcionai/corso/src/internal/m365/stub"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/control"
@ -737,7 +737,7 @@ func compareDriveItem(
)
if !isMeta {
oitem := item.(*onedrive.Item)
oitem := item.(*drive.Item)
info := oitem.Info()
if info.OneDrive != nil {

View File

@ -14,11 +14,11 @@ import (
"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/m365/graph"
odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts"
"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
"github.com/alcionai/corso/src/internal/m365/onedrive/stub"
"github.com/alcionai/corso/src/internal/m365/resource"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/internal/m365/service/onedrive/stub"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"

View File

@ -7,10 +7,11 @@ import (
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/m365/exchange"
"github.com/alcionai/corso/src/internal/m365/collection/drive"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/onedrive"
"github.com/alcionai/corso/src/internal/m365/sharepoint"
"github.com/alcionai/corso/src/internal/m365/service/exchange"
"github.com/alcionai/corso/src/internal/m365/service/onedrive"
"github.com/alcionai/corso/src/internal/m365/service/sharepoint"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/pkg/backup/details"
@ -71,7 +72,7 @@ func (ctrl *Controller) ConsumeRestoreCollections(
case path.OneDriveService:
status, err = onedrive.ConsumeRestoreCollections(
ctx,
onedrive.NewRestoreHandler(ctrl.AC),
drive.NewRestoreHandler(ctrl.AC),
rcc,
ctrl.backupDriveIDNames,
dcs,

View File

@ -10,8 +10,8 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/control"

View File

@ -11,8 +11,8 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/control"

View File

@ -11,8 +11,8 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/control"

View File

@ -10,7 +10,7 @@ import (
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/ptr"
exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"

View File

@ -7,8 +7,8 @@ import (
"github.com/alcionai/clues"
"github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/internal/m365/exchange"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/service/exchange"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"

View File

@ -10,7 +10,7 @@ import (
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/ptr"
exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)

View File

@ -7,6 +7,7 @@ import (
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/drive"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject"
@ -47,8 +48,8 @@ func ProduceBackupCollections(
logger.Ctx(ctx).Debug("creating OneDrive collections")
nc := NewCollections(
&itemBackupHandler{ac.Drives(), scope},
nc := drive.NewCollections(
drive.NewItemBackupHandler(ac.Drives(), scope),
tenant,
bpc.ProtectedResource.ID(),
su,

View File

@ -7,7 +7,8 @@ import (
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
"github.com/alcionai/corso/src/internal/m365/collection/drive"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
@ -120,7 +121,7 @@ func getItemName(
trimmedName := strings.TrimSuffix(id, metadata.DataFileSuffix)
metaName := trimmedName + metadata.MetaFileSuffix
meta, err := fetchAndReadMetadata(ctx, fin, metaName)
meta, err := drive.FetchAndReadMetadata(ctx, fin, metaName)
if err != nil {
return "", clues.Wrap(err, "getting metadata").WithClues(ctx)
}

View File

@ -10,8 +10,8 @@ import (
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/data"
odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts"
"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/control"

View File

@ -8,7 +8,7 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/drives"
"github.com/microsoftgraph/msgraph-sdk-go/models"
odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/path"

View File

@ -0,0 +1,221 @@
package onedrive
import (
"context"
"sort"
"github.com/alcionai/clues"
"github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/drive"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
)
// ConsumeRestoreCollections restores the given data collections into
// OneDrive for the protected resource identified in rcc.
//
// Collections are sorted so that parent directories restore before their
// children (required for permission propagation).  Per-collection errors
// are recorded as recoverable and restore continues, except on context
// cancellation, which stops the loop.  Returns aggregate restore metrics
// plus any recoverable failure recorded along the way.
func ConsumeRestoreCollections(
	ctx context.Context,
	rh drive.RestoreHandler,
	rcc inject.RestoreConsumerConfig,
	backupDriveIDNames idname.Cacher,
	dcs []data.RestoreCollection,
	deets *details.Builder,
	errs *fault.Bus,
	ctr *count.Bus,
) (*support.ControllerOperationStatus, error) {
	var (
		restoreMetrics support.CollectionMetrics
		el             = errs.Local()
		caches         = drive.NewRestoreCaches(backupDriveIDNames)
		// Used when a collection's original drive no longer exists.
		fallbackDriveName = rcc.RestoreConfig.Location
	)

	ctx = clues.Add(ctx, "backup_version", rcc.BackupVersion)

	err := caches.Populate(ctx, rh, rcc.ProtectedResource.ID())
	if err != nil {
		return nil, clues.Wrap(err, "initializing restore caches")
	}

	// Reorder collections so that the parents directories are created
	// before the child directories; a requirement for permissions.
	data.SortRestoreCollections(dcs)

	// Iterate through the data collections and restore the contents of each
	for _, dc := range dcs {
		if el.Failure() != nil {
			break
		}

		var (
			err     error
			metrics support.CollectionMetrics
			ictx    = clues.Add(
				ctx,
				"category", dc.FullPath().Category(),
				"full_path", dc.FullPath())
		)

		metrics, err = drive.RestoreCollection(
			ictx,
			rh,
			rcc,
			dc,
			caches,
			deets,
			fallbackDriveName,
			errs,
			ctr.Local())
		if err != nil {
			// BUGFIX: record the error with ictx (not ctx) so that the
			// per-collection category and full_path annotations are
			// attached to the reported failure.
			el.AddRecoverable(ictx, err)
		}

		restoreMetrics = support.CombineMetrics(restoreMetrics, metrics)

		if errors.Is(err, context.Canceled) {
			break
		}
	}

	status := support.CreateStatus(
		ctx,
		support.Restore,
		len(dcs),
		restoreMetrics,
		rcc.RestoreConfig.Location)

	return status, el.Failure()
}
// AugmentRestorePaths adds the extra files (.dirmeta) needed for restore
// and applies any required ordering to the paths.
//
// Only accepts StoragePath/RestorePath pairs where the RestorePath is
// at least as long as the StoragePath. If the RestorePath is longer than the
// StoragePath then the first few (closest to the root) directories will use
// default permissions during restore.
func AugmentRestorePaths(
	backupVersion int,
	paths []path.RestorePaths,
) ([]path.RestorePaths, error) {
	// Keyed by each value's StoragePath.String() which corresponds to the RepoRef
	// of the directory.
	colPaths := map[string]path.RestorePaths{}

	for _, p := range paths {
		first := true

		// Walk upward from each item toward the drive root, registering a
		// collection entry for every ancestor directory on the way.  Loop
		// exits when Dir() reaches the drive root (no folders left).
		for {
			sp, err := p.StoragePath.Dir()
			if err != nil {
				return nil, err
			}

			drivePath, err := path.ToDrivePath(sp)
			if err != nil {
				return nil, err
			}

			if len(drivePath.Folders) == 0 {
				break
			}

			if len(p.RestorePath.Elements()) < len(sp.Elements()) {
				return nil, clues.New("restorePath shorter than storagePath").
					With("restore_path", p.RestorePath, "storage_path", sp)
			}

			rp := p.RestorePath

			// Make sure the RestorePath always points to the level of the current
			// collection. We need to track if it's the first iteration because the
			// RestorePath starts out at the collection level to begin with.
			if !first {
				rp, err = p.RestorePath.Dir()
				if err != nil {
					return nil, err
				}
			}

			paths := path.RestorePaths{
				StoragePath: sp,
				RestorePath: rp,
			}

			colPaths[sp.String()] = paths
			p = paths
			first = false
		}
	}

	// Adds dirmeta files as we need to make sure collections for all
	// directories involved are created and not just the final one. No
	// need to add `.meta` files (metadata for files) as they will
	// anyways be looked up automatically.
	// TODO: Stop populating .dirmeta for newer versions once we can
	// get files from parent directory via `Fetch` in a collection.
	// As of now look up metadata for parent directories from a
	// collection.
	for _, p := range colPaths {
		el := p.StoragePath.Elements()

		if backupVersion >= version.OneDrive6NameInMeta {
			// v6+: dirmeta lives inside the directory under a fixed name.
			mPath, err := p.StoragePath.AppendItem(".dirmeta")
			if err != nil {
				return nil, err
			}

			paths = append(
				paths,
				path.RestorePaths{StoragePath: mPath, RestorePath: p.RestorePath})
		} else if backupVersion >= version.OneDrive4DirIncludesPermissions {
			// v4-v5: dirmeta lives inside the directory, named after it.
			mPath, err := p.StoragePath.AppendItem(el.Last() + ".dirmeta")
			if err != nil {
				return nil, err
			}

			paths = append(
				paths,
				path.RestorePaths{StoragePath: mPath, RestorePath: p.RestorePath})
		} else if backupVersion >= version.OneDrive1DataAndMetaFiles {
			// v1-v3: dirmeta lives in the parent directory, named after the
			// directory; both paths step up one level accordingly.
			pp, err := p.StoragePath.Dir()
			if err != nil {
				return nil, err
			}

			mPath, err := pp.AppendItem(el.Last() + ".dirmeta")
			if err != nil {
				return nil, err
			}

			prp, err := p.RestorePath.Dir()
			if err != nil {
				return nil, err
			}

			paths = append(
				paths,
				path.RestorePaths{StoragePath: mPath, RestorePath: prp})
		}
	}

	// This sort is done primarily to order `.meta` files after `.data`
	// files. This is only a necessity for OneDrive as we are storing
	// metadata for files/folders in separate meta files and we need the
	// data to be restored before we can restore the metadata.
	//
	// This sorting assumes stuff in the same StoragePath directory end up in the
	// same RestorePath collection.
	sort.Slice(paths, func(i, j int) bool {
		return paths[i].StoragePath.String() < paths[j].StoragePath.String()
	})

	return paths, nil
}

View File

@ -0,0 +1,317 @@
package onedrive
import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/path"
)
// RestoreUnitSuite hosts unit tests for the OneDrive restore path
// helpers; no live M365 connection is required.
type RestoreUnitSuite struct {
	tester.Suite
}
// TestRestoreUnitSuite wires RestoreUnitSuite into the go test runner.
func TestRestoreUnitSuite(t *testing.T) {
	suite.Run(t, &RestoreUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestAugmentRestorePaths verifies, per backup version, that the expected
// .dirmeta entries are injected and that the result is sorted so that
// directory metadata appears before the files inside that directory.
// Inputs and outputs are given as path suffixes under a shared drive root.
func (suite *RestoreUnitSuite) TestAugmentRestorePaths() {
	// Adding a simple test here so that we can be sure that this
	// function gets updated whenever we add a new version.
	require.LessOrEqual(suite.T(), version.Backup, version.All8MigrateUserPNToID, "unsupported backup version")

	table := []struct {
		name    string
		version int
		input   []string
		output  []string
	}{
		{
			name:    "no change v0",
			version: 0,
			input: []string{
				"file.txt.data",
				"file.txt", // v0 does not have `.data`
			},
			output: []string{
				"file.txt", // ordering artifact of sorting
				"file.txt.data",
			},
		},
		{
			name:    "one folder v0",
			version: 0,
			input: []string{
				"folder/file.txt.data",
				"folder/file.txt",
			},
			output: []string{
				"folder/file.txt",
				"folder/file.txt.data",
			},
		},
		{
			name:    "no change v1",
			version: version.OneDrive1DataAndMetaFiles,
			input: []string{
				"file.txt.data",
			},
			output: []string{
				"file.txt.data",
			},
		},
		{
			// v1: dirmeta sits beside the folder in its parent.
			name:    "one folder v1",
			version: version.OneDrive1DataAndMetaFiles,
			input: []string{
				"folder/file.txt.data",
			},
			output: []string{
				"folder.dirmeta",
				"folder/file.txt.data",
			},
		},
		{
			name:    "nested folders v1",
			version: version.OneDrive1DataAndMetaFiles,
			input: []string{
				"folder/file.txt.data",
				"folder/folder2/file.txt.data",
			},
			output: []string{
				"folder.dirmeta",
				"folder/file.txt.data",
				"folder/folder2.dirmeta",
				"folder/folder2/file.txt.data",
			},
		},
		{
			name:    "no change v4",
			version: version.OneDrive4DirIncludesPermissions,
			input: []string{
				"file.txt.data",
			},
			output: []string{
				"file.txt.data",
			},
		},
		{
			// v4: dirmeta moves inside the folder, named after it.
			name:    "one folder v4",
			version: version.OneDrive4DirIncludesPermissions,
			input: []string{
				"folder/file.txt.data",
			},
			output: []string{
				"folder/file.txt.data",
				"folder/folder.dirmeta",
			},
		},
		{
			name:    "nested folders v4",
			version: version.OneDrive4DirIncludesPermissions,
			input: []string{
				"folder/file.txt.data",
				"folder/folder2/file.txt.data",
			},
			output: []string{
				"folder/file.txt.data",
				"folder/folder.dirmeta",
				"folder/folder2/file.txt.data",
				"folder/folder2/folder2.dirmeta",
			},
		},
		{
			name:    "no change v6",
			version: version.OneDrive6NameInMeta,
			input: []string{
				"file.txt.data",
			},
			output: []string{
				"file.txt.data",
			},
		},
		{
			// v6: dirmeta keeps a fixed name inside the folder.
			name:    "one folder v6",
			version: version.OneDrive6NameInMeta,
			input: []string{
				"folder/file.txt.data",
			},
			output: []string{
				"folder/.dirmeta",
				"folder/file.txt.data",
			},
		},
		{
			name:    "nested folders v6",
			version: version.OneDrive6NameInMeta,
			input: []string{
				"folder/file.txt.data",
				"folder/folder2/file.txt.data",
			},
			output: []string{
				"folder/.dirmeta",
				"folder/file.txt.data",
				"folder/folder2/.dirmeta",
				"folder/folder2/file.txt.data",
			},
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			_, flush := tester.NewContext(t)
			defer flush()

			// Shared OneDrive drive root prefixed onto every fixture path.
			base := "id/onedrive/user/files/drives/driveID/root:/"

			inPaths := []path.RestorePaths{}
			for _, ps := range test.input {
				p, err := path.FromDataLayerPath(base+ps, true)
				require.NoError(t, err, "creating path", clues.ToCore(err))

				pd, err := p.Dir()
				require.NoError(t, err, "creating collection path", clues.ToCore(err))

				inPaths = append(
					inPaths,
					path.RestorePaths{StoragePath: p, RestorePath: pd})
			}

			outPaths := []path.RestorePaths{}
			for _, ps := range test.output {
				p, err := path.FromDataLayerPath(base+ps, true)
				require.NoError(t, err, "creating path", clues.ToCore(err))

				pd, err := p.Dir()
				require.NoError(t, err, "creating collection path", clues.ToCore(err))

				outPaths = append(
					outPaths,
					path.RestorePaths{StoragePath: p, RestorePath: pd})
			}

			actual, err := AugmentRestorePaths(test.version, inPaths)
			require.NoError(t, err, "augmenting paths", clues.ToCore(err))

			// Ordering of paths matter here as we need dirmeta files
			// to show up before file in dir
			assert.Equal(t, outPaths, actual, "augmented paths")
		})
	}
}
// TestAugmentRestorePaths_DifferentRestorePath tests that RestorePath
// substitution works properly. Since it's only possible for future backup
// versions to need restore path substitution (i.e. due to storing folders by
// ID instead of name) this is only tested against the most recent backup
// version at the moment.
func (suite *RestoreUnitSuite) TestAugmentRestorePaths_DifferentRestorePath() {
	// Guard clause so this test gets revisited whenever a new backup
	// version is introduced.
	require.LessOrEqual(suite.T(), version.Backup, version.All8MigrateUserPNToID, "unsupported backup version")

	// pathPair binds a storage-layer path suffix to the restore-layer
	// path suffix it should map to.
	type pathPair struct {
		storage string
		restore string
	}

	table := []struct {
		name     string
		version  int
		input    []pathPair
		output   []pathPair
		errCheck assert.ErrorAssertionFunc
	}{
		{
			name:    "nested folders",
			version: version.Backup,
			input: []pathPair{
				{storage: "folder-id/file.txt.data", restore: "folder"},
				{storage: "folder-id/folder2-id/file.txt.data", restore: "folder/folder2"},
			},
			output: []pathPair{
				{storage: "folder-id/.dirmeta", restore: "folder"},
				{storage: "folder-id/file.txt.data", restore: "folder"},
				{storage: "folder-id/folder2-id/.dirmeta", restore: "folder/folder2"},
				{storage: "folder-id/folder2-id/file.txt.data", restore: "folder/folder2"},
			},
			errCheck: assert.NoError,
		},
		{
			name:    "restore path longer one folder",
			version: version.Backup,
			input: []pathPair{
				{storage: "folder-id/file.txt.data", restore: "corso_restore/folder"},
			},
			output: []pathPair{
				{storage: "folder-id/.dirmeta", restore: "corso_restore/folder"},
				{storage: "folder-id/file.txt.data", restore: "corso_restore/folder"},
			},
			errCheck: assert.NoError,
		},
		{
			name:    "restore path shorter one folder",
			version: version.Backup,
			input: []pathPair{
				{storage: "folder-id/file.txt.data", restore: ""},
			},
			errCheck: assert.Error,
		},
	}

	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			_, flush := tester.NewContext(t)
			defer flush()

			const base = "id/onedrive/user/files/drives/driveID/root:/"

			// toRestorePaths builds fully-formed RestorePaths, rooted at
			// base, from the table's storage/restore suffix pairs.
			toRestorePaths := func(pairs []pathPair) []path.RestorePaths {
				result := make([]path.RestorePaths, 0, len(pairs))

				for _, pair := range pairs {
					sp, err := path.FromDataLayerPath(base+pair.storage, true)
					require.NoError(t, err, "creating path", clues.ToCore(err))

					rp, err := path.FromDataLayerPath(base+pair.restore, false)
					require.NoError(t, err, "creating path", clues.ToCore(err))

					result = append(
						result,
						path.RestorePaths{StoragePath: sp, RestorePath: rp})
				}

				return result
			}

			inPaths := toRestorePaths(test.input)
			expected := toRestorePaths(test.output)

			actual, err := AugmentRestorePaths(test.version, inPaths)
			test.errCheck(t, err, "augmenting paths", clues.ToCore(err))

			if err != nil {
				return
			}

			// Ordering of paths matter here as we need dirmeta files
			// to show up before file in dir
			assert.Equal(t, expected, actual, "augmented paths")
		})
	}
}

Some files were not shown because too many files have changed in this diff Show More