Merge branch 'main' of https://github.com/alcionai/corso into nestedAttachment
commit cb621bb359
@@ -7,6 +7,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased] (beta)
 
+## [v0.9.0] (beta) - 2023-06-05
+
 ### Added
 
 - Added ProtectedResourceName to the backup list json output. ProtectedResourceName holds either a UPN or a WebURL, depending on the resource type.
 - Rework base selection logic for incremental backups so it's more likely to find a valid base.
@@ -20,8 +22,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ### Changed
 - Do not display all the items that we restored at the end if there are more than 15. You can override this with `--verbose`.
 
-### Known Issues
-
 ## [v0.8.0] (beta) - 2023-05-15
 
 ### Added
@@ -290,7 +290,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Miscellaneous
   - Optional usage statistics reporting ([RM-35](https://github.com/alcionai/corso-roadmap/issues/35))
 
-[Unreleased]: https://github.com/alcionai/corso/compare/v0.7.0...HEAD
+[Unreleased]: https://github.com/alcionai/corso/compare/v0.9.0...HEAD
+[v0.9.0]: https://github.com/alcionai/corso/compare/v0.8.1...v0.9.0
+[v0.8.0]: https://github.com/alcionai/corso/compare/v0.7.1...v0.8.0
 [v0.7.0]: https://github.com/alcionai/corso/compare/v0.6.1...v0.7.0
 [v0.6.1]: https://github.com/alcionai/corso/compare/v0.5.0...v0.6.1
 [v0.5.0]: https://github.com/alcionai/corso/compare/v0.4.0...v0.5.0
@@ -51,7 +51,6 @@ type dataBuilderFunc func(id, now, subject, body string) []byte
 func generateAndRestoreItems(
     ctx context.Context,
     gc *connector.GraphConnector,
-    acct account.Account,
     service path.ServiceType,
     cat path.CategoryType,
     sel selectors.Selector,
@@ -99,7 +98,7 @@ func generateAndRestoreItems(
 
     print.Infof(ctx, "Generating %d %s items in %s\n", howMany, cat, Destination)
 
-    return gc.ConsumeRestoreCollections(ctx, version.Backup, acct, sel, dest, opts, dataColls, errs)
+    return gc.ConsumeRestoreCollections(ctx, version.Backup, sel, dest, opts, dataColls, errs)
 }
 
 // ------------------------------------------------------------------------------------------
@@ -188,7 +187,7 @@ func buildCollections(
             mc.Data[i] = c.items[i].data
         }
 
-        collections = append(collections, data.NotFoundRestoreCollection{Collection: mc})
+        collections = append(collections, data.NoFetchRestoreCollection{Collection: mc})
     }
 
     return collections, nil
@@ -233,14 +232,14 @@ func generateAndRestoreDriveItems(
 
     switch service {
     case path.SharePointService:
-        d, err := gc.Service.Client().Sites().BySiteId(resourceOwner).Drive().Get(ctx, nil)
+        d, err := gc.AC.Stable.Client().Sites().BySiteId(resourceOwner).Drive().Get(ctx, nil)
         if err != nil {
             return nil, clues.Wrap(err, "getting site's default drive")
         }
 
         driveID = ptr.Val(d.GetId())
     default:
-        d, err := gc.Service.Client().Users().ByUserId(resourceOwner).Drive().Get(ctx, nil)
+        d, err := gc.AC.Stable.Client().Users().ByUserId(resourceOwner).Drive().Get(ctx, nil)
         if err != nil {
             return nil, clues.Wrap(err, "getting user's default drive")
         }
@@ -390,7 +389,6 @@ func generateAndRestoreDriveItems(
     }
 
     config := connector.ConfigInfo{
-        Acct: acct,
         Opts: opts,
         Resource: connector.Users,
        Service: service,
@@ -407,5 +405,5 @@ func generateAndRestoreDriveItems(
         return nil, err
     }
 
-    return gc.ConsumeRestoreCollections(ctx, version.Backup, acct, sel, dest, opts, collections, errs)
+    return gc.ConsumeRestoreCollections(ctx, version.Backup, sel, dest, opts, collections, errs)
 }
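Note: the thread running through the factory hunks above is that account.Account disappears from the restore path. A minimal sketch of the resulting call shape; the wrapper name restoreGenerated is hypothetical, and the types come from the packages imported elsewhere in this diff (connector, selectors, control, data, details, fault, version):

func restoreGenerated(
    ctx context.Context,
    gc *connector.GraphConnector,
    sel selectors.Selector,
    dest control.RestoreDestination,
    opts control.Options,
    colls []data.RestoreCollection,
    errs *fault.Bus,
) (*details.Details, error) {
    // no account.Account parameter: the connector now resolves credentials
    // from the api.Client it was constructed with
    return gc.ConsumeRestoreCollections(ctx, version.Backup, sel, dest, opts, colls, errs)
}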
@@ -52,7 +52,7 @@ func handleExchangeEmailFactory(cmd *cobra.Command, args []string) error {
         return nil
     }
 
-    gc, acct, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
+    gc, _, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
     if err != nil {
         return Only(ctx, err)
     }
@@ -60,7 +60,6 @@ func handleExchangeEmailFactory(cmd *cobra.Command, args []string) error {
     deets, err := generateAndRestoreItems(
         ctx,
         gc,
-        acct,
         service,
         category,
         selectors.NewExchangeRestore([]string{User}).Selector,
@@ -99,7 +98,7 @@ func handleExchangeCalendarEventFactory(cmd *cobra.Command, args []string) error
         return nil
     }
 
-    gc, acct, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
+    gc, _, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
     if err != nil {
         return Only(ctx, err)
     }
@@ -107,7 +106,6 @@ func handleExchangeCalendarEventFactory(cmd *cobra.Command, args []string) error
     deets, err := generateAndRestoreItems(
         ctx,
         gc,
-        acct,
         service,
         category,
         selectors.NewExchangeRestore([]string{User}).Selector,
@@ -145,7 +143,7 @@ func handleExchangeContactFactory(cmd *cobra.Command, args []string) error {
         return nil
     }
 
-    gc, acct, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
+    gc, _, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
     if err != nil {
         return Only(ctx, err)
     }
@@ -153,7 +151,6 @@ func handleExchangeContactFactory(cmd *cobra.Command, args []string) error {
     deets, err := generateAndRestoreItems(
         ctx,
         gc,
-        acct,
         service,
         category,
         selectors.NewExchangeRestore([]string{User}).Selector,
@@ -71,16 +71,14 @@ func handleOneDriveCmd(cmd *cobra.Command, args []string) error {
         AzureTenantID: tid,
     }
 
-    // todo: swap to drive api client, when finished.
-    adpt, err := graph.CreateAdapter(tid, creds.AzureClientID, creds.AzureClientSecret)
-    if err != nil {
-        return Only(ctx, clues.Wrap(err, "creating graph adapter"))
-    }
-
-    svc := graph.NewService(adpt)
     gr := graph.NewNoTimeoutHTTPWrapper()
 
-    err = runDisplayM365JSON(ctx, svc, gr, creds, user, m365ID)
+    ac, err := api.NewClient(creds)
+    if err != nil {
+        return Only(ctx, clues.Wrap(err, "getting api client"))
+    }
+
+    err = runDisplayM365JSON(ctx, ac, gr, creds, user, m365ID)
     if err != nil {
         cmd.SilenceUsage = true
         cmd.SilenceErrors = true
@@ -107,12 +105,12 @@ func (i itemPrintable) MinimumPrintable() any {
 
 func runDisplayM365JSON(
     ctx context.Context,
-    srv graph.Servicer,
+    ac api.Client,
     gr graph.Requester,
     creds account.M365Config,
-    user, itemID string,
+    userID, itemID string,
 ) error {
-    drive, err := api.GetUsersDrive(ctx, srv, user)
+    drive, err := ac.Users().GetDefaultDrive(ctx, userID)
     if err != nil {
         return err
     }
@@ -121,7 +119,7 @@ func runDisplayM365JSON(
 
     it := itemPrintable{}
 
-    item, err := api.GetDriveItem(ctx, srv, driveID, itemID)
+    item, err := ac.Drives().GetItem(ctx, driveID, itemID)
     if err != nil {
         return err
     }
@@ -148,7 +146,7 @@ func runDisplayM365JSON(
         return err
     }
 
-    perms, err := api.GetItemPermission(ctx, srv, driveID, itemID)
+    perms, err := ac.Drives().GetItemPermission(ctx, driveID, itemID)
     if err != nil {
         return err
     }
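Note: the same migration repeats across the CLI: a hand-built graph adapter plus free helper functions (api.GetUsersDrive, api.GetDriveItem) gives way to one typed api.Client. A sketch of the new flow, assuming only the accessors visible in this diff; the helper name displayDriveItem is hypothetical:

// imports assumed: context, github.com/alcionai/clues,
// github.com/alcionai/corso/src/internal/common/ptr,
// github.com/alcionai/corso/src/pkg/account,
// github.com/alcionai/corso/src/pkg/services/m365/api
func displayDriveItem(ctx context.Context, creds account.M365Config, userID, itemID string) error {
    ac, err := api.NewClient(creds)
    if err != nil {
        return clues.Wrap(err, "getting api client")
    }

    // the user's default drive, then a single item from it
    drive, err := ac.Users().GetDefaultDrive(ctx, userID)
    if err != nil {
        return err
    }

    // ptr.Val unwraps the SDK's *string ID, as elsewhere in this diff
    _, err = ac.Drives().GetItem(ctx, ptr.Val(drive.GetId()), itemID)

    return err
}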
@@ -8,7 +8,7 @@ require (
     github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0
     github.com/alcionai/clues v0.0.0-20230406223931-f48777f4773c
     github.com/armon/go-metrics v0.4.1
-    github.com/aws/aws-sdk-go v1.44.273
+    github.com/aws/aws-sdk-go v1.44.275
     github.com/aws/aws-xray-sdk-go v1.8.1
     github.com/cenkalti/backoff/v4 v4.2.1
     github.com/google/uuid v1.3.0
@@ -19,7 +19,7 @@ require (
     github.com/microsoft/kiota-http-go v1.0.0
     github.com/microsoft/kiota-serialization-form-go v1.0.0
     github.com/microsoft/kiota-serialization-json-go v1.0.1
-    github.com/microsoftgraph/msgraph-sdk-go v1.3.0
+    github.com/microsoftgraph/msgraph-sdk-go v1.4.0
     github.com/microsoftgraph/msgraph-sdk-go-core v1.0.0
     github.com/pkg/errors v0.9.1
     github.com/rudderlabs/analytics-go v3.3.3+incompatible
@@ -34,7 +34,7 @@ require (
     go.uber.org/zap v1.24.0
     golang.org/x/exp v0.0.0-20230213192124-5e25df0256eb
     golang.org/x/time v0.3.0
-    golang.org/x/tools v0.9.2
+    golang.org/x/tools v0.9.3
 )
 
 require (
src/go.sum (12 changes)

@@ -66,8 +66,8 @@ github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY
 github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
 github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA=
 github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4=
-github.com/aws/aws-sdk-go v1.44.273 h1:CX8O0gK+cGrgUyv7bgJ6QQP9mQg7u5mweHdNzULH47c=
-github.com/aws/aws-sdk-go v1.44.273/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
+github.com/aws/aws-sdk-go v1.44.275 h1:VqRULgqrigvQLll4e4hXuc568EQAtZQ6jmBzLlQHzSI=
+github.com/aws/aws-sdk-go v1.44.275/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
 github.com/aws/aws-xray-sdk-go v1.8.1 h1:O4pXV+hnCskaamGsZnFpzHyAmgPGusBMN6i7nnsy0Fo=
 github.com/aws/aws-xray-sdk-go v1.8.1/go.mod h1:wMmVYzej3sykAttNBkXQHK/+clAPWTOrPiajEk7Cp3A=
 github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8=
@@ -285,8 +285,8 @@ github.com/microsoft/kiota-serialization-json-go v1.0.1 h1:nI3pLpqep7L6BLJPT7teC
 github.com/microsoft/kiota-serialization-json-go v1.0.1/go.mod h1:KS+eFtwtJGsosXRQr/Qilep7ZD1MRF+VtO7LnL7Oyuw=
 github.com/microsoft/kiota-serialization-text-go v1.0.0 h1:XOaRhAXy+g8ZVpcq7x7a0jlETWnWrEum0RhmbYrTFnA=
 github.com/microsoft/kiota-serialization-text-go v1.0.0/go.mod h1:sM1/C6ecnQ7IquQOGUrUldaO5wj+9+v7G2W3sQ3fy6M=
-github.com/microsoftgraph/msgraph-sdk-go v1.3.0 h1:SjMxt7Kg4l0Zxp2mLDmMrIiC6Um5rvKYwck6x6AVqP4=
-github.com/microsoftgraph/msgraph-sdk-go v1.3.0/go.mod h1:U/B+zwNy0auluG4ZPhMWKyxHtqnxRqbg9RyerKXQqXw=
+github.com/microsoftgraph/msgraph-sdk-go v1.4.0 h1:ibNwMDEZ6HikA9BVXu+TljCzCiE+yFsD6wLpJbTc1tc=
+github.com/microsoftgraph/msgraph-sdk-go v1.4.0/go.mod h1:JIDL1xENx92B60NjO2ACyqGeKvtYkdl9rirgajIgryw=
 github.com/microsoftgraph/msgraph-sdk-go-core v1.0.0 h1:7NWTfyXvOjoizW7PmxNp3+8wCKPgpODs/D1cUZ3fkAY=
 github.com/microsoftgraph/msgraph-sdk-go-core v1.0.0/go.mod h1:tQb4q3YMIj2dWhhXhQSJ4ELpol931ANKzHSYK5kX1qE=
 github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=
@@ -672,8 +672,8 @@ golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4f
 golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
 golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
 golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
-golang.org/x/tools v0.9.2 h1:UXbndbirwCAx6TULftIfie/ygDNCwxEie+IiNP1IcNc=
-golang.org/x/tools v0.9.2/go.mod h1:owI94Op576fPu3cIGQeHs3joujW/2Oc6MtlxbF5dfNc=
+golang.org/x/tools v0.9.3 h1:Gn1I8+64MsuTb/HpH+LmQtNas23LhUVr3rYZ0eKuaMM=
+golang.org/x/tools v0.9.3/go.mod h1:owI94Op576fPu3cIGQeHs3joujW/2Oc6MtlxbF5dfNc=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -3,7 +3,6 @@ package connector
 import (
     "context"
     "strings"
-    "sync"
 
     "github.com/alcionai/clues"
 
@@ -17,7 +16,6 @@ import (
     "github.com/alcionai/corso/src/internal/connector/support"
     "github.com/alcionai/corso/src/internal/data"
     "github.com/alcionai/corso/src/internal/diagnostics"
-    "github.com/alcionai/corso/src/pkg/account"
     "github.com/alcionai/corso/src/pkg/backup/details"
     "github.com/alcionai/corso/src/pkg/control"
     "github.com/alcionai/corso/src/pkg/fault"
@@ -27,13 +25,6 @@ import (
     "github.com/alcionai/corso/src/pkg/selectors"
 )
 
-const (
-    // copyBufferSize is used for chunked upload
-    // Microsoft recommends 5-10MB buffers
-    // https://docs.microsoft.com/en-us/graph/api/driveitem-createuploadsession?view=graph-rest-1.0#best-practices
-    copyBufferSize = 5 * 1024 * 1024
-)
-
 // ---------------------------------------------------------------------------
 // Data Collections
 // ---------------------------------------------------------------------------
@@ -51,7 +42,7 @@ func (gc *GraphConnector) ProduceBackupCollections(
     lastBackupVersion int,
     ctrlOpts control.Options,
     errs *fault.Bus,
-) ([]data.BackupCollection, prefixmatcher.StringSetReader, error) {
+) ([]data.BackupCollection, prefixmatcher.StringSetReader, bool, error) {
     ctx, end := diagnostics.Span(
         ctx,
         "gc:produceBackupCollections",
@@ -66,25 +57,26 @@ func (gc *GraphConnector) ProduceBackupCollections(
 
     err := verifyBackupInputs(sels, gc.IDNameLookup.IDs())
     if err != nil {
-        return nil, nil, clues.Stack(err).WithClues(ctx)
+        return nil, nil, false, clues.Stack(err).WithClues(ctx)
     }
 
     serviceEnabled, canMakeDeltaQueries, err := checkServiceEnabled(
         ctx,
-        gc.Discovery.Users(),
+        gc.AC.Users(),
         path.ServiceType(sels.Service),
         sels.DiscreteOwner)
     if err != nil {
-        return nil, nil, err
+        return nil, nil, false, err
     }
 
     if !serviceEnabled {
-        return []data.BackupCollection{}, nil, nil
+        return []data.BackupCollection{}, nil, false, nil
     }
 
     var (
         colls []data.BackupCollection
         ssmb *prefixmatcher.StringSetMatcher
+        canUsePreviousBackup bool
     )
 
     if !canMakeDeltaQueries {
@@ -95,9 +87,9 @@ func (gc *GraphConnector) ProduceBackupCollections(
 
     switch sels.Service {
     case selectors.ServiceExchange:
-        colls, ssmb, err = exchange.DataCollections(
+        colls, ssmb, canUsePreviousBackup, err = exchange.DataCollections(
             ctx,
-            gc.Discovery,
+            gc.AC,
             sels,
             gc.credentials.AzureTenantID,
             owner,
@@ -106,44 +98,42 @@ func (gc *GraphConnector) ProduceBackupCollections(
             ctrlOpts,
             errs)
         if err != nil {
-            return nil, nil, err
+            return nil, nil, false, err
         }
 
     case selectors.ServiceOneDrive:
-        colls, ssmb, err = onedrive.DataCollections(
+        colls, ssmb, canUsePreviousBackup, err = onedrive.DataCollections(
             ctx,
+            gc.AC,
             sels,
             owner,
             metadata,
             lastBackupVersion,
             gc.credentials.AzureTenantID,
-            gc.itemClient,
-            gc.Service,
             gc.UpdateStatus,
             ctrlOpts,
             errs)
         if err != nil {
-            return nil, nil, err
+            return nil, nil, false, err
         }
 
     case selectors.ServiceSharePoint:
-        colls, ssmb, err = sharepoint.DataCollections(
+        colls, ssmb, canUsePreviousBackup, err = sharepoint.DataCollections(
             ctx,
-            gc.itemClient,
+            gc.AC,
             sels,
             owner,
             metadata,
             gc.credentials,
-            gc.Service,
             gc,
             ctrlOpts,
             errs)
         if err != nil {
-            return nil, nil, err
+            return nil, nil, false, err
         }
 
     default:
-        return nil, nil, clues.Wrap(clues.New(sels.Service.String()), "service not supported").WithClues(ctx)
+        return nil, nil, false, clues.Wrap(clues.New(sels.Service.String()), "service not supported").WithClues(ctx)
     }
 
     for _, c := range colls {
@@ -158,7 +148,7 @@ func (gc *GraphConnector) ProduceBackupCollections(
         }
     }
 
-    return colls, ssmb, nil
+    return colls, ssmb, canUsePreviousBackup, nil
 }
 
 // IsBackupRunnable verifies that the users provided has the services enabled and
@@ -174,7 +164,7 @@ func (gc *GraphConnector) IsBackupRunnable(
         return true, nil
     }
 
-    info, err := gc.Discovery.Users().GetInfo(ctx, resourceOwner)
+    info, err := gc.AC.Users().GetInfo(ctx, resourceOwner)
     if err != nil {
         return false, err
     }
@@ -242,7 +232,6 @@ func checkServiceEnabled(
 func (gc *GraphConnector) ConsumeRestoreCollections(
     ctx context.Context,
     backupVersion int,
-    acct account.Account,
     sels selectors.Selector,
     dest control.RestoreDestination,
     opts control.Options,
@@ -257,52 +246,31 @@ func (gc *GraphConnector) ConsumeRestoreCollections(
     var (
         status *support.ConnectorOperationStatus
         deets = &details.Builder{}
+        err error
     )
 
-    creds, err := acct.M365Config()
-    if err != nil {
-        return nil, clues.Wrap(err, "malformed azure credentials")
-    }
-
-    // Buffer pool for uploads
-    pool := sync.Pool{
-        New: func() interface{} {
-            b := make([]byte, copyBufferSize)
-            return &b
-        },
-    }
-
     switch sels.Service {
     case selectors.ServiceExchange:
-        status, err = exchange.RestoreCollections(ctx,
-            creds,
-            gc.Discovery,
-            gc.Service,
-            dest,
-            dcs,
-            deets,
-            errs)
+        status, err = exchange.RestoreCollections(ctx, gc.AC, dest, dcs, deets, errs)
     case selectors.ServiceOneDrive:
-        status, err = onedrive.RestoreCollections(ctx,
-            creds,
+        status, err = onedrive.RestoreCollections(
+            ctx,
+            onedrive.NewRestoreHandler(gc.AC),
             backupVersion,
-            gc.Service,
             dest,
             opts,
             dcs,
             deets,
-            &pool,
             errs)
     case selectors.ServiceSharePoint:
-        status, err = sharepoint.RestoreCollections(ctx,
+        status, err = sharepoint.RestoreCollections(
+            ctx,
             backupVersion,
-            creds,
-            gc.Service,
+            gc.AC,
             dest,
             opts,
             dcs,
             deets,
-            &pool,
             errs)
     default:
         err = clues.Wrap(clues.New(sels.Service.String()), "service not supported")
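Note: ProduceBackupCollections now reports whether the previous backup's metadata was usable, alongside the collections. A sketch of a caller branching on the new boolean (argument order as in the tests below; the operations-layer wiring is not part of this diff):

colls, ssmb, canUsePreviousBackup, err := gc.ProduceBackupCollections(
    ctx, owner, sels, metadata, lastBackupVersion, ctrlOpts, errs)
if err != nil {
    return err // hard failure: abort the backup
}

if !canUsePreviousBackup {
    // fall back to a full backup: ignore bases from prior snapshots rather
    // than merging against metadata that could not be read
}
_ = ssmb // prefix matcher for exclusions, unchanged by this diff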
@@ -12,7 +12,6 @@ import (
 
     inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
     "github.com/alcionai/corso/src/internal/connector/exchange"
-    "github.com/alcionai/corso/src/internal/connector/graph"
     "github.com/alcionai/corso/src/internal/connector/sharepoint"
     "github.com/alcionai/corso/src/internal/tester"
     "github.com/alcionai/corso/src/internal/version"
@@ -128,7 +127,7 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
     ctrlOpts := control.Defaults()
     ctrlOpts.ToggleFeatures.DisableDelta = !canMakeDeltaQueries
 
-    collections, excludes, err := exchange.DataCollections(
+    collections, excludes, canUsePreviousBackup, err := exchange.DataCollections(
         ctx,
         suite.ac,
         sel,
@@ -139,6 +138,7 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
         ctrlOpts,
         fault.New(true))
     require.NoError(t, err, clues.ToCore(err))
+    assert.True(t, canUsePreviousBackup, "can use previous backup")
     assert.True(t, excludes.Empty())
 
     for range collections {
@@ -238,7 +238,7 @@ func (suite *DataCollectionIntgSuite) TestDataCollections_invalidResourceOwner()
     ctx, flush := tester.NewContext(t)
     defer flush()
 
-    collections, excludes, err := connector.ProduceBackupCollections(
+    collections, excludes, canUsePreviousBackup, err := connector.ProduceBackupCollections(
         ctx,
         test.getSelector(t),
         test.getSelector(t),
@@ -247,6 +247,7 @@ func (suite *DataCollectionIntgSuite) TestDataCollections_invalidResourceOwner()
         control.Defaults(),
         fault.New(true))
     assert.Error(t, err, clues.ToCore(err))
+    assert.False(t, canUsePreviousBackup, "can use previous backup")
     assert.Empty(t, collections)
     assert.Nil(t, excludes)
 })
@@ -296,18 +297,18 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
 
     sel := test.getSelector()
 
-    collections, excludes, err := sharepoint.DataCollections(
+    collections, excludes, canUsePreviousBackup, err := sharepoint.DataCollections(
         ctx,
-        graph.NewNoTimeoutHTTPWrapper(),
+        suite.ac,
         sel,
         sel,
         nil,
         connector.credentials,
-        connector.Service,
         connector,
         control.Defaults(),
         fault.New(true))
     require.NoError(t, err, clues.ToCore(err))
+    assert.True(t, canUsePreviousBackup, "can use previous backup")
     // Not expecting excludes as this isn't an incremental backup.
     assert.True(t, excludes.Empty())
 
@@ -383,7 +384,7 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() {
 
     sel.SetDiscreteOwnerIDName(id, name)
 
-    cols, excludes, err := gc.ProduceBackupCollections(
+    cols, excludes, canUsePreviousBackup, err := gc.ProduceBackupCollections(
         ctx,
         inMock.NewProvider(id, name),
         sel.Selector,
@@ -392,6 +393,7 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() {
         control.Defaults(),
         fault.New(true))
     require.NoError(t, err, clues.ToCore(err))
+    assert.True(t, canUsePreviousBackup, "can use previous backup")
     require.Len(t, cols, 2) // 1 collection, 1 path prefix directory to ensure the root path exists.
     // No excludes yet as this isn't an incremental backup.
     assert.True(t, excludes.Empty())
@@ -429,7 +431,7 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Lists() {
 
     sel.SetDiscreteOwnerIDName(id, name)
 
-    cols, excludes, err := gc.ProduceBackupCollections(
+    cols, excludes, canUsePreviousBackup, err := gc.ProduceBackupCollections(
         ctx,
         inMock.NewProvider(id, name),
         sel.Selector,
@@ -438,6 +440,7 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Lists() {
         control.Defaults(),
         fault.New(true))
     require.NoError(t, err, clues.ToCore(err))
+    assert.True(t, canUsePreviousBackup, "can use previous backup")
     assert.Less(t, 0, len(cols))
     // No excludes yet as this isn't an incremental backup.
     assert.True(t, excludes.Empty())
@@ -14,6 +14,7 @@ import (
     "github.com/alcionai/corso/src/internal/observe"
     "github.com/alcionai/corso/src/pkg/control"
     "github.com/alcionai/corso/src/pkg/fault"
+    "github.com/alcionai/corso/src/pkg/logger"
     "github.com/alcionai/corso/src/pkg/path"
     "github.com/alcionai/corso/src/pkg/selectors"
     "github.com/alcionai/corso/src/pkg/services/m365/api"
@@ -64,8 +65,7 @@ type DeltaPath struct {
 func parseMetadataCollections(
     ctx context.Context,
     colls []data.RestoreCollection,
-    errs *fault.Bus,
-) (CatDeltaPaths, error) {
+) (CatDeltaPaths, bool, error) {
     // cdp stores metadata
     cdp := CatDeltaPaths{
         path.ContactsCategory: {},
@@ -81,6 +81,10 @@ func parseMetadataCollections(
         path.EventsCategory: {},
     }
 
+    // errors from metadata items should not stop the backup,
+    // but it should prevent us from using previous backups
+    errs := fault.New(true)
+
     for _, coll := range colls {
         var (
             breakLoop bool
@@ -91,10 +95,10 @@ func parseMetadataCollections(
         for {
             select {
             case <-ctx.Done():
-                return nil, clues.Wrap(ctx.Err(), "parsing collection metadata").WithClues(ctx)
+                return nil, false, clues.Wrap(ctx.Err(), "parsing collection metadata").WithClues(ctx)
 
             case item, ok := <-items:
-                if !ok {
+                if !ok || errs.Failure() != nil {
                     breakLoop = true
                     break
                 }
@@ -106,13 +110,13 @@ func parseMetadataCollections(
 
                 err := json.NewDecoder(item.ToReader()).Decode(&m)
                 if err != nil {
-                    return nil, clues.New("decoding metadata json").WithClues(ctx)
+                    return nil, false, clues.New("decoding metadata json").WithClues(ctx)
                 }
 
                 switch item.UUID() {
                 case graph.PreviousPathFileName:
                     if _, ok := found[category]["path"]; ok {
-                        return nil, clues.Wrap(clues.New(category.String()), "multiple versions of path metadata").WithClues(ctx)
+                        return nil, false, clues.Wrap(clues.New(category.String()), "multiple versions of path metadata").WithClues(ctx)
                     }
 
                     for k, p := range m {
@@ -123,7 +127,7 @@ func parseMetadataCollections(
 
                 case graph.DeltaURLsFileName:
                     if _, ok := found[category]["delta"]; ok {
-                        return nil, clues.Wrap(clues.New(category.String()), "multiple versions of delta metadata").WithClues(ctx)
+                        return nil, false, clues.Wrap(clues.New(category.String()), "multiple versions of delta metadata").WithClues(ctx)
                     }
 
                     for k, d := range m {
@@ -142,6 +146,16 @@ func parseMetadataCollections(
         }
     }
 
+    if errs.Failure() != nil {
+        logger.CtxErr(ctx, errs.Failure()).Info("reading metadata collection items")
+
+        return CatDeltaPaths{
+            path.ContactsCategory: {},
+            path.EmailCategory: {},
+            path.EventsCategory: {},
+        }, false, nil
+    }
+
     // Remove any entries that contain a path or a delta, but not both.
     // That metadata is considered incomplete, and needs to incur a
     // complete backup on the next run.
@@ -153,7 +167,7 @@ func parseMetadataCollections(
         }
     }
 
-    return cdp, nil
+    return cdp, true, nil
 }
 
 // DataCollections returns a DataCollection which the caller can
@@ -168,10 +182,10 @@ func DataCollections(
     su support.StatusUpdater,
     ctrlOpts control.Options,
     errs *fault.Bus,
-) ([]data.BackupCollection, *prefixmatcher.StringSetMatcher, error) {
+) ([]data.BackupCollection, *prefixmatcher.StringSetMatcher, bool, error) {
     eb, err := selector.ToExchangeBackup()
     if err != nil {
-        return nil, nil, clues.Wrap(err, "exchange dataCollection selector").WithClues(ctx)
+        return nil, nil, false, clues.Wrap(err, "exchange dataCollection selector").WithClues(ctx)
     }
 
     var (
@@ -187,9 +201,9 @@ func DataCollections(
         graph.InitializeConcurrencyLimiter(ctrlOpts.Parallelism.ItemFetch)
     }
 
-    cdps, err := parseMetadataCollections(ctx, metadata, errs)
+    cdps, canUsePreviousBackup, err := parseMetadataCollections(ctx, metadata)
     if err != nil {
-        return nil, nil, err
+        return nil, nil, false, err
     }
 
     for _, scope := range eb.Scopes() {
@@ -228,13 +242,13 @@ func DataCollections(
             su,
             errs)
         if err != nil {
-            return nil, nil, err
+            return nil, nil, false, err
         }
 
         collections = append(collections, baseCols...)
     }
 
-    return collections, nil, el.Failure()
+    return collections, nil, canUsePreviousBackup, el.Failure()
 }
 
 // createCollections - utility function that retrieves M365
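Note: parseMetadataCollections now owns its fault.Bus and fails open. Recoverable errors while reading metadata items no longer abort the run; they log, clear the delta paths, and flip the new boolean to false, so the caller silently downgrades to a full backup. Hard failures (bad json, duplicate path or delta entries) still return an error. The resulting contract, sketched from the hunks above:

cdps, canUsePreviousBackup, err := parseMetadataCollections(ctx, metadata)
if err != nil {
    // malformed metadata still fails the operation
    return nil, nil, false, err
}

if !canUsePreviousBackup {
    // cdps came back empty: every category incurs a full, non-delta enumeration
}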
@@ -2,6 +2,7 @@ package exchange
 
 import (
     "bytes"
+    "context"
     "sync"
     "testing"
 
@@ -42,18 +43,20 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
     }
 
     table := []struct {
         name string
         data []fileValues
         expect map[string]DeltaPath
-        expectError assert.ErrorAssertionFunc
+        canUsePreviousBackup bool
+        expectError assert.ErrorAssertionFunc
     }{
         {
             name: "delta urls only",
             data: []fileValues{
                 {graph.DeltaURLsFileName, "delta-link"},
             },
             expect: map[string]DeltaPath{},
-            expectError: assert.NoError,
+            canUsePreviousBackup: true,
+            expectError: assert.NoError,
         },
         {
             name: "multiple delta urls",
@@ -61,7 +64,8 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
                 {graph.DeltaURLsFileName, "delta-link"},
                 {graph.DeltaURLsFileName, "delta-link-2"},
             },
-            expectError: assert.Error,
+            canUsePreviousBackup: false,
+            expectError: assert.Error,
         },
         {
             name: "previous path only",
@@ -74,7 +78,8 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
                     Path: "prev-path",
                 },
             },
-            expectError: assert.NoError,
+            canUsePreviousBackup: true,
+            expectError: assert.NoError,
         },
         {
             name: "multiple previous paths",
@@ -82,7 +87,8 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
                 {graph.PreviousPathFileName, "prev-path"},
                 {graph.PreviousPathFileName, "prev-path-2"},
             },
-            expectError: assert.Error,
+            canUsePreviousBackup: false,
+            expectError: assert.Error,
         },
         {
             name: "delta urls and previous paths",
@@ -96,7 +102,8 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
                     Path: "prev-path",
                 },
             },
-            expectError: assert.NoError,
+            canUsePreviousBackup: true,
+            expectError: assert.NoError,
         },
         {
             name: "delta urls and empty previous paths",
@@ -104,8 +111,9 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
                 {graph.DeltaURLsFileName, "delta-link"},
                 {graph.PreviousPathFileName, ""},
             },
             expect: map[string]DeltaPath{},
-            expectError: assert.NoError,
+            canUsePreviousBackup: true,
+            expectError: assert.NoError,
         },
         {
             name: "empty delta urls and previous paths",
@@ -119,7 +127,8 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
                     Path: "prev-path",
                 },
             },
-            expectError: assert.NoError,
+            canUsePreviousBackup: true,
+            expectError: assert.NoError,
         },
         {
             name: "delta urls with special chars",
@@ -133,7 +142,8 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
                     Path: "prev-path",
                 },
             },
-            expectError: assert.NoError,
+            canUsePreviousBackup: true,
+            expectError: assert.NoError,
         },
         {
             name: "delta urls with escaped chars",
@@ -147,7 +157,8 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
                     Path: "prev-path",
                 },
             },
-            expectError: assert.NoError,
+            canUsePreviousBackup: true,
+            expectError: assert.NoError,
         },
         {
             name: "delta urls with newline char runes",
@@ -164,7 +175,8 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
                     Path: "prev-path",
                 },
             },
-            expectError: assert.NoError,
+            canUsePreviousBackup: true,
+            expectError: assert.NoError,
         },
     }
     for _, test := range table {
@@ -191,11 +203,13 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
         )
         require.NoError(t, err, clues.ToCore(err))
 
-        cdps, err := parseMetadataCollections(ctx, []data.RestoreCollection{
-            data.NotFoundRestoreCollection{Collection: coll},
-        }, fault.New(true))
+        cdps, canUsePreviousBackup, err := parseMetadataCollections(ctx, []data.RestoreCollection{
+            data.NoFetchRestoreCollection{Collection: coll},
+        })
         test.expectError(t, err, clues.ToCore(err))
 
+        assert.Equal(t, test.canUsePreviousBackup, canUsePreviousBackup, "can use previous backup")
+
         emails := cdps[path.EmailCategory]
 
         assert.Len(t, emails, len(test.expect))
@@ -208,6 +222,52 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
         }
     }
 }
+
+type failingColl struct {
+    t *testing.T
+}
+
+func (f failingColl) Items(ctx context.Context, errs *fault.Bus) <-chan data.Stream {
+    ic := make(chan data.Stream)
+    defer close(ic)
+
+    errs.AddRecoverable(assert.AnError)
+
+    return ic
+}
+
+func (f failingColl) FullPath() path.Path {
+    tmp, err := path.Build(
+        "tenant",
+        "user",
+        path.ExchangeService,
+        path.EmailCategory,
+        false,
+        "inbox")
+    require.NoError(f.t, err, clues.ToCore(err))
+
+    return tmp
+}
+
+func (f failingColl) FetchItemByName(context.Context, string) (data.Stream, error) {
+    // no fetch calls will be made
+    return nil, nil
+}
+
+// This check is to ensure that we don't error out, but still return
+// canUsePreviousBackup as false on read errors
+func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections_ReadFailure() {
+    t := suite.T()
+
+    ctx, flush := tester.NewContext(t)
+    defer flush()
+
+    fc := failingColl{t}
+
+    _, canUsePreviousBackup, err := parseMetadataCollections(ctx, []data.RestoreCollection{fc})
+    require.NoError(t, err)
+    require.False(t, canUsePreviousBackup)
+}
 
 // ---------------------------------------------------------------------------
 // Integration tests
 // ---------------------------------------------------------------------------
@@ -401,10 +461,11 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
 
     require.NotNil(t, metadata, "collections contains a metadata collection")
 
-    cdps, err := parseMetadataCollections(ctx, []data.RestoreCollection{
-        data.NotFoundRestoreCollection{Collection: metadata},
-    }, fault.New(true))
+    cdps, canUsePreviousBackup, err := parseMetadataCollections(ctx, []data.RestoreCollection{
+        data.NoFetchRestoreCollection{Collection: metadata},
+    })
     require.NoError(t, err, clues.ToCore(err))
+    assert.True(t, canUsePreviousBackup, "can use previous backup")
 
     dps := cdps[test.scope.Category().PathType()]
 
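Note: failingColl above stubs just enough of a restore collection to push a recoverable error through the fault.Bus that parseMetadataCollections now builds internally, proving that read errors surface as canUsePreviousBackup == false rather than as a returned error. If data.RestoreCollection is the interface these three methods satisfy (an assumption; the diff does not show its definition), the usual compile-time guard would be:

var _ data.RestoreCollection = failingColl{}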
@@ -11,7 +11,6 @@ import (
 
     "github.com/alcionai/corso/src/internal/common/ptr"
     exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
-    "github.com/alcionai/corso/src/internal/connector/graph"
     "github.com/alcionai/corso/src/internal/tester"
     "github.com/alcionai/corso/src/pkg/account"
     "github.com/alcionai/corso/src/pkg/fault"
@@ -21,7 +20,6 @@ import (
 
 type RestoreIntgSuite struct {
     tester.Suite
-    gs graph.Servicer
     credentials account.M365Config
     ac api.Client
 }
@@ -44,14 +42,6 @@ func (suite *RestoreIntgSuite) SetupSuite() {
     suite.credentials = m365
     suite.ac, err = api.NewClient(m365)
     require.NoError(t, err, clues.ToCore(err))
-
-    adpt, err := graph.CreateAdapter(
-        m365.AzureTenantID,
-        m365.AzureClientID,
-        m365.AzureClientSecret)
-    require.NoError(t, err, clues.ToCore(err))
-
-    suite.gs = graph.NewService(adpt)
 }
 
 // TestRestoreContact ensures contact object can be created, placed into
@@ -425,10 +425,9 @@ func checkMetadata(
     expect DeltaPaths,
     c data.BackupCollection,
 ) {
-    catPaths, err := parseMetadataCollections(
+    catPaths, _, err := parseMetadataCollections(
         ctx,
-        []data.RestoreCollection{data.NotFoundRestoreCollection{Collection: c}},
-        fault.New(true))
+        []data.RestoreCollection{data.NoFetchRestoreCollection{Collection: c}})
     if !assert.NoError(t, err, "getting metadata", clues.ToCore(err)) {
         return
     }
@@ -14,7 +14,6 @@ import (
     "github.com/alcionai/corso/src/internal/data"
     "github.com/alcionai/corso/src/internal/diagnostics"
     "github.com/alcionai/corso/src/internal/observe"
-    "github.com/alcionai/corso/src/pkg/account"
     "github.com/alcionai/corso/src/pkg/backup/details"
     "github.com/alcionai/corso/src/pkg/control"
     "github.com/alcionai/corso/src/pkg/fault"
@@ -27,9 +26,7 @@ import (
 // store through GraphAPI.
 func RestoreCollections(
     ctx context.Context,
-    creds account.M365Config,
     ac api.Client,
-    gs graph.Servicer,
     dest control.RestoreDestination,
     dcs []data.RestoreCollection,
     deets *details.Builder,
|
|||||||
// number of uploads, but the max that can be specified. This is
|
// number of uploads, but the max that can be specified. This is
|
||||||
// added as a safeguard in case we misconfigure the values.
|
// added as a safeguard in case we misconfigure the values.
|
||||||
maxConccurrentUploads = 20
|
maxConccurrentUploads = 20
|
||||||
|
|
||||||
|
// CopyBufferSize is used for chunked upload
|
||||||
|
// Microsoft recommends 5-10MB buffers
|
||||||
|
// https://docs.microsoft.com/en-us/graph/api/driveitem-createuploadsession?view=graph-rest-1.0#best-practices
|
||||||
|
CopyBufferSize = 5 * 1024 * 1024
|
||||||
)
|
)
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|||||||
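Note: this is the same constant and documentation the connector dropped (copyBufferSize, earlier in this diff), re-homed and exported so the restore path can size its own upload buffers. The pool it feeds would mirror the deleted one verbatim:

// Buffer pool for uploads; each buffer holds one chunked-upload window.
pool := sync.Pool{
    New: func() interface{} {
        b := make([]byte, CopyBufferSize)
        return &b
    },
}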
@@ -169,9 +169,13 @@ func IsMalware(err error) bool {
 }
 
 func IsMalwareResp(ctx context.Context, resp *http.Response) bool {
+    if resp == nil {
+        return false
+    }
+
     // https://learn.microsoft.com/en-us/openspecs/sharepoint_protocols/ms-wsshp/ba4ee7a8-704c-4e9c-ab14-fa44c574bdf4
     // https://learn.microsoft.com/en-us/openspecs/sharepoint_protocols/ms-wdvmoduu/6fa6d4a9-ac18-4cd7-b696-8a3b14a98291
-    if resp.Header.Get("X-Virus-Infected") == "true" {
+    if len(resp.Header) > 0 && resp.Header.Get("X-Virus-Infected") == "true" {
         return true
     }
 
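Note: with the nil guards, IsMalwareResp is safe to call directly on the response of a failed round trip. A sketch, where client and req stand in for the caller's transport:

resp, err := client.Do(req) // resp may be nil when err != nil
if graph.IsMalwareResp(ctx, resp) {
    return clues.New("malware detected").WithClues(ctx)
}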
@ -15,7 +15,7 @@ import (
|
|||||||
"github.com/alcionai/corso/src/internal/data"
|
"github.com/alcionai/corso/src/internal/data"
|
||||||
"github.com/alcionai/corso/src/internal/operations/inject"
|
"github.com/alcionai/corso/src/internal/operations/inject"
|
||||||
"github.com/alcionai/corso/src/pkg/account"
|
"github.com/alcionai/corso/src/pkg/account"
|
||||||
m365api "github.com/alcionai/corso/src/pkg/services/m365/api"
|
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||||
)
|
)
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
@@ -32,9 +32,7 @@ var (
 // GraphRequestAdapter from the msgraph-sdk-go. Additional fields are for
 // bookkeeping and interfacing with other component.
 type GraphConnector struct {
-	Service graph.Servicer
-	Discovery m365api.Client
-	itemClient graph.Requester // configured to handle large item downloads
+	AC api.Client

 	tenant string
 	credentials account.M365Config
@@ -64,12 +62,7 @@ func NewGraphConnector(
 		return nil, clues.Wrap(err, "retrieving m365 account configuration").WithClues(ctx)
 	}

-	service, err := createService(creds)
-	if err != nil {
-		return nil, clues.Wrap(err, "creating service connection").WithClues(ctx)
-	}
-
-	ac, err := m365api.NewClient(creds)
+	ac, err := api.NewClient(creds)
 	if err != nil {
 		return nil, clues.Wrap(err, "creating api client").WithClues(ctx)
 	}
@@ -80,12 +73,10 @@ func NewGraphConnector(
 	}

 	gc := GraphConnector{
-		Discovery: ac,
+		AC: ac,
 		IDNameLookup: idname.NewCache(nil),
-		Service: service,

 		credentials: creds,
-		itemClient: graph.NewNoTimeoutHTTPWrapper(),
 		ownerLookup: rc,
 		tenant: acct.ID(),
 		wg: &sync.WaitGroup{},
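With Service and Discovery collapsed into the single AC field, every Graph surface is reached through one api.Client. A hedged sketch of the resulting call path (resolveUserID is not part of the commit; the owner value is a placeholder):

    func resolveUserID(ctx context.Context, creds account.M365Config, owner string) (string, error) {
        ac, err := api.NewClient(creds)
        if err != nil {
            return "", clues.Wrap(err, "creating api client")
        }

        // discovery-style lookups now ride the same client as service calls
        u, err := ac.Users().GetByID(ctx, owner)
        if err != nil {
            return "", clues.Wrap(err, "fetching user")
        }

        return ptr.Val(u.GetId()), nil
    }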
@@ -94,23 +85,6 @@ func NewGraphConnector(
 	return &gc, nil
 }

-// ---------------------------------------------------------------------------
-// Service Client
-// ---------------------------------------------------------------------------
-
-// createService constructor for graphService component
-func createService(creds account.M365Config) (*graph.Service, error) {
-	adapter, err := graph.CreateAdapter(
-		creds.AzureTenantID,
-		creds.AzureClientID,
-		creds.AzureClientSecret)
-	if err != nil {
-		return &graph.Service{}, err
-	}
-
-	return graph.NewService(adapter), nil
-}
-
 // ---------------------------------------------------------------------------
 // Processing Status
 // ---------------------------------------------------------------------------
@@ -180,7 +154,7 @@ const (
 	Sites
 )

-func (r Resource) resourceClient(ac m365api.Client) (*resourceClient, error) {
+func (r Resource) resourceClient(ac api.Client) (*resourceClient, error) {
 	switch r {
 	case Users:
 		return &resourceClient{enum: r, getter: ac.Users()}, nil
@@ -209,7 +183,7 @@ var _ getOwnerIDAndNamer = &resourceClient{}
 type getOwnerIDAndNamer interface {
 	getOwnerIDAndNameFrom(
 		ctx context.Context,
-		discovery m365api.Client,
+		discovery api.Client,
 		owner string,
 		ins idname.Cacher,
 	) (
@@ -227,7 +201,7 @@ type getOwnerIDAndNamer interface {
 // (PrincipalName for users, WebURL for sites).
 func (r resourceClient) getOwnerIDAndNameFrom(
 	ctx context.Context,
-	discovery m365api.Client,
+	discovery api.Client,
 	owner string,
 	ins idname.Cacher,
 ) (string, string, error) {
@@ -275,7 +249,7 @@ func (gc *GraphConnector) PopulateOwnerIDAndNamesFrom(
 	owner string, // input value, can be either id or name
 	ins idname.Cacher,
 ) (string, string, error) {
-	id, name, err := gc.ownerLookup.getOwnerIDAndNameFrom(ctx, gc.Discovery, owner, ins)
+	id, name, err := gc.ownerLookup.getOwnerIDAndNameFrom(ctx, gc.AC, owner, ins)
 	if err != nil {
 		return "", "", clues.Wrap(err, "identifying resource owner")
 	}
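Callers see only the field rename; the lookup contract is unchanged. A minimal usage sketch (the owner string and empty cache are placeholders, and any parameters ahead of owner are assumed from the hunk):

    id, name, err := gc.PopulateOwnerIDAndNamesFrom(ctx, "owner@example.com", idname.NewCache(nil))
    if err != nil {
        return err
    }

    // id is the canonical owner ID; name is a PrincipalName for users
    // or a WebURL for sites, per the comment above
    _ = id
    _ = name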
@@ -18,7 +18,6 @@ import (
 	"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/internal/version"
-	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
@@ -44,8 +43,8 @@ var (
 func mustGetDefaultDriveID(
 	t *testing.T,
 	ctx context.Context, //revive:disable-line:context-as-argument
-	backupService path.ServiceType,
-	service graph.Servicer,
+	ac api.Client,
+	service path.ServiceType,
 	resourceOwner string,
 ) string {
 	var (
@@ -53,13 +52,13 @@ func mustGetDefaultDriveID(
 		d models.Driveable
 	)

-	switch backupService {
+	switch service {
 	case path.OneDriveService:
-		d, err = api.GetUsersDrive(ctx, service, resourceOwner)
+		d, err = ac.Users().GetDefaultDrive(ctx, resourceOwner)
 	case path.SharePointService:
-		d, err = api.GetSitesDefaultDrive(ctx, service, resourceOwner)
+		d, err = ac.Sites().GetDefaultDrive(ctx, resourceOwner)
 	default:
-		assert.FailNowf(t, "unknown service type %s", backupService.String())
+		assert.FailNowf(t, "unknown service type %s", service.String())
 	}

 	if err != nil {
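Drive discovery now goes through typed client namespaces instead of free functions over a graph.Servicer. Outside the test helper, the same lookup reads roughly like this (ptr.Val on the model ID mirrors usage elsewhere in this commit):

    d, err := ac.Users().GetDefaultDrive(ctx, resourceOwner)
    if err != nil {
        return "", clues.Wrap(err, "getting default drive")
    }

    driveID := ptr.Val(d.GetId())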
@@ -75,19 +74,18 @@ func mustGetDefaultDriveID(
 }

 type suiteInfo interface {
-	Service() graph.Servicer
-	Account() account.Account
+	APIClient() api.Client
 	Tenant() string
 	// Returns (username, user ID) for the user. These values are used for
 	// permissions.
 	PrimaryUser() (string, string)
 	SecondaryUser() (string, string)
 	TertiaryUser() (string, string)
-	// BackupResourceOwner returns the resource owner to run the backup/restore
+	// ResourceOwner returns the resource owner to run the backup/restore
 	// with. This can be different from the values used for permissions and it can
 	// also be a site.
-	BackupResourceOwner() string
-	BackupService() path.ServiceType
+	ResourceOwner() string
+	Service() path.ServiceType
 	Resource() Resource
 }

@@ -97,25 +95,46 @@ type oneDriveSuite interface {
 }

 type suiteInfoImpl struct {
+	ac api.Client
 	connector *GraphConnector
 	resourceOwner string
-	user string
-	userID string
+	resourceType Resource
 	secondaryUser string
 	secondaryUserID string
+	service path.ServiceType
 	tertiaryUser string
 	tertiaryUserID string
-	acct account.Account
-	service path.ServiceType
-	resourceType Resource
+	user string
+	userID string
 }

-func (si suiteInfoImpl) Service() graph.Servicer {
-	return si.connector.Service
+func NewSuiteInfoImpl(
+	t *testing.T,
+	ctx context.Context, //revive:disable-line:context-as-argument
+	resourceOwner string,
+	service path.ServiceType,
+) suiteInfoImpl {
+	resource := Users
+	if service == path.SharePointService {
+		resource = Sites
+	}
+
+	gc := loadConnector(ctx, t, resource)
+
+	return suiteInfoImpl{
+		ac: gc.AC,
+		connector: gc,
+		resourceOwner: resourceOwner,
+		resourceType: resource,
+		secondaryUser: tester.SecondaryM365UserID(t),
+		service: service,
+		tertiaryUser: tester.TertiaryM365UserID(t),
+		user: tester.M365UserID(t),
+	}
 }

-func (si suiteInfoImpl) Account() account.Account {
-	return si.acct
+func (si suiteInfoImpl) APIClient() api.Client {
+	return si.ac
 }

 func (si suiteInfoImpl) Tenant() string {
@@ -134,11 +153,11 @@ func (si suiteInfoImpl) TertiaryUser() (string, string) {
 	return si.tertiaryUser, si.tertiaryUserID
 }

-func (si suiteInfoImpl) BackupResourceOwner() string {
+func (si suiteInfoImpl) ResourceOwner() string {
 	return si.resourceOwner
 }

-func (si suiteInfoImpl) BackupService() path.ServiceType {
+func (si suiteInfoImpl) Service() path.ServiceType {
 	return si.service
 }

@@ -162,8 +181,7 @@ func TestGraphConnectorSharePointIntegrationSuite(t *testing.T) {
 	suite.Run(t, &GraphConnectorSharePointIntegrationSuite{
 		Suite: tester.NewIntegrationSuite(
 			t,
-			[][]string{tester.M365AcctCredEnvs},
-		),
+			[][]string{tester.M365AcctCredEnvs}),
 	})
 }

@@ -173,27 +191,18 @@ func (suite *GraphConnectorSharePointIntegrationSuite) SetupSuite() {
 	ctx, flush := tester.NewContext(t)
 	defer flush()

-	si := suiteInfoImpl{
-		connector: loadConnector(ctx, suite.T(), Sites),
-		user: tester.M365UserID(suite.T()),
-		secondaryUser: tester.SecondaryM365UserID(suite.T()),
-		tertiaryUser: tester.TertiaryM365UserID(suite.T()),
-		acct: tester.NewM365Account(suite.T()),
-		service: path.SharePointService,
-		resourceType: Sites,
-	}
-
-	si.resourceOwner = tester.M365SiteID(suite.T())
-
-	user, err := si.connector.Discovery.Users().GetByID(ctx, si.user)
+	si := NewSuiteInfoImpl(suite.T(), ctx, tester.M365SiteID(suite.T()), path.SharePointService)
+
+	// users needed for permissions
+	user, err := si.connector.AC.Users().GetByID(ctx, si.user)
 	require.NoError(t, err, "fetching user", si.user, clues.ToCore(err))
 	si.userID = ptr.Val(user.GetId())

-	secondaryUser, err := si.connector.Discovery.Users().GetByID(ctx, si.secondaryUser)
+	secondaryUser, err := si.connector.AC.Users().GetByID(ctx, si.secondaryUser)
 	require.NoError(t, err, "fetching user", si.secondaryUser, clues.ToCore(err))
 	si.secondaryUserID = ptr.Val(secondaryUser.GetId())

-	tertiaryUser, err := si.connector.Discovery.Users().GetByID(ctx, si.tertiaryUser)
+	tertiaryUser, err := si.connector.AC.Users().GetByID(ctx, si.tertiaryUser)
 	require.NoError(t, err, "fetching user", si.tertiaryUser, clues.ToCore(err))
 	si.tertiaryUserID = ptr.Val(tertiaryUser.GetId())

@@ -233,8 +242,7 @@ func TestGraphConnectorOneDriveIntegrationSuite(t *testing.T) {
 	suite.Run(t, &GraphConnectorOneDriveIntegrationSuite{
 		Suite: tester.NewIntegrationSuite(
 			t,
-			[][]string{tester.M365AcctCredEnvs},
-		),
+			[][]string{tester.M365AcctCredEnvs}),
 	})
 }

@@ -244,25 +252,20 @@ func (suite *GraphConnectorOneDriveIntegrationSuite) SetupSuite() {
 	ctx, flush := tester.NewContext(t)
 	defer flush()

-	si := suiteInfoImpl{
-		connector: loadConnector(ctx, t, Users),
-		user: tester.M365UserID(t),
-		secondaryUser: tester.SecondaryM365UserID(t),
-		acct: tester.NewM365Account(t),
-		service: path.OneDriveService,
-		resourceType: Users,
-	}
-
-	si.resourceOwner = si.user
-
-	user, err := si.connector.Discovery.Users().GetByID(ctx, si.user)
+	si := NewSuiteInfoImpl(t, ctx, tester.M365UserID(t), path.OneDriveService)
+
+	user, err := si.connector.AC.Users().GetByID(ctx, si.user)
 	require.NoError(t, err, "fetching user", si.user, clues.ToCore(err))
 	si.userID = ptr.Val(user.GetId())

-	secondaryUser, err := si.connector.Discovery.Users().GetByID(ctx, si.secondaryUser)
+	secondaryUser, err := si.connector.AC.Users().GetByID(ctx, si.secondaryUser)
 	require.NoError(t, err, "fetching user", si.secondaryUser, clues.ToCore(err))
 	si.secondaryUserID = ptr.Val(secondaryUser.GetId())

+	tertiaryUser, err := si.connector.AC.Users().GetByID(ctx, si.tertiaryUser)
+	require.NoError(t, err, "fetching user", si.tertiaryUser, clues.ToCore(err))
+	si.tertiaryUserID = ptr.Val(tertiaryUser.GetId())
+
 	suite.suiteInfo = si
 }

@@ -299,8 +302,7 @@ func TestGraphConnectorOneDriveNightlySuite(t *testing.T) {
 	suite.Run(t, &GraphConnectorOneDriveNightlySuite{
 		Suite: tester.NewNightlySuite(
 			t,
-			[][]string{tester.M365AcctCredEnvs},
-		),
+			[][]string{tester.M365AcctCredEnvs}),
 	})
 }

@@ -310,25 +312,20 @@ func (suite *GraphConnectorOneDriveNightlySuite) SetupSuite() {
 	ctx, flush := tester.NewContext(t)
 	defer flush()

-	si := suiteInfoImpl{
-		connector: loadConnector(ctx, t, Users),
-		user: tester.M365UserID(t),
-		secondaryUser: tester.SecondaryM365UserID(t),
-		acct: tester.NewM365Account(t),
-		service: path.OneDriveService,
-		resourceType: Users,
-	}
-
-	si.resourceOwner = si.user
-
-	user, err := si.connector.Discovery.Users().GetByID(ctx, si.user)
+	si := NewSuiteInfoImpl(t, ctx, tester.M365UserID(t), path.OneDriveService)
+
+	user, err := si.connector.AC.Users().GetByID(ctx, si.user)
 	require.NoError(t, err, "fetching user", si.user, clues.ToCore(err))
 	si.userID = ptr.Val(user.GetId())

-	secondaryUser, err := si.connector.Discovery.Users().GetByID(ctx, si.secondaryUser)
+	secondaryUser, err := si.connector.AC.Users().GetByID(ctx, si.secondaryUser)
 	require.NoError(t, err, "fetching user", si.secondaryUser, clues.ToCore(err))
 	si.secondaryUserID = ptr.Val(secondaryUser.GetId())

+	tertiaryUser, err := si.connector.AC.Users().GetByID(ctx, si.tertiaryUser)
+	require.NoError(t, err, "fetching user", si.tertiaryUser, clues.ToCore(err))
+	si.tertiaryUserID = ptr.Val(tertiaryUser.GetId())
+
 	suite.suiteInfo = si
 }

@@ -367,9 +364,9 @@ func testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(
 	driveID := mustGetDefaultDriveID(
 		t,
 		ctx,
-		suite.BackupService(),
+		suite.APIClient(),
 		suite.Service(),
-		suite.BackupResourceOwner())
+		suite.ResourceOwner())

 	rootPath := []string{
 		odConsts.DrivesPathDir,
@@ -470,17 +467,17 @@ func testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(
 		},
 	}

-	expected, err := DataForInfo(suite.BackupService(), cols, version.Backup)
+	expected, err := DataForInfo(suite.Service(), cols, version.Backup)
 	require.NoError(suite.T(), err)

 	for vn := startVersion; vn <= version.Backup; vn++ {
 		suite.Run(fmt.Sprintf("Version%d", vn), func() {
 			t := suite.T()
-			input, err := DataForInfo(suite.BackupService(), cols, vn)
+			input, err := DataForInfo(suite.Service(), cols, vn)
 			require.NoError(suite.T(), err)

 			testData := restoreBackupInfoMultiVersion{
-				service: suite.BackupService(),
+				service: suite.Service(),
 				resource: suite.Resource(),
 				backupVersion: vn,
 				collectionsPrevious: input,
@@ -489,10 +486,9 @@ func testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(

 			runRestoreBackupTestVersions(
 				t,
-				suite.Account(),
 				testData,
 				suite.Tenant(),
-				[]string{suite.BackupResourceOwner()},
+				[]string{suite.ResourceOwner()},
 				control.Options{
 					RestorePermissions: true,
 					ToggleFeatures: control.Toggles{},
@@ -513,9 +509,9 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) {
 	driveID := mustGetDefaultDriveID(
 		t,
 		ctx,
-		suite.BackupService(),
+		suite.APIClient(),
 		suite.Service(),
-		suite.BackupResourceOwner())
+		suite.ResourceOwner())

 	fileName2 := "test-file2.txt"
 	folderCName := "folder-c"
@@ -683,9 +679,9 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) {
 		},
 	}

-	expected, err := DataForInfo(suite.BackupService(), cols, version.Backup)
+	expected, err := DataForInfo(suite.Service(), cols, version.Backup)
 	require.NoError(suite.T(), err)
-	bss := suite.BackupService().String()
+	bss := suite.Service().String()

 	for vn := startVersion; vn <= version.Backup; vn++ {
 		suite.Run(fmt.Sprintf("%s-Version%d", bss, vn), func() {
@@ -693,11 +689,11 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) {
 			// Ideally this can always be true or false and still
 			// work, but limiting older versions to use emails so as
 			// to validate that flow as well.
-			input, err := DataForInfo(suite.BackupService(), cols, vn)
+			input, err := DataForInfo(suite.Service(), cols, vn)
 			require.NoError(suite.T(), err)

 			testData := restoreBackupInfoMultiVersion{
-				service: suite.BackupService(),
+				service: suite.Service(),
 				resource: suite.Resource(),
 				backupVersion: vn,
 				collectionsPrevious: input,
@@ -706,10 +702,9 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) {

 			runRestoreBackupTestVersions(
 				t,
-				suite.Account(),
 				testData,
 				suite.Tenant(),
-				[]string{suite.BackupResourceOwner()},
+				[]string{suite.ResourceOwner()},
 				control.Options{
 					RestorePermissions: true,
 					ToggleFeatures: control.Toggles{},
@@ -730,9 +725,9 @@ func testPermissionsBackupAndNoRestore(suite oneDriveSuite, startVersion int) {
 	driveID := mustGetDefaultDriveID(
 		t,
 		ctx,
-		suite.BackupService(),
+		suite.APIClient(),
 		suite.Service(),
-		suite.BackupResourceOwner())
+		suite.ResourceOwner())

 	inputCols := []OnedriveColInfo{
 		{
@@ -772,18 +767,18 @@ func testPermissionsBackupAndNoRestore(suite oneDriveSuite, startVersion int) {
 		},
 	}

-	expected, err := DataForInfo(suite.BackupService(), expectedCols, version.Backup)
+	expected, err := DataForInfo(suite.Service(), expectedCols, version.Backup)
 	require.NoError(suite.T(), err)
-	bss := suite.BackupService().String()
+	bss := suite.Service().String()

 	for vn := startVersion; vn <= version.Backup; vn++ {
 		suite.Run(fmt.Sprintf("%s-Version%d", bss, vn), func() {
 			t := suite.T()
-			input, err := DataForInfo(suite.BackupService(), inputCols, vn)
+			input, err := DataForInfo(suite.Service(), inputCols, vn)
 			require.NoError(suite.T(), err)

 			testData := restoreBackupInfoMultiVersion{
-				service: suite.BackupService(),
+				service: suite.Service(),
 				resource: suite.Resource(),
 				backupVersion: vn,
 				collectionsPrevious: input,
@@ -792,10 +787,9 @@ func testPermissionsBackupAndNoRestore(suite oneDriveSuite, startVersion int) {

 			runRestoreBackupTestVersions(
 				t,
-				suite.Account(),
 				testData,
 				suite.Tenant(),
-				[]string{suite.BackupResourceOwner()},
+				[]string{suite.ResourceOwner()},
 				control.Options{
 					RestorePermissions: false,
 					ToggleFeatures: control.Toggles{},
@@ -819,9 +813,9 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio
 	driveID := mustGetDefaultDriveID(
 		t,
 		ctx,
-		suite.BackupService(),
+		suite.APIClient(),
 		suite.Service(),
-		suite.BackupResourceOwner())
+		suite.ResourceOwner())

 	folderAName := "custom"
 	folderBName := "inherited"
@@ -953,9 +947,9 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio
 		},
 	}

-	expected, err := DataForInfo(suite.BackupService(), cols, version.Backup)
+	expected, err := DataForInfo(suite.Service(), cols, version.Backup)
 	require.NoError(suite.T(), err)
-	bss := suite.BackupService().String()
+	bss := suite.Service().String()

 	for vn := startVersion; vn <= version.Backup; vn++ {
 		suite.Run(fmt.Sprintf("%s-Version%d", bss, vn), func() {
@@ -963,11 +957,11 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio
 			// Ideally this can always be true or false and still
 			// work, but limiting older versions to use emails so as
 			// to validate that flow as well.
-			input, err := DataForInfo(suite.BackupService(), cols, vn)
+			input, err := DataForInfo(suite.Service(), cols, vn)
 			require.NoError(suite.T(), err)

 			testData := restoreBackupInfoMultiVersion{
-				service: suite.BackupService(),
+				service: suite.Service(),
 				resource: suite.Resource(),
 				backupVersion: vn,
 				collectionsPrevious: input,
@@ -976,10 +970,9 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio

 			runRestoreBackupTestVersions(
 				t,
-				suite.Account(),
 				testData,
 				suite.Tenant(),
-				[]string{suite.BackupResourceOwner()},
+				[]string{suite.ResourceOwner()},
 				control.Options{
 					RestorePermissions: true,
 					ToggleFeatures: control.Toggles{},
@@ -1001,9 +994,9 @@ func testRestoreFolderNamedFolderRegression(
 	driveID := mustGetDefaultDriveID(
 		suite.T(),
 		ctx,
-		suite.BackupService(),
+		suite.APIClient(),
 		suite.Service(),
-		suite.BackupResourceOwner())
+		suite.ResourceOwner())

 	rootPath := []string{
 		odConsts.DrivesPathDir,
@@ -1072,30 +1065,29 @@ func testRestoreFolderNamedFolderRegression(
 		},
 	}

-	expected, err := DataForInfo(suite.BackupService(), cols, version.Backup)
+	expected, err := DataForInfo(suite.Service(), cols, version.Backup)
 	require.NoError(suite.T(), err)
-	bss := suite.BackupService().String()
+	bss := suite.Service().String()

 	for vn := startVersion; vn <= version.Backup; vn++ {
 		suite.Run(fmt.Sprintf("%s-Version%d", bss, vn), func() {
 			t := suite.T()
-			input, err := DataForInfo(suite.BackupService(), cols, vn)
+			input, err := DataForInfo(suite.Service(), cols, vn)
 			require.NoError(suite.T(), err)

 			testData := restoreBackupInfoMultiVersion{
-				service: suite.BackupService(),
+				service: suite.Service(),
 				resource: suite.Resource(),
 				backupVersion: vn,
 				collectionsPrevious: input,
 				collectionsLatest: expected,
 			}

-			runRestoreTestWithVerion(
+			runRestoreTestWithVersion(
 				t,
-				suite.Account(),
 				testData,
 				suite.Tenant(),
-				[]string{suite.BackupResourceOwner()},
+				[]string{suite.ResourceOwner()},
 				control.Options{
 					RestorePermissions: true,
 					ToggleFeatures: control.Toggles{},
@@ -19,7 +19,6 @@ import (
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/internal/version"
-	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
@@ -263,7 +262,6 @@ type GraphConnectorIntegrationSuite struct {
 	connector *GraphConnector
 	user string
 	secondaryUser string
-	acct account.Account
 }

 func TestGraphConnectorIntegrationSuite(t *testing.T) {
@@ -284,7 +282,6 @@ func (suite *GraphConnectorIntegrationSuite) SetupSuite() {
 	suite.connector = loadConnector(ctx, t, Users)
 	suite.user = tester.M365UserID(t)
 	suite.secondaryUser = tester.SecondaryM365UserID(t)
-	suite.acct = tester.NewM365Account(t)

 	tester.LogTimeOfTest(t)
 }
@@ -296,7 +293,6 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreFailsBadService() {
 	defer flush()

 	var (
-		acct = tester.NewM365Account(t)
 		dest = tester.DefaultTestRestoreDestination("")
 		sel = selectors.Selector{
 			Service: selectors.ServiceUnknown,
@@ -306,7 +302,6 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreFailsBadService() {
 	deets, err := suite.connector.ConsumeRestoreCollections(
 		ctx,
 		version.Backup,
-		acct,
 		sel,
 		dest,
 		control.Options{
@@ -385,7 +380,6 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
 	deets, err := suite.connector.ConsumeRestoreCollections(
 		ctx,
 		version.Backup,
-		suite.acct,
 		test.sel,
 		dest,
 		control.Options{
@@ -429,7 +423,6 @@ func runRestore(
 	deets, err := restoreGC.ConsumeRestoreCollections(
 		ctx,
 		backupVersion,
-		config.Acct,
 		restoreSel,
 		config.Dest,
 		config.Opts,
@@ -494,7 +487,7 @@ func runBackupAndCompare(
 	t.Logf("Selective backup of %s\n", backupSel)

 	start := time.Now()
-	dcs, excludes, err := backupGC.ProduceBackupCollections(
+	dcs, excludes, canUsePreviousBackup, err := backupGC.ProduceBackupCollections(
 		ctx,
 		backupSel,
 		backupSel,
@@ -503,6 +496,7 @@ func runBackupAndCompare(
 		config.Opts,
 		fault.New(true))
 	require.NoError(t, err, clues.ToCore(err))
+	assert.True(t, canUsePreviousBackup, "can use previous backup")
 	// No excludes yet because this isn't an incremental backup.
 	assert.True(t, excludes.Empty())

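ProduceBackupCollections now reports, as a fourth return value, whether the previous backup is usable as an incremental base. The consumer-side branch, reduced to a sketch (produce stands in for the real call, whose full argument list is elided in the hunk above):

    dcs, excludes, canUsePreviousBackup, err := produce(ctx)
    if err != nil {
        return err
    }

    if !canUsePreviousBackup {
        // previous backup metadata was unusable: fall back to a full,
        // non-incremental backup instead of merging against it
    }

    _ = dcs
    _ = excludes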
@@ -528,7 +522,6 @@

 func runRestoreBackupTest(
 	t *testing.T,
-	acct account.Account,
 	test restoreBackupInfo,
 	tenant string,
 	resourceOwners []string,
@@ -538,7 +531,6 @@ func runRestoreBackupTest(
 	defer flush()

 	config := ConfigInfo{
-		Acct: acct,
 		Opts: opts,
 		Resource: test.resource,
 		Service: test.service,
@@ -573,9 +565,8 @@
 }

 // runRestoreTest restores with data using the test's backup version
-func runRestoreTestWithVerion(
+func runRestoreTestWithVersion(
 	t *testing.T,
-	acct account.Account,
 	test restoreBackupInfoMultiVersion,
 	tenant string,
 	resourceOwners []string,
@@ -585,7 +576,6 @@ func runRestoreTestWithVerion(
 	defer flush()

 	config := ConfigInfo{
-		Acct: acct,
 		Opts: opts,
 		Resource: test.resource,
 		Service: test.service,
@@ -614,7 +604,6 @@
 // something that would be in the form of a newer backup.
 func runRestoreBackupTestVersions(
 	t *testing.T,
-	acct account.Account,
 	test restoreBackupInfoMultiVersion,
 	tenant string,
 	resourceOwners []string,
@@ -624,7 +613,6 @@ func runRestoreBackupTestVersions(
 	defer flush()

 	config := ConfigInfo{
-		Acct: acct,
 		Opts: opts,
 		Resource: test.resource,
 		Service: test.service,
@@ -920,15 +908,13 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
 		suite.Run(test.name, func() {
 			runRestoreBackupTest(
 				suite.T(),
-				suite.acct,
 				test,
 				suite.connector.tenant,
 				[]string{suite.user},
 				control.Options{
 					RestorePermissions: true,
 					ToggleFeatures: control.Toggles{},
-				},
-			)
+				})
 		})
 	}
 }
@@ -1044,7 +1030,6 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
 	deets, err := restoreGC.ConsumeRestoreCollections(
 		ctx,
 		version.Backup,
-		suite.acct,
 		restoreSel,
 		dest,
 		control.Options{
@@ -1075,7 +1060,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
 	backupSel := backupSelectorForExpected(t, test.service, expectedDests)
 	t.Log("Selective backup of", backupSel)

-	dcs, excludes, err := backupGC.ProduceBackupCollections(
+	dcs, excludes, canUsePreviousBackup, err := backupGC.ProduceBackupCollections(
 		ctx,
 		backupSel,
 		backupSel,
@@ -1087,6 +1072,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
 		},
 		fault.New(true))
 	require.NoError(t, err, clues.ToCore(err))
+	assert.True(t, canUsePreviousBackup, "can use previous backup")
 	// No excludes yet because this isn't an incremental backup.
 	assert.True(t, excludes.Empty())

@@ -1135,7 +1121,6 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup_largeMailAttac

 	runRestoreBackupTest(
 		suite.T(),
-		suite.acct,
 		test,
 		suite.connector.tenant,
 		[]string{suite.user},
@@ -1231,7 +1216,7 @@ func (suite *GraphConnectorIntegrationSuite) TestBackup_CreatesPrefixCollections

 	backupSel.SetDiscreteOwnerIDName(id, name)

-	dcs, excludes, err := backupGC.ProduceBackupCollections(
+	dcs, excludes, canUsePreviousBackup, err := backupGC.ProduceBackupCollections(
 		ctx,
 		inMock.NewProvider(id, name),
 		backupSel,
@@ -1243,6 +1228,7 @@ func (suite *GraphConnectorIntegrationSuite) TestBackup_CreatesPrefixCollections
 		},
 		fault.New(true))
 	require.NoError(t, err)
+	assert.True(t, canUsePreviousBackup, "can use previous backup")
 	// No excludes yet because this isn't an incremental backup.
 	assert.True(t, excludes.Empty())

@@ -8,7 +8,6 @@ import (
 	exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
 	"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
 	"github.com/alcionai/corso/src/internal/data"
-	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/path"
 )
@@ -39,7 +38,6 @@ type ItemInfo struct {
 }

 type ConfigInfo struct {
-	Acct account.Account
 	Opts control.Options
 	Resource Resource
 	Service path.ServiceType
@@ -104,7 +102,7 @@ type mockRestoreCollection struct {
 	auxItems map[string]data.Stream
 }

-func (rc mockRestoreCollection) Fetch(
+func (rc mockRestoreCollection) FetchItemByName(
 	ctx context.Context,
 	name string,
 ) (data.Stream, error) {
@@ -7,7 +7,6 @@ import (
 	"github.com/alcionai/corso/src/internal/common/prefixmatcher"
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/operations/inject"
-	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/fault"
@@ -39,9 +38,10 @@ func (gc GraphConnector) ProduceBackupCollections(
 ) (
 	[]data.BackupCollection,
 	prefixmatcher.StringSetReader,
+	bool,
 	error,
 ) {
-	return gc.Collections, gc.Exclude, gc.Err
+	return gc.Collections, gc.Exclude, gc.Err == nil, gc.Err
 }

 func (gc GraphConnector) IsBackupRunnable(
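The mock derives the new boolean from its configured error, so a successful mock implies a usable previous backup with no extra wiring. A sketch of the two configurations (field names taken from the return statement above; assert.AnError is testify's sentinel error):

    // wired for success: returns (cols, nil, true, nil)
    gc := GraphConnector{Collections: cols, Exclude: nil, Err: nil}

    // wired for failure: returns (nil, nil, false, assert.AnError)
    gcFail := GraphConnector{Err: assert.AnError}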
@@ -59,7 +59,6 @@ func (gc GraphConnector) Wait() *data.CollectionStats {
 func (gc GraphConnector) ConsumeRestoreCollections(
 	_ context.Context,
 	_ int,
-	_ account.Account,
 	_ selectors.Selector,
 	_ control.RestoreDestination,
 	_ control.Options,
@@ -43,8 +43,7 @@ var (

 // Collection represents a set of OneDrive objects retrieved from M365
 type Collection struct {
-	// configured to handle large item downloads
-	itemClient graph.Requester
+	handler BackupHandler

 	// data is used to share data streams with the collection consumer
 	data chan data.Stream
@@ -55,16 +54,11 @@ type Collection struct {
 	driveItems map[string]models.DriveItemable

 	// Primary M365 ID of the drive this collection was created from
 	driveID string
-	// Display Name of the associated drive
 	driveName string
-	source driveSource
-	service graph.Servicer
-	statusUpdater support.StatusUpdater
-	itemGetter itemGetterFunc
-	itemReader itemReaderFunc
-	itemMetaReader itemMetaReaderFunc
-	ctrl control.Options
+	statusUpdater support.StatusUpdater
+	ctrl control.Options

 	// PrevPath is the previous hierarchical path used by this collection.
 	// It may be the same as fullPath, if the folder was not renamed or
@@ -92,29 +86,6 @@ type Collection struct {
 	doNotMergeItems bool
 }

-// itemGetterFunc gets a specified item
-type itemGetterFunc func(
-	ctx context.Context,
-	srv graph.Servicer,
-	driveID, itemID string,
-) (models.DriveItemable, error)
-
-// itemReadFunc returns a reader for the specified item
-type itemReaderFunc func(
-	ctx context.Context,
-	client graph.Requester,
-	item models.DriveItemable,
-) (details.ItemInfo, io.ReadCloser, error)
-
-// itemMetaReaderFunc returns a reader for the metadata of the
-// specified item
-type itemMetaReaderFunc func(
-	ctx context.Context,
-	service graph.Servicer,
-	driveID string,
-	item models.DriveItemable,
-) (io.ReadCloser, int, error)
-
 func pathToLocation(p path.Path) (*path.Builder, error) {
 	if p == nil {
 		return nil, nil
@@ -130,13 +101,11 @@ func pathToLocation(p path.Path) (*path.Builder, error) {

 // NewCollection creates a Collection
 func NewCollection(
-	itemClient graph.Requester,
+	handler BackupHandler,
 	currPath path.Path,
 	prevPath path.Path,
 	driveID string,
-	service graph.Servicer,
 	statusUpdater support.StatusUpdater,
-	source driveSource,
 	ctrlOpts control.Options,
 	colScope collectionScope,
 	doNotMergeItems bool,
@@ -156,13 +125,11 @@ func NewCollection(
 	}

 	c := newColl(
-		itemClient,
+		handler,
 		currPath,
 		prevPath,
 		driveID,
-		service,
 		statusUpdater,
-		source,
 		ctrlOpts,
 		colScope,
 		doNotMergeItems)
@@ -174,26 +141,21 @@ func NewCollection(
 }

 func newColl(
-	gr graph.Requester,
+	handler BackupHandler,
 	currPath path.Path,
 	prevPath path.Path,
 	driveID string,
-	service graph.Servicer,
 	statusUpdater support.StatusUpdater,
-	source driveSource,
 	ctrlOpts control.Options,
 	colScope collectionScope,
 	doNotMergeItems bool,
 ) *Collection {
 	c := &Collection{
-		itemClient: gr,
-		itemGetter: api.GetDriveItem,
+		handler: handler,
 		folderPath: currPath,
 		prevPath: prevPath,
 		driveItems: map[string]models.DriveItemable{},
 		driveID: driveID,
-		source: source,
-		service: service,
 		data: make(chan data.Stream, graph.Parallelism(path.OneDriveMetadataService).CollectionBufferSize()),
 		statusUpdater: statusUpdater,
 		ctrl: ctrlOpts,
@@ -202,16 +164,6 @@ func newColl(
 		doNotMergeItems: doNotMergeItems,
 	}

-	// Allows tests to set a mock populator
-	switch source {
-	case SharePointSource:
-		c.itemReader = sharePointItemReader
-		c.itemMetaReader = sharePointItemMetaReader
-	default:
-		c.itemReader = oneDriveItemReader
-		c.itemMetaReader = oneDriveItemMetaReader
-	}
-
 	return c
 }

@@ -222,7 +174,8 @@ func (oc *Collection) Add(item models.DriveItemable) bool {
 	_, found := oc.driveItems[ptr.Val(item.GetId())]
 	oc.driveItems[ptr.Val(item.GetId())] = item

-	return !found // !found = new
+	// if !found, it's a new addition
+	return !found
 }

 // Remove removes a item from the collection
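Add's contract is unchanged by the comment move: the boolean still means "this ID was not already present." Usage sketch:

    if isNew := oc.Add(item); !isNew {
        // the item replaced an existing entry with the same ID
    }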
@@ -246,7 +199,7 @@ func (oc *Collection) IsEmpty() bool {
 // Items() returns the channel containing M365 Exchange objects
 func (oc *Collection) Items(
 	ctx context.Context,
-	errs *fault.Bus, // TODO: currently unused while onedrive isn't up to date with clues/fault
+	errs *fault.Bus,
 ) <-chan data.Stream {
 	go oc.populateItems(ctx, errs)
 	return oc.data
@@ -274,21 +227,7 @@ func (oc Collection) PreviousLocationPath() details.LocationIDer {
 		return nil
 	}

-	var ider details.LocationIDer
-
-	switch oc.source {
-	case OneDriveSource:
-		ider = details.NewOneDriveLocationIDer(
-			oc.driveID,
-			oc.prevLocPath.Elements()...)
-
-	default:
-		ider = details.NewSharePointLocationIDer(
-			oc.driveID,
-			oc.prevLocPath.Elements()...)
-	}
-
-	return ider
+	return oc.handler.NewLocationIDer(oc.driveID, oc.prevLocPath.Elements()...)
 }

 func (oc Collection) State() data.CollectionState {
@@ -328,14 +267,7 @@ func (oc *Collection) getDriveItemContent(
 		el = errs.Local()
 	)

-	itemData, err := downloadContent(
-		ctx,
-		oc.service,
-		oc.itemGetter,
-		oc.itemReader,
-		oc.itemClient,
-		item,
-		oc.driveID)
+	itemData, err := downloadContent(ctx, oc.handler, item, oc.driveID)
 	if err != nil {
 		if clues.HasLabel(err, graph.LabelsMalware) || (item != nil && item.GetMalware() != nil) {
 			logger.CtxErr(ctx, err).With("skipped_reason", fault.SkipMalware).Info("item flagged as malware")
@@ -377,19 +309,21 @@ func (oc *Collection) getDriveItemContent(
 	return itemData, nil
 }

+type itemAndAPIGetter interface {
+	GetItemer
+	api.Getter
+}
+
 // downloadContent attempts to fetch the item content. If the content url
 // is expired (ie, returns a 401), it re-fetches the item to get a new download
 // url and tries again.
 func downloadContent(
 	ctx context.Context,
-	svc graph.Servicer,
-	igf itemGetterFunc,
-	irf itemReaderFunc,
-	gr graph.Requester,
+	iaag itemAndAPIGetter,
 	item models.DriveItemable,
 	driveID string,
 ) (io.ReadCloser, error) {
-	_, content, err := irf(ctx, gr, item)
+	content, err := downloadItem(ctx, iaag, item)
 	if err == nil {
 		return content, nil
 	} else if !graph.IsErrUnauthorized(err) {
@@ -400,12 +334,12 @@ func downloadContent(
 	// token, and that we've overrun the available window to
 	// download the actual file. Re-downloading the item will
 	// refresh that download url.
-	di, err := igf(ctx, svc, driveID, ptr.Val(item.GetId()))
+	di, err := iaag.GetItem(ctx, driveID, ptr.Val(item.GetId()))
 	if err != nil {
 		return nil, clues.Wrap(err, "retrieving expired item")
 	}

-	_, content, err = irf(ctx, gr, di)
+	content, err = downloadItem(ctx, iaag, di)
 	if err != nil {
 		return nil, clues.Wrap(err, "content download retry")
 	}
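The expired-URL retry now runs entirely through the handler: one optimistic fetch, then on a 401 a single item refresh and refetch. The control flow, reduced to a dependency-free sketch (fetch and refreshItem stand in for downloadItem and GetItem):

    package drive

    import (
        "errors"
        "fmt"
        "io"
    )

    var errUnauthorized = errors.New("401: expired download url")

    // fetchWithRefresh mirrors downloadContent: try the cached URL,
    // and only on a 401 mint a fresh item (and URL) for one retry.
    func fetchWithRefresh(
        fetch func() (io.ReadCloser, error),
        refreshItem func() (func() (io.ReadCloser, error), error),
    ) (io.ReadCloser, error) {
        rc, err := fetch()
        if err == nil {
            return rc, nil
        } else if !errors.Is(err, errUnauthorized) {
            return nil, err
        }

        refetch, err := refreshItem()
        if err != nil {
            return nil, fmt.Errorf("retrieving expired item: %w", err)
        }

        rc, err = refetch()
        if err != nil {
            return nil, fmt.Errorf("content download retry: %w", err)
        }

        return rc, nil
    }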
@@ -428,16 +362,13 @@ func (oc *Collection) populateItems(ctx context.Context, errs *fault.Bus) {

 	// Retrieve the OneDrive folder path to set later in
 	// `details.OneDriveInfo`
-	parentPathString, err := path.GetDriveFolderPath(oc.folderPath)
+	parentPath, err := path.GetDriveFolderPath(oc.folderPath)
 	if err != nil {
 		oc.reportAsCompleted(ctx, 0, 0, 0)
 		return
 	}

-	queuedPath := "/" + parentPathString
-	if oc.source == SharePointSource && len(oc.driveName) > 0 {
-		queuedPath = "/" + oc.driveName + queuedPath
-	}
+	queuedPath := oc.handler.FormatDisplayPath(oc.driveName, parentPath)

 	folderProgress := observe.ProgressWithCount(
 		ctx,
@@ -498,25 +429,13 @@ func (oc *Collection) populateItems(ctx context.Context, errs *fault.Bus) {
 			}

 			// Fetch metadata for the file
-			itemMeta, itemMetaSize, err = oc.itemMetaReader(
-				ctx,
-				oc.service,
-				oc.driveID,
-				item)
-
+			itemMeta, itemMetaSize, err = downloadItemMeta(ctx, oc.handler, oc.driveID, item)
 			if err != nil {
 				el.AddRecoverable(clues.Wrap(err, "getting item metadata").Label(fault.LabelForceNoBackupCreation))
 				return
 			}

-			switch oc.source {
-			case SharePointSource:
-				itemInfo.SharePoint = sharePointItemInfo(item, itemSize)
-				itemInfo.SharePoint.ParentPath = parentPathString
-			default:
-				itemInfo.OneDrive = oneDriveItemInfo(item, itemSize)
-				itemInfo.OneDrive.ParentPath = parentPathString
-			}
+			itemInfo = oc.handler.AugmentItemInfo(itemInfo, item, itemSize, parentPath)

 			ctx = clues.Add(ctx, "item_info", itemInfo)

File diff suppressed because it is too large
@@ -14,6 +14,7 @@ import (
     "github.com/alcionai/corso/src/internal/common/prefixmatcher"
     "github.com/alcionai/corso/src/internal/common/ptr"
     "github.com/alcionai/corso/src/internal/connector/graph"
+    odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
     "github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
     "github.com/alcionai/corso/src/internal/connector/support"
     "github.com/alcionai/corso/src/internal/data"
@@ -25,14 +26,6 @@ import (
     "github.com/alcionai/corso/src/pkg/services/m365/api"
 )

-type driveSource int
-
-const (
-    unknownDriveSource driveSource = iota
-    OneDriveSource
-    SharePointSource
-)
-
 type collectionScope int

 const (
@@ -47,21 +40,7 @@ const (
     CollectionScopePackage
 )

-const (
-    restrictedDirectory = "Site Pages"
-    rootDrivePattern    = "/drives/%s/root:"
-)
-
-func (ds driveSource) toPathServiceCat() (path.ServiceType, path.CategoryType) {
-    switch ds {
-    case OneDriveSource:
-        return path.OneDriveService, path.FilesCategory
-    case SharePointSource:
-        return path.SharePointService, path.LibrariesCategory
-    default:
-        return path.UnknownService, path.UnknownCategory
-    }
-}
+const restrictedDirectory = "Site Pages"

 type folderMatcher interface {
     IsAny() bool
@@ -71,14 +50,11 @@ type folderMatcher interface {
 // Collections is used to retrieve drive data for a
 // resource owner, which can be either a user or a sharepoint site.
 type Collections struct {
-    // configured to handle large item downloads
-    itemClient graph.Requester
+    handler BackupHandler

-    tenant        string
+    tenantID      string
     resourceOwner string
-    source        driveSource
     matcher       folderMatcher
-    service       graph.Servicer
     statusUpdater support.StatusUpdater

     ctrl control.Options
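
Note: replacing the driveSource enum plus the itemClient/service fields with a single injected handler is the core refactor in this file. A sketch of the idea, using hypothetical interface and method names patterned on the calls visible in this diff (not the corso API itself):

    // Sketch only: a per-service handler bundles everything the old code
    // selected via switch-on-driveSource; signatures are simplified.
    package main

    import "fmt"

    type backupHandler interface {
        // stands in for the removed toPathServiceCat switch
        ServiceCat() (service, category string)
        // stands in for the removed GetCanonicalPath free function
        CanonicalPath(elems []string, tenantID, resourceOwner string) string
    }

    type oneDriveHandler struct{}

    func (oneDriveHandler) ServiceCat() (string, string) { return "onedrive", "files" }

    func (oneDriveHandler) CanonicalPath(elems []string, tenantID, ro string) string {
        p := tenantID + "/onedrive/" + ro + "/files"
        for _, e := range elems {
            p += "/" + e
        }
        return p
    }

    // collections depends only on the interface; SharePoint supplies its
    // own implementation instead of a second case in every switch.
    type collections struct {
        handler       backupHandler
        tenantID      string
        resourceOwner string
    }

    func main() {
        c := collections{oneDriveHandler{}, "tid", "user@example.com"}
        svc, cat := c.handler.ServiceCat()
        fmt.Println(svc, cat)
        fmt.Println(c.handler.CanonicalPath([]string{"drives", "d1", "root:"}, c.tenantID, c.resourceOwner))
    }
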
@@ -88,17 +64,6 @@ type Collections struct {
     // driveID -> itemID -> collection
     CollectionMap map[string]map[string]*Collection

-    // Not the most ideal, but allows us to change the pager function for testing
-    // as needed. This will allow us to mock out some scenarios during testing.
-    drivePagerFunc func(
-        source driveSource,
-        servicer graph.Servicer,
-        resourceOwner string,
-        fields []string,
-    ) (api.DrivePager, error)
-    itemPagerFunc driveItemPagerFunc
-    servicePathPfxFunc pathPrefixerFunc
-
     // Track stats from drive enumeration. Represents the items backed up.
     NumItems int
     NumFiles int
@@ -106,36 +71,28 @@ type Collections struct {
 }

 func NewCollections(
-    itemClient graph.Requester,
-    tenant string,
+    bh BackupHandler,
+    tenantID string,
     resourceOwner string,
-    source driveSource,
     matcher folderMatcher,
-    service graph.Servicer,
     statusUpdater support.StatusUpdater,
     ctrlOpts control.Options,
 ) *Collections {
     return &Collections{
-        itemClient:    itemClient,
-        tenant:        tenant,
+        handler:       bh,
+        tenantID:      tenantID,
         resourceOwner: resourceOwner,
-        source:        source,
         matcher:       matcher,
         CollectionMap: map[string]map[string]*Collection{},
-        drivePagerFunc: PagerForSource,
-        itemPagerFunc: defaultItemPager,
-        servicePathPfxFunc: pathPrefixerForSource(tenant, resourceOwner, source),
-        service:       service,
         statusUpdater: statusUpdater,
         ctrl:          ctrlOpts,
     }
 }

 func deserializeMetadata(
     ctx context.Context,
     cols []data.RestoreCollection,
-    errs *fault.Bus,
-) (map[string]string, map[string]map[string]string, error) {
+) (map[string]string, map[string]map[string]string, bool, error) {
     logger.Ctx(ctx).Infow(
         "deserialzing previous backup metadata",
         "num_collections", len(cols))
@@ -143,11 +100,11 @@ func deserializeMetadata(
     var (
         prevDeltas  = map[string]string{}
         prevFolders = map[string]map[string]string{}
-        el          = errs.Local()
+        errs        = fault.New(true) // metadata item reads should not fail backup
     )

     for _, col := range cols {
-        if el.Failure() != nil {
+        if errs.Failure() != nil {
             break
         }

@@ -156,7 +113,7 @@ func deserializeMetadata(
         for breakLoop := false; !breakLoop; {
             select {
             case <-ctx.Done():
-                return nil, nil, clues.Wrap(ctx.Err(), "deserialzing previous backup metadata").WithClues(ctx)
+                return nil, nil, false, clues.Wrap(ctx.Err(), "deserialzing previous backup metadata").WithClues(ctx)

             case item, ok := <-items:
                 if !ok {
@@ -196,7 +153,7 @@ func deserializeMetadata(
                 // these cases. We can make the logic for deciding when to continue vs.
                 // when to fail less strict in the future if needed.
                 if err != nil {
-                    return nil, nil, clues.Stack(err).WithClues(ictx)
+                    return nil, nil, false, clues.Stack(err).WithClues(ictx)
                 }
             }
         }
@@ -228,7 +185,14 @@ func deserializeMetadata(
         }
     }

-    return prevDeltas, prevFolders, el.Failure()
+    // if reads from items failed, return empty but no error
+    if errs.Failure() != nil {
+        logger.CtxErr(ctx, errs.Failure()).Info("reading metadata collection items")
+
+        return map[string]string{}, map[string]map[string]string{}, false, nil
+    }
+
+    return prevDeltas, prevFolders, true, nil
 }

 var errExistingMapping = clues.New("mapping already exists for same drive ID")
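
Note: deserializeMetadata now buffers item-read failures on its own local fault bus and converts them into a (usable=false, err=nil) result instead of failing the backup; the caller falls back to a full enumeration. A compressed sketch of that error policy, with a hypothetical readItems in place of the real collection loop:

    // Sketch of the "bad metadata is not fatal" policy: collect read
    // errors locally, and on failure return empty maps plus usable=false
    // rather than an error.
    package main

    import (
        "errors"
        "fmt"
    )

    func readItems() (map[string]string, error) {
        return nil, errors.New("metadata item read failed")
    }

    func deserializeMetadata() (deltas map[string]string, usable bool, err error) {
        deltas, readErr := readItems()
        if readErr != nil {
            // log-and-continue: the caller discards previous state
            fmt.Println("reading metadata collection items:", readErr)
            return map[string]string{}, false, nil
        }

        return deltas, true, nil
    }

    func main() {
        deltas, usable, err := deserializeMetadata()
        fmt.Println(len(deltas), usable, err) // 0 false <nil>
    }
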
@@ -271,10 +235,10 @@ func (c *Collections) Get(
     prevMetadata []data.RestoreCollection,
     ssmb *prefixmatcher.StringSetMatchBuilder,
     errs *fault.Bus,
-) ([]data.BackupCollection, error) {
-    prevDeltas, oldPathsByDriveID, err := deserializeMetadata(ctx, prevMetadata, errs)
+) ([]data.BackupCollection, bool, error) {
+    prevDeltas, oldPathsByDriveID, canUsePreviousBackup, err := deserializeMetadata(ctx, prevMetadata)
     if err != nil {
-        return nil, err
+        return nil, false, err
     }

     driveTombstones := map[string]struct{}{}
@@ -287,14 +251,11 @@ func (c *Collections) Get(
     defer close(driveComplete)

     // Enumerate drives for the specified resourceOwner
-    pager, err := c.drivePagerFunc(c.source, c.service, c.resourceOwner, nil)
-    if err != nil {
-        return nil, graph.Stack(ctx, err)
-    }
+    pager := c.handler.NewDrivePager(c.resourceOwner, nil)

     drives, err := api.GetAllDrives(ctx, pager, true, maxDrivesRetries)
     if err != nil {
-        return nil, err
+        return nil, false, err
     }

     var (
@@ -331,7 +292,7 @@ func (c *Collections) Get(

         delta, paths, excluded, err := collectItems(
             ictx,
-            c.itemPagerFunc(c.service, driveID, ""),
+            c.handler.NewItemPager(driveID, "", api.DriveItemSelectDefault()),
             driveID,
             driveName,
             c.UpdateCollections,
@@ -339,7 +300,7 @@ func (c *Collections) Get(
             prevDelta,
             errs)
         if err != nil {
-            return nil, err
+            return nil, false, err
         }

         // Used for logging below.
@@ -370,16 +331,14 @@ func (c *Collections) Get(

         // For both cases we don't need to do set difference on folder map if the
         // delta token was valid because we should see all the changes.
-        if !delta.Reset && len(excluded) == 0 {
-            continue
-        } else if !delta.Reset {
-            p, err := GetCanonicalPath(
-                fmt.Sprintf(rootDrivePattern, driveID),
-                c.tenant,
-                c.resourceOwner,
-                c.source)
+        if !delta.Reset {
+            if len(excluded) == 0 {
+                continue
+            }
+
+            p, err := c.handler.CanonicalPath(odConsts.DriveFolderPrefixBuilder(driveID), c.tenantID, c.resourceOwner)
             if err != nil {
-                return nil, clues.Wrap(err, "making exclude prefix").WithClues(ictx)
+                return nil, false, clues.Wrap(err, "making exclude prefix").WithClues(ictx)
             }

             ssmb.Add(p.String(), excluded)
@@ -407,22 +366,20 @@ func (c *Collections) Get(
             prevPath, err := path.FromDataLayerPath(p, false)
             if err != nil {
                 err = clues.Wrap(err, "invalid previous path").WithClues(ictx).With("deleted_path", p)
-                return nil, err
+                return nil, false, err
             }

             col, err := NewCollection(
-                c.itemClient,
+                c.handler,
                 nil, // delete the folder
                 prevPath,
                 driveID,
-                c.service,
                 c.statusUpdater,
-                c.source,
                 c.ctrl,
                 CollectionScopeUnknown,
                 true)
             if err != nil {
-                return nil, clues.Wrap(err, "making collection").WithClues(ictx)
+                return nil, false, clues.Wrap(err, "making collection").WithClues(ictx)
             }

             c.CollectionMap[driveID][fldID] = col
@@ -442,33 +399,31 @@ func (c *Collections) Get(

     // generate tombstones for drives that were removed.
     for driveID := range driveTombstones {
-        prevDrivePath, err := c.servicePathPfxFunc(driveID)
+        prevDrivePath, err := c.handler.PathPrefix(c.tenantID, c.resourceOwner, driveID)
         if err != nil {
-            return nil, clues.Wrap(err, "making drive tombstone previous path").WithClues(ctx)
+            return nil, false, clues.Wrap(err, "making drive tombstone for previous path").WithClues(ctx)
         }

         coll, err := NewCollection(
-            c.itemClient,
+            c.handler,
             nil, // delete the drive
             prevDrivePath,
             driveID,
-            c.service,
             c.statusUpdater,
-            c.source,
             c.ctrl,
             CollectionScopeUnknown,
             true)
         if err != nil {
-            return nil, clues.Wrap(err, "making drive tombstone").WithClues(ctx)
+            return nil, false, clues.Wrap(err, "making drive tombstone").WithClues(ctx)
         }

         collections = append(collections, coll)
     }

     // add metadata collections
-    service, category := c.source.toPathServiceCat()
+    service, category := c.handler.ServiceCat()
     md, err := graph.MakeMetadataCollection(
-        c.tenant,
+        c.tenantID,
         c.resourceOwner,
         service,
         category,
@@ -487,7 +442,7 @@ func (c *Collections) Get(
         collections = append(collections, md)
     }

-    return collections, nil
+    return collections, canUsePreviousBackup, nil
 }

 func updateCollectionPaths(
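
Note: drive tombstones mark whole drives that disappeared between backups; each one becomes a collection with a previous path but no current path. A rough sketch of the set-difference feeding the loop above, with simplified types:

    // Sketch: drives seen in the previous backup but absent from the
    // current enumeration become tombstones (prevPath set, current nil).
    package main

    import "fmt"

    type tombstone struct {
        driveID  string
        prevPath string // where the drive's data lived in the last backup
    }

    func driveTombstones(prevPaths map[string]string, current []string) []tombstone {
        seen := map[string]struct{}{}
        for _, id := range current {
            seen[id] = struct{}{}
        }

        var ts []tombstone
        for id, p := range prevPaths {
            if _, ok := seen[id]; !ok {
                ts = append(ts, tombstone{driveID: id, prevPath: p})
            }
        }
        return ts
    }

    func main() {
        prev := map[string]string{
            "d1": "tid/onedrive/u/files/drives/d1",
            "d2": "tid/onedrive/u/files/drives/d2",
        }
        fmt.Println(driveTombstones(prev, []string{"d1"})) // only d2 is tombstoned
    }
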
@@ -601,13 +556,11 @@ func (c *Collections) handleDelete(
     }

     col, err := NewCollection(
-        c.itemClient,
-        nil,
+        c.handler,
+        nil, // deletes the collection
         prevPath,
         driveID,
-        c.service,
         c.statusUpdater,
-        c.source,
         c.ctrl,
         CollectionScopeUnknown,
         // DoNotMerge is not checked for deleted items.
@@ -629,14 +582,12 @@ func (c *Collections) getCollectionPath(
     item models.DriveItemable,
 ) (path.Path, error) {
     var (
-        collectionPathStr string
+        pb     = odConsts.DriveFolderPrefixBuilder(driveID)
         isRoot = item.GetRoot() != nil
         isFile = item.GetFile() != nil
     )

-    if isRoot {
-        collectionPathStr = fmt.Sprintf(rootDrivePattern, driveID)
-    } else {
+    if !isRoot {
         if item.GetParentReference() == nil ||
             item.GetParentReference().GetPath() == nil {
             err := clues.New("no parent reference").
@@ -645,15 +596,10 @@ func (c *Collections) getCollectionPath(
             return nil, err
         }

-        collectionPathStr = ptr.Val(item.GetParentReference().GetPath())
+        pb = path.Builder{}.Append(path.Split(ptr.Val(item.GetParentReference().GetPath()))...)
     }

-    collectionPath, err := GetCanonicalPath(
-        collectionPathStr,
-        c.tenant,
-        c.resourceOwner,
-        c.source,
-    )
+    collectionPath, err := c.handler.CanonicalPath(pb, c.tenantID, c.resourceOwner)
     if err != nil {
         return nil, clues.Wrap(err, "making item path")
     }
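
Note: getCollectionPath now starts from a builder seeded with the drive-root prefix and only swaps in the parent reference for non-root items, instead of formatting and re-splitting strings. A sketch with a toy builder standing in for path.Builder:

    // Sketch of building the collection path from segments rather than a
    // printf pattern; pathBuilder is a toy stand-in for path.Builder.
    package main

    import (
        "fmt"
        "strings"
    )

    type pathBuilder struct{ elems []string }

    func (b pathBuilder) Append(elems ...string) pathBuilder {
        return pathBuilder{append(append([]string{}, b.elems...), elems...)}
    }

    func (b pathBuilder) String() string { return strings.Join(b.elems, "/") }

    func driveFolderPrefix(driveID string) pathBuilder {
        return pathBuilder{}.Append("drives", driveID, "root:")
    }

    func collectionPath(driveID, parentRef string, isRoot bool) pathBuilder {
        pb := driveFolderPrefix(driveID)
        if !isRoot {
            // non-root items carry their parent's full path in the reference
            pb = pathBuilder{}.Append(strings.Split(parentRef, "/")...)
        }
        return pb
    }

    func main() {
        fmt.Println(collectionPath("d1", "", true))                      // drives/d1/root:
        fmt.Println(collectionPath("d1", "drives/d1/root:/folder", false)) // drives/d1/root:/folder
    }
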
@@ -794,17 +740,14 @@ func (c *Collections) UpdateCollections(
     }

         col, err := NewCollection(
-            c.itemClient,
+            c.handler,
             collectionPath,
             prevPath,
             driveID,
-            c.service,
             c.statusUpdater,
-            c.source,
             c.ctrl,
             colScope,
-            invalidPrevDelta,
-        )
+            invalidPrevDelta)
         if err != nil {
             return clues.Stack(err).WithClues(ictx)
         }
@@ -889,33 +832,9 @@ func shouldSkipDrive(ctx context.Context, drivePath path.Path, m folderMatcher,
         (drivePath.Category() == path.LibrariesCategory && restrictedDirectory == driveName)
 }

-// GetCanonicalPath constructs the standard path for the given source.
-func GetCanonicalPath(p, tenant, resourceOwner string, source driveSource) (path.Path, error) {
-    var (
-        pathBuilder = path.Builder{}.Append(strings.Split(p, "/")...)
-        result      path.Path
-        err         error
-    )
-
-    switch source {
-    case OneDriveSource:
-        result, err = pathBuilder.ToDataLayerOneDrivePath(tenant, resourceOwner, false)
-    case SharePointSource:
-        result, err = pathBuilder.ToDataLayerSharePointPath(tenant, resourceOwner, path.LibrariesCategory, false)
-    default:
-        return nil, clues.New("unrecognized data source")
-    }
-
-    if err != nil {
-        return nil, clues.Wrap(err, "converting to canonical path")
-    }
-
-    return result, nil
-}
-
 func includePath(ctx context.Context, m folderMatcher, folderPath path.Path) bool {
     // Check if the folder is allowed by the scope.
-    folderPathString, err := path.GetDriveFolderPath(folderPath)
+    pb, err := path.GetDriveFolderPath(folderPath)
     if err != nil {
         logger.Ctx(ctx).With("err", err).Error("getting drive folder path")
         return true
@@ -923,11 +842,11 @@ func includePath(ctx context.Context, m folderMatcher, folderPath path.Path) boo

     // Hack for the edge case where we're looking at the root folder and can
     // select any folder. Right now the root folder has an empty folder path.
-    if len(folderPathString) == 0 && m.IsAny() {
+    if len(pb.Elements()) == 0 && m.IsAny() {
         return true
     }

-    return m.Matches(folderPathString)
+    return m.Matches(pb.String())
 }

 func updatePath(paths map[string]string, id, newPath string) {
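
Note: includePath now inspects the builder's element count rather than string length for the empty-root check; behavior is unchanged. A compact sketch of that edge case, with a simplified matcher:

    // Sketch of the root-folder edge case: an empty folder path matches
    // only when the scope allows any folder.
    package main

    import (
        "fmt"
        "strings"
    )

    type matcher struct {
        anyFolder bool
        prefix    string
    }

    func (m matcher) IsAny() bool             { return m.anyFolder }
    func (m matcher) Matches(dir string) bool { return strings.HasPrefix(dir, m.prefix) }

    func includePath(elems []string, m matcher) bool {
        // the drive root has no folder elements; only an "any" scope keeps it
        if len(elems) == 0 && m.IsAny() {
            return true
        }

        return m.Matches(strings.Join(elems, "/"))
    }

    func main() {
        fmt.Println(includePath(nil, matcher{anyFolder: true}))            // true
        fmt.Println(includePath([]string{"a", "b"}, matcher{prefix: "a"})) // true
        fmt.Println(includePath(nil, matcher{prefix: "a"}))                // false
    }
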
@@ -2,8 +2,6 @@ package onedrive

 import (
     "context"
-    "fmt"
-    "strings"
     "testing"

     "github.com/alcionai/clues"
@@ -18,7 +16,9 @@ import (
     "github.com/alcionai/corso/src/internal/common/prefixmatcher"
     pmMock "github.com/alcionai/corso/src/internal/common/prefixmatcher/mock"
     "github.com/alcionai/corso/src/internal/connector/graph"
+    odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
     "github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
+    "github.com/alcionai/corso/src/internal/connector/onedrive/mock"
     "github.com/alcionai/corso/src/internal/connector/support"
     "github.com/alcionai/corso/src/internal/data"
     "github.com/alcionai/corso/src/internal/tester"
@@ -27,7 +27,7 @@ import (
     "github.com/alcionai/corso/src/pkg/path"
     "github.com/alcionai/corso/src/pkg/selectors"
     "github.com/alcionai/corso/src/pkg/services/m365/api"
-    "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
+    apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
 )

 type statePath struct {
@@ -38,6 +38,7 @@ type statePath struct {

 func getExpectedStatePathGenerator(
     t *testing.T,
+    bh BackupHandler,
     tenant, user, base string,
 ) func(data.CollectionState, ...string) statePath {
     return func(state data.CollectionState, pths ...string) statePath {
@@ -53,11 +54,13 @@ func getExpectedStatePathGenerator(
             require.Len(t, pths, 1, "invalid number of paths to getExpectedStatePathGenerator")
         } else {
             require.Len(t, pths, 2, "invalid number of paths to getExpectedStatePathGenerator")
-            p2, err = GetCanonicalPath(base+pths[1], tenant, user, OneDriveSource)
+            pb := path.Builder{}.Append(path.Split(base + pths[1])...)
+            p2, err = bh.CanonicalPath(pb, tenant, user)
             require.NoError(t, err, clues.ToCore(err))
         }

-        p1, err = GetCanonicalPath(base+pths[0], tenant, user, OneDriveSource)
+        pb := path.Builder{}.Append(path.Split(base + pths[0])...)
+        p1, err = bh.CanonicalPath(pb, tenant, user)
         require.NoError(t, err, clues.ToCore(err))

         switch state {
@@ -81,14 +84,17 @@ func getExpectedStatePathGenerator(
     }
 }

-func getExpectedPathGenerator(t *testing.T,
+func getExpectedPathGenerator(
+    t *testing.T,
+    bh BackupHandler,
     tenant, user, base string,
 ) func(string) string {
-    return func(path string) string {
-        p, err := GetCanonicalPath(base+path, tenant, user, OneDriveSource)
+    return func(p string) string {
+        pb := path.Builder{}.Append(path.Split(base + p)...)
+        cp, err := bh.CanonicalPath(pb, tenant, user)
         require.NoError(t, err, clues.ToCore(err))

-        return p.String()
+        return cp.String()
     }
 }

@@ -100,52 +106,6 @@ func TestOneDriveCollectionsUnitSuite(t *testing.T) {
     suite.Run(t, &OneDriveCollectionsUnitSuite{Suite: tester.NewUnitSuite(t)})
 }

-func (suite *OneDriveCollectionsUnitSuite) TestGetCanonicalPath() {
-    tenant, resourceOwner := "tenant", "resourceOwner"
-
-    table := []struct {
-        name      string
-        source    driveSource
-        dir       []string
-        expect    string
-        expectErr assert.ErrorAssertionFunc
-    }{
-        {
-            name:      "onedrive",
-            source:    OneDriveSource,
-            dir:       []string{"onedrive"},
-            expect:    "tenant/onedrive/resourceOwner/files/onedrive",
-            expectErr: assert.NoError,
-        },
-        {
-            name:      "sharepoint",
-            source:    SharePointSource,
-            dir:       []string{"sharepoint"},
-            expect:    "tenant/sharepoint/resourceOwner/libraries/sharepoint",
-            expectErr: assert.NoError,
-        },
-        {
-            name:      "unknown",
-            source:    unknownDriveSource,
-            dir:       []string{"unknown"},
-            expectErr: assert.Error,
-        },
-    }
-    for _, test := range table {
-        suite.Run(test.name, func() {
-            t := suite.T()
-            p := strings.Join(test.dir, "/")
-
-            result, err := GetCanonicalPath(p, tenant, resourceOwner, test.source)
-            test.expectErr(t, err, clues.ToCore(err))
-
-            if result != nil {
-                assert.Equal(t, test.expect, result.String())
-            }
-        })
-    }
-}
-
 func getDelList(files ...string) map[string]struct{} {
     delList := map[string]struct{}{}
     for _, file := range files {
@@ -168,9 +128,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
         pkg = "/package"
     )

-    testBaseDrivePath := fmt.Sprintf(rootDrivePattern, "driveID1")
-    expectedPath := getExpectedPathGenerator(suite.T(), tenant, user, testBaseDrivePath)
-    expectedStatePath := getExpectedStatePathGenerator(suite.T(), tenant, user, testBaseDrivePath)
+    bh := itemBackupHandler{}
+    testBaseDrivePath := odConsts.DriveFolderPrefixBuilder("driveID1").String()
+    expectedPath := getExpectedPathGenerator(suite.T(), bh, tenant, user, testBaseDrivePath)
+    expectedStatePath := getExpectedStatePathGenerator(suite.T(), bh, tenant, user, testBaseDrivePath)

     tests := []struct {
         testCase string
@@ -782,12 +743,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
             maps.Copy(outputFolderMap, tt.inputFolderMap)

             c := NewCollections(
-                graph.NewNoTimeoutHTTPWrapper(),
+                &itemBackupHandler{api.Drives{}},
                 tenant,
                 user,
-                OneDriveSource,
                 testFolderMatcher{tt.scope},
-                &MockGraphService{},
                 nil,
                 control.Options{ToggleFeatures: control.Toggles{}})

@@ -844,10 +803,11 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
     table := []struct {
         name string
         // Each function returns the set of files for a single data.Collection.
         cols           []func() []graph.MetadataCollectionEntry
         expectedDeltas map[string]string
         expectedPaths  map[string]map[string]string
-        errCheck       assert.ErrorAssertionFunc
+        canUsePreviousBackup bool
+        errCheck             assert.ErrorAssertionFunc
     }{
         {
             name: "SuccessOneDriveAllOneCollection",
@@ -877,7 +837,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
                     folderID1: path1,
                 },
             },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
         },
         {
             name: "MissingPaths",
@@ -891,9 +852,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
                 }
                },
            },
            expectedDeltas: map[string]string{},
            expectedPaths:  map[string]map[string]string{},
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
         },
         {
             name: "MissingDeltas",
@@ -917,7 +879,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
                     folderID1: path1,
                 },
             },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
         },
         {
             // An empty path map but valid delta results in metadata being returned
@@ -940,9 +903,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
                 }
                },
            },
            expectedDeltas: map[string]string{},
            expectedPaths:  map[string]map[string]string{driveID1: {}},
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
         },
         {
             // An empty delta map but valid path results in no metadata for that drive
@@ -975,7 +939,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
                     folderID1: path1,
                 },
             },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
         },
         {
             name: "SuccessTwoDrivesTwoCollections",
@@ -1025,7 +990,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
                     folderID2: path2,
                 },
             },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
         },
         {
             // Bad formats are logged but skip adding entries to the maps and don't
@@ -1041,7 +1007,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
                 }
                },
            },
-            errCheck: assert.Error,
+            canUsePreviousBackup: false,
+            errCheck:             assert.Error,
         },
         {
             // Unexpected files are logged and skipped. They don't cause an error to
@@ -1077,7 +1044,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
                     folderID1: path1,
                 },
             },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
         },
         {
             name: "DriveAlreadyFound_Paths",
@@ -1111,9 +1079,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
                 }
                },
            },
            expectedDeltas: nil,
            expectedPaths:  nil,
-            errCheck: assert.Error,
+            canUsePreviousBackup: false,
+            errCheck:             assert.Error,
         },
         {
             name: "DriveAlreadyFound_Deltas",
@@ -1143,9 +1112,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
                 }
                },
            },
            expectedDeltas: nil,
            expectedPaths:  nil,
-            errCheck: assert.Error,
+            canUsePreviousBackup: false,
+            errCheck:             assert.Error,
         },
     }

@@ -1168,11 +1138,12 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
                 func(*support.ConnectorOperationStatus) {})
             require.NoError(t, err, clues.ToCore(err))

-            cols = append(cols, data.NotFoundRestoreCollection{Collection: mc})
+            cols = append(cols, data.NoFetchRestoreCollection{Collection: mc})
         }

-        deltas, paths, err := deserializeMetadata(ctx, cols, fault.New(true))
+        deltas, paths, canUsePreviousBackup, err := deserializeMetadata(ctx, cols)
         test.errCheck(t, err)
+        assert.Equal(t, test.canUsePreviousBackup, canUsePreviousBackup, "can use previous backup")

         assert.Equal(t, test.expectedDeltas, deltas, "deltas")
         assert.Equal(t, test.expectedPaths, paths, "paths")
@@ -1180,6 +1151,34 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
     }
 }

+type failingColl struct{}
+
+func (f failingColl) Items(ctx context.Context, errs *fault.Bus) <-chan data.Stream {
+    ic := make(chan data.Stream)
+    defer close(ic)
+
+    errs.AddRecoverable(assert.AnError)
+
+    return ic
+}
+func (f failingColl) FullPath() path.Path { return nil }
+func (f failingColl) FetchItemByName(context.Context, string) (data.Stream, error) { return nil, nil }
+
+// This check is to ensure that we don't error out, but still return
+// canUsePreviousBackup as false on read errors
+func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata_ReadFailure() {
+    t := suite.T()
+
+    ctx, flush := tester.NewContext(t)
+    defer flush()
+
+    fc := failingColl{}
+
+    _, _, canUsePreviousBackup, err := deserializeMetadata(ctx, []data.RestoreCollection{fc})
+    require.NoError(t, err)
+    require.False(t, canUsePreviousBackup)
+}
+
 type mockDeltaPageLinker struct {
     link  *string
     delta *string
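
Note: failingColl above is a stock test-double pattern: its item channel closes immediately while a recoverable error is recorded, so the reader observes a failure without receiving any items. A generic sketch of the same double outside the suite machinery, with a toy recorder in place of the real fault.Bus:

    // Sketch of the failing-collection double.
    package main

    import (
        "errors"
        "fmt"
    )

    type recorder struct{ errs []error }

    func (r *recorder) AddRecoverable(err error) { r.errs = append(r.errs, err) }

    // items yields nothing but records a recoverable read failure, which
    // is exactly what the deserializer's non-fatal path needs to observe.
    func items(r *recorder) <-chan string {
        ch := make(chan string)
        close(ch)
        r.AddRecoverable(errors.New("simulated read failure"))
        return ch
    }

    func main() {
        r := &recorder{}
        for range items(r) {
            // never runs: the channel is already closed
        }
        fmt.Println(len(r.errs) == 1) // true
    }
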
@@ -1267,11 +1266,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
     drive2.SetName(&driveID2)

     var (
-        driveBasePath1 = fmt.Sprintf(rootDrivePattern, driveID1)
-        driveBasePath2 = fmt.Sprintf(rootDrivePattern, driveID2)
+        bh = itemBackupHandler{}

-        expectedPath1 = getExpectedPathGenerator(suite.T(), tenant, user, driveBasePath1)
-        expectedPath2 = getExpectedPathGenerator(suite.T(), tenant, user, driveBasePath2)
+        driveBasePath1 = odConsts.DriveFolderPrefixBuilder(driveID1).String()
+        driveBasePath2 = odConsts.DriveFolderPrefixBuilder(driveID2).String()
+
+        expectedPath1 = getExpectedPathGenerator(suite.T(), bh, tenant, user, driveBasePath1)
+        expectedPath2 = getExpectedPathGenerator(suite.T(), bh, tenant, user, driveBasePath2)

         rootFolderPath1 = expectedPath1("")
         folderPath1     = expectedPath1("/folder")
@@ -1281,11 +1282,12 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
     )

     table := []struct {
         name   string
         drives []models.Driveable
         items  map[string][]deltaPagerResult
-        errCheck        assert.ErrorAssertionFunc
-        prevFolderPaths map[string]map[string]string
+        canUsePreviousBackup bool
+        errCheck             assert.ErrorAssertionFunc
+        prevFolderPaths      map[string]map[string]string
         // Collection name -> set of item IDs. We can't check item data because
         // that's not mocked out. Metadata is checked separately.
         expectedCollections map[string]map[data.CollectionState][]string
@@ -1312,7 +1314,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
             prevFolderPaths: map[string]map[string]string{
                 driveID1: {"root": rootFolderPath1},
             },
@@ -1343,7 +1346,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
             prevFolderPaths: map[string]map[string]string{
                 driveID1: {"root": rootFolderPath1},
             },
@@ -1375,8 +1379,9 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck:        assert.NoError,
-            prevFolderPaths: map[string]map[string]string{},
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
+            prevFolderPaths:      map[string]map[string]string{},
             expectedCollections: map[string]map[data.CollectionState][]string{
                 rootFolderPath1: {data.NewState: {}},
                 folderPath1:     {data.NewState: {"folder", "file"}},
@@ -1412,8 +1417,9 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck:        assert.NoError,
-            prevFolderPaths: map[string]map[string]string{},
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
+            prevFolderPaths:      map[string]map[string]string{},
             expectedCollections: map[string]map[data.CollectionState][]string{
                 rootFolderPath1: {data.NewState: {}},
                 folderPath1:     {data.NewState: {"folder", "file"}},
@@ -1449,7 +1455,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
             prevFolderPaths: map[string]map[string]string{
                 driveID1: {
                     "root": rootFolderPath1,
@@ -1487,7 +1494,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
             prevFolderPaths: map[string]map[string]string{
                 driveID1: {},
             },
@@ -1531,7 +1539,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
             prevFolderPaths: map[string]map[string]string{
                 driveID1: {},
             },
@@ -1582,7 +1591,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
             prevFolderPaths: map[string]map[string]string{
                 driveID1: {},
                 driveID2: {},
@@ -1643,7 +1653,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
             prevFolderPaths: map[string]map[string]string{
                 driveID1: {},
                 driveID2: {},
@@ -1686,7 +1697,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck: assert.Error,
+            canUsePreviousBackup: false,
+            errCheck:             assert.Error,
             prevFolderPaths: map[string]map[string]string{
                 driveID1: {},
             },
@@ -1712,7 +1724,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
             expectedCollections: map[string]map[data.CollectionState][]string{
                 rootFolderPath1: {data.NotMovedState: {"file"}},
             },
@@ -1754,7 +1767,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
             expectedCollections: map[string]map[data.CollectionState][]string{
                 rootFolderPath1:          {data.NotMovedState: {"file"}},
                 expectedPath1("/folder"): {data.NewState: {"folder", "file2"}},
@@ -1796,7 +1810,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
             prevFolderPaths: map[string]map[string]string{
                 driveID1: {
                     "root": rootFolderPath1,
@@ -1838,7 +1853,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
             prevFolderPaths: map[string]map[string]string{
                 driveID1: {
                     "root": rootFolderPath1,
@@ -1884,7 +1900,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
             prevFolderPaths: map[string]map[string]string{
                 driveID1: {
                     "root": rootFolderPath1,
@@ -1940,7 +1957,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
             prevFolderPaths: map[string]map[string]string{
                 driveID1: {},
             },
@@ -1992,7 +2010,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
             prevFolderPaths: map[string]map[string]string{
                 driveID1: {
                     "root": rootFolderPath1,
@@ -2038,7 +2057,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
             prevFolderPaths: map[string]map[string]string{
                 driveID1: {
                     "root": rootFolderPath1,
@@ -2080,7 +2100,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
             prevFolderPaths: map[string]map[string]string{
                 driveID1: {
                     "root": rootFolderPath1,
@@ -2125,7 +2146,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
             prevFolderPaths: map[string]map[string]string{
                 driveID1: {},
             },
@@ -2167,7 +2189,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
             prevFolderPaths: map[string]map[string]string{
                 driveID1: {},
             },
@@ -2204,7 +2227,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
             prevFolderPaths: map[string]map[string]string{
                 driveID1: {},
             },
@@ -2238,7 +2262,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
             prevFolderPaths: map[string]map[string]string{
                 driveID1: {},
             },
@@ -2271,7 +2296,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
                 },
             },
         },
-            errCheck: assert.NoError,
+            canUsePreviousBackup: true,
+            errCheck:             assert.NoError,
             prevFolderPaths: map[string]map[string]string{
                 driveID1: {"root": rootFolderPath1},
                 driveID2: {"root": rootFolderPath2},
@@ -2297,42 +2323,31 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
             ctx, flush := tester.NewContext(t)
             defer flush()

-            drivePagerFunc := func(
-                source driveSource,
-                servicer graph.Servicer,
-                resourceOwner string,
-                fields []string,
-            ) (api.DrivePager, error) {
-                return &mock.DrivePager{
-                    ToReturn: []mock.PagerResult{
-                        {
-                            Drives: test.drives,
-                        },
-                    },
-                }, nil
+            mockDrivePager := &apiMock.DrivePager{
+                ToReturn: []apiMock.PagerResult{
+                    {Drives: test.drives},
+                },
             }

-            itemPagerFunc := func(
-                servicer graph.Servicer,
-                driveID, link string,
-            ) itemPager {
-                return &mockItemPager{
+            itemPagers := map[string]api.DriveItemEnumerator{}
+
+            for driveID := range test.items {
+                itemPagers[driveID] = &mockItemPager{
                     toReturn: test.items[driveID],
                 }
             }

+            mbh := mock.DefaultOneDriveBH()
+            mbh.DrivePagerV = mockDrivePager
+            mbh.ItemPagerV = itemPagers
+
             c := NewCollections(
-                graph.NewNoTimeoutHTTPWrapper(),
+                mbh,
                 tenant,
                 user,
-                OneDriveSource,
                 testFolderMatcher{anyFolder},
-                &MockGraphService{},
                 func(*support.ConnectorOperationStatus) {},
-                control.Options{ToggleFeatures: control.Toggles{}},
-            )
-            c.drivePagerFunc = drivePagerFunc
-            c.itemPagerFunc = itemPagerFunc
+                control.Options{ToggleFeatures: control.Toggles{}})

             prevDelta := "prev-delta"
             mc, err := graph.MakeMetadataCollection(
@@ -2355,13 +2370,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
             )
             assert.NoError(t, err, "creating metadata collection", clues.ToCore(err))

-            prevMetadata := []data.RestoreCollection{data.NotFoundRestoreCollection{Collection: mc}}
+            prevMetadata := []data.RestoreCollection{data.NoFetchRestoreCollection{Collection: mc}}
             errs := fault.New(true)

             delList := prefixmatcher.NewStringSetBuilder()

-            cols, err := c.Get(ctx, prevMetadata, delList, errs)
+            cols, canUsePreviousBackup, err := c.Get(ctx, prevMetadata, delList, errs)
             test.errCheck(t, err)
+            assert.Equal(t, test.canUsePreviousBackup, canUsePreviousBackup, "can use previous backup")
             assert.Equal(t, test.expectedSkippedCount, len(errs.Skipped()))

             if err != nil {
@@ -2378,12 +2394,11 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
             }

                 if folderPath == metadataPath.String() {
-                    deltas, paths, err := deserializeMetadata(
+                    deltas, paths, _, err := deserializeMetadata(
                         ctx,
                         []data.RestoreCollection{
-                            data.NotFoundRestoreCollection{Collection: baseCol},
-                        },
-                        fault.New(true))
+                            data.NoFetchRestoreCollection{Collection: baseCol},
+                        })
                     if !assert.NoError(t, err, "deserializing metadata", clues.ToCore(err)) {
                         continue
                     }
@@ -1,5 +1,7 @@
 package onedrive

+import "github.com/alcionai/corso/src/pkg/path"
+
 const (
     // const used as the root dir for the drive portion of a path prefix.
     // eg: tid/onedrive/ro/files/drives/driveid/...
@@ -10,3 +12,7 @@ const (
     // root id for drive items
     RootID = "root"
 )
+
+func DriveFolderPrefixBuilder(driveID string) *path.Builder {
+    return path.Builder{}.Append(DrivesPathDir, driveID, RootPathDir)
+}
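
Note: DriveFolderPrefixBuilder centralizes the old "/drives/%s/root:" printf pattern as builder segments. Assuming DrivesPathDir and RootPathDir are equivalent to "drives" and "root:" (per the prefix comment above; an assumption, not confirmed by this diff), usage looks roughly like:

    // Sketch with local constants standing in for the real exported ones.
    package main

    import (
        "fmt"
        "strings"
    )

    const (
        drivesPathDir = "drives" // assumed value of DrivesPathDir
        rootPathDir   = "root:"  // assumed value of RootPathDir
    )

    func driveFolderPrefix(driveID string) []string {
        return []string{drivesPathDir, driveID, rootPathDir}
    }

    func main() {
        // replaces fmt.Sprintf("/drives/%s/root:", driveID) with segments
        // that can be appended to safely, without re-splitting a string.
        fmt.Println(strings.Join(driveFolderPrefix("d1"), "/")) // drives/d1/root:
    }
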
@@ -16,6 +16,7 @@ import (
     "github.com/alcionai/corso/src/pkg/logger"
     "github.com/alcionai/corso/src/pkg/path"
     "github.com/alcionai/corso/src/pkg/selectors"
+    "github.com/alcionai/corso/src/pkg/services/m365/api"
 )

 type odFolderMatcher struct {
@@ -34,27 +35,28 @@ func (fm odFolderMatcher) Matches(dir string) bool {
 // for the specified user
 func DataCollections(
     ctx context.Context,
+    ac api.Client,
     selector selectors.Selector,
     user idname.Provider,
     metadata []data.RestoreCollection,
     lastBackupVersion int,
     tenant string,
-    itemClient graph.Requester,
-    service graph.Servicer,
     su support.StatusUpdater,
     ctrlOpts control.Options,
     errs *fault.Bus,
-) ([]data.BackupCollection, *prefixmatcher.StringSetMatcher, error) {
+) ([]data.BackupCollection, *prefixmatcher.StringSetMatcher, bool, error) {
     odb, err := selector.ToOneDriveBackup()
     if err != nil {
-        return nil, nil, clues.Wrap(err, "parsing selector").WithClues(ctx)
+        return nil, nil, false, clues.Wrap(err, "parsing selector").WithClues(ctx)
     }

     var (
         el          = errs.Local()
         categories  = map[path.CategoryType]struct{}{}
         collections = []data.BackupCollection{}
         ssmb        = prefixmatcher.NewStringSetBuilder()
+        odcs                 []data.BackupCollection
+        canUsePreviousBackup bool
     )

     // for each scope that includes oneDrive items, get all
@@ -66,16 +68,14 @@ func DataCollections(
         logger.Ctx(ctx).Debug("creating OneDrive collections")

         nc := NewCollections(
-            itemClient,
+            &itemBackupHandler{ac.Drives()},
             tenant,
             user.ID(),
-            OneDriveSource,
             odFolderMatcher{scope},
-            service,
             su,
             ctrlOpts)

-        odcs, err := nc.Get(ctx, metadata, ssmb, errs)
+        odcs, canUsePreviousBackup, err = nc.Get(ctx, metadata, ssmb, errs)
         if err != nil {
             el.AddRecoverable(clues.Stack(err).Label(fault.LabelForceNoBackupCreation))
         }
@@ -86,14 +86,13 @@ func DataCollections(
     }

     mcs, err := migrationCollections(
-        service,
         lastBackupVersion,
         tenant,
         user,
         su,
         ctrlOpts)
     if err != nil {
-        return nil, nil, err
+        return nil, nil, false, err
     }

     collections = append(collections, mcs...)
@@ -109,18 +108,17 @@ func DataCollections(
             su,
             errs)
         if err != nil {
-            return nil, nil, err
+            return nil, nil, false, err
         }

         collections = append(collections, baseCols...)
     }

-    return collections, ssmb.ToReader(), el.Failure()
+    return collections, ssmb.ToReader(), canUsePreviousBackup, el.Failure()
 }

 // adds data migrations to the collection set.
 func migrationCollections(
-    svc graph.Servicer,
     lastBackupVersion int,
     tenant string,
     user idname.Provider,
|
|||||||
@ -85,7 +85,7 @@ func (suite *DataCollectionsUnitSuite) TestMigrationCollections() {
|
|||||||
ToggleFeatures: control.Toggles{},
|
ToggleFeatures: control.Toggles{},
|
||||||
}
|
}
|
||||||
|
|
||||||
mc, err := migrationCollections(nil, test.version, "t", u, nil, opts)
|
mc, err := migrationCollections(test.version, "t", u, nil, opts)
|
||||||
require.NoError(t, err, clues.ToCore(err))
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
|
|
||||||
if test.expectLen == 0 {
|
if test.expectLen == 0 {
|
||||||
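
Note: DataCollections and Collections.Get now also return a canUsePreviousBackup bool, so a failed metadata read can demote an incremental backup instead of aborting the run. A hedged sketch of how a caller might consume the flag; the surrounding variable names are illustrative, not from this commit:

    cols, ssmb, canUsePreviousBackup, err := onedrive.DataCollections(
        ctx, ac, sel, user, metadata, lastBackupVersion, tenant, su, opts, errs)
    if err != nil {
        return err
    }

    if !canUsePreviousBackup {
        // treat the run as a full backup: ignore bases derived from the
        // previous backup's delta tokens and folder paths
        metadata = nil
    }
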
@@ -2,32 +2,20 @@ package onedrive

 import (
 	"context"
-	"fmt"
 	"strings"

 	"github.com/alcionai/clues"
-	"github.com/microsoftgraph/msgraph-sdk-go/drives"
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
 	"golang.org/x/exp/maps"

 	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/connector/graph"
-	odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/logger"
-	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )

-const (
-	maxDrivesRetries = 3
-
-	// nextLinkKey is used to find the next link in a paged
-	// graph response
-	nextLinkKey           = "@odata.nextLink"
-	itemChildrenRawURLFmt = "https://graph.microsoft.com/v1.0/drives/%s/items/%s/children"
-	itemNotFoundErrorCode = "itemNotFound"
-)
+const maxDrivesRetries = 3

 // DeltaUpdate holds the results of a current delta token. It normally
 // gets produced when aggregating the addition and removal of items in
@@ -40,41 +28,6 @@ type DeltaUpdate struct {
 	Reset bool
 }

-func PagerForSource(
-	source driveSource,
-	servicer graph.Servicer,
-	resourceOwner string,
-	fields []string,
-) (api.DrivePager, error) {
-	switch source {
-	case OneDriveSource:
-		return api.NewUserDrivePager(servicer, resourceOwner, fields), nil
-	case SharePointSource:
-		return api.NewSiteDrivePager(servicer, resourceOwner, fields), nil
-	default:
-		return nil, clues.New("unrecognized drive data source")
-	}
-}
-
-type pathPrefixerFunc func(driveID string) (path.Path, error)
-
-func pathPrefixerForSource(
-	tenantID, resourceOwner string,
-	source driveSource,
-) pathPrefixerFunc {
-	cat := path.FilesCategory
-	serv := path.OneDriveService
-
-	if source == SharePointSource {
-		cat = path.LibrariesCategory
-		serv = path.SharePointService
-	}
-
-	return func(driveID string) (path.Path, error) {
-		return path.Build(tenantID, resourceOwner, serv, cat, false, odConsts.DrivesPathDir, driveID, odConsts.RootPathDir)
-	}
-}
-
 // itemCollector functions collect the items found in a drive
 type itemCollector func(
 	ctx context.Context,
@@ -88,36 +41,22 @@ type itemCollector func(
 	errs *fault.Bus,
 ) error

-type driveItemPagerFunc func(
-	servicer graph.Servicer,
-	driveID, link string,
-) itemPager
-
-type itemPager interface {
-	GetPage(context.Context) (api.DeltaPageLinker, error)
-	SetNext(nextLink string)
-	Reset()
-	ValuesIn(api.DeltaPageLinker) ([]models.DriveItemable, error)
-}
-
-func defaultItemPager(
-	servicer graph.Servicer,
-	driveID, link string,
-) itemPager {
-	return api.NewItemPager(servicer, driveID, link, api.DriveItemSelectDefault())
-}
-
 // collectItems will enumerate all items in the specified drive and hand them to the
 // provided `collector` method
 func collectItems(
 	ctx context.Context,
-	pager itemPager,
+	pager api.DriveItemEnumerator,
 	driveID, driveName string,
 	collector itemCollector,
 	oldPaths map[string]string,
 	prevDelta string,
 	errs *fault.Bus,
-) (DeltaUpdate, map[string]string, map[string]struct{}, error) {
+) (
+	DeltaUpdate,
+	map[string]string, // newPaths
+	map[string]struct{}, // excluded
+	error,
+) {
 	var (
 		newDeltaURL = ""
 		newPaths    = map[string]string{}
@@ -196,28 +135,8 @@ func collectItems(
 	return DeltaUpdate{URL: newDeltaURL, Reset: invalidPrevDelta}, newPaths, excluded, nil
 }

-// Create a new item in the specified folder
-func CreateItem(
-	ctx context.Context,
-	service graph.Servicer,
-	driveID, parentFolderID string,
-	newItem models.DriveItemable,
-) (models.DriveItemable, error) {
-	// Graph SDK doesn't yet provide a POST method for `/children` so we set the `rawUrl` ourselves as recommended
-	// here: https://github.com/microsoftgraph/msgraph-sdk-go/issues/155#issuecomment-1136254310
-	rawURL := fmt.Sprintf(itemChildrenRawURLFmt, driveID, parentFolderID)
-	builder := drives.NewItemItemsRequestBuilder(rawURL, service.Adapter())
-
-	newItem, err := builder.Post(ctx, newItem, nil)
-	if err != nil {
-		return nil, graph.Wrap(ctx, err, "creating item")
-	}
-
-	return newItem, nil
-}
-
 // newItem initializes a `models.DriveItemable` that can be used as input to `createItem`
-func newItem(name string, folder bool) models.DriveItemable {
+func newItem(name string, folder bool) *models.DriveItem {
 	itemToCreate := models.NewDriveItem()
 	itemToCreate.SetName(&name)

@@ -243,12 +162,12 @@ func (op *Displayable) GetDisplayName() *string {
 // are a subfolder or top-level folder in the hierarchy.
 func GetAllFolders(
 	ctx context.Context,
-	gs graph.Servicer,
+	bh BackupHandler,
 	pager api.DrivePager,
 	prefix string,
 	errs *fault.Bus,
 ) ([]*Displayable, error) {
-	drvs, err := api.GetAllDrives(ctx, pager, true, maxDrivesRetries)
+	ds, err := api.GetAllDrives(ctx, pager, true, maxDrivesRetries)
 	if err != nil {
 		return nil, clues.Wrap(err, "getting OneDrive folders")
 	}
@@ -258,14 +177,14 @@ func GetAllFolders(
 		el = errs.Local()
 	)

-	for _, d := range drvs {
+	for _, drive := range ds {
 		if el.Failure() != nil {
 			break
 		}

 		var (
-			id   = ptr.Val(d.GetId())
-			name = ptr.Val(d.GetName())
+			id   = ptr.Val(drive.GetId())
+			name = ptr.Val(drive.GetName())
 		)

 		ictx := clues.Add(ctx, "drive_id", id, "drive_name", clues.Hide(name))
@@ -311,7 +230,7 @@ func GetAllFolders(

 		_, _, _, err = collectItems(
 			ictx,
-			defaultItemPager(gs, id, ""),
+			bh.NewItemPager(id, "", nil),
 			id,
 			name,
 			collector,
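
Note: collectItems now accepts an api.DriveItemEnumerator supplied by the handler instead of constructing a local itemPager. A minimal fake for unit tests, assuming the enumerator kept the method set of the removed itemPager interface (an assumption; check src/pkg/services/m365/api for the real definition):

    type fakeEnumerator struct {
        pages []api.DeltaPageLinker
        i     int
    }

    func (f *fakeEnumerator) GetPage(context.Context) (api.DeltaPageLinker, error) {
        p := f.pages[f.i] // panics past the last page; acceptable for a sketch
        f.i++
        return p, nil
    }

    func (f *fakeEnumerator) SetNext(string) {}
    func (f *fakeEnumerator) Reset()         {}

    func (f *fakeEnumerator) ValuesIn(api.DeltaPageLinker) ([]models.DriveItemable, error) {
        return nil, nil
    }
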
@@ -286,6 +286,7 @@ type OneDriveIntgSuite struct {
 	tester.Suite
 	userID string
 	creds  account.M365Config
+	ac     api.Client
 }

 func TestOneDriveSuite(t *testing.T) {
@@ -303,9 +304,12 @@ func (suite *OneDriveIntgSuite) SetupSuite() {

 	acct := tester.NewM365Account(t)
 	creds, err := acct.M365Config()
-	require.NoError(t, err)
+	require.NoError(t, err, clues.ToCore(err))

 	suite.creds = creds
+
+	suite.ac, err = api.NewClient(creds)
+	require.NoError(t, err, clues.ToCore(err))
 }

 func (suite *OneDriveIntgSuite) TestCreateGetDeleteFolder() {
@@ -318,11 +322,9 @@ func (suite *OneDriveIntgSuite) TestCreateGetDeleteFolder() {
 		folderIDs      = []string{}
 		folderName1    = "Corso_Folder_Test_" + dttm.FormatNow(dttm.SafeForTesting)
 		folderElements = []string{folderName1}
-		gs             = loadTestService(t)
 	)

-	pager, err := PagerForSource(OneDriveSource, gs, suite.userID, nil)
-	require.NoError(t, err, clues.ToCore(err))
+	pager := suite.ac.Drives().NewUserDrivePager(suite.userID, nil)

 	drives, err := api.GetAllDrives(ctx, pager, true, maxDrivesRetries)
 	require.NoError(t, err, clues.ToCore(err))
@@ -337,14 +339,14 @@ func (suite *OneDriveIntgSuite) TestCreateGetDeleteFolder() {

 			// deletes require unique http clients
 			// https://github.com/alcionai/corso/issues/2707
-			err := api.DeleteDriveItem(ictx, loadTestService(t), driveID, id)
+			err := suite.ac.Drives().DeleteItem(ictx, driveID, id)
 			if err != nil {
 				logger.CtxErr(ictx, err).Errorw("deleting folder")
 			}
 		}
 	}()

-	rootFolder, err := api.GetDriveRoot(ctx, gs, driveID)
+	rootFolder, err := suite.ac.Drives().GetRootFolder(ctx, driveID)
 	require.NoError(t, err, clues.ToCore(err))

 	restoreDir := path.Builder{}.Append(folderElements...)
@@ -357,7 +359,9 @@ func (suite *OneDriveIntgSuite) TestCreateGetDeleteFolder() {
 	caches := NewRestoreCaches()
 	caches.DriveIDToRootFolderID[driveID] = ptr.Val(rootFolder.GetId())

-	folderID, err := createRestoreFolders(ctx, gs, &drivePath, restoreDir, caches)
+	rh := NewRestoreHandler(suite.ac)
+
+	folderID, err := createRestoreFolders(ctx, rh, &drivePath, restoreDir, caches)
 	require.NoError(t, err, clues.ToCore(err))

 	folderIDs = append(folderIDs, folderID)
@@ -365,7 +369,7 @@ func (suite *OneDriveIntgSuite) TestCreateGetDeleteFolder() {
 	folderName2 := "Corso_Folder_Test_" + dttm.FormatNow(dttm.SafeForTesting)
 	restoreDir = restoreDir.Append(folderName2)

-	folderID, err = createRestoreFolders(ctx, gs, &drivePath, restoreDir, caches)
+	folderID, err = createRestoreFolders(ctx, rh, &drivePath, restoreDir, caches)
 	require.NoError(t, err, clues.ToCore(err))

 	folderIDs = append(folderIDs, folderID)
@@ -387,11 +391,13 @@ func (suite *OneDriveIntgSuite) TestCreateGetDeleteFolder() {
 	for _, test := range table {
 		suite.Run(test.name, func() {
 			t := suite.T()
+			bh := itemBackupHandler{suite.ac.Drives()}
+			pager := suite.ac.Drives().NewUserDrivePager(suite.userID, nil)

-			pager, err := PagerForSource(OneDriveSource, gs, suite.userID, nil)
-			require.NoError(t, err, clues.ToCore(err))
+			ctx, flush := tester.NewContext(t)
+			defer flush()

-			allFolders, err := GetAllFolders(ctx, gs, pager, test.prefix, fault.New(true))
+			allFolders, err := GetAllFolders(ctx, bh, pager, test.prefix, fault.New(true))
 			require.NoError(t, err, clues.ToCore(err))

 			foundFolderIDs := []string{}
@@ -454,12 +460,10 @@ func (suite *OneDriveIntgSuite) TestOneDriveNewCollections() {
 			)

 			colls := NewCollections(
-				graph.NewNoTimeoutHTTPWrapper(),
+				&itemBackupHandler{suite.ac.Drives()},
 				creds.AzureTenantID,
 				test.user,
-				OneDriveSource,
 				testFolderMatcher{scope},
-				service,
 				service.updateStatus,
 				control.Options{
 					ToggleFeatures: control.Toggles{},
@@ -467,7 +471,7 @@ func (suite *OneDriveIntgSuite) TestOneDriveNewCollections() {

 			ssmb := prefixmatcher.NewStringSetBuilder()

-			odcs, err := colls.Get(ctx, nil, ssmb, fault.New(true))
+			odcs, _, err := colls.Get(ctx, nil, ssmb, fault.New(true))
 			assert.NoError(t, err, clues.ToCore(err))
 			// Don't expect excludes as this isn't an incremental backup.
 			assert.True(t, ssmb.Empty())

@@ -1,25 +0,0 @@
-// Code generated by "stringer -type=driveSource"; DO NOT EDIT.
-
-package onedrive
-
-import "strconv"
-
-func _() {
-	// An "invalid array index" compiler error signifies that the constant values have changed.
-	// Re-run the stringer command to generate them again.
-	var x [1]struct{}
-	_ = x[unknownDriveSource-0]
-	_ = x[OneDriveSource-1]
-	_ = x[SharePointSource-2]
-}
-
-const _driveSource_name = "unknownDriveSourceOneDriveSourceSharePointSource"
-
-var _driveSource_index = [...]uint8{0, 18, 32, 48}
-
-func (i driveSource) String() string {
-	if i < 0 || i >= driveSource(len(_driveSource_index)-1) {
-		return "driveSource(" + strconv.FormatInt(int64(i), 10) + ")"
-	}
-	return _driveSource_name[_driveSource_index[i]:_driveSource_index[i+1]]
-}
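
Note: this file was generated by stringer for the now-removed driveSource enum, so deleting the type makes the generated code dead. For reference, the kind of declaration that would have produced it; a hypothetical reconstruction whose constant values match the deleted _driveSource_index table:

    //go:generate stringer -type=driveSource
    type driveSource int

    const (
        unknownDriveSource driveSource = iota // String() == "unknownDriveSource"
        OneDriveSource                        // String() == "OneDriveSource"
        SharePointSource                      // String() == "SharePointSource"
    )
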
src/internal/connector/onedrive/handlers.go (new file, 132 lines)
@@ -0,0 +1,132 @@
+package onedrive
+
+import (
+	"context"
+
+	"github.com/microsoftgraph/msgraph-sdk-go/drives"
+	"github.com/microsoftgraph/msgraph-sdk-go/models"
+
+	"github.com/alcionai/corso/src/pkg/backup/details"
+	"github.com/alcionai/corso/src/pkg/path"
+	"github.com/alcionai/corso/src/pkg/services/m365/api"
+)
+
+type ItemInfoAugmenter interface {
+	// AugmentItemInfo will populate a details.<Service>Info struct
+	// with properties from the drive item. ItemSize is passed in
+	// separately for restore processes because the local itemable
+	// doesn't have its size value updated as a side effect of creation,
+	// and kiota drops any SetSize update.
+	AugmentItemInfo(
+		dii details.ItemInfo,
+		item models.DriveItemable,
+		size int64,
+		parentPath *path.Builder,
+	) details.ItemInfo
+}
+
+// ---------------------------------------------------------------------------
+// backup
+// ---------------------------------------------------------------------------
+
+type BackupHandler interface {
+	ItemInfoAugmenter
+	api.Getter
+	GetItemPermissioner
+	GetItemer
+
+	// PathPrefix constructs the service and category specific path prefix for
+	// the given values.
+	PathPrefix(tenantID, resourceOwner, driveID string) (path.Path, error)
+
+	// CanonicalPath constructs the service and category specific path for
+	// the given values.
+	CanonicalPath(
+		folders *path.Builder,
+		tenantID, resourceOwner string,
+	) (path.Path, error)
+
+	// ServiceCat returns the service and category used by this implementation.
+	ServiceCat() (path.ServiceType, path.CategoryType)
+	NewDrivePager(resourceOwner string, fields []string) api.DrivePager
+	NewItemPager(driveID, link string, fields []string) api.DriveItemEnumerator
+	// FormatDisplayPath creates a human-readable string to represent the
+	// provided path.
+	FormatDisplayPath(driveName string, parentPath *path.Builder) string
+	NewLocationIDer(driveID string, elems ...string) details.LocationIDer
+}
+
+type GetItemPermissioner interface {
+	GetItemPermission(
+		ctx context.Context,
+		driveID, itemID string,
+	) (models.PermissionCollectionResponseable, error)
+}
+
+type GetItemer interface {
+	GetItem(
+		ctx context.Context,
+		driveID, itemID string,
+	) (models.DriveItemable, error)
+}
+
+// ---------------------------------------------------------------------------
+// restore
+// ---------------------------------------------------------------------------
+
+type RestoreHandler interface {
+	DeleteItemPermissioner
+	GetFolderByNamer
+	GetRootFolderer
+	ItemInfoAugmenter
+	NewItemContentUploader
+	PostItemInContainerer
+	UpdateItemPermissioner
+}
+
+type NewItemContentUploader interface {
+	// NewItemContentUpload creates an upload session which is used as a writer
+	// for large item content.
+	NewItemContentUpload(
+		ctx context.Context,
+		driveID, itemID string,
+	) (models.UploadSessionable, error)
+}
+
+type DeleteItemPermissioner interface {
+	DeleteItemPermission(
+		ctx context.Context,
+		driveID, itemID, permissionID string,
+	) error
+}
+
+type UpdateItemPermissioner interface {
+	PostItemPermissionUpdate(
+		ctx context.Context,
+		driveID, itemID string,
+		body *drives.ItemItemsItemInvitePostRequestBody,
+	) (drives.ItemItemsItemInviteResponseable, error)
+}
+
+type PostItemInContainerer interface {
+	PostItemInContainer(
+		ctx context.Context,
+		driveID, parentFolderID string,
+		newItem models.DriveItemable,
+	) (models.DriveItemable, error)
+}
+
+type GetFolderByNamer interface {
+	GetFolderByName(
+		ctx context.Context,
+		driveID, parentFolderID, folderID string,
+	) (models.DriveItemable, error)
+}
+
+type GetRootFolderer interface {
+	// GetRootFolder gets the root folder for the drive.
+	GetRootFolder(
+		ctx context.Context,
+		driveID string,
+	) (models.DriveItemable, error)
+}
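
Note: handlers.go splits the Graph surface into single-purpose interfaces that compose into BackupHandler and RestoreHandler, so call sites and tests can depend on exactly the methods they use. A hypothetical test stub for one of the narrow interfaces; this is not part of the commit:

    type stubRootFolderGetter struct {
        root models.DriveItemable
        err  error
    }

    func (s stubRootFolderGetter) GetRootFolder(
        _ context.Context,
        _ string, // driveID is ignored by the stub
    ) (models.DriveItemable, error) {
        return s.root, s.err
    }

    // compile-time check, mirroring the pattern used in item_handler.go
    var _ GetRootFolderer = stubRootFolderGetter{}
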
@@ -5,17 +5,14 @@ import (
 	"context"
 	"encoding/json"
 	"io"
-	"net/http"
-	"strings"

 	"github.com/alcionai/clues"
 	"github.com/microsoftgraph/msgraph-sdk-go/models"

 	"github.com/alcionai/corso/src/internal/common/ptr"
+	"github.com/alcionai/corso/src/internal/common/str"
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
-	"github.com/alcionai/corso/src/pkg/backup/details"
-	"github.com/alcionai/corso/src/pkg/logger"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )

@@ -25,58 +22,80 @@ var downloadURLKeys = []string{
 	"@content.downloadUrl",
 }

-// sharePointItemReader will return a io.ReadCloser for the specified item
-// It crafts this by querying M365 for a download URL for the item
-// and using a http client to initialize a reader
-// TODO: Add metadata fetching to SharePoint
-func sharePointItemReader(
+func downloadItem(
 	ctx context.Context,
-	client graph.Requester,
+	ag api.Getter,
 	item models.DriveItemable,
-) (details.ItemInfo, io.ReadCloser, error) {
-	resp, err := downloadItem(ctx, client, item)
-	if err != nil {
-		return details.ItemInfo{}, nil, clues.Wrap(err, "sharepoint reader")
+) (io.ReadCloser, error) {
+	if item == nil {
+		return nil, clues.New("nil item")
 	}

-	dii := details.ItemInfo{
-		SharePoint: sharePointItemInfo(item, ptr.Val(item.GetSize())),
-	}
-
-	return dii, resp.Body, nil
-}
-
-func oneDriveItemMetaReader(
-	ctx context.Context,
-	service graph.Servicer,
-	driveID string,
-	item models.DriveItemable,
-) (io.ReadCloser, int, error) {
-	return baseItemMetaReader(ctx, service, driveID, item)
-}
-
-func sharePointItemMetaReader(
-	ctx context.Context,
-	service graph.Servicer,
-	driveID string,
-	item models.DriveItemable,
-) (io.ReadCloser, int, error) {
-	// TODO: include permissions
-	return baseItemMetaReader(ctx, service, driveID, item)
-}
-
-func baseItemMetaReader(
-	ctx context.Context,
-	service graph.Servicer,
-	driveID string,
-	item models.DriveItemable,
-) (io.ReadCloser, int, error) {
 	var (
-		perms []metadata.Permission
-		err   error
-		meta  = metadata.Metadata{FileName: ptr.Val(item.GetName())}
+		rc     io.ReadCloser
+		isFile = item.GetFile() != nil
+		err    error
 	)

+	if isFile {
+		var (
+			url string
+			ad  = item.GetAdditionalData()
+		)
+
+		for _, key := range downloadURLKeys {
+			if v, err := str.AnyValueToString(key, ad); err == nil {
+				url = v
+				break
+			}
+		}
+
+		rc, err = downloadFile(ctx, ag, url)
+		if err != nil {
+			return nil, clues.Stack(err)
+		}
+	}
+
+	return rc, nil
+}
+
+func downloadFile(
+	ctx context.Context,
+	ag api.Getter,
+	url string,
+) (io.ReadCloser, error) {
+	if len(url) == 0 {
+		return nil, clues.New("empty file url")
+	}
+
+	resp, err := ag.Get(ctx, url, nil)
+	if err != nil {
+		return nil, clues.Wrap(err, "getting file")
+	}
+
+	if graph.IsMalwareResp(ctx, resp) {
+		return nil, clues.New("malware detected").Label(graph.LabelsMalware)
+	}
+
+	if (resp.StatusCode / 100) != 2 {
+		// upstream error checks can compare the status with
+		// clues.HasLabel(err, graph.LabelStatus(http.KnownStatusCode))
+		return nil, clues.
+			Wrap(clues.New(resp.Status), "non-2xx http response").
+			Label(graph.LabelStatus(resp.StatusCode))
+	}
+
+	return resp.Body, nil
+}
+
+func downloadItemMeta(
+	ctx context.Context,
+	gip GetItemPermissioner,
+	driveID string,
+	item models.DriveItemable,
+) (io.ReadCloser, int, error) {
+	meta := metadata.Metadata{FileName: ptr.Val(item.GetName())}
+
 	if item.GetShared() == nil {
 		meta.SharingMode = metadata.SharingModeInherited
 	} else {
@@ -84,12 +103,12 @@ func baseItemMetaReader(
 	}

 	if meta.SharingMode == metadata.SharingModeCustom {
-		perms, err = driveItemPermissionInfo(ctx, service, driveID, ptr.Val(item.GetId()))
+		perm, err := gip.GetItemPermission(ctx, driveID, ptr.Val(item.GetId()))
 		if err != nil {
 			return nil, 0, err
 		}

-		meta.Permissions = perms
+		meta.Permissions = metadata.FilterPermissions(ctx, perm.GetValue())
 	}

 	metaJSON, err := json.Marshal(meta)
@@ -100,283 +119,25 @@ func baseItemMetaReader(
 	return io.NopCloser(bytes.NewReader(metaJSON)), len(metaJSON), nil
 }

-// oneDriveItemReader will return a io.ReadCloser for the specified item
-// It crafts this by querying M365 for a download URL for the item
-// and using a http client to initialize a reader
-func oneDriveItemReader(
+// driveItemWriter is used to initialize and return an io.Writer to upload data for the specified item
+// It does so by creating an upload session and using that URL to initialize an `itemWriter`
+// TODO: @vkamra verify if var session is the desired input
+func driveItemWriter(
 	ctx context.Context,
-	client graph.Requester,
-	item models.DriveItemable,
-) (details.ItemInfo, io.ReadCloser, error) {
-	var (
-		rc     io.ReadCloser
-		isFile = item.GetFile() != nil
-	)
-
-	if isFile {
-		resp, err := downloadItem(ctx, client, item)
-		if err != nil {
-			return details.ItemInfo{}, nil, clues.Wrap(err, "onedrive reader")
-		}
-
-		rc = resp.Body
-	}
-
-	dii := details.ItemInfo{
-		OneDrive: oneDriveItemInfo(item, ptr.Val(item.GetSize())),
-	}
-
-	return dii, rc, nil
-}
-
-func downloadItem(
-	ctx context.Context,
-	client graph.Requester,
-	item models.DriveItemable,
-) (*http.Response, error) {
-	var url string
-
-	for _, key := range downloadURLKeys {
-		tmp, ok := item.GetAdditionalData()[key].(*string)
-		if ok {
-			url = ptr.Val(tmp)
-			break
-		}
-	}
-
-	if len(url) == 0 {
-		return nil, clues.New("extracting file url").With("item_id", ptr.Val(item.GetId()))
-	}
-
-	resp, err := client.Request(ctx, http.MethodGet, url, nil, nil)
+	nicu NewItemContentUploader,
+	driveID, itemID string,
+	itemSize int64,
+) (io.Writer, string, error) {
+	ctx = clues.Add(ctx, "upload_item_id", itemID)
+
+	icu, err := nicu.NewItemContentUpload(ctx, driveID, itemID)
 	if err != nil {
-		return nil, err
+		return nil, "", clues.Stack(err)
 	}

-	if (resp.StatusCode / 100) == 2 {
-		return resp, nil
-	}
-
-	if graph.IsMalwareResp(ctx, resp) {
-		return nil, clues.New("malware detected").Label(graph.LabelsMalware)
-	}
-
-	// upstream error checks can compare the status with
-	// clues.HasLabel(err, graph.LabelStatus(http.KnownStatusCode))
-	cerr := clues.Wrap(clues.New(resp.Status), "non-2xx http response").
-		Label(graph.LabelStatus(resp.StatusCode))
-
-	return resp, cerr
-}
-
-// oneDriveItemInfo will populate a details.OneDriveInfo struct
-// with properties from the drive item. ItemSize is specified
-// separately for restore processes because the local itemable
-// doesn't have its size value updated as a side effect of creation,
-// and kiota drops any SetSize update.
-func oneDriveItemInfo(di models.DriveItemable, itemSize int64) *details.OneDriveInfo {
-	var email, driveName, driveID string
-
-	if di.GetCreatedBy() != nil && di.GetCreatedBy().GetUser() != nil {
-		// User is sometimes not available when created via some
-		// external applications (like backup/restore solutions)
-		ed, ok := di.GetCreatedBy().GetUser().GetAdditionalData()["email"]
-		if ok {
-			email = *ed.(*string)
-		}
-	}
-
-	if di.GetParentReference() != nil {
-		driveID = ptr.Val(di.GetParentReference().GetDriveId())
-		driveName = strings.TrimSpace(ptr.Val(di.GetParentReference().GetName()))
-	}
-
-	return &details.OneDriveInfo{
-		Created:   ptr.Val(di.GetCreatedDateTime()),
-		DriveID:   driveID,
-		DriveName: driveName,
-		ItemName:  ptr.Val(di.GetName()),
-		ItemType:  details.OneDriveItem,
-		Modified:  ptr.Val(di.GetLastModifiedDateTime()),
-		Owner:     email,
-		Size:      itemSize,
-	}
-}
-
-// driveItemPermissionInfo will fetch the permission information
-// for a drive item given a drive and item id.
-func driveItemPermissionInfo(
-	ctx context.Context,
-	service graph.Servicer,
-	driveID string,
-	itemID string,
-) ([]metadata.Permission, error) {
-	perm, err := api.GetItemPermission(ctx, service, driveID, itemID)
-	if err != nil {
-		return nil, err
-	}
-
-	uperms := filterUserPermissions(ctx, perm.GetValue())
-
-	return uperms, nil
-}
-
-func filterUserPermissions(ctx context.Context, perms []models.Permissionable) []metadata.Permission {
-	up := []metadata.Permission{}
-
-	for _, p := range perms {
-		if p.GetGrantedToV2() == nil {
-			// For link shares, we get permissions without a user
-			// specified
-			continue
-		}
-
-		var (
-			// Below are the mapping from roles to "Advanced" permissions
-			// screen entries:
-			//
-			//   owner - Full Control
-			//   write - Design | Edit | Contribute (no difference in /permissions api)
-			//   read  - Read
-			//   empty - Restricted View
-			//
-			// helpful docs:
-			// https://devblogs.microsoft.com/microsoft365dev/controlling-app-access-on-specific-sharepoint-site-collections/
-			roles    = p.GetRoles()
-			gv2      = p.GetGrantedToV2()
-			entityID string
-			gv2t     metadata.GV2Type
-		)
-
-		switch true {
-		case gv2.GetUser() != nil:
-			gv2t = metadata.GV2User
-			entityID = ptr.Val(gv2.GetUser().GetId())
-		case gv2.GetSiteUser() != nil:
-			gv2t = metadata.GV2SiteUser
-			entityID = ptr.Val(gv2.GetSiteUser().GetId())
-		case gv2.GetGroup() != nil:
-			gv2t = metadata.GV2Group
-			entityID = ptr.Val(gv2.GetGroup().GetId())
-		case gv2.GetSiteGroup() != nil:
-			gv2t = metadata.GV2SiteGroup
-			entityID = ptr.Val(gv2.GetSiteGroup().GetId())
-		case gv2.GetApplication() != nil:
-			gv2t = metadata.GV2App
-			entityID = ptr.Val(gv2.GetApplication().GetId())
-		case gv2.GetDevice() != nil:
-			gv2t = metadata.GV2Device
-			entityID = ptr.Val(gv2.GetDevice().GetId())
-		default:
-			logger.Ctx(ctx).Info("untracked permission")
-		}
-
-		// Technically GrantedToV2 can also contain devices, but the
-		// documentation does not mention about devices in permissions
-		if entityID == "" {
-			// This should ideally not be hit
-			continue
-		}
-
-		up = append(up, metadata.Permission{
-			ID:         ptr.Val(p.GetId()),
-			Roles:      roles,
-			EntityID:   entityID,
-			EntityType: gv2t,
-			Expiration: p.GetExpirationDateTime(),
-		})
-	}
-
-	return up
-}
-
-// sharePointItemInfo will populate a details.SharePointInfo struct
-// with properties from the drive item. ItemSize is specified
-// separately for restore processes because the local itemable
-// doesn't have its size value updated as a side effect of creation,
-// and kiota drops any SetSize update.
-// TODO: Update drive name during Issue #2071
-func sharePointItemInfo(di models.DriveItemable, itemSize int64) *details.SharePointInfo {
-	var driveName, siteID, driveID, weburl, creatorEmail string
-
-	// TODO: we rely on this info for details/restore lookups,
-	// so if it's nil we have an issue, and will need an alternative
-	// way to source the data.
-	if di.GetCreatedBy() != nil && di.GetCreatedBy().GetUser() != nil {
-		// User is sometimes not available when created via some
-		// external applications (like backup/restore solutions)
-		additionalData := di.GetCreatedBy().GetUser().GetAdditionalData()
-		ed, ok := additionalData["email"]
-
-		if !ok {
-			ed = additionalData["displayName"]
-		}
-
-		if ed != nil {
-			creatorEmail = *ed.(*string)
-		}
-	}
-
-	gsi := di.GetSharepointIds()
-	if gsi != nil {
-		siteID = ptr.Val(gsi.GetSiteId())
-		weburl = ptr.Val(gsi.GetSiteUrl())
-
-		if len(weburl) == 0 {
-			weburl = constructWebURL(di.GetAdditionalData())
-		}
-	}
-
-	if di.GetParentReference() != nil {
-		driveID = ptr.Val(di.GetParentReference().GetDriveId())
-		driveName = strings.TrimSpace(ptr.Val(di.GetParentReference().GetName()))
-	}
-
-	return &details.SharePointInfo{
-		ItemType:  details.SharePointLibrary,
-		ItemName:  ptr.Val(di.GetName()),
-		Created:   ptr.Val(di.GetCreatedDateTime()),
-		Modified:  ptr.Val(di.GetLastModifiedDateTime()),
-		DriveID:   driveID,
-		DriveName: driveName,
-		Size:      itemSize,
-		Owner:     creatorEmail,
-		WebURL:    weburl,
-		SiteID:    siteID,
-	}
-}
-
-// constructWebURL helper function for recreating the webURL
-// for the originating SharePoint site. Uses additional data map
-// from a models.DriveItemable that possesses a downloadURL within the map.
-// Returns "" if map nil or key is not present.
-func constructWebURL(adtl map[string]any) string {
-	var (
-		desiredKey = "@microsoft.graph.downloadUrl"
-		sep        = `/_layouts`
-		url        string
-	)
-
-	if adtl == nil {
-		return url
-	}
-
-	r := adtl[desiredKey]
-	point, ok := r.(*string)
-
-	if !ok {
-		return url
-	}
-
-	value := ptr.Val(point)
-	if len(value) == 0 {
-		return url
-	}
-
-	temp := strings.Split(value, sep)
-	url = temp[0]
-
-	return url
-}
+	iw := graph.NewLargeItemWriter(itemID, ptr.Val(icu.GetUploadUrl()), itemSize)
+
+	return iw, ptr.Val(icu.GetUploadUrl()), nil
 }

 func setName(orig models.ItemReferenceable, driveName string) models.ItemReferenceable {
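
Note: item download is now split into downloadItem, which extracts the URL from the itemable's additional data, and downloadFile, which performs the GET plus malware and status checks, both working through the narrow api.Getter interface. A hedged usage sketch; variable names are illustrative:

    rc, err := downloadItem(ctx, bh, driveItem)
    if err != nil {
        // malware and non-2xx responses surface as labeled clues errors,
        // e.g. clues.HasLabel(err, graph.LabelsMalware)
        return err
    }

    // rc is nil for non-file items, per the isFile guard in downloadItem
    if rc != nil {
        defer rc.Close()
        _, err = io.Copy(io.Discard, rc)
    }
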
src/internal/connector/onedrive/item_handler.go (new file, 227 lines)
@@ -0,0 +1,227 @@
+package onedrive
+
+import (
+	"context"
+	"net/http"
+	"strings"
+
+	"github.com/microsoftgraph/msgraph-sdk-go/drives"
+	"github.com/microsoftgraph/msgraph-sdk-go/models"
+
+	"github.com/alcionai/corso/src/internal/common/ptr"
+	odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
+	"github.com/alcionai/corso/src/pkg/backup/details"
+	"github.com/alcionai/corso/src/pkg/path"
+	"github.com/alcionai/corso/src/pkg/services/m365/api"
+)
+
+// ---------------------------------------------------------------------------
+// backup
+// ---------------------------------------------------------------------------
+
+var _ BackupHandler = &itemBackupHandler{}
+
+type itemBackupHandler struct {
+	ac api.Drives
+}
+
+func (h itemBackupHandler) Get(
+	ctx context.Context,
+	url string,
+	headers map[string]string,
+) (*http.Response, error) {
+	return h.ac.Get(ctx, url, headers)
+}
+
+func (h itemBackupHandler) PathPrefix(
+	tenantID, resourceOwner, driveID string,
+) (path.Path, error) {
+	return path.Build(
+		tenantID,
+		resourceOwner,
+		path.OneDriveService,
+		path.FilesCategory,
+		false,
+		odConsts.DrivesPathDir,
+		driveID,
+		odConsts.RootPathDir)
+}
+
+func (h itemBackupHandler) CanonicalPath(
+	folders *path.Builder,
+	tenantID, resourceOwner string,
+) (path.Path, error) {
+	return folders.ToDataLayerOneDrivePath(tenantID, resourceOwner, false)
+}
+
+func (h itemBackupHandler) ServiceCat() (path.ServiceType, path.CategoryType) {
+	return path.OneDriveService, path.FilesCategory
+}
+
+func (h itemBackupHandler) NewDrivePager(
+	resourceOwner string, fields []string,
+) api.DrivePager {
+	return h.ac.NewUserDrivePager(resourceOwner, fields)
+}
+
+func (h itemBackupHandler) NewItemPager(
+	driveID, link string,
+	fields []string,
+) api.DriveItemEnumerator {
+	return h.ac.NewItemPager(driveID, link, fields)
+}
+
+func (h itemBackupHandler) AugmentItemInfo(
+	dii details.ItemInfo,
+	item models.DriveItemable,
+	size int64,
+	parentPath *path.Builder,
+) details.ItemInfo {
+	return augmentItemInfo(dii, item, size, parentPath)
+}
+
+func (h itemBackupHandler) FormatDisplayPath(
+	_ string, // drive name not displayed for onedrive
+	pb *path.Builder,
+) string {
+	return "/" + pb.String()
+}
+
+func (h itemBackupHandler) NewLocationIDer(
+	driveID string,
+	elems ...string,
+) details.LocationIDer {
+	return details.NewOneDriveLocationIDer(driveID, elems...)
+}
+
+func (h itemBackupHandler) GetItemPermission(
+	ctx context.Context,
+	driveID, itemID string,
+) (models.PermissionCollectionResponseable, error) {
+	return h.ac.GetItemPermission(ctx, driveID, itemID)
+}
+
+func (h itemBackupHandler) GetItem(
+	ctx context.Context,
+	driveID, itemID string,
+) (models.DriveItemable, error) {
+	return h.ac.GetItem(ctx, driveID, itemID)
+}
+
+// ---------------------------------------------------------------------------
+// Restore
+// ---------------------------------------------------------------------------
+
+var _ RestoreHandler = &itemRestoreHandler{}
+
+type itemRestoreHandler struct {
+	ac api.Drives
+}
+
+func NewRestoreHandler(ac api.Client) *itemRestoreHandler {
+	return &itemRestoreHandler{ac.Drives()}
+}
+
+// AugmentItemInfo will populate a details.OneDriveInfo struct
+// with properties from the drive item. ItemSize is specified
+// separately for restore processes because the local itemable
+// doesn't have its size value updated as a side effect of creation,
+// and kiota drops any SetSize update.
+func (h itemRestoreHandler) AugmentItemInfo(
+	dii details.ItemInfo,
+	item models.DriveItemable,
+	size int64,
+	parentPath *path.Builder,
+) details.ItemInfo {
+	return augmentItemInfo(dii, item, size, parentPath)
+}
+
+func (h itemRestoreHandler) NewItemContentUpload(
+	ctx context.Context,
+	driveID, itemID string,
+) (models.UploadSessionable, error) {
+	return h.ac.NewItemContentUpload(ctx, driveID, itemID)
+}
+
+func (h itemRestoreHandler) DeleteItemPermission(
+	ctx context.Context,
+	driveID, itemID, permissionID string,
+) error {
+	return h.ac.DeleteItemPermission(ctx, driveID, itemID, permissionID)
+}
+
+func (h itemRestoreHandler) PostItemPermissionUpdate(
+	ctx context.Context,
+	driveID, itemID string,
+	body *drives.ItemItemsItemInvitePostRequestBody,
+) (drives.ItemItemsItemInviteResponseable, error) {
+	return h.ac.PostItemPermissionUpdate(ctx, driveID, itemID, body)
+}
+
+func (h itemRestoreHandler) PostItemInContainer(
+	ctx context.Context,
+	driveID, parentFolderID string,
+	newItem models.DriveItemable,
+) (models.DriveItemable, error) {
+	return h.ac.PostItemInContainer(ctx, driveID, parentFolderID, newItem)
+}
+
+func (h itemRestoreHandler) GetFolderByName(
+	ctx context.Context,
+	driveID, parentFolderID, folderName string,
+) (models.DriveItemable, error) {
+	return h.ac.GetFolderByName(ctx, driveID, parentFolderID, folderName)
+}
+
+func (h itemRestoreHandler) GetRootFolder(
+	ctx context.Context,
+	driveID string,
+) (models.DriveItemable, error) {
+	return h.ac.GetRootFolder(ctx, driveID)
+}
+
+// ---------------------------------------------------------------------------
+// Common
+// ---------------------------------------------------------------------------
+
+func augmentItemInfo(
+	dii details.ItemInfo,
+	item models.DriveItemable,
+	size int64,
+	parentPath *path.Builder,
+) details.ItemInfo {
+	var email, driveName, driveID string
+
+	if item.GetCreatedBy() != nil && item.GetCreatedBy().GetUser() != nil {
+		// User is sometimes not available when created via some
+		// external applications (like backup/restore solutions)
+		ed, ok := item.GetCreatedBy().GetUser().GetAdditionalData()["email"]
+		if ok {
+			email = *ed.(*string)
+		}
+	}
+
+	if item.GetParentReference() != nil {
+		driveID = ptr.Val(item.GetParentReference().GetDriveId())
+		driveName = strings.TrimSpace(ptr.Val(item.GetParentReference().GetName()))
+	}
+
+	var pps string
+	if parentPath != nil {
+		pps = parentPath.String()
+	}
+
+	dii.OneDrive = &details.OneDriveInfo{
+		Created:    ptr.Val(item.GetCreatedDateTime()),
+		DriveID:    driveID,
+		DriveName:  driveName,
+		ItemName:   ptr.Val(item.GetName()),
+		ItemType:   details.OneDriveItem,
+		Modified:   ptr.Val(item.GetLastModifiedDateTime()),
+		Owner:      email,
+		ParentPath: pps,
+		Size:       size,
+	}
+
+	return dii
+}
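
Note: the `var _ BackupHandler = &itemBackupHandler{}` declarations above are Go's standard zero-cost compile-time interface checks. A self-contained illustration of the idiom, unrelated to corso's types:

    package main

    type Greeter interface{ Greet() string }

    type enGreeter struct{}

    func (enGreeter) Greet() string { return "hello" }

    // compilation fails here if enGreeter ever stops satisfying Greeter;
    // nothing is allocated or executed at runtime
    var _ Greeter = enGreeter{}

    func main() {}
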
src/internal/connector/onedrive/item_handler_test.go (new file, 58 lines)
@@ -0,0 +1,58 @@
+package onedrive
+
+import (
+	"testing"
+
+	"github.com/alcionai/clues"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/suite"
+
+	"github.com/alcionai/corso/src/internal/tester"
+	"github.com/alcionai/corso/src/pkg/path"
+)
+
+type ItemBackupHandlerUnitSuite struct {
+	tester.Suite
+}
+
+func TestItemBackupHandlerUnitSuite(t *testing.T) {
+	suite.Run(t, &ItemBackupHandlerUnitSuite{Suite: tester.NewUnitSuite(t)})
+}
+
+func (suite *ItemBackupHandlerUnitSuite) TestCanonicalPath() {
+	tenantID, resourceOwner := "tenant", "resourceOwner"
+
+	table := []struct {
+		name      string
+		expect    string
+		expectErr assert.ErrorAssertionFunc
+	}{
+		{
+			name:      "onedrive",
+			expect:    "tenant/onedrive/resourceOwner/files/prefix",
+			expectErr: assert.NoError,
+		},
+	}
+	for _, test := range table {
+		suite.Run(test.name, func() {
+			t := suite.T()
+			h := itemBackupHandler{}
+			p := path.Builder{}.Append("prefix")
+
+			result, err := h.CanonicalPath(p, tenantID, resourceOwner)
+			test.expectErr(t, err, clues.ToCore(err))
+
+			if result != nil {
+				assert.Equal(t, test.expect, result.String())
+			}
+		})
+	}
+}
+
+func (suite *ItemBackupHandlerUnitSuite) TestServiceCat() {
+	t := suite.T()
+
+	s, c := itemBackupHandler{}.ServiceCat()
+	assert.Equal(t, path.OneDriveService, s)
+	assert.Equal(t, path.FilesCategory, c)
+}
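
Note: the new unit test follows the repo's testify suite convention. A self-contained sketch of the same pattern, independent of corso's tester helpers:

    package example

    import (
        "testing"

        "github.com/stretchr/testify/assert"
        "github.com/stretchr/testify/suite"
    )

    type ExampleSuite struct {
        suite.Suite
    }

    func TestExampleSuite(t *testing.T) {
        suite.Run(t, new(ExampleSuite))
    }

    func (s *ExampleSuite) TestJoin() {
        assert.Equal(s.T(), "a/b", "a"+"/"+"b")
    }
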
@ -4,18 +4,16 @@ import (
|
|||||||
"bytes"
|
"bytes"
|
||||||
"context"
|
"context"
|
||||||
"io"
|
"io"
|
||||||
|
"net/http"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/alcionai/clues"
|
"github.com/alcionai/clues"
|
||||||
"github.com/microsoftgraph/msgraph-sdk-go/models"
|
"github.com/microsoftgraph/msgraph-sdk-go/models"
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
"github.com/stretchr/testify/suite"
|
"github.com/stretchr/testify/suite"
|
||||||
|
|
||||||
"github.com/alcionai/corso/src/internal/common/dttm"
|
"github.com/alcionai/corso/src/internal/common/dttm"
|
||||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||||
"github.com/alcionai/corso/src/internal/connector/graph"
|
|
||||||
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
|
|
||||||
"github.com/alcionai/corso/src/internal/tester"
|
"github.com/alcionai/corso/src/internal/tester"
|
||||||
"github.com/alcionai/corso/src/pkg/fault"
|
"github.com/alcionai/corso/src/pkg/fault"
|
||||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||||
@ -25,7 +23,7 @@ type ItemIntegrationSuite struct {
|
|||||||
tester.Suite
|
tester.Suite
|
||||||
user string
|
user string
|
||||||
userDriveID string
|
userDriveID string
|
||||||
service graph.Servicer
|
service *oneDriveService
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestItemIntegrationSuite(t *testing.T) {
|
func TestItemIntegrationSuite(t *testing.T) {
|
||||||
@ -46,8 +44,7 @@ func (suite *ItemIntegrationSuite) SetupSuite() {
|
|||||||
suite.service = loadTestService(t)
|
suite.service = loadTestService(t)
|
||||||
suite.user = tester.SecondaryM365UserID(t)
|
suite.user = tester.SecondaryM365UserID(t)
|
||||||
|
|
||||||
pager, err := PagerForSource(OneDriveSource, suite.service, suite.user, nil)
|
pager := suite.service.ac.Drives().NewUserDrivePager(suite.user, nil)
|
||||||
require.NoError(t, err, clues.ToCore(err))
|
|
||||||
|
|
||||||
odDrives, err := api.GetAllDrives(ctx, pager, true, maxDrivesRetries)
|
odDrives, err := api.GetAllDrives(ctx, pager, true, maxDrivesRetries)
|
||||||
require.NoError(t, err, clues.ToCore(err))
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
@ -83,6 +80,10 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
|
|||||||
_ bool,
|
_ bool,
|
||||||
_ *fault.Bus,
|
_ *fault.Bus,
|
||||||
) error {
|
) error {
|
||||||
|
if driveItem != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
for _, item := range items {
|
for _, item := range items {
|
||||||
if item.GetFile() != nil && ptr.Val(item.GetSize()) > 0 {
|
if item.GetFile() != nil && ptr.Val(item.GetSize()) > 0 {
|
||||||
driveItem = item
|
driveItem = item
|
||||||
@ -92,12 +93,14 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
|
|||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
ip := suite.service.ac.
|
||||||
|
Drives().
|
||||||
|
NewItemPager(suite.userDriveID, "", api.DriveItemSelectDefault())
|
||||||
|
|
||||||
_, _, _, err := collectItems(
|
_, _, _, err := collectItems(
|
||||||
ctx,
|
ctx,
|
||||||
defaultItemPager(
|
ip,
|
||||||
suite.service,
|
|
||||||
suite.userDriveID,
|
|
||||||
""),
|
|
||||||
suite.userDriveID,
|
suite.userDriveID,
|
||||||
"General",
|
"General",
|
||||||
itemCollector,
|
itemCollector,
|
||||||
@@ -114,19 +117,15 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
 		suite.user,
 		suite.userDriveID)

-	// Read data for the file
-	itemInfo, itemData, err := oneDriveItemReader(ctx, graph.NewNoTimeoutHTTPWrapper(), driveItem)
+	bh := itemBackupHandler{suite.service.ac.Drives()}
+
+	// Read data for the file
+	itemData, err := downloadItem(ctx, bh, driveItem)
 	require.NoError(t, err, clues.ToCore(err))
-	require.NotNil(t, itemInfo.OneDrive)
-	require.NotEmpty(t, itemInfo.OneDrive.ItemName)

 	size, err := io.Copy(io.Discard, itemData)
 	require.NoError(t, err, clues.ToCore(err))
 	require.NotZero(t, size)
-	require.Equal(t, size, itemInfo.OneDrive.Size)
-
-	t.Logf("Read %d bytes from file %s.", size, itemInfo.OneDrive.ItemName)
 }

 // TestItemWriter is an integration test for uploading data to OneDrive
@@ -148,21 +147,19 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
 	for _, test := range table {
 		suite.Run(test.name, func() {
 			t := suite.T()
+			rh := NewRestoreHandler(suite.service.ac)

 			ctx, flush := tester.NewContext(t)
 			defer flush()

-			srv := suite.service
-
-			root, err := api.GetDriveRoot(ctx, srv, test.driveID)
+			root, err := suite.service.ac.Drives().GetRootFolder(ctx, test.driveID)
 			require.NoError(t, err, clues.ToCore(err))

 			newFolderName := tester.DefaultTestRestoreDestination("folder").ContainerName
 			t.Logf("creating folder %s", newFolderName)

-			newFolder, err := CreateItem(
+			newFolder, err := rh.PostItemInContainer(
 				ctx,
-				srv,
 				test.driveID,
 				ptr.Val(root.GetId()),
 				newItem(newFolderName, true))
@@ -172,9 +169,8 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
 			newItemName := "testItem_" + dttm.FormatNow(dttm.SafeForTesting)
 			t.Logf("creating item %s", newItemName)

-			newItem, err := CreateItem(
+			newItem, err := rh.PostItemInContainer(
 				ctx,
-				srv,
 				test.driveID,
 				ptr.Val(newFolder.GetId()),
 				newItem(newItemName, false))
@@ -183,19 +179,24 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
 			// HACK: Leveraging this to test getFolder behavior for a file. `getFolder()` on the
 			// newly created item should fail because it's a file not a folder
-			_, err = api.GetFolderByName(ctx, srv, test.driveID, ptr.Val(newFolder.GetId()), newItemName)
+			_, err = suite.service.ac.Drives().GetFolderByName(
+				ctx,
+				test.driveID,
+				ptr.Val(newFolder.GetId()),
+				newItemName)
 			require.ErrorIs(t, err, api.ErrFolderNotFound, clues.ToCore(err))

 			// Initialize a 100KB mockDataProvider
 			td, writeSize := mockDataReader(int64(100 * 1024))

-			itemID := ptr.Val(newItem.GetId())
-
-			r, err := api.PostDriveItem(ctx, srv, test.driveID, itemID)
+			w, _, err := driveItemWriter(
+				ctx,
+				rh,
+				test.driveID,
+				ptr.Val(newItem.GetId()),
+				writeSize)
 			require.NoError(t, err, clues.ToCore(err))

-			w := graph.NewLargeItemWriter(itemID, ptr.Val(r.GetUploadUrl()), writeSize)
-
 			// Using a 32 KB buffer for the copy allows us to validate the
 			// multi-part upload. `io.CopyBuffer` will only write 32 KB at
 			// a time
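A note on the 32 KB figure in the comment above: io.CopyBuffer hands the destination writer at most one buffer's worth of bytes per Write call, so a buffer smaller than the 100 KB payload forces several writes through the item writer, which is what exercises the multi-part upload. A standalone sketch of that mechanic (names and sizes here are illustrative, not Corso's):

package main

import (
	"bytes"
	"fmt"
	"io"
	"strings"
)

func main() {
	src := strings.NewReader(strings.Repeat("x", 100*1024)) // 100 KB payload
	var dst bytes.Buffer

	// With a 32 KB buffer, io.CopyBuffer issues ceil(100/32) = 4 writes,
	// so a writer that uploads one part per Write sees a multi-part upload.
	buf := make([]byte, 32*1024)

	n, err := io.CopyBuffer(&dst, src, buf)
	if err != nil {
		panic(err)
	}

	fmt.Printf("copied %d bytes in 32 KB chunks\n", n)
}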
@@ -235,72 +236,40 @@ func (suite *ItemIntegrationSuite) TestDriveGetFolder() {
 		ctx, flush := tester.NewContext(t)
 		defer flush()

-		srv := suite.service
-
-		root, err := api.GetDriveRoot(ctx, srv, test.driveID)
+		root, err := suite.service.ac.Drives().GetRootFolder(ctx, test.driveID)
 		require.NoError(t, err, clues.ToCore(err))

 		// Lookup a folder that doesn't exist
-		_, err = api.GetFolderByName(ctx, srv, test.driveID, ptr.Val(root.GetId()), "FolderDoesNotExist")
+		_, err = suite.service.ac.Drives().GetFolderByName(
+			ctx,
+			test.driveID,
+			ptr.Val(root.GetId()),
+			"FolderDoesNotExist")
 		require.ErrorIs(t, err, api.ErrFolderNotFound, clues.ToCore(err))

 		// Lookup a folder that does exist
-		_, err = api.GetFolderByName(ctx, srv, test.driveID, ptr.Val(root.GetId()), "")
+		_, err = suite.service.ac.Drives().GetFolderByName(
+			ctx,
+			test.driveID,
+			ptr.Val(root.GetId()),
+			"")
 		require.NoError(t, err, clues.ToCore(err))
 		})
 	}
 }

-func getPermsAndResourceOwnerPerms(
-	permID, resourceOwner string,
-	gv2t metadata.GV2Type,
-	scopes []string,
-) (models.Permissionable, metadata.Permission) {
-	sharepointIdentitySet := models.NewSharePointIdentitySet()
-
-	switch gv2t {
-	case metadata.GV2App, metadata.GV2Device, metadata.GV2Group, metadata.GV2User:
-		identity := models.NewIdentity()
-		identity.SetId(&resourceOwner)
-		identity.SetAdditionalData(map[string]any{"email": &resourceOwner})
-
-		switch gv2t {
-		case metadata.GV2User:
-			sharepointIdentitySet.SetUser(identity)
-		case metadata.GV2Group:
-			sharepointIdentitySet.SetGroup(identity)
-		case metadata.GV2App:
-			sharepointIdentitySet.SetApplication(identity)
-		case metadata.GV2Device:
-			sharepointIdentitySet.SetDevice(identity)
-		}
-
-	case metadata.GV2SiteUser, metadata.GV2SiteGroup:
-		spIdentity := models.NewSharePointIdentity()
-		spIdentity.SetId(&resourceOwner)
-		spIdentity.SetAdditionalData(map[string]any{"email": &resourceOwner})
-
-		switch gv2t {
-		case metadata.GV2SiteUser:
-			sharepointIdentitySet.SetSiteUser(spIdentity)
-		case metadata.GV2SiteGroup:
-			sharepointIdentitySet.SetSiteGroup(spIdentity)
-		}
-	}
-
-	perm := models.NewPermission()
-	perm.SetId(&permID)
-	perm.SetRoles([]string{"read"})
-	perm.SetGrantedToV2(sharepointIdentitySet)
-
-	ownersPerm := metadata.Permission{
-		ID:         permID,
-		Roles:      []string{"read"},
-		EntityID:   resourceOwner,
-		EntityType: gv2t,
-	}
-
-	return perm, ownersPerm
-}
+// Unit tests
+
+type mockGetter struct {
+	GetFunc func(ctx context.Context, url string) (*http.Response, error)
+}
+
+func (m mockGetter) Get(
+	ctx context.Context,
+	url string,
+	headers map[string]string,
+) (*http.Response, error) {
+	return m.GetFunc(ctx, url)
+}

 type ItemUnitTestSuite struct {
@@ -311,134 +280,153 @@ func TestItemUnitTestSuite(t *testing.T) {
 	suite.Run(t, &ItemUnitTestSuite{Suite: tester.NewUnitSuite(t)})
 }

-func (suite *ItemUnitTestSuite) TestDrivePermissionsFilter() {
-	var (
-		pID  = "fakePermId"
-		uID  = "fakeuser@provider.com"
-		uID2 = "fakeuser2@provider.com"
-		own  = []string{"owner"}
-		r    = []string{"read"}
-		rw   = []string{"read", "write"}
-	)
-
-	userOwnerPerm, userOwnerROperm := getPermsAndResourceOwnerPerms(pID, uID, metadata.GV2User, own)
-	userReadPerm, userReadROperm := getPermsAndResourceOwnerPerms(pID, uID, metadata.GV2User, r)
-	userReadWritePerm, userReadWriteROperm := getPermsAndResourceOwnerPerms(pID, uID2, metadata.GV2User, rw)
-	siteUserOwnerPerm, siteUserOwnerROperm := getPermsAndResourceOwnerPerms(pID, uID, metadata.GV2SiteUser, own)
-	siteUserReadPerm, siteUserReadROperm := getPermsAndResourceOwnerPerms(pID, uID, metadata.GV2SiteUser, r)
-	siteUserReadWritePerm, siteUserReadWriteROperm := getPermsAndResourceOwnerPerms(pID, uID2, metadata.GV2SiteUser, rw)
-
-	groupReadPerm, groupReadROperm := getPermsAndResourceOwnerPerms(pID, uID, metadata.GV2Group, r)
-	groupReadWritePerm, groupReadWriteROperm := getPermsAndResourceOwnerPerms(pID, uID2, metadata.GV2Group, rw)
-	siteGroupReadPerm, siteGroupReadROperm := getPermsAndResourceOwnerPerms(pID, uID, metadata.GV2SiteGroup, r)
-	siteGroupReadWritePerm, siteGroupReadWriteROperm := getPermsAndResourceOwnerPerms(pID, uID2, metadata.GV2SiteGroup, rw)
-
-	noPerm, _ := getPermsAndResourceOwnerPerms(pID, uID, "user", []string{"read"})
-	noPerm.SetGrantedToV2(nil) // eg: link shares
-
-	cases := []struct {
-		name              string
-		graphPermissions  []models.Permissionable
-		parsedPermissions []metadata.Permission
-	}{
-		{
-			name:              "no perms",
-			graphPermissions:  []models.Permissionable{},
-			parsedPermissions: []metadata.Permission{},
-		},
-		{
-			name:              "no user bound to perms",
-			graphPermissions:  []models.Permissionable{noPerm},
-			parsedPermissions: []metadata.Permission{},
-		},
-
-		// user
-		{
-			name:              "user with read permissions",
-			graphPermissions:  []models.Permissionable{userReadPerm},
-			parsedPermissions: []metadata.Permission{userReadROperm},
-		},
-		{
-			name:              "user with owner permissions",
-			graphPermissions:  []models.Permissionable{userOwnerPerm},
-			parsedPermissions: []metadata.Permission{userOwnerROperm},
-		},
-		{
-			name:              "user with read and write permissions",
-			graphPermissions:  []models.Permissionable{userReadWritePerm},
-			parsedPermissions: []metadata.Permission{userReadWriteROperm},
-		},
-		{
-			name:              "multiple users with separate permissions",
-			graphPermissions:  []models.Permissionable{userReadPerm, userReadWritePerm},
-			parsedPermissions: []metadata.Permission{userReadROperm, userReadWriteROperm},
-		},
-
-		// site-user
-		{
-			name:              "site user with read permissions",
-			graphPermissions:  []models.Permissionable{siteUserReadPerm},
-			parsedPermissions: []metadata.Permission{siteUserReadROperm},
-		},
-		{
-			name:              "site user with owner permissions",
-			graphPermissions:  []models.Permissionable{siteUserOwnerPerm},
-			parsedPermissions: []metadata.Permission{siteUserOwnerROperm},
-		},
-		{
-			name:              "site user with read and write permissions",
-			graphPermissions:  []models.Permissionable{siteUserReadWritePerm},
-			parsedPermissions: []metadata.Permission{siteUserReadWriteROperm},
-		},
-		{
-			name:              "multiple site users with separate permissions",
-			graphPermissions:  []models.Permissionable{siteUserReadPerm, siteUserReadWritePerm},
-			parsedPermissions: []metadata.Permission{siteUserReadROperm, siteUserReadWriteROperm},
-		},
-
-		// group
-		{
-			name:              "group with read permissions",
-			graphPermissions:  []models.Permissionable{groupReadPerm},
-			parsedPermissions: []metadata.Permission{groupReadROperm},
-		},
-		{
-			name:              "group with read and write permissions",
-			graphPermissions:  []models.Permissionable{groupReadWritePerm},
-			parsedPermissions: []metadata.Permission{groupReadWriteROperm},
-		},
-		{
-			name:              "multiple groups with separate permissions",
-			graphPermissions:  []models.Permissionable{groupReadPerm, groupReadWritePerm},
-			parsedPermissions: []metadata.Permission{groupReadROperm, groupReadWriteROperm},
-		},
-
-		// site-group
-		{
-			name:              "site group with read permissions",
-			graphPermissions:  []models.Permissionable{siteGroupReadPerm},
-			parsedPermissions: []metadata.Permission{siteGroupReadROperm},
-		},
-		{
-			name:              "site group with read and write permissions",
-			graphPermissions:  []models.Permissionable{siteGroupReadWritePerm},
-			parsedPermissions: []metadata.Permission{siteGroupReadWriteROperm},
-		},
-		{
-			name:              "multiple site groups with separate permissions",
-			graphPermissions:  []models.Permissionable{siteGroupReadPerm, siteGroupReadWritePerm},
-			parsedPermissions: []metadata.Permission{siteGroupReadROperm, siteGroupReadWriteROperm},
-		},
-	}
+func (suite *ItemUnitTestSuite) TestDownloadItem() {
+	testRc := io.NopCloser(bytes.NewReader([]byte("test")))
+	url := "https://example.com"
+
+	table := []struct {
+		name          string
+		itemFunc      func() models.DriveItemable
+		GetFunc       func(ctx context.Context, url string) (*http.Response, error)
+		errorExpected require.ErrorAssertionFunc
+		rcExpected    require.ValueAssertionFunc
+		label         string
+	}{
+		{
+			name: "nil item",
+			itemFunc: func() models.DriveItemable {
+				return nil
+			},
+			GetFunc: func(ctx context.Context, url string) (*http.Response, error) {
+				return nil, nil
+			},
+			errorExpected: require.Error,
+			rcExpected:    require.Nil,
+		},
+		{
+			name: "success",
+			itemFunc: func() models.DriveItemable {
+				di := newItem("test", false)
+				di.SetAdditionalData(map[string]interface{}{
+					"@microsoft.graph.downloadUrl": url,
+				})
+
+				return di
+			},
+			GetFunc: func(ctx context.Context, url string) (*http.Response, error) {
+				return &http.Response{
+					StatusCode: http.StatusOK,
+					Body:       testRc,
+				}, nil
+			},
+			errorExpected: require.NoError,
+			rcExpected:    require.NotNil,
+		},
+		{
+			name: "success, content url set instead of download url",
+			itemFunc: func() models.DriveItemable {
+				di := newItem("test", false)
+				di.SetAdditionalData(map[string]interface{}{
+					"@content.downloadUrl": url,
+				})
+
+				return di
+			},
+			GetFunc: func(ctx context.Context, url string) (*http.Response, error) {
+				return &http.Response{
+					StatusCode: http.StatusOK,
+					Body:       testRc,
+				}, nil
+			},
+			errorExpected: require.NoError,
+			rcExpected:    require.NotNil,
+		},
+		{
+			name: "api getter returns error",
+			itemFunc: func() models.DriveItemable {
+				di := newItem("test", false)
+				di.SetAdditionalData(map[string]interface{}{
+					"@microsoft.graph.downloadUrl": url,
+				})
+
+				return di
+			},
+			GetFunc: func(ctx context.Context, url string) (*http.Response, error) {
+				return nil, clues.New("test error")
+			},
+			errorExpected: require.Error,
+			rcExpected:    require.Nil,
+		},
+		{
+			name: "download url is empty",
+			itemFunc: func() models.DriveItemable {
+				di := newItem("test", false)
+				return di
+			},
+			GetFunc: func(ctx context.Context, url string) (*http.Response, error) {
+				return &http.Response{
+					StatusCode: http.StatusOK,
+					Body:       testRc,
+				}, nil
+			},
+			errorExpected: require.Error,
+			rcExpected:    require.Nil,
+		},
+		{
+			name: "malware",
+			itemFunc: func() models.DriveItemable {
+				di := newItem("test", false)
+				di.SetAdditionalData(map[string]interface{}{
+					"@microsoft.graph.downloadUrl": url,
+				})
+
+				return di
+			},
+			GetFunc: func(ctx context.Context, url string) (*http.Response, error) {
+				return &http.Response{
+					Header: http.Header{
+						"X-Virus-Infected": []string{"true"},
+					},
+					StatusCode: http.StatusOK,
+					Body:       testRc,
+				}, nil
+			},
+			errorExpected: require.Error,
+			rcExpected:    require.Nil,
+		},
+		{
+			name: "non-2xx http response",
+			itemFunc: func() models.DriveItemable {
+				di := newItem("test", false)
+				di.SetAdditionalData(map[string]interface{}{
+					"@microsoft.graph.downloadUrl": url,
+				})
+
+				return di
+			},
+			GetFunc: func(ctx context.Context, url string) (*http.Response, error) {
+				return &http.Response{
+					StatusCode: http.StatusNotFound,
+					Body:       nil,
+				}, nil
+			},
+			errorExpected: require.Error,
+			rcExpected:    require.Nil,
+		},
+	}

-	for _, tc := range cases {
-		suite.Run(tc.name, func() {
-			t := suite.T()
-
+	for _, test := range table {
+		suite.Run(test.name, func() {
+			t := suite.T()
 			ctx, flush := tester.NewContext(t)
 			defer flush()

-			actual := filterUserPermissions(ctx, tc.graphPermissions)
-			assert.ElementsMatch(t, tc.parsedPermissions, actual)
+			mg := mockGetter{
+				GetFunc: test.GetFunc,
+			}
+			rc, err := downloadItem(ctx, mg, test.itemFunc())
+			test.errorExpected(t, err, clues.ToCore(err))
+			test.rcExpected(t, rc)
 		})
 	}
 }
@@ -1,9 +1,14 @@
 package metadata

 import (
+	"context"
 	"time"

+	"github.com/microsoftgraph/msgraph-sdk-go/models"
 	"golang.org/x/exp/slices"
+
+	"github.com/alcionai/corso/src/internal/common/ptr"
+	"github.com/alcionai/corso/src/pkg/logger"
 )

 type SharingMode int
@@ -100,3 +105,72 @@ func DiffPermissions(before, after []Permission) ([]Permission, []Permission) {
 	return added, removed
 }
+
+func FilterPermissions(ctx context.Context, perms []models.Permissionable) []Permission {
+	up := []Permission{}
+
+	for _, p := range perms {
+		if p.GetGrantedToV2() == nil {
+			// For link shares, we get permissions without a user
+			// specified
+			continue
+		}
+
+		var (
+			// Below are the mappings from roles to "Advanced" permissions
+			// screen entries:
+			//
+			// owner - Full Control
+			// write - Design | Edit | Contribute (no difference in /permissions api)
+			// read  - Read
+			// empty - Restricted View
+			//
+			// helpful docs:
+			// https://devblogs.microsoft.com/microsoft365dev/controlling-app-access-on-specific-sharepoint-site-collections/
+			roles    = p.GetRoles()
+			gv2      = p.GetGrantedToV2()
+			entityID string
+			gv2t     GV2Type
+		)
+
+		switch true {
+		case gv2.GetUser() != nil:
+			gv2t = GV2User
+			entityID = ptr.Val(gv2.GetUser().GetId())
+		case gv2.GetSiteUser() != nil:
+			gv2t = GV2SiteUser
+			entityID = ptr.Val(gv2.GetSiteUser().GetId())
+		case gv2.GetGroup() != nil:
+			gv2t = GV2Group
+			entityID = ptr.Val(gv2.GetGroup().GetId())
+		case gv2.GetSiteGroup() != nil:
+			gv2t = GV2SiteGroup
+			entityID = ptr.Val(gv2.GetSiteGroup().GetId())
+		case gv2.GetApplication() != nil:
+			gv2t = GV2App
+			entityID = ptr.Val(gv2.GetApplication().GetId())
+		case gv2.GetDevice() != nil:
+			gv2t = GV2Device
+			entityID = ptr.Val(gv2.GetDevice().GetId())
+		default:
+			logger.Ctx(ctx).Info("untracked permission")
+		}
+
+		// Technically GrantedToV2 can also contain devices, but the
+		// documentation does not mention devices in permissions.
+		if entityID == "" {
+			// This should ideally not be hit
+			continue
+		}
+
+		up = append(up, Permission{
+			ID:         ptr.Val(p.GetId()),
+			Roles:      roles,
+			EntityID:   entityID,
+			EntityType: gv2t,
+			Expiration: p.GetExpirationDateTime(),
+		})
+	}
+
+	return up
+}
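For orientation, FilterPermissions takes the raw models.Permissionable slice from a Graph /permissions response and keeps only the entries it can attribute to a user, site-user, group, site-group, app, or device, dropping link shares. A hypothetical call site might look like the fragment below; pcr and the surrounding error handling are illustrative, not taken from the codebase:

// pcr is a models.PermissionCollectionResponseable fetched elsewhere,
// e.g. via a GetItemPermission-style call; usage sketch only.
perms := metadata.FilterPermissions(ctx, pcr.GetValue())

for _, p := range perms {
	// Link shares (no GrantedToV2 identity) have already been dropped.
	fmt.Printf("%s %s -> roles %v\n", p.EntityType, p.EntityID, p.Roles)
}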
@@ -3,6 +3,7 @@ package metadata
 import (
 	"testing"

+	"github.com/microsoftgraph/msgraph-sdk-go/models"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/suite"
@@ -147,3 +148,187 @@ func (suite *PermissionsUnitTestSuite) TestDiffPermissions() {
 		})
 	}
 }
+
+func getPermsAndResourceOwnerPerms(
+	permID, resourceOwner string,
+	gv2t GV2Type,
+	scopes []string,
+) (models.Permissionable, Permission) {
+	sharepointIdentitySet := models.NewSharePointIdentitySet()
+
+	switch gv2t {
+	case GV2App, GV2Device, GV2Group, GV2User:
+		identity := models.NewIdentity()
+		identity.SetId(&resourceOwner)
+		identity.SetAdditionalData(map[string]any{"email": &resourceOwner})
+
+		switch gv2t {
+		case GV2User:
+			sharepointIdentitySet.SetUser(identity)
+		case GV2Group:
+			sharepointIdentitySet.SetGroup(identity)
+		case GV2App:
+			sharepointIdentitySet.SetApplication(identity)
+		case GV2Device:
+			sharepointIdentitySet.SetDevice(identity)
+		}
+
+	case GV2SiteUser, GV2SiteGroup:
+		spIdentity := models.NewSharePointIdentity()
+		spIdentity.SetId(&resourceOwner)
+		spIdentity.SetAdditionalData(map[string]any{"email": &resourceOwner})
+
+		switch gv2t {
+		case GV2SiteUser:
+			sharepointIdentitySet.SetSiteUser(spIdentity)
+		case GV2SiteGroup:
+			sharepointIdentitySet.SetSiteGroup(spIdentity)
+		}
+	}
+
+	perm := models.NewPermission()
+	perm.SetId(&permID)
+	perm.SetRoles([]string{"read"})
+	perm.SetGrantedToV2(sharepointIdentitySet)
+
+	ownersPerm := Permission{
+		ID:         permID,
+		Roles:      []string{"read"},
+		EntityID:   resourceOwner,
+		EntityType: gv2t,
+	}
+
+	return perm, ownersPerm
+}
+
+func (suite *PermissionsUnitTestSuite) TestDrivePermissionsFilter() {
+	var (
+		pID  = "fakePermId"
+		uID  = "fakeuser@provider.com"
+		uID2 = "fakeuser2@provider.com"
+		own  = []string{"owner"}
+		r    = []string{"read"}
+		rw   = []string{"read", "write"}
+	)
+
+	userOwnerPerm, userOwnerROperm := getPermsAndResourceOwnerPerms(pID, uID, GV2User, own)
+	userReadPerm, userReadROperm := getPermsAndResourceOwnerPerms(pID, uID, GV2User, r)
+	userReadWritePerm, userReadWriteROperm := getPermsAndResourceOwnerPerms(pID, uID2, GV2User, rw)
+	siteUserOwnerPerm, siteUserOwnerROperm := getPermsAndResourceOwnerPerms(pID, uID, GV2SiteUser, own)
+	siteUserReadPerm, siteUserReadROperm := getPermsAndResourceOwnerPerms(pID, uID, GV2SiteUser, r)
+	siteUserReadWritePerm, siteUserReadWriteROperm := getPermsAndResourceOwnerPerms(pID, uID2, GV2SiteUser, rw)
+
+	groupReadPerm, groupReadROperm := getPermsAndResourceOwnerPerms(pID, uID, GV2Group, r)
+	groupReadWritePerm, groupReadWriteROperm := getPermsAndResourceOwnerPerms(pID, uID2, GV2Group, rw)
+	siteGroupReadPerm, siteGroupReadROperm := getPermsAndResourceOwnerPerms(pID, uID, GV2SiteGroup, r)
+	siteGroupReadWritePerm, siteGroupReadWriteROperm := getPermsAndResourceOwnerPerms(pID, uID2, GV2SiteGroup, rw)
+
+	noPerm, _ := getPermsAndResourceOwnerPerms(pID, uID, "user", []string{"read"})
+	noPerm.SetGrantedToV2(nil) // eg: link shares
+
+	cases := []struct {
+		name              string
+		graphPermissions  []models.Permissionable
+		parsedPermissions []Permission
+	}{
+		{
+			name:              "no perms",
+			graphPermissions:  []models.Permissionable{},
+			parsedPermissions: []Permission{},
+		},
+		{
+			name:              "no user bound to perms",
+			graphPermissions:  []models.Permissionable{noPerm},
+			parsedPermissions: []Permission{},
+		},
+
+		// user
+		{
+			name:              "user with read permissions",
+			graphPermissions:  []models.Permissionable{userReadPerm},
+			parsedPermissions: []Permission{userReadROperm},
+		},
+		{
+			name:              "user with owner permissions",
+			graphPermissions:  []models.Permissionable{userOwnerPerm},
+			parsedPermissions: []Permission{userOwnerROperm},
+		},
+		{
+			name:              "user with read and write permissions",
+			graphPermissions:  []models.Permissionable{userReadWritePerm},
+			parsedPermissions: []Permission{userReadWriteROperm},
+		},
+		{
+			name:              "multiple users with separate permissions",
+			graphPermissions:  []models.Permissionable{userReadPerm, userReadWritePerm},
+			parsedPermissions: []Permission{userReadROperm, userReadWriteROperm},
+		},
+
+		// site-user
+		{
+			name:              "site user with read permissions",
+			graphPermissions:  []models.Permissionable{siteUserReadPerm},
+			parsedPermissions: []Permission{siteUserReadROperm},
+		},
+		{
+			name:              "site user with owner permissions",
+			graphPermissions:  []models.Permissionable{siteUserOwnerPerm},
+			parsedPermissions: []Permission{siteUserOwnerROperm},
+		},
+		{
+			name:              "site user with read and write permissions",
+			graphPermissions:  []models.Permissionable{siteUserReadWritePerm},
+			parsedPermissions: []Permission{siteUserReadWriteROperm},
+		},
+		{
+			name:              "multiple site users with separate permissions",
+			graphPermissions:  []models.Permissionable{siteUserReadPerm, siteUserReadWritePerm},
+			parsedPermissions: []Permission{siteUserReadROperm, siteUserReadWriteROperm},
+		},
+
+		// group
+		{
+			name:              "group with read permissions",
+			graphPermissions:  []models.Permissionable{groupReadPerm},
+			parsedPermissions: []Permission{groupReadROperm},
+		},
+		{
+			name:              "group with read and write permissions",
+			graphPermissions:  []models.Permissionable{groupReadWritePerm},
+			parsedPermissions: []Permission{groupReadWriteROperm},
+		},
+		{
+			name:              "multiple groups with separate permissions",
+			graphPermissions:  []models.Permissionable{groupReadPerm, groupReadWritePerm},
+			parsedPermissions: []Permission{groupReadROperm, groupReadWriteROperm},
+		},
+
+		// site-group
+		{
+			name:              "site group with read permissions",
+			graphPermissions:  []models.Permissionable{siteGroupReadPerm},
+			parsedPermissions: []Permission{siteGroupReadROperm},
+		},
+		{
+			name:              "site group with read and write permissions",
+			graphPermissions:  []models.Permissionable{siteGroupReadWritePerm},
+			parsedPermissions: []Permission{siteGroupReadWriteROperm},
+		},
+		{
+			name:              "multiple site groups with separate permissions",
+			graphPermissions:  []models.Permissionable{siteGroupReadPerm, siteGroupReadWritePerm},
+			parsedPermissions: []Permission{siteGroupReadROperm, siteGroupReadWriteROperm},
+		},
+	}
+	for _, tc := range cases {
+		suite.Run(tc.name, func() {
+			t := suite.T()
+
+			ctx, flush := tester.NewContext(t)
+			defer flush()
+
+			actual := FilterPermissions(ctx, tc.graphPermissions)
+			assert.ElementsMatch(t, tc.parsedPermissions, actual)
+		})
+	}
+}
src/internal/connector/onedrive/metadata/testdata/permissions.go (vendored, new file, 54 lines)
@@ -0,0 +1,54 @@
+package testdata
+
+import (
+	"testing"
+
+	"github.com/microsoftgraph/msgraph-sdk-go/models"
+	"github.com/stretchr/testify/assert"
+
+	"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
+)
+
+func AssertMetadataEqual(t *testing.T, expect, got metadata.Metadata) {
+	assert.Equal(t, expect.FileName, got.FileName, "fileName")
+	assert.Equal(t, expect.SharingMode, got.SharingMode, "sharingMode")
+	assert.Equal(t, len(expect.Permissions), len(got.Permissions), "permissions count")
+
+	for i, ep := range expect.Permissions {
+		gp := got.Permissions[i]
+
+		assert.Equal(t, ep.EntityType, gp.EntityType, "permission %d entityType", i)
+		assert.Equal(t, ep.EntityID, gp.EntityID, "permission %d entityID", i)
+		assert.Equal(t, ep.ID, gp.ID, "permission %d ID", i)
+		assert.ElementsMatch(t, ep.Roles, gp.Roles, "permission %d roles", i)
+	}
+}
+
+func NewStubPermissionResponse(
+	gv2 metadata.GV2Type,
+	permID, entityID string,
+	roles []string,
+) models.PermissionCollectionResponseable {
+	var (
+		p    = models.NewPermission()
+		pcr  = models.NewPermissionCollectionResponse()
+		spis = models.NewSharePointIdentitySet()
+	)
+
+	switch gv2 {
+	case metadata.GV2User:
+		i := models.NewIdentity()
+		i.SetId(&entityID)
+		i.SetDisplayName(&entityID)
+
+		spis.SetUser(i)
+	}
+
+	p.SetGrantedToV2(spis)
+	p.SetId(&permID)
+	p.SetRoles(roles)
+
+	pcr.SetValue([]models.Permissionable{p})
+
+	return pcr
+}
src/internal/connector/onedrive/mock/handlers.go (new file, 217 lines)
@@ -0,0 +1,217 @@
+package mock
+
+import (
+	"context"
+	"net/http"
+
+	"github.com/alcionai/clues"
+	"github.com/microsoftgraph/msgraph-sdk-go/models"
+
+	odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
+	"github.com/alcionai/corso/src/pkg/backup/details"
+	"github.com/alcionai/corso/src/pkg/path"
+	"github.com/alcionai/corso/src/pkg/services/m365/api"
+)
+
+// ---------------------------------------------------------------------------
+// Backup Handler
+// ---------------------------------------------------------------------------
+
+type BackupHandler struct {
+	ItemInfo details.ItemInfo
+
+	GI  GetsItem
+	GIP GetsItemPermission
+
+	PathPrefixFn  pathPrefixer
+	PathPrefixErr error
+
+	CanonPathFn  canonPather
+	CanonPathErr error
+
+	Service  path.ServiceType
+	Category path.CategoryType
+
+	DrivePagerV api.DrivePager
+	// driveID -> itemPager
+	ItemPagerV map[string]api.DriveItemEnumerator
+
+	LocationIDFn locationIDer
+
+	getCall  int
+	GetResps []*http.Response
+	GetErrs  []error
+}
+
+func DefaultOneDriveBH() *BackupHandler {
+	return &BackupHandler{
+		ItemInfo:     details.ItemInfo{OneDrive: &details.OneDriveInfo{}},
+		GI:           GetsItem{Err: clues.New("not defined")},
+		GIP:          GetsItemPermission{Err: clues.New("not defined")},
+		PathPrefixFn: defaultOneDrivePathPrefixer,
+		CanonPathFn:  defaultOneDriveCanonPather,
+		Service:      path.OneDriveService,
+		Category:     path.FilesCategory,
+		LocationIDFn: defaultOneDriveLocationIDer,
+		GetResps:     []*http.Response{nil},
+		GetErrs:      []error{clues.New("not defined")},
+	}
+}
+
+func DefaultSharePointBH() *BackupHandler {
+	return &BackupHandler{
+		ItemInfo:     details.ItemInfo{SharePoint: &details.SharePointInfo{}},
+		GI:           GetsItem{Err: clues.New("not defined")},
+		GIP:          GetsItemPermission{Err: clues.New("not defined")},
+		PathPrefixFn: defaultSharePointPathPrefixer,
+		CanonPathFn:  defaultSharePointCanonPather,
+		Service:      path.SharePointService,
+		Category:     path.LibrariesCategory,
+		LocationIDFn: defaultSharePointLocationIDer,
+		GetResps:     []*http.Response{nil},
+		GetErrs:      []error{clues.New("not defined")},
+	}
+}
+
+func (h BackupHandler) PathPrefix(tID, ro, driveID string) (path.Path, error) {
+	pp, err := h.PathPrefixFn(tID, ro, driveID)
+	if err != nil {
+		return nil, err
+	}
+
+	return pp, h.PathPrefixErr
+}
+
+func (h BackupHandler) CanonicalPath(pb *path.Builder, tID, ro string) (path.Path, error) {
+	cp, err := h.CanonPathFn(pb, tID, ro)
+	if err != nil {
+		return nil, err
+	}
+
+	return cp, h.CanonPathErr
+}
+
+func (h BackupHandler) ServiceCat() (path.ServiceType, path.CategoryType) {
+	return h.Service, h.Category
+}
+
+func (h BackupHandler) NewDrivePager(string, []string) api.DrivePager {
+	return h.DrivePagerV
+}
+
+func (h BackupHandler) NewItemPager(driveID string, _ string, _ []string) api.DriveItemEnumerator {
+	return h.ItemPagerV[driveID]
+}
+
+func (h BackupHandler) FormatDisplayPath(_ string, pb *path.Builder) string {
+	return "/" + pb.String()
+}
+
+func (h BackupHandler) NewLocationIDer(driveID string, elems ...string) details.LocationIDer {
+	return h.LocationIDFn(driveID, elems...)
+}
+
+func (h BackupHandler) AugmentItemInfo(details.ItemInfo, models.DriveItemable, int64, *path.Builder) details.ItemInfo {
+	return h.ItemInfo
+}
+
+func (h *BackupHandler) Get(context.Context, string, map[string]string) (*http.Response, error) {
+	c := h.getCall
+	h.getCall++
+
+	// allows mockers to only populate the errors slice
+	if h.GetErrs[c] != nil {
+		return nil, h.GetErrs[c]
+	}
+
+	return h.GetResps[c], h.GetErrs[c]
+}
+
+func (h BackupHandler) GetItem(ctx context.Context, _, _ string) (models.DriveItemable, error) {
+	return h.GI.GetItem(ctx, "", "")
+}
+
+func (h BackupHandler) GetItemPermission(
+	ctx context.Context,
+	_, _ string,
+) (models.PermissionCollectionResponseable, error) {
+	return h.GIP.GetItemPermission(ctx, "", "")
+}
+
+type canonPather func(*path.Builder, string, string) (path.Path, error)
+
+var defaultOneDriveCanonPather = func(pb *path.Builder, tID, ro string) (path.Path, error) {
+	return pb.ToDataLayerOneDrivePath(tID, ro, false)
+}
+
+var defaultSharePointCanonPather = func(pb *path.Builder, tID, ro string) (path.Path, error) {
+	return pb.ToDataLayerSharePointPath(tID, ro, path.LibrariesCategory, false)
+}
+
+type pathPrefixer func(tID, ro, driveID string) (path.Path, error)
+
+var defaultOneDrivePathPrefixer = func(tID, ro, driveID string) (path.Path, error) {
+	return path.Build(
+		tID,
+		ro,
+		path.OneDriveService,
+		path.FilesCategory,
+		false,
+		odConsts.DrivesPathDir,
+		driveID,
+		odConsts.RootPathDir)
+}
+
+var defaultSharePointPathPrefixer = func(tID, ro, driveID string) (path.Path, error) {
+	return path.Build(
+		tID,
+		ro,
+		path.SharePointService,
+		path.LibrariesCategory,
+		false,
+		odConsts.DrivesPathDir,
+		driveID,
+		odConsts.RootPathDir)
+}
+
+type locationIDer func(string, ...string) details.LocationIDer
+
+var defaultOneDriveLocationIDer = func(driveID string, elems ...string) details.LocationIDer {
+	return details.NewOneDriveLocationIDer(driveID, elems...)
+}
+
+var defaultSharePointLocationIDer = func(driveID string, elems ...string) details.LocationIDer {
+	return details.NewSharePointLocationIDer(driveID, elems...)
+}
+
+// ---------------------------------------------------------------------------
+// Get Itemer
+// ---------------------------------------------------------------------------
+
+type GetsItem struct {
+	Item models.DriveItemable
+	Err  error
+}
+
+func (m GetsItem) GetItem(
+	_ context.Context,
+	_, _ string,
+) (models.DriveItemable, error) {
+	return m.Item, m.Err
+}
+
+// ---------------------------------------------------------------------------
+// Get Item Permissioner
+// ---------------------------------------------------------------------------
+
+type GetsItemPermission struct {
+	Perm models.PermissionCollectionResponseable
+	Err  error
+}
+
+func (m GetsItemPermission) GetItemPermission(
+	_ context.Context,
+	_, _ string,
+) (models.PermissionCollectionResponseable, error) {
+	return m.Perm, m.Err
+}
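The getCall counter in the mock's Get makes the stub call-sequenced: each invocation consumes the next response/error pair, and, as the inline comment notes, a test that only cares about failures can populate GetErrs alone. A hypothetical test setup might prime it like this (field values are illustrative; ctx comes from the surrounding test):

h := mock.DefaultOneDriveBH()

// First Get fails, second succeeds with a body.
h.GetErrs = []error{
	clues.New("transient failure"),
	nil,
}
h.GetResps = []*http.Response{
	nil,
	{
		StatusCode: http.StatusOK,
		Body:       io.NopCloser(strings.NewReader("file content")),
	},
}

_, err := h.Get(ctx, "https://example.com/download", nil)
// err is the "transient failure"; a retry would then receive the 200.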
src/internal/connector/onedrive/mock/item.go (new file, 47 lines)
@@ -0,0 +1,47 @@
+package mock
+
+//nolint:lll
+const DriveFilePayloadData = `{
+	"@odata.context": "https://graph.microsoft.com/v1.0/$metadata#drives('b%22-8wC6Jt04EWvKr1fQUDOyw5Gk8jIUJdEjzqonlSRf48i67LJdwopT4-6kiycJ5AV')/items/$entity",
+	"@microsoft.graph.downloadUrl": "https://test-my.sharepoint.com/personal/brunhilda_test_onmicrosoft_com/_layouts/15/download.aspx?UniqueId=deadbeef-1b6a-4d13-aae6-bf5f9b07d424&Translate=false&tempauth=eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJhdWQiOiIwMDAwMDAwMy0wMDAwLTBmZjEtY2UwMC0wMDAwMDAwMDAwMDAvMTBycWMyLW15LnNoYXJlcG9pbnQuY29tQGZiOGFmYmFhLWU5NGMtNGVhNS04YThhLTI0YWZmMDRkNzg3NCIsImlzcyI6IjAwMDAwMDAzLTAwMDAtMGZmMS1jZTAwLTAwMDAwMDAwMDAwMCIsIm5iZiI6IjE2ODUxMjk1MzIiLCJleHAiOiIxNjg1MTMzMTMyIiwiZW5kcG9pbnR1cmwiOiJkTStxblBIQitkNDMzS0ErTHVTUVZMRi9IaVliSkI2eHJWN0tuYk45aXQ0PSIsImVuZHBvaW50dXJsTGVuZ3RoIjoiMTYxIiwiaXNsb29wYmFjayI6IlRydWUiLCJjaWQiOiJOVFl4TXpNMFkyWXRZVFk0TVMwMFpXUmxMVGt5TjJZdFlXVmpNVGMwTldWbU16TXgiLCJ2ZXIiOiJoYXNoZWRwcm9vZnRva2VuIiwic2l0ZWlkIjoiWlRnd01tTmpabUl0TnpRNVlpMDBOV1V3TFdGbU1tRXRZbVExWmpReE5EQmpaV05pIiwiYXBwX2Rpc3BsYXluYW1lIjoiS2VlcGVyc19Mb2NhbCIsIm5hbWVpZCI6ImFkYjk3MTQ2LTcxYTctNDkxYS05YWMwLWUzOGFkNzdkZWViNkBmYjhhZmJhYS1lOTRjLTRlYTUtOGE4YS0yNGFmZjA0ZDc4NzQiLCJyb2xlcyI6ImFsbHNpdGVzLndyaXRlIGFsbHNpdGVzLm1hbmFnZSBhbGxmaWxlcy53cml0ZSBhbGxzaXRlcy5mdWxsY29udHJvbCBhbGxwcm9maWxlcy5yZWFkIiwidHQiOiIxIiwidXNlUGVyc2lzdGVudENvb2tpZSI6bnVsbCwiaXBhZGRyIjoiMjA1MTkwLjE1Ny4zMCJ9.lN7Vpfzk1abEyE0M3gyRyZXEaGQ3JMXCyaXUBNbD5Vo&ApiVersion=2.0",
+	"createdDateTime": "2023-04-25T21:32:58Z",
+	"eTag": "\"{DEADBEEF-1B6A-4D13-AAE6-BF5F9B07D424},1\"",
+	"id": "017W47IH3FQVEFI23QCNG2VZV7L6NQPVBE",
+	"lastModifiedDateTime": "2023-04-25T21:32:58Z",
+	"name": "huehuehue.GIF",
+	"webUrl": "https://test-my.sharepoint.com/personal/brunhilda_test_onmicrosoft_com/Documents/test/huehuehue.GIF",
+	"cTag": "\"c:{DEADBEEF-1B6A-4D13-AAE6-BF5F9B07D424},1\"",
+	"size": 88843,
+	"createdBy": {
+		"user": {
+			"email": "brunhilda@test.onmicrosoft.com",
+			"id": "DEADBEEF-4c80-4da4-86ef-a08d8d6f0f94",
+			"displayName": "BrunHilda"
+		}
+	},
+	"lastModifiedBy": {
+		"user": {
+			"email": "brunhilda@10rqc2.onmicrosoft.com",
+			"id": "DEADBEEF-4c80-4da4-86ef-a08d8d6f0f94",
+			"displayName": "BrunHilda"
+		}
+	},
+	"parentReference": {
+		"driveType": "business",
+		"driveId": "b!-8wC6Jt04EWvKr1fQUDOyw5Gk8jIUJdEjzqonlSRf48i67LJdwopT4-6kiycJ5VA",
+		"id": "017W47IH6DRQF2GS2N6NGWLZRS7RUJ2DIP",
+		"path": "/drives/b!-8wC6Jt04EWvKr1fQUDOyw5Gk8jIUJdEjzqonlSRf48i67LJdwopT4-6kiycJ5VA/root:/test",
+		"siteId": "DEADBEEF-749b-45e0-af2a-bd5f4140cecb"
+	},
+	"file": {
+		"mimeType": "image/gif",
+		"hashes": {
+			"quickXorHash": "sU5rmXOvVFn6zJHpCPro9cYaK+Q="
+		}
+	},
+	"fileSystemInfo": {
+		"createdDateTime": "2023-04-25T21:32:58Z",
+		"lastModifiedDateTime": "2023-04-25T21:32:58Z"
+	},
+	"image": {}
+}`
@@ -8,13 +8,10 @@ import (
 	"github.com/microsoftgraph/msgraph-sdk-go/models"

 	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/version"
-	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/path"
-	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )

 func getParentMetadata(
@@ -132,12 +129,16 @@ func computeParentPermissions(
 		}
 	}
 }

+type updateDeleteItemPermissioner interface {
+	DeleteItemPermissioner
+	UpdateItemPermissioner
+}
+
 // UpdatePermissions takes in the set of permission to be added and
 // removed from an item to bring it to the desired state.
 func UpdatePermissions(
 	ctx context.Context,
-	creds account.M365Config,
-	service graph.Servicer,
+	udip updateDeleteItemPermissioner,
 	driveID string,
 	itemID string,
 	permAdded, permRemoved []metadata.Permission,
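updateDeleteItemPermissioner is plain Go interface composition: embedding the two narrower interfaces yields a method set satisfied by anything that implements both, which keeps UpdatePermissions mockable without dragging in a full Graph client. The pattern in isolation, with toy names rather than Corso's real signatures:

package main

import "fmt"

type deleter interface {
	DeletePermission(driveID, itemID, permID string) error
}

type updater interface {
	UpdatePermission(driveID, itemID, body string) error
}

// composed interface: both method sets are required
type updateDeletePermissioner interface {
	deleter
	updater
}

type fakeClient struct{}

func (fakeClient) DeletePermission(driveID, itemID, permID string) error {
	fmt.Println("delete", driveID, itemID, permID)
	return nil
}

func (fakeClient) UpdatePermission(driveID, itemID, body string) error {
	fmt.Println("update", driveID, itemID, body)
	return nil
}

func main() {
	var udp updateDeletePermissioner = fakeClient{}
	_ = udp.DeletePermission("d", "i", "p")
	_ = udp.UpdatePermission("d", "i", "{}")
}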
@@ -161,9 +162,8 @@ func UpdatePermissions(
 			return clues.New("no new permission id").WithClues(ctx)
 		}

-		err := api.DeleteDriveItemPermission(
+		err := udip.DeleteItemPermission(
 			ictx,
-			creds,
 			driveID,
 			itemID,
 			pid)
@@ -216,7 +216,7 @@ func UpdatePermissions(
 		pbody.SetRecipients([]models.DriveRecipientable{rec})

-		newPerm, err := api.PostItemPermissionUpdate(ictx, service, driveID, itemID, pbody)
+		newPerm, err := udip.PostItemPermissionUpdate(ictx, driveID, itemID, pbody)
 		if err != nil {
 			return clues.Stack(err)
 		}
@@ -233,8 +233,7 @@ func UpdatePermissions(
 // on onedrive items.
 func RestorePermissions(
 	ctx context.Context,
-	creds account.M365Config,
-	service graph.Servicer,
+	rh RestoreHandler,
 	driveID string,
 	itemID string,
 	itemPath path.Path,
@@ -256,8 +255,7 @@ func RestorePermissions(
 	return UpdatePermissions(
 		ctx,
-		creds,
-		service,
+		rh,
 		driveID,
 		itemID,
 		permAdded,
@@ -1,7 +1,6 @@
 package onedrive

 import (
-	"fmt"
 	"strings"
 	"testing"
@@ -9,6 +8,7 @@ import (
 	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"

+	odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
 	"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/path"
@@ -36,8 +36,8 @@ func runComputeParentPermissionsTest(
 	category path.CategoryType,
 	resourceOwner string,
 ) {
-	entryPath := fmt.Sprintf(rootDrivePattern, "drive-id") + "/level0/level1/level2/entry"
-	rootEntryPath := fmt.Sprintf(rootDrivePattern, "drive-id") + "/entry"
+	entryPath := odConsts.DriveFolderPrefixBuilder("drive-id").String() + "/level0/level1/level2/entry"
+	rootEntryPath := odConsts.DriveFolderPrefixBuilder("drive-id").String() + "/entry"

 	entry, err := path.Build(
 		"tenant",
@@ -22,7 +22,6 @@ import (
 	"github.com/alcionai/corso/src/internal/diagnostics"
 	"github.com/alcionai/corso/src/internal/observe"
 	"github.com/alcionai/corso/src/internal/version"
-	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/fault"
@@ -41,6 +40,7 @@ type restoreCaches struct {
 	ParentDirToMeta       map[string]metadata.Metadata
 	OldPermIDToNewID      map[string]string
 	DriveIDToRootFolderID map[string]string
+	pool                  sync.Pool
 }

 func NewRestoreCaches() *restoreCaches {
@@ -49,20 +49,25 @@ func NewRestoreCaches() *restoreCaches {
 		ParentDirToMeta:       map[string]metadata.Metadata{},
 		OldPermIDToNewID:      map[string]string{},
 		DriveIDToRootFolderID: map[string]string{},
+		// Buffer pool for uploads
+		pool: sync.Pool{
+			New: func() interface{} {
+				b := make([]byte, graph.CopyBufferSize)
+				return &b
+			},
+		},
 	}
 }

 // RestoreCollections will restore the specified data collections into OneDrive
 func RestoreCollections(
 	ctx context.Context,
-	creds account.M365Config,
+	rh RestoreHandler,
 	backupVersion int,
-	service graph.Servicer,
 	dest control.RestoreDestination,
 	opts control.Options,
 	dcs []data.RestoreCollection,
 	deets *details.Builder,
-	pool *sync.Pool,
 	errs *fault.Bus,
 ) (*support.ConnectorOperationStatus, error) {
 	var (
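Moving the buffer pool into restoreCaches scopes it to a single restore operation instead of threading a *sync.Pool parameter through every call. The pool itself is the standard buffer-reuse idiom; a minimal standalone version, with the buffer size as a stand-in for graph.CopyBufferSize:

package main

import (
	"fmt"
	"sync"
)

const copyBufferSize = 1 << 20 // stand-in; the real value is graph.CopyBufferSize

func main() {
	pool := sync.Pool{
		// New only runs when the pool is empty; storing *[]byte rather
		// than []byte avoids an extra allocation on every Put.
		New: func() interface{} {
			b := make([]byte, copyBufferSize)
			return &b
		},
	}

	bufPtr := pool.Get().(*[]byte)
	defer pool.Put(bufPtr)

	copyBuffer := *bufPtr
	fmt.Println(len(copyBuffer)) // reused across concurrent item uploads
}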
@@ -99,16 +104,13 @@ func RestoreCollections(
 			metrics, err = RestoreCollection(
 				ictx,
-				creds,
+				rh,
 				backupVersion,
-				service,
 				dc,
 				caches,
-				OneDriveSource,
 				dest.ContainerName,
 				deets,
 				opts.RestorePermissions,
-				pool,
 				errs)
 			if err != nil {
 				el.AddRecoverable(err)
@@ -138,16 +140,13 @@ func RestoreCollections(
 // - error, if any besides recoverable
 func RestoreCollection(
 	ctx context.Context,
-	creds account.M365Config,
+	rh RestoreHandler,
 	backupVersion int,
-	service graph.Servicer,
 	dc data.RestoreCollection,
 	caches *restoreCaches,
-	source driveSource,
 	restoreContainerName string,
 	deets *details.Builder,
 	restorePerms bool,
-	pool *sync.Pool,
 	errs *fault.Bus,
 ) (support.CollectionMetrics, error) {
 	var (
@@ -170,7 +169,7 @@ func RestoreCollection(
 	}

 	if _, ok := caches.DriveIDToRootFolderID[drivePath.DriveID]; !ok {
-		root, err := api.GetDriveRoot(ctx, service, drivePath.DriveID)
+		root, err := rh.GetRootFolder(ctx, drivePath.DriveID)
 		if err != nil {
 			return metrics, clues.Wrap(err, "getting drive root id")
 		}
@@ -207,8 +206,7 @@ func RestoreCollection(
 	// Create restore folders and get the folder ID of the folder the data stream will be restored in
 	restoreFolderID, err := CreateRestoreFolders(
 		ctx,
-		creds,
-		service,
+		rh,
 		drivePath,
 		restoreDir,
 		dc.FullPath(),
@@ -267,11 +265,10 @@ func RestoreCollection(
 			defer wg.Done()
 			defer func() { <-semaphoreCh }()

-			copyBufferPtr := pool.Get().(*[]byte)
-			defer pool.Put(copyBufferPtr)
+			copyBufferPtr := caches.pool.Get().(*[]byte)
+			defer caches.pool.Put(copyBufferPtr)

 			copyBuffer := *copyBufferPtr

 			ictx := clues.Add(ctx, "restore_item_id", itemData.UUID())

 			itemPath, err := dc.FullPath().AppendItem(itemData.UUID())
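For context on semaphoreCh in this hunk: a buffered channel is the usual Go idiom for capping the number of in-flight item restores, with each goroutine taking a slot before starting and releasing it on exit, exactly as the two deferred calls above do. The pattern in isolation (the limit of 4 is illustrative):

package main

import (
	"fmt"
	"sync"
)

func main() {
	semaphoreCh := make(chan struct{}, 4) // at most 4 concurrent workers
	var wg sync.WaitGroup

	for i := 0; i < 10; i++ {
		wg.Add(1)
		semaphoreCh <- struct{}{} // blocks once 4 workers are running

		go func(n int) {
			defer wg.Done()
			defer func() { <-semaphoreCh }() // free the slot

			fmt.Println("restoring item", n)
		}(i)
	}

	wg.Wait()
}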
@@ -282,11 +279,9 @@ func RestoreCollection(
 			itemInfo, skipped, err := restoreItem(
 				ictx,
-				creds,
+				rh,
 				dc,
 				backupVersion,
-				source,
-				service,
 				drivePath,
 				restoreFolderID,
 				copyBuffer,
@@ -332,11 +327,9 @@ func RestoreCollection(
 // returns the item info, a bool (true = restore was skipped), and an error
 func restoreItem(
 	ctx context.Context,
-	creds account.M365Config,
-	dc data.RestoreCollection,
+	rh RestoreHandler,
+	fibn data.FetchItemByNamer,
 	backupVersion int,
-	source driveSource,
-	service graph.Servicer,
 	drivePath *path.DrivePath,
 	restoreFolderID string,
 	copyBuffer []byte,
@@ -351,10 +344,9 @@ func restoreItem(
 	if backupVersion < version.OneDrive1DataAndMetaFiles {
 		itemInfo, err := restoreV0File(
 			ctx,
-			source,
-			service,
+			rh,
 			drivePath,
-			dc,
+			fibn,
 			restoreFolderID,
 			copyBuffer,
 			itemData)
@@ -401,11 +393,9 @@ func restoreItem(
 	if backupVersion < version.OneDrive6NameInMeta {
 		itemInfo, err := restoreV1File(
 			ctx,
-			source,
-			creds,
-			service,
+			rh,
 			drivePath,
-			dc,
+			fibn,
 			restoreFolderID,
 			copyBuffer,
 			restorePerms,
@@ -423,11 +413,9 @@ func restoreItem(
 	itemInfo, err := restoreV6File(
 		ctx,
-		source,
-		creds,
-		service,
+		rh,
 		drivePath,
-		dc,
+		fibn,
 		restoreFolderID,
 		copyBuffer,
 		restorePerms,
@ -443,24 +431,22 @@ func restoreItem(
|
|||||||
|
|
||||||
func restoreV0File(
|
func restoreV0File(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
source driveSource,
|
rh RestoreHandler,
|
||||||
service graph.Servicer,
|
|
||||||
drivePath *path.DrivePath,
|
drivePath *path.DrivePath,
|
||||||
fetcher fileFetcher,
|
fibn data.FetchItemByNamer,
|
||||||
restoreFolderID string,
|
restoreFolderID string,
|
||||||
copyBuffer []byte,
|
copyBuffer []byte,
|
||||||
itemData data.Stream,
|
itemData data.Stream,
|
||||||
) (details.ItemInfo, error) {
|
) (details.ItemInfo, error) {
|
||||||
_, itemInfo, err := restoreData(
|
_, itemInfo, err := restoreData(
|
||||||
ctx,
|
ctx,
|
||||||
service,
|
rh,
|
||||||
fetcher,
|
fibn,
|
||||||
itemData.UUID(),
|
itemData.UUID(),
|
||||||
itemData,
|
itemData,
|
||||||
drivePath.DriveID,
|
drivePath.DriveID,
|
||||||
restoreFolderID,
|
restoreFolderID,
|
||||||
copyBuffer,
|
copyBuffer)
|
||||||
source)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return itemInfo, clues.Wrap(err, "restoring file")
|
return itemInfo, clues.Wrap(err, "restoring file")
|
||||||
}
|
}
|
||||||
@ -468,17 +454,11 @@ func restoreV0File(
|
|||||||
return itemInfo, nil
|
return itemInfo, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
type fileFetcher interface {
|
|
||||||
Fetch(ctx context.Context, name string) (data.Stream, error)
|
|
||||||
}
|
|
||||||
|
|
||||||
func restoreV1File(
|
func restoreV1File(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
source driveSource,
|
rh RestoreHandler,
|
||||||
creds account.M365Config,
|
|
||||||
service graph.Servicer,
|
|
||||||
drivePath *path.DrivePath,
|
drivePath *path.DrivePath,
|
||||||
fetcher fileFetcher,
|
fibn data.FetchItemByNamer,
|
||||||
restoreFolderID string,
|
restoreFolderID string,
|
||||||
copyBuffer []byte,
|
copyBuffer []byte,
|
||||||
restorePerms bool,
|
restorePerms bool,
|
||||||
@ -490,14 +470,13 @@ func restoreV1File(
|
|||||||
|
|
||||||
itemID, itemInfo, err := restoreData(
|
itemID, itemInfo, err := restoreData(
|
||||||
ctx,
|
ctx,
|
||||||
service,
|
rh,
|
||||||
fetcher,
|
fibn,
|
||||||
trimmedName,
|
trimmedName,
|
||||||
itemData,
|
itemData,
|
||||||
drivePath.DriveID,
|
drivePath.DriveID,
|
||||||
restoreFolderID,
|
restoreFolderID,
|
||||||
copyBuffer,
|
copyBuffer)
|
||||||
source)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return details.ItemInfo{}, err
|
return details.ItemInfo{}, err
|
||||||
}
|
}
|
||||||
@ -511,15 +490,14 @@ func restoreV1File(
|
|||||||
// Fetch item permissions from the collection and restore them.
|
// Fetch item permissions from the collection and restore them.
|
||||||
metaName := trimmedName + metadata.MetaFileSuffix
|
metaName := trimmedName + metadata.MetaFileSuffix
|
||||||
|
|
||||||
meta, err := fetchAndReadMetadata(ctx, fetcher, metaName)
|
meta, err := fetchAndReadMetadata(ctx, fibn, metaName)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return details.ItemInfo{}, clues.Wrap(err, "restoring file")
|
return details.ItemInfo{}, clues.Wrap(err, "restoring file")
|
||||||
}
|
}
|
||||||
|
|
||||||
err = RestorePermissions(
|
err = RestorePermissions(
|
||||||
ctx,
|
ctx,
|
||||||
creds,
|
rh,
|
||||||
service,
|
|
||||||
drivePath.DriveID,
|
drivePath.DriveID,
|
||||||
itemID,
|
itemID,
|
||||||
itemPath,
|
itemPath,
|
||||||
@ -534,11 +512,9 @@ func restoreV1File(
|
|||||||
|
|
||||||
func restoreV6File(
|
func restoreV6File(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
source driveSource,
|
rh RestoreHandler,
|
||||||
creds account.M365Config,
|
|
||||||
service graph.Servicer,
|
|
||||||
drivePath *path.DrivePath,
|
drivePath *path.DrivePath,
|
||||||
fetcher fileFetcher,
|
fibn data.FetchItemByNamer,
|
||||||
restoreFolderID string,
|
restoreFolderID string,
|
||||||
copyBuffer []byte,
|
copyBuffer []byte,
|
||||||
restorePerms bool,
|
restorePerms bool,
|
||||||
@ -551,7 +527,7 @@ func restoreV6File(
|
|||||||
// Get metadata file so we can determine the file name.
|
// Get metadata file so we can determine the file name.
|
||||||
metaName := trimmedName + metadata.MetaFileSuffix
|
metaName := trimmedName + metadata.MetaFileSuffix
|
||||||
|
|
||||||
meta, err := fetchAndReadMetadata(ctx, fetcher, metaName)
|
meta, err := fetchAndReadMetadata(ctx, fibn, metaName)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return details.ItemInfo{}, clues.Wrap(err, "restoring file")
|
return details.ItemInfo{}, clues.Wrap(err, "restoring file")
|
||||||
}
|
}
|
||||||
@ -574,14 +550,13 @@ func restoreV6File(
|
|||||||
|
|
||||||
itemID, itemInfo, err := restoreData(
|
itemID, itemInfo, err := restoreData(
|
||||||
ctx,
|
ctx,
|
||||||
service,
|
rh,
|
||||||
fetcher,
|
fibn,
|
||||||
meta.FileName,
|
meta.FileName,
|
||||||
itemData,
|
itemData,
|
||||||
drivePath.DriveID,
|
drivePath.DriveID,
|
||||||
restoreFolderID,
|
restoreFolderID,
|
||||||
copyBuffer,
|
copyBuffer)
|
||||||
source)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return details.ItemInfo{}, err
|
return details.ItemInfo{}, err
|
||||||
}
|
}
|
||||||
@ -594,8 +569,7 @@ func restoreV6File(
|
|||||||
|
|
||||||
err = RestorePermissions(
|
err = RestorePermissions(
|
||||||
ctx,
|
ctx,
|
||||||
creds,
|
rh,
|
||||||
service,
|
|
||||||
drivePath.DriveID,
|
drivePath.DriveID,
|
||||||
itemID,
|
itemID,
|
||||||
itemPath,
|
itemPath,
|
||||||
@ -615,8 +589,7 @@ func restoreV6File(
|
|||||||
// folderCache is mutated, as a side effect of populating the items.
|
// folderCache is mutated, as a side effect of populating the items.
|
||||||
func CreateRestoreFolders(
|
func CreateRestoreFolders(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
creds account.M365Config,
|
rh RestoreHandler,
|
||||||
service graph.Servicer,
|
|
||||||
drivePath *path.DrivePath,
|
drivePath *path.DrivePath,
|
||||||
restoreDir *path.Builder,
|
restoreDir *path.Builder,
|
||||||
folderPath path.Path,
|
folderPath path.Path,
|
||||||
@ -626,7 +599,7 @@ func CreateRestoreFolders(
|
|||||||
) (string, error) {
|
) (string, error) {
|
||||||
id, err := createRestoreFolders(
|
id, err := createRestoreFolders(
|
||||||
ctx,
|
ctx,
|
||||||
service,
|
rh,
|
||||||
drivePath,
|
drivePath,
|
||||||
restoreDir,
|
restoreDir,
|
||||||
caches)
|
caches)
|
||||||
@ -645,8 +618,7 @@ func CreateRestoreFolders(
|
|||||||
|
|
||||||
err = RestorePermissions(
|
err = RestorePermissions(
|
||||||
ctx,
|
ctx,
|
||||||
creds,
|
rh,
|
||||||
service,
|
|
||||||
drivePath.DriveID,
|
drivePath.DriveID,
|
||||||
id,
|
id,
|
||||||
folderPath,
|
folderPath,
|
||||||
@ -656,12 +628,17 @@ func CreateRestoreFolders(
|
|||||||
return id, err
|
return id, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type folderRestorer interface {
|
||||||
|
GetFolderByNamer
|
||||||
|
PostItemInContainerer
|
||||||
|
}
|
||||||
|
|
||||||
// createRestoreFolders creates the restore folder hierarchy in the specified
|
// createRestoreFolders creates the restore folder hierarchy in the specified
|
||||||
// drive and returns the folder ID of the last folder entry in the hierarchy.
|
// drive and returns the folder ID of the last folder entry in the hierarchy.
|
||||||
// folderCache is mutated, as a side effect of populating the items.
|
// folderCache is mutated, as a side effect of populating the items.
|
||||||
func createRestoreFolders(
|
func createRestoreFolders(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
service graph.Servicer,
|
fr folderRestorer,
|
||||||
drivePath *path.DrivePath,
|
drivePath *path.DrivePath,
|
||||||
restoreDir *path.Builder,
|
restoreDir *path.Builder,
|
||||||
caches *restoreCaches,
|
caches *restoreCaches,
|
||||||
@ -692,7 +669,7 @@ func createRestoreFolders(
|
|||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
folderItem, err := api.GetFolderByName(ictx, service, driveID, parentFolderID, folder)
|
folderItem, err := fr.GetFolderByName(ictx, driveID, parentFolderID, folder)
|
||||||
if err != nil && !errors.Is(err, api.ErrFolderNotFound) {
|
if err != nil && !errors.Is(err, api.ErrFolderNotFound) {
|
||||||
return "", clues.Wrap(err, "getting folder by display name")
|
return "", clues.Wrap(err, "getting folder by display name")
|
||||||
}
|
}
|
||||||
@ -706,7 +683,7 @@ func createRestoreFolders(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// create the folder if not found
|
// create the folder if not found
|
||||||
folderItem, err = CreateItem(ictx, service, driveID, parentFolderID, newItem(folder, true))
|
folderItem, err = fr.PostItemInContainer(ictx, driveID, parentFolderID, newItem(folder, true))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", clues.Wrap(err, "creating folder")
|
return "", clues.Wrap(err, "creating folder")
|
||||||
}
|
}
|
||||||
@ -720,16 +697,21 @@ func createRestoreFolders(
|
|||||||
return parentFolderID, nil
|
return parentFolderID, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type itemRestorer interface {
|
||||||
|
ItemInfoAugmenter
|
||||||
|
NewItemContentUploader
|
||||||
|
PostItemInContainerer
|
||||||
|
}
|
||||||
|
|
||||||
// restoreData will create a new item in the specified `parentFolderID` and upload the data.Stream
|
// restoreData will create a new item in the specified `parentFolderID` and upload the data.Stream
|
||||||
func restoreData(
|
func restoreData(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
service graph.Servicer,
|
ir itemRestorer,
|
||||||
fetcher fileFetcher,
|
fibn data.FetchItemByNamer,
|
||||||
name string,
|
name string,
|
||||||
itemData data.Stream,
|
itemData data.Stream,
|
||||||
driveID, parentFolderID string,
|
driveID, parentFolderID string,
|
||||||
copyBuffer []byte,
|
copyBuffer []byte,
|
||||||
source driveSource,
|
|
||||||
) (string, details.ItemInfo, error) {
|
) (string, details.ItemInfo, error) {
|
||||||
ctx, end := diagnostics.Span(ctx, "gc:oneDrive:restoreItem", diagnostics.Label("item_uuid", itemData.UUID()))
|
ctx, end := diagnostics.Span(ctx, "gc:oneDrive:restoreItem", diagnostics.Label("item_uuid", itemData.UUID()))
|
||||||
defer end()
|
defer end()
|
||||||
@ -743,17 +725,15 @@ func restoreData(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Create Item
|
// Create Item
|
||||||
newItem, err := CreateItem(ctx, service, driveID, parentFolderID, newItem(name, false))
|
newItem, err := ir.PostItemInContainer(ctx, driveID, parentFolderID, newItem(name, false))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", details.ItemInfo{}, err
|
return "", details.ItemInfo{}, err
|
||||||
}
|
}
|
||||||
|
|
||||||
itemID := ptr.Val(newItem.GetId())
|
// Get a drive item writer
|
||||||
ctx = clues.Add(ctx, "upload_item_id", itemID)
|
w, uploadURL, err := driveItemWriter(ctx, ir, driveID, ptr.Val(newItem.GetId()), ss.Size())
|
||||||
|
|
||||||
r, err := api.PostDriveItem(ctx, service, driveID, itemID)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", details.ItemInfo{}, clues.Wrap(err, "get upload session")
|
return "", details.ItemInfo{}, clues.Wrap(err, "get item upload session")
|
||||||
}
|
}
|
||||||
|
|
||||||
var written int64
|
var written int64
|
||||||
@ -765,12 +745,6 @@ func restoreData(
|
|||||||
// show "register" any partial file uploads and so if we fail an
|
// show "register" any partial file uploads and so if we fail an
|
||||||
// upload the file size will be 0.
|
// upload the file size will be 0.
|
||||||
for i := 0; i <= maxUploadRetries; i++ {
|
for i := 0; i <= maxUploadRetries; i++ {
|
||||||
// Initialize and return an io.Writer to upload data for the
|
|
||||||
// specified item It does so by creating an upload session and
|
|
||||||
// using that URL to initialize an `itemWriter`
|
|
||||||
// TODO: @vkamra verify if var session is the desired input
|
|
||||||
w := graph.NewLargeItemWriter(itemID, ptr.Val(r.GetUploadUrl()), ss.Size())
|
|
||||||
|
|
||||||
pname := name
|
pname := name
|
||||||
iReader := itemData.ToReader()
|
iReader := itemData.ToReader()
|
||||||
|
|
||||||
@ -780,7 +754,7 @@ func restoreData(
|
|||||||
// If it is not the first try, we have to pull the file
|
// If it is not the first try, we have to pull the file
|
||||||
// again from kopia. Ideally we could just seek the stream
|
// again from kopia. Ideally we could just seek the stream
|
||||||
// but we don't have a Seeker available here.
|
// but we don't have a Seeker available here.
|
||||||
itemData, err := fetcher.Fetch(ctx, itemData.UUID())
|
itemData, err := fibn.FetchItemByName(ctx, itemData.UUID())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", details.ItemInfo{}, clues.Wrap(err, "get data file")
|
return "", details.ItemInfo{}, clues.Wrap(err, "get data file")
|
||||||
}
|
}
|
||||||
@ -803,32 +777,29 @@ func restoreData(
|
|||||||
|
|
||||||
// clear out the bar if err
|
// clear out the bar if err
|
||||||
abort()
|
abort()
|
||||||
|
|
||||||
|
// refresh the io.Writer to restart the upload
|
||||||
|
// TODO: @vkamra verify if var session is the desired input
|
||||||
|
w = graph.NewLargeItemWriter(ptr.Val(newItem.GetId()), uploadURL, ss.Size())
|
||||||
}
|
}
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", details.ItemInfo{}, clues.Wrap(err, "uploading file")
|
return "", details.ItemInfo{}, clues.Wrap(err, "uploading file")
|
||||||
}
|
}
|
||||||
|
|
||||||
dii := details.ItemInfo{}
|
dii := ir.AugmentItemInfo(details.ItemInfo{}, newItem, written, nil)
|
||||||
|
|
||||||
switch source {
|
|
||||||
case SharePointSource:
|
|
||||||
dii.SharePoint = sharePointItemInfo(newItem, written)
|
|
||||||
default:
|
|
||||||
dii.OneDrive = oneDriveItemInfo(newItem, written)
|
|
||||||
}
|
|
||||||
|
|
||||||
return ptr.Val(newItem.GetId()), dii, nil
|
return ptr.Val(newItem.GetId()), dii, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func fetchAndReadMetadata(
|
func fetchAndReadMetadata(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
fetcher fileFetcher,
|
fibn data.FetchItemByNamer,
|
||||||
metaName string,
|
metaName string,
|
||||||
) (metadata.Metadata, error) {
|
) (metadata.Metadata, error) {
|
||||||
ctx = clues.Add(ctx, "meta_file_name", metaName)
|
ctx = clues.Add(ctx, "meta_file_name", metaName)
|
||||||
|
|
||||||
metaFile, err := fetcher.Fetch(ctx, metaName)
|
metaFile, err := fibn.FetchItemByName(ctx, metaName)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return metadata.Metadata{}, clues.Wrap(err, "getting item metadata")
|
return metadata.Metadata{}, clues.Wrap(err, "getting item metadata")
|
||||||
}
|
}
|
||||||
|
|||||||
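Note: the recurring move in the hunks above is swapping the wide graph.Servicer and account.M365Config parameters for small, consumer-defined interfaces (RestoreHandler, folderRestorer, itemRestorer, data.FetchItemByNamer). A minimal, self-contained sketch of that pattern follows; every type in it is an illustrative stand-in, not one of Corso's real definitions.

package main

import (
	"context"
	"fmt"
)

// Item is an illustrative stand-in for a drive item.
type Item struct{ ID, Name string }

// itemCreator is a narrow, consumer-defined interface: the restore code
// declares only the single Graph call it needs, so tests can substitute
// a fake without constructing a full Graph client or credentials.
type itemCreator interface {
	PostItemInContainer(ctx context.Context, driveID, parentID string, newItem Item) (Item, error)
}

// restoreFolder depends on the small interface instead of a concrete client.
func restoreFolder(ctx context.Context, ic itemCreator, driveID, parentID, name string) (string, error) {
	item, err := ic.PostItemInContainer(ctx, driveID, parentID, Item{Name: name})
	if err != nil {
		return "", fmt.Errorf("creating folder: %w", err)
	}

	return item.ID, nil
}

// fakeCreator satisfies itemCreator in tests with no network access.
type fakeCreator struct{}

func (fakeCreator) PostItemInContainer(_ context.Context, _, _ string, ni Item) (Item, error) {
	return Item{ID: "fake-id", Name: ni.Name}, nil
}

func main() {
	id, err := restoreFolder(context.Background(), fakeCreator{}, "drive-id", "root", "restored-folder")
	fmt.Println(id, err) // fake-id <nil>
}

Because the function depends only on the method it actually calls, tests can hand it a fake with no adapter or credentials; that is what lets the next file delete MockGraphService outright.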
@@ -4,55 +4,29 @@ import (
 	"testing"

 	"github.com/alcionai/clues"
-	msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
 	"github.com/stretchr/testify/require"

-	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/connector/support"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/account"
+	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )

-type MockGraphService struct{}
-
-func (ms *MockGraphService) Client() *msgraphsdk.GraphServiceClient {
-	return nil
-}
-
-func (ms *MockGraphService) Adapter() *msgraphsdk.GraphRequestAdapter {
-	return nil
-}
-
-var _ graph.Servicer = &oneDriveService{}
-
 // TODO(ashmrtn): Merge with similar structs in graph and exchange packages.
 type oneDriveService struct {
-	client      msgraphsdk.GraphServiceClient
-	adapter     msgraphsdk.GraphRequestAdapter
 	credentials account.M365Config
 	status      support.ConnectorOperationStatus
-}
-
-func (ods *oneDriveService) Client() *msgraphsdk.GraphServiceClient {
-	return &ods.client
-}
-
-func (ods *oneDriveService) Adapter() *msgraphsdk.GraphRequestAdapter {
-	return &ods.adapter
+	ac          api.Client
 }

 func NewOneDriveService(credentials account.M365Config) (*oneDriveService, error) {
-	adapter, err := graph.CreateAdapter(
-		credentials.AzureTenantID,
-		credentials.AzureClientID,
-		credentials.AzureClientSecret)
+	ac, err := api.NewClient(credentials)
 	if err != nil {
 		return nil, err
 	}

 	service := oneDriveService{
-		adapter:     *adapter,
-		client:      *msgraphsdk.NewGraphServiceClient(adapter),
+		ac:          ac,
 		credentials: credentials,
 	}

@@ -70,10 +44,10 @@ func (ods *oneDriveService) updateStatus(status *support.ConnectorOperationStatu
 func loadTestService(t *testing.T) *oneDriveService {
 	a := tester.NewM365Account(t)

-	m365, err := a.M365Config()
+	creds, err := a.M365Config()
 	require.NoError(t, err, clues.ToCore(err))

-	service, err := NewOneDriveService(m365)
+	service, err := NewOneDriveService(creds)
 	require.NoError(t, err, clues.ToCore(err))

 	return service
src/internal/connector/onedrive/testdata/item.go (new file, 32 lines, vendored)
@@ -0,0 +1,32 @@
+package testdata
+
+import (
+	"time"
+
+	"github.com/microsoftgraph/msgraph-sdk-go/models"
+)
+
+func NewStubDriveItem(
+	id, name string,
+	size int64,
+	created, modified time.Time,
+	isFile, isShared bool,
+) models.DriveItemable {
+	stubItem := models.NewDriveItem()
+	stubItem.SetId(&id)
+	stubItem.SetName(&name)
+	stubItem.SetSize(&size)
+	stubItem.SetCreatedDateTime(&created)
+	stubItem.SetLastModifiedDateTime(&modified)
+	stubItem.SetAdditionalData(map[string]any{"@microsoft.graph.downloadUrl": "https://corsobackup.io"})
+
+	if isFile {
+		stubItem.SetFile(models.NewFile())
+	}
+
+	if isShared {
+		stubItem.SetShared(&models.Shared{})
+	}
+
+	return stubItem
+}
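A quick usage sketch for the stub builder above. The test name and every value in it are hypothetical; only NewStubDriveItem's signature and package path come from this file.

package example

import (
	"testing"
	"time"

	"github.com/alcionai/corso/src/internal/connector/onedrive/testdata"
)

// TestStubItem shows the stub builder in a typical test setup; the ID,
// name, size, and timestamps are arbitrary placeholder values.
func TestStubItem(t *testing.T) {
	now := time.Now()

	// a regular (non-shared) file created an hour ago
	item := testdata.NewStubDriveItem(
		"item-id-1", "report.docx",
		1024,
		now.Add(-time.Hour), now,
		true,  // isFile: attach a file facet
		false) // isShared: leave the shared facet unset

	if item.GetFile() == nil {
		t.Error("expected the stub to carry a file facet")
	}
}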
@@ -9,9 +9,9 @@ import (
 	"github.com/microsoftgraph/msgraph-sdk-go/models"

 	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/logger"
+	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )

 type itemProps struct {

@@ -31,8 +31,7 @@ type urlCache struct {
 	refreshMu       sync.Mutex
 	deltaQueryCount int

-	svc           graph.Servicer
-	itemPagerFunc driveItemPagerFunc
+	itemPager api.DriveItemEnumerator

 	errors *fault.Bus
 }

@@ -41,15 +40,13 @@ type urlCache struct {
 func newURLCache(
 	driveID string,
 	refreshInterval time.Duration,
-	svc graph.Servicer,
+	itemPager api.DriveItemEnumerator,
 	errors *fault.Bus,
-	itemPagerFunc driveItemPagerFunc,
 ) (*urlCache, error) {
 	err := validateCacheParams(
 		driveID,
 		refreshInterval,
-		svc,
-		itemPagerFunc)
+		itemPager)
 	if err != nil {
 		return nil, clues.Wrap(err, "cache params")
 	}

@@ -59,8 +56,7 @@ func newURLCache(
 			lastRefreshTime: time.Time{},
 			driveID:         driveID,
 			refreshInterval: refreshInterval,
-			svc:             svc,
-			itemPagerFunc:   itemPagerFunc,
+			itemPager:       itemPager,
 			errors:          errors,
 		},
 		nil

@@ -70,8 +66,7 @@ func newURLCache(
 func validateCacheParams(
 	driveID string,
 	refreshInterval time.Duration,
-	svc graph.Servicer,
-	itemPagerFunc driveItemPagerFunc,
+	itemPager api.DriveItemEnumerator,
 ) error {
 	if len(driveID) == 0 {
 		return clues.New("drive id is empty")

@@ -81,11 +76,7 @@ func validateCacheParams(
 		return clues.New("invalid refresh interval")
 	}

-	if svc == nil {
-		return clues.New("nil graph servicer")
-	}
-
-	if itemPagerFunc == nil {
+	if itemPager == nil {
 		return clues.New("nil item pager")
 	}

@@ -174,7 +165,7 @@ func (uc *urlCache) deltaQuery(

 	_, _, _, err := collectItems(
 		ctx,
-		uc.itemPagerFunc(uc.svc, uc.driveID, ""),
+		uc.itemPager,
 		uc.driveID,
 		"",
 		uc.updateCache,
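With the pager injected, wiring the cache up looks roughly like the sketch below, which mirrors the call sites in the updated integration test that follows. buildCache and its parameters are illustrative stand-ins; newURLCache, refreshCache, NewItemPager, and DriveItemSelectDefault are the identifiers from this diff, and the cache internals are unexported, so this would only compile inside the package.

// buildCache is a hypothetical helper showing the new wiring.
func buildCache(ctx context.Context, ac api.Client, driveID string) (*urlCache, error) {
	// the pager is built once and injected, so unit tests can hand the
	// cache a fake api.DriveItemEnumerator instead of a live Graph client
	pager := ac.Drives().NewItemPager(driveID, "", api.DriveItemSelectDefault())

	cache, err := newURLCache(
		driveID,
		1*time.Hour, // refresh interval
		pager,       // was: graph.Servicer + driveItemPagerFunc
		fault.New(true))
	if err != nil {
		return nil, err
	}

	// populate explicitly before first use, as the integration test below does
	if err := cache.refreshCache(ctx); err != nil {
		return nil, err
	}

	return cache, nil
}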
@@ -16,13 +16,12 @@ import (
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/fault"
-	"github.com/alcionai/corso/src/pkg/logger"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )

 type URLCacheIntegrationSuite struct {
 	tester.Suite
-	service graph.Servicer
+	ac      api.Client
 	user    string
 	driveID string
 }

@@ -41,69 +40,60 @@ func (suite *URLCacheIntegrationSuite) SetupSuite() {
 	ctx, flush := tester.NewContext(t)
 	defer flush()

-	suite.service = loadTestService(t)
 	suite.user = tester.SecondaryM365UserID(t)

-	pager, err := PagerForSource(OneDriveSource, suite.service, suite.user, nil)
+	acct := tester.NewM365Account(t)
+
+	creds, err := acct.M365Config()
 	require.NoError(t, err, clues.ToCore(err))

-	odDrives, err := api.GetAllDrives(ctx, pager, true, maxDrivesRetries)
+	suite.ac, err = api.NewClient(creds)
 	require.NoError(t, err, clues.ToCore(err))
-	require.Greaterf(t, len(odDrives), 0, "user %s does not have a drive", suite.user)
-	suite.driveID = ptr.Val(odDrives[0].GetId())
+
+	drive, err := suite.ac.Users().GetDefaultDrive(ctx, suite.user)
+	require.NoError(t, err, clues.ToCore(err))
+
+	suite.driveID = ptr.Val(drive.GetId())
 }

 // Basic test for urlCache. Create some files in onedrive, then access them via
 // url cache
 func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
-	t := suite.T()
+	var (
+		t              = suite.T()
+		ac             = suite.ac.Drives()
+		driveID        = suite.driveID
+		newFolderName  = tester.DefaultTestRestoreDestination("folder").ContainerName
+		driveItemPager = suite.ac.Drives().NewItemPager(driveID, "", api.DriveItemSelectDefault())
+	)

 	ctx, flush := tester.NewContext(t)
 	defer flush()

-	svc := suite.service
-	driveID := suite.driveID
-
 	// Create a new test folder
-	root, err := svc.Client().Drives().ByDriveId(driveID).Root().Get(ctx, nil)
+	root, err := ac.GetRootFolder(ctx, driveID)
 	require.NoError(t, err, clues.ToCore(err))

-	newFolderName := tester.DefaultTestRestoreDestination("folder").ContainerName
-
-	newFolder, err := CreateItem(
+	newFolder, err := ac.Drives().PostItemInContainer(
 		ctx,
-		svc,
 		driveID,
 		ptr.Val(root.GetId()),
 		newItem(newFolderName, true))
 	require.NoError(t, err, clues.ToCore(err))
 	require.NotNil(t, newFolder.GetId())

-	// Delete folder on exit
-	defer func() {
-		ictx := clues.Add(ctx, "folder_id", ptr.Val(newFolder.GetId()))
-
-		err := api.DeleteDriveItem(
-			ictx,
-			loadTestService(t),
-			driveID,
-			ptr.Val(newFolder.GetId()))
-		if err != nil {
-			logger.CtxErr(ictx, err).Errorw("deleting folder")
-		}
-	}()
+	nfid := ptr.Val(newFolder.GetId())

 	// Create a bunch of files in the new folder
 	var items []models.DriveItemable

 	for i := 0; i < 10; i++ {
-		newItemName := "testItem_" + dttm.FormatNow(dttm.SafeForTesting)
+		newItemName := "test_url_cache_basic_" + dttm.FormatNow(dttm.SafeForTesting)

-		item, err := CreateItem(
+		item, err := ac.Drives().PostItemInContainer(
 			ctx,
-			svc,
 			driveID,
-			ptr.Val(newFolder.GetId()),
+			nfid,
 			newItem(newItemName, false))
 		if err != nil {
 			// Something bad happened, skip this item

@@ -117,12 +107,14 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
 	cache, err := newURLCache(
 		suite.driveID,
 		1*time.Hour,
-		svc,
-		fault.New(true),
-		defaultItemPager)
+		driveItemPager,
+		fault.New(true))

 	require.NoError(t, err, clues.ToCore(err))

+	err = cache.refreshCache(ctx)
+	require.NoError(t, err, clues.ToCore(err))
+
 	// Launch parallel requests to the cache, one per item
 	var wg sync.WaitGroup
 	for i := 0; i < len(items); i++ {
@@ -54,7 +54,7 @@ type Collection struct {
 	jobs []string
 	// M365 IDs of the items of this collection
 	category      DataCategory
-	service       graph.Servicer
+	client        api.Sites
 	ctrl          control.Options
 	betaService   *betaAPI.BetaService
 	statusUpdater support.StatusUpdater

@@ -63,7 +63,7 @@ type Collection struct {
 // NewCollection helper function for creating a Collection
 func NewCollection(
 	folderPath path.Path,
-	service graph.Servicer,
+	ac api.Client,
 	category DataCategory,
 	statusUpdater support.StatusUpdater,
 	ctrlOpts control.Options,

@@ -72,7 +72,7 @@ func NewCollection(
 		fullPath:      folderPath,
 		jobs:          make([]string, 0),
 		data:          make(chan data.Stream, collectionChannelBufferSize),
-		service:       service,
+		client:        ac.Sites(),
 		statusUpdater: statusUpdater,
 		category:      category,
 		ctrl:          ctrlOpts,

@@ -175,7 +175,10 @@ func (sc *Collection) populate(ctx context.Context, errs *fault.Bus) {
 	sc.finishPopulation(ctx, metrics)
 }

-func (sc *Collection) runPopulate(ctx context.Context, errs *fault.Bus) (support.CollectionMetrics, error) {
+func (sc *Collection) runPopulate(
+	ctx context.Context,
+	errs *fault.Bus,
+) (support.CollectionMetrics, error) {
 	var (
 		err     error
 		metrics support.CollectionMetrics

@@ -197,7 +200,7 @@ func (sc *Collection) runPopulate(ctx context.Context, errs *fault.Bus) (support
 	case List:
 		metrics, err = sc.retrieveLists(ctx, writer, colProgress, errs)
 	case Pages:
-		metrics, err = sc.retrievePages(ctx, writer, colProgress, errs)
+		metrics, err = sc.retrievePages(ctx, sc.client, writer, colProgress, errs)
 	}

 	return metrics, err

@@ -216,7 +219,12 @@ func (sc *Collection) retrieveLists(
 		el = errs.Local()
 	)

-	lists, err := loadSiteLists(ctx, sc.service, sc.fullPath.ResourceOwner(), sc.jobs, errs)
+	lists, err := loadSiteLists(
+		ctx,
+		sc.client.Stable,
+		sc.fullPath.ResourceOwner(),
+		sc.jobs,
+		errs)
 	if err != nil {
 		return metrics, err
 	}

@@ -262,6 +270,7 @@ func (sc *Collection) retrieveLists(

 func (sc *Collection) retrievePages(
 	ctx context.Context,
+	as api.Sites,
 	wtr *kjson.JsonSerializationWriter,
 	progress chan<- struct{},
 	errs *fault.Bus,

@@ -276,7 +285,7 @@ func (sc *Collection) retrievePages(
 		return metrics, clues.New("beta service required").WithClues(ctx)
 	}

-	parent, err := api.GetSite(ctx, sc.service, sc.fullPath.ResourceOwner())
+	parent, err := as.GetByID(ctx, sc.fullPath.ResourceOwner())
 	if err != nil {
 		return metrics, err
 	}
@@ -13,7 +13,7 @@ import (
 	"github.com/stretchr/testify/suite"

 	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/internal/connector/sharepoint/api"
+	betaAPI "github.com/alcionai/corso/src/internal/connector/sharepoint/api"
 	spMock "github.com/alcionai/corso/src/internal/connector/sharepoint/mock"
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/tester"

@@ -21,12 +21,14 @@ import (
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
+	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )

 type SharePointCollectionSuite struct {
 	tester.Suite
 	siteID string
 	creds  account.M365Config
+	ac     api.Client
 }

 func (suite *SharePointCollectionSuite) SetupSuite() {

@@ -38,6 +40,11 @@ func (suite *SharePointCollectionSuite) SetupSuite() {
 	require.NoError(t, err, clues.ToCore(err))

 	suite.creds = m365
+
+	ac, err := api.NewClient(m365)
+	require.NoError(t, err, clues.ToCore(err))
+
+	suite.ac = ac
 }

 func TestSharePointCollectionSuite(t *testing.T) {

@@ -67,9 +74,12 @@ func (suite *SharePointCollectionSuite) TestCollection_Item_Read() {
 // TestListCollection tests basic functionality to create
 // SharePoint collection and to use the data stream channel.
 func (suite *SharePointCollectionSuite) TestCollection_Items() {
-	tenant := "some"
-	user := "user"
-	dirRoot := "directory"
+	var (
+		tenant  = "some"
+		user    = "user"
+		dirRoot = "directory"
+	)

 	tables := []struct {
 		name, itemName string
 		category       DataCategory

@@ -130,13 +140,13 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
 			},
 			getItem: func(t *testing.T, itemName string) *Item {
 				byteArray := spMock.Page(itemName)
-				page, err := api.CreatePageFromBytes(byteArray)
+				page, err := betaAPI.CreatePageFromBytes(byteArray)
 				require.NoError(t, err, clues.ToCore(err))

 				data := &Item{
 					id:   itemName,
 					data: io.NopCloser(bytes.NewReader(byteArray)),
-					info: api.PageInfo(page, int64(len(byteArray))),
+					info: betaAPI.PageInfo(page, int64(len(byteArray))),
 				}

 				return data

@@ -151,7 +161,12 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
 			ctx, flush := tester.NewContext(t)
 			defer flush()

-			col := NewCollection(test.getDir(t), nil, test.category, nil, control.Defaults())
+			col := NewCollection(
+				test.getDir(t),
+				suite.ac,
+				test.category,
+				nil,
+				control.Defaults())
 			col.data <- test.getItem(t, test.itemName)

 			readItems := []data.Stream{}
@@ -19,6 +19,7 @@ import (
 	"github.com/alcionai/corso/src/pkg/logger"
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/selectors"
+	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )

 type statusUpdater interface {

@@ -29,19 +30,18 @@ type statusUpdater interface {
 // for the specified user
 func DataCollections(
 	ctx context.Context,
-	itemClient graph.Requester,
+	ac api.Client,
 	selector selectors.Selector,
 	site idname.Provider,
 	metadata []data.RestoreCollection,
 	creds account.M365Config,
-	serv graph.Servicer,
 	su statusUpdater,
 	ctrlOpts control.Options,
 	errs *fault.Bus,
-) ([]data.BackupCollection, *prefixmatcher.StringSetMatcher, error) {
+) ([]data.BackupCollection, *prefixmatcher.StringSetMatcher, bool, error) {
 	b, err := selector.ToSharePointBackup()
 	if err != nil {
-		return nil, nil, clues.Wrap(err, "sharePointDataCollection: parsing selector")
+		return nil, nil, false, clues.Wrap(err, "sharePointDataCollection: parsing selector")
 	}

 	ctx = clues.Add(

@@ -50,10 +50,11 @@ func DataCollections(
 		"site_url", clues.Hide(site.Name()))

 	var (
 		el          = errs.Local()
 		collections = []data.BackupCollection{}
 		categories  = map[path.CategoryType]struct{}{}
 		ssmb        = prefixmatcher.NewStringSetBuilder()
+		canUsePreviousBackup bool
 	)

 	for _, scope := range b.Scopes() {

@@ -72,7 +73,7 @@ func DataCollections(
 		case path.ListsCategory:
 			spcs, err = collectLists(
 				ctx,
-				serv,
+				ac,
 				creds.AzureTenantID,
 				site,
 				su,

@@ -83,11 +84,14 @@ func DataCollections(
 				continue
 			}

+			// Lists don't make use of previous metadata
+			// TODO: Revisit when we add support of lists
+			canUsePreviousBackup = true
+
 		case path.LibrariesCategory:
-			spcs, err = collectLibraries(
+			spcs, canUsePreviousBackup, err = collectLibraries(
 				ctx,
-				itemClient,
-				serv,
+				ac.Drives(),
 				creds.AzureTenantID,
 				site,
 				metadata,

@@ -105,7 +109,7 @@ func DataCollections(
 			spcs, err = collectPages(
 				ctx,
 				creds,
-				serv,
+				ac,
 				site,
 				su,
 				ctrlOpts,

@@ -114,6 +118,10 @@ func DataCollections(
 				el.AddRecoverable(err)
 				continue
 			}
+
+			// Lists don't make use of previous metadata
+			// TODO: Revisit when we add support of pages
+			canUsePreviousBackup = true
 		}

 		collections = append(collections, spcs...)

@@ -133,18 +141,18 @@ func DataCollections(
 			su.UpdateStatus,
 			errs)
 		if err != nil {
-			return nil, nil, err
+			return nil, nil, false, err
 		}

 		collections = append(collections, baseCols...)
 	}

-	return collections, ssmb.ToReader(), el.Failure()
+	return collections, ssmb.ToReader(), canUsePreviousBackup, el.Failure()
 }

 func collectLists(
 	ctx context.Context,
-	serv graph.Servicer,
+	ac api.Client,
 	tenantID string,
 	site idname.Provider,
 	updater statusUpdater,

@@ -158,7 +166,7 @@ func collectLists(
 		spcs = make([]data.BackupCollection, 0)
 	)

-	lists, err := preFetchLists(ctx, serv, site.ID())
+	lists, err := preFetchLists(ctx, ac.Stable, site.ID())
 	if err != nil {
 		return nil, err
 	}

@@ -179,7 +187,12 @@ func collectLists(
 			el.AddRecoverable(clues.Wrap(err, "creating list collection path").WithClues(ctx))
 		}

-		collection := NewCollection(dir, serv, List, updater.UpdateStatus, ctrlOpts)
+		collection := NewCollection(
+			dir,
+			ac,
+			List,
+			updater.UpdateStatus,
+			ctrlOpts)
 		collection.AddJob(tuple.id)

 		spcs = append(spcs, collection)

@@ -192,8 +205,7 @@ func collectLists(
 // all the drives associated with the site.
 func collectLibraries(
 	ctx context.Context,
-	itemClient graph.Requester,
-	serv graph.Servicer,
+	ad api.Drives,
 	tenantID string,
 	site idname.Provider,
 	metadata []data.RestoreCollection,

@@ -202,28 +214,26 @@ func collectLibraries(
 	updater statusUpdater,
 	ctrlOpts control.Options,
 	errs *fault.Bus,
-) ([]data.BackupCollection, error) {
+) ([]data.BackupCollection, bool, error) {
 	logger.Ctx(ctx).Debug("creating SharePoint Library collections")

 	var (
 		collections = []data.BackupCollection{}
 		colls       = onedrive.NewCollections(
-			itemClient,
+			&libraryBackupHandler{ad},
 			tenantID,
 			site.ID(),
-			onedrive.SharePointSource,
 			folderMatcher{scope},
-			serv,
 			updater.UpdateStatus,
 			ctrlOpts)
 	)

-	odcs, err := colls.Get(ctx, metadata, ssmb, errs)
+	odcs, canUsePreviousBackup, err := colls.Get(ctx, metadata, ssmb, errs)
 	if err != nil {
-		return nil, graph.Wrap(ctx, err, "getting library")
+		return nil, false, graph.Wrap(ctx, err, "getting library")
 	}

-	return append(collections, odcs...), nil
+	return append(collections, odcs...), canUsePreviousBackup, nil
 }

 // collectPages constructs a sharepoint Collections struct and Get()s the associated

@@ -231,7 +241,7 @@ func collectLibraries(
 func collectPages(
 	ctx context.Context,
 	creds account.M365Config,
-	serv graph.Servicer,
+	ac api.Client,
 	site idname.Provider,
 	updater statusUpdater,
 	ctrlOpts control.Options,

@@ -277,7 +287,7 @@ func collectPages(
 			el.AddRecoverable(clues.Wrap(err, "creating page collection path").WithClues(ctx))
 		}

-		collection := NewCollection(dir, serv, Pages, updater.UpdateStatus, ctrlOpts)
+		collection := NewCollection(
+			dir,
+			ac,
+			Pages,
+			updater.UpdateStatus,
+			ctrlOpts)
 		collection.betaService = betaService
 		collection.AddJob(tuple.ID)
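DataCollections now also reports whether previous-backup metadata is usable. A hedged sketch of a call site handling the widened return; only the signature comes from this change, and the surrounding variable names (ac, sel, site, metadata, creds, su, opts, errs) are stand-ins.

// Hypothetical call site illustrating the extra bool return.
colls, excludes, canUsePreviousBackup, err := sharepoint.DataCollections(
	ctx,
	ac,
	sel,
	site,
	metadata,
	creds,
	su,
	opts,
	errs)
if err != nil {
	return err
}

if !canUsePreviousBackup {
	// presumably the caller falls back to a non-incremental pass when no
	// category produced usable metadata
	logger.Ctx(ctx).Info("previous backup metadata unusable; running full backup")
}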
@@ -10,21 +10,24 @@ import (
 	"github.com/stretchr/testify/suite"

 	"github.com/alcionai/corso/src/internal/common/idname/mock"
-	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/connector/onedrive"
+	odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/fault"
+	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/selectors"
+	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )

 // ---------------------------------------------------------------------------
 // consts
 // ---------------------------------------------------------------------------

-const (
-	testBaseDrivePath = "drives/driveID1/root:"
-)
+var testBaseDrivePath = path.Builder{}.Append(
+	odConsts.DrivesPathDir,
+	"driveID1",
+	odConsts.RootPathDir)

 type testFolderMatcher struct {
 	scope selectors.SharePointScope

@@ -34,8 +37,8 @@ func (fm testFolderMatcher) IsAny() bool {
 	return fm.scope.IsAny(selectors.SharePointLibraryFolder)
 }

-func (fm testFolderMatcher) Matches(path string) bool {
-	return fm.scope.Matches(selectors.SharePointLibraryFolder, path)
+func (fm testFolderMatcher) Matches(p string) bool {
+	return fm.scope.Matches(selectors.SharePointLibraryFolder, p)
 }

 // ---------------------------------------------------------------------------

@@ -54,11 +57,15 @@ func (suite *SharePointLibrariesUnitSuite) TestUpdateCollections() {
 	anyFolder := (&selectors.SharePointBackup{}).LibraryFolders(selectors.Any())[0]

 	const (
-		tenant  = "tenant"
+		tenantID = "tenant"
 		site     = "site"
 		driveID  = "driveID1"
 	)

+	pb := path.Builder{}.Append(testBaseDrivePath.Elements()...)
+	ep, err := libraryBackupHandler{}.CanonicalPath(pb, tenantID, site)
+	require.NoError(suite.T(), err, clues.ToCore(err))
+
 	tests := []struct {
 		testCase string
 		items    []models.DriveItemable

@@ -73,21 +80,16 @@ func (suite *SharePointLibrariesUnitSuite) TestUpdateCollections() {
 		{
 			testCase: "Single File",
 			items: []models.DriveItemable{
-				driveRootItem("root"),
-				driveItem("file", testBaseDrivePath, "root", true),
+				driveRootItem(odConsts.RootID),
+				driveItem("file", testBaseDrivePath.String(), odConsts.RootID, true),
 			},
 			scope:  anyFolder,
 			expect: assert.NoError,
-			expectedCollectionIDs: []string{"root"},
-			expectedCollectionPaths: expectedPathAsSlice(
-				suite.T(),
-				tenant,
-				site,
-				testBaseDrivePath,
-			),
-			expectedItemCount:      1,
-			expectedFileCount:      1,
-			expectedContainerCount: 1,
+			expectedCollectionIDs:   []string{odConsts.RootID},
+			expectedCollectionPaths: []string{ep.String()},
+			expectedItemCount:       1,
+			expectedFileCount:       1,
+			expectedContainerCount:  1,
 		},
 	}

@@ -111,12 +113,10 @@ func (suite *SharePointLibrariesUnitSuite) TestUpdateCollections() {
 			)

 			c := onedrive.NewCollections(
-				graph.NewNoTimeoutHTTPWrapper(),
-				tenant,
+				&libraryBackupHandler{api.Drives{}},
+				tenantID,
 				site,
-				onedrive.SharePointSource,
 				testFolderMatcher{test.scope},
-				&MockGraphService{},
 				nil,
 				control.Defaults())

@@ -203,13 +203,16 @@ func (suite *SharePointPagesSuite) TestCollectPages() {
 		a = tester.NewM365Account(t)
 	)

-	account, err := a.M365Config()
+	creds, err := a.M365Config()
+	require.NoError(t, err, clues.ToCore(err))
+
+	ac, err := api.NewClient(creds)
 	require.NoError(t, err, clues.ToCore(err))

 	col, err := collectPages(
 		ctx,
-		account,
-		nil,
+		creds,
+		ac,
 		mock.NewProvider(siteID, siteID),
 		&MockGraphService{},
 		control.Defaults(),
|
|||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
|
|
||||||
"github.com/alcionai/corso/src/internal/connector/graph"
|
"github.com/alcionai/corso/src/internal/connector/graph"
|
||||||
"github.com/alcionai/corso/src/internal/connector/onedrive"
|
|
||||||
"github.com/alcionai/corso/src/internal/connector/support"
|
"github.com/alcionai/corso/src/internal/connector/support"
|
||||||
"github.com/alcionai/corso/src/pkg/account"
|
"github.com/alcionai/corso/src/pkg/account"
|
||||||
)
|
)
|
||||||
@ -56,16 +55,3 @@ func createTestService(t *testing.T, credentials account.M365Config) *graph.Serv
|
|||||||
|
|
||||||
return graph.NewService(adapter)
|
return graph.NewService(adapter)
|
||||||
}
|
}
|
||||||
|
|
||||||
func expectedPathAsSlice(t *testing.T, tenant, user string, rest ...string) []string {
|
|
||||||
res := make([]string, 0, len(rest))
|
|
||||||
|
|
||||||
for _, r := range rest {
|
|
||||||
p, err := onedrive.GetCanonicalPath(r, tenant, user, onedrive.SharePointSource)
|
|
||||||
require.NoError(t, err, clues.ToCore(err))
|
|
||||||
|
|
||||||
res = append(res, p.String())
|
|
||||||
}
|
|
||||||
|
|
||||||
return res
|
|
||||||
}
|
|
||||||
|
|||||||
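The new library_handler.go below supplies SharePoint's implementations of the shared OneDrive handler interfaces. Wiring them in looks roughly like this; the backup half is trimmed from the collectLibraries hunk above, and the restore half is a plausible call assuming a caller holding an api.Client, not a call site shown in this commit.

// Backup side: SharePoint reuses the OneDrive collections engine by
// supplying its own BackupHandler implementation.
colls := onedrive.NewCollections(
	&libraryBackupHandler{ad}, // ad is an api.Drives client
	tenantID,
	site.ID(),
	folderMatcher{scope},
	updater.UpdateStatus,
	ctrlOpts)

// Restore side follows the same pattern via the restore handler:
rh := NewRestoreHandler(ac) // *libraryRestoreHandler, satisfies onedrive.RestoreHandler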
275
src/internal/connector/sharepoint/library_handler.go
Normal file
275
src/internal/connector/sharepoint/library_handler.go
Normal file
@ -0,0 +1,275 @@
package sharepoint

import (
	"context"
	"net/http"
	"strings"

	"github.com/microsoftgraph/msgraph-sdk-go/drives"
	"github.com/microsoftgraph/msgraph-sdk-go/models"

	"github.com/alcionai/corso/src/internal/common/ptr"
	"github.com/alcionai/corso/src/internal/connector/onedrive"
	odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
	"github.com/alcionai/corso/src/pkg/backup/details"
	"github.com/alcionai/corso/src/pkg/path"
	"github.com/alcionai/corso/src/pkg/services/m365/api"
)

var _ onedrive.BackupHandler = &libraryBackupHandler{}

type libraryBackupHandler struct {
	ac api.Drives
}

func (h libraryBackupHandler) Get(
	ctx context.Context,
	url string,
	headers map[string]string,
) (*http.Response, error) {
	return h.ac.Get(ctx, url, headers)
}

func (h libraryBackupHandler) PathPrefix(
	tenantID, resourceOwner, driveID string,
) (path.Path, error) {
	return path.Build(
		tenantID,
		resourceOwner,
		path.SharePointService,
		path.LibrariesCategory,
		false,
		odConsts.DrivesPathDir,
		driveID,
		odConsts.RootPathDir)
}

func (h libraryBackupHandler) CanonicalPath(
	folders *path.Builder,
	tenantID, resourceOwner string,
) (path.Path, error) {
	return folders.ToDataLayerSharePointPath(tenantID, resourceOwner, path.LibrariesCategory, false)
}

func (h libraryBackupHandler) ServiceCat() (path.ServiceType, path.CategoryType) {
	return path.SharePointService, path.LibrariesCategory
}

func (h libraryBackupHandler) NewDrivePager(
	resourceOwner string,
	fields []string,
) api.DrivePager {
	return h.ac.NewSiteDrivePager(resourceOwner, fields)
}

func (h libraryBackupHandler) NewItemPager(
	driveID, link string,
	fields []string,
) api.DriveItemEnumerator {
	return h.ac.NewItemPager(driveID, link, fields)
}

func (h libraryBackupHandler) AugmentItemInfo(
	dii details.ItemInfo,
	item models.DriveItemable,
	size int64,
	parentPath *path.Builder,
) details.ItemInfo {
	return augmentItemInfo(dii, item, size, parentPath)
}

// constructWebURL is a helper function for recreating the webURL
// for the originating SharePoint site. Uses the additionalData map
// from a models.DriveItemable that possesses a downloadURL within the map.
// Returns "" if the map is nil or key is not present.
func constructWebURL(adtl map[string]any) string {
	var (
		desiredKey = "@microsoft.graph.downloadUrl"
		sep        = `/_layouts`
		url        string
	)

	if adtl == nil {
		return url
	}

	r := adtl[desiredKey]
	point, ok := r.(*string)

	if !ok {
		return url
	}

	value := ptr.Val(point)
	if len(value) == 0 {
		return url
	}

	temp := strings.Split(value, sep)
	url = temp[0]

	return url
}

func (h libraryBackupHandler) FormatDisplayPath(
	driveName string,
	pb *path.Builder,
) string {
	return "/" + driveName + "/" + pb.String()
}

func (h libraryBackupHandler) NewLocationIDer(
	driveID string,
	elems ...string,
) details.LocationIDer {
	return details.NewSharePointLocationIDer(driveID, elems...)
}

func (h libraryBackupHandler) GetItemPermission(
	ctx context.Context,
	driveID, itemID string,
) (models.PermissionCollectionResponseable, error) {
	return h.ac.GetItemPermission(ctx, driveID, itemID)
}

func (h libraryBackupHandler) GetItem(
	ctx context.Context,
	driveID, itemID string,
) (models.DriveItemable, error) {
	return h.ac.GetItem(ctx, driveID, itemID)
}

// ---------------------------------------------------------------------------
// Restore
// ---------------------------------------------------------------------------

var _ onedrive.RestoreHandler = &libraryRestoreHandler{}

type libraryRestoreHandler struct {
	ac api.Drives
}

func NewRestoreHandler(ac api.Client) *libraryRestoreHandler {
	return &libraryRestoreHandler{ac.Drives()}
}

func (h libraryRestoreHandler) AugmentItemInfo(
	dii details.ItemInfo,
	item models.DriveItemable,
	size int64,
	parentPath *path.Builder,
) details.ItemInfo {
	return augmentItemInfo(dii, item, size, parentPath)
}

func (h libraryRestoreHandler) NewItemContentUpload(
	ctx context.Context,
	driveID, itemID string,
) (models.UploadSessionable, error) {
	return h.ac.NewItemContentUpload(ctx, driveID, itemID)
}

func (h libraryRestoreHandler) DeleteItemPermission(
	ctx context.Context,
	driveID, itemID, permissionID string,
) error {
	return h.ac.DeleteItemPermission(ctx, driveID, itemID, permissionID)
}

func (h libraryRestoreHandler) PostItemPermissionUpdate(
	ctx context.Context,
	driveID, itemID string,
	body *drives.ItemItemsItemInvitePostRequestBody,
) (drives.ItemItemsItemInviteResponseable, error) {
	return h.ac.PostItemPermissionUpdate(ctx, driveID, itemID, body)
}

func (h libraryRestoreHandler) PostItemInContainer(
	ctx context.Context,
	driveID, parentFolderID string,
	newItem models.DriveItemable,
) (models.DriveItemable, error) {
	return h.ac.PostItemInContainer(ctx, driveID, parentFolderID, newItem)
}

func (h libraryRestoreHandler) GetFolderByName(
	ctx context.Context,
	driveID, parentFolderID, folderName string,
) (models.DriveItemable, error) {
	return h.ac.GetFolderByName(ctx, driveID, parentFolderID, folderName)
}

func (h libraryRestoreHandler) GetRootFolder(
	ctx context.Context,
	driveID string,
) (models.DriveItemable, error) {
	return h.ac.GetRootFolder(ctx, driveID)
}

// ---------------------------------------------------------------------------
// Common
// ---------------------------------------------------------------------------

func augmentItemInfo(
	dii details.ItemInfo,
	item models.DriveItemable,
	size int64,
	parentPath *path.Builder,
) details.ItemInfo {
	var driveName, siteID, driveID, weburl, creatorEmail string

	// TODO: we rely on this info for details/restore lookups,
	// so if it's nil we have an issue, and will need an alternative
	// way to source the data.

	if item.GetCreatedBy() != nil && item.GetCreatedBy().GetUser() != nil {
		// User is sometimes not available when created via some
		// external applications (like backup/restore solutions)
		additionalData := item.GetCreatedBy().GetUser().GetAdditionalData()

		ed, ok := additionalData["email"]
		if !ok {
			ed = additionalData["displayName"]
		}

		if ed != nil {
			creatorEmail = *ed.(*string)
		}
	}

	gsi := item.GetSharepointIds()
	if gsi != nil {
		siteID = ptr.Val(gsi.GetSiteId())
		weburl = ptr.Val(gsi.GetSiteUrl())

		if len(weburl) == 0 {
			weburl = constructWebURL(item.GetAdditionalData())
		}
	}

	if item.GetParentReference() != nil {
		driveID = ptr.Val(item.GetParentReference().GetDriveId())
		driveName = strings.TrimSpace(ptr.Val(item.GetParentReference().GetName()))
	}

	var pps string
	if parentPath != nil {
		pps = parentPath.String()
	}

	dii.SharePoint = &details.SharePointInfo{
		Created:    ptr.Val(item.GetCreatedDateTime()),
		DriveID:    driveID,
		DriveName:  driveName,
		ItemName:   ptr.Val(item.GetName()),
		ItemType:   details.SharePointLibrary,
		Modified:   ptr.Val(item.GetLastModifiedDateTime()),
		Owner:      creatorEmail,
		ParentPath: pps,
		SiteID:     siteID,
		Size:       size,
		WebURL:     weburl,
	}

	return dii
}
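The constructWebURL helper above recovers a site URL by truncating a Graph download URL at its `/_layouts` segment. A minimal standalone sketch of just that trimming behavior, with a hypothetical URL for illustration (the real helper reads the value out of the item's additional-data map):

package main

import (
	"fmt"
	"strings"
)

// siteURLFromDownloadURL mirrors the trim in constructWebURL: everything
// before the first "/_layouts" segment is treated as the site URL.
func siteURLFromDownloadURL(downloadURL string) string {
	if downloadURL == "" {
		return ""
	}

	// strings.Split always returns at least one element, so indexing
	// the first element is safe even when the separator is absent.
	return strings.Split(downloadURL, "/_layouts")[0]
}

func main() {
	// Hypothetical download URL, for illustration only.
	u := "https://contoso.sharepoint.com/sites/team/_layouts/15/download.aspx?guid=abc"
	fmt.Println(siteURLFromDownloadURL(u))
	// Output: https://contoso.sharepoint.com/sites/team
}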
src/internal/connector/sharepoint/library_handler_test.go (new file, 58 lines)
@ -0,0 +1,58 @@
package sharepoint

import (
	"testing"

	"github.com/alcionai/clues"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/suite"

	"github.com/alcionai/corso/src/internal/tester"
	"github.com/alcionai/corso/src/pkg/path"
)

type LibraryBackupHandlerUnitSuite struct {
	tester.Suite
}

func TestLibraryBackupHandlerUnitSuite(t *testing.T) {
	suite.Run(t, &LibraryBackupHandlerUnitSuite{Suite: tester.NewUnitSuite(t)})
}

func (suite *LibraryBackupHandlerUnitSuite) TestCanonicalPath() {
	tenantID, resourceOwner := "tenant", "resourceOwner"

	table := []struct {
		name      string
		expect    string
		expectErr assert.ErrorAssertionFunc
	}{
		{
			name:      "sharepoint",
			expect:    "tenant/sharepoint/resourceOwner/libraries/prefix",
			expectErr: assert.NoError,
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()
			h := libraryBackupHandler{}
			p := path.Builder{}.Append("prefix")

			result, err := h.CanonicalPath(p, tenantID, resourceOwner)
			test.expectErr(t, err, clues.ToCore(err))

			if result != nil {
				assert.Equal(t, test.expect, result.String())
			}
		})
	}
}

func (suite *LibraryBackupHandlerUnitSuite) TestServiceCat() {
	t := suite.T()

	s, c := libraryBackupHandler{}.ServiceCat()
	assert.Equal(t, path.SharePointService, s)
	assert.Equal(t, path.LibrariesCategory, c)
}
@ -6,7 +6,6 @@ import (
 	"fmt"
 	"io"
 	"runtime/trace"
-	"sync"
 
 	"github.com/alcionai/clues"
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
@ -18,12 +17,12 @@ import (
 	"github.com/alcionai/corso/src/internal/connector/support"
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/diagnostics"
-	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/logger"
 	"github.com/alcionai/corso/src/pkg/path"
+	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )
 
 //----------------------------------------------------------------------------
@ -43,13 +42,11 @@
 func RestoreCollections(
 	ctx context.Context,
 	backupVersion int,
-	creds account.M365Config,
-	service graph.Servicer,
+	ac api.Client,
 	dest control.RestoreDestination,
 	opts control.Options,
 	dcs []data.RestoreCollection,
 	deets *details.Builder,
-	pool *sync.Pool,
 	errs *fault.Bus,
 ) (*support.ConnectorOperationStatus, error) {
 	var (
@ -83,22 +80,19 @@
 		case path.LibrariesCategory:
 			metrics, err = onedrive.RestoreCollection(
 				ictx,
-				creds,
+				libraryRestoreHandler{ac.Drives()},
 				backupVersion,
-				service,
 				dc,
 				caches,
-				onedrive.SharePointSource,
 				dest.ContainerName,
 				deets,
 				opts.RestorePermissions,
-				pool,
 				errs)
 
 		case path.ListsCategory:
 			metrics, err = RestoreListCollection(
 				ictx,
-				service,
+				ac.Stable,
 				dc,
 				dest.ContainerName,
 				deets,
@ -107,7 +101,7 @@
 		case path.PagesCategory:
 			metrics, err = RestorePageCollection(
 				ictx,
-				creds,
+				ac.Stable,
 				dc,
 				dest.ContainerName,
 				deets,
@ -292,7 +286,7 @@
 // - the context cancellation station. True iff context is canceled.
 func RestorePageCollection(
 	ctx context.Context,
-	creds account.M365Config,
+	gs graph.Servicer,
 	dc data.RestoreCollection,
 	restoreContainerName string,
 	deets *details.Builder,
@ -309,17 +303,9 @@
 
 	defer end()
 
-	adpt, err := graph.CreateAdapter(
-		creds.AzureTenantID,
-		creds.AzureClientID,
-		creds.AzureClientSecret)
-	if err != nil {
-		return metrics, clues.Wrap(err, "constructing graph client")
-	}
-
 	var (
 		el      = errs.Local()
-		service = betaAPI.NewBetaService(adpt)
+		service = betaAPI.NewBetaService(gs.Adapter())
 		items   = dc.Items(ctx, errs)
 	)
 
@ -70,19 +70,24 @@ type BackupCollection interface {
 // RestoreCollection is an extension of Collection that is used during restores.
 type RestoreCollection interface {
 	Collection
+	FetchItemByNamer
+}
+
+type FetchItemByNamer interface {
 	// Fetch retrieves an item with the given name from the Collection if it
 	// exists. Items retrieved with Fetch may still appear in the channel returned
 	// by Items().
-	Fetch(ctx context.Context, name string) (Stream, error)
+	FetchItemByName(ctx context.Context, name string) (Stream, error)
 }
 
-// NotFoundRestoreCollection is a wrapper for a Collection that returns
+// NoFetchRestoreCollection is a wrapper for a Collection that returns
 // ErrNotFound for all Fetch calls.
-type NotFoundRestoreCollection struct {
+type NoFetchRestoreCollection struct {
 	Collection
+	FetchItemByNamer
 }
 
-func (c NotFoundRestoreCollection) Fetch(context.Context, string) (Stream, error) {
+func (c NoFetchRestoreCollection) FetchItemByName(context.Context, string) (Stream, error) {
 	return nil, ErrNotFound
 }
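The hunk above splits fetching into its own FetchItemByNamer interface so a wrapper like NoFetchRestoreCollection can satisfy RestoreCollection by embedding and then shadowing the single method. A toy sketch of that embed-and-override pattern, using stand-in types rather than the real data package:

package main

import (
	"context"
	"errors"
	"fmt"
)

var errNotFound = errors.New("not found")

// Stand-ins for data.Collection and data.RestoreCollection.
type collection interface{ name() string }

type fetchItemByNamer interface {
	fetchItemByName(ctx context.Context, name string) (string, error)
}

type restoreCollection interface {
	collection
	fetchItemByNamer
}

type baseCollection struct{}

func (baseCollection) name() string { return "base" }

// noFetch embeds both interfaces; the embedded fetchItemByNamer can stay
// nil because the concrete method below shadows the promoted one, just as
// NoFetchRestoreCollection overrides FetchItemByName.
type noFetch struct {
	collection
	fetchItemByNamer
}

func (noFetch) fetchItemByName(context.Context, string) (string, error) {
	return "", errNotFound
}

func main() {
	var rc restoreCollection = noFetch{collection: baseCollection{}}
	_, err := rc.fetchItemByName(context.Background(), "item")
	fmt.Println(rc.name(), err) // base not found
}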
@ -20,38 +20,48 @@ var (
 
 type kopiaDataCollection struct {
 	path            path.Path
-	streams         []data.Stream
 	dir             fs.Directory
+	items           []string
 	counter         ByteCounter
 	expectedVersion uint32
 }
 
-func (kdc *kopiaDataCollection) addStream(
-	ctx context.Context,
-	name string,
-) error {
-	s, err := kdc.Fetch(ctx, name)
-	if err != nil {
-		return err
-	}
-
-	kdc.streams = append(kdc.streams, s)
-
-	return nil
-}
-
 func (kdc *kopiaDataCollection) Items(
 	ctx context.Context,
-	_ *fault.Bus, // unused, just matching the interface
+	errs *fault.Bus,
 ) <-chan data.Stream {
-	res := make(chan data.Stream)
+	var (
+		res       = make(chan data.Stream)
+		el        = errs.Local()
+		loadCount = 0
+	)
 
 	go func() {
 		defer close(res)
 
-		for _, s := range kdc.streams {
+		for _, item := range kdc.items {
+			s, err := kdc.FetchItemByName(ctx, item)
+			if err != nil {
+				el.AddRecoverable(clues.Wrap(err, "fetching item").
+					WithClues(ctx).
+					Label(fault.LabelForceNoBackupCreation))
+
+				continue
+			}
+
+			loadCount++
+			if loadCount%1000 == 0 {
+				logger.Ctx(ctx).Infow(
+					"loading items from kopia",
+					"loaded_items", loadCount)
+			}
+
 			res <- s
 		}
+
+		logger.Ctx(ctx).Infow(
+			"done loading items from kopia",
+			"loaded_items", loadCount)
 	}()
 
 	return res
@ -64,7 +74,7 @@ func (kdc kopiaDataCollection) FullPath() path.Path {
 // Fetch returns the file with the given name from the collection as a
 // data.Stream. Returns a data.ErrNotFound error if the file isn't in the
 // collection.
-func (kdc kopiaDataCollection) Fetch(
+func (kdc kopiaDataCollection) FetchItemByName(
 	ctx context.Context,
 	name string,
 ) (data.Stream, error) {
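With this change kopiaDataCollection no longer pre-loads streams via addStream; Items fetches each named item on demand inside the producer goroutine and records per-item failures as recoverable instead of failing the whole collection. A self-contained sketch of that fetch-as-you-stream shape, with a toy fetch function and error sink standing in for FetchItemByName and the fault bus:

package main

import (
	"context"
	"fmt"
)

// streamItems lazily fetches each named item, skips and records failures,
// and closes the channel when the names are exhausted -- the same shape as
// the reworked kopiaDataCollection.Items.
func streamItems(
	ctx context.Context,
	names []string,
	fetch func(context.Context, string) (string, error),
	onErr func(error),
) <-chan string {
	res := make(chan string)

	go func() {
		defer close(res)

		for _, name := range names {
			s, err := fetch(ctx, name)
			if err != nil {
				onErr(err) // recoverable: skip this item, keep going
				continue
			}

			res <- s
		}
	}()

	return res
}

func main() {
	fetch := func(_ context.Context, name string) (string, error) {
		if name == "bad" {
			return "", fmt.Errorf("fetching %q failed", name)
		}
		return "data:" + name, nil
	}

	var recovered []error
	items := streamItems(
		context.Background(),
		[]string{"a", "bad", "b"},
		fetch,
		func(err error) { recovered = append(recovered, err) })

	for item := range items {
		fmt.Println(item)
	}

	fmt.Println("recovered errors:", len(recovered))
}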
@ -165,15 +165,15 @@ func (suite *KopiaDataCollectionUnitSuite) TestReturnsStreams() {
 		{
 			name: "SingleStream",
 			uuidsAndErrors: map[string]assert.ErrorAssertionFunc{
-				uuids[0]: assert.NoError,
+				uuids[0]: nil,
 			},
 			expectedLoaded: []loadedData{files[0]},
 		},
 		{
 			name: "MultipleStreams",
 			uuidsAndErrors: map[string]assert.ErrorAssertionFunc{
-				uuids[0]: assert.NoError,
-				uuids[1]: assert.NoError,
+				uuids[0]: nil,
+				uuids[1]: nil,
 			},
 			expectedLoaded: files,
 		},
@ -181,7 +181,7 @@ func (suite *KopiaDataCollectionUnitSuite) TestReturnsStreams() {
 			name: "Some Not Found Errors",
 			uuidsAndErrors: map[string]assert.ErrorAssertionFunc{
 				fileLookupErrName: assert.Error,
-				uuids[0]:          assert.NoError,
+				uuids[0]:          nil,
 			},
 			expectedLoaded: []loadedData{files[0]},
 		},
@ -189,7 +189,7 @@ func (suite *KopiaDataCollectionUnitSuite) TestReturnsStreams() {
 			name: "Some Not A File Errors",
 			uuidsAndErrors: map[string]assert.ErrorAssertionFunc{
 				notFileErrName: assert.Error,
-				uuids[0]:       assert.NoError,
+				uuids[0]:       nil,
 			},
 			expectedLoaded: []loadedData{files[0]},
 		},
@ -197,7 +197,7 @@ func (suite *KopiaDataCollectionUnitSuite) TestReturnsStreams() {
 			name: "Some Open Errors",
 			uuidsAndErrors: map[string]assert.ErrorAssertionFunc{
 				fileOpenErrName: assert.Error,
-				uuids[0]:        assert.NoError,
+				uuids[0]:        nil,
 			},
 			expectedLoaded: []loadedData{files[0]},
 		},
@ -217,20 +217,27 @@ func (suite *KopiaDataCollectionUnitSuite) TestReturnsStreams() {
 			ctx, flush := tester.NewContext(t)
 			defer flush()
 
+			items := []string{}
+			errs := []assert.ErrorAssertionFunc{}
+
+			for uuid, err := range test.uuidsAndErrors {
+				if err != nil {
+					errs = append(errs, err)
+				}
+
+				items = append(items, uuid)
+			}
+
 			c := kopiaDataCollection{
 				dir:             getLayout(),
 				path:            nil,
+				items:           items,
 				expectedVersion: serializationVersion,
 			}
 
-			for uuid, expectErr := range test.uuidsAndErrors {
-				err := c.addStream(ctx, uuid)
-				expectErr(t, err, "adding stream to collection", clues.ToCore(err))
-			}
-
 			var (
 				found []loadedData
-				bus   = fault.New(true)
+				bus   = fault.New(false)
 			)
 
 			for returnedStream := range c.Items(ctx, bus) {
@ -256,7 +263,12 @@ func (suite *KopiaDataCollectionUnitSuite) TestReturnsStreams() {
 					f.size = ss.Size()
 				}
 
-			assert.Empty(t, bus.Recovered(), "expected no recoverable errors")
+			// We expect the items to be fetched in the order they are
+			// in the struct or the errors will not line up
+			for i, err := range bus.Recovered() {
+				assert.True(t, errs[i](t, err), "expected error", clues.ToCore(err))
+			}
+
 			assert.NoError(t, bus.Failure(), "expected no hard failures")
 
 			assert.ElementsMatch(t, test.expectedLoaded, found, "loaded items")
@ -264,7 +276,7 @@ func (suite *KopiaDataCollectionUnitSuite) TestReturnsStreams() {
 		})
 	}
 }
 
-func (suite *KopiaDataCollectionUnitSuite) TestFetch() {
+func (suite *KopiaDataCollectionUnitSuite) TestFetchItemByName() {
 	var (
 		tenant = "a-tenant"
 		user   = "a-user"
@ -381,7 +393,7 @@ func (suite *KopiaDataCollectionUnitSuite) TestFetch() {
 				expectedVersion: serializationVersion,
 			}
 
-			s, err := col.Fetch(ctx, test.inputName)
+			s, err := col.FetchItemByName(ctx, test.inputName)
 
 			test.lookupErr(t, err)
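The mergeCollection change below keeps first-match-wins semantics: each wrapped collection is asked in order, ErrNotFound moves on, and any other error aborts. A toy version of that lookup loop, with plain functions standing in for the ordered set of wrapped collections:

package main

import (
	"context"
	"errors"
	"fmt"
)

var errNotFound = errors.New("not found")

type fetcher func(ctx context.Context, name string) (string, error)

// fetchFirst asks each collection in order and returns the first hit;
// errNotFound just moves on, while any other error aborts the search.
func fetchFirst(ctx context.Context, cols []fetcher, name string) (string, error) {
	for _, c := range cols {
		s, err := c(ctx, name)
		if err == nil {
			return s, nil
		}

		if !errors.Is(err, errNotFound) {
			return "", err
		}
	}

	return "", errNotFound
}

func main() {
	miss := func(context.Context, string) (string, error) { return "", errNotFound }
	hit := func(_ context.Context, name string) (string, error) { return "found:" + name, nil }

	s, err := fetchFirst(context.Background(), []fetcher{miss, hit}, "item")
	fmt.Println(s, err) // found:item <nil>
}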
@ -86,7 +86,7 @@ func (mc *mergeCollection) Items(
 // match found or the first error that is not data.ErrNotFound. If multiple
 // collections have the requested item, the instance in the collection with the
 // lexicographically smallest storage path is returned.
-func (mc *mergeCollection) Fetch(
+func (mc *mergeCollection) FetchItemByName(
 	ctx context.Context,
 	name string,
 ) (data.Stream, error) {
@ -99,7 +99,7 @@ func (mc *mergeCollection) Fetch(
 
 		logger.Ctx(ictx).Debug("looking for item in merged collection")
 
-		s, err := c.Fetch(ictx, name)
+		s, err := c.FetchItemByName(ictx, name)
 		if err == nil {
 			return s, nil
 		} else if err != nil && !errors.Is(err, data.ErrNotFound) {
@ -76,8 +76,8 @@ func (suite *MergeCollectionUnitSuite) TestItems() {
 
 	// Not testing fetch here so safe to use this wrapper.
 	cols := []data.RestoreCollection{
-		data.NotFoundRestoreCollection{Collection: c1},
-		data.NotFoundRestoreCollection{Collection: c2},
+		data.NoFetchRestoreCollection{Collection: c1},
+		data.NoFetchRestoreCollection{Collection: c2},
 	}
 
 	dc := &mergeCollection{fullPath: pth}
@ -123,7 +123,7 @@ func (suite *MergeCollectionUnitSuite) TestAddCollection_DifferentPathFails() {
 	assert.Error(t, err, clues.ToCore(err))
 }
 
-func (suite *MergeCollectionUnitSuite) TestFetch() {
+func (suite *MergeCollectionUnitSuite) TestFetchItemByName() {
 	var (
 		fileData1 = []byte("abcdefghijklmnopqrstuvwxyz")
 		fileData2 = []byte("zyxwvutsrqponmlkjihgfedcba")
@ -275,7 +275,7 @@ func (suite *MergeCollectionUnitSuite) TestFetch() {
 		require.NoError(t, err, "adding collection", clues.ToCore(err))
 	}
 
-	s, err := dc.Fetch(ctx, test.fileName)
+	s, err := dc.FetchItemByName(ctx, test.fileName)
 	test.expectError(t, err, clues.ToCore(err))
 
 	if err != nil {
@ -392,9 +392,8 @@ func loadDirsAndItems(
 	bus *fault.Bus,
 ) ([]data.RestoreCollection, error) {
 	var (
 		el  = bus.Local()
 		res = make([]data.RestoreCollection, 0, len(toLoad))
-		loadCount = 0
 	)
 
 	for _, col := range toLoad {
@ -426,6 +425,7 @@ func loadDirsAndItems(
 			dc := &kopiaDataCollection{
 				path:            col.restorePath,
 				dir:             dir,
+				items:           dirItems.items,
 				counter:         bcounter,
 				expectedVersion: serializationVersion,
 			}
@ -437,35 +437,9 @@ func loadDirsAndItems(
 
 				continue
 			}
-
-			for _, item := range dirItems.items {
-				if el.Failure() != nil {
-					return nil, el.Failure()
-				}
-
-				err := dc.addStream(ictx, item)
-				if err != nil {
-					el.AddRecoverable(clues.Wrap(err, "loading item").
-						WithClues(ictx).
-						Label(fault.LabelForceNoBackupCreation))
-
-					continue
-				}
-
-				loadCount++
-				if loadCount%1000 == 0 {
-					logger.Ctx(ctx).Infow(
-						"loading items from kopia",
-						"loaded_items", loadCount)
-				}
-			}
 		}
 	}
 
-	logger.Ctx(ctx).Infow(
-		"done loading items from kopia",
-		"loaded_items", loadCount)
-
 	return res, el.Failure()
 }
@ -843,16 +843,28 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
 
 	ic := i64counter{}
 
-	_, err = suite.w.ProduceRestoreCollections(
+	dcs, err := suite.w.ProduceRestoreCollections(
 		suite.ctx,
 		string(stats.SnapshotID),
 		toRestorePaths(t, failedPath),
 		&ic,
 		fault.New(true))
+	assert.NoError(t, err, "error producing restore collections")
+
+	require.Len(t, dcs, 1, "number of restore collections")
+
+	errs := fault.New(true)
+	items := dcs[0].Items(suite.ctx, errs)
+
+	// Get all the items from channel
+	//nolint:revive
+	for range items {
+	}
+
 	// Files that had an error shouldn't make a dir entry in kopia. If they do we
 	// may run into kopia-assisted incrementals issues because only mod time and
 	// not file size is checked for StreamingFiles.
-	assert.ErrorIs(t, err, data.ErrNotFound, "errored file is restorable", clues.ToCore(err))
+	assert.ErrorIs(t, errs.Failure(), data.ErrNotFound, "errored file is restorable", clues.ToCore(err))
 }
 
 type backedupFile struct {
@ -1223,13 +1235,25 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
 
 			ic := i64counter{}
 
-			_, err = suite.w.ProduceRestoreCollections(
+			dcs, err := suite.w.ProduceRestoreCollections(
 				suite.ctx,
 				string(stats.SnapshotID),
 				toRestorePaths(t, suite.files[suite.testPath1.String()][0].itemPath),
 				&ic,
 				fault.New(true))
-			test.restoreCheck(t, err, clues.ToCore(err))
+
+			assert.NoError(t, err, "errors producing collection", clues.ToCore(err))
+			require.Len(t, dcs, 1, "unexpected number of restore collections")
+
+			errs := fault.New(true)
+			items := dcs[0].Items(suite.ctx, errs)
+
+			// Get all the items from channel
+			//nolint:revive
+			for range items {
+			}
+
+			test.restoreCheck(t, errs.Failure(), errs)
 		})
 	}
 }
@ -1248,18 +1272,20 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestProduceRestoreCollections() {
 	// suite's map of files. Files that are not in the suite's map are assumed to
 	// generate errors and not be in the output.
 	table := []struct {
 		name                string
 		inputPaths          []path.Path
 		expectedCollections int
 		expectedErr         assert.ErrorAssertionFunc
+		expectedCollectionErr assert.ErrorAssertionFunc
 	}{
 		{
 			name: "SingleItem",
 			inputPaths: []path.Path{
 				suite.files[suite.testPath1.String()][0].itemPath,
 			},
 			expectedCollections: 1,
 			expectedErr:         assert.NoError,
+			expectedCollectionErr: assert.NoError,
 		},
 		{
 			name: "MultipleItemsSameCollection",
@ -1267,8 +1293,9 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestProduceRestoreCollections() {
 				suite.files[suite.testPath1.String()][0].itemPath,
 				suite.files[suite.testPath1.String()][1].itemPath,
 			},
 			expectedCollections: 1,
 			expectedErr:         assert.NoError,
+			expectedCollectionErr: assert.NoError,
 		},
 		{
 			name: "MultipleItemsDifferentCollections",
@ -1276,8 +1303,9 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestProduceRestoreCollections() {
 				suite.files[suite.testPath1.String()][0].itemPath,
 				suite.files[suite.testPath2.String()][0].itemPath,
 			},
 			expectedCollections: 2,
 			expectedErr:         assert.NoError,
+			expectedCollectionErr: assert.NoError,
 		},
 		{
 			name: "TargetNotAFile",
@ -1286,8 +1314,9 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestProduceRestoreCollections() {
 				suite.testPath1,
 				suite.files[suite.testPath2.String()][0].itemPath,
 			},
 			expectedCollections: 0,
 			expectedErr:         assert.Error,
+			expectedCollectionErr: assert.NoError,
 		},
 		{
 			name: "NonExistentFile",
@ -1296,8 +1325,9 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestProduceRestoreCollections() {
 				doesntExist,
 				suite.files[suite.testPath2.String()][0].itemPath,
 			},
 			expectedCollections: 0,
-			expectedErr:         assert.Error,
+			expectedErr:         assert.NoError,
+			expectedCollectionErr: assert.Error, // folder for doesntExist does not exist
 		},
 	}
 
@ -1330,12 +1360,28 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestProduceRestoreCollections() {
 				toRestorePaths(t, test.inputPaths...),
 				&ic,
 				fault.New(true))
-			test.expectedErr(t, err, clues.ToCore(err))
+			test.expectedCollectionErr(t, err, clues.ToCore(err), "producing collections")
 
 			if err != nil {
 				return
 			}
 
+			errs := fault.New(true)
+
+			for _, dc := range result {
+				// Get all the items from channel
+				items := dc.Items(suite.ctx, errs)
+				//nolint:revive
+				for range items {
+				}
+			}
+
+			test.expectedErr(t, errs.Failure(), errs.Failure(), "getting items")
+
+			if errs.Failure() != nil {
+				return
+			}
+
 			assert.Len(t, result, test.expectedCollections)
 			assert.Less(t, int64(0), ic.i)
 			testForFiles(t, ctx, expected, result)
@ -1456,7 +1502,6 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestProduceRestoreCollections_Path
 			require.NoError(t, err, clues.ToCore(err))
 
 			assert.Len(t, result, test.expectedCollections)
-			assert.Less(t, int64(0), ic.i)
 			testForFiles(t, ctx, expected, result)
 		})
 	}
@ -1465,7 +1510,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestProduceRestoreCollections_Path
 // TestProduceRestoreCollections_Fetch tests that the Fetch function still works
 // properly even with different Restore and Storage paths and items from
 // different kopia directories.
-func (suite *KopiaSimpleRepoIntegrationSuite) TestProduceRestoreCollections_Fetch() {
+func (suite *KopiaSimpleRepoIntegrationSuite) TestProduceRestoreCollections_FetchItemByName() {
 	t := suite.T()
 
 	ctx, flush := tester.NewContext(t)
@ -1507,7 +1552,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestProduceRestoreCollections_Fetc
 	// Item from first kopia directory.
 	f := suite.files[suite.testPath1.String()][0]
 
-	item, err := result[0].Fetch(ctx, f.itemPath.Item())
+	item, err := result[0].FetchItemByName(ctx, f.itemPath.Item())
 	require.NoError(t, err, "fetching file", clues.ToCore(err))
 
 	r := item.ToReader()
@ -1520,7 +1565,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestProduceRestoreCollections_Fetc
 	// Item from second kopia directory.
 	f = suite.files[suite.testPath2.String()][0]
 
-	item, err = result[0].Fetch(ctx, f.itemPath.Item())
+	item, err = result[0].FetchItemByName(ctx, f.itemPath.Item())
 	require.NoError(t, err, "fetching file", clues.ToCore(err))
 
 	r = item.ToReader()
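The test updates above reflect the behavioral shift: ProduceRestoreCollections no longer surfaces per-item errors directly, because items are fetched lazily. The tests therefore drain each collection's Items channel and only then inspect the fault bus. A sketch of that drain-then-check pattern with stand-in types (the real code uses data.RestoreCollection and fault.New):

package main

import (
	"errors"
	"fmt"
)

// toy fault bus: records a hard failure observed during iteration.
type bus struct{ failure error }

func (b *bus) fail(err error) { b.failure = err }

// itemsOf simulates a lazily-evaluated collection: errors only surface
// while the channel is being drained, not when the collection is built.
func itemsOf(names []string, errs *bus) <-chan string {
	res := make(chan string)

	go func() {
		defer close(res)

		for _, n := range names {
			if n == "missing" {
				errs.fail(errors.New("item not found"))
				continue
			}
			res <- n
		}
	}()

	return res
}

func main() {
	errs := &bus{}

	// Drain the channel; only afterwards is the failure state meaningful.
	for range itemsOf([]string{"a", "missing"}, errs) {
	}

	fmt.Println("failure:", errs.failure)
}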
@ -324,7 +324,7 @@ func (op *BackupOperation) do(
 		}
 	}
 
-	cs, ssmb, err := produceBackupDataCollections(
+	cs, ssmb, canUsePreviousBackup, err := produceBackupDataCollections(
 		ctx,
 		op.bp,
 		op.ResourceOwner,
@ -348,7 +348,7 @@ func (op *BackupOperation) do(
 		cs,
 		ssmb,
 		backupID,
-		op.incremental && canUseMetaData,
+		op.incremental && canUseMetaData && canUsePreviousBackup,
 		op.Errors)
 	if err != nil {
 		return nil, clues.Wrap(err, "persisting collection backups")
@ -406,7 +406,7 @@ func produceBackupDataCollections(
 	lastBackupVersion int,
 	ctrlOpts control.Options,
 	errs *fault.Bus,
-) ([]data.BackupCollection, prefixmatcher.StringSetReader, error) {
+) ([]data.BackupCollection, prefixmatcher.StringSetReader, bool, error) {
 	complete := observe.MessageWithCompletion(ctx, "Discovering items to backup")
 	defer func() {
 		complete <- struct{}{}
|
|||||||
"github.com/alcionai/corso/src/internal/connector/onedrive"
|
"github.com/alcionai/corso/src/internal/connector/onedrive"
|
||||||
odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
|
odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
|
||||||
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
|
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
|
||||||
|
"github.com/alcionai/corso/src/internal/connector/sharepoint"
|
||||||
"github.com/alcionai/corso/src/internal/data"
|
"github.com/alcionai/corso/src/internal/data"
|
||||||
"github.com/alcionai/corso/src/internal/events"
|
"github.com/alcionai/corso/src/internal/events"
|
||||||
evmock "github.com/alcionai/corso/src/internal/events/mock"
|
evmock "github.com/alcionai/corso/src/internal/events/mock"
|
||||||
@ -347,7 +348,6 @@ func generateContainerOfItems(
|
|||||||
ctx context.Context, //revive:disable-line:context-as-argument
|
ctx context.Context, //revive:disable-line:context-as-argument
|
||||||
gc *connector.GraphConnector,
|
gc *connector.GraphConnector,
|
||||||
service path.ServiceType,
|
service path.ServiceType,
|
||||||
acct account.Account,
|
|
||||||
cat path.CategoryType,
|
cat path.CategoryType,
|
||||||
sel selectors.Selector,
|
sel selectors.Selector,
|
||||||
tenantID, resourceOwner, driveID, destFldr string,
|
tenantID, resourceOwner, driveID, destFldr string,
|
||||||
@ -397,7 +397,6 @@ func generateContainerOfItems(
|
|||||||
deets, err := gc.ConsumeRestoreCollections(
|
deets, err := gc.ConsumeRestoreCollections(
|
||||||
ctx,
|
ctx,
|
||||||
backupVersion,
|
backupVersion,
|
||||||
acct,
|
|
||||||
sel,
|
sel,
|
||||||
dest,
|
dest,
|
||||||
opts,
|
opts,
|
||||||
@ -468,7 +467,7 @@ func buildCollections(
|
|||||||
mc.Data[i] = c.items[i].data
|
mc.Data[i] = c.items[i].data
|
||||||
}
|
}
|
||||||
|
|
||||||
collections = append(collections, data.NotFoundRestoreCollection{Collection: mc})
|
collections = append(collections, data.NoFetchRestoreCollection{Collection: mc})
|
||||||
}
|
}
|
||||||
|
|
||||||
return collections
|
return collections
|
||||||
@ -513,6 +512,7 @@ func toDataLayerPath(
|
|||||||
type BackupOpIntegrationSuite struct {
|
type BackupOpIntegrationSuite struct {
|
||||||
tester.Suite
|
tester.Suite
|
||||||
user, site string
|
user, site string
|
||||||
|
ac api.Client
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestBackupOpIntegrationSuite(t *testing.T) {
|
func TestBackupOpIntegrationSuite(t *testing.T) {
|
||||||
@ -524,8 +524,18 @@ func TestBackupOpIntegrationSuite(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (suite *BackupOpIntegrationSuite) SetupSuite() {
|
func (suite *BackupOpIntegrationSuite) SetupSuite() {
|
||||||
suite.user = tester.M365UserID(suite.T())
|
t := suite.T()
|
||||||
suite.site = tester.M365SiteID(suite.T())
|
|
||||||
|
suite.user = tester.M365UserID(t)
|
||||||
|
suite.site = tester.M365SiteID(t)
|
||||||
|
|
||||||
|
a := tester.NewM365Account(t)
|
||||||
|
|
||||||
|
creds, err := a.M365Config()
|
||||||
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
|
|
||||||
|
suite.ac, err = api.NewClient(creds)
|
||||||
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
}
|
}
|
||||||
|
|
||||||
func (suite *BackupOpIntegrationSuite) TestNewBackupOperation() {
|
func (suite *BackupOpIntegrationSuite) TestNewBackupOperation() {
|
||||||
@ -847,7 +857,6 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
|
|||||||
ctx,
|
ctx,
|
||||||
gc,
|
gc,
|
||||||
service,
|
service,
|
||||||
acct,
|
|
||||||
category,
|
category,
|
||||||
selectors.NewExchangeRestore([]string{uidn.ID()}).Selector,
|
selectors.NewExchangeRestore([]string{uidn.ID()}).Selector,
|
||||||
m365.AzureTenantID, uidn.ID(), "", destName,
|
m365.AzureTenantID, uidn.ID(), "", destName,
|
||||||
@ -1029,7 +1038,6 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
|
|||||||
ctx,
|
ctx,
|
||||||
gc,
|
gc,
|
||||||
service,
|
service,
|
||||||
acct,
|
|
||||||
category,
|
category,
|
||||||
selectors.NewExchangeRestore([]string{uidn.ID()}).Selector,
|
selectors.NewExchangeRestore([]string{uidn.ID()}).Selector,
|
||||||
m365.AzureTenantID, suite.user, "", container3,
|
m365.AzureTenantID, suite.user, "", container3,
|
||||||
@ -1316,9 +1324,8 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_incrementalOneDrive() {
|
|||||||
gtdi := func(
|
gtdi := func(
|
||||||
t *testing.T,
|
t *testing.T,
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
gs graph.Servicer,
|
|
||||||
) string {
|
) string {
|
||||||
d, err := api.GetUsersDrive(ctx, gs, suite.user)
|
d, err := suite.ac.Users().GetDefaultDrive(ctx, suite.user)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
err = graph.Wrap(ctx, err, "retrieving default user drive").
|
err = graph.Wrap(ctx, err, "retrieving default user drive").
|
||||||
With("user", suite.user)
|
With("user", suite.user)
|
||||||
@ -1332,6 +1339,10 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_incrementalOneDrive() {
|
|||||||
return id
|
return id
|
||||||
}
|
}
|
||||||
|
|
||||||
|
grh := func(ac api.Client) onedrive.RestoreHandler {
|
||||||
|
return onedrive.NewRestoreHandler(ac)
|
||||||
|
}
|
||||||
|
|
||||||
runDriveIncrementalTest(
|
runDriveIncrementalTest(
|
||||||
suite,
|
suite,
|
||||||
suite.user,
|
suite.user,
|
||||||
@ -1341,6 +1352,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_incrementalOneDrive() {
|
|||||||
path.FilesCategory,
|
path.FilesCategory,
|
||||||
ic,
|
ic,
|
||||||
gtdi,
|
gtdi,
|
||||||
|
grh,
|
||||||
false)
|
false)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1355,9 +1367,8 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_incrementalSharePoint() {
|
|||||||
gtdi := func(
|
gtdi := func(
|
||||||
t *testing.T,
|
t *testing.T,
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
gs graph.Servicer,
|
|
||||||
) string {
|
) string {
|
||||||
d, err := api.GetSitesDefaultDrive(ctx, gs, suite.site)
|
d, err := suite.ac.Sites().GetDefaultDrive(ctx, suite.site)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
err = graph.Wrap(ctx, err, "retrieving default site drive").
|
err = graph.Wrap(ctx, err, "retrieving default site drive").
|
||||||
With("site", suite.site)
|
With("site", suite.site)
|
||||||
@ -1371,6 +1382,10 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_incrementalSharePoint() {
|
|||||||
return id
|
return id
|
||||||
}
|
}
|
||||||
|
|
||||||
|
grh := func(ac api.Client) onedrive.RestoreHandler {
|
||||||
|
return sharepoint.NewRestoreHandler(ac)
|
||||||
|
}
|
||||||
|
|
||||||
runDriveIncrementalTest(
|
runDriveIncrementalTest(
|
||||||
suite,
|
suite,
|
||||||
suite.site,
|
suite.site,
|
||||||
@ -1380,6 +1395,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_incrementalSharePoint() {
|
|||||||
path.LibrariesCategory,
|
path.LibrariesCategory,
|
||||||
ic,
|
ic,
|
||||||
gtdi,
|
gtdi,
|
||||||
|
grh,
|
||||||
true)
|
true)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1390,7 +1406,8 @@ func runDriveIncrementalTest(
|
|||||||
service path.ServiceType,
|
service path.ServiceType,
|
||||||
category path.CategoryType,
|
category path.CategoryType,
|
||||||
includeContainers func([]string) selectors.Selector,
|
includeContainers func([]string) selectors.Selector,
|
||||||
getTestDriveID func(*testing.T, context.Context, graph.Servicer) string,
|
getTestDriveID func(*testing.T, context.Context) string,
|
||||||
|
getRestoreHandler func(api.Client) onedrive.RestoreHandler,
|
||||||
skipPermissionsTests bool,
|
skipPermissionsTests bool,
|
||||||
) {
|
) {
|
||||||
t := suite.T()
|
t := suite.T()
|
||||||
@ -1429,12 +1446,14 @@ func runDriveIncrementalTest(
|
|||||||
require.NoError(t, err, clues.ToCore(err))
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
|
|
||||||
gc, sel := GCWithSelector(t, ctx, acct, resource, sel, nil, nil)
|
gc, sel := GCWithSelector(t, ctx, acct, resource, sel, nil, nil)
|
||||||
|
ac := gc.AC.Drives()
|
||||||
|
rh := getRestoreHandler(gc.AC)
|
||||||
|
|
||||||
roidn := inMock.NewProvider(sel.ID(), sel.Name())
|
roidn := inMock.NewProvider(sel.ID(), sel.Name())
|
||||||
|
|
||||||
var (
|
var (
|
||||||
atid = creds.AzureTenantID
|
atid = creds.AzureTenantID
|
||||||
driveID = getTestDriveID(t, ctx, gc.Service)
|
driveID = getTestDriveID(t, ctx)
|
||||||
fileDBF = func(id, timeStamp, subject, body string) []byte {
|
fileDBF = func(id, timeStamp, subject, body string) []byte {
|
||||||
return []byte(id + subject)
|
return []byte(id + subject)
|
||||||
}
|
}
|
||||||
@ -1462,7 +1481,6 @@ func runDriveIncrementalTest(
|
|||||||
ctx,
|
ctx,
|
||||||
gc,
|
gc,
|
||||||
service,
|
service,
|
||||||
acct,
|
|
||||||
category,
|
category,
|
||||||
sel,
|
sel,
|
||||||
atid, roidn.ID(), driveID, destName,
|
atid, roidn.ID(), driveID, destName,
|
||||||
@ -1488,7 +1506,7 @@ func runDriveIncrementalTest(
|
|||||||
// onedrive package `getFolder` function.
|
// onedrive package `getFolder` function.
|
||||||
itemURL := fmt.Sprintf("https://graph.microsoft.com/v1.0/drives/%s/root:/%s", driveID, destName)
|
itemURL := fmt.Sprintf("https://graph.microsoft.com/v1.0/drives/%s/root:/%s", driveID, destName)
|
||||||
resp, err := drives.
|
resp, err := drives.
|
||||||
NewItemItemsDriveItemItemRequestBuilder(itemURL, gc.Service.Adapter()).
|
NewItemItemsDriveItemItemRequestBuilder(itemURL, gc.AC.Stable.Adapter()).
|
||||||
Get(ctx, nil)
|
Get(ctx, nil)
|
||||||
require.NoError(t, err, "getting drive folder ID", "folder name", destName, clues.ToCore(err))
|
require.NoError(t, err, "getting drive folder ID", "folder name", destName, clues.ToCore(err))
|
||||||
|
|
||||||
@ -1543,9 +1561,8 @@ func runDriveIncrementalTest(
|
|||||||
driveItem := models.NewDriveItem()
|
driveItem := models.NewDriveItem()
|
||||||
driveItem.SetName(&newFileName)
|
driveItem.SetName(&newFileName)
|
||||||
driveItem.SetFile(models.NewFile())
|
driveItem.SetFile(models.NewFile())
|
||||||
newFile, err = onedrive.CreateItem(
|
newFile, err = ac.PostItemInContainer(
|
||||||
ctx,
|
ctx,
|
||||||
gc.Service,
|
|
||||||
driveID,
|
driveID,
|
||||||
targetContainer,
|
targetContainer,
|
||||||
driveItem)
|
driveItem)
|
||||||
@ -1562,19 +1579,14 @@ func runDriveIncrementalTest(
|
|||||||
{
|
{
|
||||||
name: "add permission to new file",
|
name: "add permission to new file",
|
||||||
updateFiles: func(t *testing.T) {
|
updateFiles: func(t *testing.T) {
|
||||||
driveItem := models.NewDriveItem()
|
|
||||||
driveItem.SetName(&newFileName)
|
|
||||||
driveItem.SetFile(models.NewFile())
|
|
||||||
err = onedrive.UpdatePermissions(
|
err = onedrive.UpdatePermissions(
|
||||||
ctx,
|
ctx,
|
||||||
creds,
|
rh,
|
||||||
gc.Service,
|
|
||||||
driveID,
|
driveID,
|
||||||
*newFile.GetId(),
|
ptr.Val(newFile.GetId()),
|
||||||
[]metadata.Permission{writePerm},
|
[]metadata.Permission{writePerm},
|
||||||
[]metadata.Permission{},
|
[]metadata.Permission{},
|
||||||
permissionIDMappings,
|
permissionIDMappings)
|
||||||
)
|
|
||||||
require.NoErrorf(t, err, "adding permission to file %v", clues.ToCore(err))
|
require.NoErrorf(t, err, "adding permission to file %v", clues.ToCore(err))
|
||||||
// no expectedDeets: metadata isn't tracked
|
// no expectedDeets: metadata isn't tracked
|
||||||
},
|
},
|
||||||
@ -1585,13 +1597,9 @@ func runDriveIncrementalTest(
|
|||||||
{
|
{
|
||||||
name: "remove permission from new file",
|
name: "remove permission from new file",
|
||||||
updateFiles: func(t *testing.T) {
|
updateFiles: func(t *testing.T) {
|
||||||
driveItem := models.NewDriveItem()
|
|
||||||
driveItem.SetName(&newFileName)
|
|
||||||
driveItem.SetFile(models.NewFile())
|
|
||||||
err = onedrive.UpdatePermissions(
|
err = onedrive.UpdatePermissions(
|
||||||
ctx,
|
ctx,
|
||||||
creds,
|
rh,
|
||||||
gc.Service,
|
|
||||||
driveID,
|
driveID,
|
||||||
*newFile.GetId(),
|
*newFile.GetId(),
|
||||||
[]metadata.Permission{},
|
[]metadata.Permission{},
|
||||||
@ -1608,13 +1616,9 @@ func runDriveIncrementalTest(
|
|||||||
name: "add permission to container",
|
name: "add permission to container",
|
||||||
updateFiles: func(t *testing.T) {
|
updateFiles: func(t *testing.T) {
|
||||||
targetContainer := containerIDs[container1]
|
targetContainer := containerIDs[container1]
|
||||||
driveItem := models.NewDriveItem()
|
|
||||||
driveItem.SetName(&newFileName)
|
|
||||||
driveItem.SetFile(models.NewFile())
|
|
||||||
err = onedrive.UpdatePermissions(
|
err = onedrive.UpdatePermissions(
|
||||||
ctx,
|
ctx,
|
||||||
creds,
|
rh,
|
||||||
gc.Service,
|
|
||||||
driveID,
|
driveID,
|
||||||
targetContainer,
|
targetContainer,
|
||||||
[]metadata.Permission{writePerm},
|
[]metadata.Permission{writePerm},
|
||||||
@ -1631,13 +1635,9 @@ func runDriveIncrementalTest(
|
|||||||
name: "remove permission from container",
|
name: "remove permission from container",
|
||||||
updateFiles: func(t *testing.T) {
|
updateFiles: func(t *testing.T) {
|
||||||
targetContainer := containerIDs[container1]
|
targetContainer := containerIDs[container1]
|
||||||
driveItem := models.NewDriveItem()
|
|
||||||
driveItem.SetName(&newFileName)
|
|
||||||
driveItem.SetFile(models.NewFile())
|
|
||||||
err = onedrive.UpdatePermissions(
|
err = onedrive.UpdatePermissions(
|
||||||
ctx,
|
ctx,
|
||||||
creds,
|
rh,
|
||||||
gc.Service,
|
|
||||||
driveID,
|
driveID,
|
||||||
targetContainer,
|
targetContainer,
|
||||||
[]metadata.Permission{},
|
[]metadata.Permission{},
|
||||||
@ -1653,9 +1653,8 @@ func runDriveIncrementalTest(
|
|||||||
{
|
{
|
||||||
name: "update contents of a file",
|
name: "update contents of a file",
|
||||||
updateFiles: func(t *testing.T) {
|
updateFiles: func(t *testing.T) {
|
||||||
err := api.PutDriveItemContent(
|
err := suite.ac.Drives().PutItemContent(
|
||||||
ctx,
|
ctx,
|
||||||
gc.Service,
|
|
||||||
driveID,
|
driveID,
|
||||||
ptr.Val(newFile.GetId()),
|
ptr.Val(newFile.GetId()),
|
||||||
[]byte("new content"))
|
[]byte("new content"))
|
||||||
@ -1678,9 +1677,8 @@ func runDriveIncrementalTest(
|
|||||||
parentRef.SetId(&container)
|
parentRef.SetId(&container)
|
||||||
driveItem.SetParentReference(parentRef)
|
driveItem.SetParentReference(parentRef)
|
||||||
|
|
||||||
err := api.PatchDriveItem(
|
err := suite.ac.Drives().PatchItem(
|
||||||
ctx,
|
ctx,
|
||||||
gc.Service,
|
|
||||||
driveID,
|
driveID,
|
||||||
ptr.Val(newFile.GetId()),
|
ptr.Val(newFile.GetId()),
|
||||||
driveItem)
|
driveItem)
|
||||||
@ -1702,9 +1700,8 @@ func runDriveIncrementalTest(
|
|||||||
parentRef.SetId(&dest)
|
parentRef.SetId(&dest)
|
||||||
driveItem.SetParentReference(parentRef)
|
driveItem.SetParentReference(parentRef)
|
||||||
|
|
||||||
err := api.PatchDriveItem(
|
err := suite.ac.Drives().PatchItem(
|
||||||
ctx,
|
ctx,
|
||||||
gc.Service,
|
|
||||||
driveID,
|
driveID,
|
||||||
ptr.Val(newFile.GetId()),
|
ptr.Val(newFile.GetId()),
|
||||||
driveItem)
|
driveItem)
|
||||||
@ -1723,9 +1720,8 @@ func runDriveIncrementalTest(
|
|||||||
{
|
{
|
||||||
name: "delete file",
|
name: "delete file",
|
||||||
updateFiles: func(t *testing.T) {
|
updateFiles: func(t *testing.T) {
|
||||||
err := api.DeleteDriveItem(
|
err := suite.ac.Drives().DeleteItem(
|
||||||
ctx,
|
ctx,
|
||||||
newDeleteServicer(t),
|
|
||||||
driveID,
|
driveID,
|
||||||
ptr.Val(newFile.GetId()))
|
ptr.Val(newFile.GetId()))
|
||||||
require.NoErrorf(t, err, "deleting file %v", clues.ToCore(err))
|
require.NoErrorf(t, err, "deleting file %v", clues.ToCore(err))
|
||||||
@ -1748,9 +1744,8 @@ func runDriveIncrementalTest(
|
|||||||
parentRef.SetId(&parent)
|
parentRef.SetId(&parent)
|
||||||
driveItem.SetParentReference(parentRef)
|
driveItem.SetParentReference(parentRef)
|
||||||
|
|
||||||
err := api.PatchDriveItem(
|
err := suite.ac.Drives().PatchItem(
|
||||||
ctx,
|
ctx,
|
||||||
gc.Service,
|
|
||||||
driveID,
|
driveID,
|
||||||
child,
|
child,
|
||||||
driveItem)
|
driveItem)
|
||||||
@ -1777,9 +1772,8 @@ func runDriveIncrementalTest(
|
|||||||
parentRef.SetId(&parent)
|
parentRef.SetId(&parent)
|
||||||
driveItem.SetParentReference(parentRef)
|
driveItem.SetParentReference(parentRef)
|
||||||
|
|
||||||
err := api.PatchDriveItem(
|
err := suite.ac.Drives().PatchItem(
|
||||||
ctx,
|
ctx,
|
||||||
gc.Service,
|
|
||||||
driveID,
|
driveID,
|
||||||
child,
|
child,
|
||||||
driveItem)
|
driveItem)
|
||||||
@ -1800,9 +1794,8 @@ func runDriveIncrementalTest(
|
|||||||
name: "delete a folder",
|
name: "delete a folder",
|
||||||
updateFiles: func(t *testing.T) {
|
updateFiles: func(t *testing.T) {
|
||||||
container := containerIDs[containerRename]
|
container := containerIDs[containerRename]
|
||||||
err := api.DeleteDriveItem(
|
err := suite.ac.Drives().DeleteItem(
|
||||||
ctx,
|
ctx,
|
||||||
newDeleteServicer(t),
|
|
||||||
driveID,
|
driveID,
|
||||||
container)
|
container)
|
||||||
require.NoError(t, err, "deleting folder", clues.ToCore(err))
|
require.NoError(t, err, "deleting folder", clues.ToCore(err))
|
||||||
@@ -1821,7 +1814,6 @@ func runDriveIncrementalTest(
 		ctx,
 		gc,
 		service,
-		acct,
 		category,
 		sel,
 		atid, roidn.ID(), driveID, container3,
@@ -1834,7 +1826,7 @@ func runDriveIncrementalTest(
 		"https://graph.microsoft.com/v1.0/drives/%s/root:/%s",
 		driveID,
 		container3)
-	resp, err := drives.NewItemItemsDriveItemItemRequestBuilder(itemURL, gc.Service.Adapter()).
+	resp, err := drives.NewItemItemsDriveItemItemRequestBuilder(itemURL, gc.AC.Stable.Adapter()).
 		Get(ctx, nil)
 	require.NoError(t, err, "getting drive folder ID", "folder name", container3, clues.ToCore(err))
 
@@ -1928,7 +1920,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveOwnerMigration() {
 		connector.Users)
 	require.NoError(t, err, clues.ToCore(err))
 
-	userable, err := gc.Discovery.Users().GetByID(ctx, suite.user)
+	userable, err := gc.AC.Users().GetByID(ctx, suite.user)
 	require.NoError(t, err, clues.ToCore(err))
 
 	uid := ptr.Val(userable.GetId())
@@ -2046,19 +2038,3 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_sharePoint() {
 	runAndCheckBackup(t, ctx, &bo, mb, false)
 	checkBackupIsInManifests(t, ctx, kw, &bo, sels, suite.site, path.LibrariesCategory)
 }
-
-// ---------------------------------------------------------------------------
-// helpers
-// ---------------------------------------------------------------------------
-
-func newDeleteServicer(t *testing.T) graph.Servicer {
-	acct := tester.NewM365Account(t)
-
-	m365, err := acct.M365Config()
-	require.NoError(t, err, clues.ToCore(err))
-
-	a, err := graph.CreateAdapter(acct.ID(), m365.AzureClientID, m365.AzureClientSecret)
-	require.NoError(t, err, clues.ToCore(err))
-
-	return graph.NewService(a)
-}
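
The newDeleteServicer helper disappears because Drives.DeleteItem now constructs its own one-off service internally (deletes still need unique http clients, per corso issue 2707, as the comments later in this diff note). A sketch of the replacement call, assuming an api.Client ac:

	// sketch: callers no longer supply a dedicated servicer for deletes
	func deleteItem(ctx context.Context, ac api.Client, driveID, itemID string) error {
		return ac.Drives().DeleteItem(ctx, driveID, itemID)
	}
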
@@ -727,6 +727,8 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 	itemParents1, err := path.GetDriveFolderPath(itemPath1)
 	require.NoError(suite.T(), err, clues.ToCore(err))
 
+	itemParents1String := itemParents1.String()
+
 	table := []struct {
 		name            string
 		populatedModels map[model.StableID]backup.Backup
@@ -899,7 +901,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 					ItemInfo: details.ItemInfo{
 						OneDrive: &details.OneDriveInfo{
 							ItemType:   details.OneDriveItem,
-							ParentPath: itemParents1,
+							ParentPath: itemParents1String,
 							Size:       42,
 						},
 					},
@@ -7,7 +7,6 @@ import (
 	"github.com/alcionai/corso/src/internal/common/prefixmatcher"
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/model"
-	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/backup"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/control"
@@ -27,7 +26,7 @@ type (
 		lastBackupVersion int,
 		ctrlOpts control.Options,
 		errs *fault.Bus,
-	) ([]data.BackupCollection, prefixmatcher.StringSetReader, error)
+	) ([]data.BackupCollection, prefixmatcher.StringSetReader, bool, error)
 	IsBackupRunnable(ctx context.Context, service path.ServiceType, resourceOwner string) (bool, error)
 
 	Wait() *data.CollectionStats
@@ -37,7 +36,6 @@ type (
 	ConsumeRestoreCollections(
 		ctx context.Context,
 		backupVersion int,
-		acct account.Account,
 		selector selectors.Selector,
 		dest control.RestoreDestination,
 		opts control.Options,
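
With account threading removed from the inject interfaces, restore consumers resolve tenant identity themselves. A sketch of the trimmed call shape, assuming rc satisfies the updated RestoreConsumer interface and the remaining arguments (including the collection slice and fault bus, whose declarations fall outside this hunk) are in scope:

	// sketch: the account.Account argument is gone from the signature
	deets, err := rc.ConsumeRestoreCollections(
		ctx,
		backupVersion,
		selector,
		dest,
		opts,
		dcs,  // assumed []data.RestoreCollection
		errs) // assumed *fault.Bus
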
@@ -553,7 +553,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
 			mr: mockManifestRestorer{
 				mockRestoreProducer: mockRestoreProducer{
 					collsByID: map[string][]data.RestoreCollection{
-						"id": {data.NotFoundRestoreCollection{Collection: mockColl{id: "id_coll"}}},
+						"id": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id_coll"}}},
 					},
 				},
 			mans: []kopia.ManifestEntry{makeMan(path.EmailCategory, "id", "", "")},
@@ -580,8 +580,8 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
 			mr: mockManifestRestorer{
 				mockRestoreProducer: mockRestoreProducer{
 					collsByID: map[string][]data.RestoreCollection{
-						"id":        {data.NotFoundRestoreCollection{Collection: mockColl{id: "id_coll"}}},
-						"incmpl_id": {data.NotFoundRestoreCollection{Collection: mockColl{id: "incmpl_id_coll"}}},
+						"id":        {data.NoFetchRestoreCollection{Collection: mockColl{id: "id_coll"}}},
+						"incmpl_id": {data.NoFetchRestoreCollection{Collection: mockColl{id: "incmpl_id_coll"}}},
 					},
 				},
 			mans: []kopia.ManifestEntry{
@@ -600,7 +600,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
 			mr: mockManifestRestorer{
 				mockRestoreProducer: mockRestoreProducer{
 					collsByID: map[string][]data.RestoreCollection{
-						"id": {data.NotFoundRestoreCollection{Collection: mockColl{id: "id_coll"}}},
+						"id": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id_coll"}}},
 					},
 				},
 			mans: []kopia.ManifestEntry{makeMan(path.EmailCategory, "id", "", "bid")},
@@ -616,8 +616,8 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
 			mr: mockManifestRestorer{
 				mockRestoreProducer: mockRestoreProducer{
 					collsByID: map[string][]data.RestoreCollection{
-						"mail":    {data.NotFoundRestoreCollection{Collection: mockColl{id: "mail_coll"}}},
-						"contact": {data.NotFoundRestoreCollection{Collection: mockColl{id: "contact_coll"}}},
+						"mail":    {data.NoFetchRestoreCollection{Collection: mockColl{id: "mail_coll"}}},
+						"contact": {data.NoFetchRestoreCollection{Collection: mockColl{id: "contact_coll"}}},
 					},
 				},
 			mans: []kopia.ManifestEntry{
@@ -681,7 +681,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
 			for _, dc := range dcs {
 				if !assert.IsTypef(
 					t,
-					data.NotFoundRestoreCollection{},
+					data.NoFetchRestoreCollection{},
 					dc,
 					"unexpected type returned [%T]",
 					dc,
@@ -689,7 +689,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
 					continue
 				}
 
-				tmp := dc.(data.NotFoundRestoreCollection)
+				tmp := dc.(data.NoFetchRestoreCollection)
 
 				if !assert.IsTypef(
 					t,
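
These fixtures track the data-package rename of NotFoundRestoreCollection to NoFetchRestoreCollection; the wrapper's composition is unchanged, the new name just better reflects that wrapped items cannot be fetched individually. A sketch, assuming coll is any data.Collection:

	// sketch: same wrapping as before, new type name
	rc := data.NoFetchRestoreCollection{Collection: coll}
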
@@ -36,13 +36,13 @@ type RestoreOperation struct {
 	operation
 
 	BackupID    model.StableID             `json:"backupID"`
+	Destination control.RestoreDestination `json:"destination"`
 	Results     RestoreResults             `json:"results"`
 	Selectors   selectors.Selector         `json:"selectors"`
-	Destination control.RestoreDestination `json:"destination"`
 	Version     string                     `json:"version"`
 
-	account account.Account
+	acct account.Account
 	rc   inject.RestoreConsumer
 }
 
 // RestoreResults aggregate the details of the results of the operation.
@@ -66,11 +66,11 @@ func NewRestoreOperation(
 ) (RestoreOperation, error) {
 	op := RestoreOperation{
 		operation:   newOperation(opts, bus, kw, sw),
+		acct:        acct,
 		BackupID:    backupID,
-		Selectors:   sel,
 		Destination: dest,
+		Selectors:   sel,
 		Version:     "v0",
-		account:     acct,
 		rc:          rc,
 	}
 	if err := op.validate(); err != nil {
@@ -116,7 +116,7 @@ func (op *RestoreOperation) Run(ctx context.Context) (restoreDetails *details.De
 			restoreID: uuid.NewString(),
 		}
 		start  = time.Now()
-		sstore = streamstore.NewStreamer(op.kopia, op.account.ID(), op.Selectors.PathService())
+		sstore = streamstore.NewStreamer(op.kopia, op.acct.ID(), op.Selectors.PathService())
 	)
 
 	// -----
@@ -135,7 +135,7 @@ func (op *RestoreOperation) Run(ctx context.Context) (restoreDetails *details.De
 
 	ctx = clues.Add(
 		ctx,
-		"tenant_id", clues.Hide(op.account.ID()),
+		"tenant_id", clues.Hide(op.acct.ID()),
 		"backup_id", op.BackupID,
 		"service", op.Selectors.Service,
 		"destination_container", clues.Hide(op.Destination.ContainerName))
@@ -256,7 +256,6 @@ func (op *RestoreOperation) do(
 		ctx,
 		op.rc,
 		bup.Version,
-		op.account,
 		op.Selectors,
 		op.Destination,
 		op.Options,
@@ -314,7 +313,6 @@ func consumeRestoreCollections(
 	ctx context.Context,
 	rc inject.RestoreConsumer,
 	backupVersion int,
-	acct account.Account,
 	sel selectors.Selector,
 	dest control.RestoreDestination,
 	opts control.Options,
@@ -330,7 +328,6 @@ func consumeRestoreCollections(
 	deets, err := rc.ConsumeRestoreCollections(
 		ctx,
 		backupVersion,
-		acct,
 		sel,
 		dest,
 		opts,
@@ -15,7 +15,6 @@ import (
 	"github.com/alcionai/corso/src/internal/connector"
 	"github.com/alcionai/corso/src/internal/connector/exchange"
 	exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
-	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/connector/mock"
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/events"
@@ -50,7 +49,6 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() {
 		kw   = &kopia.Wrapper{}
 		sw   = &store.Wrapper{}
 		gc   = &mock.GraphConnector{}
-		acct = account.Account{}
 		now  = time.Now()
 		dest = tester.DefaultTestRestoreDestination("")
 	)
@@ -70,7 +68,7 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() {
 				NumBytes: 42,
 			},
 			cs: []data.RestoreCollection{
-				data.NotFoundRestoreCollection{
+				data.NoFetchRestoreCollection{
 					Collection: &exchMock.DataCollection{},
 				},
 			},
@@ -112,7 +110,7 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() {
 				kw,
 				sw,
 				gc,
-				acct,
+				account.Account{},
 				"foo",
 				selectors.Selector{DiscreteOwner: "test"},
 				dest,
@@ -220,7 +218,6 @@ func (suite *RestoreOpIntegrationSuite) TestNewRestoreOperation() {
 		kw   = &kopia.Wrapper{}
 		sw   = &store.Wrapper{}
 		gc   = &mock.GraphConnector{}
-		acct = tester.NewM365Account(suite.T())
 		dest = tester.DefaultTestRestoreDestination("")
 		opts = control.Defaults()
 	)
@@ -230,18 +227,19 @@ func (suite *RestoreOpIntegrationSuite) TestNewRestoreOperation() {
 		kw       *kopia.Wrapper
 		sw       *store.Wrapper
 		rc       inject.RestoreConsumer
-		acct     account.Account
 		targets  []string
 		errCheck assert.ErrorAssertionFunc
 	}{
-		{"good", kw, sw, gc, acct, nil, assert.NoError},
-		{"missing kopia", nil, sw, gc, acct, nil, assert.Error},
-		{"missing modelstore", kw, nil, gc, acct, nil, assert.Error},
-		{"missing restore consumer", kw, sw, nil, acct, nil, assert.Error},
+		{"good", kw, sw, gc, nil, assert.NoError},
+		{"missing kopia", nil, sw, gc, nil, assert.Error},
+		{"missing modelstore", kw, nil, gc, nil, assert.Error},
+		{"missing restore consumer", kw, sw, nil, nil, assert.Error},
 	}
 	for _, test := range table {
 		suite.Run(test.name, func() {
-			ctx, flush := tester.NewContext(suite.T())
+			t := suite.T()
+
+			ctx, flush := tester.NewContext(t)
 			defer flush()
 
 			_, err := NewRestoreOperation(
@@ -250,12 +248,12 @@ func (suite *RestoreOpIntegrationSuite) TestNewRestoreOperation() {
 				test.kw,
 				test.sw,
 				test.rc,
-				test.acct,
+				tester.NewM365Account(t),
 				"backup-id",
 				selectors.Selector{DiscreteOwner: "test"},
 				dest,
 				evmock.NewBus())
-			test.errCheck(suite.T(), err, clues.ToCore(err))
+			test.errCheck(t, err, clues.ToCore(err))
 		})
 	}
 }
@@ -346,18 +344,7 @@ func setupSharePointBackup(
 		evmock.NewBus())
 	require.NoError(t, err, clues.ToCore(err))
 
-	// get the count of drives
-	m365, err := acct.M365Config()
-	require.NoError(t, err, clues.ToCore(err))
-
-	adpt, err := graph.CreateAdapter(
-		m365.AzureTenantID,
-		m365.AzureClientID,
-		m365.AzureClientSecret)
-	require.NoError(t, err, clues.ToCore(err))
-
-	service := graph.NewService(adpt)
-
-	spPgr := api.NewSiteDrivePager(service, owner, []string{"id", "name"})
+	spPgr := gc.AC.Drives().NewSiteDrivePager(owner, []string{"id", "name"})
 
 	drives, err := api.GetAllDrives(ctx, spPgr, true, 3)
 	require.NoError(t, err, clues.ToCore(err))
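
setupSharePointBackup now reuses the connector's embedded api client instead of hand-assembling an adapter and service. A sketch of the pager-plus-enumeration flow, assuming gc.AC is an initialized api.Client and owner names the site:

	// sketch: build a site drive pager, then enumerate with up to 3 retries
	pgr := gc.AC.Drives().NewSiteDrivePager(owner, []string{"id", "name"})
	drives, err := api.GetAllDrives(ctx, pgr, true, 3)
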
@@ -30,13 +30,13 @@ func ToDrivePath(p Path) (*DrivePath, error) {
 }
 
 // Returns the path to the folder within the drive (i.e. under `root:`)
-func GetDriveFolderPath(p Path) (string, error) {
+func GetDriveFolderPath(p Path) (*Builder, error) {
 	drivePath, err := ToDrivePath(p)
 	if err != nil {
-		return "", err
+		return nil, err
 	}
 
-	return Builder{}.Append(drivePath.Folders...).String(), nil
+	return Builder{}.Append(drivePath.Folders...), nil
 }
 
 // BuildDriveLocation takes a driveID and a set of unescaped element names,
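
GetDriveFolderPath now returns a *path.Builder so callers can keep composing elements and render a string only at the edge, as the earlier test hunk does with itemParents1.String(). A sketch, assuming p is a valid drive item path.Path:

	// sketch: defer string rendering until a string is actually needed
	func parentFolderPath(p path.Path) (string, error) {
		folders, err := path.GetDriveFolderPath(p)
		if err != nil {
			return "", err
		}

		return folders.String(), nil
	}
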
@@ -450,8 +450,7 @@ func (pb Builder) ToDataLayerPath(
 			tenant,
 			service.String(),
 			user,
-			category.String(),
-		),
+			category.String()),
 		service:  service,
 		category: category,
 		hasItem:  isItem,
@@ -1,6 +1,9 @@
 package api
 
 import (
+	"context"
+	"net/http"
+
 	"github.com/alcionai/clues"
 
 	"github.com/alcionai/corso/src/internal/connector/graph"
@@ -27,6 +30,11 @@ type Client struct {
 	// downloading large items such as drive item content or outlook
 	// mail and event attachments.
 	LargeItem graph.Servicer
+
+	// The Requester provides a client specifically for calling
+	// arbitrary urls instead of constructing queries using the
+	// graph api client.
+	Requester graph.Requester
 }
 
 // NewClient produces a new exchange api client. Must be used in
@@ -42,7 +50,9 @@ func NewClient(creds account.M365Config) (Client, error) {
 		return Client{}, err
 	}
 
-	return Client{creds, s, li}, nil
+	rqr := graph.NewNoTimeoutHTTPWrapper()
+
+	return Client{creds, s, li, rqr}, nil
 }
 
 // Service generates a new graph servicer. New servicers are used for paged
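
NewClient now wires a fourth member, a no-timeout Requester, alongside the stable and large-item servicers, so raw-URL calls share the client's credentials. A construction sketch, assuming creds is a valid account.M365Config:

	// sketch: one constructor yields query, large-item, and raw-request access
	func buildClient(creds account.M365Config) (api.Client, error) {
		return api.NewClient(creds)
	}
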
@@ -75,3 +85,20 @@ func newLargeItemService(creds account.M365Config) (*graph.Service, error) {
 
 	return a, nil
 }
+
+type Getter interface {
+	Get(
+		ctx context.Context,
+		url string,
+		headers map[string]string,
+	) (*http.Response, error)
+}
+
+// Get performs an ad-hoc get request using its graph.Requester
+func (c Client) Get(
+	ctx context.Context,
+	url string,
+	headers map[string]string,
+) (*http.Response, error) {
+	return c.Requester.Request(ctx, http.MethodGet, url, nil, headers)
+}
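
The new Getter interface and Client.Get wrapper cover endpoints the SDK query builders don't model, such as pre-built download URLs. A sketch, assuming ac is an api.Client and rawURL is a graph URL the caller is authorized for:

	// sketch: ad-hoc GET through the client's Requester; caller owns the body
	resp, err := ac.Get(ctx, rawURL, nil) // nil: no extra headers
	if err != nil {
		return err
	}
	defer resp.Body.Close()
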
@@ -10,14 +10,12 @@ import (
 	"github.com/stretchr/testify/suite"
 
 	exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
-	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/account"
 )
 
 type ExchangeServiceSuite struct {
 	tester.Suite
-	gs          graph.Servicer
 	credentials account.M365Config
 }
 
@@ -38,14 +36,6 @@ func (suite *ExchangeServiceSuite) SetupSuite() {
 	require.NoError(t, err, clues.ToCore(err))
 
 	suite.credentials = m365
-
-	adpt, err := graph.CreateAdapter(
-		m365.AzureTenantID,
-		m365.AzureClientID,
-		m365.AzureClientSecret)
-	require.NoError(t, err, clues.ToCore(err))
-
-	suite.gs = graph.NewService(adpt)
 }
 
 //nolint:lll
@@ -296,9 +296,8 @@ type contactPager struct {
 	options *users.ItemContactFoldersItemContactsRequestBuilderGetRequestConfiguration
 }
 
-func NewContactPager(
+func (c Contacts) NewContactPager(
 	ctx context.Context,
-	gs graph.Servicer,
 	userID, containerID string,
 	immutableIDs bool,
 ) itemPager {
@@ -309,7 +308,7 @@ func NewContactPager(
 		Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)),
 	}
 
-	builder := gs.
+	builder := c.Stable.
 		Client().
 		Users().
 		ByUserId(userID).
@@ -317,7 +316,7 @@ func NewContactPager(
 		ByContactFolderId(containerID).
 		Contacts()
 
-	return &contactPager{gs, builder, config}
+	return &contactPager{c.Stable, builder, config}
 }
 
 func (p *contactPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
@@ -364,9 +363,8 @@ func getContactDeltaBuilder(
 	return builder
 }
 
-func NewContactDeltaPager(
+func (c Contacts) NewContactDeltaPager(
 	ctx context.Context,
-	gs graph.Servicer,
 	userID, containerID, oldDelta string,
 	immutableIDs bool,
 ) itemPager {
@@ -379,12 +377,12 @@ func NewContactDeltaPager(
 
 	var builder *users.ItemContactFoldersItemContactsDeltaRequestBuilder
 	if oldDelta != "" {
-		builder = users.NewItemContactFoldersItemContactsDeltaRequestBuilder(oldDelta, gs.Adapter())
+		builder = users.NewItemContactFoldersItemContactsDeltaRequestBuilder(oldDelta, c.Stable.Adapter())
 	} else {
-		builder = getContactDeltaBuilder(ctx, gs, userID, containerID, options)
+		builder = getContactDeltaBuilder(ctx, c.Stable, userID, containerID, options)
 	}
 
-	return &contactDeltaPager{gs, userID, containerID, builder, options}
+	return &contactDeltaPager{c.Stable, userID, containerID, builder, options}
 }
 
 func (p *contactDeltaPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
@@ -419,8 +417,8 @@ func (c Contacts) GetAddedAndRemovedItemIDs(
 		"category", selectors.ExchangeContact,
 		"container_id", containerID)
 
-	pager := NewContactPager(ctx, c.Stable, userID, containerID, immutableIDs)
-	deltaPager := NewContactDeltaPager(ctx, c.Stable, userID, containerID, oldDelta, immutableIDs)
+	pager := c.NewContactPager(ctx, userID, containerID, immutableIDs)
+	deltaPager := c.NewContactDeltaPager(ctx, userID, containerID, oldDelta, immutableIDs)
 
 	return getAddedAndRemovedItemIDs(ctx, c.Stable, pager, deltaPager, oldDelta, canMakeDeltaQueries)
 }
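
Hanging the pager constructors off the Contacts client removes the graph.Servicer parameter from every call site; the client's Stable servicer is used implicitly. A sketch of the updated construction, assuming c is the Contacts client with userID, containerID, oldDelta, and immutableIDs in scope:

	// sketch: both pagers now come from the client itself
	pager := c.NewContactPager(ctx, userID, containerID, immutableIDs)
	deltaPager := c.NewContactDeltaPager(ctx, userID, containerID, oldDelta, immutableIDs)
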
@@ -9,184 +9,41 @@ import (
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
 
 	"github.com/alcionai/corso/src/internal/connector/graph"
-	"github.com/alcionai/corso/src/pkg/account"
 )
 
 // ---------------------------------------------------------------------------
-// Drives
+// controller
 // ---------------------------------------------------------------------------
 
-func GetUsersDrive(
-	ctx context.Context,
-	srv graph.Servicer,
-	user string,
-) (models.Driveable, error) {
-	d, err := srv.Client().
-		Users().
-		ByUserId(user).
-		Drive().
-		Get(ctx, nil)
-	if err != nil {
-		return nil, graph.Wrap(ctx, err, "getting user's drive")
-	}
-
-	return d, nil
+func (c Client) Drives() Drives {
+	return Drives{c}
 }
 
-func GetSitesDefaultDrive(
-	ctx context.Context,
-	srv graph.Servicer,
-	site string,
-) (models.Driveable, error) {
-	d, err := srv.Client().
-		Sites().
-		BySiteId(site).
-		Drive().
-		Get(ctx, nil)
-	if err != nil {
-		return nil, graph.Wrap(ctx, err, "getting site's drive")
-	}
-
-	return d, nil
-}
-
-func GetDriveRoot(
-	ctx context.Context,
-	srv graph.Servicer,
-	driveID string,
-) (models.DriveItemable, error) {
-	root, err := srv.Client().
-		Drives().
-		ByDriveId(driveID).
-		Root().
-		Get(ctx, nil)
-	if err != nil {
-		return nil, graph.Wrap(ctx, err, "getting drive root")
-	}
-
-	return root, nil
+// Drives is an interface-compliant provider of the client.
+type Drives struct {
+	Client
 }
 
 // ---------------------------------------------------------------------------
-// Drive Items
+// Folders
 // ---------------------------------------------------------------------------
 
-// generic drive item getter
-func GetDriveItem(
-	ctx context.Context,
-	srv graph.Servicer,
-	driveID, itemID string,
-) (models.DriveItemable, error) {
-	di, err := srv.Client().
-		Drives().
-		ByDriveId(driveID).
-		Items().
-		ByDriveItemId(itemID).
-		Get(ctx, nil)
-	if err != nil {
-		return nil, graph.Wrap(ctx, err, "getting item")
-	}
-
-	return di, nil
-}
-
-func PostDriveItem(
-	ctx context.Context,
-	srv graph.Servicer,
-	driveID, itemID string,
-) (models.UploadSessionable, error) {
-	session := drives.NewItemItemsItemCreateUploadSessionPostRequestBody()
-
-	r, err := srv.Client().
-		Drives().
-		ByDriveId(driveID).
-		Items().
-		ByDriveItemId(itemID).
-		CreateUploadSession().
-		Post(ctx, session, nil)
-	if err != nil {
-		return nil, graph.Wrap(ctx, err, "uploading drive item")
-	}
-
-	return r, nil
-}
-
-func PatchDriveItem(
-	ctx context.Context,
-	srv graph.Servicer,
-	driveID, itemID string,
-	item models.DriveItemable,
-) error {
-	_, err := srv.Client().
-		Drives().
-		ByDriveId(driveID).
-		Items().
-		ByDriveItemId(itemID).
-		Patch(ctx, item, nil)
-	if err != nil {
-		return graph.Wrap(ctx, err, "patching drive item")
-	}
-
-	return nil
-}
-
-func PutDriveItemContent(
-	ctx context.Context,
-	srv graph.Servicer,
-	driveID, itemID string,
-	content []byte,
-) error {
-	_, err := srv.Client().
-		Drives().
-		ByDriveId(driveID).
-		Items().
-		ByDriveItemId(itemID).
-		Content().
-		Put(ctx, content, nil)
-	if err != nil {
-		return graph.Wrap(ctx, err, "uploading drive item content")
-	}
-
-	return nil
-}
-
-// deletes require unique http clients
-// https://github.com/alcionai/corso/issues/2707
-func DeleteDriveItem(
-	ctx context.Context,
-	gs graph.Servicer,
-	driveID, itemID string,
-) error {
-	err := gs.Client().
-		Drives().
-		ByDriveId(driveID).
-		Items().
-		ByDriveItemId(itemID).
-		Delete(ctx, nil)
-	if err != nil {
-		return graph.Wrap(ctx, err, "deleting item").With("item_id", itemID)
-	}
-
-	return nil
-}
-
 const itemByPathRawURLFmt = "https://graph.microsoft.com/v1.0/drives/%s/items/%s:/%s"
 
 var ErrFolderNotFound = clues.New("folder not found")
 
 // GetFolderByName will lookup the specified folder by name within the parentFolderID folder.
-func GetFolderByName(
+func (c Drives) GetFolderByName(
 	ctx context.Context,
-	srv graph.Servicer,
-	driveID, parentFolderID, folder string,
+	driveID, parentFolderID, folderID string,
 ) (models.DriveItemable, error) {
 	// The `Children().Get()` API doesn't yet support $filter, so using that to find a folder
 	// will be sub-optimal.
 	// Instead, we leverage OneDrive path-based addressing -
 	// https://learn.microsoft.com/en-us/graph/onedrive-addressing-driveitems#path-based-addressing
 	// - which allows us to lookup an item by its path relative to the parent ID
-	rawURL := fmt.Sprintf(itemByPathRawURLFmt, driveID, parentFolderID, folder)
-	builder := drives.NewItemItemsDriveItemItemRequestBuilder(rawURL, srv.Adapter())
+	rawURL := fmt.Sprintf(itemByPathRawURLFmt, driveID, parentFolderID, folderID)
+	builder := drives.NewItemItemsDriveItemItemRequestBuilder(rawURL, c.Stable.Adapter())
 
 	foundItem, err := builder.Get(ctx, nil)
 	if err != nil {
@@ -205,16 +62,163 @@ func GetFolderByName(
 	return foundItem, nil
 }
 
+func (c Drives) GetRootFolder(
+	ctx context.Context,
+	driveID string,
+) (models.DriveItemable, error) {
+	root, err := c.Stable.
+		Client().
+		Drives().
+		ByDriveId(driveID).
+		Root().
+		Get(ctx, nil)
+	if err != nil {
+		return nil, graph.Wrap(ctx, err, "getting drive root")
+	}
+
+	return root, nil
+}
+
+// ---------------------------------------------------------------------------
+// Items
+// ---------------------------------------------------------------------------
+
+// generic drive item getter
+func (c Drives) GetItem(
+	ctx context.Context,
+	driveID, itemID string,
+) (models.DriveItemable, error) {
+	di, err := c.Stable.
+		Client().
+		Drives().
+		ByDriveId(driveID).
+		Items().
+		ByDriveItemId(itemID).
+		Get(ctx, nil)
+	if err != nil {
+		return nil, graph.Wrap(ctx, err, "getting item")
+	}
+
+	return di, nil
+}
+
+func (c Drives) NewItemContentUpload(
+	ctx context.Context,
+	driveID, itemID string,
+) (models.UploadSessionable, error) {
+	session := drives.NewItemItemsItemCreateUploadSessionPostRequestBody()
+
+	r, err := c.Stable.
+		Client().
+		Drives().
+		ByDriveId(driveID).
+		Items().
+		ByDriveItemId(itemID).
+		CreateUploadSession().
+		Post(ctx, session, nil)
+	if err != nil {
+		return nil, graph.Wrap(ctx, err, "uploading drive item")
+	}
+
+	return r, nil
+}
+
+const itemChildrenRawURLFmt = "https://graph.microsoft.com/v1.0/drives/%s/items/%s/children"
+
+// PostItemInContainer creates a new item in the specified folder
+func (c Drives) PostItemInContainer(
+	ctx context.Context,
+	driveID, parentFolderID string,
+	newItem models.DriveItemable,
+) (models.DriveItemable, error) {
+	// Graph SDK doesn't yet provide a POST method for `/children` so we set the `rawUrl` ourselves as recommended
+	// here: https://github.com/microsoftgraph/msgraph-sdk-go/issues/155#issuecomment-1136254310
+	rawURL := fmt.Sprintf(itemChildrenRawURLFmt, driveID, parentFolderID)
+	builder := drives.NewItemItemsRequestBuilder(rawURL, c.Stable.Adapter())
+
+	newItem, err := builder.Post(ctx, newItem, nil)
+	if err != nil {
+		return nil, graph.Wrap(ctx, err, "creating item in folder")
+	}
+
+	return newItem, nil
+}
+
+func (c Drives) PatchItem(
+	ctx context.Context,
+	driveID, itemID string,
+	item models.DriveItemable,
+) error {
+	_, err := c.Stable.
+		Client().
+		Drives().
+		ByDriveId(driveID).
+		Items().
+		ByDriveItemId(itemID).
+		Patch(ctx, item, nil)
+	if err != nil {
+		return graph.Wrap(ctx, err, "patching drive item")
+	}
+
+	return nil
+}
+
+func (c Drives) PutItemContent(
+	ctx context.Context,
+	driveID, itemID string,
+	content []byte,
+) error {
+	_, err := c.Stable.
+		Client().
+		Drives().
+		ByDriveId(driveID).
+		Items().
+		ByDriveItemId(itemID).
+		Content().
+		Put(ctx, content, nil)
+	if err != nil {
+		return graph.Wrap(ctx, err, "uploading drive item content")
+	}
+
+	return nil
+}
+
+// deletes require unique http clients
+// https://github.com/alcionai/corso/issues/2707
+func (c Drives) DeleteItem(
+	ctx context.Context,
+	driveID, itemID string,
+) error {
+	// deletes require unique http clients
+	// https://github.com/alcionai/corso/issues/2707
+	srv, err := c.Service()
+	if err != nil {
+		return graph.Wrap(ctx, err, "creating adapter to delete item permission")
+	}
+
+	err = srv.
+		Client().
+		Drives().
+		ByDriveId(driveID).
+		Items().
+		ByDriveItemId(itemID).
+		Delete(ctx, nil)
+	if err != nil {
+		return graph.Wrap(ctx, err, "deleting item").With("item_id", itemID)
+	}
+
+	return nil
+}
+
 // ---------------------------------------------------------------------------
 // Permissions
 // ---------------------------------------------------------------------------
 
-func GetItemPermission(
+func (c Drives) GetItemPermission(
 	ctx context.Context,
-	service graph.Servicer,
 	driveID, itemID string,
 ) (models.PermissionCollectionResponseable, error) {
-	perm, err := service.
+	perm, err := c.Stable.
 		Client().
 		Drives().
 		ByDriveId(driveID).
@@ -229,15 +233,15 @@ func GetItemPermission(
 	return perm, nil
 }
 
-func PostItemPermissionUpdate(
+func (c Drives) PostItemPermissionUpdate(
 	ctx context.Context,
-	service graph.Servicer,
 	driveID, itemID string,
 	body *drives.ItemItemsItemInvitePostRequestBody,
 ) (drives.ItemItemsItemInviteResponseable, error) {
 	ctx = graph.ConsumeNTokens(ctx, graph.PermissionsLC)
 
-	itm, err := service.Client().
+	itm, err := c.Stable.
+		Client().
 		Drives().
 		ByDriveId(driveID).
 		Items().
@@ -251,17 +255,18 @@ func PostItemPermissionUpdate(
 	return itm, nil
 }
 
-func DeleteDriveItemPermission(
+func (c Drives) DeleteItemPermission(
 	ctx context.Context,
-	creds account.M365Config,
 	driveID, itemID, permissionID string,
 ) error {
-	a, err := graph.CreateAdapter(creds.AzureTenantID, creds.AzureClientID, creds.AzureClientSecret)
+	// deletes require unique http clients
+	// https://github.com/alcionai/corso/issues/2707
+	srv, err := c.Service()
 	if err != nil {
 		return graph.Wrap(ctx, err, "creating adapter to delete item permission")
 	}
 
-	err = graph.NewService(a).
+	err = srv.
 		Client().
 		Drives().
 		ByDriveId(driveID).
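
The free drive helpers collapse into methods on a Drives struct reached through Client.Drives(), so a single value carries the credentials, servicers, and the delete-specific client handling. A sketch of the consolidated surface, assuming an initialized api.Client ac and a models.DriveItemable item to apply:

	// sketch: every drive-item call hangs off the same sub-client
	func updateItem(ctx context.Context, ac api.Client, driveID, itemID string, item models.DriveItemable) error {
		drv := ac.Drives()

		if _, err := drv.GetItem(ctx, driveID, itemID); err != nil {
			return err
		}

		return drv.PatchItem(ctx, driveID, itemID, item)
	}
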
@@ -21,18 +21,26 @@ import (
 // item pager
 // ---------------------------------------------------------------------------
 
-type driveItemPager struct {
+type DriveItemEnumerator interface {
+	GetPage(context.Context) (DeltaPageLinker, error)
+	SetNext(nextLink string)
+	Reset()
+	ValuesIn(DeltaPageLinker) ([]models.DriveItemable, error)
+}
+
+var _ DriveItemEnumerator = &DriveItemPager{}
+
+type DriveItemPager struct {
 	gs      graph.Servicer
 	driveID string
 	builder *drives.ItemItemsItemDeltaRequestBuilder
 	options *drives.ItemItemsItemDeltaRequestBuilderGetRequestConfiguration
 }
 
-func NewItemPager(
-	gs graph.Servicer,
+func (c Drives) NewItemPager(
 	driveID, link string,
 	selectFields []string,
-) *driveItemPager {
+) *DriveItemPager {
 	preferHeaderItems := []string{
 		"deltashowremovedasdeleted",
 		"deltatraversepermissiongaps",
@@ -48,24 +56,25 @@ func NewItemPager(
 		},
 	}
 
-	res := &driveItemPager{
-		gs:      gs,
+	res := &DriveItemPager{
+		gs:      c.Stable,
 		driveID: driveID,
 		options: requestConfig,
-		builder: gs.Client().
+		builder: c.Stable.
+			Client().
 			Drives().
 			ByDriveId(driveID).
 			Items().ByDriveItemId(onedrive.RootID).Delta(),
 	}
 
 	if len(link) > 0 {
-		res.builder = drives.NewItemItemsItemDeltaRequestBuilder(link, gs.Adapter())
+		res.builder = drives.NewItemItemsItemDeltaRequestBuilder(link, c.Stable.Adapter())
 	}
 
 	return res
 }
 
-func (p *driveItemPager) GetPage(ctx context.Context) (DeltaPageLinker, error) {
+func (p *DriveItemPager) GetPage(ctx context.Context) (DeltaPageLinker, error) {
 	var (
 		resp DeltaPageLinker
 		err  error
@@ -79,11 +88,11 @@ func (p *driveItemPager) GetPage(ctx context.Context) (DeltaPageLinker, error) {
 	return resp, nil
 }
 
-func (p *driveItemPager) SetNext(link string) {
+func (p *DriveItemPager) SetNext(link string) {
 	p.builder = drives.NewItemItemsItemDeltaRequestBuilder(link, p.gs.Adapter())
 }
 
-func (p *driveItemPager) Reset() {
+func (p *DriveItemPager) Reset() {
 	p.builder = p.gs.Client().
 		Drives().
 		ByDriveId(p.driveID).
@@ -92,7 +101,7 @@ func (p *driveItemPager) Reset() {
 		Delta()
 }
 
-func (p *driveItemPager) ValuesIn(l DeltaPageLinker) ([]models.DriveItemable, error) {
+func (p *DriveItemPager) ValuesIn(l DeltaPageLinker) ([]models.DriveItemable, error) {
 	return getValues[models.DriveItemable](l)
 }
 
@@ -100,6 +109,8 @@ func (p *driveItemPager) ValuesIn(l DeltaPageLinker) ([]models.DriveItemable, er
 // user pager
 // ---------------------------------------------------------------------------
 
+var _ DrivePager = &userDrivePager{}
+
 type userDrivePager struct {
 	userID  string
 	gs      graph.Servicer
@@ -107,8 +118,7 @@ type userDrivePager struct {
 	options *users.ItemDrivesRequestBuilderGetRequestConfiguration
 }
 
-func NewUserDrivePager(
-	gs graph.Servicer,
+func (c Drives) NewUserDrivePager(
 	userID string,
 	fields []string,
 ) *userDrivePager {
@@ -120,9 +130,13 @@ func NewUserDrivePager(
 
 	res := &userDrivePager{
 		userID:  userID,
-		gs:      gs,
+		gs:      c.Stable,
 		options: requestConfig,
-		builder: gs.Client().Users().ByUserId(userID).Drives(),
+		builder: c.Stable.
+			Client().
+			Users().
+			ByUserId(userID).
+			Drives(),
 	}
 
 	return res
@@ -140,7 +154,12 @@ func (p *userDrivePager) GetPage(ctx context.Context) (PageLinker, error) {
 		err error
 	)
 
-	d, err := p.gs.Client().Users().ByUserId(p.userID).Drive().Get(ctx, nil)
+	d, err := p.gs.
+		Client().
+		Users().
+		ByUserId(p.userID).
+		Drive().
+		Get(ctx, nil)
 	if err != nil {
 		return nil, graph.Stack(ctx, err)
 	}
@@ -180,6 +199,8 @@ func (p *userDrivePager) ValuesIn(l PageLinker) ([]models.Driveable, error) {
 // site pager
 // ---------------------------------------------------------------------------
 
+var _ DrivePager = &siteDrivePager{}
+
 type siteDrivePager struct {
 	gs      graph.Servicer
 	builder *sites.ItemDrivesRequestBuilder
@@ -191,8 +212,7 @@ type siteDrivePager struct {
 // in a query. NOTE: Fields are case-sensitive. Incorrect field settings will
 // cause errors during later paging.
 // Available fields: https://learn.microsoft.com/en-us/graph/api/resources/drive?view=graph-rest-1.0
-func NewSiteDrivePager(
-	gs graph.Servicer,
+func (c Drives) NewSiteDrivePager(
 	siteID string,
 	fields []string,
 ) *siteDrivePager {
@@ -203,9 +223,13 @@ func NewSiteDrivePager(
 	}
 
 	res := &siteDrivePager{
-		gs:      gs,
+		gs:      c.Stable,
 		options: requestConfig,
-		builder: gs.Client().Sites().BySiteId(siteID).Drives(),
+		builder: c.Stable.
+			Client().
+			Sites().
+			BySiteId(siteID).
+			Drives(),
 	}
 
 	return res
@@ -313,7 +337,8 @@ func GetAllDrives(
 func getValues[T any](l PageLinker) ([]T, error) {
 	page, ok := l.(interface{ GetValue() []T })
 	if !ok {
-		return nil, clues.New("page does not comply with GetValue() interface").With("page_item_type", fmt.Sprintf("%T", l))
+		return nil, clues.New("page does not comply with GetValue() interface").
+			With("page_item_type", fmt.Sprintf("%T", l))
 	}
 
 	return page.GetValue(), nil
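
Exporting DriveItemPager and pinning it to the new DriveItemEnumerator interface lets consumers accept the interface and tests substitute fakes. A minimal sketch, assuming pager is any DriveItemEnumerator (real callers also inspect the page's delta and next links, which this sketch omits):

	// sketch: fetch one delta page and collect its items
	func firstPage(ctx context.Context, pager api.DriveItemEnumerator) ([]models.DriveItemable, error) {
		page, err := pager.GetPage(ctx)
		if err != nil {
			return nil, err
		}

		return pager.ValuesIn(page)
	}
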
@@ -8,7 +8,6 @@ import (
 	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"
 
-	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
@@ -16,24 +15,19 @@ import (
 
 type OneDriveAPISuite struct {
 	tester.Suite
 	creds account.M365Config
-	service graph.Servicer
+	ac    api.Client
 }
 
 func (suite *OneDriveAPISuite) SetupSuite() {
 	t := suite.T()
 	a := tester.NewM365Account(t)
-	m365, err := a.M365Config()
+	creds, err := a.M365Config()
 	require.NoError(t, err, clues.ToCore(err))
 
-	suite.creds = m365
-	adpt, err := graph.CreateAdapter(
-		m365.AzureTenantID,
-		m365.AzureClientID,
-		m365.AzureClientSecret)
+	suite.creds = creds
+	suite.ac, err = api.NewClient(creds)
 	require.NoError(t, err, clues.ToCore(err))
-
-	suite.service = graph.NewService(adpt)
 }
 
 func TestOneDriveAPIs(t *testing.T) {
@@ -51,7 +45,8 @@ func (suite *OneDriveAPISuite) TestCreatePagerAndGetPage() {
 	defer flush()
 
 	siteID := tester.M365SiteID(t)
-	pager := api.NewSiteDrivePager(suite.service, siteID, []string{"name"})
+	pager := suite.ac.Drives().NewSiteDrivePager(siteID, []string{"name"})
+
 	a, err := pager.GetPage(ctx)
 	assert.NoError(t, err, clues.ToCore(err))
 	assert.NotNil(t, a)
@ -446,9 +446,8 @@ type eventPager struct {
|
|||||||
options *users.ItemCalendarsItemEventsRequestBuilderGetRequestConfiguration
|
options *users.ItemCalendarsItemEventsRequestBuilderGetRequestConfiguration
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewEventPager(
|
func (c Events) NewEventPager(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
gs graph.Servicer,
|
|
||||||
userID, containerID string,
|
userID, containerID string,
|
||||||
immutableIDs bool,
|
immutableIDs bool,
|
||||||
) (itemPager, error) {
|
) (itemPager, error) {
|
||||||
@ -456,7 +455,7 @@ func NewEventPager(
|
|||||||
Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)),
|
Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)),
|
||||||
}
|
}
|
||||||
|
|
||||||
builder := gs.
|
builder := c.Stable.
|
||||||
Client().
|
Client().
|
||||||
Users().
|
Users().
|
||||||
ByUserId(userID).
|
ByUserId(userID).
|
||||||
@ -464,7 +463,7 @@ func NewEventPager(
|
|||||||
ByCalendarId(containerID).
|
ByCalendarId(containerID).
|
||||||
Events()
|
Events()
|
||||||
|
|
||||||
return &eventPager{gs, builder, options}, nil
|
return &eventPager{c.Stable, builder, options}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *eventPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
|
func (p *eventPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
|
||||||
@ -501,9 +500,8 @@ type eventDeltaPager struct {
|
|||||||
options *users.ItemCalendarsItemEventsDeltaRequestBuilderGetRequestConfiguration
|
options *users.ItemCalendarsItemEventsDeltaRequestBuilderGetRequestConfiguration
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewEventDeltaPager(
|
func (c Events) NewEventDeltaPager(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
gs graph.Servicer,
|
|
||||||
userID, containerID, oldDelta string,
|
userID, containerID, oldDelta string,
|
||||||
immutableIDs bool,
|
immutableIDs bool,
|
||||||
) (itemPager, error) {
|
) (itemPager, error) {
|
||||||
@ -514,12 +512,12 @@ func NewEventDeltaPager(
|
|||||||
var builder *users.ItemCalendarsItemEventsDeltaRequestBuilder
|
var builder *users.ItemCalendarsItemEventsDeltaRequestBuilder
|
||||||
|
|
||||||
if oldDelta == "" {
|
if oldDelta == "" {
|
||||||
builder = getEventDeltaBuilder(ctx, gs, userID, containerID, options)
|
builder = getEventDeltaBuilder(ctx, c.Stable, userID, containerID, options)
|
||||||
} else {
|
} else {
|
||||||
builder = users.NewItemCalendarsItemEventsDeltaRequestBuilder(oldDelta, gs.Adapter())
|
builder = users.NewItemCalendarsItemEventsDeltaRequestBuilder(oldDelta, c.Stable.Adapter())
|
||||||
}
|
}
|
||||||
|
|
||||||
return &eventDeltaPager{gs, userID, containerID, builder, options}, nil
|
return &eventDeltaPager{c.Stable, userID, containerID, builder, options}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func getEventDeltaBuilder(
|
func getEventDeltaBuilder(
|
||||||
@ -571,12 +569,12 @@ func (c Events) GetAddedAndRemovedItemIDs(
|
|||||||
) ([]string, []string, DeltaUpdate, error) {
|
) ([]string, []string, DeltaUpdate, error) {
|
||||||
ctx = clues.Add(ctx, "container_id", containerID)
|
ctx = clues.Add(ctx, "container_id", containerID)
|
||||||
|
|
||||||
pager, err := NewEventPager(ctx, c.Stable, userID, containerID, immutableIDs)
|
pager, err := c.NewEventPager(ctx, userID, containerID, immutableIDs)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, DeltaUpdate{}, graph.Wrap(ctx, err, "creating non-delta pager")
|
return nil, nil, DeltaUpdate{}, graph.Wrap(ctx, err, "creating non-delta pager")
|
||||||
}
|
}
|
||||||
|
|
||||||
deltaPager, err := NewEventDeltaPager(ctx, c.Stable, userID, containerID, oldDelta, immutableIDs)
|
deltaPager, err := c.NewEventDeltaPager(ctx, userID, containerID, oldDelta, immutableIDs)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, DeltaUpdate{}, graph.Wrap(ctx, err, "creating delta pager")
|
return nil, nil, DeltaUpdate{}, graph.Wrap(ctx, err, "creating delta pager")
|
||||||
}
|
}
|
||||||
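
Taken together, these hunks apply one mechanical refactor: the event pager constructors stop accepting a graph.Servicer parameter and become methods on the Events client, which already carries a stable service handle in c.Stable. A minimal, self-contained sketch of that dependency-moves-to-receiver pattern (every name below is illustrative, not Corso's API):

package main

import "fmt"

// servicer stands in for graph.Servicer: the dependency every pager
// constructor used to take as an explicit parameter.
type servicer struct{ name string }

// client mirrors the Events/Mail clients in the hunks above: it owns a
// stable servicer, so constructors hung off it can drop the parameter.
type client struct{ stable servicer }

// Before: a free function threading the dependency through every call.
func newPagerFree(gs servicer, userID string) string {
	return fmt.Sprintf("pager(%s, %s)", gs.name, userID)
}

// After: a method reading the same dependency off the receiver.
func (c client) newPager(userID string) string {
	return fmt.Sprintf("pager(%s, %s)", c.stable.name, userID)
}

func main() {
	c := client{stable: servicer{name: "stable"}}

	fmt.Println(newPagerFree(c.stable, "user@example.com")) // old call site
	fmt.Println(c.newPager("user@example.com"))             // new call site
}

The payoff shows in GetAddedAndRemovedItemIDs above: call sites shrink and no longer need a servicer in scope.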
@@ -197,12 +197,12 @@ type mailFolderPager struct {
 	builder *users.ItemMailFoldersRequestBuilder
 }
 
-func NewMailFolderPager(service graph.Servicer, userID string) mailFolderPager {
+func (c Mail) NewMailFolderPager(userID string) mailFolderPager {
 	// v1.0 non delta /mailFolders endpoint does not return any of the nested folders
 	rawURL := fmt.Sprintf(mailFoldersBetaURLTemplate, userID)
-	builder := users.NewItemMailFoldersRequestBuilder(rawURL, service.Adapter())
+	builder := users.NewItemMailFoldersRequestBuilder(rawURL, c.Stable.Adapter())
 
-	return mailFolderPager{service, builder}
+	return mailFolderPager{c.Stable, builder}
 }
 
 func (p *mailFolderPager) getPage(ctx context.Context) (PageLinker, error) {
@@ -241,7 +241,7 @@ func (c Mail) EnumerateContainers(
 	errs *fault.Bus,
 ) error {
 	el := errs.Local()
-	pgr := NewMailFolderPager(c.Stable, userID)
+	pgr := c.NewMailFolderPager(userID)
 
 	for {
 		if el.Failure() != nil {
@@ -544,9 +544,8 @@ type mailPager struct {
 	options *users.ItemMailFoldersItemMessagesRequestBuilderGetRequestConfiguration
 }
 
-func NewMailPager(
+func (c Mail) NewMailPager(
 	ctx context.Context,
-	gs graph.Servicer,
 	userID, containerID string,
 	immutableIDs bool,
 ) itemPager {
@@ -557,7 +556,7 @@ func NewMailPager(
 		Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)),
 	}
 
-	builder := gs.
+	builder := c.Stable.
 		Client().
 		Users().
 		ByUserId(userID).
@@ -565,7 +564,7 @@ func NewMailPager(
 		ByMailFolderId(containerID).
 		Messages()
 
-	return &mailPager{gs, builder, config}
+	return &mailPager{c.Stable, builder, config}
 }
 
 func (p *mailPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
@@ -620,9 +619,8 @@ func getMailDeltaBuilder(
 	return builder
 }
 
-func NewMailDeltaPager(
+func (c Mail) NewMailDeltaPager(
 	ctx context.Context,
-	gs graph.Servicer,
 	userID, containerID, oldDelta string,
 	immutableIDs bool,
 ) itemPager {
@@ -636,12 +634,12 @@ func NewMailDeltaPager(
 	var builder *users.ItemMailFoldersItemMessagesDeltaRequestBuilder
 
 	if len(oldDelta) > 0 {
-		builder = users.NewItemMailFoldersItemMessagesDeltaRequestBuilder(oldDelta, gs.Adapter())
+		builder = users.NewItemMailFoldersItemMessagesDeltaRequestBuilder(oldDelta, c.Stable.Adapter())
 	} else {
-		builder = getMailDeltaBuilder(ctx, gs, userID, containerID, config)
+		builder = getMailDeltaBuilder(ctx, c.Stable, userID, containerID, config)
 	}
 
-	return &mailDeltaPager{gs, userID, containerID, builder, config}
+	return &mailDeltaPager{c.Stable, userID, containerID, builder, config}
 }
 
 func (p *mailDeltaPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
@@ -683,8 +681,8 @@ func (c Mail) GetAddedAndRemovedItemIDs(
 		"category", selectors.ExchangeMail,
 		"container_id", containerID)
 
-	pager := NewMailPager(ctx, c.Stable, userID, containerID, immutableIDs)
-	deltaPager := NewMailDeltaPager(ctx, c.Stable, userID, containerID, oldDelta, immutableIDs)
+	pager := c.NewMailPager(ctx, userID, containerID, immutableIDs)
+	deltaPager := c.NewMailDeltaPager(ctx, userID, containerID, oldDelta, immutableIDs)
 
 	return getAddedAndRemovedItemIDs(ctx, c.Stable, pager, deltaPager, oldDelta, canMakeDeltaQueries)
 }
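
The mail changes repeat the receiver-based constructor refactor and keep the pairing in GetAddedAndRemovedItemIDs: a full pager and a delta pager are both built and handed to a shared helper together with a canMakeDeltaQueries flag. That helper's internals aren't shown in this diff; the sketch below only illustrates the prefer-delta, fall-back-to-full shape under that assumption, with invented names:

package main

import "fmt"

// pager abstracts the two enumeration strategies.
type pager interface {
	fetch() []string
}

// fullPager re-reads the entire container; always available.
type fullPager struct{}

func (fullPager) fetch() []string { return []string{"a", "b", "c"} }

// deltaPager returns only changes since the last delta token; it can be
// unavailable, e.g. when the mailbox has exceeded its quota.
type deltaPager struct{ available bool }

func (d deltaPager) fetch() []string {
	if !d.available {
		return nil
	}

	return []string{"c"}
}

// getAddedAndRemoved mirrors only the pairing shape: prefer the delta pager
// when the caller allows it, otherwise enumerate everything.
func getAddedAndRemoved(full pager, delta deltaPager, canDelta bool) []string {
	if canDelta && delta.available {
		return delta.fetch()
	}

	return full.fetch()
}

func main() {
	fmt.Println(getAddedAndRemoved(fullPager{}, deltaPager{available: true}, true))
	fmt.Println(getAddedAndRemoved(fullPager{}, deltaPager{}, false))
}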
@@ -32,23 +32,9 @@ type Sites struct {
 }
 
 // ---------------------------------------------------------------------------
-// methods
+// api calls
 // ---------------------------------------------------------------------------
 
-// GetSite returns a minimal Site with the SiteID and the WebURL
-// TODO: delete in favor of sites.GetByID()
-func GetSite(ctx context.Context, gs graph.Servicer, siteID string) (models.Siteable, error) {
-	resp, err := gs.Client().
-		Sites().
-		BySiteId(siteID).
-		Get(ctx, nil)
-	if err != nil {
-		return nil, graph.Stack(ctx, err)
-	}
-
-	return resp, nil
-}
-
 // GetAll retrieves all sites.
 func (c Sites) GetAll(ctx context.Context, errs *fault.Bus) ([]models.Siteable, error) {
 	service, err := c.Service()
@@ -171,6 +157,27 @@ func (c Sites) GetIDAndName(ctx context.Context, siteID string) (string, string,
 	return ptr.Val(s.GetId()), ptr.Val(s.GetWebUrl()), nil
 }
 
+// ---------------------------------------------------------------------------
+// Info
+// ---------------------------------------------------------------------------
+
+func (c Sites) GetDefaultDrive(
+	ctx context.Context,
+	site string,
+) (models.Driveable, error) {
+	d, err := c.Stable.
+		Client().
+		Sites().
+		BySiteId(site).
+		Drive().
+		Get(ctx, nil)
+	if err != nil {
+		return nil, graph.Wrap(ctx, err, "getting site's default drive")
+	}
+
+	return d, nil
+}
+
 // ---------------------------------------------------------------------------
 // helpers
 // ---------------------------------------------------------------------------
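
The new Sites.GetDefaultDrive resolves a site's default document library via the /sites/{id}/drive endpoint. A usage sketch, assuming this sites client lives in the same api package as the new user_info.go file in this commit and is constructed elsewhere; the import paths are inferred from the commit, not confirmed:

package example

import (
	"context"
	"fmt"

	"github.com/alcionai/corso/src/internal/common/ptr"
	"github.com/alcionai/corso/src/pkg/services/m365/api"
)

// printDefaultDrive resolves a site's default drive and prints its ID.
// api.Sites and its construction are assumed to exist as in the surrounding
// package; siteID is any valid M365 site ID.
func printDefaultDrive(ctx context.Context, sites api.Sites, siteID string) error {
	d, err := sites.GetDefaultDrive(ctx, siteID)
	if err != nil {
		return err
	}

	fmt.Println("default drive id:", ptr.Val(d.GetId()))

	return nil
}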
|
|||||||
187
src/pkg/services/m365/api/user_info.go
Normal file
187
src/pkg/services/m365/api/user_info.go
Normal file
@ -0,0 +1,187 @@
|
|||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/microsoftgraph/msgraph-sdk-go/models"
|
||||||
|
|
||||||
|
"github.com/alcionai/corso/src/internal/common/str"
|
||||||
|
"github.com/alcionai/corso/src/internal/common/tform"
|
||||||
|
"github.com/alcionai/corso/src/pkg/path"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// User Info
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
type UserInfo struct {
|
||||||
|
ServicesEnabled map[path.ServiceType]struct{}
|
||||||
|
Mailbox MailboxInfo
|
||||||
|
}
|
||||||
|
|
||||||
|
type MailboxInfo struct {
|
||||||
|
Purpose string
|
||||||
|
ArchiveFolder string
|
||||||
|
DateFormat string
|
||||||
|
TimeFormat string
|
||||||
|
DelegateMeetMsgDeliveryOpt string
|
||||||
|
Timezone string
|
||||||
|
AutomaticRepliesSetting AutomaticRepliesSettings
|
||||||
|
Language Language
|
||||||
|
WorkingHours WorkingHours
|
||||||
|
ErrGetMailBoxSetting []error
|
||||||
|
QuotaExceeded bool
|
||||||
|
}
|
||||||
|
|
||||||
|
type AutomaticRepliesSettings struct {
|
||||||
|
ExternalAudience string
|
||||||
|
ExternalReplyMessage string
|
||||||
|
InternalReplyMessage string
|
||||||
|
ScheduledEndDateTime timeInfo
|
||||||
|
ScheduledStartDateTime timeInfo
|
||||||
|
Status string
|
||||||
|
}
|
||||||
|
|
||||||
|
type timeInfo struct {
|
||||||
|
DateTime string
|
||||||
|
Timezone string
|
||||||
|
}
|
||||||
|
|
||||||
|
type Language struct {
|
||||||
|
Locale string
|
||||||
|
DisplayName string
|
||||||
|
}
|
||||||
|
|
||||||
|
type WorkingHours struct {
|
||||||
|
DaysOfWeek []string
|
||||||
|
StartTime string
|
||||||
|
EndTime string
|
||||||
|
TimeZone struct {
|
||||||
|
Name string
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func newUserInfo() *UserInfo {
|
||||||
|
return &UserInfo{
|
||||||
|
ServicesEnabled: map[path.ServiceType]struct{}{
|
||||||
|
path.ExchangeService: {},
|
||||||
|
path.OneDriveService: {},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ServiceEnabled returns true if the UserInfo has an entry for the
|
||||||
|
// service. If no entry exists, the service is assumed to not be enabled.
|
||||||
|
func (ui *UserInfo) ServiceEnabled(service path.ServiceType) bool {
|
||||||
|
if ui == nil || len(ui.ServicesEnabled) == 0 {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
_, ok := ui.ServicesEnabled[service]
|
||||||
|
|
||||||
|
return ok
|
||||||
|
}
|
||||||
|
|
||||||
|
// Returns if we can run delta queries on a mailbox. We cannot run
|
||||||
|
// them if the mailbox is full which is indicated by QuotaExceeded.
|
||||||
|
func (ui *UserInfo) CanMakeDeltaQueries() bool {
|
||||||
|
return !ui.Mailbox.QuotaExceeded
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseMailboxSettings(
|
||||||
|
settings models.Userable,
|
||||||
|
mi MailboxInfo,
|
||||||
|
) MailboxInfo {
|
||||||
|
var (
|
||||||
|
additionalData = settings.GetAdditionalData()
|
||||||
|
err error
|
||||||
|
)
|
||||||
|
|
||||||
|
mi.ArchiveFolder, err = str.AnyValueToString("archiveFolder", additionalData)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
mi.Timezone, err = str.AnyValueToString("timeZone", additionalData)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
mi.DateFormat, err = str.AnyValueToString("dateFormat", additionalData)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
mi.TimeFormat, err = str.AnyValueToString("timeFormat", additionalData)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
mi.Purpose, err = str.AnyValueToString("userPurpose", additionalData)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
mi.DelegateMeetMsgDeliveryOpt, err = str.AnyValueToString("delegateMeetingMessageDeliveryOptions", additionalData)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
// decode automatic replies settings
|
||||||
|
replySetting, err := tform.AnyValueToT[map[string]any]("automaticRepliesSetting", additionalData)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
mi.AutomaticRepliesSetting.Status, err = str.AnyValueToString("status", replySetting)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
mi.AutomaticRepliesSetting.ExternalAudience, err = str.AnyValueToString("externalAudience", replySetting)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
mi.AutomaticRepliesSetting.ExternalReplyMessage, err = str.AnyValueToString("externalReplyMessage", replySetting)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
mi.AutomaticRepliesSetting.InternalReplyMessage, err = str.AnyValueToString("internalReplyMessage", replySetting)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
// decode scheduledStartDateTime
|
||||||
|
startDateTime, err := tform.AnyValueToT[map[string]any]("scheduledStartDateTime", replySetting)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
mi.AutomaticRepliesSetting.ScheduledStartDateTime.DateTime, err = str.AnyValueToString("dateTime", startDateTime)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
mi.AutomaticRepliesSetting.ScheduledStartDateTime.Timezone, err = str.AnyValueToString("timeZone", startDateTime)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
endDateTime, err := tform.AnyValueToT[map[string]any]("scheduledEndDateTime", replySetting)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
mi.AutomaticRepliesSetting.ScheduledEndDateTime.DateTime, err = str.AnyValueToString("dateTime", endDateTime)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
mi.AutomaticRepliesSetting.ScheduledEndDateTime.Timezone, err = str.AnyValueToString("timeZone", endDateTime)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
// Language decode
|
||||||
|
language, err := tform.AnyValueToT[map[string]any]("language", additionalData)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
mi.Language.DisplayName, err = str.AnyValueToString("displayName", language)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
mi.Language.Locale, err = str.AnyValueToString("locale", language)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
// working hours
|
||||||
|
workingHours, err := tform.AnyValueToT[map[string]any]("workingHours", additionalData)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
mi.WorkingHours.StartTime, err = str.AnyValueToString("startTime", workingHours)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
mi.WorkingHours.EndTime, err = str.AnyValueToString("endTime", workingHours)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
timeZone, err := tform.AnyValueToT[map[string]any]("timeZone", workingHours)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
mi.WorkingHours.TimeZone.Name, err = str.AnyValueToString("name", timeZone)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
days, err := tform.AnyValueToT[[]any]("daysOfWeek", workingHours)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
|
||||||
|
for _, day := range days {
|
||||||
|
s, err := str.AnyToString(day)
|
||||||
|
mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
|
||||||
|
mi.WorkingHours.DaysOfWeek = append(mi.WorkingHours.DaysOfWeek, s)
|
||||||
|
}
|
||||||
|
|
||||||
|
return mi
|
||||||
|
}
|
||||||
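
user_info.go decodes mailboxSettings defensively: every field lookup goes through appendIfErr, so a single missing or mistyped key is recorded in ErrGetMailBoxSetting rather than aborting the whole parse. A runnable sketch of that accumulate-don't-abort pattern (lookup is a stand-in for str.AnyValueToString, not the real helper):

package main

import (
	"errors"
	"fmt"
)

// appendIfErr mirrors the helper used throughout parseMailboxSettings:
// collect per-field decode errors instead of failing on the first one.
func appendIfErr(errs []error, err error) []error {
	if err == nil {
		return errs
	}

	return append(errs, err)
}

// lookup stands in for str.AnyValueToString: fetch a key from an untyped
// additional-data map, reporting an error when it's missing or not a string.
func lookup(key string, data map[string]any) (string, error) {
	v, ok := data[key].(string)
	if !ok {
		return "", errors.New(key + ": missing or not a string")
	}

	return v, nil
}

func main() {
	data := map[string]any{"timeZone": "UTC"} // dateFormat intentionally absent

	var errs []error

	tz, err := lookup("timeZone", data)
	errs = appendIfErr(errs, err)

	df, err := lookup("dateFormat", data)
	errs = appendIfErr(errs, err)

	// every field that decoded is kept; failures are reported together.
	fmt.Println(tz, df, errs)
}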
@@ -12,8 +12,6 @@ import (
 
 	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/internal/common/str"
-	"github.com/alcionai/corso/src/internal/common/tform"
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/logger"
@@ -39,85 +37,7 @@ type Users struct {
 }
 
 // ---------------------------------------------------------------------------
-// structs
-// ---------------------------------------------------------------------------
-
-type UserInfo struct {
-	ServicesEnabled map[path.ServiceType]struct{}
-	Mailbox         MailboxInfo
-}
-
-type MailboxInfo struct {
-	Purpose                    string
-	ArchiveFolder              string
-	DateFormat                 string
-	TimeFormat                 string
-	DelegateMeetMsgDeliveryOpt string
-	Timezone                   string
-	AutomaticRepliesSetting    AutomaticRepliesSettings
-	Language                   Language
-	WorkingHours               WorkingHours
-	ErrGetMailBoxSetting       []error
-	QuotaExceeded              bool
-}
-
-type AutomaticRepliesSettings struct {
-	ExternalAudience       string
-	ExternalReplyMessage   string
-	InternalReplyMessage   string
-	ScheduledEndDateTime   timeInfo
-	ScheduledStartDateTime timeInfo
-	Status                 string
-}
-
-type timeInfo struct {
-	DateTime string
-	Timezone string
-}
-
-type Language struct {
-	Locale      string
-	DisplayName string
-}
-
-type WorkingHours struct {
-	DaysOfWeek []string
-	StartTime  string
-	EndTime    string
-	TimeZone   struct {
-		Name string
-	}
-}
-
-func newUserInfo() *UserInfo {
-	return &UserInfo{
-		ServicesEnabled: map[path.ServiceType]struct{}{
-			path.ExchangeService: {},
-			path.OneDriveService: {},
-		},
-	}
-}
-
-// ServiceEnabled returns true if the UserInfo has an entry for the
-// service. If no entry exists, the service is assumed to not be enabled.
-func (ui *UserInfo) ServiceEnabled(service path.ServiceType) bool {
-	if ui == nil || len(ui.ServicesEnabled) == 0 {
-		return false
-	}
-
-	_, ok := ui.ServicesEnabled[service]
-
-	return ok
-}
-
-// Returns if we can run delta queries on a mailbox. We cannot run
-// them if the mailbox is full which is indicated by QuotaExceeded.
-func (ui *UserInfo) CanMakeDeltaQueries() bool {
-	return !ui.Mailbox.QuotaExceeded
-}
-
-// ---------------------------------------------------------------------------
-// methods
+// User CRUD
 // ---------------------------------------------------------------------------
 
 // Filter out both guest users, and (for on-prem installations) non-synced users.
@@ -133,28 +53,26 @@ func (ui *UserInfo) CanMakeDeltaQueries() bool {
 //nolint:lll
 var userFilterNoGuests = "onPremisesSyncEnabled eq true OR userType ne 'Guest'"
 
-func userOptions(fs *string) *users.UsersRequestBuilderGetRequestConfiguration {
-	return &users.UsersRequestBuilderGetRequestConfiguration{
-		Headers: newEventualConsistencyHeaders(),
-		QueryParameters: &users.UsersRequestBuilderGetQueryParameters{
-			Select: idAnd(userPrincipalName, displayName),
-			Filter: fs,
-			Count:  ptr.To(true),
-		},
-	}
-}
-
 // GetAll retrieves all users.
-func (c Users) GetAll(ctx context.Context, errs *fault.Bus) ([]models.Userable, error) {
+func (c Users) GetAll(
+	ctx context.Context,
+	errs *fault.Bus,
+) ([]models.Userable, error) {
 	service, err := c.Service()
 	if err != nil {
 		return nil, err
 	}
 
-	var resp models.UserCollectionResponseable
-
-	resp, err = service.Client().Users().Get(ctx, userOptions(&userFilterNoGuests))
+	config := &users.UsersRequestBuilderGetRequestConfiguration{
+		Headers: newEventualConsistencyHeaders(),
+		QueryParameters: &users.UsersRequestBuilderGetQueryParameters{
+			Select: idAnd(userPrincipalName, displayName),
+			Filter: &userFilterNoGuests,
+			Count:  ptr.To(true),
+		},
+	}
+
+	resp, err := service.Client().Users().Get(ctx, config)
 	if err != nil {
 		return nil, graph.Wrap(ctx, err, "getting all users")
 	}
@@ -241,238 +159,6 @@ func (c Users) GetAllIDsAndNames(ctx context.Context, errs *fault.Bus) (idname.C
 	return idname.NewCache(idToName), nil
 }
 
-func (c Users) GetInfo(ctx context.Context, userID string) (*UserInfo, error) {
-	// Assume all services are enabled
-	// then filter down to only services the user has enabled
-	userInfo := newUserInfo()
-
-	requestParameters := users.ItemMailFoldersRequestBuilderGetQueryParameters{
-		Select: idAnd(),
-		Top:    ptr.To[int32](1), // if we get any folders, then we have access.
-	}
-
-	options := users.ItemMailFoldersRequestBuilderGetRequestConfiguration{
-		QueryParameters: &requestParameters,
-	}
-
-	mfs, err := c.GetMailFolders(ctx, userID, options)
-	if err != nil {
-		logger.CtxErr(ctx, err).Error("getting user's mail folders")
-
-		if graph.IsErrUserNotFound(err) {
-			return nil, clues.Stack(graph.ErrResourceOwnerNotFound, err)
-		}
-
-		if !graph.IsErrExchangeMailFolderNotFound(err) {
-			return nil, clues.Stack(err)
-		}
-
-		delete(userInfo.ServicesEnabled, path.ExchangeService)
-	}
-
-	if _, err := c.GetDrives(ctx, userID); err != nil {
-		logger.CtxErr(ctx, err).Error("getting user's drives")
-
-		if graph.IsErrUserNotFound(err) {
-			return nil, clues.Stack(graph.ErrResourceOwnerNotFound, err)
-		}
-
-		if !clues.HasLabel(err, graph.LabelsMysiteNotFound) {
-			return nil, clues.Stack(err)
-		}
-
-		delete(userInfo.ServicesEnabled, path.OneDriveService)
-	}
-
-	mbxInfo, err := c.getMailboxSettings(ctx, userID)
-	if err != nil {
-		return nil, err
-	}
-
-	userInfo.Mailbox = mbxInfo
-
-	// TODO: This tries to determine if the user has hit their mailbox
-	// limit by trying to fetch an item and seeing if we get the quota
-	// exceeded error. Ideally(if available) we should convert this to
-	// pull the user's usage via an api and compare if they have used
-	// up their quota.
-	if mfs != nil {
-		mf := mfs.GetValue()[0] // we will always have one
-		options := &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetRequestConfiguration{
-			QueryParameters: &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetQueryParameters{
-				Top: ptr.To[int32](1), // just one item is enough
-			},
-		}
-		_, err = c.Stable.Client().
-			Users().
-			ByUserId(userID).
-			MailFolders().
-			ByMailFolderId(ptr.Val(mf.GetId())).
-			Messages().
-			Delta().
-			Get(ctx, options)
-
-		if err != nil && !graph.IsErrQuotaExceeded(err) {
-			return nil, err
-		}
-
-		userInfo.Mailbox.QuotaExceeded = graph.IsErrQuotaExceeded(err)
-	}
-
-	return userInfo, nil
-}
-
-// TODO: remove when exchange api goes into this package
-func (c Users) GetMailFolders(
-	ctx context.Context,
-	userID string,
-	options users.ItemMailFoldersRequestBuilderGetRequestConfiguration,
-) (models.MailFolderCollectionResponseable, error) {
-	mailFolders, err := c.Stable.Client().Users().ByUserId(userID).MailFolders().Get(ctx, &options)
-	if err != nil {
-		return nil, graph.Wrap(ctx, err, "getting MailFolders")
-	}
-
-	return mailFolders, nil
-}
-
-// TODO: remove when drive api goes into this package
-func (c Users) GetDrives(ctx context.Context, userID string) (models.DriveCollectionResponseable, error) {
-	drives, err := c.Stable.Client().Users().ByUserId(userID).Drives().Get(ctx, nil)
-	if err != nil {
-		return nil, graph.Wrap(ctx, err, "getting drives")
-	}
-
-	return drives, nil
-}
-
-func (c Users) getMailboxSettings(
-	ctx context.Context,
-	userID string,
-) (MailboxInfo, error) {
-	var (
-		rawURL  = fmt.Sprintf("https://graph.microsoft.com/v1.0/users/%s/mailboxSettings", userID)
-		adapter = c.Stable.Adapter()
-		mi      = MailboxInfo{
-			ErrGetMailBoxSetting: []error{},
-		}
-	)
-
-	settings, err := users.NewUserItemRequestBuilder(rawURL, adapter).Get(ctx, nil)
-	if err != nil && !(graph.IsErrAccessDenied(err) || graph.IsErrExchangeMailFolderNotFound(err)) {
-		logger.CtxErr(ctx, err).Error("getting mailbox settings")
-		return mi, graph.Wrap(ctx, err, "getting additional data")
-	}
-
-	if graph.IsErrAccessDenied(err) {
-		logger.Ctx(ctx).Info("err getting additional data: access denied")
-
-		mi.ErrGetMailBoxSetting = append(mi.ErrGetMailBoxSetting, clues.New("access denied"))
-
-		return mi, nil
-	}
-
-	if graph.IsErrExchangeMailFolderNotFound(err) {
-		logger.Ctx(ctx).Info("mailfolders not found")
-
-		mi.ErrGetMailBoxSetting = append(mi.ErrGetMailBoxSetting, ErrMailBoxSettingsNotFound)
-
-		return mi, nil
-	}
-
-	additionalData := settings.GetAdditionalData()
-
-	mi.ArchiveFolder, err = str.AnyValueToString("archiveFolder", additionalData)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	mi.Timezone, err = str.AnyValueToString("timeZone", additionalData)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	mi.DateFormat, err = str.AnyValueToString("dateFormat", additionalData)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	mi.TimeFormat, err = str.AnyValueToString("timeFormat", additionalData)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	mi.Purpose, err = str.AnyValueToString("userPurpose", additionalData)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	mi.DelegateMeetMsgDeliveryOpt, err = str.AnyValueToString("delegateMeetingMessageDeliveryOptions", additionalData)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	// decode automatic replies settings
-	replySetting, err := tform.AnyValueToT[map[string]any]("automaticRepliesSetting", additionalData)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	mi.AutomaticRepliesSetting.Status, err = str.AnyValueToString("status", replySetting)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	mi.AutomaticRepliesSetting.ExternalAudience, err = str.AnyValueToString("externalAudience", replySetting)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	mi.AutomaticRepliesSetting.ExternalReplyMessage, err = str.AnyValueToString("externalReplyMessage", replySetting)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	mi.AutomaticRepliesSetting.InternalReplyMessage, err = str.AnyValueToString("internalReplyMessage", replySetting)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	// decode scheduledStartDateTime
-	startDateTime, err := tform.AnyValueToT[map[string]any]("scheduledStartDateTime", replySetting)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	mi.AutomaticRepliesSetting.ScheduledStartDateTime.DateTime, err = str.AnyValueToString("dateTime", startDateTime)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	mi.AutomaticRepliesSetting.ScheduledStartDateTime.Timezone, err = str.AnyValueToString("timeZone", startDateTime)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	endDateTime, err := tform.AnyValueToT[map[string]any]("scheduledEndDateTime", replySetting)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	mi.AutomaticRepliesSetting.ScheduledEndDateTime.DateTime, err = str.AnyValueToString("dateTime", endDateTime)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	mi.AutomaticRepliesSetting.ScheduledEndDateTime.Timezone, err = str.AnyValueToString("timeZone", endDateTime)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	// Language decode
-	language, err := tform.AnyValueToT[map[string]any]("language", additionalData)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	mi.Language.DisplayName, err = str.AnyValueToString("displayName", language)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	mi.Language.Locale, err = str.AnyValueToString("locale", language)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	// working hours
-	workingHours, err := tform.AnyValueToT[map[string]any]("workingHours", additionalData)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	mi.WorkingHours.StartTime, err = str.AnyValueToString("startTime", workingHours)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	mi.WorkingHours.EndTime, err = str.AnyValueToString("endTime", workingHours)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	timeZone, err := tform.AnyValueToT[map[string]any]("timeZone", workingHours)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	mi.WorkingHours.TimeZone.Name, err = str.AnyValueToString("name", timeZone)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	days, err := tform.AnyValueToT[[]any]("daysOfWeek", workingHours)
-	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-
-	for _, day := range days {
-		s, err := str.AnyToString(day)
-		mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
-		mi.WorkingHours.DaysOfWeek = append(mi.WorkingHours.DaysOfWeek, s)
-	}
-
-	return mi, nil
-}
-
 func appendIfErr(errs []error, err error) []error {
 	if err == nil {
 		return errs
@@ -481,6 +167,177 @@ func appendIfErr(errs []error, err error) []error {
 	return append(errs, err)
 }
 
+// ---------------------------------------------------------------------------
+// Info
+// ---------------------------------------------------------------------------
+
+func (c Users) GetInfo(ctx context.Context, userID string) (*UserInfo, error) {
+	var (
+		// Assume all services are enabled
+		// then filter down to only services the user has enabled
+		userInfo = newUserInfo()
+
+		mailFolderFound = true
+	)
+
+	// check whether the user is able to access their onedrive drive.
+	// if they cannot, we can assume they are ineligible for onedrive backups.
+	if _, err := c.GetDefaultDrive(ctx, userID); err != nil {
+		if !clues.HasLabel(err, graph.LabelsMysiteNotFound) {
+			logger.CtxErr(ctx, err).Error("getting user's drive")
+			return nil, graph.Wrap(ctx, err, "getting user's drive")
+		}
+
+		logger.Ctx(ctx).Info("resource owner does not have a drive")
+		delete(userInfo.ServicesEnabled, path.OneDriveService)
+	}
+
+	// check whether the user is able to access their inbox.
+	// if they cannot, we can assume they are ineligible for exchange backups.
+	inbx, err := c.GetMailInbox(ctx, userID)
+	if err != nil {
+		err = graph.Stack(ctx, err)
+
+		if graph.IsErrUserNotFound(err) {
+			logger.CtxErr(ctx, err).Error("user not found")
+			return nil, clues.Stack(graph.ErrResourceOwnerNotFound, err)
+		}
+
+		if !graph.IsErrExchangeMailFolderNotFound(err) {
+			logger.CtxErr(ctx, err).Error("getting user's mail folder")
+			return nil, err
+		}
+
+		logger.Ctx(ctx).Info("resource owner does not have a mailbox enabled")
+		delete(userInfo.ServicesEnabled, path.ExchangeService)
+
+		mailFolderFound = false
+	}
+
+	// check whether the user has accessible mailbox settings.
+	// if they do, aggregate them in the MailboxInfo
+	mi := MailboxInfo{
+		ErrGetMailBoxSetting: []error{},
+	}
+
+	if !mailFolderFound {
+		mi.ErrGetMailBoxSetting = append(mi.ErrGetMailBoxSetting, ErrMailBoxSettingsNotFound)
+		userInfo.Mailbox = mi
+
+		return userInfo, nil
+	}
+
+	mboxSettings, err := c.getMailboxSettings(ctx, userID)
+	if err != nil {
+		logger.CtxErr(ctx, err).Info("err getting user's mailbox settings")
+
+		if !graph.IsErrAccessDenied(err) {
+			return nil, graph.Wrap(ctx, err, "getting user's mailbox settings")
+		}
+
+		mi.ErrGetMailBoxSetting = append(mi.ErrGetMailBoxSetting, clues.New("access denied"))
+	} else {
+		mi = parseMailboxSettings(mboxSettings, mi)
+	}
+
+	err = c.getFirstInboxMessage(ctx, userID, ptr.Val(inbx.GetId()))
+	if err != nil {
+		if !graph.IsErrQuotaExceeded(err) {
+			return nil, err
+		}
+
+		userInfo.Mailbox.QuotaExceeded = graph.IsErrQuotaExceeded(err)
+	}
+
+	userInfo.Mailbox = mi
+
+	return userInfo, nil
+}
+
+func (c Users) getMailboxSettings(
+	ctx context.Context,
+	userID string,
+) (models.Userable, error) {
+	settings, err := users.
+		NewUserItemRequestBuilder(
+			fmt.Sprintf("https://graph.microsoft.com/v1.0/users/%s/mailboxSettings", userID),
+			c.Stable.Adapter(),
+		).
+		Get(ctx, nil)
+	if err != nil {
+		return nil, graph.Stack(ctx, err)
+	}
+
+	return settings, nil
+}
+
+func (c Users) GetMailInbox(
+	ctx context.Context,
+	userID string,
+) (models.MailFolderable, error) {
+	inbox, err := c.Stable.
+		Client().
+		Users().
+		ByUserId(userID).
+		MailFolders().
+		ByMailFolderId("inbox").
+		Get(ctx, nil)
+	if err != nil {
+		return nil, graph.Wrap(ctx, err, "getting MailFolders")
+	}
+
+	return inbox, nil
+}
+
+func (c Users) GetDefaultDrive(
+	ctx context.Context,
+	userID string,
+) (models.Driveable, error) {
+	d, err := c.Stable.
+		Client().
+		Users().
+		ByUserId(userID).
+		Drive().
+		Get(ctx, nil)
+	if err != nil {
+		return nil, graph.Wrap(ctx, err, "getting user's drive")
+	}
+
+	return d, nil
+}
+
+// TODO: This tries to determine if the user has hit their mailbox
+// limit by trying to fetch an item and seeing if we get the quota
+// exceeded error. Ideally(if available) we should convert this to
+// pull the user's usage via an api and compare if they have used
+// up their quota.
+func (c Users) getFirstInboxMessage(
+	ctx context.Context,
+	userID, inboxID string,
+) error {
+	config := &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetRequestConfiguration{
+		QueryParameters: &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetQueryParameters{
+			Select: idAnd(),
+		},
+		Headers: newPreferHeaders(preferPageSize(1)),
+	}
+
+	_, err := c.Stable.
+		Client().
+		Users().
+		ByUserId(userID).
+		MailFolders().
+		ByMailFolderId(inboxID).
+		Messages().
+		Delta().
+		Get(ctx, config)
+	if err != nil {
+		return graph.Stack(ctx, err)
+	}
+
+	return nil
+}
+
 // ---------------------------------------------------------------------------
 // helpers
 // ---------------------------------------------------------------------------
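
The rewritten Users.GetInfo turns service discovery into three probes: GetDefaultDrive gates OneDrive, GetMailInbox gates Exchange, and getFirstInboxMessage doubles as the quota check. A self-contained sketch of that control flow, with injected probe functions and sentinel errors standing in for the graph error classifiers:

package main

import (
	"errors"
	"fmt"
)

var (
	errNoMysite  = errors.New("mysite not found")
	errNoMailbox = errors.New("mail folder not found")
	errQuotaFull = errors.New("quota exceeded")
)

type service string

// probeUser mirrors the shape of the rewritten Users.GetInfo: start with
// every service enabled, knock out each service whose probe fails with a
// recognized "not provisioned" error, and use a one-message fetch as the
// quota check. Probes are injected so the sketch stays self-contained.
func probeUser(
	driveProbe, inboxProbe, firstMsgProbe func() error,
) (map[service]bool, bool, error) {
	enabled := map[service]bool{"exchange": true, "onedrive": true}

	if err := driveProbe(); err != nil {
		if !errors.Is(err, errNoMysite) {
			return nil, false, err // a real failure, not "no drive"
		}

		delete(enabled, "onedrive")
	}

	if err := inboxProbe(); err != nil {
		if !errors.Is(err, errNoMailbox) {
			return nil, false, err
		}

		delete(enabled, "exchange")

		return enabled, false, nil // no mailbox: nothing left to probe
	}

	// fetching a single message is the (indirect) quota-exceeded check.
	err := firstMsgProbe()
	if err != nil && !errors.Is(err, errQuotaFull) {
		return nil, false, err
	}

	return enabled, errors.Is(err, errQuotaFull), nil
}

func main() {
	enabled, quotaExceeded, err := probeUser(
		func() error { return errNoMysite },  // user has no OneDrive
		func() error { return nil },          // inbox reachable
		func() error { return errQuotaFull }, // mailbox is full
	)
	fmt.Println(enabled, quotaExceeded, err)
}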
@@ -5,7 +5,6 @@ import (
 
 	"github.com/alcionai/clues"
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
-	"github.com/microsoftgraph/msgraph-sdk-go/users"
 
 	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/common/ptr"
@@ -79,16 +78,7 @@ func UserHasMailbox(ctx context.Context, acct account.Account, userID string) (b
 		return false, clues.Wrap(err, "getting mailbox").WithClues(ctx)
 	}
 
-	requestParameters := users.ItemMailFoldersRequestBuilderGetQueryParameters{
-		Select: []string{"id"},
-		Top:    ptr.To[int32](1), // if we get any folders, then we have access.
-	}
-
-	options := users.ItemMailFoldersRequestBuilderGetRequestConfiguration{
-		QueryParameters: &requestParameters,
-	}
-
-	_, err = uapi.GetMailFolders(ctx, userID, options)
+	_, err = uapi.GetMailInbox(ctx, userID)
 	if err != nil {
 		// we consider this a non-error case, since it
 		// answers the question the caller is asking.
@@ -100,6 +90,10 @@ func UserHasMailbox(ctx context.Context, acct account.Account, userID string) (b
 		return false, clues.Stack(graph.ErrResourceOwnerNotFound, err)
 	}
 
+	if graph.IsErrExchangeMailFolderNotFound(err) {
+		return false, nil
+	}
+
 	return false, clues.Stack(err)
 }
 
@@ -114,7 +108,7 @@ func UserHasDrives(ctx context.Context, acct account.Account, userID string) (bo
 		return false, clues.Wrap(err, "getting drives").WithClues(ctx)
 	}
 
-	_, err = uapi.GetDrives(ctx, userID)
+	_, err = uapi.GetDefaultDrive(ctx, userID)
 	if err != nil {
 		// we consider this a non-error case, since it
 		// answers the question the caller is asking.
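
UserHasMailbox now asks for the inbox directly and treats a "mail folder not found" response as a definitive no rather than an error. A small sketch of that tri-state probe (the sentinel error and probe function are invented for illustration):

package main

import (
	"errors"
	"fmt"
)

var errMailFolderNotFound = errors.New("mail folder not found")

// hasMailbox mirrors the updated UserHasMailbox logic: a "folder not found"
// answer from the inbox probe means "no mailbox", not a failure.
func hasMailbox(probeInbox func() error) (bool, error) {
	err := probeInbox()
	if err == nil {
		return true, nil
	}

	if errors.Is(err, errMailFolderNotFound) {
		return false, nil
	}

	return false, err
}

func main() {
	ok, err := hasMailbox(func() error { return errMailFolderNotFound })
	fmt.Println(ok, err) // false <nil>: a clean negative, not an error
}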
website/package-lock.json (generated, 14 lines changed)
@@ -20,7 +20,7 @@
 		"feather-icons": "^4.29.0",
 		"jarallax": "^2.1.3",
 		"mdx-mermaid": "^1.3.2",
-		"mermaid": "^10.2.0",
+		"mermaid": "^10.2.2",
 		"prism-react-renderer": "^1.3.5",
 		"react": "^17.0.2",
 		"react-dom": "^17.0.2",
@@ -9259,9 +9259,9 @@
 			}
 		},
 		"node_modules/mermaid": {
-			"version": "10.2.0",
-			"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-10.2.0.tgz",
-			"integrity": "sha512-mYKXlH9ngKdMsJ87VYMdlDZXS+MXDAGKPf3XzDf2vvAPnmRoFm7GFebemOAOWYI1bWSECDyoWTGwesWe6mW1Cw==",
+			"version": "10.2.2",
+			"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-10.2.2.tgz",
+			"integrity": "sha512-ifYKlCcZKYq48hxC1poJXnvk/PbCdgqqbg5B4qsybb8nIItPM1ATKqVEDkyde6BBJxVFhVJr9hoUjipzniQJZg==",
 			"dependencies": {
 				"@braintree/sanitize-url": "^6.0.2",
 				"cytoscape": "^3.23.0",
@@ -21647,9 +21647,9 @@
 			"integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="
 		},
 		"mermaid": {
-			"version": "10.2.0",
-			"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-10.2.0.tgz",
-			"integrity": "sha512-mYKXlH9ngKdMsJ87VYMdlDZXS+MXDAGKPf3XzDf2vvAPnmRoFm7GFebemOAOWYI1bWSECDyoWTGwesWe6mW1Cw==",
+			"version": "10.2.2",
+			"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-10.2.2.tgz",
+			"integrity": "sha512-ifYKlCcZKYq48hxC1poJXnvk/PbCdgqqbg5B4qsybb8nIItPM1ATKqVEDkyde6BBJxVFhVJr9hoUjipzniQJZg==",
 			"requires": {
 				"@braintree/sanitize-url": "^6.0.2",
 				"cytoscape": "^3.23.0",

@@ -26,7 +26,7 @@
 		"feather-icons": "^4.29.0",
 		"jarallax": "^2.1.3",
 		"mdx-mermaid": "^1.3.2",
-		"mermaid": "^10.2.0",
+		"mermaid": "^10.2.2",
 		"prism-react-renderer": "^1.3.5",
 		"react": "^17.0.2",
 		"react-dom": "^17.0.2",