merge commit

neha-Gupta1 2023-10-11 12:25:10 +05:30
commit 50d8e1af6b
130 changed files with 2711 additions and 1976 deletions

View File

@@ -7,6 +7,15 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ## [Unreleased] (beta)
+### Added
+- Skips graph calls for expired item download URLs.
+- Export operation now shows the stats at the end of the run
+### Fixed
+- Catch and report cases where a protected resource is locked out of access. SDK consumers have a new errs sentinel that allows them to check for this case.
+## [v0.14.0] (beta) - 2023-10-09
 ### Added
 - Enables local or network-attached storage for Corso repositories.
 - Reduce backup runtime for OneDrive and SharePoint incremental backups that have no file changes.
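The new Fixed entry above mentions an errs sentinel for locked-out protected resources. As a rough illustration of how an SDK consumer might branch on that case (the sentinel name below is hypothetical, only stdlib errors.Is is assumed):

```go
package main

import (
	"errors"
	"fmt"
)

// errResourceLocked stands in for the sentinel the changelog says the SDK
// exports; the real name lives in the SDK's errs package.
var errResourceLocked = errors.New("protected resource is locked out of access")

func runBackup() error {
	// ... a backup that fails because the resource is locked ...
	return fmt.Errorf("backing up mailbox: %w", errResourceLocked)
}

func main() {
	if err := runBackup(); err != nil {
		// errors.Is walks the wrap chain, so consumers can detect the case
		// without string matching.
		if errors.Is(err, errResourceLocked) {
			fmt.Println("resource locked; skipping")
			return
		}
		fmt.Println("unexpected error:", err)
	}
}
```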
@@ -14,17 +23,23 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Added `--backups` flag to delete multiple backups in `corso backup delete` command.
 - Backup now includes all sites that belong to a team, not just the root site.
-## Fixed
+### Fixed
 - Teams Channels that cannot support delta tokens (those without messages) fall back to non-delta enumeration and no longer fail a backup.
+### Known issues
+- Restoring the data into a different Group from the one it was backed up from is not currently supported
+### Other
+- Groups and Teams service support is still in feature preview
 ## [v0.13.0] (beta) - 2023-09-18
 ### Added
 - Groups and Teams service support available as a feature preview! Channel messages and Files are now available for backup and restore in the CLI: `corso backup create groups --group '*'`
-* The cli commands for "groups" and "teams" can be used interchangably, and will operate on the same backup data.
+- The cli commands for "groups" and "teams" can be used interchangeably, and will operate on the same backup data.
-* New permissions are required to backup Channel messages. See the [Corso Documentation](https://corsobackup.io/docs/setup/m365-access/#configure-required-permissions) for complete details.
+- New permissions are required to backup Channel messages. See the [Corso Documentation](https://corsobackup.io/docs/setup/m365-access/#configure-required-permissions) for complete details.
 Even though Channel message restoration is not available, message write permissions are included to cover future integration.
-* This is a feature preview, and may be subject to breaking changes based on feedback and testing.
+- This is a feature preview, and may be subject to breaking changes based on feedback and testing.
 ### Changed
 - Switched to Go 1.21
@@ -379,7 +394,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Miscellaneous
   - Optional usage statistics reporting ([RM-35](https://github.com/alcionai/corso-roadmap/issues/35))
-[Unreleased]: https://github.com/alcionai/corso/compare/v0.11.1...HEAD
+[Unreleased]: https://github.com/alcionai/corso/compare/v0.14.0...HEAD
+[v0.14.0]: https://github.com/alcionai/corso/compare/v0.13.0...v0.14.0
+[v0.13.0]: https://github.com/alcionai/corso/compare/v0.12.0...v0.13.0
+[v0.12.0]: https://github.com/alcionai/corso/compare/v0.11.1...v0.12.0
 [v0.11.1]: https://github.com/alcionai/corso/compare/v0.11.0...v0.11.1
 [v0.11.0]: https://github.com/alcionai/corso/compare/v0.10.0...v0.11.0
 [v0.10.0]: https://github.com/alcionai/corso/compare/v0.9.0...v0.10.0

View File

@@ -317,6 +317,7 @@ func genericListCommand(
 	b.Print(ctx)
 	fe.PrintItems(
 		ctx,
+		!ifShow(flags.ListAlertsFV),
 		!ifShow(flags.ListFailedItemsFV),
 		!ifShow(flags.ListSkippedItemsFV),
 		!ifShow(flags.ListRecoveredErrorsFV))
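For context on the negations above: PrintItems takes "hide" booleans, while the CLI flags carry show/hide strings. A minimal sketch of that pattern, assuming (this is not verified against the repo) that ifShow simply reports whether the flag value equals the Show constant:

```go
package main

import "fmt"

const Show = "show"

// ifShow mirrors, as an assumption, the CLI helper: true when the flag
// value asks for the list to be shown.
func ifShow(flagValue string) bool {
	return flagValue == Show
}

// printItems stands in for fe.PrintItems, which takes *hide* booleans --
// hence the !ifShow(...) negation at each call site.
func printItems(hideAlerts, hideFailed bool) {
	if !hideAlerts {
		fmt.Println("alerts: ...")
	}
	if !hideFailed {
		fmt.Println("failed items: ...")
	}
}

func main() {
	listAlertsFV := Show   // e.g. --alerts show
	listFailedFV := "hide" // e.g. --failed-items hide
	printItems(!ifShow(listAlertsFV), !ifShow(listFailedFV))
}
```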

View File

@@ -17,7 +17,6 @@ import (
 	"github.com/alcionai/corso/src/cli/print"
 	"github.com/alcionai/corso/src/cli/repo"
 	"github.com/alcionai/corso/src/cli/restore"
-	"github.com/alcionai/corso/src/cli/utils"
 	"github.com/alcionai/corso/src/internal/observe"
 	"github.com/alcionai/corso/src/internal/version"
 	"github.com/alcionai/corso/src/pkg/logger"
@@ -61,43 +60,6 @@ func preRun(cc *cobra.Command, args []string) error {
 		print.Infof(ctx, "Logging to file: %s", logger.ResolvedLogFile)
 	}
-	avoidTheseDescription := []string{
-		"Initialize a repository.",
-		"Initialize a S3 repository",
-		"Connect to a S3 repository",
-		"Initialize a repository on local or network storage.",
-		"Connect to a repository on local or network storage.",
-		"Help about any command",
-		"Free, Secure, Open-Source Backup for M365.",
-		"env var guide",
-	}
-	if !slices.Contains(avoidTheseDescription, cc.Short) {
-		provider, overrides, err := utils.GetStorageProviderAndOverrides(ctx, cc)
-		if err != nil {
-			return err
-		}
-		cfg, err := config.GetConfigRepoDetails(
-			ctx,
-			provider,
-			true,
-			false,
-			overrides)
-		if err != nil {
-			log.Error("Error while getting config info to run command: ", cc.Use)
-			return err
-		}
-		utils.SendStartCorsoEvent(
-			ctx,
-			cfg.Storage,
-			cfg.Account.ID(),
-			map[string]any{"command": cc.CommandPath()},
-			cfg.RepoID,
-			utils.Control())
-	}
 	// handle deprecated user flag in Backup exchange command
 	if cc.CommandPath() == "corso backup create exchange" {
 		handleMailBoxFlag(ctx, cc, flagSl)

View File

@@ -5,6 +5,7 @@ import (
 	"errors"
 	"github.com/alcionai/clues"
+	"github.com/dustin/go-humanize"
 	"github.com/spf13/cobra"
 	"github.com/alcionai/corso/src/cli/flags"
@@ -110,5 +111,14 @@ func runExport(
 		return Only(ctx, err)
 	}
+	stats := eo.GetStats()
+	if len(stats) > 0 {
+		Infof(ctx, "\nExport details")
+	}
+	for k, s := range stats {
+		Infof(ctx, "%s: %d items (%s)", k.HumanString(), s.ResourceCount, humanize.Bytes(uint64(s.BytesRead)))
+	}
 	return nil
 }
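The new stats line leans on github.com/dustin/go-humanize for the size column. For reference, humanize.Bytes takes a uint64 byte count and returns a human-readable decimal (SI) string:

```go
package main

import (
	"fmt"

	"github.com/dustin/go-humanize"
)

func main() {
	// humanize.Bytes renders byte counts in SI units.
	fmt.Println(humanize.Bytes(82854982)) // "83 MB"

	// So a stats line like the one added above prints, e.g.:
	fmt.Printf("%s: %d items (%s)\n", "Emails", 120, humanize.Bytes(1048576))
}
```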

View File

@@ -8,6 +8,7 @@ func AddAllBackupListFlags(cmd *cobra.Command) {
 	AddFailedItemsFN(cmd)
 	AddSkippedItemsFN(cmd)
 	AddRecoveredErrorsFN(cmd)
+	AddAlertsFN(cmd)
 }
 func AddFailedItemsFN(cmd *cobra.Command) {
@@ -27,3 +28,9 @@ func AddRecoveredErrorsFN(cmd *cobra.Command) {
 		&ListRecoveredErrorsFV, RecoveredErrorsFN, Show,
 		"Toggles showing or hiding the list of errors which Corso recovered from.")
 }
+func AddAlertsFN(cmd *cobra.Command) {
+	cmd.Flags().StringVar(
+		&ListAlertsFV, AlertsFN, Show,
+		"Toggles showing or hiding the list of alerts produced during the operation.")
+}

View File

@@ -5,6 +5,7 @@ import (
 )
 const (
+	AlertsFN                    = "alerts"
 	DeltaPageSizeFN             = "delta-page-size"
 	DisableConcurrencyLimiterFN = "disable-concurrency-limiter"
 	DisableDeltaFN              = "disable-delta"
@@ -31,6 +32,7 @@ var (
 	EnableImmutableIDFV   bool
 	FailFastFV            bool
 	FetchParallelismFV    int
+	ListAlertsFV          string
 	ListFailedItemsFV     string
 	ListSkippedItemsFV    string
 	ListRecoveredErrorsFV string

View File

@@ -19,7 +19,7 @@ var (
 )
 // AddRestoreConfigFlags adds the restore config flag set.
-func AddRestoreConfigFlags(cmd *cobra.Command) {
+func AddRestoreConfigFlags(cmd *cobra.Command, canRestoreToAlternate bool) {
 	fs := cmd.Flags()
 	fs.StringVar(
 		&CollisionsFV, CollisionsFN, string(control.Skip),
@@ -28,7 +28,10 @@ func AddRestoreConfigFlags(cmd *cobra.Command) {
 	fs.StringVar(
 		&DestinationFV, DestinationFN, "",
 		"Overrides the folder where items get restored; '/' places items into their original location")
+	if canRestoreToAlternate {
 	fs.StringVar(
 		&ToResourceFV, ToResourceFN, "",
 		"Overrides the protected resource (mailbox, site, user, etc) where data gets restored")
+	}
 }
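The gating above means --to-resource only exists on commands whose service supports restoring into an alternate resource (Groups passes false, per the Known issues entry earlier). A minimal self-contained sketch of that pattern, using real cobra APIs but simplified names:

```go
package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

var toResourceFV string

// addRestoreConfigFlags sketches the gating pattern: the flag is registered
// only when the service can restore into an alternate protected resource.
func addRestoreConfigFlags(cmd *cobra.Command, canRestoreToAlternate bool) {
	if canRestoreToAlternate {
		cmd.Flags().StringVar(
			&toResourceFV, "to-resource", "",
			"Overrides the protected resource where data gets restored")
	}
}

func main() {
	groups := &cobra.Command{Use: "groups"}
	addRestoreConfigFlags(groups, false) // flag absent: restore stays in-place

	exchange := &cobra.Command{Use: "exchange"}
	addRestoreConfigFlags(exchange, true) // flag present

	fmt.Println(groups.Flags().Lookup("to-resource"))           // <nil>
	fmt.Println(exchange.Flags().Lookup("to-resource") != nil) // true
}
```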

View File

@@ -11,6 +11,7 @@ import (
 func PreparedBackupListFlags() []string {
 	return []string{
+		"--" + flags.AlertsFN, flags.Show,
 		"--" + flags.FailedItemsFN, flags.Show,
 		"--" + flags.SkippedItemsFN, flags.Show,
 		"--" + flags.RecoveredErrorsFN, flags.Show,
@@ -18,6 +19,7 @@ func PreparedBackupListFlags() []string {
 }
 func AssertBackupListFlags(t *testing.T, cmd *cobra.Command) {
+	assert.Equal(t, flags.Show, flags.ListAlertsFV)
 	assert.Equal(t, flags.Show, flags.ListFailedItemsFV)
 	assert.Equal(t, flags.Show, flags.ListSkippedItemsFV)
 	assert.Equal(t, flags.Show, flags.ListRecoveredErrorsFV)

View File

@@ -87,15 +87,6 @@ func initFilesystemCmd(cmd *cobra.Command, args []string) error {
 	// Retention is not supported for filesystem repos.
 	retentionOpts := ctrlRepo.Retention{}
-	// SendStartCorsoEvent uses distinct ID as tenant ID because repoID is still not generated
-	utils.SendStartCorsoEvent(
-		ctx,
-		cfg.Storage,
-		cfg.Account.ID(),
-		map[string]any{"command": "init repo"},
-		cfg.Account.ID(),
-		opt)
 	storageCfg, err := cfg.Storage.ToFilesystemConfig()
 	if err != nil {
 		return Only(ctx, clues.Wrap(err, "Retrieving filesystem configuration"))

View File

@@ -102,15 +102,6 @@ func initS3Cmd(cmd *cobra.Command, args []string) error {
 		return Only(ctx, err)
 	}
-	// SendStartCorsoEvent uses distinct ID as tenant ID because repoID is still not generated
-	utils.SendStartCorsoEvent(
-		ctx,
-		cfg.Storage,
-		cfg.Account.ID(),
-		map[string]any{"command": "init repo"},
-		cfg.Account.ID(),
-		opt)
 	s3Cfg, err := cfg.Storage.ToS3Config()
 	if err != nil {
 		return Only(ctx, clues.Wrap(err, "Retrieving s3 configuration"))

View File

@@ -28,7 +28,7 @@ func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
 			flags.AddBackupIDFlag(c, true)
 			flags.AddExchangeDetailsAndRestoreFlags(c)
-			flags.AddRestoreConfigFlags(c)
+			flags.AddRestoreConfigFlags(c, true)
 			flags.AddFailFastFlag(c)
 		}

View File

@@ -30,7 +30,7 @@ func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
 			flags.AddNoPermissionsFlag(c)
 			flags.AddSharePointDetailsAndRestoreFlags(c) // for sp restores
 			flags.AddSiteIDFlag(c)
-			flags.AddRestoreConfigFlags(c)
+			flags.AddRestoreConfigFlags(c, false)
 			flags.AddFailFastFlag(c)
 		}

View File

@@ -65,7 +65,7 @@ func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
 				"--" + flags.PageFolderFN, flagsTD.FlgInputs(flagsTD.PageFolderInput),
 				"--" + flags.CollisionsFN, flagsTD.Collisions,
 				"--" + flags.DestinationFN, flagsTD.Destination,
-				"--" + flags.ToResourceFN, flagsTD.ToResource,
+				// "--" + flags.ToResourceFN, flagsTD.ToResource,
 				"--" + flags.NoPermissionsFN,
 			},
 			flagsTD.PreparedProviderFlags(),
@@ -91,7 +91,7 @@ func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
 			assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore)
 			assert.Equal(t, flagsTD.Collisions, opts.RestoreCfg.Collisions)
 			assert.Equal(t, flagsTD.Destination, opts.RestoreCfg.Destination)
-			assert.Equal(t, flagsTD.ToResource, opts.RestoreCfg.ProtectedResource)
+			// assert.Equal(t, flagsTD.ToResource, opts.RestoreCfg.ProtectedResource)
 			assert.True(t, flags.NoPermissionsFV)
 			flagsTD.AssertProviderFlags(t, cmd)
 			flagsTD.AssertStorageFlags(t, cmd)

View File

@@ -29,7 +29,7 @@ func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
 			flags.AddBackupIDFlag(c, true)
 			flags.AddOneDriveDetailsAndRestoreFlags(c)
 			flags.AddNoPermissionsFlag(c)
-			flags.AddRestoreConfigFlags(c)
+			flags.AddRestoreConfigFlags(c, true)
 			flags.AddFailFastFlag(c)
 		}

View File

@@ -29,7 +29,7 @@ func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
 			flags.AddBackupIDFlag(c, true)
 			flags.AddSharePointDetailsAndRestoreFlags(c)
 			flags.AddNoPermissionsFlag(c)
-			flags.AddRestoreConfigFlags(c)
+			flags.AddRestoreConfigFlags(c, true)
 			flags.AddFailFastFlag(c)
 		}

View File

@@ -239,24 +239,6 @@ func splitFoldersIntoContainsAndPrefix(folders []string) ([]string, []string) {
 	return containsFolders, prefixFolders
 }
-// SendStartCorsoEvent utility sends corso start event at start of each action
-func SendStartCorsoEvent(
-	ctx context.Context,
-	s storage.Storage,
-	tenID string,
-	data map[string]any,
-	repoID string,
-	opts control.Options,
-) {
-	bus, err := events.NewBus(ctx, s, tenID, opts)
-	if err != nil {
-		logger.CtxErr(ctx, err).Info("sending start event")
-	}
-	bus.SetRepoID(repoID)
-	bus.Event(ctx, events.CorsoStart, data)
-}
 // GetStorageProviderAndOverrides returns the storage provider type and
 // any flags specified on the command line which are storage provider specific.
 func GetStorageProviderAndOverrides(

View File

@@ -120,7 +120,7 @@ func generateAndRestoreItems(
 func getControllerAndVerifyResourceOwner(
 	ctx context.Context,
-	resourceOwner string,
+	protectedResource string,
 	pst path.ServiceType,
 ) (
 	*m365.Controller,
@@ -150,12 +150,12 @@ func getControllerAndVerifyResourceOwner(
 		return nil, account.Account{}, nil, clues.Wrap(err, "connecting to graph api")
 	}
-	id, _, err := ctrl.PopulateProtectedResourceIDAndName(ctx, resourceOwner, nil)
+	pr, err := ctrl.PopulateProtectedResourceIDAndName(ctx, protectedResource, nil)
 	if err != nil {
 		return nil, account.Account{}, nil, clues.Wrap(err, "verifying user")
 	}
-	return ctrl, acct, ctrl.IDNameLookup.ProviderForID(id), nil
+	return ctrl, acct, pr, nil
 }
 type item struct {
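The refactor above has PopulateProtectedResourceIDAndName return the id/name pair directly as a Provider, instead of an ID that callers re-resolve through a lookup table. A hedged, self-contained sketch of the consuming side (the populate helper below is a stand-in, not the real controller method):

```go
package main

import "fmt"

// Provider matches the idname interface shown later in this diff.
type Provider interface {
	ID() string
	Name() string
}

type resource struct{ id, name string }

func (r resource) ID() string   { return r.id }
func (r resource) Name() string { return r.name }

// populate stands in, as an assumption, for
// PopulateProtectedResourceIDAndName: one call now yields both values.
func populate(owner string) (Provider, error) {
	return resource{id: "id-" + owner, name: owner}, nil
}

func main() {
	pr, err := populate("adele@example.com")
	if err != nil {
		panic(err)
	}
	// Callers no longer re-resolve the ID through IDNameLookup.
	fmt.Println(pr.ID(), pr.Name())
}
```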

View File

@@ -229,7 +229,7 @@ elseif (![string]::IsNullOrEmpty($Site)) {
 	}
 }
 else {
-	Write-Host "User (for OneDrvie) or Site (for Sharpeoint) is required"
+	Write-Host "User (for OneDrive) or Site (for SharePoint) is required"
 	Exit
 }

View File

@@ -10,15 +10,16 @@ require (
 	github.com/armon/go-metrics v0.4.1
 	github.com/aws/aws-xray-sdk-go v1.8.2
 	github.com/cenkalti/backoff/v4 v4.2.1
+	github.com/golang-jwt/jwt/v5 v5.0.0
 	github.com/google/uuid v1.3.1
 	github.com/h2non/gock v1.2.0
 	github.com/kopia/kopia v0.13.0
-	github.com/microsoft/kiota-abstractions-go v1.2.1
+	github.com/microsoft/kiota-abstractions-go v1.2.3
 	github.com/microsoft/kiota-authentication-azure-go v1.0.0
 	github.com/microsoft/kiota-http-go v1.1.0
 	github.com/microsoft/kiota-serialization-form-go v1.0.0
 	github.com/microsoft/kiota-serialization-json-go v1.0.4
-	github.com/microsoftgraph/msgraph-sdk-go v1.19.0
+	github.com/microsoftgraph/msgraph-sdk-go v1.20.0
 	github.com/microsoftgraph/msgraph-sdk-go-core v1.0.0
 	github.com/pkg/errors v0.9.1
 	github.com/puzpuzpuz/xsync/v2 v2.5.1
@@ -27,7 +28,7 @@ require (
 	github.com/spf13/cast v1.5.1
 	github.com/spf13/cobra v1.7.0
 	github.com/spf13/pflag v1.0.5
-	github.com/spf13/viper v1.16.0
+	github.com/spf13/viper v1.17.0
 	github.com/stretchr/testify v1.8.4
 	github.com/tidwall/pretty v1.2.1
 	github.com/tomlazar/table v0.1.2
@@ -35,7 +36,7 @@ require (
 	go.uber.org/zap v1.26.0
 	golang.org/x/exp v0.0.0-20230905200255-921286631fa9
 	golang.org/x/time v0.3.0
-	golang.org/x/tools v0.13.0
+	golang.org/x/tools v0.14.0
 	gotest.tools/v3 v3.5.1
 )
@@ -46,7 +47,6 @@ require (
 	github.com/aws/aws-sdk-go v1.45.0 // indirect
 	github.com/fsnotify/fsnotify v1.6.0 // indirect
 	github.com/gofrs/flock v0.8.1 // indirect
-	github.com/golang-jwt/jwt/v5 v5.0.0 // indirect
 	github.com/google/go-cmp v0.5.9 // indirect
 	github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 // indirect
 	github.com/hashicorp/cronexpr v1.1.2 // indirect
@@ -55,14 +55,17 @@
 	github.com/magiconair/properties v1.8.7 // indirect
 	github.com/microsoft/kiota-serialization-multipart-go v1.0.0 // indirect
 	github.com/mitchellh/mapstructure v1.5.0 // indirect
-	github.com/pelletier/go-toml/v2 v2.0.9 // indirect
-	github.com/spf13/afero v1.9.5 // indirect
-	github.com/spf13/jwalterweatherman v1.1.0 // indirect
-	github.com/subosito/gotenv v1.4.2 // indirect
+	github.com/pelletier/go-toml/v2 v2.1.0 // indirect
+	github.com/sagikazarmark/locafero v0.3.0 // indirect
+	github.com/sagikazarmark/slog-shim v0.1.0 // indirect
+	github.com/sourcegraph/conc v0.3.0 // indirect
+	github.com/spf13/afero v1.10.0 // indirect
+	github.com/std-uritemplate/std-uritemplate/go v0.0.42 // indirect
+	github.com/subosito/gotenv v1.6.0 // indirect
 	github.com/valyala/bytebufferpool v1.0.0 // indirect
 	github.com/valyala/fasthttp v1.48.0 // indirect
-	go.opentelemetry.io/otel/metric v1.18.0 // indirect
-	google.golang.org/genproto/googleapis/rpc v0.0.0-20230807174057-1744710a1577 // indirect
+	go.opentelemetry.io/otel/metric v1.19.0 // indirect
+	google.golang.org/genproto/googleapis/rpc v0.0.0-20230920204549-e6e6cdab5c13 // indirect
 )
 require (
@@ -74,7 +77,7 @@ require (
 	github.com/cespare/xxhash/v2 v2.2.0 // indirect
 	github.com/chmduquesne/rollinghash v4.0.0+incompatible // indirect
 	github.com/cjlapao/common-go v0.0.39 // indirect
-	github.com/davecgh/go-spew v1.1.1 // indirect
+	github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
 	github.com/dustin/go-humanize v1.0.1
 	github.com/edsrzf/mmap-go v1.1.0 // indirect
 	github.com/go-logr/logr v1.2.4 // indirect
@@ -84,7 +87,7 @@ require (
 	github.com/inconshreveable/mousetrap v1.1.0 // indirect
 	github.com/jmespath/go-jmespath v0.4.0 // indirect
 	github.com/json-iterator/go v1.1.12 // indirect
-	github.com/klauspost/compress v1.16.7 // indirect
+	github.com/klauspost/compress v1.17.0 // indirect
 	github.com/klauspost/cpuid/v2 v2.2.5 // indirect
 	github.com/klauspost/pgzip v1.2.6 // indirect
 	github.com/klauspost/reedsolomon v1.11.8 // indirect
@@ -103,7 +106,7 @@ require (
 	github.com/natefinch/atomic v1.0.1 // indirect
 	github.com/pierrec/lz4 v2.6.1+incompatible // indirect
 	github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 // indirect
-	github.com/pmezard/go-difflib v1.0.0 // indirect
+	github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
 	github.com/prometheus/client_golang v1.16.0 // indirect
 	github.com/prometheus/client_model v0.4.0 // indirect
 	github.com/prometheus/common v0.44.0 // indirect
@@ -115,18 +118,17 @@ require (
 	github.com/tidwall/gjson v1.15.0 // indirect
 	github.com/tidwall/match v1.1.1 // indirect
 	github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c // indirect
-	github.com/yosida95/uritemplate/v3 v3.0.2 // indirect
 	github.com/zeebo/blake3 v0.2.3 // indirect
-	go.opentelemetry.io/otel v1.18.0 // indirect
-	go.opentelemetry.io/otel/trace v1.18.0 // indirect
+	go.opentelemetry.io/otel v1.19.0 // indirect
+	go.opentelemetry.io/otel/trace v1.19.0 // indirect
 	go.uber.org/multierr v1.11.0 // indirect
-	golang.org/x/crypto v0.13.0 // indirect
-	golang.org/x/mod v0.12.0 // indirect
-	golang.org/x/net v0.15.0
-	golang.org/x/sync v0.3.0 // indirect
-	golang.org/x/sys v0.12.0 // indirect
+	golang.org/x/crypto v0.14.0 // indirect
+	golang.org/x/mod v0.13.0 // indirect
+	golang.org/x/net v0.16.0
+	golang.org/x/sync v0.4.0 // indirect
+	golang.org/x/sys v0.13.0 // indirect
 	golang.org/x/text v0.13.0 // indirect
-	google.golang.org/grpc v1.57.0 // indirect
+	google.golang.org/grpc v1.58.2 // indirect
 	google.golang.org/protobuf v1.31.0 // indirect
 	gopkg.in/ini.v1 v1.67.0 // indirect
 	gopkg.in/yaml.v3 v3.0.1 // indirect

View File

@@ -102,8 +102,9 @@ github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46t
 github.com/danieljoos/wincred v1.2.0 h1:ozqKHaLK0W/ii4KVbbvluM91W2H3Sh0BncbUNPS7jLE=
 github.com/danieljoos/wincred v1.2.0/go.mod h1:FzQLLMKBFdvu+osBrnFODiv32YGwCfx0SkRa/eYHgec=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
+github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/dnaeon/go-vcr v1.2.0 h1:zHCHvJYTMh1N7xnV7zf1m1GPBF9Ad0Jk/whtQ1663qI=
 github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ=
 github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
@@ -246,8 +247,8 @@ github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1
 github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
 github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
 github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
-github.com/klauspost/compress v1.16.7 h1:2mk3MPGNzKyxErAw8YaohYh69+pa4sIQSC0fPGCFR9I=
-github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
+github.com/klauspost/compress v1.17.0 h1:Rnbp4K9EjcDuVuHtd0dgA4qNuv9yKDYKK1ulpJwgrqM=
+github.com/klauspost/compress v1.17.0/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
 github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
 github.com/klauspost/cpuid/v2 v2.0.12/go.mod h1:g2LTdtYhdyuGPqyWyv7qRAmj1WBqxuObKfj5c0PQa7c=
 github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg=
@@ -287,8 +288,8 @@ github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zk
 github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4=
 github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI=
 github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
-github.com/microsoft/kiota-abstractions-go v1.2.1 h1:TnLF7rjy1GfhuGK2ra/a3Vuz6piFXTR1OfdNoqesagA=
-github.com/microsoft/kiota-abstractions-go v1.2.1/go.mod h1:rEeeaytcnal/If3f1tz6/spFz4V+Hiqvz3rxF+oWQFA=
+github.com/microsoft/kiota-abstractions-go v1.2.3 h1:ir+p5o/0ytcLunikHSylhYyCm2Ojvoq3pXWSYomOACc=
+github.com/microsoft/kiota-abstractions-go v1.2.3/go.mod h1:yPSuzNSOIVQSFFe1iT+3Lu5zmis22E8Wg+bkyjhd+pY=
 github.com/microsoft/kiota-authentication-azure-go v1.0.0 h1:29FNZZ/4nnCOwFcGWlB/sxPvWz487HA2bXH8jR5k2Rk=
 github.com/microsoft/kiota-authentication-azure-go v1.0.0/go.mod h1:rnx3PRlkGdXDcA/0lZQTbBwyYGmc+3POt7HpE/e4jGw=
 github.com/microsoft/kiota-http-go v1.1.0 h1:L5I93EiNtlP/X6YzeTlhjWt7Q1DxzC9CmWSVtX3b0tE=
@@ -301,8 +302,8 @@ github.com/microsoft/kiota-serialization-multipart-go v1.0.0 h1:3O5sb5Zj+moLBiJy
 github.com/microsoft/kiota-serialization-multipart-go v1.0.0/go.mod h1:yauLeBTpANk4L03XD985akNysG24SnRJGaveZf+p4so=
 github.com/microsoft/kiota-serialization-text-go v1.0.0 h1:XOaRhAXy+g8ZVpcq7x7a0jlETWnWrEum0RhmbYrTFnA=
 github.com/microsoft/kiota-serialization-text-go v1.0.0/go.mod h1:sM1/C6ecnQ7IquQOGUrUldaO5wj+9+v7G2W3sQ3fy6M=
-github.com/microsoftgraph/msgraph-sdk-go v1.19.0 h1:hx+SvDTm5ENYZFqmMIskF7tOn48zzT2Xv3OVFrxl2dc=
-github.com/microsoftgraph/msgraph-sdk-go v1.19.0/go.mod h1:3DArbqPS7riix0VsJhdtYsgPaAFAH9Jer64psW55riI=
+github.com/microsoftgraph/msgraph-sdk-go v1.20.0 h1:Hi8URs+Ll07+GojbY9lyuYUMj8rxI4mcYW+GISO7BTA=
+github.com/microsoftgraph/msgraph-sdk-go v1.20.0/go.mod h1:UTUjxLPExc1K+YLmFeyEyep6vYd1GOj2bLMSd7/lPWE=
 github.com/microsoftgraph/msgraph-sdk-go-core v1.0.0 h1:7NWTfyXvOjoizW7PmxNp3+8wCKPgpODs/D1cUZ3fkAY=
 github.com/microsoftgraph/msgraph-sdk-go-core v1.0.0/go.mod h1:tQb4q3YMIj2dWhhXhQSJ4ELpol931ANKzHSYK5kX1qE=
 github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=
@@ -327,8 +328,8 @@ github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32 h1:W6apQkHrMkS0Muv8G/TipAy
 github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32/go.mod h1:9wM+0iRr9ahx58uYLpLIr5fm8diHn0JbqRycJi6w0Ms=
 github.com/pascaldekloe/goe v0.1.0 h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY=
 github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
-github.com/pelletier/go-toml/v2 v2.0.9 h1:uH2qQXheeefCCkuBBSLi7jCiSmj3VRh2+Goq2N7Xxu0=
-github.com/pelletier/go-toml/v2 v2.0.9/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
+github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4=
+github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
 github.com/pierrec/lz4 v2.6.1+incompatible h1:9UY3+iC23yxF0UfGaYrGplQ+79Rg+h/q9FV9ix19jjM=
 github.com/pierrec/lz4 v2.6.1+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
 github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU=
@@ -338,8 +339,9 @@ github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE
 github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
 github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg=
-github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
+github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
 github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
 github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU=
@@ -373,26 +375,32 @@ github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
 github.com/rudderlabs/analytics-go v3.3.3+incompatible h1:OG0XlKoXfr539e2t1dXtTB+Gr89uFW+OUNQBVhHIIBY=
 github.com/rudderlabs/analytics-go v3.3.3+incompatible/go.mod h1:LF8/ty9kUX4PTY3l5c97K3nZZaX5Hwsvt+NBaRL/f30=
 github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
+github.com/sagikazarmark/locafero v0.3.0 h1:zT7VEGWC2DTflmccN/5T1etyKvxSxpHsjb9cJvm4SvQ=
+github.com/sagikazarmark/locafero v0.3.0/go.mod h1:w+v7UsPNFwzF1cHuOajOOzoq4U7v/ig1mpRjqV+Bu1U=
+github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE=
+github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ=
 github.com/segmentio/backo-go v1.0.1 h1:68RQccglxZeyURy93ASB/2kc9QudzgIDexJ927N++y4=
 github.com/segmentio/backo-go v1.0.1/go.mod h1:9/Rh6yILuLysoQnZ2oNooD2g7aBnvM7r/fNVxRNWfBc=
 github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
 github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
 github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
 github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
+github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
+github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
 github.com/spatialcurrent/go-lazy v0.0.0-20211115014721-47315cc003d1 h1:lQ3JvmcVO1/AMFbabvUSJ4YtJRpEAX9Qza73p5j03sw=
 github.com/spatialcurrent/go-lazy v0.0.0-20211115014721-47315cc003d1/go.mod h1:4aKqcbhASNqjbrG0h9BmkzcWvPJGxbef4B+j0XfFrZo=
-github.com/spf13/afero v1.9.5 h1:stMpOSZFs//0Lv29HduCmli3GUfpFoF3Y1Q/aXj/wVM=
-github.com/spf13/afero v1.9.5/go.mod h1:UBogFpq8E9Hx+xc5CNTTEpTnuHVmXDwZcZcE1eb/UhQ=
+github.com/spf13/afero v1.10.0 h1:EaGW2JJh15aKOejeuJ+wpFSHnbd7GE6Wvp3TsNhb6LY=
+github.com/spf13/afero v1.10.0/go.mod h1:UBogFpq8E9Hx+xc5CNTTEpTnuHVmXDwZcZcE1eb/UhQ=
 github.com/spf13/cast v1.5.1 h1:R+kOtfhWQE6TVQzY+4D7wJLBgkdVasCEFxSUBYBYIlA=
 github.com/spf13/cast v1.5.1/go.mod h1:b9PdjNptOpzXr7Rq1q9gJML/2cdGQAo69NKzQ10KN48=
 github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I=
 github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0=
-github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
-github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
 github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
 github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
-github.com/spf13/viper v1.16.0 h1:rGGH0XDZhdUOryiDWjmIvUSWpbNqisK8Wk0Vyefw8hc=
-github.com/spf13/viper v1.16.0/go.mod h1:yg78JgCJcbrQOvV9YLXgkLaZqUidkY9K+Dd1FofRzQg=
+github.com/spf13/viper v1.17.0 h1:I5txKw7MJasPL/BrfkbA0Jyo/oELqVmux4pR/UxOMfI=
+github.com/spf13/viper v1.17.0/go.mod h1:BmMMMLQXSbcHK6KAOiFLz0l5JHrU89OdIRHvsk0+yVI=
+github.com/std-uritemplate/std-uritemplate/go v0.0.42 h1:rG+XlE4drkVWs2NLfGS15N+vg+CUcjXElQKvJ0fctlI=
+github.com/std-uritemplate/std-uritemplate/go v0.0.42/go.mod h1:Qov4Ay4U83j37XjgxMYevGJFLbnZ2o9cEOhGufBKgKY=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
 github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
 github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
@@ -406,8 +414,8 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
 github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
 github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
 github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
-github.com/subosito/gotenv v1.4.2 h1:X1TuBLAMDFbaTAChgCBLu3DU3UPyELpnF2jjJ2cz/S8=
-github.com/subosito/gotenv v1.4.2/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0=
+github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
+github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
 github.com/tg123/go-htpasswd v1.2.1 h1:i4wfsX1KvvkyoMiHZzjS0VzbAPWfxzI8INcZAKtutoU=
 github.com/tg123/go-htpasswd v1.2.1/go.mod h1:erHp1B86KXdwQf1X5ZrLb7erXZnWueEQezb2dql4q58=
 github.com/tidwall/gjson v1.15.0 h1:5n/pM+v3r5ujuNl4YLZLsQ+UE5jlkLVm7jMzT5Mpolw=
@@ -428,8 +436,6 @@ github.com/vbauerster/mpb/v8 v8.1.6 h1:EswHDkAsy4OQ7QBAmU1MUPz4vHzl6KlINjlh7vJox
 github.com/vbauerster/mpb/v8 v8.1.6/go.mod h1:O9/Wl8X9dUbR63tZ41MLIAxrtNfwlpwUhGkeYugUPW8=
 github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c h1:3lbZUMbMiGUW/LMkfsEABsc5zNT9+b1CvsJx47JzJ8g=
 github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c/go.mod h1:UrdRz5enIKZ63MEE3IF9l2/ebyx59GyGgPi+tICQdmM=
-github.com/yosida95/uritemplate/v3 v3.0.2 h1:Ed3Oyj9yrmi9087+NczuL5BwkIc4wvTb5zIM+UJPGz4=
-github.com/yosida95/uritemplate/v3 v3.0.2/go.mod h1:ILOh0sOhIJR3+L/8afwt/kE++YT040gmv5BQTMR2HP4=
 github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
 github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
 github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
@@ -449,12 +455,12 @@ go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
 go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
 go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
 go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk=
-go.opentelemetry.io/otel v1.18.0 h1:TgVozPGZ01nHyDZxK5WGPFB9QexeTMXEH7+tIClWfzs=
-go.opentelemetry.io/otel v1.18.0/go.mod h1:9lWqYO0Db579XzVuCKFNPDl4s73Voa+zEck3wHaAYQI=
-go.opentelemetry.io/otel/metric v1.18.0 h1:JwVzw94UYmbx3ej++CwLUQZxEODDj/pOuTCvzhtRrSQ=
-go.opentelemetry.io/otel/metric v1.18.0/go.mod h1:nNSpsVDjWGfb7chbRLUNW+PBNdcSTHD4Uu5pfFMOI0k=
-go.opentelemetry.io/otel/trace v1.18.0 h1:NY+czwbHbmndxojTEKiSMHkG2ClNH2PwmcHrdo0JY10=
-go.opentelemetry.io/otel/trace v1.18.0/go.mod h1:T2+SGJGuYZY3bjj5rgh/hN7KIrlpWC5nS8Mjvzckz+0=
+go.opentelemetry.io/otel v1.19.0 h1:MuS/TNf4/j4IXsZuJegVzI1cwut7Qc00344rgH7p8bs=
+go.opentelemetry.io/otel v1.19.0/go.mod h1:i0QyjOq3UPoTzff0PJB2N66fb4S0+rSbSB15/oyH9fY=
+go.opentelemetry.io/otel/metric v1.19.0 h1:aTzpGtV0ar9wlV4Sna9sdJyII5jTVJEvKETPiOKwvpE=
+go.opentelemetry.io/otel/metric v1.19.0/go.mod h1:L5rUsV9kM1IxCj1MmSdS+JQAcVm319EUrDVLrt7jqt8=
+go.opentelemetry.io/otel/trace v1.19.0 h1:DFVQmlVbfVeOuBRrwdtaehRrWiL1JoVs9CPIQ1Dzxpg=
+go.opentelemetry.io/otel/trace v1.19.0/go.mod h1:mfaSyvGyEJEI0nyV2I4qhNQnbBOUUmYZpYojqMnX2vo=
 go.uber.org/goleak v1.2.0 h1:xqgm/S+aQvhWFTtR0XK3Jvg7z8kGV8P4X14IzwN3Eqk=
 go.uber.org/goleak v1.2.0/go.mod h1:XJYK+MuIchqpmGmUSAzotztawfKvYLUIgg7guXrwVUo=
 go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
@@ -470,8 +476,8 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh
 golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
 golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
 golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
-golang.org/x/crypto v0.13.0 h1:mvySKfSWJ+UKUii46M40LOvyWfN0s2U+46/jDd0e6Ck=
-golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
+golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc=
+golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
 golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -508,8 +514,8 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
-golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc=
-golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
+golang.org/x/mod v0.13.0 h1:I/DsJXRlw/8l/0c24sM9yb0T4z9liZTduXvdAWYiysY=
+golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
 golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -546,8 +552,8 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v
 golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
 golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
 golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco=
-golang.org/x/net v0.15.0 h1:ugBLEUaxABaB5AJqW9enI0ACdci2RUd4eP51NTBvuJ8=
-golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
+golang.org/x/net v0.16.0 h1:7eBu7KsSvFDtSXUIDbh3aqlK4DPsZ1rByC8PFfBThos=
+golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
 golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
 golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
 golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@@ -568,8 +574,8 @@ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJ
 golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E=
-golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
+golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ=
+golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
 golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -619,8 +625,8 @@ golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBc
 golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.12.0 h1:CM0HF96J0hcLAwsHPJZjfdNzs0gftsLfgKt57wWHJ0o=
-golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE=
+golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
 golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
@@ -688,8 +694,8 @@ golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4f
 golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
 golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
 golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
-golang.org/x/tools v0.13.0 h1:Iey4qkscZuv0VvIt8E0neZjtPVQFSc870HQ448QgEmQ=
-golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
+golang.org/x/tools v0.14.0 h1:jvNa2pY0M4r62jkRQ6RwEZZyPcymeL9XZMLBbV7U2nc=
+golang.org/x/tools v0.14.0/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -756,8 +762,8 @@ google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6D
 google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
 google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
 google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20230807174057-1744710a1577 h1:wukfNtZmZUurLN/atp2hiIeTKn7QJWIQdHzqmsOnAOk=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20230807174057-1744710a1577/go.mod h1:+Bk1OCOj40wS2hwAMA+aCW9ypzm63QTBBHp6lQ3p+9M=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20230920204549-e6e6cdab5c13 h1:N3bU/SQDCDyD6R528GJ/PwW9KjYcJA3dgyH+MovAkIM=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20230920204549-e6e6cdab5c13/go.mod h1:KSqppvjFjtoCI+KGd4PELB0qLNxdJHRGqRI09mB6pQA=
 google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
 google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
 google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
@ -774,8 +780,8 @@ google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM
google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8=
google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
google.golang.org/grpc v1.57.0 h1:kfzNeI/klCGD2YPMUlaGNT3pxvYfga7smW3Vth8Zsiw= google.golang.org/grpc v1.58.2 h1:SXUpjxeVF3FKrTYQI4f4KvbGD5u2xccdYdurwowix5I=
google.golang.org/grpc v1.57.0/go.mod h1:Sd+9RMTACXwmub0zcNY2c4arhtrbBYD1AUHI/dt16Mo= google.golang.org/grpc v1.58.2/go.mod h1:tgX3ZQDlNJGU96V6yHh1T/JeoBQ2TXdr43YbYSsCJk0=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=

View File

@ -1,8 +1,11 @@
package idname package idname
import ( import (
"context"
"fmt"
"strings" "strings"
"github.com/alcionai/clues"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
) )
@ -21,7 +24,18 @@ type Provider interface {
Name() string Name() string
} }
var _ Provider = &is{} type GetResourceIDAndNamer interface {
GetResourceIDAndNameFrom(
ctx context.Context,
owner string,
cacher Cacher,
) (Provider, error)
}
var (
_ Provider = &is{}
_ clues.Concealer = &is{}
)
type is struct { type is struct {
id string id string
@ -35,6 +49,24 @@ func NewProvider(id, name string) *is {
func (is is) ID() string { return is.id } func (is is) ID() string { return is.id }
func (is is) Name() string { return is.name } func (is is) Name() string { return is.name }
const isStringTmpl = "{id:%s, name:%s}"
func (is is) PlainString() string {
return fmt.Sprintf(isStringTmpl, is.id, is.name)
}
func (is is) Conceal() string {
return fmt.Sprintf(isStringTmpl, clues.Hide(is.id), clues.Hide(is.name))
}
func (is is) String() string {
return is.Conceal()
}
func (is is) Format(fs fmt.State, _ rune) {
fmt.Fprint(fs, is.Conceal())
}
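As a usage sketch (not part of the commit): every fmt verb on a provider now routes through Format/String and therefore Conceal, keeping raw IDs and names out of logs. The exact concealed rendering is up to clues.Hide and is an assumption here:

```go
package main

import (
	"fmt"

	"github.com/alcionai/corso/src/internal/common/idname"
)

func main() {
	p := idname.NewProvider("user-id-123", "Ada Lovelace")

	// %v and %s both route through Format/String, which delegate to
	// Conceal(), so the raw id and name never land in log output.
	fmt.Printf("%v\n", p)

	// Direct accessors still return the plain values.
	fmt.Println(p.ID(), p.Name())
}
```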
type Cacher interface { type Cacher interface {
IDOf(name string) (string, bool) IDOf(name string) (string, bool)
NameOf(id string) (string, bool) NameOf(id string) (string, bool)

View File

@ -0,0 +1,39 @@
package jwt
import (
"time"
"github.com/alcionai/clues"
jwt "github.com/golang-jwt/jwt/v5"
)
// IsJWTExpired checks whether the JWT is past expiry by inspecting the
// "exp" claim embedded in the token. The token is considered expired if
// the "exp" claim is earlier than the current time; a missing "exp"
// claim is treated as non-expired. An error is returned if the supplied
// token is malformed.
func IsJWTExpired(
rawToken string,
) (bool, error) {
p := jwt.NewParser()
// Note: the call to ParseUnverified is intentional, since token verification
// is not our objective. We only care about the embedded claims in the token,
// and assume the signature is valid and has been verified further up the
// call stack.
token, _, err := p.ParseUnverified(rawToken, &jwt.RegisteredClaims{})
if err != nil {
return false, clues.Wrap(err, "invalid jwt")
}
t, err := token.Claims.GetExpirationTime()
if err != nil {
return false, clues.Wrap(err, "getting token expiry time")
}
if t == nil {
return false, nil
}
expired := t.Before(time.Now())
return expired, nil
}
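A hedged caller sketch: before reusing a cached download URL, check whether its embedded token has lapsed and refresh if so. The import path is assumed from the repo layout:

```go
package main

import (
	"fmt"
	"log"

	"github.com/alcionai/corso/src/internal/common/jwt"
)

func main() {
	// In practice this token would be extracted from a cached item
	// download URL; the literal here is a hypothetical placeholder.
	rawToken := "header.claims.signature"

	expired, err := jwt.IsJWTExpired(rawToken)
	if err != nil {
		log.Fatalf("malformed token: %v", err)
	}

	if expired {
		fmt.Println("token expired: refetch the download URL")
		return
	}

	fmt.Println("token alive: reuse the cached URL")
}
```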

View File

@ -0,0 +1,115 @@
package jwt
import (
"testing"
"time"
jwt "github.com/golang-jwt/jwt/v5"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
)
type JWTUnitSuite struct {
tester.Suite
}
func TestJWTUnitSuite(t *testing.T) {
suite.Run(t, &JWTUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// createJWTToken creates a signed JWT token with the specified claims.
func createJWTToken(
claims jwt.RegisteredClaims,
) (string, error) {
// build a token carrying the supplied claims
token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
return token.SignedString([]byte(""))
}
const (
// Raw test token valid for 100 years.
rawToken = "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9." +
"eyJuYmYiOiIxNjkxODE5NTc5IiwiZXhwIjoiMzk0NTUyOTE3OSIsImVuZHBvaW50dXJsTGVuZ3RoIjoiMTYw" +
"IiwiaXNsb29wYmFjayI6IlRydWUiLCJ2ZXIiOiJoYXNoZWRwcm9vZnRva2VuIiwicm9sZXMiOiJhbGxmaWxl" +
"cy53cml0ZSBhbGxzaXRlcy5mdWxsY29udHJvbCBhbGxwcm9maWxlcy5yZWFkIiwidHQiOiIxIiwiYWxnIjoi" +
"SFMyNTYifQ" +
".signature"
)
func (suite *JWTUnitSuite) TestIsJWTExpired() {
table := []struct {
name string
expect bool
getToken func() (string, error)
expectErr assert.ErrorAssertionFunc
}{
{
name: "alive token",
getToken: func() (string, error) {
return createJWTToken(
jwt.RegisteredClaims{
ExpiresAt: jwt.NewNumericDate(time.Now().Add(time.Hour)),
})
},
expect: false,
expectErr: assert.NoError,
},
{
name: "expired token",
getToken: func() (string, error) {
return createJWTToken(
jwt.RegisteredClaims{
ExpiresAt: jwt.NewNumericDate(time.Now().Add(-time.Hour)),
})
},
expect: true,
expectErr: assert.NoError,
},
// Test with a raw token which is not generated with go-jwt lib.
{
name: "alive raw token",
getToken: func() (string, error) {
return rawToken, nil
},
expect: false,
expectErr: assert.NoError,
},
{
name: "alive token, missing exp claim",
getToken: func() (string, error) {
return createJWTToken(jwt.RegisteredClaims{})
},
expect: false,
expectErr: assert.NoError,
},
{
name: "malformed token",
getToken: func() (string, error) {
return "header.claims.signature", nil
},
expect: false,
expectErr: assert.Error,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
_, flush := tester.NewContext(t)
defer flush()
token, err := test.getToken()
require.NoError(t, err)
expired, err := IsJWTExpired(token)
test.expectErr(t, err)
assert.Equal(t, test.expect, expired)
})
}
}

View File

@ -27,7 +27,7 @@ func NewPrefixMap(m map[string]map[string]struct{}) *PrefixMap {
func (pm PrefixMap) AssertEqual(t *testing.T, r prefixmatcher.StringSetReader) { func (pm PrefixMap) AssertEqual(t *testing.T, r prefixmatcher.StringSetReader) {
if pm.Empty() { if pm.Empty() {
require.True(t, r.Empty(), "both prefix maps are empty") require.True(t, r.Empty(), "result prefixMap should be empty but contains keys: %+v", r.Keys())
return return
} }

View File

@ -0,0 +1,27 @@
package common
import (
"net/url"
"github.com/alcionai/clues"
)
// GetQueryParamFromURL parses a URL and returns the value of the specified
// query parameter. If the parameter occurs multiple times, the first
// occurrence is returned.
func GetQueryParamFromURL(
rawURL, queryParam string,
) (string, error) {
u, err := url.Parse(rawURL)
if err != nil {
return "", clues.Wrap(err, "parsing url")
}
qp := u.Query()
val := qp.Get(queryParam)
if len(val) == 0 {
return "", clues.New("query param not found").With("query_param", queryParam)
}
return val, nil
}
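Combined with IsJWTExpired above, this helper supports the changelog item about skipping Graph calls for expired item download URLs. A minimal sketch, assuming the tempauth parameter name used in the tests and the internal import paths; cachedURLStillValid is a hypothetical helper:

```go
package main

import (
	"fmt"

	"github.com/alcionai/corso/src/internal/common"
	"github.com/alcionai/corso/src/internal/common/jwt"
)

// cachedURLStillValid reports whether the JWT embedded in a cached item
// download URL is still usable, so callers can skip a refresh round trip.
func cachedURLStillValid(rawURL string) bool {
	token, err := common.GetQueryParamFromURL(rawURL, "tempauth")
	if err != nil {
		// No token in the URL; treat the cached URL as unusable.
		return false
	}

	expired, err := jwt.IsJWTExpired(token)

	return err == nil && !expired
}

func main() {
	fmt.Println(cachedURLStillValid("https://example.sharepoint.com/dl?tempauth=h.c.s"))
}
```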

View File

@ -0,0 +1,72 @@
package common_test
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/tester"
)
type URLUnitSuite struct {
tester.Suite
}
func TestURLUnitSuite(t *testing.T) {
suite.Run(t, &URLUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *URLUnitSuite) TestGetQueryParamFromURL() {
qp := "tempauth"
table := []struct {
name string
rawURL string
queryParam string
expectedResult string
expect assert.ErrorAssertionFunc
}{
{
name: "valid",
rawURL: "http://localhost:8080?" + qp + "=h.c.s&other=val",
queryParam: qp,
expectedResult: "h.c.s",
expect: assert.NoError,
},
{
name: "query param not found",
rawURL: "http://localhost:8080?other=val",
queryParam: qp,
expect: assert.Error,
},
{
name: "empty query param",
rawURL: "http://localhost:8080?" + qp + "=h.c.s&other=val",
queryParam: "",
expect: assert.Error,
},
// If the param occurs multiple times, the first occurrence is returned.
{
name: "multiple occurrences",
rawURL: "http://localhost:8080?" + qp + "=h.c.s&other=val&" + qp + "=h1.c1.s1",
queryParam: qp,
expectedResult: "h.c.s",
expect: assert.NoError,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
_, flush := tester.NewContext(t)
defer flush()
token, err := common.GetQueryParamFromURL(test.rawURL, test.queryParam)
test.expect(t, err)
assert.Equal(t, test.expectedResult, token)
})
}
}

View File

@ -16,23 +16,23 @@ import (
) )
var ( var (
_ Item = &unindexedPrefetchedItem{}
_ ItemModTime = &unindexedPrefetchedItem{}
_ Item = &prefetchedItem{} _ Item = &prefetchedItem{}
_ ItemInfo = &prefetchedItem{}
_ ItemModTime = &prefetchedItem{} _ ItemModTime = &prefetchedItem{}
_ Item = &unindexedLazyItem{} _ Item = &prefetchedItemWithInfo{}
_ ItemModTime = &unindexedLazyItem{} _ ItemInfo = &prefetchedItemWithInfo{}
_ ItemModTime = &prefetchedItemWithInfo{}
_ Item = &lazyItem{} _ Item = &lazyItem{}
_ ItemInfo = &lazyItem{}
_ ItemModTime = &lazyItem{} _ ItemModTime = &lazyItem{}
_ Item = &lazyItemWithInfo{}
_ ItemInfo = &lazyItemWithInfo{}
_ ItemModTime = &lazyItemWithInfo{}
) )
func NewDeletedItem(itemID string) Item { func NewDeletedItem(itemID string) Item {
return &unindexedPrefetchedItem{ return &prefetchedItem{
id: itemID, id: itemID,
deleted: true, deleted: true,
// TODO(ashmrtn): This really doesn't need to be set since deleted items are // TODO(ashmrtn): This really doesn't need to be set since deleted items are
@ -42,11 +42,11 @@ func NewDeletedItem(itemID string) Item {
} }
} }
func NewUnindexedPrefetchedItem( func NewPrefetchedItem(
reader io.ReadCloser, reader io.ReadCloser,
itemID string, itemID string,
modTime time.Time, modTime time.Time,
) (*unindexedPrefetchedItem, error) { ) (*prefetchedItem, error) {
r, err := readers.NewVersionedBackupReader( r, err := readers.NewVersionedBackupReader(
readers.SerializationFormat{Version: readers.DefaultSerializationVersion}, readers.SerializationFormat{Version: readers.DefaultSerializationVersion},
reader) reader)
@ -54,19 +54,18 @@ func NewUnindexedPrefetchedItem(
return nil, clues.Stack(err) return nil, clues.Stack(err)
} }
return &unindexedPrefetchedItem{ return &prefetchedItem{
id: itemID, id: itemID,
reader: r, reader: r,
modTime: modTime, modTime: modTime,
}, nil }, nil
} }
// unindexedPrefetchedItem represents a single item retrieved from the remote // prefetchedItem represents a single item retrieved from the remote service.
// service.
// //
// This item doesn't implement ItemInfo so it's safe to use for items like // This item doesn't implement ItemInfo so it's safe to use for items like
// metadata that shouldn't appear in backup details. // metadata that shouldn't appear in backup details.
type unindexedPrefetchedItem struct { type prefetchedItem struct {
id string id string
reader io.ReadCloser reader io.ReadCloser
// modTime is the modified time of the item. It should match the modTime in // modTime is the modified time of the item. It should match the modTime in
@ -79,48 +78,49 @@ type unindexedPrefetchedItem struct {
deleted bool deleted bool
} }
func (i unindexedPrefetchedItem) ID() string { func (i prefetchedItem) ID() string {
return i.id return i.id
} }
func (i *unindexedPrefetchedItem) ToReader() io.ReadCloser { func (i *prefetchedItem) ToReader() io.ReadCloser {
return i.reader return i.reader
} }
func (i unindexedPrefetchedItem) Deleted() bool { func (i prefetchedItem) Deleted() bool {
return i.deleted return i.deleted
} }
func (i unindexedPrefetchedItem) ModTime() time.Time { func (i prefetchedItem) ModTime() time.Time {
return i.modTime return i.modTime
} }
func NewPrefetchedItem( func NewPrefetchedItemWithInfo(
reader io.ReadCloser, reader io.ReadCloser,
itemID string, itemID string,
info details.ItemInfo, info details.ItemInfo,
) (*prefetchedItem, error) { ) (*prefetchedItemWithInfo, error) {
inner, err := NewUnindexedPrefetchedItem(reader, itemID, info.Modified()) inner, err := NewPrefetchedItem(reader, itemID, info.Modified())
if err != nil { if err != nil {
return nil, clues.Stack(err) return nil, clues.Stack(err)
} }
return &prefetchedItem{ return &prefetchedItemWithInfo{
unindexedPrefetchedItem: inner, prefetchedItem: inner,
info: info, info: info,
}, nil }, nil
} }
// prefetchedItem represents a single item retrieved from the remote service. // prefetchedItemWithInfo represents a single item retrieved from the remote
// service.
// //
// This item implements ItemInfo so it should be used for things that need to // This item implements ItemInfo so it should be used for things that need to
// appear in backup details. // appear in backup details.
type prefetchedItem struct { type prefetchedItemWithInfo struct {
*unindexedPrefetchedItem *prefetchedItem
info details.ItemInfo info details.ItemInfo
} }
func (i prefetchedItem) Info() (details.ItemInfo, error) { func (i prefetchedItemWithInfo) Info() (details.ItemInfo, error) {
return i.info, nil return i.info, nil
} }
@ -131,14 +131,14 @@ type ItemDataGetter interface {
) (io.ReadCloser, *details.ItemInfo, bool, error) ) (io.ReadCloser, *details.ItemInfo, bool, error)
} }
func NewUnindexedLazyItem( func NewLazyItem(
ctx context.Context, ctx context.Context,
itemGetter ItemDataGetter, itemGetter ItemDataGetter,
itemID string, itemID string,
modTime time.Time, modTime time.Time,
errs *fault.Bus, errs *fault.Bus,
) *unindexedLazyItem { ) *lazyItem {
return &unindexedLazyItem{ return &lazyItem{
ctx: ctx, ctx: ctx,
id: itemID, id: itemID,
itemGetter: itemGetter, itemGetter: itemGetter,
@ -147,13 +147,13 @@ func NewUnindexedLazyItem(
} }
} }
// unindexedLazyItem represents a single item retrieved from the remote service. // lazyItem represents a single item retrieved from the remote service. It
// It lazily fetches the item's data when the first call to ToReader().Read() is // lazily fetches the item's data when the first call to ToReader().Read() is
// made. // made.
// //
// This item doesn't implement ItemInfo so it's safe to use for items like // This item doesn't implement ItemInfo so it's safe to use for items like
// metadata that shouldn't appear in backup details. // metadata that shouldn't appear in backup details.
type unindexedLazyItem struct { type lazyItem struct {
ctx context.Context ctx context.Context
mu sync.Mutex mu sync.Mutex
id string id string
@ -165,19 +165,19 @@ type unindexedLazyItem struct {
// struct so we can tell if it's been set already or not. // struct so we can tell if it's been set already or not.
// //
// This also helps with garbage collection because now the golang garbage // This also helps with garbage collection because now the golang garbage
// collector can collect the lazyItem struct once the storage engine is done // collector can collect the lazyItemWithInfo struct once the storage engine
// with it. The ItemInfo struct needs to stick around until the end of the // is done with it. The ItemInfo struct needs to stick around until the end of
// backup though as backup details is written last. // the backup though as backup details is written last.
info *details.ItemInfo info *details.ItemInfo
delInFlight bool delInFlight bool
} }
func (i *unindexedLazyItem) ID() string { func (i *lazyItem) ID() string {
return i.id return i.id
} }
func (i *unindexedLazyItem) ToReader() io.ReadCloser { func (i *lazyItem) ToReader() io.ReadCloser {
return lazy.NewLazyReadCloser(func() (io.ReadCloser, error) { return lazy.NewLazyReadCloser(func() (io.ReadCloser, error) {
// Don't allow getting Item info while trying to initialize said info. // Don't allow getting Item info while trying to initialize said info.
// GetData could be a long running call, but in theory nothing should happen // GetData could be a long running call, but in theory nothing should happen
@ -219,23 +219,23 @@ func (i *unindexedLazyItem) ToReader() io.ReadCloser {
}) })
} }
func (i *unindexedLazyItem) Deleted() bool { func (i *lazyItem) Deleted() bool {
return false return false
} }
func (i *unindexedLazyItem) ModTime() time.Time { func (i *lazyItem) ModTime() time.Time {
return i.modTime return i.modTime
} }
func NewLazyItem( func NewLazyItemWithInfo(
ctx context.Context, ctx context.Context,
itemGetter ItemDataGetter, itemGetter ItemDataGetter,
itemID string, itemID string,
modTime time.Time, modTime time.Time,
errs *fault.Bus, errs *fault.Bus,
) *lazyItem { ) *lazyItemWithInfo {
return &lazyItem{ return &lazyItemWithInfo{
unindexedLazyItem: NewUnindexedLazyItem( lazyItem: NewLazyItem(
ctx, ctx,
itemGetter, itemGetter,
itemID, itemID,
@ -244,17 +244,17 @@ func NewLazyItem(
} }
} }
// lazyItem represents a single item retrieved from the remote service. It // lazyItemWithInfo represents a single item retrieved from the remote service.
// lazily fetches the item's data when the first call to ToReader().Read() is // It lazily fetches the item's data when the first call to ToReader().Read() is
// made. // made.
// //
// This item implements ItemInfo so it should be used for things that need to // This item implements ItemInfo so it should be used for things that need to
// appear in backup details. // appear in backup details.
type lazyItem struct { type lazyItemWithInfo struct {
*unindexedLazyItem *lazyItem
} }
func (i *lazyItem) Info() (details.ItemInfo, error) { func (i *lazyItemWithInfo) Info() (details.ItemInfo, error) {
i.mu.Lock() i.mu.Lock()
defer i.mu.Unlock() defer i.mu.Unlock()
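To make the rename concrete, a standalone sketch of the new constructor names, mirroring the updated tests below; the payload and IDs are placeholders:

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"time"

	"github.com/alcionai/corso/src/internal/data"
)

func main() {
	// NewPrefetchedItem (formerly NewUnindexedPrefetchedItem) carries no
	// ItemInfo, so it's safe for metadata that should stay out of backup
	// details.
	item, err := data.NewPrefetchedItem(
		io.NopCloser(bytes.NewReader([]byte("payload"))),
		"item-id",
		time.Now())
	if err != nil {
		fmt.Println("building item:", err)
		return
	}

	fmt.Println(item.ID(), item.ModTime(), item.Deleted())
}
```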

View File

@ -51,7 +51,7 @@ func TestItemUnitSuite(t *testing.T) {
} }
func (suite *ItemUnitSuite) TestUnindexedPrefetchedItem() { func (suite *ItemUnitSuite) TestUnindexedPrefetchedItem() {
prefetch, err := data.NewUnindexedPrefetchedItem( prefetch, err := data.NewPrefetchedItem(
io.NopCloser(bytes.NewReader([]byte{})), io.NopCloser(bytes.NewReader([]byte{})),
"foo", "foo",
time.Time{}) time.Time{})
@ -69,7 +69,7 @@ func (suite *ItemUnitSuite) TestUnindexedLazyItem() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
lazy := data.NewUnindexedLazyItem( lazy := data.NewLazyItem(
ctx, ctx,
nil, nil,
"foo", "foo",
@ -148,7 +148,7 @@ func (suite *ItemUnitSuite) TestPrefetchedItem() {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
item, err := data.NewPrefetchedItem(test.reader, id, test.info) item, err := data.NewPrefetchedItemWithInfo(test.reader, id, test.info)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, id, item.ID(), "ID") assert.Equal(t, id, item.ID(), "ID")
@ -291,7 +291,7 @@ func (suite *ItemUnitSuite) TestLazyItem() {
defer test.mid.check(t, true) defer test.mid.check(t, true)
item := data.NewLazyItem( item := data.NewLazyItemWithInfo(
ctx, ctx,
test.mid, test.mid,
id, id,
@ -354,7 +354,7 @@ func (suite *ItemUnitSuite) TestLazyItem_DeletedInFlight() {
mid := &mockItemDataGetter{delInFlight: true} mid := &mockItemDataGetter{delInFlight: true}
defer mid.check(t, true) defer mid.check(t, true)
item := data.NewLazyItem(ctx, mid, id, now, errs) item := data.NewLazyItemWithInfo(ctx, mid, id, now, errs)
assert.Equal(t, id, item.ID(), "ID") assert.Equal(t, id, item.ID(), "ID")
assert.False(t, item.Deleted(), "deleted") assert.False(t, item.Deleted(), "deleted")
@ -400,7 +400,7 @@ func (suite *ItemUnitSuite) TestLazyItem_InfoBeforeReadErrors() {
mid := &mockItemDataGetter{} mid := &mockItemDataGetter{}
defer mid.check(t, false) defer mid.check(t, false)
item := data.NewLazyItem(ctx, mid, id, now, errs) item := data.NewLazyItemWithInfo(ctx, mid, id, now, errs)
assert.Equal(t, id, item.ID(), "ID") assert.Equal(t, id, item.ID(), "ID")
assert.False(t, item.Deleted(), "deleted") assert.False(t, item.Deleted(), "deleted")

View File

@ -1,5 +1,12 @@
package data package data
import (
"io"
"sync/atomic"
"github.com/alcionai/corso/src/pkg/path"
)
type CollectionStats struct { type CollectionStats struct {
Folders, Folders,
Objects, Objects,
@ -15,3 +22,68 @@ func (cs CollectionStats) IsZero() bool {
func (cs CollectionStats) String() string { func (cs CollectionStats) String() string {
return cs.Details return cs.Details
} }
type KindStats struct {
BytesRead int64
ResourceCount int64
}
type ExportStats struct {
// data is kept private so that we can enforce atomic int updates
data map[path.CategoryType]KindStats
}
func (es *ExportStats) UpdateBytes(kind path.CategoryType, bytesRead int64) {
if es.data == nil {
es.data = map[path.CategoryType]KindStats{}
}
ks := es.data[kind]
atomic.AddInt64(&ks.BytesRead, bytesRead)
es.data[kind] = ks
}
func (es *ExportStats) UpdateResourceCount(kind path.CategoryType) {
if es.data == nil {
es.data = map[path.CategoryType]KindStats{}
}
ks := es.data[kind]
atomic.AddInt64(&ks.ResourceCount, 1)
es.data[kind] = ks
}
func (es *ExportStats) GetStats() map[path.CategoryType]KindStats {
return es.data
}
type statsReader struct {
io.ReadCloser
kind path.CategoryType
stats *ExportStats
}
func (sr *statsReader) Read(p []byte) (int, error) {
n, err := sr.ReadCloser.Read(p)
sr.stats.UpdateBytes(sr.kind, int64(n))
return n, err
}
// ReaderWithStats wraps the given reader so that every read updates the
// export stats for the given category.
func ReaderWithStats(
reader io.ReadCloser,
kind path.CategoryType,
stats *ExportStats,
) io.ReadCloser {
if reader == nil {
return nil
}
return &statsReader{
ReadCloser: reader,
kind: kind,
stats: stats,
}
}
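A sketch of how an export consumer might thread these stats through a run; the category constant and import paths are assumptions based on the surrounding code:

```go
package main

import (
	"fmt"
	"io"
	"strings"

	"github.com/alcionai/corso/src/internal/data"
	"github.com/alcionai/corso/src/pkg/path"
)

func main() {
	stats := &data.ExportStats{}

	// Wrap each exported item's reader; bytes read accumulate per category.
	rc := data.ReaderWithStats(
		io.NopCloser(strings.NewReader("hello world")),
		path.FilesCategory,
		stats)
	defer rc.Close()

	if _, err := io.Copy(io.Discard, rc); err != nil {
		fmt.Println("export read:", err)
		return
	}

	// Count the item once it has been fully exported.
	stats.UpdateResourceCount(path.FilesCategory)

	// At the end of the run, report the totals (as the changelog notes).
	for kind, ks := range stats.GetStats() {
		fmt.Printf("%v: %d bytes, %d items\n", kind, ks.BytesRead, ks.ResourceCount)
	}
}
```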

View File

@ -28,17 +28,12 @@ const (
tenantIDDeprecated = "m365_tenant_hash_deprecated" tenantIDDeprecated = "m365_tenant_hash_deprecated"
// Event Keys // Event Keys
CorsoStart = "Corso Start"
RepoInit = "Repo Init" RepoInit = "Repo Init"
RepoConnect = "Repo Connect" RepoConnect = "Repo Connect"
BackupStart = "Backup Start"
BackupEnd = "Backup End" BackupEnd = "Backup End"
CorsoError = "Corso Error"
RestoreStart = "Restore Start"
RestoreEnd = "Restore End" RestoreEnd = "Restore End"
ExportStart = "Export Start" CorsoError = "Corso Error"
ExportEnd = "Export End" ExportEnd = "Export End"
MaintenanceStart = "Maintenance Start"
MaintenanceEnd = "Maintenance End" MaintenanceEnd = "Maintenance End"
// Event Data Keys // Event Data Keys

View File

@ -0,0 +1,165 @@
package kopia
import (
"context"
"fmt"
"testing"
"github.com/alcionai/clues"
"github.com/kopia/kopia/repo/manifest"
"github.com/kopia/kopia/snapshot"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/internal/data"
exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/identity"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
)
func BenchmarkHierarchyMerge(b *testing.B) {
ctx, flush := tester.NewContext(b)
defer flush()
c, err := openKopiaRepo(b, ctx)
require.NoError(b, err, clues.ToCore(err))
w := &Wrapper{c}
defer func() {
err := w.Close(ctx)
assert.NoError(b, err, clues.ToCore(err))
}()
var (
cols []data.BackupCollection
collectionLimit = 1000
collectionItemsLimit = 3
itemData = []byte("abcdefghijklmnopqrstuvwxyz")
)
baseStorePath, err := path.Build(
"a-tenant",
"a-user",
path.ExchangeService,
path.EmailCategory,
false,
"Inbox")
require.NoError(b, err, clues.ToCore(err))
for i := 0; i < collectionLimit; i++ {
folderName := fmt.Sprintf("folder%d", i)
storePath, err := baseStorePath.Append(false, folderName)
require.NoError(b, err, clues.ToCore(err))
col := exchMock.NewCollection(
storePath,
storePath,
collectionItemsLimit)
for j := 0; j < collectionItemsLimit; j++ {
itemName := fmt.Sprintf("item%d", j)
col.Names[j] = itemName
col.Data[j] = itemData
}
cols = append(cols, col)
}
reasons := []identity.Reasoner{
NewReason(
testTenant,
baseStorePath.ProtectedResource(),
baseStorePath.Service(),
baseStorePath.Category()),
}
type testCase struct {
name string
baseBackups func(base ManifestEntry) BackupBases
collections []data.BackupCollection
}
// Initial backup. All files should be considered new by kopia.
baseBackupCase := testCase{
name: "Setup",
baseBackups: func(ManifestEntry) BackupBases {
return NewMockBackupBases()
},
collections: cols,
}
runAndTestBackup := func(
t tester.TestT,
ctx context.Context,
test testCase,
base ManifestEntry,
) ManifestEntry {
bbs := test.baseBackups(base)
stats, _, _, err := w.ConsumeBackupCollections(
ctx,
reasons,
bbs,
test.collections,
nil,
nil,
true,
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, 0, stats.IgnoredErrorCount)
assert.Equal(t, 0, stats.ErrorCount)
assert.False(t, stats.Incomplete)
snap, err := snapshot.LoadSnapshot(
ctx,
w.c,
manifest.ID(stats.SnapshotID))
require.NoError(t, err, clues.ToCore(err))
return ManifestEntry{
Manifest: snap,
Reasons: reasons,
}
}
b.Logf("setting up base backup\n")
base := runAndTestBackup(b, ctx, baseBackupCase, ManifestEntry{})
table := []testCase{
{
name: "Merge All",
baseBackups: func(base ManifestEntry) BackupBases {
return NewMockBackupBases().WithMergeBases(base)
},
collections: func() []data.BackupCollection {
p, err := baseStorePath.Dir()
require.NoError(b, err, clues.ToCore(err))
col := exchMock.NewCollection(p, p, 0)
col.ColState = data.NotMovedState
col.PrevPath = p
return []data.BackupCollection{col}
}(),
},
}
b.ResetTimer()
for _, test := range table {
b.Run(fmt.Sprintf("num_dirs_%d", collectionLimit), func(b *testing.B) {
ctx, flush := tester.NewContext(b)
defer flush()
for i := 0; i < b.N; i++ {
runAndTestBackup(b, ctx, test, base)
}
})
}
}

View File

@ -380,18 +380,18 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() {
siteIDs = []string{siteID} siteIDs = []string{siteID}
) )
id, name, err := ctrl.PopulateProtectedResourceIDAndName(ctx, siteID, nil) site, err := ctrl.PopulateProtectedResourceIDAndName(ctx, siteID, nil)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
sel := selectors.NewSharePointBackup(siteIDs) sel := selectors.NewSharePointBackup(siteIDs)
sel.Include(sel.LibraryFolders([]string{"foo"}, selectors.PrefixMatch())) sel.Include(sel.LibraryFolders([]string{"foo"}, selectors.PrefixMatch()))
sel.SetDiscreteOwnerIDName(id, name) sel.SetDiscreteOwnerIDName(site.ID(), site.Name())
bpc := inject.BackupProducerConfig{ bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup, LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(), Options: control.DefaultOptions(),
ProtectedResource: inMock.NewProvider(id, name), ProtectedResource: site,
Selector: sel.Selector, Selector: sel.Selector,
} }
@ -430,18 +430,18 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Lists() {
siteIDs = []string{siteID} siteIDs = []string{siteID}
) )
id, name, err := ctrl.PopulateProtectedResourceIDAndName(ctx, siteID, nil) site, err := ctrl.PopulateProtectedResourceIDAndName(ctx, siteID, nil)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
sel := selectors.NewSharePointBackup(siteIDs) sel := selectors.NewSharePointBackup(siteIDs)
sel.Include(sel.Lists(selectors.Any())) sel.Include(sel.Lists(selectors.Any()))
sel.SetDiscreteOwnerIDName(id, name) sel.SetDiscreteOwnerIDName(site.ID(), site.Name())
bpc := inject.BackupProducerConfig{ bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup, LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(), Options: control.DefaultOptions(),
ProtectedResource: inMock.NewProvider(id, name), ProtectedResource: site,
Selector: sel.Selector, Selector: sel.Selector,
} }
@ -516,18 +516,18 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint()
groupIDs = []string{groupID} groupIDs = []string{groupID}
) )
id, name, err := ctrl.PopulateProtectedResourceIDAndName(ctx, groupID, nil) group, err := ctrl.PopulateProtectedResourceIDAndName(ctx, groupID, nil)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
sel := selectors.NewGroupsBackup(groupIDs) sel := selectors.NewGroupsBackup(groupIDs)
sel.Include(sel.LibraryFolders([]string{"test"}, selectors.PrefixMatch())) sel.Include(sel.LibraryFolders([]string{"test"}, selectors.PrefixMatch()))
sel.SetDiscreteOwnerIDName(id, name) sel.SetDiscreteOwnerIDName(group.ID(), group.Name())
bpc := inject.BackupProducerConfig{ bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup, LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(), Options: control.DefaultOptions(),
ProtectedResource: inMock.NewProvider(id, name), ProtectedResource: group,
Selector: sel.Selector, Selector: sel.Selector,
} }
@ -590,13 +590,13 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint_In
groupIDs = []string{groupID} groupIDs = []string{groupID}
) )
id, name, err := ctrl.PopulateProtectedResourceIDAndName(ctx, groupID, nil) group, err := ctrl.PopulateProtectedResourceIDAndName(ctx, groupID, nil)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
sel := selectors.NewGroupsBackup(groupIDs) sel := selectors.NewGroupsBackup(groupIDs)
sel.Include(sel.LibraryFolders([]string{"test"}, selectors.PrefixMatch())) sel.Include(sel.LibraryFolders([]string{"test"}, selectors.PrefixMatch()))
sel.SetDiscreteOwnerIDName(id, name) sel.SetDiscreteOwnerIDName(group.ID(), group.Name())
site, err := suite.connector.AC.Groups().GetRootSite(ctx, groupID) site, err := suite.connector.AC.Groups().GetRootSite(ctx, groupID)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -626,7 +626,7 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint_In
bpc := inject.BackupProducerConfig{ bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup, LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(), Options: control.DefaultOptions(),
ProtectedResource: inMock.NewProvider(id, name), ProtectedResource: group,
Selector: sel.Selector, Selector: sel.Selector,
MetadataCollections: mmc, MetadataCollections: mmc,
} }

View File

@ -13,6 +13,7 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/spatialcurrent/go-lazy/pkg/lazy" "github.com/spatialcurrent/go-lazy/pkg/lazy"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata" "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
@ -39,6 +40,9 @@ var _ data.BackupCollection = &Collection{}
type Collection struct { type Collection struct {
handler BackupHandler handler BackupHandler
// the protected resource represented in this collection.
protectedResource idname.Provider
// data is used to share data streams with the collection consumer // data is used to share data streams with the collection consumer
data chan data.Item data chan data.Item
// folderPath indicates what level in the hierarchy this collection // folderPath indicates what level in the hierarchy this collection
@ -98,6 +102,7 @@ func pathToLocation(p path.Path) (*path.Builder, error) {
// NewCollection creates a Collection // NewCollection creates a Collection
func NewCollection( func NewCollection(
handler BackupHandler, handler BackupHandler,
resource idname.Provider,
currPath path.Path, currPath path.Path,
prevPath path.Path, prevPath path.Path,
driveID string, driveID string,
@ -123,6 +128,7 @@ func NewCollection(
c := newColl( c := newColl(
handler, handler,
resource,
currPath, currPath,
prevPath, prevPath,
driveID, driveID,
@ -140,6 +146,7 @@ func NewCollection(
func newColl( func newColl(
handler BackupHandler, handler BackupHandler,
resource idname.Provider,
currPath path.Path, currPath path.Path,
prevPath path.Path, prevPath path.Path,
driveID string, driveID string,
@ -151,6 +158,7 @@ func newColl(
) *Collection { ) *Collection {
c := &Collection{ c := &Collection{
handler: handler, handler: handler,
protectedResource: resource,
folderPath: currPath, folderPath: currPath,
prevPath: prevPath, prevPath: prevPath,
driveItems: map[string]models.DriveItemable{}, driveItems: map[string]models.DriveItemable{},
@ -265,9 +273,9 @@ func (oc *Collection) getDriveItemContent(
// Skip big OneNote files as they can't be downloaded // Skip big OneNote files as they can't be downloaded
if clues.HasLabel(err, graph.LabelStatus(http.StatusServiceUnavailable)) && if clues.HasLabel(err, graph.LabelStatus(http.StatusServiceUnavailable)) &&
// oc.scope == CollectionScopePackage && *item.GetSize() >= MaxOneNoteFileSize {
// TODO: We've removed the file size check because it looks like we've seen persistent // TODO: We've removed the file size check because it looks like we've seen persistent
// 503's with smaller OneNote files also. // 503's with smaller OneNote files also.
// oc.scope == CollectionScopePackage && *item.GetSize() >= MaxOneNoteFileSize {
oc.scope == CollectionScopePackage { oc.scope == CollectionScopePackage {
// FIXME: It is possible that in case of a OneNote file we // FIXME: It is possible that in case of a OneNote file we
// will end up just backing up the `onetoc2` file without // will end up just backing up the `onetoc2` file without
@ -275,10 +283,18 @@ func (oc *Collection) getDriveItemContent(
// "item". This will have to be handled during the // "item". This will have to be handled during the
// restore, or we have to handle it separately by somehow // restore, or we have to handle it separately by somehow
// deleting the entire collection. // deleting the entire collection.
logger.CtxErr(ctx, err).With("skipped_reason", fault.SkipBigOneNote).Info("max OneNote file size exceeded") logger.
errs.AddSkip(ctx, fault.FileSkip(fault.SkipBigOneNote, driveID, itemID, itemName, graph.ItemInfo(item))) CtxErr(ctx, err).
With("skipped_reason", fault.SkipOneNote).
Info("inaccessible one note file")
errs.AddSkip(ctx, fault.FileSkip(
fault.SkipOneNote,
driveID,
itemID,
itemName,
graph.ItemInfo(item)))
return nil, clues.Wrap(err, "max oneNote item").Label(graph.LabelsSkippable) return nil, clues.Wrap(err, "inaccesible oneNote item").Label(graph.LabelsSkippable)
} }
errs.AddRecoverable( errs.AddRecoverable(
@ -551,9 +567,22 @@ func (oc *Collection) streamDriveItem(
return return
} }
itemInfo = oc.handler.AugmentItemInfo(itemInfo, item, itemSize, parentPath) itemInfo = oc.handler.AugmentItemInfo(
itemInfo,
oc.protectedResource,
item,
itemSize,
parentPath)
ctx = clues.Add(ctx, "item_info", itemInfo) ctx = clues.Add(ctx, "item_info", itemInfo)
// Drive content download requests are also rate limited by the Graph API.
// Ensure that this request goes through the drive limiter and not the
// default limiter.
ctx = graph.BindRateLimiterConfig(
ctx,
graph.LimiterCfg{
Service: path.OneDriveService,
})
if isFile { if isFile {
dataSuffix := metadata.DataFileSuffix dataSuffix := metadata.DataFileSuffix
@ -562,7 +591,7 @@ func (oc *Collection) streamDriveItem(
// This ensures that downloads won't be attempted unless that consumer // This ensures that downloads won't be attempted unless that consumer
// attempts to read bytes. Assumption is that kopia will check things // attempts to read bytes. Assumption is that kopia will check things
// like file modtimes before attempting to read. // like file modtimes before attempting to read.
oc.data <- data.NewLazyItem( oc.data <- data.NewLazyItemWithInfo(
ctx, ctx,
&lazyItemGetter{ &lazyItemGetter{
info: &itemInfo, info: &itemInfo,
@ -587,7 +616,7 @@ func (oc *Collection) streamDriveItem(
return progReader, nil return progReader, nil
}) })
storeItem, err := data.NewUnindexedPrefetchedItem( storeItem, err := data.NewPrefetchedItem(
metaReader, metaReader,
metaFileName+metaSuffix, metaFileName+metaSuffix,
// Metadata file should always use the latest time as // Metadata file should always use the latest time as

View File

@ -207,6 +207,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
coll, err := NewCollection( coll, err := NewCollection(
mbh, mbh,
mbh.ProtectedResource,
folderPath, folderPath,
nil, nil,
"drive-id", "drive-id",
@ -328,6 +329,7 @@ func (suite *CollectionUnitSuite) TestCollectionReadError() {
coll, err := NewCollection( coll, err := NewCollection(
mbh, mbh,
mbh.ProtectedResource,
folderPath, folderPath,
nil, nil,
"fakeDriveID", "fakeDriveID",
@ -405,6 +407,7 @@ func (suite *CollectionUnitSuite) TestCollectionReadUnauthorizedErrorRetry() {
coll, err := NewCollection( coll, err := NewCollection(
mbh, mbh,
mbh.ProtectedResource,
folderPath, folderPath,
nil, nil,
"fakeDriveID", "fakeDriveID",
@ -460,6 +463,7 @@ func (suite *CollectionUnitSuite) TestCollectionPermissionBackupLatestModTime()
coll, err := NewCollection( coll, err := NewCollection(
mbh, mbh,
mbh.ProtectedResource,
folderPath, folderPath,
nil, nil,
"drive-id", "drive-id",
@ -971,6 +975,7 @@ func (suite *CollectionUnitSuite) TestItemExtensions() {
coll, err := NewCollection( coll, err := NewCollection(
mbh, mbh,
mbh.ProtectedResource,
folderPath, folderPath,
nil, nil,
driveID, driveID,

View File

@ -11,6 +11,7 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/prefixmatcher" "github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
@ -49,7 +50,7 @@ type Collections struct {
handler BackupHandler handler BackupHandler
tenantID string tenantID string
resourceOwner string protectedResource idname.Provider
statusUpdater support.StatusUpdater statusUpdater support.StatusUpdater
@ -69,14 +70,14 @@ type Collections struct {
func NewCollections( func NewCollections(
bh BackupHandler, bh BackupHandler,
tenantID string, tenantID string,
resourceOwner string, protectedResource idname.Provider,
statusUpdater support.StatusUpdater, statusUpdater support.StatusUpdater,
ctrlOpts control.Options, ctrlOpts control.Options,
) *Collections { ) *Collections {
return &Collections{ return &Collections{
handler: bh, handler: bh,
tenantID: tenantID, tenantID: tenantID,
resourceOwner: resourceOwner, protectedResource: protectedResource,
CollectionMap: map[string]map[string]*Collection{}, CollectionMap: map[string]map[string]*Collection{},
statusUpdater: statusUpdater, statusUpdater: statusUpdater,
ctrl: ctrlOpts, ctrl: ctrlOpts,
@ -227,16 +228,16 @@ func (c *Collections) Get(
ssmb *prefixmatcher.StringSetMatchBuilder, ssmb *prefixmatcher.StringSetMatchBuilder,
errs *fault.Bus, errs *fault.Bus,
) ([]data.BackupCollection, bool, error) { ) ([]data.BackupCollection, bool, error) {
prevDeltas, oldPathsByDriveID, canUsePreviousBackup, err := deserializeMetadata(ctx, prevMetadata) prevDriveIDToDelta, oldPrevPathsByDriveID, canUsePrevBackup, err := deserializeMetadata(ctx, prevMetadata)
if err != nil { if err != nil {
return nil, false, err return nil, false, err
} }
ctx = clues.Add(ctx, "can_use_previous_backup", canUsePreviousBackup) ctx = clues.Add(ctx, "can_use_previous_backup", canUsePrevBackup)
driveTombstones := map[string]struct{}{} driveTombstones := map[string]struct{}{}
for driveID := range oldPathsByDriveID { for driveID := range oldPrevPathsByDriveID {
driveTombstones[driveID] = struct{}{} driveTombstones[driveID] = struct{}{}
} }
@ -246,7 +247,7 @@ func (c *Collections) Get(
defer close(progressBar) defer close(progressBar)
// Enumerate drives for the specified resourceOwner // Enumerate drives for the specified resourceOwner
pager := c.handler.NewDrivePager(c.resourceOwner, nil) pager := c.handler.NewDrivePager(c.protectedResource.ID(), nil)
drives, err := api.GetAllDrives(ctx, pager) drives, err := api.GetAllDrives(ctx, pager)
if err != nil { if err != nil {
@ -254,10 +255,8 @@ func (c *Collections) Get(
} }
var ( var (
// Drive ID -> delta URL for drive driveIDToDeltaLink = map[string]string{}
deltaURLs = map[string]string{} driveIDToPrevPaths = map[string]map[string]string{}
// Drive ID -> folder ID -> folder path
folderPaths = map[string]map[string]string{}
numPrevItems = 0 numPrevItems = 0
) )
@ -265,65 +264,80 @@ func (c *Collections) Get(
var ( var (
driveID = ptr.Val(d.GetId()) driveID = ptr.Val(d.GetId())
driveName = ptr.Val(d.GetName()) driveName = ptr.Val(d.GetName())
prevDelta = prevDeltas[driveID] ictx = clues.Add(
oldPaths = oldPathsByDriveID[driveID] ctx,
numOldDelta = 0 "drive_id", driveID,
ictx = clues.Add(ctx, "drive_id", driveID, "drive_name", driveName) "drive_name", clues.Hide(driveName))
excludedItemIDs = map[string]struct{}{}
oldPrevPaths = oldPrevPathsByDriveID[driveID]
prevDeltaLink = prevDriveIDToDelta[driveID]
// itemCollection identifies which collection a file belongs to.
// This is useful for deleting a file from the collection it was
// previously in when it gets moved to a different collection
// within the same delta query.
// item ID -> parent collection ID
itemCollection = map[string]string{}
) )
delete(driveTombstones, driveID) delete(driveTombstones, driveID)
if _, ok := driveIDToPrevPaths[driveID]; !ok {
driveIDToPrevPaths[driveID] = map[string]string{}
}
if _, ok := c.CollectionMap[driveID]; !ok { if _, ok := c.CollectionMap[driveID]; !ok {
c.CollectionMap[driveID] = map[string]*Collection{} c.CollectionMap[driveID] = map[string]*Collection{}
} }
if len(prevDelta) > 0 {
numOldDelta++
}
logger.Ctx(ictx).Infow( logger.Ctx(ictx).Infow(
"previous metadata for drive", "previous metadata for drive",
"num_paths_entries", len(oldPaths), "num_paths_entries", len(oldPrevPaths))
"num_deltas_entries", numOldDelta)
delta, paths, excluded, err := collectItems( items, du, err := c.handler.EnumerateDriveItemsDelta(
ictx, ictx,
c.handler.NewItemPager(driveID, "", api.DriveItemSelectDefault()),
driveID, driveID,
driveName, prevDeltaLink,
c.UpdateCollections, api.DefaultDriveItemProps())
oldPaths,
prevDelta,
errs)
if err != nil { if err != nil {
return nil, false, err return nil, false, err
} }
// Used for logging below.
numDeltas := 0
// It's alright to have an empty folders map (i.e. no folders found) but not // It's alright to have an empty folders map (i.e. no folders found) but not
// an empty delta token. This is because when deserializing the metadata we // an empty delta token. This is because when deserializing the metadata we
// remove entries for which there is no corresponding delta token/folder. If // remove entries for which there is no corresponding delta token/folder. If
// we leave empty delta tokens then we may end up setting the State field // we leave empty delta tokens then we may end up setting the State field
// for collections when not actually getting delta results. // for collections when not actually getting delta results.
if len(delta.URL) > 0 { if len(du.URL) > 0 {
deltaURLs[driveID] = delta.URL driveIDToDeltaLink[driveID] = du.URL
numDeltas++ }
newPrevPaths, err := c.UpdateCollections(
ctx,
driveID,
driveName,
items,
oldPrevPaths,
itemCollection,
excludedItemIDs,
du.Reset,
errs)
if err != nil {
return nil, false, clues.Stack(err)
} }
// Avoid the edge case where there's no paths but we do have a valid delta // Avoid the edge case where there's no paths but we do have a valid delta
// token. We can accomplish this by adding an empty paths map for this // token. We can accomplish this by adding an empty paths map for this
// drive. If we don't have this then the next backup won't use the delta // drive. If we don't have this then the next backup won't use the delta
// token because it thinks the folder paths weren't persisted. // token because it thinks the folder paths weren't persisted.
folderPaths[driveID] = map[string]string{} driveIDToPrevPaths[driveID] = map[string]string{}
maps.Copy(folderPaths[driveID], paths) maps.Copy(driveIDToPrevPaths[driveID], newPrevPaths)
logger.Ctx(ictx).Infow( logger.Ctx(ictx).Infow(
"persisted metadata for drive", "persisted metadata for drive",
"num_paths_entries", len(paths), "num_new_paths_entries", len(newPrevPaths),
"num_deltas_entries", numDeltas, "delta_reset", du.Reset)
"delta_reset", delta.Reset)
numDriveItems := c.NumItems - numPrevItems numDriveItems := c.NumItems - numPrevItems
numPrevItems = c.NumItems numPrevItems = c.NumItems
@ -335,7 +349,7 @@ func (c *Collections) Get(
err = c.addURLCacheToDriveCollections( err = c.addURLCacheToDriveCollections(
ictx, ictx,
driveID, driveID,
prevDelta, prevDeltaLink,
errs) errs)
if err != nil { if err != nil {
return nil, false, err return nil, false, err
@ -344,8 +358,8 @@ func (c *Collections) Get(
// For both cases we don't need to do set difference on folder map if the // For both cases we don't need to do set difference on folder map if the
// delta token was valid because we should see all the changes. // delta token was valid because we should see all the changes.
if !delta.Reset { if !du.Reset {
if len(excluded) == 0 { if len(excludedItemIDs) == 0 {
continue continue
} }
@ -354,7 +368,7 @@ func (c *Collections) Get(
return nil, false, clues.Wrap(err, "making exclude prefix").WithClues(ictx) return nil, false, clues.Wrap(err, "making exclude prefix").WithClues(ictx)
} }
ssmb.Add(p.String(), excluded) ssmb.Add(p.String(), excludedItemIDs)
continue continue
} }
@ -369,13 +383,11 @@ func (c *Collections) Get(
foundFolders[id] = struct{}{} foundFolders[id] = struct{}{}
} }
for fldID, p := range oldPaths { for fldID, p := range oldPrevPaths {
if _, ok := foundFolders[fldID]; ok { if _, ok := foundFolders[fldID]; ok {
continue continue
} }
delete(paths, fldID)
prevPath, err := path.FromDataLayerPath(p, false) prevPath, err := path.FromDataLayerPath(p, false)
if err != nil { if err != nil {
err = clues.Wrap(err, "invalid previous path").WithClues(ictx).With("deleted_path", p) err = clues.Wrap(err, "invalid previous path").WithClues(ictx).With("deleted_path", p)
@ -384,6 +396,7 @@ func (c *Collections) Get(
col, err := NewCollection( col, err := NewCollection(
c.handler, c.handler,
c.protectedResource,
nil, // delete the folder nil, // delete the folder
prevPath, prevPath,
driveID, driveID,
@ -420,6 +433,7 @@ func (c *Collections) Get(
coll, err := NewCollection( coll, err := NewCollection(
c.handler, c.handler,
c.protectedResource,
nil, // delete the drive nil, // delete the drive
prevDrivePath, prevDrivePath,
driveID, driveID,
@ -443,14 +457,14 @@ func (c *Collections) Get(
// empty/missing and default to a full backup. // empty/missing and default to a full backup.
logger.CtxErr(ctx, err).Info("making metadata collection path prefixes") logger.CtxErr(ctx, err).Info("making metadata collection path prefixes")
return collections, canUsePreviousBackup, nil return collections, canUsePrevBackup, nil
} }
md, err := graph.MakeMetadataCollection( md, err := graph.MakeMetadataCollection(
pathPrefix, pathPrefix,
[]graph.MetadataCollectionEntry{ []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(bupMD.PreviousPathFileName, folderPaths), graph.NewMetadataEntry(bupMD.PreviousPathFileName, driveIDToPrevPaths),
graph.NewMetadataEntry(bupMD.DeltaURLsFileName, deltaURLs), graph.NewMetadataEntry(bupMD.DeltaURLsFileName, driveIDToDeltaLink),
}, },
c.statusUpdater) c.statusUpdater)
@ -463,7 +477,7 @@ func (c *Collections) Get(
collections = append(collections, md) collections = append(collections, md)
} }
return collections, canUsePreviousBackup, nil return collections, canUsePrevBackup, nil
} }
// addURLCacheToDriveCollections adds an URL cache to all collections belonging to // addURLCacheToDriveCollections adds an URL cache to all collections belonging to
@ -477,7 +491,7 @@ func (c *Collections) addURLCacheToDriveCollections(
driveID, driveID,
prevDelta, prevDelta,
urlCacheRefreshInterval, urlCacheRefreshInterval,
c.handler.NewItemPager(driveID, "", api.DriveItemSelectURLCache()), c.handler,
errs) errs)
if err != nil { if err != nil {
return err return err
@ -533,22 +547,21 @@ func updateCollectionPaths(
func (c *Collections) handleDelete( func (c *Collections) handleDelete(
itemID, driveID string, itemID, driveID string,
oldPaths, newPaths map[string]string, oldPrevPaths, currPrevPaths, newPrevPaths map[string]string,
isFolder bool, isFolder bool,
excluded map[string]struct{}, excluded map[string]struct{},
itemCollection map[string]map[string]string,
invalidPrevDelta bool, invalidPrevDelta bool,
) error { ) error {
if !isFolder { if !isFolder {
// Try to remove the item from the Collection if an entry exists for this // Try to remove the item from the Collection if an entry exists for this
// item. This handles cases where an item was created and deleted during the // item. This handles cases where an item was created and deleted during the
// same delta query. // same delta query.
if parentID, ok := itemCollection[driveID][itemID]; ok { if parentID, ok := currPrevPaths[itemID]; ok {
if col := c.CollectionMap[driveID][parentID]; col != nil { if col := c.CollectionMap[driveID][parentID]; col != nil {
col.Remove(itemID) col.Remove(itemID)
} }
delete(itemCollection[driveID], itemID) delete(currPrevPaths, itemID)
} }
// Don't need to add to exclude list if the delta is invalid since the // Don't need to add to exclude list if the delta is invalid since the
@ -569,7 +582,7 @@ func (c *Collections) handleDelete(
var prevPath path.Path var prevPath path.Path
prevPathStr, ok := oldPaths[itemID] prevPathStr, ok := oldPrevPaths[itemID]
if ok { if ok {
var err error var err error
@ -586,7 +599,7 @@ func (c *Collections) handleDelete(
// Nested folders also return deleted delta results so we don't have to // Nested folders also return deleted delta results so we don't have to
// worry about doing a prefix search in the map to remove the subtree of // worry about doing a prefix search in the map to remove the subtree of
// the deleted folder/package. // the deleted folder/package.
delete(newPaths, itemID) delete(newPrevPaths, itemID)
if prevPath == nil || invalidPrevDelta { if prevPath == nil || invalidPrevDelta {
// It is possible that an item was created and deleted between two delta // It is possible that an item was created and deleted between two delta
@ -605,6 +618,7 @@ func (c *Collections) handleDelete(
col, err := NewCollection( col, err := NewCollection(
c.handler, c.handler,
c.protectedResource,
nil, // deletes the collection nil, // deletes the collection
prevPath, prevPath,
driveID, driveID,
@ -676,21 +690,29 @@ func (c *Collections) getCollectionPath(
// UpdateCollections initializes and adds the provided drive items to Collections // UpdateCollections initializes and adds the provided drive items to Collections
// A new collection is created for every drive folder (or package). // A new collection is created for every drive folder (or package).
// oldPaths is the unchanged data that was loaded from the metadata file. // oldPrevPaths is the unchanged data that was loaded from the metadata file.
// newPaths starts as a copy of oldPaths and is updated as changes are found in // This map is not modified during the call.
// the returned results. // currPrevPaths starts as a copy of oldPaths and is updated as changes are found in
// the returned results. Items are added to this collection throughout the call.
// newPrevPaths, i.e. the items added during this call, is returned as a map.
func (c *Collections) UpdateCollections( func (c *Collections) UpdateCollections(
ctx context.Context, ctx context.Context,
driveID, driveName string, driveID, driveName string,
items []models.DriveItemable, items []models.DriveItemable,
oldPaths map[string]string, oldPrevPaths map[string]string,
newPaths map[string]string, currPrevPaths map[string]string,
excluded map[string]struct{}, excluded map[string]struct{},
itemCollection map[string]map[string]string,
invalidPrevDelta bool, invalidPrevDelta bool,
errs *fault.Bus, errs *fault.Bus,
) error { ) (map[string]string, error) {
el := errs.Local() var (
el = errs.Local()
newPrevPaths = map[string]string{}
)
if !invalidPrevDelta {
maps.Copy(newPrevPaths, oldPrevPaths)
}
for _, item := range items { for _, item := range items {
if el.Failure() != nil { if el.Failure() != nil {
@ -700,8 +722,12 @@ func (c *Collections) UpdateCollections(
var ( var (
itemID = ptr.Val(item.GetId()) itemID = ptr.Val(item.GetId())
itemName = ptr.Val(item.GetName()) itemName = ptr.Val(item.GetName())
ictx = clues.Add(ctx, "item_id", itemID, "item_name", clues.Hide(itemName))
isFolder = item.GetFolder() != nil || item.GetPackageEscaped() != nil isFolder = item.GetFolder() != nil || item.GetPackageEscaped() != nil
ictx = clues.Add(
ctx,
"item_id", itemID,
"item_name", clues.Hide(itemName),
"item_is_folder", isFolder)
) )
if item.GetMalware() != nil { if item.GetMalware() != nil {
@ -723,13 +749,13 @@ func (c *Collections) UpdateCollections(
if err := c.handleDelete( if err := c.handleDelete(
itemID, itemID,
driveID, driveID,
oldPaths, oldPrevPaths,
newPaths, currPrevPaths,
newPrevPaths,
isFolder, isFolder,
excluded, excluded,
itemCollection,
invalidPrevDelta); err != nil { invalidPrevDelta); err != nil {
return clues.Stack(err).WithClues(ictx) return nil, clues.Stack(err).WithClues(ictx)
} }
continue continue
@ -755,13 +781,13 @@ func (c *Collections) UpdateCollections(
// Deletions are handled above so this is just moves/renames. // Deletions are handled above so this is just moves/renames.
var prevPath path.Path var prevPath path.Path
prevPathStr, ok := oldPaths[itemID] prevPathStr, ok := oldPrevPaths[itemID]
if ok { if ok {
prevPath, err = path.FromDataLayerPath(prevPathStr, false) prevPath, err = path.FromDataLayerPath(prevPathStr, false)
if err != nil { if err != nil {
el.AddRecoverable(ctx, clues.Wrap(err, "invalid previous path"). el.AddRecoverable(ctx, clues.Wrap(err, "invalid previous path").
WithClues(ictx). WithClues(ictx).
With("path_string", prevPathStr)) With("prev_path_string", path.LoggableDir(prevPathStr)))
} }
} else if item.GetRoot() != nil { } else if item.GetRoot() != nil {
// Root doesn't move or get renamed. // Root doesn't move or get renamed.
@ -771,11 +797,11 @@ func (c *Collections) UpdateCollections(
// Moved folders don't cause delta results for any subfolders nested in // Moved folders don't cause delta results for any subfolders nested in
// them. We need to go through and update paths to handle that. We only // them. We need to go through and update paths to handle that. We only
// update newPaths so we don't accidentally clobber previous deletes. // update newPaths so we don't accidentally clobber previous deletes.
updatePath(newPaths, itemID, collectionPath.String()) updatePath(newPrevPaths, itemID, collectionPath.String())
found, err := updateCollectionPaths(driveID, itemID, c.CollectionMap, collectionPath) found, err := updateCollectionPaths(driveID, itemID, c.CollectionMap, collectionPath)
if err != nil { if err != nil {
return clues.Stack(err).WithClues(ictx) return nil, clues.Stack(err).WithClues(ictx)
} }
if found { if found {
@ -787,8 +813,11 @@ func (c *Collections) UpdateCollections(
colScope = CollectionScopePackage colScope = CollectionScopePackage
} }
ictx = clues.Add(ictx, "collection_scope", colScope)
col, err := NewCollection( col, err := NewCollection(
c.handler, c.handler,
c.protectedResource,
collectionPath, collectionPath,
prevPath, prevPath,
driveID, driveID,
@ -798,7 +827,7 @@ func (c *Collections) UpdateCollections(
invalidPrevDelta, invalidPrevDelta,
nil) nil)
if err != nil { if err != nil {
return clues.Stack(err).WithClues(ictx) return nil, clues.Stack(err).WithClues(ictx)
} }
col.driveName = driveName col.driveName = driveName
@ -820,35 +849,38 @@ func (c *Collections) UpdateCollections(
case item.GetFile() != nil: case item.GetFile() != nil:
// Deletions are handled above so this is just moves/renames. // Deletions are handled above so this is just moves/renames.
if len(ptr.Val(item.GetParentReference().GetId())) == 0 { if len(ptr.Val(item.GetParentReference().GetId())) == 0 {
return clues.New("file without parent ID").WithClues(ictx) return nil, clues.New("file without parent ID").WithClues(ictx)
} }
// Get the collection for this item. // Get the collection for this item.
parentID := ptr.Val(item.GetParentReference().GetId()) parentID := ptr.Val(item.GetParentReference().GetId())
ictx = clues.Add(ictx, "parent_id", parentID) ictx = clues.Add(ictx, "parent_id", parentID)
collection, found := c.CollectionMap[driveID][parentID] collection, ok := c.CollectionMap[driveID][parentID]
if !ok {
return nil, clues.New("item seen before parent folder").WithClues(ictx)
}
// This will only kick in if the file was moved multiple times
// within a single delta query. We delete the file from the previous
// collection so that it doesn't appear in two places.
prevParentContainerID, ok := currPrevPaths[itemID]
if ok {
prevColl, found := c.CollectionMap[driveID][prevParentContainerID]
if !found { if !found {
return clues.New("item seen before parent folder").WithClues(ictx) return nil, clues.New("previous collection not found").
With("prev_parent_container_id", prevParentContainerID).
WithClues(ictx)
} }
// Delete the file from previous collection. This will if ok := prevColl.Remove(itemID); !ok {
// only kick in if the file was moved multiple times return nil, clues.New("removing item from prev collection").
// within a single delta query With("prev_parent_container_id", prevParentContainerID).
icID, found := itemCollection[driveID][itemID] WithClues(ictx)
if found {
pcollection, found := c.CollectionMap[driveID][icID]
if !found {
return clues.New("previous collection not found").WithClues(ictx)
}
removed := pcollection.Remove(itemID)
if !removed {
return clues.New("removing from prev collection").WithClues(ictx)
} }
} }
itemCollection[driveID][itemID] = parentID currPrevPaths[itemID] = parentID
if collection.Add(item) { if collection.Add(item) {
c.NumItems++ c.NumItems++
@ -869,11 +901,13 @@ func (c *Collections) UpdateCollections(
} }
default: default:
return clues.New("item type not supported").WithClues(ictx) el.AddRecoverable(ictx, clues.New("item is neither folder nor file").
WithClues(ictx).
Label(fault.LabelForceNoBackupCreation))
} }
} }
return el.Failure() return newPrevPaths, el.Failure()
} }
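
UpdateCollections now returns the new previous-paths map instead of mutating one passed in by the caller, seeding it from the old map only while the previous delta token is still valid. A minimal sketch of that bookkeeping follows; buildPrevPaths and its parameters are illustrative stand-ins, not code from this change (stdlib maps requires Go 1.21, which this repo already targets):

package main

import (
	"fmt"
	"maps"
)

func buildPrevPaths(
	oldPrevPaths map[string]string, // folder ID -> path from the prior backup
	invalidPrevDelta bool, // true when the delta token was reset
	updates map[string]string, // folder ID -> current path from this enumeration
) map[string]string {
	newPrevPaths := map[string]string{}

	// On a delta reset every folder is re-enumerated, so start from scratch
	// rather than carrying forward stale paths.
	if !invalidPrevDelta {
		maps.Copy(newPrevPaths, oldPrevPaths)
	}

	for id, p := range updates {
		newPrevPaths[id] = p
	}

	return newPrevPaths
}

func main() {
	old := map[string]string{"root": "/", "folder": "/folder"}
	upd := map[string]string{"folder": "/folder2"}

	fmt.Println(buildPrevPaths(old, false, upd)) // map[folder:/folder2 root:/]
	fmt.Println(buildPrevPaths(old, true, upd))  // map[folder:/folder2]
}
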
type dirScopeChecker interface { type dirScopeChecker interface {


@ -8,12 +8,12 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/prefixmatcher" "github.com/alcionai/corso/src/internal/common/prefixmatcher"
pmMock "github.com/alcionai/corso/src/internal/common/prefixmatcher/mock" pmMock "github.com/alcionai/corso/src/internal/common/prefixmatcher/mock"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
@ -137,7 +137,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedStatePath := getExpectedStatePathGenerator(suite.T(), bh, tenant, testBaseDrivePath) expectedStatePath := getExpectedStatePathGenerator(suite.T(), bh, tenant, testBaseDrivePath)
tests := []struct { tests := []struct {
testCase string name string
items []models.DriveItemable items []models.DriveItemable
inputFolderMap map[string]string inputFolderMap map[string]string
scope selectors.OneDriveScope scope selectors.OneDriveScope
@ -147,11 +147,11 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedContainerCount int expectedContainerCount int
expectedFileCount int expectedFileCount int
expectedSkippedCount int expectedSkippedCount int
expectedMetadataPaths map[string]string expectedPrevPaths map[string]string
expectedExcludes map[string]struct{} expectedExcludes map[string]struct{}
}{ }{
{ {
testCase: "Invalid item", name: "Invalid item",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("item", "item", testBaseDrivePath, "root", false, false, false), driveItem("item", "item", testBaseDrivePath, "root", false, false, false),
@ -163,13 +163,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
"root": expectedStatePath(data.NotMovedState, ""), "root": expectedStatePath(data.NotMovedState, ""),
}, },
expectedContainerCount: 1, expectedContainerCount: 1,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
}, },
expectedExcludes: map[string]struct{}{}, expectedExcludes: map[string]struct{}{},
}, },
{ {
testCase: "Single File", name: "Single File",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("file", "file", testBaseDrivePath, "root", true, false, false), driveItem("file", "file", testBaseDrivePath, "root", true, false, false),
@ -184,13 +184,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedFileCount: 1, expectedFileCount: 1,
expectedContainerCount: 1, expectedContainerCount: 1,
// Root folder is skipped since it's always present. // Root folder is skipped since it's always present.
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
}, },
expectedExcludes: getDelList("file"), expectedExcludes: getDelList("file"),
}, },
{ {
testCase: "Single Folder", name: "Single Folder",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
@ -202,7 +202,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
"root": expectedStatePath(data.NotMovedState, ""), "root": expectedStatePath(data.NotMovedState, ""),
"folder": expectedStatePath(data.NewState, folder), "folder": expectedStatePath(data.NewState, folder),
}, },
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"folder": expectedPath("/folder"), "folder": expectedPath("/folder"),
}, },
@ -211,7 +211,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: map[string]struct{}{}, expectedExcludes: map[string]struct{}{},
}, },
{ {
testCase: "Single Package", name: "Single Package",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("package", "package", testBaseDrivePath, "root", false, false, true), driveItem("package", "package", testBaseDrivePath, "root", false, false, true),
@ -223,7 +223,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
"root": expectedStatePath(data.NotMovedState, ""), "root": expectedStatePath(data.NotMovedState, ""),
"package": expectedStatePath(data.NewState, pkg), "package": expectedStatePath(data.NewState, pkg),
}, },
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"package": expectedPath("/package"), "package": expectedPath("/package"),
}, },
@ -232,7 +232,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: map[string]struct{}{}, expectedExcludes: map[string]struct{}{},
}, },
{ {
testCase: "1 root file, 1 folder, 1 package, 2 files, 3 collections", name: "1 root file, 1 folder, 1 package, 2 files, 3 collections",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false), driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false),
@ -252,7 +252,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 5, expectedItemCount: 5,
expectedFileCount: 3, expectedFileCount: 3,
expectedContainerCount: 3, expectedContainerCount: 3,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"folder": expectedPath("/folder"), "folder": expectedPath("/folder"),
"package": expectedPath("/package"), "package": expectedPath("/package"),
@ -260,7 +260,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: getDelList("fileInRoot", "fileInFolder", "fileInPackage"), expectedExcludes: getDelList("fileInRoot", "fileInFolder", "fileInPackage"),
}, },
{ {
testCase: "contains folder selector", name: "contains folder selector",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false), driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false),
@ -285,7 +285,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedContainerCount: 3, expectedContainerCount: 3,
// just "folder" isn't added here because the include check is done on the // just "folder" isn't added here because the include check is done on the
// parent path since we only check later if something is a folder or not. // parent path since we only check later if something is a folder or not.
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"folder": expectedPath(folder), "folder": expectedPath(folder),
"subfolder": expectedPath(folderSub), "subfolder": expectedPath(folderSub),
"folder2": expectedPath(folderSub + folder), "folder2": expectedPath(folderSub + folder),
@ -293,7 +293,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: getDelList("fileInFolder", "fileInFolder2"), expectedExcludes: getDelList("fileInFolder", "fileInFolder2"),
}, },
{ {
testCase: "prefix subfolder selector", name: "prefix subfolder selector",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false), driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false),
@ -316,14 +316,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 3, expectedItemCount: 3,
expectedFileCount: 1, expectedFileCount: 1,
expectedContainerCount: 2, expectedContainerCount: 2,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"subfolder": expectedPath(folderSub), "subfolder": expectedPath(folderSub),
"folder2": expectedPath(folderSub + folder), "folder2": expectedPath(folderSub + folder),
}, },
expectedExcludes: getDelList("fileInFolder2"), expectedExcludes: getDelList("fileInFolder2"),
}, },
{ {
testCase: "match subfolder selector", name: "match subfolder selector",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false), driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false),
@ -344,13 +344,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedFileCount: 1, expectedFileCount: 1,
expectedContainerCount: 1, expectedContainerCount: 1,
// No child folders for subfolder so nothing here. // No child folders for subfolder so nothing here.
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"subfolder": expectedPath(folderSub), "subfolder": expectedPath(folderSub),
}, },
expectedExcludes: getDelList("fileInSubfolder"), expectedExcludes: getDelList("fileInSubfolder"),
}, },
{ {
testCase: "not moved folder tree", name: "not moved folder tree",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
@ -368,7 +368,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 1, expectedItemCount: 1,
expectedFileCount: 0, expectedFileCount: 0,
expectedContainerCount: 2, expectedContainerCount: 2,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"folder": expectedPath(folder), "folder": expectedPath(folder),
"subfolder": expectedPath(folderSub), "subfolder": expectedPath(folderSub),
@ -376,7 +376,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: map[string]struct{}{}, expectedExcludes: map[string]struct{}{},
}, },
{ {
testCase: "moved folder tree", name: "moved folder tree",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
@ -394,7 +394,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 1, expectedItemCount: 1,
expectedFileCount: 0, expectedFileCount: 0,
expectedContainerCount: 2, expectedContainerCount: 2,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"folder": expectedPath(folder), "folder": expectedPath(folder),
"subfolder": expectedPath(folderSub), "subfolder": expectedPath(folderSub),
@ -402,7 +402,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: map[string]struct{}{}, expectedExcludes: map[string]struct{}{},
}, },
{ {
testCase: "moved folder tree with file no previous", name: "moved folder tree with file no previous",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
@ -419,14 +419,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 2, expectedItemCount: 2,
expectedFileCount: 1, expectedFileCount: 1,
expectedContainerCount: 2, expectedContainerCount: 2,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"folder": expectedPath("/folder2"), "folder": expectedPath("/folder2"),
}, },
expectedExcludes: getDelList("file"), expectedExcludes: getDelList("file"),
}, },
{ {
testCase: "moved folder tree with file no previous 1", name: "moved folder tree with file no previous 1",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
@ -442,14 +442,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 2, expectedItemCount: 2,
expectedFileCount: 1, expectedFileCount: 1,
expectedContainerCount: 2, expectedContainerCount: 2,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"folder": expectedPath(folder), "folder": expectedPath(folder),
}, },
expectedExcludes: getDelList("file"), expectedExcludes: getDelList("file"),
}, },
{ {
testCase: "moved folder tree and subfolder 1", name: "moved folder tree and subfolder 1",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
@ -469,7 +469,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 2, expectedItemCount: 2,
expectedFileCount: 0, expectedFileCount: 0,
expectedContainerCount: 3, expectedContainerCount: 3,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"folder": expectedPath(folder), "folder": expectedPath(folder),
"subfolder": expectedPath("/subfolder"), "subfolder": expectedPath("/subfolder"),
@ -477,7 +477,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: map[string]struct{}{}, expectedExcludes: map[string]struct{}{},
}, },
{ {
testCase: "moved folder tree and subfolder 2", name: "moved folder tree and subfolder 2",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("subfolder", "subfolder", testBaseDrivePath, "root", false, true, false), driveItem("subfolder", "subfolder", testBaseDrivePath, "root", false, true, false),
@ -497,7 +497,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 2, expectedItemCount: 2,
expectedFileCount: 0, expectedFileCount: 0,
expectedContainerCount: 3, expectedContainerCount: 3,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"folder": expectedPath(folder), "folder": expectedPath(folder),
"subfolder": expectedPath("/subfolder"), "subfolder": expectedPath("/subfolder"),
@ -505,7 +505,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: map[string]struct{}{}, expectedExcludes: map[string]struct{}{},
}, },
{ {
testCase: "move subfolder when moving parent", name: "move subfolder when moving parent",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("folder2", "folder2", testBaseDrivePath, "root", false, true, false), driveItem("folder2", "folder2", testBaseDrivePath, "root", false, true, false),
@ -539,7 +539,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 5, expectedItemCount: 5,
expectedFileCount: 2, expectedFileCount: 2,
expectedContainerCount: 4, expectedContainerCount: 4,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"folder": expectedPath("/folder"), "folder": expectedPath("/folder"),
"folder2": expectedPath("/folder2"), "folder2": expectedPath("/folder2"),
@ -548,7 +548,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: getDelList("itemInSubfolder", "itemInFolder2"), expectedExcludes: getDelList("itemInSubfolder", "itemInFolder2"),
}, },
{ {
testCase: "moved folder tree multiple times", name: "moved folder tree multiple times",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
@ -568,7 +568,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 2, expectedItemCount: 2,
expectedFileCount: 1, expectedFileCount: 1,
expectedContainerCount: 2, expectedContainerCount: 2,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"folder": expectedPath("/folder2"), "folder": expectedPath("/folder2"),
"subfolder": expectedPath("/folder2/subfolder"), "subfolder": expectedPath("/folder2/subfolder"),
@ -576,7 +576,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: getDelList("file"), expectedExcludes: getDelList("file"),
}, },
{ {
testCase: "deleted folder and package", name: "deleted folder and package",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), // root is always present, but not necessary here driveRootItem("root"), // root is always present, but not necessary here
delItem("folder", testBaseDrivePath, "root", false, true, false), delItem("folder", testBaseDrivePath, "root", false, true, false),
@ -597,13 +597,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 0, expectedItemCount: 0,
expectedFileCount: 0, expectedFileCount: 0,
expectedContainerCount: 1, expectedContainerCount: 1,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
}, },
expectedExcludes: map[string]struct{}{}, expectedExcludes: map[string]struct{}{},
}, },
{ {
testCase: "delete folder without previous", name: "delete folder without previous",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
delItem("folder", testBaseDrivePath, "root", false, true, false), delItem("folder", testBaseDrivePath, "root", false, true, false),
@ -619,13 +619,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 0, expectedItemCount: 0,
expectedFileCount: 0, expectedFileCount: 0,
expectedContainerCount: 1, expectedContainerCount: 1,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
}, },
expectedExcludes: map[string]struct{}{}, expectedExcludes: map[string]struct{}{},
}, },
{ {
testCase: "delete folder tree move subfolder", name: "delete folder tree move subfolder",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
delItem("folder", testBaseDrivePath, "root", false, true, false), delItem("folder", testBaseDrivePath, "root", false, true, false),
@ -646,14 +646,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 1, expectedItemCount: 1,
expectedFileCount: 0, expectedFileCount: 0,
expectedContainerCount: 2, expectedContainerCount: 2,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"subfolder": expectedPath("/subfolder"), "subfolder": expectedPath("/subfolder"),
}, },
expectedExcludes: map[string]struct{}{}, expectedExcludes: map[string]struct{}{},
}, },
{ {
testCase: "delete file", name: "delete file",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
delItem("item", testBaseDrivePath, "root", true, false, false), delItem("item", testBaseDrivePath, "root", true, false, false),
@ -669,13 +669,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 1, expectedItemCount: 1,
expectedFileCount: 1, expectedFileCount: 1,
expectedContainerCount: 1, expectedContainerCount: 1,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
}, },
expectedExcludes: getDelList("item"), expectedExcludes: getDelList("item"),
}, },
{ {
testCase: "item before parent errors", name: "item before parent errors",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("file", "file", testBaseDrivePath+"/folder", "folder", true, false, false), driveItem("file", "file", testBaseDrivePath+"/folder", "folder", true, false, false),
@ -690,13 +690,11 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 0, expectedItemCount: 0,
expectedFileCount: 0, expectedFileCount: 0,
expectedContainerCount: 1, expectedContainerCount: 1,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: nil,
"root": expectedPath(""),
},
expectedExcludes: map[string]struct{}{}, expectedExcludes: map[string]struct{}{},
}, },
{ {
testCase: "1 root file, 1 folder, 1 package, 1 good file, 1 malware", name: "1 root file, 1 folder, 1 package, 1 good file, 1 malware",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false), driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false),
@ -717,7 +715,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedFileCount: 2, expectedFileCount: 2,
expectedContainerCount: 3, expectedContainerCount: 3,
expectedSkippedCount: 1, expectedSkippedCount: 1,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"folder": expectedPath("/folder"), "folder": expectedPath("/folder"),
"package": expectedPath("/package"), "package": expectedPath("/package"),
@ -726,8 +724,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
}, },
} }
for _, tt := range tests { for _, test := range tests {
suite.Run(tt.testCase, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
@ -735,43 +733,39 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
var ( var (
excludes = map[string]struct{}{} excludes = map[string]struct{}{}
outputFolderMap = map[string]string{} currPrevPaths = map[string]string{}
itemCollection = map[string]map[string]string{
driveID: {},
}
errs = fault.New(true) errs = fault.New(true)
) )
maps.Copy(outputFolderMap, tt.inputFolderMap) maps.Copy(currPrevPaths, test.inputFolderMap)
c := NewCollections( c := NewCollections(
&itemBackupHandler{api.Drives{}, user, tt.scope}, &itemBackupHandler{api.Drives{}, user, test.scope},
tenant, tenant,
user, idname.NewProvider(user, user),
nil, nil,
control.Options{ToggleFeatures: control.Toggles{}}) control.Options{ToggleFeatures: control.Toggles{}})
c.CollectionMap[driveID] = map[string]*Collection{} c.CollectionMap[driveID] = map[string]*Collection{}
err := c.UpdateCollections( newPrevPaths, err := c.UpdateCollections(
ctx, ctx,
driveID, driveID,
"General", "General",
tt.items, test.items,
tt.inputFolderMap, test.inputFolderMap,
outputFolderMap, currPrevPaths,
excludes, excludes,
itemCollection,
false, false,
errs) errs)
tt.expect(t, err, clues.ToCore(err)) test.expect(t, err, clues.ToCore(err))
assert.Equal(t, len(tt.expectedCollectionIDs), len(c.CollectionMap[driveID]), "total collections") assert.Equal(t, len(test.expectedCollectionIDs), len(c.CollectionMap[driveID]), "total collections")
assert.Equal(t, tt.expectedItemCount, c.NumItems, "item count") assert.Equal(t, test.expectedItemCount, c.NumItems, "item count")
assert.Equal(t, tt.expectedFileCount, c.NumFiles, "file count") assert.Equal(t, test.expectedFileCount, c.NumFiles, "file count")
assert.Equal(t, tt.expectedContainerCount, c.NumContainers, "container count") assert.Equal(t, test.expectedContainerCount, c.NumContainers, "container count")
assert.Equal(t, tt.expectedSkippedCount, len(errs.Skipped()), "skipped items") assert.Equal(t, test.expectedSkippedCount, len(errs.Skipped()), "skipped items")
for id, sp := range tt.expectedCollectionIDs { for id, sp := range test.expectedCollectionIDs {
if !assert.Containsf(t, c.CollectionMap[driveID], id, "missing collection with id %s", id) { if !assert.Containsf(t, c.CollectionMap[driveID], id, "missing collection with id %s", id) {
// Skip collections we don't find so we don't get an NPE. // Skip collections we don't find so we don't get an NPE.
continue continue
@ -782,8 +776,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
assert.Equalf(t, sp.prevPath, c.CollectionMap[driveID][id].PreviousPath(), "prev path for collection %s", id) assert.Equalf(t, sp.prevPath, c.CollectionMap[driveID][id].PreviousPath(), "prev path for collection %s", id)
} }
assert.Equal(t, tt.expectedMetadataPaths, outputFolderMap, "metadata paths") assert.Equal(t, test.expectedPrevPaths, newPrevPaths, "metadata paths")
assert.Equal(t, tt.expectedExcludes, excludes, "exclude list") assert.Equal(t, test.expectedExcludes, excludes, "exclude list")
}) })
} }
} }
@ -1306,6 +1300,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
}, },
@ -1344,6 +1339,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("file", "file2", driveBasePath1+"/folder", "folder", true, false, false), driveItem("file", "file2", driveBasePath1+"/folder", "folder", true, false, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
}, },
@ -1421,6 +1417,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
}, },
DeltaLink: &empty, // probably will never happen with graph DeltaLink: &empty, // probably will never happen with graph
ResetDelta: true,
}, },
}, },
}, },
@ -1458,6 +1455,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
}, },
NextLink: &next, NextLink: &next,
ResetDelta: true,
}, },
{ {
Values: []models.DriveItemable{ Values: []models.DriveItemable{
@ -1466,6 +1464,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("file2", "file2", driveBasePath1+"/folder", "folder", true, false, false), driveItem("file2", "file2", driveBasePath1+"/folder", "folder", true, false, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
}, },
@ -1508,6 +1507,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
driveID2: { driveID2: {
@ -1518,6 +1518,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("file2", "file", driveBasePath2+"/folder", "folder2", true, false, false), driveItem("file2", "file", driveBasePath2+"/folder", "folder2", true, false, false),
}, },
DeltaLink: &delta2, DeltaLink: &delta2,
ResetDelta: true,
}, },
}, },
}, },
@ -1570,6 +1571,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
driveID2: { driveID2: {
@ -1580,6 +1582,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("file2", "file", driveBasePath2+"/folder", "folder", true, false, false), driveItem("file2", "file", driveBasePath2+"/folder", "folder", true, false, false),
}, },
DeltaLink: &delta2, DeltaLink: &delta2,
ResetDelta: true,
}, },
}, },
}, },
@ -1637,87 +1640,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
expectedFolderPaths: nil, expectedFolderPaths: nil,
expectedDelList: nil, expectedDelList: nil,
}, },
{
name: "OneDrive_OneItemPage_DeltaError",
drives: []models.Driveable{drive1},
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
Err: getDeltaError(),
},
{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("file", "file", driveBasePath1, "root", true, false, false),
},
DeltaLink: &delta,
},
},
},
canUsePreviousBackup: true,
errCheck: assert.NoError,
expectedCollections: map[string]map[data.CollectionState][]string{
rootFolderPath1: {data.NotMovedState: {"file"}},
},
expectedDeltaURLs: map[string]string{
driveID1: delta,
},
expectedFolderPaths: map[string]map[string]string{
driveID1: {
"root": rootFolderPath1,
},
},
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
doNotMergeItems: map[string]bool{
rootFolderPath1: true,
},
},
{
name: "OneDrive_TwoItemPage_DeltaError",
drives: []models.Driveable{drive1},
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
Err: getDeltaError(),
},
{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("file", "file", driveBasePath1, "root", true, false, false),
},
NextLink: &next,
},
{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file2", "file", driveBasePath1+"/folder", "folder", true, false, false),
},
DeltaLink: &delta,
},
},
},
canUsePreviousBackup: true,
errCheck: assert.NoError,
expectedCollections: map[string]map[data.CollectionState][]string{
rootFolderPath1: {data.NotMovedState: {"file"}},
expectedPath1("/folder"): {data.NewState: {"folder", "file2"}},
},
expectedDeltaURLs: map[string]string{
driveID1: delta,
},
expectedFolderPaths: map[string]map[string]string{
driveID1: {
"root": rootFolderPath1,
"folder": folderPath1,
},
},
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
doNotMergeItems: map[string]bool{
rootFolderPath1: true,
folderPath1: true,
},
},
{ {
name: "OneDrive_TwoItemPage_NoDeltaError", name: "OneDrive_TwoItemPage_NoDeltaError",
drives: []models.Driveable{drive1}, drives: []models.Driveable{drive1},
@ -1770,9 +1692,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
drives: []models.Driveable{drive1}, drives: []models.Driveable{drive1},
items: map[string][]apiMock.PagerResult[models.DriveItemable]{ items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: { driveID1: {
{
Err: getDeltaError(),
},
{ {
Values: []models.DriveItemable{ Values: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
@ -1780,6 +1699,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("file", "file", driveBasePath1+"/folder2", "folder2", true, false, false), driveItem("file", "file", driveBasePath1+"/folder2", "folder2", true, false, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
}, },
@ -1817,9 +1737,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
drives: []models.Driveable{drive1}, drives: []models.Driveable{drive1},
items: map[string][]apiMock.PagerResult[models.DriveItemable]{ items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: { driveID1: {
{
Err: getDeltaError(),
},
{ {
Values: []models.DriveItemable{ Values: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
@ -1827,6 +1744,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("file", "file", driveBasePath1+"/folder", "folder2", true, false, false), driveItem("file", "file", driveBasePath1+"/folder", "folder2", true, false, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
}, },
@ -1884,6 +1802,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
malwareItem("malware2", "malware2", driveBasePath1+"/folder", "folder", true, false, false), malwareItem("malware2", "malware2", driveBasePath1+"/folder", "folder", true, false, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
}, },
@ -1913,13 +1832,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
expectedSkippedCount: 2, expectedSkippedCount: 2,
}, },
{ {
name: "One Drive Delta Error Deleted Folder In New Results", name: "One Drive Deleted Folder In New Results",
drives: []models.Driveable{drive1}, drives: []models.Driveable{drive1},
items: map[string][]apiMock.PagerResult[models.DriveItemable]{ items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: { driveID1: {
{
Err: getDeltaError(),
},
{ {
Values: []models.DriveItemable{ Values: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
@ -1937,6 +1853,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
delItem("file2", driveBasePath1, "root", true, false, false), delItem("file2", driveBasePath1, "root", true, false, false),
}, },
DeltaLink: &delta2, DeltaLink: &delta2,
ResetDelta: true,
}, },
}, },
}, },
@ -1971,19 +1888,17 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
}, },
}, },
{ {
name: "One Drive Delta Error Random Folder Delete", name: "One Drive Random Folder Delete",
drives: []models.Driveable{drive1}, drives: []models.Driveable{drive1},
items: map[string][]apiMock.PagerResult[models.DriveItemable]{ items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: { driveID1: {
{
Err: getDeltaError(),
},
{ {
Values: []models.DriveItemable{ Values: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
delItem("folder", driveBasePath1, "root", false, true, false), delItem("folder", driveBasePath1, "root", false, true, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
}, },
@ -2014,19 +1929,17 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
}, },
}, },
{ {
name: "One Drive Delta Error Random Item Delete", name: "One Drive Random Item Delete",
drives: []models.Driveable{drive1}, drives: []models.Driveable{drive1},
items: map[string][]apiMock.PagerResult[models.DriveItemable]{ items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: { driveID1: {
{
Err: getDeltaError(),
},
{ {
Values: []models.DriveItemable{ Values: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
delItem("file", driveBasePath1, "root", true, false, false), delItem("file", driveBasePath1, "root", true, false, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
}, },
@ -2073,6 +1986,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
delItem("file", driveBasePath1, "root", true, false, false), delItem("file", driveBasePath1, "root", true, false, false),
}, },
DeltaLink: &delta2, DeltaLink: &delta2,
ResetDelta: true,
}, },
}, },
}, },
@ -2116,6 +2030,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
delItem("file", driveBasePath1, "root", true, false, false), delItem("file", driveBasePath1, "root", true, false, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
}, },
@ -2154,6 +2069,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
delItem("folder", driveBasePath1, "root", false, true, false), delItem("folder", driveBasePath1, "root", false, true, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
}, },
@ -2189,6 +2105,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
delItem("file", driveBasePath1, "root", true, false, false), delItem("file", driveBasePath1, "root", true, false, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
}, },
@ -2270,11 +2187,12 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
mbh := mock.DefaultOneDriveBH("a-user") mbh := mock.DefaultOneDriveBH("a-user")
mbh.DrivePagerV = mockDrivePager mbh.DrivePagerV = mockDrivePager
mbh.ItemPagerV = itemPagers mbh.ItemPagerV = itemPagers
mbh.DriveItemEnumeration = mock.PagerResultToEDID(test.items)
c := NewCollections( c := NewCollections(
mbh, mbh,
tenant, tenant,
user, idname.NewProvider(user, user),
func(*support.ControllerOperationStatus) {}, func(*support.ControllerOperationStatus) {},
control.Options{ToggleFeatures: control.Toggles{}}) control.Options{ToggleFeatures: control.Toggles{}})
@ -2500,121 +2418,6 @@ func delItem(
return item return item
} }
func getDeltaError() error {
syncStateNotFound := "SyncStateNotFound"
me := odataerrors.NewMainError()
me.SetCode(&syncStateNotFound)
deltaError := odataerrors.NewODataError()
deltaError.SetErrorEscaped(me)
return deltaError
}
func (suite *OneDriveCollectionsUnitSuite) TestCollectItems() {
next := "next"
delta := "delta"
prevDelta := "prev-delta"
table := []struct {
name string
items []apiMock.PagerResult[models.DriveItemable]
deltaURL string
prevDeltaSuccess bool
prevDelta string
err error
}{
{
name: "delta on first run",
deltaURL: delta,
items: []apiMock.PagerResult[models.DriveItemable]{
{DeltaLink: &delta},
},
prevDeltaSuccess: true,
prevDelta: prevDelta,
},
{
name: "empty prev delta",
deltaURL: delta,
items: []apiMock.PagerResult[models.DriveItemable]{
{DeltaLink: &delta},
},
prevDeltaSuccess: false,
prevDelta: "",
},
{
name: "next then delta",
deltaURL: delta,
items: []apiMock.PagerResult[models.DriveItemable]{
{NextLink: &next},
{DeltaLink: &delta},
},
prevDeltaSuccess: true,
prevDelta: prevDelta,
},
{
name: "invalid prev delta",
deltaURL: delta,
items: []apiMock.PagerResult[models.DriveItemable]{
{Err: getDeltaError()},
{DeltaLink: &delta}, // works on retry
},
prevDelta: prevDelta,
prevDeltaSuccess: false,
},
{
name: "fail a normal delta query",
items: []apiMock.PagerResult[models.DriveItemable]{
{NextLink: &next},
{Err: assert.AnError},
},
prevDelta: prevDelta,
prevDeltaSuccess: true,
err: assert.AnError,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
itemPager := &apiMock.DeltaPager[models.DriveItemable]{
ToReturn: test.items,
}
collectorFunc := func(
ctx context.Context,
driveID, driveName string,
driveItems []models.DriveItemable,
oldPaths map[string]string,
newPaths map[string]string,
excluded map[string]struct{},
itemCollection map[string]map[string]string,
doNotMergeItems bool,
errs *fault.Bus,
) error {
return nil
}
delta, _, _, err := collectItems(
ctx,
itemPager,
"",
"General",
collectorFunc,
map[string]string{},
test.prevDelta,
fault.New(true))
require.ErrorIs(t, err, test.err, "delta fetch err", clues.ToCore(err))
require.Equal(t, test.deltaURL, delta.URL, "delta url")
require.Equal(t, !test.prevDeltaSuccess, delta.Reset, "delta reset")
})
}
}
func (suite *OneDriveCollectionsUnitSuite) TestAddURLCacheToDriveCollections() { func (suite *OneDriveCollectionsUnitSuite) TestAddURLCacheToDriveCollections() {
driveID := "test-drive" driveID := "test-drive"
collCount := 3 collCount := 3
@ -2648,7 +2451,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestAddURLCacheToDriveCollections() {
c := NewCollections( c := NewCollections(
mbh, mbh,
"test-tenant", "test-tenant",
"test-user", idname.NewProvider("test-user", "test-user"),
nil, nil,
control.Options{ToggleFeatures: control.Toggles{}}) control.Options{ToggleFeatures: control.Toggles{}})
@ -2660,6 +2463,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestAddURLCacheToDriveCollections() {
for i := 0; i < collCount; i++ { for i := 0; i < collCount; i++ {
coll, err := NewCollection( coll, err := NewCollection(
&itemBackupHandler{api.Drives{}, "test-user", anyFolder}, &itemBackupHandler{api.Drives{}, "test-user", anyFolder},
idname.NewProvider("", ""),
nil, nil,
nil, nil,
driveID, driveID,


@ -12,18 +12,21 @@ import (
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/export" "github.com/alcionai/corso/src/pkg/export"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
) )
func NewExportCollection( func NewExportCollection(
baseDir string, baseDir string,
backingCollection []data.RestoreCollection, backingCollection []data.RestoreCollection,
backupVersion int, backupVersion int,
stats *data.ExportStats,
) export.Collectioner { ) export.Collectioner {
return export.BaseCollection{ return export.BaseCollection{
BaseDir: baseDir, BaseDir: baseDir,
BackingCollection: backingCollection, BackingCollection: backingCollection,
BackupVersion: backupVersion, BackupVersion: backupVersion,
Stream: streamItems, Stream: streamItems,
Stats: stats,
} }
} }
@ -34,6 +37,7 @@ func streamItems(
backupVersion int, backupVersion int,
cec control.ExportConfig, cec control.ExportConfig,
ch chan<- export.Item, ch chan<- export.Item,
stats *data.ExportStats,
) { ) {
defer close(ch) defer close(ch)
@ -47,11 +51,22 @@ func streamItems(
} }
name, err := getItemName(ctx, itemUUID, backupVersion, rc) name, err := getItemName(ctx, itemUUID, backupVersion, rc)
if err != nil {
ch <- export.Item{
ID: itemUUID,
Error: err,
}
continue
}
stats.UpdateResourceCount(path.FilesCategory)
body := data.ReaderWithStats(item.ToReader(), path.FilesCategory, stats)
ch <- export.Item{ ch <- export.Item{
ID: itemUUID, ID: itemUUID,
Name: name, Name: name,
Body: item.ToReader(), Body: body,
Error: err, Error: err,
} }
} }
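
The export path now wraps each item body so byte counts accrue as the consumer streams it. A rough, self-contained sketch of that idea; exportStats and statsReader below are hypothetical stand-ins for Corso's ExportStats and ReaderWithStats, not their actual definitions:

package main

import (
	"fmt"
	"io"
	"strings"
	"sync/atomic"
)

type exportStats struct {
	resources atomic.Int64
	bytes     atomic.Int64
}

type statsReader struct {
	r     io.Reader
	stats *exportStats
}

// Read forwards to the wrapped reader and records how many bytes were served.
func (s *statsReader) Read(p []byte) (int, error) {
	n, err := s.r.Read(p)
	s.stats.bytes.Add(int64(n))
	return n, err
}

func main() {
	stats := &exportStats{}
	stats.resources.Add(1) // one item counted before streaming begins

	body := &statsReader{r: strings.NewReader("file contents"), stats: stats}

	if _, err := io.Copy(io.Discard, body); err != nil {
		panic(err)
	}

	fmt.Printf("items=%d bytes=%d\n", stats.resources.Load(), stats.bytes.Load())
}
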


@ -5,6 +5,7 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -12,12 +13,13 @@ import (
func augmentItemInfo( func augmentItemInfo(
dii details.ItemInfo, dii details.ItemInfo,
resource idname.Provider,
service path.ServiceType, service path.ServiceType,
item models.DriveItemable, item models.DriveItemable,
size int64, size int64,
parentPath *path.Builder, parentPath *path.Builder,
) details.ItemInfo { ) details.ItemInfo {
var driveName, siteID, driveID, weburl, creatorEmail string var driveName, driveID, creatorEmail string
// TODO: we rely on this info for details/restore lookups, // TODO: we rely on this info for details/restore lookups,
// so if it's nil we have an issue, and will need an alternative // so if it's nil we have an issue, and will need an alternative
@ -38,19 +40,6 @@ func augmentItemInfo(
} }
} }
if service == path.SharePointService ||
service == path.GroupsService {
gsi := item.GetSharepointIds()
if gsi != nil {
siteID = ptr.Val(gsi.GetSiteId())
weburl = ptr.Val(gsi.GetSiteUrl())
if len(weburl) == 0 {
weburl = constructWebURL(item.GetAdditionalData())
}
}
}
if item.GetParentReference() != nil { if item.GetParentReference() != nil {
driveID = ptr.Val(item.GetParentReference().GetDriveId()) driveID = ptr.Val(item.GetParentReference().GetDriveId())
driveName = strings.TrimSpace(ptr.Val(item.GetParentReference().GetName())) driveName = strings.TrimSpace(ptr.Val(item.GetParentReference().GetName()))
@ -84,9 +73,9 @@ func augmentItemInfo(
Modified: ptr.Val(item.GetLastModifiedDateTime()), Modified: ptr.Val(item.GetLastModifiedDateTime()),
Owner: creatorEmail, Owner: creatorEmail,
ParentPath: pps, ParentPath: pps,
SiteID: siteID, SiteID: resource.ID(),
Size: size, Size: size,
WebURL: weburl, WebURL: resource.Name(),
} }
case path.GroupsService: case path.GroupsService:
@ -99,9 +88,9 @@ func augmentItemInfo(
Modified: ptr.Val(item.GetLastModifiedDateTime()), Modified: ptr.Val(item.GetLastModifiedDateTime()),
Owner: creatorEmail, Owner: creatorEmail,
ParentPath: pps, ParentPath: pps,
SiteID: siteID, SiteID: resource.ID(),
Size: size, Size: size,
WebURL: weburl, WebURL: resource.Name(),
} }
} }
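
The SiteID and WebURL fields above are now populated from an idname.Provider rather than from the item's SharePoint IDs. As a sketch only: a provider of this shape pairs a stable ID with a display name (which, as used above, may carry the site web URL). The types below are illustrative, not the package's real implementation:

package main

import "fmt"

type Provider interface {
	ID() string
	Name() string
}

type provider struct {
	id, name string
}

func (p provider) ID() string   { return p.id }
func (p provider) Name() string { return p.name }

// NewProvider mirrors how call sites in this diff pair an ID with a name.
func NewProvider(id, name string) Provider {
	return provider{id: id, name: name}
}

func main() {
	site := NewProvider("site-guid", "https://contoso.sharepoint.com/sites/hr")
	fmt.Println(site.ID(), site.Name())
}
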


@ -6,6 +6,7 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/drives" "github.com/microsoftgraph/msgraph-sdk-go/drives"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -20,6 +21,7 @@ type ItemInfoAugmenter interface {
// and kiota drops any SetSize update. // and kiota drops any SetSize update.
AugmentItemInfo( AugmentItemInfo(
dii details.ItemInfo, dii details.ItemInfo,
resource idname.Provider,
item models.DriveItemable, item models.DriveItemable,
size int64, size int64,
parentPath *path.Builder, parentPath *path.Builder,
@ -36,6 +38,7 @@ type BackupHandler interface {
GetItemPermissioner GetItemPermissioner
GetItemer GetItemer
NewDrivePagerer NewDrivePagerer
EnumerateDriveItemsDeltaer
// PathPrefix constructs the service and category specific path prefix for // PathPrefix constructs the service and category specific path prefix for
// the given values. // the given values.
@ -50,7 +53,7 @@ type BackupHandler interface {
// ServiceCat returns the service and category used by this implementation. // ServiceCat returns the service and category used by this implementation.
ServiceCat() (path.ServiceType, path.CategoryType) ServiceCat() (path.ServiceType, path.CategoryType)
NewItemPager(driveID, link string, fields []string) api.DeltaPager[models.DriveItemable]
// FormatDisplayPath creates a human-readable string to represent the // FormatDisplayPath creates a human-readable string to represent the
// provided path. // provided path.
FormatDisplayPath(driveName string, parentPath *path.Builder) string FormatDisplayPath(driveName string, parentPath *path.Builder) string
@ -79,6 +82,18 @@ type GetItemer interface {
) (models.DriveItemable, error) ) (models.DriveItemable, error)
} }
type EnumerateDriveItemsDeltaer interface {
EnumerateDriveItemsDelta(
ctx context.Context,
driveID, prevDeltaLink string,
selectProps []string,
) (
[]models.DriveItemable,
api.DeltaUpdate,
error,
)
}
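
A sketch of how a test double might satisfy an enumerator of this shape. The driveItem and deltaUpdate types below are simplified stand-ins for models.DriveItemable and api.DeltaUpdate, not the real types:

package main

import (
	"context"
	"fmt"
)

// Simplified stand-ins for the SDK and api types named above.
type driveItem struct{ id string }

type deltaUpdate struct {
	URL   string // the new delta link
	Reset bool   // true if the old delta was marked invalid
}

type enumerateDriveItemsDeltaer interface {
	EnumerateDriveItemsDelta(
		ctx context.Context,
		driveID, prevDeltaLink string,
		selectProps []string,
	) ([]driveItem, deltaUpdate, error)
}

// mockEnumerator returns a canned result, the way a unit test might.
type mockEnumerator struct {
	items []driveItem
	du    deltaUpdate
}

func (m mockEnumerator) EnumerateDriveItemsDelta(
	ctx context.Context,
	driveID, prevDeltaLink string,
	selectProps []string,
) ([]driveItem, deltaUpdate, error) {
	return m.items, m.du, nil
}

func main() {
	var e enumerateDriveItemsDeltaer = mockEnumerator{
		items: []driveItem{{id: "root"}, {id: "file"}},
		du:    deltaUpdate{URL: "delta-token", Reset: true},
	}

	items, du, _ := e.EnumerateDriveItemsDelta(context.Background(), "drive-1", "", nil)
	fmt.Println(len(items), du.URL, du.Reset) // 2 delta-token true
}
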
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// restore // restore
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------


@ -10,17 +10,24 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/common"
jwt "github.com/alcionai/corso/src/internal/common/jwt"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/readers" "github.com/alcionai/corso/src/internal/common/readers"
"github.com/alcionai/corso/src/internal/common/str" "github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata" "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
) )
const ( const (
acceptHeaderKey = "Accept" acceptHeaderKey = "Accept"
acceptHeaderValue = "*/*" acceptHeaderValue = "*/*"
// JWTQueryParam is a query param embedded in graph download URLs which
// holds the JWT token.
JWTQueryParam = "tempauth"
) )
// downloadUrlKeys is used to find the download URL in a DriveItem response. // downloadUrlKeys is used to find the download URL in a DriveItem response.
@ -121,6 +128,19 @@ func downloadFile(
return nil, clues.New("empty file url").WithClues(ctx) return nil, clues.New("empty file url").WithClues(ctx)
} }
// Precheck for url expiry before we make a call to graph to download the
// file. If the url is expired, we can return early and save a call to graph.
//
// Ignore all errors encountered during the check. We can rely on graph to
// return errors on malformed urls. Ignoring errors also future-proofs against
// any sudden graph changes, e.g. if graph decides to embed the token in a
// new query param.
expired, err := isURLExpired(ctx, url)
if err == nil && expired {
logger.Ctx(ctx).Debug("expired item download url")
return nil, graph.ErrTokenExpired
}
rc, err := readers.NewResetRetryHandler( rc, err := readers.NewResetRetryHandler(
ctx, ctx,
&downloadWithRetries{ &downloadWithRetries{
@ -193,3 +213,27 @@ func setName(orig models.ItemReferenceable, driveName string) models.ItemReferen
return orig return orig
} }
// isURLExpired inspects the jwt token embedded in the item download url
// and returns true if it is expired.
func isURLExpired(
ctx context.Context,
url string,
) (bool, error) {
// Extract the raw JWT string from the download url.
rawJWT, err := common.GetQueryParamFromURL(url, JWTQueryParam)
if err != nil {
logger.CtxErr(ctx, err).Info("query param not found")
return false, clues.Stack(err).WithClues(ctx)
}
expired, err := jwt.IsJWTExpired(rawJWT)
if err != nil {
logger.CtxErr(ctx, err).Info("checking jwt expiry")
return false, clues.Stack(err).WithClues(ctx)
}
return expired, nil
}
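
For reference, a self-contained sketch of the same precheck using only the standard library: read the tempauth query param, decode the JWT payload without verifying the signature, and compare its exp claim (unix seconds) against the current time. This approximates what the helpers above do; it is not their implementation:

package main

import (
	"encoding/base64"
	"encoding/json"
	"errors"
	"fmt"
	"net/url"
	"strings"
	"time"
)

func isDownloadURLExpired(raw string) (bool, error) {
	u, err := url.Parse(raw)
	if err != nil {
		return false, err
	}

	token := u.Query().Get("tempauth")
	if token == "" {
		return false, errors.New("tempauth query param not found")
	}

	parts := strings.Split(token, ".")
	if len(parts) != 3 {
		return false, errors.New("malformed jwt")
	}

	// The payload is base64url-encoded JSON; expiry is the "exp" claim in
	// unix seconds. No signature check is needed just to read it.
	payload, err := base64.RawURLEncoding.DecodeString(parts[1])
	if err != nil {
		return false, err
	}

	var claims struct {
		Exp int64 `json:"exp"`
	}
	if err := json.Unmarshal(payload, &claims); err != nil {
		return false, err
	}

	if claims.Exp == 0 {
		return false, errors.New("no exp claim")
	}

	return time.Now().Unix() >= claims.Exp, nil
}

func main() {
	// Malformed payload -> error; per the comment above, callers ignore the
	// error and fall through to the graph call.
	expired, err := isDownloadURLExpired("https://example.com/download?tempauth=a.b.c")
	fmt.Println(expired, err)
}
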


@ -1,142 +0,0 @@
package drive
import (
"context"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// DeltaUpdate holds the results of a current delta token. It normally
// gets produced when aggregating the addition and removal of items in
// a delta-queryable folder.
// FIXME: This is the same as exchange.api.DeltaUpdate
type DeltaUpdate struct {
// the deltaLink itself
URL string
// true if the old delta was marked as invalid
Reset bool
}
// itemCollector functions collect the items found in a drive
type itemCollector func(
ctx context.Context,
driveID, driveName string,
driveItems []models.DriveItemable,
oldPaths map[string]string,
newPaths map[string]string,
excluded map[string]struct{},
itemCollections map[string]map[string]string,
validPrevDelta bool,
errs *fault.Bus,
) error
// collectItems will enumerate all items in the specified drive and hand them to the
// provided `collector` method
func collectItems(
ctx context.Context,
pager api.DeltaPager[models.DriveItemable],
driveID, driveName string,
collector itemCollector,
oldPaths map[string]string,
prevDelta string,
errs *fault.Bus,
) (
DeltaUpdate,
map[string]string, // newPaths
map[string]struct{}, // excluded
error,
) {
var (
newDeltaURL = ""
newPaths = map[string]string{}
excluded = map[string]struct{}{}
invalidPrevDelta = len(prevDelta) == 0
// itemCollection is used to identify which collection a
// file belongs to. This is useful to delete a file from the
// collection it was previously in, in case it was moved to a
// different collection within the same delta query.
// drive ID -> item ID -> collection ID
itemCollection = map[string]map[string]string{
driveID: {},
}
)
if !invalidPrevDelta {
maps.Copy(newPaths, oldPaths)
pager.SetNextLink(prevDelta)
}
for {
// assume delta urls here, which allows single-token consumption
page, err := pager.GetPage(graph.ConsumeNTokens(ctx, graph.SingleGetOrDeltaLC))
if graph.IsErrInvalidDelta(err) {
logger.Ctx(ctx).Infow("Invalid previous delta link", "link", prevDelta)
invalidPrevDelta = true
newPaths = map[string]string{}
pager.Reset(ctx)
continue
}
if err != nil {
return DeltaUpdate{}, nil, nil, graph.Wrap(ctx, err, "getting page")
}
vals := page.GetValue()
err = collector(
ctx,
driveID,
driveName,
vals,
oldPaths,
newPaths,
excluded,
itemCollection,
invalidPrevDelta,
errs)
if err != nil {
return DeltaUpdate{}, nil, nil, err
}
nextLink, deltaLink := api.NextAndDeltaLink(page)
if len(deltaLink) > 0 {
newDeltaURL = deltaLink
}
// Check if there are more items
if len(nextLink) == 0 {
break
}
logger.Ctx(ctx).Debugw("Found nextLink", "link", nextLink)
pager.SetNextLink(nextLink)
}
return DeltaUpdate{URL: newDeltaURL, Reset: invalidPrevDelta}, newPaths, excluded, nil
}
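
The removed loop above captures a common delta-pagination pattern: follow nextLinks until a deltaLink arrives, and on an invalid previous delta token reset the pager, drop partial results, and re-enumerate from scratch. A minimal sketch of that pattern with hypothetical pager types:

package main

import (
	"errors"
	"fmt"
)

var errInvalidDelta = errors.New("sync state not found")

type page struct {
	values    []string
	nextLink  string // set when more pages follow
	deltaLink string // set on the final page
}

type pager interface {
	GetPage() (page, error)
	SetNextLink(link string)
	Reset()
}

func collectAll(p pager, prevDelta string) ([]string, string, bool, error) {
	var (
		items []string
		reset bool
	)

	if prevDelta != "" {
		p.SetNextLink(prevDelta)
	}

	for {
		pg, err := p.GetPage()

		// An expired or invalid delta token forces a full re-enumeration:
		// drop anything gathered so far, reset the pager, and try again.
		if errors.Is(err, errInvalidDelta) {
			items, reset = nil, true
			p.Reset()
			continue
		}

		if err != nil {
			return nil, "", false, err
		}

		items = append(items, pg.values...)

		if pg.deltaLink != "" {
			return items, pg.deltaLink, reset, nil
		}

		p.SetNextLink(pg.nextLink)
	}
}

// fakePager plays back canned pages and errors, the way a test might.
type fakePager struct {
	pages []page
	errs  []error
	i     int
}

func (f *fakePager) GetPage() (page, error) {
	pg, err := f.pages[f.i], f.errs[f.i]
	f.i++
	return pg, err
}

func (f *fakePager) SetNextLink(string) {}
func (f *fakePager) Reset()             {}

func main() {
	p := &fakePager{
		pages: []page{{}, {values: []string{"root", "file"}, deltaLink: "new-delta"}},
		errs:  []error{errInvalidDelta, nil},
	}

	items, delta, reset, err := collectAll(p, "old-delta")
	fmt.Println(items, delta, reset, err) // [root file] new-delta true <nil>
}
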
// newItem initializes a `models.DriveItemable` that can be used as input to `createItem`
func newItem(name string, folder bool) *models.DriveItem {
itemToCreate := models.NewDriveItem()
itemToCreate.SetName(&name)
if folder {
itemToCreate.SetFolder(models.NewFolder())
} else {
itemToCreate.SetFile(models.NewFile())
}
return itemToCreate
}
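
The file removed above centralized delta paging for drives; callers now go through the `EnumerateDriveItemsDelta` methods added to the backup handlers later in this commit. As a reference for what that enumeration amounts to, a compact sketch of the same loop built only from the pager primitives visible above (illustrative, not the new implementation):

  // Follow nextLinks until a deltaLink arrives, resetting once if the
  // previous delta token is rejected as invalid.
  func enumerateDelta(
      ctx context.Context,
      pager api.DeltaPager[models.DriveItemable],
  ) ([]models.DriveItemable, DeltaUpdate, error) {
      var (
          items []models.DriveItemable
          du    DeltaUpdate
      )
      for {
          page, err := pager.GetPage(ctx)
          if graph.IsErrInvalidDelta(err) {
              items, du.Reset = nil, true
              pager.Reset(ctx)
              continue
          }
          if err != nil {
              return nil, DeltaUpdate{}, err
          }
          items = append(items, page.GetValue()...)
          next, delta := api.NextAndDeltaLink(page)
          if len(delta) > 0 {
              du.URL = delta
          }
          if len(next) == 0 {
              return items, du, nil
          }
          pager.SetNextLink(next)
      }
  }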


@@ -11,6 +11,7 @@ import (
 	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"
+	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/common/prefixmatcher"
 	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/internal/tester"
@@ -267,7 +268,7 @@ func (suite *OneDriveIntgSuite) TestOneDriveNewCollections() {
 	colls := NewCollections(
 		&itemBackupHandler{suite.ac.Drives(), test.user, scope},
 		creds.AzureTenantID,
-		test.user,
+		idname.NewProvider(test.user, test.user),
 		service.updateStatus,
 		control.Options{
 			ToggleFeatures: control.Toggles{},


@@ -8,6 +8,7 @@ import (
 	"github.com/microsoftgraph/msgraph-sdk-go/drives"
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
+	"github.com/alcionai/corso/src/internal/common/idname"
 	odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/control"
@@ -87,20 +88,14 @@ func (h itemBackupHandler) NewDrivePager(
 	return h.ac.NewUserDrivePager(resourceOwner, fields)
 }
-func (h itemBackupHandler) NewItemPager(
-	driveID, link string,
-	fields []string,
-) api.DeltaPager[models.DriveItemable] {
-	return h.ac.NewDriveItemDeltaPager(driveID, link, fields)
-}
 func (h itemBackupHandler) AugmentItemInfo(
 	dii details.ItemInfo,
+	resource idname.Provider,
 	item models.DriveItemable,
 	size int64,
 	parentPath *path.Builder,
 ) details.ItemInfo {
-	return augmentItemInfo(dii, path.OneDriveService, item, size, parentPath)
+	return augmentItemInfo(dii, resource, path.OneDriveService, item, size, parentPath)
 }
 func (h itemBackupHandler) FormatDisplayPath(
@@ -139,6 +134,14 @@ func (h itemBackupHandler) IncludesDir(dir string) bool {
 	return h.scope.Matches(selectors.OneDriveFolder, dir)
 }
+func (h itemBackupHandler) EnumerateDriveItemsDelta(
+	ctx context.Context,
+	driveID, prevDeltaLink string,
+	selectProps []string,
+) ([]models.DriveItemable, api.DeltaUpdate, error) {
+	return h.ac.EnumerateDriveItemsDelta(ctx, driveID, prevDeltaLink, selectProps)
+}
 // ---------------------------------------------------------------------------
 // Restore
 // ---------------------------------------------------------------------------
@@ -173,11 +176,12 @@ func (h itemRestoreHandler) NewDrivePager(
 // and kiota drops any SetSize update.
 func (h itemRestoreHandler) AugmentItemInfo(
 	dii details.ItemInfo,
+	resource idname.Provider,
 	item models.DriveItemable,
 	size int64,
 	parentPath *path.Builder,
 ) details.ItemInfo {
-	return augmentItemInfo(dii, path.OneDriveService, item, size, parentPath)
+	return augmentItemInfo(dii, resource, path.OneDriveService, item, size, parentPath)
 }
 func (h itemRestoreHandler) DeleteItem(


@@ -16,12 +16,11 @@ import (
 	"github.com/alcionai/corso/src/internal/common/dttm"
 	"github.com/alcionai/corso/src/internal/common/ptr"
+	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/internal/tester/tconfig"
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/control/testdata"
-	"github.com/alcionai/corso/src/pkg/fault"
-	"github.com/alcionai/corso/src/pkg/selectors"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )
@@ -49,6 +48,8 @@ func (suite *ItemIntegrationSuite) SetupSuite() {
 	suite.service = loadTestService(t)
 	suite.user = tconfig.SecondaryM365UserID(t)
+	graph.InitializeConcurrencyLimiter(ctx, true, 4)
 	pager := suite.service.ac.Drives().NewUserDrivePager(suite.user, nil)
 	odDrives, err := api.GetAllDrives(ctx, pager)
@@ -60,83 +61,6 @@ func (suite *ItemIntegrationSuite) SetupSuite() {
 	suite.userDriveID = ptr.Val(odDrives[0].GetId())
 }
-// TestItemReader is an integration test that makes a few assumptions
-// about the test environment
-// 1) It assumes the test user has a drive
-// 2) It assumes the drive has a file it can use to test `driveItemReader`
-// The test checks these in below
-func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
-	t := suite.T()
-	ctx, flush := tester.NewContext(t)
-	defer flush()
-	var driveItem models.DriveItemable
-	// This item collector tries to find "a" drive item that is a non-empty
-	// file to test the reader function
-	itemCollector := func(
-		_ context.Context,
-		_, _ string,
-		items []models.DriveItemable,
-		_ map[string]string,
-		_ map[string]string,
-		_ map[string]struct{},
-		_ map[string]map[string]string,
-		_ bool,
-		_ *fault.Bus,
-	) error {
-		if driveItem != nil {
-			return nil
-		}
-		for _, item := range items {
-			if item.GetFile() != nil && ptr.Val(item.GetSize()) > 0 {
-				driveItem = item
-				break
-			}
-		}
-		return nil
-	}
-	ip := suite.service.ac.
-		Drives().
-		NewDriveItemDeltaPager(suite.userDriveID, "", api.DriveItemSelectDefault())
-	_, _, _, err := collectItems(
-		ctx,
-		ip,
-		suite.userDriveID,
-		"General",
-		itemCollector,
-		map[string]string{},
-		"",
-		fault.New(true))
-	require.NoError(t, err, clues.ToCore(err))
-	// Test Requirement 2: Need a file
-	require.NotEmpty(
-		t,
-		driveItem,
-		"no file item found for user %s drive %s",
-		suite.user,
-		suite.userDriveID)
-	bh := itemBackupHandler{
-		suite.service.ac.Drives(),
-		suite.user,
-		(&selectors.OneDriveBackup{}).Folders(selectors.Any())[0],
-	}
-	// Read data for the file
-	itemData, err := downloadItem(ctx, bh, driveItem)
-	require.NoError(t, err, clues.ToCore(err))
-	size, err := io.Copy(io.Discard, itemData)
-	require.NoError(t, err, clues.ToCore(err))
-	require.NotZero(t, size)
-}
 // TestItemWriter is an integration test for uploading data to OneDrive
 // It creates a new folder with a new item and writes data to it
 func (suite *ItemIntegrationSuite) TestItemWriter() {
@@ -171,7 +95,7 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
 	ctx,
 	test.driveID,
 	ptr.Val(root.GetId()),
-	newItem(newFolderName, true),
+	api.NewDriveItem(newFolderName, true),
 	control.Copy)
 	require.NoError(t, err, clues.ToCore(err))
 	require.NotNil(t, newFolder.GetId())
@@ -183,7 +107,7 @@
 	ctx,
 	test.driveID,
 	ptr.Val(newFolder.GetId()),
-	newItem(newItemName, false),
+	api.NewDriveItem(newItemName, false),
 	control.Copy)
 	require.NoError(t, err, clues.ToCore(err))
 	require.NotNil(t, newItem.GetId())
@@ -317,7 +241,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
 	{
 		name: "success",
 		itemFunc: func() models.DriveItemable {
-			di := newItem("test", false)
+			di := api.NewDriveItem("test", false)
 			di.SetAdditionalData(map[string]any{
 				"@microsoft.graph.downloadUrl": url,
 			})
@@ -336,7 +260,7 @@
 	{
 		name: "success, content url set instead of download url",
 		itemFunc: func() models.DriveItemable {
-			di := newItem("test", false)
+			di := api.NewDriveItem("test", false)
 			di.SetAdditionalData(map[string]any{
 				"@content.downloadUrl": url,
 			})
@@ -355,7 +279,7 @@
 	{
 		name: "api getter returns error",
 		itemFunc: func() models.DriveItemable {
-			di := newItem("test", false)
+			di := api.NewDriveItem("test", false)
 			di.SetAdditionalData(map[string]any{
 				"@microsoft.graph.downloadUrl": url,
 			})
@@ -371,7 +295,7 @@
 	{
 		name: "download url is empty",
 		itemFunc: func() models.DriveItemable {
-			di := newItem("test", false)
+			di := api.NewDriveItem("test", false)
 			return di
 		},
 		GetFunc: func(ctx context.Context, url string) (*http.Response, error) {
@@ -386,7 +310,7 @@
 	{
 		name: "malware",
 		itemFunc: func() models.DriveItemable {
-			di := newItem("test", false)
+			di := api.NewDriveItem("test", false)
 			di.SetAdditionalData(map[string]any{
 				"@microsoft.graph.downloadUrl": url,
 			})
@@ -408,7 +332,7 @@
 	{
 		name: "non-2xx http response",
 		itemFunc: func() models.DriveItemable {
-			di := newItem("test", false)
+			di := api.NewDriveItem("test", false)
 			di.SetAdditionalData(map[string]any{
 				"@microsoft.graph.downloadUrl": url,
 			})
@@ -457,7 +381,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem_ConnectionResetErrorOnFirstRead
 	url = "https://example.com"
 	itemFunc = func() models.DriveItemable {
-		di := newItem("test", false)
+		di := api.NewDriveItem("test", false)
 		di.SetAdditionalData(map[string]any{
 			"@microsoft.graph.downloadUrl": url,
 		})


@@ -3,13 +3,12 @@ package drive
 import (
 	"context"
 	"net/http"
-	"strings"
 	"github.com/alcionai/clues"
 	"github.com/microsoftgraph/msgraph-sdk-go/drives"
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
-	"github.com/alcionai/corso/src/internal/common/ptr"
+	"github.com/alcionai/corso/src/internal/common/idname"
 	odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/control"
@@ -92,53 +91,14 @@ func (h libraryBackupHandler) NewDrivePager(
 	return h.ac.NewSiteDrivePager(resourceOwner, fields)
 }
-func (h libraryBackupHandler) NewItemPager(
-	driveID, link string,
-	fields []string,
-) api.DeltaPager[models.DriveItemable] {
-	return h.ac.NewDriveItemDeltaPager(driveID, link, fields)
-}
 func (h libraryBackupHandler) AugmentItemInfo(
 	dii details.ItemInfo,
+	resource idname.Provider,
 	item models.DriveItemable,
 	size int64,
 	parentPath *path.Builder,
 ) details.ItemInfo {
-	return augmentItemInfo(dii, h.service, item, size, parentPath)
-}
-// constructWebURL is a helper function for recreating the webURL
-// for the originating SharePoint site. Uses the additionalData map
-// from a models.DriveItemable that possesses a downloadURL within the map.
-// Returns "" if the map is nil or key is not present.
-func constructWebURL(adtl map[string]any) string {
-	var (
-		desiredKey = "@microsoft.graph.downloadUrl"
-		sep = `/_layouts`
-		url string
-	)
-	if adtl == nil {
-		return url
-	}
-	r := adtl[desiredKey]
-	point, ok := r.(*string)
-	if !ok {
-		return url
-	}
-	value := ptr.Val(point)
-	if len(value) == 0 {
-		return url
-	}
-	temp := strings.Split(value, sep)
-	url = temp[0]
-	return url
+	return augmentItemInfo(dii, resource, h.service, item, size, parentPath)
 }
 func (h libraryBackupHandler) FormatDisplayPath(
@@ -177,6 +137,14 @@ func (h libraryBackupHandler) IncludesDir(dir string) bool {
 	return h.scope.Matches(selectors.SharePointLibraryFolder, dir)
 }
+func (h libraryBackupHandler) EnumerateDriveItemsDelta(
+	ctx context.Context,
+	driveID, prevDeltaLink string,
+	selectProps []string,
+) ([]models.DriveItemable, api.DeltaUpdate, error) {
+	return h.ac.EnumerateDriveItemsDelta(ctx, driveID, prevDeltaLink, selectProps)
+}
 // ---------------------------------------------------------------------------
 // Restore
 // ---------------------------------------------------------------------------
@@ -208,11 +176,12 @@ func (h libraryRestoreHandler) NewDrivePager(
 func (h libraryRestoreHandler) AugmentItemInfo(
 	dii details.ItemInfo,
+	resource idname.Provider,
 	item models.DriveItemable,
 	size int64,
 	parentPath *path.Builder,
 ) details.ItemInfo {
-	return augmentItemInfo(dii, h.service, item, size, parentPath)
+	return augmentItemInfo(dii, resource, h.service, item, size, parentPath)
 }
 func (h libraryRestoreHandler) DeleteItem(


@@ -271,7 +271,7 @@ func restoreItem(
 	itemInfo, err := restoreV0File(
 		ctx,
 		rh,
-		rcc.RestoreConfig,
+		rcc,
 		drivePath,
 		fibn,
 		restoreFolderID,
@@ -377,7 +377,7 @@ func restoreItem(
 func restoreV0File(
 	ctx context.Context,
 	rh RestoreHandler,
-	restoreCfg control.RestoreConfig,
+	rcc inject.RestoreConsumerConfig,
 	drivePath *path.DrivePath,
 	fibn data.FetchItemByNamer,
 	restoreFolderID string,
@@ -388,7 +388,7 @@ func restoreV0File(
 ) (details.ItemInfo, error) {
 	_, itemInfo, err := restoreFile(
 		ctx,
-		restoreCfg,
+		rcc,
 		rh,
 		fibn,
 		itemData.ID(),
@@ -423,7 +423,7 @@ func restoreV1File(
 	itemID, itemInfo, err := restoreFile(
 		ctx,
-		rcc.RestoreConfig,
+		rcc,
 		rh,
 		fibn,
 		trimmedName,
@@ -509,7 +509,7 @@ func restoreV6File(
 	itemID, itemInfo, err := restoreFile(
 		ctx,
-		rcc.RestoreConfig,
+		rcc,
 		rh,
 		fibn,
 		meta.FileName,
@@ -671,7 +671,7 @@ func createFolder(
 		ctx,
 		driveID,
 		parentFolderID,
-		newItem(folderName, true),
+		api.NewDriveItem(folderName, true),
 		control.Replace)
 	// ErrItemAlreadyExistsConflict can only occur for folders if the
@@ -692,7 +692,7 @@ func createFolder(
 		ctx,
 		driveID,
 		parentFolderID,
-		newItem(folderName, true),
+		api.NewDriveItem(folderName, true),
 		control.Copy)
 	if err != nil {
 		return nil, clues.Wrap(err, "creating folder")
@@ -711,7 +711,7 @@ type itemRestorer interface {
 // restoreFile will create a new item in the specified `parentFolderID` and upload the data.Item
 func restoreFile(
 	ctx context.Context,
-	restoreCfg control.RestoreConfig,
+	rcc inject.RestoreConsumerConfig,
 	ir itemRestorer,
 	fibn data.FetchItemByNamer,
 	name string,
@@ -733,7 +733,7 @@ func restoreFile(
 	}
 	var (
-		item = newItem(name, false)
+		item = api.NewDriveItem(name, false)
 		collisionKey = api.DriveItemCollisionKey(item)
 		collision api.DriveItemIDType
 		shouldDeleteOriginal bool
@@ -743,7 +743,7 @@
 	log := logger.Ctx(ctx).With("collision_key", clues.Hide(collisionKey))
 	log.Debug("item collision")
-	if restoreCfg.OnCollision == control.Skip {
+	if rcc.RestoreConfig.OnCollision == control.Skip {
 		ctr.Inc(count.CollisionSkip)
 		log.Debug("skipping item with collision")
@@ -751,7 +751,7 @@
 	}
 	collision = dci
-	shouldDeleteOriginal = restoreCfg.OnCollision == control.Replace && !dci.IsFolder
+	shouldDeleteOriginal = rcc.RestoreConfig.OnCollision == control.Replace && !dci.IsFolder
 	}
 // drive items do not support PUT requests on the drive item data, so
@@ -850,7 +850,12 @@ func restoreFile(
 	defer closeProgressBar()
-	dii := ir.AugmentItemInfo(details.ItemInfo{}, newItem, written, nil)
+	dii := ir.AugmentItemInfo(
+		details.ItemInfo{},
+		rcc.ProtectedResource,
+		newItem,
+		written,
+		nil)
 	if shouldDeleteOriginal {
 		ctr.Inc(count.CollisionReplace)
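
The collision handling threaded through `restoreFile` reduces to a small policy: Skip drops the incoming item, Replace deletes the pre-existing file, and the default (Copy) keeps both. A sketch of that decision in isolation, under the assumption that `control.CollisionPolicy` and `api.DriveItemIDType` behave as they are used above:

  // Sketch of the collision policy applied on a collision-key match.
  func collisionAction(
      policy control.CollisionPolicy,
      collision api.DriveItemIDType,
  ) (skipItem, deleteOriginal bool) {
      switch policy {
      case control.Skip:
          return true, false
      case control.Replace:
          // folders are never deleted, matching the !dci.IsFolder guard above
          return false, !collision.IsFolder
      default: // control.Copy keeps both items
          return false, false
      }
  }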


@@ -47,7 +47,7 @@ type urlCache struct {
 	refreshMu sync.Mutex
 	deltaQueryCount int
-	itemPager api.DeltaPager[models.DriveItemable]
+	edid EnumerateDriveItemsDeltaer
 	errs *fault.Bus
 }
@@ -56,13 +56,10 @@ type urlCache struct {
 func newURLCache(
 	driveID, prevDelta string,
 	refreshInterval time.Duration,
-	itemPager api.DeltaPager[models.DriveItemable],
+	edid EnumerateDriveItemsDeltaer,
 	errs *fault.Bus,
 ) (*urlCache, error) {
-	err := validateCacheParams(
-		driveID,
-		refreshInterval,
-		itemPager)
+	err := validateCacheParams(driveID, refreshInterval, edid)
 	if err != nil {
 		return nil, clues.Wrap(err, "cache params")
 	}
@@ -71,9 +68,9 @@ func newURLCache(
 		idToProps: make(map[string]itemProps),
 		lastRefreshTime: time.Time{},
 		driveID: driveID,
+		edid: edid,
 		prevDelta: prevDelta,
 		refreshInterval: refreshInterval,
-		itemPager: itemPager,
 		errs: errs,
 	},
 	nil
@@ -83,7 +80,7 @@ func newURLCache(
 func validateCacheParams(
 	driveID string,
 	refreshInterval time.Duration,
-	itemPager api.DeltaPager[models.DriveItemable],
+	edid EnumerateDriveItemsDeltaer,
 ) error {
 	if len(driveID) == 0 {
 		return clues.New("drive id is empty")
@@ -93,8 +90,8 @@ func validateCacheParams(
 		return clues.New("invalid refresh interval")
 	}
-	if itemPager == nil {
-		return clues.New("nil item pager")
+	if edid == nil {
+		return clues.New("nil item enumerator")
 	}
 	return nil
@@ -160,44 +157,27 @@ func (uc *urlCache) refreshCache(
 	// Issue a delta query to graph
 	logger.Ctx(ctx).Info("refreshing url cache")
-	err := uc.deltaQuery(ctx)
+	items, du, err := uc.edid.EnumerateDriveItemsDelta(
+		ctx,
+		uc.driveID,
+		uc.prevDelta,
+		api.URLCacheDriveItemProps())
 	if err != nil {
-		// clear cache
 		uc.idToProps = make(map[string]itemProps)
-		return err
+		return clues.Stack(err)
 	}
+	uc.deltaQueryCount++
+	if err := uc.updateCache(ctx, items, uc.errs); err != nil {
+		return clues.Stack(err)
+	}
 	logger.Ctx(ctx).Info("url cache refreshed")
 	// Update last refresh time
 	uc.lastRefreshTime = time.Now()
+	uc.prevDelta = du.URL
 	return nil
 }
-// deltaQuery performs a delta query on the drive and update the cache
-func (uc *urlCache) deltaQuery(
-	ctx context.Context,
-) error {
-	logger.Ctx(ctx).Debug("starting delta query")
-	// Reset item pager to remove any previous state
-	uc.itemPager.Reset(ctx)
-	_, _, _, err := collectItems(
-		ctx,
-		uc.itemPager,
-		uc.driveID,
-		"",
-		uc.updateCache,
-		map[string]string{},
-		uc.prevDelta,
-		uc.errs)
-	if err != nil {
-		return clues.Wrap(err, "delta query")
-	}
-	uc.deltaQueryCount++
-	return nil
-}
@@ -224,13 +204,7 @@
 // It assumes that cacheMu is held by caller in write mode
 func (uc *urlCache) updateCache(
 	ctx context.Context,
-	_, _ string,
 	items []models.DriveItemable,
-	_ map[string]string,
-	_ map[string]string,
-	_ map[string]struct{},
-	_ map[string]map[string]string,
-	_ bool,
 	errs *fault.Bus,
 ) error {
 	el := errs.Local()
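
Refreshes are guarded by `refreshMu`, so concurrent readers that all observe a stale cache still trigger only one delta query. A condensed sketch of that double-checked pattern, with the exact signatures simplified relative to the real methods:

  // Double-checked staleness guard around refreshCache: re-test after
  // taking the lock in case another goroutine already refreshed.
  func refreshIfStale(ctx context.Context, uc *urlCache) error {
      if !uc.needsRefresh() {
          return nil
      }
      uc.refreshMu.Lock()
      defer uc.refreshMu.Unlock()
      if !uc.needsRefresh() {
          return nil
      }
      return uc.refreshCache(ctx)
  }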


@@ -1,7 +1,6 @@
 package drive
 import (
-	"context"
 	"errors"
 	"io"
 	"math/rand"
@@ -18,15 +17,19 @@ import (
 	"github.com/alcionai/corso/src/internal/common/dttm"
 	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/m365/graph"
+	"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/internal/tester/tconfig"
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/control/testdata"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
-	apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
 )
+// ---------------------------------------------------------------------------
+// integration
+// ---------------------------------------------------------------------------
 type URLCacheIntegrationSuite struct {
 	tester.Suite
 	ac api.Client
@@ -72,7 +75,6 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
 	ac = suite.ac.Drives()
 	driveID = suite.driveID
 	newFolderName = testdata.DefaultRestoreConfig("folder").Location
-	driveItemPager = suite.ac.Drives().NewDriveItemDeltaPager(driveID, "", api.DriveItemSelectDefault())
 	)
 	ctx, flush := tester.NewContext(t)
@@ -82,11 +84,11 @@
 	root, err := ac.GetRootFolder(ctx, driveID)
 	require.NoError(t, err, clues.ToCore(err))
-	newFolder, err := ac.Drives().PostItemInContainer(
+	newFolder, err := ac.PostItemInContainer(
 		ctx,
 		driveID,
 		ptr.Val(root.GetId()),
-		newItem(newFolderName, true),
+		api.NewDriveItem(newFolderName, true),
 		control.Copy)
 	require.NoError(t, err, clues.ToCore(err))
@@ -94,33 +96,14 @@
 	nfid := ptr.Val(newFolder.GetId())
-	collectorFunc := func(
-		context.Context,
-		string,
-		string,
-		[]models.DriveItemable,
-		map[string]string,
-		map[string]string,
-		map[string]struct{},
-		map[string]map[string]string,
-		bool,
-		*fault.Bus,
-	) error {
-		return nil
-	}
 	// Get the previous delta to feed into url cache
-	prevDelta, _, _, err := collectItems(
+	_, du, err := ac.EnumerateDriveItemsDelta(
 		ctx,
-		suite.ac.Drives().NewDriveItemDeltaPager(driveID, "", api.DriveItemSelectURLCache()),
 		suite.driveID,
-		"drive-name",
-		collectorFunc,
-		map[string]string{},
 		"",
-		fault.New(true))
+		api.URLCacheDriveItemProps())
 	require.NoError(t, err, clues.ToCore(err))
-	require.NotNil(t, prevDelta.URL)
+	require.NotEmpty(t, du.URL)
 	// Create a bunch of files in the new folder
 	var items []models.DriveItemable
@@ -128,11 +111,11 @@
 	for i := 0; i < 5; i++ {
 		newItemName := "test_url_cache_basic_" + dttm.FormatNow(dttm.SafeForTesting)
-		item, err := ac.Drives().PostItemInContainer(
+		item, err := ac.PostItemInContainer(
 			ctx,
 			driveID,
 			nfid,
-			newItem(newItemName, false),
+			api.NewDriveItem(newItemName, false),
 			control.Copy)
 		require.NoError(t, err, clues.ToCore(err))
@@ -142,9 +125,9 @@
 	// Create a new URL cache with a long TTL
 	uc, err := newURLCache(
 		suite.driveID,
-		prevDelta.URL,
+		du.URL,
 		1*time.Hour,
-		driveItemPager,
+		suite.ac.Drives(),
 		fault.New(true))
 	require.NoError(t, err, clues.ToCore(err))
@@ -195,6 +178,10 @@
 	require.Equal(t, 1, uc.deltaQueryCount)
 }
+// ---------------------------------------------------------------------------
+// unit
+// ---------------------------------------------------------------------------
 type URLCacheUnitSuite struct {
 	tester.Suite
 }
@@ -205,27 +192,20 @@ func TestURLCacheUnitSuite(t *testing.T) {
 func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 	deltaString := "delta"
-	next := "next"
 	driveID := "drive1"
 	table := []struct {
 		name string
-		pagerResult map[string][]apiMock.PagerResult[models.DriveItemable]
+		pagerItems map[string][]models.DriveItemable
+		pagerErr map[string]error
 		expectedItemProps map[string]itemProps
 		expectedErr require.ErrorAssertionFunc
 		cacheAssert func(*urlCache, time.Time)
 	}{
 		{
 			name: "single item in cache",
-			pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
-				driveID: {
-					{
-						Values: []models.DriveItemable{
-							fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-						},
-						DeltaLink: &deltaString,
-					},
-				},
+			pagerItems: map[string][]models.DriveItemable{
+				driveID: {fileItem("1", "file1", "root", "root", "https://dummy1.com", false)},
 			},
 			expectedItemProps: map[string]itemProps{
 				"1": {
@@ -242,19 +222,14 @@
 		},
 		{
 			name: "multiple items in cache",
-			pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
+			pagerItems: map[string][]models.DriveItemable{
 				driveID: {
-					{
-						Values: []models.DriveItemable{
 					fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
 					fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
 					fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
 					fileItem("4", "file4", "root", "root", "https://dummy4.com", false),
 					fileItem("5", "file5", "root", "root", "https://dummy5.com", false),
 				},
-						DeltaLink: &deltaString,
-					},
-				},
 			},
 			expectedItemProps: map[string]itemProps{
 				"1": {
@@ -287,19 +262,14 @@
 		},
 		{
 			name: "duplicate items with potentially new urls",
-			pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
+			pagerItems: map[string][]models.DriveItemable{
 				driveID: {
-					{
-						Values: []models.DriveItemable{
 					fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
 					fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
 					fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
 					fileItem("1", "file1", "root", "root", "https://test1.com", false),
 					fileItem("2", "file2", "root", "root", "https://test2.com", false),
 				},
-						DeltaLink: &deltaString,
-					},
-				},
 			},
 			expectedItemProps: map[string]itemProps{
 				"1": {
@@ -324,17 +294,12 @@
 		},
 		{
 			name: "deleted items",
-			pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
+			pagerItems: map[string][]models.DriveItemable{
 				driveID: {
-					{
-						Values: []models.DriveItemable{
 					fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
 					fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
 					fileItem("1", "file1", "root", "root", "https://dummy1.com", true),
 				},
-						DeltaLink: &deltaString,
-					},
-				},
 			},
 			expectedItemProps: map[string]itemProps{
 				"1": {
@@ -355,15 +320,8 @@
 		},
 		{
 			name: "item not found in cache",
-			pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
-				driveID: {
-					{
-						Values: []models.DriveItemable{
-							fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-						},
-						DeltaLink: &deltaString,
-					},
-				},
+			pagerItems: map[string][]models.DriveItemable{
+				driveID: {fileItem("1", "file1", "root", "root", "https://dummy1.com", false)},
 			},
 			expectedItemProps: map[string]itemProps{
 				"2": {},
@@ -376,23 +334,10 @@
 			},
 		},
 		{
-			name: "multi-page delta query error",
-			pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
-				driveID: {
-					{
-						Values: []models.DriveItemable{
-							fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-						},
-						NextLink: &next,
-					},
-					{
-						Values: []models.DriveItemable{
-							fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
-						},
-						DeltaLink: &deltaString,
-						Err: errors.New("delta query error"),
-					},
-				},
+			name: "delta query error",
+			pagerItems: map[string][]models.DriveItemable{},
+			pagerErr: map[string]error{
+				driveID: errors.New("delta query error"),
 			},
 			expectedItemProps: map[string]itemProps{
 				"1": {},
@@ -408,16 +353,11 @@
 		{
 			name: "folder item",
-			pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
+			pagerItems: map[string][]models.DriveItemable{
 				driveID: {
-					{
-						Values: []models.DriveItemable{
 					fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
 					driveItem("2", "folder2", "root", "root", false, true, false),
 				},
-						DeltaLink: &deltaString,
-					},
-				},
 			},
 			expectedItemProps: map[string]itemProps{
 				"2": {},
@@ -437,15 +377,17 @@
 			ctx, flush := tester.NewContext(t)
 			defer flush()
-			itemPager := &apiMock.DeltaPager[models.DriveItemable]{
-				ToReturn: test.pagerResult[driveID],
+			medi := mock.EnumeratesDriveItemsDelta{
+				Items: test.pagerItems,
+				Err: test.pagerErr,
+				DeltaUpdate: map[string]api.DeltaUpdate{driveID: {URL: deltaString}},
 			}
 			cache, err := newURLCache(
 				driveID,
 				"",
 				1*time.Hour,
-				itemPager,
+				&medi,
 				fault.New(true))
 			require.NoError(suite.T(), err, clues.ToCore(err))
@@ -480,15 +422,17 @@
 // Test needsRefresh
 func (suite *URLCacheUnitSuite) TestNeedsRefresh() {
-	driveID := "drive1"
-	t := suite.T()
-	refreshInterval := 1 * time.Second
+	var (
+		t = suite.T()
+		driveID = "drive1"
+		refreshInterval = 1 * time.Second
+	)
 	cache, err := newURLCache(
 		driveID,
 		"",
 		refreshInterval,
-		&apiMock.DeltaPager[models.DriveItemable]{},
+		&mock.EnumeratesDriveItemsDelta{},
 		fault.New(true))
 	require.NoError(t, err, clues.ToCore(err))
@@ -510,14 +454,12 @@
 	require.False(t, cache.needsRefresh())
 }
-// Test newURLCache
 func (suite *URLCacheUnitSuite) TestNewURLCache() {
-	// table driven tests
 	table := []struct {
 		name string
 		driveID string
 		refreshInt time.Duration
-		itemPager api.DeltaPager[models.DriveItemable]
+		itemPager EnumerateDriveItemsDeltaer
 		errors *fault.Bus
 		expectedErr require.ErrorAssertionFunc
 	}{
@@ -525,7 +467,7 @@
 		name: "invalid driveID",
 		driveID: "",
 		refreshInt: 1 * time.Hour,
-		itemPager: &apiMock.DeltaPager[models.DriveItemable]{},
+		itemPager: &mock.EnumeratesDriveItemsDelta{},
 		errors: fault.New(true),
 		expectedErr: require.Error,
 		},
@@ -533,12 +475,12 @@
 		name: "invalid refresh interval",
 		driveID: "drive1",
 		refreshInt: 100 * time.Millisecond,
-		itemPager: &apiMock.DeltaPager[models.DriveItemable]{},
+		itemPager: &mock.EnumeratesDriveItemsDelta{},
 		errors: fault.New(true),
 		expectedErr: require.Error,
 		},
 		{
-		name: "invalid itemPager",
+		name: "invalid item enumerator",
 		driveID: "drive1",
 		refreshInt: 1 * time.Hour,
 		itemPager: nil,
@@ -549,7 +491,7 @@
 		name: "valid",
 		driveID: "drive1",
 		refreshInt: 1 * time.Hour,
-		itemPager: &apiMock.DeltaPager[models.DriveItemable]{},
+		itemPager: &mock.EnumeratesDriveItemsDelta{},
 		errors: fault.New(true),
 		expectedErr: require.NoError,
 		},

View File

@@ -160,7 +160,7 @@ func populateCollections(
 	ictx = clues.Add(ictx, "previous_path", prevPath)
-	added, _, removed, newDelta, err := bh.itemEnumerator().
+	added, validModTimes, removed, newDelta, err := bh.itemEnumerator().
 		GetAddedAndRemovedItemIDs(
 			ictx,
 			qp.ProtectedResource.ID(),
@@ -199,9 +199,7 @@
 		bh.itemHandler(),
 		added,
 		removed,
-		// TODO(ashmrtn): Set to value returned by pager when we have deletion
-		// markers in files.
-		false,
+		validModTimes,
 		statusUpdater)
 	collections[cID] = edc


@@ -278,7 +278,7 @@ func (col *prefetchCollection) streamItems(
 		return
 	}
-	item, err := data.NewPrefetchedItem(
+	item, err := data.NewPrefetchedItemWithInfo(
 		io.NopCloser(bytes.NewReader(itemData)),
 		id,
 		details.ItemInfo{Exchange: info})
@@ -403,7 +403,7 @@ func (col *lazyFetchCollection) streamItems(
 		"service", path.ExchangeService.String(),
 		"category", col.Category().String())
-	stream <- data.NewLazyItem(
+	stream <- data.NewLazyItemWithInfo(
 		ictx,
 		&lazyItemGetter{
 			userID: user,


@@ -56,7 +56,7 @@ func (suite *CollectionUnitSuite) TestPrefetchedItem_Reader() {
 	suite.Run(test.name, func() {
 		t := suite.T()
-		ed, err := data.NewPrefetchedItem(
+		ed, err := data.NewPrefetchedItemWithInfo(
 			io.NopCloser(bytes.NewReader(test.readData)),
 			"itemID",
 			details.ItemInfo{})
@@ -494,7 +494,7 @@ func (suite *CollectionUnitSuite) TestLazyItem_NoRead_GetInfo_Errors() {
 	ctx, flush := tester.NewContext(t)
 	defer flush()
-	li := data.NewLazyItem(
+	li := data.NewLazyItemWithInfo(
 		ctx,
 		nil,
 		"itemID",
@@ -552,7 +552,7 @@ func (suite *CollectionUnitSuite) TestLazyItem_GetDataErrors() {
 		SerializeErr: test.serializeErr,
 	}
-	li := data.NewLazyItem(
+	li := data.NewLazyItemWithInfo(
 		ctx,
 		&lazyItemGetter{
 			userID: "userID",
@@ -592,7 +592,7 @@ func (suite *CollectionUnitSuite) TestLazyItem_ReturnsEmptyReaderOnDeletedInFlig
 	getter := &mock.ItemGetSerialize{GetErr: graph.ErrDeletedInFlight}
-	li := data.NewLazyItem(
+	li := data.NewLazyItemWithInfo(
 		ctx,
 		&lazyItemGetter{
 			userID: "userID",
@@ -645,7 +645,7 @@ func (suite *CollectionUnitSuite) TestLazyItem() {
 	getter := &mock.ItemGetSerialize{GetData: testData}
-	li := data.NewLazyItem(
+	li := data.NewLazyItemWithInfo(
 		ctx,
 		&lazyItemGetter{
 			userID: "userID",


@@ -2,7 +2,6 @@ package groups
 import (
 	"context"
-	"fmt"
 	"testing"
 	"time"
@@ -527,8 +526,6 @@ func (suite *BackupIntgSuite) TestCreateCollections() {
 	require.NotEmpty(t, c.FullPath().Folder(false))
-	fmt.Printf("\n-----\nfolder %+v\n-----\n", c.FullPath().Folder(false))
 	// TODO(ashmrtn): Remove when LocationPath is made part of BackupCollection
 	// interface.
 	if !assert.Implements(t, (*data.LocationPather)(nil), c) {
@@ -537,8 +534,6 @@
 	loc := c.(data.LocationPather).LocationPath().String()
-	fmt.Printf("\n-----\nloc %+v\n-----\n", c.(data.LocationPather).LocationPath().String())
 	require.NotEmpty(t, loc)
 	delete(test.channelNames, loc)


@@ -176,7 +176,7 @@ func (col *Collection) streamItems(ctx context.Context, errs *fault.Bus) {
 	info.ParentPath = col.LocationPath().String()
-	storeItem, err := data.NewPrefetchedItem(
+	storeItem, err := data.NewPrefetchedItemWithInfo(
 		io.NopCloser(bytes.NewReader(itemData)),
 		id,
 		details.ItemInfo{Groups: info})


@@ -49,7 +49,7 @@ func (suite *CollectionUnitSuite) TestPrefetchedItem_Reader() {
 	suite.Run(test.name, func() {
 		t := suite.T()
-		ed, err := data.NewPrefetchedItem(
+		ed, err := data.NewPrefetchedItemWithInfo(
 			io.NopCloser(bytes.NewReader(test.readData)),
 			"itemID",
 			details.ItemInfo{})


@@ -15,6 +15,7 @@ import (
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/export"
 	"github.com/alcionai/corso/src/pkg/fault"
+	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )
@@ -23,6 +24,7 @@ func NewExportCollection(
 	backingCollections []data.RestoreCollection,
 	backupVersion int,
 	cec control.ExportConfig,
+	stats *data.ExportStats,
 ) export.Collectioner {
 	return export.BaseCollection{
 		BaseDir: baseDir,
@@ -30,6 +32,7 @@
 		BackupVersion: backupVersion,
 		Cfg: cec,
 		Stream: streamItems,
+		Stats: stats,
 	}
 }
@@ -40,6 +43,7 @@
 	backupVersion int,
 	cec control.ExportConfig,
 	ch chan<- export.Item,
+	stats *data.ExportStats,
 ) {
 	defer close(ch)
@@ -54,6 +58,9 @@
 			Error: err,
 		}
 	} else {
+		stats.UpdateResourceCount(path.ChannelMessagesCategory)
+		body = data.ReaderWithStats(body, path.ChannelMessagesCategory, stats)
 		ch <- export.Item{
 			ID: item.ID(),
 			// channel message items have no name
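
This is the wiring behind the new end-of-run export stats: each streamed message bumps the per-category resource count, and bytes accrue as the export body is read. A small sketch of the effect, assuming the `data.ExportStats` methods behave as they are invoked above:

  // Count one channel-messages resource, then attribute bytes read from
  // the body to the same category as the export consumes it.
  func instrument(body io.ReadCloser, stats *data.ExportStats) io.ReadCloser {
      stats.UpdateResourceCount(path.ChannelMessagesCategory)
      return data.ReaderWithStats(body, path.ChannelMessagesCategory, stats)
  }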


@@ -90,7 +90,8 @@ func (suite *ExportUnitSuite) TestStreamItems() {
 	[]data.RestoreCollection{test.backingColl},
 	version.NoBackup,
 	control.DefaultExportConfig(),
-	ch)
+	ch,
+	&data.ExportStats{})
 	var (
 		itm export.Item


@@ -38,7 +38,7 @@ func CollectLibraries(
 	colls = drive.NewCollections(
 		bh,
 		tenantID,
-		bpc.ProtectedResource.ID(),
+		bpc.ProtectedResource,
 		su,
 		bpc.Options)
 )


@@ -212,7 +212,7 @@ func (sc *Collection) retrieveLists(
 	metrics.Successes++
-	item, err := data.NewPrefetchedItem(
+	item, err := data.NewPrefetchedItemWithInfo(
 		io.NopCloser(bytes.NewReader(byteArray)),
 		ptr.Val(lst.GetId()),
 		details.ItemInfo{SharePoint: ListToSPInfo(lst, size)})
@@ -279,7 +279,7 @@ func (sc *Collection) retrievePages(
 	metrics.Bytes += size
 	metrics.Successes++
-	item, err := data.NewPrefetchedItem(
+	item, err := data.NewPrefetchedItemWithInfo(
 		io.NopCloser(bytes.NewReader(byteArray)),
 		ptr.Val(pg.GetId()),
 		details.ItemInfo{SharePoint: pageToSPInfo(pg, root, size)})


@@ -103,7 +103,7 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
 	byteArray, err := ow.GetSerializedContent()
 	require.NoError(t, err, clues.ToCore(err))
-	data, err := data.NewPrefetchedItem(
+	data, err := data.NewPrefetchedItemWithInfo(
 		io.NopCloser(bytes.NewReader(byteArray)),
 		name,
 		details.ItemInfo{SharePoint: ListToSPInfo(listing, int64(len(byteArray)))})
@@ -133,7 +133,7 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
 	page, err := betaAPI.CreatePageFromBytes(byteArray)
 	require.NoError(t, err, clues.ToCore(err))
-	data, err := data.NewPrefetchedItem(
+	data, err := data.NewPrefetchedItemWithInfo(
 		io.NopCloser(bytes.NewReader(byteArray)),
 		itemName,
 		details.ItemInfo{SharePoint: betaAPI.PageInfo(page, int64(len(byteArray)))})
@@ -196,7 +196,7 @@ func (suite *SharePointCollectionSuite) TestListCollection_Restore() {
 	byteArray, err := service.Serialize(listing)
 	require.NoError(t, err, clues.ToCore(err))
-	listData, err := data.NewPrefetchedItem(
+	listData, err := data.NewPrefetchedItemWithInfo(
 		io.NopCloser(bytes.NewReader(byteArray)),
 		testName,
 		details.ItemInfo{SharePoint: ListToSPInfo(listing, int64(len(byteArray)))})


@@ -36,7 +36,7 @@ type Controller struct {
 	tenant string
 	credentials account.M365Config
-	ownerLookup getOwnerIDAndNamer
+	ownerLookup idname.GetResourceIDAndNamer
 	// maps of resource owner ids to names, and names to ids.
 	// not guaranteed to be populated, only here as a post-population
 	// reference for processes that choose to populate the values.
@@ -229,38 +229,24 @@ type getIDAndNamer interface {
 	)
 }
-var _ getOwnerIDAndNamer = &resourceClient{}
-type getOwnerIDAndNamer interface {
-	getOwnerIDAndNameFrom(
-		ctx context.Context,
-		discovery api.Client,
-		owner string,
-		ins idname.Cacher,
-	) (
-		ownerID string,
-		ownerName string,
-		err error,
-	)
-}
-// getOwnerIDAndNameFrom looks up the owner's canonical id and display name.
-// If the owner is present in the idNameSwapper, then that interface's id and
+var _ idname.GetResourceIDAndNamer = &resourceClient{}
+// GetResourceIDAndNameFrom looks up the resource's canonical id and display name.
+// If the resource is present in the idNameSwapper, then that interface's id and
 // name values are returned. As a fallback, the resource calls the discovery
-// api to fetch the user or site using the owner value. This fallback assumes
-// that the owner is a well formed ID or display name of appropriate design
+// api to fetch the user or site using the resource value. This fallback assumes
+// that the resource is a well formed ID or display name of appropriate design
 // (PrincipalName for users, WebURL for sites).
-func (r resourceClient) getOwnerIDAndNameFrom(
+func (r resourceClient) GetResourceIDAndNameFrom(
 	ctx context.Context,
-	discovery api.Client,
 	owner string,
 	ins idname.Cacher,
-) (string, string, error) {
+) (idname.Provider, error) {
 	if ins != nil {
 		if n, ok := ins.NameOf(owner); ok {
-			return owner, n, nil
+			return idname.NewProvider(owner, n), nil
 		} else if i, ok := ins.IDOf(owner); ok {
-			return i, owner, nil
+			return idname.NewProvider(i, owner), nil
 		}
 	}
@@ -274,17 +260,21 @@ func (r resourceClient) getOwnerIDAndNameFrom(
 	id, name, err = r.getter.GetIDAndName(ctx, owner, api.CallConfig{})
 	if err != nil {
 		if graph.IsErrUserNotFound(err) {
-			return "", "", clues.Stack(graph.ErrResourceOwnerNotFound, err)
+			return nil, clues.Stack(graph.ErrResourceOwnerNotFound, err)
 		}
-		return "", "", err
+		if graph.IsErrResourceLocked(err) {
+			return nil, clues.Stack(graph.ErrResourceLocked, err)
+		}
+		return nil, err
 	}
 	if len(id) == 0 || len(name) == 0 {
-		return "", "", clues.Stack(graph.ErrResourceOwnerNotFound)
+		return nil, clues.Stack(graph.ErrResourceOwnerNotFound)
 	}
-	return id, name, nil
+	return idname.NewProvider(id, name), nil
 }
 // PopulateProtectedResourceIDAndName takes the provided owner identifier and produces
@@ -297,15 +287,15 @@ func (r resourceClient) getOwnerIDAndNameFrom(
 // data gets stored inside the controller instance for later re-use.
 func (ctrl *Controller) PopulateProtectedResourceIDAndName(
 	ctx context.Context,
-	owner string, // input value, can be either id or name
+	resourceID string, // input value, can be either id or name
 	ins idname.Cacher,
-) (string, string, error) {
-	id, name, err := ctrl.ownerLookup.getOwnerIDAndNameFrom(ctx, ctrl.AC, owner, ins)
+) (idname.Provider, error) {
+	pr, err := ctrl.ownerLookup.GetResourceIDAndNameFrom(ctx, resourceID, ins)
 	if err != nil {
-		return "", "", clues.Wrap(err, "identifying resource owner")
+		return nil, clues.Wrap(err, "identifying resource owner")
 	}
-	ctrl.IDNameLookup = idname.NewCache(map[string]string{id: name})
-	return id, name, nil
+	ctrl.IDNameLookup = idname.NewCache(map[string]string{pr.ID(): pr.Name()})
+	return pr, nil
 }
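
With the lookup now returning an `idname.Provider` and stacking sentinel errors, callers can distinguish a missing resource from one that is locked out of access. A sketch of consumer-side handling, assuming only the `graph` sentinels stacked above:

  // Resolve a protected resource and surface the two sentinel cases
  // visible in the lookup above.
  func resolveResource(
      ctx context.Context,
      ctrl *Controller,
      in string,
  ) (idname.Provider, error) {
      pr, err := ctrl.PopulateProtectedResourceIDAndName(ctx, in, nil)
      switch {
      case errors.Is(err, graph.ErrResourceOwnerNotFound):
          return nil, clues.Wrap(err, "no matching user or site")
      case errors.Is(err, graph.ErrResourceLocked):
          return nil, clues.Wrap(err, "access to the resource is locked")
      case err != nil:
          return nil, err
      }
      return pr, nil
  }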


@ -66,113 +66,125 @@ func (suite *ControllerUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
table := []struct { table := []struct {
name string name string
owner string protectedResource string
ins inMock.Cache ins inMock.Cache
rc *resourceClient rc *resourceClient
expectID string expectID string
expectName string expectName string
expectErr require.ErrorAssertionFunc expectErr require.ErrorAssertionFunc
expectNil require.ValueAssertionFunc
}{ }{
{ {
name: "nil ins", name: "nil ins",
owner: id, protectedResource: id,
rc: lookup, rc: lookup,
expectID: id, expectID: id,
expectName: name, expectName: name,
expectErr: require.NoError, expectErr: require.NoError,
expectNil: require.NotNil,
}, },
{ {
name: "nil ins no lookup", name: "nil ins no lookup",
owner: id, protectedResource: id,
rc: noLookup, rc: noLookup,
expectID: "", expectID: "",
expectName: "", expectName: "",
expectErr: require.Error, expectErr: require.Error,
expectNil: require.Nil,
}, },
{ {
name: "only id map with owner id", name: "only id map with owner id",
owner: id, protectedResource: id,
ins: inMock.NewCache(itn, nil), ins: inMock.NewCache(itn, nil),
rc: noLookup, rc: noLookup,
expectID: id, expectID: id,
expectName: name, expectName: name,
expectErr: require.NoError, expectErr: require.NoError,
expectNil: require.NotNil,
}, },
{ {
name: "only name map with owner id", name: "only name map with owner id",
owner: id, protectedResource: id,
ins: inMock.NewCache(nil, nti), ins: inMock.NewCache(nil, nti),
rc: noLookup, rc: noLookup,
expectID: "", expectID: "",
expectName: "", expectName: "",
expectErr: require.Error, expectErr: require.Error,
expectNil: require.Nil,
}, },
{ {
name: "only name map with owner id and lookup", name: "only name map with owner id and lookup",
owner: id, protectedResource: id,
ins: inMock.NewCache(nil, nti), ins: inMock.NewCache(nil, nti),
rc: lookup, rc: lookup,
expectID: id, expectID: id,
expectName: name, expectName: name,
expectErr: require.NoError, expectErr: require.NoError,
expectNil: require.NotNil,
}, },
{ {
name: "only id map with owner name", name: "only id map with owner name",
owner: name, protectedResource: name,
ins: inMock.NewCache(itn, nil), ins: inMock.NewCache(itn, nil),
rc: lookup, rc: lookup,
expectID: id, expectID: id,
expectName: name, expectName: name,
expectErr: require.NoError, expectErr: require.NoError,
expectNil: require.NotNil,
}, },
{ {
name: "only name map with owner name", name: "only name map with owner name",
owner: name, protectedResource: name,
ins: inMock.NewCache(nil, nti), ins: inMock.NewCache(nil, nti),
rc: noLookup, rc: noLookup,
expectID: id, expectID: id,
expectName: name, expectName: name,
expectErr: require.NoError, expectErr: require.NoError,
expectNil: require.NotNil,
}, },
{ {
name: "only id map with owner name", name: "only id map with owner name",
owner: name, protectedResource: name,
ins: inMock.NewCache(itn, nil), ins: inMock.NewCache(itn, nil),
rc: noLookup, rc: noLookup,
expectID: "", expectID: "",
expectName: "", expectName: "",
expectErr: require.Error, expectErr: require.Error,
expectNil: require.Nil,
}, },
{ {
name: "only id map with owner name and lookup", name: "only id map with owner name and lookup",
owner: name, protectedResource: name,
ins: inMock.NewCache(itn, nil), ins: inMock.NewCache(itn, nil),
rc: lookup, rc: lookup,
expectID: id, expectID: id,
expectName: name, expectName: name,
expectErr: require.NoError, expectErr: require.NoError,
expectNil: require.NotNil,
}, },
{ {
name: "both maps with owner id", name: "both maps with owner id",
owner: id, protectedResource: id,
ins: inMock.NewCache(itn, nti), ins: inMock.NewCache(itn, nti),
rc: noLookup, rc: noLookup,
expectID: id, expectID: id,
expectName: name, expectName: name,
expectErr: require.NoError, expectErr: require.NoError,
expectNil: require.NotNil,
}, },
{ {
name: "both maps with owner name", name: "both maps with owner name",
owner: name, protectedResource: name,
ins: inMock.NewCache(itn, nti), ins: inMock.NewCache(itn, nti),
rc: noLookup, rc: noLookup,
expectID: id, expectID: id,
expectName: name, expectName: name,
expectErr: require.NoError, expectErr: require.NoError,
expectNil: require.NotNil,
}, },
{ {
name: "non-matching maps with owner id", name: "non-matching maps with owner id",
owner: id, protectedResource: id,
ins: inMock.NewCache( ins: inMock.NewCache(
map[string]string{"foo": "bar"}, map[string]string{"foo": "bar"},
map[string]string{"fnords": "smarf"}), map[string]string{"fnords": "smarf"}),
@ -180,10 +192,11 @@ func (suite *ControllerUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
expectID: "", expectID: "",
expectName: "", expectName: "",
expectErr: require.Error, expectErr: require.Error,
expectNil: require.Nil,
}, },
{ {
name: "non-matching with owner name", name: "non-matching with owner name",
owner: name, protectedResource: name,
ins: inMock.NewCache( ins: inMock.NewCache(
map[string]string{"foo": "bar"}, map[string]string{"foo": "bar"},
map[string]string{"fnords": "smarf"}), map[string]string{"fnords": "smarf"}),
@ -191,10 +204,11 @@ func (suite *ControllerUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
expectID: "", expectID: "",
expectName: "", expectName: "",
expectErr: require.Error, expectErr: require.Error,
expectNil: require.Nil,
}, },
{ {
name: "non-matching maps with owner id and lookup", name: "non-matching maps with owner id and lookup",
owner: id, protectedResource: id,
ins: inMock.NewCache( ins: inMock.NewCache(
map[string]string{"foo": "bar"}, map[string]string{"foo": "bar"},
map[string]string{"fnords": "smarf"}), map[string]string{"fnords": "smarf"}),
@ -202,10 +216,11 @@ func (suite *ControllerUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
expectID: id, expectID: id,
expectName: name, expectName: name,
expectErr: require.NoError, expectErr: require.NoError,
expectNil: require.NotNil,
}, },
{ {
name: "non-matching with owner name and lookup", name: "non-matching with owner name and lookup",
owner: name, protectedResource: name,
ins: inMock.NewCache( ins: inMock.NewCache(
map[string]string{"foo": "bar"}, map[string]string{"foo": "bar"},
map[string]string{"fnords": "smarf"}), map[string]string{"fnords": "smarf"}),
@ -213,6 +228,7 @@ func (suite *ControllerUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
expectID: id, expectID: id,
expectName: name, expectName: name,
expectErr: require.NoError, expectErr: require.NoError,
expectNil: require.NotNil,
}, },
} }
for _, test := range table { for _, test := range table {
@ -224,10 +240,16 @@ func (suite *ControllerUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
ctrl := &Controller{ownerLookup: test.rc} ctrl := &Controller{ownerLookup: test.rc}
rID, rName, err := ctrl.PopulateProtectedResourceIDAndName(ctx, test.owner, test.ins) resource, err := ctrl.PopulateProtectedResourceIDAndName(ctx, test.protectedResource, test.ins)
test.expectErr(t, err, clues.ToCore(err)) test.expectErr(t, err, clues.ToCore(err))
assert.Equal(t, test.expectID, rID, "id") test.expectNil(t, resource)
assert.Equal(t, test.expectName, rName, "name")
if err != nil {
return
}
assert.Equal(t, test.expectID, resource.ID(), "id")
assert.Equal(t, test.expectName, resource.Name(), "name")
}) })
} }
} }
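Note: the lookup now returns a single idname.Provider instead of an (id, name, error) triple, so callers thread one value through rather than two loosely coupled strings. A minimal self-contained analog of the new shape; Provider here is a stand-in for idname.Provider, and the stub lookup is hypothetical:

```go
package main

import "fmt"

// Provider is a stand-in for idname.Provider: one value carrying
// both the canonical ID and the display name.
type Provider interface {
	ID() string
	Name() string
}

type pair struct{ id, name string }

func (p pair) ID() string   { return p.id }
func (p pair) Name() string { return p.name }

// populate is a hypothetical stub; the real method resolves the input
// against caches and, failing that, a lookup service.
func populate(idOrName string) (Provider, error) {
	return pair{id: idOrName, name: idOrName}, nil
}

func main() {
	resource, err := populate("user@example.com")
	if err != nil {
		panic(err)
	}

	fmt.Println(resource.ID(), resource.Name())
}
```

Packing both halves into one value keeps the ID and display name from drifting apart as they pass through configs such as BackupProducerConfig.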
@ -1362,15 +1384,15 @@ func (suite *ControllerIntegrationSuite) TestBackup_CreatesPrefixCollections() {
start = time.Now() start = time.Now()
) )
id, name, err := backupCtrl.PopulateProtectedResourceIDAndName(ctx, backupSel.DiscreteOwner, nil) resource, err := backupCtrl.PopulateProtectedResourceIDAndName(ctx, backupSel.DiscreteOwner, nil)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
backupSel.SetDiscreteOwnerIDName(id, name) backupSel.SetDiscreteOwnerIDName(resource.ID(), resource.Name())
bpc := inject.BackupProducerConfig{ bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup, LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(), Options: control.DefaultOptions(),
ProtectedResource: inMock.NewProvider(id, name), ProtectedResource: resource,
Selector: backupSel, Selector: backupSel,
} }

View File

@ -27,6 +27,7 @@ func (ctrl *Controller) ProduceExportCollections(
exportCfg control.ExportConfig, exportCfg control.ExportConfig,
opts control.Options, opts control.Options,
dcs []data.RestoreCollection, dcs []data.RestoreCollection,
stats *data.ExportStats,
errs *fault.Bus, errs *fault.Bus,
) ([]export.Collectioner, error) { ) ([]export.Collectioner, error) {
ctx, end := diagnostics.Span(ctx, "m365:export") ctx, end := diagnostics.Span(ctx, "m365:export")
@ -51,6 +52,7 @@ func (ctrl *Controller) ProduceExportCollections(
opts, opts,
dcs, dcs,
deets, deets,
stats,
errs) errs)
case selectors.ServiceSharePoint: case selectors.ServiceSharePoint:
expCollections, err = sharepoint.ProduceExportCollections( expCollections, err = sharepoint.ProduceExportCollections(
@ -61,6 +63,7 @@ func (ctrl *Controller) ProduceExportCollections(
dcs, dcs,
ctrl.backupDriveIDNames, ctrl.backupDriveIDNames,
deets, deets,
stats,
errs) errs)
case selectors.ServiceGroups: case selectors.ServiceGroups:
expCollections, err = groups.ProduceExportCollections( expCollections, err = groups.ProduceExportCollections(
@ -72,6 +75,7 @@ func (ctrl *Controller) ProduceExportCollections(
ctrl.backupDriveIDNames, ctrl.backupDriveIDNames,
ctrl.backupSiteIDWebURL, ctrl.backupSiteIDWebURL,
deets, deets,
stats,
errs) errs)
default: default:

View File

@ -150,10 +150,11 @@ const (
// https://learn.microsoft.com/en-us/sharepoint/dev/general-development // https://learn.microsoft.com/en-us/sharepoint/dev/general-development
// /how-to-avoid-getting-throttled-or-blocked-in-sharepoint-online#application-throttling // /how-to-avoid-getting-throttled-or-blocked-in-sharepoint-online#application-throttling
defaultLC = 1 defaultLC = 1
driveDefaultLC = 2
// limit consumption rate for single-item GET requests, // limit consumption rate for single-item GET requests,
// or delta-based multi-item GETs. // or delta-based multi-item GETs, or item content download requests.
SingleGetOrDeltaLC = 1 SingleGetOrDeltaLC = 1
// delta queries without a delta token cost 2 units
DeltaNoTokenLC = 2
// limit consumption rate for anything permissions related // limit consumption rate for anything permissions related
PermissionsLC = 5 PermissionsLC = 5
) )
@ -185,13 +186,7 @@ func ctxLimiterConsumption(ctx context.Context, defaultConsumption int) int {
// the next token set is available. // the next token set is available.
func QueueRequest(ctx context.Context) { func QueueRequest(ctx context.Context) {
limiter := ctxLimiter(ctx) limiter := ctxLimiter(ctx)
defaultConsumed := defaultLC consume := ctxLimiterConsumption(ctx, defaultLC)
if limiter == driveLimiter {
defaultConsumed = driveDefaultLC
}
consume := ctxLimiterConsumption(ctx, defaultConsumed)
if err := limiter.WaitN(ctx, consume); err != nil { if err := limiter.WaitN(ctx, consume); err != nil {
logger.CtxErr(ctx, err).Error("graph middleware waiting on the limiter") logger.CtxErr(ctx, err).Error("graph middleware waiting on the limiter")
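Note: with the drive-specific default removed, every request pays the single default unit unless the context carries an explicit consumption override (via ctxLimiterConsumption), and DeltaNoTokenLC documents the 2-unit cost of tokenless delta queries. The WaitN call above matches golang.org/x/time/rate.Limiter's signature, so a token-bucket sketch with that package illustrates the behavior; the rate and burst values are illustrative only:

```go
package main

import (
	"context"
	"fmt"

	"golang.org/x/time/rate"
)

const (
	defaultLC      = 1 // ordinary requests consume one unit
	deltaNoTokenLC = 2 // delta queries without a delta token cost two
)

func main() {
	// 10 units/second with a burst of 10; illustrative values only.
	limiter := rate.NewLimiter(rate.Limit(10), 10)
	ctx := context.Background()

	// An ordinary request waits for a single token.
	if err := limiter.WaitN(ctx, defaultLC); err != nil {
		fmt.Println("wait:", err)
	}

	// A tokenless delta enumeration declares the higher cost up front.
	if err := limiter.WaitN(ctx, deltaNoTokenLC); err != nil {
		fmt.Println("wait:", err)
	}
}
```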

View File

@ -15,6 +15,7 @@ import (
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/filters" "github.com/alcionai/corso/src/pkg/filters"
) )
@ -50,6 +51,7 @@ const (
// nameAlreadyExists occurs when a request with // nameAlreadyExists occurs when a request with
// @microsoft.graph.conflictBehavior=fail finds a conflicting file. // @microsoft.graph.conflictBehavior=fail finds a conflicting file.
nameAlreadyExists errorCode = "nameAlreadyExists" nameAlreadyExists errorCode = "nameAlreadyExists"
NotAllowed errorCode = "notAllowed"
noResolvedUsers errorCode = "noResolvedUsers" noResolvedUsers errorCode = "noResolvedUsers"
QuotaExceeded errorCode = "ErrorQuotaExceeded" QuotaExceeded errorCode = "ErrorQuotaExceeded"
RequestResourceNotFound errorCode = "Request_ResourceNotFound" RequestResourceNotFound errorCode = "Request_ResourceNotFound"
@ -61,6 +63,11 @@ const (
syncStateNotFound errorCode = "SyncStateNotFound" syncStateNotFound errorCode = "SyncStateNotFound"
) )
// inner error codes
const (
ResourceLocked errorCode = "resourceLocked"
)
type errorMessage string type errorMessage string
const ( const (
@ -113,6 +120,11 @@ var (
// replies, no error should get returned. // replies, no error should get returned.
ErrMultipleResultsMatchIdentifier = clues.New("multiple results match the identifier") ErrMultipleResultsMatchIdentifier = clues.New("multiple results match the identifier")
// ErrResourceLocked occurs when a resource has had its access locked.
// Example case: https://learn.microsoft.com/en-us/sharepoint/manage-lock-status
// This makes the resource inaccessible to any Corso operations.
ErrResourceLocked = clues.New("resource has been locked and must be unlocked by an administrator")
// ErrServiceNotEnabled identifies that a resource owner does not have // ErrServiceNotEnabled identifies that a resource owner does not have
// access to a given service. // access to a given service.
ErrServiceNotEnabled = clues.New("service is not enabled for that resource owner") ErrServiceNotEnabled = clues.New("service is not enabled for that resource owner")
@ -124,6 +136,8 @@ var (
ErrTimeout = clues.New("communication timeout") ErrTimeout = clues.New("communication timeout")
ErrResourceOwnerNotFound = clues.New("resource owner not found in tenant") ErrResourceOwnerNotFound = clues.New("resource owner not found in tenant")
ErrTokenExpired = clues.New("jwt token expired")
) )
func IsErrApplicationThrottled(err error) bool { func IsErrApplicationThrottled(err error) bool {
@ -224,7 +238,8 @@ func IsErrUnauthorized(err error) bool {
// TODO: refine this investigation. We don't currently know if // TODO: refine this investigation. We don't currently know if
// a specific item download url expired, or if the full connection // a specific item download url expired, or if the full connection
// auth expired. // auth expired.
return clues.HasLabel(err, LabelStatus(http.StatusUnauthorized)) return clues.HasLabel(err, LabelStatus(http.StatusUnauthorized)) ||
errors.Is(err, ErrTokenExpired)
} }
func IsErrItemAlreadyExistsConflict(err error) bool { func IsErrItemAlreadyExistsConflict(err error) bool {
@ -264,6 +279,12 @@ func IsErrSiteNotFound(err error) bool {
return hasErrorMessage(err, requestedSiteCouldNotBeFound) return hasErrorMessage(err, requestedSiteCouldNotBeFound)
} }
func IsErrResourceLocked(err error) bool {
return errors.Is(err, ErrResourceLocked) ||
hasInnerErrorCode(err, ResourceLocked) ||
hasErrorCode(err, NotAllowed)
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// error parsers // error parsers
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -291,6 +312,34 @@ func hasErrorCode(err error, codes ...errorCode) bool {
return filters.Equal(cs).Compare(code) return filters.Equal(cs).Compare(code)
} }
func hasInnerErrorCode(err error, codes ...errorCode) bool {
if err == nil {
return false
}
var oDataError odataerrors.ODataErrorable
if !errors.As(err, &oDataError) {
return false
}
inner := oDataError.GetErrorEscaped().GetInnerError()
if inner == nil {
return false
}
code, err := str.AnyValueToString("code", inner.GetAdditionalData())
if err != nil {
return false
}
cs := make([]string, len(codes))
for i, c := range codes {
cs[i] = string(c)
}
return filters.Equal(cs).Compare(code)
}
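For context on why IsErrResourceLocked needs both checks: Graph can nest the lock-out signal one level down, on innerError.code (surfaced here through GetAdditionalData), while the top-level code may only read notAllowed. A self-contained sketch of that payload shape and the two-level match; the JSON is illustrative, not a verbatim Graph response:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// An illustrative, trimmed-down OData error payload; the lock-out
// signal rides on innerError.code while the top-level code may only
// read "notAllowed", hence the two-level match.
const payload = `{
  "error": {
    "code": "notAllowed",
    "message": "The requested resource is locked.",
    "innerError": { "code": "resourceLocked" }
  }
}`

type graphErr struct {
	Error struct {
		Code       string `json:"code"`
		InnerError struct {
			Code string `json:"code"`
		} `json:"innerError"`
	} `json:"error"`
}

func main() {
	var ge graphErr
	if err := json.Unmarshal([]byte(payload), &ge); err != nil {
		panic(err)
	}

	locked := ge.Error.InnerError.Code == "resourceLocked" ||
		ge.Error.Code == "notAllowed"
	fmt.Println("resource locked:", locked) // true
}
```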
// only use this as a last resort. Prefer the code or statuscode if possible. // only use this as a last resort. Prefer the code or statuscode if possible.
func hasErrorMessage(err error, msgs ...errorMessage) bool { func hasErrorMessage(err error, msgs ...errorMessage) bool {
if err == nil { if err == nil {

View File

@ -478,11 +478,16 @@ func (suite *GraphErrorsUnitSuite) TestIsErrUnauthorized() {
expect: assert.False, expect: assert.False,
}, },
{ {
name: "as", name: "graph 401",
err: clues.Stack(assert.AnError). err: clues.Stack(assert.AnError).
Label(LabelStatus(http.StatusUnauthorized)), Label(LabelStatus(http.StatusUnauthorized)),
expect: assert.True, expect: assert.True,
}, },
{
name: "token expired",
err: clues.Stack(assert.AnError, ErrTokenExpired),
expect: assert.True,
},
} }
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
@ -808,3 +813,57 @@ func (suite *GraphErrorsUnitSuite) TestIsErrItemNotFound() {
}) })
} }
} }
func (suite *GraphErrorsUnitSuite) TestIsErrResourceLocked() {
innerMatch := odErr("not-match")
merr := odataerrors.NewMainError()
inerr := odataerrors.NewInnerError()
inerr.SetAdditionalData(map[string]any{
"code": string(ResourceLocked),
})
merr.SetInnerError(inerr)
merr.SetCode(ptr.To("not-match"))
innerMatch.SetErrorEscaped(merr)
table := []struct {
name string
err error
expect assert.BoolAssertionFunc
}{
{
name: "nil",
err: nil,
expect: assert.False,
},
{
name: "non-matching",
err: assert.AnError,
expect: assert.False,
},
{
name: "non-matching oDataErr",
err: odErrMsg("InvalidRequest", "resource is locked"),
expect: assert.False,
},
{
name: "matching oDataErr code",
err: odErr(string(NotAllowed)),
expect: assert.True,
},
{
name: "matching oDataErr inner code",
err: innerMatch,
expect: assert.True,
},
{
name: "matching err sentinel",
err: ErrResourceLocked,
expect: assert.True,
},
}
for _, test := range table {
suite.Run(test.name, func() {
test.expect(suite.T(), IsErrResourceLocked(test.err))
})
}
}

View File

@ -82,7 +82,7 @@ func (hw httpWrapper) Request(
body io.Reader, body io.Reader,
headers map[string]string, headers map[string]string,
) (*http.Response, error) { ) (*http.Response, error) {
req, err := http.NewRequest(method, url, body) req, err := http.NewRequestWithContext(ctx, method, url, body)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "new http request") return nil, clues.Wrap(err, "new http request")
} }
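Switching to http.NewRequestWithContext means cancellation and deadlines on ctx now propagate into the outbound request instead of being dropped at construction time. A minimal sketch of what that buys:

```go
package main

import (
	"context"
	"fmt"
	"net/http"
	"time"
)

func main() {
	// A deadline on the context now bounds the request itself.
	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
	defer cancel()

	req, err := http.NewRequestWithContext(ctx, http.MethodGet, "https://example.com", nil)
	if err != nil {
		panic(err)
	}

	// If the deadline elapses mid-flight, Do returns a context error
	// instead of hanging on a stalled connection.
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()

	fmt.Println("status:", resp.Status)
}
```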

View File

@ -57,7 +57,7 @@ func (mce MetadataCollectionEntry) toMetadataItem() (metadataItem, error) {
return metadataItem{}, clues.Wrap(err, "serializing metadata") return metadataItem{}, clues.Wrap(err, "serializing metadata")
} }
item, err := data.NewUnindexedPrefetchedItem( item, err := data.NewPrefetchedItem(
io.NopCloser(buf), io.NopCloser(buf),
mce.fileName, mce.fileName,
time.Now()) time.Now())

View File

@ -70,7 +70,7 @@ func (suite *MetadataCollectionUnitSuite) TestItems() {
items := []metadataItem{} items := []metadataItem{}
for i := 0; i < len(itemNames); i++ { for i := 0; i < len(itemNames); i++ {
item, err := data.NewUnindexedPrefetchedItem( item, err := data.NewPrefetchedItem(
io.NopCloser(bytes.NewReader(itemData[i])), io.NopCloser(bytes.NewReader(itemData[i])),
itemNames[i], itemNames[i],
time.Time{}) time.Time{})

View File

@ -304,21 +304,21 @@ func checkContact(
// assert.Equal(t, expected.GetBusinessPhones(), got.GetBusinessPhones()) // assert.Equal(t, expected.GetBusinessPhones(), got.GetBusinessPhones())
// TODO(ashmrtn): Remove this when we properly set and handle categories in // TODO(ashmrtn): Remove this when we properly set and handle categories in
// addition to folders for contacts. // addition to folders for contacts. See #2785 and #3550.
folders := colPath.Folder(false) // folders := colPath.Folder(false)
gotCategories := []string{} // gotCategories := []string{}
for _, cat := range got.GetCategories() { // for _, cat := range got.GetCategories() {
// Don't add a category for the current folder since we didn't create the // // Don't add a category for the current folder since we didn't create the
// item with it and it throws off our comparisons. // // item with it and it throws off our comparisons.
if cat == folders { // if cat == folders {
continue // continue
} // }
gotCategories = append(gotCategories, cat) // gotCategories = append(gotCategories, cat)
} // }
assert.ElementsMatch(t, expected.GetCategories(), gotCategories, "Categories") // assert.ElementsMatch(t, expected.GetCategories(), gotCategories, "Categories")
// Skip ChangeKey as it's tied to this specific instance of the item. // Skip ChangeKey as it's tied to this specific instance of the item.

View File

@ -90,6 +90,7 @@ func (ctrl Controller) ProduceExportCollections(
_ control.ExportConfig, _ control.ExportConfig,
_ control.Options, _ control.Options,
_ []data.RestoreCollection, _ []data.RestoreCollection,
_ *data.ExportStats,
_ *fault.Bus, _ *fault.Bus,
) ([]export.Collectioner, error) { ) ([]export.Collectioner, error) {
return nil, ctrl.Err return nil, ctrl.Err
@ -99,8 +100,7 @@ func (ctrl Controller) PopulateProtectedResourceIDAndName(
ctx context.Context, ctx context.Context,
protectedResource string, // input value, can be either id or name protectedResource string, // input value, can be either id or name
ins idname.Cacher, ins idname.Cacher,
) (string, string, error) { ) (idname.Provider, error) {
return ctrl.ProtectedResourceID, return idname.NewProvider(ctrl.ProtectedResourceID, ctrl.ProtectedResourceName),
ctrl.ProtectedResourceName,
ctrl.ProtectedResourceErr ctrl.ProtectedResourceErr
} }

View File

@ -93,7 +93,7 @@ func ProduceBackupCollections(
} }
for _, s := range sites { for _, s := range sites {
pr := idname.NewProvider(ptr.Val(s.GetId()), ptr.Val(s.GetName())) pr := idname.NewProvider(ptr.Val(s.GetId()), ptr.Val(s.GetWebUrl()))
sbpc := inject.BackupProducerConfig{ sbpc := inject.BackupProducerConfig{
LastBackupVersion: bpc.LastBackupVersion, LastBackupVersion: bpc.LastBackupVersion,
Options: bpc.Options, Options: bpc.Options,

View File

@ -29,6 +29,7 @@ func ProduceExportCollections(
backupDriveIDNames idname.Cacher, backupDriveIDNames idname.Cacher,
backupSiteIDWebURL idname.Cacher, backupSiteIDWebURL idname.Cacher,
deets *details.Builder, deets *details.Builder,
stats *data.ExportStats,
errs *fault.Bus, errs *fault.Bus,
) ([]export.Collectioner, error) { ) ([]export.Collectioner, error) {
var ( var (
@ -52,7 +53,8 @@ func ProduceExportCollections(
path.Builder{}.Append(folders...).String(), path.Builder{}.Append(folders...).String(),
[]data.RestoreCollection{restoreColl}, []data.RestoreCollection{restoreColl},
backupVersion, backupVersion,
exportCfg) exportCfg,
stats)
case path.LibrariesCategory: case path.LibrariesCategory:
drivePath, err := path.ToDrivePath(restoreColl.FullPath()) drivePath, err := path.ToDrivePath(restoreColl.FullPath())
if err != nil { if err != nil {
@ -91,7 +93,8 @@ func ProduceExportCollections(
coll = drive.NewExportCollection( coll = drive.NewExportCollection(
baseDir.String(), baseDir.String(),
[]data.RestoreCollection{restoreColl}, []data.RestoreCollection{restoreColl},
backupVersion) backupVersion,
stats)
default: default:
el.AddRecoverable( el.AddRecoverable(
ctx, ctx,

View File

@ -7,6 +7,7 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
@ -64,8 +65,8 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_messages() {
itemID = "itemID" itemID = "itemID"
containerName = "channelID" containerName = "channelID"
dii = groupMock.ItemInfo() dii = groupMock.ItemInfo()
body = io.NopCloser(bytes.NewBufferString( content = `{"displayname": "` + dii.Groups.ItemName + `"}`
`{"displayname": "` + dii.Groups.ItemName + `"}`)) body = io.NopCloser(bytes.NewBufferString(content))
exportCfg = control.ExportConfig{} exportCfg = control.ExportConfig{}
expectedPath = path.ChannelMessagesCategory.HumanString() + "/" + containerName expectedPath = path.ChannelMessagesCategory.HumanString() + "/" + containerName
expectedItems = []export.Item{ expectedItems = []export.Item{
@ -96,6 +97,8 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_messages() {
}, },
} }
stats := data.ExportStats{}
ecs, err := ProduceExportCollections( ecs, err := ProduceExportCollections(
ctx, ctx,
int(version.Backup), int(version.Backup),
@ -105,6 +108,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_messages() {
nil, nil,
nil, nil,
nil, nil,
&stats,
fault.New(true)) fault.New(true))
assert.NoError(t, err, "export collections error") assert.NoError(t, err, "export collections error")
assert.Len(t, ecs, 1, "num of collections") assert.Len(t, ecs, 1, "num of collections")
@ -113,7 +117,15 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_messages() {
fitems := []export.Item{} fitems := []export.Item{}
size := 0
for item := range ecs[0].Items(ctx) { for item := range ecs[0].Items(ctx) {
b, err := io.ReadAll(item.Body)
assert.NoError(t, err, clues.ToCore(err))
// count up size for tests
size += len(b)
// have to nil out body, otherwise assert fails due to // have to nil out body, otherwise assert fails due to
// pointer memory location differences // pointer memory location differences
item.Body = nil item.Body = nil
@ -121,6 +133,11 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_messages() {
} }
assert.Equal(t, expectedItems, fitems, "items") assert.Equal(t, expectedItems, fitems, "items")
expectedStats := data.ExportStats{}
expectedStats.UpdateBytes(path.ChannelMessagesCategory, int64(size))
expectedStats.UpdateResourceCount(path.ChannelMessagesCategory)
assert.Equal(t, expectedStats, stats, "stats")
} }
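The test loops in this file drain each item body before asserting because export collections wrap bodies in a stats-tracking reader: byte counts land in data.ExportStats as the consumer reads, not when the collection is built. A self-contained sketch of that wrapper pattern; countingReader is a hypothetical stand-in for the real reader:

```go
package main

import (
	"bytes"
	"fmt"
	"io"
)

// countingReader tallies bytes as the consumer reads them, so totals
// are only accurate once the body has been fully drained.
type countingReader struct {
	inner io.Reader
	total *int64
}

func (c countingReader) Read(p []byte) (int, error) {
	n, err := c.inner.Read(p)
	*c.total += int64(n)
	return n, err
}

func main() {
	var total int64
	body := countingReader{inner: bytes.NewBufferString("hello world"), total: &total}

	fmt.Println("before read:", total) // 0: nothing consumed yet
	_, _ = io.ReadAll(body)
	fmt.Println("after read:", total) // 11: tallied during consumption
}
```

This is also why the expected stats below are computed from the bytes actually read in the loop rather than from fixture sizes.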
func (suite *ExportUnitSuite) TestExportRestoreCollections_libraries() { func (suite *ExportUnitSuite) TestExportRestoreCollections_libraries() {
@ -182,6 +199,8 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_libraries() {
}, },
} }
stats := data.ExportStats{}
ecs, err := ProduceExportCollections( ecs, err := ProduceExportCollections(
ctx, ctx,
int(version.Backup), int(version.Backup),
@ -191,6 +210,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_libraries() {
driveNameCache, driveNameCache,
siteWebURLCache, siteWebURLCache,
nil, nil,
&stats,
fault.New(true)) fault.New(true))
assert.NoError(t, err, "export collections error") assert.NoError(t, err, "export collections error")
assert.Len(t, ecs, 1, "num of collections") assert.Len(t, ecs, 1, "num of collections")
@ -199,9 +219,24 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_libraries() {
fitems := []export.Item{} fitems := []export.Item{}
size := 0
for item := range ecs[0].Items(ctx) { for item := range ecs[0].Items(ctx) {
// unwrap the body from the stats reader
b, err := io.ReadAll(item.Body)
assert.NoError(t, err, clues.ToCore(err))
size += len(b)
bitem := io.NopCloser(bytes.NewBuffer(b))
item.Body = bitem
fitems = append(fitems, item) fitems = append(fitems, item)
} }
assert.Equal(t, expectedItems, fitems, "items") assert.Equal(t, expectedItems, fitems, "items")
expectedStats := data.ExportStats{}
expectedStats.UpdateBytes(path.FilesCategory, int64(size))
expectedStats.UpdateResourceCount(path.FilesCategory)
assert.Equal(t, expectedStats, stats, "stats")
} }

View File

@ -51,7 +51,7 @@ func ProduceBackupCollections(
nc := drive.NewCollections( nc := drive.NewCollections(
drive.NewItemBackupHandler(ac.Drives(), bpc.ProtectedResource.ID(), scope), drive.NewItemBackupHandler(ac.Drives(), bpc.ProtectedResource.ID(), scope),
tenant, tenant,
bpc.ProtectedResource.ID(), bpc.ProtectedResource,
su, su,
bpc.Options) bpc.Options)

View File

@ -30,6 +30,10 @@ func IsServiceEnabled(
return false, clues.Stack(graph.ErrResourceOwnerNotFound, err) return false, clues.Stack(graph.ErrResourceOwnerNotFound, err)
} }
if graph.IsErrResourceLocked(err) {
return false, clues.Stack(graph.ErrResourceLocked, err)
}
return false, clues.Stack(err) return false, clues.Stack(err)
} }
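Since the lock-out is stacked onto the graph.ErrResourceLocked sentinel, SDK consumers can branch on it with errors.Is, per the changelog note. A self-contained sketch of the wrap-and-check pattern using the standard errors package; clues.Stack acts as error wrapping for this purpose:

```go
package main

import (
	"errors"
	"fmt"
)

// Stand-in for the exported sentinel; consumers compare against it
// rather than parsing Graph payloads themselves.
var errResourceLocked = errors.New("resource has been locked and must be unlocked by an administrator")

func isServiceEnabled() (bool, error) {
	apiErr := errors.New("graph: notAllowed") // whatever Graph returned
	// Stack the sentinel on top so callers can detect the case.
	return false, fmt.Errorf("%w: %w", errResourceLocked, apiErr)
}

func main() {
	ok, err := isServiceEnabled()
	if errors.Is(err, errResourceLocked) {
		fmt.Println("enabled:", ok, "- resource is locked; ask an admin to unlock it")
	}
}
```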

View File

@ -105,6 +105,17 @@ func (suite *EnabledUnitSuite) TestIsServiceEnabled() {
assert.Error(t, err, clues.ToCore(err)) assert.Error(t, err, clues.ToCore(err))
}, },
}, },
{
name: "resource locked",
mock: func(ctx context.Context) getDefaultDriver {
odErr := odErrMsg(string(graph.NotAllowed), "resource")
return mockDGDD{nil, graph.Stack(ctx, odErr)}
},
expect: assert.False,
expectErr: func(t *testing.T, err error) {
assert.Error(t, err, clues.ToCore(err))
},
},
{ {
name: "arbitrary error", name: "arbitrary error",
mock: func(ctx context.Context) getDefaultDriver { mock: func(ctx context.Context) getDefaultDriver {

View File

@ -23,6 +23,7 @@ func ProduceExportCollections(
opts control.Options, opts control.Options,
dcs []data.RestoreCollection, dcs []data.RestoreCollection,
deets *details.Builder, deets *details.Builder,
stats *data.ExportStats,
errs *fault.Bus, errs *fault.Bus,
) ([]export.Collectioner, error) { ) ([]export.Collectioner, error) {
var ( var (
@ -43,7 +44,8 @@ func ProduceExportCollections(
drive.NewExportCollection( drive.NewExportCollection(
baseDir.String(), baseDir.String(),
[]data.RestoreCollection{dc}, []data.RestoreCollection{dc},
backupVersion)) backupVersion,
stats))
} }
return ec, el.Failure() return ec, el.Failure()

View File

@ -6,6 +6,7 @@ import (
"io" "io"
"testing" "testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
@ -19,6 +20,7 @@ import (
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/export" "github.com/alcionai/corso/src/pkg/export"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
) )
type ExportUnitSuite struct { type ExportUnitSuite struct {
@ -245,15 +247,32 @@ func (suite *ExportUnitSuite) TestGetItems() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
stats := data.ExportStats{}
ec := drive.NewExportCollection( ec := drive.NewExportCollection(
"", "",
[]data.RestoreCollection{test.backingCollection}, []data.RestoreCollection{test.backingCollection},
test.version) test.version,
&stats)
items := ec.Items(ctx) items := ec.Items(ctx)
count := 0
size := 0
fitems := []export.Item{} fitems := []export.Item{}
for item := range items { for item := range items {
if item.Error == nil {
count++
}
if item.Body != nil {
b, err := io.ReadAll(item.Body)
assert.NoError(t, err, clues.ToCore(err))
size += len(b)
item.Body = io.NopCloser(bytes.NewBuffer(b))
}
fitems = append(fitems, item) fitems = append(fitems, item)
} }
@ -268,6 +287,19 @@ func (suite *ExportUnitSuite) TestGetItems() {
assert.Equal(t, test.expectedItems[i].Body, item.Body, "body") assert.Equal(t, test.expectedItems[i].Body, item.Body, "body")
assert.ErrorIs(t, item.Error, test.expectedItems[i].Error) assert.ErrorIs(t, item.Error, test.expectedItems[i].Error)
} }
var expectedStats data.ExportStats
if size+count > 0 { // stats are only initialized if something was read
expectedStats = data.ExportStats{}
expectedStats.UpdateBytes(path.FilesCategory, int64(size))
for i := 0; i < count; i++ {
expectedStats.UpdateResourceCount(path.FilesCategory)
}
}
assert.Equal(t, expectedStats, stats, "stats")
}) })
} }
} }
@ -312,6 +344,8 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
}, },
} }
stats := data.ExportStats{}
ecs, err := ProduceExportCollections( ecs, err := ProduceExportCollections(
ctx, ctx,
int(version.Backup), int(version.Backup),
@ -319,14 +353,30 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
control.DefaultOptions(), control.DefaultOptions(),
dcs, dcs,
nil, nil,
&stats,
fault.New(true)) fault.New(true))
assert.NoError(t, err, "export collections error") assert.NoError(t, err, "export collections error")
assert.Len(t, ecs, 1, "num of collections") assert.Len(t, ecs, 1, "num of collections")
fitems := []export.Item{} fitems := []export.Item{}
size := 0
for item := range ecs[0].Items(ctx) { for item := range ecs[0].Items(ctx) {
// unwrap the body from the stats reader
b, err := io.ReadAll(item.Body)
assert.NoError(t, err, clues.ToCore(err))
size += len(b)
bitem := io.NopCloser(bytes.NewBuffer(b))
item.Body = bitem
fitems = append(fitems, item) fitems = append(fitems, item)
} }
assert.Equal(t, expectedItems, fitems, "items") assert.Equal(t, expectedItems, fitems, "items")
expectedStats := data.ExportStats{}
expectedStats.UpdateBytes(path.FilesCategory, int64(size))
expectedStats.UpdateResourceCount(path.FilesCategory)
assert.Equal(t, expectedStats, stats, "stats")
} }

View File

@ -8,11 +8,14 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/drives" "github.com/microsoftgraph/msgraph-sdk-go/drives"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts" odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
) )
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -22,6 +25,8 @@ import (
type BackupHandler struct { type BackupHandler struct {
ItemInfo details.ItemInfo ItemInfo details.ItemInfo
DriveItemEnumeration EnumeratesDriveItemsDelta
GI GetsItem GI GetsItem
GIP GetsItemPermission GIP GetsItemPermission
@ -34,7 +39,7 @@ type BackupHandler struct {
CanonPathFn canonPather CanonPathFn canonPather
CanonPathErr error CanonPathErr error
ResourceOwner string ProtectedResource idname.Provider
Service path.ServiceType Service path.ServiceType
Category path.CategoryType Category path.CategoryType
@ -55,12 +60,13 @@ func DefaultOneDriveBH(resourceOwner string) *BackupHandler {
OneDrive: &details.OneDriveInfo{}, OneDrive: &details.OneDriveInfo{},
Extension: &details.ExtensionData{}, Extension: &details.ExtensionData{},
}, },
DriveItemEnumeration: EnumeratesDriveItemsDelta{},
GI: GetsItem{Err: clues.New("not defined")}, GI: GetsItem{Err: clues.New("not defined")},
GIP: GetsItemPermission{Err: clues.New("not defined")}, GIP: GetsItemPermission{Err: clues.New("not defined")},
PathPrefixFn: defaultOneDrivePathPrefixer, PathPrefixFn: defaultOneDrivePathPrefixer,
MetadataPathPrefixFn: defaultOneDriveMetadataPathPrefixer, MetadataPathPrefixFn: defaultOneDriveMetadataPathPrefixer,
CanonPathFn: defaultOneDriveCanonPather, CanonPathFn: defaultOneDriveCanonPather,
ResourceOwner: resourceOwner, ProtectedResource: idname.NewProvider(resourceOwner, resourceOwner),
Service: path.OneDriveService, Service: path.OneDriveService,
Category: path.FilesCategory, Category: path.FilesCategory,
LocationIDFn: defaultOneDriveLocationIDer, LocationIDFn: defaultOneDriveLocationIDer,
@ -80,7 +86,7 @@ func DefaultSharePointBH(resourceOwner string) *BackupHandler {
PathPrefixFn: defaultSharePointPathPrefixer, PathPrefixFn: defaultSharePointPathPrefixer,
MetadataPathPrefixFn: defaultSharePointMetadataPathPrefixer, MetadataPathPrefixFn: defaultSharePointMetadataPathPrefixer,
CanonPathFn: defaultSharePointCanonPather, CanonPathFn: defaultSharePointCanonPather,
ResourceOwner: resourceOwner, ProtectedResource: idname.NewProvider(resourceOwner, resourceOwner),
Service: path.SharePointService, Service: path.SharePointService,
Category: path.LibrariesCategory, Category: path.LibrariesCategory,
LocationIDFn: defaultSharePointLocationIDer, LocationIDFn: defaultSharePointLocationIDer,
@ -90,7 +96,7 @@ func DefaultSharePointBH(resourceOwner string) *BackupHandler {
} }
func (h BackupHandler) PathPrefix(tID, driveID string) (path.Path, error) { func (h BackupHandler) PathPrefix(tID, driveID string) (path.Path, error) {
pp, err := h.PathPrefixFn(tID, h.ResourceOwner, driveID) pp, err := h.PathPrefixFn(tID, h.ProtectedResource.ID(), driveID)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -99,7 +105,7 @@ func (h BackupHandler) PathPrefix(tID, driveID string) (path.Path, error) {
} }
func (h BackupHandler) MetadataPathPrefix(tID string) (path.Path, error) { func (h BackupHandler) MetadataPathPrefix(tID string) (path.Path, error) {
pp, err := h.MetadataPathPrefixFn(tID, h.ResourceOwner) pp, err := h.MetadataPathPrefixFn(tID, h.ProtectedResource.ID())
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -108,7 +114,7 @@ func (h BackupHandler) MetadataPathPrefix(tID string) (path.Path, error) {
} }
func (h BackupHandler) CanonicalPath(pb *path.Builder, tID string) (path.Path, error) { func (h BackupHandler) CanonicalPath(pb *path.Builder, tID string) (path.Path, error) {
cp, err := h.CanonPathFn(pb, tID, h.ResourceOwner) cp, err := h.CanonPathFn(pb, tID, h.ProtectedResource.ID())
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -124,10 +130,6 @@ func (h BackupHandler) NewDrivePager(string, []string) api.Pager[models.Driveabl
return h.DrivePagerV return h.DrivePagerV
} }
func (h BackupHandler) NewItemPager(driveID string, _ string, _ []string) api.DeltaPager[models.DriveItemable] {
return h.ItemPagerV[driveID]
}
func (h BackupHandler) FormatDisplayPath(_ string, pb *path.Builder) string { func (h BackupHandler) FormatDisplayPath(_ string, pb *path.Builder) string {
return "/" + pb.String() return "/" + pb.String()
} }
@ -136,7 +138,13 @@ func (h BackupHandler) NewLocationIDer(driveID string, elems ...string) details.
return h.LocationIDFn(driveID, elems...) return h.LocationIDFn(driveID, elems...)
} }
func (h BackupHandler) AugmentItemInfo(details.ItemInfo, models.DriveItemable, int64, *path.Builder) details.ItemInfo { func (h BackupHandler) AugmentItemInfo(
details.ItemInfo,
idname.Provider,
models.DriveItemable,
int64,
*path.Builder,
) details.ItemInfo {
return h.ItemInfo return h.ItemInfo
} }
@ -152,6 +160,18 @@ func (h *BackupHandler) Get(context.Context, string, map[string]string) (*http.R
return h.GetResps[c], h.GetErrs[c] return h.GetResps[c], h.GetErrs[c]
} }
func (h BackupHandler) EnumerateDriveItemsDelta(
ctx context.Context,
driveID, prevDeltaLink string,
selectProps []string,
) ([]models.DriveItemable, api.DeltaUpdate, error) {
return h.DriveItemEnumeration.EnumerateDriveItemsDelta(
ctx,
driveID,
prevDeltaLink,
selectProps)
}
func (h BackupHandler) GetItem(ctx context.Context, _, _ string) (models.DriveItemable, error) { func (h BackupHandler) GetItem(ctx context.Context, _, _ string) (models.DriveItemable, error) {
return h.GI.GetItem(ctx, "", "") return h.GI.GetItem(ctx, "", "")
} }
@ -254,6 +274,66 @@ func (m GetsItem) GetItem(
return m.Item, m.Err return m.Item, m.Err
} }
// ---------------------------------------------------------------------------
// Enumerates Drive Items
// ---------------------------------------------------------------------------
type EnumeratesDriveItemsDelta struct {
Items map[string][]models.DriveItemable
DeltaUpdate map[string]api.DeltaUpdate
Err map[string]error
}
func (edi EnumeratesDriveItemsDelta) EnumerateDriveItemsDelta(
_ context.Context,
driveID, _ string,
_ []string,
) (
[]models.DriveItemable,
api.DeltaUpdate,
error,
) {
return edi.Items[driveID], edi.DeltaUpdate[driveID], edi.Err[driveID]
}
func PagerResultToEDID(
m map[string][]apiMock.PagerResult[models.DriveItemable],
) EnumeratesDriveItemsDelta {
edi := EnumeratesDriveItemsDelta{
Items: map[string][]models.DriveItemable{},
DeltaUpdate: map[string]api.DeltaUpdate{},
Err: map[string]error{},
}
for driveID, results := range m {
var (
err error
items = []models.DriveItemable{}
deltaUpdate api.DeltaUpdate
)
for _, pr := range results {
items = append(items, pr.Values...)
if pr.DeltaLink != nil {
deltaUpdate = api.DeltaUpdate{URL: ptr.Val(pr.DeltaLink)}
}
if pr.Err != nil {
err = pr.Err
}
deltaUpdate.Reset = deltaUpdate.Reset || pr.ResetDelta
}
edi.Items[driveID] = items
edi.Err[driveID] = err
edi.DeltaUpdate[driveID] = deltaUpdate
}
return edi
}
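The mock keys all canned results by drive ID so a single handler can simulate several drives in one backup. A self-contained analog, with all names as stand-ins:

```go
package main

import "fmt"

// deltaUpdate mirrors the api.DeltaUpdate shape used above.
type deltaUpdate struct {
	URL   string
	Reset bool
}

// mockEnumerator is a stand-in for EnumeratesDriveItemsDelta: canned
// results keyed by drive ID, returned regardless of the other inputs.
type mockEnumerator struct {
	items  map[string][]string
	deltas map[string]deltaUpdate
	errs   map[string]error
}

func (m mockEnumerator) enumerate(driveID string) ([]string, deltaUpdate, error) {
	return m.items[driveID], m.deltas[driveID], m.errs[driveID]
}

func main() {
	m := mockEnumerator{
		items:  map[string][]string{"drive-1": {"file.txt", "subfolder"}},
		deltas: map[string]deltaUpdate{"drive-1": {URL: "next-delta-link"}},
		errs:   map[string]error{},
	}

	items, du, err := m.enumerate("drive-1")
	fmt.Println(items, du.URL, err) // [file.txt subfolder] next-delta-link <nil>
}
```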
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Get Item Permissioner // Get Item Permissioner
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -308,6 +388,7 @@ func (h RestoreHandler) NewDrivePager(string, []string) api.Pager[models.Driveab
func (h *RestoreHandler) AugmentItemInfo( func (h *RestoreHandler) AugmentItemInfo(
details.ItemInfo, details.ItemInfo,
idname.Provider,
models.DriveItemable, models.DriveItemable,
int64, int64,
*path.Builder, *path.Builder,

View File

@ -109,7 +109,7 @@ func (suite *SharePointPageSuite) TestRestoreSinglePage() {
//nolint:lll //nolint:lll
byteArray := spMock.Page("Byte Test") byteArray := spMock.Page("Byte Test")
pageData, err := data.NewUnindexedPrefetchedItem( pageData, err := data.NewPrefetchedItem(
io.NopCloser(bytes.NewReader(byteArray)), io.NopCloser(bytes.NewReader(byteArray)),
testName, testName,
time.Now()) time.Now())

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/m365/collection/drive" "github.com/alcionai/corso/src/internal/m365/collection/drive"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts" odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -90,11 +91,8 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
var ( var (
paths = map[string]string{} paths = map[string]string{}
newPaths = map[string]string{} currPaths = map[string]string{}
excluded = map[string]struct{}{} excluded = map[string]struct{}{}
itemColls = map[string]map[string]string{
driveID: {},
}
collMap = map[string]map[string]*drive.Collection{ collMap = map[string]map[string]*drive.Collection{
driveID: {}, driveID: {},
} }
@ -103,21 +101,20 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
c := drive.NewCollections( c := drive.NewCollections(
drive.NewLibraryBackupHandler(api.Drives{}, siteID, test.scope, path.SharePointService), drive.NewLibraryBackupHandler(api.Drives{}, siteID, test.scope, path.SharePointService),
tenantID, tenantID,
siteID, idname.NewProvider(siteID, siteID),
nil, nil,
control.DefaultOptions()) control.DefaultOptions())
c.CollectionMap = collMap c.CollectionMap = collMap
err := c.UpdateCollections( _, err := c.UpdateCollections(
ctx, ctx,
driveID, driveID,
"General", "General",
test.items, test.items,
paths, paths,
newPaths, currPaths,
excluded, excluded,
itemColls,
true, true,
fault.New(true)) fault.New(true))

View File

@ -26,6 +26,7 @@ func ProduceExportCollections(
dcs []data.RestoreCollection, dcs []data.RestoreCollection,
backupDriveIDNames idname.CacheBuilder, backupDriveIDNames idname.CacheBuilder,
deets *details.Builder, deets *details.Builder,
stats *data.ExportStats,
errs *fault.Bus, errs *fault.Bus,
) ([]export.Collectioner, error) { ) ([]export.Collectioner, error) {
var ( var (
@ -56,7 +57,8 @@ func ProduceExportCollections(
drive.NewExportCollection( drive.NewExportCollection(
baseDir.String(), baseDir.String(),
[]data.RestoreCollection{dc}, []data.RestoreCollection{dc},
backupVersion)) backupVersion,
stats))
} }
return ec, el.Failure() return ec, el.Failure()

View File

@ -7,6 +7,7 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
@ -98,6 +99,8 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
}, },
} }
stats := data.ExportStats{}
ecs, err := ProduceExportCollections( ecs, err := ProduceExportCollections(
ctx, ctx,
int(version.Backup), int(version.Backup),
@ -106,6 +109,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
dcs, dcs,
cache, cache,
nil, nil,
&stats,
fault.New(true)) fault.New(true))
assert.NoError(t, err, "export collections error") assert.NoError(t, err, "export collections error")
assert.Len(t, ecs, 1, "num of collections") assert.Len(t, ecs, 1, "num of collections")
@ -113,9 +117,24 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
assert.Equal(t, expectedPath, ecs[0].BasePath(), "base dir") assert.Equal(t, expectedPath, ecs[0].BasePath(), "base dir")
fitems := []export.Item{} fitems := []export.Item{}
size := 0
for item := range ecs[0].Items(ctx) { for item := range ecs[0].Items(ctx) {
// unwrap the body from the stats reader
b, err := io.ReadAll(item.Body)
assert.NoError(t, err, clues.ToCore(err))
size += len(b)
bitem := io.NopCloser(bytes.NewBuffer(b))
item.Body = bitem
fitems = append(fitems, item) fitems = append(fitems, item)
} }
assert.Equal(t, expectedItems, fitems, "items") assert.Equal(t, expectedItems, fitems, "items")
expectedStats := data.ExportStats{}
expectedStats.UpdateBytes(path.FilesCategory, int64(size))
expectedStats.UpdateResourceCount(path.FilesCategory)
assert.Equal(t, expectedStats, stats, "stats")
} }

View File

@ -247,15 +247,6 @@ func (op *BackupOperation) Run(ctx context.Context) (err error) {
"incremental", op.incremental, "incremental", op.incremental,
"disable_assist_backup", op.disableAssistBackup) "disable_assist_backup", op.disableAssistBackup)
op.bus.Event(
ctx,
events.BackupStart,
map[string]any{
events.StartTime: startTime,
events.Service: op.Selectors.Service.String(),
events.BackupID: op.Results.BackupID,
})
defer func() { defer func() {
if op.Errors.Failure() != nil { if op.Errors.Failure() != nil {
op.bus.Event( op.bus.Event(

View File

@ -27,6 +27,7 @@ import (
"github.com/alcionai/corso/src/pkg/export" "github.com/alcionai/corso/src/pkg/export"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/store" "github.com/alcionai/corso/src/pkg/store"
) )
@ -46,6 +47,7 @@ type ExportOperation struct {
Selectors selectors.Selector Selectors selectors.Selector
ExportCfg control.ExportConfig ExportCfg control.ExportConfig
Version string Version string
stats data.ExportStats
acct account.Account acct account.Account
ec inject.ExportConsumer ec inject.ExportConsumer
@ -72,6 +74,7 @@ func NewExportOperation(
Selectors: sel, Selectors: sel,
Version: "v0", Version: "v0",
ec: ec, ec: ec,
stats: data.ExportStats{},
} }
if err := op.validate(); err != nil { if err := op.validate(); err != nil {
return ExportOperation{}, err return ExportOperation{}, err
@ -242,16 +245,6 @@ func (op *ExportOperation) do(
"backup_snapshot_id", bup.SnapshotID, "backup_snapshot_id", bup.SnapshotID,
"backup_version", bup.Version) "backup_version", bup.Version)
op.bus.Event(
ctx,
events.ExportStart,
map[string]any{
events.StartTime: start,
events.BackupID: op.BackupID,
events.BackupCreateTime: bup.CreationTime,
events.ExportID: opStats.exportID,
})
observe.Message(ctx, fmt.Sprintf("Discovered %d items in backup %s to export", len(paths), op.BackupID)) observe.Message(ctx, fmt.Sprintf("Discovered %d items in backup %s to export", len(paths), op.BackupID))
kopiaComplete := observe.MessageWithCompletion(ctx, "Enumerating items in repository") kopiaComplete := observe.MessageWithCompletion(ctx, "Enumerating items in repository")
@ -270,7 +263,7 @@ func (op *ExportOperation) do(
opStats.resourceCount = 1 opStats.resourceCount = 1
opStats.cs = dcs opStats.cs = dcs
expCollections, err := exportRestoreCollections( expCollections, err := produceExportCollections(
ctx, ctx,
op.ec, op.ec,
bup.Version, bup.Version,
@ -278,6 +271,9 @@ func (op *ExportOperation) do(
op.ExportCfg, op.ExportCfg,
op.Options, op.Options,
dcs, dcs,
// We also have opStats, but that tracks different data.
// Maybe we can look into merging them some time in the future.
&op.stats,
op.Errors) op.Errors)
if err != nil { if err != nil {
return nil, clues.Stack(err) return nil, clues.Stack(err)
@ -333,11 +329,19 @@ func (op *ExportOperation) finalizeMetrics(
return op.Errors.Failure() return op.Errors.Failure()
} }
// GetStats returns the stats of the export operation. Only call this
// after the export collections have been read and processed, since the
// stats reported here cover only the data that was actually read.
func (op *ExportOperation) GetStats() map[path.CategoryType]data.KindStats {
return op.stats.GetStats()
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Exporter funcs // Exporter funcs
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
func exportRestoreCollections( func produceExportCollections(
ctx context.Context, ctx context.Context,
ec inject.ExportConsumer, ec inject.ExportConsumer,
backupVersion int, backupVersion int,
@ -345,6 +349,7 @@ func exportRestoreCollections(
exportCfg control.ExportConfig, exportCfg control.ExportConfig,
opts control.Options, opts control.Options,
dcs []data.RestoreCollection, dcs []data.RestoreCollection,
exportStats *data.ExportStats,
errs *fault.Bus, errs *fault.Bus,
) ([]export.Collectioner, error) { ) ([]export.Collectioner, error) {
complete := observe.MessageWithCompletion(ctx, "Preparing export") complete := observe.MessageWithCompletion(ctx, "Preparing export")
@ -360,6 +365,7 @@ func exportRestoreCollections(
exportCfg, exportCfg,
opts, opts,
dcs, dcs,
exportStats,
errs) errs)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "exporting collections") return nil, clues.Wrap(err, "exporting collections")

View File

@ -34,7 +34,7 @@ func ControllerWithSelector(
t.FailNow() t.FailNow()
} }
id, name, err := ctrl.PopulateProtectedResourceIDAndName(ctx, sel.DiscreteOwner, ins) resource, err := ctrl.PopulateProtectedResourceIDAndName(ctx, sel.DiscreteOwner, ins)
if !assert.NoError(t, err, clues.ToCore(err)) { if !assert.NoError(t, err, clues.ToCore(err)) {
if onFail != nil { if onFail != nil {
onFail() onFail()
@ -43,7 +43,7 @@ func ControllerWithSelector(
t.FailNow() t.FailNow()
} }
sel = sel.SetDiscreteOwnerIDName(id, name) sel = sel.SetDiscreteOwnerIDName(resource.ID(), resource.Name())
return ctrl, sel return ctrl, sel
} }

View File

@ -50,7 +50,7 @@ func LogFaultErrors(ctx context.Context, fe *fault.Errors, prefix string) {
var ( var (
log = logger.Ctx(ctx) log = logger.Ctx(ctx)
pfxMsg = prefix + ":" pfxMsg = prefix + ":"
li, ls, lr = len(fe.Items), len(fe.Skipped), len(fe.Recovered) li, ls, lr, la = len(fe.Items), len(fe.Skipped), len(fe.Recovered), len(fe.Alerts)
) )
if fe.Failure == nil && li+ls+lr == 0 { if fe.Failure == nil && li+ls+lr == 0 {
@ -73,4 +73,8 @@ func LogFaultErrors(ctx context.Context, fe *fault.Errors, prefix string) {
for i, err := range fe.Recovered { for i, err := range fe.Recovered {
log.With("recovered_error", err).Errorf("%s recoverable error %d of %d: %s", pfxMsg, i+1, lr, err.Msg) log.With("recovered_error", err).Errorf("%s recoverable error %d of %d: %s", pfxMsg, i+1, lr, err.Msg)
} }
for i, alert := range fe.Alerts {
log.With("alert", alert).Infof("%s alert %d of %d: %s", pfxMsg, i+1, la, alert.Message)
}
} }

View File

@ -88,6 +88,7 @@ type (
exportCfg control.ExportConfig, exportCfg control.ExportConfig,
opts control.Options, opts control.Options,
dcs []data.RestoreCollection, dcs []data.RestoreCollection,
stats *data.ExportStats,
errs *fault.Bus, errs *fault.Bus,
) ([]export.Collectioner, error) ) ([]export.Collectioner, error)
@ -109,10 +110,7 @@ type (
ctx context.Context, ctx context.Context,
owner string, // input value, can be either id or name owner string, // input value, can be either id or name
ins idname.Cacher, ins idname.Cacher,
) ( ) (idname.Provider, error)
id, name string,
err error,
)
} }
RepoMaintenancer interface { RepoMaintenancer interface {

View File

@ -57,13 +57,6 @@ func (op *MaintenanceOperation) Run(ctx context.Context) (err error) {
op.Results.StartedAt = time.Now() op.Results.StartedAt = time.Now()
op.bus.Event(
ctx,
events.MaintenanceStart,
map[string]any{
events.StartTime: op.Results.StartedAt,
})
defer func() { defer func() {
if op.Errors.Failure() != nil { if op.Errors.Failure() != nil {
op.bus.Event( op.bus.Event(

View File

@ -279,16 +279,6 @@ func (op *RestoreOperation) do(
"backup_snapshot_id", bup.SnapshotID, "backup_snapshot_id", bup.SnapshotID,
"backup_version", bup.Version) "backup_version", bup.Version)
op.bus.Event(
ctx,
events.RestoreStart,
map[string]any{
events.StartTime: start,
events.BackupID: op.BackupID,
events.BackupCreateTime: bup.CreationTime,
events.RestoreID: opStats.restoreID,
})
observe.Message(ctx, fmt.Sprintf("Discovered %d items in backup %s to restore", len(paths), op.BackupID)) observe.Message(ctx, fmt.Sprintf("Discovered %d items in backup %s to restore", len(paths), op.BackupID))
progressBar := observe.MessageWithCompletion(ctx, "Enumerating items in repository") progressBar := observe.MessageWithCompletion(ctx, "Enumerating items in repository")
@ -375,12 +365,12 @@ func chooseRestoreResource(
return orig, nil return orig, nil
} }
id, name, err := pprian.PopulateProtectedResourceIDAndName( resource, err := pprian.PopulateProtectedResourceIDAndName(
ctx, ctx,
restoreCfg.ProtectedResource, restoreCfg.ProtectedResource,
nil) nil)
return idname.NewProvider(id, name), clues.Stack(err).OrNil() return resource, clues.Stack(err).OrNil()
} }
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------

View File

@ -366,8 +366,6 @@ func (suite *RestoreOpIntegrationSuite) TestRestore_Run_errorNoBackup() {
require.Nil(t, ds, "restoreOp.Run() should not produce details") require.Nil(t, ds, "restoreOp.Run() should not produce details")
assert.Zero(t, ro.Results.ResourceOwners, "resource owners") assert.Zero(t, ro.Results.ResourceOwners, "resource owners")
assert.Zero(t, ro.Results.BytesRead, "bytes read") assert.Zero(t, ro.Results.BytesRead, "bytes read")
// no restore start, because we'd need to find the backup first.
assert.Equal(t, 0, mb.TimesCalled[events.RestoreStart], "restore-start events")
assert.Equal(t, 1, mb.TimesCalled[events.CorsoError], "corso-error events") assert.Equal(t, 1, mb.TimesCalled[events.CorsoError], "corso-error events")
assert.Equal(t, 1, mb.TimesCalled[events.RestoreEnd], "restore-end events") assert.Equal(t, 1, mb.TimesCalled[events.RestoreEnd], "restore-end events")
} }

View File

@ -76,28 +76,28 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
category path.CategoryType category path.CategoryType
metadataFiles [][]string metadataFiles [][]string
}{ }{
// { {
// name: "Mail", name: "Mail",
// selector: func() *selectors.ExchangeBackup { selector: func() *selectors.ExchangeBackup {
// sel := selectors.NewExchangeBackup([]string{suite.its.user.ID}) sel := selectors.NewExchangeBackup([]string{suite.its.user.ID})
// sel.Include(sel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch())) sel.Include(sel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch()))
// sel.DiscreteOwner = suite.its.user.ID sel.DiscreteOwner = suite.its.user.ID
// return sel return sel
// }, },
// category: path.EmailCategory, category: path.EmailCategory,
// metadataFiles: exchange.MetadataFileNames(path.EmailCategory), metadataFiles: MetadataFileNames(path.EmailCategory),
// }, },
// { {
// name: "Contacts", name: "Contacts",
// selector: func() *selectors.ExchangeBackup { selector: func() *selectors.ExchangeBackup {
// sel := selectors.NewExchangeBackup([]string{suite.its.user.ID}) sel := selectors.NewExchangeBackup([]string{suite.its.user.ID})
// sel.Include(sel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch())) sel.Include(sel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()))
// return sel return sel
// }, },
// category: path.ContactsCategory, category: path.ContactsCategory,
// metadataFiles: exchange.MetadataFileNames(path.ContactsCategory), metadataFiles: MetadataFileNames(path.ContactsCategory),
// }, },
{ {
name: "Calendar Events", name: "Calendar Events",
selector: func() *selectors.ExchangeBackup { selector: func() *selectors.ExchangeBackup {
@ -226,12 +226,8 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
assert.Equal(t, bo.Results.ResourceOwners, incBO.Results.ResourceOwners, "incremental backup resource owner") assert.Equal(t, bo.Results.ResourceOwners, incBO.Results.ResourceOwners, "incremental backup resource owner")
assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(bo.Errors.Failure())) assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(bo.Errors.Failure()))
assert.Empty(t, incBO.Errors.Recovered(), "count incremental recoverable/iteration errors") assert.Empty(t, incBO.Errors.Recovered(), "count incremental recoverable/iteration errors")
assert.Equal(t, 1, incMB.TimesCalled[events.BackupStart], "incremental backup-start events")
assert.Equal(t, 0, incMB.TimesCalled[events.CorsoError], "corso error events") assert.Equal(t, 0, incMB.TimesCalled[events.CorsoError], "corso error events")
assert.Equal(t, 1, incMB.TimesCalled[events.BackupEnd], "incremental backup-end events") assert.Equal(t, 1, incMB.TimesCalled[events.BackupEnd], "incremental backup-end events")
assert.Equal(t,
incMB.CalledWith[events.BackupStart][0][events.BackupID],
incBO.Results.BackupID, "incremental backupID pre-declaration")
}) })
} }
} }
@ -877,11 +873,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
// assert.Equal(t, test.nonMetaItemsWritten, incBO.Results.ItemsWritten, "non meta incremental items write") // assert.Equal(t, test.nonMetaItemsWritten, incBO.Results.ItemsWritten, "non meta incremental items write")
assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(incBO.Errors.Failure())) assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(incBO.Errors.Failure()))
assert.Empty(t, incBO.Errors.Recovered(), "incremental recoverable/iteration errors") assert.Empty(t, incBO.Errors.Recovered(), "incremental recoverable/iteration errors")
assert.Equal(t, 1, incMB.TimesCalled[events.BackupStart], "incremental backup-start events")
assert.Equal(t, 1, incMB.TimesCalled[events.BackupEnd], "incremental backup-end events") assert.Equal(t, 1, incMB.TimesCalled[events.BackupEnd], "incremental backup-end events")
assert.Equal(t,
incMB.CalledWith[events.BackupStart][0][events.BackupID],
bupID, "incremental backupID pre-declaration")
}) })
} }
} }

View File

@@ -226,18 +226,18 @@ func (suite *GroupsRestoreNightlyIntgSuite) TestRestore_Run_groupsWithAdvancedOp
 suite.its.group.RootSite.DriveRootFolderID)
 }

-func (suite *GroupsRestoreNightlyIntgSuite) TestRestore_Run_groupsAlternateProtectedResource() {
-	sel := selectors.NewGroupsBackup([]string{suite.its.group.ID})
-	sel.Include(selTD.GroupsBackupLibraryFolderScope(sel))
-	sel.Filter(sel.Library("documents"))
-	sel.DiscreteOwner = suite.its.group.ID
-	runDriveRestoreToAlternateProtectedResource(
-		suite.T(),
-		suite,
-		suite.its.ac,
-		sel.Selector,
-		suite.its.group.RootSite,
-		suite.its.secondaryGroup.RootSite,
-		suite.its.secondaryGroup.ID)
-}
+// func (suite *GroupsRestoreNightlyIntgSuite) TestRestore_Run_groupsAlternateProtectedResource() {
+// 	sel := selectors.NewGroupsBackup([]string{suite.its.group.ID})
+// 	sel.Include(selTD.GroupsBackupLibraryFolderScope(sel))
+// 	sel.Filter(sel.Library("documents"))
+// 	sel.DiscreteOwner = suite.its.group.ID
+// 	runDriveRestoreToAlternateProtectedResource(
+// 		suite.T(),
+// 		suite,
+// 		suite.its.ac,
+// 		sel.Selector,
+// 		suite.its.group.RootSite,
+// 		suite.its.secondaryGroup.RootSite,
+// 		suite.its.secondaryGroup.ID)
+// }


@@ -224,11 +224,7 @@ func runAndCheckBackup(
 assert.Equal(t, 1, bo.Results.ResourceOwners, "count of resource owners")
 assert.NoError(t, bo.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(bo.Errors.Failure()))
 assert.Empty(t, bo.Errors.Recovered(), "incremental recoverable/iteration errors")
-assert.Equal(t, 1, mb.TimesCalled[events.BackupStart], "backup-start events")
 assert.Equal(t, 1, mb.TimesCalled[events.BackupEnd], "backup-end events")
-assert.Equal(t,
-	mb.CalledWith[events.BackupStart][0][events.BackupID],
-	bo.Results.BackupID, "backupID pre-declaration")
 }

 func checkBackupIsInManifests(

@@ -550,7 +546,7 @@ func ControllerWithSelector(
 t.FailNow()
 }

-id, name, err := ctrl.PopulateProtectedResourceIDAndName(ctx, sel.DiscreteOwner, ins)
+resource, err := ctrl.PopulateProtectedResourceIDAndName(ctx, sel.DiscreteOwner, ins)
 if !assert.NoError(t, err, clues.ToCore(err)) {
 if onFail != nil {
 onFail(t, ctx)

@@ -559,7 +555,7 @@ func ControllerWithSelector(
 t.FailNow()
 }

-sel = sel.SetDiscreteOwnerIDName(id, name)
+sel = sel.SetDiscreteOwnerIDName(resource.ID(), resource.Name())

 return ctrl, sel
 }
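
The `ControllerWithSelector` change replaces the `(id, name, err)` tuple with a single resource value whose `ID()` and `Name()` accessors feed `SetDiscreteOwnerIDName`. A rough sketch of that return shape, with hypothetical type names standing in for the real Corso types:

```go
package main

import "fmt"

// idNamer mimics the accessor pair used at the new call site:
// resource.ID() and resource.Name().
type idNamer interface {
	ID() string
	Name() string
}

// protectedResource is a hypothetical concrete type; the real value
// returned by PopulateProtectedResourceIDAndName may differ.
type protectedResource struct{ id, name string }

func (r protectedResource) ID() string   { return r.id }
func (r protectedResource) Name() string { return r.name }

// populate stands in for ctrl.PopulateProtectedResourceIDAndName.
func populate(owner string) (idNamer, error) {
	return protectedResource{id: owner + "-id", name: owner + "-display-name"}, nil
}

func main() {
	resource, err := populate("group")
	if err != nil {
		panic(err)
	}

	// The selector now receives both values from one object.
	fmt.Println(resource.ID(), resource.Name())
}
```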


@@ -801,11 +801,7 @@ func runDriveIncrementalTest(
 assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(incBO.Errors.Failure()))
 assert.Empty(t, incBO.Errors.Recovered(), "incremental recoverable/iteration errors")
-assert.Equal(t, 1, incMB.TimesCalled[events.BackupStart], "incremental backup-start events")
 assert.Equal(t, 1, incMB.TimesCalled[events.BackupEnd], "incremental backup-end events")
-assert.Equal(t,
-	incMB.CalledWith[events.BackupStart][0][events.BackupID],
-	bupID, "incremental backupID pre-declaration")
 })
 }
 }

@@ -912,11 +908,7 @@ func (suite *OneDriveBackupIntgSuite) TestBackup_Run_oneDriveOwnerMigration() {
 assert.LessOrEqual(t, 2, incBO.Results.ItemsRead, "items read")
 assert.NoError(t, incBO.Errors.Failure(), "non-recoverable error", clues.ToCore(incBO.Errors.Failure()))
 assert.Empty(t, incBO.Errors.Recovered(), "recoverable/iteration errors")
-assert.Equal(t, 1, incMB.TimesCalled[events.BackupStart], "backup-start events")
 assert.Equal(t, 1, incMB.TimesCalled[events.BackupEnd], "backup-end events")
-assert.Equal(t,
-	incMB.CalledWith[events.BackupStart][0][events.BackupID],
-	incBO.Results.BackupID, "backupID pre-declaration")

 bid := incBO.Results.BackupID
 bup := &backup.Backup{}


@@ -205,7 +205,6 @@ func runAndCheckRestore(
 assert.NotZero(t, ro.Results.ItemsRead, "count of items read")
 assert.NotZero(t, ro.Results.BytesRead, "bytes read")
 assert.Equal(t, 1, ro.Results.ResourceOwners, "count of resource owners")
-assert.Equal(t, 1, mb.TimesCalled[events.RestoreStart], "restore-start events")
 assert.Equal(t, 1, mb.TimesCalled[events.RestoreEnd], "restore-end events")

 return deets


@@ -182,7 +182,7 @@ func collect(
 return nil, clues.Wrap(err, "marshalling body").WithClues(ctx)
 }

-item, err := data.NewUnindexedPrefetchedItem(
+item, err := data.NewPrefetchedItem(
 io.NopCloser(bytes.NewReader(bs)),
 col.itemName,
 time.Now())
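
`collect` now builds items through `data.NewPrefetchedItem` rather than `data.NewUnindexedPrefetchedItem`, keeping the same reader/name/modtime argument shape. A toy sketch of that construction pattern; `item` and `newPrefetchedItem` here are stand-ins, not the real `data` package API:

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"time"
)

// item is a toy stand-in for whatever data.NewPrefetchedItem returns.
type item struct {
	reader  io.ReadCloser
	name    string
	modTime time.Time
}

// newPrefetchedItem mirrors the reader/name/modtime argument shape of
// the call in the hunk above; it is not the real constructor.
func newPrefetchedItem(r io.ReadCloser, name string, mod time.Time) (*item, error) {
	return &item{reader: r, name: name, modTime: mod}, nil
}

func main() {
	bs := []byte(`{"hello":"world"}`)

	it, err := newPrefetchedItem(
		io.NopCloser(bytes.NewReader(bs)),
		"itemName",
		time.Now())
	if err != nil {
		panic(err)
	}

	body, _ := io.ReadAll(it.reader)
	fmt.Println(it.name, string(body))
}
```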


@@ -102,7 +102,7 @@ func New(
 switch true {
 case s.HasCause(fault.SkipMalware):
 malware++
-case s.HasCause(fault.SkipBigOneNote):
+case s.HasCause(fault.SkipOneNote):
 invalidONFile++
 default:
 otherSkips++
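
This hunk renames the skip cause from `fault.SkipBigOneNote` to `fault.SkipOneNote` inside a cause-tallying switch. A self-contained sketch of that tallying pattern, using placeholder cause constants and a placeholder `skipped` type rather than the real `fault` package:

```go
package main

import "fmt"

type skipCause string

// Placeholder causes; the real constants live in Corso's fault package.
const (
	skipMalware skipCause = "malware"
	skipOneNote skipCause = "invalid-onenote"
)

type skipped struct{ cause skipCause }

func (s skipped) HasCause(c skipCause) bool { return s.cause == c }

func main() {
	var malware, invalidONFile, otherSkips int

	for _, s := range []skipped{{skipMalware}, {skipOneNote}, {"unknown"}} {
		// Mirrors the switch in the hunk: the first matching cause wins.
		switch true {
		case s.HasCause(skipMalware):
			malware++
		case s.HasCause(skipOneNote):
			invalidONFile++
		default:
			otherSkips++
		}
	}

	fmt.Println(malware, invalidONFile, otherSkips) // 1 1 1
}
```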


@@ -16,6 +16,7 @@ const (
 ApplicationThrottled errEnum = "application-throttled"
 BackupNotFound errEnum = "backup-not-found"
 RepoAlreadyExists errEnum = "repository-already-exists"
+ResourceNotAccessible errEnum = "resource-not-accesible"
 ResourceOwnerNotFound errEnum = "resource-owner-not-found"
 ServiceNotEnabled errEnum = "service-not-enabled"
 )

@@ -27,6 +28,7 @@ var internalToExternal = map[errEnum][]error{
 ApplicationThrottled: {graph.ErrApplicationThrottled},
 BackupNotFound: {repository.ErrorBackupNotFound},
 RepoAlreadyExists: {repository.ErrorRepoAlreadyExists},
+ResourceNotAccessible: {graph.ErrResourceLocked},
 ResourceOwnerNotFound: {graph.ErrResourceOwnerNotFound},
 ServiceNotEnabled: {graph.ErrServiceNotEnabled},
 }
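
The new `ResourceNotAccessible` enum maps to `graph.ErrResourceLocked`, letting SDK consumers match the internal enum against external sentinel errors. A minimal sketch of how such an enum-to-sentinel lookup might work with `errors.Is`; the `is` helper and local sentinel below are illustrative, not the package's exported API:

```go
package main

import (
	"errors"
	"fmt"
)

type errEnum string

const ResourceNotAccessible errEnum = "resource-not-accessible"

// errResourceLocked stands in for graph.ErrResourceLocked.
var errResourceLocked = errors.New("resource locked")

// internalToExternal pairs each enum with the sentinel errors it
// matches, following the map in the hunk above.
var internalToExternal = map[errEnum][]error{
	ResourceNotAccessible: {errResourceLocked},
}

// is reports whether err wraps any sentinel registered for the enum.
func is(enum errEnum, err error) bool {
	for _, target := range internalToExternal[enum] {
		if errors.Is(err, target) {
			return true
		}
	}

	return false
}

func main() {
	err := fmt.Errorf("fetching site: %w", errResourceLocked)
	fmt.Println(is(ResourceNotAccessible, err)) // true
}
```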


@@ -29,6 +29,7 @@ func (suite *ErrUnitSuite) TestInternal() {
 {BackupNotFound, []error{repository.ErrorBackupNotFound}},
 {ServiceNotEnabled, []error{graph.ErrServiceNotEnabled}},
 {ResourceOwnerNotFound, []error{graph.ErrResourceOwnerNotFound}},
+{ResourceNotAccessible, []error{graph.ErrResourceLocked}},
 }
 for _, test := range table {
 suite.Run(string(test.get), func() {

@@ -46,6 +47,7 @@ func (suite *ErrUnitSuite) TestIs() {
 {BackupNotFound, repository.ErrorBackupNotFound},
 {ServiceNotEnabled, graph.ErrServiceNotEnabled},
 {ResourceOwnerNotFound, graph.ErrResourceOwnerNotFound},
+{ResourceNotAccessible, graph.ErrResourceLocked},
 }
 for _, test := range table {
 suite.Run(string(test.target), func() {
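
Both unit suites register the new enum/sentinel pair in their test tables. A generic, standard-library-only sketch of the same table-driven subtest pattern, with a stand-in sentinel in place of `graph.ErrResourceLocked`:

```go
package errs_test

import (
	"errors"
	"fmt"
	"testing"
)

// errResourceLocked is a stand-in sentinel; the real tests use
// graph.ErrResourceLocked.
var errResourceLocked = errors.New("resource locked")

func TestIs_tableDriven(t *testing.T) {
	table := []struct {
		name   string
		target error
	}{
		{"ResourceNotAccessible", errResourceLocked},
	}

	for _, test := range table {
		t.Run(test.name, func(t *testing.T) {
			// Wrap the sentinel, then confirm errors.Is still matches it.
			wrapped := fmt.Errorf("wrapped: %w", test.target)
			if !errors.Is(wrapped, test.target) {
				t.Errorf("expected %s to match its sentinel", test.name)
			}
		})
	}
}
```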

Some files were not shown because too many files have changed in this diff.