Merge branch 'main' into nestedAttachment

This commit is contained in:
neha_gupta 2023-06-27 13:13:32 +05:30 committed by GitHub
commit 93e1519ae7
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
93 changed files with 4405 additions and 1433 deletions

View File

@ -7,6 +7,27 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased] (beta) ## [Unreleased] (beta)
### Added
### Fixed
- Return a ServiceNotEnabled error when a tenant has no active SharePoint license.
## [v0.10.0] (beta) - 2023-06-26
### Added
- Exceptions and cancellations for recurring events are now backed up and restored
- Introduced a URL cache for OneDrive that helps reduce Graph API calls for long running (>1hr) backups
- Improve incremental backup behavior by leveraging information from incomplete backups
- Improve restore performance and memory use for Exchange and OneDrive
### Fixed
- Handle OLE conversion errors when trying to fetch attachments
- Fix uploading large attachments for emails and calendar
- Fixed high memory use in OneDrive backup related to logging
### Changed
- Switched to Go 1.20
## [v0.9.0] (beta) - 2023-06-05 ## [v0.9.0] (beta) - 2023-06-05
### Added ### Added
@ -18,7 +39,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Fix Exchange folder cache population error when parent folder isn't found. - Fix Exchange folder cache population error when parent folder isn't found.
- Fix Exchange backup issue caused by incorrect json serialization - Fix Exchange backup issue caused by incorrect json serialization
- Fix issues with details model containing duplicate entry for api consumers - Fix issues with details model containing duplicate entry for api consumers
- Handle OLE conversion errors when trying to fetch attachments
### Changed ### Changed
- Do not display all the items that we restored at the end if there are more than 15. You can override this with `--verbose`. - Do not display all the items that we restored at the end if there are more than 15. You can override this with `--verbose`.

View File

@ -9,6 +9,7 @@ import (
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
@ -58,31 +59,21 @@ func AddCommands(cmd *cobra.Command) {
// common flags and flag attachers for commands // common flags and flag attachers for commands
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// list output filter flags
var (
failedItemsFN = "failed-items"
listFailedItems string
skippedItemsFN = "skipped-items"
listSkippedItems string
recoveredErrorsFN = "recovered-errors"
listRecoveredErrors string
)
func addFailedItemsFN(cmd *cobra.Command) { func addFailedItemsFN(cmd *cobra.Command) {
cmd.Flags().StringVar( cmd.Flags().StringVar(
&listFailedItems, failedItemsFN, "show", &flags.ListFailedItemsFV, flags.FailedItemsFN, "show",
"Toggles showing or hiding the list of items that failed.") "Toggles showing or hiding the list of items that failed.")
} }
func addSkippedItemsFN(cmd *cobra.Command) { func addSkippedItemsFN(cmd *cobra.Command) {
cmd.Flags().StringVar( cmd.Flags().StringVar(
&listSkippedItems, skippedItemsFN, "show", &flags.ListSkippedItemsFV, flags.SkippedItemsFN, "show",
"Toggles showing or hiding the list of items that were skipped.") "Toggles showing or hiding the list of items that were skipped.")
} }
func addRecoveredErrorsFN(cmd *cobra.Command) { func addRecoveredErrorsFN(cmd *cobra.Command) {
cmd.Flags().StringVar( cmd.Flags().StringVar(
&listRecoveredErrors, recoveredErrorsFN, "show", &flags.ListRecoveredErrorsFV, flags.RecoveredErrorsFN, "show",
"Toggles showing or hiding the list of errors which corso recovered from.") "Toggles showing or hiding the list of errors which corso recovered from.")
} }
@ -318,7 +309,11 @@ func genericListCommand(cmd *cobra.Command, bID string, service path.ServiceType
} }
b.Print(ctx) b.Print(ctx)
fe.PrintItems(ctx, !ifShow(listFailedItems), !ifShow(listSkippedItems), !ifShow(listRecoveredErrors)) fe.PrintItems(
ctx,
!ifShow(flags.ListFailedItemsFV),
!ifShow(flags.ListSkippedItemsFV),
!ifShow(flags.ListRecoveredErrorsFV))
return nil return nil
} }

View File

@ -8,7 +8,7 @@ import (
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/pflag" "github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/options" "github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
@ -31,7 +31,7 @@ const (
const ( const (
exchangeServiceCommand = "exchange" exchangeServiceCommand = "exchange"
exchangeServiceCommandCreateUseSuffix = "--mailbox <email> | '" + utils.Wildcard + "'" exchangeServiceCommandCreateUseSuffix = "--mailbox <email> | '" + flags.Wildcard + "'"
exchangeServiceCommandDeleteUseSuffix = "--backup <backupId>" exchangeServiceCommandDeleteUseSuffix = "--backup <backupId>"
exchangeServiceCommandDetailsUseSuffix = "--backup <backupId>" exchangeServiceCommandDetailsUseSuffix = "--backup <backupId>"
) )
@ -82,20 +82,20 @@ func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
// Flags addition ordering should follow the order we want them to appear in help and docs: // Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence. // More generic (ex: --user) and more frequently used flags take precedence.
utils.AddMailBoxFlag(c) flags.AddMailBoxFlag(c)
utils.AddDataFlag(c, []string{dataEmail, dataContacts, dataEvents}, false) flags.AddDataFlag(c, []string{dataEmail, dataContacts, dataEvents}, false)
options.AddFetchParallelismFlag(c) flags.AddFetchParallelismFlag(c)
options.AddFailFastFlag(c) flags.AddFailFastFlag(c)
options.AddDisableIncrementalsFlag(c) flags.AddDisableIncrementalsFlag(c)
options.AddDisableDeltaFlag(c) flags.AddDisableDeltaFlag(c)
options.AddEnableImmutableIDFlag(c) flags.AddEnableImmutableIDFlag(c)
options.AddDisableConcurrencyLimiterFlag(c) flags.AddDisableConcurrencyLimiterFlag(c)
case listCommand: case listCommand:
c, fs = utils.AddCommand(cmd, exchangeListCmd()) c, fs = utils.AddCommand(cmd, exchangeListCmd())
fs.SortFlags = false fs.SortFlags = false
utils.AddBackupIDFlag(c, false) flags.AddBackupIDFlag(c, false)
addFailedItemsFN(c) addFailedItemsFN(c)
addSkippedItemsFN(c) addSkippedItemsFN(c)
addRecoveredErrorsFN(c) addRecoveredErrorsFN(c)
@ -107,12 +107,12 @@ func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
c.Use = c.Use + " " + exchangeServiceCommandDetailsUseSuffix c.Use = c.Use + " " + exchangeServiceCommandDetailsUseSuffix
c.Example = exchangeServiceCommandDetailsExamples c.Example = exchangeServiceCommandDetailsExamples
options.AddSkipReduceFlag(c) flags.AddSkipReduceFlag(c)
// Flags addition ordering should follow the order we want them to appear in help and docs: // Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence. // More generic (ex: --user) and more frequently used flags take precedence.
utils.AddBackupIDFlag(c, true) flags.AddBackupIDFlag(c, true)
utils.AddExchangeDetailsAndRestoreFlags(c) flags.AddExchangeDetailsAndRestoreFlags(c)
case deleteCommand: case deleteCommand:
c, fs = utils.AddCommand(cmd, exchangeDeleteCmd()) c, fs = utils.AddCommand(cmd, exchangeDeleteCmd())
@ -121,7 +121,7 @@ func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
c.Use = c.Use + " " + exchangeServiceCommandDeleteUseSuffix c.Use = c.Use + " " + exchangeServiceCommandDeleteUseSuffix
c.Example = exchangeServiceCommandDeleteExamples c.Example = exchangeServiceCommandDeleteExamples
utils.AddBackupIDFlag(c, true) flags.AddBackupIDFlag(c, true)
} }
return c return c
@ -149,7 +149,7 @@ func createExchangeCmd(cmd *cobra.Command, args []string) error {
return nil return nil
} }
if err := validateExchangeBackupCreateFlags(utils.UserFV, utils.CategoryDataFV); err != nil { if err := validateExchangeBackupCreateFlags(flags.UserFV, flags.CategoryDataFV); err != nil {
return err return err
} }
@ -160,7 +160,7 @@ func createExchangeCmd(cmd *cobra.Command, args []string) error {
defer utils.CloseRepo(ctx, r) defer utils.CloseRepo(ctx, r)
sel := exchangeBackupCreateSelectors(utils.UserFV, utils.CategoryDataFV) sel := exchangeBackupCreateSelectors(flags.UserFV, flags.CategoryDataFV)
ins, err := utils.UsersMap(ctx, *acct, fault.New(true)) ins, err := utils.UsersMap(ctx, *acct, fault.New(true))
if err != nil { if err != nil {
@ -235,7 +235,7 @@ func exchangeListCmd() *cobra.Command {
// lists the history of backup operations // lists the history of backup operations
func listExchangeCmd(cmd *cobra.Command, args []string) error { func listExchangeCmd(cmd *cobra.Command, args []string) error {
return genericListCommand(cmd, utils.BackupIDFV, path.ExchangeService, args) return genericListCommand(cmd, flags.BackupIDFV, path.ExchangeService, args)
} }
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
@ -269,9 +269,9 @@ func detailsExchangeCmd(cmd *cobra.Command, args []string) error {
defer utils.CloseRepo(ctx, r) defer utils.CloseRepo(ctx, r)
ctrlOpts := options.Control() ctrlOpts := utils.Control()
ds, err := runDetailsExchangeCmd(ctx, r, utils.BackupIDFV, opts, ctrlOpts.SkipReduce) ds, err := runDetailsExchangeCmd(ctx, r, flags.BackupIDFV, opts, ctrlOpts.SkipReduce)
if err != nil { if err != nil {
return Only(ctx, err) return Only(ctx, err)
} }
@ -340,5 +340,5 @@ func exchangeDeleteCmd() *cobra.Command {
// deletes an exchange service backup. // deletes an exchange service backup.
func deleteExchangeCmd(cmd *cobra.Command, args []string) error { func deleteExchangeCmd(cmd *cobra.Command, args []string) error {
return genericDeleteCommand(cmd, utils.BackupIDFV, "Exchange", args) return genericDeleteCommand(cmd, flags.BackupIDFV, "Exchange", args)
} }

View File

@ -16,8 +16,8 @@ import (
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config" "github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/m365/exchange" "github.com/alcionai/corso/src/internal/m365/exchange"
"github.com/alcionai/corso/src/internal/operations" "github.com/alcionai/corso/src/internal/operations"
@ -469,7 +469,7 @@ func runExchangeDetailsCmdTest(suite *PreparedBackupExchangeE2ESuite, category p
cmd := tester.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "details", "exchange", "backup", "details", "exchange",
"--config-file", suite.cfgFP, "--config-file", suite.cfgFP,
"--"+utils.BackupFN, string(bID)) "--"+flags.BackupFN, string(bID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.recorder) cmd.SetOut(&suite.recorder)
@ -568,7 +568,7 @@ func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd() {
cmd := tester.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "delete", "exchange", "backup", "delete", "exchange",
"--config-file", suite.cfgFP, "--config-file", suite.cfgFP,
"--"+utils.BackupFN, string(suite.backupOp.Results.BackupID)) "--"+flags.BackupFN, string(suite.backupOp.Results.BackupID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
@ -597,7 +597,7 @@ func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd_UnknownID
cmd := tester.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "delete", "exchange", "backup", "delete", "exchange",
"--config-file", suite.cfgFP, "--config-file", suite.cfgFP,
"--"+utils.BackupFN, uuid.NewString()) "--"+flags.BackupFN, uuid.NewString())
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// unknown backupIDs should error since the modelStore can't find the backup // unknown backupIDs should error since the modelStore can't find the backup
@ -617,8 +617,8 @@ func buildExchangeBackupCmd(
cmd := tester.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "create", "exchange", "backup", "create", "exchange",
"--config-file", configFile, "--config-file", configFile,
"--"+utils.UserFN, user, "--"+flags.UserFN, user,
"--"+utils.CategoryDataFN, category) "--"+flags.CategoryDataFN, category)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetOut(recorder) cmd.SetOut(recorder)

View File

@ -10,8 +10,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/options" "github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/cli/utils/testdata" "github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/internal/version"
@ -43,14 +42,14 @@ func (suite *ExchangeUnitSuite) TestAddExchangeCommands() {
expectUse + " " + exchangeServiceCommandCreateUseSuffix, expectUse + " " + exchangeServiceCommandCreateUseSuffix,
exchangeCreateCmd().Short, exchangeCreateCmd().Short,
[]string{ []string{
utils.UserFN, flags.UserFN,
utils.CategoryDataFN, flags.CategoryDataFN,
options.DisableIncrementalsFN, flags.DisableIncrementalsFN,
options.DisableDeltaFN, flags.DisableDeltaFN,
options.FailFastFN, flags.FailFastFN,
options.FetchParallelismFN, flags.FetchParallelismFN,
options.SkipReduceFN, flags.SkipReduceFN,
options.NoStatsFN, flags.NoStatsFN,
}, },
createExchangeCmd, createExchangeCmd,
}, },
@ -60,10 +59,10 @@ func (suite *ExchangeUnitSuite) TestAddExchangeCommands() {
expectUse, expectUse,
exchangeListCmd().Short, exchangeListCmd().Short,
[]string{ []string{
utils.BackupFN, flags.BackupFN,
failedItemsFN, flags.FailedItemsFN,
skippedItemsFN, flags.SkippedItemsFN,
recoveredErrorsFN, flags.RecoveredErrorsFN,
}, },
listExchangeCmd, listExchangeCmd,
}, },
@ -73,23 +72,23 @@ func (suite *ExchangeUnitSuite) TestAddExchangeCommands() {
expectUse + " " + exchangeServiceCommandDetailsUseSuffix, expectUse + " " + exchangeServiceCommandDetailsUseSuffix,
exchangeDetailsCmd().Short, exchangeDetailsCmd().Short,
[]string{ []string{
utils.BackupFN, flags.BackupFN,
utils.ContactFN, flags.ContactFN,
utils.ContactFolderFN, flags.ContactFolderFN,
utils.ContactNameFN, flags.ContactNameFN,
utils.EmailFN, flags.EmailFN,
utils.EmailFolderFN, flags.EmailFolderFN,
utils.EmailReceivedAfterFN, flags.EmailReceivedAfterFN,
utils.EmailReceivedBeforeFN, flags.EmailReceivedBeforeFN,
utils.EmailSenderFN, flags.EmailSenderFN,
utils.EmailSubjectFN, flags.EmailSubjectFN,
utils.EventFN, flags.EventFN,
utils.EventCalendarFN, flags.EventCalendarFN,
utils.EventOrganizerFN, flags.EventOrganizerFN,
utils.EventRecursFN, flags.EventRecursFN,
utils.EventStartsAfterFN, flags.EventStartsAfterFN,
utils.EventStartsBeforeFN, flags.EventStartsBeforeFN,
utils.EventSubjectFN, flags.EventSubjectFN,
}, },
detailsExchangeCmd, detailsExchangeCmd,
}, },
@ -98,7 +97,7 @@ func (suite *ExchangeUnitSuite) TestAddExchangeCommands() {
deleteCommand, deleteCommand,
expectUse + " " + exchangeServiceCommandDeleteUseSuffix, expectUse + " " + exchangeServiceCommandDeleteUseSuffix,
exchangeDeleteCmd().Short, exchangeDeleteCmd().Short,
[]string{utils.BackupFN}, []string{flags.BackupFN},
deleteExchangeCmd, deleteExchangeCmd,
}, },
} }
@ -171,7 +170,7 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupCreateSelectors() {
}, },
{ {
name: "any users, no data", name: "any users, no data",
user: []string{utils.Wildcard}, user: []string{flags.Wildcard},
expectIncludeLen: 3, expectIncludeLen: 3,
}, },
{ {
@ -181,7 +180,7 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupCreateSelectors() {
}, },
{ {
name: "any users, contacts", name: "any users, contacts",
user: []string{utils.Wildcard}, user: []string{flags.Wildcard},
data: []string{dataContacts}, data: []string{dataContacts},
expectIncludeLen: 1, expectIncludeLen: 1,
}, },
@ -193,7 +192,7 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupCreateSelectors() {
}, },
{ {
name: "any users, email", name: "any users, email",
user: []string{utils.Wildcard}, user: []string{flags.Wildcard},
data: []string{dataEmail}, data: []string{dataEmail},
expectIncludeLen: 1, expectIncludeLen: 1,
}, },
@ -205,7 +204,7 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupCreateSelectors() {
}, },
{ {
name: "any users, events", name: "any users, events",
user: []string{utils.Wildcard}, user: []string{flags.Wildcard},
data: []string{dataEvents}, data: []string{dataEvents},
expectIncludeLen: 1, expectIncludeLen: 1,
}, },
@ -217,7 +216,7 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupCreateSelectors() {
}, },
{ {
name: "any users, contacts + email", name: "any users, contacts + email",
user: []string{utils.Wildcard}, user: []string{flags.Wildcard},
data: []string{dataContacts, dataEmail}, data: []string{dataContacts, dataEmail},
expectIncludeLen: 2, expectIncludeLen: 2,
}, },
@ -229,7 +228,7 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupCreateSelectors() {
}, },
{ {
name: "any users, email + events", name: "any users, email + events",
user: []string{utils.Wildcard}, user: []string{flags.Wildcard},
data: []string{dataEmail, dataEvents}, data: []string{dataEmail, dataEvents},
expectIncludeLen: 2, expectIncludeLen: 2,
}, },
@ -241,7 +240,7 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupCreateSelectors() {
}, },
{ {
name: "any users, events + contacts", name: "any users, events + contacts",
user: []string{utils.Wildcard}, user: []string{flags.Wildcard},
data: []string{dataEvents, dataContacts}, data: []string{dataEvents, dataContacts},
expectIncludeLen: 2, expectIncludeLen: 2,
}, },

View File

@ -8,7 +8,7 @@ import (
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/pflag" "github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/options" "github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
@ -25,7 +25,7 @@ import (
const ( const (
oneDriveServiceCommand = "onedrive" oneDriveServiceCommand = "onedrive"
oneDriveServiceCommandCreateUseSuffix = "--user <email> | '" + utils.Wildcard + "'" oneDriveServiceCommandCreateUseSuffix = "--user <email> | '" + flags.Wildcard + "'"
oneDriveServiceCommandDeleteUseSuffix = "--backup <backupId>" oneDriveServiceCommandDeleteUseSuffix = "--backup <backupId>"
oneDriveServiceCommandDetailsUseSuffix = "--backup <backupId>" oneDriveServiceCommandDetailsUseSuffix = "--backup <backupId>"
) )
@ -70,15 +70,15 @@ func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
c.Use = c.Use + " " + oneDriveServiceCommandCreateUseSuffix c.Use = c.Use + " " + oneDriveServiceCommandCreateUseSuffix
c.Example = oneDriveServiceCommandCreateExamples c.Example = oneDriveServiceCommandCreateExamples
utils.AddUserFlag(c) flags.AddUserFlag(c)
options.AddFailFastFlag(c) flags.AddFailFastFlag(c)
options.AddDisableIncrementalsFlag(c) flags.AddDisableIncrementalsFlag(c)
case listCommand: case listCommand:
c, fs = utils.AddCommand(cmd, oneDriveListCmd()) c, fs = utils.AddCommand(cmd, oneDriveListCmd())
fs.SortFlags = false fs.SortFlags = false
utils.AddBackupIDFlag(c, false) flags.AddBackupIDFlag(c, false)
addFailedItemsFN(c) addFailedItemsFN(c)
addSkippedItemsFN(c) addSkippedItemsFN(c)
addRecoveredErrorsFN(c) addRecoveredErrorsFN(c)
@ -90,9 +90,9 @@ func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
c.Use = c.Use + " " + oneDriveServiceCommandDetailsUseSuffix c.Use = c.Use + " " + oneDriveServiceCommandDetailsUseSuffix
c.Example = oneDriveServiceCommandDetailsExamples c.Example = oneDriveServiceCommandDetailsExamples
options.AddSkipReduceFlag(c) flags.AddSkipReduceFlag(c)
utils.AddBackupIDFlag(c, true) flags.AddBackupIDFlag(c, true)
utils.AddOneDriveDetailsAndRestoreFlags(c) flags.AddOneDriveDetailsAndRestoreFlags(c)
case deleteCommand: case deleteCommand:
c, fs = utils.AddCommand(cmd, oneDriveDeleteCmd()) c, fs = utils.AddCommand(cmd, oneDriveDeleteCmd())
@ -101,7 +101,7 @@ func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
c.Use = c.Use + " " + oneDriveServiceCommandDeleteUseSuffix c.Use = c.Use + " " + oneDriveServiceCommandDeleteUseSuffix
c.Example = oneDriveServiceCommandDeleteExamples c.Example = oneDriveServiceCommandDeleteExamples
utils.AddBackupIDFlag(c, true) flags.AddBackupIDFlag(c, true)
} }
return c return c
@ -130,7 +130,7 @@ func createOneDriveCmd(cmd *cobra.Command, args []string) error {
return nil return nil
} }
if err := validateOneDriveBackupCreateFlags(utils.UserFV); err != nil { if err := validateOneDriveBackupCreateFlags(flags.UserFV); err != nil {
return err return err
} }
@ -141,7 +141,7 @@ func createOneDriveCmd(cmd *cobra.Command, args []string) error {
defer utils.CloseRepo(ctx, r) defer utils.CloseRepo(ctx, r)
sel := oneDriveBackupCreateSelectors(utils.UserFV) sel := oneDriveBackupCreateSelectors(flags.UserFV)
ins, err := utils.UsersMap(ctx, *acct, fault.New(true)) ins, err := utils.UsersMap(ctx, *acct, fault.New(true))
if err != nil { if err != nil {
@ -193,7 +193,7 @@ func oneDriveListCmd() *cobra.Command {
// lists the history of backup operations // lists the history of backup operations
func listOneDriveCmd(cmd *cobra.Command, args []string) error { func listOneDriveCmd(cmd *cobra.Command, args []string) error {
return genericListCommand(cmd, utils.BackupIDFV, path.OneDriveService, args) return genericListCommand(cmd, flags.BackupIDFV, path.OneDriveService, args)
} }
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
@ -227,9 +227,9 @@ func detailsOneDriveCmd(cmd *cobra.Command, args []string) error {
defer utils.CloseRepo(ctx, r) defer utils.CloseRepo(ctx, r)
ctrlOpts := options.Control() ctrlOpts := utils.Control()
ds, err := runDetailsOneDriveCmd(ctx, r, utils.BackupIDFV, opts, ctrlOpts.SkipReduce) ds, err := runDetailsOneDriveCmd(ctx, r, flags.BackupIDFV, opts, ctrlOpts.SkipReduce)
if err != nil { if err != nil {
return Only(ctx, err) return Only(ctx, err)
} }
@ -295,5 +295,5 @@ func oneDriveDeleteCmd() *cobra.Command {
// deletes a oneDrive service backup. // deletes a oneDrive service backup.
func deleteOneDriveCmd(cmd *cobra.Command, args []string) error { func deleteOneDriveCmd(cmd *cobra.Command, args []string) error {
return genericDeleteCommand(cmd, utils.BackupIDFV, "OneDrive", args) return genericDeleteCommand(cmd, flags.BackupIDFV, "OneDrive", args)
} }

View File

@ -14,8 +14,8 @@ import (
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config" "github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations" "github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -108,7 +108,7 @@ func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupCmd_UserNotInTenant() {
cmd := tester.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "create", "onedrive", "backup", "create", "onedrive",
"--config-file", suite.cfgFP, "--config-file", suite.cfgFP,
"--"+utils.UserFN, "foo@nothere.com") "--"+flags.UserFN, "foo@nothere.com")
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetOut(&recorder) cmd.SetOut(&recorder)
@ -200,7 +200,7 @@ func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd() {
cmd := tester.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "delete", "onedrive", "backup", "delete", "onedrive",
"--config-file", suite.cfgFP, "--config-file", suite.cfgFP,
"--"+utils.BackupFN, string(suite.backupOp.Results.BackupID)) "--"+flags.BackupFN, string(suite.backupOp.Results.BackupID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetErr(&suite.recorder) cmd.SetErr(&suite.recorder)
@ -240,7 +240,7 @@ func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd_unknownID
cmd := tester.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "delete", "onedrive", "backup", "delete", "onedrive",
"--config-file", suite.cfgFP, "--config-file", suite.cfgFP,
"--"+utils.BackupFN, uuid.NewString()) "--"+flags.BackupFN, uuid.NewString())
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// unknown backupIDs should error since the modelStore can't find the backup // unknown backupIDs should error since the modelStore can't find the backup

View File

@ -10,8 +10,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/options" "github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/cli/utils/testdata" "github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/internal/version"
@ -43,9 +42,9 @@ func (suite *OneDriveUnitSuite) TestAddOneDriveCommands() {
expectUse + " " + oneDriveServiceCommandCreateUseSuffix, expectUse + " " + oneDriveServiceCommandCreateUseSuffix,
oneDriveCreateCmd().Short, oneDriveCreateCmd().Short,
[]string{ []string{
utils.UserFN, flags.UserFN,
options.DisableIncrementalsFN, flags.DisableIncrementalsFN,
options.FailFastFN, flags.FailFastFN,
}, },
createOneDriveCmd, createOneDriveCmd,
}, },
@ -55,10 +54,10 @@ func (suite *OneDriveUnitSuite) TestAddOneDriveCommands() {
expectUse, expectUse,
oneDriveListCmd().Short, oneDriveListCmd().Short,
[]string{ []string{
utils.BackupFN, flags.BackupFN,
failedItemsFN, flags.FailedItemsFN,
skippedItemsFN, flags.SkippedItemsFN,
recoveredErrorsFN, flags.RecoveredErrorsFN,
}, },
listOneDriveCmd, listOneDriveCmd,
}, },
@ -68,13 +67,13 @@ func (suite *OneDriveUnitSuite) TestAddOneDriveCommands() {
expectUse + " " + oneDriveServiceCommandDetailsUseSuffix, expectUse + " " + oneDriveServiceCommandDetailsUseSuffix,
oneDriveDetailsCmd().Short, oneDriveDetailsCmd().Short,
[]string{ []string{
utils.BackupFN, flags.BackupFN,
utils.FolderFN, flags.FolderFN,
utils.FileFN, flags.FileFN,
utils.FileCreatedAfterFN, flags.FileCreatedAfterFN,
utils.FileCreatedBeforeFN, flags.FileCreatedBeforeFN,
utils.FileModifiedAfterFN, flags.FileModifiedAfterFN,
utils.FileModifiedBeforeFN, flags.FileModifiedBeforeFN,
}, },
detailsOneDriveCmd, detailsOneDriveCmd,
}, },
@ -83,7 +82,7 @@ func (suite *OneDriveUnitSuite) TestAddOneDriveCommands() {
deleteCommand, deleteCommand,
expectUse + " " + oneDriveServiceCommandDeleteUseSuffix, expectUse + " " + oneDriveServiceCommandDeleteUseSuffix,
oneDriveDeleteCmd().Short, oneDriveDeleteCmd().Short,
[]string{utils.BackupFN}, []string{flags.BackupFN},
deleteOneDriveCmd, deleteOneDriveCmd,
}, },
} }

View File

@ -9,7 +9,7 @@ import (
"github.com/spf13/pflag" "github.com/spf13/pflag"
"golang.org/x/exp/slices" "golang.org/x/exp/slices"
"github.com/alcionai/corso/src/cli/options" "github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
@ -34,7 +34,7 @@ const (
const ( const (
sharePointServiceCommand = "sharepoint" sharePointServiceCommand = "sharepoint"
sharePointServiceCommandCreateUseSuffix = "--site <siteURL> | '" + utils.Wildcard + "'" sharePointServiceCommandCreateUseSuffix = "--site <siteURL> | '" + flags.Wildcard + "'"
sharePointServiceCommandDeleteUseSuffix = "--backup <backupId>" sharePointServiceCommandDeleteUseSuffix = "--backup <backupId>"
sharePointServiceCommandDetailsUseSuffix = "--backup <backupId>" sharePointServiceCommandDetailsUseSuffix = "--backup <backupId>"
) )
@ -84,17 +84,17 @@ func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
c.Use = c.Use + " " + sharePointServiceCommandCreateUseSuffix c.Use = c.Use + " " + sharePointServiceCommandCreateUseSuffix
c.Example = sharePointServiceCommandCreateExamples c.Example = sharePointServiceCommandCreateExamples
utils.AddSiteFlag(c) flags.AddSiteFlag(c)
utils.AddSiteIDFlag(c) flags.AddSiteIDFlag(c)
utils.AddDataFlag(c, []string{dataLibraries}, true) flags.AddDataFlag(c, []string{dataLibraries}, true)
options.AddFailFastFlag(c) flags.AddFailFastFlag(c)
options.AddDisableIncrementalsFlag(c) flags.AddDisableIncrementalsFlag(c)
case listCommand: case listCommand:
c, fs = utils.AddCommand(cmd, sharePointListCmd()) c, fs = utils.AddCommand(cmd, sharePointListCmd())
fs.SortFlags = false fs.SortFlags = false
utils.AddBackupIDFlag(c, false) flags.AddBackupIDFlag(c, false)
addFailedItemsFN(c) addFailedItemsFN(c)
addSkippedItemsFN(c) addSkippedItemsFN(c)
addRecoveredErrorsFN(c) addRecoveredErrorsFN(c)
@ -106,9 +106,9 @@ func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
c.Use = c.Use + " " + sharePointServiceCommandDetailsUseSuffix c.Use = c.Use + " " + sharePointServiceCommandDetailsUseSuffix
c.Example = sharePointServiceCommandDetailsExamples c.Example = sharePointServiceCommandDetailsExamples
options.AddSkipReduceFlag(c) flags.AddSkipReduceFlag(c)
utils.AddBackupIDFlag(c, true) flags.AddBackupIDFlag(c, true)
utils.AddSharePointDetailsAndRestoreFlags(c) flags.AddSharePointDetailsAndRestoreFlags(c)
case deleteCommand: case deleteCommand:
c, fs = utils.AddCommand(cmd, sharePointDeleteCmd()) c, fs = utils.AddCommand(cmd, sharePointDeleteCmd())
@ -117,7 +117,7 @@ func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
c.Use = c.Use + " " + sharePointServiceCommandDeleteUseSuffix c.Use = c.Use + " " + sharePointServiceCommandDeleteUseSuffix
c.Example = sharePointServiceCommandDeleteExamples c.Example = sharePointServiceCommandDeleteExamples
utils.AddBackupIDFlag(c, true) flags.AddBackupIDFlag(c, true)
} }
return c return c
@ -146,7 +146,7 @@ func createSharePointCmd(cmd *cobra.Command, args []string) error {
return nil return nil
} }
if err := validateSharePointBackupCreateFlags(utils.SiteIDFV, utils.WebURLFV, utils.CategoryDataFV); err != nil { if err := validateSharePointBackupCreateFlags(flags.SiteIDFV, flags.WebURLFV, flags.CategoryDataFV); err != nil {
return err return err
} }
@ -165,7 +165,7 @@ func createSharePointCmd(cmd *cobra.Command, args []string) error {
return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 sites")) return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 sites"))
} }
sel, err := sharePointBackupCreateSelectors(ctx, ins, utils.SiteIDFV, utils.WebURLFV, utils.CategoryDataFV) sel, err := sharePointBackupCreateSelectors(ctx, ins, flags.SiteIDFV, flags.WebURLFV, flags.CategoryDataFV)
if err != nil { if err != nil {
return Only(ctx, clues.Wrap(err, "Retrieving up sharepoint sites by ID and URL")) return Only(ctx, clues.Wrap(err, "Retrieving up sharepoint sites by ID and URL"))
} }
@ -188,8 +188,8 @@ func validateSharePointBackupCreateFlags(sites, weburls, cats []string) error {
if len(sites) == 0 && len(weburls) == 0 { if len(sites) == 0 && len(weburls) == 0 {
return clues.New( return clues.New(
"requires one or more --" + "requires one or more --" +
utils.SiteFN + " urls, or the wildcard --" + flags.SiteFN + " urls, or the wildcard --" +
utils.SiteFN + " *", flags.SiteFN + " *",
) )
} }
@ -214,11 +214,11 @@ func sharePointBackupCreateSelectors(
return selectors.NewSharePointBackup(selectors.None()), nil return selectors.NewSharePointBackup(selectors.None()), nil
} }
if filters.PathContains(sites).Compare(utils.Wildcard) { if filters.PathContains(sites).Compare(flags.Wildcard) {
return includeAllSitesWithCategories(ins, cats), nil return includeAllSitesWithCategories(ins, cats), nil
} }
if filters.PathContains(weburls).Compare(utils.Wildcard) { if filters.PathContains(weburls).Compare(flags.Wildcard) {
return includeAllSitesWithCategories(ins, cats), nil return includeAllSitesWithCategories(ins, cats), nil
} }
@ -265,7 +265,7 @@ func sharePointListCmd() *cobra.Command {
// lists the history of backup operations // lists the history of backup operations
func listSharePointCmd(cmd *cobra.Command, args []string) error { func listSharePointCmd(cmd *cobra.Command, args []string) error {
return genericListCommand(cmd, utils.BackupIDFV, path.SharePointService, args) return genericListCommand(cmd, flags.BackupIDFV, path.SharePointService, args)
} }
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
@ -285,7 +285,7 @@ func sharePointDeleteCmd() *cobra.Command {
// deletes a sharePoint service backup. // deletes a sharePoint service backup.
func deleteSharePointCmd(cmd *cobra.Command, args []string) error { func deleteSharePointCmd(cmd *cobra.Command, args []string) error {
return genericDeleteCommand(cmd, utils.BackupIDFV, "SharePoint", args) return genericDeleteCommand(cmd, flags.BackupIDFV, "SharePoint", args)
} }
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
@ -319,9 +319,9 @@ func detailsSharePointCmd(cmd *cobra.Command, args []string) error {
defer utils.CloseRepo(ctx, r) defer utils.CloseRepo(ctx, r)
ctrlOpts := options.Control() ctrlOpts := utils.Control()
ds, err := runDetailsSharePointCmd(ctx, r, utils.BackupIDFV, opts, ctrlOpts.SkipReduce) ds, err := runDetailsSharePointCmd(ctx, r, flags.BackupIDFV, opts, ctrlOpts.SkipReduce)
if err != nil { if err != nil {
return Only(ctx, err) return Only(ctx, err)
} }

View File

@ -14,8 +14,8 @@ import (
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config" "github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations" "github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -164,7 +164,7 @@ func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd() {
cmd := tester.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "delete", "sharepoint", "backup", "delete", "sharepoint",
"--config-file", suite.cfgFP, "--config-file", suite.cfgFP,
"--"+utils.BackupFN, string(suite.backupOp.Results.BackupID)) "--"+flags.BackupFN, string(suite.backupOp.Results.BackupID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetErr(&suite.recorder) cmd.SetErr(&suite.recorder)
@ -205,7 +205,7 @@ func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd_unkno
cmd := tester.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "delete", "sharepoint", "backup", "delete", "sharepoint",
"--config-file", suite.cfgFP, "--config-file", suite.cfgFP,
"--"+utils.BackupFN, uuid.NewString()) "--"+flags.BackupFN, uuid.NewString())
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// unknown backupIDs should error since the modelStore can't find the backup // unknown backupIDs should error since the modelStore can't find the backup

View File

@ -10,8 +10,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/options" "github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/cli/utils/testdata" "github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -45,9 +44,9 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
expectUse + " " + sharePointServiceCommandCreateUseSuffix, expectUse + " " + sharePointServiceCommandCreateUseSuffix,
sharePointCreateCmd().Short, sharePointCreateCmd().Short,
[]string{ []string{
utils.SiteFN, flags.SiteFN,
options.DisableIncrementalsFN, flags.DisableIncrementalsFN,
options.FailFastFN, flags.FailFastFN,
}, },
createSharePointCmd, createSharePointCmd,
}, },
@ -57,10 +56,10 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
expectUse, expectUse,
sharePointListCmd().Short, sharePointListCmd().Short,
[]string{ []string{
utils.BackupFN, flags.BackupFN,
failedItemsFN, flags.FailedItemsFN,
skippedItemsFN, flags.SkippedItemsFN,
recoveredErrorsFN, flags.RecoveredErrorsFN,
}, },
listSharePointCmd, listSharePointCmd,
}, },
@ -70,14 +69,14 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
expectUse + " " + sharePointServiceCommandDetailsUseSuffix, expectUse + " " + sharePointServiceCommandDetailsUseSuffix,
sharePointDetailsCmd().Short, sharePointDetailsCmd().Short,
[]string{ []string{
utils.BackupFN, flags.BackupFN,
utils.LibraryFN, flags.LibraryFN,
utils.FolderFN, flags.FolderFN,
utils.FileFN, flags.FileFN,
utils.FileCreatedAfterFN, flags.FileCreatedAfterFN,
utils.FileCreatedBeforeFN, flags.FileCreatedBeforeFN,
utils.FileModifiedAfterFN, flags.FileModifiedAfterFN,
utils.FileModifiedBeforeFN, flags.FileModifiedBeforeFN,
}, },
detailsSharePointCmd, detailsSharePointCmd,
}, },
@ -86,7 +85,7 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
deleteCommand, deleteCommand,
expectUse + " " + sharePointServiceCommandDeleteUseSuffix, expectUse + " " + sharePointServiceCommandDeleteUseSuffix,
sharePointDeleteCmd().Short, sharePointDeleteCmd().Short,
[]string{utils.BackupFN}, []string{flags.BackupFN},
deleteSharePointCmd, deleteSharePointCmd,
}, },
} }
@ -183,13 +182,13 @@ func (suite *SharePointUnitSuite) TestSharePointBackupCreateSelectors() {
}, },
{ {
name: "site wildcard", name: "site wildcard",
site: []string{utils.Wildcard}, site: []string{flags.Wildcard},
expect: bothIDs, expect: bothIDs,
expectScopesLen: 2, expectScopesLen: 2,
}, },
{ {
name: "url wildcard", name: "url wildcard",
weburl: []string{utils.Wildcard}, weburl: []string{flags.Wildcard},
expect: bothIDs, expect: bothIDs,
expectScopesLen: 2, expectScopesLen: 2,
}, },
@ -221,7 +220,7 @@ func (suite *SharePointUnitSuite) TestSharePointBackupCreateSelectors() {
}, },
{ {
name: "unnecessary site wildcard", name: "unnecessary site wildcard",
site: []string{id1, utils.Wildcard}, site: []string{id1, flags.Wildcard},
weburl: []string{url1, url2}, weburl: []string{url1, url2},
expect: bothIDs, expect: bothIDs,
expectScopesLen: 2, expectScopesLen: 2,
@ -229,7 +228,7 @@ func (suite *SharePointUnitSuite) TestSharePointBackupCreateSelectors() {
{ {
name: "unnecessary url wildcard", name: "unnecessary url wildcard",
site: []string{id1}, site: []string{id1},
weburl: []string{url1, utils.Wildcard}, weburl: []string{url1, flags.Wildcard},
expect: bothIDs, expect: bothIDs,
expectScopesLen: 2, expectScopesLen: 2,
}, },

View File

@ -11,8 +11,8 @@ import (
"github.com/alcionai/corso/src/cli/backup" "github.com/alcionai/corso/src/cli/backup"
"github.com/alcionai/corso/src/cli/config" "github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/help" "github.com/alcionai/corso/src/cli/help"
"github.com/alcionai/corso/src/cli/options"
"github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/repo" "github.com/alcionai/corso/src/cli/repo"
"github.com/alcionai/corso/src/cli/restore" "github.com/alcionai/corso/src/cli/restore"
@ -44,11 +44,11 @@ func preRun(cc *cobra.Command, args []string) error {
ctx := cc.Context() ctx := cc.Context()
log := logger.Ctx(ctx) log := logger.Ctx(ctx)
flags := utils.GetPopulatedFlags(cc) fs := flags.GetPopulatedFlags(cc)
flagSl := make([]string, 0, len(flags)) flagSl := make([]string, 0, len(fs))
// currently only tracking flag names to avoid pii leakage. // currently only tracking flag names to avoid pii leakage.
for f := range flags { for f := range fs {
flagSl = append(flagSl, f) flagSl = append(flagSl, f)
} }
@ -87,7 +87,7 @@ func preRun(cc *cobra.Command, args []string) error {
cfg.Account.ID(), cfg.Account.ID(),
map[string]any{"command": cc.CommandPath()}, map[string]any{"command": cc.CommandPath()},
cfg.RepoID, cfg.RepoID,
options.Control()) utils.Control())
} }
// handle deprecated user flag in Backup exchange command // handle deprecated user flag in Backup exchange command
@ -138,7 +138,7 @@ func CorsoCommand() *cobra.Command {
func BuildCommandTree(cmd *cobra.Command) { func BuildCommandTree(cmd *cobra.Command) {
// want to order flags explicitly // want to order flags explicitly
cmd.PersistentFlags().SortFlags = false cmd.PersistentFlags().SortFlags = false
utils.AddRunModeFlag(cmd, true) flags.AddRunModeFlag(cmd, true)
cmd.Flags().BoolP("version", "v", false, "current version info") cmd.Flags().BoolP("version", "v", false, "current version info")
cmd.PersistentPreRunE = preRun cmd.PersistentPreRunE = preRun
@ -146,7 +146,7 @@ func BuildCommandTree(cmd *cobra.Command) {
logger.AddLoggingFlags(cmd) logger.AddLoggingFlags(cmd)
observe.AddProgressBarFlags(cmd) observe.AddProgressBarFlags(cmd)
print.AddOutputFlag(cmd) print.AddOutputFlag(cmd)
options.AddGlobalOperationFlags(cmd) flags.AddGlobalOperationFlags(cmd)
cmd.SetUsageTemplate(indentExamplesTemplate(corsoCmd.UsageTemplate())) cmd.SetUsageTemplate(indentExamplesTemplate(corsoCmd.UsageTemplate()))
cmd.CompletionOptions.DisableDefaultCmd = true cmd.CompletionOptions.DisableDefaultCmd = true

124
src/cli/flags/exchange.go Normal file
View File

@ -0,0 +1,124 @@
package flags
import (
"github.com/spf13/cobra"
)
// Flag names (ie: FN) for the Exchange email, event, and contact
// selectors used by the details and restore commands.
const (
	ContactFN             = "contact"
	ContactFolderFN       = "contact-folder"
	ContactNameFN         = "contact-name"
	EmailFN               = "email"
	EmailFolderFN         = "email-folder"
	EmailReceivedAfterFN  = "email-received-after"
	EmailReceivedBeforeFN = "email-received-before"
	EmailSenderFN         = "email-sender"
	EmailSubjectFN        = "email-subject"
	EventFN               = "event"
	EventCalendarFN       = "event-calendar"
	EventOrganizerFN      = "event-organizer"
	EventRecursFN         = "event-recurs"
	EventStartsAfterFN    = "event-starts-after"
	EventStartsBeforeFN   = "event-starts-before"
	EventSubjectFN        = "event-subject"
)

// flag values (ie: FV)
var (
	ContactFV             []string
	ContactFolderFV       []string
	ContactNameFV         string
	EmailFV               []string
	EmailFolderFV         []string
	EmailReceivedAfterFV  string
	EmailReceivedBeforeFV string
	EmailSenderFV         string
	EmailSubjectFV        string
	EventFV               []string
	EventCalendarFV       []string
	EventOrganizerFV      string
	// EventRecursFV is registered as a string flag (not bool); it carries
	// textual values such as "false" — see AddExchangeDetailsAndRestoreFlags.
	EventRecursFV       string
	EventStartsAfterFV  string
	EventStartsBeforeFV string
	EventSubjectFV      string
)
// AddExchangeDetailsAndRestoreFlags adds flags that are common to both the
// details and restore commands.
func AddExchangeDetailsAndRestoreFlags(cmd *cobra.Command) {
	fs := cmd.Flags()

	// Local helpers keep each registration to one statement. Registration
	// order (emails, then events, then contacts) is preserved; it is the
	// order flags appear in unsorted help output.
	addStrSlice := func(fv *[]string, fn, usage string) {
		fs.StringSliceVar(fv, fn, nil, usage)
	}
	addStr := func(fv *string, fn, usage string) {
		fs.StringVar(fv, fn, "", usage)
	}

	// email flags
	addStrSlice(&EmailFV, EmailFN,
		"Select email messages by ID; accepts '"+Wildcard+"' to select all emails.")
	addStrSlice(&EmailFolderFV, EmailFolderFN,
		"Select emails within a folder; accepts '"+Wildcard+"' to select all email folders.")
	addStr(&EmailSubjectFV, EmailSubjectFN,
		"Select emails with a subject containing this value.")
	addStr(&EmailSenderFV, EmailSenderFN,
		"Select emails from a specific sender.")
	addStr(&EmailReceivedAfterFV, EmailReceivedAfterFN,
		"Select emails received after this datetime.")
	addStr(&EmailReceivedBeforeFV, EmailReceivedBeforeFN,
		"Select emails received before this datetime.")

	// event flags
	addStrSlice(&EventFV, EventFN,
		"Select events by event ID; accepts '"+Wildcard+"' to select all events.")
	addStrSlice(&EventCalendarFV, EventCalendarFN,
		"Select events under a calendar; accepts '"+Wildcard+"' to select all events.")
	addStr(&EventSubjectFV, EventSubjectFN,
		"Select events with a subject containing this value.")
	addStr(&EventOrganizerFV, EventOrganizerFN,
		"Select events from a specific organizer.")
	addStr(&EventRecursFV, EventRecursFN,
		"Select recurring events. Use `--event-recurs false` to select non-recurring events.")
	addStr(&EventStartsAfterFV, EventStartsAfterFN,
		"Select events starting after this datetime.")
	addStr(&EventStartsBeforeFV, EventStartsBeforeFN,
		"Select events starting before this datetime.")

	// contact flags
	addStrSlice(&ContactFV, ContactFN,
		"Select contacts by contact ID; accepts '"+Wildcard+"' to select all contacts.")
	addStrSlice(&ContactFolderFV, ContactFolderFN,
		"Select contacts within a folder; accepts '"+Wildcard+"' to select all contact folders.")
	addStr(&ContactNameFV, ContactNameFN,
		"Select contacts whose contact name contains this value.")
}

36
src/cli/flags/flags.go Normal file
View File

@ -0,0 +1,36 @@
package flags
import (
"github.com/spf13/cobra"
"github.com/spf13/pflag"
)
// Wildcard is the value many flags accept to mean "select everything".
const Wildcard = "*"

// PopulatedFlags is a set of the long names of flags that the user
// explicitly set on the command line.
type PopulatedFlags map[string]struct{}
// populate records pf's long name in the set, but only when the flag was
// explicitly set by the user (pf.Changed). Nil flags are ignored.
func (fs PopulatedFlags) populate(pf *pflag.Flag) {
	if pf == nil || !pf.Changed {
		return
	}

	fs[pf.Name] = struct{}{}
}
// GetPopulatedFlags returns a map of flags that have been
// populated by the user. Entry keys match the flag's long
// name. Values are empty.
func GetPopulatedFlags(cmd *cobra.Command) PopulatedFlags {
	pop := PopulatedFlags{}

	if fs := cmd.Flags(); fs != nil {
		fs.VisitAll(pop.populate)
	}

	return pop
}

View File

@ -0,0 +1,42 @@
package flags
import (
"fmt"
"strings"
"github.com/spf13/cobra"
)
// CategoryDataFV holds the --data flag's values: the categories of data
// selected for the operation.
var CategoryDataFV []string

// CategoryDataFN is the name of the --data flag.
const CategoryDataFN = "data"
// AddDataFlag adds the --data flag, which restricts an operation to one
// or more categories of data. allowed lists the accepted category names
// and is folded into the usage message; when allowed is empty no flag is
// registered. hide marks the flag as hidden.
func AddDataFlag(cmd *cobra.Command, allowed []string, hide bool) {
	if len(allowed) == 0 {
		return
	}

	// Render the allowed values as "a", "a or b", or "a, b or c".
	allowedMsg := allowed[len(allowed)-1]
	if rest := allowed[:len(allowed)-1]; len(rest) > 0 {
		allowedMsg = fmt.Sprintf("%s or %s", strings.Join(rest, ", "), allowedMsg)
	}

	fs := cmd.Flags()
	fs.StringSliceVar(
		&CategoryDataFV,
		CategoryDataFN, nil,
		"Select one or more types of data to backup: "+allowedMsg+".")

	if hide {
		cobra.CheckErr(fs.MarkHidden(CategoryDataFN))
	}
}

View File

@ -0,0 +1,40 @@
package flags
import (
"fmt"
"github.com/spf13/cobra"
)
// Flag names for user/mailbox selection.
const (
	UserFN    = "user"
	MailBoxFN = "mailbox"
)

// UserFV holds the values of both the --user and --mailbox flags; the
// two flags are aliases backed by this same variable (see AddMailBoxFlag).
var UserFV []string
// AddUserFlag adds the required --user flag, which selects the users
// whose data is included; '*' selects every user.
func AddUserFlag(cmd *cobra.Command) {
	fs := cmd.Flags()

	fs.StringSliceVar(
		&UserFV,
		UserFN, nil,
		"Backup a specific user's data; accepts '"+Wildcard+"' to select all users.")

	cobra.CheckErr(cmd.MarkFlagRequired(UserFN))
}
// AddMailBoxFlag adds the --mailbox flag along with the deprecated
// --user alias. Both flags are backed by the same UserFV variable, so
// either spelling selects the same set of mailboxes.
func AddMailBoxFlag(cmd *cobra.Command) {
	// fs, not "flags": a local named flags shadows this package's own
	// name and is inconsistent with every sibling helper in the package.
	fs := cmd.Flags()

	// --user is kept for backwards compatibility, hidden behind a
	// deprecation notice that points callers at --mailbox.
	fs.StringSliceVar(
		&UserFV,
		UserFN, nil,
		"Backup a specific user's data; accepts '"+Wildcard+"' to select all users.")
	cobra.CheckErr(fs.MarkDeprecated(UserFN, fmt.Sprintf("use --%s instead", MailBoxFN)))

	fs.StringSliceVar(
		&UserFV,
		MailBoxFN, nil,
		"Backup a specific mailbox's data; accepts '"+Wildcard+"' to select all mailboxes.")
}

View File

@ -0,0 +1,41 @@
package flags
import (
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/pkg/control/repository"
)
// Flag names for the repo maintenance command.
const (
	MaintenanceModeFN  = "mode"
	ForceMaintenanceFN = "force"
)

// Flag values populated by the maintenance flags above.
var (
	MaintenanceModeFV  string
	ForceMaintenanceFV bool
)
// AddMaintenanceModeFlag adds the hidden --mode flag, which selects the
// type of maintenance to run; defaults to complete maintenance.
func AddMaintenanceModeFlag(cmd *cobra.Command) {
	fs := cmd.Flags()

	// Build the usage text from the maintenance type names so the help
	// output stays in sync with the repository package's values.
	usage := "Type of maintenance operation to run. Pass '" +
		repository.MetadataMaintenance.String() + "' to run a faster maintenance " +
		"that does minimal clean-up and optimization. Pass '" +
		repository.CompleteMaintenance.String() + "' to fully compact existing " +
		"data and delete unused data."

	fs.StringVar(
		&MaintenanceModeFV,
		MaintenanceModeFN,
		repository.CompleteMaintenance.String(),
		usage)

	cobra.CheckErr(fs.MarkHidden(MaintenanceModeFN))
}
// AddForceMaintenanceFlag adds the hidden --force flag, which forces
// maintenance to run; per the usage text, the user is responsible for
// ensuring no concurrent maintenance runs on the same repo.
func AddForceMaintenanceFlag(cmd *cobra.Command) {
	fs := cmd.Flags()

	fs.BoolVar(
		&ForceMaintenanceFV,
		ForceMaintenanceFN,
		false,
		"Force maintenance. Caution: user must ensure this is not run concurrently on a single repo")

	cobra.CheckErr(fs.MarkHidden(ForceMaintenanceFN))
}

60
src/cli/flags/onedrive.go Normal file
View File

@ -0,0 +1,60 @@
package flags
import (
"github.com/spf13/cobra"
)
// Flag names (FN) for drive-item selectors. These are also reused by the
// SharePoint details/restore flags in this package.
const (
	FileFN               = "file"
	FolderFN             = "folder"
	FileCreatedAfterFN   = "file-created-after"
	FileCreatedBeforeFN  = "file-created-before"
	FileModifiedAfterFN  = "file-modified-after"
	FileModifiedBeforeFN = "file-modified-before"
)

// Flag values (FV). Note the naming mismatch: FolderPathFV backs the
// --folder flag and FileNameFV backs the --file flag.
var (
	FolderPathFV         []string
	FileNameFV           []string
	FileCreatedAfterFV   string
	FileCreatedBeforeFV  string
	FileModifiedAfterFV  string
	FileModifiedBeforeFV string
)
// AddOneDriveDetailsAndRestoreFlags adds flags that are common to both the
// details and restore commands.
func AddOneDriveDetailsAndRestoreFlags(cmd *cobra.Command) {
	fs := cmd.Flags()

	fs.StringSliceVar(
		&FolderPathFV,
		FolderFN, nil,
		"Select files by OneDrive folder; defaults to root.")
	fs.StringSliceVar(
		&FileNameFV,
		FileFN, nil,
		"Select files by name.")

	// The four datetime filters share one registration pattern; a table
	// keeps them together while preserving registration order.
	datetimeFlags := []struct {
		fv    *string
		fn    string
		usage string
	}{
		{&FileCreatedAfterFV, FileCreatedAfterFN, "Select files created after this datetime."},
		{&FileCreatedBeforeFV, FileCreatedBeforeFN, "Select files created before this datetime."},
		{&FileModifiedAfterFV, FileModifiedAfterFN, "Select files modified after this datetime."},
		{&FileModifiedBeforeFV, FileModifiedBeforeFN, "Select files modified before this datetime."},
	}

	for _, f := range datetimeFlags {
		fs.StringVar(f.fv, f.fn, "", f.usage)
	}
}

View File

@ -1,65 +1,59 @@
package options package flags
import ( import (
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/alcionai/corso/src/pkg/control"
) )
// Control produces the control options based on the user's flags.
func Control() control.Options {
opt := control.Defaults()
if failFastFV {
opt.FailureHandling = control.FailFast
}
opt.DisableMetrics = noStatsFV
opt.RestorePermissions = restorePermissionsFV
opt.SkipReduce = skipReduceFV
opt.ToggleFeatures.DisableIncrementals = disableIncrementalsFV
opt.ToggleFeatures.DisableDelta = disableDeltaFV
opt.ToggleFeatures.ExchangeImmutableIDs = enableImmutableID
opt.ToggleFeatures.DisableConcurrencyLimiter = disableConcurrencyLimiterFV
opt.Parallelism.ItemFetch = fetchParallelismFV
return opt
}
// ---------------------------------------------------------------------------
// Operations Flags
// ---------------------------------------------------------------------------
const ( const (
FailFastFN = "fail-fast" DisableConcurrencyLimiterFN = "disable-concurrency-limiter"
FetchParallelismFN = "fetch-parallelism"
NoStatsFN = "no-stats"
RestorePermissionsFN = "restore-permissions"
SkipReduceFN = "skip-reduce"
DisableDeltaFN = "disable-delta" DisableDeltaFN = "disable-delta"
DisableIncrementalsFN = "disable-incrementals" DisableIncrementalsFN = "disable-incrementals"
EnableImmutableIDFN = "enable-immutable-id" EnableImmutableIDFN = "enable-immutable-id"
DisableConcurrencyLimiterFN = "disable-concurrency-limiter" FailFastFN = "fail-fast"
FailedItemsFN = "failed-items"
FetchParallelismFN = "fetch-parallelism"
NoStatsFN = "no-stats"
RecoveredErrorsFN = "recovered-errors"
RestorePermissionsFN = "restore-permissions"
RunModeFN = "run-mode"
SkippedItemsFN = "skipped-items"
SkipReduceFN = "skip-reduce"
) )
var ( var (
failFastFV bool DisableConcurrencyLimiterFV bool
fetchParallelismFV int DisableDeltaFV bool
noStatsFV bool DisableIncrementalsFV bool
restorePermissionsFV bool EnableImmutableIDFV bool
skipReduceFV bool FailFastFV bool
FetchParallelismFV int
ListFailedItemsFV string
ListSkippedItemsFV string
ListRecoveredErrorsFV string
NoStatsFV bool
// RunMode describes the type of run, such as:
// flagtest, dry, run. Should default to 'run'.
RunModeFV string
RestorePermissionsFV bool
SkipReduceFV bool
)
// well-known flag values
const (
RunModeFlagTest = "flag-test"
RunModeRun = "run"
) )
// AddGlobalOperationFlags adds the global operations flag set. // AddGlobalOperationFlags adds the global operations flag set.
func AddGlobalOperationFlags(cmd *cobra.Command) { func AddGlobalOperationFlags(cmd *cobra.Command) {
fs := cmd.PersistentFlags() fs := cmd.PersistentFlags()
fs.BoolVar(&noStatsFV, NoStatsFN, false, "disable anonymous usage statistics gathering") fs.BoolVar(&NoStatsFV, NoStatsFN, false, "disable anonymous usage statistics gathering")
} }
// AddFailFastFlag adds a flag to toggle fail-fast error handling behavior. // AddFailFastFlag adds a flag to toggle fail-fast error handling behavior.
func AddFailFastFlag(cmd *cobra.Command) { func AddFailFastFlag(cmd *cobra.Command) {
fs := cmd.Flags() fs := cmd.Flags()
fs.BoolVar(&failFastFV, FailFastFN, false, "stop processing immediately if any error occurs") fs.BoolVar(&FailFastFV, FailFastFN, false, "stop processing immediately if any error occurs")
// TODO: reveal this flag when fail-fast support is implemented // TODO: reveal this flag when fail-fast support is implemented
cobra.CheckErr(fs.MarkHidden(FailFastFN)) cobra.CheckErr(fs.MarkHidden(FailFastFN))
} }
@ -67,14 +61,14 @@ func AddFailFastFlag(cmd *cobra.Command) {
// AddRestorePermissionsFlag adds OneDrive flag for restoring permissions // AddRestorePermissionsFlag adds OneDrive flag for restoring permissions
func AddRestorePermissionsFlag(cmd *cobra.Command) { func AddRestorePermissionsFlag(cmd *cobra.Command) {
fs := cmd.Flags() fs := cmd.Flags()
fs.BoolVar(&restorePermissionsFV, RestorePermissionsFN, false, "Restore permissions for files and folders") fs.BoolVar(&RestorePermissionsFV, RestorePermissionsFN, false, "Restore permissions for files and folders")
} }
// AddSkipReduceFlag adds a hidden flag that allows callers to skip the selector // AddSkipReduceFlag adds a hidden flag that allows callers to skip the selector
// reduction step. Currently only intended for details commands, not restore. // reduction step. Currently only intended for details commands, not restore.
func AddSkipReduceFlag(cmd *cobra.Command) { func AddSkipReduceFlag(cmd *cobra.Command) {
fs := cmd.Flags() fs := cmd.Flags()
fs.BoolVar(&skipReduceFV, SkipReduceFN, false, "Skip the selector reduce filtering") fs.BoolVar(&SkipReduceFV, SkipReduceFN, false, "Skip the selector reduce filtering")
cobra.CheckErr(fs.MarkHidden(SkipReduceFN)) cobra.CheckErr(fs.MarkHidden(SkipReduceFN))
} }
@ -83,28 +77,19 @@ func AddSkipReduceFlag(cmd *cobra.Command) {
func AddFetchParallelismFlag(cmd *cobra.Command) { func AddFetchParallelismFlag(cmd *cobra.Command) {
fs := cmd.Flags() fs := cmd.Flags()
fs.IntVar( fs.IntVar(
&fetchParallelismFV, &FetchParallelismFV,
FetchParallelismFN, FetchParallelismFN,
4, 4,
"Control the number of concurrent data fetches for Exchange. Valid range is [1-4]. Default: 4") "Control the number of concurrent data fetches for Exchange. Valid range is [1-4]. Default: 4")
cobra.CheckErr(fs.MarkHidden(FetchParallelismFN)) cobra.CheckErr(fs.MarkHidden(FetchParallelismFN))
} }
// ---------------------------------------------------------------------------
// Feature Flags
// ---------------------------------------------------------------------------
var (
disableIncrementalsFV bool
disableDeltaFV bool
)
// Adds the hidden '--disable-incrementals' cli flag which, when set, disables // Adds the hidden '--disable-incrementals' cli flag which, when set, disables
// incremental backups. // incremental backups.
func AddDisableIncrementalsFlag(cmd *cobra.Command) { func AddDisableIncrementalsFlag(cmd *cobra.Command) {
fs := cmd.Flags() fs := cmd.Flags()
fs.BoolVar( fs.BoolVar(
&disableIncrementalsFV, &DisableIncrementalsFV,
DisableIncrementalsFN, DisableIncrementalsFN,
false, false,
"Disable incremental data retrieval in backups.") "Disable incremental data retrieval in backups.")
@ -116,38 +101,45 @@ func AddDisableIncrementalsFlag(cmd *cobra.Command) {
func AddDisableDeltaFlag(cmd *cobra.Command) { func AddDisableDeltaFlag(cmd *cobra.Command) {
fs := cmd.Flags() fs := cmd.Flags()
fs.BoolVar( fs.BoolVar(
&disableDeltaFV, &DisableDeltaFV,
DisableDeltaFN, DisableDeltaFN,
false, false,
"Disable delta based data retrieval in backups.") "Disable delta based data retrieval in backups.")
cobra.CheckErr(fs.MarkHidden(DisableDeltaFN)) cobra.CheckErr(fs.MarkHidden(DisableDeltaFN))
} }
var enableImmutableID bool
// Adds the hidden '--enable-immutable-id' cli flag which, when set, enables // Adds the hidden '--enable-immutable-id' cli flag which, when set, enables
// immutable IDs for Exchange // immutable IDs for Exchange
func AddEnableImmutableIDFlag(cmd *cobra.Command) { func AddEnableImmutableIDFlag(cmd *cobra.Command) {
fs := cmd.Flags() fs := cmd.Flags()
fs.BoolVar( fs.BoolVar(
&enableImmutableID, &EnableImmutableIDFV,
EnableImmutableIDFN, EnableImmutableIDFN,
false, false,
"Enable exchange immutable ID.") "Enable exchange immutable ID.")
cobra.CheckErr(fs.MarkHidden(EnableImmutableIDFN)) cobra.CheckErr(fs.MarkHidden(EnableImmutableIDFN))
} }
var disableConcurrencyLimiterFV bool
// AddDisableConcurrencyLimiterFlag adds a hidden cli flag which, when set, // AddDisableConcurrencyLimiterFlag adds a hidden cli flag which, when set,
// removes concurrency limits when communicating with graph API. This // removes concurrency limits when communicating with graph API. This
// flag is only relevant for exchange backups for now // flag is only relevant for exchange backups for now
func AddDisableConcurrencyLimiterFlag(cmd *cobra.Command) { func AddDisableConcurrencyLimiterFlag(cmd *cobra.Command) {
fs := cmd.Flags() fs := cmd.Flags()
fs.BoolVar( fs.BoolVar(
&disableConcurrencyLimiterFV, &DisableConcurrencyLimiterFV,
DisableConcurrencyLimiterFN, DisableConcurrencyLimiterFN,
false, false,
"Disable concurrency limiter middleware. Default: false") "Disable concurrency limiter middleware. Default: false")
cobra.CheckErr(fs.MarkHidden(DisableConcurrencyLimiterFN)) cobra.CheckErr(fs.MarkHidden(DisableConcurrencyLimiterFN))
} }
// AddRunModeFlag adds the hidden --run-mode flag.
func AddRunModeFlag(cmd *cobra.Command, persistent bool) {
fs := cmd.Flags()
if persistent {
fs = cmd.PersistentFlags()
}
fs.StringVar(&RunModeFV, RunModeFN, "run", "What mode to run: dry, test, run. Defaults to run.")
cobra.CheckErr(fs.MarkHidden(RunModeFN))
}

18
src/cli/flags/repo.go Normal file
View File

@ -0,0 +1,18 @@
package flags
import (
"github.com/spf13/cobra"
)
// BackupFN is the name of the --backup flag.
const BackupFN = "backup"

// BackupIDFV holds the --backup flag's value: the ID of the backup to
// operate on.
var BackupIDFV string
// AddBackupIDFlag adds the --backup flag, which identifies the backup to
// operate on; require marks the flag as mandatory.
func AddBackupIDFlag(cmd *cobra.Command, require bool) {
	cmd.Flags().StringVar(&BackupIDFV, BackupFN, "", "ID of the backup to retrieve.")

	if !require {
		return
	}

	cobra.CheckErr(cmd.MarkFlagRequired(BackupFN))
}

113
src/cli/flags/sharepoint.go Normal file
View File

@ -0,0 +1,113 @@
package flags
import (
"github.com/spf13/cobra"
)
// Flag names (FN) for SharePoint-specific selectors.
const (
	LibraryFN    = "library"
	ListFolderFN = "list"
	ListItemFN   = "list-item"
	PageFolderFN = "page-folder"
	PageFN       = "page"
	SiteFN       = "site"    // site only accepts WebURL values
	SiteIDFN     = "site-id" // site-id accepts actual site ids
)

// Flag values (FV). Note the asymmetry: the --site flag (SiteFN)
// populates WebURLFV, while the hidden --site-id flag populates SiteIDFV.
var (
	LibraryFV    string
	ListFolderFV []string
	ListItemFV   []string
	PageFolderFV []string
	PageFV       []string
	SiteIDFV     []string
	WebURLFV     []string
)
// AddSharePointDetailsAndRestoreFlags adds flags that are common to both the
// details and restore commands.
func AddSharePointDetailsAndRestoreFlags(cmd *cobra.Command) {
	fs := cmd.Flags()

	// library and drive-item filters
	fs.StringVar(
		&LibraryFV,
		LibraryFN, "",
		"Select only this library; defaults to all libraries.")
	fs.StringSliceVar(
		&FolderPathFV,
		FolderFN, nil,
		"Select by folder; defaults to root.")
	fs.StringSliceVar(
		&FileNameFV,
		FileFN, nil,
		"Select by file name.")
	fs.StringVar(
		&FileCreatedAfterFV,
		FileCreatedAfterFN, "",
		"Select files created after this datetime.")
	fs.StringVar(
		&FileCreatedBeforeFV,
		FileCreatedBeforeFN, "",
		"Select files created before this datetime.")
	fs.StringVar(
		&FileModifiedAfterFV,
		FileModifiedAfterFN, "",
		"Select files modified after this datetime.")
	fs.StringVar(
		&FileModifiedBeforeFV,
		FileModifiedBeforeFN, "",
		"Select files modified before this datetime.")

	// The list and page filters all follow the same pattern: a hidden
	// string-slice flag. Registration order is preserved.
	addHiddenSlice := func(fv *[]string, fn, usage string) {
		fs.StringSliceVar(fv, fn, nil, usage)
		cobra.CheckErr(fs.MarkHidden(fn))
	}

	addHiddenSlice(&ListFolderFV, ListFolderFN,
		"Select lists by name; accepts '"+Wildcard+"' to select all lists.")
	addHiddenSlice(&ListItemFV, ListItemFN,
		"Select lists by item name; accepts '"+Wildcard+"' to select all lists.")
	addHiddenSlice(&PageFolderFV, PageFolderFN,
		"Select pages by folder name; accepts '"+Wildcard+"' to select all pages.")
	addHiddenSlice(&PageFV, PageFN,
		"Select pages by item name; accepts '"+Wildcard+"' to select all pages.")
}
// AddSiteIDFlag adds the hidden --site-id flag, which accepts site ID
// values. It is hidden because users are expected to prefer the --site
// url flag, and exposing both would invite confusion.
func AddSiteIDFlag(cmd *cobra.Command) {
	fs := cmd.Flags()

	// A string ARRAY flag is used here (not a slice flag): site IDs
	// naturally contain commas, so comma-delimited values inside a single
	// flag declaration cannot be parsed. Users must repeat the flag per
	// value, eg: --site-id a --site-id b --site-id c.
	fs.StringArrayVar(
		&SiteIDFV,
		SiteIDFN, nil,
		//nolint:lll
		"Backup data by site ID; accepts '"+Wildcard+"' to select all sites. Args cannot be comma-delimited and must use multiple flags.")

	cobra.CheckErr(fs.MarkHidden(SiteIDFN))
}
// AddSiteFlag adds the --site flag, which accepts webURL values.
func AddSiteFlag(cmd *cobra.Command) {
	fs := cmd.Flags()

	// note: --site populates WebURLFV; SiteIDFV is fed by the hidden
	// --site-id flag instead.
	fs.StringSliceVar(
		&WebURLFV,
		SiteFN, nil,
		"Backup data by site URL; accepts '"+Wildcard+"' to select all sites.")
}

View File

@ -1,66 +0,0 @@
package options
import (
"testing"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
)
type OptionsUnitSuite struct {
tester.Suite
}
func TestOptionsUnitSuite(t *testing.T) {
suite.Run(t, &OptionsUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestAddExchangeCommands verifies that each flag-registration helper both
// installs its flag and parses the matching CLI argument into the expected
// flag value.
func (suite *OptionsUnitSuite) TestAddExchangeCommands() {
	t := suite.T()

	// The Run hook fires only after cobra has parsed the args, so every
	// flag value should already be populated when these assertions execute.
	testCmd := &cobra.Command{
		Use: "test",
		Run: func(_ *cobra.Command, _ []string) {
			assert.True(t, failFastFV, FailFastFN)
			assert.True(t, disableIncrementalsFV, DisableIncrementalsFN)
			assert.True(t, disableDeltaFV, DisableDeltaFN)
			assert.True(t, noStatsFV, NoStatsFN)
			assert.True(t, restorePermissionsFV, RestorePermissionsFN)
			assert.True(t, skipReduceFV, SkipReduceFN)
			assert.Equal(t, 2, fetchParallelismFV, FetchParallelismFN)
			assert.True(t, disableConcurrencyLimiterFV, DisableConcurrencyLimiterFN)
		},
	}

	// AddGlobalOperationFlags registers --no-stats (among others).
	AddGlobalOperationFlags(testCmd)
	AddFailFastFlag(testCmd)
	AddDisableIncrementalsFlag(testCmd)
	AddDisableDeltaFlag(testCmd)
	AddRestorePermissionsFlag(testCmd)
	AddSkipReduceFlag(testCmd)
	AddFetchParallelismFlag(testCmd)
	AddDisableConcurrencyLimiterFlag(testCmd)

	// Exercise arg parsing for a representative set of the flags above.
	args := []string{
		"test",
		"--" + FailFastFN,
		"--" + DisableIncrementalsFN,
		"--" + DisableDeltaFN,
		"--" + NoStatsFN,
		"--" + RestorePermissionsFN,
		"--" + SkipReduceFN,
		"--" + FetchParallelismFN, "2",
		"--" + DisableConcurrencyLimiterFN,
	}
	testCmd.SetArgs(args)

	err := testCmd.Execute()
	require.NoError(t, err, clues.ToCore(err))
}

View File

@ -7,6 +7,7 @@ import (
"github.com/spf13/cobra" "github.com/spf13/cobra"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/control/repository" "github.com/alcionai/corso/src/pkg/control/repository"
@ -42,8 +43,8 @@ func AddCommands(cmd *cobra.Command) {
maintenanceCmd, maintenanceCmd,
utils.HideCommand(), utils.HideCommand(),
utils.MarkPreReleaseCommand()) utils.MarkPreReleaseCommand())
utils.AddMaintenanceModeFlag(maintenanceCmd) flags.AddMaintenanceModeFlag(maintenanceCmd)
utils.AddForceMaintenanceFlag(maintenanceCmd) flags.AddForceMaintenanceFlag(maintenanceCmd)
for _, addRepoTo := range repoCommands { for _, addRepoTo := range repoCommands {
addRepoTo(initCmd) addRepoTo(initCmd)
@ -116,7 +117,7 @@ func maintenanceCmd() *cobra.Command {
func handleMaintenanceCmd(cmd *cobra.Command, args []string) error { func handleMaintenanceCmd(cmd *cobra.Command, args []string) error {
ctx := cmd.Context() ctx := cmd.Context()
t, err := getMaintenanceType(utils.MaintenanceModeFV) t, err := getMaintenanceType(flags.MaintenanceModeFV)
if err != nil { if err != nil {
return err return err
} }
@ -133,7 +134,7 @@ func handleMaintenanceCmd(cmd *cobra.Command, args []string) error {
repository.Maintenance{ repository.Maintenance{
Type: t, Type: t,
Safety: repository.FullMaintenanceSafety, Safety: repository.FullMaintenanceSafety,
Force: utils.ForceMaintenanceFV, Force: flags.ForceMaintenanceFV,
}) })
if err != nil { if err != nil {
return print.Only(ctx, err) return print.Only(ctx, err)

View File

@ -10,7 +10,6 @@ import (
"github.com/spf13/pflag" "github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/config" "github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/options"
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/events" "github.com/alcionai/corso/src/internal/events"
@ -124,7 +123,7 @@ func initS3Cmd(cmd *cobra.Command, args []string) error {
cfg.Account.ID(), cfg.Account.ID(),
map[string]any{"command": "init repo"}, map[string]any{"command": "init repo"},
cfg.Account.ID(), cfg.Account.ID(),
options.Control()) utils.Control())
s3Cfg, err := cfg.Storage.S3Config() s3Cfg, err := cfg.Storage.S3Config()
if err != nil { if err != nil {
@ -143,7 +142,7 @@ func initS3Cmd(cmd *cobra.Command, args []string) error {
return Only(ctx, clues.Wrap(err, "Failed to parse m365 account config")) return Only(ctx, clues.Wrap(err, "Failed to parse m365 account config"))
} }
r, err := repository.Initialize(ctx, cfg.Account, cfg.Storage, options.Control()) r, err := repository.Initialize(ctx, cfg.Account, cfg.Storage, utils.Control())
if err != nil { if err != nil {
if succeedIfExists && errors.Is(err, repository.ErrorRepoAlreadyExists) { if succeedIfExists && errors.Is(err, repository.ErrorRepoAlreadyExists) {
return nil return nil
@ -214,7 +213,7 @@ func connectS3Cmd(cmd *cobra.Command, args []string) error {
return Only(ctx, clues.New(invalidEndpointErr)) return Only(ctx, clues.New(invalidEndpointErr))
} }
r, err := repository.ConnectAndSendConnectEvent(ctx, cfg.Account, cfg.Storage, repoID, options.Control()) r, err := repository.ConnectAndSendConnectEvent(ctx, cfg.Account, cfg.Storage, repoID, utils.Control())
if err != nil { if err != nil {
return Only(ctx, clues.Wrap(err, "Failed to connect to the S3 repository")) return Only(ctx, clues.Wrap(err, "Failed to connect to the S3 repository"))
} }

View File

@ -6,7 +6,7 @@ import (
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/pflag" "github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/options" "github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/internal/common/dttm"
@ -32,9 +32,9 @@ func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
// general flags // general flags
fs.SortFlags = false fs.SortFlags = false
utils.AddBackupIDFlag(c, true) flags.AddBackupIDFlag(c, true)
utils.AddExchangeDetailsAndRestoreFlags(c) flags.AddExchangeDetailsAndRestoreFlags(c)
options.AddFailFastFlag(c) flags.AddFailFastFlag(c)
} }
return c return c
@ -81,11 +81,11 @@ func restoreExchangeCmd(cmd *cobra.Command, args []string) error {
opts := utils.MakeExchangeOpts(cmd) opts := utils.MakeExchangeOpts(cmd)
if utils.RunModeFV == utils.RunModeFlagTest { if flags.RunModeFV == flags.RunModeFlagTest {
return nil return nil
} }
if err := utils.ValidateExchangeRestoreFlags(utils.BackupIDFV, opts); err != nil { if err := utils.ValidateExchangeRestoreFlags(flags.BackupIDFV, opts); err != nil {
return err return err
} }
@ -102,7 +102,7 @@ func restoreExchangeCmd(cmd *cobra.Command, args []string) error {
sel := utils.IncludeExchangeRestoreDataSelectors(opts) sel := utils.IncludeExchangeRestoreDataSelectors(opts)
utils.FilterExchangeRestoreInfoSelectors(sel, opts) utils.FilterExchangeRestoreInfoSelectors(sel, opts)
ro, err := r.NewRestore(ctx, utils.BackupIDFV, sel.Selector, restoreCfg) ro, err := r.NewRestore(ctx, flags.BackupIDFV, sel.Selector, restoreCfg)
if err != nil { if err != nil {
return Only(ctx, clues.Wrap(err, "Failed to initialize Exchange restore")) return Only(ctx, clues.Wrap(err, "Failed to initialize Exchange restore"))
} }
@ -110,7 +110,7 @@ func restoreExchangeCmd(cmd *cobra.Command, args []string) error {
ds, err := ro.Run(ctx) ds, err := ro.Run(ctx)
if err != nil { if err != nil {
if errors.Is(err, data.ErrNotFound) { if errors.Is(err, data.ErrNotFound) {
return Only(ctx, clues.New("Backup or backup details missing for id "+utils.BackupIDFV)) return Only(ctx, clues.New("Backup or backup details missing for id "+flags.BackupIDFV))
} }
return Only(ctx, clues.Wrap(err, "Failed to run Exchange restore")) return Only(ctx, clues.Wrap(err, "Failed to run Exchange restore"))

View File

@ -12,7 +12,7 @@ import (
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config" "github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/m365/exchange" "github.com/alcionai/corso/src/internal/m365/exchange"
"github.com/alcionai/corso/src/internal/operations" "github.com/alcionai/corso/src/internal/operations"
@ -135,7 +135,7 @@ func (suite *RestoreExchangeE2ESuite) TestExchangeRestoreCmd() {
cmd := tester.StubRootCmd( cmd := tester.StubRootCmd(
"restore", "exchange", "restore", "exchange",
"--config-file", suite.cfgFP, "--config-file", suite.cfgFP,
"--"+utils.BackupFN, string(suite.backupOps[set].Results.BackupID)) "--"+flags.BackupFN, string(suite.backupOps[set].Results.BackupID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
@ -162,15 +162,15 @@ func (suite *RestoreExchangeE2ESuite) TestExchangeRestoreCmd_badTimeFlags() {
var timeFilter string var timeFilter string
switch set { switch set {
case email: case email:
timeFilter = "--" + utils.EmailReceivedAfterFN timeFilter = "--" + flags.EmailReceivedAfterFN
case events: case events:
timeFilter = "--" + utils.EventStartsAfterFN timeFilter = "--" + flags.EventStartsAfterFN
} }
cmd := tester.StubRootCmd( cmd := tester.StubRootCmd(
"restore", "exchange", "restore", "exchange",
"--config-file", suite.cfgFP, "--config-file", suite.cfgFP,
"--"+utils.BackupFN, string(suite.backupOps[set].Results.BackupID), "--"+flags.BackupFN, string(suite.backupOps[set].Results.BackupID),
timeFilter, "smarf") timeFilter, "smarf")
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -198,13 +198,13 @@ func (suite *RestoreExchangeE2ESuite) TestExchangeRestoreCmd_badBoolFlags() {
var timeFilter string var timeFilter string
switch set { switch set {
case events: case events:
timeFilter = "--" + utils.EventRecursFN timeFilter = "--" + flags.EventRecursFN
} }
cmd := tester.StubRootCmd( cmd := tester.StubRootCmd(
"restore", "exchange", "restore", "exchange",
"--config-file", suite.cfgFP, "--config-file", suite.cfgFP,
"--"+utils.BackupFN, string(suite.backupOps[set].Results.BackupID), "--"+flags.BackupFN, string(suite.backupOps[set].Results.BackupID),
timeFilter, "wingbat") timeFilter, "wingbat")
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)

View File

@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/cli/utils/testdata" "github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -43,7 +44,7 @@ func (suite *ExchangeUnitSuite) TestAddExchangeCommands() {
// normally a persistent flag from the root. // normally a persistent flag from the root.
// required to ensure a dry run. // required to ensure a dry run.
utils.AddRunModeFlag(cmd, true) flags.AddRunModeFlag(cmd, true)
c := addExchangeCommands(cmd) c := addExchangeCommands(cmd)
require.NotNil(t, c) require.NotNil(t, c)
@ -59,27 +60,24 @@ func (suite *ExchangeUnitSuite) TestAddExchangeCommands() {
// Test arg parsing for few args // Test arg parsing for few args
cmd.SetArgs([]string{ cmd.SetArgs([]string{
"exchange", "exchange",
"--" + utils.RunModeFN, utils.RunModeFlagTest, "--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + utils.BackupFN, testdata.BackupInput, "--" + flags.BackupFN, testdata.BackupInput,
"--" + flags.ContactFN, testdata.FlgInputs(testdata.ContactInput),
"--" + utils.ContactFN, testdata.FlgInputs(testdata.ContactInput), "--" + flags.ContactFolderFN, testdata.FlgInputs(testdata.ContactFldInput),
"--" + utils.ContactFolderFN, testdata.FlgInputs(testdata.ContactFldInput), "--" + flags.ContactNameFN, testdata.ContactNameInput,
"--" + utils.ContactNameFN, testdata.ContactNameInput, "--" + flags.EmailFN, testdata.FlgInputs(testdata.EmailInput),
"--" + flags.EmailFolderFN, testdata.FlgInputs(testdata.EmailFldInput),
"--" + utils.EmailFN, testdata.FlgInputs(testdata.EmailInput), "--" + flags.EmailReceivedAfterFN, testdata.EmailReceivedAfterInput,
"--" + utils.EmailFolderFN, testdata.FlgInputs(testdata.EmailFldInput), "--" + flags.EmailReceivedBeforeFN, testdata.EmailReceivedBeforeInput,
"--" + utils.EmailReceivedAfterFN, testdata.EmailReceivedAfterInput, "--" + flags.EmailSenderFN, testdata.EmailSenderInput,
"--" + utils.EmailReceivedBeforeFN, testdata.EmailReceivedBeforeInput, "--" + flags.EmailSubjectFN, testdata.EmailSubjectInput,
"--" + utils.EmailSenderFN, testdata.EmailSenderInput, "--" + flags.EventFN, testdata.FlgInputs(testdata.EventInput),
"--" + utils.EmailSubjectFN, testdata.EmailSubjectInput, "--" + flags.EventCalendarFN, testdata.FlgInputs(testdata.EventCalInput),
"--" + flags.EventOrganizerFN, testdata.EventOrganizerInput,
"--" + utils.EventFN, testdata.FlgInputs(testdata.EventInput), "--" + flags.EventRecursFN, testdata.EventRecursInput,
"--" + utils.EventCalendarFN, testdata.FlgInputs(testdata.EventCalInput), "--" + flags.EventStartsAfterFN, testdata.EventStartsAfterInput,
"--" + utils.EventOrganizerFN, testdata.EventOrganizerInput, "--" + flags.EventStartsBeforeFN, testdata.EventStartsBeforeInput,
"--" + utils.EventRecursFN, testdata.EventRecursInput, "--" + flags.EventSubjectFN, testdata.EventSubjectInput,
"--" + utils.EventStartsAfterFN, testdata.EventStartsAfterInput,
"--" + utils.EventStartsBeforeFN, testdata.EventStartsBeforeInput,
"--" + utils.EventSubjectFN, testdata.EventSubjectInput,
}) })
cmd.SetOut(new(bytes.Buffer)) // drop output cmd.SetOut(new(bytes.Buffer)) // drop output
@ -88,7 +86,7 @@ func (suite *ExchangeUnitSuite) TestAddExchangeCommands() {
assert.NoError(t, err, clues.ToCore(err)) assert.NoError(t, err, clues.ToCore(err))
opts := utils.MakeExchangeOpts(cmd) opts := utils.MakeExchangeOpts(cmd)
assert.Equal(t, testdata.BackupInput, utils.BackupIDFV) assert.Equal(t, testdata.BackupInput, flags.BackupIDFV)
assert.ElementsMatch(t, testdata.ContactInput, opts.Contact) assert.ElementsMatch(t, testdata.ContactInput, opts.Contact)
assert.ElementsMatch(t, testdata.ContactFldInput, opts.ContactFolder) assert.ElementsMatch(t, testdata.ContactFldInput, opts.ContactFolder)

View File

@ -6,7 +6,7 @@ import (
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/pflag" "github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/options" "github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/internal/common/dttm"
@ -31,12 +31,10 @@ func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
// More generic (ex: --user) and more frequently used flags take precedence. // More generic (ex: --user) and more frequently used flags take precedence.
fs.SortFlags = false fs.SortFlags = false
utils.AddBackupIDFlag(c, true) flags.AddBackupIDFlag(c, true)
utils.AddOneDriveDetailsAndRestoreFlags(c) flags.AddOneDriveDetailsAndRestoreFlags(c)
flags.AddRestorePermissionsFlag(c)
// restore permissions flags.AddFailFastFlag(c)
options.AddRestorePermissionsFlag(c)
options.AddFailFastFlag(c)
} }
return c return c
@ -82,11 +80,11 @@ func restoreOneDriveCmd(cmd *cobra.Command, args []string) error {
opts := utils.MakeOneDriveOpts(cmd) opts := utils.MakeOneDriveOpts(cmd)
if utils.RunModeFV == utils.RunModeFlagTest { if flags.RunModeFV == flags.RunModeFlagTest {
return nil return nil
} }
if err := utils.ValidateOneDriveRestoreFlags(utils.BackupIDFV, opts); err != nil { if err := utils.ValidateOneDriveRestoreFlags(flags.BackupIDFV, opts); err != nil {
return err return err
} }
@ -103,7 +101,7 @@ func restoreOneDriveCmd(cmd *cobra.Command, args []string) error {
sel := utils.IncludeOneDriveRestoreDataSelectors(opts) sel := utils.IncludeOneDriveRestoreDataSelectors(opts)
utils.FilterOneDriveRestoreInfoSelectors(sel, opts) utils.FilterOneDriveRestoreInfoSelectors(sel, opts)
ro, err := r.NewRestore(ctx, utils.BackupIDFV, sel.Selector, restoreCfg) ro, err := r.NewRestore(ctx, flags.BackupIDFV, sel.Selector, restoreCfg)
if err != nil { if err != nil {
return Only(ctx, clues.Wrap(err, "Failed to initialize OneDrive restore")) return Only(ctx, clues.Wrap(err, "Failed to initialize OneDrive restore"))
} }
@ -111,7 +109,7 @@ func restoreOneDriveCmd(cmd *cobra.Command, args []string) error {
ds, err := ro.Run(ctx) ds, err := ro.Run(ctx)
if err != nil { if err != nil {
if errors.Is(err, data.ErrNotFound) { if errors.Is(err, data.ErrNotFound) {
return Only(ctx, clues.New("Backup or backup details missing for id "+utils.BackupIDFV)) return Only(ctx, clues.New("Backup or backup details missing for id "+flags.BackupIDFV))
} }
return Only(ctx, clues.Wrap(err, "Failed to run OneDrive restore")) return Only(ctx, clues.Wrap(err, "Failed to run OneDrive restore"))

View File

@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/cli/utils/testdata" "github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -43,7 +44,7 @@ func (suite *OneDriveUnitSuite) TestAddOneDriveCommands() {
// normally a persistent flag from the root. // normally a persistent flag from the root.
// required to ensure a dry run. // required to ensure a dry run.
utils.AddRunModeFlag(cmd, true) flags.AddRunModeFlag(cmd, true)
c := addOneDriveCommands(cmd) c := addOneDriveCommands(cmd)
require.NotNil(t, c) require.NotNil(t, c)
@ -58,15 +59,14 @@ func (suite *OneDriveUnitSuite) TestAddOneDriveCommands() {
cmd.SetArgs([]string{ cmd.SetArgs([]string{
"onedrive", "onedrive",
"--" + utils.RunModeFN, utils.RunModeFlagTest, "--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + utils.BackupFN, testdata.BackupInput, "--" + flags.BackupFN, testdata.BackupInput,
"--" + flags.FileFN, testdata.FlgInputs(testdata.FileNameInput),
"--" + utils.FileFN, testdata.FlgInputs(testdata.FileNameInput), "--" + flags.FolderFN, testdata.FlgInputs(testdata.FolderPathInput),
"--" + utils.FolderFN, testdata.FlgInputs(testdata.FolderPathInput), "--" + flags.FileCreatedAfterFN, testdata.FileCreatedAfterInput,
"--" + utils.FileCreatedAfterFN, testdata.FileCreatedAfterInput, "--" + flags.FileCreatedBeforeFN, testdata.FileCreatedBeforeInput,
"--" + utils.FileCreatedBeforeFN, testdata.FileCreatedBeforeInput, "--" + flags.FileModifiedAfterFN, testdata.FileModifiedAfterInput,
"--" + utils.FileModifiedAfterFN, testdata.FileModifiedAfterInput, "--" + flags.FileModifiedBeforeFN, testdata.FileModifiedBeforeInput,
"--" + utils.FileModifiedBeforeFN, testdata.FileModifiedBeforeInput,
}) })
cmd.SetOut(new(bytes.Buffer)) // drop output cmd.SetOut(new(bytes.Buffer)) // drop output
@ -75,7 +75,7 @@ func (suite *OneDriveUnitSuite) TestAddOneDriveCommands() {
assert.NoError(t, err, clues.ToCore(err)) assert.NoError(t, err, clues.ToCore(err))
opts := utils.MakeOneDriveOpts(cmd) opts := utils.MakeOneDriveOpts(cmd)
assert.Equal(t, testdata.BackupInput, utils.BackupIDFV) assert.Equal(t, testdata.BackupInput, flags.BackupIDFV)
assert.ElementsMatch(t, testdata.FileNameInput, opts.FileName) assert.ElementsMatch(t, testdata.FileNameInput, opts.FileName)
assert.ElementsMatch(t, testdata.FolderPathInput, opts.FolderPath) assert.ElementsMatch(t, testdata.FolderPathInput, opts.FolderPath)

View File

@ -6,7 +6,7 @@ import (
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/pflag" "github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/options" "github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/internal/common/dttm"
@ -31,11 +31,10 @@ func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
// More generic (ex: --site) and more frequently used flags take precedence. // More generic (ex: --site) and more frequently used flags take precedence.
fs.SortFlags = false fs.SortFlags = false
utils.AddBackupIDFlag(c, true) flags.AddBackupIDFlag(c, true)
utils.AddSharePointDetailsAndRestoreFlags(c) flags.AddSharePointDetailsAndRestoreFlags(c)
flags.AddRestorePermissionsFlag(c)
options.AddRestorePermissionsFlag(c) flags.AddFailFastFlag(c)
options.AddFailFastFlag(c)
} }
return c return c
@ -87,11 +86,11 @@ func restoreSharePointCmd(cmd *cobra.Command, args []string) error {
opts := utils.MakeSharePointOpts(cmd) opts := utils.MakeSharePointOpts(cmd)
if utils.RunModeFV == utils.RunModeFlagTest { if flags.RunModeFV == flags.RunModeFlagTest {
return nil return nil
} }
if err := utils.ValidateSharePointRestoreFlags(utils.BackupIDFV, opts); err != nil { if err := utils.ValidateSharePointRestoreFlags(flags.BackupIDFV, opts); err != nil {
return err return err
} }
@ -108,7 +107,7 @@ func restoreSharePointCmd(cmd *cobra.Command, args []string) error {
sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts) sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts)
utils.FilterSharePointRestoreInfoSelectors(sel, opts) utils.FilterSharePointRestoreInfoSelectors(sel, opts)
ro, err := r.NewRestore(ctx, utils.BackupIDFV, sel.Selector, restoreCfg) ro, err := r.NewRestore(ctx, flags.BackupIDFV, sel.Selector, restoreCfg)
if err != nil { if err != nil {
return Only(ctx, clues.Wrap(err, "Failed to initialize SharePoint restore")) return Only(ctx, clues.Wrap(err, "Failed to initialize SharePoint restore"))
} }
@ -116,7 +115,7 @@ func restoreSharePointCmd(cmd *cobra.Command, args []string) error {
ds, err := ro.Run(ctx) ds, err := ro.Run(ctx)
if err != nil { if err != nil {
if errors.Is(err, data.ErrNotFound) { if errors.Is(err, data.ErrNotFound) {
return Only(ctx, clues.New("Backup or backup details missing for id "+utils.BackupIDFV)) return Only(ctx, clues.New("Backup or backup details missing for id "+flags.BackupIDFV))
} }
return Only(ctx, clues.Wrap(err, "Failed to run SharePoint restore")) return Only(ctx, clues.Wrap(err, "Failed to run SharePoint restore"))

View File

@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/cli/utils/testdata" "github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -43,7 +44,7 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
// normally a persistent flag from the root. // normally a persistent flag from the root.
// required to ensure a dry run. // required to ensure a dry run.
utils.AddRunModeFlag(cmd, true) flags.AddRunModeFlag(cmd, true)
c := addSharePointCommands(cmd) c := addSharePointCommands(cmd)
require.NotNil(t, c) require.NotNil(t, c)
@ -58,22 +59,19 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
cmd.SetArgs([]string{ cmd.SetArgs([]string{
"sharepoint", "sharepoint",
"--" + utils.RunModeFN, utils.RunModeFlagTest, "--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + utils.BackupFN, testdata.BackupInput, "--" + flags.BackupFN, testdata.BackupInput,
"--" + flags.LibraryFN, testdata.LibraryInput,
"--" + utils.LibraryFN, testdata.LibraryInput, "--" + flags.FileFN, testdata.FlgInputs(testdata.FileNameInput),
"--" + utils.FileFN, testdata.FlgInputs(testdata.FileNameInput), "--" + flags.FolderFN, testdata.FlgInputs(testdata.FolderPathInput),
"--" + utils.FolderFN, testdata.FlgInputs(testdata.FolderPathInput), "--" + flags.FileCreatedAfterFN, testdata.FileCreatedAfterInput,
"--" + utils.FileCreatedAfterFN, testdata.FileCreatedAfterInput, "--" + flags.FileCreatedBeforeFN, testdata.FileCreatedBeforeInput,
"--" + utils.FileCreatedBeforeFN, testdata.FileCreatedBeforeInput, "--" + flags.FileModifiedAfterFN, testdata.FileModifiedAfterInput,
"--" + utils.FileModifiedAfterFN, testdata.FileModifiedAfterInput, "--" + flags.FileModifiedBeforeFN, testdata.FileModifiedBeforeInput,
"--" + utils.FileModifiedBeforeFN, testdata.FileModifiedBeforeInput, "--" + flags.ListItemFN, testdata.FlgInputs(testdata.ListItemInput),
"--" + flags.ListFolderFN, testdata.FlgInputs(testdata.ListFolderInput),
"--" + utils.ListItemFN, testdata.FlgInputs(testdata.ListItemInput), "--" + flags.PageFN, testdata.FlgInputs(testdata.PageInput),
"--" + utils.ListFolderFN, testdata.FlgInputs(testdata.ListFolderInput), "--" + flags.PageFolderFN, testdata.FlgInputs(testdata.PageFolderInput),
"--" + utils.PageFN, testdata.FlgInputs(testdata.PageInput),
"--" + utils.PageFolderFN, testdata.FlgInputs(testdata.PageFolderInput),
}) })
cmd.SetOut(new(bytes.Buffer)) // drop output cmd.SetOut(new(bytes.Buffer)) // drop output
@ -82,7 +80,7 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
assert.NoError(t, err, clues.ToCore(err)) assert.NoError(t, err, clues.ToCore(err))
opts := utils.MakeSharePointOpts(cmd) opts := utils.MakeSharePointOpts(cmd)
assert.Equal(t, testdata.BackupInput, utils.BackupIDFV) assert.Equal(t, testdata.BackupInput, flags.BackupIDFV)
assert.Equal(t, testdata.LibraryInput, opts.Library) assert.Equal(t, testdata.LibraryInput, opts.Library)
assert.ElementsMatch(t, testdata.FileNameInput, opts.FileName) assert.ElementsMatch(t, testdata.FileNameInput, opts.FileName)

View File

@ -4,53 +4,10 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
// flag names (id: FN)
const (
ContactFN = "contact"
ContactFolderFN = "contact-folder"
ContactNameFN = "contact-name"
EmailFN = "email"
EmailFolderFN = "email-folder"
EmailReceivedAfterFN = "email-received-after"
EmailReceivedBeforeFN = "email-received-before"
EmailSenderFN = "email-sender"
EmailSubjectFN = "email-subject"
EventFN = "event"
EventCalendarFN = "event-calendar"
EventOrganizerFN = "event-organizer"
EventRecursFN = "event-recurs"
EventStartsAfterFN = "event-starts-after"
EventStartsBeforeFN = "event-starts-before"
EventSubjectFN = "event-subject"
)
// flag values (ie: FV)
var (
ContactFV []string
ContactFolderFV []string
ContactNameFV string
EmailFV []string
EmailFolderFV []string
EmailReceivedAfterFV string
EmailReceivedBeforeFV string
EmailSenderFV string
EmailSubjectFV string
EventFV []string
EventCalendarFV []string
EventOrganizerFV string
EventRecursFV string
EventStartsAfterFV string
EventStartsBeforeFV string
EventSubjectFV string
)
type ExchangeOpts struct { type ExchangeOpts struct {
Users []string Users []string
@ -73,113 +30,37 @@ type ExchangeOpts struct {
EventStartsBefore string EventStartsBefore string
EventSubject string EventSubject string
Populated PopulatedFlags Populated flags.PopulatedFlags
} }
// populates an ExchangeOpts struct with the command's current flags. // populates an ExchangeOpts struct with the command's current flags.
func MakeExchangeOpts(cmd *cobra.Command) ExchangeOpts { func MakeExchangeOpts(cmd *cobra.Command) ExchangeOpts {
return ExchangeOpts{ return ExchangeOpts{
Users: UserFV, Users: flags.UserFV,
Contact: ContactFV, Contact: flags.ContactFV,
ContactFolder: ContactFolderFV, ContactFolder: flags.ContactFolderFV,
ContactName: ContactNameFV, ContactName: flags.ContactNameFV,
Email: EmailFV, Email: flags.EmailFV,
EmailFolder: EmailFolderFV, EmailFolder: flags.EmailFolderFV,
EmailReceivedAfter: EmailReceivedAfterFV, EmailReceivedAfter: flags.EmailReceivedAfterFV,
EmailReceivedBefore: EmailReceivedBeforeFV, EmailReceivedBefore: flags.EmailReceivedBeforeFV,
EmailSender: EmailSenderFV, EmailSender: flags.EmailSenderFV,
EmailSubject: EmailSubjectFV, EmailSubject: flags.EmailSubjectFV,
Event: EventFV, Event: flags.EventFV,
EventCalendar: EventCalendarFV, EventCalendar: flags.EventCalendarFV,
EventOrganizer: EventOrganizerFV, EventOrganizer: flags.EventOrganizerFV,
EventRecurs: EventRecursFV, EventRecurs: flags.EventRecursFV,
EventStartsAfter: EventStartsAfterFV, EventStartsAfter: flags.EventStartsAfterFV,
EventStartsBefore: EventStartsBeforeFV, EventStartsBefore: flags.EventStartsBeforeFV,
EventSubject: EventSubjectFV, EventSubject: flags.EventSubjectFV,
Populated: GetPopulatedFlags(cmd), Populated: flags.GetPopulatedFlags(cmd),
} }
} }
// AddExchangeDetailsAndRestoreFlags adds flags that are common to both the
// details and restore commands.
func AddExchangeDetailsAndRestoreFlags(cmd *cobra.Command) {
fs := cmd.Flags()
// email flags
fs.StringSliceVar(
&EmailFV,
EmailFN, nil,
"Select email messages by ID; accepts '"+Wildcard+"' to select all emails.")
fs.StringSliceVar(
&EmailFolderFV,
EmailFolderFN, nil,
"Select emails within a folder; accepts '"+Wildcard+"' to select all email folders.")
fs.StringVar(
&EmailSubjectFV,
EmailSubjectFN, "",
"Select emails with a subject containing this value.")
fs.StringVar(
&EmailSenderFV,
EmailSenderFN, "",
"Select emails from a specific sender.")
fs.StringVar(
&EmailReceivedAfterFV,
EmailReceivedAfterFN, "",
"Select emails received after this datetime.")
fs.StringVar(
&EmailReceivedBeforeFV,
EmailReceivedBeforeFN, "",
"Select emails received before this datetime.")
// event flags
fs.StringSliceVar(
&EventFV,
EventFN, nil,
"Select events by event ID; accepts '"+Wildcard+"' to select all events.")
fs.StringSliceVar(
&EventCalendarFV,
EventCalendarFN, nil,
"Select events under a calendar; accepts '"+Wildcard+"' to select all events.")
fs.StringVar(
&EventSubjectFV,
EventSubjectFN, "",
"Select events with a subject containing this value.")
fs.StringVar(
&EventOrganizerFV,
EventOrganizerFN, "",
"Select events from a specific organizer.")
fs.StringVar(
&EventRecursFV,
EventRecursFN, "",
"Select recurring events. Use `--event-recurs false` to select non-recurring events.")
fs.StringVar(
&EventStartsAfterFV,
EventStartsAfterFN, "",
"Select events starting after this datetime.")
fs.StringVar(
&EventStartsBeforeFV,
EventStartsBeforeFN, "",
"Select events starting before this datetime.")
// contact flags
fs.StringSliceVar(
&ContactFV,
ContactFN, nil,
"Select contacts by contact ID; accepts '"+Wildcard+"' to select all contacts.")
fs.StringSliceVar(
&ContactFolderFV,
ContactFolderFN, nil,
"Select contacts within a folder; accepts '"+Wildcard+"' to select all contact folders.")
fs.StringVar(
&ContactNameFV,
ContactNameFN, "",
"Select contacts whose contact name contains this value.")
}
// AddExchangeInclude adds the scope of the provided values to the selector's // AddExchangeInclude adds the scope of the provided values to the selector's
// inclusion set. Any unpopulated slice will be replaced with selectors.Any() // inclusion set. Any unpopulated slice will be replaced with selectors.Any()
// to act as a wildcard. // to act as a wildcard.
@ -231,23 +112,23 @@ func ValidateExchangeRestoreFlags(backupID string, opts ExchangeOpts) error {
return clues.New("a backup ID is required") return clues.New("a backup ID is required")
} }
if _, ok := opts.Populated[EmailReceivedAfterFN]; ok && !IsValidTimeFormat(opts.EmailReceivedAfter) { if _, ok := opts.Populated[flags.EmailReceivedAfterFN]; ok && !IsValidTimeFormat(opts.EmailReceivedAfter) {
return clues.New("invalid time format for email-received-after") return clues.New("invalid time format for email-received-after")
} }
if _, ok := opts.Populated[EmailReceivedBeforeFN]; ok && !IsValidTimeFormat(opts.EmailReceivedBefore) { if _, ok := opts.Populated[flags.EmailReceivedBeforeFN]; ok && !IsValidTimeFormat(opts.EmailReceivedBefore) {
return clues.New("invalid time format for email-received-before") return clues.New("invalid time format for email-received-before")
} }
if _, ok := opts.Populated[EventStartsAfterFN]; ok && !IsValidTimeFormat(opts.EventStartsAfter) { if _, ok := opts.Populated[flags.EventStartsAfterFN]; ok && !IsValidTimeFormat(opts.EventStartsAfter) {
return clues.New("invalid time format for event-starts-after") return clues.New("invalid time format for event-starts-after")
} }
if _, ok := opts.Populated[EventStartsBeforeFN]; ok && !IsValidTimeFormat(opts.EventStartsBefore) { if _, ok := opts.Populated[flags.EventStartsBeforeFN]; ok && !IsValidTimeFormat(opts.EventStartsBefore) {
return clues.New("invalid time format for event-starts-before") return clues.New("invalid time format for event-starts-before")
} }
if _, ok := opts.Populated[EventRecursFN]; ok && !IsValidBool(opts.EventRecurs) { if _, ok := opts.Populated[flags.EventRecursFN]; ok && !IsValidBool(opts.EventRecurs) {
return clues.New("invalid format for event-recurs") return clues.New("invalid format for event-recurs")
} }

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -62,7 +63,7 @@ func (suite *ExchangeUtilsSuite) TestValidateRestoreFlags() {
func (suite *ExchangeUtilsSuite) TestIncludeExchangeRestoreDataSelectors() { func (suite *ExchangeUtilsSuite) TestIncludeExchangeRestoreDataSelectors() {
stub := []string{"id-stub"} stub := []string{"id-stub"}
many := []string{"fnord", "smarf"} many := []string{"fnord", "smarf"}
a := []string{utils.Wildcard} a := []string{flags.Wildcard}
table := []struct { table := []struct {
name string name string

View File

@ -1,233 +1,13 @@
package utils package utils
import ( import (
"fmt"
"strconv" "strconv"
"strings"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/pkg/control/repository"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
// common flag vars (eg: FV)
var (
// RunMode describes the type of run, such as:
// flagtest, dry, run. Should default to 'run'.
RunModeFV string
BackupIDFV string
FolderPathFV []string
FileNameFV []string
FileCreatedAfterFV string
FileCreatedBeforeFV string
FileModifiedAfterFV string
FileModifiedBeforeFV string
LibraryFV string
SiteIDFV []string
WebURLFV []string
UserFV []string
// for selection of data by category. eg: `--data email,contacts`
CategoryDataFV []string
MaintenanceModeFV string
ForceMaintenanceFV bool
)
// common flag names (eg: FN)
const (
RunModeFN = "run-mode"
BackupFN = "backup"
CategoryDataFN = "data"
SiteFN = "site" // site only accepts WebURL values
SiteIDFN = "site-id" // site-id accepts actual site ids
UserFN = "user"
MailBoxFN = "mailbox"
LibraryFN = "library"
FileFN = "file"
FolderFN = "folder"
FileCreatedAfterFN = "file-created-after"
FileCreatedBeforeFN = "file-created-before"
FileModifiedAfterFN = "file-modified-after"
FileModifiedBeforeFN = "file-modified-before"
// Maintenance stuff.
MaintenanceModeFN = "mode"
ForceMaintenanceFN = "force"
)
// well-known flag values
const (
RunModeFlagTest = "flag-test"
RunModeRun = "run"
)
// AddBackupIDFlag adds the --backup flag, which identifies the backup a
// command operates on.  When require is true the command cannot run
// without it.
func AddBackupIDFlag(cmd *cobra.Command, require bool) {
	// Bind the flag to the shared BackupIDFV value.
	cmd.Flags().StringVar(&BackupIDFV, BackupFN, "", "ID of the backup to retrieve.")

	if !require {
		return
	}

	cobra.CheckErr(cmd.MarkFlagRequired(BackupFN))
}
// AddDataFlag adds the --data flag, restricting the help text to the
// provided allow-list of category names.  When hide is true the flag is
// registered but kept out of the help output.  With an empty allow-list
// the flag is not registered at all.
func AddDataFlag(cmd *cobra.Command, allowed []string, hide bool) {
	flagSet := cmd.Flags()

	// Build a human-readable "a, b or c" style summary of the allowed values.
	var allowedMsg string

	switch len(allowed) {
	case 0:
		return
	case 1:
		allowedMsg = allowed[0]
	case 2:
		allowedMsg = allowed[0] + " or " + allowed[1]
	default:
		allowedMsg = strings.Join(allowed[:len(allowed)-1], ", ") +
			" or " + allowed[len(allowed)-1]
	}

	flagSet.StringSliceVar(
		&CategoryDataFV,
		CategoryDataFN, nil,
		"Select one or more types of data to backup: "+allowedMsg+".")

	if hide {
		cobra.CheckErr(flagSet.MarkHidden(CategoryDataFN))
	}
}
// AddRunModeFlag adds the hidden --run-mode flag.  When persistent is true
// the flag is registered on the persistent flag set so that subcommands
// inherit it.
func AddRunModeFlag(cmd *cobra.Command, persistent bool) {
	flagSet := cmd.Flags()
	if persistent {
		flagSet = cmd.PersistentFlags()
	}

	flagSet.StringVar(&RunModeFV, RunModeFN, "run", "What mode to run: dry, test, run. Defaults to run.")

	// run-mode is an internal testing lever; keep it out of help output.
	cobra.CheckErr(flagSet.MarkHidden(RunModeFN))
}
// AddUserFlag adds the required --user flag for selecting which users'
// data to back up.
func AddUserFlag(cmd *cobra.Command) {
	flagSet := cmd.Flags()

	flagSet.StringSliceVar(
		&UserFV,
		UserFN, nil,
		"Backup a specific user's data; accepts '"+Wildcard+"' to select all users.")

	// User selection is mandatory for commands using this flag.
	cobra.CheckErr(cmd.MarkFlagRequired(UserFN))
}
// AddMailBoxFlag adds the --mailbox flag plus a deprecated --user alias.
// Both flags bind to the same UserFV variable, so either spelling populates
// the same selection value; --user prints a deprecation notice pointing
// callers at --mailbox.
func AddMailBoxFlag(cmd *cobra.Command) {
	// renamed from `flags` to `fs`: avoids shadowing the flags package
	// identifier and matches the sibling Add*Flag functions.
	fs := cmd.Flags()

	fs.StringSliceVar(
		&UserFV,
		UserFN, nil,
		"Backup a specific user's data; accepts '"+Wildcard+"' to select all users.")
	cobra.CheckErr(fs.MarkDeprecated(UserFN, fmt.Sprintf("use --%s instead", MailBoxFN)))

	fs.StringSliceVar(
		&UserFV,
		MailBoxFN, nil,
		// fixed help-text grammar: "all mailbox" -> "all mailboxes".
		"Backup a specific mailbox's data; accepts '"+Wildcard+"' to select all mailboxes.")
}
// AddSiteIDFlag adds the hidden --site-id flag, which accepts raw site ID
// values.  It stays hidden because --site (webURL values) is the preferred
// selector and we do not want to encourage confusion between the two.
func AddSiteIDFlag(cmd *cobra.Command) {
	flagSet := cmd.Flags()

	// StringArrayVar (not StringSliceVar) is deliberate: site IDs naturally
	// contain commas, so comma-delimited values cannot be split safely.
	// ie: --site-id a,b,c does not work; users must repeat the flag:
	// --site-id a --site-id b --site-id c.
	flagSet.StringArrayVar(
		&SiteIDFV,
		SiteIDFN, nil,
		//nolint:lll
		"Backup data by site ID; accepts '"+Wildcard+"' to select all sites. Args cannot be comma-delimited and must use multiple flags.")
	cobra.CheckErr(flagSet.MarkHidden(SiteIDFN))
}
// AddSiteFlag adds the --site flag, which accepts webURL values.
func AddSiteFlag(cmd *cobra.Command) {
	flagSet := cmd.Flags()
	flagSet.StringSliceVar(
		&WebURLFV,
		SiteFN, nil,
		"Backup data by site URL; accepts '"+Wildcard+"' to select all sites.")
}
// AddMaintenanceModeFlag adds the hidden --mode flag selecting which kind
// of repository maintenance to run; defaults to complete maintenance.
func AddMaintenanceModeFlag(cmd *cobra.Command) {
	var (
		flagSet  = cmd.Flags()
		complete = repository.CompleteMaintenance.String()
		metadata = repository.MetadataMaintenance.String()
	)

	flagSet.StringVar(
		&MaintenanceModeFV,
		MaintenanceModeFN,
		complete,
		"Type of maintenance operation to run. Pass '"+metadata+
			"' to run a faster maintenance that does minimal clean-up and optimization. Pass '"+
			complete+"' to fully compact existing data and delete unused data.")
	cobra.CheckErr(flagSet.MarkHidden(MaintenanceModeFN))
}
// AddForceMaintenanceFlag adds the hidden --force flag used to force a
// maintenance run.
func AddForceMaintenanceFlag(cmd *cobra.Command) {
	flagSet := cmd.Flags()
	flagSet.BoolVar(
		&ForceMaintenanceFV,
		ForceMaintenanceFN,
		false,
		"Force maintenance. Caution: user must ensure this is not run concurrently on a single repo")
	cobra.CheckErr(flagSet.MarkHidden(ForceMaintenanceFN))
}
// PopulatedFlags records, by long name, the set of flags the user
// explicitly set on the command line.
type PopulatedFlags map[string]struct{}

// populate records pf's name if the user changed the flag from its default.
func (fs PopulatedFlags) populate(pf *pflag.Flag) {
	if pf != nil && pf.Changed {
		fs[pf.Name] = struct{}{}
	}
}
// GetPopulatedFlags returns a map of flags that have been
// populated by the user.  Entry keys match the flag's long
// name; values are empty placeholders.
func GetPopulatedFlags(cmd *cobra.Command) PopulatedFlags {
	populated := PopulatedFlags{}

	if flagSet := cmd.Flags(); flagSet != nil {
		flagSet.VisitAll(populated.populate)
	}

	return populated
}
// IsValidTimeFormat returns true if the input is recognized as a // IsValidTimeFormat returns true if the input is recognized as a
// supported format by the common time parser. // supported format by the common time parser.
func IsValidTimeFormat(in string) bool { func IsValidTimeFormat(in string) bool {

View File

@ -4,6 +4,7 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
@ -17,78 +18,43 @@ type OneDriveOpts struct {
FileModifiedAfter string FileModifiedAfter string
FileModifiedBefore string FileModifiedBefore string
Populated PopulatedFlags Populated flags.PopulatedFlags
} }
func MakeOneDriveOpts(cmd *cobra.Command) OneDriveOpts { func MakeOneDriveOpts(cmd *cobra.Command) OneDriveOpts {
return OneDriveOpts{ return OneDriveOpts{
Users: UserFV, Users: flags.UserFV,
FileName: FileNameFV, FileName: flags.FileNameFV,
FolderPath: FolderPathFV, FolderPath: flags.FolderPathFV,
FileCreatedAfter: FileCreatedAfterFV, FileCreatedAfter: flags.FileCreatedAfterFV,
FileCreatedBefore: FileCreatedBeforeFV, FileCreatedBefore: flags.FileCreatedBeforeFV,
FileModifiedAfter: FileModifiedAfterFV, FileModifiedAfter: flags.FileModifiedAfterFV,
FileModifiedBefore: FileModifiedBeforeFV, FileModifiedBefore: flags.FileModifiedBeforeFV,
Populated: GetPopulatedFlags(cmd), Populated: flags.GetPopulatedFlags(cmd),
} }
} }
// AddOneDriveDetailsAndRestoreFlags adds flags that are common to both the
// details and restore commands.
func AddOneDriveDetailsAndRestoreFlags(cmd *cobra.Command) {
	flagSet := cmd.Flags()

	// item selection
	flagSet.StringSliceVar(
		&FolderPathFV,
		FolderFN, nil,
		"Select files by OneDrive folder; defaults to root.")
	flagSet.StringSliceVar(
		&FileNameFV,
		FileFN, nil,
		"Select files by name.")

	// creation-time bounds
	flagSet.StringVar(
		&FileCreatedAfterFV,
		FileCreatedAfterFN, "",
		"Select files created after this datetime.")
	flagSet.StringVar(
		&FileCreatedBeforeFV,
		FileCreatedBeforeFN, "",
		"Select files created before this datetime.")

	// modification-time bounds
	flagSet.StringVar(
		&FileModifiedAfterFV,
		FileModifiedAfterFN, "",
		"Select files modified after this datetime.")
	flagSet.StringVar(
		&FileModifiedBeforeFV,
		FileModifiedBeforeFN, "",
		"Select files modified before this datetime.")
}
// ValidateOneDriveRestoreFlags checks common flags for correctness and interdependencies // ValidateOneDriveRestoreFlags checks common flags for correctness and interdependencies
func ValidateOneDriveRestoreFlags(backupID string, opts OneDriveOpts) error { func ValidateOneDriveRestoreFlags(backupID string, opts OneDriveOpts) error {
if len(backupID) == 0 { if len(backupID) == 0 {
return clues.New("a backup ID is required") return clues.New("a backup ID is required")
} }
if _, ok := opts.Populated[FileCreatedAfterFN]; ok && !IsValidTimeFormat(opts.FileCreatedAfter) { if _, ok := opts.Populated[flags.FileCreatedAfterFN]; ok && !IsValidTimeFormat(opts.FileCreatedAfter) {
return clues.New("invalid time format for created-after") return clues.New("invalid time format for created-after")
} }
if _, ok := opts.Populated[FileCreatedBeforeFN]; ok && !IsValidTimeFormat(opts.FileCreatedBefore) { if _, ok := opts.Populated[flags.FileCreatedBeforeFN]; ok && !IsValidTimeFormat(opts.FileCreatedBefore) {
return clues.New("invalid time format for created-before") return clues.New("invalid time format for created-before")
} }
if _, ok := opts.Populated[FileModifiedAfterFN]; ok && !IsValidTimeFormat(opts.FileModifiedAfter) { if _, ok := opts.Populated[flags.FileModifiedAfterFN]; ok && !IsValidTimeFormat(opts.FileModifiedAfter) {
return clues.New("invalid time format for modified-after") return clues.New("invalid time format for modified-after")
} }
if _, ok := opts.Populated[FileModifiedBeforeFN]; ok && !IsValidTimeFormat(opts.FileModifiedBefore) { if _, ok := opts.Populated[flags.FileModifiedBeforeFN]; ok && !IsValidTimeFormat(opts.FileModifiedBefore) {
return clues.New("invalid time format for modified-before") return clues.New("invalid time format for modified-before")
} }

26
src/cli/utils/options.go Normal file
View File

@ -0,0 +1,26 @@
package utils
import (
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/pkg/control"
)
// Control produces the control options based on the user's flags.
func Control() control.Options {
	options := control.Defaults()

	if flags.FailFastFV {
		options.FailureHandling = control.FailFast
	}

	options.DisableMetrics = flags.NoStatsFV
	options.RestorePermissions = flags.RestorePermissionsFV
	options.SkipReduce = flags.SkipReduceFV
	options.Parallelism.ItemFetch = flags.FetchParallelismFV

	// feature toggles
	options.ToggleFeatures.DisableIncrementals = flags.DisableIncrementalsFV
	options.ToggleFeatures.DisableDelta = flags.DisableDeltaFV
	options.ToggleFeatures.ExchangeImmutableIDs = flags.EnableImmutableIDFV
	options.ToggleFeatures.DisableConcurrencyLimiter = flags.DisableConcurrencyLimiterFV

	return options
}

View File

@ -0,0 +1,67 @@
package utils
import (
"testing"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/internal/tester"
)
// OptionsUnitSuite holds unit tests for the options/flag wiring.
type OptionsUnitSuite struct {
	tester.Suite
}

// TestOptionsUnitSuite runs the OptionsUnitSuite test collection.
func TestOptionsUnitSuite(t *testing.T) {
	suite.Run(t, &OptionsUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestAddExchangeCommands verifies that the operation flag adders register
// their flags, and that values parsed from the command line land in the
// expected package-level flag variables.
// NOTE(review): despite the name, this exercises the shared operation flags
// rather than anything exchange-specific — consider renaming.
func (suite *OptionsUnitSuite) TestAddExchangeCommands() {
	t := suite.T()

	cmd := &cobra.Command{
		Use: "test",
		// Run only fires if arg parsing succeeds; each assertion checks
		// that a flag set below reached its bound variable.
		Run: func(cmd *cobra.Command, args []string) {
			assert.True(t, flags.FailFastFV, flags.FailFastFN)
			assert.True(t, flags.DisableIncrementalsFV, flags.DisableIncrementalsFN)
			assert.True(t, flags.DisableDeltaFV, flags.DisableDeltaFN)
			assert.True(t, flags.NoStatsFV, flags.NoStatsFN)
			assert.True(t, flags.RestorePermissionsFV, flags.RestorePermissionsFN)
			assert.True(t, flags.SkipReduceFV, flags.SkipReduceFN)
			assert.Equal(t, 2, flags.FetchParallelismFV, flags.FetchParallelismFN)
			assert.True(t, flags.DisableConcurrencyLimiterFV, flags.DisableConcurrencyLimiterFN)
		},
	}

	// adds no-stats
	flags.AddGlobalOperationFlags(cmd)

	flags.AddFailFastFlag(cmd)
	flags.AddDisableIncrementalsFlag(cmd)
	flags.AddDisableDeltaFlag(cmd)
	flags.AddRestorePermissionsFlag(cmd)
	flags.AddSkipReduceFlag(cmd)
	flags.AddFetchParallelismFlag(cmd)
	flags.AddDisableConcurrencyLimiterFlag(cmd)

	// Test arg parsing for few args
	cmd.SetArgs([]string{
		"test",
		"--" + flags.FailFastFN,
		"--" + flags.DisableIncrementalsFN,
		"--" + flags.DisableDeltaFN,
		"--" + flags.NoStatsFN,
		"--" + flags.RestorePermissionsFN,
		"--" + flags.SkipReduceFN,
		"--" + flags.FetchParallelismFN, "2",
		"--" + flags.DisableConcurrencyLimiterFN,
	})

	err := cmd.Execute()
	require.NoError(t, err, clues.ToCore(err))
}

View File

@ -8,25 +8,11 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
// SharePoint list and page flag names (FN).
const (
	ListFolderFN = "list"
	ListItemFN   = "list-item"
	PageFolderFN = "page-folder"
	PageFN       = "page"
)

// flag population variables
var (
	ListFolder []string // values passed via --list
	ListItem   []string // values passed via --list-item
	PageFolder []string // values passed via --page-folder
	Page       []string // values passed via --page
)
type SharePointOpts struct { type SharePointOpts struct {
SiteID []string SiteID []string
WebURL []string WebURL []string
@ -45,95 +31,32 @@ type SharePointOpts struct {
PageFolder []string PageFolder []string
Page []string Page []string
Populated PopulatedFlags Populated flags.PopulatedFlags
} }
func MakeSharePointOpts(cmd *cobra.Command) SharePointOpts { func MakeSharePointOpts(cmd *cobra.Command) SharePointOpts {
return SharePointOpts{ return SharePointOpts{
SiteID: SiteIDFV, SiteID: flags.SiteIDFV,
WebURL: WebURLFV, WebURL: flags.WebURLFV,
Library: LibraryFV, Library: flags.LibraryFV,
FileName: FileNameFV, FileName: flags.FileNameFV,
FolderPath: FolderPathFV, FolderPath: flags.FolderPathFV,
FileCreatedAfter: FileCreatedAfterFV, FileCreatedAfter: flags.FileCreatedAfterFV,
FileCreatedBefore: FileCreatedBeforeFV, FileCreatedBefore: flags.FileCreatedBeforeFV,
FileModifiedAfter: FileModifiedAfterFV, FileModifiedAfter: flags.FileModifiedAfterFV,
FileModifiedBefore: FileModifiedBeforeFV, FileModifiedBefore: flags.FileModifiedBeforeFV,
ListFolder: ListFolder, ListFolder: flags.ListFolderFV,
ListItem: ListItem, ListItem: flags.ListItemFV,
Page: Page, Page: flags.PageFV,
PageFolder: PageFolder, PageFolder: flags.PageFolderFV,
Populated: GetPopulatedFlags(cmd), Populated: flags.GetPopulatedFlags(cmd),
} }
} }
// AddSharePointDetailsAndRestoreFlags adds flags that are common to both the
// details and restore commands.
func AddSharePointDetailsAndRestoreFlags(cmd *cobra.Command) {
	flagSet := cmd.Flags()

	// libraries
	flagSet.StringVar(
		&LibraryFV,
		LibraryFN, "",
		"Select only this library; defaults to all libraries.")
	flagSet.StringSliceVar(
		&FolderPathFV,
		FolderFN, nil,
		"Select by folder; defaults to root.")
	flagSet.StringSliceVar(
		&FileNameFV,
		FileFN, nil,
		"Select by file name.")
	flagSet.StringVar(
		&FileCreatedAfterFV,
		FileCreatedAfterFN, "",
		"Select files created after this datetime.")
	flagSet.StringVar(
		&FileCreatedBeforeFV,
		FileCreatedBeforeFN, "",
		"Select files created before this datetime.")
	flagSet.StringVar(
		&FileModifiedAfterFV,
		FileModifiedAfterFN, "",
		"Select files modified after this datetime.")
	flagSet.StringVar(
		&FileModifiedBeforeFV,
		FileModifiedBeforeFN, "",
		"Select files modified before this datetime.")

	// lists (registered but kept out of help output)
	flagSet.StringSliceVar(
		&ListFolder,
		ListFolderFN, nil,
		"Select lists by name; accepts '"+Wildcard+"' to select all lists.")
	cobra.CheckErr(flagSet.MarkHidden(ListFolderFN))

	flagSet.StringSliceVar(
		&ListItem,
		ListItemFN, nil,
		"Select lists by item name; accepts '"+Wildcard+"' to select all lists.")
	cobra.CheckErr(flagSet.MarkHidden(ListItemFN))

	// pages (registered but kept out of help output)
	flagSet.StringSliceVar(
		&PageFolder,
		PageFolderFN, nil,
		"Select pages by folder name; accepts '"+Wildcard+"' to select all pages.")
	cobra.CheckErr(flagSet.MarkHidden(PageFolderFN))

	flagSet.StringSliceVar(
		&Page,
		PageFN, nil,
		"Select pages by item name; accepts '"+Wildcard+"' to select all pages.")
	cobra.CheckErr(flagSet.MarkHidden(PageFN))
}
// ValidateSharePointRestoreFlags checks common flags for correctness and interdependencies // ValidateSharePointRestoreFlags checks common flags for correctness and interdependencies
func ValidateSharePointRestoreFlags(backupID string, opts SharePointOpts) error { func ValidateSharePointRestoreFlags(backupID string, opts SharePointOpts) error {
if len(backupID) == 0 { if len(backupID) == 0 {
@ -141,7 +64,7 @@ func ValidateSharePointRestoreFlags(backupID string, opts SharePointOpts) error
} }
// ensure url can parse all weburls provided by --site. // ensure url can parse all weburls provided by --site.
if _, ok := opts.Populated[SiteFN]; ok { if _, ok := opts.Populated[flags.SiteFN]; ok {
for _, wu := range opts.WebURL { for _, wu := range opts.WebURL {
if _, err := url.Parse(wu); err != nil { if _, err := url.Parse(wu); err != nil {
return clues.New("invalid site url: " + wu) return clues.New("invalid site url: " + wu)
@ -149,20 +72,20 @@ func ValidateSharePointRestoreFlags(backupID string, opts SharePointOpts) error
} }
} }
if _, ok := opts.Populated[FileCreatedAfterFN]; ok && !IsValidTimeFormat(opts.FileCreatedAfter) { if _, ok := opts.Populated[flags.FileCreatedAfterFN]; ok && !IsValidTimeFormat(opts.FileCreatedAfter) {
return clues.New("invalid time format for " + FileCreatedAfterFN) return clues.New("invalid time format for " + flags.FileCreatedAfterFN)
} }
if _, ok := opts.Populated[FileCreatedBeforeFN]; ok && !IsValidTimeFormat(opts.FileCreatedBefore) { if _, ok := opts.Populated[flags.FileCreatedBeforeFN]; ok && !IsValidTimeFormat(opts.FileCreatedBefore) {
return clues.New("invalid time format for " + FileCreatedBeforeFN) return clues.New("invalid time format for " + flags.FileCreatedBeforeFN)
} }
if _, ok := opts.Populated[FileModifiedAfterFN]; ok && !IsValidTimeFormat(opts.FileModifiedAfter) { if _, ok := opts.Populated[flags.FileModifiedAfterFN]; ok && !IsValidTimeFormat(opts.FileModifiedAfter) {
return clues.New("invalid time format for " + FileModifiedAfterFN) return clues.New("invalid time format for " + flags.FileModifiedAfterFN)
} }
if _, ok := opts.Populated[FileModifiedBeforeFN]; ok && !IsValidTimeFormat(opts.FileModifiedBefore) { if _, ok := opts.Populated[flags.FileModifiedBeforeFN]; ok && !IsValidTimeFormat(opts.FileModifiedBefore) {
return clues.New("invalid time format for " + FileModifiedBeforeFN) return clues.New("invalid time format for " + flags.FileModifiedBeforeFN)
} }
return nil return nil

View File

@ -6,6 +6,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -297,12 +298,12 @@ func (suite *SharePointUtilsSuite) TestValidateSharePointRestoreFlags() {
FileCreatedBefore: dttm.Now(), FileCreatedBefore: dttm.Now(),
FileModifiedAfter: dttm.Now(), FileModifiedAfter: dttm.Now(),
FileModifiedBefore: dttm.Now(), FileModifiedBefore: dttm.Now(),
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.SiteFN: {}, flags.SiteFN: struct{}{},
utils.FileCreatedAfterFN: {}, flags.FileCreatedAfterFN: struct{}{},
utils.FileCreatedBeforeFN: {}, flags.FileCreatedBeforeFN: struct{}{},
utils.FileModifiedAfterFN: {}, flags.FileModifiedAfterFN: struct{}{},
utils.FileModifiedBeforeFN: {}, flags.FileModifiedBeforeFN: struct{}{},
}, },
}, },
expect: assert.NoError, expect: assert.NoError,
@ -318,8 +319,8 @@ func (suite *SharePointUtilsSuite) TestValidateSharePointRestoreFlags() {
backupID: "id", backupID: "id",
opts: utils.SharePointOpts{ opts: utils.SharePointOpts{
WebURL: []string{"slander://:vree.garbles/:"}, WebURL: []string{"slander://:vree.garbles/:"},
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.SiteFN: {}, flags.SiteFN: struct{}{},
}, },
}, },
expect: assert.Error, expect: assert.Error,
@ -329,8 +330,8 @@ func (suite *SharePointUtilsSuite) TestValidateSharePointRestoreFlags() {
backupID: "id", backupID: "id",
opts: utils.SharePointOpts{ opts: utils.SharePointOpts{
FileCreatedAfter: "1235", FileCreatedAfter: "1235",
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.FileCreatedAfterFN: {}, flags.FileCreatedAfterFN: struct{}{},
}, },
}, },
expect: assert.Error, expect: assert.Error,
@ -340,8 +341,8 @@ func (suite *SharePointUtilsSuite) TestValidateSharePointRestoreFlags() {
backupID: "id", backupID: "id",
opts: utils.SharePointOpts{ opts: utils.SharePointOpts{
FileCreatedBefore: "1235", FileCreatedBefore: "1235",
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.FileCreatedBeforeFN: {}, flags.FileCreatedBeforeFN: struct{}{},
}, },
}, },
expect: assert.Error, expect: assert.Error,
@ -351,8 +352,8 @@ func (suite *SharePointUtilsSuite) TestValidateSharePointRestoreFlags() {
backupID: "id", backupID: "id",
opts: utils.SharePointOpts{ opts: utils.SharePointOpts{
FileModifiedAfter: "1235", FileModifiedAfter: "1235",
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.FileModifiedAfterFN: {}, flags.FileModifiedAfterFN: struct{}{},
}, },
}, },
expect: assert.Error, expect: assert.Error,
@ -362,8 +363,8 @@ func (suite *SharePointUtilsSuite) TestValidateSharePointRestoreFlags() {
backupID: "id", backupID: "id",
opts: utils.SharePointOpts{ opts: utils.SharePointOpts{
FileModifiedBefore: "1235", FileModifiedBefore: "1235",
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.FileModifiedBeforeFN: {}, flags.FileModifiedBeforeFN: struct{}{},
}, },
}, },
expect: assert.Error, expect: assert.Error,

View File

@ -7,6 +7,7 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/pkg/backup" "github.com/alcionai/corso/src/pkg/backup"
@ -37,8 +38,8 @@ var (
Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts {
return utils.ExchangeOpts{ return utils.ExchangeOpts{
EmailReceivedAfter: "foo", EmailReceivedAfter: "foo",
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.EmailReceivedAfterFN: struct{}{}, flags.EmailReceivedAfterFN: struct{}{},
}, },
} }
}, },
@ -48,8 +49,8 @@ var (
Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts {
return utils.ExchangeOpts{ return utils.ExchangeOpts{
EmailReceivedAfter: "", EmailReceivedAfter: "",
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.EmailReceivedAfterFN: struct{}{}, flags.EmailReceivedAfterFN: struct{}{},
}, },
} }
}, },
@ -59,8 +60,8 @@ var (
Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts {
return utils.ExchangeOpts{ return utils.ExchangeOpts{
EmailReceivedBefore: "foo", EmailReceivedBefore: "foo",
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.EmailReceivedBeforeFN: struct{}{}, flags.EmailReceivedBeforeFN: struct{}{},
}, },
} }
}, },
@ -70,8 +71,8 @@ var (
Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts {
return utils.ExchangeOpts{ return utils.ExchangeOpts{
EmailReceivedBefore: "", EmailReceivedBefore: "",
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.EmailReceivedBeforeFN: struct{}{}, flags.EmailReceivedBeforeFN: struct{}{},
}, },
} }
}, },
@ -81,8 +82,8 @@ var (
Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts {
return utils.ExchangeOpts{ return utils.ExchangeOpts{
EventRecurs: "foo", EventRecurs: "foo",
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.EventRecursFN: struct{}{}, flags.EventRecursFN: struct{}{},
}, },
} }
}, },
@ -92,8 +93,8 @@ var (
Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts {
return utils.ExchangeOpts{ return utils.ExchangeOpts{
EventRecurs: "", EventRecurs: "",
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.EventRecursFN: struct{}{}, flags.EventRecursFN: struct{}{},
}, },
} }
}, },
@ -103,8 +104,8 @@ var (
Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts {
return utils.ExchangeOpts{ return utils.ExchangeOpts{
EventStartsAfter: "foo", EventStartsAfter: "foo",
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.EventStartsAfterFN: struct{}{}, flags.EventStartsAfterFN: struct{}{},
}, },
} }
}, },
@ -114,8 +115,8 @@ var (
Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts {
return utils.ExchangeOpts{ return utils.ExchangeOpts{
EventStartsAfter: "", EventStartsAfter: "",
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.EventStartsAfterFN: struct{}{}, flags.EventStartsAfterFN: struct{}{},
}, },
} }
}, },
@ -125,8 +126,8 @@ var (
Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts {
return utils.ExchangeOpts{ return utils.ExchangeOpts{
EventStartsBefore: "foo", EventStartsBefore: "foo",
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.EventStartsBeforeFN: struct{}{}, flags.EventStartsBeforeFN: struct{}{},
}, },
} }
}, },
@ -136,8 +137,8 @@ var (
Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts {
return utils.ExchangeOpts{ return utils.ExchangeOpts{
EventStartsBefore: "", EventStartsBefore: "",
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.EventStartsBeforeFN: struct{}{}, flags.EventStartsBeforeFN: struct{}{},
}, },
} }
}, },
@ -441,8 +442,8 @@ var (
return utils.OneDriveOpts{ return utils.OneDriveOpts{
Users: selectors.Any(), Users: selectors.Any(),
FileCreatedAfter: "foo", FileCreatedAfter: "foo",
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.FileCreatedAfterFN: struct{}{}, flags.FileCreatedAfterFN: struct{}{},
}, },
} }
}, },
@ -452,8 +453,8 @@ var (
Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts {
return utils.OneDriveOpts{ return utils.OneDriveOpts{
FileCreatedAfter: "", FileCreatedAfter: "",
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.FileCreatedAfterFN: struct{}{}, flags.FileCreatedAfterFN: struct{}{},
}, },
} }
}, },
@ -463,8 +464,8 @@ var (
Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts {
return utils.OneDriveOpts{ return utils.OneDriveOpts{
FileCreatedBefore: "foo", FileCreatedBefore: "foo",
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.FileCreatedBeforeFN: struct{}{}, flags.FileCreatedBeforeFN: struct{}{},
}, },
} }
}, },
@ -474,8 +475,8 @@ var (
Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts {
return utils.OneDriveOpts{ return utils.OneDriveOpts{
FileCreatedBefore: "", FileCreatedBefore: "",
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.FileCreatedBeforeFN: struct{}{}, flags.FileCreatedBeforeFN: struct{}{},
}, },
} }
}, },
@ -485,8 +486,8 @@ var (
Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts {
return utils.OneDriveOpts{ return utils.OneDriveOpts{
FileModifiedAfter: "foo", FileModifiedAfter: "foo",
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.FileModifiedAfterFN: struct{}{}, flags.FileModifiedAfterFN: struct{}{},
}, },
} }
}, },
@ -496,8 +497,8 @@ var (
Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts {
return utils.OneDriveOpts{ return utils.OneDriveOpts{
FileModifiedAfter: "", FileModifiedAfter: "",
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.FileModifiedAfterFN: struct{}{}, flags.FileModifiedAfterFN: struct{}{},
}, },
} }
}, },
@ -507,8 +508,8 @@ var (
Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts {
return utils.OneDriveOpts{ return utils.OneDriveOpts{
FileModifiedBefore: "foo", FileModifiedBefore: "foo",
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.FileModifiedBeforeFN: struct{}{}, flags.FileModifiedBeforeFN: struct{}{},
}, },
} }
}, },
@ -518,8 +519,8 @@ var (
Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts {
return utils.OneDriveOpts{ return utils.OneDriveOpts{
FileModifiedBefore: "", FileModifiedBefore: "",
Populated: utils.PopulatedFlags{ Populated: flags.PopulatedFlags{
utils.FileModifiedBeforeFN: struct{}{}, flags.FileModifiedBeforeFN: struct{}{},
}, },
} }
}, },
@ -751,8 +752,8 @@ var (
// Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { // Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts {
// return utils.SharePointOpts{ // return utils.SharePointOpts{
// FileCreatedBefore: "foo", // FileCreatedBefore: "foo",
// Populated: utils.PopulatedFlags{ // Populated: flags.PopulatedFlags{
// utils.FileCreatedBeforeFN: struct{}{}, // flags.FileCreatedBeforeFN: struct{}{},
// }, // },
// } // }
// }, // },
@ -762,8 +763,8 @@ var (
// Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { // Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts {
// return utils.SharePointOpts{ // return utils.SharePointOpts{
// FileCreatedBefore: "", // FileCreatedBefore: "",
// Populated: utils.PopulatedFlags{ // Populated: flags.PopulatedFlags{
// utils.FileCreatedBeforeFN: struct{}{}, // flags.FileCreatedBeforeFN: struct{}{},
// }, // },
// } // }
// }, // },

View File

@ -9,7 +9,6 @@ import (
"github.com/spf13/pflag" "github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/config" "github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/options"
"github.com/alcionai/corso/src/internal/events" "github.com/alcionai/corso/src/internal/events"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
@ -20,10 +19,6 @@ import (
"github.com/alcionai/corso/src/pkg/storage" "github.com/alcionai/corso/src/pkg/storage"
) )
const (
Wildcard = "*"
)
func GetAccountAndConnect(ctx context.Context) (repository.Repository, *storage.Storage, *account.Account, error) { func GetAccountAndConnect(ctx context.Context) (repository.Repository, *storage.Storage, *account.Account, error) {
cfg, err := config.GetConfigRepoDetails(ctx, true, nil) cfg, err := config.GetConfigRepoDetails(ctx, true, nil)
if err != nil { if err != nil {
@ -35,7 +30,7 @@ func GetAccountAndConnect(ctx context.Context) (repository.Repository, *storage.
repoID = events.RepoIDNotFound repoID = events.RepoIDNotFound
} }
r, err := repository.Connect(ctx, cfg.Account, cfg.Storage, repoID, options.Control()) r, err := repository.Connect(ctx, cfg.Account, cfg.Storage, repoID, Control())
if err != nil { if err != nil {
return nil, nil, nil, clues.Wrap(err, "connecting to the "+cfg.Storage.Provider.String()+" repository") return nil, nil, nil, clues.Wrap(err, "connecting to the "+cfg.Storage.Provider.String()+" repository")
} }

View File

@ -114,7 +114,10 @@ func handleExchangeCalendarEventFactory(cmd *cobra.Command, args []string) error
func(id, now, subject, body string) []byte { func(id, now, subject, body string) []byte {
return exchMock.EventWith( return exchMock.EventWith(
User, subject, body, body, User, subject, body, body,
now, now, exchMock.NoRecurrence, exchMock.NoAttendees, false) exchMock.NoOriginalStartDate, now, now,
exchMock.NoRecurrence, exchMock.NoAttendees,
exchMock.NoAttachments, exchMock.NoCancelledOccurrences,
exchMock.NoExceptionOccurrences)
}, },
control.Defaults(), control.Defaults(),
errs) errs)

View File

@ -8,7 +8,7 @@ require (
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0 github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0
github.com/alcionai/clues v0.0.0-20230613181047-258ea4f19225 github.com/alcionai/clues v0.0.0-20230613181047-258ea4f19225
github.com/armon/go-metrics v0.4.1 github.com/armon/go-metrics v0.4.1
github.com/aws/aws-sdk-go v1.44.283 github.com/aws/aws-sdk-go v1.44.287
github.com/aws/aws-xray-sdk-go v1.8.1 github.com/aws/aws-xray-sdk-go v1.8.1
github.com/cenkalti/backoff/v4 v4.2.1 github.com/cenkalti/backoff/v4 v4.2.1
github.com/google/uuid v1.3.0 github.com/google/uuid v1.3.0

View File

@ -66,8 +66,8 @@ github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY
github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA= github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA=
github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4= github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4=
github.com/aws/aws-sdk-go v1.44.283 h1:ObMaIvdhHJM2sIrbcljd7muHBaFb+Kp/QsX6iflGDg4= github.com/aws/aws-sdk-go v1.44.287 h1:CUq2/h0gZ2LOCF61AgQSEMPMfas4gTiQfHBO88gGET0=
github.com/aws/aws-sdk-go v1.44.283/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= github.com/aws/aws-sdk-go v1.44.287/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
github.com/aws/aws-xray-sdk-go v1.8.1 h1:O4pXV+hnCskaamGsZnFpzHyAmgPGusBMN6i7nnsy0Fo= github.com/aws/aws-xray-sdk-go v1.8.1 h1:O4pXV+hnCskaamGsZnFpzHyAmgPGusBMN6i7nnsy0Fo=
github.com/aws/aws-xray-sdk-go v1.8.1/go.mod h1:wMmVYzej3sykAttNBkXQHK/+clAPWTOrPiajEk7Cp3A= github.com/aws/aws-xray-sdk-go v1.8.1/go.mod h1:wMmVYzej3sykAttNBkXQHK/+clAPWTOrPiajEk7Cp3A=
github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8=

View File

@ -29,6 +29,10 @@ type getWithInfoer interface {
GetInfoer GetInfoer
} }
type GetDefaultDriver interface {
GetDefaultDrive(ctx context.Context, userID string) (models.Driveable, error)
}
type getAller interface { type getAller interface {
GetAll(ctx context.Context, errs *fault.Bus) ([]models.Userable, error) GetAll(ctx context.Context, errs *fault.Bus) ([]models.Userable, error)
} }

View File

@ -9,6 +9,7 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/services/m365/api"
) )
type attachmentPoster interface { type attachmentPoster interface {
@ -20,15 +21,14 @@ type attachmentPoster interface {
PostLargeAttachment( PostLargeAttachment(
ctx context.Context, ctx context.Context,
userID, containerID, itemID, name string, userID, containerID, itemID, name string,
size int64, content []byte,
body models.Attachmentable, ) (string, error)
) (models.UploadSessionable, error)
} }
const ( const (
// Use large attachment logic for attachments > 3MB // Use large attachment logic for attachments > 3MB
// https://learn.microsoft.com/en-us/graph/outlook-large-attachments // https://learn.microsoft.com/en-us/graph/outlook-large-attachments
largeAttachmentSize = int32(3 * 1024 * 1024) largeAttachmentSize = 3 * 1024 * 1024
fileAttachmentOdataValue = "#microsoft.graph.fileAttachment" fileAttachmentOdataValue = "#microsoft.graph.fileAttachment"
itemAttachmentOdataValue = "#microsoft.graph.itemAttachment" itemAttachmentOdataValue = "#microsoft.graph.itemAttachment"
referenceAttachmentOdataValue = "#microsoft.graph.referenceAttachment" referenceAttachmentOdataValue = "#microsoft.graph.referenceAttachment"
@ -53,7 +53,7 @@ func attachmentType(attachment models.Attachmentable) models.AttachmentType {
// uploadAttachment will upload the specified message attachment to M365 // uploadAttachment will upload the specified message attachment to M365
func uploadAttachment( func uploadAttachment(
ctx context.Context, ctx context.Context,
cli attachmentPoster, ap attachmentPoster,
userID, containerID, parentItemID string, userID, containerID, parentItemID string,
attachment models.Attachmentable, attachment models.Attachmentable,
) error { ) error {
@ -95,12 +95,20 @@ func uploadAttachment(
// for file attachments sized >= 3MB // for file attachments sized >= 3MB
if attachmentType == models.FILE_ATTACHMENTTYPE && size >= largeAttachmentSize { if attachmentType == models.FILE_ATTACHMENTTYPE && size >= largeAttachmentSize {
_, err := cli.PostLargeAttachment(ctx, userID, containerID, parentItemID, name, int64(size), attachment) // We expect the entire attachment to fit in memory.
// Max attachment size is 150MB.
content, err := api.GetAttachmentContent(attachment)
if err != nil {
return clues.Wrap(err, "serializing attachment content").WithClues(ctx)
}
_, err = ap.PostLargeAttachment(ctx, userID, containerID, parentItemID, name, content)
return err return err
} }
// for all other attachments // for all other attachments
return cli.PostSmallAttachment(ctx, userID, containerID, parentItemID, attachment) return ap.PostSmallAttachment(ctx, userID, containerID, parentItemID, attachment)
} }
func getOutlookOdataType(query models.Attachmentable) string { func getOutlookOdataType(query models.Attachmentable) string {

View File

@ -382,7 +382,7 @@ func newStatusUpdater(t *testing.T, wg *sync.WaitGroup) func(status *support.Con
return updater return updater
} }
type DataCollectionsIntegrationSuite struct { type BackupIntgSuite struct {
tester.Suite tester.Suite
user string user string
site string site string
@ -390,16 +390,15 @@ type DataCollectionsIntegrationSuite struct {
ac api.Client ac api.Client
} }
func TestDataCollectionsIntegrationSuite(t *testing.T) { func TestBackupIntgSuite(t *testing.T) {
suite.Run(t, &DataCollectionsIntegrationSuite{ suite.Run(t, &BackupIntgSuite{
Suite: tester.NewIntegrationSuite( Suite: tester.NewIntegrationSuite(
t, t,
[][]string{tester.M365AcctCredEnvs}, [][]string{tester.M365AcctCredEnvs}),
),
}) })
} }
func (suite *DataCollectionsIntegrationSuite) SetupSuite() { func (suite *BackupIntgSuite) SetupSuite() {
suite.user = tester.M365UserID(suite.T()) suite.user = tester.M365UserID(suite.T())
suite.site = tester.M365SiteID(suite.T()) suite.site = tester.M365SiteID(suite.T())
@ -415,7 +414,7 @@ func (suite *DataCollectionsIntegrationSuite) SetupSuite() {
tester.LogTimeOfTest(suite.T()) tester.LogTimeOfTest(suite.T())
} }
func (suite *DataCollectionsIntegrationSuite) TestMailFetch() { func (suite *BackupIntgSuite) TestMailFetch() {
var ( var (
userID = tester.M365UserID(suite.T()) userID = tester.M365UserID(suite.T())
users = []string{userID} users = []string{userID}
@ -499,7 +498,7 @@ func (suite *DataCollectionsIntegrationSuite) TestMailFetch() {
} }
} }
func (suite *DataCollectionsIntegrationSuite) TestDelta() { func (suite *BackupIntgSuite) TestDelta() {
var ( var (
userID = tester.M365UserID(suite.T()) userID = tester.M365UserID(suite.T())
users = []string{userID} users = []string{userID}
@ -604,7 +603,7 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
// TestMailSerializationRegression verifies that all mail data stored in the // TestMailSerializationRegression verifies that all mail data stored in the
// test account can be successfully downloaded into bytes and restored into // test account can be successfully downloaded into bytes and restored into
// M365 mail objects // M365 mail objects
func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression() { func (suite *BackupIntgSuite) TestMailSerializationRegression() {
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
@ -668,7 +667,7 @@ func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression()
// TestContactSerializationRegression verifies ability to query contact items // TestContactSerializationRegression verifies ability to query contact items
// and to store contact within Collection. Downloaded contacts are run through // and to store contact within Collection. Downloaded contacts are run through
// a regression test to ensure that downloaded items can be uploaded. // a regression test to ensure that downloaded items can be uploaded.
func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression() { func (suite *BackupIntgSuite) TestContactSerializationRegression() {
var ( var (
users = []string{suite.user} users = []string{suite.user}
handlers = BackupHandlers(suite.ac) handlers = BackupHandlers(suite.ac)
@ -756,7 +755,7 @@ func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression
// TestEventsSerializationRegression ensures functionality of createCollections // TestEventsSerializationRegression ensures functionality of createCollections
// to be able to successfully query, download and restore event objects // to be able to successfully query, download and restore event objects
func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression() { func (suite *BackupIntgSuite) TestEventsSerializationRegression() {
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)

View File

@ -9,7 +9,9 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
) )
@ -18,7 +20,6 @@ var _ itemRestorer = &contactRestoreHandler{}
type contactRestoreHandler struct { type contactRestoreHandler struct {
ac api.Contacts ac api.Contacts
ip itemPoster[models.Contactable]
} }
func newContactRestoreHandler( func newContactRestoreHandler(
@ -26,7 +27,6 @@ func newContactRestoreHandler(
) contactRestoreHandler { ) contactRestoreHandler {
return contactRestoreHandler{ return contactRestoreHandler{
ac: ac.Contacts(), ac: ac.Contacts(),
ip: ac.Contacts(),
} }
} }
@ -65,6 +65,27 @@ func (h contactRestoreHandler) restore(
ctx context.Context, ctx context.Context,
body []byte, body []byte,
userID, destinationID string, userID, destinationID string,
collisionKeyToItemID map[string]string,
collisionPolicy control.CollisionPolicy,
errs *fault.Bus,
) (*details.ExchangeInfo, error) {
return restoreContact(
ctx,
h.ac,
body,
userID, destinationID,
collisionKeyToItemID,
collisionPolicy,
errs)
}
func restoreContact(
ctx context.Context,
pi postItemer[models.Contactable],
body []byte,
userID, destinationID string,
collisionKeyToItemID map[string]string,
collisionPolicy control.CollisionPolicy,
errs *fault.Bus, errs *fault.Bus,
) (*details.ExchangeInfo, error) { ) (*details.ExchangeInfo, error) {
contact, err := api.BytesToContactable(body) contact, err := api.BytesToContactable(body)
@ -73,8 +94,20 @@ func (h contactRestoreHandler) restore(
} }
ctx = clues.Add(ctx, "item_id", ptr.Val(contact.GetId())) ctx = clues.Add(ctx, "item_id", ptr.Val(contact.GetId()))
collisionKey := api.ContactCollisionKey(contact)
item, err := h.ip.PostItem(ctx, userID, destinationID, contact) if _, ok := collisionKeyToItemID[collisionKey]; ok {
log := logger.Ctx(ctx).With("collision_key", clues.Hide(collisionKey))
log.Debug("item collision")
// TODO(rkeepers): Replace probably shouldn't no-op. Just a starting point.
if collisionPolicy == control.Skip || collisionPolicy == control.Replace {
log.Debug("skipping item with collision")
return nil, graph.ErrItemAlreadyExistsConflict
}
}
item, err := pi.PostItem(ctx, userID, destinationID, contact)
if err != nil { if err != nil {
return nil, graph.Wrap(ctx, err, "restoring mail message") return nil, graph.Wrap(ctx, err, "restoring mail message")
} }
@ -84,3 +117,15 @@ func (h contactRestoreHandler) restore(
return info, nil return info, nil
} }
func (h contactRestoreHandler) getItemsInContainerByCollisionKey(
ctx context.Context,
userID, containerID string,
) (map[string]string, error) {
m, err := h.ac.GetItemsInContainerByCollisionKey(ctx, userID, containerID)
if err != nil {
return nil, err
}
return m, nil
}

View File

@ -1,24 +1,46 @@
package exchange package exchange
import ( import (
"context"
"testing" "testing"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
) )
var _ postItemer[models.Contactable] = &mockContactRestorer{}
type mockContactRestorer struct {
postItemErr error
}
func (m mockContactRestorer) PostItem(
ctx context.Context,
userID, containerID string,
body models.Contactable,
) (models.Contactable, error) {
return models.NewContact(), m.postItemErr
}
// ---------------------------------------------------------------------------
// tests
// ---------------------------------------------------------------------------
type ContactsRestoreIntgSuite struct { type ContactsRestoreIntgSuite struct {
tester.Suite tester.Suite
creds account.M365Config its intgTesterSetup
ac api.Client
userID string
} }
func TestContactsRestoreIntgSuite(t *testing.T) { func TestContactsRestoreIntgSuite(t *testing.T) {
@ -30,29 +52,110 @@ func TestContactsRestoreIntgSuite(t *testing.T) {
} }
func (suite *ContactsRestoreIntgSuite) SetupSuite() { func (suite *ContactsRestoreIntgSuite) SetupSuite() {
t := suite.T() suite.its = newIntegrationTesterSetup(suite.T())
a := tester.NewM365Account(t)
creds, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.creds = creds
suite.ac, err = api.NewClient(creds)
require.NoError(t, err, clues.ToCore(err))
suite.userID = tester.M365UserID(t)
} }
// Testing to ensure that cache system works for in multiple different environments // Testing to ensure that cache system works for in multiple different environments
func (suite *ContactsRestoreIntgSuite) TestCreateContainerDestination() { func (suite *ContactsRestoreIntgSuite) TestCreateContainerDestination() {
runCreateDestinationTest( runCreateDestinationTest(
suite.T(), suite.T(),
newMailRestoreHandler(suite.ac), newContactRestoreHandler(suite.its.ac),
path.EmailCategory, path.ContactsCategory,
suite.creds.AzureTenantID, suite.its.creds.AzureTenantID,
suite.userID, suite.its.userID,
testdata.DefaultRestoreConfig("").Location, testdata.DefaultRestoreConfig("").Location,
[]string{"Hufflepuff"}, []string{"Hufflepuff"},
[]string{"Ravenclaw"}) []string{"Ravenclaw"})
} }
func (suite *ContactsRestoreIntgSuite) TestRestoreContact() {
body := mock.ContactBytes("middlename")
stub, err := api.BytesToContactable(body)
require.NoError(suite.T(), err, clues.ToCore(err))
collisionKey := api.ContactCollisionKey(stub)
table := []struct {
name string
apiMock postItemer[models.Contactable]
collisionMap map[string]string
onCollision control.CollisionPolicy
expectErr func(*testing.T, error)
}{
{
name: "no collision: skip",
apiMock: mockContactRestorer{},
collisionMap: map[string]string{},
onCollision: control.Copy,
expectErr: func(t *testing.T, err error) {
assert.NoError(t, err, clues.ToCore(err))
},
},
{
name: "no collision: copy",
apiMock: mockContactRestorer{},
collisionMap: map[string]string{},
onCollision: control.Skip,
expectErr: func(t *testing.T, err error) {
assert.NoError(t, err, clues.ToCore(err))
},
},
{
name: "no collision: replace",
apiMock: mockContactRestorer{},
collisionMap: map[string]string{},
onCollision: control.Replace,
expectErr: func(t *testing.T, err error) {
assert.NoError(t, err, clues.ToCore(err))
},
},
{
name: "collision: skip",
apiMock: mockContactRestorer{},
collisionMap: map[string]string{collisionKey: "smarf"},
onCollision: control.Skip,
expectErr: func(t *testing.T, err error) {
assert.ErrorIs(t, err, graph.ErrItemAlreadyExistsConflict, clues.ToCore(err))
},
},
{
name: "collision: copy",
apiMock: mockContactRestorer{},
collisionMap: map[string]string{collisionKey: "smarf"},
onCollision: control.Copy,
expectErr: func(t *testing.T, err error) {
assert.NoError(t, err, clues.ToCore(err))
},
},
{
name: "collision: replace",
apiMock: mockContactRestorer{},
collisionMap: map[string]string{collisionKey: "smarf"},
onCollision: control.Replace,
expectErr: func(t *testing.T, err error) {
assert.ErrorIs(t, err, graph.ErrItemAlreadyExistsConflict, clues.ToCore(err))
},
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
_, err := restoreContact(
ctx,
test.apiMock,
body,
suite.its.userID,
"destination",
test.collisionMap,
test.onCollision,
fault.New(true))
test.expectErr(t, err)
})
}
}

View File

@ -1,15 +1,23 @@
package exchange package exchange
import ( import (
"bytes"
"context" "context"
"fmt"
"strings"
"time"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
) )
@ -18,17 +26,13 @@ var _ itemRestorer = &eventRestoreHandler{}
type eventRestoreHandler struct { type eventRestoreHandler struct {
ac api.Events ac api.Events
ip itemPoster[models.Eventable]
} }
func newEventRestoreHandler( func newEventRestoreHandler(
ac api.Client, ac api.Client,
) eventRestoreHandler { ) eventRestoreHandler {
ace := ac.Events()
return eventRestoreHandler{ return eventRestoreHandler{
ac: ace, ac: ac.Events(),
ip: ace,
} }
} }
@ -67,6 +71,32 @@ func (h eventRestoreHandler) restore(
ctx context.Context, ctx context.Context,
body []byte, body []byte,
userID, destinationID string, userID, destinationID string,
collisionKeyToItemID map[string]string,
collisionPolicy control.CollisionPolicy,
errs *fault.Bus,
) (*details.ExchangeInfo, error) {
return restoreEvent(
ctx,
h.ac,
body,
userID, destinationID,
collisionKeyToItemID,
collisionPolicy,
errs)
}
type eventRestorer interface {
postItemer[models.Eventable]
eventInstanceAndAttachmenter
}
func restoreEvent(
ctx context.Context,
er eventRestorer,
body []byte,
userID, destinationID string,
collisionKeyToItemID map[string]string,
collisionPolicy control.CollisionPolicy,
errs *fault.Bus, errs *fault.Bus,
) (*details.ExchangeInfo, error) { ) (*details.ExchangeInfo, error) {
event, err := api.BytesToEventable(body) event, err := api.BytesToEventable(body)
@ -75,6 +105,18 @@ func (h eventRestoreHandler) restore(
} }
ctx = clues.Add(ctx, "item_id", ptr.Val(event.GetId())) ctx = clues.Add(ctx, "item_id", ptr.Val(event.GetId()))
collisionKey := api.EventCollisionKey(event)
if _, ok := collisionKeyToItemID[collisionKey]; ok {
log := logger.Ctx(ctx).With("collision_key", clues.Hide(collisionKey))
log.Debug("item collision")
// TODO(rkeepers): Replace probably shouldn't no-op. Just a starting point.
if collisionPolicy == control.Skip || collisionPolicy == control.Replace {
log.Debug("skipping item with collision")
return nil, graph.ErrItemAlreadyExistsConflict
}
}
event = toEventSimplified(event) event = toEventSimplified(event)
@ -82,17 +124,19 @@ func (h eventRestoreHandler) restore(
if ptr.Val(event.GetHasAttachments()) { if ptr.Val(event.GetHasAttachments()) {
attachments = event.GetAttachments() attachments = event.GetAttachments()
event.SetAttachments([]models.Attachmentable{}) // We cannot use `[]models.Attbachmentable{}` instead of nil
// for beta endpoint.
event.SetAttachments(nil)
} }
item, err := h.ip.PostItem(ctx, userID, destinationID, event) item, err := er.PostItem(ctx, userID, destinationID, event)
if err != nil { if err != nil {
return nil, graph.Wrap(ctx, err, "restoring mail message") return nil, graph.Wrap(ctx, err, "restoring calendar item")
} }
err = uploadAttachments( err = uploadAttachments(
ctx, ctx,
h.ac, er,
attachments, attachments,
userID, userID,
destinationID, destinationID,
@ -102,8 +146,359 @@ func (h eventRestoreHandler) restore(
return nil, clues.Stack(err) return nil, clues.Stack(err)
} }
// Have to parse event again as we modified the original event and
// removed cancelled and exceptions events form it
event, err = api.BytesToEventable(body)
if err != nil {
return nil, clues.Wrap(err, "creating event from bytes").WithClues(ctx)
}
// Fix up event instances in case we have a recurring event
err = updateRecurringEvents(
ctx,
er,
userID,
destinationID,
ptr.Val(item.GetId()),
event,
errs,
)
if err != nil {
return nil, clues.Stack(err)
}
info := api.EventInfo(event) info := api.EventInfo(event)
info.Size = int64(len(body)) info.Size = int64(len(body))
return info, nil return info, nil
} }
func updateRecurringEvents(
ctx context.Context,
eiaa eventInstanceAndAttachmenter,
userID, containerID, itemID string,
event models.Eventable,
errs *fault.Bus,
) error {
if event.GetRecurrence() == nil {
return nil
}
// Cancellations and exceptions are currently in additional data
// but will get their own fields once the beta API lands and
// should be moved then
cancelledOccurrences := event.GetAdditionalData()["cancelledOccurrences"]
exceptionOccurrences := event.GetAdditionalData()["exceptionOccurrences"]
err := updateCancelledOccurrences(ctx, eiaa, userID, itemID, cancelledOccurrences)
if err != nil {
return clues.Wrap(err, "update cancelled occurrences")
}
err = updateExceptionOccurrences(ctx, eiaa, userID, containerID, itemID, exceptionOccurrences, errs)
if err != nil {
return clues.Wrap(err, "update exception occurrences")
}
return nil
}
type eventInstanceAndAttachmenter interface {
attachmentGetDeletePoster
DeleteItem(
ctx context.Context,
userID, itemID string,
) error
GetItemInstances(
ctx context.Context,
userID, itemID string,
startDate, endDate string,
) ([]models.Eventable, error)
PatchItem(
ctx context.Context,
userID, eventID string,
body models.Eventable,
) (models.Eventable, error)
}
// updateExceptionOccurrences take events that have exceptions, uses
// the originalStart date to find the instance and modify it to match
// the backup by updating the instance to match the backed up one
func updateExceptionOccurrences(
ctx context.Context,
eiaa eventInstanceAndAttachmenter,
userID string,
containerID string,
itemID string,
exceptionOccurrences any,
errs *fault.Bus,
) error {
if exceptionOccurrences == nil {
return nil
}
eo, ok := exceptionOccurrences.([]any)
if !ok {
return clues.New("converting exceptionOccurrences to []any").
With("type", fmt.Sprintf("%T", exceptionOccurrences))
}
for _, instance := range eo {
instance, ok := instance.(map[string]any)
if !ok {
return clues.New("converting instance to map[string]any").
With("type", fmt.Sprintf("%T", instance))
}
evt, err := api.EventFromMap(instance)
if err != nil {
return clues.Wrap(err, "parsing exception event")
}
start := ptr.Val(evt.GetOriginalStart())
startStr := dttm.FormatTo(start, dttm.DateOnly)
endStr := dttm.FormatTo(start.Add(24*time.Hour), dttm.DateOnly)
ictx := clues.Add(ctx, "event_instance_id", ptr.Val(evt.GetId()), "event_instance_date", start)
// Get all instances on the day of the instance which should
// just the one we need to modify
instances, err := eiaa.GetItemInstances(ictx, userID, itemID, startStr, endStr)
if err != nil {
return clues.Wrap(err, "getting instances")
}
// Since the min recurrence interval is 1 day and we are
// querying for only a single day worth of instances, we
// should not have more than one instance here.
if len(instances) != 1 {
return clues.New("invalid number of instances for modified").
With("instances_count", len(instances), "search_start", startStr, "search_end", endStr)
}
evt = toEventSimplified(evt)
_, err = eiaa.PatchItem(ictx, userID, ptr.Val(instances[0].GetId()), evt)
if err != nil {
return clues.Wrap(err, "updating event instance")
}
// We are creating event again from map as `toEventSimplified`
// removed the attachments and creating a clone from start of
// the event is non-trivial
evt, err = api.EventFromMap(instance)
if err != nil {
return clues.Wrap(err, "parsing event instance")
}
err = updateAttachments(
ictx,
eiaa,
userID,
containerID,
ptr.Val(instances[0].GetId()),
evt,
errs)
if err != nil {
return clues.Wrap(err, "updating event instance attachments")
}
}
return nil
}
type attachmentGetDeletePoster interface {
attachmentPoster
GetAttachments(
ctx context.Context,
immutableIDs bool,
userID string,
itemID string,
) ([]models.Attachmentable, error)
DeleteAttachment(
ctx context.Context,
userID, calendarID, eventID, attachmentID string,
) error
}
// updateAttachments updates the attachments of an event to match what
// is present in the backed up event. Ideally we could make use of the
// id of the series master event's attachments to see if we had
// added/removed any attachments, but as soon as an event is modified,
// the id changes, which makes the ids unusable. Instead, we use the
// attachment name plus content bytes to detect changes. This function
// can be used to update the attachments of any event irrespective of
// whether they are event instances of a series master, although for
// newer events, since we probably won't already have any attachments,
// it would be better to use Post[Small|Large]Attachment.
func updateAttachments(
	ctx context.Context,
	agdp attachmentGetDeletePoster,
	userID, containerID, eventID string,
	event models.Eventable,
	errs *fault.Bus,
) error {
	el := errs.Local()

	attachments, err := agdp.GetAttachments(ctx, false, userID, eventID)
	if err != nil {
		return clues.Wrap(err, "getting attachments")
	}

	// Delete attachments that are not present in the backup but are
	// present in the event (ones that were automatically inherited
	// from the series master).
	for _, att := range attachments {
		if el.Failure() != nil {
			return el.Failure()
		}

		name := ptr.Val(att.GetName())
		id := ptr.Val(att.GetId())

		content, err := api.GetAttachmentContent(att)
		if err != nil {
			return clues.Wrap(err, "getting attachment").With("attachment_id", id)
		}

		found, err := hasMatchingAttachment(name, content, event.GetAttachments())
		if err != nil {
			return err
		}

		if !found {
			err = agdp.DeleteAttachment(ctx, userID, containerID, eventID, id)
			if err != nil {
				// Deletion failures are recoverable: record and continue with
				// the remaining attachments instead of aborting the restore.
				logger.CtxErr(ctx, err).With("attachment_name", name).Info("attachment delete failed")
				el.AddRecoverable(ctx, clues.Wrap(err, "deleting event attachment").
					WithClues(ctx).With("attachment_name", name))
			}
		}
	}

	// Upload missing attachments (ones that are present in the
	// individual instance but not in the series master event).
	for _, att := range event.GetAttachments() {
		name := ptr.Val(att.GetName())
		id := ptr.Val(att.GetId())

		content, err := api.GetAttachmentContent(att)
		if err != nil {
			return clues.Wrap(err, "getting attachment").With("attachment_id", id)
		}

		found, err := hasMatchingAttachment(name, content, attachments)
		if err != nil {
			return err
		}

		if !found {
			err = uploadAttachment(ctx, agdp, userID, containerID, eventID, att)
			if err != nil {
				return clues.Wrap(err, "uploading attachment").
					With("attachment_id", id)
			}
		}
	}

	return el.Failure()
}

// hasMatchingAttachment reports whether atts contains an attachment with
// the given name and content bytes. Comparison is by name + content
// because attachment ids change whenever the event is modified.
func hasMatchingAttachment(
	name string,
	content []byte,
	atts []models.Attachmentable,
) (bool, error) {
	for _, att := range atts {
		aName := ptr.Val(att.GetName())

		aContent, err := api.GetAttachmentContent(att)
		if err != nil {
			return false, clues.Wrap(err, "getting attachment").
				With("attachment_id", ptr.Val(att.GetId()))
		}

		if name == aName && bytes.Equal(content, aContent) {
			return true, nil
		}
	}

	return false, nil
}
// updateCancelledOccurrences handles the cancelled occurrences of a
// recurring event. cancelledOccurrences is a list of strings of the
// format "<id>.<date>"; the date is parsed out of each entry and used
// to fetch the event instance on that day, which is then deleted.
func updateCancelledOccurrences(
	ctx context.Context,
	eiaa eventInstanceAndAttachmenter,
	userID string,
	itemID string,
	cancelledOccurrences any,
) error {
	if cancelledOccurrences == nil {
		return nil
	}

	occurrences, ok := cancelledOccurrences.([]any)
	if !ok {
		return clues.New("converting cancelledOccurrences to []any").
			With("type", fmt.Sprintf("%T", cancelledOccurrences))
	}

	// OPTIMIZATION: multiple nearby cancellations could be fetched with
	// a single date-range query instead of one fetch per instance,
	// reducing the number of API calls we have to make.
	for _, occ := range occurrences {
		occStr, err := str.AnyToString(occ)
		if err != nil {
			return err
		}

		// The date is the final dot-delimited segment of the entry.
		segments := strings.Split(occStr, ".")
		startStr := segments[len(segments)-1]

		start, err := dttm.ParseTime(startStr)
		if err != nil {
			return clues.Wrap(err, "parsing cancelled event date")
		}

		endStr := dttm.FormatTo(start.Add(24*time.Hour), dttm.DateOnly)

		// Fetch all instances on the day of the occurrence, which should
		// be exactly the one we need to delete.
		instances, err := eiaa.GetItemInstances(ctx, userID, itemID, startStr, endStr)
		if err != nil {
			return clues.Wrap(err, "getting instances")
		}

		// Since the min recurrence interval is 1 day and we queried only
		// a single day's worth of instances, anything other than exactly
		// one instance is unexpected.
		if len(instances) != 1 {
			return clues.New("invalid number of instances for cancelled").
				With("instances_count", len(instances), "search_start", startStr, "search_end", endStr)
		}

		if err := eiaa.DeleteItem(ctx, userID, ptr.Val(instances[0].GetId())); err != nil {
			return clues.Wrap(err, "deleting event instance")
		}
	}

	return nil
}
// getItemsInContainerByCollisionKey looks up all events currently in
// the container and returns them as a map[collisionKey]itemID.
func (h eventRestoreHandler) getItemsInContainerByCollisionKey(
	ctx context.Context,
	userID, containerID string,
) (map[string]string, error) {
	return h.ac.GetItemsInContainerByCollisionKey(ctx, userID, containerID)
}

View File

@ -1,24 +1,101 @@
package exchange package exchange
import ( import (
"context"
"testing" "testing"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/google/uuid"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
) )
// Compile-time check that the mock satisfies eventRestorer.
var _ eventRestorer = &mockEventRestorer{}

// mockEventRestorer stubs the graph API calls made during event
// restore. The configurable errors let tests exercise item-post and
// attachment-post failure paths; all other calls succeed trivially.
type mockEventRestorer struct {
	postItemErr       error
	postAttachmentErr error
}

// PostItem returns a fresh empty event and the configured postItemErr.
func (m mockEventRestorer) PostItem(
	ctx context.Context,
	userID, containerID string,
	body models.Eventable,
) (models.Eventable, error) {
	return models.NewEvent(), m.postItemErr
}

// PostSmallAttachment returns the configured postAttachmentErr.
func (m mockEventRestorer) PostSmallAttachment(
	_ context.Context,
	_, _, _ string,
	_ models.Attachmentable,
) error {
	return m.postAttachmentErr
}

// PostLargeAttachment returns a random id and the configured
// postAttachmentErr.
func (m mockEventRestorer) PostLargeAttachment(
	_ context.Context,
	_, _, _, _ string,
	_ []byte,
) (string, error) {
	return uuid.NewString(), m.postAttachmentErr
}

// DeleteAttachment always succeeds.
func (m mockEventRestorer) DeleteAttachment(
	ctx context.Context,
	userID, calendarID, eventID, attachmentID string,
) error {
	return nil
}

// DeleteItem always succeeds.
func (m mockEventRestorer) DeleteItem(
	ctx context.Context,
	userID, itemID string,
) error {
	return nil
}

// GetAttachments returns an empty attachment list.
func (m mockEventRestorer) GetAttachments(
	_ context.Context,
	_ bool,
	_, _ string,
) ([]models.Attachmentable, error) {
	return []models.Attachmentable{}, nil
}

// GetItemInstances returns an empty instance list.
func (m mockEventRestorer) GetItemInstances(
	_ context.Context,
	_, _, _, _ string,
) ([]models.Eventable, error) {
	return []models.Eventable{}, nil
}

// PatchItem returns a fresh empty event and no error.
func (m mockEventRestorer) PatchItem(
	_ context.Context,
	_, _ string,
	_ models.Eventable,
) (models.Eventable, error) {
	return models.NewEvent(), nil
}
// ---------------------------------------------------------------------------
// tests
// ---------------------------------------------------------------------------
type EventsRestoreIntgSuite struct { type EventsRestoreIntgSuite struct {
tester.Suite tester.Suite
creds account.M365Config its intgTesterSetup
ac api.Client
userID string
} }
func TestEventsRestoreIntgSuite(t *testing.T) { func TestEventsRestoreIntgSuite(t *testing.T) {
@ -30,29 +107,110 @@ func TestEventsRestoreIntgSuite(t *testing.T) {
} }
func (suite *EventsRestoreIntgSuite) SetupSuite() { func (suite *EventsRestoreIntgSuite) SetupSuite() {
t := suite.T() suite.its = newIntegrationTesterSetup(suite.T())
a := tester.NewM365Account(t)
creds, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.creds = creds
suite.ac, err = api.NewClient(creds)
require.NoError(t, err, clues.ToCore(err))
suite.userID = tester.M365UserID(t)
} }
// Testing to ensure that cache system works for in multiple different environments // Testing to ensure that cache system works for in multiple different environments
func (suite *EventsRestoreIntgSuite) TestCreateContainerDestination() { func (suite *EventsRestoreIntgSuite) TestCreateContainerDestination() {
runCreateDestinationTest( runCreateDestinationTest(
suite.T(), suite.T(),
newMailRestoreHandler(suite.ac), newEventRestoreHandler(suite.its.ac),
path.EmailCategory, path.EventsCategory,
suite.creds.AzureTenantID, suite.its.creds.AzureTenantID,
suite.userID, suite.its.userID,
testdata.DefaultRestoreConfig("").Location, testdata.DefaultRestoreConfig("").Location,
[]string{"Durmstrang"}, []string{"Durmstrang"},
[]string{"Beauxbatons"}) []string{"Beauxbatons"})
} }
// TestRestoreEvent exercises restoreEvent against a mocked API across
// each collision policy, with and without a pre-existing collision-key
// match. Skip and Replace on collision are expected to surface
// graph.ErrItemAlreadyExistsConflict; everything else succeeds.
func (suite *EventsRestoreIntgSuite) TestRestoreEvent() {
	body := mock.EventBytes("subject")

	stub, err := api.BytesToEventable(body)
	require.NoError(suite.T(), err, clues.ToCore(err))

	collisionKey := api.EventCollisionKey(stub)

	table := []struct {
		name         string
		apiMock      eventRestorer
		collisionMap map[string]string
		onCollision  control.CollisionPolicy
		expectErr    func(*testing.T, error)
	}{
		{
			// NOTE: names were previously swapped with the "skip" case below;
			// each case is now labeled with the policy it actually uses.
			name:         "no collision: copy",
			apiMock:      mockEventRestorer{},
			collisionMap: map[string]string{},
			onCollision:  control.Copy,
			expectErr: func(t *testing.T, err error) {
				assert.NoError(t, err, clues.ToCore(err))
			},
		},
		{
			name:         "no collision: skip",
			apiMock:      mockEventRestorer{},
			collisionMap: map[string]string{},
			onCollision:  control.Skip,
			expectErr: func(t *testing.T, err error) {
				assert.NoError(t, err, clues.ToCore(err))
			},
		},
		{
			name:         "no collision: replace",
			apiMock:      mockEventRestorer{},
			collisionMap: map[string]string{},
			onCollision:  control.Replace,
			expectErr: func(t *testing.T, err error) {
				assert.NoError(t, err, clues.ToCore(err))
			},
		},
		{
			name:         "collision: skip",
			apiMock:      mockEventRestorer{},
			collisionMap: map[string]string{collisionKey: "smarf"},
			onCollision:  control.Skip,
			expectErr: func(t *testing.T, err error) {
				assert.ErrorIs(t, err, graph.ErrItemAlreadyExistsConflict, clues.ToCore(err))
			},
		},
		{
			name:         "collision: copy",
			apiMock:      mockEventRestorer{},
			collisionMap: map[string]string{collisionKey: "smarf"},
			onCollision:  control.Copy,
			expectErr: func(t *testing.T, err error) {
				assert.NoError(t, err, clues.ToCore(err))
			},
		},
		{
			name:         "collision: replace",
			apiMock:      mockEventRestorer{},
			collisionMap: map[string]string{collisionKey: "smarf"},
			onCollision:  control.Replace,
			expectErr: func(t *testing.T, err error) {
				assert.ErrorIs(t, err, graph.ErrItemAlreadyExistsConflict, clues.ToCore(err))
			},
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			_, err := restoreEvent(
				ctx,
				test.apiMock,
				body,
				suite.its.userID,
				"destination",
				test.collisionMap,
				test.onCollision,
				fault.New(true))
			test.expectErr(t, err)
		})
	}
}

View File

@ -7,6 +7,7 @@ import (
"github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
@ -60,6 +61,7 @@ func BackupHandlers(ac api.Client) map[path.CategoryType]backupHandler {
type restoreHandler interface { type restoreHandler interface {
itemRestorer itemRestorer
containerAPI containerAPI
getItemsByCollisionKeyser
newContainerCache(userID string) graph.ContainerResolver newContainerCache(userID string) graph.ContainerResolver
formatRestoreDestination( formatRestoreDestination(
destinationContainerName string, destinationContainerName string,
@ -75,19 +77,12 @@ type itemRestorer interface {
ctx context.Context, ctx context.Context,
body []byte, body []byte,
userID, destinationID string, userID, destinationID string,
collisionKeyToItemID map[string]string,
collisionPolicy control.CollisionPolicy,
errs *fault.Bus, errs *fault.Bus,
) (*details.ExchangeInfo, error) ) (*details.ExchangeInfo, error)
} }
// runs the actual graph API post request.
type itemPoster[T any] interface {
PostItem(
ctx context.Context,
userID, dirID string,
body T,
) (T, error)
}
// produces structs that interface with the graph/cache_container // produces structs that interface with the graph/cache_container
// CachedContainer interface. // CachedContainer interface.
type containerAPI interface { type containerAPI interface {
@ -129,3 +124,24 @@ func restoreHandlers(
path.EventsCategory: newEventRestoreHandler(ac), path.EventsCategory: newEventRestoreHandler(ac),
} }
} }
type getItemsByCollisionKeyser interface {
	// GetItemsInContainerByCollisionKey looks up all items currently in
	// the container, and returns them in a map[collisionKey]itemID.
	// The collision key is uniquely defined by each category of data.
	// Collision key checks are used during restore to handle the on-
	// collision restore configurations that cause the item restore to get
	// skipped, replaced, or copied.
	getItemsInContainerByCollisionKey(
		ctx context.Context,
		userID, containerID string,
	) (map[string]string, error)
}

// postItemer abstracts the graph API post request that creates a new
// item of type T in the given user's container.
type postItemer[T any] interface {
	PostItem(
		ctx context.Context,
		userID, containerID string,
		body T,
	) (T, error)
}

View File

@ -0,0 +1,38 @@
package exchange
import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// intgTesterSetup bundles the api client, account credentials, and
// test user id that integration test suites commonly need.
type intgTesterSetup struct {
	ac     api.Client
	creds  account.M365Config
	userID string
}

// newIntegrationTesterSetup builds an intgTesterSetup from the test
// environment's m365 account; any setup failure fails the test
// immediately via require.
func newIntegrationTesterSetup(t *testing.T) intgTesterSetup {
	its := intgTesterSetup{}

	ctx, flush := tester.NewContext(t)
	defer flush()

	a := tester.NewM365Account(t)

	creds, err := a.M365Config()
	require.NoError(t, err, clues.ToCore(err))

	its.creds = creds

	its.ac, err = api.NewClient(creds)
	require.NoError(t, err, clues.ToCore(err))

	its.userID = tester.GetM365UserID(ctx)

	return its
}

View File

@ -10,7 +10,9 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
) )
@ -19,17 +21,13 @@ var _ itemRestorer = &mailRestoreHandler{}
type mailRestoreHandler struct { type mailRestoreHandler struct {
ac api.Mail ac api.Mail
ip itemPoster[models.Messageable]
} }
func newMailRestoreHandler( func newMailRestoreHandler(
ac api.Client, ac api.Client,
) mailRestoreHandler { ) mailRestoreHandler {
acm := ac.Mail()
return mailRestoreHandler{ return mailRestoreHandler{
ac: acm, ac: ac.Mail(),
ip: acm,
} }
} }
@ -72,6 +70,32 @@ func (h mailRestoreHandler) restore(
ctx context.Context, ctx context.Context,
body []byte, body []byte,
userID, destinationID string, userID, destinationID string,
collisionKeyToItemID map[string]string,
collisionPolicy control.CollisionPolicy,
errs *fault.Bus,
) (*details.ExchangeInfo, error) {
return restoreMail(
ctx,
h.ac,
body,
userID, destinationID,
collisionKeyToItemID,
collisionPolicy,
errs)
}
type mailRestorer interface {
postItemer[models.Messageable]
attachmentPoster
}
func restoreMail(
ctx context.Context,
mr mailRestorer,
body []byte,
userID, destinationID string,
collisionKeyToItemID map[string]string,
collisionPolicy control.CollisionPolicy,
errs *fault.Bus, errs *fault.Bus,
) (*details.ExchangeInfo, error) { ) (*details.ExchangeInfo, error) {
msg, err := api.BytesToMessageable(body) msg, err := api.BytesToMessageable(body)
@ -80,20 +104,33 @@ func (h mailRestoreHandler) restore(
} }
ctx = clues.Add(ctx, "item_id", ptr.Val(msg.GetId())) ctx = clues.Add(ctx, "item_id", ptr.Val(msg.GetId()))
collisionKey := api.MailCollisionKey(msg)
if _, ok := collisionKeyToItemID[collisionKey]; ok {
log := logger.Ctx(ctx).With("collision_key", clues.Hide(collisionKey))
log.Debug("item collision")
// TODO(rkeepers): Replace probably shouldn't no-op. Just a starting point.
if collisionPolicy == control.Skip || collisionPolicy == control.Replace {
log.Debug("skipping item with collision")
return nil, graph.ErrItemAlreadyExistsConflict
}
}
msg = setMessageSVEPs(toMessage(msg)) msg = setMessageSVEPs(toMessage(msg))
attachments := msg.GetAttachments() attachments := msg.GetAttachments()
// Item.Attachments --> HasAttachments doesn't always have a value populated when deserialized // Item.Attachments --> HasAttachments doesn't always have a value populated when deserialized
msg.SetAttachments([]models.Attachmentable{}) msg.SetAttachments([]models.Attachmentable{})
item, err := h.ip.PostItem(ctx, userID, destinationID, msg) item, err := mr.PostItem(ctx, userID, destinationID, msg)
if err != nil { if err != nil {
return nil, graph.Wrap(ctx, err, "restoring mail message") return nil, graph.Wrap(ctx, err, "restoring mail message")
} }
err = uploadAttachments( err = uploadAttachments(
ctx, ctx,
h.ac, mr,
attachments, attachments,
userID, userID,
destinationID, destinationID,
@ -138,3 +175,15 @@ func setMessageSVEPs(msg models.Messageable) models.Messageable {
return msg return msg
} }
func (h mailRestoreHandler) getItemsInContainerByCollisionKey(
ctx context.Context,
userID, containerID string,
) (map[string]string, error) {
m, err := h.ac.GetItemsInContainerByCollisionKey(ctx, userID, containerID)
if err != nil {
return nil, err
}
return m, nil
}

View File

@ -1,24 +1,64 @@
package exchange package exchange
import ( import (
"context"
"testing" "testing"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/google/uuid"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
) )
// Compile-time check that the mock satisfies mailRestorer.
var _ mailRestorer = &mockMailRestorer{}

// mockMailRestorer stubs the graph API calls made during mail restore.
// The configurable errors let tests exercise item-post and
// attachment-post failure paths.
type mockMailRestorer struct {
	postItemErr       error
	postAttachmentErr error
}

// PostItem returns a fresh empty message and the configured postItemErr.
func (m mockMailRestorer) PostItem(
	ctx context.Context,
	userID, containerID string,
	body models.Messageable,
) (models.Messageable, error) {
	return models.NewMessage(), m.postItemErr
}

// PostSmallAttachment returns the configured postAttachmentErr.
func (m mockMailRestorer) PostSmallAttachment(
	_ context.Context,
	_, _, _ string,
	_ models.Attachmentable,
) error {
	return m.postAttachmentErr
}

// PostLargeAttachment returns a random id and the configured
// postAttachmentErr.
func (m mockMailRestorer) PostLargeAttachment(
	_ context.Context,
	_, _, _, _ string,
	_ []byte,
) (string, error) {
	return uuid.NewString(), m.postAttachmentErr
}
// ---------------------------------------------------------------------------
// tests
// ---------------------------------------------------------------------------
type MailRestoreIntgSuite struct { type MailRestoreIntgSuite struct {
tester.Suite tester.Suite
creds account.M365Config its intgTesterSetup
ac api.Client
userID string
} }
func TestMailRestoreIntgSuite(t *testing.T) { func TestMailRestoreIntgSuite(t *testing.T) {
@ -30,29 +70,109 @@ func TestMailRestoreIntgSuite(t *testing.T) {
} }
func (suite *MailRestoreIntgSuite) SetupSuite() { func (suite *MailRestoreIntgSuite) SetupSuite() {
t := suite.T() suite.its = newIntegrationTesterSetup(suite.T())
a := tester.NewM365Account(t)
creds, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.creds = creds
suite.ac, err = api.NewClient(creds)
require.NoError(t, err, clues.ToCore(err))
suite.userID = tester.M365UserID(t)
} }
// Testing to ensure that cache system works for in multiple different environments
func (suite *MailRestoreIntgSuite) TestCreateContainerDestination() { func (suite *MailRestoreIntgSuite) TestCreateContainerDestination() {
runCreateDestinationTest( runCreateDestinationTest(
suite.T(), suite.T(),
newMailRestoreHandler(suite.ac), newMailRestoreHandler(suite.its.ac),
path.EmailCategory, path.EmailCategory,
suite.creds.AzureTenantID, suite.its.creds.AzureTenantID,
suite.userID, suite.its.userID,
testdata.DefaultRestoreConfig("").Location, testdata.DefaultRestoreConfig("").Location,
[]string{"Griffindor", "Croix"}, []string{"Griffindor", "Croix"},
[]string{"Griffindor", "Felicius"}) []string{"Griffindor", "Felicius"})
} }
// TestRestoreMail exercises restoreMail against a mocked API across
// each collision policy, with and without a pre-existing collision-key
// match. Skip and Replace on collision are expected to surface
// graph.ErrItemAlreadyExistsConflict; everything else succeeds.
func (suite *MailRestoreIntgSuite) TestRestoreMail() {
	body := mock.MessageBytes("subject")

	stub, err := api.BytesToMessageable(body)
	require.NoError(suite.T(), err, clues.ToCore(err))

	collisionKey := api.MailCollisionKey(stub)

	table := []struct {
		name         string
		apiMock      mailRestorer
		collisionMap map[string]string
		onCollision  control.CollisionPolicy
		expectErr    func(*testing.T, error)
	}{
		{
			// NOTE: names were previously swapped with the "skip" case below;
			// each case is now labeled with the policy it actually uses.
			name:         "no collision: copy",
			apiMock:      mockMailRestorer{},
			collisionMap: map[string]string{},
			onCollision:  control.Copy,
			expectErr: func(t *testing.T, err error) {
				assert.NoError(t, err, clues.ToCore(err))
			},
		},
		{
			name:         "no collision: skip",
			apiMock:      mockMailRestorer{},
			collisionMap: map[string]string{},
			onCollision:  control.Skip,
			expectErr: func(t *testing.T, err error) {
				assert.NoError(t, err, clues.ToCore(err))
			},
		},
		{
			name:         "no collision: replace",
			apiMock:      mockMailRestorer{},
			collisionMap: map[string]string{},
			onCollision:  control.Replace,
			expectErr: func(t *testing.T, err error) {
				assert.NoError(t, err, clues.ToCore(err))
			},
		},
		{
			name:         "collision: skip",
			apiMock:      mockMailRestorer{},
			collisionMap: map[string]string{collisionKey: "smarf"},
			onCollision:  control.Skip,
			expectErr: func(t *testing.T, err error) {
				assert.ErrorIs(t, err, graph.ErrItemAlreadyExistsConflict, clues.ToCore(err))
			},
		},
		{
			name:         "collision: copy",
			apiMock:      mockMailRestorer{},
			collisionMap: map[string]string{collisionKey: "smarf"},
			onCollision:  control.Copy,
			expectErr: func(t *testing.T, err error) {
				assert.NoError(t, err, clues.ToCore(err))
			},
		},
		{
			name:         "collision: replace",
			apiMock:      mockMailRestorer{},
			collisionMap: map[string]string{collisionKey: "smarf"},
			onCollision:  control.Replace,
			expectErr: func(t *testing.T, err error) {
				assert.ErrorIs(t, err, graph.ErrItemAlreadyExistsConflict, clues.ToCore(err))
			},
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			_, err := restoreMail(
				ctx,
				test.apiMock,
				body,
				suite.its.userID,
				"destination",
				test.collisionMap,
				test.onCollision,
				fault.New(true))
			test.expectErr(t, err)
		})
	}
}

View File

@ -23,13 +23,8 @@ import (
// 10. attendees // 10. attendees
//nolint:lll //nolint:lll
const ( var (
eventTmpl = `{ eventTmpl = `{
"id":"AAMkAGZmNjNlYjI3LWJlZWYtNGI4Mi04YjMyLTIxYThkNGQ4NmY1MwBGAAAAAADCNgjhM9QmQYWNcI7hCpPrBwDSEBNbUIB9RL6ePDeF3FIYAAAAAAENAADSEBNbUIB9RL6ePDeF3FIYAAAAAG76AAA=",
"calendar@odata.navigationLink":"https://graph.microsoft.com/v1.0/users('foobar@8qzvrj.onmicrosoft.com')/calendars('AAMkAGZmNjNlYjI3LWJlZWYtNGI4Mi04YjMyLTIxYThkNGQ4NmY1MwAuAAAAAADCNgjhM9QmQYWNcI7hCpPrAQDSEBNbUIB9RL6ePDeF3FIYAAAAAAENAAA=')",
"calendar@odata.associationLink":"https://graph.microsoft.com/v1.0/users('foobar@8qzvrj.onmicrosoft.com')/calendars('AAMkAGZmNjNlYjI3LWJlZWYtNGI4Mi04YjMyLTIxYThkNGQ4NmY1MwAuAAAAAADCNgjhM9QmQYWNcI7hCpPrAQDSEBNbUIB9RL6ePDeF3FIYAAAAAAENAAA=')/$ref",
"@odata.etag":"W/\"0hATW1CAfUS+njw3hdxSGAAAJIxNug==\"",
"@odata.context":"https://graph.microsoft.com/v1.0/$metadata#users('foobar%%408qzvrj.onmicrosoft.com')/events/$entity",
"categories":[], "categories":[],
"changeKey":"0hATW1CAfUS+njw3hdxSGAAAJIxNug==", "changeKey":"0hATW1CAfUS+njw3hdxSGAAAJIxNug==",
"createdDateTime":"2022-03-28T03:42:03Z", "createdDateTime":"2022-03-28T03:42:03Z",
@ -46,7 +41,6 @@ const (
"timeZone":"UTC" "timeZone":"UTC"
}, },
"hideAttendees":false, "hideAttendees":false,
"iCalUId":"040000008200E00074C5B7101A82E0080000000035723BC75542D801000000000000000010000000E1E7C8F785242E4894DA13AEFB947B85",
"importance":"normal", "importance":"normal",
"isAllDay":false, "isAllDay":false,
"isCancelled":false, "isCancelled":false,
@ -75,6 +69,7 @@ const (
"name":"Anu Pierson" "name":"Anu Pierson"
} }
}, },
%s
"originalEndTimeZone":"UTC", "originalEndTimeZone":"UTC",
"originalStartTimeZone":"UTC", "originalStartTimeZone":"UTC",
"reminderMinutesBeforeStart":15, "reminderMinutesBeforeStart":15,
@ -90,19 +85,23 @@ const (
"timeZone":"UTC" "timeZone":"UTC"
}, },
"subject":"%s", "subject":"%s",
"type":"singleInstance", "type":"%s",
"hasAttachments":%v, "hasAttachments":%v,
%s %s
"webLink":"https://outlook.office365.com/owa/?itemid=AAMkAGZmNjNlYjI3LWJlZWYtNGI4Mi04YjMyLTIxYThkNGQ4NmY1MwBGAAAAAADCNgjhM9QmQYWNcI7hCpPrBwDSEBNbUIB9RL6ePDeF3FIYAAAAAAENAADSEBNbUIB9RL6ePDeF3FIYAAAAAG76AAA%%3D&exvsurl=1&path=/calendar/item", "webLink":"https://outlook.office365.com/owa/?itemid=AAMkAGZmNjNlYjI3LWJlZWYtNGI4Mi04YjMyLTIxYThkNGQ4NmY1MwBGAAAAAADCNgjhM9QmQYWNcI7hCpPrBwDSEBNbUIB9RL6ePDeF3FIYAAAAAAENAADSEBNbUIB9RL6ePDeF3FIYAAAAAG76AAA%%3D&exvsurl=1&path=/calendar/item",
"recurrence":%s, "recurrence":%s,
%s
%s
"attendees":%s "attendees":%s
}` }`
defaultEventBody = "This meeting is to review the latest Tailspin Toys project proposal.<br>\\r\\nBut why not eat some sushi while were at it? :)" defaultEventBody = "This meeting is to review the latest Tailspin Toys project proposal.<br>\\r\\nBut why not eat some sushi while were at it? :)"
defaultEventBodyPreview = "This meeting is to review the latest Tailspin Toys project proposal.\\r\\nBut why not eat some sushi while were at it? :)" defaultEventBodyPreview = "This meeting is to review the latest Tailspin Toys project proposal.\\r\\nBut why not eat some sushi while were at it? :)"
defaultEventOrganizer = "foobar@8qzvrj.onmicrosoft.com" defaultEventOrganizer = "foobar@8qzvrj.onmicrosoft.com"
eventAttachment = "\"attachments\":[{\"id\":\"AAMkAGZmNjNlYjI3LWJlZWYtNGI4Mi04YjMyLTIxYThkNGQ4NmY1MwBGAAAAAADCNgjhM9QmQYWNcI7hCpPrBwDSEBNbUIB9RL6ePDeF3FIYAAAAAAENAADSEBNbUIB9RL6ePDeF3FIYAACLjfLQAAABEgAQAHoI0xBbBBVEh6bFMU78ZUo=\",\"@odata.type\":\"#microsoft.graph.fileAttachment\"," +
"\"@odata.mediaContentType\":\"application/octet-stream\",\"contentType\":\"application/octet-stream\",\"isInline\":false,\"lastModifiedDateTime\":\"2022-10-26T15:19:42Z\",\"name\":\"database.db\",\"size\":11418," + NoAttachments = ""
eventAttachmentFormat = "{\"id\":\"AAMkAGZmNjNlYjI3LWJlZWYtNGI4Mi04YjMyLTIxYThkNGQ4NmY1MwBGAAAAAADCNgjhM9QmQYWNcI7hCpPrBwDSEBNbUIB9RL6ePDeF3FIYAAAAAAENAADSEBNbUIB9RL6ePDeF3FIYAACLjfLQAAABEgAQAHoI0xBbBBVEh6bFMU78ZUo=\",\"@odata.type\":\"#microsoft.graph.fileAttachment\"," +
"\"@odata.mediaContentType\":\"application/octet-stream\",\"contentType\":\"application/octet-stream\",\"isInline\":false,\"lastModifiedDateTime\":\"2022-10-26T15:19:42Z\",\"name\":\"%s\",\"size\":11418," +
"\"contentBytes\":\"U1FMaXRlIGZvcm1hdCAzAAQAAQEAQCAgAAAATQAAAAsAAAAEAAAACAAAAAsAAAAEAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABNAC3mBw0DZwACAg8AAxUCDwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "\"contentBytes\":\"U1FMaXRlIGZvcm1hdCAzAAQAAQEAQCAgAAAATQAAAAsAAAAEAAAACAAAAAsAAAAEAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABNAC3mBw0DZwACAg8AAxUCDwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" +
"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" +
"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACCAwMHFxUVAYNpdGFibGVkYXRhZGF0YQJDUkVBVEUgVEFCTEUgZGF0YSAoCiAgICAgICAgIGlkIGludGVnZXIgcHJpbWFyeSBrZXkgYXV0b2luY3JlbWVudCwKICAgICAgICAgbWVhbiB0ZXh0IG5vdCBudWxsLAogICAgICAgICBtYXggdGV4dCBub3QgbnVsbCwKICAgICAgICAgbWluIHRleHQgbm90IG51bGwsCiAgICAgICAgIGRhdGEgdGV" + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACCAwMHFxUVAYNpdGFibGVkYXRhZGF0YQJDUkVBVEUgVEFCTEUgZGF0YSAoCiAgICAgICAgIGlkIGludGVnZXIgcHJpbWFyeSBrZXkgYXV0b2luY3JlbWVudCwKICAgICAgICAgbWVhbiB0ZXh0IG5vdCBudWxsLAogICAgICAgICBtYXggdGV4dCBub3QgbnVsbCwKICAgICAgICAgbWluIHRleHQgbm90IG51bGwsCiAgICAgICAgIGRhdGEgdGV" +
@ -149,15 +148,19 @@ const (
"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" +
"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" +
"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" +
"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=\"}]," "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=\"}"
defaultEventAttachments = "\"attachments\":[" + fmt.Sprintf(eventAttachmentFormat, "database.db") + "],"
originalStartDateFormat = `"originalStart": "%s",`
NoOriginalStartDate = ``
NoRecurrence = `null` NoRecurrence = `null`
recurrenceTmpl = `{ recurrenceTmpl = `{
"pattern": { "pattern": {
"type": "absoluteYearly", "type": "absoluteYearly",
"interval": 1, "interval": 1,
"month": 1, "month": %s,
"dayOfMonth": 1, "dayOfMonth": %s,
"firstDayOfWeek": "sunday", "firstDayOfWeek": "sunday",
"index": "first" "index": "first"
}, },
@ -170,6 +173,13 @@ const (
} }
}` }`
cancelledOccurrencesFormat = `"cancelledOccurrences": [%s],`
cancelledOccurrenceInstanceFormat = `"OID.AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAENAABBFDg0JJk7TY1fmsJrh7tNAADHGTZoAAA=.%s"`
NoCancelledOccurrences = ""
exceptionOccurrencesFormat = `"exceptionOccurrences": [%s],`
NoExceptionOccurrences = ""
NoAttendees = `[]` NoAttendees = `[]`
attendeesTmpl = `[{ attendeesTmpl = `[{
"emailAddress": { "emailAddress": {
@ -219,38 +229,48 @@ func EventBytes(subject string) []byte {
} }
func EventWithSubjectBytes(subject string) []byte { func EventWithSubjectBytes(subject string) []byte {
tomorrow := time.Now().UTC().AddDate(0, 0, 1) var (
at := time.Date(tomorrow.Year(), tomorrow.Month(), tomorrow.Day(), tomorrow.Hour(), 0, 0, 0, time.UTC) tomorrow = time.Now().UTC().AddDate(0, 0, 1)
atTime := dttm.Format(at) at = time.Date(tomorrow.Year(), tomorrow.Month(), tomorrow.Day(), tomorrow.Hour(), 0, 0, 0, time.UTC)
endTime := dttm.Format(at.Add(30 * time.Minute)) atTime = dttm.Format(at)
endTime = dttm.Format(at.Add(30 * time.Minute))
)
return EventWith( return EventWith(
defaultEventOrganizer, subject, defaultEventOrganizer, subject,
defaultEventBody, defaultEventBodyPreview, defaultEventBody, defaultEventBodyPreview,
atTime, endTime, NoRecurrence, NoAttendees, false, NoOriginalStartDate, atTime, endTime, NoRecurrence, NoAttendees,
NoAttachments, NoCancelledOccurrences, NoExceptionOccurrences,
) )
} }
func EventWithAttachment(subject string) []byte { func EventWithAttachment(subject string) []byte {
tomorrow := time.Now().UTC().AddDate(0, 0, 1) var (
at := time.Date(tomorrow.Year(), tomorrow.Month(), tomorrow.Day(), tomorrow.Hour(), 0, 0, 0, time.UTC) tomorrow = time.Now().UTC().AddDate(0, 0, 1)
atTime := dttm.Format(at) at = time.Date(tomorrow.Year(), tomorrow.Month(), tomorrow.Day(), tomorrow.Hour(), 0, 0, 0, time.UTC)
atTime = dttm.Format(at)
)
return EventWith( return EventWith(
defaultEventOrganizer, subject, defaultEventOrganizer, subject,
defaultEventBody, defaultEventBodyPreview, defaultEventBody, defaultEventBodyPreview,
atTime, atTime, NoRecurrence, NoAttendees, true, NoOriginalStartDate, atTime, atTime, NoRecurrence, NoAttendees,
defaultEventAttachments, NoCancelledOccurrences, NoExceptionOccurrences,
) )
} }
func EventWithRecurrenceBytes(subject, recurrenceTimeZone string) []byte { func EventWithRecurrenceBytes(subject, recurrenceTimeZone string) []byte {
tomorrow := time.Now().UTC().AddDate(0, 0, 1) var (
at := time.Date(tomorrow.Year(), tomorrow.Month(), tomorrow.Day(), tomorrow.Hour(), 0, 0, 0, time.UTC) tomorrow = time.Now().UTC().AddDate(0, 0, 1)
atTime := dttm.Format(at) at = time.Date(tomorrow.Year(), tomorrow.Month(), tomorrow.Day(), tomorrow.Hour(), 0, 0, 0, time.UTC)
timeSlice := strings.Split(atTime, "T") atTime = dttm.Format(at)
timeSlice = strings.Split(atTime, "T")
)
recurrence := string(fmt.Sprintf( recurrence := string(fmt.Sprintf(
recurrenceTmpl, recurrenceTmpl,
strconv.Itoa(int(at.Month())),
strconv.Itoa(at.Day()),
timeSlice[0], timeSlice[0],
recurrenceTimeZone, recurrenceTimeZone,
)) ))
@ -258,19 +278,125 @@ func EventWithRecurrenceBytes(subject, recurrenceTimeZone string) []byte {
return EventWith( return EventWith(
defaultEventOrganizer, subject, defaultEventOrganizer, subject,
defaultEventBody, defaultEventBodyPreview, defaultEventBody, defaultEventBodyPreview,
atTime, atTime, recurrence, attendeesTmpl, true, NoOriginalStartDate, atTime, atTime, recurrence, attendeesTmpl,
NoAttachments, NoCancelledOccurrences, NoExceptionOccurrences,
) )
} }
func EventWithAttendeesBytes(subject string) []byte { func EventWithRecurrenceAndCancellationBytes(subject string) []byte {
tomorrow := time.Now().UTC().AddDate(0, 0, 1) var (
at := time.Date(tomorrow.Year(), tomorrow.Month(), tomorrow.Day(), tomorrow.Hour(), 0, 0, 0, time.UTC) tomorrow = time.Now().UTC().AddDate(0, 0, 1)
atTime := dttm.Format(at) at = time.Date(tomorrow.Year(), tomorrow.Month(), tomorrow.Day(), tomorrow.Hour(), 0, 0, 0, time.UTC)
atTime = dttm.Format(at)
timeSlice = strings.Split(atTime, "T")
nextYear = tomorrow.AddDate(1, 0, 0)
)
recurrence := string(fmt.Sprintf(
recurrenceTmpl,
strconv.Itoa(int(at.Month())),
strconv.Itoa(at.Day()),
timeSlice[0],
`"UTC"`,
))
cancelledInstances := []string{fmt.Sprintf(cancelledOccurrenceInstanceFormat, dttm.FormatTo(nextYear, dttm.DateOnly))}
cancelledOccurrences := fmt.Sprintf(cancelledOccurrencesFormat, strings.Join(cancelledInstances, ","))
return EventWith( return EventWith(
defaultEventOrganizer, subject, defaultEventOrganizer, subject,
defaultEventBody, defaultEventBodyPreview, defaultEventBody, defaultEventBodyPreview,
atTime, atTime, NoRecurrence, attendeesTmpl, true, NoOriginalStartDate, atTime, atTime, recurrence, attendeesTmpl,
defaultEventAttachments, cancelledOccurrences, NoExceptionOccurrences,
)
}
func EventWithRecurrenceAndExceptionBytes(subject string) []byte {
var (
tomorrow = time.Now().UTC().AddDate(0, 0, 1)
at = time.Date(tomorrow.Year(), tomorrow.Month(), tomorrow.Day(), tomorrow.Hour(), 0, 0, 0, time.UTC)
atTime = dttm.Format(at)
timeSlice = strings.Split(atTime, "T")
newTime = dttm.Format(tomorrow.AddDate(0, 0, 1))
originalStartDate = dttm.FormatTo(at, dttm.TabularOutput)
)
recurrence := string(fmt.Sprintf(
recurrenceTmpl,
strconv.Itoa(int(at.Month())),
strconv.Itoa(at.Day()),
timeSlice[0],
`"UTC"`,
))
exceptionEvent := EventWith(
defaultEventOrganizer, subject+"(modified)",
defaultEventBody, defaultEventBodyPreview,
fmt.Sprintf(originalStartDateFormat, originalStartDate),
newTime, newTime, NoRecurrence, attendeesTmpl,
NoAttachments, NoCancelledOccurrences, NoExceptionOccurrences,
)
exceptionOccurrences := fmt.Sprintf(exceptionOccurrencesFormat, exceptionEvent)
return EventWith(
defaultEventOrganizer, subject,
defaultEventBody, defaultEventBodyPreview,
NoOriginalStartDate, atTime, atTime, recurrence, attendeesTmpl,
defaultEventAttachments, NoCancelledOccurrences, exceptionOccurrences,
)
}
func EventWithRecurrenceAndExceptionAndAttachmentBytes(subject string) []byte {
var (
tomorrow = time.Now().UTC().AddDate(0, 0, 1)
at = time.Date(tomorrow.Year(), tomorrow.Month(), tomorrow.Day(), tomorrow.Hour(), 0, 0, 0, time.UTC)
atTime = dttm.Format(at)
timeSlice = strings.Split(atTime, "T")
newTime = dttm.Format(tomorrow.AddDate(0, 0, 1))
originalStartDate = dttm.FormatTo(at, dttm.TabularOutput)
)
recurrence := string(fmt.Sprintf(
recurrenceTmpl,
strconv.Itoa(int(at.Month())),
strconv.Itoa(at.Day()),
timeSlice[0],
`"UTC"`,
))
exceptionEvent := EventWith(
defaultEventOrganizer, subject+"(modified)",
defaultEventBody, defaultEventBodyPreview,
fmt.Sprintf(originalStartDateFormat, originalStartDate),
newTime, newTime, NoRecurrence, attendeesTmpl,
"\"attachments\":["+fmt.Sprintf(eventAttachmentFormat, "exception-database.db")+"],",
NoCancelledOccurrences, NoExceptionOccurrences,
)
exceptionOccurrences := fmt.Sprintf(
exceptionOccurrencesFormat,
strings.Join([]string{string(exceptionEvent)}, ","),
)
return EventWith(
defaultEventOrganizer, subject,
defaultEventBody, defaultEventBodyPreview,
NoOriginalStartDate, atTime, atTime, recurrence, attendeesTmpl,
defaultEventAttachments, NoCancelledOccurrences, exceptionOccurrences,
)
}
func EventWithAttendeesBytes(subject string) []byte {
var (
tomorrow = time.Now().UTC().AddDate(0, 0, 1)
at = time.Date(tomorrow.Year(), tomorrow.Month(), tomorrow.Day(), tomorrow.Hour(), 0, 0, 0, time.UTC)
atTime = dttm.Format(at)
)
return EventWith(
defaultEventOrganizer, subject,
defaultEventBody, defaultEventBodyPreview,
NoOriginalStartDate, atTime, atTime, NoRecurrence, attendeesTmpl,
defaultEventAttachments, NoCancelledOccurrences, NoExceptionOccurrences,
) )
} }
@ -281,14 +407,10 @@ func EventWithAttendeesBytes(subject string) []byte {
// Body must contain a well-formatted string, consumable in a json payload. IE: no unescaped newlines. // Body must contain a well-formatted string, consumable in a json payload. IE: no unescaped newlines.
func EventWith( func EventWith(
organizer, subject, body, bodyPreview, organizer, subject, body, bodyPreview,
startDateTime, endDateTime, recurrence, attendees string, originalStartDate, startDateTime, endDateTime, recurrence, attendees string,
hasAttachments bool, attachments string, cancelledOccurrences, exceptionOccurrences string,
) []byte { ) []byte {
var attachments string hasAttachments := len(attachments) > 0
if hasAttachments {
attachments = eventAttachment
}
startDateTime = strings.TrimSuffix(startDateTime, "Z") startDateTime = strings.TrimSuffix(startDateTime, "Z")
endDateTime = strings.TrimSuffix(endDateTime, "Z") endDateTime = strings.TrimSuffix(endDateTime, "Z")
@ -300,17 +422,26 @@ func EventWith(
endDateTime += ".0000000" endDateTime += ".0000000"
} }
eventType := "singleInstance"
if recurrence != "null" {
eventType = "seriesMaster"
}
return []byte(fmt.Sprintf( return []byte(fmt.Sprintf(
eventTmpl, eventTmpl,
body, body,
bodyPreview, bodyPreview,
endDateTime, endDateTime,
organizer, organizer,
originalStartDate,
startDateTime, startDateTime,
subject, subject,
eventType,
hasAttachments, hasAttachments,
attachments, attachments,
recurrence, recurrence,
cancelledOccurrences,
exceptionOccurrences,
attendees, attendees,
)) ))
} }

View File

@ -41,9 +41,7 @@ func ConsumeRestoreCollections(
directoryCache = make(map[path.CategoryType]graph.ContainerResolver) directoryCache = make(map[path.CategoryType]graph.ContainerResolver)
handlers = restoreHandlers(ac) handlers = restoreHandlers(ac)
metrics support.CollectionMetrics metrics support.CollectionMetrics
// TODO policy to be updated from external source after completion of refactoring el = errs.Local()
policy = control.Copy
el = errs.Local()
) )
ctx = clues.Add(ctx, "resource_owner", clues.Hide(userID)) ctx = clues.Add(ctx, "resource_owner", clues.Hide(userID))
@ -87,16 +85,22 @@ func ConsumeRestoreCollections(
} }
directoryCache[category] = gcc directoryCache[category] = gcc
ictx = clues.Add(ictx, "restore_destination_id", containerID) ictx = clues.Add(ictx, "restore_destination_id", containerID)
collisionKeyToItemID, err := handler.getItemsInContainerByCollisionKey(ctx, userID, containerID)
if err != nil {
el.AddRecoverable(ctx, clues.Wrap(err, "building item collision cache"))
continue
}
temp, err := restoreCollection( temp, err := restoreCollection(
ictx, ictx,
handler, handler,
dc, dc,
userID, userID,
containerID, containerID,
policy, collisionKeyToItemID,
restoreCfg.OnCollision,
deets, deets,
errs) errs)
@ -127,7 +131,8 @@ func restoreCollection(
ir itemRestorer, ir itemRestorer,
dc data.RestoreCollection, dc data.RestoreCollection,
userID, destinationID string, userID, destinationID string,
policy control.CollisionPolicy, collisionKeyToItemID map[string]string,
collisionPolicy control.CollisionPolicy,
deets *details.Builder, deets *details.Builder,
errs *fault.Bus, errs *fault.Bus,
) (support.CollectionMetrics, error) { ) (support.CollectionMetrics, error) {
@ -172,9 +177,19 @@ func restoreCollection(
body := buf.Bytes() body := buf.Bytes()
info, err := ir.restore(ictx, body, userID, destinationID, errs) info, err := ir.restore(
ictx,
body,
userID,
destinationID,
collisionKeyToItemID,
collisionPolicy,
errs)
if err != nil { if err != nil {
el.AddRecoverable(ictx, err) if !graph.IsErrItemAlreadyExistsConflict(err) {
el.AddRecoverable(ictx, err)
}
continue continue
} }

View File

@ -13,6 +13,7 @@ import (
exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock" exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -74,6 +75,8 @@ func (suite *RestoreIntgSuite) TestRestoreContact() {
ctx, ctx,
exchMock.ContactBytes("Corso TestContact"), exchMock.ContactBytes("Corso TestContact"),
userID, folderID, userID, folderID,
nil,
control.Copy,
fault.New(true)) fault.New(true))
assert.NoError(t, err, clues.ToCore(err)) assert.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, info, "contact item info") assert.NotNil(t, info, "contact item info")
@ -116,9 +119,26 @@ func (suite *RestoreIntgSuite) TestRestoreEvent() {
name: "Test recurrenceTimeZone: Empty", name: "Test recurrenceTimeZone: Empty",
bytes: exchMock.EventWithRecurrenceBytes(subject, `""`), bytes: exchMock.EventWithRecurrenceBytes(subject, `""`),
}, },
{
name: "Test cancelledOccurrences",
bytes: exchMock.EventWithRecurrenceAndCancellationBytes(subject),
},
{
name: "Test exceptionOccurrences",
bytes: exchMock.EventWithRecurrenceAndExceptionBytes(subject),
},
{
name: "Test exceptionOccurrences with different attachments",
bytes: exchMock.EventWithRecurrenceAndExceptionAndAttachmentBytes(subject),
},
} }
for _, test := range tests { for _, test := range tests {
// Skip till https://github.com/alcionai/corso/issues/3675 is fixed
if test.name == "Test exceptionOccurrences" {
t.Skip("Bug 3675")
}
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
@ -129,6 +149,8 @@ func (suite *RestoreIntgSuite) TestRestoreEvent() {
ctx, ctx,
test.bytes, test.bytes,
userID, calendarID, userID, calendarID,
nil,
control.Copy,
fault.New(true)) fault.New(true))
assert.NoError(t, err, clues.ToCore(err)) assert.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, info, "event item info") assert.NotNil(t, info, "event item info")
@ -357,9 +379,82 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
ctx, ctx,
test.bytes, test.bytes,
userID, destination, userID, destination,
nil,
control.Copy,
fault.New(true)) fault.New(true))
assert.NoError(t, err, clues.ToCore(err)) assert.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, info, "item info was not populated") assert.NotNil(t, info, "item info was not populated")
}) })
} }
} }
func (suite *RestoreIntgSuite) TestRestoreAndBackupEvent_recurringInstancesWithAttachments() {
t := suite.T()
t.Skip("Bug 3675")
ctx, flush := tester.NewContext(t)
defer flush()
var (
userID = tester.M365UserID(t)
subject = testdata.DefaultRestoreConfig("event").Location
handler = newEventRestoreHandler(suite.ac)
)
calendar, err := handler.ac.CreateContainer(ctx, userID, subject, "")
require.NoError(t, err, clues.ToCore(err))
calendarID := ptr.Val(calendar.GetId())
bytes := exchMock.EventWithRecurrenceAndExceptionAndAttachmentBytes("Reoccurring event restore and backup test")
info, err := handler.restore(
ctx,
bytes,
userID, calendarID,
nil,
control.Copy,
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, info, "event item info")
ec, err := handler.ac.Stable.
Client().
Users().
ByUserId(userID).
Calendars().
ByCalendarId(calendarID).
Events().
Get(ctx, nil)
require.NoError(t, err, clues.ToCore(err))
evts := ec.GetValue()
assert.Len(t, evts, 1, "count of events")
sp, info, err := suite.ac.Events().GetItem(ctx, userID, ptr.Val(evts[0].GetId()), false, fault.New(true))
require.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, info, "event item info")
body, err := suite.ac.Events().Serialize(ctx, sp, userID, ptr.Val(evts[0].GetId()))
require.NoError(t, err, clues.ToCore(err))
event, err := api.BytesToEventable(body)
require.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, event.GetRecurrence(), "recurrence")
eo := event.GetAdditionalData()["exceptionOccurrences"]
assert.NotNil(t, eo, "exceptionOccurrences")
assert.NotEqual(
t,
ptr.Val(event.GetSubject()),
ptr.Val(eo.([]any)[0].(map[string]any)["subject"].(*string)),
"name equal")
atts := eo.([]any)[0].(map[string]any)["attachments"]
assert.NotEqual(
t,
ptr.Val(event.GetAttachments()[0].GetName()),
ptr.Val(atts.([]any)[0].(map[string]any)["name"].(*string)),
"attachment name equal")
}

View File

@ -70,7 +70,6 @@ func toEventSimplified(orig models.Eventable) models.Eventable {
newContent := insertStringToBody(origBody, attendees) newContent := insertStringToBody(origBody, attendees)
newBody := models.NewItemBody() newBody := models.NewItemBody()
newBody.SetContentType(origBody.GetContentType()) newBody.SetContentType(origBody.GetContentType())
newBody.SetAdditionalData(origBody.GetAdditionalData())
newBody.SetOdataType(origBody.GetOdataType()) newBody.SetOdataType(origBody.GetOdataType())
newBody.SetContent(&newContent) newBody.SetContent(&newContent)
orig.SetBody(newBody) orig.SetBody(newBody)
@ -89,6 +88,14 @@ func toEventSimplified(orig models.Eventable) models.Eventable {
} }
} }
// Remove exceptions for recurring events
// These will be present in objects once we start using the API
// that is currently in beta
additionalData := orig.GetAdditionalData()
delete(additionalData, "cancelledOccurrences")
delete(additionalData, "exceptionOccurrences")
orig.SetAdditionalData(additionalData)
return orig return orig
} }

View File

@ -121,6 +121,32 @@ func (suite *TransformUnitTest) TestToEventSimplified_recurrence() {
return ptr.Val(e.GetRecurrence().GetRange().GetRecurrenceTimeZone()) == "Pacific Standard Time" return ptr.Val(e.GetRecurrence().GetRange().GetRecurrenceTimeZone()) == "Pacific Standard Time"
}, },
}, },
{
name: "Test cancelledOccurrences",
event: func() models.Eventable {
bytes := exchMock.EventWithRecurrenceAndCancellationBytes(subject)
event, err := api.BytesToEventable(bytes)
require.NoError(t, err, clues.ToCore(err))
return event
},
validateOutput: func(e models.Eventable) bool {
return e.GetAdditionalData()["cancelledOccurrences"] == nil
},
},
{
name: "Test exceptionOccurrences",
event: func() models.Eventable {
bytes := exchMock.EventWithRecurrenceAndExceptionBytes(subject)
event, err := api.BytesToEventable(bytes)
require.NoError(t, err, clues.ToCore(err))
return event
},
validateOutput: func(e models.Eventable) bool {
return e.GetAdditionalData()["exceptionOccurrences"] == nil
},
},
} }
for _, test := range tests { for _, test := range tests {

View File

@ -73,17 +73,13 @@ func (cl *concurrencyLimiter) Intercept(
const ( const (
// Default goal is to keep calls below the 10k-per-10-minute threshold. // Default goal is to keep calls below the 10k-per-10-minute threshold.
// 14 tokens every second nets 840 per minute. That's 8400 every 10 minutes, // 16 tokens every second nets 960 per minute. That's 9600 every 10 minutes,
// which is a bit below the mark. // which is a bit below the mark.
// But suppose we have a minute-long dry spell followed by a 10 minute tsunami. // If the bucket is full, we can push out 200 calls immediately, which brings
// We'll have built up 750 tokens in reserve, so the first 750 calls go through // the total in the first 10 minutes to 9800. We can toe that line if we want,
// immediately. Over the next 10 minutes, we'll partition out the other calls // but doing so risks timeouts. It's better to give the limits breathing room.
// at a rate of 840-per-minute, ending at a total of 9150. Theoretically, if defaultPerSecond = 16 // 16 * 60 * 10 = 9600
// the volume keeps up after that, we'll always stay between 8400 and 9150 out defaultMaxCap = 200 // real cap is 10k-per-10-minutes
// of 10k. Worst case scenario, we have an extra minute of padding to allow
// up to 9990.
defaultPerSecond = 14 // 14 * 60 = 840
defaultMaxCap = 750 // real cap is 10k-per-10-minutes
// since drive runs on a per-minute, rather than per-10-minute bucket, we have // since drive runs on a per-minute, rather than per-10-minute bucket, we have
// to keep the max cap equal to the per-second cap. A large maxCap pool (say, // to keep the max cap equal to the per-second cap. A large maxCap pool (say,
// 1200, similar to the per-minute cap) would allow us to make a flood of 2400 // 1200, similar to the per-minute cap) would allow us to make a flood of 2400

View File

@ -37,7 +37,7 @@ const (
// @microsoft.graph.conflictBehavior=fail finds a conflicting file. // @microsoft.graph.conflictBehavior=fail finds a conflicting file.
nameAlreadyExists errorCode = "nameAlreadyExists" nameAlreadyExists errorCode = "nameAlreadyExists"
quotaExceeded errorCode = "ErrorQuotaExceeded" quotaExceeded errorCode = "ErrorQuotaExceeded"
requestResourceNotFound errorCode = "Request_ResourceNotFound" RequestResourceNotFound errorCode = "Request_ResourceNotFound"
resourceNotFound errorCode = "ResourceNotFound" resourceNotFound errorCode = "ResourceNotFound"
resyncRequired errorCode = "ResyncRequired" // alt: resyncRequired resyncRequired errorCode = "ResyncRequired" // alt: resyncRequired
syncFolderNotFound errorCode = "ErrorSyncFolderNotFound" syncFolderNotFound errorCode = "ErrorSyncFolderNotFound"
@ -56,17 +56,16 @@ const (
type errorMessage string type errorMessage string
const ( const (
IOErrDuringRead errorMessage = "IO error during request payload read" IOErrDuringRead errorMessage = "IO error during request payload read"
MysiteURLNotFound errorMessage = "unable to retrieve user's mysite url"
MysiteNotFound errorMessage = "user's mysite not found"
NoSPLicense errorMessage = "Tenant does not have a SPO license"
) )
const ( const (
mysiteURLNotFound = "unable to retrieve user's mysite url" LabelsMalware = "malware_detected"
mysiteNotFound = "user's mysite not found" LabelsMysiteNotFound = "mysite_not_found"
) LabelsNoSharePointLicense = "no_sharepoint_license"
const (
LabelsMalware = "malware_detected"
LabelsMysiteNotFound = "mysite_not_found"
// LabelsSkippable is used to determine if an error is skippable // LabelsSkippable is used to determine if an error is skippable
LabelsSkippable = "skippable_errors" LabelsSkippable = "skippable_errors"
@ -132,7 +131,7 @@ func IsErrExchangeMailFolderNotFound(err error) bool {
} }
func IsErrUserNotFound(err error) bool { func IsErrUserNotFound(err error) bool {
return hasErrorCode(err, requestResourceNotFound) return hasErrorCode(err, RequestResourceNotFound)
} }
func IsErrResourceNotFound(err error) bool { func IsErrResourceNotFound(err error) bool {
@ -297,11 +296,17 @@ func setLabels(err *clues.Err, msg string) *clues.Err {
return nil return nil
} }
ml := strings.ToLower(msg) f := filters.Contains([]string{msg})
if strings.Contains(ml, mysiteNotFound) || strings.Contains(ml, mysiteURLNotFound) {
if f.Compare(string(MysiteNotFound)) ||
f.Compare(string(MysiteURLNotFound)) {
err = err.Label(LabelsMysiteNotFound) err = err.Label(LabelsMysiteNotFound)
} }
if f.Compare(string(NoSPLicense)) {
err = err.Label(LabelsNoSharePointLicense)
}
if IsMalware(err) { if IsMalware(err) {
err = err.Label(LabelsMalware) err = err.Label(LabelsMalware)
} }

View File

@ -33,6 +33,16 @@ func odErr(code string) *odataerrors.ODataError {
return odErr return odErr
} }
func odErrMsg(code, message string) *odataerrors.ODataError {
odErr := odataerrors.NewODataError()
merr := odataerrors.NewMainError()
merr.SetCode(&code)
merr.SetMessage(&message)
odErr.SetError(merr)
return odErr
}
func (suite *GraphErrorsUnitSuite) TestIsErrConnectionReset() { func (suite *GraphErrorsUnitSuite) TestIsErrConnectionReset() {
table := []struct { table := []struct {
name string name string
@ -223,7 +233,7 @@ func (suite *GraphErrorsUnitSuite) TestIsErrUserNotFound() {
}, },
{ {
name: "request resource not found oDataErr", name: "request resource not found oDataErr",
err: odErr(string(requestResourceNotFound)), err: odErr(string(RequestResourceNotFound)),
expect: assert.True, expect: assert.True,
}, },
} }
@ -423,3 +433,56 @@ func (suite *GraphErrorsUnitSuite) TestIsErrCannotOpenFileAttachment() {
}) })
} }
} }
func (suite *GraphErrorsUnitSuite) TestGraphStack_labels() {
table := []struct {
name string
err error
expect []string
}{
{
name: "nil",
err: nil,
expect: []string{},
},
{
name: "not-odata",
err: assert.AnError,
expect: []string{},
},
{
name: "oDataErr matches no labels",
err: odErr("code"),
expect: []string{},
},
{
name: "mysite not found",
err: odErrMsg("code", string(MysiteNotFound)),
expect: []string{},
},
{
name: "mysite url not found",
err: odErrMsg("code", string(MysiteURLNotFound)),
expect: []string{},
},
{
name: "no sp license",
err: odErrMsg("code", string(NoSPLicense)),
expect: []string{},
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
result := Stack(ctx, test.err)
for _, e := range test.expect {
assert.True(t, clues.HasLabel(result, e), clues.ToCore(result))
}
})
}
}

View File

@ -5,6 +5,7 @@ import (
"context" "context"
"fmt" "fmt"
"net/http" "net/http"
"strings"
"github.com/alcionai/clues" "github.com/alcionai/clues"
@ -21,8 +22,11 @@ const (
// Writer implements an io.Writer for a M365 // Writer implements an io.Writer for a M365
// UploadSession URL // UploadSession URL
type largeItemWriter struct { type largeItemWriter struct {
// ID is the id of the item created.
// Will be available after the upload is complete
ID string
// Identifier // Identifier
id string parentID string
// Upload URL for this item // Upload URL for this item
url string url string
// Tracks how much data will be written // Tracks how much data will be written
@ -32,8 +36,13 @@ type largeItemWriter struct {
client httpWrapper client httpWrapper
} }
func NewLargeItemWriter(id, url string, size int64) *largeItemWriter { func NewLargeItemWriter(parentID, url string, size int64) *largeItemWriter {
return &largeItemWriter{id: id, url: url, contentLength: size, client: *NewNoTimeoutHTTPWrapper()} return &largeItemWriter{
parentID: parentID,
url: url,
contentLength: size,
client: *NewNoTimeoutHTTPWrapper(),
}
} }
// Write will upload the provided data to M365. It sets the `Content-Length` and `Content-Range` headers based on // Write will upload the provided data to M365. It sets the `Content-Length` and `Content-Range` headers based on
@ -44,7 +53,7 @@ func (iw *largeItemWriter) Write(p []byte) (int, error) {
logger.Ctx(ctx). logger.Ctx(ctx).
Debugf("WRITE for %s. Size:%d, Offset: %d, TotalSize: %d", Debugf("WRITE for %s. Size:%d, Offset: %d, TotalSize: %d",
iw.id, rangeLength, iw.lastWrittenOffset, iw.contentLength) iw.parentID, rangeLength, iw.lastWrittenOffset, iw.contentLength)
endOffset := iw.lastWrittenOffset + int64(rangeLength) endOffset := iw.lastWrittenOffset + int64(rangeLength)
@ -58,7 +67,7 @@ func (iw *largeItemWriter) Write(p []byte) (int, error) {
iw.contentLength) iw.contentLength)
headers[contentLengthHeaderKey] = fmt.Sprintf("%d", rangeLength) headers[contentLengthHeaderKey] = fmt.Sprintf("%d", rangeLength)
_, err := iw.client.Request( resp, err := iw.client.Request(
ctx, ctx,
http.MethodPut, http.MethodPut,
iw.url, iw.url,
@ -66,7 +75,7 @@ func (iw *largeItemWriter) Write(p []byte) (int, error) {
headers) headers)
if err != nil { if err != nil {
return 0, clues.Wrap(err, "uploading item").With( return 0, clues.Wrap(err, "uploading item").With(
"upload_id", iw.id, "upload_id", iw.parentID,
"upload_chunk_size", rangeLength, "upload_chunk_size", rangeLength,
"upload_offset", iw.lastWrittenOffset, "upload_offset", iw.lastWrittenOffset,
"upload_size", iw.contentLength) "upload_size", iw.contentLength)
@ -75,5 +84,22 @@ func (iw *largeItemWriter) Write(p []byte) (int, error) {
// Update last offset // Update last offset
iw.lastWrittenOffset = endOffset iw.lastWrittenOffset = endOffset
// Once the upload is complete, we get a Location header in the
// below format from which we can get the id of the uploaded
// item. This will only be available after we have uploaded the
// entire content(based on the size in the req header).
// https://outlook.office.com/api/v2.0/Users('<user-id>')/Messages('<message-id>')/Attachments('<attachment-id>')
// Ref: https://learn.microsoft.com/en-us/graph/outlook-large-attachments?tabs=http
loc := resp.Header.Get("Location")
if loc != "" {
splits := strings.Split(loc, "'")
if len(splits) != 7 || splits[4] != ")/Attachments(" || len(splits[5]) == 0 {
return 0, clues.New("invalid format for upload completion url").
With("location", loc)
}
iw.ID = splits[5]
}
return rangeLength, nil return rangeLength, nil
} }

View File

@ -84,6 +84,8 @@ type Collection struct {
// should only be true if the old delta token expired // should only be true if the old delta token expired
doNotMergeItems bool doNotMergeItems bool
urlCache getItemPropertyer
} }
func pathToLocation(p path.Path) (*path.Builder, error) { func pathToLocation(p path.Path) (*path.Builder, error) {
@ -109,6 +111,7 @@ func NewCollection(
ctrlOpts control.Options, ctrlOpts control.Options,
colScope collectionScope, colScope collectionScope,
doNotMergeItems bool, doNotMergeItems bool,
urlCache getItemPropertyer,
) (*Collection, error) { ) (*Collection, error) {
// TODO(ashmrtn): If OneDrive switches to using folder IDs then this will need // TODO(ashmrtn): If OneDrive switches to using folder IDs then this will need
// to be changed as we won't be able to extract path information from the // to be changed as we won't be able to extract path information from the
@ -132,7 +135,8 @@ func NewCollection(
statusUpdater, statusUpdater,
ctrlOpts, ctrlOpts,
colScope, colScope,
doNotMergeItems) doNotMergeItems,
urlCache)
c.locPath = locPath c.locPath = locPath
c.prevLocPath = prevLocPath c.prevLocPath = prevLocPath
@ -149,6 +153,7 @@ func newColl(
ctrlOpts control.Options, ctrlOpts control.Options,
colScope collectionScope, colScope collectionScope,
doNotMergeItems bool, doNotMergeItems bool,
urlCache getItemPropertyer,
) *Collection { ) *Collection {
c := &Collection{ c := &Collection{
handler: handler, handler: handler,
@ -162,6 +167,7 @@ func newColl(
state: data.StateOf(prevPath, currPath), state: data.StateOf(prevPath, currPath),
scope: colScope, scope: colScope,
doNotMergeItems: doNotMergeItems, doNotMergeItems: doNotMergeItems,
urlCache: urlCache,
} }
return c return c
@ -267,7 +273,7 @@ func (oc *Collection) getDriveItemContent(
el = errs.Local() el = errs.Local()
) )
itemData, err := downloadContent(ctx, oc.handler, item, oc.driveID) itemData, err := downloadContent(ctx, oc.handler, oc.urlCache, item, oc.driveID)
if err != nil { if err != nil {
if clues.HasLabel(err, graph.LabelsMalware) || (item != nil && item.GetMalware() != nil) { if clues.HasLabel(err, graph.LabelsMalware) || (item != nil && item.GetMalware() != nil) {
logger.CtxErr(ctx, err).With("skipped_reason", fault.SkipMalware).Info("item flagged as malware") logger.CtxErr(ctx, err).With("skipped_reason", fault.SkipMalware).Info("item flagged as malware")
@ -320,9 +326,13 @@ type itemAndAPIGetter interface {
func downloadContent( func downloadContent(
ctx context.Context, ctx context.Context,
iaag itemAndAPIGetter, iaag itemAndAPIGetter,
uc getItemPropertyer,
item models.DriveItemable, item models.DriveItemable,
driveID string, driveID string,
) (io.ReadCloser, error) { ) (io.ReadCloser, error) {
itemID := ptr.Val(item.GetId())
ctx = clues.Add(ctx, "item_id", itemID)
content, err := downloadItem(ctx, iaag, item) content, err := downloadItem(ctx, iaag, item)
if err == nil { if err == nil {
return content, nil return content, nil
@ -332,8 +342,19 @@ func downloadContent(
// Assume unauthorized requests are a sign of an expired jwt // Assume unauthorized requests are a sign of an expired jwt
// token, and that we've overrun the available window to // token, and that we've overrun the available window to
// download the actual file. Re-downloading the item will // download the file. Get a fresh url from the cache and attempt to
// refresh that download url. // download again.
content, err = readItemContents(ctx, iaag, uc, itemID)
if err == nil {
logger.Ctx(ctx).Debug("found item in url cache")
return content, nil
}
// Consider cache errors(including deleted items) as cache misses. This is
// to preserve existing behavior. Fallback to refetching the item using the
// API.
logger.CtxErr(ctx, err).Info("url cache miss: refetching from API")
di, err := iaag.GetItem(ctx, driveID, ptr.Val(item.GetId())) di, err := iaag.GetItem(ctx, driveID, ptr.Val(item.GetId()))
if err != nil { if err != nil {
return nil, clues.Wrap(err, "retrieving expired item") return nil, clues.Wrap(err, "retrieving expired item")
@ -347,6 +368,41 @@ func downloadContent(
return content, nil return content, nil
} }
// readItemContents fetches latest download URL from the cache and attempts to
// download the file using the new URL.
func readItemContents(
ctx context.Context,
iaag itemAndAPIGetter,
uc getItemPropertyer,
itemID string,
) (io.ReadCloser, error) {
if uc == nil {
return nil, clues.New("nil url cache")
}
props, err := uc.getItemProperties(ctx, itemID)
if err != nil {
return nil, err
}
// Handle newly deleted items
if props.isDeleted {
logger.Ctx(ctx).Info("item deleted in cache")
return nil, graph.ErrDeletedInFlight
}
rc, err := downloadFile(ctx, iaag, props.downloadURL)
if graph.IsErrUnauthorized(err) {
logger.CtxErr(ctx, err).Info("stale item in cache")
}
if err != nil {
return nil, err
}
return rc, nil
}
// populateItems iterates through items added to the collection // populateItems iterates through items added to the collection
// and uses the collection `itemReader` to read the item // and uses the collection `itemReader` to read the item
func (oc *Collection) populateItems(ctx context.Context, errs *fault.Bus) { func (oc *Collection) populateItems(ctx context.Context, errs *fault.Bus) {

View File

@ -2,6 +2,7 @@ package onedrive
import ( import (
"bytes" "bytes"
"context"
"encoding/json" "encoding/json"
"io" "io"
"net/http" "net/http"
@ -204,7 +205,8 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
suite.testStatusUpdater(&wg, &collStatus), suite.testStatusUpdater(&wg, &collStatus),
control.Options{ToggleFeatures: control.Toggles{}}, control.Options{ToggleFeatures: control.Toggles{}},
CollectionScopeFolder, CollectionScopeFolder,
true) true,
nil)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, coll) require.NotNil(t, coll)
assert.Equal(t, folderPath, coll.FullPath()) assert.Equal(t, folderPath, coll.FullPath())
@ -312,7 +314,8 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadError() {
suite.testStatusUpdater(&wg, &collStatus), suite.testStatusUpdater(&wg, &collStatus),
control.Options{ToggleFeatures: control.Toggles{}}, control.Options{ToggleFeatures: control.Toggles{}},
CollectionScopeFolder, CollectionScopeFolder,
true) true,
nil)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
stubItem := odTD.NewStubDriveItem( stubItem := odTD.NewStubDriveItem(
@ -388,7 +391,8 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadUnauthorizedErrorRetry()
suite.testStatusUpdater(&wg, &collStatus), suite.testStatusUpdater(&wg, &collStatus),
control.Options{ToggleFeatures: control.Toggles{}}, control.Options{ToggleFeatures: control.Toggles{}},
CollectionScopeFolder, CollectionScopeFolder,
true) true,
nil)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
coll.Add(stubItem) coll.Add(stubItem)
@ -442,7 +446,8 @@ func (suite *CollectionUnitTestSuite) TestCollectionPermissionBackupLatestModTim
suite.testStatusUpdater(&wg, &collStatus), suite.testStatusUpdater(&wg, &collStatus),
control.Options{ToggleFeatures: control.Toggles{}}, control.Options{ToggleFeatures: control.Toggles{}},
CollectionScopeFolder, CollectionScopeFolder,
true) true,
nil)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
mtime := time.Now().AddDate(0, -1, 0) mtime := time.Now().AddDate(0, -1, 0)
@ -600,6 +605,19 @@ func (suite *GetDriveItemUnitTestSuite) TestGetDriveItem_error() {
} }
} }
var _ getItemPropertyer = &mockURLCache{}

// mockURLCache is a test double for the drive url cache.  Tests configure
// the Get func to control what getItemProperties returns.
type mockURLCache struct {
	Get func(ctx context.Context, itemID string) (itemProps, error)
}

// getItemProperties delegates straight to the injected Get func.
func (m *mockURLCache) getItemProperties(
	ctx context.Context,
	itemID string,
) (itemProps, error) {
	return m.Get(ctx, itemID)
}
func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() { func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
var ( var (
driveID string driveID string
@ -611,6 +629,12 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
itemWID.SetId(ptr.To("brainhooldy")) itemWID.SetId(ptr.To("brainhooldy"))
m := &mockURLCache{
Get: func(ctx context.Context, itemID string) (itemProps, error) {
return itemProps{}, clues.Stack(assert.AnError)
},
}
table := []struct { table := []struct {
name string name string
mgi mock.GetsItem mgi mock.GetsItem
@ -619,6 +643,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
getErr []error getErr []error
expectErr require.ErrorAssertionFunc expectErr require.ErrorAssertionFunc
expect require.ValueAssertionFunc expect require.ValueAssertionFunc
muc *mockURLCache
}{ }{
{ {
name: "good", name: "good",
@ -627,6 +652,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
getErr: []error{nil}, getErr: []error{nil},
expectErr: require.NoError, expectErr: require.NoError,
expect: require.NotNil, expect: require.NotNil,
muc: m,
}, },
{ {
name: "expired url redownloads", name: "expired url redownloads",
@ -636,6 +662,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
getErr: []error{errUnauth, nil}, getErr: []error{errUnauth, nil},
expectErr: require.NoError, expectErr: require.NoError,
expect: require.NotNil, expect: require.NotNil,
muc: m,
}, },
{ {
name: "immediate error", name: "immediate error",
@ -643,6 +670,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
getErr: []error{assert.AnError}, getErr: []error{assert.AnError},
expectErr: require.Error, expectErr: require.Error,
expect: require.Nil, expect: require.Nil,
muc: m,
}, },
{ {
name: "re-fetching the item fails", name: "re-fetching the item fails",
@ -651,6 +679,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
mgi: mock.GetsItem{Item: nil, Err: assert.AnError}, mgi: mock.GetsItem{Item: nil, Err: assert.AnError},
expectErr: require.Error, expectErr: require.Error,
expect: require.Nil, expect: require.Nil,
muc: m,
}, },
{ {
name: "expired url fails redownload", name: "expired url fails redownload",
@ -660,6 +689,57 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
getErr: []error{errUnauth, assert.AnError}, getErr: []error{errUnauth, assert.AnError},
expectErr: require.Error, expectErr: require.Error,
expect: require.Nil, expect: require.Nil,
muc: m,
},
{
name: "url refreshed from cache",
mgi: mock.GetsItem{Item: itemWID, Err: nil},
itemInfo: details.ItemInfo{},
respBody: []io.ReadCloser{nil, iorc},
getErr: []error{errUnauth, nil},
expectErr: require.NoError,
expect: require.NotNil,
muc: &mockURLCache{
Get: func(ctx context.Context, itemID string) (itemProps, error) {
return itemProps{
downloadURL: "http://example.com",
isDeleted: false,
},
nil
},
},
},
{
name: "url refreshed from cache but item deleted",
mgi: mock.GetsItem{Item: itemWID, Err: graph.ErrDeletedInFlight},
itemInfo: details.ItemInfo{},
respBody: []io.ReadCloser{nil, nil, nil},
getErr: []error{errUnauth, graph.ErrDeletedInFlight, graph.ErrDeletedInFlight},
expectErr: require.Error,
expect: require.Nil,
muc: &mockURLCache{
Get: func(ctx context.Context, itemID string) (itemProps, error) {
return itemProps{
downloadURL: "http://example.com",
isDeleted: true,
},
nil
},
},
},
{
name: "fallback to item fetch on any cache error",
mgi: mock.GetsItem{Item: itemWID, Err: nil},
itemInfo: details.ItemInfo{},
respBody: []io.ReadCloser{nil, iorc},
getErr: []error{errUnauth, nil},
expectErr: require.NoError,
expect: require.NotNil,
muc: &mockURLCache{
Get: func(ctx context.Context, itemID string) (itemProps, error) {
return itemProps{}, assert.AnError
},
},
}, },
} }
for _, test := range table { for _, test := range table {
@ -685,7 +765,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
mbh.GetResps = resps mbh.GetResps = resps
mbh.GetErrs = test.getErr mbh.GetErrs = test.getErr
r, err := downloadContent(ctx, mbh, item, driveID) r, err := downloadContent(ctx, mbh, test.muc, item, driveID)
test.expect(t, r) test.expect(t, r)
test.expectErr(t, err, clues.ToCore(err)) test.expectErr(t, err, clues.ToCore(err))
}) })

View File

@ -255,7 +255,8 @@ func (c *Collections) Get(
// Drive ID -> delta URL for drive // Drive ID -> delta URL for drive
deltaURLs = map[string]string{} deltaURLs = map[string]string{}
// Drive ID -> folder ID -> folder path // Drive ID -> folder ID -> folder path
folderPaths = map[string]map[string]string{} folderPaths = map[string]map[string]string{}
numPrevItems = 0
) )
for _, d := range drives { for _, d := range drives {
@ -322,6 +323,23 @@ func (c *Collections) Get(
"num_deltas_entries", numDeltas, "num_deltas_entries", numDeltas,
"delta_reset", delta.Reset) "delta_reset", delta.Reset)
numDriveItems := c.NumItems - numPrevItems
numPrevItems = c.NumItems
// Attach an url cache
if numDriveItems < urlCacheDriveItemThreshold {
logger.Ctx(ictx).Info("adding url cache for drive")
err = c.addURLCacheToDriveCollections(
ictx,
driveID,
prevDelta,
errs)
if err != nil {
return nil, false, err
}
}
// For both cases we don't need to do set difference on folder map if the // For both cases we don't need to do set difference on folder map if the
// delta token was valid because we should see all the changes. // delta token was valid because we should see all the changes.
if !delta.Reset { if !delta.Reset {
@ -370,7 +388,8 @@ func (c *Collections) Get(
c.statusUpdater, c.statusUpdater,
c.ctrl, c.ctrl,
CollectionScopeUnknown, CollectionScopeUnknown,
true) true,
nil)
if err != nil { if err != nil {
return nil, false, clues.Wrap(err, "making collection").WithClues(ictx) return nil, false, clues.Wrap(err, "making collection").WithClues(ictx)
} }
@ -405,7 +424,8 @@ func (c *Collections) Get(
c.statusUpdater, c.statusUpdater,
c.ctrl, c.ctrl,
CollectionScopeUnknown, CollectionScopeUnknown,
true) true,
nil)
if err != nil { if err != nil {
return nil, false, clues.Wrap(err, "making drive tombstone").WithClues(ctx) return nil, false, clues.Wrap(err, "making drive tombstone").WithClues(ctx)
} }
@ -438,6 +458,33 @@ func (c *Collections) Get(
return collections, canUsePreviousBackup, nil return collections, canUsePreviousBackup, nil
} }
// addURLCacheToDriveCollections creates a URL cache for the given drive and
// attaches it to every collection belonging to that drive.
//
// Returns any error produced while constructing the cache; collections are
// only mutated after the cache is successfully created.
func (c *Collections) addURLCacheToDriveCollections(
	ctx context.Context,
	driveID, prevDelta string,
	errs *fault.Bus,
) error {
	uc, err := newURLCache(
		driveID,
		prevDelta,
		urlCacheRefreshInterval,
		c.handler.NewItemPager(driveID, "", api.DriveItemSelectDefault()),
		errs)
	if err != nil {
		return err
	}

	// Set the URL cache for all collections in this drive.  Restrict the
	// update to this drive's collections only; iterating the full
	// CollectionMap would clobber caches already attached to other drives'
	// collections with a cache holding the wrong drive's URLs.
	for _, coll := range c.CollectionMap[driveID] {
		coll.urlCache = uc
	}

	return nil
}
func updateCollectionPaths( func updateCollectionPaths(
driveID, itemID string, driveID, itemID string,
cmap map[string]map[string]*Collection, cmap map[string]map[string]*Collection,
@ -557,7 +604,8 @@ func (c *Collections) handleDelete(
c.ctrl, c.ctrl,
CollectionScopeUnknown, CollectionScopeUnknown,
// DoNotMerge is not checked for deleted items. // DoNotMerge is not checked for deleted items.
false) false,
nil)
if err != nil { if err != nil {
return clues.Wrap(err, "making collection").With( return clues.Wrap(err, "making collection").With(
"drive_id", driveID, "drive_id", driveID,
@ -740,7 +788,8 @@ func (c *Collections) UpdateCollections(
c.statusUpdater, c.statusUpdater,
c.ctrl, c.ctrl,
colScope, colScope,
invalidPrevDelta) invalidPrevDelta,
nil)
if err != nil { if err != nil {
return clues.Stack(err).WithClues(ictx) return clues.Stack(err).WithClues(ictx)
} }

View File

@ -2,6 +2,7 @@ package onedrive
import ( import (
"context" "context"
"strconv"
"testing" "testing"
"github.com/alcionai/clues" "github.com/alcionai/clues"
@ -2678,3 +2679,86 @@ func (suite *OneDriveCollectionsUnitSuite) TestCollectItems() {
}) })
} }
} }
// TestAddURLCacheToDriveCollections verifies that addURLCacheToDriveCollections
// creates a single urlCache instance and attaches that same instance to every
// collection registered under the target drive.
func (suite *OneDriveCollectionsUnitSuite) TestAddURLCacheToDriveCollections() {
	driveID := "test-drive"
	collCount := 3
	anyFolder := (&selectors.OneDriveBackup{}).Folders(selectors.Any())[0]

	table := []struct {
		name             string
		items            []deltaPagerResult
		deltaURL         string
		prevDeltaSuccess bool
		prevDelta        string
		err              error
	}{
		{
			name: "cache is attached",
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			itemPagers := map[string]api.DriveItemEnumerator{}
			itemPagers[driveID] = &mockItemPager{}

			mbh := mock.DefaultOneDriveBH()
			mbh.ItemPagerV = itemPagers

			c := NewCollections(
				mbh,
				"test-tenant",
				"test-user",
				nil,
				control.Options{ToggleFeatures: control.Toggles{}})

			if _, ok := c.CollectionMap[driveID]; !ok {
				c.CollectionMap[driveID] = map[string]*Collection{}
			}

			// Add a few collections; none of them should have a cache yet.
			for i := 0; i < collCount; i++ {
				coll, err := NewCollection(
					&itemBackupHandler{api.Drives{}, anyFolder},
					nil,
					nil,
					driveID,
					nil,
					control.Options{ToggleFeatures: control.Toggles{}},
					CollectionScopeFolder,
					true,
					nil)
				require.NoError(t, err, clues.ToCore(err))

				c.CollectionMap[driveID][strconv.Itoa(i)] = coll
				// require.Nil handles interface nil-ness correctly;
				// require.Equal(t, nil, ...) does not.
				require.Nil(t, coll.urlCache, "url cache should not be attached yet")
			}

			err := c.addURLCacheToDriveCollections(
				ctx,
				driveID,
				"",
				fault.New(true))
			require.NoError(t, err, clues.ToCore(err))

			// Check that all collections have the same cache instance attached
			// to them
			var uc *urlCache

			for _, driveColls := range c.CollectionMap {
				for _, coll := range driveColls {
					require.NotNil(t, coll.urlCache, "cache is nil")

					if uc == nil {
						uc = coll.urlCache.(*urlCache)
					} else {
						require.Equal(t, uc, coll.urlCache, "cache not equal")
					}
				}
			}
		})
	}
}

View File

@ -77,7 +77,7 @@ func downloadFile(
return nil, clues.New("malware detected").Label(graph.LabelsMalware) return nil, clues.New("malware detected").Label(graph.LabelsMalware)
} }
if (resp.StatusCode / 100) != 2 { if resp != nil && (resp.StatusCode/100) != 2 {
// upstream error checks can compare the status with // upstream error checks can compare the status with
// clues.HasLabel(err, graph.LabelStatus(http.KnownStatusCode)) // clues.HasLabel(err, graph.LabelStatus(http.KnownStatusCode))
return nil, clues. return nil, clues.

View File

@ -228,7 +228,7 @@ func (m GetsItemPermission) GetItemPermission(
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Restore Handler // Restore Handler
// --------------------------------------------------------------------------- // --------------------------------------------------------------------------
type RestoreHandler struct { type RestoreHandler struct {
ItemInfo details.ItemInfo ItemInfo details.ItemInfo

View File

@ -15,14 +15,29 @@ import (
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
) )
const (
	// urlCacheDriveItemThreshold is the max number of items in a drive for
	// which the URL cache is attached; larger drives skip the cache.
	urlCacheDriveItemThreshold = 300 * 1000
	// urlCacheRefreshInterval is the interval passed to newURLCache that
	// governs how often cached download URLs are refreshed via delta query.
	urlCacheRefreshInterval = 1 * time.Hour
)
// getItemPropertyer returns cached properties (download URL, deletion
// state) for a drive item, keyed by item ID.
type getItemPropertyer interface {
	getItemProperties(
		ctx context.Context,
		itemID string,
	) (itemProps, error)
}
type itemProps struct { type itemProps struct {
downloadURL string downloadURL string
isDeleted bool isDeleted bool
} }
var _ getItemPropertyer = &urlCache{}
// urlCache caches download URLs for drive items // urlCache caches download URLs for drive items
type urlCache struct { type urlCache struct {
driveID string driveID string
prevDelta string
idToProps map[string]itemProps idToProps map[string]itemProps
lastRefreshTime time.Time lastRefreshTime time.Time
refreshInterval time.Duration refreshInterval time.Duration
@ -39,7 +54,7 @@ type urlCache struct {
// newURLache creates a new URL cache for the specified drive ID // newURLache creates a new URL cache for the specified drive ID
func newURLCache( func newURLCache(
driveID string, driveID, prevDelta string,
refreshInterval time.Duration, refreshInterval time.Duration,
itemPager api.DriveItemEnumerator, itemPager api.DriveItemEnumerator,
errs *fault.Bus, errs *fault.Bus,
@ -56,6 +71,7 @@ func newURLCache(
idToProps: make(map[string]itemProps), idToProps: make(map[string]itemProps),
lastRefreshTime: time.Time{}, lastRefreshTime: time.Time{},
driveID: driveID, driveID: driveID,
prevDelta: prevDelta,
refreshInterval: refreshInterval, refreshInterval: refreshInterval,
itemPager: itemPager, itemPager: itemPager,
errs: errs, errs: errs,
@ -165,6 +181,8 @@ func (uc *urlCache) deltaQuery(
ctx context.Context, ctx context.Context,
) error { ) error {
logger.Ctx(ctx).Debug("starting delta query") logger.Ctx(ctx).Debug("starting delta query")
// Reset item pager to remove any previous state
uc.itemPager.Reset()
_, _, _, err := collectItems( _, _, _, err := collectItems(
ctx, ctx,
@ -173,7 +191,7 @@ func (uc *urlCache) deltaQuery(
"", "",
uc.updateCache, uc.updateCache,
map[string]string{}, map[string]string{},
"", uc.prevDelta,
uc.errs) uc.errs)
if err != nil { if err != nil {
return clues.Wrap(err, "delta query") return clues.Wrap(err, "delta query")

View File

@ -1,6 +1,7 @@
package onedrive package onedrive
import ( import (
"context"
"errors" "errors"
"math/rand" "math/rand"
"net/http" "net/http"
@ -89,10 +90,38 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
nfid := ptr.Val(newFolder.GetId()) nfid := ptr.Val(newFolder.GetId())
collectorFunc := func(
context.Context,
string,
string,
[]models.DriveItemable,
map[string]string,
map[string]string,
map[string]struct{},
map[string]map[string]string,
bool,
*fault.Bus,
) error {
return nil
}
// Get the previous delta to feed into url cache
prevDelta, _, _, err := collectItems(
ctx,
suite.ac.Drives().NewItemPager(driveID, "", api.DriveItemSelectDefault()),
suite.driveID,
"drive-name",
collectorFunc,
map[string]string{},
"",
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, prevDelta.URL)
// Create a bunch of files in the new folder // Create a bunch of files in the new folder
var items []models.DriveItemable var items []models.DriveItemable
for i := 0; i < 10; i++ { for i := 0; i < 5; i++ {
newItemName := "test_url_cache_basic_" + dttm.FormatNow(dttm.SafeForTesting) newItemName := "test_url_cache_basic_" + dttm.FormatNow(dttm.SafeForTesting)
item, err := ac.Drives().PostItemInContainer( item, err := ac.Drives().PostItemInContainer(
@ -110,15 +139,12 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
} }
// Create a new URL cache with a long TTL // Create a new URL cache with a long TTL
cache, err := newURLCache( uc, err := newURLCache(
suite.driveID, suite.driveID,
prevDelta.URL,
1*time.Hour, 1*time.Hour,
driveItemPager, driveItemPager,
fault.New(true)) fault.New(true))
require.NoError(t, err, clues.ToCore(err))
err = cache.refreshCache(ctx)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
// Launch parallel requests to the cache, one per item // Launch parallel requests to the cache, one per item
@ -130,11 +156,11 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
defer wg.Done() defer wg.Done()
// Read item from URL cache // Read item from URL cache
props, err := cache.getItemProperties( props, err := uc.getItemProperties(
ctx, ctx,
ptr.Val(items[i].GetId())) ptr.Val(items[i].GetId()))
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, props) require.NotNil(t, props)
require.NotEmpty(t, props.downloadURL) require.NotEmpty(t, props.downloadURL)
require.Equal(t, false, props.isDeleted) require.Equal(t, false, props.isDeleted)
@ -148,15 +174,14 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
props.downloadURL, props.downloadURL,
nil, nil,
nil) nil)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
require.Equal(t, http.StatusOK, resp.StatusCode) require.Equal(t, http.StatusOK, resp.StatusCode)
}(i) }(i)
} }
wg.Wait() wg.Wait()
// Validate that <= 1 delta queries were made // Validate that <= 1 delta queries were made by url cache
require.LessOrEqual(t, cache.deltaQueryCount, 1) require.LessOrEqual(t, uc.deltaQueryCount, 1)
} }
type URLCacheUnitSuite struct { type URLCacheUnitSuite struct {
@ -407,6 +432,7 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
cache, err := newURLCache( cache, err := newURLCache(
driveID, driveID,
"",
1*time.Hour, 1*time.Hour,
itemPager, itemPager,
fault.New(true)) fault.New(true))
@ -449,6 +475,7 @@ func (suite *URLCacheUnitSuite) TestNeedsRefresh() {
cache, err := newURLCache( cache, err := newURLCache(
driveID, driveID,
"",
refreshInterval, refreshInterval,
&mockItemPager{}, &mockItemPager{},
fault.New(true)) fault.New(true))
@ -522,6 +549,7 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
t := suite.T() t := suite.T()
_, err := newURLCache( _, err := newURLCache(
test.driveID, test.driveID,
"",
test.refreshInt, test.refreshInt,
test.itemPager, test.itemPager,
test.errors) test.errors)

View File

@ -813,7 +813,10 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
eventDBF := func(id, timeStamp, subject, body string) []byte { eventDBF := func(id, timeStamp, subject, body string) []byte {
return exchMock.EventWith( return exchMock.EventWith(
suite.user, subject, body, body, suite.user, subject, body, body,
now, now, exchMock.NoRecurrence, exchMock.NoAttendees, false) exchMock.NoOriginalStartDate, now, now,
exchMock.NoRecurrence, exchMock.NoAttendees,
exchMock.NoAttachments, exchMock.NoCancelledOccurrences,
exchMock.NoExceptionOccurrences)
} }
// test data set // test data set
@ -961,7 +964,8 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
table := []struct { table := []struct {
name string name string
// performs the incremental update required for the test. // performs the incremental update required for the test.
updateUserData func(t *testing.T) //revive:disable-next-line:context-as-argument
updateUserData func(t *testing.T, ctx context.Context)
deltaItemsRead int deltaItemsRead int
deltaItemsWritten int deltaItemsWritten int
nonDeltaItemsRead int nonDeltaItemsRead int
@ -970,7 +974,7 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
}{ }{
{ {
name: "clean, no changes", name: "clean, no changes",
updateUserData: func(t *testing.T) {}, updateUserData: func(t *testing.T, ctx context.Context) {},
deltaItemsRead: 0, deltaItemsRead: 0,
deltaItemsWritten: 0, deltaItemsWritten: 0,
nonDeltaItemsRead: 8, nonDeltaItemsRead: 8,
@ -979,7 +983,7 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
}, },
{ {
name: "move an email folder to a subfolder", name: "move an email folder to a subfolder",
updateUserData: func(t *testing.T) { updateUserData: func(t *testing.T, ctx context.Context) {
cat := path.EmailCategory cat := path.EmailCategory
// contacts and events cannot be sufoldered; this is an email-only change // contacts and events cannot be sufoldered; this is an email-only change
@ -1003,7 +1007,7 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
}, },
{ {
name: "delete a folder", name: "delete a folder",
updateUserData: func(t *testing.T) { updateUserData: func(t *testing.T, ctx context.Context) {
for category, d := range dataset { for category, d := range dataset {
containerID := d.dests[container2].containerID containerID := d.dests[container2].containerID
@ -1030,7 +1034,7 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
}, },
{ {
name: "add a new folder", name: "add a new folder",
updateUserData: func(t *testing.T) { updateUserData: func(t *testing.T, ctx context.Context) {
for category, gen := range dataset { for category, gen := range dataset {
deets := generateContainerOfItems( deets := generateContainerOfItems(
t, t,
@ -1075,7 +1079,7 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
}, },
{ {
name: "rename a folder", name: "rename a folder",
updateUserData: func(t *testing.T) { updateUserData: func(t *testing.T, ctx context.Context) {
for category, d := range dataset { for category, d := range dataset {
containerID := d.dests[container3].containerID containerID := d.dests[container3].containerID
newLoc := containerRename newLoc := containerRename
@ -1131,7 +1135,7 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
}, },
{ {
name: "add a new item", name: "add a new item",
updateUserData: func(t *testing.T) { updateUserData: func(t *testing.T, ctx context.Context) {
for category, d := range dataset { for category, d := range dataset {
containerID := d.dests[container1].containerID containerID := d.dests[container1].containerID
@ -1185,7 +1189,7 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
}, },
{ {
name: "delete an existing item", name: "delete an existing item",
updateUserData: func(t *testing.T) { updateUserData: func(t *testing.T, ctx context.Context) {
for category, d := range dataset { for category, d := range dataset {
containerID := d.dests[container1].containerID containerID := d.dests[container1].containerID
@ -1244,11 +1248,22 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
var ( var (
t = suite.T() t = suite.T()
incMB = evmock.NewBus() incMB = evmock.NewBus()
incBO = newTestBackupOp(t, ctx, kw, ms, ctrl, acct, sels, incMB, toggles, closer)
atid = creds.AzureTenantID atid = creds.AzureTenantID
) )
test.updateUserData(t) ctx, flush := tester.WithContext(t, ctx)
defer flush()
incBO := newTestBackupOp(t, ctx, kw, ms, ctrl, acct, sels, incMB, toggles, closer)
suite.Run("PreTestSetup", func() {
t := suite.T()
ctx, flush := tester.WithContext(t, ctx)
defer flush()
test.updateUserData(t, ctx)
})
err := incBO.Run(ctx) err := incBO.Run(ctx)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -1259,16 +1274,21 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
checkMetadataFilesExist(t, ctx, bupID, kw, ms, atid, uidn.ID(), service, categories) checkMetadataFilesExist(t, ctx, bupID, kw, ms, atid, uidn.ID(), service, categories)
deeTD.CheckBackupDetails(t, ctx, bupID, whatSet, ms, ss, expectDeets, true) deeTD.CheckBackupDetails(t, ctx, bupID, whatSet, ms, ss, expectDeets, true)
// FIXME: commented tests are flaky due to interference with other tests
// we need to find a better way to make good assertions here.
// The addition of the deeTD package gives us enough coverage to comment
// out the tests for now and look to their improvemeng later.
// do some additional checks to ensure the incremental dealt with fewer items. // do some additional checks to ensure the incremental dealt with fewer items.
// +4 on read/writes to account for metadata: 1 delta and 1 path for each type. // +4 on read/writes to account for metadata: 1 delta and 1 path for each type.
if !toggles.DisableDelta { // if !toggles.DisableDelta {
assert.Equal(t, test.deltaItemsRead+4, incBO.Results.ItemsRead, "incremental items read") // assert.Equal(t, test.deltaItemsRead+4, incBO.Results.ItemsRead, "incremental items read")
assert.Equal(t, test.deltaItemsWritten+4, incBO.Results.ItemsWritten, "incremental items written") // assert.Equal(t, test.deltaItemsWritten+4, incBO.Results.ItemsWritten, "incremental items written")
} else { // } else {
assert.Equal(t, test.nonDeltaItemsRead+4, incBO.Results.ItemsRead, "non delta items read") // assert.Equal(t, test.nonDeltaItemsRead+4, incBO.Results.ItemsRead, "non delta items read")
assert.Equal(t, test.nonDeltaItemsWritten+4, incBO.Results.ItemsWritten, "non delta items written") // assert.Equal(t, test.nonDeltaItemsWritten+4, incBO.Results.ItemsWritten, "non delta items written")
} // }
assert.Equal(t, test.nonMetaItemsWritten, incBO.Results.ItemsWritten, "non meta incremental items write") // assert.Equal(t, test.nonMetaItemsWritten, incBO.Results.ItemsWritten, "non meta incremental items write")
assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(incBO.Errors.Failure())) assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(incBO.Errors.Failure()))
assert.Empty(t, incBO.Errors.Recovered(), "incremental recoverable/iteration errors") assert.Empty(t, incBO.Errors.Recovered(), "incremental recoverable/iteration errors")
assert.Equal(t, 1, incMB.TimesCalled[events.BackupStart], "incremental backup-start events") assert.Equal(t, 1, incMB.TimesCalled[events.BackupStart], "incremental backup-start events")
@ -1542,20 +1562,21 @@ func runDriveIncrementalTest(
table := []struct { table := []struct {
name string name string
// performs the incremental update required for the test. // performs the incremental update required for the test.
updateFiles func(t *testing.T) //revive:disable-next-line:context-as-argument
updateFiles func(t *testing.T, ctx context.Context)
itemsRead int itemsRead int
itemsWritten int itemsWritten int
nonMetaItemsWritten int nonMetaItemsWritten int
}{ }{
{ {
name: "clean incremental, no changes", name: "clean incremental, no changes",
updateFiles: func(t *testing.T) {}, updateFiles: func(t *testing.T, ctx context.Context) {},
itemsRead: 0, itemsRead: 0,
itemsWritten: 0, itemsWritten: 0,
}, },
{ {
name: "create a new file", name: "create a new file",
updateFiles: func(t *testing.T) { updateFiles: func(t *testing.T, ctx context.Context) {
targetContainer := containerIDs[container1] targetContainer := containerIDs[container1]
driveItem := models.NewDriveItem() driveItem := models.NewDriveItem()
driveItem.SetName(&newFileName) driveItem.SetName(&newFileName)
@ -1578,7 +1599,7 @@ func runDriveIncrementalTest(
}, },
{ {
name: "add permission to new file", name: "add permission to new file",
updateFiles: func(t *testing.T) { updateFiles: func(t *testing.T, ctx context.Context) {
err = onedrive.UpdatePermissions( err = onedrive.UpdatePermissions(
ctx, ctx,
rh, rh,
@ -1596,7 +1617,7 @@ func runDriveIncrementalTest(
}, },
{ {
name: "remove permission from new file", name: "remove permission from new file",
updateFiles: func(t *testing.T) { updateFiles: func(t *testing.T, ctx context.Context) {
err = onedrive.UpdatePermissions( err = onedrive.UpdatePermissions(
ctx, ctx,
rh, rh,
@ -1614,7 +1635,7 @@ func runDriveIncrementalTest(
}, },
{ {
name: "add permission to container", name: "add permission to container",
updateFiles: func(t *testing.T) { updateFiles: func(t *testing.T, ctx context.Context) {
targetContainer := containerIDs[container1] targetContainer := containerIDs[container1]
err = onedrive.UpdatePermissions( err = onedrive.UpdatePermissions(
ctx, ctx,
@ -1633,7 +1654,7 @@ func runDriveIncrementalTest(
}, },
{ {
name: "remove permission from container", name: "remove permission from container",
updateFiles: func(t *testing.T) { updateFiles: func(t *testing.T, ctx context.Context) {
targetContainer := containerIDs[container1] targetContainer := containerIDs[container1]
err = onedrive.UpdatePermissions( err = onedrive.UpdatePermissions(
ctx, ctx,
@ -1652,7 +1673,7 @@ func runDriveIncrementalTest(
}, },
{ {
name: "update contents of a file", name: "update contents of a file",
updateFiles: func(t *testing.T) { updateFiles: func(t *testing.T, ctx context.Context) {
err := suite.ac.Drives().PutItemContent( err := suite.ac.Drives().PutItemContent(
ctx, ctx,
driveID, driveID,
@ -1667,7 +1688,7 @@ func runDriveIncrementalTest(
}, },
{ {
name: "rename a file", name: "rename a file",
updateFiles: func(t *testing.T) { updateFiles: func(t *testing.T, ctx context.Context) {
container := containerIDs[container1] container := containerIDs[container1]
driveItem := models.NewDriveItem() driveItem := models.NewDriveItem()
@ -1691,7 +1712,7 @@ func runDriveIncrementalTest(
}, },
{ {
name: "move a file between folders", name: "move a file between folders",
updateFiles: func(t *testing.T) { updateFiles: func(t *testing.T, ctx context.Context) {
dest := containerIDs[container2] dest := containerIDs[container2]
driveItem := models.NewDriveItem() driveItem := models.NewDriveItem()
@ -1719,7 +1740,7 @@ func runDriveIncrementalTest(
}, },
{ {
name: "delete file", name: "delete file",
updateFiles: func(t *testing.T) { updateFiles: func(t *testing.T, ctx context.Context) {
err := suite.ac.Drives().DeleteItem( err := suite.ac.Drives().DeleteItem(
ctx, ctx,
driveID, driveID,
@ -1734,7 +1755,7 @@ func runDriveIncrementalTest(
}, },
{ {
name: "move a folder to a subfolder", name: "move a folder to a subfolder",
updateFiles: func(t *testing.T) { updateFiles: func(t *testing.T, ctx context.Context) {
parent := containerIDs[container1] parent := containerIDs[container1]
child := containerIDs[container2] child := containerIDs[container2]
@ -1762,7 +1783,7 @@ func runDriveIncrementalTest(
}, },
{ {
name: "rename a folder", name: "rename a folder",
updateFiles: func(t *testing.T) { updateFiles: func(t *testing.T, ctx context.Context) {
parent := containerIDs[container1] parent := containerIDs[container1]
child := containerIDs[container2] child := containerIDs[container2]
@ -1792,7 +1813,7 @@ func runDriveIncrementalTest(
}, },
{ {
name: "delete a folder", name: "delete a folder",
updateFiles: func(t *testing.T) { updateFiles: func(t *testing.T, ctx context.Context) {
container := containerIDs[containerRename] container := containerIDs[containerRename]
err := suite.ac.Drives().DeleteItem( err := suite.ac.Drives().DeleteItem(
ctx, ctx,
@ -1808,7 +1829,7 @@ func runDriveIncrementalTest(
}, },
{ {
name: "add a new folder", name: "add a new folder",
updateFiles: func(t *testing.T) { updateFiles: func(t *testing.T, ctx context.Context) {
generateContainerOfItems( generateContainerOfItems(
t, t,
ctx, ctx,
@ -1850,9 +1871,17 @@ func runDriveIncrementalTest(
incBO = newTestBackupOp(t, ctx, kw, ms, cleanCtrl, acct, sel, incMB, ffs, closer) incBO = newTestBackupOp(t, ctx, kw, ms, cleanCtrl, acct, sel, incMB, ffs, closer)
) )
tester.LogTimeOfTest(suite.T()) ctx, flush := tester.WithContext(t, ctx)
defer flush()
test.updateFiles(t) suite.Run("PreTestSetup", func() {
t := suite.T()
ctx, flush := tester.WithContext(t, ctx)
defer flush()
test.updateFiles(t, ctx)
})
err = incBO.Run(ctx) err = incBO.Run(ctx)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))

View File

@ -8,6 +8,7 @@ import (
"time" "time"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/kopia/kopia/repo/logging"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/pflag" "github.com/spf13/pflag"
"go.uber.org/zap" "go.uber.org/zap"
@ -60,6 +61,7 @@ const (
LogLevelFN = "log-level" LogLevelFN = "log-level"
ReadableLogsFN = "readable-logs" ReadableLogsFN = "readable-logs"
MaskSensitiveDataFN = "mask-sensitive-data" MaskSensitiveDataFN = "mask-sensitive-data"
logStorageFN = "log-storage"
) )
// flag values // flag values
@ -70,6 +72,7 @@ var (
LogLevelFV string LogLevelFV string
ReadableLogsFV bool ReadableLogsFV bool
MaskSensitiveDataFV bool MaskSensitiveDataFV bool
logStorageFV bool
ResolvedLogFile string // logFileFV after processing ResolvedLogFile string // logFileFV after processing
piiHandling string // piiHandling after MaskSensitiveDataFV processing piiHandling string // piiHandling after MaskSensitiveDataFV processing
@ -131,6 +134,13 @@ func addFlags(fs *pflag.FlagSet, defaultFile string) {
MaskSensitiveDataFN, MaskSensitiveDataFN,
false, false,
"anonymize personal data in log output") "anonymize personal data in log output")
fs.BoolVar(
&logStorageFV,
logStorageFN,
false,
"include logs produced by the downstream storage systems. Uses the same log level as the corso logger")
cobra.CheckErr(fs.MarkHidden(logStorageFN))
} }
// Due to races between the lazy evaluation of flags in cobra and the // Due to races between the lazy evaluation of flags in cobra and the
@ -197,6 +207,18 @@ func PreloadLoggingFlags(args []string) Settings {
set.PIIHandling = PIIHash set.PIIHandling = PIIHash
} }
// retrieve the user's preferred settings for storage engine logging in the
// corso log.
// defaults to not logging it.
storageLog, err := fs.GetBool(logStorageFN)
if err != nil {
return set
}
if storageLog {
set.LogStorage = storageLog
}
return set return set
} }
@ -241,6 +263,7 @@ type Settings struct {
Format logFormat // whether to format as text (console) or json (cloud) Format logFormat // whether to format as text (console) or json (cloud)
Level logLevel // what level to log at Level logLevel // what level to log at
PIIHandling piiAlg // how to obscure pii PIIHandling piiAlg // how to obscure pii
LogStorage bool // Whether kopia logs should be added to the corso log.
} }
// EnsureDefaults sets any non-populated settings to their default value. // EnsureDefaults sets any non-populated settings to their default value.
@ -390,7 +413,7 @@ const ctxKey loggingKey = "corsoLogger"
// a seeded context prior to cobra evaluating flags. // a seeded context prior to cobra evaluating flags.
func Seed(ctx context.Context, set Settings) (context.Context, *zap.SugaredLogger) { func Seed(ctx context.Context, set Settings) (context.Context, *zap.SugaredLogger) {
zsl := singleton(set) zsl := singleton(set)
return Set(ctx, zsl), zsl return SetWithSettings(ctx, zsl, set), zsl
} }
func setCluesSecretsHash(alg piiAlg) { func setCluesSecretsHash(alg piiAlg) {
@ -412,7 +435,7 @@ func CtxOrSeed(ctx context.Context, set Settings) (context.Context, *zap.Sugared
l := ctx.Value(ctxKey) l := ctx.Value(ctxKey)
if l == nil { if l == nil {
zsl := singleton(set) zsl := singleton(set)
return Set(ctx, zsl), zsl return SetWithSettings(ctx, zsl, set), zsl
} }
return ctx, l.(*zap.SugaredLogger) return ctx, l.(*zap.SugaredLogger)
@ -420,10 +443,31 @@ func CtxOrSeed(ctx context.Context, set Settings) (context.Context, *zap.Sugared
// Set allows users to embed their own zap.SugaredLogger within the context. // Set allows users to embed their own zap.SugaredLogger within the context.
func Set(ctx context.Context, logger *zap.SugaredLogger) context.Context { func Set(ctx context.Context, logger *zap.SugaredLogger) context.Context {
set := Settings{}.EnsureDefaults()
return SetWithSettings(ctx, logger, set)
}
// SetWithSettings allows users to embed their own zap.SugaredLogger within the
// context and with the given logger settings.
func SetWithSettings(
ctx context.Context,
logger *zap.SugaredLogger,
set Settings,
) context.Context {
if logger == nil { if logger == nil {
return ctx return ctx
} }
// Add the kopia logger as well. Unfortunately we need to do this here instead
// of a kopia-specific package because we want it to be in the context that's
// used for the rest of execution.
if set.LogStorage {
ctx = logging.WithLogger(ctx, func(module string) logging.Logger {
return logger.Named("kopia-lib/" + module)
})
}
return context.WithValue(ctx, ctxKey, logger) return context.WithValue(ctx, ctxKey, logger)
} }

View File

@ -17,8 +17,15 @@ const (
// get easily misspelled. // get easily misspelled.
// eg: we don't need a const for "id" // eg: we don't need a const for "id"
const ( const (
parentFolderID = "parentFolderId" attendees = "attendees"
bccRecipients = "bccRecipients"
ccRecipients = "ccRecipients"
createdDateTime = "createdDateTime"
displayName = "displayName" displayName = "displayName"
givenName = "givenName"
parentFolderID = "parentFolderId"
surname = "surname"
toRecipients = "toRecipients"
userPrincipalName = "userPrincipalName" userPrincipalName = "userPrincipalName"
) )

View File

@ -265,3 +265,17 @@ func ContactInfo(contact models.Contactable) *details.ExchangeInfo {
Modified: ptr.OrNow(contact.GetLastModifiedDateTime()), Modified: ptr.OrNow(contact.GetLastModifiedDateTime()),
} }
} }
func contactCollisionKeyProps() []string {
return idAnd(givenName)
}
// ContactCollisionKey constructs a key from the contactable's creation time and either displayName or given+surname.
// collision keys are used to identify duplicate item conflicts for handling advanced restoration config.
func ContactCollisionKey(item models.Contactable) string {
if item == nil {
return ""
}
return ptr.Val(item.GetId())
}

View File

@ -90,22 +90,98 @@ func (c Contacts) EnumerateContainers(
// item pager // item pager
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
var _ itemPager = &contactPager{} var _ itemPager[models.Contactable] = &contactsPageCtrl{}
type contactPager struct { type contactsPageCtrl struct {
gs graph.Servicer gs graph.Servicer
builder *users.ItemContactFoldersItemContactsRequestBuilder builder *users.ItemContactFoldersItemContactsRequestBuilder
options *users.ItemContactFoldersItemContactsRequestBuilderGetRequestConfiguration options *users.ItemContactFoldersItemContactsRequestBuilderGetRequestConfiguration
} }
func (c Contacts) NewContactPager( func (c Contacts) NewContactsPager(
userID, containerID string,
selectProps ...string,
) itemPager[models.Contactable] {
options := &users.ItemContactFoldersItemContactsRequestBuilderGetRequestConfiguration{
Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize)),
QueryParameters: &users.ItemContactFoldersItemContactsRequestBuilderGetQueryParameters{
Top: ptr.To[int32](maxNonDeltaPageSize),
},
}
if len(selectProps) > 0 {
options.QueryParameters.Select = selectProps
}
builder := c.Stable.
Client().
Users().
ByUserId(userID).
ContactFolders().
ByContactFolderId(containerID).
Contacts()
return &contactsPageCtrl{c.Stable, builder, options}
}
//lint:ignore U1000 False Positive
func (p *contactsPageCtrl) getPage(ctx context.Context) (PageLinkValuer[models.Contactable], error) {
resp, err := p.builder.Get(ctx, p.options)
if err != nil {
return nil, graph.Stack(ctx, err)
}
return EmptyDeltaLinker[models.Contactable]{PageLinkValuer: resp}, nil
}
//lint:ignore U1000 False Positive
func (p *contactsPageCtrl) setNext(nextLink string) {
p.builder = users.NewItemContactFoldersItemContactsRequestBuilder(nextLink, p.gs.Adapter())
}
//lint:ignore U1000 False Positive
func (c Contacts) GetItemsInContainerByCollisionKey(
ctx context.Context,
userID, containerID string,
) (map[string]string, error) {
ctx = clues.Add(ctx, "container_id", containerID)
pager := c.NewContactsPager(userID, containerID, contactCollisionKeyProps()...)
items, err := enumerateItems(ctx, pager)
if err != nil {
return nil, graph.Wrap(ctx, err, "enumerating contacts")
}
m := map[string]string{}
for _, item := range items {
m[ContactCollisionKey(item)] = ptr.Val(item.GetId())
}
return m, nil
}
// ---------------------------------------------------------------------------
// item ID pager
// ---------------------------------------------------------------------------
var _ itemIDPager = &contactIDPager{}
type contactIDPager struct {
gs graph.Servicer
builder *users.ItemContactFoldersItemContactsRequestBuilder
options *users.ItemContactFoldersItemContactsRequestBuilderGetRequestConfiguration
}
func (c Contacts) NewContactIDsPager(
ctx context.Context, ctx context.Context,
userID, containerID string, userID, containerID string,
immutableIDs bool, immutableIDs bool,
) itemPager { ) itemIDPager {
config := &users.ItemContactFoldersItemContactsRequestBuilderGetRequestConfiguration{ config := &users.ItemContactFoldersItemContactsRequestBuilderGetRequestConfiguration{
QueryParameters: &users.ItemContactFoldersItemContactsRequestBuilderGetQueryParameters{ QueryParameters: &users.ItemContactFoldersItemContactsRequestBuilderGetQueryParameters{
Select: idAnd(parentFolderID), Select: idAnd(parentFolderID),
Top: ptr.To[int32](maxNonDeltaPageSize),
}, },
Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)), Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)),
} }
@ -118,10 +194,10 @@ func (c Contacts) NewContactPager(
ByContactFolderId(containerID). ByContactFolderId(containerID).
Contacts() Contacts()
return &contactPager{c.Stable, builder, config} return &contactIDPager{c.Stable, builder, config}
} }
func (p *contactPager) getPage(ctx context.Context) (DeltaPageLinker, error) { func (p *contactIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
resp, err := p.builder.Get(ctx, p.options) resp, err := p.builder.Get(ctx, p.options)
if err != nil { if err != nil {
return nil, graph.Stack(ctx, err) return nil, graph.Stack(ctx, err)
@ -130,24 +206,24 @@ func (p *contactPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
return EmptyDeltaLinker[models.Contactable]{PageLinkValuer: resp}, nil return EmptyDeltaLinker[models.Contactable]{PageLinkValuer: resp}, nil
} }
func (p *contactPager) setNext(nextLink string) { func (p *contactIDPager) setNext(nextLink string) {
p.builder = users.NewItemContactFoldersItemContactsRequestBuilder(nextLink, p.gs.Adapter()) p.builder = users.NewItemContactFoldersItemContactsRequestBuilder(nextLink, p.gs.Adapter())
} }
// non delta pagers don't need reset // non delta pagers don't need reset
func (p *contactPager) reset(context.Context) {} func (p *contactIDPager) reset(context.Context) {}
func (p *contactPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) { func (p *contactIDPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
return toValues[models.Contactable](pl) return toValues[models.Contactable](pl)
} }
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// delta item pager // delta item ID pager
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
var _ itemPager = &contactDeltaPager{} var _ itemIDPager = &contactDeltaIDPager{}
type contactDeltaPager struct { type contactDeltaIDPager struct {
gs graph.Servicer gs graph.Servicer
userID string userID string
containerID string containerID string
@ -165,14 +241,15 @@ func getContactDeltaBuilder(
return builder return builder
} }
func (c Contacts) NewContactDeltaPager( func (c Contacts) NewContactDeltaIDsPager(
ctx context.Context, ctx context.Context,
userID, containerID, oldDelta string, userID, containerID, oldDelta string,
immutableIDs bool, immutableIDs bool,
) itemPager { ) itemIDPager {
options := &users.ItemContactFoldersItemContactsDeltaRequestBuilderGetRequestConfiguration{ options := &users.ItemContactFoldersItemContactsDeltaRequestBuilderGetRequestConfiguration{
QueryParameters: &users.ItemContactFoldersItemContactsDeltaRequestBuilderGetQueryParameters{ QueryParameters: &users.ItemContactFoldersItemContactsDeltaRequestBuilderGetQueryParameters{
Select: idAnd(parentFolderID), Select: idAnd(parentFolderID),
// TOP is not allowed
}, },
Headers: newPreferHeaders(preferPageSize(maxDeltaPageSize), preferImmutableIDs(immutableIDs)), Headers: newPreferHeaders(preferPageSize(maxDeltaPageSize), preferImmutableIDs(immutableIDs)),
} }
@ -184,10 +261,10 @@ func (c Contacts) NewContactDeltaPager(
builder = getContactDeltaBuilder(ctx, c.Stable, userID, containerID, options) builder = getContactDeltaBuilder(ctx, c.Stable, userID, containerID, options)
} }
return &contactDeltaPager{c.Stable, userID, containerID, builder, options} return &contactDeltaIDPager{c.Stable, userID, containerID, builder, options}
} }
func (p *contactDeltaPager) getPage(ctx context.Context) (DeltaPageLinker, error) { func (p *contactDeltaIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
resp, err := p.builder.Get(ctx, p.options) resp, err := p.builder.Get(ctx, p.options)
if err != nil { if err != nil {
return nil, graph.Stack(ctx, err) return nil, graph.Stack(ctx, err)
@ -196,15 +273,15 @@ func (p *contactDeltaPager) getPage(ctx context.Context) (DeltaPageLinker, error
return resp, nil return resp, nil
} }
func (p *contactDeltaPager) setNext(nextLink string) { func (p *contactDeltaIDPager) setNext(nextLink string) {
p.builder = users.NewItemContactFoldersItemContactsDeltaRequestBuilder(nextLink, p.gs.Adapter()) p.builder = users.NewItemContactFoldersItemContactsDeltaRequestBuilder(nextLink, p.gs.Adapter())
} }
func (p *contactDeltaPager) reset(ctx context.Context) { func (p *contactDeltaIDPager) reset(ctx context.Context) {
p.builder = getContactDeltaBuilder(ctx, p.gs, p.userID, p.containerID, p.options) p.builder = getContactDeltaBuilder(ctx, p.gs, p.userID, p.containerID, p.options)
} }
func (p *contactDeltaPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) { func (p *contactDeltaIDPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
return toValues[models.Contactable](pl) return toValues[models.Contactable](pl)
} }
@ -219,8 +296,8 @@ func (c Contacts) GetAddedAndRemovedItemIDs(
"category", selectors.ExchangeContact, "category", selectors.ExchangeContact,
"container_id", containerID) "container_id", containerID)
pager := c.NewContactPager(ctx, userID, containerID, immutableIDs) pager := c.NewContactIDsPager(ctx, userID, containerID, immutableIDs)
deltaPager := c.NewContactDeltaPager(ctx, userID, containerID, oldDelta, immutableIDs) deltaPager := c.NewContactDeltaIDsPager(ctx, userID, containerID, oldDelta, immutableIDs)
return getAddedAndRemovedItemIDs(ctx, c.Stable, pager, deltaPager, oldDelta, canMakeDeltaQueries) return getAddedAndRemovedItemIDs(ctx, c.Stable, pager, deltaPager, oldDelta, canMakeDeltaQueries)
} }

View File

@ -0,0 +1,73 @@
package api_test
import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type ContactsPagerIntgSuite struct {
tester.Suite
cts clientTesterSetup
}
func TestContactsPagerIntgSuite(t *testing.T) {
suite.Run(t, &ContactsPagerIntgSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tester.M365AcctCredEnvs}),
})
}
func (suite *ContactsPagerIntgSuite) SetupSuite() {
suite.cts = newClientTesterSetup(suite.T())
}
func (suite *ContactsPagerIntgSuite) TestGetItemsInContainerByCollisionKey() {
t := suite.T()
ac := suite.cts.ac.Contacts()
ctx, flush := tester.NewContext(t)
defer flush()
container, err := ac.GetContainerByID(ctx, suite.cts.userID, "contacts")
require.NoError(t, err, clues.ToCore(err))
conts, err := ac.Stable.
Client().
Users().
ByUserId(suite.cts.userID).
ContactFolders().
ByContactFolderId(ptr.Val(container.GetId())).
Contacts().
Get(ctx, nil)
require.NoError(t, err, clues.ToCore(err))
cs := conts.GetValue()
expect := make([]string, 0, len(cs))
for _, c := range cs {
expect = append(expect, api.ContactCollisionKey(c))
}
results, err := ac.GetItemsInContainerByCollisionKey(ctx, suite.cts.userID, "contacts")
require.NoError(t, err, clues.ToCore(err))
require.Less(t, 0, len(results), "requires at least one result")
for k, v := range results {
assert.NotEmpty(t, k, "all keys should be populated")
assert.NotEmpty(t, v, "all values should be populated")
}
for _, e := range expect {
_, ok := results[e]
assert.Truef(t, ok, "expected results to contain collision key: %s", e)
}
}

View File

@ -292,8 +292,8 @@ func GetAllDrives(
for i := 0; i <= maxRetryCount; i++ { for i := 0; i <= maxRetryCount; i++ {
page, err = pager.GetPage(ctx) page, err = pager.GetPage(ctx)
if err != nil { if err != nil {
if clues.HasLabel(err, graph.LabelsMysiteNotFound) { if clues.HasLabel(err, graph.LabelsMysiteNotFound) || clues.HasLabel(err, graph.LabelsNoSharePointLicense) {
logger.Ctx(ctx).Infof("resource owner does not have a drive") logger.CtxErr(ctx, err).Infof("resource owner does not have a drive")
return make([]models.Driveable, 0), nil // no license or drives. return make([]models.Driveable, 0), nil // no license or drives.
} }

View File

@ -3,8 +3,10 @@ package api
import ( import (
"bytes" "bytes"
"context" "context"
"encoding/json"
"fmt" "fmt"
"io" "io"
"strings"
"time" "time"
"github.com/alcionai/clues" "github.com/alcionai/clues"
@ -15,6 +17,7 @@ import (
"github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
@ -189,6 +192,14 @@ func (c Events) PatchCalendar(
return nil return nil
} }
const (
// Beta version cannot have /calendars/%s for get and Patch
// https://stackoverflow.com/questions/50492177/microsoft-graph-get-user-calendar-event-with-beta-version
eventExceptionsBetaURLTemplate = "https://graph.microsoft.com/beta/users/%s/events/%s?$expand=exceptionOccurrences"
eventPostBetaURLTemplate = "https://graph.microsoft.com/beta/users/%s/calendars/%s/events"
eventPatchBetaURLTemplate = "https://graph.microsoft.com/beta/users/%s/events/%s"
)
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// items // items
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -208,41 +219,233 @@ func (c Events) GetItem(
} }
) )
event, err = c.Stable. // Beta endpoint helps us fetch the event exceptions, but since we
// don't use the beta SDK, the exceptionOccurrences and
// cancelledOccurrences end up in AdditionalData
// https://learn.microsoft.com/en-us/graph/api/resources/event?view=graph-rest-beta#properties
rawURL := fmt.Sprintf(eventExceptionsBetaURLTemplate, userID, itemID)
builder := users.NewItemEventsEventItemRequestBuilder(rawURL, c.Stable.Adapter())
event, err = builder.Get(ctx, config)
if err != nil {
return nil, nil, graph.Stack(ctx, err)
}
err = validateCancelledOccurrences(event)
if err != nil {
return nil, nil, clues.Wrap(err, "verify cancelled occurrences")
}
err = fixupExceptionOccurrences(ctx, c, event, immutableIDs, userID)
if err != nil {
return nil, nil, clues.Wrap(err, "fixup exception occurrences")
}
var attachments []models.Attachmentable
if ptr.Val(event.GetHasAttachments()) || HasAttachments(event.GetBody()) {
attachments, err = c.GetAttachments(ctx, immutableIDs, userID, itemID)
if err != nil {
return nil, nil, err
}
}
event.SetAttachments(attachments)
return event, EventInfo(event), nil
}
// fixupExceptionOccurrences gets attachments and converts the data
// into a format that gets serialized when storing to kopia
func fixupExceptionOccurrences(
ctx context.Context,
client Events,
event models.Eventable,
immutableIDs bool,
userID string,
) error {
// Fetch attachments for exceptions
exceptionOccurrences := event.GetAdditionalData()["exceptionOccurrences"]
if exceptionOccurrences == nil {
return nil
}
eo, ok := exceptionOccurrences.([]any)
if !ok {
return clues.New("converting exceptionOccurrences to []any").
With("type", fmt.Sprintf("%T", exceptionOccurrences))
}
for _, instance := range eo {
instance, ok := instance.(map[string]any)
if !ok {
return clues.New("converting instance to map[string]any").
With("type", fmt.Sprintf("%T", instance))
}
evt, err := EventFromMap(instance)
if err != nil {
return clues.Wrap(err, "parsing exception event")
}
// OPTIMIZATION: We don't have to store any of the
// attachments that carry over from the original
var attachments []models.Attachmentable
if ptr.Val(event.GetHasAttachments()) || HasAttachments(event.GetBody()) {
attachments, err = client.GetAttachments(ctx, immutableIDs, userID, ptr.Val(evt.GetId()))
if err != nil {
return clues.Wrap(err, "getting event instance attachments").
With("event_instance_id", ptr.Val(evt.GetId()))
}
}
// This odd roundabout way of doing this is required as
// the json serialization at the end does not serialize if
// you just pass in a models.Attachmentable
convertedAttachments := []map[string]interface{}{}
for _, attachment := range attachments {
am, err := parseableToMap(attachment)
if err != nil {
return clues.Wrap(err, "converting attachment")
}
convertedAttachments = append(convertedAttachments, am)
}
instance["attachments"] = convertedAttachments
}
return nil
}
// Adding checks to ensure that the data is in the format that we expect M365 to return
func validateCancelledOccurrences(event models.Eventable) error {
cancelledOccurrences := event.GetAdditionalData()["cancelledOccurrences"]
if cancelledOccurrences != nil {
co, ok := cancelledOccurrences.([]any)
if !ok {
return clues.New("converting cancelledOccurrences to []any").
With("type", fmt.Sprintf("%T", cancelledOccurrences))
}
for _, instance := range co {
instance, err := str.AnyToString(instance)
if err != nil {
return err
}
// There might be multiple `.` in the ID and hence >2
splits := strings.Split(instance, ".")
if len(splits) < 2 {
return clues.New("unexpected cancelled event format").
With("instance", instance)
}
startStr := splits[len(splits)-1]
_, err = dttm.ParseTime(startStr)
if err != nil {
return clues.Wrap(err, "parsing cancelled event date")
}
}
}
return nil
}
func parseableToMap(att serialization.Parsable) (map[string]any, error) {
var item map[string]any
writer := kjson.NewJsonSerializationWriter()
defer writer.Close()
if err := writer.WriteObjectValue("", att); err != nil {
return nil, err
}
ats, err := writer.GetSerializedContent()
if err != nil {
return nil, err
}
err = json.Unmarshal(ats, &item)
if err != nil {
return nil, clues.Wrap(err, "unmarshalling serialized attachment")
}
return item, nil
}
func (c Events) GetAttachments(
ctx context.Context,
immutableIDs bool,
userID, itemID string,
) ([]models.Attachmentable, error) {
config := &users.ItemEventsItemAttachmentsRequestBuilderGetRequestConfiguration{
QueryParameters: &users.ItemEventsItemAttachmentsRequestBuilderGetQueryParameters{
Expand: []string{"microsoft.graph.itemattachment/item"},
},
Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)),
}
attached, err := c.LargeItem.
Client(). Client().
Users(). Users().
ByUserId(userID). ByUserId(userID).
Events(). Events().
ByEventId(itemID). ByEventId(itemID).
Attachments().
Get(ctx, config) Get(ctx, config)
if err != nil { if err != nil {
return nil, nil, graph.Stack(ctx, err) return nil, graph.Wrap(ctx, err, "event attachment download")
} }
if ptr.Val(event.GetHasAttachments()) || HasAttachments(event.GetBody()) { return attached.GetValue(), nil
config := &users.ItemEventsItemAttachmentsRequestBuilderGetRequestConfiguration{ }
QueryParameters: &users.ItemEventsItemAttachmentsRequestBuilderGetQueryParameters{
Expand: []string{"microsoft.graph.itemattachment/item"},
},
Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)),
}
attached, err := c.LargeItem. func (c Events) DeleteAttachment(
Client(). ctx context.Context,
Users(). userID, calendarID, eventID, attachmentID string,
ByUserId(userID). ) error {
Events(). return c.Stable.
ByEventId(itemID). Client().
Attachments(). Users().
Get(ctx, config) ByUserId(userID).
if err != nil { Calendars().
return nil, nil, graph.Wrap(ctx, err, "event attachment download") ByCalendarId(calendarID).
} Events().
ByEventId(eventID).
Attachments().
ByAttachmentId(attachmentID).
Delete(ctx, nil)
}
event.SetAttachments(attached.GetValue()) func (c Events) GetItemInstances(
ctx context.Context,
userID, itemID, startDate, endDate string,
) ([]models.Eventable, error) {
config := &users.ItemEventsItemInstancesRequestBuilderGetRequestConfiguration{
QueryParameters: &users.ItemEventsItemInstancesRequestBuilderGetQueryParameters{
Select: []string{"id"},
StartDateTime: ptr.To(startDate),
EndDateTime: ptr.To(endDate),
},
} }
return event, EventInfo(event), nil events, err := c.Stable.
Client().
Users().
ByUserId(userID).
Events().
ByEventId(itemID).
Instances().
Get(ctx, config)
if err != nil {
return nil, graph.Stack(ctx, err)
}
return events.GetValue(), nil
} }
func (c Events) PostItem( func (c Events) PostItem(
@ -250,14 +453,10 @@ func (c Events) PostItem(
userID, containerID string, userID, containerID string,
body models.Eventable, body models.Eventable,
) (models.Eventable, error) { ) (models.Eventable, error) {
itm, err := c.Stable. rawURL := fmt.Sprintf(eventPostBetaURLTemplate, userID, containerID)
Client(). builder := users.NewItemCalendarsItemEventsRequestBuilder(rawURL, c.Stable.Adapter())
Users().
ByUserId(userID). itm, err := builder.Post(ctx, body, nil)
Calendars().
ByCalendarId(containerID).
Events().
Post(ctx, body, nil)
if err != nil { if err != nil {
return nil, graph.Wrap(ctx, err, "creating calendar event") return nil, graph.Wrap(ctx, err, "creating calendar event")
} }
@ -265,6 +464,22 @@ func (c Events) PostItem(
return itm, nil return itm, nil
} }
func (c Events) PatchItem(
ctx context.Context,
userID, eventID string,
body models.Eventable,
) (models.Eventable, error) {
rawURL := fmt.Sprintf(eventPatchBetaURLTemplate, userID, eventID)
builder := users.NewItemCalendarsItemEventsEventItemRequestBuilder(rawURL, c.Stable.Adapter())
itm, err := builder.Patch(ctx, body, nil)
if err != nil {
return nil, graph.Wrap(ctx, err, "updating calendar event")
}
return itm, nil
}
func (c Events) DeleteItem( func (c Events) DeleteItem(
ctx context.Context, ctx context.Context,
userID, itemID string, userID, itemID string,
@ -315,14 +530,9 @@ func (c Events) PostSmallAttachment(
func (c Events) PostLargeAttachment( func (c Events) PostLargeAttachment(
ctx context.Context, ctx context.Context,
userID, containerID, parentItemID, itemName string, userID, containerID, parentItemID, itemName string,
size int64, content []byte,
body models.Attachmentable, ) (string, error) {
) (models.UploadSessionable, error) { size := int64(len(content))
bs, err := GetAttachmentContent(body)
if err != nil {
return nil, clues.Wrap(err, "serializing attachment content").WithClues(ctx)
}
session := users.NewItemCalendarEventsItemAttachmentsCreateUploadSessionPostRequestBody() session := users.NewItemCalendarEventsItemAttachmentsCreateUploadSessionPostRequestBody()
session.SetAttachmentItem(makeSessionAttachment(itemName, size)) session.SetAttachmentItem(makeSessionAttachment(itemName, size))
@ -338,19 +548,19 @@ func (c Events) PostLargeAttachment(
CreateUploadSession(). CreateUploadSession().
Post(ctx, session, nil) Post(ctx, session, nil)
if err != nil { if err != nil {
return nil, graph.Wrap(ctx, err, "uploading large event attachment") return "", graph.Wrap(ctx, err, "uploading large event attachment")
} }
url := ptr.Val(us.GetUploadUrl()) url := ptr.Val(us.GetUploadUrl())
w := graph.NewLargeItemWriter(parentItemID, url, size) w := graph.NewLargeItemWriter(parentItemID, url, size)
copyBuffer := make([]byte, graph.AttachmentChunkSize) copyBuffer := make([]byte, graph.AttachmentChunkSize)
_, err = io.CopyBuffer(w, bytes.NewReader(bs), copyBuffer) _, err = io.CopyBuffer(w, bytes.NewReader(content), copyBuffer)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "buffering large attachment content").WithClues(ctx) return "", clues.Wrap(err, "buffering large attachment content").WithClues(ctx)
} }
return us, nil return w.ID, nil
} }
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -472,3 +682,31 @@ func EventInfo(evt models.Eventable) *details.ExchangeInfo {
Modified: ptr.OrNow(evt.GetLastModifiedDateTime()), Modified: ptr.OrNow(evt.GetLastModifiedDateTime()),
} }
} }
func EventFromMap(ev map[string]any) (models.Eventable, error) {
instBytes, err := json.Marshal(ev)
if err != nil {
return nil, clues.Wrap(err, "marshaling event exception instance")
}
body, err := BytesToEventable(instBytes)
if err != nil {
return nil, clues.Wrap(err, "converting exception event bytes to Eventable")
}
return body, nil
}
func eventCollisionKeyProps() []string {
return idAnd("subject")
}
// EventCollisionKey constructs a key from the eventable's creation time, subject, and organizer.
// collision keys are used to identify duplicate item conflicts for handling advanced restoration config.
func EventCollisionKey(item models.Eventable) string {
if item == nil {
return ""
}
return ptr.Val(item.GetSubject())
}

View File

@ -98,21 +98,27 @@ func (c Events) EnumerateContainers(
// item pager // item pager
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
var _ itemPager = &eventPager{} var _ itemPager[models.Eventable] = &eventsPageCtrl{}
type eventPager struct { type eventsPageCtrl struct {
gs graph.Servicer gs graph.Servicer
builder *users.ItemCalendarsItemEventsRequestBuilder builder *users.ItemCalendarsItemEventsRequestBuilder
options *users.ItemCalendarsItemEventsRequestBuilderGetRequestConfiguration options *users.ItemCalendarsItemEventsRequestBuilderGetRequestConfiguration
} }
func (c Events) NewEventPager( func (c Events) NewEventsPager(
ctx context.Context,
userID, containerID string, userID, containerID string,
immutableIDs bool, selectProps ...string,
) (itemPager, error) { ) itemPager[models.Eventable] {
options := &users.ItemCalendarsItemEventsRequestBuilderGetRequestConfiguration{ options := &users.ItemCalendarsItemEventsRequestBuilderGetRequestConfiguration{
Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)), Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize)),
QueryParameters: &users.ItemCalendarsItemEventsRequestBuilderGetQueryParameters{
Top: ptr.To[int32](maxNonDeltaPageSize),
},
}
if len(selectProps) > 0 {
options.QueryParameters.Select = selectProps
} }
builder := c.Stable. builder := c.Stable.
@ -123,10 +129,82 @@ func (c Events) NewEventPager(
ByCalendarId(containerID). ByCalendarId(containerID).
Events() Events()
return &eventPager{c.Stable, builder, options}, nil return &eventsPageCtrl{c.Stable, builder, options}
} }
func (p *eventPager) getPage(ctx context.Context) (DeltaPageLinker, error) { //lint:ignore U1000 False Positive
// getPage fetches the next page of events using the pager's current
// builder and request options.
func (p *eventsPageCtrl) getPage(ctx context.Context) (PageLinkValuer[models.Eventable], error) {
	page, err := p.builder.Get(ctx, p.options)
	if err != nil {
		return nil, graph.Stack(ctx, err)
	}

	return page, nil
}
//lint:ignore U1000 False Positive
// setNext rebuilds the request builder from the nextLink url returned
// by the previous page.
func (p *eventsPageCtrl) setNext(link string) {
	p.builder = users.NewItemCalendarsItemEventsRequestBuilder(link, p.gs.Adapter())
}
//lint:ignore U1000 False Positive
// GetItemsInContainerByCollisionKey enumerates the events in the given
// container and maps each item's collision key to its item id.
func (c Events) GetItemsInContainerByCollisionKey(
	ctx context.Context,
	userID, containerID string,
) (map[string]string, error) {
	ctx = clues.Add(ctx, "container_id", containerID)

	items, err := enumerateItems(
		ctx,
		c.NewEventsPager(userID, containerID, eventCollisionKeyProps()...))
	if err != nil {
		return nil, graph.Wrap(ctx, err, "enumerating events")
	}

	// NOTE(review): if two events share a collision key, the later item
	// overwrites the earlier entry.
	keyToID := make(map[string]string, len(items))

	for _, item := range items {
		keyToID[EventCollisionKey(item)] = ptr.Val(item.GetId())
	}

	return keyToID, nil
}
// ---------------------------------------------------------------------------
// item ID pager
// ---------------------------------------------------------------------------
var _ itemIDPager = &eventIDPager{}

// eventIDPager pages through event ids in a single calendar using
// non-delta queries.
type eventIDPager struct {
	// gs supplies the adapter used to rebuild the builder from nextLink urls.
	gs graph.Servicer
	builder *users.ItemCalendarsItemEventsRequestBuilder
	options *users.ItemCalendarsItemEventsRequestBuilderGetRequestConfiguration
}
// NewEventIDsPager constructs a non-delta pager over the event ids in
// the given calendar.  immutableIDs toggles the immutable-id preference
// header on each request.
func (c Events) NewEventIDsPager(
	ctx context.Context,
	userID, containerID string,
	immutableIDs bool,
) (itemIDPager, error) {
	builder := c.Stable.
		Client().
		Users().
		ByUserId(userID).
		Calendars().
		ByCalendarId(containerID).
		Events()

	config := &users.ItemCalendarsItemEventsRequestBuilderGetRequestConfiguration{
		Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)),
		QueryParameters: &users.ItemCalendarsItemEventsRequestBuilderGetQueryParameters{
			Top: ptr.To[int32](maxNonDeltaPageSize),
		},
	}

	return &eventIDPager{c.Stable, builder, config}, nil
}
func (p *eventIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
resp, err := p.builder.Get(ctx, p.options) resp, err := p.builder.Get(ctx, p.options)
if err != nil { if err != nil {
return nil, graph.Stack(ctx, err) return nil, graph.Stack(ctx, err)
@ -135,24 +213,24 @@ func (p *eventPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
return EmptyDeltaLinker[models.Eventable]{PageLinkValuer: resp}, nil return EmptyDeltaLinker[models.Eventable]{PageLinkValuer: resp}, nil
} }
func (p *eventPager) setNext(nextLink string) { func (p *eventIDPager) setNext(nextLink string) {
p.builder = users.NewItemCalendarsItemEventsRequestBuilder(nextLink, p.gs.Adapter()) p.builder = users.NewItemCalendarsItemEventsRequestBuilder(nextLink, p.gs.Adapter())
} }
// non delta pagers don't need reset // non delta pagers don't need reset
func (p *eventPager) reset(context.Context) {} func (p *eventIDPager) reset(context.Context) {}
func (p *eventPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) { func (p *eventIDPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
return toValues[models.Eventable](pl) return toValues[models.Eventable](pl)
} }
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// delta item pager // delta item ID pager
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
var _ itemPager = &eventDeltaPager{} var _ itemIDPager = &eventDeltaIDPager{}
type eventDeltaPager struct { type eventDeltaIDPager struct {
gs graph.Servicer gs graph.Servicer
userID string userID string
containerID string containerID string
@ -160,13 +238,16 @@ type eventDeltaPager struct {
options *users.ItemCalendarsItemEventsDeltaRequestBuilderGetRequestConfiguration options *users.ItemCalendarsItemEventsDeltaRequestBuilderGetRequestConfiguration
} }
func (c Events) NewEventDeltaPager( func (c Events) NewEventDeltaIDsPager(
ctx context.Context, ctx context.Context,
userID, containerID, oldDelta string, userID, containerID, oldDelta string,
immutableIDs bool, immutableIDs bool,
) (itemPager, error) { ) (itemIDPager, error) {
options := &users.ItemCalendarsItemEventsDeltaRequestBuilderGetRequestConfiguration{ options := &users.ItemCalendarsItemEventsDeltaRequestBuilderGetRequestConfiguration{
Headers: newPreferHeaders(preferPageSize(maxDeltaPageSize), preferImmutableIDs(immutableIDs)), Headers: newPreferHeaders(preferPageSize(maxDeltaPageSize), preferImmutableIDs(immutableIDs)),
QueryParameters: &users.ItemCalendarsItemEventsDeltaRequestBuilderGetQueryParameters{
Top: ptr.To[int32](maxDeltaPageSize),
},
} }
var builder *users.ItemCalendarsItemEventsDeltaRequestBuilder var builder *users.ItemCalendarsItemEventsDeltaRequestBuilder
@ -177,7 +258,7 @@ func (c Events) NewEventDeltaPager(
builder = users.NewItemCalendarsItemEventsDeltaRequestBuilder(oldDelta, c.Stable.Adapter()) builder = users.NewItemCalendarsItemEventsDeltaRequestBuilder(oldDelta, c.Stable.Adapter())
} }
return &eventDeltaPager{c.Stable, userID, containerID, builder, options}, nil return &eventDeltaIDPager{c.Stable, userID, containerID, builder, options}, nil
} }
func getEventDeltaBuilder( func getEventDeltaBuilder(
@ -200,7 +281,7 @@ func getEventDeltaBuilder(
return builder return builder
} }
func (p *eventDeltaPager) getPage(ctx context.Context) (DeltaPageLinker, error) { func (p *eventDeltaIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
resp, err := p.builder.Get(ctx, p.options) resp, err := p.builder.Get(ctx, p.options)
if err != nil { if err != nil {
return nil, graph.Stack(ctx, err) return nil, graph.Stack(ctx, err)
@ -209,15 +290,15 @@ func (p *eventDeltaPager) getPage(ctx context.Context) (DeltaPageLinker, error)
return resp, nil return resp, nil
} }
func (p *eventDeltaPager) setNext(nextLink string) { func (p *eventDeltaIDPager) setNext(nextLink string) {
p.builder = users.NewItemCalendarsItemEventsDeltaRequestBuilder(nextLink, p.gs.Adapter()) p.builder = users.NewItemCalendarsItemEventsDeltaRequestBuilder(nextLink, p.gs.Adapter())
} }
func (p *eventDeltaPager) reset(ctx context.Context) { func (p *eventDeltaIDPager) reset(ctx context.Context) {
p.builder = getEventDeltaBuilder(ctx, p.gs, p.userID, p.containerID, p.options) p.builder = getEventDeltaBuilder(ctx, p.gs, p.userID, p.containerID, p.options)
} }
func (p *eventDeltaPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) { func (p *eventDeltaIDPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
return toValues[models.Eventable](pl) return toValues[models.Eventable](pl)
} }
@ -229,12 +310,12 @@ func (c Events) GetAddedAndRemovedItemIDs(
) ([]string, []string, DeltaUpdate, error) { ) ([]string, []string, DeltaUpdate, error) {
ctx = clues.Add(ctx, "container_id", containerID) ctx = clues.Add(ctx, "container_id", containerID)
pager, err := c.NewEventPager(ctx, userID, containerID, immutableIDs) pager, err := c.NewEventIDsPager(ctx, userID, containerID, immutableIDs)
if err != nil { if err != nil {
return nil, nil, DeltaUpdate{}, graph.Wrap(ctx, err, "creating non-delta pager") return nil, nil, DeltaUpdate{}, graph.Wrap(ctx, err, "creating non-delta pager")
} }
deltaPager, err := c.NewEventDeltaPager(ctx, userID, containerID, oldDelta, immutableIDs) deltaPager, err := c.NewEventDeltaIDsPager(ctx, userID, containerID, oldDelta, immutableIDs)
if err != nil { if err != nil {
return nil, nil, DeltaUpdate{}, graph.Wrap(ctx, err, "creating delta pager") return nil, nil, DeltaUpdate{}, graph.Wrap(ctx, err, "creating delta pager")
} }

View File

@ -0,0 +1,73 @@
package api_test
import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// EventsPagerIntgSuite runs integration tests against the events pagers.
type EventsPagerIntgSuite struct {
	tester.Suite
	// cts holds the shared api client and test user id.
	cts clientTesterSetup
}
// TestEventsPagerIntgSuite runs the suite; it requires m365 account
// credentials in the environment.
func TestEventsPagerIntgSuite(t *testing.T) {
	suite.Run(t, &EventsPagerIntgSuite{
		Suite: tester.NewIntegrationSuite(
			t,
			[][]string{tester.M365AcctCredEnvs}),
	})
}
// SetupSuite builds the shared client/user setup once for all tests.
func (suite *EventsPagerIntgSuite) SetupSuite() {
	suite.cts = newClientTesterSetup(suite.T())
}
// TestGetItemsInContainerByCollisionKey compares the map returned by
// GetItemsInContainerByCollisionKey against collision keys built
// directly from the events enumerated in the default calendar.
func (suite *EventsPagerIntgSuite) TestGetItemsInContainerByCollisionKey() {
	t := suite.T()
	ac := suite.cts.ac.Events()

	ctx, flush := tester.NewContext(t)
	defer flush()

	container, err := ac.GetContainerByID(ctx, suite.cts.userID, "calendar")
	require.NoError(t, err, clues.ToCore(err))

	resp, err := ac.Stable.
		Client().
		Users().
		ByUserId(suite.cts.userID).
		Calendars().
		ByCalendarId(ptr.Val(container.GetId())).
		Events().
		Get(ctx, nil)
	require.NoError(t, err, clues.ToCore(err))

	events := resp.GetValue()
	wantKeys := make([]string, 0, len(events))

	for _, ev := range events {
		wantKeys = append(wantKeys, api.EventCollisionKey(ev))
	}

	results, err := ac.GetItemsInContainerByCollisionKey(ctx, suite.cts.userID, "calendar")
	require.NoError(t, err, clues.ToCore(err))
	require.Less(t, 0, len(results), "requires at least one result")

	for k, v := range results {
		assert.NotEmpty(t, k, "all keys should be populated")
		assert.NotEmpty(t, v, "all values should be populated")
	}

	for _, key := range wantKeys {
		_, ok := results[key]
		assert.Truef(t, ok, "expected results to contain collision key: %s", key)
	}
}

View File

@ -11,9 +11,12 @@ import (
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/common/ptr"
exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock" exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control/testdata"
) )
type EventsAPIUnitSuite struct { type EventsAPIUnitSuite struct {
@ -212,3 +215,70 @@ func (suite *EventsAPIUnitSuite) TestBytesToEventable() {
}) })
} }
} }
// EventsAPIIntgSuite runs integration tests against the events api.
type EventsAPIIntgSuite struct {
	tester.Suite
	// credentials holds the m365 account config read from the env.
	credentials account.M365Config
	// ac is the api client built from credentials in SetupSuite.
	ac Client
}
// TestEventsAPIntgSuite runs the suite; it requires m365 account
// credentials in the environment.
// NOTE(review): the name looks like a typo for TestEventsAPIIntgSuite;
// left unchanged since go test discovers it either way.
func TestEventsAPIntgSuite(t *testing.T) {
	suite.Run(t, &EventsAPIIntgSuite{
		Suite: tester.NewIntegrationSuite(
			t,
			[][]string{tester.M365AcctCredEnvs}),
	})
}
// SetupSuite reads m365 credentials from the environment and builds the
// api client shared by all tests in the suite.
func (suite *EventsAPIIntgSuite) SetupSuite() {
	t := suite.T()

	a := tester.NewM365Account(t)
	m365, err := a.M365Config()
	require.NoError(t, err, clues.ToCore(err))

	suite.credentials = m365
	suite.ac, err = NewClient(m365)
	require.NoError(t, err, clues.ToCore(err))
}
// TestRestoreLargeAttachment creates a calendar and an event in it, then
// verifies that a large attachment can be posted to the event through
// the upload-session api.
func (suite *EventsAPIIntgSuite) TestRestoreLargeAttachment() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	// fix: use the already-bound t instead of calling suite.T() a second
	// time, consistent with the rest of this test.
	userID := tester.M365UserID(t)

	folderName := testdata.DefaultRestoreConfig("eventlargeattachmenttest").Location
	evts := suite.ac.Events()

	calendar, err := evts.CreateContainer(ctx, userID, folderName, "")
	require.NoError(t, err, clues.ToCore(err))

	// build a half-hour event starting tomorrow, in UTC.
	tomorrow := time.Now().Add(24 * time.Hour)
	evt := models.NewEvent()
	sdtz := models.NewDateTimeTimeZone()
	edtz := models.NewDateTimeTimeZone()

	evt.SetSubject(ptr.To("Event with attachment"))
	sdtz.SetDateTime(ptr.To(dttm.Format(tomorrow)))
	sdtz.SetTimeZone(ptr.To("UTC"))
	edtz.SetDateTime(ptr.To(dttm.Format(tomorrow.Add(30 * time.Minute))))
	edtz.SetTimeZone(ptr.To("UTC"))
	evt.SetStart(sdtz)
	evt.SetEnd(edtz)

	item, err := evts.PostItem(ctx, userID, ptr.Val(calendar.GetId()), evt)
	require.NoError(t, err, clues.ToCore(err))

	id, err := evts.PostLargeAttachment(
		ctx,
		userID,
		ptr.Val(calendar.GetId()),
		ptr.Val(item.GetId()),
		"raboganm",
		[]byte("mangobar"),
	)
	require.NoError(t, err, clues.ToCore(err))
	require.NotEmpty(t, id, "empty id for large attachment")
}

View File

@ -0,0 +1,34 @@
package api_test
import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// clientTesterSetup bundles the api client and test user id shared by
// pager integration suites.
type clientTesterSetup struct {
	ac api.Client
	userID string
}
// newClientTesterSetup builds an api client and test user id from the
// environment-provided m365 account credentials, failing the test on
// any setup error.
func newClientTesterSetup(t *testing.T) clientTesterSetup {
	ctx, flush := tester.NewContext(t)
	defer flush()

	var (
		cts  clientTesterSetup
		acct = tester.NewM365Account(t)
	)

	creds, err := acct.M365Config()
	require.NoError(t, err, clues.ToCore(err))

	cts.ac, err = api.NewClient(creds)
	require.NoError(t, err, clues.ToCore(err))

	cts.userID = tester.GetM365UserID(ctx)

	return cts
}

View File

@ -61,27 +61,48 @@ func (e EmptyDeltaLinker[T]) GetValue() []T {
} }
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// generic handler for paging item ids in a container // generic handler for non-delta item paging in a container
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
type itemPager interface { type itemPager[T any] interface {
// getPage get a page with the specified options from graph // getPage get a page with the specified options from graph
getPage(context.Context) (DeltaPageLinker, error) getPage(context.Context) (PageLinkValuer[T], error)
// setNext is used to pass in the next url got from graph // setNext is used to pass in the next url got from graph
setNext(string) setNext(string)
// reset is used to clear delta url in delta pagers. When
// reset is called, we reset the state(delta url) that we
// currently have and start a new delta query without the token.
reset(context.Context)
// valuesIn gets us the values in a page
valuesIn(PageLinker) ([]getIDAndAddtler, error)
} }
type getIDAndAddtler interface { func enumerateItems[T any](
GetId() *string ctx context.Context,
GetAdditionalData() map[string]any pager itemPager[T],
) ([]T, error) {
var (
result = make([]T, 0)
// stubbed initial value to ensure we enter the loop.
nextLink = "do-while"
)
for len(nextLink) > 0 {
// get the next page of data, check for standard errors
resp, err := pager.getPage(ctx)
if err != nil {
return nil, graph.Stack(ctx, err)
}
result = append(result, resp.GetValue()...)
nextLink = NextLink(resp)
pager.setNext(nextLink)
}
logger.Ctx(ctx).Infow("completed enumeration", "count", len(result))
return result, nil
} }
// ---------------------------------------------------------------------------
// generic handler for delta-based ittem paging in a container
// ---------------------------------------------------------------------------
// uses a models interface compliant with { GetValues() []T } // uses a models interface compliant with { GetValues() []T }
// to transform its results into a slice of getIDer interfaces. // to transform its results into a slice of getIDer interfaces.
// Generics used here to handle the variation of msoft interfaces // Generics used here to handle the variation of msoft interfaces
@ -110,16 +131,34 @@ func toValues[T any](a any) ([]getIDAndAddtler, error) {
return r, nil return r, nil
} }
// itemIDPager implementations page through the item ids within a single
// container.  Delta-capable pagers track a delta token that reset clears.
type itemIDPager interface {
	// getPage get a page with the specified options from graph
	getPage(context.Context) (DeltaPageLinker, error)
	// setNext is used to pass in the next url got from graph
	setNext(string)
	// reset is used to clear delta url in delta pagers. When
	// reset is called, we reset the state(delta url) that we
	// currently have and start a new delta query without the token.
	reset(context.Context)
	// valuesIn gets us the values in a page
	valuesIn(PageLinker) ([]getIDAndAddtler, error)
}
// getIDAndAddtler describes graph models that expose an item id and an
// additional-data map.
type getIDAndAddtler interface {
	GetId() *string
	GetAdditionalData() map[string]any
}
func getAddedAndRemovedItemIDs( func getAddedAndRemovedItemIDs(
ctx context.Context, ctx context.Context,
service graph.Servicer, service graph.Servicer,
pager itemPager, pager itemIDPager,
deltaPager itemPager, deltaPager itemIDPager,
oldDelta string, oldDelta string,
canMakeDeltaQueries bool, canMakeDeltaQueries bool,
) ([]string, []string, DeltaUpdate, error) { ) ([]string, []string, DeltaUpdate, error) {
var ( var (
pgr itemPager pgr itemIDPager
resetDelta bool resetDelta bool
) )
@ -161,17 +200,16 @@ func getAddedAndRemovedItemIDs(
// generic controller for retrieving all item ids in a container. // generic controller for retrieving all item ids in a container.
func getItemsAddedAndRemovedFromContainer( func getItemsAddedAndRemovedFromContainer(
ctx context.Context, ctx context.Context,
pager itemPager, pager itemIDPager,
) ([]string, []string, string, error) { ) ([]string, []string, string, error) {
var ( var (
addedIDs = []string{} addedIDs = []string{}
removedIDs = []string{} removedIDs = []string{}
deltaURL string deltaURL string
itemCount int
page int
) )
itemCount := 0
page := 0
for { for {
// get the next page of data, check for standard errors // get the next page of data, check for standard errors
resp, err := pager.getPage(ctx) resp, err := pager.getPage(ctx)

View File

@ -5,6 +5,7 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors" "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -19,6 +20,8 @@ import (
// mock impls & stubs // mock impls & stubs
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// next and delta links
type nextLink struct { type nextLink struct {
nextLink *string nextLink *string
} }
@ -36,6 +39,8 @@ func (l deltaNextLink) GetOdataDeltaLink() *string {
return l.deltaLink return l.deltaLink
} }
// mock values
type testPagerValue struct { type testPagerValue struct {
id string id string
removed bool removed bool
@ -50,7 +55,11 @@ func (v testPagerValue) GetAdditionalData() map[string]any {
return map[string]any{} return map[string]any{}
} }
type testPage struct{} // mock page
type testPage struct {
values []any
}
func (p testPage) GetOdataNextLink() *string { func (p testPage) GetOdataNextLink() *string {
// no next, just one page // no next, just one page
@ -62,9 +71,33 @@ func (p testPage) GetOdataDeltaLink() *string {
return ptr.To("") return ptr.To("")
} }
var _ itemPager = &testPager{} func (p testPage) GetValue() []any {
return p.values
}
// mock item pager
var _ itemPager[any] = &testPager{}
type testPager struct { type testPager struct {
t *testing.T
pager testPage
pageErr error
}
//lint:ignore U1000 False Positive
func (p *testPager) getPage(ctx context.Context) (PageLinkValuer[any], error) {
return p.pager, p.pageErr
}
//lint:ignore U1000 False Positive
func (p *testPager) setNext(nextLink string) {}
// mock id pager
var _ itemIDPager = &testIDsPager{}
type testIDsPager struct {
t *testing.T t *testing.T
added []string added []string
removed []string removed []string
@ -72,7 +105,7 @@ type testPager struct {
needsReset bool needsReset bool
} }
func (p *testPager) getPage(ctx context.Context) (DeltaPageLinker, error) { func (p *testIDsPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
if p.errorCode != "" { if p.errorCode != "" {
ierr := odataerrors.NewMainError() ierr := odataerrors.NewMainError()
ierr.SetCode(&p.errorCode) ierr.SetCode(&p.errorCode)
@ -85,8 +118,8 @@ func (p *testPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
return testPage{}, nil return testPage{}, nil
} }
func (p *testPager) setNext(string) {} func (p *testIDsPager) setNext(string) {}
func (p *testPager) reset(context.Context) { func (p *testIDsPager) reset(context.Context) {
if !p.needsReset { if !p.needsReset {
require.Fail(p.t, "reset should not be called") require.Fail(p.t, "reset should not be called")
} }
@ -95,7 +128,7 @@ func (p *testPager) reset(context.Context) {
p.errorCode = "" p.errorCode = ""
} }
func (p *testPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) { func (p *testIDsPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
items := []getIDAndAddtler{} items := []getIDAndAddtler{}
for _, id := range p.added { for _, id := range p.added {
@ -121,11 +154,69 @@ func TestItemPagerUnitSuite(t *testing.T) {
suite.Run(t, &ItemPagerUnitSuite{Suite: tester.NewUnitSuite(t)}) suite.Run(t, &ItemPagerUnitSuite{Suite: tester.NewUnitSuite(t)})
} }
// TestEnumerateItems checks that enumerateItems aggregates values from
// pager pages and propagates pager errors.
func (suite *ItemPagerUnitSuite) TestEnumerateItems() {
	table := []struct {
		name      string
		getPager  func(*testing.T, context.Context) itemPager[any]
		expect    []any
		expectErr require.ErrorAssertionFunc
	}{
		{
			name: "happy path",
			getPager: func(
				t *testing.T,
				ctx context.Context,
			) itemPager[any] {
				return &testPager{
					t:     t,
					pager: testPage{[]any{"foo", "bar"}},
				}
			},
			expect:    []any{"foo", "bar"},
			expectErr: require.NoError,
		},
		{
			name: "next page err",
			getPager: func(
				t *testing.T,
				ctx context.Context,
			) itemPager[any] {
				return &testPager{
					t:       t,
					pageErr: assert.AnError,
				}
			},
			expect:    nil,
			expectErr: require.Error,
		},
	}

	for _, tc := range table {
		suite.Run(tc.name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			result, err := enumerateItems(ctx, tc.getPager(t, ctx))
			tc.expectErr(t, err, clues.ToCore(err))
			require.EqualValues(t, tc.expect, result)
		})
	}
}
func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() { func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
tests := []struct { tests := []struct {
name string name string
pagerGetter func(context.Context, graph.Servicer, string, string, bool) (itemPager, error) pagerGetter func(*testing.T, context.Context, graph.Servicer, string, string, bool) (itemIDPager, error)
deltaPagerGetter func(context.Context, graph.Servicer, string, string, string, bool) (itemPager, error) deltaPagerGetter func(
*testing.T,
context.Context,
graph.Servicer,
string, string, string,
bool,
) (itemIDPager, error)
added []string added []string
removed []string removed []string
deltaUpdate DeltaUpdate deltaUpdate DeltaUpdate
@ -135,25 +226,27 @@ func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
{ {
name: "no prev delta", name: "no prev delta",
pagerGetter: func( pagerGetter: func(
t *testing.T,
ctx context.Context, ctx context.Context,
gs graph.Servicer, gs graph.Servicer,
user string, user string,
directory string, directory string,
immutableIDs bool, immutableIDs bool,
) (itemPager, error) { ) (itemIDPager, error) {
// this should not be called // this should not be called
return nil, assert.AnError return nil, assert.AnError
}, },
deltaPagerGetter: func( deltaPagerGetter: func(
t *testing.T,
ctx context.Context, ctx context.Context,
gs graph.Servicer, gs graph.Servicer,
user string, user string,
directory string, directory string,
delta string, delta string,
immutableIDs bool, immutableIDs bool,
) (itemPager, error) { ) (itemIDPager, error) {
return &testPager{ return &testIDsPager{
t: suite.T(), t: t,
added: []string{"uno", "dos"}, added: []string{"uno", "dos"},
removed: []string{"tres", "quatro"}, removed: []string{"tres", "quatro"},
}, nil }, nil
@ -166,25 +259,27 @@ func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
{ {
name: "with prev delta", name: "with prev delta",
pagerGetter: func( pagerGetter: func(
t *testing.T,
ctx context.Context, ctx context.Context,
gs graph.Servicer, gs graph.Servicer,
user string, user string,
directory string, directory string,
immutableIDs bool, immutableIDs bool,
) (itemPager, error) { ) (itemIDPager, error) {
// this should not be called // this should not be called
return nil, assert.AnError return nil, assert.AnError
}, },
deltaPagerGetter: func( deltaPagerGetter: func(
t *testing.T,
ctx context.Context, ctx context.Context,
gs graph.Servicer, gs graph.Servicer,
user string, user string,
directory string, directory string,
delta string, delta string,
immutableIDs bool, immutableIDs bool,
) (itemPager, error) { ) (itemIDPager, error) {
return &testPager{ return &testIDsPager{
t: suite.T(), t: t,
added: []string{"uno", "dos"}, added: []string{"uno", "dos"},
removed: []string{"tres", "quatro"}, removed: []string{"tres", "quatro"},
}, nil }, nil
@ -198,25 +293,27 @@ func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
{ {
name: "delta expired", name: "delta expired",
pagerGetter: func( pagerGetter: func(
t *testing.T,
ctx context.Context, ctx context.Context,
gs graph.Servicer, gs graph.Servicer,
user string, user string,
directory string, directory string,
immutableIDs bool, immutableIDs bool,
) (itemPager, error) { ) (itemIDPager, error) {
// this should not be called // this should not be called
return nil, assert.AnError return nil, assert.AnError
}, },
deltaPagerGetter: func( deltaPagerGetter: func(
t *testing.T,
ctx context.Context, ctx context.Context,
gs graph.Servicer, gs graph.Servicer,
user string, user string,
directory string, directory string,
delta string, delta string,
immutableIDs bool, immutableIDs bool,
) (itemPager, error) { ) (itemIDPager, error) {
return &testPager{ return &testIDsPager{
t: suite.T(), t: t,
added: []string{"uno", "dos"}, added: []string{"uno", "dos"},
removed: []string{"tres", "quatro"}, removed: []string{"tres", "quatro"},
errorCode: "SyncStateNotFound", errorCode: "SyncStateNotFound",
@ -232,27 +329,29 @@ func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
{ {
name: "quota exceeded", name: "quota exceeded",
pagerGetter: func( pagerGetter: func(
t *testing.T,
ctx context.Context, ctx context.Context,
gs graph.Servicer, gs graph.Servicer,
user string, user string,
directory string, directory string,
immutableIDs bool, immutableIDs bool,
) (itemPager, error) { ) (itemIDPager, error) {
return &testPager{ return &testIDsPager{
t: suite.T(), t: t,
added: []string{"uno", "dos"}, added: []string{"uno", "dos"},
removed: []string{"tres", "quatro"}, removed: []string{"tres", "quatro"},
}, nil }, nil
}, },
deltaPagerGetter: func( deltaPagerGetter: func(
t *testing.T,
ctx context.Context, ctx context.Context,
gs graph.Servicer, gs graph.Servicer,
user string, user string,
directory string, directory string,
delta string, delta string,
immutableIDs bool, immutableIDs bool,
) (itemPager, error) { ) (itemIDPager, error) {
return &testPager{errorCode: "ErrorQuotaExceeded"}, nil return &testIDsPager{errorCode: "ErrorQuotaExceeded"}, nil
}, },
added: []string{"uno", "dos"}, added: []string{"uno", "dos"},
removed: []string{"tres", "quatro"}, removed: []string{"tres", "quatro"},
@ -268,8 +367,8 @@ func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
pager, _ := tt.pagerGetter(ctx, graph.Service{}, "user", "directory", false) pager, _ := tt.pagerGetter(t, ctx, graph.Service{}, "user", "directory", false)
deltaPager, _ := tt.deltaPagerGetter(ctx, graph.Service{}, "user", "directory", tt.delta, false) deltaPager, _ := tt.deltaPagerGetter(t, ctx, graph.Service{}, "user", "directory", tt.delta, false)
added, removed, deltaUpdate, err := getAddedAndRemovedItemIDs( added, removed, deltaUpdate, err := getAddedAndRemovedItemIDs(
ctx, ctx,

View File

@ -63,6 +63,23 @@ func (c Mail) CreateMailFolder(
return mdl, nil return mdl, nil
} }
// DeleteMailFolder removes the mail folder with the given id from the
// user's mailbox.
func (c Mail) DeleteMailFolder(
	ctx context.Context,
	userID, id string,
) error {
	if err := c.Stable.Client().
		Users().
		ByUserId(userID).
		MailFolders().
		ByMailFolderId(id).
		Delete(ctx, nil); err != nil {
		return graph.Wrap(ctx, err, "deleting mail folder")
	}

	return nil
}
func (c Mail) CreateContainer( func (c Mail) CreateContainer(
ctx context.Context, ctx context.Context,
userID, containerName, parentContainerID string, userID, containerName, parentContainerID string,
@ -407,14 +424,9 @@ func (c Mail) PostSmallAttachment(
func (c Mail) PostLargeAttachment( func (c Mail) PostLargeAttachment(
ctx context.Context, ctx context.Context,
userID, containerID, parentItemID, itemName string, userID, containerID, parentItemID, itemName string,
size int64, content []byte,
body models.Attachmentable, ) (string, error) {
) (models.UploadSessionable, error) { size := int64(len(content))
bs, err := GetAttachmentContent(body)
if err != nil {
return nil, clues.Wrap(err, "serializing attachment content").WithClues(ctx)
}
session := users.NewItemMailFoldersItemMessagesItemAttachmentsCreateUploadSessionPostRequestBody() session := users.NewItemMailFoldersItemMessagesItemAttachmentsCreateUploadSessionPostRequestBody()
session.SetAttachmentItem(makeSessionAttachment(itemName, size)) session.SetAttachmentItem(makeSessionAttachment(itemName, size))
@ -430,19 +442,19 @@ func (c Mail) PostLargeAttachment(
CreateUploadSession(). CreateUploadSession().
Post(ctx, session, nil) Post(ctx, session, nil)
if err != nil { if err != nil {
return nil, graph.Wrap(ctx, err, "uploading large mail attachment") return "", graph.Wrap(ctx, err, "uploading large mail attachment")
} }
url := ptr.Val(us.GetUploadUrl()) url := ptr.Val(us.GetUploadUrl())
w := graph.NewLargeItemWriter(parentItemID, url, size) w := graph.NewLargeItemWriter(parentItemID, url, size)
copyBuffer := make([]byte, graph.AttachmentChunkSize) copyBuffer := make([]byte, graph.AttachmentChunkSize)
_, err = io.CopyBuffer(w, bytes.NewReader(bs), copyBuffer) _, err = io.CopyBuffer(w, bytes.NewReader(content), copyBuffer)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "buffering large attachment content").WithClues(ctx) return "", clues.Wrap(err, "buffering large attachment content").WithClues(ctx)
} }
return us, nil return w.ID, nil
} }
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -528,3 +540,17 @@ func UnwrapEmailAddress(contact models.Recipientable) string {
return ptr.Val(contact.GetEmailAddress().GetAddress()) return ptr.Val(contact.GetEmailAddress().GetAddress())
} }
// mailCollisionKeyProps lists the message properties that need to be
// selected when fetching items for collision key construction.
func mailCollisionKeyProps() []string {
	return idAnd("subject")
}
// MailCollisionKey constructs a key from the messageable's subject.
// collision keys are used to identify duplicate item conflicts for handling advanced restoration config.
// NOTE(review): an earlier comment also listed sender and recipients
// (to, cc, bcc); the implementation only uses the subject.
func MailCollisionKey(item models.Messageable) string {
	// nil items produce an empty key rather than panicking.
	if item == nil {
		return ""
	}
	return ptr.Val(item.GetSubject())
}

View File

@ -121,22 +121,76 @@ func (c Mail) EnumerateContainers(
// item pager // item pager
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
var _ itemPager = &mailPager{} var _ itemPager[models.Messageable] = &mailPageCtrl{}
type mailPager struct { type mailPageCtrl struct {
gs graph.Servicer gs graph.Servicer
builder *users.ItemMailFoldersItemMessagesRequestBuilder builder *users.ItemMailFoldersItemMessagesRequestBuilder
options *users.ItemMailFoldersItemMessagesRequestBuilderGetRequestConfiguration options *users.ItemMailFoldersItemMessagesRequestBuilderGetRequestConfiguration
} }
func (c Mail) NewMailPager( func (c Mail) NewMailPager(
userID, containerID string,
selectProps ...string,
) itemPager[models.Messageable] {
options := &users.ItemMailFoldersItemMessagesRequestBuilderGetRequestConfiguration{
Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize)),
QueryParameters: &users.ItemMailFoldersItemMessagesRequestBuilderGetQueryParameters{
Top: ptr.To[int32](maxNonDeltaPageSize),
},
}
if len(selectProps) > 0 {
options.QueryParameters.Select = selectProps
}
builder := c.Stable.
Client().
Users().
ByUserId(userID).
MailFolders().
ByMailFolderId(containerID).
Messages()
return &mailPageCtrl{c.Stable, builder, options}
}
//lint:ignore U1000 False Positive
func (p *mailPageCtrl) getPage(ctx context.Context) (PageLinkValuer[models.Messageable], error) {
page, err := p.builder.Get(ctx, p.options)
if err != nil {
return nil, graph.Stack(ctx, err)
}
return EmptyDeltaLinker[models.Messageable]{PageLinkValuer: page}, nil
}
//lint:ignore U1000 False Positive
func (p *mailPageCtrl) setNext(nextLink string) {
p.builder = users.NewItemMailFoldersItemMessagesRequestBuilder(nextLink, p.gs.Adapter())
}
// ---------------------------------------------------------------------------
// item ID pager
// ---------------------------------------------------------------------------
var _ itemIDPager = &mailIDPager{}
type mailIDPager struct {
gs graph.Servicer
builder *users.ItemMailFoldersItemMessagesRequestBuilder
options *users.ItemMailFoldersItemMessagesRequestBuilderGetRequestConfiguration
}
func (c Mail) NewMailIDsPager(
ctx context.Context, ctx context.Context,
userID, containerID string, userID, containerID string,
immutableIDs bool, immutableIDs bool,
) itemPager { ) itemIDPager {
config := &users.ItemMailFoldersItemMessagesRequestBuilderGetRequestConfiguration{ config := &users.ItemMailFoldersItemMessagesRequestBuilderGetRequestConfiguration{
QueryParameters: &users.ItemMailFoldersItemMessagesRequestBuilderGetQueryParameters{ QueryParameters: &users.ItemMailFoldersItemMessagesRequestBuilderGetQueryParameters{
Select: idAnd("isRead"), Select: idAnd("isRead"),
Top: ptr.To[int32](maxNonDeltaPageSize),
}, },
Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)), Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)),
} }
@ -149,10 +203,10 @@ func (c Mail) NewMailPager(
ByMailFolderId(containerID). ByMailFolderId(containerID).
Messages() Messages()
return &mailPager{c.Stable, builder, config} return &mailIDPager{c.Stable, builder, config}
} }
func (p *mailPager) getPage(ctx context.Context) (DeltaPageLinker, error) { func (p *mailIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
page, err := p.builder.Get(ctx, p.options) page, err := p.builder.Get(ctx, p.options)
if err != nil { if err != nil {
return nil, graph.Stack(ctx, err) return nil, graph.Stack(ctx, err)
@ -161,24 +215,45 @@ func (p *mailPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
return EmptyDeltaLinker[models.Messageable]{PageLinkValuer: page}, nil return EmptyDeltaLinker[models.Messageable]{PageLinkValuer: page}, nil
} }
func (p *mailPager) setNext(nextLink string) { func (p *mailIDPager) setNext(nextLink string) {
p.builder = users.NewItemMailFoldersItemMessagesRequestBuilder(nextLink, p.gs.Adapter()) p.builder = users.NewItemMailFoldersItemMessagesRequestBuilder(nextLink, p.gs.Adapter())
} }
// non delta pagers don't have reset // non delta pagers don't have reset
func (p *mailPager) reset(context.Context) {} func (p *mailIDPager) reset(context.Context) {}
func (p *mailPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) { func (p *mailIDPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
return toValues[models.Messageable](pl) return toValues[models.Messageable](pl)
} }
func (c Mail) GetItemsInContainerByCollisionKey(
ctx context.Context,
userID, containerID string,
) (map[string]string, error) {
ctx = clues.Add(ctx, "container_id", containerID)
pager := c.NewMailPager(userID, containerID, mailCollisionKeyProps()...)
items, err := enumerateItems(ctx, pager)
if err != nil {
return nil, graph.Wrap(ctx, err, "enumerating mail")
}
m := map[string]string{}
for _, item := range items {
m[MailCollisionKey(item)] = ptr.Val(item.GetId())
}
return m, nil
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// delta item pager // delta item ID pager
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
var _ itemPager = &mailDeltaPager{} var _ itemIDPager = &mailDeltaIDPager{}
type mailDeltaPager struct { type mailDeltaIDPager struct {
gs graph.Servicer gs graph.Servicer
userID string userID string
containerID string containerID string
@ -204,14 +279,15 @@ func getMailDeltaBuilder(
return builder return builder
} }
func (c Mail) NewMailDeltaPager( func (c Mail) NewMailDeltaIDsPager(
ctx context.Context, ctx context.Context,
userID, containerID, oldDelta string, userID, containerID, oldDelta string,
immutableIDs bool, immutableIDs bool,
) itemPager { ) itemIDPager {
config := &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetRequestConfiguration{ config := &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetRequestConfiguration{
QueryParameters: &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetQueryParameters{ QueryParameters: &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetQueryParameters{
Select: idAnd("isRead"), Select: idAnd("isRead"),
Top: ptr.To[int32](maxDeltaPageSize),
}, },
Headers: newPreferHeaders(preferPageSize(maxDeltaPageSize), preferImmutableIDs(immutableIDs)), Headers: newPreferHeaders(preferPageSize(maxDeltaPageSize), preferImmutableIDs(immutableIDs)),
} }
@ -224,10 +300,10 @@ func (c Mail) NewMailDeltaPager(
builder = getMailDeltaBuilder(ctx, c.Stable, userID, containerID, config) builder = getMailDeltaBuilder(ctx, c.Stable, userID, containerID, config)
} }
return &mailDeltaPager{c.Stable, userID, containerID, builder, config} return &mailDeltaIDPager{c.Stable, userID, containerID, builder, config}
} }
func (p *mailDeltaPager) getPage(ctx context.Context) (DeltaPageLinker, error) { func (p *mailDeltaIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
page, err := p.builder.Get(ctx, p.options) page, err := p.builder.Get(ctx, p.options)
if err != nil { if err != nil {
return nil, graph.Stack(ctx, err) return nil, graph.Stack(ctx, err)
@ -236,11 +312,11 @@ func (p *mailDeltaPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
return page, nil return page, nil
} }
func (p *mailDeltaPager) setNext(nextLink string) { func (p *mailDeltaIDPager) setNext(nextLink string) {
p.builder = users.NewItemMailFoldersItemMessagesDeltaRequestBuilder(nextLink, p.gs.Adapter()) p.builder = users.NewItemMailFoldersItemMessagesDeltaRequestBuilder(nextLink, p.gs.Adapter())
} }
func (p *mailDeltaPager) reset(ctx context.Context) { func (p *mailDeltaIDPager) reset(ctx context.Context) {
p.builder = p.gs. p.builder = p.gs.
Client(). Client().
Users(). Users().
@ -251,7 +327,7 @@ func (p *mailDeltaPager) reset(ctx context.Context) {
Delta() Delta()
} }
func (p *mailDeltaPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) { func (p *mailDeltaIDPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
return toValues[models.Messageable](pl) return toValues[models.Messageable](pl)
} }
@ -266,8 +342,8 @@ func (c Mail) GetAddedAndRemovedItemIDs(
"category", selectors.ExchangeMail, "category", selectors.ExchangeMail,
"container_id", containerID) "container_id", containerID)
pager := c.NewMailPager(ctx, userID, containerID, immutableIDs) pager := c.NewMailIDsPager(ctx, userID, containerID, immutableIDs)
deltaPager := c.NewMailDeltaPager(ctx, userID, containerID, oldDelta, immutableIDs) deltaPager := c.NewMailDeltaIDsPager(ctx, userID, containerID, oldDelta, immutableIDs)
return getAddedAndRemovedItemIDs(ctx, c.Stable, pager, deltaPager, oldDelta, canMakeDeltaQueries) return getAddedAndRemovedItemIDs(ctx, c.Stable, pager, deltaPager, oldDelta, canMakeDeltaQueries)
} }

View File

@ -0,0 +1,73 @@
package api_test
import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type MailPagerIntgSuite struct {
tester.Suite
cts clientTesterSetup
}
func TestMailPagerIntgSuite(t *testing.T) {
suite.Run(t, &MailPagerIntgSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tester.M365AcctCredEnvs}),
})
}
func (suite *MailPagerIntgSuite) SetupSuite() {
suite.cts = newClientTesterSetup(suite.T())
}
func (suite *MailPagerIntgSuite) TestGetItemsInContainerByCollisionKey() {
t := suite.T()
ac := suite.cts.ac.Mail()
ctx, flush := tester.NewContext(t)
defer flush()
container, err := ac.GetContainerByID(ctx, suite.cts.userID, "inbox")
require.NoError(t, err, clues.ToCore(err))
msgs, err := ac.Stable.
Client().
Users().
ByUserId(suite.cts.userID).
MailFolders().
ByMailFolderId(ptr.Val(container.GetId())).
Messages().
Get(ctx, nil)
require.NoError(t, err, clues.ToCore(err))
ms := msgs.GetValue()
expect := make([]string, 0, len(ms))
for _, m := range ms {
expect = append(expect, api.MailCollisionKey(m))
}
results, err := ac.GetItemsInContainerByCollisionKey(ctx, suite.cts.userID, "inbox")
require.NoError(t, err, clues.ToCore(err))
require.Less(t, 0, len(results), "requires at least one result")
for k, v := range results {
assert.NotEmpty(t, k, "all keys should be populated")
assert.NotEmpty(t, v, "all values should be populated")
}
for _, e := range expect {
_, ok := results[e]
assert.Truef(t, ok, "expected results to contain collision key: %s", e)
}
}

View File

@ -19,6 +19,7 @@ import (
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/mock" "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
@ -202,8 +203,7 @@ func TestMailAPIIntgSuite(t *testing.T) {
suite.Run(t, &MailAPIIntgSuite{ suite.Run(t, &MailAPIIntgSuite{
Suite: tester.NewIntegrationSuite( Suite: tester.NewIntegrationSuite(
t, t,
[][]string{tester.M365AcctCredEnvs}, [][]string{tester.M365AcctCredEnvs}),
),
}) })
} }
@ -218,7 +218,7 @@ func (suite *MailAPIIntgSuite) SetupSuite() {
suite.ac, err = mock.NewClient(m365) suite.ac, err = mock.NewClient(m365)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
suite.user = tester.M365UserID(suite.T()) suite.user = tester.M365UserID(t)
} }
func getJSONObject(t *testing.T, thing serialization.Parsable) map[string]interface{} { func getJSONObject(t *testing.T, thing serialization.Parsable) map[string]interface{} {
@ -410,3 +410,34 @@ func (suite *MailAPIIntgSuite) TestHugeAttachmentListDownload() {
}) })
} }
} }
func (suite *MailAPIIntgSuite) TestRestoreLargeAttachment() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
userID := tester.M365UserID(suite.T())
folderName := testdata.DefaultRestoreConfig("maillargeattachmenttest").Location
msgs := suite.ac.Mail()
mailfolder, err := msgs.CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err, clues.ToCore(err))
msg := models.NewMessage()
msg.SetSubject(ptr.To("Mail with attachment"))
item, err := msgs.PostItem(ctx, userID, ptr.Val(mailfolder.GetId()), msg)
require.NoError(t, err, clues.ToCore(err))
id, err := msgs.PostLargeAttachment(
ctx,
userID,
ptr.Val(mailfolder.GetId()),
ptr.Val(item.GetId()),
"raboganm",
[]byte("mangobar"),
)
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, id, "empty id for large attachment")
}

View File

@ -183,7 +183,7 @@ func (c Users) GetInfo(ctx context.Context, userID string) (*UserInfo, error) {
// check whether the user is able to access their onedrive drive. // check whether the user is able to access their onedrive drive.
// if they cannot, we can assume they are ineligible for onedrive backups. // if they cannot, we can assume they are ineligible for onedrive backups.
if _, err := c.GetDefaultDrive(ctx, userID); err != nil { if _, err := c.GetDefaultDrive(ctx, userID); err != nil {
if !clues.HasLabel(err, graph.LabelsMysiteNotFound) { if !clues.HasLabel(err, graph.LabelsMysiteNotFound) || clues.HasLabel(err, graph.LabelsNoSharePointLicense) {
logger.CtxErr(ctx, err).Error("getting user's drive") logger.CtxErr(ctx, err).Error("getting user's drive")
return nil, graph.Wrap(ctx, err, "getting user's drive") return nil, graph.Wrap(ctx, err, "getting user's drive")
} }

View File

@ -73,12 +73,12 @@ func UsersCompatNoInfo(ctx context.Context, acct account.Account) ([]*UserNoInfo
// UserHasMailbox returns true if the user has an exchange mailbox enabled // UserHasMailbox returns true if the user has an exchange mailbox enabled
// false otherwise, and a nil pointer and an error in case of error // false otherwise, and a nil pointer and an error in case of error
func UserHasMailbox(ctx context.Context, acct account.Account, userID string) (bool, error) { func UserHasMailbox(ctx context.Context, acct account.Account, userID string) (bool, error) {
uapi, err := makeUserAPI(acct) ac, err := makeAC(acct)
if err != nil { if err != nil {
return false, clues.Wrap(err, "getting mailbox").WithClues(ctx) return false, clues.Stack(err).WithClues(ctx)
} }
_, err = uapi.GetMailInbox(ctx, userID) _, err = ac.Users().GetMailInbox(ctx, userID)
if err != nil { if err != nil {
// we consider this a non-error case, since it // we consider this a non-error case, since it
// answers the question the caller is asking. // answers the question the caller is asking.
@ -103,16 +103,20 @@ func UserHasMailbox(ctx context.Context, acct account.Account, userID string) (b
// UserHasDrives returns true if the user has any drives // UserHasDrives returns true if the user has any drives
// false otherwise, and a nil pointer and an error in case of error // false otherwise, and a nil pointer and an error in case of error
func UserHasDrives(ctx context.Context, acct account.Account, userID string) (bool, error) { func UserHasDrives(ctx context.Context, acct account.Account, userID string) (bool, error) {
uapi, err := makeUserAPI(acct) ac, err := makeAC(acct)
if err != nil { if err != nil {
return false, clues.Wrap(err, "getting drives").WithClues(ctx) return false, clues.Stack(err).WithClues(ctx)
} }
_, err = uapi.GetDefaultDrive(ctx, userID) return checkUserHasDrives(ctx, ac.Users(), userID)
}
func checkUserHasDrives(ctx context.Context, dgdd discovery.GetDefaultDriver, userID string) (bool, error) {
_, err := dgdd.GetDefaultDrive(ctx, userID)
if err != nil { if err != nil {
// we consider this a non-error case, since it // we consider this a non-error case, since it
// answers the question the caller is asking. // answers the question the caller is asking.
if clues.HasLabel(err, graph.LabelsMysiteNotFound) { if clues.HasLabel(err, graph.LabelsMysiteNotFound) || clues.HasLabel(err, graph.LabelsNoSharePointLicense) {
return false, nil return false, nil
} }
@ -130,12 +134,12 @@ func UserHasDrives(ctx context.Context, acct account.Account, userID string) (bo
// TODO: Remove this once we remove `Info` from `Users` and instead rely on the `GetUserInfo` API // TODO: Remove this once we remove `Info` from `Users` and instead rely on the `GetUserInfo` API
// to get user information // to get user information
func usersNoInfo(ctx context.Context, acct account.Account, errs *fault.Bus) ([]*UserNoInfo, error) { func usersNoInfo(ctx context.Context, acct account.Account, errs *fault.Bus) ([]*UserNoInfo, error) {
uapi, err := makeUserAPI(acct) ac, err := makeAC(acct)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "getting users").WithClues(ctx) return nil, clues.Stack(err).WithClues(ctx)
} }
us, err := discovery.Users(ctx, uapi, errs) us, err := discovery.Users(ctx, ac.Users(), errs)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -162,12 +166,12 @@ func usersNoInfo(ctx context.Context, acct account.Account, errs *fault.Bus) ([]
// Users returns a list of users in the specified M365 tenant // Users returns a list of users in the specified M365 tenant
func Users(ctx context.Context, acct account.Account, errs *fault.Bus) ([]*User, error) { func Users(ctx context.Context, acct account.Account, errs *fault.Bus) ([]*User, error) {
uapi, err := makeUserAPI(acct) ac, err := makeAC(acct)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "getting users").WithClues(ctx) return nil, clues.Stack(err).WithClues(ctx)
} }
us, err := discovery.Users(ctx, uapi, errs) us, err := discovery.Users(ctx, ac.Users(), errs)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -197,7 +201,7 @@ func Users(ctx context.Context, acct account.Account, errs *fault.Bus) ([]*User,
func parseUser(item models.Userable) (*User, error) { func parseUser(item models.Userable) (*User, error) {
if item.GetUserPrincipalName() == nil { if item.GetUserPrincipalName() == nil {
return nil, clues.New("user missing principal name"). return nil, clues.New("user missing principal name").
With("user_id", *item.GetId()) // TODO: pii With("user_id", ptr.Val(item.GetId()))
} }
u := &User{ u := &User{
@ -215,12 +219,12 @@ func GetUserInfo(
acct account.Account, acct account.Account,
userID string, userID string,
) (*api.UserInfo, error) { ) (*api.UserInfo, error) {
uapi, err := makeUserAPI(acct) ac, err := makeAC(acct)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "getting user info").WithClues(ctx) return nil, clues.Stack(err).WithClues(ctx)
} }
ui, err := discovery.UserInfo(ctx, uapi, userID) ui, err := discovery.UserInfo(ctx, ac.Users(), userID)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -249,9 +253,26 @@ type Site struct {
// Sites returns a list of Sites in a specified M365 tenant // Sites returns a list of Sites in a specified M365 tenant
func Sites(ctx context.Context, acct account.Account, errs *fault.Bus) ([]*Site, error) { func Sites(ctx context.Context, acct account.Account, errs *fault.Bus) ([]*Site, error) {
sites, err := discovery.Sites(ctx, acct, errs) ac, err := makeAC(acct)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "initializing M365 api connection") return nil, clues.Stack(err).WithClues(ctx)
}
return getAllSites(ctx, ac.Sites())
}
type getAllSiteser interface {
GetAll(ctx context.Context, errs *fault.Bus) ([]models.Siteable, error)
}
func getAllSites(ctx context.Context, gas getAllSiteser) ([]*Site, error) {
sites, err := gas.GetAll(ctx, fault.New(true))
if err != nil {
if clues.HasLabel(err, graph.LabelsNoSharePointLicense) {
return nil, clues.Stack(graph.ErrServiceNotEnabled, err)
}
return nil, clues.Wrap(err, "retrieving sites")
} }
ret := make([]*Site, 0, len(sites)) ret := make([]*Site, 0, len(sites))
@ -304,16 +325,16 @@ func SitesMap(
// helpers // helpers
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
func makeUserAPI(acct account.Account) (api.Users, error) { func makeAC(acct account.Account) (api.Client, error) {
creds, err := acct.M365Config() creds, err := acct.M365Config()
if err != nil { if err != nil {
return api.Users{}, clues.Wrap(err, "getting m365 account creds") return api.Client{}, clues.Wrap(err, "getting m365 account creds")
} }
cli, err := api.NewClient(creds) cli, err := api.NewClient(creds)
if err != nil { if err != nil {
return api.Users{}, clues.Wrap(err, "constructing api client") return api.Client{}, clues.Wrap(err, "constructing api client")
} }
return cli.Users(), nil return cli, nil
} }

View File

@ -1,17 +1,22 @@
package m365_test package m365
import ( import (
"context"
"testing" "testing"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/discovery"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365"
) )
type M365IntegrationSuite struct { type M365IntegrationSuite struct {
@ -22,8 +27,7 @@ func TestM365IntegrationSuite(t *testing.T) {
suite.Run(t, &M365IntegrationSuite{ suite.Run(t, &M365IntegrationSuite{
Suite: tester.NewIntegrationSuite( Suite: tester.NewIntegrationSuite(
t, t,
[][]string{tester.M365AcctCredEnvs}, [][]string{tester.M365AcctCredEnvs}),
),
}) })
} }
@ -35,7 +39,7 @@ func (suite *M365IntegrationSuite) TestUsers() {
acct := tester.NewM365Account(suite.T()) acct := tester.NewM365Account(suite.T())
users, err := m365.Users(ctx, acct, fault.New(true)) users, err := Users(ctx, acct, fault.New(true))
assert.NoError(t, err, clues.ToCore(err)) assert.NoError(t, err, clues.ToCore(err))
assert.NotEmpty(t, users) assert.NotEmpty(t, users)
@ -59,7 +63,7 @@ func (suite *M365IntegrationSuite) TestUsersCompat_HasNoInfo() {
acct := tester.NewM365Account(suite.T()) acct := tester.NewM365Account(suite.T())
users, err := m365.UsersCompatNoInfo(ctx, acct) users, err := UsersCompatNoInfo(ctx, acct)
assert.NoError(t, err, clues.ToCore(err)) assert.NoError(t, err, clues.ToCore(err))
assert.NotEmpty(t, users) assert.NotEmpty(t, users)
@ -85,7 +89,7 @@ func (suite *M365IntegrationSuite) TestGetUserInfo() {
uid = tester.M365UserID(t) uid = tester.M365UserID(t)
) )
info, err := m365.GetUserInfo(ctx, acct, uid) info, err := GetUserInfo(ctx, acct, uid)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, info) require.NotNil(t, info)
require.NotEmpty(t, info) require.NotEmpty(t, info)
@ -112,7 +116,7 @@ func (suite *M365IntegrationSuite) TestUserHasMailbox() {
uid = tester.M365UserID(t) uid = tester.M365UserID(t)
) )
enabled, err := m365.UserHasMailbox(ctx, acct, uid) enabled, err := UserHasMailbox(ctx, acct, uid)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
assert.True(t, enabled) assert.True(t, enabled)
} }
@ -128,7 +132,7 @@ func (suite *M365IntegrationSuite) TestUserHasDrive() {
uid = tester.M365UserID(t) uid = tester.M365UserID(t)
) )
enabled, err := m365.UserHasDrives(ctx, acct, uid) enabled, err := UserHasDrives(ctx, acct, uid)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
assert.True(t, enabled) assert.True(t, enabled)
} }
@ -139,14 +143,14 @@ func (suite *M365IntegrationSuite) TestSites() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
acct := tester.NewM365Account(suite.T()) acct := tester.NewM365Account(t)
sites, err := m365.Sites(ctx, acct, fault.New(true)) sites, err := Sites(ctx, acct, fault.New(true))
assert.NoError(t, err, clues.ToCore(err)) assert.NoError(t, err, clues.ToCore(err))
assert.NotEmpty(t, sites) assert.NotEmpty(t, sites)
for _, s := range sites { for _, s := range sites {
suite.Run("site", func() { suite.Run("site_"+s.ID, func() {
t := suite.T() t := suite.T()
assert.NotEmpty(t, s.WebURL) assert.NotEmpty(t, s.WebURL)
assert.NotEmpty(t, s.ID) assert.NotEmpty(t, s.ID)
@ -154,3 +158,204 @@ func (suite *M365IntegrationSuite) TestSites() {
}) })
} }
} }
type m365UnitSuite struct {
tester.Suite
}
func TestM365UnitSuite(t *testing.T) {
suite.Run(t, &m365UnitSuite{Suite: tester.NewUnitSuite(t)})
}
type mockDGDD struct {
response models.Driveable
err error
}
func (m mockDGDD) GetDefaultDrive(context.Context, string) (models.Driveable, error) {
return m.response, m.err
}
func (suite *m365UnitSuite) TestCheckUserHasDrives() {
table := []struct {
name string
mock func(context.Context) discovery.GetDefaultDriver
expect assert.BoolAssertionFunc
expectErr func(*testing.T, error)
}{
{
name: "ok",
mock: func(ctx context.Context) discovery.GetDefaultDriver {
return mockDGDD{models.NewDrive(), nil}
},
expect: assert.True,
expectErr: func(t *testing.T, err error) {
assert.NoError(t, err, clues.ToCore(err))
},
},
{
name: "mysite not found",
mock: func(ctx context.Context) discovery.GetDefaultDriver {
odErr := odataerrors.NewODataError()
merr := odataerrors.NewMainError()
merr.SetCode(ptr.To("code"))
merr.SetMessage(ptr.To(string(graph.MysiteNotFound)))
odErr.SetError(merr)
return mockDGDD{nil, graph.Stack(ctx, odErr)}
},
expect: assert.False,
expectErr: func(t *testing.T, err error) {
assert.NoError(t, err, clues.ToCore(err))
},
},
{
name: "mysite URL not found",
mock: func(ctx context.Context) discovery.GetDefaultDriver {
odErr := odataerrors.NewODataError()
merr := odataerrors.NewMainError()
merr.SetCode(ptr.To("code"))
merr.SetMessage(ptr.To(string(graph.MysiteURLNotFound)))
odErr.SetError(merr)
return mockDGDD{nil, graph.Stack(ctx, odErr)}
},
expect: assert.False,
expectErr: func(t *testing.T, err error) {
assert.NoError(t, err, clues.ToCore(err))
},
},
{
name: "no sharepoint license",
mock: func(ctx context.Context) discovery.GetDefaultDriver {
odErr := odataerrors.NewODataError()
merr := odataerrors.NewMainError()
merr.SetCode(ptr.To("code"))
merr.SetMessage(ptr.To(string(graph.NoSPLicense)))
odErr.SetError(merr)
return mockDGDD{nil, graph.Stack(ctx, odErr)}
},
expect: assert.False,
expectErr: func(t *testing.T, err error) {
assert.NoError(t, err, clues.ToCore(err))
},
},
{
name: "user not found",
mock: func(ctx context.Context) discovery.GetDefaultDriver {
odErr := odataerrors.NewODataError()
merr := odataerrors.NewMainError()
merr.SetCode(ptr.To(string(graph.RequestResourceNotFound)))
merr.SetMessage(ptr.To("message"))
odErr.SetError(merr)
return mockDGDD{nil, graph.Stack(ctx, odErr)}
},
expect: assert.False,
expectErr: func(t *testing.T, err error) {
assert.Error(t, err, clues.ToCore(err))
},
},
{
name: "arbitrary error",
mock: func(ctx context.Context) discovery.GetDefaultDriver {
odErr := odataerrors.NewODataError()
merr := odataerrors.NewMainError()
merr.SetCode(ptr.To("code"))
merr.SetMessage(ptr.To("message"))
odErr.SetError(merr)
return mockDGDD{nil, graph.Stack(ctx, odErr)}
},
expect: assert.False,
expectErr: func(t *testing.T, err error) {
assert.Error(t, err, clues.ToCore(err))
},
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
dgdd := test.mock(ctx)
ok, err := checkUserHasDrives(ctx, dgdd, "foo")
test.expect(t, ok, "has drives flag")
test.expectErr(t, err)
})
}
}
type mockGAS struct {
response []models.Siteable
err error
}
func (m mockGAS) GetAll(context.Context, *fault.Bus) ([]models.Siteable, error) {
return m.response, m.err
}
func (suite *m365UnitSuite) TestGetAllSites() {
table := []struct {
name string
mock func(context.Context) getAllSiteser
expectErr func(*testing.T, error)
}{
{
name: "ok",
mock: func(ctx context.Context) getAllSiteser {
return mockGAS{[]models.Siteable{}, nil}
},
expectErr: func(t *testing.T, err error) {
assert.NoError(t, err, clues.ToCore(err))
},
},
{
name: "no sharepoint license",
mock: func(ctx context.Context) getAllSiteser {
odErr := odataerrors.NewODataError()
merr := odataerrors.NewMainError()
merr.SetCode(ptr.To("code"))
merr.SetMessage(ptr.To(string(graph.NoSPLicense)))
odErr.SetError(merr)
return mockGAS{nil, graph.Stack(ctx, odErr)}
},
expectErr: func(t *testing.T, err error) {
assert.ErrorIs(t, err, graph.ErrServiceNotEnabled, clues.ToCore(err))
},
},
{
name: "arbitrary error",
mock: func(ctx context.Context) getAllSiteser {
odErr := odataerrors.NewODataError()
merr := odataerrors.NewMainError()
merr.SetCode(ptr.To("code"))
merr.SetMessage(ptr.To("message"))
odErr.SetError(merr)
return mockGAS{nil, graph.Stack(ctx, odErr)}
},
expectErr: func(t *testing.T, err error) {
assert.Error(t, err, clues.ToCore(err))
},
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
gas := test.mock(ctx)
_, err := getAllSites(ctx, gas)
test.expectErr(t, err)
})
}
}