Merge branch 'main' into sharepoint-restore-selectors

This commit is contained in:
Danny 2023-02-04 16:49:28 -05:00 committed by GitHub
commit aafc9c73f9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
110 changed files with 6912 additions and 2241 deletions

3
.gitignore vendored
View File

@ -20,6 +20,9 @@
.corso_test.toml .corso_test.toml
.corso.toml .corso.toml
# Logging
.corso.log
# Build directories # Build directories
/bin /bin
/docker/bin /docker/bin

View File

@ -10,6 +10,23 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added ### Added
- Document Corso's fault-tolerance and restartability features - Document Corso's fault-tolerance and restartability features
- Add retries on timeouts and status code 500 for Exchange
- Increase page size preference for delta requests for Exchange to reduce number of roundtrips
- OneDrive file/folder permissions can now be backed up and restored
- Add `--restore-permissions` flag to toggle restoration of OneDrive permissions
- Add versions to backups so that we can understand/handle older backup formats
### Fixed
- Backing up a calendar that has the same name as the default calendar
- Added additional backoff-retry to all OneDrive queries.
- Users with `null` userType values are no longer excluded from user queries.
### Known Issues
- When the same user has permissions to a file and the containing
folder, we only restore folder level permissions for the user and no
separate file only permission is restored.
- Link shares are not restored
## [v0.2.0] (alpha) - 2023-1-29 ## [v0.2.0] (alpha) - 2023-1-29
@ -18,7 +35,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Check if the user specified for an exchange backup operation has a mailbox. - Check if the user specified for an exchange backup operation has a mailbox.
### Changed ### Changed
- Item.Attachments are disabled from being restored for the patching of ([#2353](https://github.com/alcionai/corso/issues/2353))
- BetaClient introduced. Enables Corso to be able to interact with SharePoint Page objects. Package located `/internal/connector/graph/betasdk` - BetaClient introduced. Enables Corso to be able to interact with SharePoint Page objects. Package located `/internal/connector/graph/betasdk`
- Handle case where user's drive has not been initialized - Handle case where user's drive has not been initialized
- Inline attachments (e.g. copy/paste ) are discovered and backed up correctly ([#2163](https://github.com/alcionai/corso/issues/2163)) - Inline attachments (e.g. copy/paste ) are discovered and backed up correctly ([#2163](https://github.com/alcionai/corso/issues/2163))

View File

@ -79,6 +79,7 @@ func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
switch cmd.Use { switch cmd.Use {
case createCommand: case createCommand:
c, fs = utils.AddCommand(cmd, oneDriveCreateCmd()) c, fs = utils.AddCommand(cmd, oneDriveCreateCmd())
options.AddFeatureToggle(cmd, options.EnablePermissionsBackup())
c.Use = c.Use + " " + oneDriveServiceCommandCreateUseSuffix c.Use = c.Use + " " + oneDriveServiceCommandCreateUseSuffix
c.Example = oneDriveServiceCommandCreateExamples c.Example = oneDriveServiceCommandCreateExamples

View File

@ -72,7 +72,13 @@ func (suite *NoBackupOneDriveIntegrationSuite) SetupSuite() {
suite.m365UserID = tester.M365UserID(t) suite.m365UserID = tester.M365UserID(t)
// init the repo first // init the repo first
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{}) suite.repo, err = repository.Initialize(
ctx,
suite.acct,
suite.st,
control.Options{
ToggleFeatures: control.Toggles{EnablePermissionsBackup: true},
})
require.NoError(t, err) require.NoError(t, err)
} }
@ -152,7 +158,13 @@ func (suite *BackupDeleteOneDriveIntegrationSuite) SetupSuite() {
defer flush() defer flush()
// init the repo first // init the repo first
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{}) suite.repo, err = repository.Initialize(
ctx,
suite.acct,
suite.st,
control.Options{
ToggleFeatures: control.Toggles{EnablePermissionsBackup: true},
})
require.NoError(t, err) require.NoError(t, err)
m365UserID := tester.M365UserID(t) m365UserID := tester.M365UserID(t)

View File

@ -6,7 +6,9 @@ import (
"regexp" "regexp"
"strings" "strings"
"github.com/alcionai/clues"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"golang.org/x/exp/slices"
"github.com/alcionai/corso/src/cli/backup" "github.com/alcionai/corso/src/cli/backup"
"github.com/alcionai/corso/src/cli/config" "github.com/alcionai/corso/src/cli/config"
@ -50,6 +52,13 @@ func preRun(cc *cobra.Command, args []string) error {
flagSl = append(flagSl, f) flagSl = append(flagSl, f)
} }
avoidTheseCommands := []string{
"corso", "env", "help", "backup", "details", "list", "restore", "delete", "repo", "init", "connect",
}
if len(logger.LogFile) > 0 && !slices.Contains(avoidTheseCommands, cc.Use) {
print.Info(cc.Context(), "Logging to file: "+logger.LogFile)
}
log.Infow("cli command", "command", cc.CommandPath(), "flags", flagSl, "version", version.CurrentVersion()) log.Infow("cli command", "command", cc.CommandPath(), "flags", flagSl, "version", version.CurrentVersion())
return nil return nil
@ -121,6 +130,9 @@ func Handle() {
}() }()
if err := corsoCmd.ExecuteContext(ctx); err != nil { if err := corsoCmd.ExecuteContext(ctx); err != nil {
logger.Ctx(ctx).
With("err", err).
Errorw("cli execution", clues.InErr(err).Slice()...)
os.Exit(1) os.Exit(1)
} }
} }

View File

@ -11,17 +11,11 @@ import (
func Control() control.Options { func Control() control.Options {
opt := control.Defaults() opt := control.Defaults()
if fastFail { opt.FailFast = fastFail
opt.FailFast = true opt.DisableMetrics = noStats
} opt.RestorePermissions = restorePermissions
opt.ToggleFeatures.DisableIncrementals = disableIncrementals
if noStats { opt.ToggleFeatures.EnablePermissionsBackup = enablePermissionsBackup
opt.DisableMetrics = true
}
if disableIncrementals {
opt.ToggleFeatures.DisableIncrementals = true
}
return opt return opt
} }
@ -31,8 +25,9 @@ func Control() control.Options {
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
var ( var (
fastFail bool fastFail bool
noStats bool noStats bool
restorePermissions bool
) )
// AddOperationFlags adds command-local operation flags // AddOperationFlags adds command-local operation flags
@ -49,11 +44,22 @@ func AddGlobalOperationFlags(cmd *cobra.Command) {
fs.BoolVar(&noStats, "no-stats", false, "disable anonymous usage statistics gathering") fs.BoolVar(&noStats, "no-stats", false, "disable anonymous usage statistics gathering")
} }
// AddRestorePermissionsFlag adds OneDrive flag for restoring permissions
func AddRestorePermissionsFlag(cmd *cobra.Command) {
fs := cmd.Flags()
fs.BoolVar(&restorePermissions, "restore-permissions", false, "Restore permissions for files and folders")
// TODO: reveal this flag once backing up permissions becomes default
cobra.CheckErr(fs.MarkHidden("restore-permissions"))
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Feature Flags // Feature Flags
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
var disableIncrementals bool var (
disableIncrementals bool
enablePermissionsBackup bool
)
type exposeFeatureFlag func(*pflag.FlagSet) type exposeFeatureFlag func(*pflag.FlagSet)
@ -78,3 +84,16 @@ func DisableIncrementals() func(*pflag.FlagSet) {
cobra.CheckErr(fs.MarkHidden("disable-incrementals")) cobra.CheckErr(fs.MarkHidden("disable-incrementals"))
} }
} }
// Adds the hidden '--enable-permissions-backup' cli flag which, when
// set, enables backing up permissions.
func EnablePermissionsBackup() func(*pflag.FlagSet) {
return func(fs *pflag.FlagSet) {
fs.BoolVar(
&enablePermissionsBackup,
"enable-permissions-backup",
false,
"Enable backing up item permissions for OneDrive")
cobra.CheckErr(fs.MarkHidden("enable-permissions-backup"))
}
}

View File

@ -63,6 +63,9 @@ func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
utils.FileFN, nil, utils.FileFN, nil,
"Restore items by file name or ID") "Restore items by file name or ID")
// permissions restore flag
options.AddRestorePermissionsFlag(c)
// onedrive info flags // onedrive info flags
fs.StringVar( fs.StringVar(
@ -97,6 +100,9 @@ const (
oneDriveServiceCommandRestoreExamples = `# Restore file with ID 98765abcdef oneDriveServiceCommandRestoreExamples = `# Restore file with ID 98765abcdef
corso restore onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd --file 98765abcdef corso restore onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd --file 98765abcdef
# Restore file with ID 98765abcdef along with its associated permissions
corso restore onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd --file 98765abcdef --restore-permissions
# Restore Alice's file named "FY2021 Planning.xlsx in "Documents/Finance Reports" from a specific backup # Restore Alice's file named "FY2021 Planning.xlsx in "Documents/Finance Reports" from a specific backup
corso restore onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd \ corso restore onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd \
--user alice@example.com --file "FY2021 Planning.xlsx" --folder "Documents/Finance Reports" --user alice@example.com --file "FY2021 Planning.xlsx" --folder "Documents/Finance Reports"

View File

@ -16,6 +16,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/credentials" "github.com/alcionai/corso/src/pkg/credentials"
@ -50,6 +51,7 @@ func generateAndRestoreItems(
tenantID, userID, destFldr string, tenantID, userID, destFldr string,
howMany int, howMany int,
dbf dataBuilderFunc, dbf dataBuilderFunc,
opts control.Options,
) (*details.Details, error) { ) (*details.Details, error) {
items := make([]item, 0, howMany) items := make([]item, 0, howMany)
@ -74,7 +76,7 @@ func generateAndRestoreItems(
items: items, items: items,
}} }}
// TODO: fit the desination to the containers // TODO: fit the destination to the containers
dest := control.DefaultRestoreDestination(common.SimpleTimeTesting) dest := control.DefaultRestoreDestination(common.SimpleTimeTesting)
dest.ContainerName = destFldr dest.ContainerName = destFldr
@ -90,7 +92,7 @@ func generateAndRestoreItems(
Infof(ctx, "Generating %d %s items in %s\n", howMany, cat, Destination) Infof(ctx, "Generating %d %s items in %s\n", howMany, cat, Destination)
return gc.RestoreDataCollections(ctx, acct, sel, dest, dataColls) return gc.RestoreDataCollections(ctx, backup.Version, acct, sel, dest, opts, dataColls)
} }
// ------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------

View File

@ -6,6 +6,7 @@ import (
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
@ -67,6 +68,7 @@ func handleExchangeEmailFactory(cmd *cobra.Command, args []string) error {
subject, body, body, subject, body, body,
now, now, now, now) now, now, now, now)
}, },
control.Options{},
) )
if err != nil { if err != nil {
return Only(ctx, err) return Only(ctx, err)
@ -107,6 +109,7 @@ func handleExchangeCalendarEventFactory(cmd *cobra.Command, args []string) error
User, subject, body, body, User, subject, body, body,
now, now, false) now, now, false)
}, },
control.Options{},
) )
if err != nil { if err != nil {
return Only(ctx, err) return Only(ctx, err)
@ -152,6 +155,7 @@ func handleExchangeContactFactory(cmd *cobra.Command, args []string) error {
"123-456-7890", "123-456-7890",
) )
}, },
control.Options{},
) )
if err != nil { if err != nil {
return Only(ctx, err) return Only(ctx, err)

View File

@ -4,8 +4,8 @@ go 1.19
require ( require (
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0 github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0
github.com/alcionai/clues v0.0.0-20230120231953-1cf61dbafc40 github.com/alcionai/clues v0.0.0-20230131232239-cee86233b005
github.com/aws/aws-sdk-go v1.44.190 github.com/aws/aws-sdk-go v1.44.192
github.com/aws/aws-xray-sdk-go v1.8.0 github.com/aws/aws-xray-sdk-go v1.8.0
github.com/google/uuid v1.3.0 github.com/google/uuid v1.3.0
github.com/hashicorp/go-multierror v1.1.1 github.com/hashicorp/go-multierror v1.1.1
@ -71,7 +71,6 @@ require (
github.com/hashicorp/errwrap v1.0.0 // indirect github.com/hashicorp/errwrap v1.0.0 // indirect
github.com/hashicorp/golang-lru v0.5.4 // indirect github.com/hashicorp/golang-lru v0.5.4 // indirect
github.com/inconshreveable/mousetrap v1.0.1 // indirect github.com/inconshreveable/mousetrap v1.0.1 // indirect
github.com/inhies/go-bytesize v0.0.0-20220417184213-4913239db9cf
github.com/jmespath/go-jmespath v0.4.0 // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/compress v1.15.12 // indirect github.com/klauspost/compress v1.15.12 // indirect

View File

@ -52,8 +52,8 @@ github.com/VividCortex/ewma v1.2.0 h1:f58SaIzcDXrSy3kWaHNvuJgJ3Nmz59Zji6XoJR/q1o
github.com/VividCortex/ewma v1.2.0/go.mod h1:nz4BbCtbLyFDeC9SUHbtcT5644juEuWfUAUnGx7j5l4= github.com/VividCortex/ewma v1.2.0/go.mod h1:nz4BbCtbLyFDeC9SUHbtcT5644juEuWfUAUnGx7j5l4=
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8= github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8=
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo= github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo=
github.com/alcionai/clues v0.0.0-20230120231953-1cf61dbafc40 h1:bvAwz0dcJeIyRjudVyzmmawOvc4SqlSerKd0B4dh0yw= github.com/alcionai/clues v0.0.0-20230131232239-cee86233b005 h1:eTgICcmcydEWG8J+hgnidf0pzujV3Gd2XqmknykZkzA=
github.com/alcionai/clues v0.0.0-20230120231953-1cf61dbafc40/go.mod h1:UlAs8jkWIpsOMakiC8NxPgQQVQRdvyf1hYMszlYYLb4= github.com/alcionai/clues v0.0.0-20230131232239-cee86233b005/go.mod h1:UlAs8jkWIpsOMakiC8NxPgQQVQRdvyf1hYMszlYYLb4=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
@ -62,8 +62,8 @@ github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk5
github.com/alessio/shellescape v1.4.1 h1:V7yhSDDn8LP4lc4jS8pFkt0zCnzVJlG5JXy9BVKJUX0= github.com/alessio/shellescape v1.4.1 h1:V7yhSDDn8LP4lc4jS8pFkt0zCnzVJlG5JXy9BVKJUX0=
github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY= github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY=
github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/aws/aws-sdk-go v1.44.190 h1:QC+Pf/Ooj7Waf2obOPZbIQOqr00hy4h54j3ZK9mvHcc= github.com/aws/aws-sdk-go v1.44.192 h1:KL54vCxRd5v5XBGjnF3FelzXXwl+aWHDmDTihFmRNgM=
github.com/aws/aws-sdk-go v1.44.190/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= github.com/aws/aws-sdk-go v1.44.192/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
github.com/aws/aws-xray-sdk-go v1.8.0 h1:0xncHZ588wB/geLjbM/esoW3FOEThWy2TJyb4VXfLFY= github.com/aws/aws-xray-sdk-go v1.8.0 h1:0xncHZ588wB/geLjbM/esoW3FOEThWy2TJyb4VXfLFY=
github.com/aws/aws-xray-sdk-go v1.8.0/go.mod h1:7LKe47H+j3evfvS1+q0wzpoaGXGrF3mUsfM+thqVO+A= github.com/aws/aws-xray-sdk-go v1.8.0/go.mod h1:7LKe47H+j3evfvS1+q0wzpoaGXGrF3mUsfM+thqVO+A=
github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8=
@ -209,8 +209,6 @@ github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc= github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc=
github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/inhies/go-bytesize v0.0.0-20220417184213-4913239db9cf h1:FtEj8sfIcaaBfAKrE1Cwb61YDtYq9JxChK1c7AKce7s=
github.com/inhies/go-bytesize v0.0.0-20220417184213-4913239db9cf/go.mod h1:yrqSXGoD/4EKfF26AOGzscPOgTTJcyAwM2rpixWT+t4=
github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=

View File

@ -83,7 +83,7 @@ func (gc *GraphConnector) DataCollections(
return colls, excludes, nil return colls, excludes, nil
case selectors.ServiceOneDrive: case selectors.ServiceOneDrive:
return gc.OneDriveDataCollections(ctx, sels, ctrlOpts) return gc.OneDriveDataCollections(ctx, sels, metadata, ctrlOpts)
case selectors.ServiceSharePoint: case selectors.ServiceSharePoint:
colls, excludes, err := sharepoint.DataCollections( colls, excludes, err := sharepoint.DataCollections(
@ -182,6 +182,7 @@ func (fm odFolderMatcher) Matches(dir string) bool {
func (gc *GraphConnector) OneDriveDataCollections( func (gc *GraphConnector) OneDriveDataCollections(
ctx context.Context, ctx context.Context,
selector selectors.Selector, selector selectors.Selector,
metadata []data.Collection,
ctrlOpts control.Options, ctrlOpts control.Options,
) ([]data.Collection, map[string]struct{}, error) { ) ([]data.Collection, map[string]struct{}, error) {
odb, err := selector.ToOneDriveBackup() odb, err := selector.ToOneDriveBackup()
@ -209,7 +210,7 @@ func (gc *GraphConnector) OneDriveDataCollections(
gc.Service, gc.Service,
gc.UpdateStatus, gc.UpdateStatus,
ctrlOpts, ctrlOpts,
).Get(ctx) ).Get(ctx, metadata)
if err != nil { if err != nil {
return nil, nil, support.WrapAndAppend(user, err, errs) return nil, nil, support.WrapAndAppend(user, err, errs)
} }

View File

@ -1,32 +1,33 @@
package api package api
import ( import (
"github.com/alcionai/corso/src/internal/connector/graph/betasdk"
absser "github.com/microsoft/kiota-abstractions-go/serialization" absser "github.com/microsoft/kiota-abstractions-go/serialization"
msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go" msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/connector/graph/betasdk"
) )
// Service wraps BetaClient's functionality. // Service wraps BetaClient's functionality.
// Abstraction created to comply loosely with graph.Servicer // Abstraction created to comply loosely with graph.Servicer
// methods for ease of switching between v1.0 and beta connnectors // methods for ease of switching between v1.0 and beta connnectors
type Service struct { type BetaService struct {
client *betasdk.BetaClient client *betasdk.BetaClient
} }
func (s Service) Client() *betasdk.BetaClient { func (s BetaService) Client() *betasdk.BetaClient {
return s.client return s.client
} }
func NewBetaService(adpt *msgraphsdk.GraphRequestAdapter) *Service { func NewBetaService(adpt *msgraphsdk.GraphRequestAdapter) *BetaService {
return &Service{ return &BetaService{
client: betasdk.NewBetaClient(adpt), client: betasdk.NewBetaClient(adpt),
} }
} }
// Seraialize writes an M365 parsable object into a byte array using the built-in // Seraialize writes an M365 parsable object into a byte array using the built-in
// application/json writer within the adapter. // application/json writer within the adapter.
func (s Service) Serialize(object absser.Parsable) ([]byte, error) { func (s BetaService) Serialize(object absser.Parsable) ([]byte, error) {
writer, err := s.client.Adapter(). writer, err := s.client.Adapter().
GetSerializationWriterFactory(). GetSerializationWriterFactory().
GetSerializationWriter("application/json") GetSerializationWriter("application/json")

View File

@ -3,6 +3,7 @@ package api
import ( import (
"context" "context"
absser "github.com/microsoft/kiota-abstractions-go"
msgraphgocore "github.com/microsoftgraph/msgraph-sdk-go-core" msgraphgocore "github.com/microsoftgraph/msgraph-sdk-go-core"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/microsoftgraph/msgraph-sdk-go/users" "github.com/microsoftgraph/msgraph-sdk-go/users"
@ -58,14 +59,27 @@ const (
// require more fine-tuned controls in the future. // require more fine-tuned controls in the future.
// https://stackoverflow.com/questions/64044266/error-message-unsupported-or-invalid-query-filter-clause-specified-for-property // https://stackoverflow.com/questions/64044266/error-message-unsupported-or-invalid-query-filter-clause-specified-for-property
// //
// ne 'Guest' ensures we don't filter out users where userType = null, which can happen
// for user accounts created prior to 2014. In order to use the `ne` comparator, we
// MUST include $count=true and the ConsistencyLevel: eventual header.
// https://stackoverflow.com/questions/49340485/how-to-filter-users-by-usertype-null
//
//nolint:lll //nolint:lll
var userFilterNoGuests = "onPremisesSyncEnabled eq true OR userType eq 'Member'" var userFilterNoGuests = "onPremisesSyncEnabled eq true OR userType ne 'Guest'"
// I can't believe I have to do this.
var t = true
func userOptions(fs *string) *users.UsersRequestBuilderGetRequestConfiguration { func userOptions(fs *string) *users.UsersRequestBuilderGetRequestConfiguration {
headers := absser.NewRequestHeaders()
headers.Add("ConsistencyLevel", "eventual")
return &users.UsersRequestBuilderGetRequestConfiguration{ return &users.UsersRequestBuilderGetRequestConfiguration{
Headers: headers,
QueryParameters: &users.UsersRequestBuilderGetQueryParameters{ QueryParameters: &users.UsersRequestBuilderGetQueryParameters{
Select: []string{userSelectID, userSelectPrincipalName, userSelectDisplayName}, Select: []string{userSelectID, userSelectPrincipalName, userSelectDisplayName},
Filter: fs, Filter: fs,
Count: &t,
}, },
} }
} }
@ -77,7 +91,13 @@ func (c Users) GetAll(ctx context.Context) ([]models.Userable, error) {
return nil, err return nil, err
} }
resp, err := service.Client().Users().Get(ctx, userOptions(&userFilterNoGuests)) var resp models.UserCollectionResponseable
err = graph.RunWithRetry(func() error {
resp, err = service.Client().Users().Get(ctx, userOptions(&userFilterNoGuests))
return err
})
if err != nil { if err != nil {
return nil, support.ConnectorStackErrorTraceWrap(err, "getting all users") return nil, support.ConnectorStackErrorTraceWrap(err, "getting all users")
} }
@ -114,22 +134,37 @@ func (c Users) GetAll(ctx context.Context) ([]models.Userable, error) {
} }
func (c Users) GetByID(ctx context.Context, userID string) (models.Userable, error) { func (c Users) GetByID(ctx context.Context, userID string) (models.Userable, error) {
user, err := c.stable.Client().UsersById(userID).Get(ctx, nil) var (
resp models.Userable
err error
)
err = graph.RunWithRetry(func() error {
resp, err = c.stable.Client().UsersById(userID).Get(ctx, nil)
return err
})
if err != nil { if err != nil {
return nil, support.ConnectorStackErrorTraceWrap(err, "getting user by id") return nil, support.ConnectorStackErrorTraceWrap(err, "getting user by id")
} }
return user, nil return resp, err
} }
func (c Users) GetInfo(ctx context.Context, userID string) (*UserInfo, error) { func (c Users) GetInfo(ctx context.Context, userID string) (*UserInfo, error) {
// Assume all services are enabled // Assume all services are enabled
// then filter down to only services the user has enabled // then filter down to only services the user has enabled
userInfo := newUserInfo() var (
err error
userInfo = newUserInfo()
)
// TODO: OneDrive // TODO: OneDrive
err = graph.RunWithRetry(func() error {
_, err = c.stable.Client().UsersById(userID).MailFolders().Get(ctx, nil)
return err
})
_, err := c.stable.Client().UsersById(userID).MailFolders().Get(ctx, nil)
if err != nil { if err != nil {
if !graph.IsErrExchangeMailFolderNotFound(err) { if !graph.IsErrExchangeMailFolderNotFound(err) {
return nil, support.ConnectorStackErrorTraceWrap(err, "getting user's exchange mailfolders") return nil, support.ConnectorStackErrorTraceWrap(err, "getting user's exchange mailfolders")

View File

@ -5,6 +5,7 @@ import (
"fmt" "fmt"
"time" "time"
"github.com/alcionai/clues"
"github.com/hashicorp/go-multierror" "github.com/hashicorp/go-multierror"
"github.com/microsoft/kiota-abstractions-go/serialization" "github.com/microsoft/kiota-abstractions-go/serialization"
kioser "github.com/microsoft/kiota-serialization-json-go" kioser "github.com/microsoft/kiota-serialization-json-go"
@ -16,6 +17,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/graph/api" "github.com/alcionai/corso/src/internal/connector/graph/api"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/selectors"
) )
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -61,7 +63,16 @@ func (c Contacts) GetItem(
ctx context.Context, ctx context.Context,
user, itemID string, user, itemID string,
) (serialization.Parsable, *details.ExchangeInfo, error) { ) (serialization.Parsable, *details.ExchangeInfo, error) {
cont, err := c.stable.Client().UsersById(user).ContactsById(itemID).Get(ctx, nil) var (
cont models.Contactable
err error
)
err = graph.RunWithRetry(func() error {
cont, err = c.stable.Client().UsersById(user).ContactsById(itemID).Get(ctx, nil)
return err
})
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }
@ -81,7 +92,14 @@ func (c Contacts) GetAllContactFolderNamesForUser(
return nil, err return nil, err
} }
return c.stable.Client().UsersById(user).ContactFolders().Get(ctx, options) var resp models.ContactFolderCollectionResponseable
err = graph.RunWithRetry(func() error {
resp, err = c.stable.Client().UsersById(user).ContactFolders().Get(ctx, options)
return err
})
return resp, err
} }
func (c Contacts) GetContainerByID( func (c Contacts) GetContainerByID(
@ -93,10 +111,14 @@ func (c Contacts) GetContainerByID(
return nil, errors.Wrap(err, "options for contact folder") return nil, errors.Wrap(err, "options for contact folder")
} }
return c.stable.Client(). var resp models.ContactFolderable
UsersById(userID).
ContactFoldersById(dirID). err = graph.RunWithRetry(func() error {
Get(ctx, ofcf) resp, err = c.stable.Client().UsersById(userID).ContactFoldersById(dirID).Get(ctx, ofcf)
return err
})
return resp, err
} }
// EnumerateContainers iterates through all of the users current // EnumerateContainers iterates through all of the users current
@ -117,6 +139,7 @@ func (c Contacts) EnumerateContainers(
var ( var (
errs *multierror.Error errs *multierror.Error
resp models.ContactFolderCollectionResponseable
fields = []string{"displayName", "parentFolderId"} fields = []string{"displayName", "parentFolderId"}
) )
@ -131,7 +154,11 @@ func (c Contacts) EnumerateContainers(
ChildFolders() ChildFolders()
for { for {
resp, err := builder.Get(ctx, ofcf) err = graph.RunWithRetry(func() error {
resp, err = builder.Get(ctx, ofcf)
return err
})
if err != nil { if err != nil {
return errors.Wrap(err, support.ConnectorStackErrorTrace(err)) return errors.Wrap(err, support.ConnectorStackErrorTrace(err))
} }
@ -174,7 +201,17 @@ type contactPager struct {
} }
func (p *contactPager) getPage(ctx context.Context) (api.DeltaPageLinker, error) { func (p *contactPager) getPage(ctx context.Context) (api.DeltaPageLinker, error) {
return p.builder.Get(ctx, p.options) var (
resp api.DeltaPageLinker
err error
)
err = graph.RunWithRetry(func() error {
resp, err = p.builder.Get(ctx, p.options)
return err
})
return resp, err
} }
func (p *contactPager) setNext(nextLink string) { func (p *contactPager) setNext(nextLink string) {
@ -199,6 +236,11 @@ func (c Contacts) GetAddedAndRemovedItemIDs(
resetDelta bool resetDelta bool
) )
ctx = clues.AddAll(
ctx,
"category", selectors.ExchangeContact,
"folder_id", directoryID)
options, err := optionsForContactFoldersItemDelta([]string{"parentFolderId"}) options, err := optionsForContactFoldersItemDelta([]string{"parentFolderId"})
if err != nil { if err != nil {
return nil, nil, DeltaUpdate{}, errors.Wrap(err, "getting query options") return nil, nil, DeltaUpdate{}, errors.Wrap(err, "getting query options")

View File

@ -5,6 +5,7 @@ import (
"fmt" "fmt"
"time" "time"
"github.com/alcionai/clues"
"github.com/hashicorp/go-multierror" "github.com/hashicorp/go-multierror"
"github.com/microsoft/kiota-abstractions-go/serialization" "github.com/microsoft/kiota-abstractions-go/serialization"
kioser "github.com/microsoft/kiota-serialization-json-go" kioser "github.com/microsoft/kiota-serialization-json-go"
@ -19,6 +20,7 @@ import (
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
) )
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -73,7 +75,13 @@ func (c Events) GetContainerByID(
return nil, errors.Wrap(err, "options for event calendar") return nil, errors.Wrap(err, "options for event calendar")
} }
cal, err := service.Client().UsersById(userID).CalendarsById(containerID).Get(ctx, ofc) var cal models.Calendarable
err = graph.RunWithRetry(func() error {
cal, err = service.Client().UsersById(userID).CalendarsById(containerID).Get(ctx, ofc)
return err
})
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -86,12 +94,28 @@ func (c Events) GetItem(
ctx context.Context, ctx context.Context,
user, itemID string, user, itemID string,
) (serialization.Parsable, *details.ExchangeInfo, error) { ) (serialization.Parsable, *details.ExchangeInfo, error) {
event, err := c.stable.Client().UsersById(user).EventsById(itemID).Get(ctx, nil) var (
event models.Eventable
err error
)
err = graph.RunWithRetry(func() error {
event, err = c.stable.Client().UsersById(user).EventsById(itemID).Get(ctx, nil)
return err
})
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }
var errs *multierror.Error var (
errs *multierror.Error
options = &users.ItemEventsItemAttachmentsRequestBuilderGetRequestConfiguration{
QueryParameters: &users.ItemEventsItemAttachmentsRequestBuilderGetQueryParameters{
Expand: []string{"microsoft.graph.itemattachment/item"},
},
}
)
if *event.GetHasAttachments() || HasAttachments(event.GetBody()) { if *event.GetHasAttachments() || HasAttachments(event.GetBody()) {
for count := 0; count < numberOfRetries; count++ { for count := 0; count < numberOfRetries; count++ {
@ -100,7 +124,7 @@ func (c Events) GetItem(
UsersById(user). UsersById(user).
EventsById(itemID). EventsById(itemID).
Attachments(). Attachments().
Get(ctx, nil) Get(ctx, options)
if err == nil { if err == nil {
event.SetAttachments(attached.GetValue()) event.SetAttachments(attached.GetValue())
break break
@ -128,7 +152,14 @@ func (c Client) GetAllCalendarNamesForUser(
return nil, err return nil, err
} }
return c.stable.Client().UsersById(user).Calendars().Get(ctx, options) var resp models.CalendarCollectionResponseable
err = graph.RunWithRetry(func() error {
resp, err = c.stable.Client().UsersById(user).Calendars().Get(ctx, options)
return err
})
return resp, err
} }
// EnumerateContainers iterates through all of the users current // EnumerateContainers iterates through all of the users current
@ -147,7 +178,10 @@ func (c Events) EnumerateContainers(
return err return err
} }
var errs *multierror.Error var (
resp models.CalendarCollectionResponseable
errs *multierror.Error
)
ofc, err := optionsForCalendars([]string{"name"}) ofc, err := optionsForCalendars([]string{"name"})
if err != nil { if err != nil {
@ -157,7 +191,13 @@ func (c Events) EnumerateContainers(
builder := service.Client().UsersById(userID).Calendars() builder := service.Client().UsersById(userID).Calendars()
for { for {
resp, err := builder.Get(ctx, ofc) var err error
err = graph.RunWithRetry(func() error {
resp, err = builder.Get(ctx, ofc)
return err
})
if err != nil { if err != nil {
return errors.Wrap(err, support.ConnectorStackErrorTrace(err)) return errors.Wrap(err, support.ConnectorStackErrorTrace(err))
} }
@ -205,7 +245,16 @@ type eventPager struct {
} }
func (p *eventPager) getPage(ctx context.Context) (api.DeltaPageLinker, error) { func (p *eventPager) getPage(ctx context.Context) (api.DeltaPageLinker, error) {
resp, err := p.builder.Get(ctx, p.options) var (
resp api.DeltaPageLinker
err error
)
err = graph.RunWithRetry(func() error {
resp, err = p.builder.Get(ctx, p.options)
return err
})
return resp, err return resp, err
} }
@ -231,6 +280,11 @@ func (c Events) GetAddedAndRemovedItemIDs(
errs *multierror.Error errs *multierror.Error
) )
ctx = clues.AddAll(
ctx,
"category", selectors.ExchangeEvent,
"calendar_id", calendarID)
if len(oldDelta) > 0 { if len(oldDelta) > 0 {
builder := users.NewItemCalendarsItemEventsDeltaRequestBuilder(oldDelta, service.Adapter()) builder := users.NewItemCalendarsItemEventsDeltaRequestBuilder(oldDelta, service.Adapter())
pgr := &eventPager{service, builder, nil} pgr := &eventPager{service, builder, nil}

View File

@ -5,6 +5,7 @@ import (
"fmt" "fmt"
"time" "time"
"github.com/alcionai/clues"
"github.com/hashicorp/go-multierror" "github.com/hashicorp/go-multierror"
"github.com/microsoft/kiota-abstractions-go/serialization" "github.com/microsoft/kiota-abstractions-go/serialization"
kioser "github.com/microsoft/kiota-serialization-json-go" kioser "github.com/microsoft/kiota-serialization-json-go"
@ -17,6 +18,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/selectors"
) )
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -95,7 +97,14 @@ func (c Mail) GetContainerByID(
return nil, errors.Wrap(err, "options for mail folder") return nil, errors.Wrap(err, "options for mail folder")
} }
return service.Client().UsersById(userID).MailFoldersById(dirID).Get(ctx, ofmf) var resp graph.Container
err = graph.RunWithRetry(func() error {
resp, err = service.Client().UsersById(userID).MailFoldersById(dirID).Get(ctx, ofmf)
return err
})
return resp, err
} }
// GetItem retrieves a Messageable item. If the item contains an attachment, that // GetItem retrieves a Messageable item. If the item contains an attachment, that
@ -104,7 +113,16 @@ func (c Mail) GetItem(
ctx context.Context, ctx context.Context,
user, itemID string, user, itemID string,
) (serialization.Parsable, *details.ExchangeInfo, error) { ) (serialization.Parsable, *details.ExchangeInfo, error) {
mail, err := c.stable.Client().UsersById(user).MessagesById(itemID).Get(ctx, nil) var (
mail models.Messageable
err error
)
err = graph.RunWithRetry(func() error {
mail, err = c.stable.Client().UsersById(user).MessagesById(itemID).Get(ctx, nil)
return err
})
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }
@ -112,13 +130,18 @@ func (c Mail) GetItem(
var errs *multierror.Error var errs *multierror.Error
if *mail.GetHasAttachments() || HasAttachments(mail.GetBody()) { if *mail.GetHasAttachments() || HasAttachments(mail.GetBody()) {
options := &users.ItemMessagesItemAttachmentsRequestBuilderGetRequestConfiguration{
QueryParameters: &users.ItemMessagesItemAttachmentsRequestBuilderGetQueryParameters{
Expand: []string{"microsoft.graph.itemattachment/item"},
},
}
for count := 0; count < numberOfRetries; count++ { for count := 0; count < numberOfRetries; count++ {
attached, err := c.largeItem. attached, err := c.largeItem.
Client(). Client().
UsersById(user). UsersById(user).
MessagesById(itemID). MessagesById(itemID).
Attachments(). Attachments().
Get(ctx, nil) Get(ctx, options)
if err == nil { if err == nil {
mail.SetAttachments(attached.GetValue()) mail.SetAttachments(attached.GetValue())
break break
@ -154,6 +177,7 @@ func (c Mail) EnumerateContainers(
} }
var ( var (
resp users.ItemMailFoldersDeltaResponseable
errs *multierror.Error errs *multierror.Error
builder = service.Client(). builder = service.Client().
UsersById(userID). UsersById(userID).
@ -162,7 +186,13 @@ func (c Mail) EnumerateContainers(
) )
for { for {
resp, err := builder.Get(ctx, nil) var err error
err = graph.RunWithRetry(func() error {
resp, err = builder.Get(ctx, nil)
return err
})
if err != nil { if err != nil {
return errors.Wrap(err, support.ConnectorStackErrorTrace(err)) return errors.Wrap(err, support.ConnectorStackErrorTrace(err))
} }
@ -200,7 +230,17 @@ type mailPager struct {
} }
func (p *mailPager) getPage(ctx context.Context) (api.DeltaPageLinker, error) { func (p *mailPager) getPage(ctx context.Context) (api.DeltaPageLinker, error) {
return p.builder.Get(ctx, p.options) var (
page api.DeltaPageLinker
err error
)
err = graph.RunWithRetry(func() error {
page, err = p.builder.Get(ctx, p.options)
return err
})
return page, err
} }
func (p *mailPager) setNext(nextLink string) { func (p *mailPager) setNext(nextLink string) {
@ -226,6 +266,11 @@ func (c Mail) GetAddedAndRemovedItemIDs(
resetDelta bool resetDelta bool
) )
ctx = clues.AddAll(
ctx,
"category", selectors.ExchangeMail,
"folder_id", directoryID)
options, err := optionsForFolderMessagesDelta([]string{"isRead"}) options, err := optionsForFolderMessagesDelta([]string{"isRead"})
if err != nil { if err != nil {
return nil, nil, DeltaUpdate{}, errors.Wrap(err, "getting query options") return nil, nil, DeltaUpdate{}, errors.Wrap(err, "getting query options")

View File

@ -3,6 +3,7 @@ package api
import ( import (
"fmt" "fmt"
abstractions "github.com/microsoft/kiota-abstractions-go"
"github.com/microsoftgraph/msgraph-sdk-go/users" "github.com/microsoftgraph/msgraph-sdk-go/users"
) )
@ -53,6 +54,16 @@ var (
} }
) )
const (
// headerKeyPrefer is used to set query preferences
headerKeyPrefer = "Prefer"
// maxPageSizeHeaderFmt is used to indicate max page size
// preferences
maxPageSizeHeaderFmt = "odata.maxpagesize=%d"
// deltaMaxPageSize is the max page size to use for delta queries
deltaMaxPageSize = 200
)
// ----------------------------------------------------------------------- // -----------------------------------------------------------------------
// exchange.Query Option Section // exchange.Query Option Section
// These functions can be used to filter a response on M365 // These functions can be used to filter a response on M365
@ -71,8 +82,10 @@ func optionsForFolderMessagesDelta(
requestParameters := &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetQueryParameters{ requestParameters := &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetQueryParameters{
Select: selecting, Select: selecting,
} }
options := &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetRequestConfiguration{ options := &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetRequestConfiguration{
QueryParameters: requestParameters, QueryParameters: requestParameters,
Headers: buildDeltaRequestHeaders(),
} }
return options, nil return options, nil
@ -218,6 +231,7 @@ func optionsForContactFoldersItemDelta(
options := &users.ItemContactFoldersItemContactsDeltaRequestBuilderGetRequestConfiguration{ options := &users.ItemContactFoldersItemContactsDeltaRequestBuilderGetRequestConfiguration{
QueryParameters: requestParameters, QueryParameters: requestParameters,
Headers: buildDeltaRequestHeaders(),
} }
return options, nil return options, nil
@ -275,3 +289,11 @@ func buildOptions(fields []string, allowed map[string]struct{}) ([]string, error
return append(returnedOptions, fields...), nil return append(returnedOptions, fields...), nil
} }
// buildDeltaRequestHeaders returns the headers we add to delta page requests
func buildDeltaRequestHeaders() *abstractions.RequestHeaders {
headers := abstractions.NewRequestHeaders()
headers.Add(headerKeyPrefer, fmt.Sprintf(maxPageSizeHeaderFmt, deltaMaxPageSize))
return headers
}

View File

@ -8,6 +8,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/graph/api" "github.com/alcionai/corso/src/internal/connector/graph/api"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/pkg/logger"
) )
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -64,6 +65,9 @@ func getItemsAddedAndRemovedFromContainer(
deltaURL string deltaURL string
) )
itemCount := 0
page := 0
for { for {
// get the next page of data, check for standard errors // get the next page of data, check for standard errors
resp, err := pager.getPage(ctx) resp, err := pager.getPage(ctx)
@ -82,6 +86,14 @@ func getItemsAddedAndRemovedFromContainer(
return nil, nil, "", err return nil, nil, "", err
} }
itemCount += len(items)
page++
// Log every ~1000 items (the page size we use is 200)
if page%5 == 0 {
logger.Ctx(ctx).Infow("queried items", "count", itemCount)
}
// iterate through the items in the page // iterate through the items in the page
for _, item := range items { for _, item := range items {
// if the additional data conains a `@removed` key, the value will either // if the additional data conains a `@removed` key, the value will either
@ -114,5 +126,7 @@ func getItemsAddedAndRemovedFromContainer(
pager.setNext(nextLink) pager.setNext(nextLink)
} }
logger.Ctx(ctx).Infow("completed enumeration", "count", itemCount)
return addedIDs, removedIDs, deltaURL, nil return addedIDs, removedIDs, deltaURL, nil
} }

View File

@ -8,6 +8,7 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/connector/uploadsession" "github.com/alcionai/corso/src/internal/connector/uploadsession"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
) )
@ -44,8 +45,11 @@ func uploadAttachment(
attachment models.Attachmentable, attachment models.Attachmentable,
) error { ) error {
logger.Ctx(ctx).Debugf("uploading attachment with size %d", *attachment.GetSize()) logger.Ctx(ctx).Debugf("uploading attachment with size %d", *attachment.GetSize())
attachmentType := attachmentType(attachment)
var (
attachmentType = attachmentType(attachment)
err error
)
// Reference attachments that are inline() do not need to be recreated. The contents are part of the body. // Reference attachments that are inline() do not need to be recreated. The contents are part of the body.
if attachmentType == models.REFERENCE_ATTACHMENTTYPE && if attachmentType == models.REFERENCE_ATTACHMENTTYPE &&
attachment.GetIsInline() != nil && *attachment.GetIsInline() { attachment.GetIsInline() != nil && *attachment.GetIsInline() {
@ -53,6 +57,30 @@ func uploadAttachment(
return nil return nil
} }
// item Attachments to be skipped until the completion of Issue #2353
if attachmentType == models.ITEM_ATTACHMENTTYPE {
prev := attachment
attachment, err = support.ToItemAttachment(attachment)
if err != nil {
name := ""
if prev.GetName() != nil {
name = *prev.GetName()
}
// TODO: Update to support PII protection
logger.Ctx(ctx).Infow("item attachment uploads are not supported ",
"err", err,
"attachment_name", name,
"attachment_type", attachmentType,
"internal_item_type", getItemAttachmentItemType(prev),
"attachment_id", *prev.GetId(),
)
return nil
}
}
// For Item/Reference attachments *or* file attachments < 3MB, use the attachments endpoint // For Item/Reference attachments *or* file attachments < 3MB, use the attachments endpoint
if attachmentType != models.FILE_ATTACHMENTTYPE || *attachment.GetSize() < largeAttachmentSize { if attachmentType != models.FILE_ATTACHMENTTYPE || *attachment.GetSize() < largeAttachmentSize {
err := uploader.uploadSmallAttachment(ctx, attachment) err := uploader.uploadSmallAttachment(ctx, attachment)
@ -90,3 +118,19 @@ func uploadLargeAttachment(ctx context.Context, uploader attachmentUploadable,
return nil return nil
} }
func getItemAttachmentItemType(query models.Attachmentable) string {
empty := ""
attachment, ok := query.(models.ItemAttachmentable)
if !ok {
return empty
}
item := attachment.GetItem()
if item.GetOdataType() == nil {
return empty
}
return *item.GetOdataType()
}

View File

@ -501,10 +501,11 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
directoryCaches = make(map[path.CategoryType]graph.ContainerResolver) directoryCaches = make(map[path.CategoryType]graph.ContainerResolver)
folderName = tester.DefaultTestRestoreDestination().ContainerName folderName = tester.DefaultTestRestoreDestination().ContainerName
tests = []struct { tests = []struct {
name string name string
pathFunc1 func(t *testing.T) path.Path pathFunc1 func(t *testing.T) path.Path
pathFunc2 func(t *testing.T) path.Path pathFunc2 func(t *testing.T) path.Path
category path.CategoryType category path.CategoryType
folderPrefix string
}{ }{
{ {
name: "Mail Cache Test", name: "Mail Cache Test",
@ -587,6 +588,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
require.NoError(t, err) require.NoError(t, err)
return aPath return aPath
}, },
folderPrefix: calendarOthersFolder,
}, },
} }
) )
@ -617,8 +619,9 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
_, err = resolver.IDToPath(ctx, secondID) _, err = resolver.IDToPath(ctx, secondID)
require.NoError(t, err) require.NoError(t, err)
_, ok := resolver.PathInCache(folderName) p := stdpath.Join(test.folderPrefix, folderName)
require.True(t, ok) _, ok := resolver.PathInCache(p)
require.True(t, ok, "looking for path in cache: %s", p)
}) })
} }
} }

View File

@ -537,9 +537,9 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
}, },
{ {
name: "Birthday Calendar", name: "Birthday Calendar",
expected: "Birthdays", expected: calendarOthersFolder + "/Birthdays",
scope: selectors.NewExchangeBackup(users).EventCalendars( scope: selectors.NewExchangeBackup(users).EventCalendars(
[]string{"Birthdays"}, []string{calendarOthersFolder + "/Birthdays"},
selectors.PrefixMatch(), selectors.PrefixMatch(),
)[0], )[0],
}, },

View File

@ -64,7 +64,15 @@ func (ecc *eventCalendarCache) Populate(
return errors.Wrap(err, "initializing") return errors.Wrap(err, "initializing")
} }
err := ecc.enumer.EnumerateContainers(ctx, ecc.userID, "", ecc.addFolder) err := ecc.enumer.EnumerateContainers(
ctx,
ecc.userID,
"",
func(cf graph.CacheFolder) error {
cf.SetPath(path.Builder{}.Append(calendarOthersFolder, *cf.GetDisplayName()))
return ecc.addFolder(cf)
},
)
if err != nil { if err != nil {
return errors.Wrap(err, "enumerating containers") return errors.Wrap(err, "enumerating containers")
} }
@ -83,7 +91,7 @@ func (ecc *eventCalendarCache) AddToCache(ctx context.Context, f graph.Container
return errors.Wrap(err, "validating container") return errors.Wrap(err, "validating container")
} }
temp := graph.NewCacheFolder(f, path.Builder{}.Append(*f.GetDisplayName())) temp := graph.NewCacheFolder(f, path.Builder{}.Append(calendarOthersFolder, *f.GetDisplayName()))
if err := ecc.addFolder(temp); err != nil { if err := ecc.addFolder(temp); err != nil {
return errors.Wrap(err, "adding container") return errors.Wrap(err, "adding container")

View File

@ -38,4 +38,5 @@ const (
rootFolderAlias = "msgfolderroot" rootFolderAlias = "msgfolderroot"
DefaultContactFolder = "Contacts" DefaultContactFolder = "Contacts"
DefaultCalendar = "Calendar" DefaultCalendar = "Calendar"
calendarOthersFolder = "Other Calendars"
) )

View File

@ -175,6 +175,30 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
return *folder.GetId() return *folder.GetId()
}, },
}, },
{
name: "Test Mail: Item Attachment_Event",
bytes: mockconnector.GetMockMessageWithItemAttachmentEvent("Event Item Attachment"),
category: path.EmailCategory,
destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreEventItemAttachment: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err)
return *folder.GetId()
},
},
{ // Restore will upload the Message without uploading the attachment
name: "Test Mail: Item Attachment_NestedEvent",
bytes: mockconnector.GetMockMessageWithNestedItemAttachmentEvent("Nested Item Attachment"),
category: path.EmailCategory,
destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreNestedEventItemAttachment: " + common.FormatSimpleDateTime(now)
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err)
return *folder.GetId()
},
},
{ {
name: "Test Mail: One Large Attachment", name: "Test Mail: One Large Attachment",
bytes: mockconnector.GetMockMessageWithLargeAttachment("Restore Large Attachment"), bytes: mockconnector.GetMockMessageWithLargeAttachment("Restore Large Attachment"),
@ -266,7 +290,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
userID, userID,
) )
assert.NoError(t, err, support.ConnectorStackErrorTrace(err)) assert.NoError(t, err, support.ConnectorStackErrorTrace(err))
assert.NotNil(t, info, "item info is populated") assert.NotNil(t, info, "item info was not populated")
assert.NoError(t, deleters[test.category].DeleteContainer(ctx, userID, destination)) assert.NoError(t, deleters[test.category].DeleteContainer(ctx, userID, destination))
}) })
} }

View File

@ -189,23 +189,32 @@ func RestoreMailMessage(
// 1st: No transmission // 1st: No transmission
// 2nd: Send Date // 2nd: Send Date
// 3rd: Recv Date // 3rd: Recv Date
svlep := make([]models.SingleValueLegacyExtendedPropertyable, 0)
sv1 := models.NewSingleValueLegacyExtendedProperty() sv1 := models.NewSingleValueLegacyExtendedProperty()
sv1.SetId(&valueID) sv1.SetId(&valueID)
sv1.SetValue(&enableValue) sv1.SetValue(&enableValue)
svlep = append(svlep, sv1)
sv2 := models.NewSingleValueLegacyExtendedProperty() if clone.GetSentDateTime() != nil {
sendPropertyValue := common.FormatLegacyTime(*clone.GetSentDateTime()) sv2 := models.NewSingleValueLegacyExtendedProperty()
sendPropertyTag := MailSendDateTimeOverrideProperty sendPropertyValue := common.FormatLegacyTime(*clone.GetSentDateTime())
sv2.SetId(&sendPropertyTag) sendPropertyTag := MailSendDateTimeOverrideProperty
sv2.SetValue(&sendPropertyValue) sv2.SetId(&sendPropertyTag)
sv2.SetValue(&sendPropertyValue)
sv3 := models.NewSingleValueLegacyExtendedProperty() svlep = append(svlep, sv2)
recvPropertyValue := common.FormatLegacyTime(*clone.GetReceivedDateTime()) }
recvPropertyTag := MailReceiveDateTimeOverriveProperty
sv3.SetId(&recvPropertyTag) if clone.GetReceivedDateTime() != nil {
sv3.SetValue(&recvPropertyValue) sv3 := models.NewSingleValueLegacyExtendedProperty()
recvPropertyValue := common.FormatLegacyTime(*clone.GetReceivedDateTime())
recvPropertyTag := MailReceiveDateTimeOverriveProperty
sv3.SetId(&recvPropertyTag)
sv3.SetValue(&recvPropertyValue)
svlep = append(svlep, sv3)
}
svlep := []models.SingleValueLegacyExtendedPropertyable{sv1, sv2, sv3}
clone.SetSingleValueExtendedProperties(svlep) clone.SetSingleValueExtendedProperties(svlep)
// Switch workflow based on collision policy // Switch workflow based on collision policy
@ -248,10 +257,9 @@ func SendMailToBackStore(
errs error errs error
) )
if *message.GetHasAttachments() { // Item.Attachments --> HasAttachments doesn't always have a value populated when deserialized
attached = message.GetAttachments() attached = message.GetAttachments()
message.SetAttachments([]models.Attachmentable{}) message.SetAttachments([]models.Attachmentable{})
}
sentMessage, err := service.Client().UsersById(user).MailFoldersById(destination).Messages().Post(ctx, message, nil) sentMessage, err := service.Client().UsersById(user).MailFoldersById(destination).Messages().Post(ctx, message, nil)
if err != nil { if err != nil {
@ -637,7 +645,11 @@ func establishEventsRestoreLocation(
user string, user string,
isNewCache bool, isNewCache bool,
) (string, error) { ) (string, error) {
cached, ok := ecc.PathInCache(folders[0]) // Need to prefix with the "Other Calendars" folder so lookup happens properly.
cached, ok := ecc.PathInCache(path.Builder{}.Append(
calendarOthersFolder,
folders[0],
).String())
if ok { if ok {
return cached, nil return cached, nil
} }

View File

@ -1,13 +1,14 @@
package betasdk package betasdk
import ( import (
i1a3c1a5501c5e41b7fd169f2d4c768dce9b096ac28fb5431bf02afcc57295411 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/sites"
absser "github.com/microsoft/kiota-abstractions-go" absser "github.com/microsoft/kiota-abstractions-go"
kioser "github.com/microsoft/kiota-abstractions-go/serialization" kioser "github.com/microsoft/kiota-abstractions-go/serialization"
kform "github.com/microsoft/kiota-serialization-form-go" kform "github.com/microsoft/kiota-serialization-form-go"
kw "github.com/microsoft/kiota-serialization-json-go" kw "github.com/microsoft/kiota-serialization-json-go"
ktext "github.com/microsoft/kiota-serialization-text-go" ktext "github.com/microsoft/kiota-serialization-text-go"
msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go" msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
i1a3c1a5501c5e41b7fd169f2d4c768dce9b096ac28fb5431bf02afcc57295411 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/sites"
) )
// BetaClient the main entry point of the SDK, exposes the configuration and the fluent API. // BetaClient the main entry point of the SDK, exposes the configuration and the fluent API.

View File

@ -1,52 +1,54 @@
package models package models
import ( import (
"errors" "errors"
) )
// Provides operations to call the remove method. // Provides operations to call the remove method.
type HorizontalSectionLayoutType int type HorizontalSectionLayoutType int
const ( const (
NONE_HORIZONTALSECTIONLAYOUTTYPE HorizontalSectionLayoutType = iota NONE_HORIZONTALSECTIONLAYOUTTYPE HorizontalSectionLayoutType = iota
ONECOLUMN_HORIZONTALSECTIONLAYOUTTYPE ONECOLUMN_HORIZONTALSECTIONLAYOUTTYPE
TWOCOLUMNS_HORIZONTALSECTIONLAYOUTTYPE TWOCOLUMNS_HORIZONTALSECTIONLAYOUTTYPE
THREECOLUMNS_HORIZONTALSECTIONLAYOUTTYPE THREECOLUMNS_HORIZONTALSECTIONLAYOUTTYPE
ONETHIRDLEFTCOLUMN_HORIZONTALSECTIONLAYOUTTYPE ONETHIRDLEFTCOLUMN_HORIZONTALSECTIONLAYOUTTYPE
ONETHIRDRIGHTCOLUMN_HORIZONTALSECTIONLAYOUTTYPE ONETHIRDRIGHTCOLUMN_HORIZONTALSECTIONLAYOUTTYPE
FULLWIDTH_HORIZONTALSECTIONLAYOUTTYPE FULLWIDTH_HORIZONTALSECTIONLAYOUTTYPE
UNKNOWNFUTUREVALUE_HORIZONTALSECTIONLAYOUTTYPE UNKNOWNFUTUREVALUE_HORIZONTALSECTIONLAYOUTTYPE
) )
func (i HorizontalSectionLayoutType) String() string { func (i HorizontalSectionLayoutType) String() string {
return []string{"none", "oneColumn", "twoColumns", "threeColumns", "oneThirdLeftColumn", "oneThirdRightColumn", "fullWidth", "unknownFutureValue"}[i] return []string{"none", "oneColumn", "twoColumns", "threeColumns", "oneThirdLeftColumn", "oneThirdRightColumn", "fullWidth", "unknownFutureValue"}[i]
} }
func ParseHorizontalSectionLayoutType(v string) (interface{}, error) { func ParseHorizontalSectionLayoutType(v string) (interface{}, error) {
result := NONE_HORIZONTALSECTIONLAYOUTTYPE result := NONE_HORIZONTALSECTIONLAYOUTTYPE
switch v { switch v {
case "none": case "none":
result = NONE_HORIZONTALSECTIONLAYOUTTYPE result = NONE_HORIZONTALSECTIONLAYOUTTYPE
case "oneColumn": case "oneColumn":
result = ONECOLUMN_HORIZONTALSECTIONLAYOUTTYPE result = ONECOLUMN_HORIZONTALSECTIONLAYOUTTYPE
case "twoColumns": case "twoColumns":
result = TWOCOLUMNS_HORIZONTALSECTIONLAYOUTTYPE result = TWOCOLUMNS_HORIZONTALSECTIONLAYOUTTYPE
case "threeColumns": case "threeColumns":
result = THREECOLUMNS_HORIZONTALSECTIONLAYOUTTYPE result = THREECOLUMNS_HORIZONTALSECTIONLAYOUTTYPE
case "oneThirdLeftColumn": case "oneThirdLeftColumn":
result = ONETHIRDLEFTCOLUMN_HORIZONTALSECTIONLAYOUTTYPE result = ONETHIRDLEFTCOLUMN_HORIZONTALSECTIONLAYOUTTYPE
case "oneThirdRightColumn": case "oneThirdRightColumn":
result = ONETHIRDRIGHTCOLUMN_HORIZONTALSECTIONLAYOUTTYPE result = ONETHIRDRIGHTCOLUMN_HORIZONTALSECTIONLAYOUTTYPE
case "fullWidth": case "fullWidth":
result = FULLWIDTH_HORIZONTALSECTIONLAYOUTTYPE result = FULLWIDTH_HORIZONTALSECTIONLAYOUTTYPE
case "unknownFutureValue": case "unknownFutureValue":
result = UNKNOWNFUTUREVALUE_HORIZONTALSECTIONLAYOUTTYPE result = UNKNOWNFUTUREVALUE_HORIZONTALSECTIONLAYOUTTYPE
default: default:
return 0, errors.New("Unknown HorizontalSectionLayoutType value: " + v) return 0, errors.New("Unknown HorizontalSectionLayoutType value: " + v)
} }
return &result, nil return &result, nil
} }
func SerializeHorizontalSectionLayoutType(values []HorizontalSectionLayoutType) []string { func SerializeHorizontalSectionLayoutType(values []HorizontalSectionLayoutType) []string {
result := make([]string, len(values)) result := make([]string, len(values))
for i, v := range values { for i, v := range values {
result[i] = v.String() result[i] = v.String()
} }
return result return result
} }

View File

@ -1,123 +1,134 @@
package models package models
import ( import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization" i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
) )
// MetaDataKeyStringPair // MetaDataKeyStringPair
type MetaDataKeyStringPair struct { type MetaDataKeyStringPair struct {
// Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. // Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
additionalData map[string]interface{} additionalData map[string]interface{}
// Key of the meta data. // Key of the meta data.
key *string key *string
// The OdataType property // The OdataType property
odataType *string odataType *string
// Value of the meta data. // Value of the meta data.
value *string value *string
} }
// NewMetaDataKeyStringPair instantiates a new metaDataKeyStringPair and sets the default values. // NewMetaDataKeyStringPair instantiates a new metaDataKeyStringPair and sets the default values.
func NewMetaDataKeyStringPair()(*MetaDataKeyStringPair) { func NewMetaDataKeyStringPair() *MetaDataKeyStringPair {
m := &MetaDataKeyStringPair{ m := &MetaDataKeyStringPair{}
} m.SetAdditionalData(make(map[string]interface{}))
m.SetAdditionalData(make(map[string]interface{})); return m
return m
} }
// CreateMetaDataKeyStringPairFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value // CreateMetaDataKeyStringPairFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value
func CreateMetaDataKeyStringPairFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) { func CreateMetaDataKeyStringPairFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) (i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
return NewMetaDataKeyStringPair(), nil return NewMetaDataKeyStringPair(), nil
} }
// GetAdditionalData gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. // GetAdditionalData gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
func (m *MetaDataKeyStringPair) GetAdditionalData()(map[string]interface{}) { func (m *MetaDataKeyStringPair) GetAdditionalData() map[string]interface{} {
return m.additionalData return m.additionalData
} }
// GetFieldDeserializers the deserialization information for the current model // GetFieldDeserializers the deserialization information for the current model
func (m *MetaDataKeyStringPair) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) { func (m *MetaDataKeyStringPair) GetFieldDeserializers() map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
res := make(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) res := make(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error)
res["key"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { res["key"] = func(n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetStringValue() val, err := n.GetStringValue()
if err != nil { if err != nil {
return err return err
} }
if val != nil { if val != nil {
m.SetKey(val) m.SetKey(val)
} }
return nil return nil
} }
res["@odata.type"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { res["@odata.type"] = func(n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetStringValue() val, err := n.GetStringValue()
if err != nil { if err != nil {
return err return err
} }
if val != nil { if val != nil {
m.SetOdataType(val) m.SetOdataType(val)
} }
return nil return nil
} }
res["value"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { res["value"] = func(n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetStringValue() val, err := n.GetStringValue()
if err != nil { if err != nil {
return err return err
} }
if val != nil { if val != nil {
m.SetValue(val) m.SetValue(val)
} }
return nil return nil
} }
return res return res
} }
// GetKey gets the key property value. Key of the meta data. // GetKey gets the key property value. Key of the meta data.
func (m *MetaDataKeyStringPair) GetKey()(*string) { func (m *MetaDataKeyStringPair) GetKey() *string {
return m.key return m.key
} }
// GetOdataType gets the @odata.type property value. The OdataType property // GetOdataType gets the @odata.type property value. The OdataType property
func (m *MetaDataKeyStringPair) GetOdataType()(*string) { func (m *MetaDataKeyStringPair) GetOdataType() *string {
return m.odataType return m.odataType
} }
// GetValue gets the value property value. Value of the meta data. // GetValue gets the value property value. Value of the meta data.
func (m *MetaDataKeyStringPair) GetValue()(*string) { func (m *MetaDataKeyStringPair) GetValue() *string {
return m.value return m.value
} }
// Serialize serializes information the current object // Serialize serializes information the current object
func (m *MetaDataKeyStringPair) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) { func (m *MetaDataKeyStringPair) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter) error {
{ {
err := writer.WriteStringValue("key", m.GetKey()) err := writer.WriteStringValue("key", m.GetKey())
if err != nil { if err != nil {
return err return err
} }
} }
{ {
err := writer.WriteStringValue("@odata.type", m.GetOdataType()) err := writer.WriteStringValue("@odata.type", m.GetOdataType())
if err != nil { if err != nil {
return err return err
} }
} }
{ {
err := writer.WriteStringValue("value", m.GetValue()) err := writer.WriteStringValue("value", m.GetValue())
if err != nil { if err != nil {
return err return err
} }
} }
{ {
err := writer.WriteAdditionalData(m.GetAdditionalData()) err := writer.WriteAdditionalData(m.GetAdditionalData())
if err != nil { if err != nil {
return err return err
} }
} }
return nil return nil
} }
// SetAdditionalData sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. // SetAdditionalData sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
func (m *MetaDataKeyStringPair) SetAdditionalData(value map[string]interface{})() { func (m *MetaDataKeyStringPair) SetAdditionalData(value map[string]interface{}) {
m.additionalData = value m.additionalData = value
} }
// SetKey sets the key property value. Key of the meta data. // SetKey sets the key property value. Key of the meta data.
func (m *MetaDataKeyStringPair) SetKey(value *string)() { func (m *MetaDataKeyStringPair) SetKey(value *string) {
m.key = value m.key = value
} }
// SetOdataType sets the @odata.type property value. The OdataType property // SetOdataType sets the @odata.type property value. The OdataType property
func (m *MetaDataKeyStringPair) SetOdataType(value *string)() { func (m *MetaDataKeyStringPair) SetOdataType(value *string) {
m.odataType = value m.odataType = value
} }
// SetValue sets the value property value. Value of the meta data. // SetValue sets the value property value. Value of the meta data.
func (m *MetaDataKeyStringPair) SetValue(value *string)() { func (m *MetaDataKeyStringPair) SetValue(value *string) {
m.value = value m.value = value
} }

View File

@ -1,17 +1,17 @@
package models package models
import ( import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization" i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
) )
// MetaDataKeyStringPairable // MetaDataKeyStringPairable
type MetaDataKeyStringPairable interface { type MetaDataKeyStringPairable interface {
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.AdditionalDataHolder i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.AdditionalDataHolder
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable
GetKey()(*string) GetKey() *string
GetOdataType()(*string) GetOdataType() *string
GetValue()(*string) GetValue() *string
SetKey(value *string)() SetKey(value *string)
SetOdataType(value *string)() SetOdataType(value *string)
SetValue(value *string)() SetValue(value *string)
} }

View File

@ -1,40 +1,42 @@
package models package models
import ( import (
"errors" "errors"
) )
// PageLayoutType describes the layout of a SharePoint site page.
// NOTE(review): the generated comment ("Provides operations to call the
// remove method.") did not describe this enum and has been corrected.
type PageLayoutType int

const (
    MICROSOFTRESERVED_PAGELAYOUTTYPE PageLayoutType = iota
    ARTICLE_PAGELAYOUTTYPE
    HOME_PAGELAYOUTTYPE
    UNKNOWNFUTUREVALUE_PAGELAYOUTTYPE
)

// String returns the wire-format name of the value. Values outside the
// known range yield the empty string; the generated code indexed the
// slice unchecked and panicked on unknown values.
func (i PageLayoutType) String() string {
    names := []string{"microsoftReserved", "article", "home", "unknownFutureValue"}
    if i < 0 || int(i) >= len(names) {
        return ""
    }
    return names[i]
}

// ParsePageLayoutType maps a wire-format string onto a *PageLayoutType.
// Unrecognized input returns a non-nil error (and an int 0 result, kept
// for compatibility with the generated contract).
func ParsePageLayoutType(v string) (interface{}, error) {
    result := MICROSOFTRESERVED_PAGELAYOUTTYPE
    switch v {
    case "microsoftReserved":
        result = MICROSOFTRESERVED_PAGELAYOUTTYPE
    case "article":
        result = ARTICLE_PAGELAYOUTTYPE
    case "home":
        result = HOME_PAGELAYOUTTYPE
    case "unknownFutureValue":
        result = UNKNOWNFUTUREVALUE_PAGELAYOUTTYPE
    default:
        return 0, errors.New("Unknown PageLayoutType value: " + v)
    }
    return &result, nil
}

// SerializePageLayoutType converts a slice of enum values to their
// wire-format string names.
func SerializePageLayoutType(values []PageLayoutType) []string {
    result := make([]string, len(values))
    for i, v := range values {
        result[i] = v.String()
    }
    return result
}

View File

@ -1,40 +1,42 @@
package models package models
import ( import (
"errors" "errors"
) )
// PagePromotionType describes how a SharePoint page is promoted
// (regular page vs. news post).
// NOTE(review): the generated comment ("Provides operations to call the
// remove method.") did not describe this enum and has been corrected.
type PagePromotionType int

const (
    MICROSOFTRESERVED_PAGEPROMOTIONTYPE PagePromotionType = iota
    PAGE_PAGEPROMOTIONTYPE
    NEWSPOST_PAGEPROMOTIONTYPE
    UNKNOWNFUTUREVALUE_PAGEPROMOTIONTYPE
)

// String returns the wire-format name of the value. Values outside the
// known range yield the empty string; the generated code indexed the
// slice unchecked and panicked on unknown values.
func (i PagePromotionType) String() string {
    names := []string{"microsoftReserved", "page", "newsPost", "unknownFutureValue"}
    if i < 0 || int(i) >= len(names) {
        return ""
    }
    return names[i]
}

// ParsePagePromotionType maps a wire-format string onto a
// *PagePromotionType. Unrecognized input returns a non-nil error.
func ParsePagePromotionType(v string) (interface{}, error) {
    result := MICROSOFTRESERVED_PAGEPROMOTIONTYPE
    switch v {
    case "microsoftReserved":
        result = MICROSOFTRESERVED_PAGEPROMOTIONTYPE
    case "page":
        result = PAGE_PAGEPROMOTIONTYPE
    case "newsPost":
        result = NEWSPOST_PAGEPROMOTIONTYPE
    case "unknownFutureValue":
        result = UNKNOWNFUTUREVALUE_PAGEPROMOTIONTYPE
    default:
        return 0, errors.New("Unknown PagePromotionType value: " + v)
    }
    return &result, nil
}

// SerializePagePromotionType converts a slice of enum values to their
// wire-format string names.
func SerializePagePromotionType(values []PagePromotionType) []string {
    result := make([]string, len(values))
    for i, v := range values {
        result[i] = v.String()
    }
    return result
}

View File

@ -1,123 +1,134 @@
package models package models
import ( import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization" i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
) )
// PublicationFacet // PublicationFacet
type PublicationFacet struct { type PublicationFacet struct {
// Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. // Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
additionalData map[string]interface{} additionalData map[string]interface{}
// The state of publication for this document. Either published or checkout. Read-only. // The state of publication for this document. Either published or checkout. Read-only.
level *string level *string
// The OdataType property // The OdataType property
odataType *string odataType *string
// The unique identifier for the version that is visible to the current caller. Read-only. // The unique identifier for the version that is visible to the current caller. Read-only.
versionId *string versionId *string
} }
// NewPublicationFacet instantiates a new publicationFacet and sets the default values. // NewPublicationFacet instantiates a new publicationFacet and sets the default values.
func NewPublicationFacet()(*PublicationFacet) { func NewPublicationFacet() *PublicationFacet {
m := &PublicationFacet{ m := &PublicationFacet{}
} m.SetAdditionalData(make(map[string]interface{}))
m.SetAdditionalData(make(map[string]interface{})); return m
return m
} }
// CreatePublicationFacetFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value // CreatePublicationFacetFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value
func CreatePublicationFacetFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) { func CreatePublicationFacetFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) (i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
return NewPublicationFacet(), nil return NewPublicationFacet(), nil
} }
// GetAdditionalData gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. // GetAdditionalData gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
func (m *PublicationFacet) GetAdditionalData()(map[string]interface{}) { func (m *PublicationFacet) GetAdditionalData() map[string]interface{} {
return m.additionalData return m.additionalData
} }
// GetFieldDeserializers the deserialization information for the current model // GetFieldDeserializers the deserialization information for the current model
func (m *PublicationFacet) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) { func (m *PublicationFacet) GetFieldDeserializers() map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
res := make(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) res := make(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error)
res["level"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { res["level"] = func(n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetStringValue() val, err := n.GetStringValue()
if err != nil { if err != nil {
return err return err
} }
if val != nil { if val != nil {
m.SetLevel(val) m.SetLevel(val)
} }
return nil return nil
} }
res["@odata.type"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { res["@odata.type"] = func(n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetStringValue() val, err := n.GetStringValue()
if err != nil { if err != nil {
return err return err
} }
if val != nil { if val != nil {
m.SetOdataType(val) m.SetOdataType(val)
} }
return nil return nil
} }
res["versionId"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { res["versionId"] = func(n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetStringValue() val, err := n.GetStringValue()
if err != nil { if err != nil {
return err return err
} }
if val != nil { if val != nil {
m.SetVersionId(val) m.SetVersionId(val)
} }
return nil return nil
} }
return res return res
} }
// GetLevel gets the level property value. The state of publication for this document. Either published or checkout. Read-only. // GetLevel gets the level property value. The state of publication for this document. Either published or checkout. Read-only.
func (m *PublicationFacet) GetLevel()(*string) { func (m *PublicationFacet) GetLevel() *string {
return m.level return m.level
} }
// GetOdataType gets the @odata.type property value. The OdataType property // GetOdataType gets the @odata.type property value. The OdataType property
func (m *PublicationFacet) GetOdataType()(*string) { func (m *PublicationFacet) GetOdataType() *string {
return m.odataType return m.odataType
} }
// GetVersionId gets the versionId property value. The unique identifier for the version that is visible to the current caller. Read-only. // GetVersionId gets the versionId property value. The unique identifier for the version that is visible to the current caller. Read-only.
func (m *PublicationFacet) GetVersionId()(*string) { func (m *PublicationFacet) GetVersionId() *string {
return m.versionId return m.versionId
} }
// Serialize serializes information the current object // Serialize serializes information the current object
func (m *PublicationFacet) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) { func (m *PublicationFacet) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter) error {
{ {
err := writer.WriteStringValue("level", m.GetLevel()) err := writer.WriteStringValue("level", m.GetLevel())
if err != nil { if err != nil {
return err return err
} }
} }
{ {
err := writer.WriteStringValue("@odata.type", m.GetOdataType()) err := writer.WriteStringValue("@odata.type", m.GetOdataType())
if err != nil { if err != nil {
return err return err
} }
} }
{ {
err := writer.WriteStringValue("versionId", m.GetVersionId()) err := writer.WriteStringValue("versionId", m.GetVersionId())
if err != nil { if err != nil {
return err return err
} }
} }
{ {
err := writer.WriteAdditionalData(m.GetAdditionalData()) err := writer.WriteAdditionalData(m.GetAdditionalData())
if err != nil { if err != nil {
return err return err
} }
} }
return nil return nil
} }
// SetAdditionalData sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. // SetAdditionalData sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
func (m *PublicationFacet) SetAdditionalData(value map[string]interface{})() { func (m *PublicationFacet) SetAdditionalData(value map[string]interface{}) {
m.additionalData = value m.additionalData = value
} }
// SetLevel sets the level property value. The state of publication for this document. Either published or checkout. Read-only. // SetLevel sets the level property value. The state of publication for this document. Either published or checkout. Read-only.
func (m *PublicationFacet) SetLevel(value *string)() { func (m *PublicationFacet) SetLevel(value *string) {
m.level = value m.level = value
} }
// SetOdataType sets the @odata.type property value. The OdataType property // SetOdataType sets the @odata.type property value. The OdataType property
func (m *PublicationFacet) SetOdataType(value *string)() { func (m *PublicationFacet) SetOdataType(value *string) {
m.odataType = value m.odataType = value
} }
// SetVersionId sets the versionId property value. The unique identifier for the version that is visible to the current caller. Read-only. // SetVersionId sets the versionId property value. The unique identifier for the version that is visible to the current caller. Read-only.
func (m *PublicationFacet) SetVersionId(value *string)() { func (m *PublicationFacet) SetVersionId(value *string) {
m.versionId = value m.versionId = value
} }

View File

@ -1,17 +1,17 @@
package models package models
import ( import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization" i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
) )
// PublicationFacetable // PublicationFacetable
type PublicationFacetable interface { type PublicationFacetable interface {
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.AdditionalDataHolder i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.AdditionalDataHolder
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable
GetLevel()(*string) GetLevel() *string
GetOdataType()(*string) GetOdataType() *string
GetVersionId()(*string) GetVersionId() *string
SetLevel(value *string)() SetLevel(value *string)
SetOdataType(value *string)() SetOdataType(value *string)
SetVersionId(value *string)() SetVersionId(value *string)
} }

View File

@ -1,149 +1,162 @@
package models package models
import ( import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization" i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
) )
// ReactionsFacet // ReactionsFacet
type ReactionsFacet struct { type ReactionsFacet struct {
// Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. // Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
additionalData map[string]interface{} additionalData map[string]interface{}
// Count of comments. // Count of comments.
commentCount *int32 commentCount *int32
// Count of likes. // Count of likes.
likeCount *int32 likeCount *int32
// The OdataType property // The OdataType property
odataType *string odataType *string
// Count of shares. // Count of shares.
shareCount *int32 shareCount *int32
} }
// NewReactionsFacet instantiates a new reactionsFacet and sets the default values. // NewReactionsFacet instantiates a new reactionsFacet and sets the default values.
func NewReactionsFacet()(*ReactionsFacet) { func NewReactionsFacet() *ReactionsFacet {
m := &ReactionsFacet{ m := &ReactionsFacet{}
} m.SetAdditionalData(make(map[string]interface{}))
m.SetAdditionalData(make(map[string]interface{})); return m
return m
} }
// CreateReactionsFacetFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value // CreateReactionsFacetFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value
func CreateReactionsFacetFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) { func CreateReactionsFacetFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) (i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
return NewReactionsFacet(), nil return NewReactionsFacet(), nil
} }
// GetAdditionalData gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. // GetAdditionalData gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
func (m *ReactionsFacet) GetAdditionalData()(map[string]interface{}) { func (m *ReactionsFacet) GetAdditionalData() map[string]interface{} {
return m.additionalData return m.additionalData
} }
// GetCommentCount gets the commentCount property value. Count of comments. // GetCommentCount gets the commentCount property value. Count of comments.
func (m *ReactionsFacet) GetCommentCount()(*int32) { func (m *ReactionsFacet) GetCommentCount() *int32 {
return m.commentCount return m.commentCount
} }
// GetFieldDeserializers the deserialization information for the current model // GetFieldDeserializers the deserialization information for the current model
func (m *ReactionsFacet) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) { func (m *ReactionsFacet) GetFieldDeserializers() map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
res := make(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) res := make(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error)
res["commentCount"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { res["commentCount"] = func(n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetInt32Value() val, err := n.GetInt32Value()
if err != nil { if err != nil {
return err return err
} }
if val != nil { if val != nil {
m.SetCommentCount(val) m.SetCommentCount(val)
} }
return nil return nil
} }
res["likeCount"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { res["likeCount"] = func(n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetInt32Value() val, err := n.GetInt32Value()
if err != nil { if err != nil {
return err return err
} }
if val != nil { if val != nil {
m.SetLikeCount(val) m.SetLikeCount(val)
} }
return nil return nil
} }
res["@odata.type"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { res["@odata.type"] = func(n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetStringValue() val, err := n.GetStringValue()
if err != nil { if err != nil {
return err return err
} }
if val != nil { if val != nil {
m.SetOdataType(val) m.SetOdataType(val)
} }
return nil return nil
} }
res["shareCount"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { res["shareCount"] = func(n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetInt32Value() val, err := n.GetInt32Value()
if err != nil { if err != nil {
return err return err
} }
if val != nil { if val != nil {
m.SetShareCount(val) m.SetShareCount(val)
} }
return nil return nil
} }
return res return res
} }
// GetLikeCount gets the likeCount property value. Count of likes. // GetLikeCount gets the likeCount property value. Count of likes.
func (m *ReactionsFacet) GetLikeCount()(*int32) { func (m *ReactionsFacet) GetLikeCount() *int32 {
return m.likeCount return m.likeCount
} }
// GetOdataType gets the @odata.type property value. The OdataType property // GetOdataType gets the @odata.type property value. The OdataType property
func (m *ReactionsFacet) GetOdataType()(*string) { func (m *ReactionsFacet) GetOdataType() *string {
return m.odataType return m.odataType
} }
// GetShareCount gets the shareCount property value. Count of shares. // GetShareCount gets the shareCount property value. Count of shares.
func (m *ReactionsFacet) GetShareCount()(*int32) { func (m *ReactionsFacet) GetShareCount() *int32 {
return m.shareCount return m.shareCount
} }
// Serialize serializes information the current object // Serialize serializes information the current object
func (m *ReactionsFacet) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) { func (m *ReactionsFacet) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter) error {
{ {
err := writer.WriteInt32Value("commentCount", m.GetCommentCount()) err := writer.WriteInt32Value("commentCount", m.GetCommentCount())
if err != nil { if err != nil {
return err return err
} }
} }
{ {
err := writer.WriteInt32Value("likeCount", m.GetLikeCount()) err := writer.WriteInt32Value("likeCount", m.GetLikeCount())
if err != nil { if err != nil {
return err return err
} }
} }
{ {
err := writer.WriteStringValue("@odata.type", m.GetOdataType()) err := writer.WriteStringValue("@odata.type", m.GetOdataType())
if err != nil { if err != nil {
return err return err
} }
} }
{ {
err := writer.WriteInt32Value("shareCount", m.GetShareCount()) err := writer.WriteInt32Value("shareCount", m.GetShareCount())
if err != nil { if err != nil {
return err return err
} }
} }
{ {
err := writer.WriteAdditionalData(m.GetAdditionalData()) err := writer.WriteAdditionalData(m.GetAdditionalData())
if err != nil { if err != nil {
return err return err
} }
} }
return nil return nil
} }
// SetAdditionalData sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. // SetAdditionalData sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
func (m *ReactionsFacet) SetAdditionalData(value map[string]interface{})() { func (m *ReactionsFacet) SetAdditionalData(value map[string]interface{}) {
m.additionalData = value m.additionalData = value
} }
// SetCommentCount sets the commentCount property value. Count of comments. // SetCommentCount sets the commentCount property value. Count of comments.
func (m *ReactionsFacet) SetCommentCount(value *int32)() { func (m *ReactionsFacet) SetCommentCount(value *int32) {
m.commentCount = value m.commentCount = value
} }
// SetLikeCount sets the likeCount property value. Count of likes. // SetLikeCount sets the likeCount property value. Count of likes.
func (m *ReactionsFacet) SetLikeCount(value *int32)() { func (m *ReactionsFacet) SetLikeCount(value *int32) {
m.likeCount = value m.likeCount = value
} }
// SetOdataType sets the @odata.type property value. The OdataType property // SetOdataType sets the @odata.type property value. The OdataType property
func (m *ReactionsFacet) SetOdataType(value *string)() { func (m *ReactionsFacet) SetOdataType(value *string) {
m.odataType = value m.odataType = value
} }
// SetShareCount sets the shareCount property value. Count of shares. // SetShareCount sets the shareCount property value. Count of shares.
func (m *ReactionsFacet) SetShareCount(value *int32)() { func (m *ReactionsFacet) SetShareCount(value *int32) {
m.shareCount = value m.shareCount = value
} }

View File

@ -1,19 +1,19 @@
package models package models
import ( import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization" i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
) )
// ReactionsFacetable // ReactionsFacetable
type ReactionsFacetable interface { type ReactionsFacetable interface {
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.AdditionalDataHolder i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.AdditionalDataHolder
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable
GetCommentCount()(*int32) GetCommentCount() *int32
GetLikeCount()(*int32) GetLikeCount() *int32
GetOdataType()(*string) GetOdataType() *string
GetShareCount()(*int32) GetShareCount() *int32
SetCommentCount(value *int32)() SetCommentCount(value *int32)
SetLikeCount(value *int32)() SetLikeCount(value *int32)
SetOdataType(value *string)() SetOdataType(value *string)
SetShareCount(value *int32)() SetShareCount(value *int32)
} }

View File

@ -1,43 +1,45 @@
package models package models
import ( import (
"errors" "errors"
) )
// SectionEmphasisType describes the background emphasis applied to a
// SharePoint page section.
// NOTE(review): the generated comment ("Provides operations to call the
// remove method.") did not describe this enum and has been corrected.
type SectionEmphasisType int

const (
    NONE_SECTIONEMPHASISTYPE SectionEmphasisType = iota
    NEUTRAL_SECTIONEMPHASISTYPE
    SOFT_SECTIONEMPHASISTYPE
    STRONG_SECTIONEMPHASISTYPE
    UNKNOWNFUTUREVALUE_SECTIONEMPHASISTYPE
)

// String returns the wire-format name of the value. Values outside the
// known range yield the empty string; the generated code indexed the
// slice unchecked and panicked on unknown values.
func (i SectionEmphasisType) String() string {
    names := []string{"none", "neutral", "soft", "strong", "unknownFutureValue"}
    if i < 0 || int(i) >= len(names) {
        return ""
    }
    return names[i]
}

// ParseSectionEmphasisType maps a wire-format string onto a
// *SectionEmphasisType. Unrecognized input returns a non-nil error.
func ParseSectionEmphasisType(v string) (interface{}, error) {
    result := NONE_SECTIONEMPHASISTYPE
    switch v {
    case "none":
        result = NONE_SECTIONEMPHASISTYPE
    case "neutral":
        result = NEUTRAL_SECTIONEMPHASISTYPE
    case "soft":
        result = SOFT_SECTIONEMPHASISTYPE
    case "strong":
        result = STRONG_SECTIONEMPHASISTYPE
    case "unknownFutureValue":
        result = UNKNOWNFUTUREVALUE_SECTIONEMPHASISTYPE
    default:
        return 0, errors.New("Unknown SectionEmphasisType value: " + v)
    }
    return &result, nil
}

// SerializeSectionEmphasisType converts a slice of enum values to their
// wire-format string names.
func SerializeSectionEmphasisType(values []SectionEmphasisType) []string {
    result := make([]string, len(values))
    for i, v := range values {
        result[i] = v.String()
    }
    return result
}

View File

@ -1,37 +1,39 @@
package models package models
import ( import (
"errors" "errors"
) )
// Provides operations to call the remove method. // Provides operations to call the remove method.
type SiteAccessType int type SiteAccessType int
const ( const (
BLOCK_SITEACCESSTYPE SiteAccessType = iota BLOCK_SITEACCESSTYPE SiteAccessType = iota
FULL_SITEACCESSTYPE FULL_SITEACCESSTYPE
LIMITED_SITEACCESSTYPE LIMITED_SITEACCESSTYPE
) )
func (i SiteAccessType) String() string { func (i SiteAccessType) String() string {
return []string{"block", "full", "limited"}[i] return []string{"block", "full", "limited"}[i]
} }
func ParseSiteAccessType(v string) (interface{}, error) { func ParseSiteAccessType(v string) (interface{}, error) {
result := BLOCK_SITEACCESSTYPE result := BLOCK_SITEACCESSTYPE
switch v { switch v {
case "block": case "block":
result = BLOCK_SITEACCESSTYPE result = BLOCK_SITEACCESSTYPE
case "full": case "full":
result = FULL_SITEACCESSTYPE result = FULL_SITEACCESSTYPE
case "limited": case "limited":
result = LIMITED_SITEACCESSTYPE result = LIMITED_SITEACCESSTYPE
default: default:
return 0, errors.New("Unknown SiteAccessType value: " + v) return 0, errors.New("Unknown SiteAccessType value: " + v)
} }
return &result, nil return &result, nil
} }
func SerializeSiteAccessType(values []SiteAccessType) []string { func SerializeSiteAccessType(values []SiteAccessType) []string {
result := make([]string, len(values)) result := make([]string, len(values))
for i, v := range values { for i, v := range values {
result[i] = v.String() result[i] = v.String()
} }
return result return result
} }

View File

@ -2,7 +2,6 @@ package models
import ( import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization" i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
msmodel "github.com/microsoftgraph/msgraph-sdk-go/models" msmodel "github.com/microsoftgraph/msgraph-sdk-go/models"
) )

View File

@ -1,52 +1,54 @@
package models package models
import ( import (
"errors" "errors"
) )
// Provides operations to call the add method. // Provides operations to call the add method.
type SiteSecurityLevel int type SiteSecurityLevel int
const ( const (
// User Defined, default value, no intent. // User Defined, default value, no intent.
USERDEFINED_SITESECURITYLEVEL SiteSecurityLevel = iota USERDEFINED_SITESECURITYLEVEL SiteSecurityLevel = iota
// Low. // Low.
LOW_SITESECURITYLEVEL LOW_SITESECURITYLEVEL
// Medium-low. // Medium-low.
MEDIUMLOW_SITESECURITYLEVEL MEDIUMLOW_SITESECURITYLEVEL
// Medium. // Medium.
MEDIUM_SITESECURITYLEVEL MEDIUM_SITESECURITYLEVEL
// Medium-high. // Medium-high.
MEDIUMHIGH_SITESECURITYLEVEL MEDIUMHIGH_SITESECURITYLEVEL
// High. // High.
HIGH_SITESECURITYLEVEL HIGH_SITESECURITYLEVEL
) )
func (i SiteSecurityLevel) String() string { func (i SiteSecurityLevel) String() string {
return []string{"userDefined", "low", "mediumLow", "medium", "mediumHigh", "high"}[i] return []string{"userDefined", "low", "mediumLow", "medium", "mediumHigh", "high"}[i]
} }
func ParseSiteSecurityLevel(v string) (interface{}, error) { func ParseSiteSecurityLevel(v string) (interface{}, error) {
result := USERDEFINED_SITESECURITYLEVEL result := USERDEFINED_SITESECURITYLEVEL
switch v { switch v {
case "userDefined": case "userDefined":
result = USERDEFINED_SITESECURITYLEVEL result = USERDEFINED_SITESECURITYLEVEL
case "low": case "low":
result = LOW_SITESECURITYLEVEL result = LOW_SITESECURITYLEVEL
case "mediumLow": case "mediumLow":
result = MEDIUMLOW_SITESECURITYLEVEL result = MEDIUMLOW_SITESECURITYLEVEL
case "medium": case "medium":
result = MEDIUM_SITESECURITYLEVEL result = MEDIUM_SITESECURITYLEVEL
case "mediumHigh": case "mediumHigh":
result = MEDIUMHIGH_SITESECURITYLEVEL result = MEDIUMHIGH_SITESECURITYLEVEL
case "high": case "high":
result = HIGH_SITESECURITYLEVEL result = HIGH_SITESECURITYLEVEL
default: default:
return 0, errors.New("Unknown SiteSecurityLevel value: " + v) return 0, errors.New("Unknown SiteSecurityLevel value: " + v)
} }
return &result, nil return &result, nil
} }
func SerializeSiteSecurityLevel(values []SiteSecurityLevel) []string { func SerializeSiteSecurityLevel(values []SiteSecurityLevel) []string {
result := make([]string, len(values)) result := make([]string, len(values))
for i, v := range values { for i, v := range values {
result[i] = v.String() result[i] = v.String()
} }
return result return result
} }

View File

@ -1,123 +1,134 @@
package models package models
import ( import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization" i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
) )
// SiteSettings // SiteSettings
type SiteSettings struct { type SiteSettings struct {
// Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. // Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
additionalData map[string]interface{} additionalData map[string]interface{}
// The language tag for the language used on this site. // The language tag for the language used on this site.
languageTag *string languageTag *string
// The OdataType property // The OdataType property
odataType *string odataType *string
// Indicates the time offset for the time zone of the site from Coordinated Universal Time (UTC). // Indicates the time offset for the time zone of the site from Coordinated Universal Time (UTC).
timeZone *string timeZone *string
} }
// NewSiteSettings instantiates a new siteSettings and sets the default values. // NewSiteSettings instantiates a new siteSettings and sets the default values.
func NewSiteSettings()(*SiteSettings) { func NewSiteSettings() *SiteSettings {
m := &SiteSettings{ m := &SiteSettings{}
} m.SetAdditionalData(make(map[string]interface{}))
m.SetAdditionalData(make(map[string]interface{})); return m
return m
} }
// CreateSiteSettingsFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value // CreateSiteSettingsFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value
func CreateSiteSettingsFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) { func CreateSiteSettingsFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) (i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
return NewSiteSettings(), nil return NewSiteSettings(), nil
} }
// GetAdditionalData gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. // GetAdditionalData gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
func (m *SiteSettings) GetAdditionalData()(map[string]interface{}) { func (m *SiteSettings) GetAdditionalData() map[string]interface{} {
return m.additionalData return m.additionalData
} }
// GetFieldDeserializers the deserialization information for the current model // GetFieldDeserializers the deserialization information for the current model
func (m *SiteSettings) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) { func (m *SiteSettings) GetFieldDeserializers() map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
res := make(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) res := make(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error)
res["languageTag"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { res["languageTag"] = func(n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetStringValue() val, err := n.GetStringValue()
if err != nil { if err != nil {
return err return err
} }
if val != nil { if val != nil {
m.SetLanguageTag(val) m.SetLanguageTag(val)
} }
return nil return nil
} }
res["@odata.type"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { res["@odata.type"] = func(n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetStringValue() val, err := n.GetStringValue()
if err != nil { if err != nil {
return err return err
} }
if val != nil { if val != nil {
m.SetOdataType(val) m.SetOdataType(val)
} }
return nil return nil
} }
res["timeZone"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { res["timeZone"] = func(n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetStringValue() val, err := n.GetStringValue()
if err != nil { if err != nil {
return err return err
} }
if val != nil { if val != nil {
m.SetTimeZone(val) m.SetTimeZone(val)
} }
return nil return nil
} }
return res return res
} }
// GetLanguageTag gets the languageTag property value. The language tag for the language used on this site. // GetLanguageTag gets the languageTag property value. The language tag for the language used on this site.
func (m *SiteSettings) GetLanguageTag()(*string) { func (m *SiteSettings) GetLanguageTag() *string {
return m.languageTag return m.languageTag
} }
// GetOdataType gets the @odata.type property value. The OdataType property // GetOdataType gets the @odata.type property value. The OdataType property
func (m *SiteSettings) GetOdataType()(*string) { func (m *SiteSettings) GetOdataType() *string {
return m.odataType return m.odataType
} }
// GetTimeZone gets the timeZone property value. Indicates the time offset for the time zone of the site from Coordinated Universal Time (UTC). // GetTimeZone gets the timeZone property value. Indicates the time offset for the time zone of the site from Coordinated Universal Time (UTC).
func (m *SiteSettings) GetTimeZone()(*string) { func (m *SiteSettings) GetTimeZone() *string {
return m.timeZone return m.timeZone
} }
// Serialize serializes information the current object // Serialize serializes information the current object
func (m *SiteSettings) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) { func (m *SiteSettings) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter) error {
{ {
err := writer.WriteStringValue("languageTag", m.GetLanguageTag()) err := writer.WriteStringValue("languageTag", m.GetLanguageTag())
if err != nil { if err != nil {
return err return err
} }
} }
{ {
err := writer.WriteStringValue("@odata.type", m.GetOdataType()) err := writer.WriteStringValue("@odata.type", m.GetOdataType())
if err != nil { if err != nil {
return err return err
} }
} }
{ {
err := writer.WriteStringValue("timeZone", m.GetTimeZone()) err := writer.WriteStringValue("timeZone", m.GetTimeZone())
if err != nil { if err != nil {
return err return err
} }
} }
{ {
err := writer.WriteAdditionalData(m.GetAdditionalData()) err := writer.WriteAdditionalData(m.GetAdditionalData())
if err != nil { if err != nil {
return err return err
} }
} }
return nil return nil
} }
// SetAdditionalData sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. // SetAdditionalData sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
func (m *SiteSettings) SetAdditionalData(value map[string]interface{})() { func (m *SiteSettings) SetAdditionalData(value map[string]interface{}) {
m.additionalData = value m.additionalData = value
} }
// SetLanguageTag sets the languageTag property value. The language tag for the language used on this site. // SetLanguageTag sets the languageTag property value. The language tag for the language used on this site.
func (m *SiteSettings) SetLanguageTag(value *string)() { func (m *SiteSettings) SetLanguageTag(value *string) {
m.languageTag = value m.languageTag = value
} }
// SetOdataType sets the @odata.type property value. The OdataType property // SetOdataType sets the @odata.type property value. The OdataType property
func (m *SiteSettings) SetOdataType(value *string)() { func (m *SiteSettings) SetOdataType(value *string) {
m.odataType = value m.odataType = value
} }
// SetTimeZone sets the timeZone property value. Indicates the time offset for the time zone of the site from Coordinated Universal Time (UTC). // SetTimeZone sets the timeZone property value. Indicates the time offset for the time zone of the site from Coordinated Universal Time (UTC).
func (m *SiteSettings) SetTimeZone(value *string)() { func (m *SiteSettings) SetTimeZone(value *string) {
m.timeZone = value m.timeZone = value
} }

View File

@ -1,17 +1,17 @@
package models package models
import ( import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization" i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
) )
// SiteSettingsable // SiteSettingsable
type SiteSettingsable interface { type SiteSettingsable interface {
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.AdditionalDataHolder i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.AdditionalDataHolder
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable
GetLanguageTag()(*string) GetLanguageTag() *string
GetOdataType()(*string) GetOdataType() *string
GetTimeZone()(*string) GetTimeZone() *string
SetLanguageTag(value *string)() SetLanguageTag(value *string)
SetOdataType(value *string)() SetOdataType(value *string)
SetTimeZone(value *string)() SetTimeZone(value *string)
} }

View File

@ -1,88 +1,96 @@
package models package models
import ( import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization" i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
) )
// StandardWebPart // StandardWebPart
type StandardWebPart struct { type StandardWebPart struct {
WebPart WebPart
// Data of the webPart. // Data of the webPart.
data WebPartDataable data WebPartDataable
// A Guid which indicates the type of the webParts // A Guid which indicates the type of the webParts
webPartType *string webPartType *string
} }
// NewStandardWebPart instantiates a new StandardWebPart and sets the default values. // NewStandardWebPart instantiates a new StandardWebPart and sets the default values.
func NewStandardWebPart()(*StandardWebPart) { func NewStandardWebPart() *StandardWebPart {
m := &StandardWebPart{ m := &StandardWebPart{
WebPart: *NewWebPart(), WebPart: *NewWebPart(),
} }
odataTypeValue := "#microsoft.graph.standardWebPart"; odataTypeValue := "#microsoft.graph.standardWebPart"
m.SetOdataType(&odataTypeValue); m.SetOdataType(&odataTypeValue)
return m return m
} }
// CreateStandardWebPartFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value // CreateStandardWebPartFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value
func CreateStandardWebPartFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) { func CreateStandardWebPartFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) (i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
return NewStandardWebPart(), nil return NewStandardWebPart(), nil
} }
// GetData gets the data property value. Data of the webPart. // GetData gets the data property value. Data of the webPart.
func (m *StandardWebPart) GetData()(WebPartDataable) { func (m *StandardWebPart) GetData() WebPartDataable {
return m.data return m.data
} }
// GetFieldDeserializers the deserialization information for the current model // GetFieldDeserializers the deserialization information for the current model
func (m *StandardWebPart) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) { func (m *StandardWebPart) GetFieldDeserializers() map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
res := m.WebPart.GetFieldDeserializers() res := m.WebPart.GetFieldDeserializers()
res["data"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { res["data"] = func(n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetObjectValue(CreateWebPartDataFromDiscriminatorValue) val, err := n.GetObjectValue(CreateWebPartDataFromDiscriminatorValue)
if err != nil { if err != nil {
return err return err
} }
if val != nil { if val != nil {
m.SetData(val.(WebPartDataable)) m.SetData(val.(WebPartDataable))
} }
return nil return nil
} }
res["webPartType"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { res["webPartType"] = func(n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetStringValue() val, err := n.GetStringValue()
if err != nil { if err != nil {
return err return err
} }
if val != nil { if val != nil {
m.SetWebPartType(val) m.SetWebPartType(val)
} }
return nil return nil
} }
return res return res
} }
// GetWebPartType gets the webPartType property value. A Guid which indicates the type of the webParts // GetWebPartType gets the webPartType property value. A Guid which indicates the type of the webParts
func (m *StandardWebPart) GetWebPartType()(*string) { func (m *StandardWebPart) GetWebPartType() *string {
return m.webPartType return m.webPartType
} }
// Serialize serializes information the current object // Serialize serializes information the current object
func (m *StandardWebPart) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) { func (m *StandardWebPart) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter) error {
err := m.WebPart.Serialize(writer) err := m.WebPart.Serialize(writer)
if err != nil { if err != nil {
return err return err
} }
{ {
err = writer.WriteObjectValue("data", m.GetData()) err = writer.WriteObjectValue("data", m.GetData())
if err != nil { if err != nil {
return err return err
} }
} }
{ {
err = writer.WriteStringValue("webPartType", m.GetWebPartType()) err = writer.WriteStringValue("webPartType", m.GetWebPartType())
if err != nil { if err != nil {
return err return err
} }
} }
return nil return nil
} }
// SetData sets the data property value. Data of the webPart. // SetData sets the data property value. Data of the webPart.
func (m *StandardWebPart) SetData(value WebPartDataable)() { func (m *StandardWebPart) SetData(value WebPartDataable) {
m.data = value m.data = value
} }
// SetWebPartType sets the webPartType property value. A Guid which indicates the type of the webParts // SetWebPartType sets the webPartType property value. A Guid which indicates the type of the webParts
func (m *StandardWebPart) SetWebPartType(value *string)() { func (m *StandardWebPart) SetWebPartType(value *string) {
m.webPartType = value m.webPartType = value
} }

View File

@ -1,15 +1,15 @@
package models package models
import ( import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization" i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
) )
// StandardWebPartable // StandardWebPartable
type StandardWebPartable interface { type StandardWebPartable interface {
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable
WebPartable WebPartable
GetData()(WebPartDataable) GetData() WebPartDataable
GetWebPartType()(*string) GetWebPartType() *string
SetData(value WebPartDataable)() SetData(value WebPartDataable)
SetWebPartType(value *string)() SetWebPartType(value *string)
} }

View File

@ -1,62 +1,68 @@
package models package models
import ( import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization" i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
) )
// TextWebPart // TextWebPart
type TextWebPart struct { type TextWebPart struct {
WebPart WebPart
// The HTML string in text web part. // The HTML string in text web part.
innerHtml *string innerHtml *string
} }
// NewTextWebPart instantiates a new TextWebPart and sets the default values. // NewTextWebPart instantiates a new TextWebPart and sets the default values.
func NewTextWebPart()(*TextWebPart) { func NewTextWebPart() *TextWebPart {
m := &TextWebPart{ m := &TextWebPart{
WebPart: *NewWebPart(), WebPart: *NewWebPart(),
} }
odataTypeValue := "#microsoft.graph.textWebPart"; odataTypeValue := "#microsoft.graph.textWebPart"
m.SetOdataType(&odataTypeValue); m.SetOdataType(&odataTypeValue)
return m return m
} }
// CreateTextWebPartFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value // CreateTextWebPartFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value
func CreateTextWebPartFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) { func CreateTextWebPartFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) (i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
return NewTextWebPart(), nil return NewTextWebPart(), nil
} }
// GetFieldDeserializers the deserialization information for the current model // GetFieldDeserializers the deserialization information for the current model
func (m *TextWebPart) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) { func (m *TextWebPart) GetFieldDeserializers() map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
res := m.WebPart.GetFieldDeserializers() res := m.WebPart.GetFieldDeserializers()
res["innerHtml"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { res["innerHtml"] = func(n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetStringValue() val, err := n.GetStringValue()
if err != nil { if err != nil {
return err return err
} }
if val != nil { if val != nil {
m.SetInnerHtml(val) m.SetInnerHtml(val)
} }
return nil return nil
} }
return res return res
} }
// GetInnerHtml gets the innerHtml property value. The HTML string in text web part. // GetInnerHtml gets the innerHtml property value. The HTML string in text web part.
func (m *TextWebPart) GetInnerHtml()(*string) { func (m *TextWebPart) GetInnerHtml() *string {
return m.innerHtml return m.innerHtml
} }
// Serialize serializes information the current object // Serialize serializes information the current object
func (m *TextWebPart) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) { func (m *TextWebPart) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter) error {
err := m.WebPart.Serialize(writer) err := m.WebPart.Serialize(writer)
if err != nil { if err != nil {
return err return err
} }
{ {
err = writer.WriteStringValue("innerHtml", m.GetInnerHtml()) err = writer.WriteStringValue("innerHtml", m.GetInnerHtml())
if err != nil { if err != nil {
return err return err
} }
} }
return nil return nil
} }
// SetInnerHtml sets the innerHtml property value. The HTML string in text web part. // SetInnerHtml sets the innerHtml property value. The HTML string in text web part.
func (m *TextWebPart) SetInnerHtml(value *string)() { func (m *TextWebPart) SetInnerHtml(value *string) {
m.innerHtml = value m.innerHtml = value
} }

View File

@ -1,13 +1,13 @@
package models package models
import ( import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization" i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
) )
// TextWebPartable // TextWebPartable
type TextWebPartable interface { type TextWebPartable interface {
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable
WebPartable WebPartable
GetInnerHtml()(*string) GetInnerHtml() *string
SetInnerHtml(value *string)() SetInnerHtml(value *string)
} }

View File

@ -1,43 +1,45 @@
package models package models
import ( import (
"errors" "errors"
) )
// Provides operations to call the remove method. // Provides operations to call the remove method.
type TitleAreaLayoutType int type TitleAreaLayoutType int
const ( const (
IMAGEANDTITLE_TITLEAREALAYOUTTYPE TitleAreaLayoutType = iota IMAGEANDTITLE_TITLEAREALAYOUTTYPE TitleAreaLayoutType = iota
PLAIN_TITLEAREALAYOUTTYPE PLAIN_TITLEAREALAYOUTTYPE
COLORBLOCK_TITLEAREALAYOUTTYPE COLORBLOCK_TITLEAREALAYOUTTYPE
OVERLAP_TITLEAREALAYOUTTYPE OVERLAP_TITLEAREALAYOUTTYPE
UNKNOWNFUTUREVALUE_TITLEAREALAYOUTTYPE UNKNOWNFUTUREVALUE_TITLEAREALAYOUTTYPE
) )
func (i TitleAreaLayoutType) String() string { func (i TitleAreaLayoutType) String() string {
return []string{"imageAndTitle", "plain", "colorBlock", "overlap", "unknownFutureValue"}[i] return []string{"imageAndTitle", "plain", "colorBlock", "overlap", "unknownFutureValue"}[i]
} }
func ParseTitleAreaLayoutType(v string) (interface{}, error) { func ParseTitleAreaLayoutType(v string) (interface{}, error) {
result := IMAGEANDTITLE_TITLEAREALAYOUTTYPE result := IMAGEANDTITLE_TITLEAREALAYOUTTYPE
switch v { switch v {
case "imageAndTitle": case "imageAndTitle":
result = IMAGEANDTITLE_TITLEAREALAYOUTTYPE result = IMAGEANDTITLE_TITLEAREALAYOUTTYPE
case "plain": case "plain":
result = PLAIN_TITLEAREALAYOUTTYPE result = PLAIN_TITLEAREALAYOUTTYPE
case "colorBlock": case "colorBlock":
result = COLORBLOCK_TITLEAREALAYOUTTYPE result = COLORBLOCK_TITLEAREALAYOUTTYPE
case "overlap": case "overlap":
result = OVERLAP_TITLEAREALAYOUTTYPE result = OVERLAP_TITLEAREALAYOUTTYPE
case "unknownFutureValue": case "unknownFutureValue":
result = UNKNOWNFUTUREVALUE_TITLEAREALAYOUTTYPE result = UNKNOWNFUTUREVALUE_TITLEAREALAYOUTTYPE
default: default:
return 0, errors.New("Unknown TitleAreaLayoutType value: " + v) return 0, errors.New("Unknown TitleAreaLayoutType value: " + v)
} }
return &result, nil return &result, nil
} }
func SerializeTitleAreaLayoutType(values []TitleAreaLayoutType) []string { func SerializeTitleAreaLayoutType(values []TitleAreaLayoutType) []string {
result := make([]string, len(values)) result := make([]string, len(values))
for i, v := range values { for i, v := range values {
result[i] = v.String() result[i] = v.String()
} }
return result return result
} }

View File

@ -1,37 +1,39 @@
package models package models
import ( import (
"errors" "errors"
) )
// Provides operations to call the remove method. // Provides operations to call the remove method.
type TitleAreaTextAlignmentType int type TitleAreaTextAlignmentType int
const ( const (
LEFT_TITLEAREATEXTALIGNMENTTYPE TitleAreaTextAlignmentType = iota LEFT_TITLEAREATEXTALIGNMENTTYPE TitleAreaTextAlignmentType = iota
CENTER_TITLEAREATEXTALIGNMENTTYPE CENTER_TITLEAREATEXTALIGNMENTTYPE
UNKNOWNFUTUREVALUE_TITLEAREATEXTALIGNMENTTYPE UNKNOWNFUTUREVALUE_TITLEAREATEXTALIGNMENTTYPE
) )
func (i TitleAreaTextAlignmentType) String() string { func (i TitleAreaTextAlignmentType) String() string {
return []string{"left", "center", "unknownFutureValue"}[i] return []string{"left", "center", "unknownFutureValue"}[i]
} }
func ParseTitleAreaTextAlignmentType(v string) (interface{}, error) { func ParseTitleAreaTextAlignmentType(v string) (interface{}, error) {
result := LEFT_TITLEAREATEXTALIGNMENTTYPE result := LEFT_TITLEAREATEXTALIGNMENTTYPE
switch v { switch v {
case "left": case "left":
result = LEFT_TITLEAREATEXTALIGNMENTTYPE result = LEFT_TITLEAREATEXTALIGNMENTTYPE
case "center": case "center":
result = CENTER_TITLEAREATEXTALIGNMENTTYPE result = CENTER_TITLEAREATEXTALIGNMENTTYPE
case "unknownFutureValue": case "unknownFutureValue":
result = UNKNOWNFUTUREVALUE_TITLEAREATEXTALIGNMENTTYPE result = UNKNOWNFUTUREVALUE_TITLEAREATEXTALIGNMENTTYPE
default: default:
return 0, errors.New("Unknown TitleAreaTextAlignmentType value: " + v) return 0, errors.New("Unknown TitleAreaTextAlignmentType value: " + v)
} }
return &result, nil return &result, nil
} }
func SerializeTitleAreaTextAlignmentType(values []TitleAreaTextAlignmentType) []string { func SerializeTitleAreaTextAlignmentType(values []TitleAreaTextAlignmentType) []string {
result := make([]string, len(values)) result := make([]string, len(values))
for i, v := range values { for i, v := range values {
result[i] = v.String() result[i] = v.String()
} }
return result return result
} }

View File

@ -1,175 +1,190 @@
package models package models
import ( import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization" i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
) )
// WebPartPosition // WebPartPosition
type WebPartPosition struct { type WebPartPosition struct {
// Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. // Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
additionalData map[string]interface{} additionalData map[string]interface{}
// Indicates the identifier of the column where the web part is located. // Indicates the identifier of the column where the web part is located.
columnId *float64 columnId *float64
// Indicates the horizontal section where the web part is located. // Indicates the horizontal section where the web part is located.
horizontalSectionId *float64 horizontalSectionId *float64
// Indicates whether the web part is located in the vertical section. // Indicates whether the web part is located in the vertical section.
isInVerticalSection *bool isInVerticalSection *bool
// The OdataType property // The OdataType property
odataType *string odataType *string
// Index of the current web part. Represents the order of the web part in this column or section. // Index of the current web part. Represents the order of the web part in this column or section.
webPartIndex *float64 webPartIndex *float64
} }
// NewWebPartPosition instantiates a new webPartPosition and sets the default values. // NewWebPartPosition instantiates a new webPartPosition and sets the default values.
func NewWebPartPosition()(*WebPartPosition) { func NewWebPartPosition() *WebPartPosition {
m := &WebPartPosition{ m := &WebPartPosition{}
} m.SetAdditionalData(make(map[string]interface{}))
m.SetAdditionalData(make(map[string]interface{})); return m
return m
} }
// CreateWebPartPositionFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value // CreateWebPartPositionFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value
func CreateWebPartPositionFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) { func CreateWebPartPositionFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) (i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
return NewWebPartPosition(), nil return NewWebPartPosition(), nil
} }
// GetAdditionalData gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. // GetAdditionalData gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
func (m *WebPartPosition) GetAdditionalData()(map[string]interface{}) { func (m *WebPartPosition) GetAdditionalData() map[string]interface{} {
return m.additionalData return m.additionalData
} }
// GetColumnId gets the columnId property value. Indicates the identifier of the column where the web part is located. // GetColumnId gets the columnId property value. Indicates the identifier of the column where the web part is located.
func (m *WebPartPosition) GetColumnId()(*float64) { func (m *WebPartPosition) GetColumnId() *float64 {
return m.columnId return m.columnId
} }
// GetFieldDeserializers the deserialization information for the current model // GetFieldDeserializers the deserialization information for the current model
func (m *WebPartPosition) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) { func (m *WebPartPosition) GetFieldDeserializers() map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
res := make(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) res := make(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error)
res["columnId"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { res["columnId"] = func(n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetFloat64Value() val, err := n.GetFloat64Value()
if err != nil { if err != nil {
return err return err
} }
if val != nil { if val != nil {
m.SetColumnId(val) m.SetColumnId(val)
} }
return nil return nil
} }
res["horizontalSectionId"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { res["horizontalSectionId"] = func(n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetFloat64Value() val, err := n.GetFloat64Value()
if err != nil { if err != nil {
return err return err
} }
if val != nil { if val != nil {
m.SetHorizontalSectionId(val) m.SetHorizontalSectionId(val)
} }
return nil return nil
} }
res["isInVerticalSection"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { res["isInVerticalSection"] = func(n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetBoolValue() val, err := n.GetBoolValue()
if err != nil { if err != nil {
return err return err
} }
if val != nil { if val != nil {
m.SetIsInVerticalSection(val) m.SetIsInVerticalSection(val)
} }
return nil return nil
} }
res["@odata.type"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { res["@odata.type"] = func(n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetStringValue() val, err := n.GetStringValue()
if err != nil { if err != nil {
return err return err
} }
if val != nil { if val != nil {
m.SetOdataType(val) m.SetOdataType(val)
} }
return nil return nil
} }
res["webPartIndex"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { res["webPartIndex"] = func(n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetFloat64Value() val, err := n.GetFloat64Value()
if err != nil { if err != nil {
return err return err
} }
if val != nil { if val != nil {
m.SetWebPartIndex(val) m.SetWebPartIndex(val)
} }
return nil return nil
} }
return res return res
} }
// GetHorizontalSectionId gets the horizontalSectionId property value. Indicates the horizontal section where the web part is located. // GetHorizontalSectionId gets the horizontalSectionId property value. Indicates the horizontal section where the web part is located.
func (m *WebPartPosition) GetHorizontalSectionId()(*float64) { func (m *WebPartPosition) GetHorizontalSectionId() *float64 {
return m.horizontalSectionId return m.horizontalSectionId
} }
// GetIsInVerticalSection gets the isInVerticalSection property value. Indicates whether the web part is located in the vertical section. // GetIsInVerticalSection gets the isInVerticalSection property value. Indicates whether the web part is located in the vertical section.
func (m *WebPartPosition) GetIsInVerticalSection()(*bool) { func (m *WebPartPosition) GetIsInVerticalSection() *bool {
return m.isInVerticalSection return m.isInVerticalSection
} }
// GetOdataType gets the @odata.type property value. The OdataType property // GetOdataType gets the @odata.type property value. The OdataType property
func (m *WebPartPosition) GetOdataType()(*string) { func (m *WebPartPosition) GetOdataType() *string {
return m.odataType return m.odataType
} }
// GetWebPartIndex gets the webPartIndex property value. Index of the current web part. Represents the order of the web part in this column or section. // GetWebPartIndex gets the webPartIndex property value. Index of the current web part. Represents the order of the web part in this column or section.
func (m *WebPartPosition) GetWebPartIndex()(*float64) { func (m *WebPartPosition) GetWebPartIndex() *float64 {
return m.webPartIndex return m.webPartIndex
} }
// Serialize serializes information the current object // Serialize serializes information the current object
func (m *WebPartPosition) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) { func (m *WebPartPosition) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter) error {
{ {
err := writer.WriteFloat64Value("columnId", m.GetColumnId()) err := writer.WriteFloat64Value("columnId", m.GetColumnId())
if err != nil { if err != nil {
return err return err
} }
} }
{ {
err := writer.WriteFloat64Value("horizontalSectionId", m.GetHorizontalSectionId()) err := writer.WriteFloat64Value("horizontalSectionId", m.GetHorizontalSectionId())
if err != nil { if err != nil {
return err return err
} }
} }
{ {
err := writer.WriteBoolValue("isInVerticalSection", m.GetIsInVerticalSection()) err := writer.WriteBoolValue("isInVerticalSection", m.GetIsInVerticalSection())
if err != nil { if err != nil {
return err return err
} }
} }
{ {
err := writer.WriteStringValue("@odata.type", m.GetOdataType()) err := writer.WriteStringValue("@odata.type", m.GetOdataType())
if err != nil { if err != nil {
return err return err
} }
} }
{ {
err := writer.WriteFloat64Value("webPartIndex", m.GetWebPartIndex()) err := writer.WriteFloat64Value("webPartIndex", m.GetWebPartIndex())
if err != nil { if err != nil {
return err return err
} }
} }
{ {
err := writer.WriteAdditionalData(m.GetAdditionalData()) err := writer.WriteAdditionalData(m.GetAdditionalData())
if err != nil { if err != nil {
return err return err
} }
} }
return nil return nil
} }
// SetAdditionalData sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. // SetAdditionalData sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
func (m *WebPartPosition) SetAdditionalData(value map[string]interface{})() { func (m *WebPartPosition) SetAdditionalData(value map[string]interface{}) {
m.additionalData = value m.additionalData = value
} }
// SetColumnId sets the columnId property value. Indicates the identifier of the column where the web part is located. // SetColumnId sets the columnId property value. Indicates the identifier of the column where the web part is located.
func (m *WebPartPosition) SetColumnId(value *float64)() { func (m *WebPartPosition) SetColumnId(value *float64) {
m.columnId = value m.columnId = value
} }
// SetHorizontalSectionId sets the horizontalSectionId property value. Indicates the horizontal section where the web part is located. // SetHorizontalSectionId sets the horizontalSectionId property value. Indicates the horizontal section where the web part is located.
func (m *WebPartPosition) SetHorizontalSectionId(value *float64)() { func (m *WebPartPosition) SetHorizontalSectionId(value *float64) {
m.horizontalSectionId = value m.horizontalSectionId = value
} }
// SetIsInVerticalSection sets the isInVerticalSection property value. Indicates whether the web part is located in the vertical section. // SetIsInVerticalSection sets the isInVerticalSection property value. Indicates whether the web part is located in the vertical section.
func (m *WebPartPosition) SetIsInVerticalSection(value *bool)() { func (m *WebPartPosition) SetIsInVerticalSection(value *bool) {
m.isInVerticalSection = value m.isInVerticalSection = value
} }
// SetOdataType sets the @odata.type property value. The OdataType property // SetOdataType sets the @odata.type property value. The OdataType property
func (m *WebPartPosition) SetOdataType(value *string)() { func (m *WebPartPosition) SetOdataType(value *string) {
m.odataType = value m.odataType = value
} }
// SetWebPartIndex sets the webPartIndex property value. Index of the current web part. Represents the order of the web part in this column or section. // SetWebPartIndex sets the webPartIndex property value. Index of the current web part. Represents the order of the web part in this column or section.
func (m *WebPartPosition) SetWebPartIndex(value *float64)() { func (m *WebPartPosition) SetWebPartIndex(value *float64) {
m.webPartIndex = value m.webPartIndex = value
} }

View File

@ -1,21 +1,21 @@
package models package models
import ( import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization" i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
) )
// WebPartPositionable // WebPartPositionable
type WebPartPositionable interface { type WebPartPositionable interface {
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.AdditionalDataHolder i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.AdditionalDataHolder
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable
GetColumnId()(*float64) GetColumnId() *float64
GetHorizontalSectionId()(*float64) GetHorizontalSectionId() *float64
GetIsInVerticalSection()(*bool) GetIsInVerticalSection() *bool
GetOdataType()(*string) GetOdataType() *string
GetWebPartIndex()(*float64) GetWebPartIndex() *float64
SetColumnId(value *float64)() SetColumnId(value *float64)
SetHorizontalSectionId(value *float64)() SetHorizontalSectionId(value *float64)
SetIsInVerticalSection(value *bool)() SetIsInVerticalSection(value *bool)
SetOdataType(value *string)() SetOdataType(value *string)
SetWebPartIndex(value *float64)() SetWebPartIndex(value *float64)
} }

View File

@ -3,9 +3,10 @@ package sites
import ( import (
"context" "context"
ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go" i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors" i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
) )
// ItemPagesItemWebPartsItemGetPositionOfWebPartRequestBuilder provides operations to call the getPositionOfWebPart method. // ItemPagesItemWebPartsItemGetPositionOfWebPartRequestBuilder provides operations to call the getPositionOfWebPart method.

View File

@ -3,9 +3,10 @@ package sites
import ( import (
"context" "context"
ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go" i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors" i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
) )
// ItemPagesItemWebPartsRequestBuilder provides operations to manage the webParts property of the microsoft.graph.sitePage entity. // ItemPagesItemWebPartsRequestBuilder provides operations to manage the webParts property of the microsoft.graph.sitePage entity.

View File

@ -3,10 +3,10 @@ package sites
import ( import (
"context" "context"
ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go" i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors" i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
) )
// ItemPagesItemWebPartsWebPartItemRequestBuilder provides operations to manage the webParts property of the microsoft.graph.sitePage entity. // ItemPagesItemWebPartsWebPartItemRequestBuilder provides operations to manage the webParts property of the microsoft.graph.sitePage entity.

View File

@ -3,9 +3,10 @@ package sites
import ( import (
"context" "context"
ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go" i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors" i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
) )
// ItemPagesRequestBuilder provides operations to manage the pages property of the microsoft.graph.site entity. // ItemPagesRequestBuilder provides operations to manage the pages property of the microsoft.graph.site entity.

View File

@ -17,6 +17,7 @@ import (
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
const ( const (
errCodeActivityLimitReached = "activityLimitReached"
errCodeItemNotFound = "ErrorItemNotFound" errCodeItemNotFound = "ErrorItemNotFound"
errCodeEmailFolderNotFound = "ErrorSyncFolderNotFound" errCodeEmailFolderNotFound = "ErrorSyncFolderNotFound"
errCodeResyncRequired = "ResyncRequired" errCodeResyncRequired = "ResyncRequired"
@ -31,8 +32,10 @@ var (
// normally the graph client will catch this for us, but in case we // normally the graph client will catch this for us, but in case we
// run our own client Do(), we need to translate it to a timeout type // run our own client Do(), we need to translate it to a timeout type
// failure locally. // failure locally.
Err429TooManyRequests = errors.New("429 too many requests") Err429TooManyRequests = errors.New("429 too many requests")
Err503ServiceUnavailable = errors.New("503 Service Unavailable") Err503ServiceUnavailable = errors.New("503 Service Unavailable")
Err504GatewayTimeout = errors.New("504 Gateway Timeout")
Err500InternalServerError = errors.New("500 Internal Server Error")
) )
// The folder or item was deleted between the time we identified // The folder or item was deleted between the time we identified
@ -113,6 +116,10 @@ func IsErrThrottled(err error) bool {
return true return true
} }
if hasErrorCode(err, errCodeActivityLimitReached) {
return true
}
e := ErrThrottled{} e := ErrThrottled{}
return errors.As(err, &e) return errors.As(err, &e)
@ -135,21 +142,18 @@ func IsErrUnauthorized(err error) bool {
return errors.As(err, &e) return errors.As(err, &e)
} }
type ErrServiceUnavailable struct { type ErrInternalServerError struct {
common.Err common.Err
} }
func IsSericeUnavailable(err error) bool { func IsInternalServerError(err error) bool {
if errors.Is(err, Err503ServiceUnavailable) { if errors.Is(err, Err500InternalServerError) {
return true return true
} }
e := ErrUnauthorized{} e := ErrInternalServerError{}
if errors.As(err, &e) {
return true
}
return true return errors.As(err, &e)
} }
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------

View File

@ -0,0 +1,248 @@
package graph
import (
"context"
"testing"
"github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common"
)
// GraphErrorsUnitSuite groups unit tests for the graph error-classification
// helpers (the IsErr* predicates defined alongside this file).
type GraphErrorsUnitSuite struct {
	suite.Suite
}
// TestGraphErrorsUnitSuite is the `go test` entry point that runs the suite.
func TestGraphErrorsUnitSuite(t *testing.T) {
	suite.Run(t, new(GraphErrorsUnitSuite))
}
// odErr builds an ODataError test fixture whose main error carries the
// given graph error code.
func odErr(code string) *odataerrors.ODataError {
	mainErr := odataerrors.MainError{}
	mainErr.SetCode(&code)

	oerr := &odataerrors.ODataError{}
	oerr.SetError(&mainErr)

	return oerr
}
// TestIsErrDeletedInFlight verifies that IsErrDeletedInFlight matches the
// ErrDeletedInFlight sentinel type and the not-found oData error codes,
// and rejects everything else.
func (suite *GraphErrorsUnitSuite) TestIsErrDeletedInFlight() {
	cases := []struct {
		name   string
		err    error
		expect assert.BoolAssertionFunc
	}{
		{name: "nil", err: nil, expect: assert.False},
		{name: "non-matching", err: assert.AnError, expect: assert.False},
		{
			name:   "as",
			err:    ErrDeletedInFlight{Err: *common.EncapsulateError(assert.AnError)},
			expect: assert.True,
		},
		{name: "non-matching oDataErr", err: odErr("fnords"), expect: assert.False},
		{name: "not-found oDataErr", err: odErr(errCodeItemNotFound), expect: assert.True},
		{name: "sync-not-found oDataErr", err: odErr(errCodeSyncFolderNotFound), expect: assert.True},
	}
	for _, tc := range cases {
		suite.T().Run(tc.name, func(t *testing.T) {
			tc.expect(t, IsErrDeletedInFlight(tc.err))
		})
	}
}
// TestIsErrInvalidDelta verifies that IsErrInvalidDelta matches the
// ErrInvalidDelta sentinel type and the resync-required oData code.
func (suite *GraphErrorsUnitSuite) TestIsErrInvalidDelta() {
	cases := []struct {
		name   string
		err    error
		expect assert.BoolAssertionFunc
	}{
		{name: "nil", err: nil, expect: assert.False},
		{name: "non-matching", err: assert.AnError, expect: assert.False},
		{
			name:   "as",
			err:    ErrInvalidDelta{Err: *common.EncapsulateError(assert.AnError)},
			expect: assert.True,
		},
		{name: "non-matching oDataErr", err: odErr("fnords"), expect: assert.False},
		{name: "resync-required oDataErr", err: odErr(errCodeResyncRequired), expect: assert.True},
	}
	for _, tc := range cases {
		suite.T().Run(tc.name, func(t *testing.T) {
			tc.expect(t, IsErrInvalidDelta(tc.err))
		})
	}
}
// TestIsErrTimeout verifies that IsErrTimeout matches the ErrTimeout
// sentinel type and context.DeadlineExceeded.
func (suite *GraphErrorsUnitSuite) TestIsErrTimeout() {
	cases := []struct {
		name   string
		err    error
		expect assert.BoolAssertionFunc
	}{
		{name: "nil", err: nil, expect: assert.False},
		{name: "non-matching", err: assert.AnError, expect: assert.False},
		{
			name:   "as",
			err:    ErrTimeout{Err: *common.EncapsulateError(assert.AnError)},
			expect: assert.True,
		},
		{name: "context deadline", err: context.DeadlineExceeded, expect: assert.True},
	}
	for _, tc := range cases {
		suite.T().Run(tc.name, func(t *testing.T) {
			tc.expect(t, IsErrTimeout(tc.err))
		})
	}
}
// TestIsErrThrottled verifies that IsErrThrottled matches the ErrThrottled
// sentinel type and the Err429TooManyRequests sentinel value.
func (suite *GraphErrorsUnitSuite) TestIsErrThrottled() {
	cases := []struct {
		name   string
		err    error
		expect assert.BoolAssertionFunc
	}{
		{name: "nil", err: nil, expect: assert.False},
		{name: "non-matching", err: assert.AnError, expect: assert.False},
		{
			name:   "as",
			err:    ErrThrottled{Err: *common.EncapsulateError(assert.AnError)},
			expect: assert.True,
		},
		{name: "is429", err: Err429TooManyRequests, expect: assert.True},
	}
	for _, tc := range cases {
		suite.T().Run(tc.name, func(t *testing.T) {
			tc.expect(t, IsErrThrottled(tc.err))
		})
	}
}
// TestIsErrUnauthorized verifies that IsErrUnauthorized matches the
// ErrUnauthorized sentinel type and the Err401Unauthorized sentinel value.
func (suite *GraphErrorsUnitSuite) TestIsErrUnauthorized() {
	table := []struct {
		name   string
		err    error
		expect assert.BoolAssertionFunc
	}{
		{
			name:   "nil",
			err:    nil,
			expect: assert.False,
		},
		{
			name:   "non-matching",
			err:    assert.AnError,
			expect: assert.False,
		},
		{
			name:   "as",
			err:    ErrUnauthorized{Err: *common.EncapsulateError(assert.AnError)},
			expect: assert.True,
		},
		{
			// fixed: was mislabeled "is429" (copy-paste from the throttling
			// test); this case exercises the 401 sentinel.
			name:   "is401",
			err:    Err401Unauthorized,
			expect: assert.True,
		},
	}
	for _, test := range table {
		suite.T().Run(test.name, func(t *testing.T) {
			test.expect(t, IsErrUnauthorized(test.err))
		})
	}
}
// TestIsInternalServerError verifies that IsInternalServerError matches the
// ErrInternalServerError sentinel type and the Err500InternalServerError
// sentinel value.
func (suite *GraphErrorsUnitSuite) TestIsInternalServerError() {
	table := []struct {
		name   string
		err    error
		expect assert.BoolAssertionFunc
	}{
		{
			name:   "nil",
			err:    nil,
			expect: assert.False,
		},
		{
			name:   "non-matching",
			err:    assert.AnError,
			expect: assert.False,
		},
		{
			name:   "as",
			err:    ErrInternalServerError{Err: *common.EncapsulateError(assert.AnError)},
			expect: assert.True,
		},
		{
			// fixed: was mislabeled "is429" (copy-paste from the throttling
			// test); this case exercises the 500 sentinel.
			name:   "is500",
			err:    Err500InternalServerError,
			expect: assert.True,
		},
	}
	for _, test := range table {
		suite.T().Run(test.name, func(t *testing.T) {
			test.expect(t, IsInternalServerError(test.err))
		})
	}
}

View File

@ -8,6 +8,7 @@ import (
"time" "time"
"github.com/Azure/azure-sdk-for-go/sdk/azidentity" "github.com/Azure/azure-sdk-for-go/sdk/azidentity"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/microsoft/kiota-abstractions-go/serialization" "github.com/microsoft/kiota-abstractions-go/serialization"
ka "github.com/microsoft/kiota-authentication-azure-go" ka "github.com/microsoft/kiota-authentication-azure-go"
khttp "github.com/microsoft/kiota-http-go" khttp "github.com/microsoft/kiota-http-go"
@ -22,6 +23,7 @@ import (
const ( const (
logGraphRequestsEnvKey = "LOG_GRAPH_REQUESTS" logGraphRequestsEnvKey = "LOG_GRAPH_REQUESTS"
numberOfRetries = 3
) )
// AllMetadataFileNames produces the standard set of filenames used to store graph // AllMetadataFileNames produces the standard set of filenames used to store graph
@ -149,7 +151,7 @@ func HTTPClient(opts ...option) *http.Client {
middlewares := msgraphgocore.GetDefaultMiddlewaresWithOptions(&clientOptions) middlewares := msgraphgocore.GetDefaultMiddlewaresWithOptions(&clientOptions)
middlewares = append(middlewares, &LoggingMiddleware{}) middlewares = append(middlewares, &LoggingMiddleware{})
httpClient := msgraphgocore.GetDefaultClient(&clientOptions, middlewares...) httpClient := msgraphgocore.GetDefaultClient(&clientOptions, middlewares...)
httpClient.Timeout = time.Second * 90 httpClient.Timeout = time.Minute * 3
(&clientConfig{}). (&clientConfig{}).
populate(opts...). populate(opts...).
@ -250,7 +252,6 @@ func (handler *LoggingMiddleware) Intercept(
respDump, _ := httputil.DumpResponse(resp, false) respDump, _ := httputil.DumpResponse(resp, false)
metadata := []any{ metadata := []any{
"idx", middlewareIndex,
"method", req.Method, "method", req.Method,
"status", resp.Status, "status", resp.Status,
"statusCode", resp.StatusCode, "statusCode", resp.StatusCode,
@ -273,7 +274,6 @@ func (handler *LoggingMiddleware) Intercept(
respDump, _ := httputil.DumpResponse(resp, true) respDump, _ := httputil.DumpResponse(resp, true)
metadata := []any{ metadata := []any{
"idx", middlewareIndex,
"method", req.Method, "method", req.Method,
"status", resp.Status, "status", resp.Status,
"statusCode", resp.StatusCode, "statusCode", resp.StatusCode,
@ -296,3 +296,26 @@ func (handler *LoggingMiddleware) Intercept(
return resp, err return resp, err
} }
// RunWithRetry executes run, retrying up to numberOfRetries times when the
// failure is a timeout or a 500-internal-server error. Any other error is
// returned immediately. A linearly increasing backoff separates attempts.
// The final error (wrapped with a connector stack trace) is returned if all
// retries are exhausted or the error is not retryable.
func RunWithRetry(run func() error) error {
	var err error

	for i := 0; i < numberOfRetries; i++ {
		err = run()
		if err == nil {
			return nil
		}

		// only retry on timeouts and 500-internal-errors.
		if !(IsErrTimeout(err) || IsInternalServerError(err)) {
			break
		}

		// fixed: the prior check (i < numberOfRetries) was always true here,
		// causing a useless sleep after the final attempt. Only back off if
		// another attempt will actually be made.
		if i < numberOfRetries-1 {
			time.Sleep(time.Duration(3*(i+2)) * time.Second)
		}
	}

	return support.ConnectorStackErrorTraceWrap(err, "maximum retries or unretryable")
}

View File

@ -53,7 +53,7 @@ func (suite *GraphUnitSuite) TestHTTPClient() {
name: "no options", name: "no options",
opts: []option{}, opts: []option{},
check: func(t *testing.T, c *http.Client) { check: func(t *testing.T, c *http.Client) {
assert.Equal(t, 90*time.Second, c.Timeout, "default timeout") assert.Equal(t, 3*time.Minute, c.Timeout, "default timeout")
}, },
}, },
{ {

View File

@ -266,9 +266,11 @@ func (gc *GraphConnector) UnionSiteIDsAndWebURLs(ctx context.Context, ids, urls
// SideEffect: gc.status is updated at the completion of operation // SideEffect: gc.status is updated at the completion of operation
func (gc *GraphConnector) RestoreDataCollections( func (gc *GraphConnector) RestoreDataCollections(
ctx context.Context, ctx context.Context,
backupVersion int,
acct account.Account, acct account.Account,
selector selectors.Selector, selector selectors.Selector,
dest control.RestoreDestination, dest control.RestoreDestination,
opts control.Options,
dcs []data.Collection, dcs []data.Collection,
) (*details.Details, error) { ) (*details.Details, error) {
ctx, end := D.Span(ctx, "connector:restore") ctx, end := D.Span(ctx, "connector:restore")
@ -289,9 +291,9 @@ func (gc *GraphConnector) RestoreDataCollections(
case selectors.ServiceExchange: case selectors.ServiceExchange:
status, err = exchange.RestoreExchangeDataCollections(ctx, creds, gc.Service, dest, dcs, deets) status, err = exchange.RestoreExchangeDataCollections(ctx, creds, gc.Service, dest, dcs, deets)
case selectors.ServiceOneDrive: case selectors.ServiceOneDrive:
status, err = onedrive.RestoreCollections(ctx, gc.Service, dest, dcs, deets) status, err = onedrive.RestoreCollections(ctx, backupVersion, gc.Service, dest, opts, dcs, deets)
case selectors.ServiceSharePoint: case selectors.ServiceSharePoint:
status, err = sharepoint.RestoreCollections(ctx, gc.Service, dest, dcs, deets) status, err = sharepoint.RestoreCollections(ctx, backupVersion, gc.Service, dest, dcs, deets)
default: default:
err = errors.Errorf("restore data from service %s not supported", selector.Service.String()) err = errors.Errorf("restore data from service %s not supported", selector.Service.String())
} }

View File

@ -2,9 +2,11 @@ package connector
import ( import (
"context" "context"
"encoding/json"
"io" "io"
"net/http" "net/http"
"reflect" "reflect"
"strings"
"testing" "testing"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
@ -14,6 +16,7 @@ import (
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/connector/onedrive"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -169,6 +172,14 @@ type restoreBackupInfo struct {
resource resource resource resource
} }
type restoreBackupInfoMultiVersion struct {
name string
service path.ServiceType
collectionsLatest []colInfo
collectionsPrevious []colInfo
resource resource
}
func attachmentEqual( func attachmentEqual(
expected models.Attachmentable, expected models.Attachmentable,
got models.Attachmentable, got models.Attachmentable,
@ -645,21 +656,52 @@ func compareOneDriveItem(
t *testing.T, t *testing.T,
expected map[string][]byte, expected map[string][]byte,
item data.Stream, item data.Stream,
restorePermissions bool,
) { ) {
name := item.UUID()
expectedData := expected[item.UUID()] expectedData := expected[item.UUID()]
if !assert.NotNil(t, expectedData, "unexpected file with name %s", item.UUID) { if !assert.NotNil(t, expectedData, "unexpected file with name %s", item.UUID()) {
return return
} }
// OneDrive items are just byte buffers of the data. Nothing special to
// interpret. May need to do chunked comparisons in the future if we test
// large item equality.
buf, err := io.ReadAll(item.ToReader()) buf, err := io.ReadAll(item.ToReader())
if !assert.NoError(t, err) { if !assert.NoError(t, err) {
return return
} }
assert.Equal(t, expectedData, buf) if !strings.HasSuffix(name, onedrive.MetaFileSuffix) && !strings.HasSuffix(name, onedrive.DirMetaFileSuffix) {
// OneDrive data items are just byte buffers of the data. Nothing special to
// interpret. May need to do chunked comparisons in the future if we test
// large item equality.
assert.Equal(t, expectedData, buf)
return
}
var (
itemMeta onedrive.Metadata
expectedMeta onedrive.Metadata
)
err = json.Unmarshal(buf, &itemMeta)
assert.Nil(t, err)
err = json.Unmarshal(expectedData, &expectedMeta)
assert.Nil(t, err)
if !restorePermissions {
assert.Equal(t, 0, len(itemMeta.Permissions))
return
}
assert.Equal(t, len(expectedMeta.Permissions), len(itemMeta.Permissions), "number of permissions after restore")
// FIXME(meain): The permissions before and after might not be in the same order.
for i, p := range expectedMeta.Permissions {
assert.Equal(t, p.Email, itemMeta.Permissions[i].Email)
assert.Equal(t, p.Roles, itemMeta.Permissions[i].Roles)
assert.Equal(t, p.Expiration, itemMeta.Permissions[i].Expiration)
}
} }
func compareItem( func compareItem(
@ -668,6 +710,7 @@ func compareItem(
service path.ServiceType, service path.ServiceType,
category path.CategoryType, category path.CategoryType,
item data.Stream, item data.Stream,
restorePermissions bool,
) { ) {
if mt, ok := item.(data.StreamModTime); ok { if mt, ok := item.(data.StreamModTime); ok {
assert.NotZero(t, mt.ModTime()) assert.NotZero(t, mt.ModTime())
@ -687,7 +730,7 @@ func compareItem(
} }
case path.OneDriveService: case path.OneDriveService:
compareOneDriveItem(t, expected, item) compareOneDriveItem(t, expected, item, restorePermissions)
default: default:
assert.FailNowf(t, "unexpected service: %s", service.String()) assert.FailNowf(t, "unexpected service: %s", service.String())
@ -720,6 +763,7 @@ func checkCollections(
expectedItems int, expectedItems int,
expected map[string]map[string][]byte, expected map[string]map[string][]byte,
got []data.Collection, got []data.Collection,
restorePermissions bool,
) int { ) int {
collectionsWithItems := []data.Collection{} collectionsWithItems := []data.Collection{}
@ -754,7 +798,7 @@ func checkCollections(
continue continue
} }
compareItem(t, expectedColData, service, category, item) compareItem(t, expectedColData, service, category, item, restorePermissions)
} }
if gotItems != startingItems { if gotItems != startingItems {
@ -906,10 +950,63 @@ func collectionsForInfo(
tenant, user string, tenant, user string,
dest control.RestoreDestination, dest control.RestoreDestination,
allInfo []colInfo, allInfo []colInfo,
) (int, []data.Collection, map[string]map[string][]byte) { ) (int, int, []data.Collection, map[string]map[string][]byte) {
collections := make([]data.Collection, 0, len(allInfo)) collections := make([]data.Collection, 0, len(allInfo))
expectedData := make(map[string]map[string][]byte, len(allInfo)) expectedData := make(map[string]map[string][]byte, len(allInfo))
totalItems := 0 totalItems := 0
kopiaEntries := 0
for _, info := range allInfo {
	pth := mustToDataLayerPath(
		t,
		service,
		tenant,
		user,
		info.category,
		info.pathElements,
		false,
	)

	c := mockconnector.NewMockExchangeCollection(pth, len(info.items))

	baseDestPath := backupOutputPathFromRestore(t, dest, pth)

	baseExpected := expectedData[baseDestPath.String()]
	if baseExpected == nil {
		expectedData[baseDestPath.String()] = make(map[string][]byte, len(info.items))
		baseExpected = expectedData[baseDestPath.String()]
	}

	for i := 0; i < len(info.items); i++ {
		c.Names[i] = info.items[i].name
		c.Data[i] = info.items[i].data
		baseExpected[info.items[i].lookupKey] = info.items[i].data

		// Metadata files (.meta/.dirmeta) do not count against the item
		// count; only OneDrive .data files, or any item from another
		// service, do.
		//
		// Simplified from the redundant form
		// `A || (!A && B)` to the equivalent `A || B`.
		if service != path.OneDriveService ||
			strings.HasSuffix(info.items[i].name, onedrive.DataFileSuffix) {
			totalItems++
		}
	}

	collections = append(collections, c)
	kopiaEntries += len(info.items)
}

return totalItems, kopiaEntries, collections, expectedData
}
func collectionsForInfoVersion0(
t *testing.T,
service path.ServiceType,
tenant, user string,
dest control.RestoreDestination,
allInfo []colInfo,
) (int, int, []data.Collection, map[string]map[string][]byte) {
collections := make([]data.Collection, 0, len(allInfo))
expectedData := make(map[string]map[string][]byte, len(allInfo))
totalItems := 0
kopiaEntries := 0
for _, info := range allInfo { for _, info := range allInfo {
pth := mustToDataLayerPath( pth := mustToDataLayerPath(
@ -939,9 +1036,10 @@ func collectionsForInfo(
collections = append(collections, c) collections = append(collections, c)
totalItems += len(info.items) totalItems += len(info.items)
kopiaEntries += len(info.items)
} }
return totalItems, collections, expectedData return totalItems, kopiaEntries, collections, expectedData
} }
//nolint:deadcode //nolint:deadcode

File diff suppressed because it is too large Load Diff

View File

@ -202,6 +202,15 @@ func (suite *MockExchangeDataSuite) TestMockByteHydration() {
return err return err
}, },
}, },
{
name: "SharePoint: Page",
transformation: func(t *testing.T) error {
bytes := mockconnector.GetMockPage(subject)
_, err := support.CreatePageFromBytes(bytes)
return err
},
},
} }
for _, test := range tests { for _, test := range tests {

View File

@ -336,3 +336,212 @@ func GetMockEventMessageRequest(subject string) []byte {
return []byte(message) return []byte(message)
} }
// GetMockMessageWithItemAttachmentEvent returns the JSON bytes of a Graph API
// message carrying a single #microsoft.graph.itemAttachment whose embedded
// item is a #microsoft.graph.event. The given subject is spliced into the
// message's "subject" field; sender/from/to addresses come from the package's
// default* constants.
func GetMockMessageWithItemAttachmentEvent(subject string) []byte {
//nolint:lll
message := "{\"id\":\"AAMkAGQ1NzViZTdhLTEwMTMtNGJjNi05YWI2LTg4NWRlZDA2Y2UxOABGAAAAAAAPvVwUramXT7jlSGpVU8_7BwB8wYc0thTTTYl3RpEYIUq_AAAAAAEMAAB8wYc0thTTTYl3RpEYIUq_AADFfThMAAA=\",\"@odata.type\":\"#microsoft.graph.message\"," +
"\"@odata.etag\":\"W/\\\"CQAAABYAAAB8wYc0thTTTYl3RpEYIUq+AADFK3BH\\\"\",\"@odata.context\":\"https://graph.microsoft.com/v1.0/$metadata#users('dustina%408qzvrj.onmicrosoft.com')/messages/$entity\",\"categories\":[]," +
"\"changeKey\":\"CQAAABYAAAB8wYc0thTTTYl3RpEYIUq+AADFK3BH\",\"createdDateTime\":\"2023-02-01T13:48:43Z\",\"lastModifiedDateTime\":\"2023-02-01T18:27:03Z\"," +
"\"attachments\":[{\"id\":\"AAMkAGQ1NzViZTdhLTEwMTMtNGJjNi05YWI2LTg4NWRlZDA2Y2UxOABGAAAAAAAPvVwUramXT7jlSGpVU8_7BwB8wYc0thTTTYl3RpEYIUq_AAAAAAEMAAB8wYc0thTTTYl3RpEYIUq_AADFfThMAAABEgAQAKHxTL6mNCZPo71dbwrfKYM=\"," +
"\"@odata.type\":\"#microsoft.graph.itemAttachment\",\"isInline\":false,\"lastModifiedDateTime\":\"2023-02-01T13:52:56Z\",\"name\":\"Holidayevent\",\"size\":2059,\"item\":{\"id\":\"\",\"@odata.type\":\"#microsoft.graph.event\"," +
"\"createdDateTime\":\"2023-02-01T13:52:56Z\",\"lastModifiedDateTime\":\"2023-02-01T13:52:56Z\",\"body\":{\"content\":\"<html><head>\\r\\n<metahttp-equiv=\\\"Content-Type\\\"content=\\\"text/html;charset=utf-8\\\"></head><body>Let'slookforfunding!</body></html>\"," +
"\"contentType\":\"html\"},\"end\":{\"dateTime\":\"2016-12-02T19:00:00.0000000Z\",\"timeZone\":\"UTC\"}," +
"\"hasAttachments\":false,\"isAllDay\":false,\"isCancelled\":false,\"isDraft\":true,\"isOnlineMeeting\":false,\"isOrganizer\":true,\"isReminderOn\":false,\"organizer\":{\"emailAddress\":{\"address\":\"" + defaultMessageFrom + "\",\"name\":\"" + defaultAlias + "\"}}," +
"\"originalEndTimeZone\":\"tzone://Microsoft/Utc\",\"originalStartTimeZone\":\"tzone://Microsoft/Utc\",\"reminderMinutesBeforeStart\":0,\"responseRequested\":true,\"start\":{\"dateTime\":\"2016-12-02T18:00:00.0000000Z\",\"timeZone\":\"UTC\"}," +
"\"subject\":\"Discussgiftsforchildren\",\"type\":\"singleInstance\"}}],\"bccRecipients\":[],\"body\":{\"content\":\"<html><head>\\r\\n<metahttp-equiv=\\\"Content-Type\\\"content=\\\"text/html;charset=utf-8\\\"><styletype=\\\"text/css\\\"style=\\\"display:none\\\">\\r\\n<!--\\r\\np\\r\\n\\t{margin-top:0;\\r\\n\\tmargin-bottom:0}\\r\\n-->\\r\\n</style></head><bodydir=\\\"ltr\\\"><divclass=\\\"elementToProof\\\"style=\\\"font-family:Calibri,Arial,Helvetica,sans-serif;font-size:12pt;color:rgb(0,0,0);background-color:rgb(255,255,255)\\\">Lookingtodothis </div></body></html>\",\"contentType\":\"html\"}," +
"\"bodyPreview\":\"Lookingtodothis\",\"ccRecipients\":[],\"conversationId\":\"AAQkAGQ1NzViZTdhLTEwMTMtNGJjNi05YWI2LTg4NWRlZDA2Y2UxOAAQADGvj5ACBMdGpESX4xSOxCo=\",\"conversationIndex\":\"AQHZNkPmMa+PkAIEx0akRJfjFI7EKg==\",\"flag\":{\"flagStatus\":\"notFlagged\"}," +
"\"from\":{\"emailAddress\":{\"address\":\"" + defaultMessageFrom + "\",\"name\":\"" + defaultAlias + "\"}},\"hasAttachments\":true,\"importance\":\"normal\",\"inferenceClassification\":\"focused\"," +
"\"internetMessageId\":\"<SJ0PR17MB56220B4F6A443386A11D5154C3D19@SJ0PR17MB5622.namprd17.prod.outlook.com>\",\"isDeliveryReceiptRequested\":false,\"isDraft\":false,\"isRead\":true,\"isReadReceiptRequested\":false," +
"\"parentFolderId\":\"AQMkAGQ1NzViZTdhLTEwMTMtNGJjNi05YWI2LTg4ADVkZWQwNmNlMTgALgAAAw_9XBStqZdPuOVIalVTz7sBAHzBhzS2FNNNiXdGkRghSr4AAAIBDAAAAA==\",\"receivedDateTime\":\"2023-02-01T13:48:47Z\",\"replyTo\":[]," +
"\"sender\":{\"emailAddress\":{\"address\":\"" + defaultMessageSender + "\",\"name\":\"" + defaultAlias + "\"}},\"sentDateTime\":\"2023-02-01T13:48:46Z\"," +
"\"subject\":\"" + subject + "\",\"toRecipients\":[{\"emailAddress\":{\"address\":\"" + defaultMessageTo + "\",\"name\":\"" + defaultAlias + "\"}}]," +
"\"webLink\":\"https://outlook.office365.com/owa/?ItemID=AAMkAGQ1NzViZTdhLTEwMTMtNGJjNi05YWI2LTg4NWRlZDA2Y2UxOABGAAAAAAAPvVwUramXT7jlSGpVU8%2B7BwB8wYc0thTTTYl3RpEYIUq%2BAAAAAAEMAAB8wYc0thTTTYl3RpEYIUq%2BAADFfThMAAA%3D&exvsurl=1&viewmodel=ReadMessageItem\"}"
return []byte(message)
}
// GetMockMessageWithNestedItemAttachmentEvent returns the JSON bytes of a
// Graph API message with two levels of attachment nesting: the message holds
// an itemAttachment whose item is itself a message, and that inner message
// holds an itemAttachment whose item is an event.
func GetMockMessageWithNestedItemAttachmentEvent(subject string) []byte {
//nolint:lll
// Order of fields:
// 1. subject
// 2. alias
// 3. sender address
// 4. from address
// 5. toRecipients email address
template := `{
    "@odata.context": "https://graph.microsoft.com/v1.0/$metadata#users('f435c656-f8b2-4d71-93c3-6e092f52a167')/messages(attachments())/$entity",
    "@odata.etag": "W/\"CQAAABYAAAB8wYc0thTTTYl3RpEYIUq+AADFK782\"",
    "id": "AAMkAGQ1NzViZTdhLTEwMTMtNGJjNi05YWI2LTg4NWRlZDA2Y2UxOABGAAAAAAAPvVwUramXT7jlSGpVU8_7BwB8wYc0thTTTYl3RpEYIUq_AAAAAAEMAAB8wYc0thTTTYl3RpEYIUq_AADFfThSAAA=",
    "createdDateTime": "2023-02-02T21:38:27Z",
    "lastModifiedDateTime": "2023-02-02T22:42:49Z",
    "changeKey": "CQAAABYAAAB8wYc0thTTTYl3RpEYIUq+AADFK782",
    "categories": [],
    "receivedDateTime": "2023-02-02T21:38:27Z",
    "sentDateTime": "2023-02-02T21:38:24Z",
    "hasAttachments": true,
    "internetMessageId": "<SJ0PR17MB562287BE29A86751D6E77FE5C3D69@SJ0PR17MB5622.namprd17.prod.outlook.com>",
    "subject": "%[1]v",
    "bodyPreview": "Dustin,\r\n\r\nI'm here to see if we are still able to discover our object.",
    "importance": "normal",
    "parentFolderId": "AQMkAGQ1NzViZTdhLTEwMTMtNGJjNi05YWI2LTg4ADVkZWQwNmNlMTgALgAAAw_9XBStqZdPuOVIalVTz7sBAHzBhzS2FNNNiXdGkRghSr4AAAIBDAAAAA==",
    "conversationId": "AAQkAGQ1NzViZTdhLTEwMTMtNGJjNi05YWI2LTg4NWRlZDA2Y2UxOAAQAB13OyMdkNJJqEaIrGi3Yjc=",
    "conversationIndex": "AQHZN06dHXc7Ix2Q0kmoRoisaLdiNw==",
    "isDeliveryReceiptRequested": false,
    "isReadReceiptRequested": false,
    "isRead": false,
    "isDraft": false,
    "webLink": "https://outlook.office365.com/owa/?ItemID=AAMkAGQ1NzTruncated",
    "inferenceClassification": "focused",
    "body": {
        "contentType": "html",
        "content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none\">\r\n<!--\r\np\r\n\t{margin-top:0;\r\n\tmargin-bottom:0}\r\n-->\r\n</style></head><body dir=\"ltr\"><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\">Dustin,</div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><br></div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\">I'm here to see if we are still able to discover our object.&nbsp;</div></body></html>"
    },
    "sender": {
        "emailAddress": {
            "name": "%[2]s",
            "address": "%[3]s"
        }
    },
    "from": {
        "emailAddress": {
            "name": "%[2]s",
            "address": "%[4]s"
        }
    },
    "toRecipients": [
        {
            "emailAddress": {
                "name": "%[2]s",
                "address": "%[5]s"
            }
        }
    ],
    "ccRecipients": [],
    "bccRecipients": [],
    "replyTo": [],
    "flag": {
        "flagStatus": "notFlagged"
    },
    "attachments": [
        {
            "@odata.type": "#microsoft.graph.itemAttachment",
            "id": "AAMkAGQ1NzViZTdhLTEwMTMtNGJjNi05YWI2LTg4NWRlZDA2Y2UxOABGAAAAAAAPvVwUramXT7jlSGpVU8_7BwB8wYc0thTTTYl3RpEYIUq_AAAAAAEMAAB8wYc0thTTTYl3RpEYIUq_AADFfThSAAABEgAQAIyAgT1ZccRCjKKyF7VZ3dA=",
            "lastModifiedDateTime": "2023-02-02T21:38:27Z",
            "name": "Mail Item Attachment",
            "contentType": null,
            "size": 5362,
            "isInline": false,
            "item@odata.associationLink": "https://graph.microsoft.com/v1.0/users('f435c656-f8b2-4d71-93c3-6e092f52a167')/messages('')/$ref",
            "item@odata.navigationLink": "https://graph.microsoft.com/v1.0/users('f435c656-f8b2-4d71-93c3-6e092f52a167')/messages('')",
            "item": {
                "@odata.type": "#microsoft.graph.message",
                "id": "",
                "createdDateTime": "2023-02-02T21:38:27Z",
                "lastModifiedDateTime": "2023-02-02T21:38:27Z",
                "receivedDateTime": "2023-02-01T13:48:47Z",
                "sentDateTime": "2023-02-01T13:48:46Z",
                "hasAttachments": true,
                "internetMessageId": "<SJ0PR17MB56220B4F6A443386A11D5154C3D19@SJ0PR17MB5622.namprd17.prod.outlook.com>",
                "subject": "Mail Item Attachment",
                "bodyPreview": "Lookingtodothis",
                "importance": "normal",
                "conversationId": "AAQkAGQ1NzViZTdhLTEwMTMtNGJjNi05YWI2LTg4NWRlZDA2Y2UxOAAQAMNK0NU7Kx5GhAaHdzhfSRU=",
                "conversationIndex": "AQHZN02pw0rQ1TsrHkaEBod3OF9JFQ==",
                "isDeliveryReceiptRequested": false,
                "isReadReceiptRequested": false,
                "isRead": true,
                "isDraft": false,
                "webLink": "https://outlook.office365.com/owa/?AttachmentItemID=AAMkAGQ1NzViZTdhLTEwMTM",
                "body": {
                    "contentType": "html",
                    "content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><metahttp-equiv=\"Content-Type\"content=\"text html;charset=\"utf-8&quot;\"><styletype=\"text css?style=\"display:none\"><!--\r\np\r\n\t{margin-top:0;\r\n\tmargin-bottom:0}\r\n--><bodydir=\"ltr\"><divclass=\"elementToProof\"style=\"font-family:Calibri,Arial,Helvetica,sans-serif;font-size:12pt;color:rgb(0,0,0);background-color:rgb(255,255,255)\"></head><body>Lookingtodothis&nbsp; <div></div></body></html>"
                },
                "sender": {
                    "emailAddress": {
                        "name": "A Stranger",
                        "address": "foobar@8qzvrj.onmicrosoft.com"
                    }
                },
                "from": {
                    "emailAddress": {
                        "name": "A Stranger",
                        "address": "foobar@8qzvrj.onmicrosoft.com"
                    }
                },
                "toRecipients": [
                    {
                        "emailAddress": {
                            "name": "Direct Report",
                            "address": "notAvailable@8qzvrj.onmicrosoft.com"
                        }
                    }
                ],
                "flag": {
                    "flagStatus": "notFlagged"
                },
                "attachments": [
                    {
                        "@odata.type": "#microsoft.graph.itemAttachment",
                        "id": "AAMkAGQ1NzViZTdhLTEwMTMtNGJjNi05YWI2LTg4NWRlZDA2Y2UxOABGAAAAAAAPvVwUramXT7jlSGpVU8_7BwB8wYc0thTTTYl3RpEYIUq_AAAAAAEMAAB8wYc0thTTTYl3RpEYIUq_AADFfThSAAACEgAQAIyAgT1ZccRCjKKyF7VZ3dASABAAuYCb3N2YZ02RpJrZPzCBFQ==",
                        "lastModifiedDateTime": "2023-02-02T21:38:27Z",
                        "name": "Holidayevent",
                        "contentType": null,
                        "size": 2331,
                        "isInline": false,
                        "item@odata.associationLink": "https://graph.microsoft.com/v1.0/users('f435c656-f8b2-4d71-93c3-6e092f52a167')/events('')/$ref",
                        "item@odata.navigationLink": "https://graph.microsoft.com/v1.0/users('f435c656-f8b2-4d71-93c3-6e092f52a167')/events('')",
                        "item": {
                            "@odata.type": "#microsoft.graph.event",
                            "id": "",
                            "createdDateTime": "2023-02-02T21:38:27Z",
                            "lastModifiedDateTime": "2023-02-02T21:38:27Z",
                            "originalStartTimeZone": "tzone://Microsoft/Utc",
                            "originalEndTimeZone": "tzone://Microsoft/Utc",
                            "reminderMinutesBeforeStart": 0,
                            "isReminderOn": false,
                            "hasAttachments": false,
                            "subject": "Discuss Gifts for Children",
                            "isAllDay": false,
                            "isCancelled": false,
                            "isOrganizer": true,
                            "responseRequested": true,
                            "type": "singleInstance",
                            "isOnlineMeeting": false,
                            "isDraft": true,
                            "body": {
                                "contentType": "html",
                                "content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><metahttp-equiv=\"Content-Type\"content=\"text html;charset=\"utf-8&quot;\"></head><body>Let'slookforfunding! </body></html>"
                            },
                            "start": {
                                "dateTime": "2016-12-02T18:00:00.0000000Z",
                                "timeZone": "UTC"
                            },
                            "end": {
                                "dateTime": "2016-12-02T19:00:00.0000000Z",
                                "timeZone": "UTC"
                            },
                            "organizer": {
                                "emailAddress": {
                                    "name": "Event Manager",
                                    "address": "philonis@8qzvrj.onmicrosoft.com"
                                }
                            }
                        }
                    }
                ]
            }
        }
    ]
}`
message := fmt.Sprintf(
template,
subject,
defaultAlias,
defaultMessageSender,
defaultMessageFrom,
defaultMessageTo,
)
return []byte(message)
}

View File

@ -0,0 +1,25 @@
package mockconnector
// GetMockPage returns bytes for models.SitePageable object
// Title string changes of fields: name and title
// GetMockPage returns the serialized bytes of a models.SitePageable test
// fixture. The title argument populates two fields: "name" (with an ".aspx"
// extension appended) and "title"; all other fields are fixed sample values.
func GetMockPage(title string) []byte {
fileName := title + ".aspx"
// Create Test Page
//nolint:lll
byteArray := []byte("{\"name\":\"" + fileName + "\",\"title\":\"" + title + "\",\"pageLayout\":\"article\",\"showComments\":true," +
"\"showRecommendedPages\":false,\"titleArea\":{\"enableGradientEffect\":true,\"imageWebUrl\":\"/_LAYOUTS/IMAGES/VISUALTEMPLATETITLEIMAGE.JPG\"," +
"\"layout\":\"colorBlock\",\"showAuthor\":true,\"showPublishedDate\":false,\"showTextBlockAboveTitle\":false,\"textAboveTitle\":\"TEXTABOVETITLE\"," +
"\"textAlignment\":\"left\",\"imageSourceType\":2,\"title\":\"sample1\"}," +
"\"canvasLayout\":{\"horizontalSections\":[{\"layout\":\"oneThirdRightColumn\",\"id\":\"1\",\"emphasis\":\"none\",\"columns\":[{\"id\":\"1\",\"width\":8," +
"\"webparts\":[{\"id\":\"6f9230af-2a98-4952-b205-9ede4f9ef548\",\"innerHtml\":\"<p><b>Hello!</b></p>\"}]},{\"id\":\"2\",\"width\":4," +
"\"webparts\":[{\"id\":\"73d07dde-3474-4545-badb-f28ba239e0e1\",\"webPartType\":\"d1d91016-032f-456d-98a4-721247c305e8\",\"data\":{\"dataVersion\":\"1.9\"," +
"\"description\":\"Showanimageonyourpage\",\"title\":\"Image\",\"properties\":{\"imageSourceType\":2,\"altText\":\"\",\"overlayText\":\"\"," +
"\"siteid\":\"0264cabe-6b92-450a-b162-b0c3d54fe5e8\",\"webid\":\"f3989670-cd37-4514-8ccb-0f7c2cbe5314\",\"listid\":\"bdb41041-eb06-474e-ac29-87093386bb14\"," +
"\"uniqueid\":\"d9f94b40-78ba-48d0-a39f-3cb23c2fe7eb\",\"imgWidth\":4288,\"imgHeight\":2848,\"fixAspectRatio\":false,\"captionText\":\"\",\"alignment\":\"Center\"}," +
"\"serverProcessedContent\":{\"imageSources\":[{\"key\":\"imageSource\",\"value\":\"/_LAYOUTS/IMAGES/VISUALTEMPLATEIMAGE1.JPG\"}]," +
"\"customMetadata\":[{\"key\":\"imageSource\",\"value\":{\"siteid\":\"0264cabe-6b92-450a-b162-b0c3d54fe5e8\",\"webid\":\"f3989670-cd37-4514-8ccb-0f7c2cbe5314\"," +
"\"listid\":\"bdb41041-eb06-474e-ac29-87093386bb14\",\"uniqueid\":\"d9f94b40-78ba-48d0-a39f-3cb23c2fe7eb\",\"width\":\"4288\",\"height\":\"2848\"}}]}}}]}]}]}}")
return byteArray
}

View File

@ -3,6 +3,7 @@ package api
import ( import (
"context" "context"
msdrives "github.com/microsoftgraph/msgraph-sdk-go/drives"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
mssites "github.com/microsoftgraph/msgraph-sdk-go/sites" mssites "github.com/microsoftgraph/msgraph-sdk-go/sites"
msusers "github.com/microsoftgraph/msgraph-sdk-go/users" msusers "github.com/microsoftgraph/msgraph-sdk-go/users"
@ -12,6 +13,75 @@ import (
"github.com/alcionai/corso/src/internal/connector/graph/api" "github.com/alcionai/corso/src/internal/connector/graph/api"
) )
// getValues pulls the typed value slice out of a paged graph response. An
// error is returned when the concrete response type does not implement
// GetValue() []T.
func getValues[T any](l api.PageLinker) ([]T, error) {
	if page, ok := l.(interface{ GetValue() []T }); ok {
		return page.GetValue(), nil
	}

	return nil, errors.Errorf(
		"response of type [%T] does not comply with GetValue() interface",
		l,
	)
}
// pageSize is the page-size preference sent on delta requests; 999 is the
// maximum the Graph API accepts.
const pageSize = int32(999)
// driveItemPager walks the delta pages of drive items for a single drive.
type driveItemPager struct {
gs graph.Servicer
// builder issues the next delta request; replaced as paging advances.
builder *msdrives.ItemRootDeltaRequestBuilder
// options carries the page-size and $select preferences for each request.
options *msdrives.ItemRootDeltaRequestBuilderGetRequestConfiguration
}
// NewItemPager constructs a pager over the delta of items in the given drive,
// selecting only the requested fields. When link is non-empty, paging resumes
// from that delta link instead of starting at the drive root.
func NewItemPager(
	gs graph.Servicer,
	driveID, link string,
	fields []string,
) *driveItemPager {
	// Top requires an addressable value, so copy the constant.
	top := pageSize

	cfg := &msdrives.ItemRootDeltaRequestBuilderGetRequestConfiguration{
		QueryParameters: &msdrives.ItemRootDeltaRequestBuilderGetQueryParameters{
			Top:    &top,
			Select: fields,
		},
	}

	pager := &driveItemPager{
		gs:      gs,
		options: cfg,
		builder: gs.Client().DrivesById(driveID).Root().Delta(),
	}

	if len(link) > 0 {
		pager.builder = msdrives.NewItemRootDeltaRequestBuilder(link, gs.Adapter())
	}

	return pager
}
// GetPage fetches the next delta page of drive items, retrying the request
// via graph.RunWithRetry on transient failures.
func (p *driveItemPager) GetPage(ctx context.Context) (api.DeltaPageLinker, error) {
	var page api.DeltaPageLinker

	err := graph.RunWithRetry(func() error {
		var innerErr error
		page, innerErr = p.builder.Get(ctx, p.options)

		return innerErr
	})

	return page, err
}
// SetNext points the pager at the given link so the next GetPage call
// requests that page.
func (p *driveItemPager) SetNext(link string) {
p.builder = msdrives.NewItemRootDeltaRequestBuilder(link, p.gs.Adapter())
}
// ValuesIn extracts the DriveItemables contained in the given page response.
func (p *driveItemPager) ValuesIn(l api.DeltaPageLinker) ([]models.DriveItemable, error) {
return getValues[models.DriveItemable](l)
}
type userDrivePager struct { type userDrivePager struct {
gs graph.Servicer gs graph.Servicer
builder *msusers.ItemDrivesRequestBuilder builder *msusers.ItemDrivesRequestBuilder
@ -39,7 +109,17 @@ func NewUserDrivePager(
} }
func (p *userDrivePager) GetPage(ctx context.Context) (api.PageLinker, error) { func (p *userDrivePager) GetPage(ctx context.Context) (api.PageLinker, error) {
return p.builder.Get(ctx, p.options) var (
resp api.PageLinker
err error
)
err = graph.RunWithRetry(func() error {
resp, err = p.builder.Get(ctx, p.options)
return err
})
return resp, err
} }
func (p *userDrivePager) SetNext(link string) { func (p *userDrivePager) SetNext(link string) {
@ -47,15 +127,7 @@ func (p *userDrivePager) SetNext(link string) {
} }
func (p *userDrivePager) ValuesIn(l api.PageLinker) ([]models.Driveable, error) { func (p *userDrivePager) ValuesIn(l api.PageLinker) ([]models.Driveable, error) {
page, ok := l.(interface{ GetValue() []models.Driveable }) return getValues[models.Driveable](l)
if !ok {
return nil, errors.Errorf(
"response of type [%T] does not comply with GetValue() interface",
l,
)
}
return page.GetValue(), nil
} }
type siteDrivePager struct { type siteDrivePager struct {
@ -85,7 +157,17 @@ func NewSiteDrivePager(
} }
func (p *siteDrivePager) GetPage(ctx context.Context) (api.PageLinker, error) { func (p *siteDrivePager) GetPage(ctx context.Context) (api.PageLinker, error) {
return p.builder.Get(ctx, p.options) var (
resp api.PageLinker
err error
)
err = graph.RunWithRetry(func() error {
resp, err = p.builder.Get(ctx, p.options)
return err
})
return resp, err
} }
func (p *siteDrivePager) SetNext(link string) { func (p *siteDrivePager) SetNext(link string) {
@ -93,13 +175,5 @@ func (p *siteDrivePager) SetNext(link string) {
} }
func (p *siteDrivePager) ValuesIn(l api.PageLinker) ([]models.Driveable, error) { func (p *siteDrivePager) ValuesIn(l api.PageLinker) ([]models.Driveable, error) {
page, ok := l.(interface{ GetValue() []models.Driveable }) return getValues[models.Driveable](l)
if !ok {
return nil, errors.Errorf(
"response of type [%T] does not comply with GetValue() interface",
l,
)
}
return page.GetValue(), nil
} }

View File

@ -5,6 +5,7 @@ import (
"context" "context"
"io" "io"
"net/http" "net/http"
"strings"
"sync" "sync"
"sync/atomic" "sync/atomic"
"time" "time"
@ -34,6 +35,10 @@ const (
// Max number of retries to get doc from M365 // Max number of retries to get doc from M365
// Seems to timeout at times because of multiple requests // Seems to timeout at times because of multiple requests
maxRetries = 4 // 1 + 3 retries maxRetries = 4 // 1 + 3 retries
MetaFileSuffix = ".meta"
DirMetaFileSuffix = ".dirmeta"
DataFileSuffix = ".data"
) )
var ( var (
@ -56,12 +61,13 @@ type Collection struct {
// M365 IDs of file items within this collection // M365 IDs of file items within this collection
driveItems map[string]models.DriveItemable driveItems map[string]models.DriveItemable
// M365 ID of the drive this collection was created from // M365 ID of the drive this collection was created from
driveID string driveID string
source driveSource source driveSource
service graph.Servicer service graph.Servicer
statusUpdater support.StatusUpdater statusUpdater support.StatusUpdater
itemReader itemReaderFunc itemReader itemReaderFunc
ctrl control.Options itemMetaReader itemMetaReaderFunc
ctrl control.Options
// should only be true if the old delta token expired // should only be true if the old delta token expired
doNotMergeItems bool doNotMergeItems bool
@ -73,6 +79,15 @@ type itemReaderFunc func(
item models.DriveItemable, item models.DriveItemable,
) (itemInfo details.ItemInfo, itemData io.ReadCloser, err error) ) (itemInfo details.ItemInfo, itemData io.ReadCloser, err error)
// itemMetaReaderFunc produces a reader over the serialized metadata of the
// specified drive item, along with the byte length of that metadata.
type itemMetaReaderFunc func(
ctx context.Context,
service graph.Servicer,
driveID string,
item models.DriveItemable,
) (io.ReadCloser, int, error)
// NewCollection creates a Collection // NewCollection creates a Collection
func NewCollection( func NewCollection(
itemClient *http.Client, itemClient *http.Client,
@ -101,6 +116,7 @@ func NewCollection(
c.itemReader = sharePointItemReader c.itemReader = sharePointItemReader
default: default:
c.itemReader = oneDriveItemReader c.itemReader = oneDriveItemReader
c.itemMetaReader = oneDriveItemMetaReader
} }
return c return c
@ -138,6 +154,21 @@ func (oc Collection) DoNotMergeItems() bool {
return oc.doNotMergeItems return oc.doNotMergeItems
} }
// UserPermission stores the permissions a specific user holds on a OneDrive
// item. NOTE(review): the Roles field serializes under the json key "role"
// (singular); changing the tag would break existing serialized metadata.
type UserPermission struct {
ID string `json:"id,omitempty"`
Roles []string `json:"role,omitempty"`
Email string `json:"email,omitempty"`
Expiration *time.Time `json:"expiration,omitempty"`
}

// Metadata holds metadata about an item (currently its permissions). It gets
// stored in a separate file in kopia alongside the item data.
type Metadata struct {
Permissions []UserPermission `json:"permissions,omitempty"`
}
// Item represents a single item retrieved from OneDrive // Item represents a single item retrieved from OneDrive
type Item struct { type Item struct {
id string id string
@ -173,18 +204,21 @@ func (od *Item) ModTime() time.Time {
// and uses the collection `itemReader` to read the item // and uses the collection `itemReader` to read the item
func (oc *Collection) populateItems(ctx context.Context) { func (oc *Collection) populateItems(ctx context.Context) {
var ( var (
errs error errs error
byteCount int64 byteCount int64
itemsRead int64 itemsRead int64
wg sync.WaitGroup dirsRead int64
m sync.Mutex itemsFound int64
dirsFound int64
wg sync.WaitGroup
m sync.Mutex
) )
// Retrieve the OneDrive folder path to set later in // Retrieve the OneDrive folder path to set later in
// `details.OneDriveInfo` // `details.OneDriveInfo`
parentPathString, err := path.GetDriveFolderPath(oc.folderPath) parentPathString, err := path.GetDriveFolderPath(oc.folderPath)
if err != nil { if err != nil {
oc.reportAsCompleted(ctx, 0, 0, err) oc.reportAsCompleted(ctx, 0, 0, 0, err)
return return
} }
@ -205,16 +239,11 @@ func (oc *Collection) populateItems(ctx context.Context) {
m.Unlock() m.Unlock()
} }
for id, item := range oc.driveItems { for _, item := range oc.driveItems {
if oc.ctrl.FailFast && errs != nil { if oc.ctrl.FailFast && errs != nil {
break break
} }
if item == nil {
errUpdater(id, errors.New("nil item"))
continue
}
semaphoreCh <- struct{}{} semaphoreCh <- struct{}{}
wg.Add(1) wg.Add(1)
@ -223,13 +252,64 @@ func (oc *Collection) populateItems(ctx context.Context) {
defer wg.Done() defer wg.Done()
defer func() { <-semaphoreCh }() defer func() { <-semaphoreCh }()
// Read the item
var ( var (
itemID = *item.GetId() itemID = *item.GetId()
itemName = *item.GetName() itemName = *item.GetName()
itemSize = *item.GetSize() itemSize = *item.GetSize()
itemInfo details.ItemInfo itemInfo details.ItemInfo
itemMeta io.ReadCloser
itemMetaSize int
metaSuffix string
err error
) )
isFile := item.GetFile() != nil
if isFile {
atomic.AddInt64(&itemsFound, 1)
metaSuffix = MetaFileSuffix
} else {
atomic.AddInt64(&dirsFound, 1)
metaSuffix = DirMetaFileSuffix
}
if oc.source == OneDriveSource {
	// Fetch metadata (permissions) for the item, retrying transient
	// failures up to maxRetries times.
	for i := 1; i <= maxRetries; i++ {
		if !oc.ctrl.ToggleFeatures.EnablePermissionsBackup {
			// We are still writing the metadata file but with
			// empty permissions as we don't have a way to
			// signify that the permissions was explicitly
			// not added.
			itemMeta = io.NopCloser(strings.NewReader("{}"))
			itemMetaSize = 2

			break
		}

		itemMeta, itemMetaSize, err = oc.itemMetaReader(ctx, oc.service, oc.driveID, item)

		// Retry only on timeout or internal-server-error; break on success
		// or any other failure.
		//
		// BUG FIX: the two negated checks were previously joined with ||,
		// which is true for every non-nil error (an error cannot be both a
		// timeout and a 500), so the loop never actually retried.
		if err == nil ||
			(!graph.IsErrTimeout(err) &&
				!graph.IsInternalServerError(err)) {
			break
		}

		if i < maxRetries {
			time.Sleep(1 * time.Second)
		}
	}

	// Give up on the item entirely if metadata could not be fetched.
	if err != nil {
		errUpdater(*item.GetId(), errors.Wrap(err, "failed to get item permissions"))
		return
	}
}
switch oc.source { switch oc.source {
case SharePointSource: case SharePointSource:
itemInfo.SharePoint = sharePointItemInfo(item, itemSize) itemInfo.SharePoint = sharePointItemInfo(item, itemSize)
@ -239,101 +319,127 @@ func (oc *Collection) populateItems(ctx context.Context) {
itemInfo.OneDrive.ParentPath = parentPathString itemInfo.OneDrive.ParentPath = parentPathString
} }
// Construct a new lazy readCloser to feed to the collection consumer. if isFile {
// This ensures that downloads won't be attempted unless that consumer dataSuffix := ""
// attempts to read bytes. Assumption is that kopia will check things if oc.source == OneDriveSource {
// like file modtimes before attempting to read. dataSuffix = DataFileSuffix
itemReader := lazy.NewLazyReadCloser(func() (io.ReadCloser, error) { }
// Read the item
var (
itemData io.ReadCloser
err error
)
for i := 1; i <= maxRetries; i++ { // Construct a new lazy readCloser to feed to the collection consumer.
_, itemData, err = oc.itemReader(oc.itemClient, item) // This ensures that downloads won't be attempted unless that consumer
if err == nil { // attempts to read bytes. Assumption is that kopia will check things
break // like file modtimes before attempting to read.
} itemReader := lazy.NewLazyReadCloser(func() (io.ReadCloser, error) {
// Read the item
var (
itemData io.ReadCloser
err error
)
if graph.IsErrUnauthorized(err) { for i := 1; i <= maxRetries; i++ {
// assume unauthorized requests are a sign of an expired _, itemData, err = oc.itemReader(oc.itemClient, item)
// jwt token, and that we've overrun the available window if err == nil {
// to download the actual file. Re-downloading the item
// will refresh that download url.
di, diErr := getDriveItem(ctx, oc.service, oc.driveID, itemID)
if diErr != nil {
err = errors.Wrap(diErr, "retrieving expired item")
break break
} }
item = di if graph.IsErrUnauthorized(err) {
// assume unauthorized requests are a sign of an expired
// jwt token, and that we've overrun the available window
// to download the actual file. Re-downloading the item
// will refresh that download url.
di, diErr := getDriveItem(ctx, oc.service, oc.driveID, itemID)
if diErr != nil {
err = errors.Wrap(diErr, "retrieving expired item")
break
}
continue item = di
} else if !graph.IsErrTimeout(err) && !graph.IsErrThrottled(err) && !graph.IsSericeUnavailable(err) { continue
// TODO: graphAPI will provides headers that state the duration to wait
// in order to succeed again. The one second sleep won't cut it here. } else if !graph.IsErrTimeout(err) &&
// !graph.IsInternalServerError(err) {
// for all non-timeout, non-unauth, non-throttling errors, do not retry // Don't retry for non-timeout, on-unauth, as
break // we are already retrying it in the default
// retry middleware
break
}
if i < maxRetries {
time.Sleep(1 * time.Second)
}
} }
if i < maxRetries { // check for errors following retries
time.Sleep(1 * time.Second) if err != nil {
errUpdater(itemID, err)
return nil, err
} }
// display/log the item download
progReader, closer := observe.ItemProgress(
ctx,
itemData,
observe.ItemBackupMsg,
observe.PII(itemName+dataSuffix),
itemSize,
)
go closer()
return progReader, nil
})
oc.data <- &Item{
id: itemName + dataSuffix,
data: itemReader,
info: itemInfo,
} }
}
// check for errors following retries if oc.source == OneDriveSource {
if err != nil { metaReader := lazy.NewLazyReadCloser(func() (io.ReadCloser, error) {
errUpdater(itemID, err) progReader, closer := observe.ItemProgress(
return nil, err ctx, itemMeta, observe.ItemBackupMsg,
observe.PII(itemName+metaSuffix), int64(itemMetaSize))
go closer()
return progReader, nil
})
oc.data <- &Item{
id: itemName + metaSuffix,
data: metaReader,
info: itemInfo,
} }
}
// display/log the item download
progReader, closer := observe.ItemProgress(ctx, itemData, observe.ItemBackupMsg, observe.PII(itemName), itemSize)
go closer()
return progReader, nil
})
// This can cause inaccurate counts. Right now it counts all the items
// we intend to read. Errors within the lazy readCloser will create a
// conflict: an item is both successful and erroneous. But the async
// control to fix that is more error-prone than helpful.
//
// TODO: transform this into a stats bus so that async control of stats
// aggregation is handled at the backup level, not at the item iteration
// level.
//
// Item read successfully, add to collection // Item read successfully, add to collection
atomic.AddInt64(&itemsRead, 1) if isFile {
atomic.AddInt64(&itemsRead, 1)
} else {
atomic.AddInt64(&dirsRead, 1)
}
// byteCount iteration // byteCount iteration
atomic.AddInt64(&byteCount, itemSize) atomic.AddInt64(&byteCount, itemSize)
oc.data <- &Item{
id: itemName,
data: itemReader,
info: itemInfo,
}
folderProgress <- struct{}{} folderProgress <- struct{}{}
}(item) }(item)
} }
wg.Wait() wg.Wait()
oc.reportAsCompleted(ctx, int(itemsRead), byteCount, errs) oc.reportAsCompleted(ctx, int(itemsFound), int(itemsRead), byteCount, errs)
} }
func (oc *Collection) reportAsCompleted(ctx context.Context, itemsRead int, byteCount int64, errs error) { func (oc *Collection) reportAsCompleted(ctx context.Context, itemsFound, itemsRead int, byteCount int64, errs error) {
close(oc.data) close(oc.data)
status := support.CreateStatus(ctx, support.Backup, status := support.CreateStatus(ctx, support.Backup,
1, // num folders (always 1) 1, // num folders (always 1)
support.CollectionMetrics{ support.CollectionMetrics{
Objects: len(oc.driveItems), // items to read, Objects: itemsFound, // items to read,
Successes: itemsRead, // items read successfully, Successes: itemsRead, // items read successfully,
TotalBytes: byteCount, // Number of bytes read in the operation, TotalBytes: byteCount, // Number of bytes read in the operation,
}, },
errs, errs,
oc.folderPath.Folder(), // Additional details oc.folderPath.Folder(), // Additional details

View File

@ -2,8 +2,11 @@ package onedrive
import ( import (
"bytes" "bytes"
"context"
"encoding/json"
"io" "io"
"net/http" "net/http"
"strings"
"sync" "sync"
"testing" "testing"
"time" "time"
@ -60,6 +63,14 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
testItemName = "itemName" testItemName = "itemName"
testItemData = []byte("testdata") testItemData = []byte("testdata")
now = time.Now() now = time.Now()
testItemMeta = Metadata{Permissions: []UserPermission{
{
ID: "testMetaID",
Roles: []string{"read", "write"},
Email: "email@provider.com",
Expiration: &now,
},
}}
) )
type nst struct { type nst struct {
@ -157,13 +168,14 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
suite, suite,
suite.testStatusUpdater(&wg, &collStatus), suite.testStatusUpdater(&wg, &collStatus),
test.source, test.source,
control.Options{}) control.Options{ToggleFeatures: control.Toggles{EnablePermissionsBackup: true}})
require.NotNil(t, coll) require.NotNil(t, coll)
assert.Equal(t, folderPath, coll.FullPath()) assert.Equal(t, folderPath, coll.FullPath())
// Set a item reader, add an item and validate we get the item back // Set a item reader, add an item and validate we get the item back
mockItem := models.NewDriveItem() mockItem := models.NewDriveItem()
mockItem.SetId(&testItemID) mockItem.SetId(&testItemID)
mockItem.SetFile(models.NewFile())
mockItem.SetName(&test.itemDeets.name) mockItem.SetName(&test.itemDeets.name)
mockItem.SetSize(&test.itemDeets.size) mockItem.SetSize(&test.itemDeets.size)
mockItem.SetCreatedDateTime(&test.itemDeets.time) mockItem.SetCreatedDateTime(&test.itemDeets.time)
@ -174,6 +186,18 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
} }
coll.itemReader = test.itemReader coll.itemReader = test.itemReader
coll.itemMetaReader = func(_ context.Context,
_ graph.Servicer,
_ string,
_ models.DriveItemable,
) (io.ReadCloser, int, error) {
metaJSON, err := json.Marshal(testItemMeta)
if err != nil {
return nil, 0, err
}
return io.NopCloser(bytes.NewReader(metaJSON)), len(metaJSON), nil
}
// Read items from the collection // Read items from the collection
wg.Add(1) wg.Add(1)
@ -184,28 +208,54 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
wg.Wait() wg.Wait()
if test.source == OneDriveSource {
require.Len(t, readItems, 2) // .data and .meta
} else {
require.Len(t, readItems, 1)
}
// Expect only 1 item
require.Equal(t, 1, collStatus.ObjectCount)
require.Equal(t, 1, collStatus.Successful)
// Validate item info and data // Validate item info and data
readItem := readItems[0] readItem := readItems[0]
readItemInfo := readItem.(data.StreamInfo) readItemInfo := readItem.(data.StreamInfo)
readData, err := io.ReadAll(readItem.ToReader()) if test.source == OneDriveSource {
require.NoError(t, err) assert.Equal(t, testItemName+DataFileSuffix, readItem.UUID())
assert.Equal(t, testItemData, readData) } else {
assert.Equal(t, testItemName, readItem.UUID())
// Expect only 1 item }
require.Len(t, readItems, 1)
require.Equal(t, 1, collStatus.ObjectCount, "items iterated")
require.Equal(t, 1, collStatus.Successful, "items successful")
assert.Equal(t, testItemName, readItem.UUID())
require.Implements(t, (*data.StreamModTime)(nil), readItem) require.Implements(t, (*data.StreamModTime)(nil), readItem)
mt := readItem.(data.StreamModTime) mt := readItem.(data.StreamModTime)
assert.Equal(t, now, mt.ModTime()) assert.Equal(t, now, mt.ModTime())
readData, err := io.ReadAll(readItem.ToReader())
require.NoError(t, err)
name, parentPath := test.infoFrom(t, readItemInfo.Info()) name, parentPath := test.infoFrom(t, readItemInfo.Info())
assert.Equal(t, testItemData, readData)
assert.Equal(t, testItemName, name) assert.Equal(t, testItemName, name)
assert.Equal(t, driveFolderPath, parentPath) assert.Equal(t, driveFolderPath, parentPath)
if test.source == OneDriveSource {
readItemMeta := readItems[1]
assert.Equal(t, testItemName+MetaFileSuffix, readItemMeta.UUID())
readMetaData, err := io.ReadAll(readItemMeta.ToReader())
require.NoError(t, err)
tm, err := json.Marshal(testItemMeta)
if err != nil {
t.Fatal("unable to marshall test permissions", err)
}
assert.Equal(t, tm, readMetaData)
}
}) })
} }
} }
@ -251,10 +301,11 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadError() {
suite, suite,
suite.testStatusUpdater(&wg, &collStatus), suite.testStatusUpdater(&wg, &collStatus),
test.source, test.source,
control.Options{}) control.Options{ToggleFeatures: control.Toggles{EnablePermissionsBackup: true}})
mockItem := models.NewDriveItem() mockItem := models.NewDriveItem()
mockItem.SetId(&testItemID) mockItem.SetId(&testItemID)
mockItem.SetFile(models.NewFile())
mockItem.SetName(&name) mockItem.SetName(&name)
mockItem.SetSize(&size) mockItem.SetSize(&size)
mockItem.SetCreatedDateTime(&now) mockItem.SetCreatedDateTime(&now)
@ -265,6 +316,14 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadError() {
return details.ItemInfo{}, nil, assert.AnError return details.ItemInfo{}, nil, assert.AnError
} }
coll.itemMetaReader = func(_ context.Context,
_ graph.Servicer,
_ string,
_ models.DriveItemable,
) (io.ReadCloser, int, error) {
return io.NopCloser(strings.NewReader(`{}`)), 2, nil
}
collItem, ok := <-coll.Items() collItem, ok := <-coll.Items()
assert.True(t, ok) assert.True(t, ok)
@ -279,3 +338,87 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadError() {
}) })
} }
} }
func (suite *CollectionUnitTestSuite) TestCollectionDisablePermissionsBackup() {
table := []struct {
name string
source driveSource
}{
{
name: "oneDrive",
source: OneDriveSource,
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
var (
testItemID = "fakeItemID"
testItemName = "Fake Item"
testItemSize = int64(10)
collStatus = support.ConnectorOperationStatus{}
wg = sync.WaitGroup{}
)
wg.Add(1)
folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source)
require.NoError(t, err)
coll := NewCollection(
graph.HTTPClient(graph.NoTimeout()),
folderPath,
"fakeDriveID",
suite,
suite.testStatusUpdater(&wg, &collStatus),
test.source,
control.Options{ToggleFeatures: control.Toggles{}})
now := time.Now()
mockItem := models.NewDriveItem()
mockItem.SetFile(models.NewFile())
mockItem.SetId(&testItemID)
mockItem.SetName(&testItemName)
mockItem.SetSize(&testItemSize)
mockItem.SetCreatedDateTime(&now)
mockItem.SetLastModifiedDateTime(&now)
coll.Add(mockItem)
coll.itemReader = func(
*http.Client,
models.DriveItemable,
) (details.ItemInfo, io.ReadCloser, error) {
return details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: "fakeName", Modified: time.Now()}},
io.NopCloser(strings.NewReader("Fake Data!")),
nil
}
coll.itemMetaReader = func(_ context.Context,
_ graph.Servicer,
_ string,
_ models.DriveItemable,
) (io.ReadCloser, int, error) {
return io.NopCloser(strings.NewReader(`{"key": "value"}`)), 16, nil
}
readItems := []data.Stream{}
for item := range coll.Items() {
readItems = append(readItems, item)
}
wg.Wait()
// Expect no items
require.Equal(t, 1, collStatus.ObjectCount)
require.Equal(t, 1, collStatus.Successful)
for _, i := range readItems {
if strings.HasSuffix(i.UUID(), MetaFileSuffix) {
content, err := io.ReadAll(i.ToReader())
require.NoError(t, err)
require.Equal(t, content, []byte("{}"))
}
}
})
}
}

View File

@ -2,7 +2,9 @@ package onedrive
import ( import (
"context" "context"
"encoding/json"
"fmt" "fmt"
"io"
"net/http" "net/http"
"strings" "strings"
@ -63,6 +65,19 @@ type Collections struct {
// for a OneDrive folder // for a OneDrive folder
CollectionMap map[string]data.Collection CollectionMap map[string]data.Collection
// Not the most ideal, but allows us to change the pager function for testing
// as needed. This will allow us to mock out some scenarios during testing.
drivePagerFunc func(
source driveSource,
servicer graph.Servicer,
resourceOwner string,
fields []string,
) (drivePager, error)
itemPagerFunc func(
servicer graph.Servicer,
driveID, link string,
) itemPager
// Track stats from drive enumeration. Represents the items backed up. // Track stats from drive enumeration. Represents the items backed up.
NumItems int NumItems int
NumFiles int NumFiles int
@ -80,23 +95,169 @@ func NewCollections(
ctrlOpts control.Options, ctrlOpts control.Options,
) *Collections { ) *Collections {
return &Collections{ return &Collections{
itemClient: itemClient, itemClient: itemClient,
tenant: tenant, tenant: tenant,
resourceOwner: resourceOwner, resourceOwner: resourceOwner,
source: source, source: source,
matcher: matcher, matcher: matcher,
CollectionMap: map[string]data.Collection{}, CollectionMap: map[string]data.Collection{},
service: service, drivePagerFunc: PagerForSource,
statusUpdater: statusUpdater, itemPagerFunc: defaultItemPager,
ctrl: ctrlOpts, service: service,
statusUpdater: statusUpdater,
ctrl: ctrlOpts,
} }
} }
func deserializeMetadata(
ctx context.Context,
cols []data.Collection,
) (map[string]string, map[string]map[string]string, error) {
logger.Ctx(ctx).Infow(
"deserialzing previous backup metadata",
"num_collections",
len(cols),
)
prevDeltas := map[string]string{}
prevFolders := map[string]map[string]string{}
for _, col := range cols {
items := col.Items()
for breakLoop := false; !breakLoop; {
select {
case <-ctx.Done():
return nil, nil, errors.Wrap(ctx.Err(), "deserialzing previous backup metadata")
case item, ok := <-items:
if !ok {
// End of collection items.
breakLoop = true
break
}
var err error
switch item.UUID() {
case graph.PreviousPathFileName:
err = deserializeMap(item.ToReader(), prevFolders)
case graph.DeltaURLsFileName:
err = deserializeMap(item.ToReader(), prevDeltas)
default:
logger.Ctx(ctx).Infow(
"skipping unknown metadata file",
"file_name",
item.UUID(),
)
continue
}
if err == nil {
// Successful decode.
continue
}
// This is conservative, but report an error if any of the items for
// any of the deserialized maps have duplicate drive IDs. This will
// cause the entire backup to fail, but it's not clear if higher
// layers would have caught this. Worst case if we don't handle this
// we end up in a situation where we're sourcing items from the wrong
// base in kopia wrapper.
if errors.Is(err, errExistingMapping) {
return nil, nil, errors.Wrapf(
err,
"deserializing metadata file %s",
item.UUID(),
)
}
logger.Ctx(ctx).Errorw(
"deserializing base backup metadata. Falling back to full backup for selected drives",
"error",
err,
"file_name",
item.UUID(),
)
}
}
// Go through and remove partial results (i.e. path mapping but no delta URL
// or vice-versa).
for k, v := range prevDeltas {
// Remove entries with an empty delta token as it's not useful.
if len(v) == 0 {
delete(prevDeltas, k)
delete(prevFolders, k)
}
// Remove entries without a folders map as we can't tell kopia the
// hierarchy changes.
if _, ok := prevFolders[k]; !ok {
delete(prevDeltas, k)
}
}
for k := range prevFolders {
if _, ok := prevDeltas[k]; !ok {
delete(prevFolders, k)
}
}
}
return prevDeltas, prevFolders, nil
}
var errExistingMapping = errors.New("mapping already exists for same drive ID")
// deserializeMap takes an reader and a map of already deserialized items and
// adds the newly deserialized items to alreadyFound. Items are only added to
// alreadyFound if none of the keys in the freshly deserialized map already
// exist in alreadyFound. reader is closed at the end of this function.
func deserializeMap[T any](reader io.ReadCloser, alreadyFound map[string]T) error {
defer reader.Close()
tmp := map[string]T{}
err := json.NewDecoder(reader).Decode(&tmp)
if err != nil {
return errors.Wrap(err, "deserializing file contents")
}
var duplicate bool
for k := range tmp {
if _, ok := alreadyFound[k]; ok {
duplicate = true
break
}
}
if duplicate {
return errors.WithStack(errExistingMapping)
}
maps.Copy(alreadyFound, tmp)
return nil
}
// Retrieves drive data as set of `data.Collections` and a set of item names to // Retrieves drive data as set of `data.Collections` and a set of item names to
// be excluded from the upcoming backup. // be excluded from the upcoming backup.
func (c *Collections) Get(ctx context.Context) ([]data.Collection, map[string]struct{}, error) { func (c *Collections) Get(
ctx context.Context,
prevMetadata []data.Collection,
) ([]data.Collection, map[string]struct{}, error) {
_, _, err := deserializeMetadata(ctx, prevMetadata)
if err != nil {
return nil, nil, err
}
// Enumerate drives for the specified resourceOwner // Enumerate drives for the specified resourceOwner
pager, err := PagerForSource(c.source, c.service, c.resourceOwner, nil) pager, err := c.drivePagerFunc(c.source, c.service, c.resourceOwner, nil)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }
@ -128,7 +289,11 @@ func (c *Collections) Get(ctx context.Context) ([]data.Collection, map[string]st
delta, paths, excluded, err := collectItems( delta, paths, excluded, err := collectItems(
ctx, ctx,
c.service, c.itemPagerFunc(
c.service,
driveID,
"",
),
driveID, driveID,
driveName, driveName,
c.UpdateCollections, c.UpdateCollections,
@ -137,17 +302,21 @@ func (c *Collections) Get(ctx context.Context) ([]data.Collection, map[string]st
return nil, nil, err return nil, nil, err
} }
// It's alright to have an empty folders map (i.e. no folders found) but not
// an empty delta token. This is because when deserializing the metadata we
// remove entries for which there is no corresponding delta token/folder. If
// we leave empty delta tokens then we may end up setting the State field
// for collections when not actually getting delta results.
if len(delta) > 0 { if len(delta) > 0 {
deltaURLs[driveID] = delta deltaURLs[driveID] = delta
} }
if len(paths) > 0 { // Avoid the edge case where there's no paths but we do have a valid delta
folderPaths[driveID] = map[string]string{} // token. We can accomplish this by adding an empty paths map for this
// drive. If we don't have this then the next backup won't use the delta
for id, p := range paths { // token because it thinks the folder paths weren't persisted.
folderPaths[driveID][id] = p folderPaths[driveID] = map[string]string{}
} maps.Copy(folderPaths[driveID], paths)
}
maps.Copy(excludedItems, excluded) maps.Copy(excludedItems, excluded)
} }
@ -261,6 +430,12 @@ func (c *Collections) UpdateCollections(
// already created and partially populated. // already created and partially populated.
updatePath(newPaths, *item.GetId(), folderPath.String()) updatePath(newPaths, *item.GetId(), folderPath.String())
if c.source != OneDriveSource {
continue
}
fallthrough
case item.GetFile() != nil: case item.GetFile() != nil:
if item.GetDeleted() != nil { if item.GetDeleted() != nil {
excluded[*item.GetId()] = struct{}{} excluded[*item.GetId()] = struct{}{}
@ -276,6 +451,7 @@ func (c *Collections) UpdateCollections(
// the exclude list. // the exclude list.
col, found := c.CollectionMap[collectionPath.String()] col, found := c.CollectionMap[collectionPath.String()]
if !found { if !found {
// TODO(ashmrtn): Compare old and new path and set collection state // TODO(ashmrtn): Compare old and new path and set collection state
// accordingly. // accordingly.
@ -290,13 +466,17 @@ func (c *Collections) UpdateCollections(
c.CollectionMap[collectionPath.String()] = col c.CollectionMap[collectionPath.String()] = col
c.NumContainers++ c.NumContainers++
c.NumItems++
} }
collection := col.(*Collection) collection := col.(*Collection)
collection.Add(item) collection.Add(item)
c.NumFiles++
c.NumItems++ c.NumItems++
if item.GetFile() != nil {
// This is necessary as we have a fallthrough for
// folders and packages
c.NumFiles++
}
default: default:
return errors.Errorf("item type not supported. item name : %s", *item.GetName()) return errors.Errorf("item type not supported. item name : %s", *item.GetName())

File diff suppressed because it is too large Load Diff

View File

@ -7,7 +7,6 @@ import (
"time" "time"
msdrive "github.com/microsoftgraph/msgraph-sdk-go/drive" msdrive "github.com/microsoftgraph/msgraph-sdk-go/drive"
msdrives "github.com/microsoftgraph/msgraph-sdk-go/drives"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors" "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
"github.com/pkg/errors" "github.com/pkg/errors"
@ -81,7 +80,7 @@ func drives(
page, err = pager.GetPage(ctx) page, err = pager.GetPage(ctx)
if err != nil { if err != nil {
// Various error handling. May return an error or perform a retry. // Various error handling. May return an error or perform a retry.
detailedError := support.ConnectorStackErrorTrace(err) detailedError := err.Error()
if strings.Contains(detailedError, userMysiteURLNotFound) || if strings.Contains(detailedError, userMysiteURLNotFound) ||
strings.Contains(detailedError, userMysiteNotFound) { strings.Contains(detailedError, userMysiteNotFound) {
logger.Ctx(ctx).Infof("resource owner does not have a drive") logger.Ctx(ctx).Infof("resource owner does not have a drive")
@ -135,11 +134,42 @@ type itemCollector func(
excluded map[string]struct{}, excluded map[string]struct{},
) error ) error
type itemPager interface {
GetPage(context.Context) (gapi.DeltaPageLinker, error)
SetNext(nextLink string)
ValuesIn(gapi.DeltaPageLinker) ([]models.DriveItemable, error)
}
func defaultItemPager(
servicer graph.Servicer,
driveID, link string,
) itemPager {
return api.NewItemPager(
servicer,
driveID,
link,
[]string{
"content.downloadUrl",
"createdBy",
"createdDateTime",
"file",
"folder",
"id",
"lastModifiedDateTime",
"name",
"package",
"parentReference",
"root",
"size",
},
)
}
// collectItems will enumerate all items in the specified drive and hand them to the // collectItems will enumerate all items in the specified drive and hand them to the
// provided `collector` method // provided `collector` method
func collectItems( func collectItems(
ctx context.Context, ctx context.Context,
service graph.Servicer, pager itemPager,
driveID, driveName string, driveID, driveName string,
collector itemCollector, collector itemCollector,
) (string, map[string]string, map[string]struct{}, error) { ) (string, map[string]string, map[string]struct{}, error) {
@ -154,34 +184,8 @@ func collectItems(
maps.Copy(newPaths, oldPaths) maps.Copy(newPaths, oldPaths)
// TODO: Specify a timestamp in the delta query
// https://docs.microsoft.com/en-us/graph/api/driveitem-delta?
// view=graph-rest-1.0&tabs=http#example-4-retrieving-delta-results-using-a-timestamp
builder := service.Client().DrivesById(driveID).Root().Delta()
pageCount := int32(999) // max we can do is 999
requestFields := []string{
"content.downloadUrl",
"createdBy",
"createdDateTime",
"file",
"folder",
"id",
"lastModifiedDateTime",
"name",
"package",
"parentReference",
"root",
"size",
}
requestConfig := &msdrives.ItemRootDeltaRequestBuilderGetRequestConfiguration{
QueryParameters: &msdrives.ItemRootDeltaRequestBuilderGetQueryParameters{
Top: &pageCount,
Select: requestFields,
},
}
for { for {
r, err := builder.Get(ctx, requestConfig) page, err := pager.GetPage(ctx)
if err != nil { if err != nil {
return "", nil, nil, errors.Wrapf( return "", nil, nil, errors.Wrapf(
err, err,
@ -190,23 +194,29 @@ func collectItems(
) )
} }
err = collector(ctx, driveID, driveName, r.GetValue(), oldPaths, newPaths, excluded) vals, err := pager.ValuesIn(page)
if err != nil {
return "", nil, nil, errors.Wrap(err, "extracting items from response")
}
err = collector(ctx, driveID, driveName, vals, oldPaths, newPaths, excluded)
if err != nil { if err != nil {
return "", nil, nil, err return "", nil, nil, err
} }
if r.GetOdataDeltaLink() != nil && len(*r.GetOdataDeltaLink()) > 0 { nextLink, deltaLink := gapi.NextAndDeltaLink(page)
newDeltaURL = *r.GetOdataDeltaLink()
if len(deltaLink) > 0 {
newDeltaURL = deltaLink
} }
// Check if there are more items // Check if there are more items
nextLink := r.GetOdataNextLink() if len(nextLink) == 0 {
if nextLink == nil {
break break
} }
logger.Ctx(ctx).Debugf("Found %s nextLink", *nextLink) logger.Ctx(ctx).Debugw("Found nextLink", "link", nextLink)
builder = msdrives.NewItemRootDeltaRequestBuilder(*nextLink, service.Adapter()) pager.SetNext(nextLink)
} }
return newDeltaURL, newPaths, excluded, nil return newDeltaURL, newPaths, excluded, nil
@ -226,7 +236,16 @@ func getFolder(
rawURL := fmt.Sprintf(itemByPathRawURLFmt, driveID, parentFolderID, folderName) rawURL := fmt.Sprintf(itemByPathRawURLFmt, driveID, parentFolderID, folderName)
builder := msdrive.NewItemsDriveItemItemRequestBuilder(rawURL, service.Adapter()) builder := msdrive.NewItemsDriveItemItemRequestBuilder(rawURL, service.Adapter())
foundItem, err := builder.Get(ctx, nil) var (
foundItem models.DriveItemable
err error
)
err = graph.RunWithRetry(func() error {
foundItem, err = builder.Get(ctx, nil)
return err
})
if err != nil { if err != nil {
var oDataError *odataerrors.ODataError var oDataError *odataerrors.ODataError
if errors.As(err, &oDataError) && if errors.As(err, &oDataError) &&
@ -318,7 +337,11 @@ func GetAllFolders(
for _, d := range drives { for _, d := range drives {
_, _, _, err = collectItems( _, _, _, err = collectItems(
ctx, ctx,
gs, defaultItemPager(
gs,
*d.GetId(),
"",
),
*d.GetId(), *d.GetId(),
*d.GetName(), *d.GetName(),
func( func(

View File

@ -15,6 +15,7 @@ import (
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/graph/api" "github.com/alcionai/corso/src/internal/connector/graph/api"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
@ -76,6 +77,15 @@ func TestOneDriveUnitSuite(t *testing.T) {
suite.Run(t, new(OneDriveUnitSuite)) suite.Run(t, new(OneDriveUnitSuite))
} }
func odErr(code string) *odataerrors.ODataError {
odErr := &odataerrors.ODataError{}
merr := odataerrors.MainError{}
merr.SetCode(&code)
odErr.SetError(&merr)
return odErr
}
func (suite *OneDriveUnitSuite) TestDrives() { func (suite *OneDriveUnitSuite) TestDrives() {
numDriveResults := 4 numDriveResults := 4
emptyLink := "" emptyLink := ""
@ -84,26 +94,18 @@ func (suite *OneDriveUnitSuite) TestDrives() {
// These errors won't be the "correct" format when compared to what graph // These errors won't be the "correct" format when compared to what graph
// returns, but they're close enough to have the same info when the inner // returns, but they're close enough to have the same info when the inner
// details are extracted via support package. // details are extracted via support package.
tmp := userMysiteURLNotFound mySiteURLNotFound := support.ConnectorStackErrorTraceWrap(
tmpMySiteURLNotFound := odataerrors.NewMainError() odErr(userMysiteURLNotFound),
tmpMySiteURLNotFound.SetMessage(&tmp) "maximum retries or unretryable",
)
mySiteURLNotFound := odataerrors.NewODataError() mySiteNotFound := support.ConnectorStackErrorTraceWrap(
mySiteURLNotFound.SetError(tmpMySiteURLNotFound) odErr(userMysiteNotFound),
"maximum retries or unretryable",
tmp2 := userMysiteNotFound )
tmpMySiteNotFound := odataerrors.NewMainError() deadlineExceeded := support.ConnectorStackErrorTraceWrap(
tmpMySiteNotFound.SetMessage(&tmp2) odErr(contextDeadlineExceeded),
"maximum retries or unretryable",
mySiteNotFound := odataerrors.NewODataError() )
mySiteNotFound.SetError(tmpMySiteNotFound)
tmp3 := contextDeadlineExceeded
tmpDeadlineExceeded := odataerrors.NewMainError()
tmpDeadlineExceeded.SetMessage(&tmp3)
deadlineExceeded := odataerrors.NewODataError()
deadlineExceeded.SetError(tmpDeadlineExceeded)
resultDrives := make([]models.Driveable, 0, numDriveResults) resultDrives := make([]models.Driveable, 0, numDriveResults)
@ -462,8 +464,8 @@ func (suite *OneDriveSuite) TestOneDriveNewCollections() {
testFolderMatcher{scope}, testFolderMatcher{scope},
service, service,
service.updateStatus, service.updateStatus,
control.Options{}, control.Options{ToggleFeatures: control.Toggles{EnablePermissionsBackup: true}},
).Get(ctx) ).Get(ctx, nil)
assert.NoError(t, err) assert.NoError(t, err)
// Don't expect excludes as this isn't an incremental backup. // Don't expect excludes as this isn't an incremental backup.
assert.Empty(t, excludes) assert.Empty(t, excludes)

View File

@ -1,7 +1,9 @@
package onedrive package onedrive
import ( import (
"bytes"
"context" "context"
"encoding/json"
"fmt" "fmt"
"io" "io"
"net/http" "net/http"
@ -37,6 +39,7 @@ func getDriveItem(
// sharePointItemReader will return a io.ReadCloser for the specified item // sharePointItemReader will return a io.ReadCloser for the specified item
// It crafts this by querying M365 for a download URL for the item // It crafts this by querying M365 for a download URL for the item
// and using a http client to initialize a reader // and using a http client to initialize a reader
// TODO: Add metadata fetching to SharePoint
func sharePointItemReader( func sharePointItemReader(
hc *http.Client, hc *http.Client,
item models.DriveItemable, item models.DriveItemable,
@ -53,6 +56,25 @@ func sharePointItemReader(
return dii, resp.Body, nil return dii, resp.Body, nil
} }
func oneDriveItemMetaReader(
ctx context.Context,
service graph.Servicer,
driveID string,
item models.DriveItemable,
) (io.ReadCloser, int, error) {
meta, err := oneDriveItemMetaInfo(ctx, service, driveID, item)
if err != nil {
return nil, 0, err
}
metaJSON, err := json.Marshal(meta)
if err != nil {
return nil, 0, err
}
return io.NopCloser(bytes.NewReader(metaJSON)), len(metaJSON), nil
}
// oneDriveItemReader will return a io.ReadCloser for the specified item // oneDriveItemReader will return a io.ReadCloser for the specified item
// It crafts this by querying M365 for a download URL for the item // It crafts this by querying M365 for a download URL for the item
// and using a http client to initialize a reader // and using a http client to initialize a reader
@ -60,16 +82,25 @@ func oneDriveItemReader(
hc *http.Client, hc *http.Client,
item models.DriveItemable, item models.DriveItemable,
) (details.ItemInfo, io.ReadCloser, error) { ) (details.ItemInfo, io.ReadCloser, error) {
resp, err := downloadItem(hc, item) var (
if err != nil { rc io.ReadCloser
return details.ItemInfo{}, nil, errors.Wrap(err, "downloading item") isFile = item.GetFile() != nil
)
if isFile {
resp, err := downloadItem(hc, item)
if err != nil {
return details.ItemInfo{}, nil, errors.Wrap(err, "downloading item")
}
rc = resp.Body
} }
dii := details.ItemInfo{ dii := details.ItemInfo{
OneDrive: oneDriveItemInfo(item, *item.GetSize()), OneDrive: oneDriveItemInfo(item, *item.GetSize()),
} }
return dii, resp.Body, nil return dii, rc, nil
} }
func downloadItem(hc *http.Client, item models.DriveItemable) (*http.Response, error) { func downloadItem(hc *http.Client, item models.DriveItemable) (*http.Response, error) {
@ -105,6 +136,10 @@ func downloadItem(hc *http.Client, item models.DriveItemable) (*http.Response, e
return resp, graph.Err401Unauthorized return resp, graph.Err401Unauthorized
} }
if resp.StatusCode == http.StatusInternalServerError {
return resp, graph.Err500InternalServerError
}
if resp.StatusCode == http.StatusServiceUnavailable { if resp.StatusCode == http.StatusServiceUnavailable {
return resp, graph.Err503ServiceUnavailable return resp, graph.Err503ServiceUnavailable
} }
@ -145,6 +180,59 @@ func oneDriveItemInfo(di models.DriveItemable, itemSize int64) *details.OneDrive
} }
} }
// oneDriveItemMetaInfo will fetch the meta information for a drive
// item. As of now, it only adds the permissions applicable for a
// onedrive item.
func oneDriveItemMetaInfo(
ctx context.Context, service graph.Servicer,
driveID string, di models.DriveItemable,
) (Metadata, error) {
itemID := di.GetId()
perm, err := service.Client().DrivesById(driveID).ItemsById(*itemID).Permissions().Get(ctx, nil)
if err != nil {
return Metadata{}, err
}
uperms := filterUserPermissions(perm.GetValue())
return Metadata{Permissions: uperms}, nil
}
func filterUserPermissions(perms []models.Permissionable) []UserPermission {
up := []UserPermission{}
for _, p := range perms {
if p.GetGrantedToV2() == nil {
// For link shares, we get permissions without a user
// specified
continue
}
roles := []string{}
for _, r := range p.GetRoles() {
// Skip if the only role available in owner
if r != "owner" {
roles = append(roles, r)
}
}
if len(roles) == 0 {
continue
}
up = append(up, UserPermission{
ID: *p.GetId(),
Roles: roles,
Email: *p.GetGrantedToV2().GetUser().GetAdditionalData()["email"].(*string),
Expiration: p.GetExpirationDateTime(),
})
}
return up
}
// sharePointItemInfo will populate a details.SharePointInfo struct // sharePointItemInfo will populate a details.SharePointInfo struct
// with properties from the drive item. ItemSize is specified // with properties from the drive item. ItemSize is specified
// separately for restore processes because the local itemable // separately for restore processes because the local itemable

View File

@ -8,6 +8,7 @@ import (
msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go" msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
@ -115,7 +116,17 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
return nil return nil
} }
_, _, _, err := collectItems(ctx, suite, suite.userDriveID, "General", itemCollector) _, _, _, err := collectItems(
ctx,
defaultItemPager(
suite,
suite.userDriveID,
"",
),
suite.userDriveID,
"General",
itemCollector,
)
require.NoError(suite.T(), err) require.NoError(suite.T(), err)
// Test Requirement 2: Need a file // Test Requirement 2: Need a file
@ -128,8 +139,8 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
) )
// Read data for the file // Read data for the file
itemInfo, itemData, err := oneDriveItemReader(graph.HTTPClient(graph.NoTimeout()), driveItem) itemInfo, itemData, err := oneDriveItemReader(graph.HTTPClient(graph.NoTimeout()), driveItem)
require.NoError(suite.T(), err) require.NoError(suite.T(), err)
require.NotNil(suite.T(), itemInfo.OneDrive) require.NotNil(suite.T(), itemInfo.OneDrive)
require.NotEmpty(suite.T(), itemInfo.OneDrive.ItemName) require.NotEmpty(suite.T(), itemInfo.OneDrive.ItemName)
@ -247,3 +258,72 @@ func (suite *ItemIntegrationSuite) TestDriveGetFolder() {
}) })
} }
} }
// getPermsUperms builds a matched pair of a graph permission and the
// UserPermission corso expects filterUserPermissions to produce for it.
// The permission grants the given scopes (roles) to userID.
func getPermsUperms(permID, userID string, scopes []string) (models.Permissionable, UserPermission) {
	identity := models.NewIdentity()
	identity.SetAdditionalData(map[string]any{"email": &userID})

	sharepointIdentity := models.NewSharePointIdentitySet()
	sharepointIdentity.SetUser(identity)

	perm := models.NewPermission()
	perm.SetId(&permID)
	// Bug fix: roles were hard-coded to "read", silently ignoring the
	// scopes argument callers passed in.
	perm.SetRoles(scopes)
	perm.SetGrantedToV2(sharepointIdentity)

	uperm := UserPermission{
		ID:    permID,
		Roles: scopes,
		Email: userID,
	}

	return perm, uperm
}
// TestOneDrivePermissionsFilter verifies that filterUserPermissions
// keeps user-bound grants and drops link shares (no grantee).
func TestOneDrivePermissionsFilter(t *testing.T) {
	permID := "fakePermId"
	userID := "fakeuser@provider.com"
	userID2 := "fakeuser2@provider.com"

	readPerm, readUperm := getPermsUperms(permID, userID, []string{"read"})
	readWritePerm, readWriteUperm := getPermsUperms(permID, userID2, []string{"read", "write"})

	noPerm, _ := getPermsUperms(permID, userID, []string{"read"})
	noPerm.SetGrantedToV2(nil) // eg: link shares

	cases := []struct {
		name              string
		graphPermissions  []models.Permissionable
		parsedPermissions []UserPermission
	}{
		{
			name:              "no perms",
			graphPermissions:  []models.Permissionable{},
			parsedPermissions: []UserPermission{},
		},
		{
			name:              "no user bound to perms",
			graphPermissions:  []models.Permissionable{noPerm},
			parsedPermissions: []UserPermission{},
		},
		{
			name:              "user with read permissions",
			graphPermissions:  []models.Permissionable{readPerm},
			parsedPermissions: []UserPermission{readUperm},
		},
		{
			name:              "user with read and write permissions",
			graphPermissions:  []models.Permissionable{readWritePerm},
			parsedPermissions: []UserPermission{readWriteUperm},
		},
		{
			name:              "multiple users with separate permissions",
			graphPermissions:  []models.Permissionable{readPerm, readWritePerm},
			parsedPermissions: []UserPermission{readUperm, readWriteUperm},
		},
	}

	for _, tc := range cases {
		tc := tc

		// Bug fix: the original loop never used tc.name, so failures
		// could not be attributed to a case; run each as a subtest.
		t.Run(tc.name, func(t *testing.T) {
			actual := filterUserPermissions(tc.graphPermissions)
			assert.ElementsMatch(t, tc.parsedPermissions, actual)
		})
	}
}

View File

@ -2,9 +2,15 @@ package onedrive
import ( import (
"context" "context"
"encoding/json"
"fmt"
"io" "io"
"runtime/trace" "runtime/trace"
"sort"
"strings"
msdrive "github.com/microsoftgraph/msgraph-sdk-go/drive"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
@ -23,30 +29,101 @@ const (
// Microsoft recommends 5-10MB buffers // Microsoft recommends 5-10MB buffers
// https://docs.microsoft.com/en-us/graph/api/driveitem-createuploadsession?view=graph-rest-1.0#best-practices // https://docs.microsoft.com/en-us/graph/api/driveitem-createuploadsession?view=graph-rest-1.0#best-practices
copyBufferSize = 5 * 1024 * 1024 copyBufferSize = 5 * 1024 * 1024
// versionWithDataAndMetaFiles is the corso backup format version
// in which we split from storing just the data to storing both
// the data and metadata in two files.
versionWithDataAndMetaFiles = 1
) )
// getParentPermissions looks up the permissions previously recorded for
// parentPath. A missing entry is an error for any non-root folder (its
// parent collection should already have been restored); for the drive
// root it simply means "no inherited permissions".
func getParentPermissions(
	parentPath path.Path,
	parentPermissions map[string][]UserPermission,
) ([]UserPermission, error) {
	parentPerms, ok := parentPermissions[parentPath.String()]
	if !ok {
		onedrivePath, err := path.ToOneDrivePath(parentPath)
		if err != nil {
			return nil, errors.Wrap(err, "invalid restore path")
		}

		if len(onedrivePath.Folders) != 0 {
			// Bug fix: err is nil on this branch, and errors.Wrap(nil, ...)
			// returns nil, so the original silently returned no error here.
			return nil, errors.Errorf("unable to compute item permissions for %s", parentPath.String())
		}

		parentPerms = []UserPermission{}
	}

	return parentPerms, nil
}
// RestoreCollections will restore the specified data collections into OneDrive // RestoreCollections will restore the specified data collections into OneDrive
func RestoreCollections( func RestoreCollections(
ctx context.Context, ctx context.Context,
backupVersion int,
service graph.Servicer, service graph.Servicer,
dest control.RestoreDestination, dest control.RestoreDestination,
opts control.Options,
dcs []data.Collection, dcs []data.Collection,
deets *details.Builder, deets *details.Builder,
) (*support.ConnectorOperationStatus, error) { ) (*support.ConnectorOperationStatus, error) {
var ( var (
restoreMetrics support.CollectionMetrics restoreMetrics support.CollectionMetrics
restoreErrors error restoreErrors error
metrics support.CollectionMetrics
folderPerms map[string][]UserPermission
canceled bool
// permissionIDMappings is used to map between old and new id
// of permissions as we restore them
permissionIDMappings = map[string]string{}
) )
errUpdater := func(id string, err error) { errUpdater := func(id string, err error) {
restoreErrors = support.WrapAndAppend(id, err, restoreErrors) restoreErrors = support.WrapAndAppend(id, err, restoreErrors)
} }
// Reorder collections so that the parents directories are created
// before the child directories
sort.Slice(dcs, func(i, j int) bool {
return dcs[i].FullPath().String() < dcs[j].FullPath().String()
})
parentPermissions := map[string][]UserPermission{}
// Iterate through the data collections and restore the contents of each // Iterate through the data collections and restore the contents of each
for _, dc := range dcs { for _, dc := range dcs {
temp, canceled := RestoreCollection(ctx, service, dc, OneDriveSource, dest.ContainerName, deets, errUpdater) var (
parentPerms []UserPermission
err error
)
restoreMetrics.Combine(temp) if opts.RestorePermissions {
parentPerms, err = getParentPermissions(dc.FullPath(), parentPermissions)
if err != nil {
errUpdater(dc.FullPath().String(), err)
}
}
metrics, folderPerms, permissionIDMappings, canceled = RestoreCollection(
ctx,
backupVersion,
service,
dc,
parentPerms,
OneDriveSource,
dest.ContainerName,
deets,
errUpdater,
permissionIDMappings,
opts.RestorePermissions,
)
for k, v := range folderPerms {
parentPermissions[k] = v
}
restoreMetrics.Combine(metrics)
if canceled { if canceled {
break break
@ -66,29 +143,37 @@ func RestoreCollections(
// RestoreCollection handles restoration of an individual collection. // RestoreCollection handles restoration of an individual collection.
// returns: // returns:
// - the collection's item and byte count metrics // - the collection's item and byte count metrics
// - the context cancellation state (true if the context is cancelled) // - the context cancellation state (true if the context is canceled)
func RestoreCollection( func RestoreCollection(
ctx context.Context, ctx context.Context,
backupVersion int,
service graph.Servicer, service graph.Servicer,
dc data.Collection, dc data.Collection,
parentPerms []UserPermission,
source driveSource, source driveSource,
restoreContainerName string, restoreContainerName string,
deets *details.Builder, deets *details.Builder,
errUpdater func(string, error), errUpdater func(string, error),
) (support.CollectionMetrics, bool) { permissionIDMappings map[string]string,
restorePerms bool,
) (support.CollectionMetrics, map[string][]UserPermission, map[string]string, bool) {
ctx, end := D.Span(ctx, "gc:oneDrive:restoreCollection", D.Label("path", dc.FullPath())) ctx, end := D.Span(ctx, "gc:oneDrive:restoreCollection", D.Label("path", dc.FullPath()))
defer end() defer end()
var ( var (
metrics = support.CollectionMetrics{} metrics = support.CollectionMetrics{}
copyBuffer = make([]byte, copyBufferSize) copyBuffer = make([]byte, copyBufferSize)
directory = dc.FullPath() directory = dc.FullPath()
restoredIDs = map[string]string{}
itemInfo details.ItemInfo
itemID string
folderPerms = map[string][]UserPermission{}
) )
drivePath, err := path.ToOneDrivePath(directory) drivePath, err := path.ToOneDrivePath(directory)
if err != nil { if err != nil {
errUpdater(directory.String(), err) errUpdater(directory.String(), err)
return metrics, false return metrics, folderPerms, permissionIDMappings, false
} }
// Assemble folder hierarchy we're going to restore into (we recreate the folder hierarchy // Assemble folder hierarchy we're going to restore into (we recreate the folder hierarchy
@ -108,7 +193,7 @@ func RestoreCollection(
restoreFolderID, err := CreateRestoreFolders(ctx, service, drivePath.DriveID, restoreFolderElements) restoreFolderID, err := CreateRestoreFolders(ctx, service, drivePath.DriveID, restoreFolderElements)
if err != nil { if err != nil {
errUpdater(directory.String(), errors.Wrapf(err, "failed to create folders %v", restoreFolderElements)) errUpdater(directory.String(), errors.Wrapf(err, "failed to create folders %v", restoreFolderElements))
return metrics, false return metrics, folderPerms, permissionIDMappings, false
} }
// Restore items from the collection // Restore items from the collection
@ -118,50 +203,175 @@ func RestoreCollection(
select { select {
case <-ctx.Done(): case <-ctx.Done():
errUpdater("context canceled", ctx.Err()) errUpdater("context canceled", ctx.Err())
return metrics, true return metrics, folderPerms, permissionIDMappings, true
case itemData, ok := <-items: case itemData, ok := <-items:
if !ok { if !ok {
return metrics, false return metrics, folderPerms, permissionIDMappings, false
}
metrics.Objects++
metrics.TotalBytes += int64(len(copyBuffer))
itemInfo, err := restoreItem(ctx,
service,
itemData,
drivePath.DriveID,
restoreFolderID,
copyBuffer,
source)
if err != nil {
errUpdater(itemData.UUID(), err)
continue
} }
itemPath, err := dc.FullPath().Append(itemData.UUID(), true) itemPath, err := dc.FullPath().Append(itemData.UUID(), true)
if err != nil { if err != nil {
logger.Ctx(ctx).DPanicw("transforming item to full path", "error", err) logger.Ctx(ctx).DPanicw("transforming item to full path", "error", err)
errUpdater(itemData.UUID(), err) errUpdater(itemData.UUID(), err)
continue continue
} }
deets.Add( if source == OneDriveSource && backupVersion >= versionWithDataAndMetaFiles {
itemPath.String(), name := itemData.UUID()
itemPath.ShortRef(), if strings.HasSuffix(name, DataFileSuffix) {
"", metrics.Objects++
true, metrics.TotalBytes += int64(len(copyBuffer))
itemInfo) trimmedName := strings.TrimSuffix(name, DataFileSuffix)
metrics.Successes++ itemID, itemInfo, err = restoreData(ctx, service, trimmedName, itemData,
drivePath.DriveID, restoreFolderID, copyBuffer, source)
if err != nil {
errUpdater(itemData.UUID(), err)
continue
}
restoredIDs[trimmedName] = itemID
deets.Add(itemPath.String(), itemPath.ShortRef(), "", true, itemInfo)
// Mark it as success without processing .meta
// file if we are not restoring permissions
if !restorePerms {
metrics.Successes++
}
} else if strings.HasSuffix(name, MetaFileSuffix) {
if !restorePerms {
continue
}
meta, err := getMetadata(itemData.ToReader())
if err != nil {
errUpdater(itemData.UUID(), err)
continue
}
trimmedName := strings.TrimSuffix(name, MetaFileSuffix)
restoreID, ok := restoredIDs[trimmedName]
if !ok {
errUpdater(itemData.UUID(), fmt.Errorf("item not available to restore permissions"))
continue
}
permissionIDMappings, err = restorePermissions(
ctx,
service,
drivePath.DriveID,
restoreID,
parentPerms,
meta.Permissions,
permissionIDMappings,
)
if err != nil {
errUpdater(itemData.UUID(), err)
continue
}
// Objects count is incremented when we restore a
// data file and success count is incremented when
// we restore a meta file as every data file
// should have an associated meta file
metrics.Successes++
} else if strings.HasSuffix(name, DirMetaFileSuffix) {
trimmedName := strings.TrimSuffix(name, DirMetaFileSuffix)
folderID, err := createRestoreFolder(
ctx,
service,
drivePath.DriveID,
trimmedName,
restoreFolderID,
)
if err != nil {
errUpdater(itemData.UUID(), err)
continue
}
if !restorePerms {
continue
}
meta, err := getMetadata(itemData.ToReader())
if err != nil {
errUpdater(itemData.UUID(), err)
continue
}
permissionIDMappings, err = restorePermissions(
ctx,
service,
drivePath.DriveID,
folderID,
parentPerms,
meta.Permissions,
permissionIDMappings,
)
if err != nil {
errUpdater(itemData.UUID(), err)
continue
}
trimmedPath := strings.TrimSuffix(itemPath.String(), DirMetaFileSuffix)
folderPerms[trimmedPath] = meta.Permissions
} else {
if !ok {
errUpdater(itemData.UUID(), fmt.Errorf("invalid backup format, you might be using an old backup"))
continue
}
}
} else {
metrics.Objects++
metrics.TotalBytes += int64(len(copyBuffer))
// No permissions stored at the moment for SharePoint
_, itemInfo, err = restoreData(ctx,
service,
itemData.UUID(),
itemData,
drivePath.DriveID,
restoreFolderID,
copyBuffer,
source)
if err != nil {
errUpdater(itemData.UUID(), err)
continue
}
deets.Add(itemPath.String(), itemPath.ShortRef(), "", true, itemInfo)
metrics.Successes++
}
} }
} }
} }
// createRestoreFolders creates the restore folder hieararchy in the specified drive and returns the folder ID // Creates a folder with its permissions
// of the last folder entry in the hiearchy func createRestoreFolder(
ctx context.Context,
service graph.Servicer,
driveID, folder, parentFolderID string,
) (string, error) {
folderItem, err := createItem(ctx, service, driveID, parentFolderID, newItem(folder, true))
if err != nil {
return "", errors.Wrapf(
err,
"failed to create folder %s/%s. details: %s", parentFolderID, folder,
support.ConnectorStackErrorTrace(err),
)
}
logger.Ctx(ctx).Debugf("Resolved %s in %s to %s", folder, parentFolderID, *folderItem.GetId())
return *folderItem.GetId(), nil
}
// createRestoreFolders creates the restore folder hierarchy in the specified drive and returns the folder ID
// of the last folder entry in the hierarchy
func CreateRestoreFolders(ctx context.Context, service graph.Servicer, driveID string, restoreFolders []string, func CreateRestoreFolders(ctx context.Context, service graph.Servicer, driveID string, restoreFolders []string,
) (string, error) { ) (string, error) {
driveRoot, err := service.Client().DrivesById(driveID).Root().Get(ctx, nil) driveRoot, err := service.Client().DrivesById(driveID).Root().Get(ctx, nil)
@ -209,15 +419,16 @@ func CreateRestoreFolders(ctx context.Context, service graph.Servicer, driveID s
return parentFolderID, nil return parentFolderID, nil
} }
// restoreItem will create a new item in the specified `parentFolderID` and upload the data.Stream // restoreData will create a new item in the specified `parentFolderID` and upload the data.Stream
func restoreItem( func restoreData(
ctx context.Context, ctx context.Context,
service graph.Servicer, service graph.Servicer,
name string,
itemData data.Stream, itemData data.Stream,
driveID, parentFolderID string, driveID, parentFolderID string,
copyBuffer []byte, copyBuffer []byte,
source driveSource, source driveSource,
) (details.ItemInfo, error) { ) (string, details.ItemInfo, error) {
ctx, end := D.Span(ctx, "gc:oneDrive:restoreItem", D.Label("item_uuid", itemData.UUID())) ctx, end := D.Span(ctx, "gc:oneDrive:restoreItem", D.Label("item_uuid", itemData.UUID()))
defer end() defer end()
@ -227,19 +438,19 @@ func restoreItem(
// Get the stream size (needed to create the upload session) // Get the stream size (needed to create the upload session)
ss, ok := itemData.(data.StreamSize) ss, ok := itemData.(data.StreamSize)
if !ok { if !ok {
return details.ItemInfo{}, errors.Errorf("item %q does not implement DataStreamInfo", itemName) return "", details.ItemInfo{}, errors.Errorf("item %q does not implement DataStreamInfo", itemName)
} }
// Create Item // Create Item
newItem, err := createItem(ctx, service, driveID, parentFolderID, newItem(itemData.UUID(), false)) newItem, err := createItem(ctx, service, driveID, parentFolderID, newItem(name, false))
if err != nil { if err != nil {
return details.ItemInfo{}, errors.Wrapf(err, "failed to create item %s", itemName) return "", details.ItemInfo{}, errors.Wrapf(err, "failed to create item %s", itemName)
} }
// Get a drive item writer // Get a drive item writer
w, err := driveItemWriter(ctx, service, driveID, *newItem.GetId(), ss.Size()) w, err := driveItemWriter(ctx, service, driveID, *newItem.GetId(), ss.Size())
if err != nil { if err != nil {
return details.ItemInfo{}, errors.Wrapf(err, "failed to create item upload session %s", itemName) return "", details.ItemInfo{}, errors.Wrapf(err, "failed to create item upload session %s", itemName)
} }
iReader := itemData.ToReader() iReader := itemData.ToReader()
@ -250,7 +461,7 @@ func restoreItem(
// Upload the stream data // Upload the stream data
written, err := io.CopyBuffer(w, progReader, copyBuffer) written, err := io.CopyBuffer(w, progReader, copyBuffer)
if err != nil { if err != nil {
return details.ItemInfo{}, errors.Wrapf(err, "failed to upload data: item %s", itemName) return "", details.ItemInfo{}, errors.Wrapf(err, "failed to upload data: item %s", itemName)
} }
dii := details.ItemInfo{} dii := details.ItemInfo{}
@ -262,5 +473,129 @@ func restoreItem(
dii.OneDrive = oneDriveItemInfo(newItem, written) dii.OneDrive = oneDriveItemInfo(newItem, written)
} }
return dii, nil return *newItem.GetId(), dii, nil
}
// getMetadata reads and parses the JSON metadata (permissions, etc.) for
// an item. A nil reader — which occurs for the top level container
// folder — yields the zero-value Metadata with no error.
func getMetadata(metar io.ReadCloser) (Metadata, error) {
	var meta Metadata

	// `metar` will be nil for the top level container folder
	if metar != nil {
		metaraw, err := io.ReadAll(metar)
		if err != nil {
			return Metadata{}, errors.Wrap(err, "reading item metadata")
		}

		if err := json.Unmarshal(metaraw, &meta); err != nil {
			return Metadata{}, errors.Wrap(err, "parsing item metadata")
		}
	}

	return meta, nil
}
// getChildPermissions diffs a child's permissions against its parent's.
// It returns (added, removed): permissions present only on the child,
// and permissions present only on the parent. We must store nested
// permissions on the child because a file can drop a permission that
// its containing folder still carries.
func getChildPermissions(childPermissions, parentPermissions []UserPermission) ([]UserPermission, []UserPermission) {
	// Index both sides by permission ID so the diff is O(n+m) instead of
	// the original nested-loop O(n*m). Output order is preserved.
	parentIDs := make(map[string]struct{}, len(parentPermissions))
	for _, pp := range parentPermissions {
		parentIDs[pp.ID] = struct{}{}
	}

	childIDs := make(map[string]struct{}, len(childPermissions))
	for _, cp := range childPermissions {
		childIDs[cp.ID] = struct{}{}
	}

	addedPermissions := []UserPermission{}

	for _, cp := range childPermissions {
		if _, ok := parentIDs[cp.ID]; !ok {
			addedPermissions = append(addedPermissions, cp)
		}
	}

	removedPermissions := []UserPermission{}

	for _, pp := range parentPermissions {
		if _, ok := childIDs[pp.ID]; !ok {
			removedPermissions = append(removedPermissions, pp)
		}
	}

	return addedPermissions, removedPermissions
}
// restorePermissions takes in the permissions that were added and the
// removed(ones present in parent but not in child) and adds/removes
// the necessary permissions on onedrive objects.
func restorePermissions(
ctx context.Context,
service graph.Servicer,
driveID string,
itemID string,
parentPerms []UserPermission,
childPerms []UserPermission,
permissionIDMappings map[string]string,
) (map[string]string, error) {
permAdded, permRemoved := getChildPermissions(childPerms, parentPerms)
for _, p := range permRemoved {
err := service.Client().DrivesById(driveID).ItemsById(itemID).
PermissionsById(permissionIDMappings[p.ID]).Delete(ctx, nil)
if err != nil {
return permissionIDMappings, errors.Wrapf(
err,
"failed to remove permission for item %s. details: %s",
itemID,
support.ConnectorStackErrorTrace(err),
)
}
}
for _, p := range permAdded {
pbody := msdrive.NewItemsItemInvitePostRequestBody()
pbody.SetRoles(p.Roles)
if p.Expiration != nil {
expiry := p.Expiration.String()
pbody.SetExpirationDateTime(&expiry)
}
si := false
pbody.SetSendInvitation(&si)
rs := true
pbody.SetRequireSignIn(&rs)
rec := models.NewDriveRecipient()
rec.SetEmail(&p.Email)
pbody.SetRecipients([]models.DriveRecipientable{rec})
np, err := service.Client().DrivesById(driveID).ItemsById(itemID).Invite().Post(ctx, pbody, nil)
if err != nil {
return permissionIDMappings, errors.Wrapf(
err,
"failed to set permission for item %s. details: %s",
itemID,
support.ConnectorStackErrorTrace(err),
)
}
permissionIDMappings[p.ID] = *np.GetValue()[0].GetId()
}
return permissionIDMappings, nil
} }

View File

@ -0,0 +1,6 @@
package api
// Tuple pairs a human-readable display name with its M365 ID.
type Tuple struct {
	// Name is the display name; callers may fall back to the ID when
	// the API returns no name.
	Name string
	// ID is the M365 identifier of the item.
	ID string
}

View File

@ -0,0 +1,21 @@
package api
import (
"testing"
"github.com/alcionai/corso/src/internal/connector/discovery/api"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/pkg/account"
"github.com/stretchr/testify/require"
)
// createTestBetaService constructs a BetaService from the given m365
// credentials, failing the test immediately if the graph adapter
// cannot be created.
func createTestBetaService(t *testing.T, credentials account.M365Config) *api.BetaService {
	a, err := graph.CreateAdapter(
		credentials.AzureTenantID,
		credentials.AzureClientID,
		credentials.AzureClientSecret,
	)
	require.NoError(t, err)

	return api.NewBetaService(a)
}

View File

@ -0,0 +1,93 @@
package api
import (
"context"
"github.com/alcionai/corso/src/internal/connector/discovery/api"
"github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
"github.com/alcionai/corso/src/internal/connector/graph/betasdk/sites"
"github.com/alcionai/corso/src/internal/connector/support"
)
// GetSitePage retrieves the Pages with the given IDs from the site.
// Each page is fetched with its canvasLayout expanded. Returns an error
// if any individual fetch fails.
func GetSitePage(
	ctx context.Context,
	serv *api.BetaService,
	siteID string,
	pages []string,
) ([]models.SitePageable, error) {
	// Pre-size: we fetch exactly one page per requested ID.
	col := make([]models.SitePageable, 0, len(pages))
	opts := retrieveSitePageOptions()

	for _, entry := range pages {
		page, err := serv.Client().SitesById(siteID).PagesById(entry).Get(ctx, opts)
		if err != nil {
			return nil, support.ConnectorStackErrorTraceWrap(err, "fetching page: "+entry)
		}

		col = append(col, page)
	}

	return col, nil
}
// FetchPages walks all site pages (following @odata.nextLink paging)
// and returns a (name, ID) Tuple per page. When a page has no name,
// its ID is used as the name.
func FetchPages(ctx context.Context, bs *api.BetaService, siteID string) ([]Tuple, error) {
	var (
		builder    = bs.Client().SitesById(siteID).Pages()
		opts       = fetchPageOptions()
		pageTuples = make([]Tuple, 0)
	)

	for {
		resp, err := builder.Get(ctx, opts)
		if err != nil {
			return nil, support.ConnectorStackErrorTraceWrap(err, "failed fetching site page")
		}

		for _, entry := range resp.GetValue() {
			// Bug fix: guard the ID dereference; the original panicked on
			// a malformed entry with a nil ID.
			if entry.GetId() == nil {
				continue
			}

			pid := *entry.GetId()
			temp := Tuple{Name: pid, ID: pid}

			if entry.GetName() != nil {
				temp.Name = *entry.GetName()
			}

			pageTuples = append(pageTuples, temp)
		}

		if resp.GetOdataNextLink() == nil {
			break
		}

		builder = sites.NewItemPagesRequestBuilder(*resp.GetOdataNextLink(), bs.Client().Adapter())
	}

	return pageTuples, nil
}
// fetchPageOptions builds query options that restrict Site Page reads
// to the minimal "id" and "name" fields.
// Pages API: https://learn.microsoft.com/en-us/graph/api/resources/sitepage?view=graph-rest-beta
func fetchPageOptions() *sites.ItemPagesRequestBuilderGetRequestConfiguration {
	return &sites.ItemPagesRequestBuilderGetRequestConfiguration{
		QueryParameters: &sites.ItemPagesRequestBuilderGetQueryParameters{
			Select: []string{"id", "name"},
		},
	}
}
// retrieveSitePageOptions builds query options that expand a page's
// canvasLayout when fetching a single Site Page.
func retrieveSitePageOptions() *sites.ItemPagesSitePageItemRequestBuilderGetRequestConfiguration {
	return &sites.ItemPagesSitePageItemRequestBuilderGetRequestConfiguration{
		QueryParameters: &sites.ItemPagesSitePageItemRequestBuilderGetQueryParameters{
			Expand: []string{"canvasLayout"},
		},
	}
}

View File

@ -0,0 +1,71 @@
package api
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
)
// SharePointPageSuite exercises the beta SitePages API against a live
// M365 site; it requires M365 credential env vars (see SetupSuite).
type SharePointPageSuite struct {
	suite.Suite
	// siteID is the M365 site under test, read from the test env.
	siteID string
	// creds holds the m365 account config used to build graph services.
	creds account.M365Config
}
// SetupSuite loads the M365 site ID and account credentials from the
// test environment, failing fast if the required env sets are missing.
func (suite *SharePointPageSuite) SetupSuite() {
	t := suite.T()
	tester.MustGetEnvSets(t, tester.M365AcctCredEnvs)
	suite.siteID = tester.M365SiteID(t)
	a := tester.NewM365Account(t)
	m365, err := a.M365Config()
	require.NoError(t, err)
	suite.creds = m365
}
// TestSharePointPageSuite runs SharePointPageSuite, but only when the
// CI or SharePoint graph-connector test groups are enabled.
func TestSharePointPageSuite(t *testing.T) {
	tester.RunOnAny(
		t,
		tester.CorsoCITests,
		tester.CorsoGraphConnectorSharePointTests)
	suite.Run(t, new(SharePointPageSuite))
}
// TestFetchPages verifies that FetchPages returns a non-empty set of
// page tuples for the configured live site, and logs each for
// debugging.
func (suite *SharePointPageSuite) TestFetchPages() {
	ctx, flush := tester.NewContext()
	defer flush()

	t := suite.T()
	service := createTestBetaService(t, suite.creds)

	pgs, err := FetchPages(ctx, service, suite.siteID)
	assert.NoError(t, err)
	require.NotNil(t, pgs)
	assert.NotZero(t, len(pgs))

	for _, entry := range pgs {
		t.Logf("id: %s\t name: %s\n", entry.ID, entry.Name)
	}
}
// TestGetSitePage fetches the first page listed by FetchPages and
// verifies GetSitePage returns it without error.
func (suite *SharePointPageSuite) TestGetSitePage() {
	ctx, flush := tester.NewContext()
	defer flush()

	t := suite.T()
	service := createTestBetaService(t, suite.creds)

	tuples, err := FetchPages(ctx, service, suite.siteID)
	require.NoError(t, err)
	// Bug fix: NotNil passes on an empty (non-nil) slice, which would
	// make the tuples[0] index below panic; NotEmpty guards both cases.
	require.NotEmpty(t, tuples)

	jobs := []string{tuples[0].ID}

	pages, err := GetSitePage(ctx, service, suite.siteID, jobs)
	assert.NoError(t, err)
	assert.NotEmpty(t, pages)
}

View File

@ -9,6 +9,7 @@ import (
kw "github.com/microsoft/kiota-serialization-json-go" kw "github.com/microsoft/kiota-serialization-json-go"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/connector/discovery/api"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
@ -46,6 +47,7 @@ type Collection struct {
jobs []string jobs []string
// M365 IDs of the items of this collection // M365 IDs of the items of this collection
service graph.Servicer service graph.Servicer
betaService *api.BetaService
statusUpdater support.StatusUpdater statusUpdater support.StatusUpdater
} }

View File

@ -17,11 +17,27 @@ import (
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
) )
type SharePointCollectionSuite struct { type SharePointCollectionSuite struct {
suite.Suite suite.Suite
siteID string
creds account.M365Config
}
func (suite *SharePointCollectionSuite) SetupSuite() {
t := suite.T()
tester.MustGetEnvSets(t, tester.M365AcctCredEnvs)
suite.siteID = tester.M365SiteID(t)
a := tester.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err)
suite.creds = m365
} }
func TestSharePointCollectionSuite(t *testing.T) { func TestSharePointCollectionSuite(t *testing.T) {
@ -95,20 +111,33 @@ func (suite *SharePointCollectionSuite) TestSharePointListCollection() {
assert.Equal(t, testName, shareInfo.Info().SharePoint.ItemName) assert.Equal(t, testName, shareInfo.Info().SharePoint.ItemName)
} }
// TestCollectPages verifies that collectPages produces a non-empty set
// of page collections for the configured live site.
func (suite *SharePointCollectionSuite) TestCollectPages() {
	ctx, flush := tester.NewContext()
	defer flush()

	t := suite.T()

	// NOTE(review): account.AzureTenantID looks like an env-key constant
	// rather than an actual tenant ID value — confirm collectPages
	// expects the key name for its tenantID argument here.
	col, err := collectPages(
		ctx,
		suite.creds,
		nil,
		account.AzureTenantID,
		suite.siteID,
		nil,
		&MockGraphService{},
		control.Defaults(),
	)
	assert.NoError(t, err)
	assert.NotEmpty(t, col)
}
// TestRestoreListCollection verifies Graph Restore API for the List Collection // TestRestoreListCollection verifies Graph Restore API for the List Collection
func (suite *SharePointCollectionSuite) TestRestoreListCollection() { func (suite *SharePointCollectionSuite) TestRestoreListCollection() {
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
t := suite.T() t := suite.T()
siteID := tester.M365SiteID(t)
a := tester.NewM365Account(t)
account, err := a.M365Config()
require.NoError(t, err)
service, err := createTestService(account)
require.NoError(t, err)
service := createTestService(t, suite.creds)
listing := mockconnector.GetMockListDefault("Mock List") listing := mockconnector.GetMockListDefault("Mock List")
testName := "MockListing" testName := "MockListing"
listing.SetDisplayName(&testName) listing.SetDisplayName(&testName)
@ -123,13 +152,13 @@ func (suite *SharePointCollectionSuite) TestRestoreListCollection() {
destName := "Corso_Restore_" + common.FormatNow(common.SimpleTimeTesting) destName := "Corso_Restore_" + common.FormatNow(common.SimpleTimeTesting)
deets, err := restoreListItem(ctx, service, listData, siteID, destName) deets, err := restoreListItem(ctx, service, listData, suite.siteID, destName)
assert.NoError(t, err) assert.NoError(t, err)
t.Logf("List created: %s\n", deets.SharePoint.ItemName) t.Logf("List created: %s\n", deets.SharePoint.ItemName)
// Clean-Up // Clean-Up
var ( var (
builder = service.Client().SitesById(siteID).Lists() builder = service.Client().SitesById(suite.siteID).Lists()
isFound bool isFound bool
deleteID string deleteID string
) )
@ -156,7 +185,7 @@ func (suite *SharePointCollectionSuite) TestRestoreListCollection() {
} }
if isFound { if isFound {
err := DeleteList(ctx, service, siteID, deleteID) err := DeleteList(ctx, service, suite.siteID, deleteID)
assert.NoError(t, err) assert.NoError(t, err)
} }
} }
@ -168,25 +197,18 @@ func (suite *SharePointCollectionSuite) TestRestoreLocation() {
defer flush() defer flush()
t := suite.T() t := suite.T()
a := tester.NewM365Account(t)
account, err := a.M365Config()
require.NoError(t, err)
service, err := createTestService(account)
require.NoError(t, err)
service := createTestService(t, suite.creds)
rootFolder := "General_" + common.FormatNow(common.SimpleTimeTesting) rootFolder := "General_" + common.FormatNow(common.SimpleTimeTesting)
siteID := tester.M365SiteID(t) folderID, err := createRestoreFolders(ctx, service, suite.siteID, []string{rootFolder})
folderID, err := createRestoreFolders(ctx, service, siteID, []string{rootFolder})
assert.NoError(t, err) assert.NoError(t, err)
t.Log("FolderID: " + folderID) t.Log("FolderID: " + folderID)
_, err = createRestoreFolders(ctx, service, siteID, []string{rootFolder, "Tsao"}) _, err = createRestoreFolders(ctx, service, suite.siteID, []string{rootFolder, "Tsao"})
assert.NoError(t, err) assert.NoError(t, err)
// CleanUp // CleanUp
siteDrive, err := service.Client().SitesById(siteID).Drive().Get(ctx, nil) siteDrive, err := service.Client().SitesById(suite.siteID).Drive().Get(ctx, nil)
require.NoError(t, err) require.NoError(t, err)
driveID := *siteDrive.GetId() driveID := *siteDrive.GetId()

View File

@ -6,11 +6,14 @@ import (
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/connector/discovery/api"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive" "github.com/alcionai/corso/src/internal/connector/onedrive"
sapi "github.com/alcionai/corso/src/internal/connector/sharepoint/api"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -152,7 +155,9 @@ func collectLibraries(
updater.UpdateStatus, updater.UpdateStatus,
ctrlOpts) ctrlOpts)
odcs, excludes, err := colls.Get(ctx) // TODO(ashmrtn): Pass previous backup metadata when SharePoint supports delta
// token-based incrementals.
odcs, excludes, err := colls.Get(ctx, nil)
if err != nil { if err != nil {
return nil, nil, support.WrapAndAppend(siteID, err, errs) return nil, nil, support.WrapAndAppend(siteID, err, errs)
} }
@ -160,6 +165,55 @@ func collectLibraries(
return append(collections, odcs...), excludes, errs return append(collections, odcs...), excludes, errs
} }
// collectPages constructs a sharepoint Collections struct and Get()s the associated
// M365 IDs for the associated Pages
func collectPages(
ctx context.Context,
creds account.M365Config,
serv graph.Servicer,
tenantID, siteID string,
scope selectors.SharePointScope,
updater statusUpdater,
ctrlOpts control.Options,
) ([]data.Collection, error) {
logger.Ctx(ctx).With("site", siteID).Debug("Creating SharePoint Pages collections")
spcs := make([]data.Collection, 0)
// make the betaClient
adpt, err := graph.CreateAdapter(creds.AzureTenantID, creds.AzureClientID, creds.AzureClientSecret)
if err != nil {
return nil, errors.Wrap(err, "adapter for betaservice not created")
}
betaService := api.NewBetaService(adpt)
tuples, err := sapi.FetchPages(ctx, betaService, siteID)
if err != nil {
return nil, err
}
for _, tuple := range tuples {
dir, err := path.Builder{}.Append(tuple.Name).
ToDataLayerSharePointPath(
tenantID,
siteID,
path.PagesCategory,
false)
if err != nil {
return nil, errors.Wrapf(err, "failed to create collection path for site: %s", siteID)
}
collection := NewCollection(dir, serv, updater.UpdateStatus)
collection.betaService = betaService
collection.AddJob(tuple.ID)
spcs = append(spcs, collection)
}
return spcs, nil
}
type folderMatcher struct { type folderMatcher struct {
scope selectors.SharePointScope scope selectors.SharePointScope
} }

View File

@ -77,7 +77,7 @@ func (suite *SharePointLibrariesSuite) TestUpdateCollections() {
site, site,
testBaseDrivePath, testBaseDrivePath,
), ),
expectedItemCount: 2, expectedItemCount: 1,
expectedFileCount: 1, expectedFileCount: 1,
expectedContainerCount: 1, expectedContainerCount: 1,
}, },

View File

@ -4,11 +4,11 @@ import (
"testing" "testing"
msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go" msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
"github.com/pkg/errors"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive" "github.com/alcionai/corso/src/internal/connector/onedrive"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
) )
@ -29,21 +29,22 @@ func (ms *MockGraphService) Adapter() *msgraphsdk.GraphRequestAdapter {
return nil return nil
} }
func (ms *MockGraphService) UpdateStatus(*support.ConnectorOperationStatus) {
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Helper Functions // Helper Functions
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
func createTestService(credentials account.M365Config) (*graph.Service, error) { func createTestService(t *testing.T, credentials account.M365Config) *graph.Service {
adapter, err := graph.CreateAdapter( adapter, err := graph.CreateAdapter(
credentials.AzureTenantID, credentials.AzureTenantID,
credentials.AzureClientID, credentials.AzureClientID,
credentials.AzureClientSecret, credentials.AzureClientSecret,
) )
if err != nil { require.NoError(t, err, "creating microsoft graph service for exchange")
return nil, errors.Wrap(err, "creating microsoft graph service for exchange")
}
return graph.NewService(adapter), nil return graph.NewService(adapter)
} }
func expectedPathAsSlice(t *testing.T, tenant, user string, rest ...string) []string { func expectedPathAsSlice(t *testing.T, tenant, user string, rest ...string) []string {

View File

@ -49,9 +49,7 @@ func (suite *SharePointSuite) TestLoadList() {
defer flush() defer flush()
t := suite.T() t := suite.T()
service, err := createTestService(suite.creds) service := createTestService(t, suite.creds)
require.NoError(t, err)
tuples, err := preFetchLists(ctx, service, "root") tuples, err := preFetchLists(ctx, service, "root")
require.NoError(t, err) require.NoError(t, err)

View File

@ -36,6 +36,7 @@ import (
// RestoreCollections will restore the specified data collections into OneDrive // RestoreCollections will restore the specified data collections into OneDrive
func RestoreCollections( func RestoreCollections(
ctx context.Context, ctx context.Context,
backupVersion int,
service graph.Servicer, service graph.Servicer,
dest control.RestoreDestination, dest control.RestoreDestination,
dcs []data.Collection, dcs []data.Collection,
@ -59,14 +60,19 @@ func RestoreCollections(
switch dc.FullPath().Category() { switch dc.FullPath().Category() {
case path.LibrariesCategory: case path.LibrariesCategory:
metrics, canceled = onedrive.RestoreCollection( metrics, _, _, canceled = onedrive.RestoreCollection(
ctx, ctx,
backupVersion,
service, service,
dc, dc,
[]onedrive.UserPermission{}, // Currently permission data is not stored for sharepoint
onedrive.OneDriveSource, onedrive.OneDriveSource,
dest.ContainerName, dest.ContainerName,
deets, deets,
errUpdater) errUpdater,
map[string]string{},
false,
)
case path.ListsCategory: case path.ListsCategory:
metrics, canceled = RestoreCollection( metrics, canceled = RestoreCollection(
ctx, ctx,

View File

@ -1,6 +1,9 @@
package support package support
import ( import (
"strings"
bmodels "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
absser "github.com/microsoft/kiota-abstractions-go/serialization" absser "github.com/microsoft/kiota-abstractions-go/serialization"
js "github.com/microsoft/kiota-serialization-json-go" js "github.com/microsoft/kiota-serialization-json-go"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
@ -12,7 +15,7 @@ import (
func CreateFromBytes(bytes []byte, createFunc absser.ParsableFactory) (absser.Parsable, error) { func CreateFromBytes(bytes []byte, createFunc absser.ParsableFactory) (absser.Parsable, error) {
parseNode, err := js.NewJsonParseNodeFactory().GetRootParseNode("application/json", bytes) parseNode, err := js.NewJsonParseNodeFactory().GetRootParseNode("application/json", bytes)
if err != nil { if err != nil {
return nil, errors.Wrap(err, "parsing byte array into m365 object") return nil, errors.Wrap(err, "deserializing bytes into base m365 object")
} }
anObject, err := parseNode.GetObjectValue(createFunc) anObject, err := parseNode.GetObjectValue(createFunc)
@ -27,7 +30,7 @@ func CreateFromBytes(bytes []byte, createFunc absser.ParsableFactory) (absser.Pa
func CreateMessageFromBytes(bytes []byte) (models.Messageable, error) { func CreateMessageFromBytes(bytes []byte) (models.Messageable, error) {
aMessage, err := CreateFromBytes(bytes, models.CreateMessageFromDiscriminatorValue) aMessage, err := CreateFromBytes(bytes, models.CreateMessageFromDiscriminatorValue)
if err != nil { if err != nil {
return nil, errors.Wrap(err, "creating m365 exchange.Mail object from provided bytes") return nil, errors.Wrap(err, "deserializing bytes to exchange message")
} }
message := aMessage.(models.Messageable) message := aMessage.(models.Messageable)
@ -40,7 +43,7 @@ func CreateMessageFromBytes(bytes []byte) (models.Messageable, error) {
func CreateContactFromBytes(bytes []byte) (models.Contactable, error) { func CreateContactFromBytes(bytes []byte) (models.Contactable, error) {
parsable, err := CreateFromBytes(bytes, models.CreateContactFromDiscriminatorValue) parsable, err := CreateFromBytes(bytes, models.CreateContactFromDiscriminatorValue)
if err != nil { if err != nil {
return nil, errors.Wrap(err, "creating m365 exchange.Contact object from provided bytes") return nil, errors.Wrap(err, "deserializing bytes to exchange contact")
} }
contact := parsable.(models.Contactable) contact := parsable.(models.Contactable)
@ -52,7 +55,7 @@ func CreateContactFromBytes(bytes []byte) (models.Contactable, error) {
func CreateEventFromBytes(bytes []byte) (models.Eventable, error) { func CreateEventFromBytes(bytes []byte) (models.Eventable, error) {
parsable, err := CreateFromBytes(bytes, models.CreateEventFromDiscriminatorValue) parsable, err := CreateFromBytes(bytes, models.CreateEventFromDiscriminatorValue)
if err != nil { if err != nil {
return nil, errors.Wrap(err, "creating m365 exchange.Event object from provided bytes") return nil, errors.Wrap(err, "deserializing bytes to exchange event")
} }
event := parsable.(models.Eventable) event := parsable.(models.Eventable)
@ -64,10 +67,33 @@ func CreateEventFromBytes(bytes []byte) (models.Eventable, error) {
func CreateListFromBytes(bytes []byte) (models.Listable, error) { func CreateListFromBytes(bytes []byte) (models.Listable, error) {
parsable, err := CreateFromBytes(bytes, models.CreateListFromDiscriminatorValue) parsable, err := CreateFromBytes(bytes, models.CreateListFromDiscriminatorValue)
if err != nil { if err != nil {
return nil, errors.Wrap(err, "creating m365 sharepoint.List object from provided bytes") return nil, errors.Wrap(err, "deserializing bytes to sharepoint list")
} }
list := parsable.(models.Listable) list := parsable.(models.Listable)
return list, nil return list, nil
} }
// CreatePageFromBytes transforms given bytes in models.SitePageable object
func CreatePageFromBytes(bytes []byte) (bmodels.SitePageable, error) {
parsable, err := CreateFromBytes(bytes, bmodels.CreateSitePageFromDiscriminatorValue)
if err != nil {
return nil, errors.Wrap(err, "deserializing bytes to sharepoint page")
}
page := parsable.(bmodels.SitePageable)
return page, nil
}
func HasAttachments(body models.ItemBodyable) bool {
if body.GetContent() == nil || body.GetContentType() == nil ||
*body.GetContentType() == models.TEXT_BODYTYPE || len(*body.GetContent()) == 0 {
return false
}
content := *body.GetContent()
return strings.Contains(content, "src=\"cid:")
}

View File

@ -3,10 +3,13 @@ package support
import ( import (
"testing" "testing"
kioser "github.com/microsoft/kiota-serialization-json-go"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
bmodels "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
) )
@ -18,6 +21,11 @@ func TestDataSupportSuite(t *testing.T) {
suite.Run(t, new(DataSupportSuite)) suite.Run(t, new(DataSupportSuite))
} }
var (
empty = "Empty Bytes"
invalid = "Invalid Bytes"
)
// TestCreateMessageFromBytes verifies approved mockdata bytes can // TestCreateMessageFromBytes verifies approved mockdata bytes can
// be successfully transformed into M365 Message data. // be successfully transformed into M365 Message data.
func (suite *DataSupportSuite) TestCreateMessageFromBytes() { func (suite *DataSupportSuite) TestCreateMessageFromBytes() {
@ -59,13 +67,13 @@ func (suite *DataSupportSuite) TestCreateContactFromBytes() {
isNil assert.ValueAssertionFunc isNil assert.ValueAssertionFunc
}{ }{
{ {
name: "Empty Bytes", name: empty,
byteArray: make([]byte, 0), byteArray: make([]byte, 0),
checkError: assert.Error, checkError: assert.Error,
isNil: assert.Nil, isNil: assert.Nil,
}, },
{ {
name: "Invalid Bytes", name: invalid,
byteArray: []byte("A random sentence doesn't make an object"), byteArray: []byte("A random sentence doesn't make an object"),
checkError: assert.Error, checkError: assert.Error,
isNil: assert.Nil, isNil: assert.Nil,
@ -94,13 +102,13 @@ func (suite *DataSupportSuite) TestCreateEventFromBytes() {
isNil assert.ValueAssertionFunc isNil assert.ValueAssertionFunc
}{ }{
{ {
name: "Empty Byes", name: empty,
byteArray: make([]byte, 0), byteArray: make([]byte, 0),
checkError: assert.Error, checkError: assert.Error,
isNil: assert.Nil, isNil: assert.Nil,
}, },
{ {
name: "Invalid Bytes", name: invalid,
byteArray: []byte("Invalid byte stream \"subject:\" Not going to work"), byteArray: []byte("Invalid byte stream \"subject:\" Not going to work"),
checkError: assert.Error, checkError: assert.Error,
isNil: assert.Nil, isNil: assert.Nil,
@ -132,13 +140,13 @@ func (suite *DataSupportSuite) TestCreateListFromBytes() {
isNil assert.ValueAssertionFunc isNil assert.ValueAssertionFunc
}{ }{
{ {
name: "Empty Byes", name: empty,
byteArray: make([]byte, 0), byteArray: make([]byte, 0),
checkError: assert.Error, checkError: assert.Error,
isNil: assert.Nil, isNil: assert.Nil,
}, },
{ {
name: "Invalid Bytes", name: invalid,
byteArray: []byte("Invalid byte stream \"subject:\" Not going to work"), byteArray: []byte("Invalid byte stream \"subject:\" Not going to work"),
checkError: assert.Error, checkError: assert.Error,
isNil: assert.Nil, isNil: assert.Nil,
@ -159,3 +167,111 @@ func (suite *DataSupportSuite) TestCreateListFromBytes() {
}) })
} }
} }
func (suite *DataSupportSuite) TestCreatePageFromBytes() {
tests := []struct {
name string
checkError assert.ErrorAssertionFunc
isNil assert.ValueAssertionFunc
getBytes func(t *testing.T) []byte
}{
{
empty,
assert.Error,
assert.Nil,
func(t *testing.T) []byte {
return make([]byte, 0)
},
},
{
invalid,
assert.Error,
assert.Nil,
func(t *testing.T) []byte {
return []byte("snarf")
},
},
{
"Valid Page",
assert.NoError,
assert.NotNil,
func(t *testing.T) []byte {
pg := bmodels.NewSitePage()
title := "Tested"
pg.SetTitle(&title)
pg.SetName(&title)
pg.SetWebUrl(&title)
writer := kioser.NewJsonSerializationWriter()
err := pg.Serialize(writer)
require.NoError(t, err)
byteArray, err := writer.GetSerializedContent()
require.NoError(t, err)
return byteArray
},
},
}
for _, test := range tests {
suite.T().Run(test.name, func(t *testing.T) {
result, err := CreatePageFromBytes(test.getBytes(t))
test.checkError(t, err)
test.isNil(t, result)
})
}
}
func (suite *DataSupportSuite) TestHasAttachments() {
tests := []struct {
name string
hasAttachment assert.BoolAssertionFunc
getBodyable func(t *testing.T) models.ItemBodyable
}{
{
name: "Mock w/out attachment",
hasAttachment: assert.False,
getBodyable: func(t *testing.T) models.ItemBodyable {
byteArray := mockconnector.GetMockMessageWithBodyBytes(
"Test",
"This is testing",
"This is testing",
)
message, err := CreateMessageFromBytes(byteArray)
require.NoError(t, err)
return message.GetBody()
},
},
{
name: "Mock w/ inline attachment",
hasAttachment: assert.True,
getBodyable: func(t *testing.T) models.ItemBodyable {
byteArray := mockconnector.GetMessageWithOneDriveAttachment("Test legacy")
message, err := CreateMessageFromBytes(byteArray)
require.NoError(t, err)
return message.GetBody()
},
},
{
name: "Edge Case",
hasAttachment: assert.True,
getBodyable: func(t *testing.T) models.ItemBodyable {
//nolint:lll
content := "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none\">\r\n<!--\r\np\r\n\t{margin-top:0;\r\n\tmargin-bottom:0}\r\n-->\r\n</style></head><body dir=\"ltr\"><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\">Happy New Year,</div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><br></div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\">In accordance with TPS report guidelines, there have been questions about how to address our activities SharePoint Cover page. Do you believe this is the best picture?&nbsp;</div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><br></div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><img class=\"FluidPluginCopy ContentPasted0 w-2070 h-1380\" size=\"5854817\" data-outlook-trace=\"F:1|T:1\" src=\"cid:85f4faa3-9851-40c7-ba0a-e63dce1185f9\" style=\"max-width:100%\"><br></div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><br></div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\">Let me know if this meets our culture requirements.</div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><br></div><div 
class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\">Warm Regards,</div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><br></div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\">Dustin</div></body></html>"
body := models.NewItemBody()
body.SetContent(&content)
cat := models.HTML_BODYTYPE
body.SetContentType(&cat)
return body
},
},
}
for _, test := range tests {
suite.T().Run(test.name, func(t *testing.T) {
found := HasAttachments(test.getBodyable(t))
test.hasAttachment(t, found)
})
}
}

View File

@ -1,11 +1,14 @@
package support package support
import ( import (
"fmt"
"strings" "strings"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
) )
const itemAttachment = "#microsoft.graph.itemAttachment"
// CloneMessageableFields places data from original data into new message object. // CloneMessageableFields places data from original data into new message object.
// SingleLegacyValueProperty is not populated during this operation // SingleLegacyValueProperty is not populated during this operation
func CloneMessageableFields(orig, message models.Messageable) models.Messageable { func CloneMessageableFields(orig, message models.Messageable) models.Messageable {
@ -278,3 +281,90 @@ func cloneColumnDefinitionable(orig models.ColumnDefinitionable) models.ColumnDe
return newColumn return newColumn
} }
// ToItemAttachment transforms internal item, OutlookItemables, into
// objects that are able to be uploaded into M365.
// Supported Internal Items:
// - Events
func ToItemAttachment(orig models.Attachmentable) (models.Attachmentable, error) {
transform, ok := orig.(models.ItemAttachmentable)
supported := "#microsoft.graph.event"
if !ok { // Shouldn't ever happen
return nil, fmt.Errorf("transforming attachment to item attachment")
}
item := transform.GetItem()
itemType := item.GetOdataType()
switch *itemType {
case supported:
event := item.(models.Eventable)
newEvent, err := sanitizeEvent(event)
if err != nil {
return nil, err
}
transform.SetItem(newEvent)
return transform, nil
default:
return nil, fmt.Errorf("exiting ToItemAttachment: %s not supported", *itemType)
}
}
// sanitizeEvent transfers data into event object and
// removes unique IDs from the M365 object
func sanitizeEvent(orig models.Eventable) (models.Eventable, error) {
newEvent := models.NewEvent()
newEvent.SetAttendees(orig.GetAttendees())
newEvent.SetBody(orig.GetBody())
newEvent.SetBodyPreview(orig.GetBodyPreview())
newEvent.SetCalendar(orig.GetCalendar())
newEvent.SetCreatedDateTime(orig.GetCreatedDateTime())
newEvent.SetEnd(orig.GetEnd())
newEvent.SetHasAttachments(orig.GetHasAttachments())
newEvent.SetHideAttendees(orig.GetHideAttendees())
newEvent.SetImportance(orig.GetImportance())
newEvent.SetIsAllDay(orig.GetIsAllDay())
newEvent.SetIsOnlineMeeting(orig.GetIsOnlineMeeting())
newEvent.SetLocation(orig.GetLocation())
newEvent.SetLocations(orig.GetLocations())
newEvent.SetSensitivity(orig.GetSensitivity())
newEvent.SetReminderMinutesBeforeStart(orig.GetReminderMinutesBeforeStart())
newEvent.SetStart(orig.GetStart())
newEvent.SetSubject(orig.GetSubject())
newEvent.SetType(orig.GetType())
// Sanitation
// isDraft and isOrganizer *bool ptr's have to be removed completely
// from JSON in order for POST method to succeed.
// Current as of 2/2/2023
newEvent.SetIsOrganizer(nil)
newEvent.SetIsDraft(nil)
newEvent.SetAdditionalData(orig.GetAdditionalData())
attached := orig.GetAttachments()
attachments := make([]models.Attachmentable, len(attached))
for _, ax := range attached {
if *ax.GetOdataType() == itemAttachment {
newAttachment, err := ToItemAttachment(ax)
if err != nil {
return nil, err
}
attachments = append(attachments, newAttachment)
continue
}
attachments = append(attachments, ax)
}
newEvent.SetAttachments(attachments)
return newEvent, nil
}

View File

@ -4,8 +4,8 @@ import (
"context" "context"
"fmt" "fmt"
"github.com/dustin/go-humanize"
multierror "github.com/hashicorp/go-multierror" multierror "github.com/hashicorp/go-multierror"
bytesize "github.com/inhies/go-bytesize"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
) )
@ -66,6 +66,7 @@ func CreateStatus(
hasErrors := err != nil hasErrors := err != nil
numErr := GetNumberOfErrors(err) numErr := GetNumberOfErrors(err)
status := ConnectorOperationStatus{ status := ConnectorOperationStatus{
lastOperation: op, lastOperation: op,
ObjectCount: cm.Objects, ObjectCount: cm.Objects,
@ -142,7 +143,7 @@ func (cos *ConnectorOperationStatus) String() string {
cos.lastOperation.String(), cos.lastOperation.String(),
cos.Successful, cos.Successful,
cos.ObjectCount, cos.ObjectCount,
bytesize.New(float64(cos.bytes)), humanize.Bytes(uint64(cos.bytes)),
cos.FolderCount, cos.FolderCount,
) )

View File

@ -177,7 +177,7 @@ func MessageWithCompletion(
completionCh := make(chan struct{}, 1) completionCh := make(chan struct{}, 1)
if cfg.hidden() { if cfg.hidden() {
return completionCh, func() {} return completionCh, func() { log.Info("done - " + clean) }
} }
wg.Add(1) wg.Add(1)
@ -232,7 +232,7 @@ func ItemProgress(
log.Debug(header) log.Debug(header)
if cfg.hidden() || rc == nil || totalBytes == 0 { if cfg.hidden() || rc == nil || totalBytes == 0 {
return rc, func() {} return rc, func() { log.Debug("done - " + header) }
} }
wg.Add(1) wg.Add(1)
@ -286,7 +286,7 @@ func ProgressWithCount(
} }
}(progressCh) }(progressCh)
return progressCh, func() {} return progressCh, func() { log.Info("done - " + lmsg) }
} }
wg.Add(1) wg.Add(1)
@ -381,16 +381,24 @@ func CollectionProgress(
if cfg.hidden() || len(user.String()) == 0 || len(dirName.String()) == 0 { if cfg.hidden() || len(user.String()) == 0 || len(dirName.String()) == 0 {
ch := make(chan struct{}) ch := make(chan struct{})
counted := 0
go func(ci <-chan struct{}) { go func(ci <-chan struct{}) {
for { for {
_, ok := <-ci _, ok := <-ci
if !ok { if !ok {
return return
} }
counted++
// Log every 1000 items that are processed
if counted%1000 == 0 {
log.Infow("uploading", "count", counted)
}
} }
}(ch) }(ch)
return ch, func() {} return ch, func() { log.Infow("done - "+message, "count", counted) }
} }
wg.Add(1) wg.Add(1)
@ -432,6 +440,11 @@ func CollectionProgress(
counted++ counted++
// Log every 1000 items that are processed
if counted%1000 == 0 {
log.Infow("uploading", "count", counted)
}
bar.Increment() bar.Increment()
} }
} }

View File

@ -2,6 +2,8 @@ package operations
import ( import (
"context" "context"
"fmt"
"runtime/debug"
"time" "time"
"github.com/alcionai/clues" "github.com/alcionai/clues"
@ -40,6 +42,9 @@ type BackupOperation struct {
Version string `json:"version"` Version string `json:"version"`
account account.Account account account.Account
// when true, this allows for incremental backups instead of full data pulls
incremental bool
} }
// BackupResults aggregate the details of the result of the operation. // BackupResults aggregate the details of the result of the operation.
@ -66,6 +71,7 @@ func NewBackupOperation(
Selectors: selector, Selectors: selector,
Version: "v0", Version: "v0",
account: acct, account: acct,
incremental: useIncrementalBackup(selector, opts),
} }
if err := op.validate(); err != nil { if err := op.validate(); err != nil {
return BackupOperation{}, err return BackupOperation{}, err
@ -103,29 +109,52 @@ type detailsWriter interface {
// Run begins a synchronous backup operation. // Run begins a synchronous backup operation.
func (op *BackupOperation) Run(ctx context.Context) (err error) { func (op *BackupOperation) Run(ctx context.Context) (err error) {
defer func() {
if r := recover(); r != nil {
var rerr error
if re, ok := r.(error); ok {
rerr = re
} else if re, ok := r.(string); ok {
rerr = clues.New(re)
} else {
rerr = clues.New(fmt.Sprintf("%v", r))
}
err = clues.Wrap(rerr, "panic recovery").
WithClues(ctx).
With("stacktrace", string(debug.Stack()))
logger.Ctx(ctx).
With("err", err).
Errorw("backup panic", clues.InErr(err).Slice()...)
}
}()
ctx, end := D.Span(ctx, "operations:backup:run") ctx, end := D.Span(ctx, "operations:backup:run")
defer end() defer func() {
end()
// wait for the progress display to clean up
observe.Complete()
}()
// -----
// Setup
// -----
var ( var (
opStats backupStats opStats backupStats
backupDetails *details.Builder startTime = time.Now()
toMerge map[string]path.Path detailsStore = streamstore.New(op.kopia, op.account.ID(), op.Selectors.PathService())
tenantID = op.account.ID()
startTime = time.Now()
detailsStore = streamstore.New(op.kopia, tenantID, op.Selectors.PathService())
reasons = selectorToReasons(op.Selectors)
uib = useIncrementalBackup(op.Selectors, op.Options)
) )
op.Results.BackupID = model.StableID(uuid.NewString()) op.Results.BackupID = model.StableID(uuid.NewString())
ctx = clues.AddAll( ctx = clues.AddAll(
ctx, ctx,
"tenant_id", tenantID, // TODO: pii "tenant_id", op.account.ID(), // TODO: pii
"resource_owner", op.ResourceOwner, // TODO: pii "resource_owner", op.ResourceOwner, // TODO: pii
"backup_id", op.Results.BackupID, "backup_id", op.Results.BackupID,
"service", op.Selectors.Service, "service", op.Selectors.Service,
"incremental", uib) "incremental", op.incremental)
op.bus.Event( op.bus.Event(
ctx, ctx,
@ -134,101 +163,128 @@ func (op *BackupOperation) Run(ctx context.Context) (err error) {
events.StartTime: startTime, events.StartTime: startTime,
events.Service: op.Selectors.Service.String(), events.Service: op.Selectors.Service.String(),
events.BackupID: op.Results.BackupID, events.BackupID: op.Results.BackupID,
}, })
)
// persist operation results to the model store on exit // -----
defer func() { // Execution
// wait for the progress display to clean up // -----
observe.Complete()
err = op.persistResults(startTime, &opStats) deets, err := op.do(
if err != nil { ctx,
return &opStats,
} detailsStore,
op.Results.BackupID)
if err != nil {
// No return here! We continue down to persistResults, even in case of failure.
logger.Ctx(ctx).
With("err", err).
Errorw("doing backup", clues.InErr(err).Slice()...)
op.Errors.Fail(errors.Wrap(err, "doing backup"))
opStats.readErr = op.Errors.Err()
}
err = op.createBackupModels( // -----
ctx, // Persistence
detailsStore, // -----
opStats.k.SnapshotID,
backupDetails.Details()) err = op.persistResults(startTime, &opStats)
if err != nil { if err != nil {
opStats.writeErr = err op.Errors.Fail(errors.Wrap(err, "persisting backup results"))
} opStats.writeErr = op.Errors.Err()
}()
return op.Errors.Err()
}
err = op.createBackupModels(
ctx,
detailsStore,
opStats.k.SnapshotID,
op.Results.BackupID,
deets.Details())
if err != nil {
op.Errors.Fail(errors.Wrap(err, "persisting backup"))
opStats.writeErr = op.Errors.Err()
return op.Errors.Err()
}
logger.Ctx(ctx).Infow("completed backup", "results", op.Results)
return nil
}
// do is purely the action of running a backup. All pre/post behavior
// is found in Run().
func (op *BackupOperation) do(
ctx context.Context,
opStats *backupStats,
detailsStore detailsReader,
backupID model.StableID,
) (*details.Builder, error) {
reasons := selectorToReasons(op.Selectors)
// should always be 1, since backups are 1:1 with resourceOwners.
opStats.resourceCount = 1
mans, mdColls, canUseMetaData, err := produceManifestsAndMetadata( mans, mdColls, canUseMetaData, err := produceManifestsAndMetadata(
ctx, ctx,
op.kopia, op.kopia,
op.store, op.store,
reasons, reasons,
tenantID, op.account.ID(),
uib, op.incremental,
) op.Errors)
if err != nil { if err != nil {
opStats.readErr = errors.Wrap(err, "connecting to M365") return nil, errors.Wrap(err, "producing manifests and metadata")
return opStats.readErr
} }
gc, err := connectToM365(ctx, op.Selectors, op.account) gc, err := connectToM365(ctx, op.Selectors, op.account)
if err != nil { if err != nil {
opStats.readErr = errors.Wrap(err, "connecting to M365") return nil, errors.Wrap(err, "connectng to m365")
return opStats.readErr
} }
cs, err := produceBackupDataCollections(ctx, gc, op.Selectors, mdColls, op.Options) cs, err := produceBackupDataCollections(ctx, gc, op.Selectors, mdColls, op.Options)
if err != nil { if err != nil {
opStats.readErr = errors.Wrap(err, "retrieving data to backup") return nil, errors.Wrap(err, "producing backup data collections")
return opStats.readErr
} }
ctx = clues.Add(ctx, "collections", len(cs)) ctx = clues.Add(ctx, "coll_count", len(cs))
opStats.k, backupDetails, toMerge, err = consumeBackupDataCollections( writeStats, deets, toMerge, err := consumeBackupDataCollections(
ctx, ctx,
op.kopia, op.kopia,
tenantID, op.account.ID(),
reasons, reasons,
mans, mans,
cs, cs,
op.Results.BackupID, backupID,
uib && canUseMetaData) op.incremental && canUseMetaData)
if err != nil { if err != nil {
opStats.writeErr = errors.Wrap(err, "backing up service data") return nil, errors.Wrap(err, "persisting collection backups")
return opStats.writeErr
} }
logger.Ctx(ctx).Debugf( opStats.k = writeStats
"Backed up %d directories and %d files",
opStats.k.TotalDirectoryCount, opStats.k.TotalFileCount,
)
if err = mergeDetails( err = mergeDetails(
ctx, ctx,
op.store, op.store,
detailsStore, detailsStore,
mans, mans,
toMerge, toMerge,
backupDetails, deets)
); err != nil { if err != nil {
opStats.writeErr = errors.Wrap(err, "merging backup details") return nil, errors.Wrap(err, "merging details")
return opStats.writeErr
} }
opStats.gc = gc.AwaitStatus() opStats.gc = gc.AwaitStatus()
// TODO(keepers): remove when fault.Errors handles all iterable error aggregation.
if opStats.gc.ErrorCount > 0 { if opStats.gc.ErrorCount > 0 {
merr := multierror.Append(opStats.readErr, errors.Wrap(opStats.gc.Err, "retrieving data")) return nil, opStats.gc.Err
opStats.readErr = merr.ErrorOrNil()
// Need to exit before we set started to true else we'll report no errors.
return opStats.readErr
} }
// should always be 1, since backups are 1:1 with resourceOwners. logger.Ctx(ctx).Debug(gc.PrintableStatus())
opStats.resourceCount = 1
return err return deets, nil
} }
// checker to see if conditions are correct for incremental backup behavior such as // checker to see if conditions are correct for incremental backup behavior such as
@ -307,7 +363,9 @@ func selectorToReasons(sel selectors.Selector) []kopia.Reason {
return reasons return reasons
} }
func builderFromReason(tenant string, r kopia.Reason) (*path.Builder, error) { func builderFromReason(ctx context.Context, tenant string, r kopia.Reason) (*path.Builder, error) {
ctx = clues.Add(ctx, "category", r.Category.String())
// This is hacky, but we want the path package to format the path the right // This is hacky, but we want the path package to format the path the right
// way (e.x. proper order for service, category, etc), but we don't care about // way (e.x. proper order for service, category, etc), but we don't care about
// the folders after the prefix. // the folders after the prefix.
@ -319,12 +377,7 @@ func builderFromReason(tenant string, r kopia.Reason) (*path.Builder, error) {
false, false,
) )
if err != nil { if err != nil {
return nil, errors.Wrapf( return nil, clues.Wrap(err, "building path").WithClues(ctx)
err,
"building path for service %s category %s",
r.Service.String(),
r.Category.String(),
)
} }
return p.ToBuilder().Dir(), nil return p.ToBuilder().Dir(), nil
@ -367,7 +420,7 @@ func consumeBackupDataCollections(
categories := map[string]struct{}{} categories := map[string]struct{}{}
for _, reason := range m.Reasons { for _, reason := range m.Reasons {
pb, err := builderFromReason(tenantID, reason) pb, err := builderFromReason(ctx, tenantID, reason)
if err != nil { if err != nil {
return nil, nil, nil, errors.Wrap(err, "getting subtree paths for bases") return nil, nil, nil, errors.Wrap(err, "getting subtree paths for bases")
} }
@ -394,13 +447,9 @@ func consumeBackupDataCollections(
logger.Ctx(ctx).Infow( logger.Ctx(ctx).Infow(
"using base for backup", "using base for backup",
"snapshot_id", "snapshot_id", m.ID,
m.ID, "services", svcs,
"services", "categories", cats)
svcs,
"categories",
cats,
)
} }
kopiaStats, deets, itemsSourcedFromBase, err := bu.BackupCollections( kopiaStats, deets, itemsSourcedFromBase, err := bu.BackupCollections(
@ -409,24 +458,22 @@ func consumeBackupDataCollections(
cs, cs,
nil, nil,
tags, tags,
isIncremental, isIncremental)
) if err != nil {
if kopiaStats == nil {
return nil, nil, nil, err
}
return nil, nil, nil, errors.Wrapf(
err,
"kopia snapshot failed with %v catastrophic errors and %v ignored errors",
kopiaStats.ErrorCount, kopiaStats.IgnoredErrorCount)
}
if kopiaStats.ErrorCount > 0 || kopiaStats.IgnoredErrorCount > 0 { if kopiaStats.ErrorCount > 0 || kopiaStats.IgnoredErrorCount > 0 {
if err != nil { err = errors.Errorf(
err = errors.Wrapf( "kopia snapshot failed with %v catastrophic errors and %v ignored errors",
err, kopiaStats.ErrorCount, kopiaStats.IgnoredErrorCount)
"kopia snapshot failed with %v catastrophic errors and %v ignored errors",
kopiaStats.ErrorCount,
kopiaStats.IgnoredErrorCount,
)
} else {
err = errors.Errorf(
"kopia snapshot failed with %v catastrophic errors and %v ignored errors",
kopiaStats.ErrorCount,
kopiaStats.IgnoredErrorCount,
)
}
} }
return kopiaStats, deets, itemsSourcedFromBase, err return kopiaStats, deets, itemsSourcedFromBase, err
@ -461,6 +508,8 @@ func mergeDetails(
var addedEntries int var addedEntries int
for _, man := range mans { for _, man := range mans {
mctx := clues.Add(ctx, "manifest_id", man.ID)
// For now skip snapshots that aren't complete. We will need to revisit this // For now skip snapshots that aren't complete. We will need to revisit this
// when we tackle restartability. // when we tackle restartability.
if len(man.IncompleteReason) > 0 { if len(man.IncompleteReason) > 0 {
@ -469,28 +518,26 @@ func mergeDetails(
bID, ok := man.GetTag(kopia.TagBackupID) bID, ok := man.GetTag(kopia.TagBackupID)
if !ok { if !ok {
return errors.Errorf("no backup ID in snapshot manifest with ID %s", man.ID) return clues.New("no backup ID in snapshot manifest").WithClues(mctx)
} }
mctx = clues.Add(mctx, "manifest_backup_id", bID)
_, baseDeets, err := getBackupAndDetailsFromID( _, baseDeets, err := getBackupAndDetailsFromID(
ctx, ctx,
model.StableID(bID), model.StableID(bID),
ms, ms,
detailsStore, detailsStore)
)
if err != nil { if err != nil {
return errors.Wrapf(err, "backup fetching base details for backup %s", bID) return clues.New("fetching base details for backup").WithClues(mctx)
} }
for _, entry := range baseDeets.Items() { for _, entry := range baseDeets.Items() {
rr, err := path.FromDataLayerPath(entry.RepoRef, true) rr, err := path.FromDataLayerPath(entry.RepoRef, true)
if err != nil { if err != nil {
return errors.Wrapf( return clues.New("parsing base item info path").
err, WithClues(mctx).
"parsing base item info path %s in backup %s", With("repo_ref", entry.RepoRef) // todo: pii
entry.RepoRef,
bID,
)
} }
// Although this base has an entry it may not be the most recent. Check // Although this base has an entry it may not be the most recent. Check
@ -513,11 +560,7 @@ func mergeDetails(
// Fixup paths in the item. // Fixup paths in the item.
item := entry.ItemInfo item := entry.ItemInfo
if err := details.UpdateItem(&item, newPath); err != nil { if err := details.UpdateItem(&item, newPath); err != nil {
return errors.Wrapf( return clues.New("updating item details").WithClues(mctx)
err,
"updating item info for entry from backup %s",
bID,
)
} }
// TODO(ashmrtn): This may need updated if we start using this merge // TODO(ashmrtn): This may need updated if we start using this merge
@ -529,8 +572,7 @@ func mergeDetails(
newPath.ShortRef(), newPath.ShortRef(),
newPath.ToBuilder().Dir().ShortRef(), newPath.ToBuilder().Dir().ShortRef(),
itemUpdated, itemUpdated,
item, item)
)
folders := details.FolderEntriesForPath(newPath.ToBuilder().Dir()) folders := details.FolderEntriesForPath(newPath.ToBuilder().Dir())
deets.AddFoldersForItem(folders, item, itemUpdated) deets.AddFoldersForItem(folders, item, itemUpdated)
@ -542,11 +584,9 @@ func mergeDetails(
} }
if addedEntries != len(shortRefsFromPrevBackup) { if addedEntries != len(shortRefsFromPrevBackup) {
return errors.Errorf( return clues.New("incomplete migration of backup details").
"incomplete migration of backup details: found %v of %v expected items", WithClues(ctx).
addedEntries, WithAll("item_count", addedEntries, "expected_item_count", len(shortRefsFromPrevBackup))
len(shortRefsFromPrevBackup),
)
} }
return nil return nil
@ -568,21 +608,28 @@ func (op *BackupOperation) persistResults(
if opStats.readErr != nil || opStats.writeErr != nil { if opStats.readErr != nil || opStats.writeErr != nil {
op.Status = Failed op.Status = Failed
// TODO(keepers): replace with fault.Errors handling.
return multierror.Append( return multierror.Append(
errors.New("errors prevented the operation from processing"), errors.New("errors prevented the operation from processing"),
opStats.readErr, opStats.readErr,
opStats.writeErr) opStats.writeErr)
} }
if opStats.readErr == nil && opStats.writeErr == nil && opStats.gc.Successful == 0 { op.Results.BytesRead = opStats.k.TotalHashedBytes
op.Results.BytesUploaded = opStats.k.TotalUploadedBytes
op.Results.ItemsWritten = opStats.k.TotalFileCount
op.Results.ResourceOwners = opStats.resourceCount
if opStats.gc == nil {
op.Status = Failed
return errors.New("backup population never completed")
}
if opStats.gc.Successful == 0 {
op.Status = NoData op.Status = NoData
} }
op.Results.BytesRead = opStats.k.TotalHashedBytes
op.Results.BytesUploaded = opStats.k.TotalUploadedBytes
op.Results.ItemsRead = opStats.gc.Successful op.Results.ItemsRead = opStats.gc.Successful
op.Results.ItemsWritten = opStats.k.TotalFileCount
op.Results.ResourceOwners = opStats.resourceCount
return nil return nil
} }
@ -592,29 +639,32 @@ func (op *BackupOperation) createBackupModels(
ctx context.Context, ctx context.Context,
detailsStore detailsWriter, detailsStore detailsWriter,
snapID string, snapID string,
backupID model.StableID,
backupDetails *details.Details, backupDetails *details.Details,
) error { ) error {
ctx = clues.Add(ctx, "snapshot_id", snapID)
if backupDetails == nil { if backupDetails == nil {
return errors.New("no backup details to record") return clues.New("no backup details to record").WithClues(ctx)
} }
detailsID, err := detailsStore.WriteBackupDetails(ctx, backupDetails) detailsID, err := detailsStore.WriteBackupDetails(ctx, backupDetails)
if err != nil { if err != nil {
return errors.Wrap(err, "creating backupdetails model") return clues.Wrap(err, "creating backupDetails model").WithClues(ctx)
} }
ctx = clues.Add(ctx, "details_id", detailsID)
b := backup.New( b := backup.New(
snapID, detailsID, op.Status.String(), snapID, detailsID, op.Status.String(),
op.Results.BackupID, backupID,
op.Selectors, op.Selectors,
op.Results.ReadWrites, op.Results.ReadWrites,
op.Results.StartAndEndTime, op.Results.StartAndEndTime,
op.Errors, op.Errors,
) )
err = op.store.Put(ctx, model.BackupSchema, b) if err = op.store.Put(ctx, model.BackupSchema, b); err != nil {
if err != nil { return clues.Wrap(err, "creating backup model").WithClues(ctx)
return errors.Wrap(err, "creating backup model")
} }
dur := op.Results.CompletedAt.Sub(op.Results.StartedAt) dur := op.Results.CompletedAt.Sub(op.Results.StartedAt)

View File

@ -339,7 +339,15 @@ func generateContainerOfItems(
dest, dest,
collections) collections)
deets, err := gc.RestoreDataCollections(ctx, acct, sel, dest, dataColls) deets, err := gc.RestoreDataCollections(
ctx,
backup.Version,
acct,
sel,
dest,
control.Options{RestorePermissions: true},
dataColls,
)
require.NoError(t, err) require.NoError(t, err)
return deets return deets
@ -1073,7 +1081,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDrive() {
sel.Include(sel.AllData()) sel.Include(sel.AllData())
bo, _, _, _, closer := prepNewTestBackupOp(t, ctx, mb, sel.Selector, control.Toggles{}) bo, _, _, _, closer := prepNewTestBackupOp(t, ctx, mb, sel.Selector, control.Toggles{EnablePermissionsBackup: true})
defer closer() defer closer()
runAndCheckBackup(t, ctx, &bo, mb) runAndCheckBackup(t, ctx, &bo, mb)

View File

@ -432,258 +432,6 @@ func (suite *BackupOpSuite) TestBackupOperation_PersistResults() {
} }
} }
func (suite *BackupOpSuite) TestBackupOperation_VerifyDistinctBases() {
const user = "a-user"
table := []struct {
name string
input []*kopia.ManifestEntry
errCheck assert.ErrorAssertionFunc
}{
{
name: "SingleManifestMultipleReasons",
input: []*kopia.ManifestEntry{
{
Manifest: &snapshot.Manifest{
ID: "id1",
},
Reasons: []kopia.Reason{
{
ResourceOwner: user,
Service: path.ExchangeService,
Category: path.EmailCategory,
},
{
ResourceOwner: user,
Service: path.ExchangeService,
Category: path.EventsCategory,
},
},
},
},
errCheck: assert.NoError,
},
{
name: "MultipleManifestsDistinctReason",
input: []*kopia.ManifestEntry{
{
Manifest: &snapshot.Manifest{
ID: "id1",
},
Reasons: []kopia.Reason{
{
ResourceOwner: user,
Service: path.ExchangeService,
Category: path.EmailCategory,
},
},
},
{
Manifest: &snapshot.Manifest{
ID: "id2",
},
Reasons: []kopia.Reason{
{
ResourceOwner: user,
Service: path.ExchangeService,
Category: path.EventsCategory,
},
},
},
},
errCheck: assert.NoError,
},
{
name: "MultipleManifestsSameReason",
input: []*kopia.ManifestEntry{
{
Manifest: &snapshot.Manifest{
ID: "id1",
},
Reasons: []kopia.Reason{
{
ResourceOwner: user,
Service: path.ExchangeService,
Category: path.EmailCategory,
},
},
},
{
Manifest: &snapshot.Manifest{
ID: "id2",
},
Reasons: []kopia.Reason{
{
ResourceOwner: user,
Service: path.ExchangeService,
Category: path.EmailCategory,
},
},
},
},
errCheck: assert.Error,
},
{
name: "MultipleManifestsSameReasonOneIncomplete",
input: []*kopia.ManifestEntry{
{
Manifest: &snapshot.Manifest{
ID: "id1",
},
Reasons: []kopia.Reason{
{
ResourceOwner: user,
Service: path.ExchangeService,
Category: path.EmailCategory,
},
},
},
{
Manifest: &snapshot.Manifest{
ID: "id2",
IncompleteReason: "checkpoint",
},
Reasons: []kopia.Reason{
{
ResourceOwner: user,
Service: path.ExchangeService,
Category: path.EmailCategory,
},
},
},
},
errCheck: assert.NoError,
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
test.errCheck(t, verifyDistinctBases(test.input))
})
}
}
func (suite *BackupOpSuite) TestBackupOperation_CollectMetadata() {
var (
tenant = "a-tenant"
resourceOwner = "a-user"
fileNames = []string{
"delta",
"paths",
}
emailDeltaPath = makeMetadataPath(
suite.T(),
tenant,
path.ExchangeService,
resourceOwner,
path.EmailCategory,
fileNames[0],
)
emailPathsPath = makeMetadataPath(
suite.T(),
tenant,
path.ExchangeService,
resourceOwner,
path.EmailCategory,
fileNames[1],
)
contactsDeltaPath = makeMetadataPath(
suite.T(),
tenant,
path.ExchangeService,
resourceOwner,
path.ContactsCategory,
fileNames[0],
)
contactsPathsPath = makeMetadataPath(
suite.T(),
tenant,
path.ExchangeService,
resourceOwner,
path.ContactsCategory,
fileNames[1],
)
)
table := []struct {
name string
inputMan *kopia.ManifestEntry
inputFiles []string
expected []path.Path
}{
{
name: "SingleReasonSingleFile",
inputMan: &kopia.ManifestEntry{
Manifest: &snapshot.Manifest{},
Reasons: []kopia.Reason{
{
ResourceOwner: resourceOwner,
Service: path.ExchangeService,
Category: path.EmailCategory,
},
},
},
inputFiles: []string{fileNames[0]},
expected: []path.Path{emailDeltaPath},
},
{
name: "SingleReasonMultipleFiles",
inputMan: &kopia.ManifestEntry{
Manifest: &snapshot.Manifest{},
Reasons: []kopia.Reason{
{
ResourceOwner: resourceOwner,
Service: path.ExchangeService,
Category: path.EmailCategory,
},
},
},
inputFiles: fileNames,
expected: []path.Path{emailDeltaPath, emailPathsPath},
},
{
name: "MultipleReasonsMultipleFiles",
inputMan: &kopia.ManifestEntry{
Manifest: &snapshot.Manifest{},
Reasons: []kopia.Reason{
{
ResourceOwner: resourceOwner,
Service: path.ExchangeService,
Category: path.EmailCategory,
},
{
ResourceOwner: resourceOwner,
Service: path.ExchangeService,
Category: path.ContactsCategory,
},
},
},
inputFiles: fileNames,
expected: []path.Path{
emailDeltaPath,
emailPathsPath,
contactsDeltaPath,
contactsPathsPath,
},
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
ctx, flush := tester.NewContext()
defer flush()
mr := &mockRestorer{}
_, err := collectMetadata(ctx, mr, test.inputMan, test.inputFiles, tenant)
assert.NoError(t, err)
checkPaths(t, test.expected, mr.gotPaths)
})
}
}
func (suite *BackupOpSuite) TestBackupOperation_ConsumeBackupDataCollections_Paths() { func (suite *BackupOpSuite) TestBackupOperation_ConsumeBackupDataCollections_Paths() {
var ( var (
tenant = "a-tenant" tenant = "a-tenant"

View File

@ -3,7 +3,7 @@ package operations
import ( import (
"context" "context"
multierror "github.com/hashicorp/go-multierror" "github.com/alcionai/clues"
"github.com/kopia/kopia/repo/manifest" "github.com/kopia/kopia/repo/manifest"
"github.com/pkg/errors" "github.com/pkg/errors"
@ -12,6 +12,7 @@ import (
"github.com/alcionai/corso/src/internal/kopia" "github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/model" "github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/pkg/backup" "github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
) )
@ -44,6 +45,7 @@ func produceManifestsAndMetadata(
reasons []kopia.Reason, reasons []kopia.Reason,
tenantID string, tenantID string,
getMetadata bool, getMetadata bool,
errs fault.Adder,
) ([]*kopia.ManifestEntry, []data.Collection, bool, error) { ) ([]*kopia.ManifestEntry, []data.Collection, bool, error) {
var ( var (
metadataFiles = graph.AllMetadataFileNames() metadataFiles = graph.AllMetadataFileNames()
@ -68,12 +70,10 @@ func produceManifestsAndMetadata(
// //
// TODO(ashmrtn): This may need updating if we start sourcing item backup // TODO(ashmrtn): This may need updating if we start sourcing item backup
// details from previous snapshots when using kopia-assisted incrementals. // details from previous snapshots when using kopia-assisted incrementals.
if err := verifyDistinctBases(ms); err != nil { if err := verifyDistinctBases(ctx, ms, errs); err != nil {
logger.Ctx(ctx).Warnw( logger.Ctx(ctx).With("error", err).Infow(
"base snapshot collision, falling back to full backup", "base snapshot collision, falling back to full backup",
"error", clues.In(ctx).Slice()...)
err,
)
return ms, nil, false, nil return ms, nil, false, nil
} }
@ -83,40 +83,41 @@ func produceManifestsAndMetadata(
continue continue
} }
mctx := clues.Add(ctx, "manifest_id", man.ID)
bID, ok := man.GetTag(kopia.TagBackupID) bID, ok := man.GetTag(kopia.TagBackupID)
if !ok { if !ok {
return nil, nil, false, errors.New("snapshot manifest missing backup ID") err = clues.New("snapshot manifest missing backup ID").WithClues(ctx)
return nil, nil, false, err
} }
dID, _, err := gdi.GetDetailsIDFromBackupID(ctx, model.StableID(bID)) mctx = clues.Add(mctx, "manifest_backup_id", man.ID)
dID, _, err := gdi.GetDetailsIDFromBackupID(mctx, model.StableID(bID))
if err != nil { if err != nil {
// if no backup exists for any of the complete manifests, we want // if no backup exists for any of the complete manifests, we want
// to fall back to a complete backup. // to fall back to a complete backup.
if errors.Is(err, kopia.ErrNotFound) { if errors.Is(err, kopia.ErrNotFound) {
logger.Ctx(ctx).Infow( logger.Ctx(ctx).Infow("backup missing, falling back to full backup", clues.In(mctx).Slice()...)
"backup missing, falling back to full backup",
"backup_id", bID)
return ms, nil, false, nil return ms, nil, false, nil
} }
return nil, nil, false, errors.Wrap(err, "retrieving prior backup data") return nil, nil, false, errors.Wrap(err, "retrieving prior backup data")
} }
mctx = clues.Add(mctx, "manifest_details_id", dID)
// if no detailsID exists for any of the complete manifests, we want // if no detailsID exists for any of the complete manifests, we want
// to fall back to a complete backup. This is a temporary prevention // to fall back to a complete backup. This is a temporary prevention
// mechanism to keep backups from falling into a perpetually bad state. // mechanism to keep backups from falling into a perpetually bad state.
// This makes an assumption that the ID points to a populated set of // This makes an assumption that the ID points to a populated set of
// details; we aren't doing the work to look them up. // details; we aren't doing the work to look them up.
if len(dID) == 0 { if len(dID) == 0 {
logger.Ctx(ctx).Infow( logger.Ctx(ctx).Infow("backup missing details ID, falling back to full backup", clues.In(mctx).Slice()...)
"backup missing details ID, falling back to full backup",
"backup_id", bID)
return ms, nil, false, nil return ms, nil, false, nil
} }
colls, err := collectMetadata(ctx, mr, man, metadataFiles, tenantID) colls, err := collectMetadata(mctx, mr, man, metadataFiles, tenantID)
if err != nil && !errors.Is(err, kopia.ErrNotFound) { if err != nil && !errors.Is(err, kopia.ErrNotFound) {
// prior metadata isn't guaranteed to exist. // prior metadata isn't guaranteed to exist.
// if it doesn't, we'll just have to do a // if it doesn't, we'll just have to do a
@ -134,9 +135,9 @@ func produceManifestsAndMetadata(
// of manifests, that each manifest's Reason (owner, service, category) is only // of manifests, that each manifest's Reason (owner, service, category) is only
// included once. If a reason is duplicated by any two manifests, an error is // included once. If a reason is duplicated by any two manifests, an error is
// returned. // returned.
func verifyDistinctBases(mans []*kopia.ManifestEntry) error { func verifyDistinctBases(ctx context.Context, mans []*kopia.ManifestEntry, errs fault.Adder) error {
var ( var (
errs *multierror.Error failed bool
reasons = map[string]manifest.ID{} reasons = map[string]manifest.ID{}
) )
@ -155,10 +156,11 @@ func verifyDistinctBases(mans []*kopia.ManifestEntry) error {
reasonKey := reason.ResourceOwner + reason.Service.String() + reason.Category.String() reasonKey := reason.ResourceOwner + reason.Service.String() + reason.Category.String()
if b, ok := reasons[reasonKey]; ok { if b, ok := reasons[reasonKey]; ok {
errs = multierror.Append(errs, errors.Errorf( failed = true
"multiple base snapshots source data for %s %s. IDs: %s, %s",
reason.Service, reason.Category, b, man.ID, errs.Add(clues.New("manifests have overlapping reasons").
)) WithClues(ctx).
With("other_manifest_id", b))
continue continue
} }
@ -167,7 +169,11 @@ func verifyDistinctBases(mans []*kopia.ManifestEntry) error {
} }
} }
return errs.ErrorOrNil() if failed {
return clues.New("multiple base snapshots qualify").WithClues(ctx)
}
return nil
} }
// collectMetadata retrieves all metadata files associated with the manifest. // collectMetadata retrieves all metadata files associated with the manifest.
@ -191,7 +197,9 @@ func collectMetadata(
reason.Category, reason.Category,
true) true)
if err != nil { if err != nil {
return nil, errors.Wrapf(err, "building metadata path") return nil, clues.
Wrap(err, "building metadata path").
WithAll("metadata_file", fn, "category", reason.Category)
} }
paths = append(paths, p) paths = append(paths, p)

View File

@ -14,6 +14,7 @@ import (
"github.com/alcionai/corso/src/internal/model" "github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup" "github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/fault/mock"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
) )
@ -400,7 +401,10 @@ func (suite *OperationsManifestsUnitSuite) TestVerifyDistinctBases() {
} }
for _, test := range table { for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) { suite.T().Run(test.name, func(t *testing.T) {
err := verifyDistinctBases(test.mans) ctx, flush := tester.NewContext()
defer flush()
err := verifyDistinctBases(ctx, test.mans, mock.NewAdder())
test.expect(t, err) test.expect(t, err)
}) })
} }
@ -646,6 +650,8 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
ma := mock.NewAdder()
mans, dcs, b, err := produceManifestsAndMetadata( mans, dcs, b, err := produceManifestsAndMetadata(
ctx, ctx,
&test.mr, &test.mr,
@ -653,7 +659,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
test.reasons, test.reasons,
tid, tid,
test.getMeta, test.getMeta,
) ma)
test.assertErr(t, err) test.assertErr(t, err)
test.assertB(t, b) test.assertB(t, b)
@ -683,3 +689,270 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
}) })
} }
} }
// ---------------------------------------------------------------------------
// older tests
// ---------------------------------------------------------------------------
type BackupManifestSuite struct {
suite.Suite
}
func TestBackupManifestSuite(t *testing.T) {
suite.Run(t, new(BackupOpSuite))
}
func (suite *BackupManifestSuite) TestBackupOperation_VerifyDistinctBases() {
const user = "a-user"
table := []struct {
name string
input []*kopia.ManifestEntry
errCheck assert.ErrorAssertionFunc
}{
{
name: "SingleManifestMultipleReasons",
input: []*kopia.ManifestEntry{
{
Manifest: &snapshot.Manifest{
ID: "id1",
},
Reasons: []kopia.Reason{
{
ResourceOwner: user,
Service: path.ExchangeService,
Category: path.EmailCategory,
},
{
ResourceOwner: user,
Service: path.ExchangeService,
Category: path.EventsCategory,
},
},
},
},
errCheck: assert.NoError,
},
{
name: "MultipleManifestsDistinctReason",
input: []*kopia.ManifestEntry{
{
Manifest: &snapshot.Manifest{
ID: "id1",
},
Reasons: []kopia.Reason{
{
ResourceOwner: user,
Service: path.ExchangeService,
Category: path.EmailCategory,
},
},
},
{
Manifest: &snapshot.Manifest{
ID: "id2",
},
Reasons: []kopia.Reason{
{
ResourceOwner: user,
Service: path.ExchangeService,
Category: path.EventsCategory,
},
},
},
},
errCheck: assert.NoError,
},
{
name: "MultipleManifestsSameReason",
input: []*kopia.ManifestEntry{
{
Manifest: &snapshot.Manifest{
ID: "id1",
},
Reasons: []kopia.Reason{
{
ResourceOwner: user,
Service: path.ExchangeService,
Category: path.EmailCategory,
},
},
},
{
Manifest: &snapshot.Manifest{
ID: "id2",
},
Reasons: []kopia.Reason{
{
ResourceOwner: user,
Service: path.ExchangeService,
Category: path.EmailCategory,
},
},
},
},
errCheck: assert.Error,
},
{
name: "MultipleManifestsSameReasonOneIncomplete",
input: []*kopia.ManifestEntry{
{
Manifest: &snapshot.Manifest{
ID: "id1",
},
Reasons: []kopia.Reason{
{
ResourceOwner: user,
Service: path.ExchangeService,
Category: path.EmailCategory,
},
},
},
{
Manifest: &snapshot.Manifest{
ID: "id2",
IncompleteReason: "checkpoint",
},
Reasons: []kopia.Reason{
{
ResourceOwner: user,
Service: path.ExchangeService,
Category: path.EmailCategory,
},
},
},
},
errCheck: assert.NoError,
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
ctx, flush := tester.NewContext()
defer flush()
test.errCheck(t, verifyDistinctBases(ctx, test.input, mock.NewAdder()))
})
}
}
func (suite *BackupManifestSuite) TestBackupOperation_CollectMetadata() {
var (
tenant = "a-tenant"
resourceOwner = "a-user"
fileNames = []string{
"delta",
"paths",
}
emailDeltaPath = makeMetadataPath(
suite.T(),
tenant,
path.ExchangeService,
resourceOwner,
path.EmailCategory,
fileNames[0],
)
emailPathsPath = makeMetadataPath(
suite.T(),
tenant,
path.ExchangeService,
resourceOwner,
path.EmailCategory,
fileNames[1],
)
contactsDeltaPath = makeMetadataPath(
suite.T(),
tenant,
path.ExchangeService,
resourceOwner,
path.ContactsCategory,
fileNames[0],
)
contactsPathsPath = makeMetadataPath(
suite.T(),
tenant,
path.ExchangeService,
resourceOwner,
path.ContactsCategory,
fileNames[1],
)
)
table := []struct {
name string
inputMan *kopia.ManifestEntry
inputFiles []string
expected []path.Path
}{
{
name: "SingleReasonSingleFile",
inputMan: &kopia.ManifestEntry{
Manifest: &snapshot.Manifest{},
Reasons: []kopia.Reason{
{
ResourceOwner: resourceOwner,
Service: path.ExchangeService,
Category: path.EmailCategory,
},
},
},
inputFiles: []string{fileNames[0]},
expected: []path.Path{emailDeltaPath},
},
{
name: "SingleReasonMultipleFiles",
inputMan: &kopia.ManifestEntry{
Manifest: &snapshot.Manifest{},
Reasons: []kopia.Reason{
{
ResourceOwner: resourceOwner,
Service: path.ExchangeService,
Category: path.EmailCategory,
},
},
},
inputFiles: fileNames,
expected: []path.Path{emailDeltaPath, emailPathsPath},
},
{
name: "MultipleReasonsMultipleFiles",
inputMan: &kopia.ManifestEntry{
Manifest: &snapshot.Manifest{},
Reasons: []kopia.Reason{
{
ResourceOwner: resourceOwner,
Service: path.ExchangeService,
Category: path.EmailCategory,
},
{
ResourceOwner: resourceOwner,
Service: path.ExchangeService,
Category: path.ContactsCategory,
},
},
},
inputFiles: fileNames,
expected: []path.Path{
emailDeltaPath,
emailPathsPath,
contactsDeltaPath,
contactsPathsPath,
},
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
ctx, flush := tester.NewContext()
defer flush()
mr := &mockRestorer{}
_, err := collectMetadata(ctx, mr, test.inputMan, test.inputFiles, tenant)
assert.NoError(t, err)
checkPaths(t, test.expected, mr.gotPaths)
})
}
}

View File

@ -3,6 +3,8 @@ package operations
import ( import (
"context" "context"
"fmt" "fmt"
"runtime/debug"
"sort"
"time" "time"
"github.com/alcionai/clues" "github.com/alcionai/clues"
@ -106,35 +108,89 @@ type restorer interface {
// Run begins a synchronous restore operation. // Run begins a synchronous restore operation.
func (op *RestoreOperation) Run(ctx context.Context) (restoreDetails *details.Details, err error) { func (op *RestoreOperation) Run(ctx context.Context) (restoreDetails *details.Details, err error) {
ctx, end := D.Span(ctx, "operations:restore:run") defer func() {
defer end() if r := recover(); r != nil {
var rerr error
if re, ok := r.(error); ok {
rerr = re
} else if re, ok := r.(string); ok {
rerr = clues.New(re)
} else {
rerr = clues.New(fmt.Sprintf("%v", r))
}
err = clues.Wrap(rerr, "panic recovery").
WithClues(ctx).
With("stacktrace", string(debug.Stack()))
logger.Ctx(ctx).
With("err", err).
Errorw("backup panic", clues.InErr(err).Slice()...)
}
}()
var ( var (
opStats = restoreStats{ opStats = restoreStats{
bytesRead: &stats.ByteCounter{}, bytesRead: &stats.ByteCounter{},
restoreID: uuid.NewString(), restoreID: uuid.NewString(),
} }
startTime = time.Now() start = time.Now()
detailsStore = streamstore.New(op.kopia, op.account.ID(), op.Selectors.PathService())
) )
// -----
// Setup
// -----
ctx, end := D.Span(ctx, "operations:restore:run")
defer func() { defer func() {
end()
// wait for the progress display to clean up // wait for the progress display to clean up
observe.Complete() observe.Complete()
err = op.persistResults(ctx, startTime, &opStats)
if err != nil {
return
}
}() }()
detailsStore := streamstore.New(op.kopia, op.account.ID(), op.Selectors.PathService())
ctx = clues.AddAll( ctx = clues.AddAll(
ctx, ctx,
"tenant_id", op.account.ID(), // TODO: pii "tenant_id", op.account.ID(), // TODO: pii
"backup_id", op.BackupID, "backup_id", op.BackupID,
"service", op.Selectors.Service) "service", op.Selectors.Service)
// -----
// Execution
// -----
deets, err := op.do(ctx, &opStats, detailsStore, start)
if err != nil {
// No return here! We continue down to persistResults, even in case of failure.
logger.Ctx(ctx).
With("err", err).
Errorw("doing restore", clues.InErr(err).Slice()...)
op.Errors.Fail(errors.Wrap(err, "doing restore"))
opStats.readErr = op.Errors.Err()
}
// -----
// Persistence
// -----
err = op.persistResults(ctx, start, &opStats)
if err != nil {
op.Errors.Fail(errors.Wrap(err, "persisting restore results"))
opStats.writeErr = op.Errors.Err()
return nil, op.Errors.Err()
}
logger.Ctx(ctx).Infow("completed restore", "results", op.Results)
return deets, nil
}
func (op *RestoreOperation) do(
ctx context.Context,
opStats *restoreStats,
detailsStore detailsReader,
start time.Time,
) (*details.Details, error) {
bup, deets, err := getBackupAndDetailsFromID( bup, deets, err := getBackupAndDetailsFromID(
ctx, ctx,
op.BackupID, op.BackupID,
@ -142,30 +198,28 @@ func (op *RestoreOperation) Run(ctx context.Context) (restoreDetails *details.De
detailsStore, detailsStore,
) )
if err != nil { if err != nil {
opStats.readErr = errors.Wrap(err, "restore") return nil, errors.Wrap(err, "getting backup and details")
return nil, opStats.readErr
} }
ctx = clues.Add(ctx, "resource_owner", bup.Selector.DiscreteOwner) paths, err := formatDetailsForRestoration(ctx, op.Selectors, deets)
if err != nil {
return nil, errors.Wrap(err, "formatting paths from details")
}
ctx = clues.AddAll(
ctx,
"resource_owner", bup.Selector.DiscreteOwner,
"details_paths", len(paths))
op.bus.Event( op.bus.Event(
ctx, ctx,
events.RestoreStart, events.RestoreStart,
map[string]any{ map[string]any{
events.StartTime: startTime, events.StartTime: start,
events.BackupID: op.BackupID, events.BackupID: op.BackupID,
events.BackupCreateTime: bup.CreationTime, events.BackupCreateTime: bup.CreationTime,
events.RestoreID: opStats.restoreID, events.RestoreID: opStats.restoreID,
}, })
)
paths, err := formatDetailsForRestoration(ctx, op.Selectors, deets)
if err != nil {
opStats.readErr = err
return nil, err
}
ctx = clues.Add(ctx, "details_paths", len(paths))
observe.Message(ctx, observe.Safe(fmt.Sprintf("Discovered %d items in backup %s to restore", len(paths), op.BackupID))) observe.Message(ctx, observe.Safe(fmt.Sprintf("Discovered %d items in backup %s to restore", len(paths), op.BackupID)))
@ -175,39 +229,45 @@ func (op *RestoreOperation) Run(ctx context.Context) (restoreDetails *details.De
dcs, err := op.kopia.RestoreMultipleItems(ctx, bup.SnapshotID, paths, opStats.bytesRead) dcs, err := op.kopia.RestoreMultipleItems(ctx, bup.SnapshotID, paths, opStats.bytesRead)
if err != nil { if err != nil {
opStats.readErr = errors.Wrap(err, "retrieving service data") return nil, errors.Wrap(err, "retrieving collections from repository")
return nil, opStats.readErr
} }
kopiaComplete <- struct{}{} kopiaComplete <- struct{}{}
ctx = clues.Add(ctx, "collections", len(dcs)) ctx = clues.Add(ctx, "coll_count", len(dcs))
// should always be 1, since backups are 1:1 with resourceOwners.
opStats.resourceCount = 1
opStats.cs = dcs opStats.cs = dcs
opStats.resourceCount = len(data.ResourceOwnerSet(dcs))
gc, err := connectToM365(ctx, op.Selectors, op.account) gc, err := connectToM365(ctx, op.Selectors, op.account)
if err != nil { if err != nil {
opStats.readErr = errors.Wrap(err, "connecting to M365") return nil, errors.Wrap(err, "connecting to M365")
return nil, opStats.readErr
} }
restoreComplete, closer := observe.MessageWithCompletion(ctx, observe.Safe("Restoring data")) restoreComplete, closer := observe.MessageWithCompletion(ctx, observe.Safe("Restoring data"))
defer closer() defer closer()
defer close(restoreComplete) defer close(restoreComplete)
restoreDetails, err = gc.RestoreDataCollections( restoreDetails, err := gc.RestoreDataCollections(
ctx, ctx,
bup.Version,
op.account, op.account,
op.Selectors, op.Selectors,
op.Destination, op.Destination,
op.Options,
dcs) dcs)
if err != nil { if err != nil {
opStats.writeErr = errors.Wrap(err, "restoring service data") return nil, errors.Wrap(err, "restoring collections")
return nil, opStats.writeErr
} }
restoreComplete <- struct{}{} restoreComplete <- struct{}{}
opStats.gc = gc.AwaitStatus() opStats.gc = gc.AwaitStatus()
// TODO(keepers): remove when fault.Errors handles all iterable error aggregation.
if opStats.gc.ErrorCount > 0 {
return nil, opStats.gc.Err
}
logger.Ctx(ctx).Debug(gc.PrintableStatus()) logger.Ctx(ctx).Debug(gc.PrintableStatus())
@ -236,14 +296,20 @@ func (op *RestoreOperation) persistResults(
opStats.writeErr) opStats.writeErr)
} }
if opStats.readErr == nil && opStats.writeErr == nil && opStats.gc.Successful == 0 { op.Results.BytesRead = opStats.bytesRead.NumBytes
op.Results.ItemsRead = len(opStats.cs) // TODO: file count, not collection count
op.Results.ResourceOwners = opStats.resourceCount
if opStats.gc == nil {
op.Status = Failed
return errors.New("restoration never completed")
}
if opStats.gc.Successful == 0 {
op.Status = NoData op.Status = NoData
} }
op.Results.BytesRead = opStats.bytesRead.NumBytes
op.Results.ItemsRead = len(opStats.cs) // TODO: file count, not collection count
op.Results.ItemsWritten = opStats.gc.Successful op.Results.ItemsWritten = opStats.gc.Successful
op.Results.ResourceOwners = opStats.resourceCount
dur := op.Results.CompletedAt.Sub(op.Results.StartedAt) dur := op.Results.CompletedAt.Sub(op.Results.StartedAt)
@ -300,6 +366,17 @@ func formatDetailsForRestoration(
paths[i] = p paths[i] = p
} }
// TODO(meain): Move this to onedrive specific component, but as
// of now the paths can technically be from multiple services
// This sort is done primarily to order `.meta` files after `.data`
// files. This is only a necessity for OneDrive as we are storing
// metadata for files/folders in separate meta files and we need the
// data to be restored before we can restore the metadata.
sort.Slice(paths, func(i, j int) bool {
return paths[i].String() < paths[j].String()
})
if errs != nil { if errs != nil {
return nil, errs return nil, errs
} }

View File

@ -14,6 +14,8 @@ import (
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
const Version = 1
// Backup represents the result of a backup operation // Backup represents the result of a backup operation
type Backup struct { type Backup struct {
model.BaseModel model.BaseModel
@ -32,6 +34,9 @@ type Backup struct {
// Selector used in this operation // Selector used in this operation
Selector selectors.Selector `json:"selectors"` Selector selectors.Selector `json:"selectors"`
// Version represents the version of the backup format
Version int `json:"version"`
// Errors contains all errors aggregated during a backup operation. // Errors contains all errors aggregated during a backup operation.
Errors fault.ErrorsData `json:"errors"` Errors fault.ErrorsData `json:"errors"`
@ -67,6 +72,7 @@ func New(
Errors: errs.Data(), Errors: errs.Data(),
ReadWrites: rw, ReadWrites: rw,
StartAndEndTime: se, StartAndEndTime: se,
Version: Version,
} }
} }

View File

@ -6,10 +6,11 @@ import (
// Options holds the optional configurations for a process // Options holds the optional configurations for a process
type Options struct { type Options struct {
Collision CollisionPolicy `json:"-"` Collision CollisionPolicy `json:"-"`
DisableMetrics bool `json:"disableMetrics"` DisableMetrics bool `json:"disableMetrics"`
FailFast bool `json:"failFast"` FailFast bool `json:"failFast"`
ToggleFeatures Toggles `json:"ToggleFeatures"` RestorePermissions bool `json:"restorePermissions"`
ToggleFeatures Toggles `json:"ToggleFeatures"`
} }
// Defaults provides an Options with the default values set. // Defaults provides an Options with the default values set.
@ -74,4 +75,9 @@ type Toggles struct {
// DisableIncrementals prevents backups from using incremental lookups, // DisableIncrementals prevents backups from using incremental lookups,
// forcing a new, complete backup of all data regardless of prior state. // forcing a new, complete backup of all data regardless of prior state.
DisableIncrementals bool `json:"exchangeIncrementals,omitempty"` DisableIncrementals bool `json:"exchangeIncrementals,omitempty"`
// EnablePermissionsBackup is used to enable backups of item
// permissions. Permission metadata increases graph api call count,
// so disabling their retrieval when not needed is advised.
EnablePermissionsBackup bool `json:"enablePermissionsBackup,omitempty"`
} }

View File

@ -87,16 +87,19 @@ func (e *Errors) Fail(err error) *Errors {
// setErr handles setting errors.err. Sync locking gets // setErr handles setting errors.err. Sync locking gets
// handled upstream of this call. // handled upstream of this call.
func (e *Errors) setErr(err error) *Errors { func (e *Errors) setErr(err error) *Errors {
if e.err != nil { if e.err == nil {
return e.addErr(err) e.err = err
return e
} }
e.err = err e.errs = append(e.errs, err)
return e return e
} }
// TODO: introduce Adder interface type Adder interface {
Add(err error) *Errors
}
// Add appends the error to the slice of recoverable and // Add appends the error to the slice of recoverable and
// iterated errors (ie: errors.errs). If failFast is true, // iterated errors (ie: errors.errs). If failFast is true,

View File

@ -73,6 +73,8 @@ func (suite *FaultErrorsUnitSuite) TestErr() {
suite.T().Run(test.name, func(t *testing.T) { suite.T().Run(test.name, func(t *testing.T) {
n := fault.New(test.failFast) n := fault.New(test.failFast)
require.NotNil(t, n) require.NotNil(t, n)
require.NoError(t, n.Err())
require.Empty(t, n.Errs())
e := n.Fail(test.fail) e := n.Fail(test.fail)
require.NotNil(t, e) require.NotNil(t, e)
@ -90,6 +92,8 @@ func (suite *FaultErrorsUnitSuite) TestFail() {
n := fault.New(false) n := fault.New(false)
require.NotNil(t, n) require.NotNil(t, n)
require.NoError(t, n.Err())
require.Empty(t, n.Errs())
n.Fail(assert.AnError) n.Fail(assert.AnError)
assert.Error(t, n.Err()) assert.Error(t, n.Err())

Some files were not shown because too many files have changed in this diff Show More