Merge branch 'main' into defer_resource_generation
commit 99c5db9a2e
@@ -12,6 +12,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

### Fixed

- Handle OneDrive folders being deleted and recreated midway through a backup
- Automatically re-run a full delta query on incremental if the prior backup is found to have malformed prior-state information.

## [v0.15.0] (beta) - 2023-10-31
@@ -125,7 +125,10 @@ issues:
    linters:
      - forbidigo
    text: "context.(Background|TODO)"
  - path: internal/m365/graph/betasdk
  - path: internal/m365/collection/drive/collections_test.go
    linters:
      - lll
  - path: pkg/services/m365/api/graph/betasdk
    linters:
      - wsl
      - revive
@@ -18,7 +18,7 @@ lint: check-lint-version
fmt:
    gofumpt -w .
    goimports -w .
    gci write --skip-generated -s 'standard,default,prefix(github.com/alcionai/corso)' .
    gci write --skip-generated -s 'standard' -s 'default' -s 'prefix(github.com/alcionai/corso)' .

check-lint-version: check-lint
    @if [ "$(LINT_VERSION)" != "$(WANTED_LINT_VERSION)" ]; then \
@@ -14,7 +14,6 @@ import (
    "github.com/alcionai/corso/src/cli/utils"
    "github.com/alcionai/corso/src/internal/common/idname"
    "github.com/alcionai/corso/src/internal/data"
    "github.com/alcionai/corso/src/internal/m365/graph"
    "github.com/alcionai/corso/src/pkg/backup"
    "github.com/alcionai/corso/src/pkg/backup/details"
    "github.com/alcionai/corso/src/pkg/control"
@@ -22,6 +21,7 @@ import (
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/repository"
    "github.com/alcionai/corso/src/pkg/selectors"
    "github.com/alcionai/corso/src/pkg/services/m365/api/graph"
    "github.com/alcionai/corso/src/pkg/store"
)
@@ -16,8 +16,6 @@ import (
    "github.com/alcionai/corso/src/cli/print"
    cliTD "github.com/alcionai/corso/src/cli/testdata"
    "github.com/alcionai/corso/src/internal/common/ptr"
    "github.com/alcionai/corso/src/internal/m365/graph"
    gmock "github.com/alcionai/corso/src/internal/m365/graph/mock"
    "github.com/alcionai/corso/src/internal/tester"
    "github.com/alcionai/corso/src/internal/tester/tconfig"
    "github.com/alcionai/corso/src/pkg/account"
@@ -26,6 +24,8 @@ import (
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/repository"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
    "github.com/alcionai/corso/src/pkg/services/m365/api/graph"
    gmock "github.com/alcionai/corso/src/pkg/services/m365/api/graph/mock"
    "github.com/alcionai/corso/src/pkg/storage"
    "github.com/alcionai/corso/src/pkg/storage/testdata"
)
@@ -11,6 +11,7 @@ import (

    "github.com/alcionai/corso/src/cli/backup"
    "github.com/alcionai/corso/src/cli/config"
    "github.com/alcionai/corso/src/cli/debug"
    "github.com/alcionai/corso/src/cli/export"
    "github.com/alcionai/corso/src/cli/flags"
    "github.com/alcionai/corso/src/cli/help"
@@ -125,6 +126,7 @@ func BuildCommandTree(cmd *cobra.Command) {
    backup.AddCommands(cmd)
    restore.AddCommands(cmd)
    export.AddCommands(cmd)
    debug.AddCommands(cmd)
    help.AddCommands(cmd)
}
@@ -30,6 +30,9 @@ func m365Overrides(in map[string]string) map[string]string {
    }
}

// add m365 config key names that require path related validations
var m365PathKeys = []string{}

// configureAccount builds a complete account configuration from a mix of
// viper properties and manual overrides.
func configureAccount(
@@ -57,7 +60,7 @@ func configureAccount(
        return acct, clues.New("unsupported account provider: [" + providerType + "]")
    }

    if err := mustMatchConfig(vpr, m365Overrides(overrides)); err != nil {
    if err := mustMatchConfig(vpr, m365Overrides(overrides), m365PathKeys); err != nil {
        return acct, clues.Wrap(err, "verifying m365 configs in corso config file")
    }
}
@@ -2,8 +2,11 @@ package config

import (
    "context"
    "errors"
    "io/fs"
    "os"
    "path/filepath"
    "slices"
    "strings"

    "github.com/alcionai/clues"
@@ -16,6 +19,7 @@ import (
    "github.com/alcionai/corso/src/pkg/account"
    "github.com/alcionai/corso/src/pkg/control/repository"
    "github.com/alcionai/corso/src/pkg/logger"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/storage"
)
@@ -71,8 +75,8 @@ func init() {

// adds the persistent flag --config-file to the provided command.
func AddConfigFlags(cmd *cobra.Command) {
    fs := cmd.PersistentFlags()
    fs.StringVar(
    pf := cmd.PersistentFlags()
    pf.StringVar(
        &configFilePathFlag,
        "config-file", displayDefaultFP, "config file location")
}
@@ -84,17 +88,22 @@ func AddConfigFlags(cmd *cobra.Command) {

// InitFunc provides a func that lazily initializes viper and
// verifies that the configuration was able to read a file.
func InitFunc(cmd *cobra.Command, args []string) error {
    ctx := cmd.Context()

    fp := configFilePathFlag
    if len(fp) == 0 || fp == displayDefaultFP {
        fp = configFilePath
    }

    err := initWithViper(GetViper(cmd.Context()), fp)
    if err != nil {
    vpr := GetViper(ctx)

    if err := initWithViper(vpr, fp); err != nil {
        return err
    }

    return Read(cmd.Context())
    ctx = SetViper(ctx, vpr)

    return Read(ctx)
}

// initWithViper implements InitConfig, but takes in a viper
@@ -112,16 +121,7 @@ func initWithViper(vpr *viper.Viper, configFP string) error {
        vpr.AddConfigPath(configDir)
        vpr.SetConfigType("toml")
        vpr.SetConfigName(".corso")

        return nil
    }

    vpr.SetConfigFile(configFP)
    // We also configure the path, type and filename
    // because `vpr.SafeWriteConfig` needs these set to
    // work correctly (it does not use the configured file)
    vpr.AddConfigPath(filepath.Dir(configFP))

    } else {
        ext := filepath.Ext(configFP)
        if len(ext) == 0 {
            return clues.New("config file requires an extension e.g. `toml`")
@@ -131,6 +131,12 @@ func initWithViper(vpr *viper.Viper, configFP string) error {
        fileName = strings.TrimSuffix(fileName, ext)
        vpr.SetConfigType(strings.TrimPrefix(ext, "."))
        vpr.SetConfigName(fileName)
        vpr.SetConfigFile(configFP)
        // We also configure the path, type and filename
        // because `vpr.SafeWriteConfig` needs these set to
        // work correctly (it does not use the configured file)
        vpr.AddConfigPath(filepath.Dir(configFP))
    }

    return nil
}
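The comment in the hunk above notes that `vpr.SafeWriteConfig` derives its target from the configured path/type/name rather than from `SetConfigFile`. A minimal, standalone sketch of that setup, assuming a made-up file path (`/tmp/demo.toml` is illustrative, not from the commit):

    package main

    import (
        "path/filepath"
        "strings"

        "github.com/spf13/viper"
    )

    func main() {
        configFP := "/tmp/demo.toml" // hypothetical path for illustration

        vpr := viper.New()
        vpr.SetConfigFile(configFP)

        // SafeWriteConfig resolves its destination from the config path,
        // type, and name, so mirror the same setup as initWithViper.
        ext := filepath.Ext(configFP)
        vpr.SetConfigType(strings.TrimPrefix(ext, "."))
        vpr.SetConfigName(strings.TrimSuffix(filepath.Base(configFP), ext))
        vpr.AddConfigPath(filepath.Dir(configFP))

        // SafeWriteConfig errors if the file already exists.
        _ = vpr.SafeWriteConfig()
    }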
@@ -280,7 +286,10 @@ func getStorageAndAccountWithViper(
    // possibly read the prior config from a .corso file
    if readFromFile {
        if err := vpr.ReadInConfig(); err != nil {
            if _, ok := err.(viper.ConfigFileNotFoundError); !ok {
            configNotSet := errors.As(err, &viper.ConfigFileNotFoundError{})
            configNotFound := errors.Is(err, fs.ErrNotExist)

            if !configNotSet && !configNotFound {
                return config, clues.Wrap(err, "reading corso config file: "+vpr.ConfigFileUsed())
            }
@@ -333,7 +342,7 @@ var constToTomlKeyMap = map[string]string{
// If any value differs from the viper value, an error is returned.
// values in m that aren't stored in the config are ignored.
// TODO(pandeyabs): This code is currently duplicated in 2 places.
func mustMatchConfig(vpr *viper.Viper, m map[string]string) error {
func mustMatchConfig(vpr *viper.Viper, m map[string]string, pathKeys []string) error {
    for k, v := range m {
        if len(v) == 0 {
            continue // empty variables will get caught by configuration validators, if necessary
@@ -345,7 +354,16 @@ func mustMatchConfig(vpr *viper.Viper, m map[string]string) error {
        }

        vv := vpr.GetString(tomlK)
        if v != vv {
        areEqual := false

        // some of the values may be paths, hence they require more than just string equality
        if len(pathKeys) > 0 && slices.Contains(pathKeys, k) {
            areEqual = path.ArePathsEquivalent(v, vv)
        } else {
            areEqual = v == vv
        }

        if !areEqual {
            return clues.New("value of " + k + " (" + v + ") does not match corso configuration value (" + vv + ")")
        }
    }
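The hunk above delegates path comparison to `path.ArePathsEquivalent`, whose implementation is not shown in this diff. A hedged sketch of what such a comparison might do, assuming it normalizes separators, redundant elements, and trailing slashes (this is an illustration, not the actual corso implementation):

    package main

    import (
        "fmt"
        "path/filepath"
        "strings"
    )

    // arePathsEquivalent is a hypothetical stand-in for path.ArePathsEquivalent:
    // two strings compare equal once separators are normalized, redundant
    // elements are cleaned, and trailing separators are dropped.
    func arePathsEquivalent(a, b string) bool {
        norm := func(p string) string {
            p = filepath.Clean(filepath.FromSlash(p))
            return strings.TrimSuffix(p, string(filepath.Separator))
        }

        return norm(a) == norm(b)
    }

    func main() {
        fmt.Println(arePathsEquivalent("/tmp/corso/", "/tmp//corso")) // true
        fmt.Println(arePathsEquivalent("/tmp/corso", "/tmp/other"))   // false
    }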
@@ -216,6 +216,8 @@ func (suite *ConfigSuite) TestMustMatchConfig() {
    s3Cfg := &storage.S3Config{Bucket: bkt}
    m365 := account.M365Config{AzureTenantID: tid}

    m365PathKeys := []string{}

    err = writeRepoConfigWithViper(vpr, s3Cfg, m365, repository.Options{}, "repoid")
    require.NoError(t, err, "writing repo config", clues.ToCore(err))

@@ -272,7 +274,7 @@ func (suite *ConfigSuite) TestMustMatchConfig() {
    }
    for _, test := range table {
        suite.Run(test.name, func() {
            test.errCheck(suite.T(), mustMatchConfig(vpr, test.input), clues.ToCore(err))
            test.errCheck(suite.T(), mustMatchConfig(vpr, test.input, m365PathKeys), clues.ToCore(err))
        })
    }
}
@@ -45,15 +45,17 @@ func configureStorage(
        return store, clues.Wrap(err, "validating corso credentials")
    }

    configDir, _ := filepath.Split(vpr.ConfigFileUsed())

    cCfg := storage.CommonConfig{
        Corso: corso,
        KopiaCfgDir: configDir,
    }
    // the following is a hack purely for integration testing.
    // the value is not required, and if empty, kopia will default
    // to its routine behavior
    if t, ok := vpr.Get("corso-testing").(bool); t && ok {
        dir, _ := filepath.Split(vpr.ConfigFileUsed())
        cCfg.KopiaCfgDir = dir
        cCfg.KopiaCfgDir = configDir
    }

    // ensure required properties are present
@@ -3,11 +3,13 @@ package debug

import (
    "context"

    "github.com/alcionai/clues"
    "github.com/spf13/cobra"

    "github.com/alcionai/corso/src/cli/flags"
    . "github.com/alcionai/corso/src/cli/print"
    "github.com/alcionai/corso/src/cli/utils"
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/selectors"
)

@@ -31,9 +33,9 @@ func AddCommands(cmd *cobra.Command) {
        utils.AddCommand(debugC, subCommand, utils.MarkDebugCommand())

        for _, addTo := range debugCommands {
            addTo(subCommand)
            flags.AddAllProviderFlags(subCommand)
            flags.AddAllStorageFlags(subCommand)
            servCmd := addTo(subCommand)
            flags.AddAllProviderFlags(servCmd)
            flags.AddAllStorageFlags(servCmd)
        }
    }
}
@@ -85,13 +87,15 @@ func handleMetadataFilesCmd(cmd *cobra.Command, args []string) error {
// runners
// ---------------------------------------------------------------------------

func runMetadataFiles(
func genericMetadataFiles(
    ctx context.Context,
    cmd *cobra.Command,
    args []string,
    sel selectors.Selector,
    debugID, serviceName string,
    backupID string,
) error {
    ctx = clues.Add(ctx, "backup_id", backupID)

    r, _, err := utils.GetAccountAndConnect(ctx, cmd, sel.PathService())
    if err != nil {
        return Only(ctx, err)
@@ -99,7 +103,18 @@ func runMetadataFiles(

    defer utils.CloseRepo(ctx, r)

    // TODO: read and print out all metadata files in the debug
    // read metadata
    files, err := r.GetBackupMetadata(ctx, sel, backupID, fault.New(true))
    if err != nil {
        return Only(ctx, clues.Wrap(err, "retrieving metadata files"))
    }

    for _, file := range files {
        Infof(ctx, "\n------------------------------")
        Info(ctx, file.Name)
        Info(ctx, file.Path)
        Pretty(ctx, file.Data)
    }

    return nil
}
@@ -59,13 +59,17 @@ func metadataFilesExchangeCmd(cmd *cobra.Command, args []string) error {
        return nil
    }

    // opts := utils.MakeExchangeOpts(cmd)

    if flags.RunModeFV == flags.RunModeFlagTest {
        return nil
    }

    sel := selectors.NewExchangeBackup([]string{"unused-placeholder"})
    sel.Include(sel.AllData())

    return runMetadataFiles(ctx, cmd, args, sel.Selector, flags.BackupIDFV, "Exchange")
    return genericMetadataFiles(
        ctx,
        cmd,
        args,
        sel.Selector,
        flags.BackupIDFV)
}
@@ -60,13 +60,17 @@ func metadataFilesGroupsCmd(cmd *cobra.Command, args []string) error {
        return nil
    }

    // opts := utils.MakeGroupsOpts(cmd)

    if flags.RunModeFV == flags.RunModeFlagTest {
        return nil
    }

    sel := selectors.NewGroupsBackup([]string{"unused-placeholder"})
    sel.Include(sel.AllData())

    return runMetadataFiles(ctx, cmd, args, sel.Selector, flags.BackupIDFV, "Groups")
    return genericMetadataFiles(
        ctx,
        cmd,
        args,
        sel.Selector,
        flags.BackupIDFV)
}
@@ -59,13 +59,17 @@ func metadataFilesOneDriveCmd(cmd *cobra.Command, args []string) error {
        return nil
    }

    // opts := utils.MakeOneDriveOpts(cmd)

    if flags.RunModeFV == flags.RunModeFlagTest {
        return nil
    }

    sel := selectors.NewOneDriveBackup([]string{"unused-placeholder"})
    sel.Include(sel.AllData())

    return runMetadataFiles(ctx, cmd, args, sel.Selector, flags.BackupIDFV, "OneDrive")
    return genericMetadataFiles(
        ctx,
        cmd,
        args,
        sel.Selector,
        flags.BackupIDFV)
}
@@ -59,13 +59,17 @@ func metadataFilesSharePointCmd(cmd *cobra.Command, args []string) error {
        return nil
    }

    // opts := utils.MakeSharePointOpts(cmd)

    if flags.RunModeFV == flags.RunModeFlagTest {
        return nil
    }

    sel := selectors.NewSharePointBackup([]string{"unused-placeholder"})
    sel.Include(sel.LibraryFolders(selectors.Any()))

    return runMetadataFiles(ctx, cmd, args, sel.Selector, flags.BackupIDFV, "SharePoint")
    return genericMetadataFiles(
        ctx,
        cmd,
        args,
        sel.Selector,
        flags.BackupIDFV)
}
@@ -119,10 +119,21 @@ func Infof(ctx context.Context, t string, s ...any) {
    outf(ctx, getRootCmd(ctx).ErrOrStderr(), t, s...)
}

// Pretty prettifies and prints the value.
func Pretty(ctx context.Context, a any) {
    if a == nil {
        Err(ctx, "<nil>")
        return
    }

    printPrettyJSON(getRootCmd(ctx).ErrOrStderr(), a)
}

// PrettyJSON prettifies and prints the value.
func PrettyJSON(ctx context.Context, p minimumPrintabler) {
    if p == nil {
        Err(ctx, "<nil>")
        return
    }

    outputJSON(getRootCmd(ctx).ErrOrStderr(), p, outputAsJSONDebug)
@@ -281,3 +292,14 @@ func printJSON(w io.Writer, a any) {

    fmt.Fprintln(w, string(pretty.Pretty(bs)))
}

// output to stdout the list of printable structs as prettified json.
func printPrettyJSON(w io.Writer, a any) {
    bs, err := json.MarshalIndent(a, "", " ")
    if err != nil {
        fmt.Fprintf(w, "error formatting results to json: %v\n", err)
        return
    }

    fmt.Fprintln(w, string(pretty.Pretty(bs)))
}
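printPrettyJSON marshals the value with indentation and then runs the bytes through tidwall/pretty. A small self-contained sketch of the same pipeline, with an invented sample value:

    package main

    import (
        "encoding/json"
        "fmt"
        "os"

        "github.com/tidwall/pretty"
    )

    func main() {
        // Sample value, invented for this example.
        v := map[string]any{"name": "backup.json", "size": 42}

        bs, err := json.MarshalIndent(v, "", " ")
        if err != nil {
            fmt.Fprintf(os.Stderr, "error formatting results to json: %v\n", err)
            return
        }

        // pretty.Pretty re-formats the JSON bytes with its own layout rules.
        fmt.Fprintln(os.Stdout, string(pretty.Pretty(bs)))
    }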
@@ -42,28 +42,28 @@ func (suite *GroupsUtilsSuite) TestIncludeGroupsRestoreDataSelectors() {
        {
            name: "no inputs",
            opts: utils.GroupsOpts{},
            expectIncludeLen: 2,
            expectIncludeLen: 3,
        },
        {
            name: "empty",
            opts: utils.GroupsOpts{
                Groups: empty,
            },
            expectIncludeLen: 2,
            expectIncludeLen: 3,
        },
        {
            name: "single inputs",
            opts: utils.GroupsOpts{
                Groups: single,
            },
            expectIncludeLen: 2,
            expectIncludeLen: 3,
        },
        {
            name: "multi inputs",
            opts: utils.GroupsOpts{
                Groups: multi,
            },
            expectIncludeLen: 2,
            expectIncludeLen: 3,
        },
        // sharepoint
        {
@@ -120,7 +120,7 @@ func (suite *GroupsUtilsSuite) TestIncludeGroupsRestoreDataSelectors() {
                FileName: empty,
                FolderPath: empty,
            },
            expectIncludeLen: 2,
            expectIncludeLen: 3,
        },
        {
            name: "library folder suffixes and contains",
@@ -128,7 +128,7 @@ func (suite *GroupsUtilsSuite) TestIncludeGroupsRestoreDataSelectors() {
                FileName: empty,
                FolderPath: empty,
            },
            expectIncludeLen: 2,
            expectIncludeLen: 3,
        },
        {
            name: "Page Folder",
@@ -389,7 +389,7 @@ func (suite *GroupsUtilsSuite) TestAddGroupsCategories() {
        {
            name: "none",
            cats: []string{},
            expectScopeLen: 2,
            expectScopeLen: 3,
        },
        {
            name: "libraries",
@@ -12,8 +12,8 @@ import (
    "github.com/alcionai/corso/src/cmd/sanity_test/common"
    "github.com/alcionai/corso/src/cmd/sanity_test/export"
    "github.com/alcionai/corso/src/cmd/sanity_test/restore"
    "github.com/alcionai/corso/src/internal/m365/graph"
    "github.com/alcionai/corso/src/pkg/logger"
    "github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

// ---------------------------------------------------------------------------

@@ -34,6 +34,7 @@ require (
    github.com/tidwall/pretty v1.2.1
    github.com/tomlazar/table v0.1.2
    github.com/vbauerster/mpb/v8 v8.1.6
    go.uber.org/goleak v1.3.0
    go.uber.org/zap v1.26.0
    golang.org/x/exp v0.0.0-20230905200255-921286631fa9
    golang.org/x/time v0.4.0

@@ -468,8 +468,8 @@ go.opentelemetry.io/otel/metric v1.19.0 h1:aTzpGtV0ar9wlV4Sna9sdJyII5jTVJEvKETPi
go.opentelemetry.io/otel/metric v1.19.0/go.mod h1:L5rUsV9kM1IxCj1MmSdS+JQAcVm319EUrDVLrt7jqt8=
go.opentelemetry.io/otel/trace v1.19.0 h1:DFVQmlVbfVeOuBRrwdtaehRrWiL1JoVs9CPIQ1Dzxpg=
go.opentelemetry.io/otel/trace v1.19.0/go.mod h1:mfaSyvGyEJEI0nyV2I4qhNQnbBOUUmYZpYojqMnX2vo=
go.uber.org/goleak v1.2.0 h1:xqgm/S+aQvhWFTtR0XK3Jvg7z8kGV8P4X14IzwN3Eqk=
go.uber.org/goleak v1.2.0/go.mod h1:XJYK+MuIchqpmGmUSAzotztawfKvYLUIgg7guXrwVUo=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
go.uber.org/zap v1.26.0 h1:sI7k6L95XOKS281NhVKOFCUNIvv9e0w4BF8N3u+tCRo=
8  src/internal/common/limiters/limiter.go  Normal file
@@ -0,0 +1,8 @@
package limiters

import "context"

type Limiter interface {
    Wait(ctx context.Context) error
    Shutdown()
}
191  src/internal/common/limiters/sliding_window.go  Normal file
@@ -0,0 +1,191 @@
package limiters

import (
    "context"
    "sync"
    "time"

    "github.com/alcionai/clues"
)

type token struct{}

type fixedWindow struct {
    count []int
}

var _ Limiter = &slidingWindow{}

type slidingWindow struct {
    // capacity is the maximum number of requests allowed in a sliding window at
    // any given time.
    capacity int
    // windowSize is the total duration of the sliding window. Limiter will allow
    // at most capacity requests in this duration.
    windowSize time.Duration
    // slideInterval controls how frequently the window slides. Smaller interval
    // provides better accuracy at the cost of more frequent sliding & more
    // memory usage.
    slideInterval time.Duration

    // numIntervals is the number of intervals in the window. Calculated as
    // windowSize / slideInterval.
    numIntervals int
    // currentInterval tracks the current slide interval
    currentInterval int

    // Each request acquires a token from the permits channel. If the channel
    // is empty, the request is blocked until a permit is available or if the
    // context is cancelled.
    permits chan token

    // curr and prev are fixed windows of size windowSize. Each window contains
    // a slice of intervals which hold a count of the number of tokens granted
    // during that interval.
    curr fixedWindow
    prev fixedWindow

    // mu synchronizes access to the curr and prev windows
    mu sync.Mutex
    // stopTicker stops the recurring slide ticker
    stopTicker chan struct{}
    closeOnce  sync.Once
}

func NewSlidingWindowLimiter(
    windowSize, slideInterval time.Duration,
    capacity int,
) (Limiter, error) {
    if err := validate(windowSize, slideInterval, capacity); err != nil {
        return nil, err
    }

    ni := int(windowSize / slideInterval)

    s := &slidingWindow{
        windowSize:    windowSize,
        slideInterval: slideInterval,
        capacity:      capacity,
        permits:       make(chan token, capacity),
        numIntervals:  ni,
        prev: fixedWindow{
            count: make([]int, ni),
        },
        curr: fixedWindow{
            count: make([]int, ni),
        },
        currentInterval: -1,
        stopTicker:      make(chan struct{}),
    }

    s.initialize()

    return s, nil
}

// Wait blocks a request until a token is available or the context is cancelled.
// TODO(pandeyabs): Implement WaitN.
func (s *slidingWindow) Wait(ctx context.Context) error {
    select {
    case <-ctx.Done():
        return clues.Stack(ctx.Err())
    case <-s.permits:
        s.mu.Lock()
        defer s.mu.Unlock()

        s.curr.count[s.currentInterval]++
    }

    return nil
}

// Shutdown cleans up the slide goroutine. If shutdown is not called, the slide
// goroutine will continue to run until the program exits.
func (s *slidingWindow) Shutdown() {
    s.closeOnce.Do(func() {
        close(s.stopTicker)
    })
}

// initialize starts the slide goroutine and prefills tokens to full capacity.
func (s *slidingWindow) initialize() {
    // Ok to not hold the mutex here since nothing else is running yet.
    s.nextInterval()

    // Start a goroutine which runs every slideInterval. This goroutine will
    // continue to run until the program exits or until Shutdown is called.
    go func() {
        ticker := time.NewTicker(s.slideInterval)

        for {
            select {
            case <-ticker.C:
                s.slide()
            case <-s.stopTicker:
                ticker.Stop()
                return
            }
        }
    }()

    // Prefill permits to allow tokens to be granted immediately
    for i := 0; i < s.capacity; i++ {
        s.permits <- token{}
    }
}

// nextInterval increments the current interval and slides the fixed
// windows if needed. Should be called with the mutex held.
func (s *slidingWindow) nextInterval() {
    // Increment current interval
    s.currentInterval = (s.currentInterval + 1) % s.numIntervals

    // Slide the fixed windows if windowSize time has elapsed.
    if s.currentInterval == 0 {
        s.prev = s.curr
        s.curr = fixedWindow{
            count: make([]int, s.numIntervals),
        }
    }
}

// slide moves the window forward by one interval. It reclaims tokens from the
// interval that we slid past and adds them back to available permits.
func (s *slidingWindow) slide() {
    s.mu.Lock()
    defer s.mu.Unlock()

    s.nextInterval()

    for i := 0; i < s.prev.count[s.currentInterval]; i++ {
        s.permits <- token{}
    }
}

func validate(
    windowSize, slideInterval time.Duration,
    capacity int,
) error {
    if windowSize <= 0 {
        return clues.New("invalid window size")
    }

    if slideInterval <= 0 {
        return clues.New("invalid slide interval")
    }

    // Allow capacity to be 0 for testing purposes
    if capacity < 0 {
        return clues.New("invalid window capacity")
    }

    if windowSize < slideInterval {
        return clues.New("window too small to fit intervals")
    }

    if windowSize%slideInterval != 0 {
        return clues.New("window not divisible by slide interval")
    }

    return nil
}
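The limiter above is driven entirely through the two-method Limiter interface: Wait to admit a request, Shutdown to stop the slide goroutine. A minimal usage sketch against the API from this commit (the rate numbers are arbitrary):

    package main

    import (
        "context"
        "fmt"
        "time"

        "github.com/alcionai/corso/src/internal/common/limiters"
    )

    func main() {
        // At most 10 requests per second, sliding in 100ms steps.
        lim, err := limiters.NewSlidingWindowLimiter(time.Second, 100*time.Millisecond, 10)
        if err != nil {
            panic(err)
        }
        // Release the internal slide goroutine when done.
        defer lim.Shutdown()

        ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
        defer cancel()

        for i := 0; i < 25; i++ {
            // Wait blocks until a token frees up or the context expires.
            if err := lim.Wait(ctx); err != nil {
                fmt.Println("rate limit wait aborted:", err)
                return
            }

            fmt.Println("request", i, "admitted at", time.Now().Format(time.StampMilli))
        }
    }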
285  src/internal/common/limiters/sliding_window_test.go  Normal file
@@ -0,0 +1,285 @@
package limiters

import (
    "context"
    "math/rand"
    "sync"
    "testing"
    "time"

    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
    "github.com/stretchr/testify/suite"
    "go.uber.org/goleak"

    "github.com/alcionai/corso/src/internal/tester"
)

type SlidingWindowUnitTestSuite struct {
    tester.Suite
}

func TestSlidingWindowLimiterSuite(t *testing.T) {
    suite.Run(t, &SlidingWindowUnitTestSuite{Suite: tester.NewUnitSuite(t)})
}

// TestWaitBasic tests the Wait() functionality of the limiter with multiple
// concurrent requests.
func (suite *SlidingWindowUnitTestSuite) TestWaitBasic() {
    var (
        t          = suite.T()
        windowSize = 1 * time.Second
        // Assume slide interval is equal to window size for simplicity.
        slideInterval   = 1 * time.Second
        capacity        = 100
        startTime       = time.Now()
        numRequests     = 3 * capacity
        wg              sync.WaitGroup
        mu              sync.Mutex
        intervalToCount = make(map[time.Duration]int)
    )

    defer goleak.VerifyNone(t)

    ctx, flush := tester.NewContext(t)
    defer flush()

    s, err := NewSlidingWindowLimiter(windowSize, slideInterval, capacity)
    require.NoError(t, err)

    defer s.Shutdown()

    // Check if all tokens are available for use post initialization.
    require.Equal(t, capacity, len(s.(*slidingWindow).permits))

    // Make concurrent requests to the limiter
    for i := 0; i < numRequests; i++ {
        wg.Add(1)

        go func() {
            defer wg.Done()

            err := s.Wait(ctx)
            require.NoError(t, err)

            // Number of seconds since startTime
            bucket := time.Since(startTime).Truncate(windowSize)

            mu.Lock()
            intervalToCount[bucket]++
            mu.Unlock()
        }()
    }

    wg.Wait()

    // Verify that number of requests allowed in each window is less than or equal
    // to window capacity
    for _, c := range intervalToCount {
        require.True(t, c <= capacity, "count: %d, capacity: %d", c, capacity)
    }
}

// TestWaitSliding tests the sliding window functionality of the limiter with
// time distributed Wait() calls.
func (suite *SlidingWindowUnitTestSuite) TestWaitSliding() {
    var (
        t             = suite.T()
        windowSize    = 1 * time.Second
        slideInterval = 10 * time.Millisecond
        capacity      = 100
        // Test will run for duration of 2 windowSize.
        numRequests = 2 * capacity
        wg          sync.WaitGroup
    )

    defer goleak.VerifyNone(t)

    ctx, flush := tester.NewContext(t)
    defer flush()

    s, err := NewSlidingWindowLimiter(windowSize, slideInterval, capacity)
    require.NoError(t, err)

    // Make concurrent requests to the limiter
    for i := 0; i < numRequests; i++ {
        wg.Add(1)

        go func() {
            defer wg.Done()

            // Sleep for a random duration to spread out requests over multiple slide
            // intervals & windows, so that we can test the sliding window logic better.
            // Without this, the requests will be bunched up in the very first intervals
            // of the 2 windows. Rest of the intervals will be empty.
            time.Sleep(time.Duration(rand.Intn(1500)) * time.Millisecond)

            err := s.Wait(ctx)
            require.NoError(t, err)
        }()
    }
    wg.Wait()

    // Shutdown the ticker before accessing the internal limiter state.
    s.Shutdown()

    // Verify that number of requests allowed in each window is less than or equal
    // to window capacity
    sw := s.(*slidingWindow)
    data := append(sw.prev.count, sw.curr.count...)

    sums := slidingSums(data, sw.numIntervals)

    for _, sum := range sums {
        require.True(t, sum <= capacity, "sum: %d, capacity: %d", sum, capacity)
    }
}

func (suite *SlidingWindowUnitTestSuite) TestContextCancellation() {
    t := suite.T()

    // Since this test can infinitely block on failure conditions, run it within
    // a time contained eventually block.
    assert.Eventually(t, func() bool {
        var (
            windowSize    = 100 * time.Millisecond
            slideInterval = 10 * time.Millisecond
            wg            sync.WaitGroup
        )

        ctx, flush := tester.NewContext(t)
        defer flush()

        // Initialize limiter with capacity = 0 to test context cancellations.
        s, err := NewSlidingWindowLimiter(windowSize, slideInterval, 0)
        require.NoError(t, err)

        defer s.Shutdown()

        ctx, cancel := context.WithTimeout(ctx, 2*windowSize)
        defer cancel()

        wg.Add(1)

        go func() {
            defer wg.Done()

            err := s.Wait(ctx)
            require.ErrorIs(t, err, context.DeadlineExceeded)
        }()

        wg.Wait()

        return true
    }, 3*time.Second, 100*time.Millisecond)
}

func (suite *SlidingWindowUnitTestSuite) TestNewSlidingWindowLimiter() {
    tests := []struct {
        name          string
        windowSize    time.Duration
        slideInterval time.Duration
        capacity      int
        expectErr     assert.ErrorAssertionFunc
    }{
        {
            name:          "Invalid window size",
            windowSize:    0,
            slideInterval: 10 * time.Millisecond,
            capacity:      100,
            expectErr:     assert.Error,
        },
        {
            name:          "Invalid slide interval",
            windowSize:    100 * time.Millisecond,
            slideInterval: 0,
            capacity:      100,
            expectErr:     assert.Error,
        },
        {
            name:          "Slide interval > window size",
            windowSize:    10 * time.Millisecond,
            slideInterval: 100 * time.Millisecond,
            capacity:      100,
            expectErr:     assert.Error,
        },
        {
            name:          "Invalid capacity",
            windowSize:    100 * time.Millisecond,
            slideInterval: 10 * time.Millisecond,
            capacity:      -1,
            expectErr:     assert.Error,
        },
        {
            name:          "Window not divisible by slide interval",
            windowSize:    100 * time.Millisecond,
            slideInterval: 11 * time.Millisecond,
            capacity:      100,
            expectErr:     assert.Error,
        },
        {
            name:          "Valid parameters",
            windowSize:    100 * time.Millisecond,
            slideInterval: 10 * time.Millisecond,
            capacity:      100,
            expectErr:     assert.NoError,
        },
    }

    for _, test := range tests {
        suite.Run(test.name, func() {
            t := suite.T()

            defer goleak.VerifyNone(t)

            s, err := NewSlidingWindowLimiter(
                test.windowSize,
                test.slideInterval,
                test.capacity)
            if s != nil {
                s.Shutdown()
            }

            test.expectErr(t, err)
        })
    }
}

func slidingSums(data []int, w int) []int {
    var (
        sum = 0
        res = make([]int, len(data)-w+1)
    )

    for i := 0; i < w; i++ {
        sum += data[i]
    }

    res[0] = sum

    for i := 1; i < len(data)-w+1; i++ {
        sum = sum - data[i-1] + data[i+w-1]
        res[i] = sum
    }

    return res
}

func (suite *SlidingWindowUnitTestSuite) TestShutdown() {
    var (
        t             = suite.T()
        windowSize    = 1 * time.Second
        slideInterval = 1 * time.Second
        capacity      = 100
    )

    defer goleak.VerifyNone(t)

    s, err := NewSlidingWindowLimiter(windowSize, slideInterval, capacity)
    require.NoError(t, err)

    s.Shutdown()

    // Second call to Shutdown() should be a no-op.
    s.Shutdown()
}
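The slidingSums helper computes a rolling sum of width w over the concatenated interval counts, which is what lets TestWaitSliding assert the capacity bound across every possible window alignment. A small worked example of the same computation (the input values are invented):

    package main

    import "fmt"

    // rollingSums mirrors the slidingSums test helper: res[i] holds the sum of
    // data[i : i+w], computed incrementally by dropping the element that leaves
    // the window and adding the one that enters it.
    func rollingSums(data []int, w int) []int {
        sum := 0
        res := make([]int, len(data)-w+1)

        for i := 0; i < w; i++ {
            sum += data[i]
        }

        res[0] = sum

        for i := 1; i < len(data)-w+1; i++ {
            sum = sum - data[i-1] + data[i+w-1]
            res[i] = sum
        }

        return res
    }

    func main() {
        // With counts [3 1 4 1 5] and w=2 the rolling sums are [4 5 5 6].
        fmt.Println(rollingSums([]int{3, 1, 4, 1, 5}, 2))
    }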
@@ -1,7 +1,9 @@
package str

import (
    "encoding/hex"
    "fmt"
    "hash/crc32"
    "strconv"

    "github.com/alcionai/clues"
@@ -90,3 +92,12 @@ func SliceToMap(ss []string) map[string]struct{} {

    return m
}

func GenerateHash(input []byte) string {
    crc32Hash := crc32.NewIEEE()
    crc32Hash.Write(input)
    checksum := crc32Hash.Sum(nil)
    hashString := hex.EncodeToString(checksum)

    return hashString
}
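GenerateHash is a CRC-32 (IEEE) checksum rendered as hex, so it always yields an 8-character string; it is a cheap fingerprint, not a cryptographic hash. A short sketch of how it might be called, mirroring the tests below that marshal a value to JSON first:

    package main

    import (
        "encoding/hex"
        "encoding/json"
        "fmt"
        "hash/crc32"
    )

    // generateHash is a standalone copy of str.GenerateHash, for illustration.
    func generateHash(input []byte) string {
        h := crc32.NewIEEE()
        h.Write(input)

        return hex.EncodeToString(h.Sum(nil))
    }

    func main() {
        // 8 hex chars, stable for the same input.
        fmt.Println(generateHash([]byte("test data")))

        // Struct values are hashed via their JSON encoding, as the tests do.
        bs, _ := json.Marshal(struct{ Text string }{Text: "test text"})
        fmt.Println(generateHash(bs))
    }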
@@ -1,9 +1,11 @@
package str

import (
    "encoding/json"
    "testing"

    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
)

// ---------------------------------------------------------------------------
@@ -51,3 +53,68 @@ func TestPreview(t *testing.T) {
        })
    }
}

// Test GenerateHash
func TestGenerateHash(t *testing.T) {
    type testStruct struct {
        Text   string
        Number int
        Status bool
    }

    table := []struct {
        name      string
        input1    any
        input2    any
        sameCheck bool
    }{
        {
            name:      "check if same hash is generated for same string input",
            input1:    "test data",
            sameCheck: true,
        },
        {
            name:      "check if same hash is generated for same struct input",
            input1:    testStruct{Text: "test text", Number: 1, Status: true},
            sameCheck: true,
        },
        {
            name:      "check if different hash is generated for different string input",
            input1:    "test data",
            input2:    "test data 2",
            sameCheck: false,
        },
        {
            name:      "check if different hash is generated for different struct input",
            input1:    testStruct{Text: "test text", Number: 1, Status: true},
            input2:    testStruct{Text: "test text 2", Number: 2, Status: false},
            sameCheck: false,
        },
    }

    for _, test := range table {
        var input1Bytes []byte

        var err error

        var hash1 string

        input1Bytes, err = json.Marshal(test.input1)
        require.NoError(t, err)

        hash1 = GenerateHash(input1Bytes)

        if test.sameCheck {
            hash2 := GenerateHash(input1Bytes)

            assert.Equal(t, hash1, hash2)
        } else {
            input2Bytes, err := json.Marshal(test.input2)
            require.NoError(t, err)

            hash2 := GenerateHash(input2Bytes)

            assert.NotEqual(t, hash1, hash2)
        }
    }
}
9  src/internal/common/str/testdata/str.go  vendored  Normal file
@@ -0,0 +1,9 @@
package testdata

import "github.com/google/uuid"

const hashLength = 7

func NewHashForRepoConfigName() string {
    return uuid.NewString()[:hashLength]
}
@@ -55,6 +55,14 @@ type BackupBase struct {
    Reasons []identity.Reasoner
}

func (bb BackupBase) GetReasons() []identity.Reasoner {
    return bb.Reasons
}

func (bb BackupBase) GetSnapshotID() manifest.ID {
    return bb.ItemDataSnapshot.ID
}

func (bb BackupBase) GetSnapshotTag(key string) (string, bool) {
    k, _ := makeTagKV(key)
    v, ok := bb.ItemDataSnapshot.Tags[k]
@@ -2,6 +2,7 @@ package kopia

import (
    "context"
    "fmt"
    "path/filepath"
    "sync"
    "time"
@@ -28,7 +29,7 @@ import (

const (
    defaultKopiaConfigDir = "/tmp/"
    defaultKopiaConfigFile = "repository.config"
    kopiaConfigFileTemplate = "repository-%s.config"
    defaultCompressor = "zstd-better-compression"
    // Interval of 0 disables scheduling.
    defaultSchedulingInterval = time.Second * 0
@@ -95,6 +96,7 @@ func (w *conn) Initialize(
    ctx context.Context,
    opts repository.Options,
    retentionOpts repository.Retention,
    repoNameHash string,
) error {
    bst, err := blobStoreByProvider(ctx, opts, w.storage)
    if err != nil {
@@ -135,6 +137,7 @@ func (w *conn) Initialize(
        ctx,
        opts,
        cfg.KopiaCfgDir,
        repoNameHash,
        bst,
        cfg.CorsoPassphrase,
        defaultCompressor)
@@ -152,7 +155,7 @@ func (w *conn) Initialize(
    return clues.Stack(w.setRetentionParameters(ctx, retentionOpts)).OrNil()
}

func (w *conn) Connect(ctx context.Context, opts repository.Options) error {
func (w *conn) Connect(ctx context.Context, opts repository.Options, repoNameHash string) error {
    bst, err := blobStoreByProvider(ctx, opts, w.storage)
    if err != nil {
        return clues.Wrap(err, "initializing storage")
@@ -168,6 +171,7 @@ func (w *conn) Connect(ctx context.Context, opts repository.Options) error {
        ctx,
        opts,
        cfg.KopiaCfgDir,
        repoNameHash,
        bst,
        cfg.CorsoPassphrase,
        defaultCompressor)
@@ -177,6 +181,7 @@ func (w *conn) commonConnect(
    ctx context.Context,
    opts repository.Options,
    configDir string,
    repoNameHash string,
    bst blob.Storage,
    password, compressor string,
) error {
@@ -196,7 +201,7 @@ func (w *conn) commonConnect(
        configDir = defaultKopiaConfigDir
    }

    cfgFile := filepath.Join(configDir, defaultKopiaConfigFile)
    cfgFile := filepath.Join(configDir, fmt.Sprintf(kopiaConfigFileTemplate, repoNameHash))

    // todo - issue #75: nil here should be storage.ConnectOptions()
    if err := repo.Connect(
@@ -579,13 +584,18 @@ func (w *conn) SnapshotRoot(man *snapshot.Manifest) (fs.Entry, error) {
    return snapshotfs.SnapshotRoot(w.Repository, man)
}

func (w *conn) UpdatePassword(ctx context.Context, password string, opts repository.Options) error {
func (w *conn) UpdatePassword(
    ctx context.Context,
    password string,
    opts repository.Options,
    repoNameHash string,
) error {
    if len(password) <= 0 {
        return clues.New("empty password provided")
    }

    kopiaRef := NewConn(w.storage)
    if err := kopiaRef.Connect(ctx, opts); err != nil {
    if err := kopiaRef.Connect(ctx, opts, repoNameHash); err != nil {
        return clues.Wrap(err, "connecting kopia client")
    }
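The net effect of threading repoNameHash through Initialize, Connect, and UpdatePassword is that each repo gets its own kopia config file instead of every repo sharing a single repository.config. A quick sketch of the file name the template produces (the hash value is invented):

    package main

    import (
        "fmt"
        "path/filepath"
    )

    const kopiaConfigFileTemplate = "repository-%s.config"

    func main() {
        // Illustrative 7-char value, e.g. from strTD.NewHashForRepoConfigName.
        repoNameHash := "a1b2c3d"

        cfgFile := filepath.Join("/tmp/", fmt.Sprintf(kopiaConfigFileTemplate, repoNameHash))
        fmt.Println(cfgFile) // /tmp/repository-a1b2c3d.config
    }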
@@ -16,6 +16,7 @@ import (
    "github.com/stretchr/testify/suite"

    "github.com/alcionai/corso/src/internal/common/ptr"
    strTD "github.com/alcionai/corso/src/internal/common/str/testdata"
    "github.com/alcionai/corso/src/internal/tester"
    "github.com/alcionai/corso/src/pkg/control/repository"
    "github.com/alcionai/corso/src/pkg/storage"
@@ -27,9 +28,10 @@ func openLocalKopiaRepo(
    ctx context.Context, //revive:disable-line:context-as-argument
) (*conn, error) {
    st := storeTD.NewFilesystemStorage(t)
    repoNameHash := strTD.NewHashForRepoConfigName()

    k := NewConn(st)
    if err := k.Initialize(ctx, repository.Options{}, repository.Retention{}); err != nil {
    if err := k.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash); err != nil {
        return nil, err
    }

@@ -41,9 +43,10 @@ func openKopiaRepo(
    ctx context.Context, //revive:disable-line:context-as-argument
) (*conn, error) {
    st := storeTD.NewPrefixedS3Storage(t)
    repoNameHash := strTD.NewHashForRepoConfigName()

    k := NewConn(st)
    if err := k.Initialize(ctx, repository.Options{}, repository.Retention{}); err != nil {
    if err := k.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash); err != nil {
        return nil, err
    }
@@ -91,6 +94,7 @@ func TestWrapperIntegrationSuite(t *testing.T) {

func (suite *WrapperIntegrationSuite) TestRepoExistsError() {
    t := suite.T()
    repoNameHash := strTD.NewHashForRepoConfigName()

    ctx, flush := tester.NewContext(t)
    defer flush()
@@ -98,19 +102,20 @@ func (suite *WrapperIntegrationSuite) TestRepoExistsError() {
    st := storeTD.NewFilesystemStorage(t)
    k := NewConn(st)

    err := k.Initialize(ctx, repository.Options{}, repository.Retention{})
    err := k.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
    require.NoError(t, err, clues.ToCore(err))

    err = k.Close(ctx)
    require.NoError(t, err, clues.ToCore(err))

    err = k.Initialize(ctx, repository.Options{}, repository.Retention{})
    err = k.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
    assert.Error(t, err, clues.ToCore(err))
    assert.ErrorIs(t, err, ErrorRepoAlreadyExists)
}

func (suite *WrapperIntegrationSuite) TestBadProviderErrors() {
    t := suite.T()
    repoNameHash := strTD.NewHashForRepoConfigName()

    ctx, flush := tester.NewContext(t)
    defer flush()
@@ -119,12 +124,13 @@ func (suite *WrapperIntegrationSuite) TestBadProviderErrors() {
    st.Provider = storage.ProviderUnknown
    k := NewConn(st)

    err := k.Initialize(ctx, repository.Options{}, repository.Retention{})
    err := k.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
    assert.Error(t, err, clues.ToCore(err))
}

func (suite *WrapperIntegrationSuite) TestConnectWithoutInitErrors() {
    t := suite.T()
    repoNameHash := strTD.NewHashForRepoConfigName()

    ctx, flush := tester.NewContext(t)
    defer flush()
@@ -132,7 +138,7 @@ func (suite *WrapperIntegrationSuite) TestConnectWithoutInitErrors() {
    st := storeTD.NewFilesystemStorage(t)
    k := NewConn(st)

    err := k.Connect(ctx, repository.Options{})
    err := k.Connect(ctx, repository.Options{}, repoNameHash)
    assert.Error(t, err, clues.ToCore(err))
}

@@ -282,6 +288,7 @@ func (suite *WrapperIntegrationSuite) TestConfigDefaultsSetOnInitAndNotOnConnect
    newRetentionDaily := policy.OptionalInt(42)
    newRetention := policy.RetentionPolicy{KeepDaily: &newRetentionDaily}
    newSchedInterval := time.Second * 42
    repoNameHash := strTD.NewHashForRepoConfigName()

    table := []struct {
        name string
@@ -376,7 +383,7 @@ func (suite *WrapperIntegrationSuite) TestConfigDefaultsSetOnInitAndNotOnConnect
    err = k.Close(ctx)
    require.NoError(t, err, clues.ToCore(err))

    err = k.Connect(ctx, repository.Options{})
    err = k.Connect(ctx, repository.Options{}, repoNameHash)
    require.NoError(t, err, clues.ToCore(err))

    defer func() {
@@ -393,6 +400,7 @@ func (suite *WrapperIntegrationSuite) TestConfigDefaultsSetOnInitAndNotOnConnect

func (suite *WrapperIntegrationSuite) TestInitAndConnWithTempDirectory() {
    t := suite.T()
    repoNameHash := strTD.NewHashForRepoConfigName()

    ctx, flush := tester.NewContext(t)
    defer flush()
@@ -404,7 +412,7 @@ func (suite *WrapperIntegrationSuite) TestInitAndConnWithTempDirectory() {
    require.NoError(t, err, clues.ToCore(err))

    // Re-open with Connect.
    err = k.Connect(ctx, repository.Options{})
    err = k.Connect(ctx, repository.Options{}, repoNameHash)
    require.NoError(t, err, clues.ToCore(err))

    err = k.Close(ctx)
@@ -413,6 +421,7 @@ func (suite *WrapperIntegrationSuite) TestInitAndConnWithTempDirectory() {

func (suite *WrapperIntegrationSuite) TestSetUserAndHost() {
    t := suite.T()
    repoNameHash := strTD.NewHashForRepoConfigName()

    ctx, flush := tester.NewContext(t)
    defer flush()
@@ -425,7 +434,7 @@ func (suite *WrapperIntegrationSuite) TestSetUserAndHost() {
    st := storeTD.NewFilesystemStorage(t)
    k := NewConn(st)

    err := k.Initialize(ctx, opts, repository.Retention{})
    err := k.Initialize(ctx, opts, repository.Retention{}, repoNameHash)
    require.NoError(t, err, clues.ToCore(err))

    kopiaOpts := k.ClientOptions()
@@ -439,7 +448,7 @@ func (suite *WrapperIntegrationSuite) TestSetUserAndHost() {
    opts.User = "hello"
    opts.Host = "world"

    err = k.Connect(ctx, opts)
    err = k.Connect(ctx, opts, repoNameHash)
    require.NoError(t, err, clues.ToCore(err))

    kopiaOpts = k.ClientOptions()
@@ -453,7 +462,7 @@ func (suite *WrapperIntegrationSuite) TestSetUserAndHost() {
    opts.User = ""
    opts.Host = ""

    err = k.Connect(ctx, opts)
    err = k.Connect(ctx, opts, repoNameHash)
    require.NoError(t, err, clues.ToCore(err))

    kopiaOpts = k.ClientOptions()
@@ -485,6 +494,7 @@ func TestConnRetentionIntegrationSuite(t *testing.T) {
// from the default values that kopia uses.
func (suite *ConnRetentionIntegrationSuite) TestInitWithAndWithoutRetention() {
    t := suite.T()
    repoNameHash := strTD.NewHashForRepoConfigName()

    ctx, flush := tester.NewContext(t)
    defer flush()
@@ -492,7 +502,7 @@ func (suite *ConnRetentionIntegrationSuite) TestInitWithAndWithoutRetention() {
    st1 := storeTD.NewPrefixedS3Storage(t)

    k1 := NewConn(st1)
    err := k1.Initialize(ctx, repository.Options{}, repository.Retention{})
    err := k1.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
    require.NoError(t, err, "initializing repo 1: %v", clues.ToCore(err))

    st2 := storeTD.NewPrefixedS3Storage(t)
@@ -505,7 +515,8 @@ func (suite *ConnRetentionIntegrationSuite) TestInitWithAndWithoutRetention() {
            Mode: ptr.To(repository.GovernanceRetention),
            Duration: ptr.To(time.Hour * 48),
            Extend: ptr.To(true),
        })
        },
        repoNameHash)
    require.NoError(t, err, "initializing repo 2: %v", clues.ToCore(err))

    dr1, ok := k1.Repository.(repo.DirectRepository)

@@ -14,6 +14,7 @@ import (
    "github.com/stretchr/testify/require"
    "github.com/stretchr/testify/suite"

    strTD "github.com/alcionai/corso/src/internal/common/str/testdata"
    "github.com/alcionai/corso/src/internal/data"
    "github.com/alcionai/corso/src/internal/model"
    "github.com/alcionai/corso/src/internal/tester"
@@ -858,8 +859,9 @@ func openConnAndModelStore(
) (*conn, *ModelStore) {
    st := storeTD.NewFilesystemStorage(t)
    c := NewConn(st)
    repoNameHash := strTD.NewHashForRepoConfigName()

    err := c.Initialize(ctx, repository.Options{}, repository.Retention{})
    err := c.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
    require.NoError(t, err, clues.ToCore(err))

    defer func() {
@@ -878,7 +880,8 @@ func reconnectToModelStore(
    ctx context.Context, //revive:disable-line:context-as-argument
    c *conn,
) *ModelStore {
    err := c.Connect(ctx, repository.Options{})
    repoNameHash := strTD.NewHashForRepoConfigName()
    err := c.Connect(ctx, repository.Options{}, repoNameHash)
    require.NoError(t, err, clues.ToCore(err))

    defer func() {
@@ -20,13 +20,13 @@ import (
    "github.com/alcionai/corso/src/internal/common/ptr"
    "github.com/alcionai/corso/src/internal/data"
    "github.com/alcionai/corso/src/internal/diagnostics"
    "github.com/alcionai/corso/src/internal/m365/graph"
    "github.com/alcionai/corso/src/internal/m365/graph/metadata"
    "github.com/alcionai/corso/src/pkg/backup/details"
    "github.com/alcionai/corso/src/pkg/count"
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/logger"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/services/m365/api/graph"
    "github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
)

const maxInflateTraversalDepth = 500
@@ -723,16 +723,25 @@ func addMergeLocation(col data.BackupCollection, toMerge *mergeDetails) error {
    return nil
}

type pathUpdate struct {
    p     path.Path
    state data.CollectionState
}

func inflateCollectionTree(
    ctx context.Context,
    collections []data.BackupCollection,
    toMerge *mergeDetails,
) (map[string]*treeMap, map[string]path.Path, error) {
) (map[string]*treeMap, map[string]pathUpdate, error) {
    // failed is temporary and just allows us to log all conflicts before
    // returning an error.
    var firstErr error

    roots := make(map[string]*treeMap)
    // Contains the old path for collections that are not new.
    // Allows resolving what the new path should be when walking the base
    // snapshot(s)'s hierarchy. Nil represents a collection that was deleted.
    updatedPaths := make(map[string]path.Path)
    updatedPaths := make(map[string]pathUpdate)
    // Temporary variable just to track the things that have been marked as
    // changed while keeping a reference to their path.
    changedPaths := []path.Path{}
@@ -752,24 +761,39 @@ func inflateCollectionTree(

            changedPaths = append(changedPaths, s.PreviousPath())

            if _, ok := updatedPaths[s.PreviousPath().String()]; ok {
                return nil, nil, clues.New("multiple previous state changes to collection").
                    WithClues(ictx)
            if p, ok := updatedPaths[s.PreviousPath().String()]; ok {
                err := clues.New("multiple previous state changes").
                    WithClues(ictx).
                    With("updated_path", p, "current_state", data.DeletedState)
                logger.CtxErr(ictx, err).Error("previous path state collision")

                if firstErr == nil {
                    firstErr = err
                }
            }

            updatedPaths[s.PreviousPath().String()] = nil
            updatedPaths[s.PreviousPath().String()] = pathUpdate{state: data.DeletedState}

            continue

        case data.MovedState:
            changedPaths = append(changedPaths, s.PreviousPath())

            if _, ok := updatedPaths[s.PreviousPath().String()]; ok {
                return nil, nil, clues.New("multiple previous state changes to collection").
                    WithClues(ictx)
            if p, ok := updatedPaths[s.PreviousPath().String()]; ok {
                err := clues.New("multiple previous state changes").
                    WithClues(ictx).
                    With("updated_path", p, "current_state", data.MovedState)
                logger.CtxErr(ictx, err).Error("previous path state collision")

                if firstErr == nil {
                    firstErr = err
                }
            }

            updatedPaths[s.PreviousPath().String()] = s.FullPath()
            updatedPaths[s.PreviousPath().String()] = pathUpdate{
                p:     s.FullPath(),
                state: data.MovedState,
            }

            // Only safe when collections are moved since we only need prefix matching
            // if a nested folder's path changed in some way that didn't generate a
@@ -780,14 +804,24 @@ func inflateCollectionTree(
                return nil, nil, clues.Wrap(err, "adding merge location").
                    WithClues(ictx)
            }

        case data.NotMovedState:
            p := s.PreviousPath().String()
            if _, ok := updatedPaths[p]; ok {
                return nil, nil, clues.New("multiple previous state changes to collection").
                    WithClues(ictx)
            if p, ok := updatedPaths[p]; ok {
                err := clues.New("multiple previous state changes").
                    WithClues(ictx).
                    With("updated_path", p, "current_state", data.NotMovedState)
                logger.CtxErr(ictx, err).Error("previous path state collision")

                if firstErr == nil {
                    firstErr = err
                }
            }

            updatedPaths[p] = s.FullPath()
            updatedPaths[p] = pathUpdate{
                p:     s.FullPath(),
                state: data.NotMovedState,
            }
        }

        if s.FullPath() == nil || len(s.FullPath().Elements()) == 0 {
@@ -821,18 +855,22 @@ func inflateCollectionTree(
        }

        if node.collection != nil && node.collection.State() == data.NotMovedState {
            return nil, nil, clues.New("conflicting states for collection").
                WithClues(ctx).
                With("changed_path", p)
            err := clues.New("conflicting states for collection").
                WithClues(ctx)
            logger.CtxErr(ctx, err).Error("adding node to tree")

            if firstErr == nil {
                firstErr = err
            }
        }
    }

    return roots, updatedPaths, nil
    return roots, updatedPaths, clues.Stack(firstErr).OrNil()
}
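The rewritten error handling above swaps an early return for a record-the-first-error-and-keep-going pattern, so every path collision gets logged before the function finally fails. A distilled sketch of that pattern (the item type and validation rule are invented):

    package main

    import "fmt"

    func processAll(items []int) error {
        // firstErr is remembered but does not stop the loop, so every
        // failure is reported before the function returns.
        var firstErr error

        for _, it := range items {
            if it < 0 { // invented validation rule
                err := fmt.Errorf("invalid item: %d", it)
                fmt.Println("logged:", err)

                if firstErr == nil {
                    firstErr = err
                }

                continue
            }

            fmt.Println("processed:", it)
        }

        return firstErr
    }

    func main() {
        // Both -2 and -4 are logged; only the -2 error is returned.
        fmt.Println("result:", processAll([]int{1, -2, 3, -4}))
    }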
func subtreeChanged(
    roots map[string]*treeMap,
    updatedPaths map[string]path.Path,
    updatedPaths map[string]pathUpdate,
    oldDirPath *path.Builder,
    currentPath *path.Builder,
) bool {
@@ -889,7 +927,7 @@ func subtreeChanged(
func traverseBaseDir(
    ctx context.Context,
    depth int,
    updatedPaths map[string]path.Path,
    updatedPaths map[string]pathUpdate,
    oldDirPath *path.Builder,
    expectedDirPath *path.Builder,
    dir fs.Directory,
@@ -934,14 +972,14 @@ func traverseBaseDir(

    if upb, ok := updatedPaths[oldDirPath.String()]; ok {
        // This directory was deleted.
        if upb == nil {
        if upb.p == nil {
            currentPath = nil

            stats.Inc(statDel)
        } else {
            // This directory was explicitly mentioned and the new (possibly
            // unchanged) location is in upb.
            currentPath = upb.ToBuilder()
            currentPath = upb.p.ToBuilder()

            // Below we check if the collection was marked as new or DoNotMerge which
            // disables merging behavior. That means we can't directly update stats
@@ -1087,7 +1125,7 @@ func inflateBaseTree(
    ctx context.Context,
    loader snapshotLoader,
    base BackupBase,
    updatedPaths map[string]path.Path,
    updatedPaths map[string]pathUpdate,
    roots map[string]*treeMap,
) error {
    bupID := "no_backup_id"
@@ -1158,8 +1196,8 @@ func inflateBaseTree(
    // otherwise unchecked in tree inflation below this point.
    newSubtreePath := subtreePath.ToBuilder()

    if p, ok := updatedPaths[subtreePath.String()]; ok {
        newSubtreePath = p.ToBuilder()
    if up, ok := updatedPaths[subtreePath.String()]; ok {
        newSubtreePath = up.p.ToBuilder()
    }

    stats := count.New()
@@ -376,11 +376,15 @@ func getDir(
        return nil, clues.Wrap(ErrNoRestorePath, "getting directory").WithClues(ctx)
    }

    toGet := dirPath.PopFront()

    ctx = clues.Add(ctx, "entry_path", toGet)

    // GetNestedEntry handles nil properly.
    e, err := snapshotfs.GetNestedEntry(
        ctx,
        snapshotRoot,
        encodeElements(dirPath.PopFront().Elements()...))
        encodeElements(toGet.Elements()...))
    if err != nil {
        if isErrEntryNotFound(err) {
            err = clues.Stack(data.ErrNotFound, err).WithClues(ctx)
@ -23,6 +23,7 @@ import (
|
||||
|
||||
pmMock "github.com/alcionai/corso/src/internal/common/prefixmatcher/mock"
|
||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||
strTD "github.com/alcionai/corso/src/internal/common/str/testdata"
|
||||
"github.com/alcionai/corso/src/internal/data"
|
||||
dataMock "github.com/alcionai/corso/src/internal/data/mock"
|
||||
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
|
||||
@ -202,6 +203,7 @@ func (suite *BasicKopiaIntegrationSuite) TestMaintenance_FirstRun_NoChanges() {
|
||||
|
||||
func (suite *BasicKopiaIntegrationSuite) TestMaintenance_WrongUser_NoForce_Fails() {
|
||||
t := suite.T()
|
||||
repoNameHash := strTD.NewHashForRepoConfigName()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
@ -228,7 +230,7 @@ func (suite *BasicKopiaIntegrationSuite) TestMaintenance_WrongUser_NoForce_Fails
|
||||
Host: "bar",
|
||||
}
|
||||
|
||||
err = k.Connect(ctx, opts)
|
||||
err = k.Connect(ctx, opts, repoNameHash)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
var notOwnedErr maintenance.NotOwnedError
|
||||
@ -239,6 +241,7 @@ func (suite *BasicKopiaIntegrationSuite) TestMaintenance_WrongUser_NoForce_Fails
|
||||
|
||||
func (suite *BasicKopiaIntegrationSuite) TestMaintenance_WrongUser_Force_Succeeds() {
|
||||
t := suite.T()
|
||||
repoNameHash := strTD.NewHashForRepoConfigName()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
@ -265,7 +268,7 @@ func (suite *BasicKopiaIntegrationSuite) TestMaintenance_WrongUser_Force_Succeed
|
||||
Host: "bar",
|
||||
}
|
||||
|
||||
err = k.Connect(ctx, opts)
|
||||
err = k.Connect(ctx, opts, repoNameHash)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
mOpts.Force = true
|
||||
@ -286,6 +289,7 @@ func (suite *BasicKopiaIntegrationSuite) TestMaintenance_WrongUser_Force_Succeed
|
||||
// blobs as there's several of them, but at least this gives us something.
|
||||
func (suite *BasicKopiaIntegrationSuite) TestSetRetentionParameters_NoChangesOnFailure() {
|
||||
t := suite.T()
|
||||
repoNameHash := strTD.NewHashForRepoConfigName()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
@ -318,7 +322,7 @@ func (suite *BasicKopiaIntegrationSuite) TestSetRetentionParameters_NoChangesOnF
|
||||
k.Close(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
err = k.Connect(ctx, repository.Options{})
|
||||
err = k.Connect(ctx, repository.Options{}, repoNameHash)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
defer k.Close(ctx)
|
||||
@ -375,6 +379,7 @@ func checkRetentionParams(
|
||||
//revive:disable-next-line:context-as-argument
|
||||
func mustReopen(t *testing.T, ctx context.Context, w *Wrapper) {
|
||||
k := w.c
|
||||
repoNameHash := strTD.NewHashForRepoConfigName()
|
||||
|
||||
err := w.Close(ctx)
|
||||
require.NoError(t, err, "closing wrapper: %v", clues.ToCore(err))
|
||||
@ -382,7 +387,7 @@ func mustReopen(t *testing.T, ctx context.Context, w *Wrapper) {
|
||||
err = k.Close(ctx)
|
||||
require.NoError(t, err, "closing conn: %v", clues.ToCore(err))
|
||||
|
||||
err = k.Connect(ctx, repository.Options{})
|
||||
err = k.Connect(ctx, repository.Options{}, repoNameHash)
|
||||
require.NoError(t, err, "reconnecting conn: %v", clues.ToCore(err))
|
||||
|
||||
w.c = k
|
||||
|
||||
@ -8,9 +8,7 @@ import (
|
||||
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
|
||||
"github.com/alcionai/corso/src/internal/data"
|
||||
"github.com/alcionai/corso/src/internal/diagnostics"
|
||||
"github.com/alcionai/corso/src/internal/kopia"
|
||||
kinject "github.com/alcionai/corso/src/internal/kopia/inject"
|
||||
"github.com/alcionai/corso/src/internal/m365/graph"
|
||||
"github.com/alcionai/corso/src/internal/m365/service/exchange"
|
||||
"github.com/alcionai/corso/src/internal/m365/service/groups"
|
||||
"github.com/alcionai/corso/src/internal/m365/service/onedrive"
|
||||
@ -22,6 +20,7 @@ import (
|
||||
"github.com/alcionai/corso/src/pkg/filters"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
|
||||
)
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
@ -179,7 +178,7 @@ func verifyBackupInputs(sels selectors.Selector, cachedIDs []string) error {
|
||||
func (ctrl *Controller) GetMetadataPaths(
|
||||
ctx context.Context,
|
||||
r kinject.RestoreProducer,
|
||||
base kopia.BackupBase,
|
||||
base inject.ReasonAndSnapshotIDer,
|
||||
errs *fault.Bus,
|
||||
) ([]path.RestorePaths, error) {
|
||||
var (
|
||||
@ -187,12 +186,12 @@ func (ctrl *Controller) GetMetadataPaths(
|
||||
err error
|
||||
)
|
||||
|
||||
for _, reason := range base.Reasons {
|
||||
for _, reason := range base.GetReasons() {
|
||||
filePaths := [][]string{}
|
||||
|
||||
switch true {
|
||||
case reason.Service() == path.GroupsService && reason.Category() == path.LibrariesCategory:
|
||||
filePaths, err = groups.MetadataFiles(ctx, reason, r, base.ItemDataSnapshot.ID, errs)
|
||||
filePaths, err = groups.MetadataFiles(ctx, reason, r, base.GetSnapshotID(), errs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@ -18,7 +18,6 @@ import (
|
||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||
"github.com/alcionai/corso/src/internal/data"
|
||||
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
|
||||
"github.com/alcionai/corso/src/internal/m365/graph"
|
||||
"github.com/alcionai/corso/src/internal/m365/support"
|
||||
"github.com/alcionai/corso/src/internal/observe"
|
||||
"github.com/alcionai/corso/src/pkg/backup/details"
|
||||
@ -28,6 +27,7 @@ import (
|
||||
"github.com/alcionai/corso/src/pkg/logger"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
|
||||
)
|
||||
|
||||
const (
|
||||
|
||||
@ -23,7 +23,6 @@ import (
|
||||
"github.com/alcionai/corso/src/internal/data"
|
||||
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
|
||||
metaTD "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata/testdata"
|
||||
"github.com/alcionai/corso/src/internal/m365/graph"
|
||||
"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
|
||||
odTD "github.com/alcionai/corso/src/internal/m365/service/onedrive/testdata"
|
||||
"github.com/alcionai/corso/src/internal/m365/support"
|
||||
@ -33,6 +32,7 @@ import (
|
||||
"github.com/alcionai/corso/src/pkg/extensions"
|
||||
"github.com/alcionai/corso/src/pkg/fault"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
|
||||
)
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
@ -16,7 +16,6 @@ import (
|
||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||
"github.com/alcionai/corso/src/internal/data"
|
||||
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
|
||||
"github.com/alcionai/corso/src/internal/m365/graph"
|
||||
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
|
||||
"github.com/alcionai/corso/src/internal/m365/support"
|
||||
"github.com/alcionai/corso/src/internal/observe"
|
||||
@ -27,6 +26,7 @@ import (
|
||||
"github.com/alcionai/corso/src/pkg/logger"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
|
||||
)
|
||||
|
||||
@ -72,7 +72,88 @@ func NewCollections(
|
||||
}
|
||||
}
|
||||
|
||||
func deserializeMetadata(
|
||||
func deserializeAndValidateMetadata(
|
||||
ctx context.Context,
|
||||
cols []data.RestoreCollection,
|
||||
fb *fault.Bus,
|
||||
) (map[string]string, map[string]map[string]string, bool, error) {
|
||||
deltas, prevs, canUse, err := DeserializeMetadata(ctx, cols)
|
||||
if err != nil || !canUse {
|
||||
return deltas, prevs, false, clues.Stack(err).OrNil()
|
||||
}
|
||||
|
||||
// Go through and remove delta tokens if we didn't have any paths for them
|
||||
// or one or more paths are empty (incorrect somehow). This will ensure we
|
||||
// don't accidentally try to pull in delta results when we should have
|
||||
// enumerated everything instead.
|
||||
//
|
||||
// Loop over the set of previous deltas because it's alright to have paths
|
||||
// without a delta but not to have a delta without paths. This way ensures
|
||||
// we check at least all the path sets for the deltas we have.
|
||||
for drive := range deltas {
|
||||
ictx := clues.Add(ctx, "drive_id", drive)
|
||||
|
||||
paths := prevs[drive]
|
||||
if len(paths) == 0 {
|
||||
logger.Ctx(ictx).Info("dropping drive delta due to 0 prev paths")
|
||||
delete(deltas, drive)
|
||||
}
|
||||
|
||||
// Drives have only a single delta token. If we find any folder that
|
||||
// seems like the path is bad we need to drop the entire token and start
|
||||
// fresh. Since we know the token will be gone we can also stop checking
|
||||
// for other possibly incorrect folder paths.
|
||||
for _, prevPath := range paths {
|
||||
if len(prevPath) == 0 {
|
||||
logger.Ctx(ictx).Info("dropping drive delta due to 0 len path")
|
||||
delete(deltas, drive)
|
||||
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
alertIfPrevPathsHaveCollisions(ctx, prevs, fb)
|
||||
|
||||
return deltas, prevs, canUse, nil
|
||||
}
|
||||
|
||||
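This validation is what backs the changelog note about automatically re-running a full delta query when the prior backup's state is malformed. A small worked example of the rule; drive and folder names are illustrative only:

    deltas := map[string]string{"drive1": "tokenA", "drive2": "tokenB"}
    prevs := map[string]map[string]string{
        "drive1": {"folder1": "/root/a"},
        "drive2": {"folder2": ""}, // malformed: empty previous path
    }

    // After validation, drive2's token is dropped, so the next backup
    // enumerates that drive from scratch instead of trusting the delta.
    // drive1 keeps both its token and its paths.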
func alertIfPrevPathsHaveCollisions(
ctx context.Context,
prevs map[string]map[string]string,
fb *fault.Bus,
) {
for driveID, folders := range prevs {
prevPathCollisions := map[string]string{}

for fid, prev := range folders {
if otherID, collision := prevPathCollisions[prev]; collision {
ctx = clues.Add(
ctx,
"collision_folder_id_1", fid,
"collision_folder_id_2", otherID,
"collision_drive_id", driveID,
"collision_prev_path", path.LoggableDir(prev))

fb.AddAlert(ctx, fault.NewAlert(
fault.AlertPreviousPathCollision,
"", // no namespace
"", // no item id
"previousPaths",
map[string]any{
"collision_folder_id_1": fid,
"collision_folder_id_2": otherID,
"collision_drive_id": driveID,
"collision_prev_path": prev,
}))
}

prevPathCollisions[prev] = fid
}
}
}

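The detection itself is a single-pass inverted index: build a prevPath -> folderID map and flag any path claimed twice. A stripped-down sketch of that core, with the alerting detail elided:

    seen := map[string]string{} // prevPath -> folderID

    for fid, prev := range folders {
        if otherID, ok := seen[prev]; ok {
            // fid and otherID both claim prev; raise a fault alert
            // carrying both IDs, as the full function above does
            _ = otherID
        }

        seen[prev] = fid
    }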
func DeserializeMetadata(
ctx context.Context,
cols []data.RestoreCollection,
) (map[string]string, map[string]map[string]string, bool, error) {
@@ -96,7 +177,7 @@ func deserializeMetadata(
for breakLoop := false; !breakLoop; {
select {
case <-ctx.Done():
return nil, nil, false, clues.Wrap(ctx.Err(), "deserialzing previous backup metadata").WithClues(ctx)
return nil, nil, false, clues.Wrap(ctx.Err(), "deserializing previous backup metadata").WithClues(ctx)

case item, ok := <-items:
if !ok {
@@ -137,32 +218,6 @@ func deserializeMetadata(
}
}
}

// Go through and remove delta tokens if we didn't have any paths for them
// or one or more paths are empty (incorrect somehow). This will ensure we
// don't accidentally try to pull in delta results when we should have
// enumerated everything instead.
//
// Loop over the set of previous deltas because it's alright to have paths
// without a delta but not to have a delta without paths. This way ensures
// we check at least all the path sets for the deltas we have.
for drive := range prevDeltas {
paths := prevFolders[drive]
if len(paths) == 0 {
delete(prevDeltas, drive)
}

// Drives have only a single delta token. If we find any folder that
// seems like the path is bad we need to drop the entire token and start
// fresh. Since we know the token will be gone we can also stop checking
// for other possibly incorrect folder paths.
for _, prevPath := range paths {
if len(prevPath) == 0 {
delete(prevDeltas, drive)
break
}
}
}
}

// if reads from items failed, return empty but no error
@@ -215,7 +270,7 @@ func (c *Collections) Get(
ssmb *prefixmatcher.StringSetMatchBuilder,
errs *fault.Bus,
) ([]data.BackupCollection, bool, error) {
prevDriveIDToDelta, oldPrevPathsByDriveID, canUsePrevBackup, err := deserializeMetadata(ctx, prevMetadata)
deltasByDriveID, prevPathsByDriveID, canUsePrevBackup, err := deserializeAndValidateMetadata(ctx, prevMetadata, errs)
if err != nil {
return nil, false, err
}
@@ -224,7 +279,7 @@ func (c *Collections) Get(

driveTombstones := map[string]struct{}{}

for driveID := range oldPrevPathsByDriveID {
for driveID := range prevPathsByDriveID {
driveTombstones[driveID] = struct{}{}
}

@@ -257,8 +312,8 @@ func (c *Collections) Get(
"drive_name", clues.Hide(driveName))

excludedItemIDs = map[string]struct{}{}
oldPrevPaths = oldPrevPathsByDriveID[driveID]
prevDeltaLink = prevDriveIDToDelta[driveID]
oldPrevPaths = prevPathsByDriveID[driveID]
prevDeltaLink = deltasByDriveID[driveID]

// packagePaths is keyed by folder paths to a parent directory
// which is marked as a package by its driveItem GetPackage
@@ -280,7 +335,7 @@ func (c *Collections) Get(

logger.Ctx(ictx).Infow(
"previous metadata for drive",
"num_paths_entries", len(oldPrevPaths))
"count_old_prev_paths", len(oldPrevPaths))

du, newPrevPaths, err := c.PopulateDriveCollections(
ctx,
@@ -313,23 +368,34 @@ func (c *Collections) Get(

logger.Ctx(ictx).Infow(
"persisted metadata for drive",
"num_new_paths_entries", len(newPrevPaths),
"count_new_prev_paths", len(newPrevPaths),
"delta_reset", du.Reset)

numDriveItems := c.NumItems - numPrevItems
numPrevItems = c.NumItems

// Attach an url cache
// Attach an url cache to the drive if the number of discovered items is
// below the threshold. Attaching cache to larger drives can cause
// performance issues since cache delta queries start taking up majority of
// the hour the refreshed URLs are valid for.
if numDriveItems < urlCacheDriveItemThreshold {
logger.Ctx(ictx).Info("adding url cache for drive")
logger.Ctx(ictx).Infow(
"adding url cache for drive",
"num_drive_items", numDriveItems)

err = c.addURLCacheToDriveCollections(
ictx,
uc, err := newURLCache(
driveID,
prevDeltaLink,
urlCacheRefreshInterval,
c.handler,
errs)
if err != nil {
return nil, false, err
return nil, false, clues.Stack(err)
}

// Set the URL cache instance for all collections in this drive.
for id := range c.CollectionMap[driveID] {
c.CollectionMap[driveID][id].urlCache = uc
}
}

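Beyond inlining the helper, this hunk narrows the cache's scope. The removed addURLCacheToDriveCollections (its deletion appears a few hunks below) ranged over every drive in c.CollectionMap, so each pass stamped its cache onto all drives' collections; the inline loop touches only the current drive. Side by side, taken directly from the diff:

    // old helper: every collection in every drive shared the last cache built
    for _, driveColls := range c.CollectionMap {
        for _, coll := range driveColls {
            coll.urlCache = uc
        }
    }

    // new inline form: only the current drive's collections get this cache
    for id := range c.CollectionMap[driveID] {
        c.CollectionMap[driveID][id].urlCache = uc
    }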
@@ -426,6 +492,8 @@ func (c *Collections) Get(
collections = append(collections, coll)
}

alertIfPrevPathsHaveCollisions(ctx, driveIDToPrevPaths, errs)

// add metadata collections
pathPrefix, err := c.handler.MetadataPathPrefix(c.tenantID)
if err != nil {
@@ -454,36 +522,11 @@ func (c *Collections) Get(
collections = append(collections, md)
}

logger.Ctx(ctx).Infow("produced collections", "count_collections", len(collections))

return collections, canUsePrevBackup, nil
}

// addURLCacheToDriveCollections adds an URL cache to all collections belonging to
// a drive.
func (c *Collections) addURLCacheToDriveCollections(
ctx context.Context,
driveID, prevDelta string,
errs *fault.Bus,
) error {
uc, err := newURLCache(
driveID,
prevDelta,
urlCacheRefreshInterval,
c.handler,
errs)
if err != nil {
return err
}

// Set the URL cache for all collections in this drive
for _, driveColls := range c.CollectionMap {
for _, coll := range driveColls {
coll.urlCache = uc
}
}

return nil
}

func updateCollectionPaths(
driveID, itemID string,
cmap map[string]map[string]*Collection,
@@ -696,6 +739,8 @@ func (c *Collections) PopulateDriveCollections(
seenFolders = map[string]string{}
)

ctx = clues.Add(ctx, "invalid_prev_delta", invalidPrevDelta)

if !invalidPrevDelta {
maps.Copy(newPrevPaths, oldPrevPaths)
}
@@ -714,8 +759,10 @@ func (c *Collections) PopulateDriveCollections(
}

if reset {
ctx = clues.Add(ctx, "delta_reset_occurred", true)
newPrevPaths = map[string]string{}
currPrevPaths = map[string]string{}
seenFolders = map[string]string{}
c.CollectionMap[driveID] = map[string]*Collection{}
invalidPrevDelta = true
}
@@ -883,6 +930,10 @@ func (c *Collections) processItem(
delete(newPrevPaths, alreadyHandledFolderID)
}

if invalidPrevDelta {
prevPath = nil
}

seenFolders[collectionPath.String()] = itemID

col, err := NewCollection(
@@ -1012,13 +1063,13 @@ func includePath(ctx context.Context, dsc dirScopeChecker, folderPath path.Path)
}

func updatePath(paths map[string]string, id, newPath string) {
oldPath := paths[id]
if len(oldPath) == 0 {
currPath := paths[id]
if len(currPath) == 0 {
paths[id] = newPath
return
}

if oldPath == newPath {
if currPath == newPath {
return
}

@@ -1027,10 +1078,10 @@ func updatePath(paths map[string]string, id, newPath string) {
// other components should take care of that. We do need to ensure that the
// resulting map contains all folders though so we know the next time around.
for folderID, p := range paths {
if !strings.HasPrefix(p, oldPath) {
if !strings.HasPrefix(p, currPath) {
continue
}

paths[folderID] = strings.Replace(p, oldPath, newPath, 1)
paths[folderID] = strings.Replace(p, currPath, newPath, 1)
}
}

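The rename from oldPath to currPath is cosmetic, but the prefix rewrite is easy to misread, so a worked example may help; folder IDs and paths are illustrative:

    paths := map[string]string{
        "f1": "/root/a",
        "f2": "/root/a/b",
    }

    updatePath(paths, "f1", "/root/x")

    // f1 becomes "/root/x", and because f2 shares the moved folder's old
    // prefix, strings.Replace(p, currPath, newPath, 1) rewrites it to
    // "/root/x/b" in the same pass, keeping descendants consistent.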
File diff suppressed because it is too large

32 src/internal/m365/collection/drive/debug.go Normal file
@@ -0,0 +1,32 @@
package drive

import (
"context"

"github.com/alcionai/clues"

"github.com/alcionai/corso/src/internal/data"
bupMD "github.com/alcionai/corso/src/pkg/backup/metadata"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/store"
)

func DeserializeMetadataFiles(
ctx context.Context,
colls []data.RestoreCollection,
) ([]store.MetadataFile, error) {
deltas, prevs, _, err := deserializeAndValidateMetadata(ctx, colls, fault.New(true))

files := []store.MetadataFile{
{
Name: bupMD.PreviousPathFileName,
Data: prevs,
},
{
Name: bupMD.DeltaURLsFileName,
Data: deltas,
},
}

return files, clues.Stack(err).OrNil()
}
@@ -16,10 +16,10 @@ import (
"github.com/alcionai/corso/src/internal/common/readers"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

const (

@@ -13,7 +13,6 @@ import (

"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
@@ -22,6 +21,7 @@ import (
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api/mock"
)

@@ -17,7 +17,6 @@ import (
"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/control"
@@ -25,6 +24,7 @@ import (
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

type ItemIntegrationSuite struct {

@@ -12,11 +12,11 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

func getParentMetadata(

@@ -18,7 +18,6 @@ import (
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/internal/operations/inject"
@@ -30,6 +29,7 @@ import (
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

const (

@@ -11,8 +11,8 @@ import (
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

type driveInfo struct {

@@ -14,7 +14,6 @@ import (
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr"
dataMock "github.com/alcionai/corso/src/internal/data/mock"
"github.com/alcionai/corso/src/internal/m365/graph"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
odMock "github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
odStub "github.com/alcionai/corso/src/internal/m365/service/onedrive/stub"
@@ -26,6 +25,7 @@ import (
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
"github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
)

@@ -18,7 +18,6 @@ import (

"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
@@ -27,6 +26,7 @@ import (
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
)

@@ -55,6 +55,8 @@ func (suite *URLCacheIntegrationSuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()

graph.InitializeConcurrencyLimiter(ctx, true, 4)

suite.user = tconfig.SecondaryM365UserID(t)

acct := tconfig.NewM365Account(t)
@@ -111,9 +113,14 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
Select: api.URLCacheDriveItemProps(),
})

// normally we'd page through all the pager.NextPage
// enumerations first. But Results should make sure
// that we don't need to drain lower-level communication first.
// We need to go through all the pages of results so we don't get stuck. This
// is the only way to get a delta token since getting one requires going
// through all request results.
//
//revive:disable-next-line:empty-block
for _, _, done := pager.NextPage(); !done; _, _, done = pager.NextPage() {
}

du, err := pager.Results()
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, du.URL)
@@ -531,7 +538,7 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
pages: []mock.NextPage{
{Items: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
driveItem("2", "folder2", "root", "root", false, true, false),
driveItem("2", "folder2", "root", "root", isFolder),
}},
},
expectedItemProps: map[string]itemProps{

@@ -8,7 +8,6 @@ import (
"github.com/alcionai/corso/src/internal/common/pii"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/internal/operations/inject"
@@ -18,6 +17,8 @@ import (
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
)

@@ -157,14 +158,18 @@ func populateCollections(

ictx = clues.Add(ictx, "previous_path", prevPath)

added, _, removed, newDelta, err := bh.itemEnumerator().
cc := api.CallConfig{
CanMakeDeltaQueries: !ctrlOpts.ToggleFeatures.DisableDelta,
UseImmutableIDs: ctrlOpts.ToggleFeatures.ExchangeImmutableIDs,
}

addAndRem, err := bh.itemEnumerator().
GetAddedAndRemovedItemIDs(
ictx,
qp.ProtectedResource.ID(),
cID,
prevDelta,
ctrlOpts.ToggleFeatures.ExchangeImmutableIDs,
!ctrlOpts.ToggleFeatures.DisableDelta)
cc)
if err != nil {
if !graph.IsErrDeletedInFlight(err) {
el.AddRecoverable(ctx, clues.Stack(err).Label(fault.LabelForceNoBackupCreation))
@@ -176,12 +181,12 @@ func populateCollections(
// to reset. This prevents any old items from being retained in
// storage. If the container (or its children) are sill missing
// on the next backup, they'll get tombstoned.
newDelta = pagers.DeltaUpdate{Reset: true}
addAndRem.DU = pagers.DeltaUpdate{Reset: true}
}

if len(newDelta.URL) > 0 {
deltaURLs[cID] = newDelta.URL
} else if !newDelta.Reset {
if len(addAndRem.DU.URL) > 0 {
deltaURLs[cID] = addAndRem.DU.URL
} else if !addAndRem.DU.Reset {
logger.Ctx(ictx).Info("missing delta url")
}

@@ -191,11 +196,11 @@ func populateCollections(
prevPath,
locPath,
ctrlOpts,
newDelta.Reset),
addAndRem.DU.Reset),
qp.ProtectedResource.ID(),
bh.itemHandler(),
added,
removed,
addAndRem.Added,
addAndRem.Removed,
// TODO: produce a feature flag that allows selective
// enabling of valid modTimes. This currently produces
// rare-case failures with incorrect details merging.
@@ -273,6 +278,8 @@ func populateCollections(

collections["metadata"] = col

logger.Ctx(ctx).Infow("produced collections", "count_collections", len(collections))

return collections, el.Failure()
}

@@ -18,7 +18,6 @@ import (
"github.com/alcionai/corso/src/internal/common/readers"
"github.com/alcionai/corso/src/internal/data"
dataMock "github.com/alcionai/corso/src/internal/data/mock"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/tester"
@@ -32,6 +31,7 @@ import (
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
)

@@ -75,18 +75,11 @@ type (
func (mg mockGetter) GetAddedAndRemovedItemIDs(
ctx context.Context,
userID, cID, prevDelta string,
_ bool,
_ bool,
) (
map[string]time.Time,
bool,
[]string,
pagers.DeltaUpdate,
error,
) {
_ api.CallConfig,
) (pagers.AddedAndRemoved, error) {
results, ok := mg.results[cID]
if !ok {
return nil, false, nil, pagers.DeltaUpdate{}, clues.New("mock not found for " + cID)
return pagers.AddedAndRemoved{}, clues.New("mock not found for " + cID)
}

delta := results.newDelta
@@ -99,7 +92,14 @@ func (mg mockGetter) GetAddedAndRemovedItemIDs(
resAdded[add] = time.Time{}
}

return resAdded, false, results.removed, delta, results.err
aar := pagers.AddedAndRemoved{
Added: resAdded,
Removed: results.removed,
ValidModTimes: false,
DU: delta,
}

return aar, results.err
}

var _ graph.ContainerResolver = &mockResolver{}

@@ -4,7 +4,7 @@ import (
"github.com/alcionai/clues"

"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

// checkIDAndName is a helper function to ensure that

@@ -15,13 +15,13 @@ import (
"golang.org/x/exp/maps"

"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

var (

@@ -20,13 +20,13 @@ import (
"github.com/alcionai/corso/src/internal/common/readers"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/exchange/mock"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

type CollectionUnitSuite struct {

@@ -1,8 +1,8 @@
package exchange

import (
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

var _ backupHandler = &contactBackupHandler{}

@@ -7,9 +7,9 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models"

"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

var (

@@ -7,7 +7,6 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models"

"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/count"
@@ -15,6 +14,7 @@ import (
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

var _ itemRestorer = &contactRestoreHandler{}

@@ -10,7 +10,6 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"

"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
@@ -20,6 +19,7 @@ import (
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

var _ contactRestorer = &contactRestoreMock{}

@@ -6,10 +6,10 @@ import (
"github.com/alcionai/clues"

"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

// ---------------------------------------------------------------------------

@@ -13,7 +13,6 @@ import (
"github.com/stretchr/testify/suite"

"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
@@ -22,6 +21,7 @@ import (
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

// ---------------------------------------------------------------------------

17 src/internal/m365/collection/exchange/debug.go Normal file
@@ -0,0 +1,17 @@
package exchange

import (
"context"

"github.com/alcionai/clues"

"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/pkg/store"
)

func DeserializeMetadataFiles(
ctx context.Context,
colls []data.RestoreCollection,
) ([]store.MetadataFile, error) {
return nil, clues.New("TODO: needs implementation")
}
@@ -1,8 +1,8 @@
package exchange

import (
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

var _ backupHandler = &eventBackupHandler{}

@@ -7,10 +7,10 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models"

"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

var _ graph.ContainerResolver = &eventContainerCache{}

@@ -8,7 +8,6 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models"

"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/count"
@@ -16,6 +15,7 @@ import (
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

var _ itemRestorer = &eventRestoreHandler{}

@@ -11,7 +11,6 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"

"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
@@ -21,6 +20,7 @@ import (
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

var _ eventRestorer = &eventRestoreMock{}

@@ -2,17 +2,16 @@ package exchange

import (
"context"
"time"

"github.com/microsoft/kiota-abstractions-go/serialization"

"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
)

@@ -30,9 +29,8 @@ type addedAndRemovedItemGetter interface {
GetAddedAndRemovedItemIDs(
ctx context.Context,
user, containerID, oldDeltaToken string,
immutableIDs bool,
canMakeDeltaQueries bool,
) (map[string]time.Time, bool, []string, pagers.DeltaUpdate, error)
cc api.CallConfig,
) (pagers.AddedAndRemoved, error)
}

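Across these hunks the enumerator API trades two positional booleans and a five-value return for an api.CallConfig input and a pagers.AddedAndRemoved result. A sketch of a call under the new shape; getter, user, and containerID are placeholders, while the field names come from the diff itself:

    cc := api.CallConfig{
        CanMakeDeltaQueries: true,
        UseImmutableIDs:     false,
    }

    aar, err := getter.GetAddedAndRemovedItemIDs(ctx, user, containerID, oldDeltaToken, cc)
    if err != nil {
        return err
    }

    // aar bundles what used to be five return values:
    // aar.Added (map[string]time.Time), aar.Removed ([]string),
    // aar.ValidModTimes (bool), and aar.DU (pagers.DeltaUpdate).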
type itemGetterSerializer interface {

@@ -1,8 +1,8 @@
package exchange

import (
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

var _ backupHandler = &mailBackupHandler{}

@@ -6,10 +6,10 @@ import (
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"

"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

var (

@@ -8,7 +8,6 @@ import (

"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/count"
@@ -16,6 +15,7 @@ import (
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

var _ itemRestorer = &mailRestoreHandler{}

@@ -11,7 +11,6 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"

"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
@@ -21,6 +20,7 @@ import (
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

var _ mailRestorer = &mailRestoreMock{}

@@ -11,7 +11,6 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/backup/details"
@@ -20,6 +19,7 @@ import (
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

// RestoreCollection handles restoration of an individual collection.

@@ -11,7 +11,6 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/pkg/backup/metadata"
@@ -20,6 +19,8 @@ import (
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

// TODO: incremental support
@@ -108,7 +109,7 @@ func populateCollections(
el = errs.Local()
)

logger.Ctx(ctx).Info("filling collections", "len_deltapaths", len(dps))
logger.Ctx(ctx).Infow("filling collections", "len_deltapaths", len(dps))

for _, c := range channels {
if el.Failure() != nil {
@@ -152,20 +153,22 @@ func populateCollections(

// if the channel has no email property, it is unable to process delta tokens
// and will return an error if a delta token is queried.
canMakeDeltaQueries := len(ptr.Val(c.GetEmail())) > 0
cc := api.CallConfig{
CanMakeDeltaQueries: len(ptr.Val(c.GetEmail())) > 0,
}

add, _, rem, du, err := bh.getContainerItemIDs(ctx, cID, prevDelta, canMakeDeltaQueries)
addAndRem, err := bh.getContainerItemIDs(ctx, cID, prevDelta, cc)
if err != nil {
el.AddRecoverable(ctx, clues.Stack(err))
continue
}

added := str.SliceToMap(maps.Keys(add))
removed := str.SliceToMap(rem)
added := str.SliceToMap(maps.Keys(addAndRem.Added))
removed := str.SliceToMap(addAndRem.Removed)

if len(du.URL) > 0 {
deltaURLs[cID] = du.URL
} else if !du.Reset {
if len(addAndRem.DU.URL) > 0 {
deltaURLs[cID] = addAndRem.DU.URL
} else if !addAndRem.DU.Reset {
logger.Ctx(ictx).Info("missing delta url")
}

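The same CallConfig shape carries a service-specific rule here: per the comment in the hunk, a Teams channel without an email property errors when a delta token is queried, so the flag is derived per channel rather than from a global toggle. The gate in isolation:

    // Graph errors on delta queries against email-less channels, so only
    // enable them when the channel exposes an email address.
    cc := api.CallConfig{
        CanMakeDeltaQueries: len(ptr.Val(c.GetEmail())) > 0,
    }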
@ -188,7 +191,7 @@ func populateCollections(
|
||||
prevPath,
|
||||
path.Builder{}.Append(cName),
|
||||
ctrlOpts,
|
||||
du.Reset),
|
||||
addAndRem.DU.Reset),
|
||||
bh,
|
||||
qp.ProtectedResource.ID(),
|
||||
added,
|
||||
@ -264,5 +267,7 @@ func populateCollections(
|
||||
|
||||
collections["metadata"] = col
|
||||
|
||||
logger.Ctx(ctx).Infow("produced collections", "count_collections", len(collections))
|
||||
|
||||
return collections, el.Failure()
|
||||
}
|
||||
|
||||
@ -14,7 +14,6 @@ import (
|
||||
inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
|
||||
"github.com/alcionai/corso/src/internal/data"
|
||||
"github.com/alcionai/corso/src/internal/m365/collection/groups/testdata"
|
||||
"github.com/alcionai/corso/src/internal/m365/graph"
|
||||
"github.com/alcionai/corso/src/internal/m365/support"
|
||||
"github.com/alcionai/corso/src/internal/operations/inject"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
@ -30,6 +29,7 @@ import (
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
|
||||
)
|
||||
|
||||
@ -58,15 +58,22 @@ func (bh mockBackupHandler) getContainers(context.Context) ([]models.Channelable
|
||||
func (bh mockBackupHandler) getContainerItemIDs(
|
||||
_ context.Context,
|
||||
_, _ string,
|
||||
_ bool,
|
||||
) (map[string]time.Time, bool, []string, pagers.DeltaUpdate, error) {
|
||||
_ api.CallConfig,
|
||||
) (pagers.AddedAndRemoved, error) {
|
||||
idRes := make(map[string]time.Time, len(bh.messageIDs))
|
||||
|
||||
for _, id := range bh.messageIDs {
|
||||
idRes[id] = time.Time{}
|
||||
}
|
||||
|
||||
return idRes, true, bh.deletedMsgIDs, pagers.DeltaUpdate{}, bh.messagesErr
|
||||
aar := pagers.AddedAndRemoved{
|
||||
Added: idRes,
|
||||
Removed: bh.deletedMsgIDs,
|
||||
ValidModTimes: true,
|
||||
DU: pagers.DeltaUpdate{},
|
||||
}
|
||||
|
||||
return aar, bh.messagesErr
|
||||
}
|
||||
|
||||
func (bh mockBackupHandler) includeContainer(
|
||||
|
||||
@ -2,16 +2,15 @@ package groups
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/models"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||
"github.com/alcionai/corso/src/internal/m365/graph"
|
||||
"github.com/alcionai/corso/src/pkg/backup/details"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
|
||||
)
|
||||
|
||||
@ -41,9 +40,9 @@ func (bh channelsBackupHandler) getContainers(
|
||||
func (bh channelsBackupHandler) getContainerItemIDs(
|
||||
ctx context.Context,
|
||||
channelID, prevDelta string,
|
||||
canMakeDeltaQueries bool,
|
||||
) (map[string]time.Time, bool, []string, pagers.DeltaUpdate, error) {
|
||||
return bh.ac.GetChannelMessageIDs(ctx, bh.protectedResource, channelID, prevDelta, canMakeDeltaQueries)
|
||||
cc api.CallConfig,
|
||||
) (pagers.AddedAndRemoved, error) {
|
||||
return bh.ac.GetChannelMessageIDs(ctx, bh.protectedResource, channelID, prevDelta, cc)
|
||||
}
|
||||
|
||||
func (bh channelsBackupHandler) includeContainer(
|
||||
|
||||
17
src/internal/m365/collection/groups/debug.go
Normal file
17
src/internal/m365/collection/groups/debug.go
Normal file
@ -0,0 +1,17 @@
|
||||
package groups
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/data"
|
||||
"github.com/alcionai/corso/src/pkg/store"
|
||||
)
|
||||
|
||||
func DeserializeMetadataFiles(
|
||||
ctx context.Context,
|
||||
colls []data.RestoreCollection,
|
||||
) ([]store.MetadataFile, error) {
|
||||
return nil, clues.New("TODO: needs implementation")
|
||||
}
|
||||
@ -2,14 +2,14 @@ package groups
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/models"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/m365/graph"
|
||||
"github.com/alcionai/corso/src/pkg/backup/details"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
|
||||
)
|
||||
|
||||
@ -25,8 +25,8 @@ type backupHandler interface {
|
||||
getContainerItemIDs(
|
||||
ctx context.Context,
|
||||
containerID, prevDelta string,
|
||||
canMakeDeltaQueries bool,
|
||||
) (map[string]time.Time, bool, []string, pagers.DeltaUpdate, error)
|
||||
cc api.CallConfig,
|
||||
) (pagers.AddedAndRemoved, error)
|
||||
|
||||
// includeContainer evaluates whether the container is included
|
||||
// in the provided scope.
|
||||
|
||||
@ -8,7 +8,6 @@ import (
|
||||
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
|
||||
"github.com/alcionai/corso/src/internal/data"
|
||||
"github.com/alcionai/corso/src/internal/m365/collection/drive"
|
||||
"github.com/alcionai/corso/src/internal/m365/graph"
|
||||
betaAPI "github.com/alcionai/corso/src/internal/m365/service/sharepoint/api"
|
||||
"github.com/alcionai/corso/src/internal/m365/support"
|
||||
"github.com/alcionai/corso/src/internal/operations/inject"
|
||||
@ -19,6 +18,7 @@ import (
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
|
||||
)
|
||||
|
||||
// CollectLibraries constructs a onedrive Collections struct and Get()s
|
||||
|
||||
@ -9,7 +9,6 @@ import (
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/common/idname/mock"
|
||||
"github.com/alcionai/corso/src/internal/m365/graph"
|
||||
"github.com/alcionai/corso/src/internal/operations/inject"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/internal/tester/tconfig"
|
||||
@ -19,6 +18,7 @@ import (
|
||||
"github.com/alcionai/corso/src/pkg/fault"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
|
||||
)
|
||||
|
||||
type SharePointPagesSuite struct {
|
||||
|
||||
@ -11,7 +11,6 @@ import (
|
||||
|
||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||
"github.com/alcionai/corso/src/internal/data"
|
||||
"github.com/alcionai/corso/src/internal/m365/graph"
|
||||
betaAPI "github.com/alcionai/corso/src/internal/m365/service/sharepoint/api"
|
||||
"github.com/alcionai/corso/src/internal/m365/support"
|
||||
"github.com/alcionai/corso/src/internal/observe"
|
||||
@ -22,6 +21,7 @@ import (
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
|
||||
)
|
||||
|
||||
type DataCategory int
|
||||
|
||||
@ -7,10 +7,10 @@ import (
|
||||
msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/m365/graph"
|
||||
"github.com/alcionai/corso/src/internal/m365/support"
|
||||
"github.com/alcionai/corso/src/pkg/account"
|
||||
"github.com/alcionai/corso/src/pkg/count"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
|
||||
)
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
@ -9,9 +9,9 @@ import (
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/sites"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||
"github.com/alcionai/corso/src/internal/m365/graph"
|
||||
"github.com/alcionai/corso/src/pkg/backup/details"
|
||||
"github.com/alcionai/corso/src/pkg/fault"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
|
||||
)
|
||||
|
||||
// ListToSPInfo translates models.Listable metadata into searchable content
|
||||
|
||||
@ -9,12 +9,12 @@ import (
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/m365/graph"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/internal/tester/tconfig"
|
||||
"github.com/alcionai/corso/src/pkg/account"
|
||||
"github.com/alcionai/corso/src/pkg/backup/details"
|
||||
"github.com/alcionai/corso/src/pkg/fault"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
|
||||
)
|
||||
|
||||
type ListsUnitSuite struct {
|
||||
|
||||
@ -4,8 +4,8 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||
"github.com/alcionai/corso/src/internal/m365/graph/betasdk/models"
|
||||
"github.com/alcionai/corso/src/pkg/backup/details"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api/graph/betasdk/models"
|
||||
)
|
||||
|
||||
// pageToSPInfo propagates metadata from the SharePoint Page data type
|
||||
|
||||
@ -6,9 +6,9 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"

"github.com/alcionai/corso/src/internal/m365/graph/betasdk/models"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph/betasdk/models"
)

type PagesUnitSuite struct {
@ -16,7 +16,6 @@ import (
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/m365/collection/drive"
"github.com/alcionai/corso/src/internal/m365/graph"
betaAPI "github.com/alcionai/corso/src/internal/m365/service/sharepoint/api"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject"
@ -27,6 +26,7 @@ import (
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

// ConsumeRestoreCollections will restore the specified data collections into OneDrive
@ -9,7 +9,6 @@ import (

"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/resource"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject"
@ -19,6 +18,7 @@ import (
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

var ErrNoResourceLookup = clues.New("missing resource lookup client")
39 src/internal/m365/debug.go Normal file
@ -0,0 +1,39 @@
package m365

import (
	"context"

	"github.com/alcionai/clues"

	"github.com/alcionai/corso/src/internal/data"
	"github.com/alcionai/corso/src/internal/m365/collection/drive"
	"github.com/alcionai/corso/src/internal/m365/collection/exchange"
	"github.com/alcionai/corso/src/internal/m365/collection/groups"
	"github.com/alcionai/corso/src/pkg/path"
	"github.com/alcionai/corso/src/pkg/store"
)

func (ctrl *Controller) DeserializeMetadataFiles(
	ctx context.Context,
	colls []data.RestoreCollection,
) ([]store.MetadataFile, error) {
	if len(colls) == 0 {
		return []store.MetadataFile{}, nil
	}

	// assume all collections refer to the same service
	service := colls[0].FullPath().Service()

	switch service {
	case path.ExchangeService, path.ExchangeMetadataService:
		return exchange.DeserializeMetadataFiles(ctx, colls)
	case path.OneDriveService, path.OneDriveMetadataService:
		return drive.DeserializeMetadataFiles(ctx, colls)
	case path.SharePointService, path.SharePointMetadataService:
		return drive.DeserializeMetadataFiles(ctx, colls)
	case path.GroupsService, path.GroupsMetadataService:
		return groups.DeserializeMetadataFiles(ctx, colls)
	default:
		return nil, clues.New("unrecognized service").With("service", service).WithClues(ctx)
	}
}
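The new debug.go file gives the controller a single entry point that dispatches metadata deserialization by service; note that SharePoint routes to the drive deserializer, presumably because its metadata is drive-backed. A minimal calling sketch follows; the surrounding function, fmt output, and variable setup are illustrative assumptions, not part of this diff.

import (
	"context"
	"fmt"

	"github.com/alcionai/corso/src/internal/data"
	"github.com/alcionai/corso/src/internal/m365"
)

// dumpMetadata is an illustrative caller (not from this diff); ctrl and
// colls are assumed to be constructed elsewhere.
func dumpMetadata(
	ctx context.Context,
	ctrl *m365.Controller,
	colls []data.RestoreCollection,
) error {
	files, err := ctrl.DeserializeMetadataFiles(ctx, colls)
	if err != nil {
		return err
	}

	for _, f := range files {
		// store.MetadataFile's fields are not shown in this diff;
		// printing the whole value is a placeholder.
		fmt.Printf("%+v\n", f)
	}

	return nil
}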
@ -8,7 +8,6 @@ import (
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/kopia"
kinject "github.com/alcionai/corso/src/internal/kopia/inject"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/pkg/backup/details"
@ -53,7 +52,7 @@ func (ctrl Controller) ProduceBackupCollections(
func (ctrl *Controller) GetMetadataPaths(
ctx context.Context,
r kinject.RestoreProducer,
base kopia.BackupBase,
base inject.ReasonAndSnapshotIDer,
errs *fault.Bus,
) ([]path.RestorePaths, error) {
return nil, clues.New("not implemented")
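The hunk above changes GetMetadataPaths to accept the inject.ReasonAndSnapshotIDer interface in place of the concrete kopia.BackupBase, which is presumably why the kopia import is dropped from the same file; the method body remains a not-implemented stub.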
@ -15,7 +15,6 @@ import (
"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/m365/graph"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/internal/m365/service/onedrive/stub"
"github.com/alcionai/corso/src/internal/tester"
@ -25,6 +24,7 @@ import (
"github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

var (
@ -186,15 +186,15 @@ func (suite *SharePointIntegrationSuite) SetupSuite() {
si := NewSuiteInfoImpl(suite.T(), ctx, tconfig.M365SiteID(suite.T()), path.SharePointService)

// users needed for permissions
user, err := si.controller.AC.Users().GetByID(ctx, si.user)
user, err := si.controller.AC.Users().GetByID(ctx, si.user, api.CallConfig{})
require.NoError(t, err, "fetching user", si.user, clues.ToCore(err))
si.userID = ptr.Val(user.GetId())

secondaryUser, err := si.controller.AC.Users().GetByID(ctx, si.secondaryUser)
secondaryUser, err := si.controller.AC.Users().GetByID(ctx, si.secondaryUser, api.CallConfig{})
require.NoError(t, err, "fetching user", si.secondaryUser, clues.ToCore(err))
si.secondaryUserID = ptr.Val(secondaryUser.GetId())

tertiaryUser, err := si.controller.AC.Users().GetByID(ctx, si.tertiaryUser)
tertiaryUser, err := si.controller.AC.Users().GetByID(ctx, si.tertiaryUser, api.CallConfig{})
require.NoError(t, err, "fetching user", si.tertiaryUser, clues.ToCore(err))
si.tertiaryUserID = ptr.Val(tertiaryUser.GetId())
@ -255,15 +255,15 @@ func (suite *OneDriveIntegrationSuite) SetupSuite() {

si := NewSuiteInfoImpl(t, ctx, tconfig.M365UserID(t), path.OneDriveService)

user, err := si.controller.AC.Users().GetByID(ctx, si.user)
user, err := si.controller.AC.Users().GetByID(ctx, si.user, api.CallConfig{})
require.NoError(t, err, "fetching user", si.user, clues.ToCore(err))
si.userID = ptr.Val(user.GetId())

secondaryUser, err := si.controller.AC.Users().GetByID(ctx, si.secondaryUser)
secondaryUser, err := si.controller.AC.Users().GetByID(ctx, si.secondaryUser, api.CallConfig{})
require.NoError(t, err, "fetching user", si.secondaryUser, clues.ToCore(err))
si.secondaryUserID = ptr.Val(secondaryUser.GetId())

tertiaryUser, err := si.controller.AC.Users().GetByID(ctx, si.tertiaryUser)
tertiaryUser, err := si.controller.AC.Users().GetByID(ctx, si.tertiaryUser, api.CallConfig{})
require.NoError(t, err, "fetching user", si.tertiaryUser, clues.ToCore(err))
si.tertiaryUserID = ptr.Val(tertiaryUser.GetId())
@ -319,15 +319,15 @@ func (suite *OneDriveNightlySuite) SetupSuite() {

si := NewSuiteInfoImpl(t, ctx, tconfig.M365UserID(t), path.OneDriveService)

user, err := si.controller.AC.Users().GetByID(ctx, si.user)
user, err := si.controller.AC.Users().GetByID(ctx, si.user, api.CallConfig{})
require.NoError(t, err, "fetching user", si.user, clues.ToCore(err))
si.userID = ptr.Val(user.GetId())

secondaryUser, err := si.controller.AC.Users().GetByID(ctx, si.secondaryUser)
secondaryUser, err := si.controller.AC.Users().GetByID(ctx, si.secondaryUser, api.CallConfig{})
require.NoError(t, err, "fetching user", si.secondaryUser, clues.ToCore(err))
si.secondaryUserID = ptr.Val(secondaryUser.GetId())

tertiaryUser, err := si.controller.AC.Users().GetByID(ctx, si.tertiaryUser)
tertiaryUser, err := si.controller.AC.Users().GetByID(ctx, si.tertiaryUser, api.CallConfig{})
require.NoError(t, err, "fetching user", si.tertiaryUser, clues.ToCore(err))
si.tertiaryUserID = ptr.Val(tertiaryUser.GetId())
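All three SetupSuite hunks above make the same mechanical change: Users().GetByID now takes a trailing api.CallConfig argument, and these call sites pass the zero value. A minimal sketch of the new call shape; the wrapper function is an illustrative assumption, the api.Client type is inferred from the si.controller.AC usage above, and an empty api.CallConfig{} presumably requests default call behavior.

import (
	"context"

	"github.com/alcionai/corso/src/internal/common/ptr"
	"github.com/alcionai/corso/src/pkg/services/m365/api"
)

// fetchUserID resolves a user's ID with the new three-argument GetByID.
func fetchUserID(ctx context.Context, ac api.Client, id string) (string, error) {
	user, err := ac.Users().GetByID(ctx, id, api.CallConfig{})
	if err != nil {
		return "", err
	}

	return ptr.Val(user.GetId()), nil
}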
@ -8,7 +8,6 @@ import (
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/m365/collection/drive"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/service/exchange"
"github.com/alcionai/corso/src/internal/m365/service/groups"
"github.com/alcionai/corso/src/internal/m365/service/onedrive"
@ -19,6 +18,7 @@ import (
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

// ConsumeRestoreCollections restores data from the specified collections
@ -8,13 +8,13 @@ import (
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/exchange"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

// ProduceBackupCollections returns a DataCollection which the caller can
@ -7,9 +7,9 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models"

"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

type getMailInboxer interface {
@ -10,9 +10,9 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"

"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api/mock"
)
@ -7,7 +7,6 @@ import (

"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/exchange"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/pkg/backup/details"
@ -15,6 +14,7 @@ import (
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

// ConsumeRestoreCollections restores M365 objects in data.RestoreCollection to MSFT
@ -8,10 +8,10 @@ import (
"github.com/stretchr/testify/require"

"github.com/alcionai/corso/src/internal/m365/collection/exchange"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

func PopulateContainerCache(
@ -14,7 +14,6 @@ import (
"github.com/alcionai/corso/src/internal/m365/collection/drive"
"github.com/alcionai/corso/src/internal/m365/collection/groups"
"github.com/alcionai/corso/src/internal/m365/collection/site"
"github.com/alcionai/corso/src/internal/m365/graph"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
@ -27,6 +26,7 @@ import (
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

func ProduceBackupCollections(
@ -10,9 +10,9 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"

"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

type EnabledUnitSuite struct {
@ -12,7 +12,6 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/drive"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/pkg/backup/details"
@ -22,6 +21,7 @@ import (
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

// ConsumeRestoreCollections will restore the specified data collections into OneDrive
@ -13,7 +13,6 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/data/mock"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
@ -22,6 +21,7 @@ import (
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

type GroupsUnitSuite struct {
@ -8,7 +8,6 @@ import (
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/drive"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/version"
@ -16,6 +15,7 @@ import (
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

func ProduceBackupCollections(
@ -6,7 +6,7 @@ import (
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"

"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

type getDefaultDriver interface {
@ -10,8 +10,8 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"

"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

type EnabledUnitSuite struct {
@ -33,7 +33,7 @@ func (m mockDGDD) GetDefaultDrive(context.Context, string) (models.Driveable, er
return m.response, m.err
}

// Copied from src/internal/m365/graph/errors_test.go
// Copied from src/pkg/services/m365/api/graph/errors_test.go
func odErrMsg(code, message string) *odataerrors.ODataError {
odErr := odataerrors.NewODataError()
merr := odataerrors.NewMainError()
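odErrMsg (its body is cut off in this view) is a test helper that builds an *odataerrors.ODataError carrying the given code and message. A usage sketch; the code and message strings here are invented for the example, not taken from this diff.

// ODataError implements the error interface, so the helper's output
// can be asserted on directly in tests.
odErr := odErrMsg("ErrorItemNotFound", "the item was deleted")
require.Error(t, odErr)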
Some files were not shown because too many files have changed in this diff