merge main

This commit is contained in:
neha-Gupta1 2023-08-24 15:29:24 +05:30
commit 93607f5cd5
102 changed files with 5576 additions and 1452 deletions

View File

@ -92,7 +92,7 @@ jobs:
CORSO_M365_TEST_USER_ID: ${{ vars.CORSO_M365_TEST_USER_ID }}
CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
CORSO_LOG_FILE: ${{ github.workspace }}/testlog/run-nightly.log
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-nightly.log
LOG_GRAPH_REQUESTS: true
S3_BUCKET: ${{ secrets.CI_TESTS_S3_BUCKET }}
run: |
@ -101,7 +101,6 @@ jobs:
-tags testing \
-json \
-v \
-failfast \
-p 1 \
-timeout 1h \
./... 2>&1 | tee ./testlog/gotest-nightly.log | gotestfmt -hide successful-tests

View File

@ -39,6 +39,8 @@ var serviceCommands = []func(cmd *cobra.Command) *cobra.Command{
addExchangeCommands,
addOneDriveCommands,
addSharePointCommands,
addGroupsCommands,
addTeamsCommands,
}
// AddCommands attaches all `corso backup * *` commands to the parent.

365
src/cli/backup/groups.go Normal file
View File

@ -0,0 +1,365 @@
package backup
import (
"context"
"errors"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"golang.org/x/exp/slices"
"github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/repo"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/filters"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/repository"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365"
)
// ------------------------------------------------------------------------------------------------
// setup and globals
// ------------------------------------------------------------------------------------------------

// command-name and usage-suffix constants shared by the groups subcommands.
const (
	groupsServiceCommand                 = "groups"
	groupsServiceCommandCreateUseSuffix  = "--group <groupsName> | '" + flags.Wildcard + "'"
	groupsServiceCommandDeleteUseSuffix  = "--backup <backupId>"
	groupsServiceCommandDetailsUseSuffix = "--backup <backupId>"
)
// TODO: correct examples
// NOTE(review): these examples appear to be copied from a mailbox-oriented
// service (contacts, calendar events) and don't yet reflect groups data.
const (
	groupsServiceCommandCreateExamples = `# Backup all Groups data for Alice
corso backup create groups --group alice@example.com
# Backup only Groups contacts for Alice and Bob
corso backup create groups --group engineering,sales --data contacts
# Backup all Groups data for all M365 users
corso backup create groups --group '*'`

	groupsServiceCommandDeleteExamples = `# Delete Groups backup with ID 1234abcd-12ab-cd34-56de-1234abcd
corso backup delete groups --backup 1234abcd-12ab-cd34-56de-1234abcd`

	groupsServiceCommandDetailsExamples = `# Explore items in Alice's latest backup (1234abcd...)
corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd
# Explore calendar events occurring after start of 2022
corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd \
--event-starts-after 2022-01-01T00:00:00`
)
// called by backup.go to map subcommands to provider-specific handling.
// Attaches the groups child command to the matching parent (create, list,
// details, delete) and registers its flags. Returns the child command, or
// nil if cmd.Use matched none of the known parents.
func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
	var (
		c  *cobra.Command
		fs *pflag.FlagSet
	)

	switch cmd.Use {
	case createCommand:
		c, fs = utils.AddCommand(cmd, groupsCreateCmd(), utils.MarkPreReleaseCommand())
		// disable alphabetic sorting so the flags appear in registration order.
		fs.SortFlags = false

		c.Use = c.Use + " " + groupsServiceCommandCreateUseSuffix
		c.Example = groupsServiceCommandCreateExamples

		// Flags addition ordering should follow the order we want them to appear in help and docs:
		flags.AddGroupFlag(c)
		flags.AddDataFlag(c, []string{dataLibraries}, false)
		flags.AddCorsoPassphaseFlags(c)
		flags.AddAWSCredsFlags(c)
		flags.AddAzureCredsFlags(c)
		flags.AddFetchParallelismFlag(c)
		flags.AddFailFastFlag(c)

	case listCommand:
		c, fs = utils.AddCommand(cmd, groupsListCmd(), utils.MarkPreReleaseCommand())
		fs.SortFlags = false

		flags.AddBackupIDFlag(c, false)
		flags.AddCorsoPassphaseFlags(c)
		flags.AddAWSCredsFlags(c)
		flags.AddAzureCredsFlags(c)
		addFailedItemsFN(c)
		addSkippedItemsFN(c)
		addRecoveredErrorsFN(c)

	case detailsCommand:
		c, fs = utils.AddCommand(cmd, groupsDetailsCmd(), utils.MarkPreReleaseCommand())
		fs.SortFlags = false

		c.Use = c.Use + " " + groupsServiceCommandDetailsUseSuffix
		c.Example = groupsServiceCommandDetailsExamples

		flags.AddSkipReduceFlag(c)

		// Flags addition ordering should follow the order we want them to appear in help and docs:
		// More generic (ex: --user) and more frequently used flags take precedence.
		flags.AddBackupIDFlag(c, true)
		flags.AddCorsoPassphaseFlags(c)
		flags.AddAWSCredsFlags(c)
		flags.AddAzureCredsFlags(c)

	case deleteCommand:
		c, fs = utils.AddCommand(cmd, groupsDeleteCmd(), utils.MarkPreReleaseCommand())
		fs.SortFlags = false

		c.Use = c.Use + " " + groupsServiceCommandDeleteUseSuffix
		c.Example = groupsServiceCommandDeleteExamples

		flags.AddBackupIDFlag(c, true)
		flags.AddCorsoPassphaseFlags(c)
		flags.AddAWSCredsFlags(c)
		flags.AddAzureCredsFlags(c)
	}

	return c
}
// ------------------------------------------------------------------------------------------------
// backup create
// ------------------------------------------------------------------------------------------------
// `corso backup create groups [<flag>...]`
// groupsCreateCmd produces the cobra command for creating groups backups.
func groupsCreateCmd() *cobra.Command {
	return &cobra.Command{
		Use: groupsServiceCommand,
		// "Groups" (plural) for consistency with the Shorts of the sibling
		// list/details/delete commands in this file.
		Short: "Backup M365 Groups service data",
		RunE:  createGroupsCmd,
		Args:  cobra.NoArgs,
	}
}
// processes a groups service backup.
// createGroupsCmd validates the provided flags, connects to the repository,
// resolves the tenant's groups, and runs one backup per selected group.
func createGroupsCmd(cmd *cobra.Command, args []string) error {
	ctx := cmd.Context()

	if utils.HasNoFlagsAndShownHelp(cmd) {
		return nil
	}

	if err := validateGroupsBackupCreateFlags(flags.GroupFV, flags.CategoryDataFV); err != nil {
		// print the validation failure, matching the behavior of the other
		// services' create commands (ex: createTeamsCmd), instead of
		// returning the raw error.
		return Only(ctx, err)
	}

	r, acct, err := utils.AccountConnectAndWriteRepoConfig(ctx, path.GroupsService, repo.S3Overrides(cmd))
	if err != nil {
		return Only(ctx, err)
	}

	defer utils.CloseRepo(ctx, r)

	// TODO: log/print recoverable errors
	errs := fault.New(false)

	// resolve group IDs and names so selectors can match either user input form.
	ins, err := m365.GroupsMap(ctx, *acct, errs)
	if err != nil {
		return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 groups"))
	}

	sel := groupsBackupCreateSelectors(ctx, ins, flags.GroupFV, flags.CategoryDataFV)

	// one selector per resource owner; each produces a distinct backup.
	selectorSet := []selectors.Selector{}

	for _, discSel := range sel.SplitByResourceOwner(ins.IDs()) {
		selectorSet = append(selectorSet, discSel.Selector)
	}

	return runBackups(
		ctx,
		r,
		"Group", "group",
		selectorSet,
		ins)
}
// ------------------------------------------------------------------------------------------------
// backup list
// ------------------------------------------------------------------------------------------------
// `corso backup list groups [<flag>...]`
// groupsListCmd produces the cobra command for listing groups backups.
func groupsListCmd() *cobra.Command {
	cmd := cobra.Command{
		Use:   groupsServiceCommand,
		Short: "List the history of M365 Groups service backups",
		RunE:  listGroupsCmd,
		Args:  cobra.NoArgs,
	}

	return &cmd
}
// lists the history of backup operations
// by delegating to the generic list handler for the groups service.
func listGroupsCmd(cmd *cobra.Command, args []string) error {
	backupID := flags.BackupIDFV
	return genericListCommand(cmd, backupID, path.GroupsService, args)
}
// ------------------------------------------------------------------------------------------------
// backup details
// ------------------------------------------------------------------------------------------------
// `corso backup details groups [<flag>...]`
// groupsDetailsCmd produces the cobra command for inspecting a groups backup.
func groupsDetailsCmd() *cobra.Command {
	cmd := cobra.Command{
		Use:   groupsServiceCommand,
		Short: "Shows the details of a M365 Groups service backup",
		RunE:  detailsGroupsCmd,
		Args:  cobra.NoArgs,
	}

	return &cmd
}
// detailsGroupsCmd handles `corso backup details groups`: it connects to the
// repository, looks up the backup's details entries, and prints them.
func detailsGroupsCmd(cmd *cobra.Command, args []string) error {
	if utils.HasNoFlagsAndShownHelp(cmd) {
		return nil
	}

	ctx := cmd.Context()
	opts := utils.MakeGroupsOpts(cmd)

	r, _, _, ctrlOpts, err := utils.GetAccountAndConnect(ctx, path.GroupsService, repo.S3Overrides(cmd))
	if err != nil {
		return Only(ctx, err)
	}

	defer utils.CloseRepo(ctx, r)

	ds, err := runDetailsGroupsCmd(ctx, r, flags.BackupIDFV, opts, ctrlOpts.SkipReduce)
	if err != nil {
		return Only(ctx, err)
	}

	// an empty result set is not an error; inform the user and exit cleanly.
	if len(ds.Entries) == 0 {
		Info(ctx, selectors.ErrorNoMatchingItems)
		return nil
	}

	ds.PrintEntries(ctx)

	return nil
}
// runDetailsGroupsCmd actually performs the lookup in backup details.
// the fault.Errors return is always non-nil. Callers should check if
// errs.Failure() == nil.
func runDetailsGroupsCmd(
	ctx context.Context,
	r repository.BackupGetter,
	backupID string,
	opts utils.GroupsOpts,
	skipReduce bool,
) (*details.Details, error) {
	if err := utils.ValidateGroupsRestoreFlags(backupID, opts); err != nil {
		return nil, err
	}

	ctx = clues.Add(ctx, "backup_id", backupID)

	d, _, errs := r.GetBackupDetails(ctx, backupID)
	// TODO: log/track recoverable errors
	if errs.Failure() != nil {
		// distinguish "no such backup" from other repository failures.
		if errors.Is(errs.Failure(), data.ErrNotFound) {
			return nil, clues.New("no backup exists with the id " + backupID)
		}

		return nil, clues.Wrap(errs.Failure(), "Failed to get backup details in the repository")
	}

	ctx = clues.Add(ctx, "details_entries", len(d.Entries))

	// unless told otherwise, reduce the details down to only the entries
	// matching the flag-provided filters.
	if !skipReduce {
		sel := utils.IncludeGroupsRestoreDataSelectors(ctx, opts)
		sel.Configure(selectors.Config{OnlyMatchItemNames: true})
		utils.FilterGroupsRestoreInfoSelectors(sel, opts)
		d = sel.Reduce(ctx, d, errs)
	}

	return d, nil
}
// ------------------------------------------------------------------------------------------------
// backup delete
// ------------------------------------------------------------------------------------------------
// `corso backup delete groups [<flag>...]`
// groupsDeleteCmd produces the cobra command for deleting a groups backup.
func groupsDeleteCmd() *cobra.Command {
	cmd := cobra.Command{
		Use:   groupsServiceCommand,
		Short: "Delete backed-up M365 Groups service data",
		RunE:  deleteGroupsCmd,
		Args:  cobra.NoArgs,
	}

	return &cmd
}
// deletes a groups service backup
// by delegating to the generic delete handler for the groups service.
func deleteGroupsCmd(cmd *cobra.Command, args []string) error {
	backupID := flags.BackupIDFV
	return genericDeleteCommand(cmd, path.GroupsService, backupID, "Groups", args)
}
// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------
func validateGroupsBackupCreateFlags(groups, cats []string) error {
if len(groups) == 0 {
return clues.New(
"requires one or more --" +
flags.GroupFN + " ids, or the wildcard --" +
flags.GroupFN + " *",
)
}
// TODO(meain)
// for _, d := range cats {
// if d != dataLibraries {
// return clues.New(
// d + " is an unrecognized data type; only " + dataLibraries + " is supported"
// )
// }
// }
return nil
}
// TODO: users might specify a data type, this only supports AllData().
// groupsBackupCreateSelectors builds the backup selector for the requested
// groups: all known groups when the wildcard was given, otherwise only the
// listed ones.
func groupsBackupCreateSelectors(
	ctx context.Context,
	ins idname.Cacher,
	group, cats []string,
) *selectors.GroupsBackup {
	// any occurrence of the wildcard takes precedence over individual ids.
	if filters.PathContains(group).Compare(flags.Wildcard) {
		return includeAllGroupWithCategories(ins, cats)
	}

	// clone to avoid aliasing the caller's (flag-owned) slice.
	sel := selectors.NewGroupsBackup(slices.Clone(group))

	return addGroupsCategories(sel, cats)
}
// includeAllGroupWithCategories builds a backup selector covering every known
// group id, narrowed to the requested categories.
func includeAllGroupWithCategories(ins idname.Cacher, categories []string) *selectors.GroupsBackup {
	sel := selectors.NewGroupsBackup(ins.IDs())
	return addGroupsCategories(sel, categories)
}
// addGroupsCategories applies category filtering to the selector. With no
// categories, all data is included; category-specific filtering is not yet
// implemented (see TODO), so non-empty cats currently add no scopes.
func addGroupsCategories(sel *selectors.GroupsBackup, cats []string) *selectors.GroupsBackup {
	if len(cats) == 0 {
		sel.Include(sel.AllData())
	}

	// TODO(meain): handle filtering
	// for _, d := range cats {
	// switch d {
	// case dataLibraries:
	// sel.Include(sel.LibraryFolders(selectors.Any()))
	// case dataPages:
	// sel.Include(sel.Pages(selectors.Any()))
	// }
	// }

	return sel
}

View File

@ -0,0 +1,98 @@
package backup
import (
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/internal/tester"
)
// GroupsUnitSuite holds unit tests for the groups backup CLI commands.
type GroupsUnitSuite struct {
	tester.Suite
}

// TestGroupsUnitSuite runs the GroupsUnitSuite under `go test`.
func TestGroupsUnitSuite(t *testing.T) {
	suite.Run(t, &GroupsUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestAddGroupsCommands verifies that addGroupsCommands wires each groups
// subcommand (create/list/details/delete) onto the correct parent with the
// expected Use, Short, and RunE.
func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
	expectUse := groupsServiceCommand

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		// NOTE(review): this field is never asserted against in the loop
		// below — either check that each flag is registered on the child
		// command, or drop the field.
		flags      []string
		expectRunE func(*cobra.Command, []string) error
	}{
		{
			"create groups",
			createCommand,
			expectUse + " " + groupsServiceCommandCreateUseSuffix,
			groupsCreateCmd().Short,
			[]string{
				flags.CategoryDataFN,
				flags.FailFastFN,
				flags.FetchParallelismFN,
				flags.SkipReduceFN,
				flags.NoStatsFN,
			},
			createGroupsCmd,
		},
		{
			"list groups",
			listCommand,
			expectUse,
			groupsListCmd().Short,
			[]string{
				flags.BackupFN,
				flags.FailedItemsFN,
				flags.SkippedItemsFN,
				flags.RecoveredErrorsFN,
			},
			listGroupsCmd,
		},
		{
			"details groups",
			detailsCommand,
			expectUse + " " + groupsServiceCommandDetailsUseSuffix,
			groupsDetailsCmd().Short,
			[]string{
				flags.BackupFN,
			},
			detailsGroupsCmd,
		},
		{
			"delete groups",
			deleteCommand,
			expectUse + " " + groupsServiceCommandDeleteUseSuffix,
			groupsDeleteCmd().Short,
			[]string{flags.BackupFN},
			deleteGroupsCmd,
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			// the parent command's Use selects which child gets attached.
			cmd := &cobra.Command{Use: test.use}

			c := addGroupsCommands(cmd)
			require.NotNil(t, c)

			cmds := cmd.Commands()
			require.Len(t, cmds, 1)

			child := cmds[0]
			assert.Equal(t, test.expectUse, child.Use)
			assert.Equal(t, test.expectShort, child.Short)
			tester.AreSameFunc(t, test.expectRunE, child.RunE)
		})
	}
}

230
src/cli/backup/teams.go Normal file
View File

@ -0,0 +1,230 @@
package backup
import (
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/path"
)
// ------------------------------------------------------------------------------------------------
// setup and globals
// ------------------------------------------------------------------------------------------------
// command-name and usage-suffix constants shared by the teams subcommands.
const (
	teamsServiceCommand                 = "teams"
	teamsServiceCommandCreateUseSuffix  = "--team <teamsName> | '" + flags.Wildcard + "'"
	teamsServiceCommandDeleteUseSuffix  = "--backup <backupId>"
	teamsServiceCommandDetailsUseSuffix = "--backup <backupId>"
)
// TODO: correct examples
// NOTE(review): these examples appear to be copied from a mailbox-oriented
// service (contacts, calendar events) and don't yet reflect teams data.
const (
	teamsServiceCommandCreateExamples = `# Backup all Teams data for Alice
corso backup create teams --team alice@example.com
# Backup only Teams contacts for Alice and Bob
corso backup create teams --team engineering,sales --data contacts
# Backup all Teams data for all M365 users
corso backup create teams --team '*'`

	teamsServiceCommandDeleteExamples = `# Delete Teams backup with ID 1234abcd-12ab-cd34-56de-1234abcd
corso backup delete teams --backup 1234abcd-12ab-cd34-56de-1234abcd`

	teamsServiceCommandDetailsExamples = `# Explore items in Alice's latest backup (1234abcd...)
corso backup details teams --backup 1234abcd-12ab-cd34-56de-1234abcd
# Explore calendar events occurring after start of 2022
corso backup details teams --backup 1234abcd-12ab-cd34-56de-1234abcd \
--event-starts-after 2022-01-01T00:00:00`
)
// called by backup.go to map subcommands to provider-specific handling.
// Attaches the teams child command to the matching parent (create, list,
// details, delete) and registers its flags. Returns the child command, or
// nil if cmd.Use matched none of the known parents.
func addTeamsCommands(cmd *cobra.Command) *cobra.Command {
	var (
		c  *cobra.Command
		fs *pflag.FlagSet
	)

	switch cmd.Use {
	case createCommand:
		c, fs = utils.AddCommand(cmd, teamsCreateCmd(), utils.MarkPreReleaseCommand())
		// disable alphabetic sorting so the flags appear in registration order.
		fs.SortFlags = false

		c.Use = c.Use + " " + teamsServiceCommandCreateUseSuffix
		c.Example = teamsServiceCommandCreateExamples

		// Flags addition ordering should follow the order we want them to appear in help and docs:
		flags.AddTeamFlag(c)
		flags.AddDataFlag(c, []string{dataEmail, dataContacts, dataEvents}, false)
		flags.AddCorsoPassphaseFlags(c)
		flags.AddAWSCredsFlags(c)
		flags.AddAzureCredsFlags(c)
		flags.AddFetchParallelismFlag(c)
		flags.AddFailFastFlag(c)

	case listCommand:
		c, fs = utils.AddCommand(cmd, teamsListCmd(), utils.MarkPreReleaseCommand())
		fs.SortFlags = false

		flags.AddBackupIDFlag(c, false)
		flags.AddCorsoPassphaseFlags(c)
		flags.AddAWSCredsFlags(c)
		flags.AddAzureCredsFlags(c)
		addFailedItemsFN(c)
		addSkippedItemsFN(c)
		addRecoveredErrorsFN(c)

	case detailsCommand:
		c, fs = utils.AddCommand(cmd, teamsDetailsCmd(), utils.MarkPreReleaseCommand())
		fs.SortFlags = false

		c.Use = c.Use + " " + teamsServiceCommandDetailsUseSuffix
		c.Example = teamsServiceCommandDetailsExamples

		flags.AddSkipReduceFlag(c)

		// Flags addition ordering should follow the order we want them to appear in help and docs:
		// More generic (ex: --user) and more frequently used flags take precedence.
		flags.AddBackupIDFlag(c, true)
		flags.AddCorsoPassphaseFlags(c)
		flags.AddAWSCredsFlags(c)
		flags.AddAzureCredsFlags(c)

	case deleteCommand:
		c, fs = utils.AddCommand(cmd, teamsDeleteCmd(), utils.MarkPreReleaseCommand())
		fs.SortFlags = false

		c.Use = c.Use + " " + teamsServiceCommandDeleteUseSuffix
		c.Example = teamsServiceCommandDeleteExamples

		flags.AddBackupIDFlag(c, true)
		flags.AddCorsoPassphaseFlags(c)
		flags.AddAWSCredsFlags(c)
		flags.AddAzureCredsFlags(c)
	}

	return c
}
// ------------------------------------------------------------------------------------------------
// backup create
// ------------------------------------------------------------------------------------------------
// `corso backup create teams [<flag>...]`
// teamsCreateCmd produces the cobra command for creating teams backups.
func teamsCreateCmd() *cobra.Command {
	return &cobra.Command{
		Use: teamsServiceCommand,
		// "Teams" (plural) for consistency with the Shorts of the sibling
		// list/details/delete commands in this file.
		Short: "Backup M365 Teams service data",
		RunE:  createTeamsCmd,
		Args:  cobra.NoArgs,
	}
}
// processes a teams service backup.
// Teams backups are not yet implemented: after flag validation this always
// reports utils.ErrNotYetImplemented.
func createTeamsCmd(cmd *cobra.Command, args []string) error {
	ctx := cmd.Context()

	if utils.HasNoFlagsAndShownHelp(cmd) {
		return nil
	}

	err := validateTeamBackupCreateFlags(flags.TeamFV)
	if err != nil {
		return Only(ctx, err)
	}

	return Only(ctx, utils.ErrNotYetImplemented)
}
// ------------------------------------------------------------------------------------------------
// backup list
// ------------------------------------------------------------------------------------------------
// `corso backup list teams [<flag>...]`
// teamsListCmd produces the cobra command for listing teams backups.
func teamsListCmd() *cobra.Command {
	cmd := cobra.Command{
		Use:   teamsServiceCommand,
		Short: "List the history of M365 Teams service backups",
		RunE:  listTeamsCmd,
		Args:  cobra.NoArgs,
	}

	return &cmd
}
// lists the history of backup operations
// by delegating to the generic list handler for the teams service.
func listTeamsCmd(cmd *cobra.Command, args []string) error {
	backupID := flags.BackupIDFV
	return genericListCommand(cmd, backupID, path.TeamsService, args)
}
// ------------------------------------------------------------------------------------------------
// backup details
// ------------------------------------------------------------------------------------------------
// `corso backup details teams [<flag>...]`
// teamsDetailsCmd produces the cobra command for inspecting a teams backup.
func teamsDetailsCmd() *cobra.Command {
	cmd := cobra.Command{
		Use:   teamsServiceCommand,
		Short: "Shows the details of a M365 Teams service backup",
		RunE:  detailsTeamsCmd,
		Args:  cobra.NoArgs,
	}

	return &cmd
}
// detailsTeamsCmd handles `corso backup details teams`.
// Not yet implemented: always reports utils.ErrNotYetImplemented once flags
// have been provided.
func detailsTeamsCmd(cmd *cobra.Command, args []string) error {
	ctx := cmd.Context()

	if utils.HasNoFlagsAndShownHelp(cmd) {
		return nil
	}

	return Only(ctx, utils.ErrNotYetImplemented)
}
// ------------------------------------------------------------------------------------------------
// backup delete
// ------------------------------------------------------------------------------------------------
// `corso backup delete teams [<flag>...]`
// teamsDeleteCmd produces the cobra command for deleting a teams backup.
func teamsDeleteCmd() *cobra.Command {
	cmd := cobra.Command{
		Use:   teamsServiceCommand,
		Short: "Delete backed-up M365 Teams service data",
		RunE:  deleteTeamsCmd,
		Args:  cobra.NoArgs,
	}

	return &cmd
}
// deletes a teams service backup
// by delegating to the generic delete handler for the teams service.
func deleteTeamsCmd(cmd *cobra.Command, args []string) error {
	backupID := flags.BackupIDFV
	return genericDeleteCommand(cmd, path.TeamsService, backupID, "Teams", args)
}
// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------
// validateTeamBackupCreateFlags ensures at least one --team value (an id or
// the wildcard) was provided.
func validateTeamBackupCreateFlags(teams []string) error {
	if len(teams) == 0 {
		return clues.New(
			"requires one or more --" +
				flags.TeamFN + " ids, or the wildcard --" +
				flags.TeamFN + " *",
		)
	}

	// TODO(meain)
	// NOTE(review): stale copy-paste — this function takes no cats parameter;
	// the commented validation belongs with a data-category flag if one is added.
	// for _, d := range cats {
	// if d != dataLibraries {
	// return clues.New(
	// d + " is an unrecognized data type; only " + dataLibraries + " is supported"
	// )
	// }
	// }

	return nil
}

View File

@ -0,0 +1,98 @@
package backup
import (
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/internal/tester"
)
// TeamsUnitSuite holds unit tests for the teams backup CLI commands.
type TeamsUnitSuite struct {
	tester.Suite
}

// TestTeamsUnitSuite runs the TeamsUnitSuite under `go test`.
func TestTeamsUnitSuite(t *testing.T) {
	suite.Run(t, &TeamsUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestAddTeamsCommands verifies that addTeamsCommands wires each teams
// subcommand (create/list/details/delete) onto the correct parent with the
// expected Use, Short, and RunE.
func (suite *TeamsUnitSuite) TestAddTeamsCommands() {
	expectUse := teamsServiceCommand

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		// NOTE(review): this field is never asserted against in the loop
		// below — either check that each flag is registered on the child
		// command, or drop the field.
		flags      []string
		expectRunE func(*cobra.Command, []string) error
	}{
		{
			"create teams",
			createCommand,
			expectUse + " " + teamsServiceCommandCreateUseSuffix,
			teamsCreateCmd().Short,
			[]string{
				flags.CategoryDataFN,
				flags.FailFastFN,
				flags.FetchParallelismFN,
				flags.SkipReduceFN,
				flags.NoStatsFN,
			},
			createTeamsCmd,
		},
		{
			"list teams",
			listCommand,
			expectUse,
			teamsListCmd().Short,
			[]string{
				flags.BackupFN,
				flags.FailedItemsFN,
				flags.SkippedItemsFN,
				flags.RecoveredErrorsFN,
			},
			listTeamsCmd,
		},
		{
			"details teams",
			detailsCommand,
			expectUse + " " + teamsServiceCommandDetailsUseSuffix,
			teamsDetailsCmd().Short,
			[]string{
				flags.BackupFN,
			},
			detailsTeamsCmd,
		},
		{
			"delete teams",
			deleteCommand,
			expectUse + " " + teamsServiceCommandDeleteUseSuffix,
			teamsDeleteCmd().Short,
			[]string{flags.BackupFN},
			deleteTeamsCmd,
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			// the parent command's Use selects which child gets attached.
			cmd := &cobra.Command{Use: test.use}

			c := addTeamsCommands(cmd)
			require.NotNil(t, c)

			cmds := cmd.Commands()
			require.Len(t, cmds, 1)

			child := cmds[0]
			assert.Equal(t, test.expectUse, child.Use)
			assert.Equal(t, test.expectShort, child.Short)
			tester.AreSameFunc(t, test.expectRunE, child.RunE)
		})
	}
}

View File

@ -21,6 +21,8 @@ import (
var exportCommands = []func(cmd *cobra.Command) *cobra.Command{
addOneDriveCommands,
addSharePointCommands,
addGroupsCommands,
addTeamsCommands,
}
// AddCommands attaches all `corso export * *` commands to the parent.

84
src/cli/export/groups.go Normal file
View File

@ -0,0 +1,84 @@
package export
import (
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
)
// called by export.go to map subcommands to provider-specific handling.
// Attaches the groups export command when cmd is the export parent, and
// registers its flags. Returns the child command, or nil on no match.
func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
	var (
		c  *cobra.Command
		fs *pflag.FlagSet
	)

	switch cmd.Use {
	case exportCommand:
		c, fs = utils.AddCommand(cmd, groupsExportCmd(), utils.MarkPreReleaseCommand())

		c.Use = c.Use + " " + groupsServiceCommandUseSuffix

		// Flags addition ordering should follow the order we want them to appear in help and docs:
		// More generic (ex: --user) and more frequently used flags take precedence.
		fs.SortFlags = false

		flags.AddBackupIDFlag(c, true)
		flags.AddExportConfigFlags(c)
		flags.AddFailFastFlag(c)
		flags.AddCorsoPassphaseFlags(c)
		flags.AddAWSCredsFlags(c)
	}

	return c
}
// TODO: correct examples
// NOTE(review): examples reference OneDrive-style files/folders; confirm they
// match the groups export surface before release.
const (
	groupsServiceCommand          = "groups"
	groupsServiceCommandUseSuffix = "<destination> --backup <backupId>"

	//nolint:lll
	groupsServiceCommandExportExamples = `# Export file with ID 98765abcdef in Bob's last backup (1234abcd...) to my-exports directory
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --file 98765abcdef
# Export files named "FY2021 Planning.xlsx" in "Documents/Finance Reports" to current directory
corso export groups . --backup 1234abcd-12ab-cd34-56de-1234abcd \
--file "FY2021 Planning.xlsx" --folder "Documents/Finance Reports"
# Export all files and folders in folder "Documents/Finance Reports" that were created before 2020 to my-exports
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd
--folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00`
)
// `corso export groups [<flag>...] <destination>`
// groupsExportCmd produces the cobra command for exporting groups data.
// Exactly one positional argument — the export destination — is required.
func groupsExportCmd() *cobra.Command {
	requireDestination := func(cmd *cobra.Command, args []string) error {
		if len(args) != 1 {
			return errors.New("missing export destination")
		}

		return nil
	}

	return &cobra.Command{
		Use:     groupsServiceCommand,
		Short:   "Export M365 Groups service data",
		RunE:    exportGroupsCmd,
		Args:    requireDestination,
		Example: groupsServiceCommandExportExamples,
	}
}
// processes a groups service export.
// Not yet implemented: always reports utils.ErrNotYetImplemented once flags
// have been provided.
func exportGroupsCmd(cmd *cobra.Command, args []string) error {
	ctx := cmd.Context()

	if utils.HasNoFlagsAndShownHelp(cmd) {
		return nil
	}

	return Only(ctx, utils.ErrNotYetImplemented)
}

View File

@ -0,0 +1,94 @@
package export
import (
"bytes"
"testing"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/tester"
)
// GroupsUnitSuite holds unit tests for the groups export CLI command.
type GroupsUnitSuite struct {
	tester.Suite
}

// TestGroupsUnitSuite runs the GroupsUnitSuite under `go test`.
func TestGroupsUnitSuite(t *testing.T) {
	suite.Run(t, &GroupsUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestAddGroupsCommands verifies the export groups command wiring, then
// dry-runs the command to confirm flag values are parsed into the expected
// flag variables and export options.
func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
	expectUse := groupsServiceCommand + " " + groupsServiceCommandUseSuffix

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{"export groups", exportCommand, expectUse, groupsExportCmd().Short, exportGroupsCmd},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			cmd := &cobra.Command{Use: test.use}

			// normally a persistent flag from the root.
			// required to ensure a dry run.
			flags.AddRunModeFlag(cmd, true)

			c := addGroupsCommands(cmd)
			require.NotNil(t, c)

			cmds := cmd.Commands()
			require.Len(t, cmds, 1)

			child := cmds[0]
			assert.Equal(t, test.expectUse, child.Use)
			assert.Equal(t, test.expectShort, child.Short)
			tester.AreSameFunc(t, test.expectRunE, child.RunE)

			cmd.SetArgs([]string{
				"groups",
				testdata.RestoreDestination,
				"--" + flags.RunModeFN, flags.RunModeFlagTest,
				"--" + flags.BackupFN, testdata.BackupInput,
				"--" + flags.AWSAccessKeyFN, testdata.AWSAccessKeyID,
				"--" + flags.AWSSecretAccessKeyFN, testdata.AWSSecretAccessKey,
				"--" + flags.AWSSessionTokenFN, testdata.AWSSessionToken,
				"--" + flags.CorsoPassphraseFN, testdata.CorsoPassphrase,
				// bool flags
				"--" + flags.ArchiveFN,
			})

			cmd.SetOut(new(bytes.Buffer)) // drop output
			cmd.SetErr(new(bytes.Buffer)) // drop output
			err := cmd.Execute()
			// export is not implemented yet, so a successful parse ends in
			// ErrNotYetImplemented rather than nil.
			// assert.NoError(t, err, clues.ToCore(err))
			assert.ErrorIs(t, err, utils.ErrNotYetImplemented, clues.ToCore(err))

			opts := utils.MakeGroupsOpts(cmd)

			assert.Equal(t, testdata.BackupInput, flags.BackupIDFV)
			assert.Equal(t, testdata.Archive, opts.ExportCfg.Archive)
			assert.Equal(t, testdata.AWSAccessKeyID, flags.AWSAccessKeyFV)
			assert.Equal(t, testdata.AWSSecretAccessKey, flags.AWSSecretAccessKeyFV)
			assert.Equal(t, testdata.AWSSessionToken, flags.AWSSessionTokenFV)
			assert.Equal(t, testdata.CorsoPassphrase, flags.CorsoPassphraseFV)
		})
	}
}

View File

@ -39,7 +39,7 @@ func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
const (
oneDriveServiceCommand = "onedrive"
oneDriveServiceCommandUseSuffix = "--backup <backupId> <destination>"
oneDriveServiceCommandUseSuffix = "<destination> --backup <backupId>"
//nolint:lll
oneDriveServiceCommandExportExamples = `# Export file with ID 98765abcdef in Bob's last backup (1234abcd...) to my-exports directory
@ -62,7 +62,7 @@ func oneDriveExportCmd() *cobra.Command {
RunE: exportOneDriveCmd,
Args: func(cmd *cobra.Command, args []string) error {
if len(args) != 1 {
return errors.New("missing restore destination")
return errors.New("missing export destination")
}
return nil

View File

@ -39,7 +39,7 @@ func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
const (
sharePointServiceCommand = "sharepoint"
sharePointServiceCommandUseSuffix = "--backup <backupId> <destination>"
sharePointServiceCommandUseSuffix = "<destination> --backup <backupId>"
//nolint:lll
sharePointServiceCommandExportExamples = `# Export file with ID 98765abcdef in Bob's latest backup (1234abcd...) to my-exports directory
@ -66,7 +66,7 @@ func sharePointExportCmd() *cobra.Command {
RunE: exportSharePointCmd,
Args: func(cmd *cobra.Command, args []string) error {
if len(args) != 1 {
return errors.New("missing restore destination")
return errors.New("missing export destination")
}
return nil

84
src/cli/export/teams.go Normal file
View File

@ -0,0 +1,84 @@
package export
import (
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
)
// called by export.go to map subcommands to provider-specific handling.
// Attaches the teams export command when cmd is the export parent, and
// registers its flags. Returns the child command, or nil on no match.
func addTeamsCommands(cmd *cobra.Command) *cobra.Command {
	var (
		c  *cobra.Command
		fs *pflag.FlagSet
	)

	switch cmd.Use {
	case exportCommand:
		c, fs = utils.AddCommand(cmd, teamsExportCmd(), utils.MarkPreReleaseCommand())

		c.Use = c.Use + " " + teamsServiceCommandUseSuffix

		// Flags addition ordering should follow the order we want them to appear in help and docs:
		// More generic (ex: --user) and more frequently used flags take precedence.
		fs.SortFlags = false

		flags.AddBackupIDFlag(c, true)
		flags.AddExportConfigFlags(c)
		flags.AddFailFastFlag(c)
		flags.AddCorsoPassphaseFlags(c)
		flags.AddAWSCredsFlags(c)
	}

	return c
}
// TODO: correct examples
// NOTE(review): examples reference OneDrive-style files/folders; confirm they
// match the teams export surface before release.
const (
	teamsServiceCommand          = "teams"
	teamsServiceCommandUseSuffix = "<destination> --backup <backupId>"

	//nolint:lll
	teamsServiceCommandExportExamples = `# Export file with ID 98765abcdef in Bob's last backup (1234abcd...) to my-exports directory
corso export teams my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --file 98765abcdef
# Export files named "FY2021 Planning.xlsx" in "Documents/Finance Reports" to current directory
corso export teams . --backup 1234abcd-12ab-cd34-56de-1234abcd \
--file "FY2021 Planning.xlsx" --folder "Documents/Finance Reports"
# Export all files and folders in folder "Documents/Finance Reports" that were created before 2020 to my-exports
corso export teams my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd
--folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00`
)
// `corso export teams [<flag>...] <destination>`
// teamsExportCmd produces the cobra command for exporting teams data.
// Exactly one positional argument — the export destination — is required.
func teamsExportCmd() *cobra.Command {
	requireDestination := func(cmd *cobra.Command, args []string) error {
		if len(args) != 1 {
			return errors.New("missing export destination")
		}

		return nil
	}

	return &cobra.Command{
		Use:     teamsServiceCommand,
		Short:   "Export M365 Teams service data",
		RunE:    exportTeamsCmd,
		Args:    requireDestination,
		Example: teamsServiceCommandExportExamples,
	}
}
// processes a teams service export.
// Not yet implemented: always reports utils.ErrNotYetImplemented once flags
// have been provided.
func exportTeamsCmd(cmd *cobra.Command, args []string) error {
	ctx := cmd.Context()

	if utils.HasNoFlagsAndShownHelp(cmd) {
		return nil
	}

	return Only(ctx, utils.ErrNotYetImplemented)
}

View File

@ -0,0 +1,94 @@
package export
import (
"bytes"
"testing"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/tester"
)
// TeamsUnitSuite holds unit tests for the teams export CLI command.
type TeamsUnitSuite struct {
	tester.Suite
}

// TestTeamsUnitSuite runs the TeamsUnitSuite under `go test`.
func TestTeamsUnitSuite(t *testing.T) {
	suite.Run(t, &TeamsUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestAddTeamsCommands verifies the export teams command wiring, then
// dry-runs the command to confirm flag values are parsed into the expected
// flag variables and export options.
func (suite *TeamsUnitSuite) TestAddTeamsCommands() {
	expectUse := teamsServiceCommand + " " + teamsServiceCommandUseSuffix

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{"export teams", exportCommand, expectUse, teamsExportCmd().Short, exportTeamsCmd},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			cmd := &cobra.Command{Use: test.use}

			// normally a persistent flag from the root.
			// required to ensure a dry run.
			flags.AddRunModeFlag(cmd, true)

			c := addTeamsCommands(cmd)
			require.NotNil(t, c)

			cmds := cmd.Commands()
			require.Len(t, cmds, 1)

			child := cmds[0]
			assert.Equal(t, test.expectUse, child.Use)
			assert.Equal(t, test.expectShort, child.Short)
			tester.AreSameFunc(t, test.expectRunE, child.RunE)

			cmd.SetArgs([]string{
				"teams",
				testdata.RestoreDestination,
				"--" + flags.RunModeFN, flags.RunModeFlagTest,
				"--" + flags.BackupFN, testdata.BackupInput,
				"--" + flags.AWSAccessKeyFN, testdata.AWSAccessKeyID,
				"--" + flags.AWSSecretAccessKeyFN, testdata.AWSSecretAccessKey,
				"--" + flags.AWSSessionTokenFN, testdata.AWSSessionToken,
				"--" + flags.CorsoPassphraseFN, testdata.CorsoPassphrase,
				// bool flags
				"--" + flags.ArchiveFN,
			})

			cmd.SetOut(new(bytes.Buffer)) // drop output
			cmd.SetErr(new(bytes.Buffer)) // drop output
			err := cmd.Execute()
			// export is not implemented yet, so a successful parse ends in
			// ErrNotYetImplemented rather than nil.
			// assert.NoError(t, err, clues.ToCore(err))
			assert.ErrorIs(t, err, utils.ErrNotYetImplemented, clues.ToCore(err))

			opts := utils.MakeTeamsOpts(cmd)

			assert.Equal(t, testdata.BackupInput, flags.BackupIDFV)
			assert.Equal(t, testdata.Archive, opts.ExportCfg.Archive)
			assert.Equal(t, testdata.AWSAccessKeyID, flags.AWSAccessKeyFV)
			assert.Equal(t, testdata.AWSSecretAccessKey, flags.AWSSecretAccessKeyFV)
			assert.Equal(t, testdata.AWSSessionToken, flags.AWSSessionTokenFV)
			assert.Equal(t, testdata.CorsoPassphrase, flags.CorsoPassphraseFV)
		})
	}
}

28
src/cli/flags/groups.go Normal file
View File

@ -0,0 +1,28 @@
package flags
import (
"github.com/spf13/cobra"
)
const (
	// GroupFN is the name of the --group flag.
	GroupFN = "group"
)

// GroupFV holds the values collected from the --group flag.
var GroupFV []string

// AddGroupDetailsAndRestoreFlags adds the group-specific flags shared by the
// details and restore commands.
func AddGroupDetailsAndRestoreFlags(cmd *cobra.Command) {
	// TODO: implement flags
}

// AddGroupFlag adds the --group flag, which accepts id or name values.
// TODO: need to decide what the appropriate "name" to accept here is.
// keepers thinks it's either DisplayName or MailNickname or Mail.
// Mail is most accurate, MailNickname is accurate and shorter, but the end
// user may not see either one visibly.
// https://learn.microsoft.com/en-us/graph/api/group-list?view=graph-rest-1.0&tabs=http
func AddGroupFlag(cmd *cobra.Command) {
	cmd.Flags().StringSliceVar(
		&GroupFV,
		GroupFN, nil,
		"Backup data by group; accepts '"+Wildcard+"' to select all groups.")
}

28
src/cli/flags/teams.go Normal file
View File

@ -0,0 +1,28 @@
package flags
import (
"github.com/spf13/cobra"
)
const (
	// TeamFN is the name of the --team flag.
	TeamFN = "team"
)

// TeamFV holds the values collected from the --team flag.
var TeamFV []string

// AddTeamDetailsAndRestoreFlags adds the team-specific flags shared by the
// details and restore commands.
func AddTeamDetailsAndRestoreFlags(cmd *cobra.Command) {
	// TODO: implement flags
}

// AddTeamFlag adds the --team flag, which accepts id or name values.
// TODO: need to decide what the appropriate "name" to accept here is.
// keepers thinks it's either DisplayName or MailNickname or Mail.
// Mail is most accurate, MailNickname is accurate and shorter, but the end
// user may not see either one visibly.
// https://learn.microsoft.com/en-us/graph/api/team-list?view=graph-rest-1.0&tabs=http
func AddTeamFlag(cmd *cobra.Command) {
	cmd.Flags().StringSliceVar(
		&TeamFV,
		TeamFN, nil,
		"Backup data by team; accepts '"+Wildcard+"' to select all teams.")
}

81
src/cli/restore/groups.go Normal file
View File

@ -0,0 +1,81 @@
package restore
import (
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
)
// addGroupsCommands is called by restore.go to map the `groups` subcommand
// onto its provider-specific handling.  Returns nil when cmd is not a
// command this provider extends.
func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
	var (
		c  *cobra.Command
		fs *pflag.FlagSet
	)

	if cmd.Use != restoreCommand {
		return c
	}

	c, fs = utils.AddCommand(cmd, groupsRestoreCmd(), utils.MarkPreReleaseCommand())
	c.Use = c.Use + " " + groupsServiceCommandUseSuffix

	// Flags addition ordering should follow the order we want them to appear
	// in help and docs: more generic and more frequently used flags first.
	fs.SortFlags = false

	flags.AddBackupIDFlag(c, true)
	flags.AddRestorePermissionsFlag(c)
	flags.AddRestoreConfigFlags(c)
	flags.AddFailFastFlag(c)
	flags.AddCorsoPassphaseFlags(c)
	flags.AddAWSCredsFlags(c)
	flags.AddAzureCredsFlags(c)

	return c
}
// Command name, use-suffix, and help examples for `corso restore groups`.
//
// TODO: correct examples
// NOTE(review): these examples appear copied from the file-based services;
// the --file/--folder flags referenced below are not registered on the
// groups restore command — confirm before release.
const (
	groupsServiceCommand          = "groups"
	groupsServiceCommandUseSuffix = "--backup <backupId>"

	groupsServiceCommandRestoreExamples = `# Restore file with ID 98765abcdef in Bob's last backup (1234abcd...)
corso restore groups --backup 1234abcd-12ab-cd34-56de-1234abcd --file 98765abcdef
# Restore the file with ID 98765abcdef along with its associated permissions
corso restore groups --backup 1234abcd-12ab-cd34-56de-1234abcd --file 98765abcdef --restore-permissions
# Restore files named "FY2021 Planning.xlsx" in "Documents/Finance Reports"
corso restore groups --backup 1234abcd-12ab-cd34-56de-1234abcd \
--file "FY2021 Planning.xlsx" --folder "Documents/Finance Reports"
# Restore all files and folders in folder "Documents/Finance Reports" that were created before 2020
corso restore groups --backup 1234abcd-12ab-cd34-56de-1234abcd
--folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00`
)
// groupsRestoreCmd builds the `corso restore groups [<flag>...]` command.
// No positional arguments are accepted; all input arrives via flags.
func groupsRestoreCmd() *cobra.Command {
	c := &cobra.Command{
		Use:     groupsServiceCommand,
		Short:   "Restore M365 Groups service data",
		RunE:    restoreGroupsCmd,
		Args:    cobra.NoArgs,
		Example: groupsServiceCommandRestoreExamples,
	}

	return c
}
// restoreGroupsCmd handles a groups service restore.  Groups restore is not
// yet implemented: after the standard "no flags shown help" short-circuit,
// it only reports ErrNotYetImplemented.
func restoreGroupsCmd(cmd *cobra.Command, args []string) error {
	if utils.HasNoFlagsAndShownHelp(cmd) {
		return nil
	}

	return Only(cmd.Context(), utils.ErrNotYetImplemented)
}

View File

@ -0,0 +1,108 @@
package restore
import (
"bytes"
"testing"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/tester"
)
// GroupsUnitSuite holds unit tests for the groups restore CLI wiring.
type GroupsUnitSuite struct {
	tester.Suite
}

// TestGroupsUnitSuite runs the suite under the standard go test runner.
func TestGroupsUnitSuite(t *testing.T) {
	suite.Run(t, &GroupsUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestAddGroupsCommands verifies that addGroupsCommands registers the
// `groups` subcommand under `restore`, and that a dry-run execution parses
// each registered flag into its package-level flag-value variable.
func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
	expectUse := groupsServiceCommand + " " + groupsServiceCommandUseSuffix

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{"restore groups", restoreCommand, expectUse, groupsRestoreCmd().Short, restoreGroupsCmd},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			cmd := &cobra.Command{Use: test.use}

			// normally a persistent flag from the root.
			// required to ensure a dry run.
			flags.AddRunModeFlag(cmd, true)

			c := addGroupsCommands(cmd)
			require.NotNil(t, c)

			cmds := cmd.Commands()
			require.Len(t, cmds, 1)

			child := cmds[0]
			assert.Equal(t, test.expectUse, child.Use)
			assert.Equal(t, test.expectShort, child.Short)
			tester.AreSameFunc(t, test.expectRunE, child.RunE)

			// exercise flag parsing via a dry run (RunModeFlagTest).
			cmd.SetArgs([]string{
				"groups",
				"--" + flags.RunModeFN, flags.RunModeFlagTest,
				"--" + flags.BackupFN, testdata.BackupInput,
				"--" + flags.CollisionsFN, testdata.Collisions,
				"--" + flags.DestinationFN, testdata.Destination,
				"--" + flags.ToResourceFN, testdata.ToResource,
				"--" + flags.AWSAccessKeyFN, testdata.AWSAccessKeyID,
				"--" + flags.AWSSecretAccessKeyFN, testdata.AWSSecretAccessKey,
				"--" + flags.AWSSessionTokenFN, testdata.AWSSessionToken,
				"--" + flags.AzureClientIDFN, testdata.AzureClientID,
				"--" + flags.AzureClientTenantFN, testdata.AzureTenantID,
				"--" + flags.AzureClientSecretFN, testdata.AzureClientSecret,
				"--" + flags.CorsoPassphraseFN, testdata.CorsoPassphrase,
				// bool flags
				"--" + flags.RestorePermissionsFN,
			})
			cmd.SetOut(new(bytes.Buffer)) // drop output
			cmd.SetErr(new(bytes.Buffer)) // drop output

			err := cmd.Execute()
			// groups restore is not yet implemented, so the dry run is
			// expected to surface ErrNotYetImplemented.
			// assert.NoError(t, err, clues.ToCore(err))
			assert.ErrorIs(t, err, utils.ErrNotYetImplemented, clues.ToCore(err))

			// confirm each parsed flag landed in its flag-value var / opts.
			opts := utils.MakeGroupsOpts(cmd)
			assert.Equal(t, testdata.BackupInput, flags.BackupIDFV)
			assert.Equal(t, testdata.Collisions, opts.RestoreCfg.Collisions)
			assert.Equal(t, testdata.Destination, opts.RestoreCfg.Destination)
			assert.Equal(t, testdata.ToResource, opts.RestoreCfg.ProtectedResource)
			assert.Equal(t, testdata.AWSAccessKeyID, flags.AWSAccessKeyFV)
			assert.Equal(t, testdata.AWSSecretAccessKey, flags.AWSSecretAccessKeyFV)
			assert.Equal(t, testdata.AWSSessionToken, flags.AWSSessionTokenFV)
			assert.Equal(t, testdata.AzureClientID, flags.AzureClientIDFV)
			assert.Equal(t, testdata.AzureTenantID, flags.AzureClientTenantFV)
			assert.Equal(t, testdata.AzureClientSecret, flags.AzureClientSecretFV)
			assert.Equal(t, testdata.CorsoPassphrase, flags.CorsoPassphraseFV)
			assert.True(t, flags.RestorePermissionsFV)
		})
	}
}

81
src/cli/restore/teams.go Normal file
View File

@ -0,0 +1,81 @@
package restore
import (
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
)
// addTeamsCommands is called by restore.go to map the `teams` subcommand
// onto its provider-specific handling.  Returns nil when cmd is not a
// command this provider extends.
func addTeamsCommands(cmd *cobra.Command) *cobra.Command {
	var (
		c  *cobra.Command
		fs *pflag.FlagSet
	)

	if cmd.Use != restoreCommand {
		return c
	}

	c, fs = utils.AddCommand(cmd, teamsRestoreCmd(), utils.MarkPreReleaseCommand())
	c.Use = c.Use + " " + teamsServiceCommandUseSuffix

	// Flags addition ordering should follow the order we want them to appear
	// in help and docs: more generic and more frequently used flags first.
	fs.SortFlags = false

	flags.AddBackupIDFlag(c, true)
	flags.AddRestorePermissionsFlag(c)
	flags.AddRestoreConfigFlags(c)
	flags.AddFailFastFlag(c)
	flags.AddCorsoPassphaseFlags(c)
	flags.AddAWSCredsFlags(c)
	flags.AddAzureCredsFlags(c)

	return c
}
// Command name, use-suffix, and help examples for `corso restore teams`.
//
// TODO: correct examples
// NOTE(review): these examples appear copied from the file-based services;
// the --file/--folder flags referenced below are not registered on the
// teams restore command — confirm before release.
const (
	teamsServiceCommand          = "teams"
	teamsServiceCommandUseSuffix = "--backup <backupId>"

	teamsServiceCommandRestoreExamples = `# Restore file with ID 98765abcdef in Bob's last backup (1234abcd...)
corso restore teams --backup 1234abcd-12ab-cd34-56de-1234abcd --file 98765abcdef
# Restore the file with ID 98765abcdef along with its associated permissions
corso restore teams --backup 1234abcd-12ab-cd34-56de-1234abcd --file 98765abcdef --restore-permissions
# Restore files named "FY2021 Planning.xlsx" in "Documents/Finance Reports"
corso restore teams --backup 1234abcd-12ab-cd34-56de-1234abcd \
--file "FY2021 Planning.xlsx" --folder "Documents/Finance Reports"
# Restore all files and folders in folder "Documents/Finance Reports" that were created before 2020
corso restore teams --backup 1234abcd-12ab-cd34-56de-1234abcd
--folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00`
)
// teamsRestoreCmd builds the `corso restore teams [<flag>...]` command.
// No positional arguments are accepted; all input arrives via flags.
func teamsRestoreCmd() *cobra.Command {
	c := &cobra.Command{
		Use:     teamsServiceCommand,
		Short:   "Restore M365 Teams service data",
		RunE:    restoreTeamsCmd,
		Args:    cobra.NoArgs,
		Example: teamsServiceCommandRestoreExamples,
	}

	return c
}
// restoreTeamsCmd handles a teams service restore.  Teams restore is not yet
// implemented: after the standard "no flags shown help" short-circuit, it
// only reports ErrNotYetImplemented.
func restoreTeamsCmd(cmd *cobra.Command, args []string) error {
	if utils.HasNoFlagsAndShownHelp(cmd) {
		return nil
	}

	return Only(cmd.Context(), utils.ErrNotYetImplemented)
}

View File

@ -0,0 +1,108 @@
package restore
import (
"bytes"
"testing"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/tester"
)
// TeamsUnitSuite holds unit tests for the teams restore CLI wiring.
type TeamsUnitSuite struct {
	tester.Suite
}

// TestTeamsUnitSuite runs the suite under the standard go test runner.
func TestTeamsUnitSuite(t *testing.T) {
	suite.Run(t, &TeamsUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestAddTeamsCommands verifies that addTeamsCommands registers the `teams`
// subcommand under `restore`, and that a dry-run execution parses each
// registered flag into its package-level flag-value variable.
func (suite *TeamsUnitSuite) TestAddTeamsCommands() {
	expectUse := teamsServiceCommand + " " + teamsServiceCommandUseSuffix

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{"restore teams", restoreCommand, expectUse, teamsRestoreCmd().Short, restoreTeamsCmd},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			cmd := &cobra.Command{Use: test.use}

			// normally a persistent flag from the root.
			// required to ensure a dry run.
			flags.AddRunModeFlag(cmd, true)

			c := addTeamsCommands(cmd)
			require.NotNil(t, c)

			cmds := cmd.Commands()
			require.Len(t, cmds, 1)

			child := cmds[0]
			assert.Equal(t, test.expectUse, child.Use)
			assert.Equal(t, test.expectShort, child.Short)
			tester.AreSameFunc(t, test.expectRunE, child.RunE)

			// exercise flag parsing via a dry run (RunModeFlagTest).
			cmd.SetArgs([]string{
				"teams",
				"--" + flags.RunModeFN, flags.RunModeFlagTest,
				"--" + flags.BackupFN, testdata.BackupInput,
				"--" + flags.CollisionsFN, testdata.Collisions,
				"--" + flags.DestinationFN, testdata.Destination,
				"--" + flags.ToResourceFN, testdata.ToResource,
				"--" + flags.AWSAccessKeyFN, testdata.AWSAccessKeyID,
				"--" + flags.AWSSecretAccessKeyFN, testdata.AWSSecretAccessKey,
				"--" + flags.AWSSessionTokenFN, testdata.AWSSessionToken,
				"--" + flags.AzureClientIDFN, testdata.AzureClientID,
				"--" + flags.AzureClientTenantFN, testdata.AzureTenantID,
				"--" + flags.AzureClientSecretFN, testdata.AzureClientSecret,
				"--" + flags.CorsoPassphraseFN, testdata.CorsoPassphrase,
				// bool flags
				"--" + flags.RestorePermissionsFN,
			})
			cmd.SetOut(new(bytes.Buffer)) // drop output
			cmd.SetErr(new(bytes.Buffer)) // drop output

			err := cmd.Execute()
			// teams restore is not yet implemented, so the dry run is
			// expected to surface ErrNotYetImplemented.
			// assert.NoError(t, err, clues.ToCore(err))
			assert.ErrorIs(t, err, utils.ErrNotYetImplemented, clues.ToCore(err))

			// confirm each parsed flag landed in its flag-value var / opts.
			opts := utils.MakeTeamsOpts(cmd)
			assert.Equal(t, testdata.BackupInput, flags.BackupIDFV)
			assert.Equal(t, testdata.Collisions, opts.RestoreCfg.Collisions)
			assert.Equal(t, testdata.Destination, opts.RestoreCfg.Destination)
			assert.Equal(t, testdata.ToResource, opts.RestoreCfg.ProtectedResource)
			assert.Equal(t, testdata.AWSAccessKeyID, flags.AWSAccessKeyFV)
			assert.Equal(t, testdata.AWSSecretAccessKey, flags.AWSSecretAccessKeyFV)
			assert.Equal(t, testdata.AWSSessionToken, flags.AWSSessionTokenFV)
			assert.Equal(t, testdata.AzureClientID, flags.AzureClientIDFV)
			assert.Equal(t, testdata.AzureTenantID, flags.AzureClientTenantFV)
			assert.Equal(t, testdata.AzureClientSecret, flags.AzureClientSecretFV)
			assert.Equal(t, testdata.CorsoPassphrase, flags.CorsoPassphraseFV)
			assert.True(t, flags.RestorePermissionsFV)
		})
	}
}

87
src/cli/utils/groups.go Normal file
View File

@ -0,0 +1,87 @@
package utils
import (
"context"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/pkg/selectors"
)
// GroupsOpts holds the flag values relevant to a groups-service command.
type GroupsOpts struct {
	// Groups lists the --group flag values (ids or names).
	Groups []string

	RestoreCfg RestoreCfgOpts
	ExportCfg  ExportCfgOpts

	Populated flags.PopulatedFlags
}

// MakeGroupsOpts snapshots the current groups-related flag values for cmd.
func MakeGroupsOpts(cmd *cobra.Command) GroupsOpts {
	return GroupsOpts{
		// FIX: read the --group flag value (populated by flags.AddGroupFlag),
		// not the --user flag value; UserFV was a copy/paste from the
		// user-scoped services and would always leave Groups empty here.
		Groups: flags.GroupFV,

		RestoreCfg: makeRestoreCfgOpts(cmd),
		ExportCfg:  makeExportCfgOpts(cmd),

		// populated contains the list of flags that appear in the
		// command, according to pflags. Use this to differentiate
		// between an "empty" and a "missing" value.
		Populated: flags.GetPopulatedFlags(cmd),
	}
}
// ValidateGroupsRestoreFlags checks common flags for correctness and
// interdependencies.  A non-empty backup ID is mandatory; the restore
// config flags are validated by the shared helper.
func ValidateGroupsRestoreFlags(backupID string, opts GroupsOpts) error {
	if backupID == "" {
		return clues.New("a backup ID is required")
	}

	// TODO(meain): selectors (refer sharepoint)

	return validateRestoreConfigFlags(flags.CollisionsFV, opts.RestoreCfg)
}
// AddGroupInfo adds the scope produced from v to the selector's filter set.
// Empty values are ignored.
func AddGroupInfo(
	sel *selectors.GroupsRestore,
	v string,
	f func(string) []selectors.GroupsScope,
) {
	if v == "" {
		return
	}

	sel.Filter(f(v))
}
// IncludeGroupsRestoreDataSelectors builds the common data-selector
// inclusions for Group commands.  An empty group list matches all groups.
func IncludeGroupsRestoreDataSelectors(ctx context.Context, opts GroupsOpts) *selectors.GroupsRestore {
	groups := opts.Groups
	if len(groups) == 0 {
		groups = selectors.Any()
	}

	sel := selectors.NewGroupsRestore(groups)

	// TODO(meain): add selectors
	sel.Include(sel.AllData())

	return sel
}
// FilterGroupsRestoreInfoSelectors builds the common info-selector filters.
// Currently a no-op: groups info filters have not been implemented yet.
func FilterGroupsRestoreInfoSelectors(
	sel *selectors.GroupsRestore,
	opts GroupsOpts,
) {
	// TODO(meain)
	// AddGroupInfo(sel, opts.GroupID, sel.Library)
}

View File

@ -0,0 +1,161 @@
package utils_test
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/tester"
)
// GroupsUtilsSuite holds unit tests for the groups CLI utils.
type GroupsUtilsSuite struct {
	tester.Suite
}

// TestGroupsUtilsSuite runs the suite under the standard go test runner.
func TestGroupsUtilsSuite(t *testing.T) {
	suite.Run(t, &GroupsUtilsSuite{Suite: tester.NewUnitSuite(t)})
}
// Tests that the selector build for Groups produces the expected number of
// include scopes regardless of how many groups were requested.
func (suite *GroupsUtilsSuite) TestIncludeGroupsRestoreDataSelectors() {
	var (
		empty  = []string{}
		single = []string{"single"}
		multi  = []string{"more", "than", "one"}
	)

	table := []struct {
		name             string
		opts             utils.GroupsOpts
		expectIncludeLen int
	}{
		{
			name:             "no inputs",
			opts:             utils.GroupsOpts{},
			expectIncludeLen: 2,
		},
		{
			name: "empty",
			opts: utils.GroupsOpts{
				Groups: empty,
			},
			expectIncludeLen: 2,
		},
		{
			name: "single inputs",
			opts: utils.GroupsOpts{
				Groups: single,
			},
			expectIncludeLen: 2,
		},
		{
			name: "multi inputs",
			opts: utils.GroupsOpts{
				Groups: multi,
			},
			expectIncludeLen: 2,
		},
		// TODO Add library specific tests once we have filters based
		// on library folders
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			sel := utils.IncludeGroupsRestoreDataSelectors(ctx, test.opts)

			// FIX: assert against the subtest's t (bound above), not
			// suite.T(); inside suite.Run the suite's T refers to the
			// parent test, which misattributes failures.
			assert.Len(t, sel.Includes, test.expectIncludeLen)
		})
	}
}
// TestValidateGroupsRestoreFlags verifies backup-ID validation: a non-empty
// id passes, an empty id errors.  Selector-based cases are stubbed out below
// until groups selectors exist.
func (suite *GroupsUtilsSuite) TestValidateGroupsRestoreFlags() {
	table := []struct {
		name     string
		backupID string
		opts     utils.GroupsOpts
		expect   assert.ErrorAssertionFunc
	}{
		{
			name:     "no opts",
			backupID: "id",
			opts:     utils.GroupsOpts{},
			expect:   assert.NoError,
		},
		{
			name:     "no backupID",
			backupID: "",
			opts:     utils.GroupsOpts{},
			expect:   assert.Error,
		},
		// TODO: Add tests for selectors once we have them
		// {
		// 	name:     "all valid",
		// 	backupID: "id",
		// 	opts: utils.GroupsOpts{
		// 		Populated: flags.PopulatedFlags{
		// 			flags.FileCreatedAfterFN:   struct{}{},
		// 			flags.FileCreatedBeforeFN:  struct{}{},
		// 			flags.FileModifiedAfterFN:  struct{}{},
		// 			flags.FileModifiedBeforeFN: struct{}{},
		// 		},
		// 	},
		// 	expect: assert.NoError,
		// },
		// {
		// 	name:     "invalid file created after",
		// 	backupID: "id",
		// 	opts: utils.GroupsOpts{
		// 		FileCreatedAfter: "1235",
		// 		Populated: flags.PopulatedFlags{
		// 			flags.FileCreatedAfterFN: struct{}{},
		// 		},
		// 	},
		// 	expect: assert.Error,
		// },
		// {
		// 	name:     "invalid file created before",
		// 	backupID: "id",
		// 	opts: utils.GroupsOpts{
		// 		FileCreatedBefore: "1235",
		// 		Populated: flags.PopulatedFlags{
		// 			flags.FileCreatedBeforeFN: struct{}{},
		// 		},
		// 	},
		// 	expect: assert.Error,
		// },
		// {
		// 	name:     "invalid file modified after",
		// 	backupID: "id",
		// 	opts: utils.GroupsOpts{
		// 		FileModifiedAfter: "1235",
		// 		Populated: flags.PopulatedFlags{
		// 			flags.FileModifiedAfterFN: struct{}{},
		// 		},
		// 	},
		// 	expect: assert.Error,
		// },
		// {
		// 	name:     "invalid file modified before",
		// 	backupID: "id",
		// 	opts: utils.GroupsOpts{
		// 		FileModifiedBefore: "1235",
		// 		Populated: flags.PopulatedFlags{
		// 			flags.FileModifiedBeforeFN: struct{}{},
		// 		},
		// 	},
		// 	expect: assert.Error,
		// },
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()
			test.expect(t, utils.ValidateGroupsRestoreFlags(test.backupID, test.opts))
		})
	}
}

30
src/cli/utils/teams.go Normal file
View File

@ -0,0 +1,30 @@
package utils
import (
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
)
// TeamsOpts holds the flag values relevant to a teams-service command.
type TeamsOpts struct {
	// Teams lists the --team flag values (ids or names).
	Teams []string

	RestoreCfg RestoreCfgOpts
	ExportCfg  ExportCfgOpts

	Populated flags.PopulatedFlags
}

// MakeTeamsOpts snapshots the current teams-related flag values for cmd.
func MakeTeamsOpts(cmd *cobra.Command) TeamsOpts {
	return TeamsOpts{
		// FIX: read the --team flag value (populated by flags.AddTeamFlag),
		// not the --user flag value; UserFV was a copy/paste from the
		// user-scoped services and would always leave Teams empty here.
		Teams: flags.TeamFV,

		RestoreCfg: makeRestoreCfgOpts(cmd),
		ExportCfg:  makeExportCfgOpts(cmd),

		// populated contains the list of flags that appear in the
		// command, according to pflags. Use this to differentiate
		// between an "empty" and a "missing" value.
		Populated: flags.GetPopulatedFlags(cmd),
	}
}

View File

@ -19,6 +19,8 @@ import (
"github.com/alcionai/corso/src/pkg/storage"
)
var ErrNotYetImplemented = clues.New("not yet implemented")
func GetAccountAndConnect(
ctx context.Context,
pst path.ServiceType,

View File

@ -2,16 +2,16 @@ module github.com/alcionai/corso/src
go 1.20
replace github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20230803184432-5f2a35eade6b
replace github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20230822191057-17d4deff94a3
require (
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.1
github.com/alcionai/clues v0.0.0-20230728164842-7dc4795a43e4
github.com/armon/go-metrics v0.4.1
github.com/aws/aws-sdk-go v1.44.326
github.com/aws/aws-sdk-go v1.44.330
github.com/aws/aws-xray-sdk-go v1.8.1
github.com/cenkalti/backoff/v4 v4.2.1
github.com/google/uuid v1.3.0
github.com/google/uuid v1.3.1
github.com/h2non/gock v1.2.0
github.com/kopia/kopia v0.13.0
github.com/microsoft/kiota-abstractions-go v1.2.0
@ -19,10 +19,10 @@ require (
github.com/microsoft/kiota-http-go v1.1.0
github.com/microsoft/kiota-serialization-form-go v1.0.0
github.com/microsoft/kiota-serialization-json-go v1.0.4
github.com/microsoftgraph/msgraph-sdk-go v1.14.0
github.com/microsoftgraph/msgraph-sdk-go v1.16.0
github.com/microsoftgraph/msgraph-sdk-go-core v1.0.0
github.com/pkg/errors v0.9.1
github.com/puzpuzpuz/xsync/v2 v2.4.1
github.com/puzpuzpuz/xsync/v2 v2.5.0
github.com/rudderlabs/analytics-go v3.3.3+incompatible
github.com/spatialcurrent/go-lazy v0.0.0-20211115014721-47315cc003d1
github.com/spf13/cobra v1.7.0
@ -46,9 +46,11 @@ require (
github.com/gofrs/flock v0.8.1 // indirect
github.com/golang-jwt/jwt/v5 v5.0.0 // indirect
github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 // indirect
github.com/hashicorp/cronexpr v1.1.2 // indirect
github.com/hashicorp/go-immutable-radix v1.3.1 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/magiconair/properties v1.8.7 // indirect
github.com/microsoft/kiota-serialization-multipart-go v1.0.0 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/pelletier/go-toml/v2 v2.0.9 // indirect
github.com/spf13/afero v1.9.5 // indirect
@ -58,7 +60,7 @@ require (
github.com/valyala/bytebufferpool v1.0.0 // indirect
github.com/valyala/fasthttp v1.48.0 // indirect
go.opentelemetry.io/otel/metric v1.16.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20230803162519-f966b187b2e5 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20230807174057-1744710a1577 // indirect
)
require (

View File

@ -55,8 +55,8 @@ github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpH
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo=
github.com/alcionai/clues v0.0.0-20230728164842-7dc4795a43e4 h1:husF7eAYw2HEzgjfAmNy+ZLzyztJV2SyoUngSUo829Y=
github.com/alcionai/clues v0.0.0-20230728164842-7dc4795a43e4/go.mod h1:MLEWSZ0cjEMg6hiGCRvE7AtrOhs7deBcm7ZrJBpfGRM=
github.com/alcionai/kopia v0.12.2-0.20230803184432-5f2a35eade6b h1:pkTllM0wtHVFnHfI3vXPYh1ObD4FKo2G2G/qWqzmIfY=
github.com/alcionai/kopia v0.12.2-0.20230803184432-5f2a35eade6b/go.mod h1:WH725ws0BYpZpTkVh4uqFHHPiiJuirl1Cm73jv5RYyA=
github.com/alcionai/kopia v0.12.2-0.20230822191057-17d4deff94a3 h1:6YjRGjEZr/Bmux1XkS13Re1m1LI7VAcbFsA3PiqO2BI=
github.com/alcionai/kopia v0.12.2-0.20230822191057-17d4deff94a3/go.mod h1:u5wAx1XN07PJsO1BLBkGicwSrbmAC1biONnumSCA210=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
@ -66,8 +66,8 @@ github.com/andybalholm/brotli v1.0.5 h1:8uQZIdzKmjc/iuPu7O2ioW48L81FgatrcpfFmiq/
github.com/andybalholm/brotli v1.0.5/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA=
github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4=
github.com/aws/aws-sdk-go v1.44.326 h1:/6xD/9mKZ2RMTDfbhh9qCxw+CaTbJRvfHJ/NHPFbI38=
github.com/aws/aws-sdk-go v1.44.326/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
github.com/aws/aws-sdk-go v1.44.330 h1:kO41s8I4hRYtWSIuMc/O053wmEGfMTT8D4KtPSojUkA=
github.com/aws/aws-sdk-go v1.44.330/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
github.com/aws/aws-xray-sdk-go v1.8.1 h1:O4pXV+hnCskaamGsZnFpzHyAmgPGusBMN6i7nnsy0Fo=
github.com/aws/aws-xray-sdk-go v1.8.1/go.mod h1:wMmVYzej3sykAttNBkXQHK/+clAPWTOrPiajEk7Cp3A=
github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT8A=
@ -192,8 +192,8 @@ github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLe
github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.3.1 h1:KjJaJ9iWZ3jOFZIf1Lqf4laDRCasjl0BCmnEGxkdLb4=
github.com/google/uuid v1.3.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g=
@ -204,6 +204,8 @@ github.com/h2non/gock v1.2.0/go.mod h1:tNhoxHYW2W42cYkYb1WqzdbYIieALC99kpYr7rH/B
github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 h1:2VTzZjLZBgl62/EtslCrtky5vbi9dd7HrQPQIx6wqiw=
github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542/go.mod h1:Ow0tF8D4Kplbc8s8sSb3V2oUCygFHVp8gC3Dn6U4MNI=
github.com/hanwen/go-fuse/v2 v2.3.0 h1:t5ivNIH2PK+zw4OBul/iJjsoG9K6kXo4nMDoBpciC8A=
github.com/hashicorp/cronexpr v1.1.2 h1:wG/ZYIKT+RT3QkOdgYc+xsKWVRgnxJ1OJtjjy84fJ9A=
github.com/hashicorp/cronexpr v1.1.2/go.mod h1:P4wA0KBl9C5q2hABiMO7cp6jcIg96CDh1Efb3g1PWA4=
github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
github.com/hashicorp/go-immutable-radix v1.3.1 h1:DKHmCUm2hRBK510BaiZlwvpD40f8bJFeZnpfm2KLowc=
@ -244,7 +246,7 @@ github.com/klauspost/pgzip v1.2.6/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQ
github.com/klauspost/reedsolomon v1.11.8 h1:s8RpUW5TK4hjr+djiOpbZJB4ksx+TdYbRH7vHQpwPOY=
github.com/klauspost/reedsolomon v1.11.8/go.mod h1:4bXRN+cVzMdml6ti7qLouuYi32KHJ5MGv0Qd8a47h6A=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kopia/htmluibuild v0.0.0-20230605144737-e386b860759d h1:qvV3TN5X/RsgmckkxsKh9P7Vtf9GYy6vOPzQY1SY4qM=
github.com/kopia/htmluibuild v0.0.0-20230716183504-d78b44b3a9bd h1:Vskpc00T65HkkDSWbkiXOG5yYsgWg5LN48daUfGZ+u0=
github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
@ -281,10 +283,12 @@ github.com/microsoft/kiota-serialization-form-go v1.0.0 h1:UNdrkMnLFqUCccQZerKjb
github.com/microsoft/kiota-serialization-form-go v1.0.0/go.mod h1:h4mQOO6KVTNciMF6azi1J9QB19ujSw3ULKcSNyXXOMA=
github.com/microsoft/kiota-serialization-json-go v1.0.4 h1:5TaISWwd2Me8clrK7SqNATo0tv9seOq59y4I5953egQ=
github.com/microsoft/kiota-serialization-json-go v1.0.4/go.mod h1:rM4+FsAY+9AEpBsBzkFFis+b/LZLlNKKewuLwK9Q6Mg=
github.com/microsoft/kiota-serialization-multipart-go v1.0.0 h1:3O5sb5Zj+moLBiJympbXNaeV07K0d46IfuEd5v9+pBs=
github.com/microsoft/kiota-serialization-multipart-go v1.0.0/go.mod h1:yauLeBTpANk4L03XD985akNysG24SnRJGaveZf+p4so=
github.com/microsoft/kiota-serialization-text-go v1.0.0 h1:XOaRhAXy+g8ZVpcq7x7a0jlETWnWrEum0RhmbYrTFnA=
github.com/microsoft/kiota-serialization-text-go v1.0.0/go.mod h1:sM1/C6ecnQ7IquQOGUrUldaO5wj+9+v7G2W3sQ3fy6M=
github.com/microsoftgraph/msgraph-sdk-go v1.14.0 h1:YdhMvzu8bXcfIQGRur6NkXnv4cPOsMBJ44XjfWLOt9Y=
github.com/microsoftgraph/msgraph-sdk-go v1.14.0/go.mod h1:ccLv84FJFtwdSzYWM/HlTes5FLzkzzBsYh9kg93/WS8=
github.com/microsoftgraph/msgraph-sdk-go v1.16.0 h1:6YjL2f8PZFlJUuCoX1yJwhDFYKPtogxYr/SnKJHAHZ4=
github.com/microsoftgraph/msgraph-sdk-go v1.16.0/go.mod h1:DdshtIL3VJ3abSG6O+gmlvbc/pX7Xh7xbruLTWoRjfU=
github.com/microsoftgraph/msgraph-sdk-go-core v1.0.0 h1:7NWTfyXvOjoizW7PmxNp3+8wCKPgpODs/D1cUZ3fkAY=
github.com/microsoftgraph/msgraph-sdk-go-core v1.0.0/go.mod h1:tQb4q3YMIj2dWhhXhQSJ4ELpol931ANKzHSYK5kX1qE=
github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=
@ -342,8 +346,8 @@ github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsT
github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A=
github.com/prometheus/procfs v0.11.1 h1:xRC8Iq1yyca5ypa9n1EZnWZkt7dwcoRPQwX/5gwaUuI=
github.com/prometheus/procfs v0.11.1/go.mod h1:eesXgaPo1q7lBpVMoMy0ZOFTth9hBn4W/y0/p/ScXhY=
github.com/puzpuzpuz/xsync/v2 v2.4.1 h1:aGdE1C/HaR/QC6YAFdtZXi60Df8/qBIrs8PKrzkItcM=
github.com/puzpuzpuz/xsync/v2 v2.4.1/go.mod h1:gD2H2krq/w52MfPLE+Uy64TzJDVY7lP2znR9qmR35kU=
github.com/puzpuzpuz/xsync/v2 v2.5.0 h1:2k4qrO/orvmEXZ3hmtHqIy9XaQtPTwzMZk1+iErpE8c=
github.com/puzpuzpuz/xsync/v2 v2.5.0/go.mod h1:gD2H2krq/w52MfPLE+Uy64TzJDVY7lP2znR9qmR35kU=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis=
github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
@ -734,8 +738,8 @@ google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6D
google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto/googleapis/rpc v0.0.0-20230803162519-f966b187b2e5 h1:eSaPbMR4T7WfH9FvABk36NBMacoTUKdWCvV0dx+KfOg=
google.golang.org/genproto/googleapis/rpc v0.0.0-20230803162519-f966b187b2e5/go.mod h1:zBEcrKX2ZOcEkHWxBPAIvYUWOKKMIhYcmNiUIu2ji3I=
google.golang.org/genproto/googleapis/rpc v0.0.0-20230807174057-1744710a1577 h1:wukfNtZmZUurLN/atp2hiIeTKn7QJWIQdHzqmsOnAOk=
google.golang.org/genproto/googleapis/rpc v0.0.0-20230807174057-1744710a1577/go.mod h1:+Bk1OCOj40wS2hwAMA+aCW9ypzm63QTBBHp6lQ3p+9M=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=

View File

@ -13,10 +13,10 @@ var ErrNotFound = clues.New("not found")
type CollectionState int
const (
NewState = CollectionState(iota)
NotMovedState
MovedState
DeletedState
NewState CollectionState = 0
NotMovedState CollectionState = 1
MovedState CollectionState = 2
DeletedState CollectionState = 3
)
type FetchRestoreCollection struct {

View File

@ -115,14 +115,6 @@ func (me ManifestEntry) GetTag(key string) (string, bool) {
return v, ok
}
type snapshotManager interface {
FindManifests(
ctx context.Context,
tags map[string]string,
) ([]*manifest.EntryMetadata, error)
LoadSnapshot(ctx context.Context, id manifest.ID) (*snapshot.Manifest, error)
}
func serviceCatString(s path.ServiceType, c path.CategoryType) string {
return s.String() + c.String()
}

View File

@ -0,0 +1,191 @@
package kopia
import (
"context"
"errors"
"time"
"github.com/alcionai/clues"
"github.com/kopia/kopia/repo/manifest"
"github.com/kopia/kopia/snapshot"
"golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/store"
)
// cleanupOrphanedData uses bs and mf to lookup all models/snapshots for backups
// and deletes items that are older than nowFunc() - gcBuffer (cutoff) that are
// not "complete" backups with:
// - a backup model
// - an item data snapshot
// - a details snapshot or details model
//
// We exclude all items younger than the cutoff to add some buffer so that even
// if this is run concurrently with a backup it's not likely to delete models
// just being created. For example, if there was no buffer period and this is
// run when another corso instance has created an item data snapshot but hasn't
// yet created the details snapshot or the backup model it would result in this
// instance of corso marking the newly created item data snapshot for deletion
// because it appears orphaned.
//
// The buffer duration should be longer than the difference in creation times
// between the first item data snapshot/details/backup model made during a
// backup operation and the last.
//
// We don't have hard numbers on the time right now, but if the order of
// persistence is (item data snapshot, details snapshot, backup model) it should
// be faster than creating the snapshot itself and probably happens O(minutes)
// or O(hours) instead of O(days). Of course, that assumes a non-adversarial
// setup where things such as machine hiberation, process freezing (i.e. paused
// at the OS level), etc. don't occur.
//
// bs provides model-store access, mf locates snapshot manifests, and nowFunc
// is injected so tests can control the clock deterministically.
func cleanupOrphanedData(
	ctx context.Context,
	bs store.Storer,
	mf manifestFinder,
	gcBuffer time.Duration,
	nowFunc func() time.Time,
) error {
	// Get all snapshot manifests.
	snaps, err := mf.FindManifests(
		ctx,
		map[string]string{
			manifest.TypeLabelKey: snapshot.ManifestType,
		})
	if err != nil {
		return clues.Wrap(err, "getting snapshots")
	}

	var (
		// deets is a hash set of the ModelStoreID or snapshot IDs for backup
		// details. It contains the IDs for both legacy details stored in the model
		// store and newer details stored as a snapshot because it doesn't matter
		// what the storage format is. We only need to know the ID so we can:
		// 1. check if there's a corresponding backup for them
		// 2. delete the details if they're orphaned
		deets = map[manifest.ID]struct{}{}
		// dataSnaps is a hash set of the snapshot IDs for item data snapshots.
		dataSnaps = map[manifest.ID]struct{}{}
	)

	// Anything modified at or after this instant is left untouched.
	cutoff := nowFunc().Add(-gcBuffer)

	// Sort all the snapshots as either details snapshots or item data snapshots.
	for _, snap := range snaps {
		// Don't even try to see if this needs garbage collected because it's not
		// old enough and may correspond to an in-progress operation.
		if !cutoff.After(snap.ModTime) {
			continue
		}

		// Item data snapshots carry the backup category tag; snapshots without
		// it are treated as details snapshots.
		k, _ := makeTagKV(TagBackupCategory)
		if _, ok := snap.Labels[k]; ok {
			dataSnaps[snap.ID] = struct{}{}
			continue
		}

		deets[snap.ID] = struct{}{}
	}

	// Get all legacy backup details models. The initial version of backup delete
	// didn't seem to delete them so they may also be orphaned if the repo is old
	// enough.
	deetsModels, err := bs.GetIDsForType(ctx, model.BackupDetailsSchema, nil)
	if err != nil {
		return clues.Wrap(err, "getting legacy backup details")
	}

	for _, d := range deetsModels {
		// Don't even try to see if this needs garbage collected because it's not
		// old enough and may correspond to an in-progress operation.
		if !cutoff.After(d.ModTime) {
			continue
		}

		deets[d.ModelStoreID] = struct{}{}
	}

	// Get all backup models.
	bups, err := bs.GetIDsForType(ctx, model.BackupSchema, nil)
	if err != nil {
		return clues.Wrap(err, "getting all backup models")
	}

	// Start by assuming everything old enough is orphaned; complete backups
	// remove their items from the set in the loop below.
	toDelete := maps.Clone(deets)
	maps.Copy(toDelete, dataSnaps)

	for _, bup := range bups {
		// Don't even try to see if this needs garbage collected because it's not
		// old enough and may correspond to an in-progress operation.
		if !cutoff.After(bup.ModTime) {
			continue
		}

		toDelete[manifest.ID(bup.ModelStoreID)] = struct{}{}

		bm := backup.Backup{}

		if err := bs.GetWithModelStoreID(
			ctx,
			model.BackupSchema,
			bup.ModelStoreID,
			&bm,
		); err != nil {
			if !errors.Is(err, data.ErrNotFound) {
				return clues.Wrap(err, "getting backup model").
					With("search_backup_id", bup.ID)
			}

			// Probably safe to continue if the model wasn't found because that means
			// that the possible item data and details for the backup are now
			// orphaned. They'll be deleted since we won't remove them from the delete
			// set.
			//
			// The fact that we exclude all items younger than the cutoff should
			// already exclude items that are from concurrent corso backup operations.
			//
			// This isn't expected to really pop up, but it's possible if this
			// function is run concurrently with either a backup delete or another
			// instance of this function.
			logger.Ctx(ctx).Debugw(
				"backup model not found",
				"search_backup_id", bup.ModelStoreID)

			continue
		}

		// Older backups stored details under DetailsID instead of StreamStoreID.
		ssid := bm.StreamStoreID
		if len(ssid) == 0 {
			ssid = bm.DetailsID
		}

		_, dataOK := dataSnaps[manifest.ID(bm.SnapshotID)]
		_, deetsOK := deets[manifest.ID(ssid)]

		// All data is present, we shouldn't garbage collect this backup.
		if deetsOK && dataOK {
			delete(toDelete, bup.ModelStoreID)
			delete(toDelete, manifest.ID(bm.SnapshotID))
			delete(toDelete, manifest.ID(ssid))
		}
	}

	logger.Ctx(ctx).Infow(
		"garbage collecting orphaned items",
		"num_items", len(toDelete),
		"kopia_ids", maps.Keys(toDelete))

	// Use single atomic batch delete operation to cleanup to keep from making a
	// bunch of manifest content blobs.
	if err := bs.DeleteWithModelStoreIDs(ctx, maps.Keys(toDelete)...); err != nil {
		return clues.Wrap(err, "deleting orphaned data")
	}

	// TODO(ashmrtn): Do some pruning of assist backup models so we don't keep
	// them around forever.
	return nil
}

View File

@ -0,0 +1,566 @@
package kopia
import (
"context"
"fmt"
"testing"
"time"
"github.com/alcionai/clues"
"github.com/kopia/kopia/repo/manifest"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup"
)
// BackupCleanupUnitSuite holds unit tests for backup garbage collection
// (cleanupOrphanedData) that run without a live repository.
type BackupCleanupUnitSuite struct {
	tester.Suite
}
func TestBackupCleanupUnitSuite(t *testing.T) {
suite.Run(t, &BackupCleanupUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// mockManifestFinder implements manifestFinder for tests. FindManifests
// returns the canned manifests/err pair and asserts the tag set the caller
// searched with.
type mockManifestFinder struct {
	t         *testing.T
	manifests []*manifest.EntryMetadata
	err       error
}
// FindManifests verifies that the caller searched with the expected
// snapshot-type tags and then hands back the preconfigured results.
func (mmf mockManifestFinder) FindManifests(
	ctx context.Context,
	tags map[string]string,
) ([]*manifest.EntryMetadata, error) {
	wantTags := map[string]string{"type": "snapshot"}
	assert.Equal(mmf.t, wantTags, tags, "snapshot search tags")

	err := clues.Stack(mmf.err).OrNil()

	return mmf.manifests, err
}
// mockStorer implements store.Storer for tests. Only the listing, fetch, and
// batch-delete operations used by cleanupOrphanedData are functional; the
// remaining Storer methods return "not implemented".
type mockStorer struct {
	t *testing.T

	// details is served from GetIDsForType for BackupDetailsSchema.
	details    []*model.BaseModel
	detailsErr error

	// backups feeds both GetIDsForType (BackupSchema) and GetWithModelStoreID.
	backups       []backupRes
	backupListErr error

	// expectDeleteIDs is asserted against the IDs given to
	// DeleteWithModelStoreIDs.
	expectDeleteIDs []manifest.ID
	deleteErr       error
}
// Delete is not used by the cleanup code under test.
func (ms mockStorer) Delete(context.Context, model.Schema, model.StableID) error {
	return clues.New("not implemented")
}

// Get is not used by the cleanup code under test.
func (ms mockStorer) Get(context.Context, model.Schema, model.StableID, model.Model) error {
	return clues.New("not implemented")
}

// Put is not used by the cleanup code under test.
func (ms mockStorer) Put(context.Context, model.Schema, model.Model) error {
	return clues.New("not implemented")
}

// Update is not used by the cleanup code under test.
func (ms mockStorer) Update(context.Context, model.Schema, model.Model) error {
	return clues.New("not implemented")
}
// GetIDsForType serves the canned model listings for the two schemas the
// cleanup logic queries and fails for any other schema.
func (ms mockStorer) GetIDsForType(
	_ context.Context,
	s model.Schema,
	tags map[string]string,
) ([]*model.BaseModel, error) {
	assert.Empty(ms.t, tags, "model search tags")

	if s == model.BackupDetailsSchema {
		return ms.details, clues.Stack(ms.detailsErr).OrNil()
	}

	if s == model.BackupSchema {
		var bases []*model.BaseModel

		for _, b := range ms.backups {
			bases = append(bases, &b.bup.BaseModel)
		}

		return bases, clues.Stack(ms.backupListErr).OrNil()
	}

	return nil, clues.New(fmt.Sprintf("unknown type: %s", s.String()))
}
// GetWithModelStoreID copies the canned backup whose ModelStoreID matches id
// into m, returning that entry's configured error. Falls back to
// data.ErrNotFound when no entry matches.
func (ms mockStorer) GetWithModelStoreID(
	_ context.Context,
	s model.Schema,
	id manifest.ID,
	m model.Model,
) error {
	assert.Equal(ms.t, model.BackupSchema, s, "model get schema")

	out := m.(*backup.Backup)

	for _, b := range ms.backups {
		if b.bup.ModelStoreID != id {
			continue
		}

		*out = *b.bup

		return clues.Stack(b.err).OrNil()
	}

	return clues.Stack(data.ErrNotFound)
}
// DeleteWithModelStoreIDs asserts the deleted ID set matches expectations
// (order-insensitive) and returns the configured delete error.
func (ms mockStorer) DeleteWithModelStoreIDs(
	_ context.Context,
	ids ...manifest.ID,
) error {
	assert.ElementsMatch(ms.t, ms.expectDeleteIDs, ids, "model delete IDs")
	return clues.Stack(ms.deleteErr).OrNil()
}
// backupRes represents an individual return value for an item in GetIDsForType
// or the result of GetWithModelStoreID. err is used for GetWithModelStoreID
// only.
type backupRes struct {
	// bup is the backup model returned for this entry.
	bup *backup.Backup
	// err is returned by GetWithModelStoreID when this entry is fetched.
	err error
}
// TestCleanupOrphanedData exercises cleanupOrphanedData against fabricated
// combinations of item data snapshots, details snapshots/models, and backup
// models, covering: fully complete backups (no-op), backups with missing
// pieces (cleanup), error paths from the Storer/manifest finder, and the
// age-buffer cutoff behavior.
func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
	backupTag, _ := makeTagKV(TagBackupCategory)

	// Factory funcs below return fresh copies per call so table entries can't
	// accidentally share (and mutate) state.

	// Current backup and snapshots.
	bupCurrent := func() *backup.Backup {
		return &backup.Backup{
			BaseModel: model.BaseModel{
				ID:           model.StableID("current-bup-id"),
				ModelStoreID: manifest.ID("current-bup-msid"),
			},
			SnapshotID:    "current-snap-msid",
			StreamStoreID: "current-deets-msid",
		}
	}

	snapCurrent := func() *manifest.EntryMetadata {
		return &manifest.EntryMetadata{
			ID: "current-snap-msid",
			Labels: map[string]string{
				backupTag: "0",
			},
		}
	}

	deetsCurrent := func() *manifest.EntryMetadata {
		return &manifest.EntryMetadata{
			ID: "current-deets-msid",
		}
	}

	// Legacy backup with details in separate model.
	bupLegacy := func() *backup.Backup {
		return &backup.Backup{
			BaseModel: model.BaseModel{
				ID:           model.StableID("legacy-bup-id"),
				ModelStoreID: manifest.ID("legacy-bup-msid"),
			},
			SnapshotID: "legacy-snap-msid",
			DetailsID:  "legacy-deets-msid",
		}
	}

	snapLegacy := func() *manifest.EntryMetadata {
		return &manifest.EntryMetadata{
			ID: "legacy-snap-msid",
			Labels: map[string]string{
				backupTag: "0",
			},
		}
	}

	deetsLegacy := func() *model.BaseModel {
		return &model.BaseModel{
			ID:           "legacy-deets-id",
			ModelStoreID: "legacy-deets-msid",
		}
	}

	// Incomplete backup missing data snapshot.
	bupNoSnapshot := func() *backup.Backup {
		return &backup.Backup{
			BaseModel: model.BaseModel{
				ID:           model.StableID("ns-bup-id"),
				ModelStoreID: manifest.ID("ns-bup-id-msid"),
			},
			StreamStoreID: "ns-deets-msid",
		}
	}

	deetsNoSnapshot := func() *manifest.EntryMetadata {
		return &manifest.EntryMetadata{
			ID: "ns-deets-msid",
		}
	}

	// Legacy incomplete backup missing data snapshot.
	bupLegacyNoSnapshot := func() *backup.Backup {
		return &backup.Backup{
			BaseModel: model.BaseModel{
				ID:           model.StableID("ns-legacy-bup-id"),
				ModelStoreID: manifest.ID("ns-legacy-bup-id-msid"),
			},
			DetailsID: "ns-legacy-deets-msid",
		}
	}

	deetsLegacyNoSnapshot := func() *model.BaseModel {
		return &model.BaseModel{
			ID:           "ns-legacy-deets-id",
			ModelStoreID: "ns-legacy-deets-msid",
		}
	}

	// Incomplete backup missing details.
	bupNoDetails := func() *backup.Backup {
		return &backup.Backup{
			BaseModel: model.BaseModel{
				ID:           model.StableID("nssid-bup-id"),
				ModelStoreID: manifest.ID("nssid-bup-msid"),
			},
			SnapshotID: "nssid-snap-msid",
		}
	}

	snapNoDetails := func() *manifest.EntryMetadata {
		return &manifest.EntryMetadata{
			ID: "nssid-snap-msid",
			Labels: map[string]string{
				backupTag: "0",
			},
		}
	}

	// Get some stable time so that we can do everything relative to this in the
	// tests. Mostly just makes reasoning/viewing times easier because the only
	// differences will be the changes we make.
	baseTime := time.Now()

	manifestWithTime := func(
		mt time.Time,
		m *manifest.EntryMetadata,
	) *manifest.EntryMetadata {
		res := *m
		res.ModTime = mt

		return &res
	}

	backupWithTime := func(mt time.Time, b *backup.Backup) *backup.Backup {
		res := *b
		res.ModTime = mt

		return &res
	}

	table := []struct {
		name             string
		snapshots        []*manifest.EntryMetadata
		snapshotFetchErr error
		// only need BaseModel here since we never look inside the details items.
		detailsModels       []*model.BaseModel
		detailsModelListErr error
		backups             []backupRes
		backupListErr       error
		deleteErr           error
		// time is what nowFunc reports; buffer is the gcBuffer passed in.
		time   time.Time
		buffer time.Duration
		// expectDeleteIDs is the exact set of IDs expected to be deleted.
		expectDeleteIDs []manifest.ID
		expectErr       assert.ErrorAssertionFunc
	}{
		{
			name:      "EmptyRepo",
			time:      baseTime,
			expectErr: assert.NoError,
		},
		{
			name: "OnlyCompleteBackups Noops",
			snapshots: []*manifest.EntryMetadata{
				snapCurrent(),
				deetsCurrent(),
				snapLegacy(),
			},
			detailsModels: []*model.BaseModel{
				deetsLegacy(),
			},
			backups: []backupRes{
				{bup: bupCurrent()},
				{bup: bupLegacy()},
			},
			time:      baseTime,
			expectErr: assert.NoError,
		},
		{
			name: "MissingFieldsInBackup CausesCleanup",
			snapshots: []*manifest.EntryMetadata{
				snapNoDetails(),
				deetsNoSnapshot(),
			},
			detailsModels: []*model.BaseModel{
				deetsLegacyNoSnapshot(),
			},
			backups: []backupRes{
				{bup: bupNoSnapshot()},
				{bup: bupLegacyNoSnapshot()},
				{bup: bupNoDetails()},
			},
			expectDeleteIDs: []manifest.ID{
				manifest.ID(bupNoSnapshot().ModelStoreID),
				manifest.ID(bupLegacyNoSnapshot().ModelStoreID),
				manifest.ID(bupNoDetails().ModelStoreID),
				manifest.ID(deetsLegacyNoSnapshot().ModelStoreID),
				snapNoDetails().ID,
				deetsNoSnapshot().ID,
			},
			time:      baseTime,
			expectErr: assert.NoError,
		},
		{
			name: "MissingSnapshot CausesCleanup",
			snapshots: []*manifest.EntryMetadata{
				deetsCurrent(),
			},
			detailsModels: []*model.BaseModel{
				deetsLegacy(),
			},
			backups: []backupRes{
				{bup: bupCurrent()},
				{bup: bupLegacy()},
			},
			expectDeleteIDs: []manifest.ID{
				manifest.ID(bupCurrent().ModelStoreID),
				deetsCurrent().ID,
				manifest.ID(bupLegacy().ModelStoreID),
				manifest.ID(deetsLegacy().ModelStoreID),
			},
			time:      baseTime,
			expectErr: assert.NoError,
		},
		{
			name: "MissingDetails CausesCleanup",
			snapshots: []*manifest.EntryMetadata{
				snapCurrent(),
				snapLegacy(),
			},
			backups: []backupRes{
				{bup: bupCurrent()},
				{bup: bupLegacy()},
			},
			expectDeleteIDs: []manifest.ID{
				manifest.ID(bupCurrent().ModelStoreID),
				manifest.ID(bupLegacy().ModelStoreID),
				snapCurrent().ID,
				snapLegacy().ID,
			},
			time:      baseTime,
			expectErr: assert.NoError,
		},
		// Tests with various errors from Storer.
		{
			name:             "SnapshotsListError Fails",
			snapshotFetchErr: assert.AnError,
			backups: []backupRes{
				{bup: bupCurrent()},
			},
			expectErr: assert.Error,
		},
		{
			name: "LegacyDetailsListError Fails",
			snapshots: []*manifest.EntryMetadata{
				snapCurrent(),
			},
			detailsModelListErr: assert.AnError,
			backups: []backupRes{
				{bup: bupCurrent()},
			},
			time:      baseTime,
			expectErr: assert.Error,
		},
		{
			name: "BackupIDsListError Fails",
			snapshots: []*manifest.EntryMetadata{
				snapCurrent(),
				deetsCurrent(),
			},
			backupListErr: assert.AnError,
			time:          baseTime,
			expectErr:     assert.Error,
		},
		{
			name: "BackupModelGetErrorNotFound CausesCleanup",
			snapshots: []*manifest.EntryMetadata{
				snapCurrent(),
				deetsCurrent(),
				snapLegacy(),
				snapNoDetails(),
			},
			detailsModels: []*model.BaseModel{
				deetsLegacy(),
			},
			backups: []backupRes{
				{bup: bupCurrent()},
				{
					bup: bupLegacy(),
					err: data.ErrNotFound,
				},
				{
					bup: bupNoDetails(),
					err: data.ErrNotFound,
				},
			},
			// Backup IDs are still included in here because they're added to the
			// deletion set prior to attempting to fetch models. The model store
			// delete operation should ignore missing models though so there's no
			// issue.
			expectDeleteIDs: []manifest.ID{
				snapLegacy().ID,
				manifest.ID(deetsLegacy().ModelStoreID),
				manifest.ID(bupLegacy().ModelStoreID),
				snapNoDetails().ID,
				manifest.ID(bupNoDetails().ModelStoreID),
			},
			time:      baseTime,
			expectErr: assert.NoError,
		},
		{
			name: "BackupModelGetError Fails",
			snapshots: []*manifest.EntryMetadata{
				snapCurrent(),
				deetsCurrent(),
				snapLegacy(),
				snapNoDetails(),
			},
			detailsModels: []*model.BaseModel{
				deetsLegacy(),
			},
			backups: []backupRes{
				{bup: bupCurrent()},
				{
					bup: bupLegacy(),
					err: assert.AnError,
				},
				{bup: bupNoDetails()},
			},
			time:      baseTime,
			expectErr: assert.Error,
		},
		{
			name: "DeleteError Fails",
			snapshots: []*manifest.EntryMetadata{
				snapCurrent(),
				deetsCurrent(),
				snapLegacy(),
				snapNoDetails(),
			},
			detailsModels: []*model.BaseModel{
				deetsLegacy(),
			},
			backups: []backupRes{
				{bup: bupCurrent()},
				{bup: bupLegacy()},
				{bup: bupNoDetails()},
			},
			expectDeleteIDs: []manifest.ID{
				snapNoDetails().ID,
				manifest.ID(bupNoDetails().ModelStoreID),
			},
			deleteErr: assert.AnError,
			time:      baseTime,
			expectErr: assert.Error,
		},
		// Tests dealing with buffer times.
		{
			name: "MissingSnapshot BarelyTooYoungForCleanup Noops",
			snapshots: []*manifest.EntryMetadata{
				manifestWithTime(baseTime, deetsCurrent()),
			},
			backups: []backupRes{
				{bup: backupWithTime(baseTime, bupCurrent())},
			},
			time:      baseTime.Add(24 * time.Hour),
			buffer:    24 * time.Hour,
			expectErr: assert.NoError,
		},
		{
			name: "MissingSnapshot BarelyOldEnough CausesCleanup",
			snapshots: []*manifest.EntryMetadata{
				manifestWithTime(baseTime, deetsCurrent()),
			},
			backups: []backupRes{
				{bup: backupWithTime(baseTime, bupCurrent())},
			},
			expectDeleteIDs: []manifest.ID{
				deetsCurrent().ID,
				manifest.ID(bupCurrent().ModelStoreID),
			},
			time:      baseTime.Add((24 * time.Hour) + time.Second),
			buffer:    24 * time.Hour,
			expectErr: assert.NoError,
		},
		{
			name: "BackupGetErrorNotFound TooYoung Noops",
			snapshots: []*manifest.EntryMetadata{
				manifestWithTime(baseTime, snapCurrent()),
				manifestWithTime(baseTime, deetsCurrent()),
			},
			backups: []backupRes{
				{
					bup: backupWithTime(baseTime, bupCurrent()),
					err: data.ErrNotFound,
				},
			},
			time:      baseTime,
			buffer:    24 * time.Hour,
			expectErr: assert.NoError,
		},
	}

	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			mbs := mockStorer{
				t:               t,
				details:         test.detailsModels,
				detailsErr:      test.detailsModelListErr,
				backups:         test.backups,
				backupListErr:   test.backupListErr,
				expectDeleteIDs: test.expectDeleteIDs,
				deleteErr:       test.deleteErr,
			}

			mmf := mockManifestFinder{
				t:         t,
				manifests: test.snapshots,
				err:       test.snapshotFetchErr,
			}

			err := cleanupOrphanedData(
				ctx,
				mbs,
				mmf,
				test.buffer,
				func() time.Time { return test.time })
			test.expectErr(t, err, clues.ToCore(err))
		})
	}
}

View File

@ -52,9 +52,26 @@ var (
}
)
type snapshotLoader interface {
SnapshotRoot(man *snapshot.Manifest) (fs.Entry, error)
}
type (
manifestFinder interface {
FindManifests(
ctx context.Context,
tags map[string]string,
) ([]*manifest.EntryMetadata, error)
}
snapshotManager interface {
manifestFinder
LoadSnapshot(
ctx context.Context,
id manifest.ID,
) (*snapshot.Manifest, error)
}
snapshotLoader interface {
SnapshotRoot(man *snapshot.Manifest) (fs.Entry, error)
}
)
var (
_ snapshotManager = &conn{}

View File

@ -210,6 +210,7 @@ func (ms ModelStore) populateBaseModelFromMetadata(
base.ID = model.StableID(id)
base.ModelVersion = v
base.Tags = m.Labels
base.ModTime = m.ModTime
stripHiddenTags(base.Tags)

View File

@ -4,6 +4,7 @@ import (
"context"
"sync"
"testing"
"time"
"github.com/alcionai/clues"
"github.com/google/uuid"
@ -34,6 +35,18 @@ func getModelStore(t *testing.T, ctx context.Context) *ModelStore {
return &ModelStore{c: c, modelVersion: globalModelVersion}
}
func assertEqualNoModTime(t *testing.T, expected, got *fooModel) {
t.Helper()
expectedClean := *expected
gotClean := *got
expectedClean.ModTime = time.Time{}
gotClean.ModTime = time.Time{}
assert.Equal(t, expectedClean, gotClean)
}
// ---------------
// unit tests
// ---------------
@ -259,6 +272,8 @@ func (suite *ModelStoreIntegrationSuite) TestPutGet() {
// Avoid some silly test errors from comparing nil to empty map.
foo.Tags = map[string]string{}
startTime := time.Now()
err := suite.m.Put(suite.ctx, test.s, foo)
test.check(t, err, clues.ToCore(err))
@ -273,11 +288,17 @@ func (suite *ModelStoreIntegrationSuite) TestPutGet() {
returned := &fooModel{}
err = suite.m.Get(suite.ctx, test.s, foo.ID, returned)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned)
assertEqualNoModTime(t, foo, returned)
assert.WithinDuration(t, startTime, returned.ModTime, 5*time.Second)
returned = &fooModel{}
err = suite.m.GetWithModelStoreID(suite.ctx, test.s, foo.ModelStoreID, returned)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned)
assertEqualNoModTime(t, foo, returned)
assert.WithinDuration(t, startTime, returned.ModTime, 5*time.Second)
})
}
}
@ -324,11 +345,11 @@ func (suite *ModelStoreIntegrationSuite) TestPutGet_PreSetID() {
err = suite.m.Get(suite.ctx, mdl, foo.ID, returned)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned)
assertEqualNoModTime(t, foo, returned)
err = suite.m.GetWithModelStoreID(suite.ctx, mdl, foo.ModelStoreID, returned)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned)
assertEqualNoModTime(t, foo, returned)
})
}
}
@ -350,11 +371,11 @@ func (suite *ModelStoreIntegrationSuite) TestPutGet_WithTags() {
returned := &fooModel{}
err = suite.m.Get(suite.ctx, theModelType, foo.ID, returned)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned)
assertEqualNoModTime(t, foo, returned)
err = suite.m.GetWithModelStoreID(suite.ctx, theModelType, foo.ModelStoreID, returned)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned)
assertEqualNoModTime(t, foo, returned)
}
func (suite *ModelStoreIntegrationSuite) TestGet_NotFoundErrors() {
@ -559,7 +580,16 @@ func (suite *ModelStoreIntegrationSuite) TestGetOfTypeWithTags() {
ids, err := suite.m.GetIDsForType(suite.ctx, test.s, test.tags)
require.NoError(t, err, clues.ToCore(err))
assert.ElementsMatch(t, expected, ids)
cleanIDs := make([]*model.BaseModel, 0, len(ids))
for _, id := range ids {
id2 := *id
id2.ModTime = time.Time{}
cleanIDs = append(cleanIDs, &id2)
}
assert.ElementsMatch(t, expected, cleanIDs)
})
}
}
@ -627,7 +657,7 @@ func (suite *ModelStoreIntegrationSuite) TestPutUpdate() {
err = m.GetWithModelStoreID(ctx, theModelType, foo.ModelStoreID, returned)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned)
assertEqualNoModTime(t, foo, returned)
ids, err := m.GetIDsForType(ctx, theModelType, nil)
require.NoError(t, err, clues.ToCore(err))
@ -822,7 +852,7 @@ func (suite *ModelStoreRegressionSuite) TestFailDuringWriteSessionHasNoVisibleEf
err = m.GetWithModelStoreID(ctx, theModelType, foo.ModelStoreID, returned)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, foo, returned)
assertEqualNoModTime(t, foo, returned)
}
func openConnAndModelStore(

View File

@ -25,6 +25,7 @@ import (
"github.com/alcionai/corso/src/internal/data"
dataMock "github.com/alcionai/corso/src/internal/data/mock"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
m365Mock "github.com/alcionai/corso/src/internal/m365/mock"
exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details"
@ -1128,10 +1129,10 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_NoDetailsForMeta() {
streams = append(streams, ms)
}
mc := &mockBackupCollection{
path: storePath,
loc: locPath,
streams: streams,
mc := &m365Mock.BackupCollection{
Path: storePath,
Loc: locPath,
Streams: streams,
}
return []data.BackupCollection{mc}
@ -1155,11 +1156,11 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_NoDetailsForMeta() {
ItemInfo: details.ItemInfo{OneDrive: &info},
}
mc := &mockBackupCollection{
path: storePath,
loc: locPath,
streams: []data.Item{ms},
state: data.NotMovedState,
mc := &m365Mock.BackupCollection{
Path: storePath,
Loc: locPath,
Streams: []data.Item{ms},
CState: data.NotMovedState,
}
return []data.BackupCollection{mc}
@ -1293,48 +1294,6 @@ func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() {
testForFiles(t, ctx, expected, result)
}
// TODO(pandeyabs): Switch to m365/mock/BackupCollection.
type mockBackupCollection struct {
path path.Path
loc *path.Builder
streams []data.Item
state data.CollectionState
}
func (c *mockBackupCollection) Items(context.Context, *fault.Bus) <-chan data.Item {
res := make(chan data.Item)
go func() {
defer close(res)
for _, s := range c.streams {
res <- s
}
}()
return res
}
func (c mockBackupCollection) FullPath() path.Path {
return c.path
}
func (c mockBackupCollection) PreviousPath() path.Path {
return c.path
}
func (c mockBackupCollection) LocationPath() *path.Builder {
return c.loc
}
func (c mockBackupCollection) State() data.CollectionState {
return c.state
}
func (c mockBackupCollection) DoNotMergeItems() bool {
return false
}
func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
t := suite.T()
@ -1343,10 +1302,10 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
r := NewReason(testTenant, testUser, path.ExchangeService, path.EmailCategory)
collections := []data.BackupCollection{
&mockBackupCollection{
path: suite.storePath1,
loc: loc1,
streams: []data.Item{
&m365Mock.BackupCollection{
Path: suite.storePath1,
Loc: loc1,
Streams: []data.Item{
&dataMock.Item{
ItemID: testFileName,
Reader: io.NopCloser(bytes.NewReader(testFileData)),
@ -1359,10 +1318,10 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
},
},
},
&mockBackupCollection{
path: suite.storePath2,
loc: loc2,
streams: []data.Item{
&m365Mock.BackupCollection{
Path: suite.storePath2,
Loc: loc2,
Streams: []data.Item{
&dataMock.Item{
ItemID: testFileName3,
Reader: io.NopCloser(bytes.NewReader(testFileData3)),
@ -1603,11 +1562,11 @@ func (suite *KopiaSimpleRepoIntegrationSuite) SetupTest() {
for _, parent := range []path.Path{suite.testPath1, suite.testPath2} {
loc := path.Builder{}.Append(parent.Folders()...)
collection := &mockBackupCollection{path: parent, loc: loc}
collection := &m365Mock.BackupCollection{Path: parent, Loc: loc}
for _, item := range suite.files[parent.String()] {
collection.streams = append(
collection.streams,
collection.Streams = append(
collection.Streams,
&dataMock.Item{
ItemID: item.itemPath.Item(),
Reader: io.NopCloser(bytes.NewReader(item.data)),

View File

@ -10,6 +10,7 @@ import (
"github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/service/exchange"
"github.com/alcionai/corso/src/internal/m365/service/groups"
"github.com/alcionai/corso/src/internal/m365/service/onedrive"
"github.com/alcionai/corso/src/internal/m365/service/sharepoint"
"github.com/alcionai/corso/src/internal/operations/inject"
@ -116,6 +117,18 @@ func (ctrl *Controller) ProduceBackupCollections(
return nil, nil, false, err
}
case path.GroupsService:
colls, ssmb, canUsePreviousBackup, err = groups.ProduceBackupCollections(
ctx,
bpc,
ctrl.AC,
ctrl.credentials,
ctrl.UpdateStatus,
errs)
if err != nil {
return nil, nil, false, err
}
default:
return nil, nil, false, clues.Wrap(clues.New(service.String()), "service not supported").WithClues(ctx)
}
@ -143,6 +156,17 @@ func (ctrl *Controller) IsBackupRunnable(
service path.ServiceType,
resourceOwner string,
) (bool, error) {
if service == path.GroupsService {
_, err := ctrl.AC.Groups().GetByID(ctx, resourceOwner)
if err != nil {
// TODO(meain): check for error message in case groups are
// not enabled at all similar to sharepoint
return false, err
}
return true, nil
}
if service == path.SharePointService {
_, err := ctrl.AC.Sites().GetRoot(ctx)
if err != nil {
@ -168,7 +192,7 @@ func (ctrl *Controller) IsBackupRunnable(
return true, nil
}
func verifyBackupInputs(sels selectors.Selector, siteIDs []string) error {
func verifyBackupInputs(sels selectors.Selector, cachedIDs []string) error {
var ids []string
switch sels.Service {
@ -176,12 +200,13 @@ func verifyBackupInputs(sels selectors.Selector, siteIDs []string) error {
// Exchange and OneDrive user existence now checked in checkServiceEnabled.
return nil
case selectors.ServiceSharePoint:
ids = siteIDs
case selectors.ServiceSharePoint, selectors.ServiceGroups:
ids = cachedIDs
}
if !filters.Contains(ids).Compare(sels.ID()) {
return clues.Stack(graph.ErrResourceOwnerNotFound).With("missing_protected_resource", sels.DiscreteOwner)
return clues.Stack(graph.ErrResourceOwnerNotFound).
With("selector_protected_resource", sels.DiscreteOwner)
}
return nil
@ -197,8 +222,8 @@ func checkServiceEnabled(
service path.ServiceType,
resource string,
) (bool, bool, error) {
if service == path.SharePointService {
// No "enabled" check required for sharepoint
if service == path.SharePointService || service == path.GroupsService {
// No "enabled" check required for sharepoint or groups.
return true, true, nil
}

View File

@ -465,3 +465,85 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Lists() {
}
}
}
// ---------------------------------------------------------------------------
// CreateGroupsCollection tests
// ---------------------------------------------------------------------------
type GroupsCollectionIntgSuite struct {
tester.Suite
connector *Controller
user string
}
func TestGroupsCollectionIntgSuite(t *testing.T) {
suite.Run(t, &GroupsCollectionIntgSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tconfig.M365AcctCredEnvs}),
})
}
func (suite *GroupsCollectionIntgSuite) SetupSuite() {
ctx, flush := tester.NewContext(suite.T())
defer flush()
suite.connector = newController(ctx, suite.T(), resource.Sites, path.GroupsService)
suite.user = tconfig.M365UserID(suite.T())
tester.LogTimeOfTest(suite.T())
}
func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var (
groupID = tconfig.M365GroupID(t)
ctrl = newController(ctx, t, resource.Groups, path.GroupsService)
groupIDs = []string{groupID}
)
id, name, err := ctrl.PopulateProtectedResourceIDAndName(ctx, groupID, nil)
require.NoError(t, err, clues.ToCore(err))
sel := selectors.NewGroupsBackup(groupIDs)
// TODO(meain): make use of selectors
sel.Include(sel.LibraryFolders([]string{"test"}, selectors.PrefixMatch()))
sel.SetDiscreteOwnerIDName(id, name)
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(),
ProtectedResource: inMock.NewProvider(id, name),
Selector: sel.Selector,
}
collections, excludes, canUsePreviousBackup, err := ctrl.ProduceBackupCollections(
ctx,
bpc,
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
assert.True(t, canUsePreviousBackup, "can use previous backup")
// No excludes yet as this isn't an incremental backup.
assert.True(t, excludes.Empty())
// we don't know an exact count of drives this will produce,
// but it should be more than one.
assert.Greater(t, len(collections), 1)
for _, coll := range collections {
for object := range coll.Items(ctx, fault.New(true)) {
buf := &bytes.Buffer{}
_, err := buf.ReadFrom(object.ToReader())
assert.NoError(t, err, "reading item", clues.ToCore(err))
}
}
status := ctrl.Wait()
assert.NotZero(t, status.Successes)
t.Log(status.String())
}

View File

@ -31,13 +31,13 @@ type collectionScope int
const (
// CollectionScopeUnknown is used when we don't know and don't need
// to know the kind, like in the case of deletes
CollectionScopeUnknown collectionScope = iota
CollectionScopeUnknown collectionScope = 0
// CollectionScopeFolder is used for regular folder collections
CollectionScopeFolder
CollectionScopeFolder collectionScope = 1
// CollectionScopePackage is used to represent OneNote items
CollectionScopePackage
CollectionScopePackage collectionScope = 2
)
const restrictedDirectory = "Site Pages"
@ -471,7 +471,7 @@ func (c *Collections) addURLCacheToDriveCollections(
driveID,
prevDelta,
urlCacheRefreshInterval,
c.handler.NewItemPager(driveID, "", api.DriveItemSelectDefault()),
c.handler.NewItemPager(driveID, "", api.DriveItemSelectURLCache()),
errs)
if err != nil {
return err

View File

@ -0,0 +1,48 @@
package drive
import (
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// Compile-time check that groupBackupHandler satisfies BackupHandler.
var _ BackupHandler = &groupBackupHandler{}

// groupBackupHandler backs up the drive libraries owned by an M365 group.
// It embeds libraryBackupHandler for the shared drive mechanics and layers
// group-specific path and scope behavior on top.
type groupBackupHandler struct {
	libraryBackupHandler
	groupID string
	scope   selectors.GroupsScope
}
func NewGroupBackupHandler(groupID string, ac api.Drives, scope selectors.GroupsScope) groupBackupHandler {
return groupBackupHandler{
libraryBackupHandler{
ac: ac,
// Not adding scope here. Anything that needs scope has to
// be from group handler
service: path.GroupsService,
},
groupID,
scope,
}
}
// CanonicalPath builds the data-layer storage path for the given folder
// hierarchy under the Libraries category, rooted at this handler's group.
//
// NOTE(review): resourceOwner is intentionally ignored in favor of
// h.groupID — confirm callers expect the group ID (not the site ID) as
// the path's resource segment.
func (h groupBackupHandler) CanonicalPath(
	folders *path.Builder,
	tenantID, resourceOwner string,
) (path.Path, error) {
	// TODO(meain): path fixes
	return folders.ToDataLayerPath(tenantID, h.groupID, h.service, path.LibrariesCategory, false)
}
// ServiceCat identifies this handler as the Groups service,
// Libraries category.
func (h groupBackupHandler) ServiceCat() (path.ServiceType, path.CategoryType) {
	return path.GroupsService, path.LibrariesCategory
}

// IsAllPass reports whether the scope matches any library folder,
// i.e. no per-folder filtering is required.
func (h groupBackupHandler) IsAllPass() bool {
	return h.scope.IsAny(selectors.GroupsLibraryFolder)
}

// IncludesDir reports whether the given directory passes the scope's
// library-folder filter.
func (h groupBackupHandler) IncludesDir(dir string) bool {
	return h.scope.Matches(selectors.GroupsLibraryFolder, dir)
}

View File

@ -0,0 +1,112 @@
package drive
import (
"strings"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/path"
)
// augmentItemInfo fills in the service-specific info struct (OneDrive,
// SharePoint, or Groups) on dii from the given drive item's metadata,
// and attaches an empty extension-data holder.  Services other than
// those three leave dii's service info untouched.
func augmentItemInfo(
	dii details.ItemInfo,
	service path.ServiceType,
	item models.DriveItemable,
	size int64,
	parentPath *path.Builder,
) details.ItemInfo {
	var driveName, siteID, driveID, weburl, creatorEmail string

	// TODO: we rely on this info for details/restore lookups,
	// so if it's nil we have an issue, and will need an alternative
	// way to source the data.
	if item.GetCreatedBy() != nil && item.GetCreatedBy().GetUser() != nil {
		// User is sometimes not available when created via some
		// external applications (like backup/restore solutions)
		additionalData := item.GetCreatedBy().GetUser().GetAdditionalData()

		ed, ok := additionalData["email"]
		if !ok {
			ed = additionalData["displayName"]
		}

		// Defensive extraction: additional-data values are typed as any.
		// The previous unchecked `*ed.(*string)` would panic if the value
		// was a nil *string or held some other dynamic type (e.g. a plain
		// string).
		switch v := ed.(type) {
		case *string:
			if v != nil {
				creatorEmail = *v
			}
		case string:
			creatorEmail = v
		}
	}

	if service == path.SharePointService ||
		service == path.GroupsService {
		gsi := item.GetSharepointIds()
		if gsi != nil {
			siteID = ptr.Val(gsi.GetSiteId())
			weburl = ptr.Val(gsi.GetSiteUrl())

			// fall back to reconstructing the url from additional data
			// when graph omits it from the sharepoint IDs.
			if len(weburl) == 0 {
				weburl = constructWebURL(item.GetAdditionalData())
			}
		}
	}

	if item.GetParentReference() != nil {
		driveID = ptr.Val(item.GetParentReference().GetDriveId())
		driveName = strings.TrimSpace(ptr.Val(item.GetParentReference().GetName()))
	}

	var pps string
	if parentPath != nil {
		pps = parentPath.String()
	}

	switch service {
	case path.OneDriveService:
		dii.OneDrive = &details.OneDriveInfo{
			Created:    ptr.Val(item.GetCreatedDateTime()),
			DriveID:    driveID,
			DriveName:  driveName,
			ItemName:   ptr.Val(item.GetName()),
			ItemType:   details.OneDriveItem,
			Modified:   ptr.Val(item.GetLastModifiedDateTime()),
			Owner:      creatorEmail,
			ParentPath: pps,
			Size:       size,
		}
	case path.SharePointService:
		dii.SharePoint = &details.SharePointInfo{
			Created:    ptr.Val(item.GetCreatedDateTime()),
			DriveID:    driveID,
			DriveName:  driveName,
			ItemName:   ptr.Val(item.GetName()),
			ItemType:   details.SharePointLibrary,
			Modified:   ptr.Val(item.GetLastModifiedDateTime()),
			Owner:      creatorEmail,
			ParentPath: pps,
			SiteID:     siteID,
			Size:       size,
			WebURL:     weburl,
		}
	case path.GroupsService:
		// TODO: Add channel name and ID
		// NOTE(review): ItemType is SharePointLibrary because group files
		// live in the group's sharepoint library — confirm this is the
		// intended type for groups details entries.
		dii.Groups = &details.GroupsInfo{
			Created:    ptr.Val(item.GetCreatedDateTime()),
			DriveID:    driveID,
			DriveName:  driveName,
			ItemName:   ptr.Val(item.GetName()),
			ItemType:   details.SharePointLibrary,
			Modified:   ptr.Val(item.GetLastModifiedDateTime()),
			Owner:      creatorEmail,
			ParentPath: pps,
			SiteID:     siteID,
			Size:       size,
			WebURL:     weburl,
		}
	}

	dii.Extension = &details.ExtensionData{}

	return dii
}

View File

@ -3,13 +3,11 @@ package drive
import (
"context"
"net/http"
"strings"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/drives"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
@ -85,7 +83,7 @@ func (h itemBackupHandler) AugmentItemInfo(
size int64,
parentPath *path.Builder,
) details.ItemInfo {
return augmentItemInfo(dii, item, size, parentPath)
return augmentItemInfo(dii, path.OneDriveService, item, size, parentPath)
}
func (h itemBackupHandler) FormatDisplayPath(
@ -162,7 +160,7 @@ func (h itemRestoreHandler) AugmentItemInfo(
size int64,
parentPath *path.Builder,
) details.ItemInfo {
return augmentItemInfo(dii, item, size, parentPath)
return augmentItemInfo(dii, path.OneDriveService, item, size, parentPath)
}
func (h itemRestoreHandler) DeleteItem(
@ -236,51 +234,3 @@ func (h itemRestoreHandler) GetRootFolder(
) (models.DriveItemable, error) {
return h.ac.GetRootFolder(ctx, driveID)
}
// ---------------------------------------------------------------------------
// Common
// ---------------------------------------------------------------------------
func augmentItemInfo(
dii details.ItemInfo,
item models.DriveItemable,
size int64,
parentPath *path.Builder,
) details.ItemInfo {
var email, driveName, driveID string
if item.GetCreatedBy() != nil && item.GetCreatedBy().GetUser() != nil {
// User is sometimes not available when created via some
// external applications (like backup/restore solutions)
ed, ok := item.GetCreatedBy().GetUser().GetAdditionalData()["email"]
if ok {
email = *ed.(*string)
}
}
if item.GetParentReference() != nil {
driveID = ptr.Val(item.GetParentReference().GetDriveId())
driveName = strings.TrimSpace(ptr.Val(item.GetParentReference().GetName()))
}
var pps string
if parentPath != nil {
pps = parentPath.String()
}
dii.OneDrive = &details.OneDriveInfo{
Created: ptr.Val(item.GetCreatedDateTime()),
DriveID: driveID,
DriveName: driveName,
ItemName: ptr.Val(item.GetName()),
ItemType: details.OneDriveItem,
Modified: ptr.Val(item.GetLastModifiedDateTime()),
Owner: email,
ParentPath: pps,
Size: size,
}
dii.Extension = &details.ExtensionData{}
return dii
}

View File

@ -20,12 +20,17 @@ import (
var _ BackupHandler = &libraryBackupHandler{}
type libraryBackupHandler struct {
ac api.Drives
scope selectors.SharePointScope
ac api.Drives
scope selectors.SharePointScope
service path.ServiceType
}
func NewLibraryBackupHandler(ac api.Drives, scope selectors.SharePointScope) libraryBackupHandler {
return libraryBackupHandler{ac, scope}
func NewLibraryBackupHandler(
ac api.Drives,
scope selectors.SharePointScope,
service path.ServiceType,
) libraryBackupHandler {
return libraryBackupHandler{ac, scope, service}
}
func (h libraryBackupHandler) Get(
@ -42,7 +47,7 @@ func (h libraryBackupHandler) PathPrefix(
return path.Build(
tenantID,
resourceOwner,
path.SharePointService,
h.service,
path.LibrariesCategory,
false,
odConsts.DrivesPathDir,
@ -54,7 +59,7 @@ func (h libraryBackupHandler) CanonicalPath(
folders *path.Builder,
tenantID, resourceOwner string,
) (path.Path, error) {
return folders.ToDataLayerSharePointPath(tenantID, resourceOwner, path.LibrariesCategory, false)
return folders.ToDataLayerPath(tenantID, resourceOwner, h.service, path.LibrariesCategory, false)
}
func (h libraryBackupHandler) ServiceCat() (path.ServiceType, path.CategoryType) {
@ -81,7 +86,7 @@ func (h libraryBackupHandler) AugmentItemInfo(
size int64,
parentPath *path.Builder,
) details.ItemInfo {
return augmentLibraryItemInfo(dii, item, size, parentPath)
return augmentItemInfo(dii, h.service, item, size, parentPath)
}
// constructWebURL is a helper function for recreating the webURL
@ -128,6 +133,7 @@ func (h libraryBackupHandler) NewLocationIDer(
driveID string,
elems ...string,
) details.LocationIDer {
// TODO(meain): path related changes for groups
return details.NewSharePointLocationIDer(driveID, elems...)
}
@ -160,11 +166,12 @@ func (h libraryBackupHandler) IncludesDir(dir string) bool {
var _ RestoreHandler = &libraryRestoreHandler{}
type libraryRestoreHandler struct {
ac api.Client
ac api.Client
service path.ServiceType
}
func NewLibraryRestoreHandler(ac api.Client) libraryRestoreHandler {
return libraryRestoreHandler{ac}
func NewLibraryRestoreHandler(ac api.Client, service path.ServiceType) libraryRestoreHandler {
return libraryRestoreHandler{ac, service}
}
func (h libraryRestoreHandler) PostDrive(
@ -187,7 +194,7 @@ func (h libraryRestoreHandler) AugmentItemInfo(
size int64,
parentPath *path.Builder,
) details.ItemInfo {
return augmentLibraryItemInfo(dii, item, size, parentPath)
return augmentItemInfo(dii, h.service, item, size, parentPath)
}
func (h libraryRestoreHandler) DeleteItem(
@ -261,73 +268,3 @@ func (h libraryRestoreHandler) GetRootFolder(
) (models.DriveItemable, error) {
return h.ac.Drives().GetRootFolder(ctx, driveID)
}
// ---------------------------------------------------------------------------
// Common
// ---------------------------------------------------------------------------
func augmentLibraryItemInfo(
dii details.ItemInfo,
item models.DriveItemable,
size int64,
parentPath *path.Builder,
) details.ItemInfo {
var driveName, siteID, driveID, weburl, creatorEmail string
// TODO: we rely on this info for details/restore lookups,
// so if it's nil we have an issue, and will need an alternative
// way to source the data.
if item.GetCreatedBy() != nil && item.GetCreatedBy().GetUser() != nil {
// User is sometimes not available when created via some
// external applications (like backup/restore solutions)
additionalData := item.GetCreatedBy().GetUser().GetAdditionalData()
ed, ok := additionalData["email"]
if !ok {
ed = additionalData["displayName"]
}
if ed != nil {
creatorEmail = *ed.(*string)
}
}
gsi := item.GetSharepointIds()
if gsi != nil {
siteID = ptr.Val(gsi.GetSiteId())
weburl = ptr.Val(gsi.GetSiteUrl())
if len(weburl) == 0 {
weburl = constructWebURL(item.GetAdditionalData())
}
}
if item.GetParentReference() != nil {
driveID = ptr.Val(item.GetParentReference().GetDriveId())
driveName = strings.TrimSpace(ptr.Val(item.GetParentReference().GetName()))
}
var pps string
if parentPath != nil {
pps = parentPath.String()
}
dii.SharePoint = &details.SharePointInfo{
Created: ptr.Val(item.GetCreatedDateTime()),
DriveID: driveID,
DriveName: driveName,
ItemName: ptr.Val(item.GetName()),
ItemType: details.SharePointLibrary,
Modified: ptr.Val(item.GetLastModifiedDateTime()),
Owner: creatorEmail,
ParentPath: pps,
SiteID: siteID,
Size: size,
WebURL: weburl,
}
dii.Extension = &details.ExtensionData{}
return dii
}

View File

@ -36,7 +36,7 @@ func (suite *LibraryBackupHandlerUnitSuite) TestCanonicalPath() {
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
h := libraryBackupHandler{}
h := libraryBackupHandler{service: path.SharePointService}
p := path.Builder{}.Append("prefix")
result, err := h.CanonicalPath(p, tenantID, resourceOwner)

View File

@ -14,8 +14,8 @@ import (
type SharingMode int
const (
SharingModeCustom = SharingMode(iota)
SharingModeInherited
SharingModeCustom SharingMode = 0
SharingModeInherited SharingMode = 1
)
type GV2Type string

View File

@ -3,6 +3,7 @@ package drive
import (
"context"
"errors"
"io"
"math/rand"
"net/http"
"sync"
@ -87,6 +88,7 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
newItem(newFolderName, true),
control.Copy)
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, newFolder.GetId())
nfid := ptr.Val(newFolder.GetId())
@ -109,7 +111,7 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
// Get the previous delta to feed into url cache
prevDelta, _, _, err := collectItems(
ctx,
suite.ac.Drives().NewDriveItemDeltaPager(driveID, "", api.DriveItemSelectDefault()),
suite.ac.Drives().NewDriveItemDeltaPager(driveID, "", api.DriveItemSelectURLCache()),
suite.driveID,
"drive-name",
collectorFunc,
@ -131,10 +133,7 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
nfid,
newItem(newItemName, false),
control.Copy)
if err != nil {
// Something bad happened, skip this item
continue
}
require.NoError(t, err, clues.ToCore(err))
items = append(items, item)
}
@ -176,13 +175,23 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
nil,
nil)
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, resp)
require.NotNil(t, resp.Body)
defer func(rc io.ReadCloser) {
if rc != nil {
rc.Close()
}
}(resp.Body)
require.Equal(t, http.StatusOK, resp.StatusCode)
}(i)
}
wg.Wait()
// Validate that <= 1 delta queries were made by url cache
require.LessOrEqual(t, uc.deltaQueryCount, 1)
// Validate that exactly 1 delta query was made by url cache
require.Equal(t, 1, uc.deltaQueryCount)
}
type URLCacheUnitSuite struct {

View File

@ -75,8 +75,6 @@ func CreateCollections(
return nil, clues.Wrap(err, "filling collections")
}
foldersComplete <- struct{}{}
for _, coll := range collections {
allCollections = append(allCollections, coll)
}

View File

@ -39,8 +39,7 @@ const (
// Collection implements the interface from data.Collection
// Structure holds data for an Exchange application for a single user
type Collection struct {
// M365 user
user string // M365 user
user string
data chan data.Item
// added is a list of existing item IDs that were added to a container

View File

@ -0,0 +1,318 @@
package groups
import (
"context"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// TODO: incremental support
// multiple lines in this file are commented out so that
// we can focus on v0 backups and re-integrate them later
// for v1 incrementals.
// since these lines represent otherwise standard boilerplate,
// it's simpler to comment them for tracking than to delete
// and re-discover them later.
// CreateCollections produces the set of BackupCollections for a single
// groups-service category under the given scope.
func CreateCollections(
	ctx context.Context,
	bpc inject.BackupProducerConfig,
	handler BackupHandler,
	tenantID string,
	scope selectors.GroupsScope,
	// dps DeltaPaths,
	su support.StatusUpdater,
	errs *fault.Bus,
) ([]data.BackupCollection, error) {
	category := scope.Category().PathType()
	ctx = clues.Add(ctx, "category", category)

	qp := graph.QueryParams{
		Category:          category,
		ProtectedResource: bpc.ProtectedResource,
		TenantID:          tenantID,
	}

	catProgress := observe.MessageWithCompletion(
		ctx,
		observe.Bulletf("%s", qp.Category))
	defer close(catProgress)

	// TODO(keepers): probably shouldn't call out channels here specifically.
	// This should be a generic container handler.  But we don't need
	// to worry about that until if/when we use this code to get email
	// conversations as well.
	// Also, this should be produced by the Handler.
	// chanPager := handler.NewChannelsPager(qp.ProtectedResource.ID())

	// TODO(neha): enumerate channels
	channels := []graph.Displayable{}

	colls, err := populateCollections(
		ctx,
		qp,
		handler,
		su,
		channels,
		scope,
		// dps,
		bpc.Options,
		errs)
	if err != nil {
		return nil, clues.Wrap(err, "filling collections")
	}

	// flatten the channelID-keyed map into the result slice.
	result := make([]data.BackupCollection, 0, len(colls))
	for _, coll := range colls {
		result = append(result, coll)
	}

	return result, nil
}
// populateCollections builds one BackupCollection per channel that
// matches the scope, keyed by channel ID.  Recoverable per-channel
// failures are recorded on errs and skip only that channel; the
// aggregate failure (if any) is returned alongside the collections.
func populateCollections(
	ctx context.Context,
	qp graph.QueryParams,
	bh BackupHandler,
	statusUpdater support.StatusUpdater,
	channels []graph.Displayable,
	scope selectors.GroupsScope,
	// dps DeltaPaths,
	ctrlOpts control.Options,
	errs *fault.Bus,
) (map[string]data.BackupCollection, error) {
	// channel ID -> BackupCollection.
	channelCollections := map[string]data.BackupCollection{}

	// channel ID -> delta url or folder path lookups
	// TODO(neha/keepers): figure out if deltas are stored per channel, or per group.
	// deltaURLs = map[string]string{}
	// currPaths = map[string]string{}

	// copy of previousPaths. every channel present in the slice param
	// gets removed from this map; the remaining channels at the end of
	// the process have been deleted.
	// tombstones = makeTombstones(dps)

	logger.Ctx(ctx).Infow("filling collections")
	// , "len_deltapaths", len(dps))

	el := errs.Local()

	for _, c := range channels {
		if el.Failure() != nil {
			return nil, el.Failure()
		}

		cID := ptr.Val(c.GetId())
		// delete(tombstones, cID)

		var (
			err error
			// dp = dps[cID]
			// prevDelta = dp.Delta
			// prevPathStr = dp.Path // do not log: pii; log prevPath instead
			// prevPath path.Path
			ictx = clues.Add(
				ctx,
				"channel_id", cID)
			// "previous_delta", pii.SafeURL{
			// 	URL:           prevDelta,
			// 	SafePathElems: graph.SafeURLPathParams,
			// 	SafeQueryKeys: graph.SafeURLQueryParams,
			// })
		)

		// currPath, locPath
		// TODO(rkeepers): the handler should provide this functionality.
		// Only create a collection if the path matches the scope.
		if !includeContainer(ictx, qp, c, scope, qp.Category) {
			continue
		}

		// if len(prevPathStr) > 0 {
		// 	if prevPath, err = pathFromPrevString(prevPathStr); err != nil {
		// 		logger.CtxErr(ictx, err).Error("parsing prev path")
		// 		// if the previous path is unusable, then the delta must be, too.
		// 		prevDelta = ""
		// 	}
		// }

		// ictx = clues.Add(ictx, "previous_path", prevPath)

		// TODO: the handler should provide this implementation.
		// use ictx (not ctx) so the channel_id annotation travels with
		// any error produced while enumerating this channel's items.
		items, err := collectItems(
			ictx,
			bh.NewMessagePager(qp.ProtectedResource.ID(), ptr.Val(c.GetId())))
		if err != nil {
			el.AddRecoverable(ictx, clues.Stack(err))
			continue
		}

		// if len(newDelta.URL) > 0 {
		// 	deltaURLs[cID] = newDelta.URL
		// } else if !newDelta.Reset {
		// 	logger.Ctx(ictx).Info("missing delta url")
		// }

		var prevPath path.Path

		// TODO: retrieve from handler
		currPath, err := path.Builder{}.
			Append(ptr.Val(c.GetId())).
			ToDataLayerPath(
				qp.TenantID,
				qp.ProtectedResource.ID(),
				path.GroupsService,
				qp.Category,
				true)
		if err != nil {
			el.AddRecoverable(ictx, clues.Stack(err))
			continue
		}

		edc := NewCollection(
			qp.ProtectedResource.ID(),
			currPath,
			prevPath,
			path.Builder{}.Append(ptr.Val(c.GetDisplayName())),
			qp.Category,
			statusUpdater,
			ctrlOpts)

		channelCollections[cID] = &edc

		// TODO: handle deleted items for v1 backup.
		// // Remove any deleted IDs from the set of added IDs because items that are
		// // deleted and then restored will have a different ID than they did
		// // originally.
		// for _, remove := range removed {
		// 	delete(edc.added, remove)
		// 	edc.removed[remove] = struct{}{}
		// }

		// // add the current path for the container ID to be used in the next backup
		// // as the "previous path", for reference in case of a rename or relocation.
		// currPaths[cID] = currPath.String()

		// FIXME: normally this goes before removal, but linters
		for _, item := range items {
			edc.added[ptr.Val(item.GetId())] = struct{}{}
		}
	}

	// TODO: handle tombstones here

	logger.Ctx(ctx).Infow(
		"adding metadata collection entries",
		// "num_deltas_entries", len(deltaURLs),
		"num_paths_entries", len(channelCollections))

	// col, err := graph.MakeMetadataCollection(
	// 	qp.TenantID,
	// 	qp.ProtectedResource.ID(),
	// 	path.ExchangeService,
	// 	qp.Category,
	// 	[]graph.MetadataCollectionEntry{
	// 		graph.NewMetadataEntry(graph.PreviousPathFileName, currPaths),
	// 		graph.NewMetadataEntry(graph.DeltaURLsFileName, deltaURLs),
	// 	},
	// 	statusUpdater)
	// if err != nil {
	// 	return nil, clues.Wrap(err, "making metadata collection")
	// }

	// channelCollections["metadata"] = col

	return channelCollections, el.Failure()
}
// collectItems drains the message pager, accumulating every chat
// message across all pages.  Pagination ends when graph returns no
// nextLink.
func collectItems(
	ctx context.Context,
	pager api.ChannelMessageDeltaEnumerator,
) ([]models.ChatMessageable, error) {
	items := []models.ChatMessageable{}

	for {
		// assume delta urls here, which allows single-token consumption
		resp, err := pager.GetPage(graph.ConsumeNTokens(ctx, graph.SingleGetOrDeltaLC))
		if err != nil {
			return nil, graph.Wrap(ctx, err, "getting page")
		}

		// if graph.IsErrInvalidDelta(err) {
		// 	logger.Ctx(ctx).Infow("Invalid previous delta link", "link", prevDelta)

		// 	invalidPrevDelta = true
		// 	newPaths = map[string]string{}

		// 	pager.Reset()

		// 	continue
		// }

		msgs, err := pager.ValuesIn(resp)
		if err != nil {
			return nil, graph.Wrap(ctx, err, "getting items in page")
		}

		items = append(items, msgs...)

		nextLink, _ := api.NextAndDeltaLink(resp)

		// if len(deltaLink) > 0 {
		// 	newDeltaURL = deltaLink
		// }

		// no nextLink means we've consumed the final page.
		if len(nextLink) == 0 {
			return items, nil
		}

		logger.Ctx(ctx).Debugw("found nextLink", "next_link", nextLink)
		pager.SetNext(nextLink)
	}
}
// includeContainer reports whether the container (channel) passes the
// scope comparison and should be included in the backup.
//
// NOTE(review): the qp and category parameters are currently unused in
// the body — either wire them into the comparison or drop them once the
// selector support lands.
func includeContainer(
	ctx context.Context,
	qp graph.QueryParams,
	gd graph.Displayable,
	scope selectors.GroupsScope,
	category path.CategoryType,
) bool {
	// assume a single-level hierarchy: match on the display name alone.
	directory := ptr.Val(gd.GetDisplayName())

	// TODO(keepers): awaiting parent branch to update to main
	ok := scope.Matches(selectors.GroupsCategoryUnknown, directory)

	logger.Ctx(ctx).With(
		"included", ok,
		"scope", scope,
		"match_target", directory,
	).Debug("backup folder selection filter")

	return ok
}

View File

@ -0,0 +1,180 @@
package groups
import (
"bytes"
"context"
"io"
"time"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
)
var (
_ data.BackupCollection = &Collection{}
_ data.Item = &Item{}
_ data.ItemInfo = &Item{}
_ data.ItemModTime = &Item{}
)
const (
collectionChannelBufferSize = 1000
numberOfRetries = 4
)
// Collection is a data.BackupCollection over the messages of a single
// group channel.  Items are delivered through the items channel.
type Collection struct {
	// ID of the group (protected resource) this collection belongs to.
	protectedResource string

	// stream of items produced by this collection.
	items chan data.Item

	// added is a list of existing item IDs that were added to a container
	added map[string]struct{}
	// removed is a list of item IDs that were deleted from, or moved out, of a container
	removed map[string]struct{}

	// items itemGetterSerializer

	category      path.CategoryType
	statusUpdater support.StatusUpdater
	ctrl          control.Options

	// FullPath is the current hierarchical path used by this collection.
	fullPath path.Path

	// PrevPath is the previous hierarchical path used by this collection.
	// It may be the same as fullPath, if the folder was not renamed or
	// moved.  It will be empty on its first retrieval.
	prevPath path.Path

	// LocationPath contains the path with human-readable display names.
	// IE: "/Inbox/Important" instead of "/abcdxyz123/algha=lgkhal=t"
	locationPath *path.Builder

	state data.CollectionState

	// doNotMergeItems should only be true if the old delta token expired.
	// doNotMergeItems bool
}
// NewCollection creates a groups Collection.
// State of the collection is set as an observation of the current
// and previous paths.  If the curr path is nil, the state is assumed
// to be deleted.  If the prev path is nil, it is assumed newly created.
// If both are populated, then state is either moved (if they differ),
// or notMoved (if they match).
func NewCollection(
	protectedResource string,
	curr, prev path.Path,
	location *path.Builder,
	category path.CategoryType,
	statusUpdater support.StatusUpdater,
	ctrlOpts control.Options,
	// doNotMergeItems bool,
) Collection {
	collection := Collection{
		added:    make(map[string]struct{}, 0),
		category: category,
		ctrl:     ctrlOpts,
		items:    make(chan data.Item, collectionChannelBufferSize),
		// doNotMergeItems: doNotMergeItems,
		fullPath:          curr,
		locationPath:      location,
		prevPath:          prev,
		removed:           make(map[string]struct{}, 0),
		state:             data.StateOf(prev, curr),
		statusUpdater:     statusUpdater,
		protectedResource: protectedResource,
	}

	return collection
}
// Items returns the channel over which this collection's group channel
// messages are streamed.
//
// NOTE(review): the streamItems producer is currently commented out, so
// nothing feeds or closes the returned channel yet — consumers will
// block until that lands.  TODO confirm before enabling callers.
func (col *Collection) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item {
	// go col.streamItems(ctx, errs)
	return col.items
}

// FullPath returns the Collection's fullPath []string
func (col *Collection) FullPath() path.Path {
	return col.fullPath
}

// LocationPath produces the Collection's full path, but with display names
// instead of IDs in the folders.
func (col *Collection) LocationPath() *path.Builder {
	return col.locationPath
}

// PreviousPath returns the collection's path in the previous backup.
// TODO(ashmrtn): Fill in with previous path once the Controller compares old
// and new folder hierarchies.
func (col Collection) PreviousPath() path.Path {
	return col.prevPath
}

// State describes whether the collection is new, moved, notMoved, or
// deleted, as observed from the prev/curr path pair.
func (col Collection) State() data.CollectionState {
	return col.state
}

// DoNotMergeItems reports whether prior-backup items should be dropped
// rather than merged.
func (col Collection) DoNotMergeItems() bool {
	// TODO: depends on whether or not deltas are valid
	return true
}
// ---------------------------------------------------------------------------
// items
// ---------------------------------------------------------------------------

// Item represents a single channel message retrieved from a group.
type Item struct {
	id string

	// TODO: We may need this to be a "oneOf" of `message`, `contact`, etc.
	// going forward. Using []byte for now but I assume we'll have
	// some structured type in here (serialization to []byte can be done in `Read`)
	message []byte

	// NOTE(review): ExchangeInfo is borrowed here — see the comment; a
	// groups-specific info type should eventually replace it.
	info *details.ExchangeInfo // temporary change to bring populate function into directory

	// TODO(ashmrtn): Can probably eventually be sourced from info as there's a
	// request to provide modtime in ItemInfo structs.
	modTime time.Time

	// true if the item was marked by graph as deleted.
	deleted bool
}
// ID returns the item's unique M365 identifier.
func (i *Item) ID() string {
	return i.id
}

// ToReader exposes the serialized message bytes as a ReadCloser.
func (i *Item) ToReader() io.ReadCloser {
	return io.NopCloser(bytes.NewReader(i.message))
}

// Deleted reports whether graph marked the item as deleted.
// Uses a pointer receiver for consistency with the type's other
// methods (the interface assertions above all use *Item).
func (i *Item) Deleted() bool {
	return i.deleted
}

// Info returns the item's details metadata.
func (i *Item) Info() details.ItemInfo {
	return details.ItemInfo{Exchange: i.info}
}

// ModTime returns the item's last-modified timestamp.
func (i *Item) ModTime() time.Time {
	return i.modTime
}

// NewItem constructs an Item from its serialized bytes and metadata.
// The deleted flag defaults to false.
func NewItem(
	identifier string,
	dataBytes []byte,
	detail details.ExchangeInfo,
	modTime time.Time,
) Item {
	return Item{
		id:      identifier,
		message: dataBytes,
		info:    &detail,
		modTime: modTime,
	}
}

View File

@ -0,0 +1,33 @@
package groups
import (
"context"
"github.com/microsoft/kiota-abstractions-go/serialization"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// BackupHandler abstracts the graph-API calls needed to back up the
// channels and channel messages of a single team/group.
type BackupHandler interface {
	// GetChannelByID fetches a single channel in the team by its ID.
	GetChannelByID(
		ctx context.Context,
		teamID, channelID string,
	) (models.Channelable, error)
	// NewChannelsPager produces a pager that enumerates the team's channels.
	NewChannelsPager(
		teamID string,
	) api.ChannelDeltaEnumerator
	// GetMessageByID fetches a single message within a channel.
	GetMessageByID(
		ctx context.Context,
		teamID, channelID, itemID string,
	) (models.ChatMessageable, error)
	// NewMessagePager produces a delta pager over a channel's messages.
	NewMessagePager(
		teamID, channelID string,
	) api.ChannelMessageDeltaEnumerator
	// GetMessageReplies fetches the replies to a single message.
	GetMessageReplies(
		ctx context.Context,
		teamID, channelID, messageID string,
	) (serialization.Parsable, error)
}

View File

@ -25,10 +25,9 @@ import (
func CollectLibraries(
ctx context.Context,
bpc inject.BackupProducerConfig,
ad api.Drives,
bh drive.BackupHandler,
tenantID string,
ssmb *prefixmatcher.StringSetMatchBuilder,
scope selectors.SharePointScope,
su support.StatusUpdater,
errs *fault.Bus,
) ([]data.BackupCollection, bool, error) {
@ -37,13 +36,16 @@ func CollectLibraries(
var (
collections = []data.BackupCollection{}
colls = drive.NewCollections(
drive.NewLibraryBackupHandler(ad, scope),
bh,
tenantID,
bpc.ProtectedResource.ID(),
su,
bpc.Options)
)
// TODO(meain): backup resource owner should be group id in case
// of group sharepoint site backup. As of now, we always use
// sharepoint site ids.
odcs, canUsePreviousBackup, err := colls.Get(ctx, bpc.MetadataCollections, ssmb, errs)
if err != nil {
return nil, false, graph.Wrap(ctx, err, "getting library")
@ -59,6 +61,7 @@ func CollectPages(
bpc inject.BackupProducerConfig,
creds account.M365Config,
ac api.Client,
scope selectors.SharePointScope,
su support.StatusUpdater,
errs *fault.Bus,
) ([]data.BackupCollection, error) {
@ -105,7 +108,7 @@ func CollectPages(
collection := NewCollection(
dir,
ac,
Pages,
scope,
su,
bpc.Options)
collection.SetBetaService(betaService)
@ -122,6 +125,7 @@ func CollectLists(
bpc inject.BackupProducerConfig,
ac api.Client,
tenantID string,
scope selectors.SharePointScope,
su support.StatusUpdater,
errs *fault.Bus,
) ([]data.BackupCollection, error) {
@ -156,7 +160,7 @@ func CollectLists(
collection := NewCollection(
dir,
ac,
List,
scope,
su,
bpc.Options)
collection.AddJob(tuple.ID)

View File

@ -16,6 +16,7 @@ import (
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
@ -61,11 +62,14 @@ func (suite *SharePointPagesSuite) TestCollectPages() {
ProtectedResource: mock.NewProvider(siteID, siteID),
}
sel := selectors.NewSharePointBackup([]string{siteID})
col, err := CollectPages(
ctx,
bpc,
creds,
ac,
sel.Lists(selectors.Any())[0],
(&MockGraphService{}).UpdateStatus,
fault.New(true))
assert.NoError(t, err, clues.ToCore(err))

View File

@ -21,19 +21,23 @@ import (
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type DataCategory int
// channel sizes
const (
collectionChannelBufferSize = 50
fetchChannelSize = 5
)
//go:generate stringer -type=DataCategory
const (
collectionChannelBufferSize = 50
fetchChannelSize = 5
Unknown DataCategory = iota
List
Drive
Pages
Unknown DataCategory = 0
List DataCategory = 1
Pages DataCategory = 2
)
var (
@ -53,7 +57,7 @@ type Collection struct {
// jobs contain the SharePoint.Site.ListIDs for the associated list(s).
jobs []string
// M365 IDs of the items of this collection
category DataCategory
category path.CategoryType
client api.Sites
ctrl control.Options
betaService *betaAPI.BetaService
@ -64,7 +68,7 @@ type Collection struct {
func NewCollection(
folderPath path.Path,
ac api.Client,
category DataCategory,
scope selectors.SharePointScope,
statusUpdater support.StatusUpdater,
ctrlOpts control.Options,
) *Collection {
@ -74,7 +78,7 @@ func NewCollection(
data: make(chan data.Item, collectionChannelBufferSize),
client: ac.Sites(),
statusUpdater: statusUpdater,
category: category,
category: scope.Category().PathType(),
ctrl: ctrlOpts,
}
@ -198,9 +202,9 @@ func (sc *Collection) runPopulate(
// Switch retrieval function based on category
switch sc.category {
case List:
case path.ListsCategory:
metrics, err = sc.retrieveLists(ctx, writer, colProgress, errs)
case Pages:
case path.PagesCategory:
metrics, err = sc.retrievePages(ctx, sc.client, writer, colProgress, errs)
}

View File

@ -23,6 +23,7 @@ import (
"github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
@ -82,16 +83,18 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
dirRoot = "directory"
)
sel := selectors.NewSharePointBackup([]string{"site"})
tables := []struct {
name, itemName string
category DataCategory
scope selectors.SharePointScope
getDir func(t *testing.T) path.Path
getItem func(t *testing.T, itemName string) *Item
}{
{
name: "List",
itemName: "MockListing",
category: List,
scope: sel.Lists(selectors.Any())[0],
getDir: func(t *testing.T) path.Path {
dir, err := path.Build(
tenant,
@ -127,7 +130,7 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
{
name: "Pages",
itemName: "MockPages",
category: Pages,
scope: sel.Pages(selectors.Any())[0],
getDir: func(t *testing.T) path.Path {
dir, err := path.Build(
tenant,
@ -166,7 +169,7 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
col := NewCollection(
test.getDir(t),
suite.ac,
test.category,
test.scope,
nil,
control.DefaultOptions())
col.data <- test.getItem(t, test.itemName)

View File

@ -1,27 +0,0 @@
// Code generated by "stringer -type=DataCategory"; DO NOT EDIT.
package site
import "strconv"
func _() {
// An "invalid array index" compiler error signifies that the constant values have changed.
// Re-run the stringer command to generate them again.
var x [1]struct{}
_ = x[Unknown-2]
_ = x[List-3]
_ = x[Drive-4]
_ = x[Pages-5]
}
const _DataCategory_name = "UnknownListDrivePages"
var _DataCategory_index = [...]uint8{0, 7, 11, 16, 21}
func (i DataCategory) String() string {
i -= 2
if i < 0 || i >= DataCategory(len(_DataCategory_index)-1) {
return "DataCategory(" + strconv.FormatInt(int64(i+2), 10) + ")"
}
return _DataCategory_name[_DataCategory_index[i]:_DataCategory_index[i+1]]
}

View File

@ -41,7 +41,7 @@ func ConsumeRestoreCollections(
ctr *count.Bus,
) (*support.ControllerOperationStatus, error) {
var (
lrh = drive.NewLibraryRestoreHandler(ac)
lrh = drive.NewLibraryRestoreHandler(ac, rcc.Selector.PathService())
restoreMetrics support.CollectionMetrics
caches = drive.NewRestoreCaches(backupDriveIDNames)
el = errs.Local()

View File

@ -170,6 +170,8 @@ func getResourceClient(rc resource.Category, ac api.Client) (*resourceClient, er
return &resourceClient{enum: rc, getter: ac.Users()}, nil
case resource.Sites:
return &resourceClient{enum: rc, getter: ac.Sites()}, nil
case resource.Groups:
return &resourceClient{enum: rc, getter: ac.Groups()}, nil
default:
return nil, clues.New("unrecognized owner resource enum").With("resource_enum", rc)
}

View File

@ -6,4 +6,5 @@ const (
UnknownResource Category = ""
Users Category = "users"
Sites Category = "sites"
Groups Category = "groups"
)

View File

@ -5,8 +5,12 @@ import (
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/drive"
"github.com/alcionai/corso/src/internal/m365/collection/site"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
@ -56,7 +60,35 @@ func ProduceBackupCollections(
var dbcs []data.BackupCollection
switch scope.Category().PathType() {
case path.LibrariesCategory: // TODO
case path.LibrariesCategory:
// TODO(meain): Private channels get a separate SharePoint
// site. We should also back those up and not just the
// default one.
resp, err := ac.Groups().GetRootSite(ctx, bpc.ProtectedResource.ID())
if err != nil {
return nil, nil, false, err
}
pr := idname.NewProvider(ptr.Val(resp.GetId()), ptr.Val(resp.GetName()))
sbpc := inject.BackupProducerConfig{
LastBackupVersion: bpc.LastBackupVersion,
Options: bpc.Options,
ProtectedResource: pr,
Selector: bpc.Selector,
}
dbcs, canUsePreviousBackup, err = site.CollectLibraries(
ctx,
sbpc,
drive.NewGroupBackupHandler(bpc.ProtectedResource.ID(), ac.Drives(), scope),
creds.AzureTenantID,
ssmb,
su,
errs)
if err != nil {
el.AddRecoverable(ctx, err)
continue
}
}
collections = append(collections, dbcs...)
@ -70,7 +102,7 @@ func ProduceBackupCollections(
collections,
creds.AzureTenantID,
bpc.ProtectedResource.ID(),
path.UnknownService, // path.GroupsService
path.GroupsService,
categories,
su,
errs)

View File

@ -7,6 +7,7 @@ import (
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/drive"
"github.com/alcionai/corso/src/internal/m365/collection/site"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
@ -63,6 +64,7 @@ func ProduceBackupCollections(
bpc,
ac,
creds.AzureTenantID,
scope,
su,
errs)
if err != nil {
@ -78,10 +80,9 @@ func ProduceBackupCollections(
spcs, canUsePreviousBackup, err = site.CollectLibraries(
ctx,
bpc,
ac.Drives(),
drive.NewLibraryBackupHandler(ac.Drives(), scope, bpc.Selector.PathService()),
creds.AzureTenantID,
ssmb,
scope,
su,
errs)
if err != nil {
@ -95,6 +96,7 @@ func ProduceBackupCollections(
bpc,
creds,
ac,
scope,
su,
errs)
if err != nil {

View File

@ -50,7 +50,8 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
)
pb := path.Builder{}.Append(testBaseDrivePath.Elements()...)
ep, err := drive.NewLibraryBackupHandler(api.Drives{}, nil).CanonicalPath(pb, tenantID, siteID)
ep, err := drive.NewLibraryBackupHandler(api.Drives{}, nil, path.SharePointService).
CanonicalPath(pb, tenantID, siteID)
require.NoError(suite.T(), err, clues.ToCore(err))
tests := []struct {
@ -100,7 +101,7 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
)
c := drive.NewCollections(
drive.NewLibraryBackupHandler(api.Drives{}, test.scope),
drive.NewLibraryBackupHandler(api.Drives{}, test.scope, path.SharePointService),
tenantID,
siteID,
nil,

View File

@ -33,7 +33,7 @@ func ConsumeRestoreCollections(
ctr *count.Bus,
) (*support.ControllerOperationStatus, error) {
var (
lrh = drive.NewLibraryRestoreHandler(ac)
lrh = drive.NewLibraryRestoreHandler(ac, rcc.Selector.PathService())
restoreMetrics support.CollectionMetrics
caches = drive.NewRestoreCaches(backupDriveIDNames)
el = errs.Local()

View File

@ -37,10 +37,10 @@ type Operation int
//go:generate stringer -type=Operation
const (
OpUnknown Operation = iota
Backup
Restore
Export
OpUnknown Operation = 0
Backup Operation = 1
Restore Operation = 2
Export Operation = 3
)
// Constructor for ConnectorOperationStatus. If the counts do not agree, an error is returned.

View File

@ -1,6 +1,8 @@
package model
import (
"time"
"github.com/kopia/kopia/repo/manifest"
)
@ -22,12 +24,12 @@ func (id StableID) String() string {
//
//go:generate go run golang.org/x/tools/cmd/stringer -type=Schema
const (
UnknownSchema = Schema(iota)
BackupOpSchema
RestoreOpSchema
BackupSchema
BackupDetailsSchema
RepositorySchema
UnknownSchema Schema = 0
BackupOpSchema Schema = 1
RestoreOpSchema Schema = 2
BackupSchema Schema = 3
BackupDetailsSchema Schema = 4
RepositorySchema Schema = 5
)
// common tags for filtering
@ -38,7 +40,7 @@ const (
MergeBackup = "merge-backup"
)
// Valid returns true if the ModelType value fits within the iota range.
// Valid returns true if the ModelType value fits within the const range.
func (mt Schema) Valid() bool {
return mt > 0 && mt < RepositorySchema+1
}
@ -68,7 +70,8 @@ type BaseModel struct {
// Tags associated with this model in the store to facilitate lookup. Tags in
// the struct are not serialized directly into the stored model, but are part
// of the metadata for the model.
Tags map[string]string `json:"-"`
Tags map[string]string `json:"-"`
ModTime time.Time `json:"-"`
}
func (bm *BaseModel) Base() *BaseModel {

View File

@ -559,31 +559,6 @@ func getNewPathRefs(
repoRef path.Path,
backupVersion int,
) (path.Path, *path.Builder, error) {
// Right now we can't guarantee that we have an old location in the
// previous details entry so first try a lookup without a location to see
// if it matches so we don't need to try parsing from the old entry.
//
// TODO(ashmrtn): In the future we can remove this first check as we'll be
// able to assume we always have the location in the previous entry. We'll end
// up doing some extra parsing, but it will simplify this code.
if repoRef.Service() == path.ExchangeService {
newPath, newLoc, err := dataFromBackup.GetNewPathRefs(
repoRef.ToBuilder(),
entry.Modified(),
nil)
if err != nil {
return nil, nil, clues.Wrap(err, "getting new paths")
} else if newPath == nil {
// This entry doesn't need merging.
return nil, nil, nil
} else if newLoc == nil {
return nil, nil, clues.New("unable to find new exchange location")
}
return newPath, newLoc, nil
}
// We didn't have an exact entry, so retry with a location.
locRef, err := entry.ToLocationIDer(backupVersion)
if err != nil {
return nil, nil, clues.Wrap(err, "getting previous item location")

View File

@ -606,6 +606,24 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
time1 = time.Now()
time2 = time1.Add(time.Hour)
exchangeItemPath1 = makePath(
suite.T(),
[]string{
tenant,
path.ExchangeService.String(),
ro,
path.EmailCategory.String(),
"work",
"item1",
},
true)
exchangeLocationPath1 = path.Builder{}.Append("work-display-name")
exchangePathReason1 = kopia.NewReason(
"",
exchangeItemPath1.ResourceOwner(),
exchangeItemPath1.Service(),
exchangeItemPath1.Category())
)
itemParents1, err := path.GetDriveFolderPath(itemPath1)
@ -803,6 +821,36 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
},
},
{
name: "ExchangeItemMerged",
mdm: func() *mockDetailsMergeInfoer {
res := newMockDetailsMergeInfoer()
res.add(exchangeItemPath1, exchangeItemPath1, exchangeLocationPath1)
return res
}(),
inputBackups: []kopia.BackupEntry{
{
Backup: &backup1,
Reasons: []identity.Reasoner{
exchangePathReason1,
},
},
},
populatedDetails: map[string]*details.Details{
backup1.DetailsID: {
DetailsModel: details.DetailsModel{
Entries: []details.Entry{
*makeDetailsEntry(suite.T(), exchangeItemPath1, exchangeLocationPath1, 42, false),
},
},
},
},
errCheck: assert.NoError,
expectedEntries: []*details.Entry{
makeDetailsEntry(suite.T(), exchangeItemPath1, exchangeLocationPath1, 42, false),
},
},
{
name: "ItemMergedSameLocation",
mdm: func() *mockDetailsMergeInfoer {

View File

@ -33,11 +33,11 @@ type OpStatus int
//go:generate stringer -type=OpStatus -linecomment
const (
Unknown OpStatus = iota // Status Unknown
InProgress // In Progress
Completed // Completed
Failed // Failed
NoData // No Data
Unknown OpStatus = 0 // Status Unknown
InProgress OpStatus = 1 // In Progress
Completed OpStatus = 2 // Completed
Failed OpStatus = 3 // Failed
NoData OpStatus = 4 // No Data
)
// --------------------------------------------------------------------------------

View File

@ -74,7 +74,7 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_incrementalSharePoint() {
}
grh := func(ac api.Client) drive.RestoreHandler {
return drive.NewLibraryRestoreHandler(ac)
return drive.NewLibraryRestoreHandler(ac, path.SharePointService)
}
runDriveIncrementalTest(

View File

@ -31,6 +31,7 @@ const (
TestCfgChannelID = "m365channelid"
TestCfgUserID = "m365userid"
TestCfgSecondaryUserID = "secondarym365userid"
TestCfgSecondaryGroupID = "secondarym365groupid"
TestCfgTertiaryUserID = "tertiarym365userid"
TestCfgLoadTestUserID = "loadtestm365userid"
TestCfgLoadTestOrgUsers = "loadtestm365orgusers"

View File

@ -223,11 +223,11 @@ func UnlicensedM365UserID(t *testing.T) string {
// Teams
// M365TeamsID returns a teamID string representing the m365TeamsID described
// M365TeamID returns a teamID string representing the m365TeamsID described
// by either the env var CORSO_M365_TEST_TEAM_ID, the corso_test.toml config
// file or the default value (in that order of priority). The default is a
// last-attempt fallback that will only work on alcion's testing org.
func M365TeamsID(t *testing.T) string {
func M365TeamID(t *testing.T) string {
cfg, err := ReadTestConfig()
require.NoError(t, err, "retrieving m365 team id from test configuration: %+v", clues.ToCore(err))

View File

@ -10,8 +10,8 @@ type accountProvider int
//go:generate stringer -type=accountProvider -linecomment
const (
ProviderUnknown accountProvider = iota // Unknown Provider
ProviderM365 // M365
ProviderUnknown accountProvider = 0 // Unknown Provider
ProviderM365 accountProvider = 1 // M365
)
// storage parsing errors

View File

@ -1353,7 +1353,7 @@ func (suite *DetailsUnitSuite) TestLocationIDer_FromEntry() {
expectedUniqueLoc: fmt.Sprintf(expectedExchangeUniqueLocFmt, path.EmailCategory),
},
{
name: "Exchange Email Without LocationRef Old Version Errors",
name: "Exchange Email Without LocationRef Old Version",
service: path.ExchangeService.String(),
category: path.EmailCategory.String(),
itemInfo: ItemInfo{
@ -1361,11 +1361,13 @@ func (suite *DetailsUnitSuite) TestLocationIDer_FromEntry() {
ItemType: ExchangeMail,
},
},
backupVersion: version.OneDrive7LocationRef - 1,
expectedErr: require.Error,
backupVersion: version.OneDrive7LocationRef - 1,
hasLocRef: true,
expectedErr: require.NoError,
expectedUniqueLoc: fmt.Sprintf(expectedExchangeUniqueLocFmt, path.EmailCategory),
},
{
name: "Exchange Email Without LocationRef New Version Errors",
name: "Exchange Email Without LocationRef New Version",
service: path.ExchangeService.String(),
category: path.EmailCategory.String(),
itemInfo: ItemInfo{
@ -1373,9 +1375,49 @@ func (suite *DetailsUnitSuite) TestLocationIDer_FromEntry() {
ItemType: ExchangeMail,
},
},
backupVersion: version.OneDrive7LocationRef,
hasLocRef: true,
expectedErr: require.NoError,
expectedUniqueLoc: fmt.Sprintf(expectedExchangeUniqueLocFmt, path.EmailCategory),
},
{
name: "Exchange Email Bad RepoRef Fails",
service: path.OneDriveService.String(),
category: path.EmailCategory.String(),
itemInfo: ItemInfo{
Exchange: &ExchangeInfo{
ItemType: ExchangeMail,
},
},
backupVersion: version.OneDrive7LocationRef,
expectedErr: require.Error,
},
{
name: "Exchange Event Empty LocationRef New Version Fails",
service: path.ExchangeService.String(),
category: path.EventsCategory.String(),
itemInfo: ItemInfo{
Exchange: &ExchangeInfo{
ItemType: ExchangeEvent,
},
},
backupVersion: 2,
expectedErr: require.Error,
},
{
name: "Exchange Event Empty LocationRef Old Version",
service: path.ExchangeService.String(),
category: path.EventsCategory.String(),
itemInfo: ItemInfo{
Exchange: &ExchangeInfo{
ItemType: ExchangeEvent,
},
},
backupVersion: version.OneDrive1DataAndMetaFiles,
hasLocRef: true,
expectedErr: require.NoError,
expectedUniqueLoc: fmt.Sprintf(expectedExchangeUniqueLocFmt, path.EventsCategory),
},
}
for _, test := range table {

View File

@ -56,6 +56,9 @@ type Entry struct {
// ToLocationIDer takes a backup version and produces the unique location for
// this entry if possible. Reasons it may not be possible to produce the unique
// location include an unsupported backup version or missing information.
//
// TODO(ashmrtn): Remove this function completely if we ever decide to sunset
// older corso versions that didn't populate LocationRef.
func (de Entry) ToLocationIDer(backupVersion int) (LocationIDer, error) {
if len(de.LocationRef) > 0 {
baseLoc, err := path.Builder{}.SplitUnescapeAppend(de.LocationRef)
@ -68,32 +71,44 @@ func (de Entry) ToLocationIDer(backupVersion int) (LocationIDer, error) {
return de.ItemInfo.uniqueLocation(baseLoc)
}
if backupVersion >= version.OneDrive7LocationRef ||
(de.ItemInfo.infoType() != OneDriveItem &&
de.ItemInfo.infoType() != SharePointLibrary) {
return nil, clues.New("no previous location for entry")
}
// This is a little hacky, but we only want to try to extract the old
// location if it's OneDrive or SharePoint libraries and it's known to
// be an older backup version.
//
// TODO(ashmrtn): Remove this code once OneDrive/SharePoint libraries
// LocationRef code has been out long enough that all delta tokens for
// previous backup versions will have expired. At that point, either
// we'll do a full backup (token expired, no newer backups) or have a
// backup of a higher version with the information we need.
rr, err := path.FromDataLayerPath(de.RepoRef, true)
if err != nil {
return nil, clues.Wrap(err, "getting item RepoRef")
return nil, clues.Wrap(err, "getting item RepoRef").
With("repo_ref", de.RepoRef)
}
p, err := path.ToDrivePath(rr)
if err != nil {
return nil, clues.New("converting RepoRef to drive path")
var baseLoc *path.Builder
switch de.ItemInfo.infoType() {
case ExchangeEvent:
if backupVersion >= 2 {
return nil, clues.New("no previous location for calendar entry").
With("repo_ref", rr)
}
fallthrough
case ExchangeMail, ExchangeContact:
baseLoc = path.Builder{}.Append(rr.Folders()...)
case OneDriveItem, SharePointLibrary:
if backupVersion >= version.OneDrive7LocationRef {
return nil, clues.New("no previous location for drive entry").
With("repo_ref", rr)
}
p, err := path.ToDrivePath(rr)
if err != nil {
return nil, clues.New("converting RepoRef to drive path").
With("repo_ref", rr)
}
baseLoc = path.Builder{}.Append(p.Root).Append(p.Folders...)
}
baseLoc := path.Builder{}.Append(p.Root).Append(p.Folders...)
if baseLoc == nil {
return nil, clues.New("unable to extract LocationRef from RepoRef").
With("repo_ref", rr)
}
// Individual services may add additional info to the base and return that.
return de.ItemInfo.uniqueLocation(baseLoc)

View File

@ -1,9 +1,11 @@
package details
import (
"strconv"
"time"
"github.com/alcionai/clues"
"github.com/dustin/go-humanize"
"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/pkg/path"
@ -11,39 +13,93 @@ import (
// NewGroupsLocationIDer builds a LocationIDer for the groups.
func NewGroupsLocationIDer(
category path.CategoryType,
driveID string,
escapedFolders ...string,
) uniqueLoc {
// TODO: implement
return uniqueLoc{}
) (uniqueLoc, error) {
// TODO(meain): path fixes
if err := path.ValidateServiceAndCategory(path.GroupsService, category); err != nil {
return uniqueLoc{}, clues.Wrap(err, "making groups LocationIDer")
}
pb := path.Builder{}.Append(category.String())
prefixElems := 1
if driveID != "" { // non sp paths don't have driveID
pb.Append(driveID)
prefixElems = 2
}
pb.Append(escapedFolders...)
return uniqueLoc{pb, prefixElems}, nil
}
// GroupsInfo describes a groups item
type GroupsInfo struct {
Created time.Time `json:"created,omitempty"`
DriveName string `json:"driveName,omitempty"`
DriveID string `json:"driveID,omitempty"`
ItemName string `json:"itemName,omitempty"`
ItemType ItemType `json:"itemType,omitempty"`
Modified time.Time `json:"modified,omitempty"`
Owner string `json:"owner,omitempty"`
ParentPath string `json:"parentPath,omitempty"`
Size int64 `json:"size,omitempty"`
// Channels Specific
ChannelName string `json:"channelName,omitempty"`
ChannelID string `json:"channelID,omitempty"`
LastResponseAt time.Time `json:"lastResponseAt,omitempty"`
MessageCreator string `json:"messageCreator,omitempty"`
MessagePreview string `json:"messagePreview,omitempty"`
ReplyCount int `json:"replyCount,omitempty"`
// SharePoint specific
DriveName string `json:"driveName,omitempty"`
DriveID string `json:"driveID,omitempty"`
SiteID string `json:"siteID,omitempty"`
WebURL string `json:"webURL,omitempty"`
}
// Headers returns the human-readable names of properties in a SharePointInfo
// for printing out to a terminal in a columnar display.
func (i GroupsInfo) Headers() []string {
return []string{"Created", "Modified"}
switch i.ItemType {
case SharePointLibrary:
return []string{"ItemName", "Library", "ParentPath", "Size", "Owner", "Created", "Modified"}
case TeamsChannelMessage:
return []string{"Message", "Channel", "Replies", "Creator", "Created", "Last Response"}
}
return []string{}
}
// Values returns the values matching the Headers list for printing
// out to a terminal in a columnar display.
func (i GroupsInfo) Values() []string {
return []string{
dttm.FormatToTabularDisplay(i.Created),
dttm.FormatToTabularDisplay(i.Modified),
switch i.ItemType {
case SharePointLibrary:
return []string{
i.ItemName,
i.DriveName,
i.ParentPath,
humanize.Bytes(uint64(i.Size)),
i.Owner,
dttm.FormatToTabularDisplay(i.Created),
dttm.FormatToTabularDisplay(i.Modified),
}
case TeamsChannelMessage:
return []string{
i.MessagePreview,
i.ChannelName,
strconv.Itoa(i.ReplyCount),
i.MessageCreator,
dttm.FormatToTabularDisplay(i.Created),
dttm.FormatToTabularDisplay(i.Modified),
}
}
return []string{}
}
func (i *GroupsInfo) UpdateParentPath(newLocPath *path.Builder) {
@ -51,9 +107,27 @@ func (i *GroupsInfo) UpdateParentPath(newLocPath *path.Builder) {
}
func (i *GroupsInfo) uniqueLocation(baseLoc *path.Builder) (*uniqueLoc, error) {
return nil, clues.New("not yet implemented")
var category path.CategoryType
switch i.ItemType {
case SharePointLibrary:
category = path.LibrariesCategory
if len(i.DriveID) == 0 {
return nil, clues.New("empty drive ID")
}
}
loc, err := NewGroupsLocationIDer(category, i.DriveID, baseLoc.Elements()...)
return &loc, err
}
func (i *GroupsInfo) updateFolder(f *FolderInfo) error {
return clues.New("not yet implemented")
// TODO(meain): path updates if any
if i.ItemType == SharePointLibrary {
return updateFolderWithinDrive(SharePointLibrary, i.DriveName, i.DriveID, f)
}
return clues.New("unsupported ItemType for GroupsInfo").With("item_type", i.ItemType)
}

View File

@ -20,16 +20,17 @@ type ItemType int
// Additionally, any itemType directly assigned a number should not be altered.
// This applies to OneDriveItem and FolderItem
const (
UnknownType ItemType = iota // 0, global unknown value
UnknownType ItemType = 0
// Exchange (00x)
ExchangeContact
ExchangeEvent
ExchangeMail
ExchangeContact ItemType = 1
ExchangeEvent ItemType = 2
ExchangeMail ItemType = 3
// SharePoint (10x)
SharePointLibrary ItemType = iota + 97 // 100
SharePointList // 101...
SharePointPage
SharePointLibrary ItemType = 101 // also used for groups
SharePointList ItemType = 102
SharePointPage ItemType = 103
// OneDrive (20x)
OneDriveItem ItemType = 205
@ -37,8 +38,8 @@ const (
// Folder Management(30x)
FolderItem ItemType = 306
// GroupChannelMessage(40x)
GroupChannelMessage ItemType = 407
// Groups/Teams(40x)
TeamsChannelMessage ItemType = 401
)
func UpdateItem(item *ItemInfo, newLocPath *path.Builder) {

View File

@ -25,12 +25,10 @@ type Maintenance struct {
type MaintenanceType int
// Can't be reordered as we rely on iota for numbering.
//
//go:generate stringer -type=MaintenanceType -linecomment
const (
CompleteMaintenance MaintenanceType = iota // complete
MetadataMaintenance // metadata
CompleteMaintenance MaintenanceType = 0 // complete
MetadataMaintenance MaintenanceType = 1 // metadata
)
var StringToMaintenanceType = map[string]MaintenanceType{
@ -40,16 +38,14 @@ var StringToMaintenanceType = map[string]MaintenanceType{
type MaintenanceSafety int
// Can't be reordered as we rely on iota for numbering.
//
//go:generate stringer -type=MaintenanceSafety -linecomment
const (
FullMaintenanceSafety MaintenanceSafety = iota
FullMaintenanceSafety MaintenanceSafety = 0
//nolint:lll
// Use only if there's no other kopia instances accessing the repo and the
// storage backend is strongly consistent.
// https://github.com/kopia/kopia/blob/f9de453efc198b6e993af8922f953a7e5322dc5f/repo/maintenance/maintenance_safety.go#L42
NoMaintenanceSafety
NoMaintenanceSafety MaintenanceSafety = 1
)
type RetentionMode int

View File

@ -241,6 +241,8 @@ func (pb Builder) ToStreamStorePath(
metadataService = OneDriveMetadataService
case SharePointService:
metadataService = SharePointMetadataService
case GroupsService:
metadataService = GroupsMetadataService
}
return &dataLayerResourcePath{
@ -282,6 +284,8 @@ func (pb Builder) ToServiceCategoryMetadataPath(
metadataService = OneDriveMetadataService
case SharePointService:
metadataService = SharePointMetadataService
case GroupsService:
metadataService = GroupsMetadataService
}
return &dataLayerResourcePath{

View File

@ -17,15 +17,16 @@ type CategoryType int
//go:generate stringer -type=CategoryType -linecomment
const (
UnknownCategory CategoryType = iota
EmailCategory // email
ContactsCategory // contacts
EventsCategory // events
FilesCategory // files
ListsCategory // lists
LibrariesCategory // libraries
PagesCategory // pages
DetailsCategory // details
UnknownCategory CategoryType = 0
EmailCategory CategoryType = 1 // email
ContactsCategory CategoryType = 2 // contacts
EventsCategory CategoryType = 3 // events
FilesCategory CategoryType = 4 // files
ListsCategory CategoryType = 5 // lists
LibrariesCategory CategoryType = 6 // libraries
PagesCategory CategoryType = 7 // pages
DetailsCategory CategoryType = 8 // details
ChannelMessagesCategory CategoryType = 9 // channel messages
)
func ToCategoryType(category string) CategoryType {
@ -48,6 +49,8 @@ func ToCategoryType(category string) CategoryType {
return PagesCategory
case strings.ToLower(DetailsCategory.String()):
return DetailsCategory
case strings.ToLower(ChannelMessagesCategory.String()):
return ChannelMessagesCategory
default:
return UnknownCategory
}
@ -73,6 +76,14 @@ var serviceCategories = map[ServiceType]map[CategoryType]struct{}{
ListsCategory: {},
PagesCategory: {},
},
GroupsService: {
ChannelMessagesCategory: {},
LibrariesCategory: {},
},
TeamsService: {
ChannelMessagesCategory: {},
LibrariesCategory: {},
},
}
func validateServiceAndCategoryStrings(s, c string) (ServiceType, CategoryType, error) {

View File

@ -17,11 +17,12 @@ func _() {
_ = x[LibrariesCategory-6]
_ = x[PagesCategory-7]
_ = x[DetailsCategory-8]
_ = x[ChannelMessagesCategory-9]
}
const _CategoryType_name = "UnknownCategoryemailcontactseventsfileslistslibrariespagesdetails"
const _CategoryType_name = "UnknownCategoryemailcontactseventsfileslistslibrariespagesdetailschannel messages"
var _CategoryType_index = [...]uint8{0, 15, 20, 28, 34, 39, 44, 53, 58, 65}
var _CategoryType_index = [...]uint8{0, 15, 20, 28, 34, 39, 44, 53, 58, 65, 81}
func (i CategoryType) String() string {
if i < 0 || i >= CategoryType(len(_CategoryType_index)-1) {

View File

@ -13,10 +13,12 @@ var piiSafePathElems = pii.MapWithPlurals(
UnknownService.String(),
ExchangeService.String(),
OneDriveService.String(),
GroupsService.String(),
SharePointService.String(),
ExchangeMetadataService.String(),
OneDriveMetadataService.String(),
SharePointMetadataService.String(),
GroupsMetadataService.String(),
// categories
UnknownCategory.String(),

View File

@ -287,47 +287,54 @@ func (suite *DataLayerResourcePath) TestToServiceCategoryMetadataPath() {
check: assert.Error,
},
{
name: "Passes",
name: "Exchange Contacts",
service: path.ExchangeService,
category: path.ContactsCategory,
expectedService: path.ExchangeMetadataService,
check: assert.NoError,
},
{
name: "Passes",
name: "Exchange Events",
service: path.ExchangeService,
category: path.EventsCategory,
expectedService: path.ExchangeMetadataService,
check: assert.NoError,
},
{
name: "Passes",
name: "OneDrive Files",
service: path.OneDriveService,
category: path.FilesCategory,
expectedService: path.OneDriveMetadataService,
check: assert.NoError,
},
{
name: "Passes",
name: "SharePoint Libraries",
service: path.SharePointService,
category: path.LibrariesCategory,
expectedService: path.SharePointMetadataService,
check: assert.NoError,
},
{
name: "Passes",
name: "SharePoint Lists",
service: path.SharePointService,
category: path.ListsCategory,
expectedService: path.SharePointMetadataService,
check: assert.NoError,
},
{
name: "Passes",
name: "SharePoint Pages",
service: path.SharePointService,
category: path.PagesCategory,
expectedService: path.SharePointMetadataService,
check: assert.NoError,
},
{
name: "Groups Libraries",
service: path.GroupsService,
category: path.LibrariesCategory,
expectedService: path.GroupsMetadataService,
check: assert.NoError,
},
}
for _, test := range table {

View File

@ -22,15 +22,17 @@ type ServiceType int
//go:generate stringer -type=ServiceType -linecomment
const (
UnknownService ServiceType = iota
ExchangeService // exchange
OneDriveService // onedrive
SharePointService // sharepoint
ExchangeMetadataService // exchangeMetadata
OneDriveMetadataService // onedriveMetadata
SharePointMetadataService // sharepointMetadata
GroupsService // groups
GroupsMetadataService // groupsMetadata
UnknownService ServiceType = 0
ExchangeService ServiceType = 1 // exchange
OneDriveService ServiceType = 2 // onedrive
SharePointService ServiceType = 3 // sharepoint
ExchangeMetadataService ServiceType = 4 // exchangeMetadata
OneDriveMetadataService ServiceType = 5 // onedriveMetadata
SharePointMetadataService ServiceType = 6 // sharepointMetadata
GroupsService ServiceType = 7 // groups
GroupsMetadataService ServiceType = 8 // groupsMetadata
TeamsService ServiceType = 9 // teams
TeamsMetadataService ServiceType = 10 // teamsMetadata
)
func toServiceType(service string) ServiceType {
@ -43,12 +45,20 @@ func toServiceType(service string) ServiceType {
return OneDriveService
case strings.ToLower(SharePointService.String()):
return SharePointService
case strings.ToLower(GroupsService.String()):
return GroupsService
case strings.ToLower(TeamsService.String()):
return TeamsService
case strings.ToLower(ExchangeMetadataService.String()):
return ExchangeMetadataService
case strings.ToLower(OneDriveMetadataService.String()):
return OneDriveMetadataService
case strings.ToLower(SharePointMetadataService.String()):
return SharePointMetadataService
case strings.ToLower(GroupsMetadataService.String()):
return GroupsMetadataService
case strings.ToLower(TeamsMetadataService.String()):
return TeamsMetadataService
default:
return UnknownService
}

View File

@ -17,11 +17,13 @@ func _() {
_ = x[SharePointMetadataService-6]
_ = x[GroupsService-7]
_ = x[GroupsMetadataService-8]
_ = x[TeamsService-9]
_ = x[TeamsMetadataService-10]
}
const _ServiceType_name = "UnknownServiceexchangeonedrivesharepointexchangeMetadataonedriveMetadatasharepointMetadatagroupsgroupsMetadata"
const _ServiceType_name = "UnknownServiceexchangeonedrivesharepointexchangeMetadataonedriveMetadatasharepointMetadatagroupsgroupsMetadatateamsteamsMetadata"
var _ServiceType_index = [...]uint8{0, 14, 22, 30, 40, 56, 72, 90, 96, 110}
var _ServiceType_index = [...]uint8{0, 14, 22, 30, 40, 56, 72, 90, 96, 110, 115, 128}
func (i ServiceType) String() string {
if i < 0 || i >= ServiceType(len(_ServiceType_index)-1) {

View File

@ -9,6 +9,7 @@ import (
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/backup/identity"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/filters"
"github.com/alcionai/corso/src/pkg/path"
)
@ -204,8 +205,8 @@ func (s *groups) Scopes() []GroupsScope {
// -------------------
// Scope Factories
// Produces one or more Groups site scopes.
// One scope is created per site entry.
// Produces one or more Groups scopes.
// One scope is created per group entry.
// If any slice contains selectors.Any, that slice is reduced to [selectors.Any]
// If any slice contains selectors.None, that slice is reduced to [selectors.None]
// If any slice is empty, it defaults to [selectors.None]
@ -214,38 +215,93 @@ func (s *groups) AllData() []GroupsScope {
scopes = append(
scopes,
makeScope[GroupsScope](GroupsTODOContainer, Any()))
makeScope[GroupsScope](GroupsLibraryFolder, Any()),
makeScope[GroupsScope](GroupsChannel, Any()))
return scopes
}
// TODO produces one or more Groups TODO scopes.
// Channel produces one or more SharePoint channel scopes, where the channel
// matches upon a given channel by ID or Name. In order to ensure channel selection
// this should always be embedded within the Filter() set; include(channel()) will
// select all items in the channel without further filtering.
// If any slice contains selectors.Any, that slice is reduced to [selectors.Any]
// If any slice contains selectors.None, that slice is reduced to [selectors.None]
// Any empty slice defaults to [selectors.None]
func (s *groups) TODO(lists []string, opts ...option) []GroupsScope {
// If any slice is empty, it defaults to [selectors.None]
func (s *groups) Channel(channel string) []GroupsScope {
return []GroupsScope{
makeInfoScope[GroupsScope](
GroupsChannel,
GroupsInfoChannel,
[]string{channel},
filters.Equal),
}
}
// ChannelMessages produces one or more Groups channel message scopes.
// If any slice contains selectors.Any, that slice is reduced to [selectors.Any]
// If any slice contains selectors.None, that slice is reduced to [selectors.None]
// If any slice is empty, it defaults to [selectors.None]
func (s *sharePoint) ChannelMessages(channels, messages []string, opts ...option) []GroupsScope {
var (
scopes = []GroupsScope{}
os = append([]option{pathComparator()}, opts...)
)
scopes = append(scopes, makeScope[GroupsScope](GroupsTODOContainer, lists, os...))
scopes = append(
scopes,
makeScope[GroupsScope](GroupsChannelMessage, messages, os...).
set(GroupsChannel, channels, opts...))
return scopes
}
// ListTODOItemsItems produces one or more Groups TODO item scopes.
// Library produces one or more Group library scopes, where the library
// matches upon a given drive by ID or Name. In order to ensure library selection
// this should always be embedded within the Filter() set; include(Library()) will
// select all items in the library without further filtering.
// If any slice contains selectors.Any, that slice is reduced to [selectors.Any]
// If any slice contains selectors.None, that slice is reduced to [selectors.None]
// If any slice is empty, it defaults to [selectors.None]
// options are only applied to the list scopes.
func (s *groups) TODOItems(lists, items []string, opts ...option) []GroupsScope {
func (s *groups) Library(library string) []GroupsScope {
return []GroupsScope{
makeInfoScope[GroupsScope](
GroupsLibraryItem,
GroupsInfoSiteLibraryDrive,
[]string{library},
filters.Equal),
}
}
// LibraryFolders produces one or more SharePoint libraryFolder scopes.
// If any slice contains selectors.Any, that slice is reduced to [selectors.Any]
// If any slice contains selectors.None, that slice is reduced to [selectors.None]
// If any slice is empty, it defaults to [selectors.None]
func (s *groups) LibraryFolders(libraryFolders []string, opts ...option) []GroupsScope {
var (
scopes = []GroupsScope{}
os = append([]option{pathComparator()}, opts...)
)
scopes = append(
scopes,
makeScope[GroupsScope](GroupsLibraryFolder, libraryFolders, os...))
return scopes
}
// LibraryItems produces one or more Groups library item scopes.
// If any slice contains selectors.Any, that slice is reduced to [selectors.Any]
// If any slice contains selectors.None, that slice is reduced to [selectors.None]
// If any slice is empty, it defaults to [selectors.None]
// options are only applied to the library scopes.
func (s *groups) LibraryItems(libraries, items []string, opts ...option) []GroupsScope {
scopes := []GroupsScope{}
scopes = append(
scopes,
makeScope[GroupsScope](GroupsTODOItem, items, defaultItemOptions(s.Cfg)...).
set(GroupsTODOContainer, lists, opts...))
makeScope[GroupsScope](GroupsLibraryItem, items, defaultItemOptions(s.Cfg)...).
set(GroupsLibraryFolder, libraries, opts...))
return scopes
}
@ -270,21 +326,28 @@ const (
GroupsCategoryUnknown groupsCategory = ""
// types of data in Groups
GroupsGroup groupsCategory = "GroupsGroup"
GroupsTODOContainer groupsCategory = "GroupsTODOContainer"
GroupsTODOItem groupsCategory = "GroupsTODOItem"
GroupsGroup groupsCategory = "GroupsGroup"
GroupsChannel groupsCategory = "GroupsChannel"
GroupsChannelMessage groupsCategory = "GroupsChannelMessage"
GroupsLibraryFolder groupsCategory = "GroupsLibraryFolder"
GroupsLibraryItem groupsCategory = "GroupsLibraryItem"
// details.itemInfo comparables
// library drive selection
// channel drive selection
GroupsInfoSiteLibraryDrive groupsCategory = "GroupsInfoSiteLibraryDrive"
GroupsInfoChannel groupsCategory = "GroupsInfoChannel"
)
// groupsLeafProperties describes common metadata of the leaf categories
var groupsLeafProperties = map[categorizer]leafProperty{
GroupsTODOItem: { // the root category must be represented, even though it isn't a leaf
pathKeys: []categorizer{GroupsTODOContainer, GroupsTODOItem},
pathType: path.UnknownCategory,
GroupsChannelMessage: { // the root category must be represented, even though it isn't a leaf
pathKeys: []categorizer{GroupsChannel, GroupsChannelMessage},
pathType: path.ChannelMessagesCategory,
},
GroupsLibraryItem: {
pathKeys: []categorizer{GroupsLibraryFolder, GroupsLibraryItem},
pathType: path.LibrariesCategory,
},
GroupsGroup: { // the root category must be represented, even though it isn't a leaf
pathKeys: []categorizer{GroupsGroup},
@ -303,8 +366,12 @@ func (c groupsCategory) String() string {
// Ex: ServiceUser.leafCat() => ServiceUser
func (c groupsCategory) leafCat() categorizer {
switch c {
case GroupsTODOContainer, GroupsInfoSiteLibraryDrive:
return GroupsTODOItem
// TODO: if channels ever contain more than one type of item,
// we'll need to fix this up.
case GroupsChannel, GroupsChannelMessage:
return GroupsChannelMessage
case GroupsLibraryFolder, GroupsLibraryItem, GroupsInfoSiteLibraryDrive:
return GroupsLibraryItem
}
return c
@ -334,7 +401,7 @@ func (c groupsCategory) isLeaf() bool {
// pathValues transforms the two paths to maps of identified properties.
//
// Example:
// [tenantID, service, siteID, category, folder, itemID]
// [tenantID, service, groupID, site, siteID, category, folder, itemID]
// => {spFolder: folder, spItemID: itemID}
func (c groupsCategory) pathValues(
repo path.Path,
@ -348,14 +415,16 @@ func (c groupsCategory) pathValues(
)
switch c {
case GroupsTODOContainer, GroupsTODOItem:
case GroupsChannel, GroupsChannelMessage:
folderCat, itemCat = GroupsChannel, GroupsChannelMessage
rFld = ent.Groups.ParentPath
case GroupsLibraryFolder, GroupsLibraryItem:
if ent.Groups == nil {
return nil, clues.New("no Groups ItemInfo in details")
}
folderCat, itemCat = GroupsTODOContainer, GroupsTODOItem
folderCat, itemCat = GroupsLibraryFolder, GroupsLibraryItem
rFld = ent.Groups.ParentPath
default:
return nil, clues.New("unrecognized groupsCategory").With("category", c)
}
@ -451,7 +520,7 @@ func (s GroupsScope) set(cat groupsCategory, v []string, opts ...option) GroupsS
os := []option{}
switch cat {
case GroupsTODOContainer:
case GroupsChannel, GroupsLibraryFolder:
os = append(os, pathComparator())
}
@ -462,10 +531,14 @@ func (s GroupsScope) set(cat groupsCategory, v []string, opts ...option) GroupsS
func (s GroupsScope) setDefaults() {
switch s.Category() {
case GroupsGroup:
s[GroupsTODOContainer.String()] = passAny
s[GroupsTODOItem.String()] = passAny
case GroupsTODOContainer:
s[GroupsTODOItem.String()] = passAny
s[GroupsChannel.String()] = passAny
s[GroupsChannelMessage.String()] = passAny
s[GroupsLibraryFolder.String()] = passAny
s[GroupsLibraryItem.String()] = passAny
case GroupsChannel:
s[GroupsChannelMessage.String()] = passAny
case GroupsLibraryFolder:
s[GroupsLibraryItem.String()] = passAny
}
}
@ -485,7 +558,8 @@ func (s groups) Reduce(
deets,
s.Selector,
map[path.CategoryType]groupsCategory{
path.UnknownCategory: GroupsTODOItem,
path.ChannelMessagesCategory: GroupsChannelMessage,
path.LibrariesCategory: GroupsLibraryItem,
},
errs)
}
@ -516,6 +590,9 @@ func (s GroupsScope) matchesInfo(dii details.ItemInfo) bool {
}
return matchesAny(s, GroupsInfoSiteLibraryDrive, ds)
case GroupsInfoChannel:
ds := Any()
return matchesAny(s, GroupsInfoChannel, ds)
}
return s.Matches(infoCat, i)

View File

@ -0,0 +1,421 @@
package selectors
import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/path"
)
// GroupsSelectorSuite exercises the Groups-service selector: backup/restore
// constructors, service conversions, and category path-value handling.
type GroupsSelectorSuite struct {
	tester.Suite
}

// TestGroupsSelectorSuite registers the suite with the go test runner.
func TestGroupsSelectorSuite(t *testing.T) {
	suite.Run(t, &GroupsSelectorSuite{Suite: tester.NewUnitSuite(t)})
}
// TestNewGroupsBackup ensures a freshly constructed backup selector
// targets the Groups service and produces a non-empty scope set.
func (suite *GroupsSelectorSuite) TestNewGroupsBackup() {
	t := suite.T()

	sel := NewGroupsBackup(nil)

	assert.Equal(t, sel.Service, ServiceGroups)
	assert.NotZero(t, sel.Scopes())
}
// TestToGroupsBackup ensures a generic Selector round-trips back into a
// GroupsBackup without losing its service or scopes.
func (suite *GroupsSelectorSuite) TestToGroupsBackup() {
	t := suite.T()

	generic := NewGroupsBackup(nil).Selector

	sel, err := generic.ToGroupsBackup()
	require.NoError(t, err, clues.ToCore(err))

	assert.Equal(t, sel.Service, ServiceGroups)
	assert.NotZero(t, sel.Scopes())
}
// TestNewGroupsRestore ensures a freshly constructed restore selector
// targets the Groups service and produces a non-empty scope set.
func (suite *GroupsSelectorSuite) TestNewGroupsRestore() {
	t := suite.T()

	sel := NewGroupsRestore(nil)

	assert.Equal(t, sel.Service, ServiceGroups)
	assert.NotZero(t, sel.Scopes())
}
// TestToGroupsRestore ensures a generic Selector round-trips back into a
// GroupsRestore without losing its service or scopes.
func (suite *GroupsSelectorSuite) TestToGroupsRestore() {
	t := suite.T()

	generic := NewGroupsRestore(nil).Selector

	sel, err := generic.ToGroupsRestore()
	require.NoError(t, err, clues.ToCore(err))

	assert.Equal(t, sel.Service, ServiceGroups)
	assert.NotZero(t, sel.Scopes())
}
// TODO(rkeepers): implement
// func (suite *GroupsSelectorSuite) TestGroupsRestore_Reduce() {
// toRR := func(cat path.CategoryType, siteID string, folders []string, item string) string {
// folderElems := make([]string, 0, len(folders))
// for _, f := range folders {
// folderElems = append(folderElems, f+".d")
// }
// return stubRepoRef(
// path.GroupsService,
// cat,
// siteID,
// strings.Join(folderElems, "/"),
// item)
// }
// var (
// prefixElems = []string{
// odConsts.DrivesPathDir,
// "drive!id",
// odConsts.RootPathDir,
// }
// itemElems1 = []string{"folderA", "folderB"}
// itemElems2 = []string{"folderA", "folderC"}
// itemElems3 = []string{"folderD", "folderE"}
// pairAC = "folderA/folderC"
// pairGH = "folderG/folderH"
// item = toRR(
// path.LibrariesCategory,
// "sid",
// append(slices.Clone(prefixElems), itemElems1...),
// "item")
// item2 = toRR(
// path.LibrariesCategory,
// "sid",
// append(slices.Clone(prefixElems), itemElems2...),
// "item2")
// item3 = toRR(
// path.LibrariesCategory,
// "sid",
// append(slices.Clone(prefixElems), itemElems3...),
// "item3")
// item4 = stubRepoRef(path.GroupsService, path.PagesCategory, "sid", pairGH, "item4")
// item5 = stubRepoRef(path.GroupsService, path.PagesCategory, "sid", pairGH, "item5")
// )
// deets := &details.Details{
// DetailsModel: details.DetailsModel{
// Entries: []details.Entry{
// {
// RepoRef: item,
// ItemRef: "item",
// LocationRef: strings.Join(append([]string{odConsts.RootPathDir}, itemElems1...), "/"),
// ItemInfo: details.ItemInfo{
// Groups: &details.GroupsInfo{
// ItemType: details.GroupsLibrary,
// ItemName: "itemName",
// ParentPath: strings.Join(itemElems1, "/"),
// },
// },
// },
// {
// RepoRef: item2,
// LocationRef: strings.Join(append([]string{odConsts.RootPathDir}, itemElems2...), "/"),
// // ItemRef intentionally blank to test fallback case
// ItemInfo: details.ItemInfo{
// Groups: &details.GroupsInfo{
// ItemType: details.GroupsLibrary,
// ItemName: "itemName2",
// ParentPath: strings.Join(itemElems2, "/"),
// },
// },
// },
// {
// RepoRef: item3,
// ItemRef: "item3",
// LocationRef: strings.Join(append([]string{odConsts.RootPathDir}, itemElems3...), "/"),
// ItemInfo: details.ItemInfo{
// Groups: &details.GroupsInfo{
// ItemType: details.GroupsLibrary,
// ItemName: "itemName3",
// ParentPath: strings.Join(itemElems3, "/"),
// },
// },
// },
// {
// RepoRef: item4,
// LocationRef: pairGH,
// ItemRef: "item4",
// ItemInfo: details.ItemInfo{
// Groups: &details.GroupsInfo{
// ItemType: details.GroupsPage,
// ItemName: "itemName4",
// ParentPath: pairGH,
// },
// },
// },
// {
// RepoRef: item5,
// LocationRef: pairGH,
// // ItemRef intentionally blank to test fallback case
// ItemInfo: details.ItemInfo{
// Groups: &details.GroupsInfo{
// ItemType: details.GroupsPage,
// ItemName: "itemName5",
// ParentPath: pairGH,
// },
// },
// },
// },
// },
// }
// arr := func(s ...string) []string {
// return s
// }
// table := []struct {
// name string
// makeSelector func() *GroupsRestore
// expect []string
// cfg Config
// }{
// {
// name: "all",
// makeSelector: func() *GroupsRestore {
// odr := NewGroupsRestore(Any())
// odr.Include(odr.AllData())
// return odr
// },
// expect: arr(item, item2, item3, item4, item5),
// },
// {
// name: "only match item",
// makeSelector: func() *GroupsRestore {
// odr := NewGroupsRestore(Any())
// odr.Include(odr.LibraryItems(Any(), []string{"item2"}))
// return odr
// },
// expect: arr(item2),
// },
// {
// name: "id doesn't match name",
// makeSelector: func() *GroupsRestore {
// odr := NewGroupsRestore(Any())
// odr.Include(odr.LibraryItems(Any(), []string{"item2"}))
// return odr
// },
// expect: []string{},
// cfg: Config{OnlyMatchItemNames: true},
// },
// {
// name: "only match item name",
// makeSelector: func() *GroupsRestore {
// odr := NewGroupsRestore(Any())
// odr.Include(odr.LibraryItems(Any(), []string{"itemName2"}))
// return odr
// },
// expect: arr(item2),
// cfg: Config{OnlyMatchItemNames: true},
// },
// {
// name: "name doesn't match",
// makeSelector: func() *GroupsRestore {
// odr := NewGroupsRestore(Any())
// odr.Include(odr.LibraryItems(Any(), []string{"itemName2"}))
// return odr
// },
// expect: []string{},
// },
// {
// name: "only match folder",
// makeSelector: func() *GroupsRestore {
// odr := NewGroupsRestore([]string{"sid"})
// odr.Include(odr.LibraryFolders([]string{"folderA/folderB", pairAC}))
// return odr
// },
// expect: arr(item, item2),
// },
// {
// name: "pages match folder",
// makeSelector: func() *GroupsRestore {
// odr := NewGroupsRestore([]string{"sid"})
// odr.Include(odr.Pages([]string{pairGH, pairAC}))
// return odr
// },
// expect: arr(item4, item5),
// },
// }
// for _, test := range table {
// suite.Run(test.name, func() {
// t := suite.T()
// ctx, flush := tester.NewContext(t)
// defer flush()
// sel := test.makeSelector()
// sel.Configure(test.cfg)
// results := sel.Reduce(ctx, deets, fault.New(true))
// paths := results.Paths()
// assert.Equal(t, test.expect, paths)
// })
// }
// }
// TestGroupsCategory_PathValues verifies that pathValues maps a stored repo
// path plus its details entry onto the expected categorizer->value pairs.
func (suite *GroupsSelectorSuite) TestGroupsCategory_PathValues() {
	var (
		itemName = "item"
		itemID   = "item-id"
		shortRef = "short"
		elems    = []string{itemID}
	)

	table := []struct {
		name       string
		sc         groupsCategory
		pathElems  []string
		locRef     string
		parentPath string
		expected   map[categorizer][]string
		cfg        Config
	}{
		{
			name:      "Groups Channel Messages",
			sc:        GroupsChannelMessage,
			pathElems: elems,
			locRef:    "",
			expected: map[categorizer][]string{
				GroupsChannel:        {""},
				GroupsChannelMessage: {itemID, shortRef},
			},
			cfg: Config{},
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			// build a real repo path for the category under test
			itemPath, err := path.Build(
				"tenant",
				"site",
				path.GroupsService,
				test.sc.PathType(),
				true,
				test.pathElems...)
			require.NoError(t, err, clues.ToCore(err))

			ent := details.Entry{
				RepoRef:     itemPath.String(),
				ShortRef:    shortRef,
				ItemRef:     itemPath.Item(),
				LocationRef: test.locRef,
				ItemInfo: details.ItemInfo{
					Groups: &details.GroupsInfo{
						ItemName:   itemName,
						ParentPath: test.parentPath,
					},
				},
			}

			pv, err := test.sc.pathValues(itemPath, ent, test.cfg)
			// include clues.ToCore(err) for consistency with the rest of
			// the suite's error assertions.
			require.NoError(t, err, clues.ToCore(err))
			assert.Equal(t, test.expected, pv)
		})
	}
}
// TODO(abin): implement
// func (suite *GroupsSelectorSuite) TestGroupsScope_MatchesInfo() {
// var (
// sel = NewGroupsRestore(Any())
// host = "www.website.com"
// pth = "/foo"
// url = host + pth
// epoch = time.Time{}
// now = time.Now()
// modification = now.Add(15 * time.Minute)
// future = now.Add(45 * time.Minute)
// )
// table := []struct {
// name string
// infoURL string
// scope []GroupsScope
// expect assert.BoolAssertionFunc
// }{
// {"host match", host, sel.WebURL([]string{host}), assert.True},
// {"url match", url, sel.WebURL([]string{url}), assert.True},
// {"host suffixes host", host, sel.WebURL([]string{host}, SuffixMatch()), assert.True},
// {"url does not suffix host", url, sel.WebURL([]string{host}, SuffixMatch()), assert.False},
// {"url has path suffix", url, sel.WebURL([]string{pth}, SuffixMatch()), assert.True},
// {"host does not contain substring", host, sel.WebURL([]string{"website"}), assert.False},
// {"url does not suffix substring", url, sel.WebURL([]string{"oo"}, SuffixMatch()), assert.False},
// {"host mismatch", host, sel.WebURL([]string{"www.google.com"}), assert.False},
// {"file create after the epoch", host, sel.CreatedAfter(dttm.Format(epoch)), assert.True},
// {"file create after now", host, sel.CreatedAfter(dttm.Format(now)), assert.False},
// {"file create after later", url, sel.CreatedAfter(dttm.Format(future)), assert.False},
// {"file create before future", host, sel.CreatedBefore(dttm.Format(future)), assert.True},
// {"file create before now", host, sel.CreatedBefore(dttm.Format(now)), assert.False},
// {"file create before modification", host, sel.CreatedBefore(dttm.Format(modification)), assert.True},
// {"file create before epoch", host, sel.CreatedBefore(dttm.Format(now)), assert.False},
// {"file modified after the epoch", host, sel.ModifiedAfter(dttm.Format(epoch)), assert.True},
// {"file modified after now", host, sel.ModifiedAfter(dttm.Format(now)), assert.True},
// {"file modified after later", host, sel.ModifiedAfter(dttm.Format(future)), assert.False},
// {"file modified before future", host, sel.ModifiedBefore(dttm.Format(future)), assert.True},
// {"file modified before now", host, sel.ModifiedBefore(dttm.Format(now)), assert.False},
// {"file modified before epoch", host, sel.ModifiedBefore(dttm.Format(now)), assert.False},
// {"in library", host, sel.Library("included-library"), assert.True},
// {"not in library", host, sel.Library("not-included-library"), assert.False},
// {"library id", host, sel.Library("1234"), assert.True},
// {"not library id", host, sel.Library("abcd"), assert.False},
// }
// for _, test := range table {
// suite.Run(test.name, func() {
// t := suite.T()
// itemInfo := details.ItemInfo{
// Groups: &details.GroupsInfo{
// ItemType: details.GroupsPage,
// WebURL: test.infoURL,
// Created: now,
// Modified: modification,
// DriveName: "included-library",
// DriveID: "1234",
// },
// }
// scopes := setScopesToDefault(test.scope)
// for _, scope := range scopes {
// test.expect(t, scope.matchesInfo(itemInfo))
// }
// })
// }
// }
// TestCategory_PathType verifies the groupsCategory -> path.CategoryType
// mapping for each known category.
func (suite *GroupsSelectorSuite) TestCategory_PathType() {
	cases := []struct {
		cat      groupsCategory
		pathType path.CategoryType
	}{
		{cat: GroupsCategoryUnknown, pathType: path.UnknownCategory},
		{cat: GroupsChannel, pathType: path.ChannelMessagesCategory},
		{cat: GroupsChannelMessage, pathType: path.ChannelMessagesCategory},
	}

	for _, tc := range cases {
		suite.Run(tc.cat.String(), func() {
			// compare string forms for a readable failure message
			assert.Equal(
				suite.T(),
				tc.pathType.String(),
				tc.cat.PathType().String())
		})
	}
}

View File

@ -20,11 +20,11 @@ type service int
//go:generate stringer -type=service -linecomment
const (
ServiceUnknown service = iota // Unknown Service
ServiceExchange // Exchange
ServiceOneDrive // OneDrive
ServiceSharePoint // SharePoint
ServiceGroups // Groups
ServiceUnknown service = 0 // Unknown Service
ServiceExchange service = 1 // Exchange
ServiceOneDrive service = 2 // OneDrive
ServiceSharePoint service = 3 // SharePoint
ServiceGroups service = 4 // Groups
)
var serviceToPathType = map[service]path.ServiceType{
@ -32,6 +32,7 @@ var serviceToPathType = map[service]path.ServiceType{
ServiceExchange: path.ExchangeService,
ServiceOneDrive: path.OneDriveService,
ServiceSharePoint: path.SharePointService,
ServiceGroups: path.GroupsService,
}
var (

View File

@ -166,7 +166,7 @@ func ChannelMessageInfo(msg models.ChatMessageable, size int64) *details.GroupsI
)
return &details.GroupsInfo{
ItemType: details.GroupChannelMessage,
ItemType: details.TeamsChannelMessage,
Size: size,
Created: created,
Modified: ptr.OrNow(msg.GetLastModifiedDateTime()),

View File

@ -1,144 +1,43 @@
package api
import (
"context"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/microsoftgraph/msgraph-sdk-go/teams"
)
// ---------------------------------------------------------------------------
// item pager
// ---------------------------------------------------------------------------
type MessageItemDeltaEnumerator interface {
GetPage(context.Context) (PageLinker, error)
SetNext(nextLink string)
// ChannelMessageDeltaEnumerator pages through channel messages via a
// delta query: it fetches pages, extracts the ChatMessageable values
// from each page, and can be re-pointed at the next page link.
type ChannelMessageDeltaEnumerator interface {
	DeltaGetPager
	ValuesInPageLinker[models.ChatMessageable]
	SetNextLinker
}
var _ MessageItemDeltaEnumerator = &messagePageCtrl{}
// TODO: implement
// var _ ChannelMessageDeltaEnumerator = &messagePageCtrl{}
type messagePageCtrl struct {
gs graph.Servicer
builder *teams.ItemChannelsItemMessagesDeltaRequestBuilder
options *teams.ItemChannelsItemMessagesDeltaRequestBuilderGetRequestConfiguration
// type messagePageCtrl struct {
// gs graph.Servicer
// builder *teams.ItemChannelsItemMessagesRequestBuilder
// options *teams.ItemChannelsItemMessagesRequestBuilderGetRequestConfiguration
// }
// ---------------------------------------------------------------------------
// channel pager
// ---------------------------------------------------------------------------
// ChannelDeltaEnumerator pages through channels via a delta query: it
// fetches pages, extracts the Channelable values from each page, and can
// be re-pointed at the next page link.
type ChannelDeltaEnumerator interface {
	DeltaGetPager
	ValuesInPageLinker[models.Channelable]
	SetNextLinker
}
func (c Channels) NewMessagePager(
teamID,
channelID string,
fields []string,
) *messagePageCtrl {
requestConfig := &teams.ItemChannelsItemMessagesDeltaRequestBuilderGetRequestConfiguration{
QueryParameters: &teams.ItemChannelsItemMessagesDeltaRequestBuilderGetQueryParameters{
Select: fields,
},
}
// TODO: implement
// var _ ChannelDeltaEnumerator = &channelsPageCtrl{}
res := &messagePageCtrl{
gs: c.Stable,
options: requestConfig,
builder: c.Stable.
Client().
Teams().
ByTeamId(teamID).
Channels().
ByChannelId(channelID).
Messages().
Delta(),
}
return res
}
func (p *messagePageCtrl) SetNext(nextLink string) {
p.builder = teams.NewItemChannelsItemMessagesDeltaRequestBuilder(nextLink, p.gs.Adapter())
}
func (p *messagePageCtrl) GetPage(ctx context.Context) (PageLinker, error) {
var (
resp PageLinker
err error
)
resp, err = p.builder.Get(ctx, p.options)
if err != nil {
return nil, graph.Stack(ctx, err)
}
return resp, nil
}
type MessageItemIDType struct {
ItemID string
}
type channelItemPageCtrl struct {
gs graph.Servicer
builder *teams.ItemChannelsItemMessagesRequestBuilder
options *teams.ItemChannelsItemMessagesRequestBuilderGetRequestConfiguration
}
func (c Channels) GetItemIDsInContainer(
ctx context.Context,
teamID, channelID string,
) (map[string]MessageItemIDType, error) {
ctx = clues.Add(ctx, "channel_id", channelID)
pager := c.NewChannelItemPager(teamID, channelID)
items, err := enumerateItems(ctx, pager)
if err != nil {
return nil, graph.Wrap(ctx, err, "enumerating contacts")
}
m := map[string]MessageItemIDType{}
for _, item := range items {
m[ptr.Val(item.GetId())] = MessageItemIDType{
ItemID: ptr.Val(item.GetId()),
}
}
return m, nil
}
func (c Channels) NewChannelItemPager(
teamID, containerID string,
selectProps ...string,
) itemPager[models.ChatMessageable] {
options := &teams.ItemChannelsItemMessagesRequestBuilderGetRequestConfiguration{
QueryParameters: &teams.ItemChannelsItemMessagesRequestBuilderGetQueryParameters{},
}
if len(selectProps) > 0 {
options.QueryParameters.Select = selectProps
}
builder := c.Stable.
Client().
Teams().
ByTeamId(teamID).
Channels().
ByChannelId(containerID).
Messages()
return &channelItemPageCtrl{c.Stable, builder, options}
}
//lint:ignore U1000 False Positive
func (p *channelItemPageCtrl) getPage(ctx context.Context) (PageLinkValuer[models.ChatMessageable], error) {
page, err := p.builder.Get(ctx, p.options)
if err != nil {
return nil, graph.Stack(ctx, err)
}
return EmptyDeltaLinker[models.ChatMessageable]{PageLinkValuer: page}, nil
}
//lint:ignore U1000 False Positive
func (p *channelItemPageCtrl) setNext(nextLink string) {
p.builder = teams.NewItemChannelsItemMessagesRequestBuilder(nextLink, p.gs.Adapter())
}
// type channelsPageCtrl struct {
// gs graph.Servicer
// builder *teams.ItemChannelsChannelItemRequestBuilder
// options *teams.ItemChannelsChannelItemRequestBuilderGetRequestConfiguration
// }

View File

@ -28,19 +28,20 @@ func (suite *ChannelPagerIntgSuite) SetupSuite() {
suite.its = newIntegrationTesterSetup(suite.T())
}
func (suite *ChannelPagerIntgSuite) TestChannels_GetPage() {
t := suite.T()
// This will be added once 'pager' is implemented
// func (suite *ChannelPagerIntgSuite) TestChannels_GetPage() {
// t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
// ctx, flush := tester.NewContext(t)
// defer flush()
teamID := tconfig.M365TeamsID(t)
channelID := tconfig.M365ChannelID(t)
pager := suite.its.ac.Channels().NewMessagePager(teamID, channelID, []string{})
a, err := pager.GetPage(ctx)
assert.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, a)
}
// teamID := tconfig.M365TeamID(t)
// channelID := tconfig.M365ChannelID(t)
// pager := suite.its.ac.Channels().NewMessagePager(teamID, channelID, []string{})
// a, err := pager.GetPage(ctx)
// assert.NoError(t, err, clues.ToCore(err))
// assert.NotNil(t, a)
// }
func (suite *ChannelPagerIntgSuite) TestChannels_Get() {
t := suite.T()
@ -49,7 +50,7 @@ func (suite *ChannelPagerIntgSuite) TestChannels_Get() {
var (
containerName = "General"
teamID = tconfig.M365TeamsID(t)
teamID = tconfig.M365TeamID(t)
chanClient = suite.its.ac.Channels()
)
@ -62,31 +63,3 @@ func (suite *ChannelPagerIntgSuite) TestChannels_Get() {
_, err = chanClient.GetChannel(ctx, teamID, ptr.Val(channel.GetId()))
assert.Error(t, err, clues.ToCore(err))
}
// func (suite *ChannelPagerIntgSuite) TestMessages_CreateGetAndDelete() {
// t := suite.T()
// ctx, flush := tester.NewContext(t)
// defer flush()
// var (
// teamID = tconfig.M365TeamsID(t)
// channelID = tconfig.M365ChannelID(t)
// credentials = suite.its.ac.Credentials
// chanClient = suite.its.ac.Channels()
// )
// // GET channel - should be not found
// message, _, err := chanClient.GetMessage(ctx, teamID, channelID, "", "")
// assert.Error(t, err, clues.ToCore(err))
// // POST channel
// // patchBody := models.NewChatMessage()
// // body := models.NewItemBody()
// // content := "Hello World"
// // body.SetContent(&content)
// // patchBody.SetBody(body)
// // _, := suite.its.ac.Channels().PostMessage(ctx, teamID, channelID, patchBody)
// // assert.NoError(t, err, clues.ToCore(err))
// }

View File

@ -112,3 +112,12 @@ func DriveItemSelectDefault() []string {
"malware",
"shared")
}
// DriveItemSelectURLCache returns the minimal set of drive item
// properties needed by the URL cache: the id plus download URL,
// deletion marker, and file/folder facets.
func DriveItemSelectURLCache() []string {
	cacheProps := []string{
		"content.downloadUrl",
		"deleted",
		"file",
		"folder",
	}

	return idAnd(cacheProps...)
}

View File

@ -7,6 +7,7 @@ import (
msgraphgocore "github.com/microsoftgraph/msgraph-sdk-go-core"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/common/tform"
"github.com/alcionai/corso/src/internal/m365/graph"
@ -27,7 +28,7 @@ func (c Client) Groups() Groups {
return Groups{c}
}
// On creation of each Teams team a corrsponding group gets created.
// On creation of each Teams team a corresponding group gets created.
// The group acts as the protected resource, and all teams data like events,
// drive and mail messages are owned by that group.
@ -49,24 +50,6 @@ func (c Groups) GetAll(
return getGroups(ctx, errs, service)
}
// GetTeams retrieves all Teams.
func (c Groups) GetTeams(
ctx context.Context,
errs *fault.Bus,
) ([]models.Groupable, error) {
service, err := c.Service()
if err != nil {
return nil, err
}
groups, err := getGroups(ctx, errs, service)
if err != nil {
return nil, err
}
return OnlyTeams(ctx, groups), nil
}
// GetAll retrieves all groups.
func getGroups(
ctx context.Context,
@ -113,31 +96,6 @@ func getGroups(
return groups, el.Failure()
}
func OnlyTeams(ctx context.Context, groups []models.Groupable) []models.Groupable {
log := logger.Ctx(ctx)
var teams []models.Groupable
for _, g := range groups {
if g.GetAdditionalData()[ResourceProvisioningOptions] != nil {
val, _ := tform.AnyValueToT[[]any](ResourceProvisioningOptions, g.GetAdditionalData())
for _, v := range val {
s, err := str.AnyToString(v)
if err != nil {
log.Debug("could not be converted to string value: ", ResourceProvisioningOptions)
continue
}
if s == teamsAdditionalDataLabel {
teams = append(teams, g)
}
}
}
}
return teams
}
// GetID retrieves group by groupID.
func (c Groups) GetByID(
ctx context.Context,
@ -158,29 +116,25 @@ func (c Groups) GetByID(
return resp, graph.Stack(ctx, err).OrNil()
}
// GetTeamByID retrieves group by groupID.
func (c Groups) GetTeamByID(
// GetRootSite retrieves the root site for the group.
func (c Groups) GetRootSite(
ctx context.Context,
identifier string,
) (models.Groupable, error) {
) (models.Siteable, error) {
service, err := c.Service()
if err != nil {
return nil, err
}
resp, err := service.Client().Groups().ByGroupId(identifier).Get(ctx, nil)
resp, err := service.
Client().
Groups().
ByGroupId(identifier).
Sites().
BySiteId("root").
Get(ctx, nil)
if err != nil {
err := graph.Wrap(ctx, err, "getting group by id")
return nil, err
}
groups := []models.Groupable{resp}
if len(OnlyTeams(ctx, groups)) == 0 {
err := clues.New("given teamID is not related to any team")
return nil, err
return nil, clues.Wrap(err, "getting root site for group")
}
return resp, graph.Stack(ctx, err).OrNil()
@ -203,3 +157,49 @@ func ValidateGroup(item models.Groupable) error {
return nil
}
// OnlyTeams filters the given groups down to those that are provisioned
// as Teams teams (per IsTeam).
func OnlyTeams(ctx context.Context, groups []models.Groupable) []models.Groupable {
	var teams []models.Groupable

	for _, group := range groups {
		if !IsTeam(ctx, group) {
			continue
		}

		teams = append(teams, group)
	}

	return teams
}
// IsTeam reports whether the group carries the teams label within its
// resourceProvisioningOptions additional data.
func IsTeam(ctx context.Context, mg models.Groupable) bool {
	log := logger.Ctx(ctx)

	ad := mg.GetAdditionalData()
	if ad[ResourceProvisioningOptions] == nil {
		return false
	}

	// best-effort conversion; a failed cast yields an empty slice.
	opts, _ := tform.AnyValueToT[[]any](ResourceProvisioningOptions, ad)

	for _, opt := range opts {
		label, err := str.AnyToString(opt)
		if err != nil {
			log.Debug("could not be converted to string value: ", ResourceProvisioningOptions)
			continue
		}

		if label == teamsAdditionalDataLabel {
			return true
		}
	}

	return false
}
// GetIDAndName looks up the group matching the given ID, and returns
// its canonical ID and display name.
func (c Groups) GetIDAndName(ctx context.Context, groupID string) (string, string, error) {
	group, err := c.GetByID(ctx, groupID)
	if err != nil {
		return "", "", err
	}

	return ptr.Val(group.GetId()), ptr.Val(group.GetDisplayName()), nil
}

View File

@ -97,7 +97,7 @@ func (suite *GroupsIntgSuite) SetupSuite() {
suite.its = newIntegrationTesterSetup(suite.T())
}
func (suite *GroupsIntgSuite) TestGetAllGroups() {
func (suite *GroupsIntgSuite) TestGetAll() {
t := suite.T()
ctx, flush := tester.NewContext(t)
@ -110,131 +110,31 @@ func (suite *GroupsIntgSuite) TestGetAllGroups() {
require.NotZero(t, len(groups), "must have at least one group")
}
func (suite *GroupsIntgSuite) TestGetAllTeams() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
teams, err := suite.its.ac.
Groups().
GetTeams(ctx, fault.New(true))
require.NoError(t, err)
require.NotZero(t, len(teams), "must have at least one teams")
groups, err := suite.its.ac.
Groups().
GetAll(ctx, fault.New(true))
require.NoError(t, err)
require.NotZero(t, len(groups), "must have at least one group")
var isTeam bool
if len(groups) > len(teams) {
isTeam = true
}
assert.True(t, isTeam, "must only return teams")
}
func (suite *GroupsIntgSuite) TestTeams_GetByID() {
var (
t = suite.T()
teamID = tconfig.M365TeamsID(t)
)
teamsAPI := suite.its.ac.Groups()
table := []struct {
name string
id string
expectErr func(*testing.T, error)
}{
{
name: "3 part id",
id: teamID,
expectErr: func(t *testing.T, err error) {
assert.NoError(t, err, clues.ToCore(err))
},
},
{
name: "malformed id",
id: uuid.NewString(),
expectErr: func(t *testing.T, err error) {
assert.Error(t, err, clues.ToCore(err))
},
},
{
name: "random id",
id: uuid.NewString() + "," + uuid.NewString(),
expectErr: func(t *testing.T, err error) {
assert.Error(t, err, clues.ToCore(err))
},
},
{
name: "malformed url",
id: "barunihlda",
expectErr: func(t *testing.T, err error) {
assert.Error(t, err, clues.ToCore(err))
},
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
_, err := teamsAPI.GetTeamByID(ctx, test.id)
test.expectErr(t, err)
})
}
}
func (suite *GroupsIntgSuite) TestGroups_GetByID() {
var (
t = suite.T()
groupID = tconfig.M365GroupID(t)
groupID = suite.its.groupID
groupsAPI = suite.its.ac.Groups()
)
groupsAPI := suite.its.ac.Groups()
table := []struct {
name string
id string
expectErr func(*testing.T, error)
}{
{
name: "3 part id",
name: "valid id",
id: groupID,
expectErr: func(t *testing.T, err error) {
assert.NoError(t, err, clues.ToCore(err))
},
},
{
name: "malformed id",
name: "invalid id",
id: uuid.NewString(),
expectErr: func(t *testing.T, err error) {
assert.Error(t, err, clues.ToCore(err))
},
},
{
name: "random id",
id: uuid.NewString() + "," + uuid.NewString(),
expectErr: func(t *testing.T, err error) {
assert.Error(t, err, clues.ToCore(err))
},
},
{
name: "malformed url",
id: "barunihlda",
expectErr: func(t *testing.T, err error) {
assert.Error(t, err, clues.ToCore(err))
},
},
}
for _, test := range table {
suite.Run(test.name, func() {

View File

@ -83,7 +83,7 @@ type intgTesterSetup struct {
siteID string
siteDriveID string
siteDriveRootFolderID string
teamID string
groupID string
}
func newIntegrationTesterSetup(t *testing.T) intgTesterSetup {
@ -132,13 +132,16 @@ func newIntegrationTesterSetup(t *testing.T) intgTesterSetup {
its.siteDriveRootFolderID = ptr.Val(siteDriveRootFolder.GetId())
// teams
its.teamID = tconfig.M365TeamsID(t)
// group
team, err := its.ac.Groups().GetTeamByID(ctx, its.teamID)
// use of the TeamID is intentional here, so that we are assured
// the group has full usage of the teams api.
its.groupID = tconfig.M365TeamID(t)
team, err := its.ac.Groups().GetByID(ctx, its.groupID)
require.NoError(t, err, clues.ToCore(err))
its.teamID = ptr.Val(team.GetId())
its.groupID = ptr.Val(team.GetId())
return its
}

View File

@ -13,9 +13,18 @@ import (
)
// ---------------------------------------------------------------------------
// common interfaces and funcs
// common interfaces
// ---------------------------------------------------------------------------
// TODO(keepers): replace all matching uses of GetPage with this.
type DeltaGetPager interface {
GetPage(context.Context) (DeltaPageLinker, error)
}
type ValuesInPageLinker[T any] interface {
ValuesIn(PageLinker) ([]T, error)
}
type PageLinker interface {
GetOdataNextLink() *string
}
@ -25,6 +34,14 @@ type DeltaPageLinker interface {
GetOdataDeltaLink() *string
}
type SetNextLinker interface {
SetNext(nextLink string)
}
// ---------------------------------------------------------------------------
// common funcs
// ---------------------------------------------------------------------------
// IsNextLinkValid separate check to investigate whether error is
func IsNextLinkValid(next string) bool {
return !strings.Contains(next, `users//`)

View File

@ -0,0 +1,118 @@
package m365
import (
"context"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// Group is the minimal information required to identify and display a M365 Group.
type Group struct {
	// ID is the unique identifier assigned to the group by M365.
	ID string

	// DisplayName is the human-readable name of the group. Normally the plaintext name that the
	// user provided when they created the group, or the updated name if it was changed.
	// Ex: displayName: "My Group"
	DisplayName string

	// IsTeam is true if the group qualifies as a Teams resource, and is able to backup and restore
	// teams data.
	IsTeam bool
}
// GroupsCompat returns a list of groups in the specified M365 tenant.
// It wraps Groups with an internally-constructed fail-fast fault bus,
// surfacing the bus failure when the call itself succeeds.
func GroupsCompat(ctx context.Context, acct account.Account) ([]*Group, error) {
	bus := fault.New(true)

	groups, err := Groups(ctx, acct, bus)
	if err != nil {
		return nil, err
	}

	return groups, bus.Failure()
}
// Groups returns a list of groups in the specified M365 tenant.
func Groups(
	ctx context.Context,
	acct account.Account,
	errs *fault.Bus,
) ([]*Group, error) {
	client, err := makeAC(ctx, acct, path.GroupsService)
	if err != nil {
		return nil, clues.Stack(err).WithClues(ctx)
	}

	return getAllGroups(ctx, client.Groups())
}
// getAllGroups fetches every group via the supplied getter and converts
// each one into the local Group representation. A fresh fail-fast fault
// bus is used for the fetch.
func getAllGroups(
	ctx context.Context,
	ga getAller[models.Groupable],
) ([]*Group, error) {
	items, err := ga.GetAll(ctx, fault.New(true))
	if err != nil {
		return nil, clues.Wrap(err, "retrieving groups")
	}

	results := make([]*Group, 0, len(items))

	for _, item := range items {
		group, err := parseGroup(ctx, item)
		if err != nil {
			return nil, clues.Wrap(err, "parsing groups")
		}

		results = append(results, group)
	}

	return results, nil
}
// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------
// parseGroup extracts the information from `models.Groupable` we care
// about. A group without a display name is treated as an error.
func parseGroup(ctx context.Context, mg models.Groupable) (*Group, error) {
	if mg.GetDisplayName() == nil {
		return nil, clues.New("group missing display name").
			With("group_id", ptr.Val(mg.GetId()))
	}

	group := Group{
		ID:          ptr.Val(mg.GetId()),
		DisplayName: ptr.Val(mg.GetDisplayName()),
		IsTeam:      api.IsTeam(ctx, mg),
	}

	return &group, nil
}
// GroupsMap retrieves an id-name cache of all groups in the tenant,
// keyed by group ID with display names as values.
func GroupsMap(
	ctx context.Context,
	acct account.Account,
	errs *fault.Bus,
) (idname.Cacher, error) {
	groups, err := Groups(ctx, acct, errs)
	if err != nil {
		return idname.NewCache(nil), err
	}

	idToName := make(map[string]string, len(groups))

	for _, g := range groups {
		idToName[g.ID] = g.DisplayName
	}

	return idname.NewCache(idToName), nil
}

View File

@ -0,0 +1,133 @@
package m365_test
import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/credentials"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365"
)
// GroupsIntgSuite is the integration suite for tenant-level group
// listing; acct is populated once in SetupSuite.
type GroupsIntgSuite struct {
	tester.Suite
	acct account.Account
}

// TestGroupsIntgSuite registers the suite; it only runs when the M365
// account credential env vars are present.
func TestGroupsIntgSuite(t *testing.T) {
	suite.Run(t, &GroupsIntgSuite{
		Suite: tester.NewIntegrationSuite(
			t,
			[][]string{tconfig.M365AcctCredEnvs}),
	})
}

// SetupSuite bounds graph API concurrency and loads the shared test
// account used by every test in the suite.
func (suite *GroupsIntgSuite) SetupSuite() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	graph.InitializeConcurrencyLimiter(ctx, true, 4)

	suite.acct = tconfig.NewM365Account(t)
}
// TestGroups lists all groups in the test tenant and sanity-checks each
// entry: ID and DisplayName must be populated, and the configured team
// ID must be flagged as a team.
func (suite *GroupsIntgSuite) TestGroups() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	graph.InitializeConcurrencyLimiter(ctx, true, 4)

	groups, err := m365.Groups(ctx, suite.acct, fault.New(true))
	assert.NoError(t, err, clues.ToCore(err))
	assert.NotEmpty(t, groups)

	for _, group := range groups {
		suite.Run("group_"+group.ID, func() {
			t := suite.T()

			assert.NotEmpty(t, group.ID)
			assert.NotEmpty(t, group.DisplayName)

			// at least one known group should be a team
			if group.ID == tconfig.M365TeamID(t) {
				assert.True(t, group.IsTeam)
			}
		})
	}
}
// TestGroupsMap builds the id-name cache for the tenant and verifies
// that every cached ID resolves to a non-empty name.
func (suite *GroupsIntgSuite) TestGroupsMap() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	graph.InitializeConcurrencyLimiter(ctx, true, 4)

	gm, err := m365.GroupsMap(ctx, suite.acct, fault.New(true))
	assert.NoError(t, err, clues.ToCore(err))
	assert.NotEmpty(t, gm)

	for _, gid := range gm.IDs() {
		suite.Run("group_"+gid, func() {
			t := suite.T()

			assert.NotEmpty(t, gid)

			name, ok := gm.NameOf(gid)
			assert.True(t, ok)
			assert.NotEmpty(t, name)
		})
	}
}
// TestGroups_InvalidCredentials verifies that Groups returns an error
// and no groups when the account carries bogus M365 credentials.
func (suite *GroupsIntgSuite) TestGroups_InvalidCredentials() {
	table := []struct {
		name string
		acct func(t *testing.T) account.Account
	}{
		{
			name: "Invalid Credentials",
			acct: func(t *testing.T) account.Account {
				a, err := account.NewAccount(
					account.ProviderM365,
					account.M365Config{
						M365: credentials.M365{
							AzureClientID:     "Test",
							AzureClientSecret: "without",
						},
						AzureTenantID: "data",
					},
				)
				require.NoError(t, err, clues.ToCore(err))

				return a
			},
		},
	}

	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			groups, err := m365.Groups(ctx, test.acct(t), fault.New(true))
			// testify's message describes the failure condition; this
			// matches the phrasing of the sibling Sites/Users suites
			// ("returned some sites"/"returned some users"), fixing the
			// previous inverted "returned no groups" message.
			assert.Empty(t, groups, "returned some groups")
			assert.NotNil(t, err)
		})
	}
}

View File

@ -6,9 +6,6 @@ import (
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
@ -24,293 +21,8 @@ type getDefaultDriver interface {
GetDefaultDrive(ctx context.Context, userID string) (models.Driveable, error)
}
// ---------------------------------------------------------------------------
// Users
// ---------------------------------------------------------------------------
// User is the minimal information required to identify and display a user.
type User struct {
PrincipalName string
ID string
Name string
Info api.UserInfo
}
// UserNoInfo is the minimal information required to identify and display a user.
// TODO: Remove this once `UsersCompatNoInfo` is removed
type UserNoInfo struct {
PrincipalName string
ID string
Name string
}
// UsersCompat returns a list of users in the specified M365 tenant.
// TODO(ashmrtn): Remove when upstream consumers of the SDK support the fault
// package.
func UsersCompat(ctx context.Context, acct account.Account) ([]*User, error) {
errs := fault.New(true)
us, err := Users(ctx, acct, errs)
if err != nil {
return nil, err
}
return us, errs.Failure()
}
// UsersCompatNoInfo returns a list of users in the specified M365 tenant.
// TODO: Remove this once `Info` is removed from the `User` struct and callers
// have switched over
func UsersCompatNoInfo(ctx context.Context, acct account.Account) ([]*UserNoInfo, error) {
errs := fault.New(true)
us, err := usersNoInfo(ctx, acct, errs)
if err != nil {
return nil, err
}
return us, errs.Failure()
}
// UserHasMailbox returns true if the user has an exchange mailbox enabled
// false otherwise, and a nil pointer and an error in case of error
func UserHasMailbox(ctx context.Context, acct account.Account, userID string) (bool, error) {
ac, err := makeAC(ctx, acct, path.ExchangeService)
if err != nil {
return false, clues.Stack(err).WithClues(ctx)
}
_, err = ac.Users().GetMailInbox(ctx, userID)
if err != nil {
if err := api.EvaluateMailboxError(err); err != nil {
return false, clues.Stack(err)
}
return false, nil
}
return true, nil
}
// UserHasDrives returns true if the user has any drives
// false otherwise, and a nil pointer and an error in case of error
func UserHasDrives(ctx context.Context, acct account.Account, userID string) (bool, error) {
ac, err := makeAC(ctx, acct, path.OneDriveService)
if err != nil {
return false, clues.Stack(err).WithClues(ctx)
}
return checkUserHasDrives(ctx, ac.Users(), userID)
}
func checkUserHasDrives(ctx context.Context, dgdd getDefaultDriver, userID string) (bool, error) {
_, err := dgdd.GetDefaultDrive(ctx, userID)
if err != nil {
// we consider this a non-error case, since it
// answers the question the caller is asking.
if clues.HasLabel(err, graph.LabelsMysiteNotFound) || clues.HasLabel(err, graph.LabelsNoSharePointLicense) {
return false, nil
}
if graph.IsErrUserNotFound(err) {
return false, clues.Stack(graph.ErrResourceOwnerNotFound, err)
}
return false, clues.Stack(err)
}
return true, nil
}
// usersNoInfo returns a list of users in the specified M365 tenant - with no info
// TODO: Remove this once we remove `Info` from `Users` and instead rely on the `GetUserInfo` API
// to get user information
func usersNoInfo(ctx context.Context, acct account.Account, errs *fault.Bus) ([]*UserNoInfo, error) {
ac, err := makeAC(ctx, acct, path.UnknownService)
if err != nil {
return nil, clues.Stack(err).WithClues(ctx)
}
us, err := ac.Users().GetAll(ctx, errs)
if err != nil {
return nil, err
}
ret := make([]*UserNoInfo, 0, len(us))
for _, u := range us {
pu, err := parseUser(u)
if err != nil {
return nil, clues.Wrap(err, "formatting user data")
}
puNoInfo := &UserNoInfo{
PrincipalName: pu.PrincipalName,
ID: pu.ID,
Name: pu.Name,
}
ret = append(ret, puNoInfo)
}
return ret, nil
}
// Users returns a list of users in the specified M365 tenant
func Users(ctx context.Context, acct account.Account, errs *fault.Bus) ([]*User, error) {
ac, err := makeAC(ctx, acct, path.ExchangeService)
if err != nil {
return nil, clues.Stack(err).WithClues(ctx)
}
us, err := ac.Users().GetAll(ctx, errs)
if err != nil {
return nil, err
}
ret := make([]*User, 0, len(us))
for _, u := range us {
pu, err := parseUser(u)
if err != nil {
return nil, clues.Wrap(err, "formatting user data")
}
userInfo, err := ac.Users().GetInfo(ctx, pu.ID)
if err != nil {
return nil, clues.Wrap(err, "getting user details")
}
pu.Info = *userInfo
ret = append(ret, pu)
}
return ret, nil
}
// parseUser extracts information from `models.Userable` we care about
func parseUser(item models.Userable) (*User, error) {
if item.GetUserPrincipalName() == nil {
return nil, clues.New("user missing principal name").
With("user_id", ptr.Val(item.GetId()))
}
u := &User{
PrincipalName: ptr.Val(item.GetUserPrincipalName()),
ID: ptr.Val(item.GetId()),
Name: ptr.Val(item.GetDisplayName()),
}
return u, nil
}
// UserInfo returns the corso-specific set of user metadata.
func GetUserInfo(
ctx context.Context,
acct account.Account,
userID string,
) (*api.UserInfo, error) {
ac, err := makeAC(ctx, acct, path.ExchangeService)
if err != nil {
return nil, clues.Stack(err).WithClues(ctx)
}
ui, err := ac.Users().GetInfo(ctx, userID)
if err != nil {
return nil, err
}
return ui, nil
}
// ---------------------------------------------------------------------------
// Sites
// ---------------------------------------------------------------------------
// Site is the minimal information required to identify and display a SharePoint site.
type Site struct {
// WebURL is the url for the site, works as an alias for the user name.
WebURL string
// ID is of the format: <site collection hostname>.<site collection unique id>.<site unique id>
// for example: contoso.sharepoint.com,abcdeab3-0ccc-4ce1-80ae-b32912c9468d,xyzud296-9f7c-44e1-af81-3c06d0d43007
ID string
// DisplayName is the human-readable name of the site. Normally the plaintext name that the
// user provided when they created the site, though it can be changed across time.
// Ex: webUrl: https://host.com/sites/TestingSite, displayName: "Testing Site"
DisplayName string
}
// Sites returns a list of Sites in a specified M365 tenant
func Sites(ctx context.Context, acct account.Account, errs *fault.Bus) ([]*Site, error) {
ac, err := makeAC(ctx, acct, path.SharePointService)
if err != nil {
return nil, clues.Stack(err).WithClues(ctx)
}
return getAllSites(ctx, ac.Sites())
}
type getAllSiteser interface {
GetAll(ctx context.Context, errs *fault.Bus) ([]models.Siteable, error)
}
func getAllSites(ctx context.Context, gas getAllSiteser) ([]*Site, error) {
sites, err := gas.GetAll(ctx, fault.New(true))
if err != nil {
if clues.HasLabel(err, graph.LabelsNoSharePointLicense) {
return nil, clues.Stack(graph.ErrServiceNotEnabled, err)
}
return nil, clues.Wrap(err, "retrieving sites")
}
ret := make([]*Site, 0, len(sites))
for _, s := range sites {
ps, err := parseSite(s)
if err != nil {
return nil, clues.Wrap(err, "parsing siteable")
}
ret = append(ret, ps)
}
return ret, nil
}
// parseSite extracts the information from `models.Siteable` we care about
func parseSite(item models.Siteable) (*Site, error) {
s := &Site{
ID: ptr.Val(item.GetId()),
WebURL: ptr.Val(item.GetWebUrl()),
DisplayName: ptr.Val(item.GetDisplayName()),
}
return s, nil
}
// SitesMap retrieves all sites in the tenant, and returns two maps: one id-to-webURL,
// and one webURL-to-id.
func SitesMap(
ctx context.Context,
acct account.Account,
errs *fault.Bus,
) (idname.Cacher, error) {
sites, err := Sites(ctx, acct, errs)
if err != nil {
return idname.NewCache(nil), err
}
itn := make(map[string]string, len(sites))
for _, s := range sites {
itn[s.ID] = s.WebURL
}
return idname.NewCache(itn), nil
type getAller[T any] interface {
GetAll(ctx context.Context, errs *fault.Bus) ([]T, error)
}
// ---------------------------------------------------------------------------

View File

@ -0,0 +1,99 @@
package m365
import (
"context"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
)
// Site is the minimal information required to identify and display a SharePoint site.
type Site struct {
	// WebURL is the url for the site, works as an alias for the user name.
	WebURL string

	// ID is of the format: <site collection hostname>.<site collection unique id>.<site unique id>
	// for example: contoso.sharepoint.com,abcdeab3-0ccc-4ce1-80ae-b32912c9468d,xyzud296-9f7c-44e1-af81-3c06d0d43007
	ID string

	// DisplayName is the human-readable name of the site. Normally the plaintext name that the
	// user provided when they created the site, though it can be changed across time.
	// Ex: webUrl: https://host.com/sites/TestingSite, displayName: "Testing Site"
	DisplayName string
}
// Sites returns a list of Sites in a specified M365 tenant.
func Sites(ctx context.Context, acct account.Account, errs *fault.Bus) ([]*Site, error) {
	client, err := makeAC(ctx, acct, path.SharePointService)
	if err != nil {
		return nil, clues.Stack(err).WithClues(ctx)
	}

	return getAllSites(ctx, client.Sites())
}
// getAllSites fetches every site via the supplied getter and converts
// each one into the local Site representation. A missing SharePoint
// license is surfaced as graph.ErrServiceNotEnabled.
func getAllSites(
	ctx context.Context,
	ga getAller[models.Siteable],
) ([]*Site, error) {
	items, err := ga.GetAll(ctx, fault.New(true))
	if err != nil {
		if clues.HasLabel(err, graph.LabelsNoSharePointLicense) {
			return nil, clues.Stack(graph.ErrServiceNotEnabled, err)
		}

		return nil, clues.Wrap(err, "retrieving sites")
	}

	results := make([]*Site, 0, len(items))

	for _, item := range items {
		site, err := parseSite(item)
		if err != nil {
			return nil, clues.Wrap(err, "parsing siteable")
		}

		results = append(results, site)
	}

	return results, nil
}
// parseSite extracts the information from `models.Siteable` we care about.
func parseSite(item models.Siteable) (*Site, error) {
	site := Site{
		ID:          ptr.Val(item.GetId()),
		WebURL:      ptr.Val(item.GetWebUrl()),
		DisplayName: ptr.Val(item.GetDisplayName()),
	}

	return &site, nil
}
// SitesMap retrieves all sites in the tenant and returns an id-name
// cache mapping each site ID to its webURL.
func SitesMap(
	ctx context.Context,
	acct account.Account,
	errs *fault.Bus,
) (idname.Cacher, error) {
	sites, err := Sites(ctx, acct, errs)
	if err != nil {
		return idname.NewCache(nil), err
	}

	idToURL := make(map[string]string, len(sites))

	for _, s := range sites {
		idToURL[s.ID] = s.WebURL
	}

	return idname.NewCache(idToURL), nil
}

View File

@ -0,0 +1,191 @@
package m365
import (
"context"
"testing"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/credentials"
"github.com/alcionai/corso/src/pkg/fault"
)
// siteIntegrationSuite holds integration tests for tenant-level site
// listing.
type siteIntegrationSuite struct {
	tester.Suite
}

// TestSiteIntegrationSuite registers the suite; it only runs when the
// M365 account credential env vars are present.
func TestSiteIntegrationSuite(t *testing.T) {
	suite.Run(t, &siteIntegrationSuite{
		Suite: tester.NewIntegrationSuite(
			t,
			[][]string{tconfig.M365AcctCredEnvs}),
	})
}

// SetupSuite bounds graph API concurrency for the whole suite.
func (suite *siteIntegrationSuite) SetupSuite() {
	ctx, flush := tester.NewContext(suite.T())
	defer flush()

	graph.InitializeConcurrencyLimiter(ctx, true, 4)
}
// TestSites lists all sites in the test tenant and verifies each entry
// has a populated WebURL, ID, and DisplayName.
func (suite *siteIntegrationSuite) TestSites() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	acct := tconfig.NewM365Account(t)

	sites, err := Sites(ctx, acct, fault.New(true))
	assert.NoError(t, err, clues.ToCore(err))
	assert.NotEmpty(t, sites)

	for _, s := range sites {
		suite.Run("site_"+s.ID, func() {
			t := suite.T()

			assert.NotEmpty(t, s.WebURL)
			assert.NotEmpty(t, s.ID)
			assert.NotEmpty(t, s.DisplayName)
		})
	}
}
// TestSites_InvalidCredentials verifies that Sites errors and returns
// no sites when the account carries bogus or empty M365 credentials.
func (suite *siteIntegrationSuite) TestSites_InvalidCredentials() {
	table := []struct {
		name string
		acct func(t *testing.T) account.Account
	}{
		{
			name: "Invalid Credentials",
			acct: func(t *testing.T) account.Account {
				a, err := account.NewAccount(
					account.ProviderM365,
					account.M365Config{
						M365: credentials.M365{
							AzureClientID:     "Test",
							AzureClientSecret: "without",
						},
						AzureTenantID: "data",
					},
				)
				require.NoError(t, err, clues.ToCore(err))

				return a
			},
		},
		{
			name: "Empty Credentials",
			acct: func(t *testing.T) account.Account {
				// intentionally swallowing the error here
				a, _ := account.NewAccount(account.ProviderM365)
				return a
			},
		},
	}

	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			sites, err := Sites(ctx, test.acct(t), fault.New(true))
			assert.Empty(t, sites, "returned some sites")
			assert.NotNil(t, err)
		})
	}
}
// ---------------------------------------------------------------------------
// Unit
// ---------------------------------------------------------------------------
// siteUnitSuite holds unit tests for site parsing/listing helpers.
type siteUnitSuite struct {
	tester.Suite
}

// TestSiteUnitSuite registers the unit suite (no credentials needed).
func TestSiteUnitSuite(t *testing.T) {
	suite.Run(t, &siteUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// mockGASites is a canned getAller[models.Siteable]: GetAll always
// returns the configured response and error.
type mockGASites struct {
	response []models.Siteable
	err      error
}

// GetAll satisfies the getAller interface; the ctx and fault bus are
// ignored.
func (m mockGASites) GetAll(context.Context, *fault.Bus) ([]models.Siteable, error) {
	return m.response, m.err
}
// TestGetAllSites checks getAllSites error handling against mocked
// responses: success, a missing-SharePoint-license error (mapped to
// graph.ErrServiceNotEnabled), and an arbitrary odata error.
func (suite *siteUnitSuite) TestGetAllSites() {
	table := []struct {
		name      string
		mock      func(context.Context) getAller[models.Siteable]
		expectErr func(*testing.T, error)
	}{
		{
			name: "ok",
			mock: func(ctx context.Context) getAller[models.Siteable] {
				return mockGASites{[]models.Siteable{}, nil}
			},
			expectErr: func(t *testing.T, err error) {
				assert.NoError(t, err, clues.ToCore(err))
			},
		},
		{
			name: "no sharepoint license",
			mock: func(ctx context.Context) getAller[models.Siteable] {
				// build an odata error whose message matches the
				// no-SharePoint-license marker.
				odErr := odataerrors.NewODataError()
				merr := odataerrors.NewMainError()
				merr.SetCode(ptr.To("code"))
				merr.SetMessage(ptr.To(string(graph.NoSPLicense)))
				odErr.SetErrorEscaped(merr)

				return mockGASites{nil, graph.Stack(ctx, odErr)}
			},
			expectErr: func(t *testing.T, err error) {
				assert.ErrorIs(t, err, graph.ErrServiceNotEnabled, clues.ToCore(err))
			},
		},
		{
			name: "arbitrary error",
			mock: func(ctx context.Context) getAller[models.Siteable] {
				odErr := odataerrors.NewODataError()
				merr := odataerrors.NewMainError()
				merr.SetCode(ptr.To("code"))
				merr.SetMessage(ptr.To("message"))
				odErr.SetErrorEscaped(merr)

				return mockGASites{nil, graph.Stack(ctx, odErr)}
			},
			expectErr: func(t *testing.T, err error) {
				assert.Error(t, err, clues.ToCore(err))
			},
		},
	}

	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			gas := test.mock(ctx)

			_, err := getAllSites(ctx, gas)
			test.expectErr(t, err)
		})
	}
}

View File

@ -0,0 +1,211 @@
package m365
import (
"context"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// User is the minimal information required to identify and display a user.
type User struct {
	// PrincipalName is the M365 user principal name (login identifier).
	PrincipalName string
	// ID is the unique identifier assigned to the user by M365.
	ID string
	// Name is the user's display name.
	Name string
	// Info carries corso-specific user metadata (enabled services, etc.).
	Info api.UserInfo
}

// UserNoInfo is the minimal information required to identify and display a user.
// TODO: Remove this once `UsersCompatNoInfo` is removed
type UserNoInfo struct {
	// PrincipalName is the M365 user principal name (login identifier).
	PrincipalName string
	// ID is the unique identifier assigned to the user by M365.
	ID string
	// Name is the user's display name.
	Name string
}
// UsersCompat returns a list of users in the specified M365 tenant.
// It wraps Users with an internally-constructed fail-fast fault bus.
// TODO(ashmrtn): Remove when upstream consumers of the SDK support the fault
// package.
func UsersCompat(ctx context.Context, acct account.Account) ([]*User, error) {
	bus := fault.New(true)

	users, err := Users(ctx, acct, bus)
	if err != nil {
		return nil, err
	}

	return users, bus.Failure()
}
// UsersCompatNoInfo returns a list of users in the specified M365 tenant,
// without per-user info. It wraps usersNoInfo with an internally-constructed
// fail-fast fault bus.
// TODO: Remove this once `Info` is removed from the `User` struct and callers
// have switched over
func UsersCompatNoInfo(ctx context.Context, acct account.Account) ([]*UserNoInfo, error) {
	bus := fault.New(true)

	users, err := usersNoInfo(ctx, acct, bus)
	if err != nil {
		return nil, err
	}

	return users, bus.Failure()
}
// UserHasMailbox returns true if the user has an exchange mailbox enabled,
// false otherwise, and false plus an error in case of error.
func UserHasMailbox(ctx context.Context, acct account.Account, userID string) (bool, error) {
	ac, err := makeAC(ctx, acct, path.ExchangeService)
	if err != nil {
		return false, clues.Stack(err).WithClues(ctx)
	}

	_, err = ac.Users().GetMailInbox(ctx, userID)
	if err != nil {
		// EvaluateMailboxError distinguishes "no mailbox" conditions
		// (nil result -> report false, no error) from real failures.
		if err := api.EvaluateMailboxError(err); err != nil {
			return false, clues.Stack(err)
		}

		return false, nil
	}

	return true, nil
}
// UserHasDrives returns true if the user has any drives, false
// otherwise, and false plus an error in case of error.
func UserHasDrives(ctx context.Context, acct account.Account, userID string) (bool, error) {
	client, err := makeAC(ctx, acct, path.OneDriveService)
	if err != nil {
		return false, clues.Stack(err).WithClues(ctx)
	}

	return checkUserHasDrives(ctx, client.Users(), userID)
}
// checkUserHasDrives answers UserHasDrives via the supplied default-drive
// getter. Known "no drive" conditions (no mysite, no SharePoint license)
// are reported as (false, nil); an unknown user is wrapped with
// graph.ErrResourceOwnerNotFound; all other errors pass through.
func checkUserHasDrives(ctx context.Context, dgdd getDefaultDriver, userID string) (bool, error) {
	_, err := dgdd.GetDefaultDrive(ctx, userID)
	if err != nil {
		// we consider this a non-error case, since it
		// answers the question the caller is asking.
		if clues.HasLabel(err, graph.LabelsMysiteNotFound) || clues.HasLabel(err, graph.LabelsNoSharePointLicense) {
			return false, nil
		}

		if graph.IsErrUserNotFound(err) {
			return false, clues.Stack(graph.ErrResourceOwnerNotFound, err)
		}

		return false, clues.Stack(err)
	}

	return true, nil
}
// usersNoInfo returns a list of users in the specified M365 tenant - with no info.
// TODO: Remove this once we remove `Info` from `Users` and instead rely on the `GetUserInfo` API
// to get user information
func usersNoInfo(ctx context.Context, acct account.Account, errs *fault.Bus) ([]*UserNoInfo, error) {
	client, err := makeAC(ctx, acct, path.UnknownService)
	if err != nil {
		return nil, clues.Stack(err).WithClues(ctx)
	}

	rawUsers, err := client.Users().GetAll(ctx, errs)
	if err != nil {
		return nil, err
	}

	results := make([]*UserNoInfo, 0, len(rawUsers))

	for _, raw := range rawUsers {
		parsed, err := parseUser(raw)
		if err != nil {
			return nil, clues.Wrap(err, "formatting user data")
		}

		// strip the Info field by copying only the identifying data.
		results = append(results, &UserNoInfo{
			PrincipalName: parsed.PrincipalName,
			ID:            parsed.ID,
			Name:          parsed.Name,
		})
	}

	return results, nil
}
// Users returns a list of users in the specified M365 tenant, with
// corso-specific Info populated for each one.
// NOTE(review): this issues one GetInfo call per user, so cost scales
// with tenant size; callers that don't need Info should prefer the
// NoInfo variants.
func Users(ctx context.Context, acct account.Account, errs *fault.Bus) ([]*User, error) {
	ac, err := makeAC(ctx, acct, path.ExchangeService)
	if err != nil {
		return nil, clues.Stack(err).WithClues(ctx)
	}

	us, err := ac.Users().GetAll(ctx, errs)
	if err != nil {
		return nil, err
	}

	ret := make([]*User, 0, len(us))

	for _, u := range us {
		pu, err := parseUser(u)
		if err != nil {
			return nil, clues.Wrap(err, "formatting user data")
		}

		// per-user lookup to fill in corso-specific metadata.
		userInfo, err := ac.Users().GetInfo(ctx, pu.ID)
		if err != nil {
			return nil, clues.Wrap(err, "getting user details")
		}

		pu.Info = *userInfo

		ret = append(ret, pu)
	}

	return ret, nil
}
// parseUser extracts the information from `models.Userable` we care
// about. A user without a principal name is treated as an error.
func parseUser(item models.Userable) (*User, error) {
	if item.GetUserPrincipalName() == nil {
		return nil, clues.New("user missing principal name").
			With("user_id", ptr.Val(item.GetId()))
	}

	user := User{
		PrincipalName: ptr.Val(item.GetUserPrincipalName()),
		ID:            ptr.Val(item.GetId()),
		Name:          ptr.Val(item.GetDisplayName()),
	}

	return &user, nil
}
// GetUserInfo returns the corso-specific set of user metadata for the
// given user ID.
func GetUserInfo(
	ctx context.Context,
	acct account.Account,
	userID string,
) (*api.UserInfo, error) {
	ac, err := makeAC(ctx, acct, path.ExchangeService)
	if err != nil {
		return nil, clues.Stack(err).WithClues(ctx)
	}

	ui, err := ac.Users().GetInfo(ctx, userID)
	if err != nil {
		return nil, err
	}

	return ui, nil
}

View File

@ -23,26 +23,29 @@ import (
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type M365IntegrationSuite struct {
type userIntegrationSuite struct {
tester.Suite
acct account.Account
}
func TestM365IntegrationSuite(t *testing.T) {
suite.Run(t, &M365IntegrationSuite{
func TestUserIntegrationSuite(t *testing.T) {
suite.Run(t, &userIntegrationSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tconfig.M365AcctCredEnvs}),
})
}
func (suite *M365IntegrationSuite) SetupSuite() {
func (suite *userIntegrationSuite) SetupSuite() {
ctx, flush := tester.NewContext(suite.T())
defer flush()
graph.InitializeConcurrencyLimiter(ctx, true, 4)
suite.acct = tconfig.NewM365Account(suite.T())
}
func (suite *M365IntegrationSuite) TestUsers() {
func (suite *userIntegrationSuite) TestUsers() {
t := suite.T()
ctx, flush := tester.NewContext(t)
@ -50,9 +53,7 @@ func (suite *M365IntegrationSuite) TestUsers() {
graph.InitializeConcurrencyLimiter(ctx, true, 4)
acct := tconfig.NewM365Account(suite.T())
users, err := Users(ctx, acct, fault.New(true))
users, err := Users(ctx, suite.acct, fault.New(true))
assert.NoError(t, err, clues.ToCore(err))
assert.NotEmpty(t, users)
@ -68,7 +69,7 @@ func (suite *M365IntegrationSuite) TestUsers() {
}
}
func (suite *M365IntegrationSuite) TestUsersCompat_HasNoInfo() {
func (suite *userIntegrationSuite) TestUsersCompat_HasNoInfo() {
t := suite.T()
ctx, flush := tester.NewContext(t)
@ -91,7 +92,7 @@ func (suite *M365IntegrationSuite) TestUsersCompat_HasNoInfo() {
}
}
func (suite *M365IntegrationSuite) TestUserHasMailbox() {
func (suite *userIntegrationSuite) TestUserHasMailbox() {
t := suite.T()
ctx, flush := tester.NewContext(t)
@ -107,7 +108,7 @@ func (suite *M365IntegrationSuite) TestUserHasMailbox() {
assert.True(t, enabled)
}
func (suite *M365IntegrationSuite) TestUserHasDrive() {
func (suite *userIntegrationSuite) TestUserHasDrive() {
t := suite.T()
ctx, flush := tester.NewContext(t)
@ -123,34 +124,155 @@ func (suite *M365IntegrationSuite) TestUserHasDrive() {
assert.True(t, enabled)
}
func (suite *M365IntegrationSuite) TestSites() {
t := suite.T()
func (suite *userIntegrationSuite) TestUsers_InvalidCredentials() {
table := []struct {
name string
acct func(t *testing.T) account.Account
}{
{
name: "Invalid Credentials",
acct: func(t *testing.T) account.Account {
a, err := account.NewAccount(
account.ProviderM365,
account.M365Config{
M365: credentials.M365{
AzureClientID: "Test",
AzureClientSecret: "without",
},
AzureTenantID: "data",
},
)
require.NoError(t, err, clues.ToCore(err))
ctx, flush := tester.NewContext(t)
defer flush()
return a
},
},
}
acct := tconfig.NewM365Account(t)
sites, err := Sites(ctx, acct, fault.New(true))
assert.NoError(t, err, clues.ToCore(err))
assert.NotEmpty(t, sites)
for _, s := range sites {
suite.Run("site_"+s.ID, func() {
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
assert.NotEmpty(t, s.WebURL)
assert.NotEmpty(t, s.ID)
assert.NotEmpty(t, s.DisplayName)
ctx, flush := tester.NewContext(t)
defer flush()
users, err := Users(ctx, test.acct(t), fault.New(true))
assert.Empty(t, users, "returned some users")
assert.NotNil(t, err)
})
}
}
type m365UnitSuite struct {
func (suite *userIntegrationSuite) TestGetUserInfo() {
table := []struct {
name string
user string
expect *api.UserInfo
expectErr require.ErrorAssertionFunc
}{
{
name: "standard test user",
user: tconfig.M365UserID(suite.T()),
expect: &api.UserInfo{
ServicesEnabled: map[path.ServiceType]struct{}{
path.ExchangeService: {},
path.OneDriveService: {},
},
Mailbox: api.MailboxInfo{
Purpose: "user",
ErrGetMailBoxSetting: nil,
},
},
expectErr: require.NoError,
},
{
name: "user does not exist",
user: uuid.NewString(),
expect: &api.UserInfo{
ServicesEnabled: map[path.ServiceType]struct{}{},
Mailbox: api.MailboxInfo{},
},
expectErr: require.Error,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
result, err := GetUserInfo(ctx, suite.acct, test.user)
test.expectErr(t, err, clues.ToCore(err))
if err != nil {
return
}
assert.Equal(t, test.expect.ServicesEnabled, result.ServicesEnabled)
})
}
}
// TestGetUserInfo_userWithoutDrive compares the info returned for a user
// lacking both drive and exchange against a user with both services
// enabled.
func (suite *userIntegrationSuite) TestGetUserInfo_userWithoutDrive() {
	userID := tconfig.M365UserID(suite.T())

	cases := []struct {
		name   string
		user   string
		expect *api.UserInfo
	}{
		{
			name: "user without drive and exchange",
			user: "a53c26f7-5100-4acb-a910-4d20960b2c19", // User: testevents@10rqc2.onmicrosoft.com
			expect: &api.UserInfo{
				ServicesEnabled: map[path.ServiceType]struct{}{},
				Mailbox: api.MailboxInfo{
					ErrGetMailBoxSetting: []error{api.ErrMailBoxSettingsNotFound},
				},
			},
		},
		{
			name: "user with drive and exchange",
			user: userID,
			expect: &api.UserInfo{
				ServicesEnabled: map[path.ServiceType]struct{}{
					path.ExchangeService: {},
					path.OneDriveService: {},
				},
				Mailbox: api.MailboxInfo{
					Purpose:              "user",
					ErrGetMailBoxSetting: []error{},
				},
			},
		},
	}

	for _, tc := range cases {
		suite.Run(tc.name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			got, err := GetUserInfo(ctx, suite.acct, tc.user)
			require.NoError(t, err, clues.ToCore(err))

			assert.Equal(t, tc.expect.ServicesEnabled, got.ServicesEnabled)
			assert.Equal(t, tc.expect.Mailbox.ErrGetMailBoxSetting, got.Mailbox.ErrGetMailBoxSetting)
			assert.Equal(t, tc.expect.Mailbox.Purpose, got.Mailbox.Purpose)
		})
	}
}
// ---------------------------------------------------------------------------
// Unit
// ---------------------------------------------------------------------------
// userUnitSuite hosts user-discovery unit tests that run without a live
// M365 connection.
type userUnitSuite struct {
	tester.Suite
}
func TestM365UnitSuite(t *testing.T) {
suite.Run(t, &m365UnitSuite{Suite: tester.NewUnitSuite(t)})
// TestUserUnitSuite registers userUnitSuite with the test runner.
func TestUserUnitSuite(t *testing.T) {
	suite.Run(t, &userUnitSuite{Suite: tester.NewUnitSuite(t)})
}
type mockDGDD struct {
@ -162,7 +284,7 @@ func (m mockDGDD) GetDefaultDrive(context.Context, string) (models.Driveable, er
return m.response, m.err
}
func (suite *m365UnitSuite) TestCheckUserHasDrives() {
func (suite *userUnitSuite) TestCheckUserHasDrives() {
table := []struct {
name string
mock func(context.Context) getDefaultDriver
@ -275,300 +397,3 @@ func (suite *m365UnitSuite) TestCheckUserHasDrives() {
})
}
}
// mockGAS is a stub sites getter: its GetAll returns the canned response
// and err fields, letting tests drive getAllSites without graph calls.
type mockGAS struct {
	response []models.Siteable
	err      error
}
// GetAll returns the mock's canned site list and error, ignoring the
// context and fault bus.
func (m mockGAS) GetAll(context.Context, *fault.Bus) ([]models.Siteable, error) {
	return m.response, m.err
}
// TestGetAllSites exercises getAllSites against a mocked sites getter,
// covering success, a tenant without a SharePoint license, and an
// arbitrary graph error.
func (suite *m365UnitSuite) TestGetAllSites() {
	// newODataErr builds a graph OData error with the given message,
	// shared by the failing cases below.
	newODataErr := func(msg string) *odataerrors.ODataError {
		odErr := odataerrors.NewODataError()
		merr := odataerrors.NewMainError()
		merr.SetCode(ptr.To("code"))
		merr.SetMessage(ptr.To(msg))
		odErr.SetErrorEscaped(merr)

		return odErr
	}

	cases := []struct {
		name      string
		mock      func(context.Context) getAllSiteser
		expectErr func(*testing.T, error)
	}{
		{
			name: "ok",
			mock: func(ctx context.Context) getAllSiteser {
				return mockGAS{[]models.Siteable{}, nil}
			},
			expectErr: func(t *testing.T, err error) {
				assert.NoError(t, err, clues.ToCore(err))
			},
		},
		{
			name: "no sharepoint license",
			mock: func(ctx context.Context) getAllSiteser {
				return mockGAS{nil, graph.Stack(ctx, newODataErr(string(graph.NoSPLicense)))}
			},
			expectErr: func(t *testing.T, err error) {
				assert.ErrorIs(t, err, graph.ErrServiceNotEnabled, clues.ToCore(err))
			},
		},
		{
			name: "arbitrary error",
			mock: func(ctx context.Context) getAllSiteser {
				return mockGAS{nil, graph.Stack(ctx, newODataErr("message"))}
			},
			expectErr: func(t *testing.T, err error) {
				assert.Error(t, err, clues.ToCore(err))
			},
		},
	}

	for _, tc := range cases {
		suite.Run(tc.name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			_, err := getAllSites(ctx, tc.mock(ctx))
			tc.expectErr(t, err)
		})
	}
}
// DiscoveryIntgSuite runs user/site discovery tests against a live M365
// tenant; acct is populated in SetupSuite from environment credentials.
type DiscoveryIntgSuite struct {
	tester.Suite
	acct account.Account
}
// TestDiscoveryIntgSuite registers DiscoveryIntgSuite as an integration
// suite gated on the M365 account credential env vars.
func TestDiscoveryIntgSuite(t *testing.T) {
	suite.Run(t, &DiscoveryIntgSuite{
		Suite: tester.NewIntegrationSuite(
			t,
			[][]string{tconfig.M365AcctCredEnvs}),
	})
}
// SetupSuite initializes the graph concurrency limiter and loads the
// M365 test account before any tests in the suite run.
func (suite *DiscoveryIntgSuite) SetupSuite() {
	t := suite.T()
	ctx, flush := tester.NewContext(t)
	defer flush()
	// NOTE(review): presumably caps parallel graph requests at 4 to avoid
	// throttling the live tenant — confirm against the graph package.
	graph.InitializeConcurrencyLimiter(ctx, true, 4)
	suite.acct = tconfig.NewM365Account(t)
}
// TestUsers checks that user discovery against the live tenant succeeds,
// returns at least one user, and records no failures on the fault bus.
func (suite *DiscoveryIntgSuite) TestUsers() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	bus := fault.New(true)

	users, err := Users(ctx, suite.acct, bus)
	assert.NoError(t, err, clues.ToCore(err))
	assert.NotEmpty(t, users)

	fe := bus.Errors()
	assert.Nil(t, fe.Failure)
	assert.Empty(t, fe.Recovered)
}
// TestUsers_InvalidCredentials ensures user discovery fails cleanly and
// returns no users when the account carries bogus credentials.
func (suite *DiscoveryIntgSuite) TestUsers_InvalidCredentials() {
	cases := []struct {
		name string
		acct func(t *testing.T) account.Account
	}{
		{
			name: "Invalid Credentials",
			acct: func(t *testing.T) account.Account {
				a, err := account.NewAccount(
					account.ProviderM365,
					account.M365Config{
						M365: credentials.M365{
							AzureClientID:     "Test",
							AzureClientSecret: "without",
						},
						AzureTenantID: "data",
					},
				)
				require.NoError(t, err, clues.ToCore(err))

				return a
			},
		},
	}

	for _, tc := range cases {
		suite.Run(tc.name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			users, err := Users(ctx, tc.acct(t), fault.New(true))
			assert.Empty(t, users, "returned some users")
			assert.NotNil(t, err)
		})
	}
}
// TestSites_InvalidCredentials ensures site discovery fails and returns
// no sites for both bogus and entirely empty credentials.
func (suite *DiscoveryIntgSuite) TestSites_InvalidCredentials() {
	cases := []struct {
		name string
		acct func(t *testing.T) account.Account
	}{
		{
			name: "Invalid Credentials",
			acct: func(t *testing.T) account.Account {
				a, err := account.NewAccount(
					account.ProviderM365,
					account.M365Config{
						M365: credentials.M365{
							AzureClientID:     "Test",
							AzureClientSecret: "without",
						},
						AzureTenantID: "data",
					},
				)
				require.NoError(t, err, clues.ToCore(err))

				return a
			},
		},
		{
			name: "Empty Credentials",
			acct: func(t *testing.T) account.Account {
				// the construction error is deliberately dropped here;
				// the test only needs a credential-less account value.
				a, _ := account.NewAccount(account.ProviderM365)

				return a
			},
		},
	}

	for _, tc := range cases {
		suite.Run(tc.name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			sites, err := Sites(ctx, tc.acct(t), fault.New(true))
			assert.Empty(t, sites, "returned some sites")
			assert.NotNil(t, err)
		})
	}
}
// TestGetUserInfo verifies that GetUserInfo reports the expected enabled
// services for a known user and errors out for a nonexistent one.
func (suite *DiscoveryIntgSuite) TestGetUserInfo() {
	cases := []struct {
		name      string
		user      string
		expect    *api.UserInfo
		expectErr require.ErrorAssertionFunc
	}{
		{
			name: "standard test user",
			user: tconfig.M365UserID(suite.T()),
			expect: &api.UserInfo{
				ServicesEnabled: map[path.ServiceType]struct{}{
					path.ExchangeService: {},
					path.OneDriveService: {},
				},
				Mailbox: api.MailboxInfo{
					Purpose:              "user",
					ErrGetMailBoxSetting: nil,
				},
			},
			expectErr: require.NoError,
		},
		{
			name: "user does not exist",
			user: uuid.NewString(),
			expect: &api.UserInfo{
				ServicesEnabled: map[path.ServiceType]struct{}{},
				Mailbox:         api.MailboxInfo{},
			},
			expectErr: require.Error,
		},
	}

	for _, tc := range cases {
		suite.Run(tc.name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			got, err := GetUserInfo(ctx, suite.acct, tc.user)
			tc.expectErr(t, err, clues.ToCore(err))

			if err != nil {
				// nothing further to check once the lookup failed
				return
			}

			assert.Equal(t, tc.expect.ServicesEnabled, got.ServicesEnabled)
		})
	}
}
// TestGetUserInfo_userWithoutDrive checks the info returned for a user
// with neither drive nor exchange against a fully-enabled user.
func (suite *DiscoveryIntgSuite) TestGetUserInfo_userWithoutDrive() {
	userID := tconfig.M365UserID(suite.T())

	cases := []struct {
		name   string
		user   string
		expect *api.UserInfo
	}{
		{
			name: "user without drive and exchange",
			user: "a53c26f7-5100-4acb-a910-4d20960b2c19", // User: testevents@10rqc2.onmicrosoft.com
			expect: &api.UserInfo{
				ServicesEnabled: map[path.ServiceType]struct{}{},
				Mailbox: api.MailboxInfo{
					ErrGetMailBoxSetting: []error{api.ErrMailBoxSettingsNotFound},
				},
			},
		},
		{
			name: "user with drive and exchange",
			user: userID,
			expect: &api.UserInfo{
				ServicesEnabled: map[path.ServiceType]struct{}{
					path.ExchangeService: {},
					path.OneDriveService: {},
				},
				Mailbox: api.MailboxInfo{
					Purpose:              "user",
					ErrGetMailBoxSetting: []error{},
				},
			},
		},
	}

	for _, tc := range cases {
		suite.Run(tc.name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			got, err := GetUserInfo(ctx, suite.acct, tc.user)
			require.NoError(t, err, clues.ToCore(err))

			assert.Equal(t, tc.expect.ServicesEnabled, got.ServicesEnabled)
			assert.Equal(t, tc.expect.Mailbox.ErrGetMailBoxSetting, got.Mailbox.ErrGetMailBoxSetting)
			assert.Equal(t, tc.expect.Mailbox.Purpose, got.Mailbox.Purpose)
		})
	}
}

View File

@ -12,8 +12,8 @@ type storageProvider int
//go:generate stringer -type=storageProvider -linecomment
const (
ProviderUnknown storageProvider = iota // Unknown Provider
ProviderS3 // S3
ProviderUnknown storageProvider = 0 // Unknown Provider
ProviderS3 storageProvider = 1 // S3
)
// storage parsing errors

Some files were not shown because too many files have changed in this diff Show More