Merge branch 'main' into channelHandlers
This commit is contained in:
commit
6aff5c62fe
3
.github/workflows/nightly_test.yml
vendored
3
.github/workflows/nightly_test.yml
vendored
@ -92,7 +92,7 @@ jobs:
|
||||
CORSO_M365_TEST_USER_ID: ${{ vars.CORSO_M365_TEST_USER_ID }}
|
||||
CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
|
||||
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
|
||||
CORSO_LOG_FILE: ${{ github.workspace }}/testlog/run-nightly.log
|
||||
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-nightly.log
|
||||
LOG_GRAPH_REQUESTS: true
|
||||
S3_BUCKET: ${{ secrets.CI_TESTS_S3_BUCKET }}
|
||||
run: |
|
||||
@ -101,7 +101,6 @@ jobs:
|
||||
-tags testing \
|
||||
-json \
|
||||
-v \
|
||||
-failfast \
|
||||
-p 1 \
|
||||
-timeout 1h \
|
||||
./... 2>&1 | tee ./testlog/gotest-nightly.log | gotestfmt -hide successful-tests
|
||||
|
||||
@ -39,6 +39,7 @@ var serviceCommands = []func(cmd *cobra.Command) *cobra.Command{
|
||||
addExchangeCommands,
|
||||
addOneDriveCommands,
|
||||
addSharePointCommands,
|
||||
addTeamsCommands,
|
||||
}
|
||||
|
||||
// AddCommands attaches all `corso backup * *` commands to the parent.
|
||||
|
||||
230
src/cli/backup/groups.go
Normal file
230
src/cli/backup/groups.go
Normal file
@ -0,0 +1,230 @@
|
||||
package backup
|
||||
|
||||
import (
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/pflag"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
. "github.com/alcionai/corso/src/cli/print"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
)
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// setup and globals
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
const (
|
||||
groupsServiceCommand = "groups"
|
||||
groupsServiceCommandCreateUseSuffix = "--group <groupsName> | '" + flags.Wildcard + "'"
|
||||
groupsServiceCommandDeleteUseSuffix = "--backup <backupId>"
|
||||
groupsServiceCommandDetailsUseSuffix = "--backup <backupId>"
|
||||
)
|
||||
|
||||
// TODO: correct examples
|
||||
const (
|
||||
groupsServiceCommandCreateExamples = `# Backup all Groups data for Alice
|
||||
corso backup create groups --group alice@example.com
|
||||
|
||||
# Backup only Groups contacts for Alice and Bob
|
||||
corso backup create groups --group engineering,sales --data contacts
|
||||
|
||||
# Backup all Groups data for all M365 users
|
||||
corso backup create groups --group '*'`
|
||||
|
||||
groupsServiceCommandDeleteExamples = `# Delete Groups backup with ID 1234abcd-12ab-cd34-56de-1234abcd
|
||||
corso backup delete groups --backup 1234abcd-12ab-cd34-56de-1234abcd`
|
||||
|
||||
groupsServiceCommandDetailsExamples = `# Explore items in Alice's latest backup (1234abcd...)
|
||||
corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd
|
||||
|
||||
# Explore calendar events occurring after start of 2022
|
||||
corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--event-starts-after 2022-01-01T00:00:00`
|
||||
)
|
||||
|
||||
// called by backup.go to map subcommands to provider-specific handling.
|
||||
func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
|
||||
var (
|
||||
c *cobra.Command
|
||||
fs *pflag.FlagSet
|
||||
)
|
||||
|
||||
switch cmd.Use {
|
||||
case createCommand:
|
||||
c, fs = utils.AddCommand(cmd, groupsCreateCmd(), utils.MarkPreReleaseCommand())
|
||||
fs.SortFlags = false
|
||||
|
||||
c.Use = c.Use + " " + groupsServiceCommandCreateUseSuffix
|
||||
c.Example = groupsServiceCommandCreateExamples
|
||||
|
||||
// Flags addition ordering should follow the order we want them to appear in help and docs:
|
||||
flags.AddGroupFlag(c)
|
||||
flags.AddDataFlag(c, []string{dataLibraries}, false)
|
||||
flags.AddCorsoPassphaseFlags(c)
|
||||
flags.AddAWSCredsFlags(c)
|
||||
flags.AddAzureCredsFlags(c)
|
||||
flags.AddFetchParallelismFlag(c)
|
||||
flags.AddFailFastFlag(c)
|
||||
|
||||
case listCommand:
|
||||
c, fs = utils.AddCommand(cmd, groupsListCmd(), utils.MarkPreReleaseCommand())
|
||||
fs.SortFlags = false
|
||||
|
||||
flags.AddBackupIDFlag(c, false)
|
||||
flags.AddCorsoPassphaseFlags(c)
|
||||
flags.AddAWSCredsFlags(c)
|
||||
flags.AddAzureCredsFlags(c)
|
||||
addFailedItemsFN(c)
|
||||
addSkippedItemsFN(c)
|
||||
addRecoveredErrorsFN(c)
|
||||
|
||||
case detailsCommand:
|
||||
c, fs = utils.AddCommand(cmd, groupsDetailsCmd(), utils.MarkPreReleaseCommand())
|
||||
fs.SortFlags = false
|
||||
|
||||
c.Use = c.Use + " " + groupsServiceCommandDetailsUseSuffix
|
||||
c.Example = groupsServiceCommandDetailsExamples
|
||||
|
||||
flags.AddSkipReduceFlag(c)
|
||||
|
||||
// Flags addition ordering should follow the order we want them to appear in help and docs:
|
||||
// More generic (ex: --user) and more frequently used flags take precedence.
|
||||
flags.AddBackupIDFlag(c, true)
|
||||
flags.AddCorsoPassphaseFlags(c)
|
||||
flags.AddAWSCredsFlags(c)
|
||||
flags.AddAzureCredsFlags(c)
|
||||
|
||||
case deleteCommand:
|
||||
c, fs = utils.AddCommand(cmd, groupsDeleteCmd(), utils.MarkPreReleaseCommand())
|
||||
fs.SortFlags = false
|
||||
|
||||
c.Use = c.Use + " " + groupsServiceCommandDeleteUseSuffix
|
||||
c.Example = groupsServiceCommandDeleteExamples
|
||||
|
||||
flags.AddBackupIDFlag(c, true)
|
||||
flags.AddCorsoPassphaseFlags(c)
|
||||
flags.AddAWSCredsFlags(c)
|
||||
flags.AddAzureCredsFlags(c)
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// backup create
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
// `corso backup create groups [<flag>...]`
|
||||
func groupsCreateCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: groupsServiceCommand,
|
||||
Short: "Backup M365 Group service data",
|
||||
RunE: createGroupsCmd,
|
||||
Args: cobra.NoArgs,
|
||||
}
|
||||
}
|
||||
|
||||
// processes a groups service backup.
|
||||
func createGroupsCmd(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
return Only(ctx, utils.ErrNotYetImplemented)
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// backup list
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
// `corso backup list groups [<flag>...]`
|
||||
func groupsListCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: groupsServiceCommand,
|
||||
Short: "List the history of M365 Groups service backups",
|
||||
RunE: listGroupsCmd,
|
||||
Args: cobra.NoArgs,
|
||||
}
|
||||
}
|
||||
|
||||
// lists the history of backup operations
|
||||
func listGroupsCmd(cmd *cobra.Command, args []string) error {
|
||||
return genericListCommand(cmd, flags.BackupIDFV, path.GroupsService, args)
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// backup details
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
// `corso backup details groups [<flag>...]`
|
||||
func groupsDetailsCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: groupsServiceCommand,
|
||||
Short: "Shows the details of a M365 Groups service backup",
|
||||
RunE: detailsGroupsCmd,
|
||||
Args: cobra.NoArgs,
|
||||
}
|
||||
}
|
||||
|
||||
// processes a groups service backup.
|
||||
func detailsGroupsCmd(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := validateGroupBackupCreateFlags(flags.GroupFV); err != nil {
|
||||
return Only(ctx, err)
|
||||
}
|
||||
|
||||
return Only(ctx, utils.ErrNotYetImplemented)
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// backup delete
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
// `corso backup delete groups [<flag>...]`
|
||||
func groupsDeleteCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: groupsServiceCommand,
|
||||
Short: "Delete backed-up M365 Groups service data",
|
||||
RunE: deleteGroupsCmd,
|
||||
Args: cobra.NoArgs,
|
||||
}
|
||||
}
|
||||
|
||||
// deletes an groups service backup.
|
||||
func deleteGroupsCmd(cmd *cobra.Command, args []string) error {
|
||||
return genericDeleteCommand(cmd, path.GroupsService, flags.BackupIDFV, "Groups", args)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
func validateGroupBackupCreateFlags(groups []string) error {
|
||||
if len(groups) == 0 {
|
||||
return clues.New(
|
||||
"requires one or more --" +
|
||||
flags.GroupFN + " ids, or the wildcard --" +
|
||||
flags.GroupFN + " *",
|
||||
)
|
||||
}
|
||||
|
||||
// TODO(meain)
|
||||
// for _, d := range cats {
|
||||
// if d != dataLibraries {
|
||||
// return clues.New(
|
||||
// d + " is an unrecognized data type; only " + dataLibraries + " is supported"
|
||||
// )
|
||||
// }
|
||||
// }
|
||||
|
||||
return nil
|
||||
}
|
||||
98
src/cli/backup/groups_test.go
Normal file
98
src/cli/backup/groups_test.go
Normal file
@ -0,0 +1,98 @@
|
||||
package backup
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
)
|
||||
|
||||
type GroupsUnitSuite struct {
|
||||
tester.Suite
|
||||
}
|
||||
|
||||
func TestGroupsUnitSuite(t *testing.T) {
|
||||
suite.Run(t, &GroupsUnitSuite{Suite: tester.NewUnitSuite(t)})
|
||||
}
|
||||
|
||||
func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
|
||||
expectUse := groupsServiceCommand
|
||||
|
||||
table := []struct {
|
||||
name string
|
||||
use string
|
||||
expectUse string
|
||||
expectShort string
|
||||
flags []string
|
||||
expectRunE func(*cobra.Command, []string) error
|
||||
}{
|
||||
{
|
||||
"create groups",
|
||||
createCommand,
|
||||
expectUse + " " + groupsServiceCommandCreateUseSuffix,
|
||||
groupsCreateCmd().Short,
|
||||
[]string{
|
||||
flags.CategoryDataFN,
|
||||
flags.FailFastFN,
|
||||
flags.FetchParallelismFN,
|
||||
flags.SkipReduceFN,
|
||||
flags.NoStatsFN,
|
||||
},
|
||||
createGroupsCmd,
|
||||
},
|
||||
{
|
||||
"list groups",
|
||||
listCommand,
|
||||
expectUse,
|
||||
groupsListCmd().Short,
|
||||
[]string{
|
||||
flags.BackupFN,
|
||||
flags.FailedItemsFN,
|
||||
flags.SkippedItemsFN,
|
||||
flags.RecoveredErrorsFN,
|
||||
},
|
||||
listGroupsCmd,
|
||||
},
|
||||
{
|
||||
"details groups",
|
||||
detailsCommand,
|
||||
expectUse + " " + groupsServiceCommandDetailsUseSuffix,
|
||||
groupsDetailsCmd().Short,
|
||||
[]string{
|
||||
flags.BackupFN,
|
||||
},
|
||||
detailsGroupsCmd,
|
||||
},
|
||||
{
|
||||
"delete groups",
|
||||
deleteCommand,
|
||||
expectUse + " " + groupsServiceCommandDeleteUseSuffix,
|
||||
groupsDeleteCmd().Short,
|
||||
[]string{flags.BackupFN},
|
||||
deleteGroupsCmd,
|
||||
},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := &cobra.Command{Use: test.use}
|
||||
|
||||
c := addGroupsCommands(cmd)
|
||||
require.NotNil(t, c)
|
||||
|
||||
cmds := cmd.Commands()
|
||||
require.Len(t, cmds, 1)
|
||||
|
||||
child := cmds[0]
|
||||
assert.Equal(t, test.expectUse, child.Use)
|
||||
assert.Equal(t, test.expectShort, child.Short)
|
||||
tester.AreSameFunc(t, test.expectRunE, child.RunE)
|
||||
})
|
||||
}
|
||||
}
|
||||
230
src/cli/backup/teams.go
Normal file
230
src/cli/backup/teams.go
Normal file
@ -0,0 +1,230 @@
|
||||
package backup
|
||||
|
||||
import (
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/pflag"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
. "github.com/alcionai/corso/src/cli/print"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
)
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// setup and globals
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
const (
|
||||
teamsServiceCommand = "teams"
|
||||
teamsServiceCommandCreateUseSuffix = "--team <teamsName> | '" + flags.Wildcard + "'"
|
||||
teamsServiceCommandDeleteUseSuffix = "--backup <backupId>"
|
||||
teamsServiceCommandDetailsUseSuffix = "--backup <backupId>"
|
||||
)
|
||||
|
||||
// TODO: correct examples
|
||||
const (
|
||||
teamsServiceCommandCreateExamples = `# Backup all Teams data for Alice
|
||||
corso backup create teams --team alice@example.com
|
||||
|
||||
# Backup only Teams contacts for Alice and Bob
|
||||
corso backup create teams --team engineering,sales --data contacts
|
||||
|
||||
# Backup all Teams data for all M365 users
|
||||
corso backup create teams --team '*'`
|
||||
|
||||
teamsServiceCommandDeleteExamples = `# Delete Teams backup with ID 1234abcd-12ab-cd34-56de-1234abcd
|
||||
corso backup delete teams --backup 1234abcd-12ab-cd34-56de-1234abcd`
|
||||
|
||||
teamsServiceCommandDetailsExamples = `# Explore items in Alice's latest backup (1234abcd...)
|
||||
corso backup details teams --backup 1234abcd-12ab-cd34-56de-1234abcd
|
||||
|
||||
# Explore calendar events occurring after start of 2022
|
||||
corso backup details teams --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--event-starts-after 2022-01-01T00:00:00`
|
||||
)
|
||||
|
||||
// called by backup.go to map subcommands to provider-specific handling.
|
||||
func addTeamsCommands(cmd *cobra.Command) *cobra.Command {
|
||||
var (
|
||||
c *cobra.Command
|
||||
fs *pflag.FlagSet
|
||||
)
|
||||
|
||||
switch cmd.Use {
|
||||
case createCommand:
|
||||
c, fs = utils.AddCommand(cmd, teamsCreateCmd(), utils.MarkPreReleaseCommand())
|
||||
fs.SortFlags = false
|
||||
|
||||
c.Use = c.Use + " " + teamsServiceCommandCreateUseSuffix
|
||||
c.Example = teamsServiceCommandCreateExamples
|
||||
|
||||
// Flags addition ordering should follow the order we want them to appear in help and docs:
|
||||
flags.AddTeamFlag(c)
|
||||
flags.AddDataFlag(c, []string{dataEmail, dataContacts, dataEvents}, false)
|
||||
flags.AddCorsoPassphaseFlags(c)
|
||||
flags.AddAWSCredsFlags(c)
|
||||
flags.AddAzureCredsFlags(c)
|
||||
flags.AddFetchParallelismFlag(c)
|
||||
flags.AddFailFastFlag(c)
|
||||
|
||||
case listCommand:
|
||||
c, fs = utils.AddCommand(cmd, teamsListCmd(), utils.MarkPreReleaseCommand())
|
||||
fs.SortFlags = false
|
||||
|
||||
flags.AddBackupIDFlag(c, false)
|
||||
flags.AddCorsoPassphaseFlags(c)
|
||||
flags.AddAWSCredsFlags(c)
|
||||
flags.AddAzureCredsFlags(c)
|
||||
addFailedItemsFN(c)
|
||||
addSkippedItemsFN(c)
|
||||
addRecoveredErrorsFN(c)
|
||||
|
||||
case detailsCommand:
|
||||
c, fs = utils.AddCommand(cmd, teamsDetailsCmd(), utils.MarkPreReleaseCommand())
|
||||
fs.SortFlags = false
|
||||
|
||||
c.Use = c.Use + " " + teamsServiceCommandDetailsUseSuffix
|
||||
c.Example = teamsServiceCommandDetailsExamples
|
||||
|
||||
flags.AddSkipReduceFlag(c)
|
||||
|
||||
// Flags addition ordering should follow the order we want them to appear in help and docs:
|
||||
// More generic (ex: --user) and more frequently used flags take precedence.
|
||||
flags.AddBackupIDFlag(c, true)
|
||||
flags.AddCorsoPassphaseFlags(c)
|
||||
flags.AddAWSCredsFlags(c)
|
||||
flags.AddAzureCredsFlags(c)
|
||||
|
||||
case deleteCommand:
|
||||
c, fs = utils.AddCommand(cmd, teamsDeleteCmd(), utils.MarkPreReleaseCommand())
|
||||
fs.SortFlags = false
|
||||
|
||||
c.Use = c.Use + " " + teamsServiceCommandDeleteUseSuffix
|
||||
c.Example = teamsServiceCommandDeleteExamples
|
||||
|
||||
flags.AddBackupIDFlag(c, true)
|
||||
flags.AddCorsoPassphaseFlags(c)
|
||||
flags.AddAWSCredsFlags(c)
|
||||
flags.AddAzureCredsFlags(c)
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// backup create
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
// `corso backup create teams [<flag>...]`
|
||||
func teamsCreateCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: teamsServiceCommand,
|
||||
Short: "Backup M365 Team service data",
|
||||
RunE: createTeamsCmd,
|
||||
Args: cobra.NoArgs,
|
||||
}
|
||||
}
|
||||
|
||||
// processes a teams service backup.
|
||||
func createTeamsCmd(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := validateTeamBackupCreateFlags(flags.TeamFV); err != nil {
|
||||
return Only(ctx, err)
|
||||
}
|
||||
|
||||
return Only(ctx, utils.ErrNotYetImplemented)
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// backup list
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
// `corso backup list teams [<flag>...]`
|
||||
func teamsListCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: teamsServiceCommand,
|
||||
Short: "List the history of M365 Teams service backups",
|
||||
RunE: listTeamsCmd,
|
||||
Args: cobra.NoArgs,
|
||||
}
|
||||
}
|
||||
|
||||
// lists the history of backup operations
|
||||
func listTeamsCmd(cmd *cobra.Command, args []string) error {
|
||||
return genericListCommand(cmd, flags.BackupIDFV, path.TeamsService, args)
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// backup details
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
// `corso backup details teams [<flag>...]`
|
||||
func teamsDetailsCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: teamsServiceCommand,
|
||||
Short: "Shows the details of a M365 Teams service backup",
|
||||
RunE: detailsTeamsCmd,
|
||||
Args: cobra.NoArgs,
|
||||
}
|
||||
}
|
||||
|
||||
// processes a teams service backup.
|
||||
func detailsTeamsCmd(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
return Only(ctx, utils.ErrNotYetImplemented)
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// backup delete
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
// `corso backup delete teams [<flag>...]`
|
||||
func teamsDeleteCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: teamsServiceCommand,
|
||||
Short: "Delete backed-up M365 Teams service data",
|
||||
RunE: deleteTeamsCmd,
|
||||
Args: cobra.NoArgs,
|
||||
}
|
||||
}
|
||||
|
||||
// deletes an teams service backup.
|
||||
func deleteTeamsCmd(cmd *cobra.Command, args []string) error {
|
||||
return genericDeleteCommand(cmd, path.TeamsService, flags.BackupIDFV, "Teams", args)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
func validateTeamBackupCreateFlags(teams []string) error {
|
||||
if len(teams) == 0 {
|
||||
return clues.New(
|
||||
"requires one or more --" +
|
||||
flags.TeamFN + " ids, or the wildcard --" +
|
||||
flags.TeamFN + " *",
|
||||
)
|
||||
}
|
||||
|
||||
// TODO(meain)
|
||||
// for _, d := range cats {
|
||||
// if d != dataLibraries {
|
||||
// return clues.New(
|
||||
// d + " is an unrecognized data type; only " + dataLibraries + " is supported"
|
||||
// )
|
||||
// }
|
||||
// }
|
||||
|
||||
return nil
|
||||
}
|
||||
98
src/cli/backup/teams_test.go
Normal file
98
src/cli/backup/teams_test.go
Normal file
@ -0,0 +1,98 @@
|
||||
package backup
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
)
|
||||
|
||||
type TeamsUnitSuite struct {
|
||||
tester.Suite
|
||||
}
|
||||
|
||||
func TestTeamsUnitSuite(t *testing.T) {
|
||||
suite.Run(t, &TeamsUnitSuite{Suite: tester.NewUnitSuite(t)})
|
||||
}
|
||||
|
||||
func (suite *TeamsUnitSuite) TestAddTeamsCommands() {
|
||||
expectUse := teamsServiceCommand
|
||||
|
||||
table := []struct {
|
||||
name string
|
||||
use string
|
||||
expectUse string
|
||||
expectShort string
|
||||
flags []string
|
||||
expectRunE func(*cobra.Command, []string) error
|
||||
}{
|
||||
{
|
||||
"create teams",
|
||||
createCommand,
|
||||
expectUse + " " + teamsServiceCommandCreateUseSuffix,
|
||||
teamsCreateCmd().Short,
|
||||
[]string{
|
||||
flags.CategoryDataFN,
|
||||
flags.FailFastFN,
|
||||
flags.FetchParallelismFN,
|
||||
flags.SkipReduceFN,
|
||||
flags.NoStatsFN,
|
||||
},
|
||||
createTeamsCmd,
|
||||
},
|
||||
{
|
||||
"list teams",
|
||||
listCommand,
|
||||
expectUse,
|
||||
teamsListCmd().Short,
|
||||
[]string{
|
||||
flags.BackupFN,
|
||||
flags.FailedItemsFN,
|
||||
flags.SkippedItemsFN,
|
||||
flags.RecoveredErrorsFN,
|
||||
},
|
||||
listTeamsCmd,
|
||||
},
|
||||
{
|
||||
"details teams",
|
||||
detailsCommand,
|
||||
expectUse + " " + teamsServiceCommandDetailsUseSuffix,
|
||||
teamsDetailsCmd().Short,
|
||||
[]string{
|
||||
flags.BackupFN,
|
||||
},
|
||||
detailsTeamsCmd,
|
||||
},
|
||||
{
|
||||
"delete teams",
|
||||
deleteCommand,
|
||||
expectUse + " " + teamsServiceCommandDeleteUseSuffix,
|
||||
teamsDeleteCmd().Short,
|
||||
[]string{flags.BackupFN},
|
||||
deleteTeamsCmd,
|
||||
},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := &cobra.Command{Use: test.use}
|
||||
|
||||
c := addTeamsCommands(cmd)
|
||||
require.NotNil(t, c)
|
||||
|
||||
cmds := cmd.Commands()
|
||||
require.Len(t, cmds, 1)
|
||||
|
||||
child := cmds[0]
|
||||
assert.Equal(t, test.expectUse, child.Use)
|
||||
assert.Equal(t, test.expectShort, child.Short)
|
||||
tester.AreSameFunc(t, test.expectRunE, child.RunE)
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -21,6 +21,8 @@ import (
|
||||
var exportCommands = []func(cmd *cobra.Command) *cobra.Command{
|
||||
addOneDriveCommands,
|
||||
addSharePointCommands,
|
||||
addGroupsCommands,
|
||||
addTeamsCommands,
|
||||
}
|
||||
|
||||
// AddCommands attaches all `corso export * *` commands to the parent.
|
||||
|
||||
84
src/cli/export/groups.go
Normal file
84
src/cli/export/groups.go
Normal file
@ -0,0 +1,84 @@
|
||||
package export
|
||||
|
||||
import (
|
||||
"github.com/pkg/errors"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/pflag"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
. "github.com/alcionai/corso/src/cli/print"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
)
|
||||
|
||||
// called by export.go to map subcommands to provider-specific handling.
|
||||
func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
|
||||
var (
|
||||
c *cobra.Command
|
||||
fs *pflag.FlagSet
|
||||
)
|
||||
|
||||
switch cmd.Use {
|
||||
case exportCommand:
|
||||
c, fs = utils.AddCommand(cmd, groupsExportCmd(), utils.MarkPreReleaseCommand())
|
||||
|
||||
c.Use = c.Use + " " + groupsServiceCommandUseSuffix
|
||||
|
||||
// Flags addition ordering should follow the order we want them to appear in help and docs:
|
||||
// More generic (ex: --user) and more frequently used flags take precedence.
|
||||
fs.SortFlags = false
|
||||
|
||||
flags.AddBackupIDFlag(c, true)
|
||||
flags.AddExportConfigFlags(c)
|
||||
flags.AddFailFastFlag(c)
|
||||
flags.AddCorsoPassphaseFlags(c)
|
||||
flags.AddAWSCredsFlags(c)
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
// TODO: correct examples
|
||||
const (
|
||||
groupsServiceCommand = "groups"
|
||||
groupsServiceCommandUseSuffix = "<destination> --backup <backupId>"
|
||||
|
||||
//nolint:lll
|
||||
groupsServiceCommandExportExamples = `# Export file with ID 98765abcdef in Bob's last backup (1234abcd...) to my-exports directory
|
||||
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --file 98765abcdef
|
||||
|
||||
# Export files named "FY2021 Planning.xlsx" in "Documents/Finance Reports" to current directory
|
||||
corso export groups . --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--file "FY2021 Planning.xlsx" --folder "Documents/Finance Reports"
|
||||
|
||||
# Export all files and folders in folder "Documents/Finance Reports" that were created before 2020 to my-exports
|
||||
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd
|
||||
--folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00`
|
||||
)
|
||||
|
||||
// `corso export groups [<flag>...] <destination>`
|
||||
func groupsExportCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: groupsServiceCommand,
|
||||
Short: "Export M365 Groups service data",
|
||||
RunE: exportGroupsCmd,
|
||||
Args: func(cmd *cobra.Command, args []string) error {
|
||||
if len(args) != 1 {
|
||||
return errors.New("missing export destination")
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
Example: groupsServiceCommandExportExamples,
|
||||
}
|
||||
}
|
||||
|
||||
// processes an groups service export.
|
||||
func exportGroupsCmd(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
return Only(ctx, utils.ErrNotYetImplemented)
|
||||
}
|
||||
94
src/cli/export/groups_test.go
Normal file
94
src/cli/export/groups_test.go
Normal file
@ -0,0 +1,94 @@
|
||||
package export
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"testing"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/cli/utils/testdata"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
)
|
||||
|
||||
type GroupsUnitSuite struct {
|
||||
tester.Suite
|
||||
}
|
||||
|
||||
func TestGroupsUnitSuite(t *testing.T) {
|
||||
suite.Run(t, &GroupsUnitSuite{Suite: tester.NewUnitSuite(t)})
|
||||
}
|
||||
|
||||
func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
|
||||
expectUse := groupsServiceCommand + " " + groupsServiceCommandUseSuffix
|
||||
|
||||
table := []struct {
|
||||
name string
|
||||
use string
|
||||
expectUse string
|
||||
expectShort string
|
||||
expectRunE func(*cobra.Command, []string) error
|
||||
}{
|
||||
{"export groups", exportCommand, expectUse, groupsExportCmd().Short, exportGroupsCmd},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := &cobra.Command{Use: test.use}
|
||||
|
||||
// normally a persistent flag from the root.
|
||||
// required to ensure a dry run.
|
||||
flags.AddRunModeFlag(cmd, true)
|
||||
|
||||
c := addGroupsCommands(cmd)
|
||||
require.NotNil(t, c)
|
||||
|
||||
cmds := cmd.Commands()
|
||||
require.Len(t, cmds, 1)
|
||||
|
||||
child := cmds[0]
|
||||
assert.Equal(t, test.expectUse, child.Use)
|
||||
assert.Equal(t, test.expectShort, child.Short)
|
||||
tester.AreSameFunc(t, test.expectRunE, child.RunE)
|
||||
|
||||
cmd.SetArgs([]string{
|
||||
"groups",
|
||||
testdata.RestoreDestination,
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.BackupFN, testdata.BackupInput,
|
||||
|
||||
"--" + flags.AWSAccessKeyFN, testdata.AWSAccessKeyID,
|
||||
"--" + flags.AWSSecretAccessKeyFN, testdata.AWSSecretAccessKey,
|
||||
"--" + flags.AWSSessionTokenFN, testdata.AWSSessionToken,
|
||||
|
||||
"--" + flags.CorsoPassphraseFN, testdata.CorsoPassphrase,
|
||||
|
||||
// bool flags
|
||||
"--" + flags.ArchiveFN,
|
||||
})
|
||||
|
||||
cmd.SetOut(new(bytes.Buffer)) // drop output
|
||||
cmd.SetErr(new(bytes.Buffer)) // drop output
|
||||
err := cmd.Execute()
|
||||
// assert.NoError(t, err, clues.ToCore(err))
|
||||
assert.ErrorIs(t, err, utils.ErrNotYetImplemented, clues.ToCore(err))
|
||||
|
||||
opts := utils.MakeGroupsOpts(cmd)
|
||||
assert.Equal(t, testdata.BackupInput, flags.BackupIDFV)
|
||||
|
||||
assert.Equal(t, testdata.Archive, opts.ExportCfg.Archive)
|
||||
|
||||
assert.Equal(t, testdata.AWSAccessKeyID, flags.AWSAccessKeyFV)
|
||||
assert.Equal(t, testdata.AWSSecretAccessKey, flags.AWSSecretAccessKeyFV)
|
||||
assert.Equal(t, testdata.AWSSessionToken, flags.AWSSessionTokenFV)
|
||||
|
||||
assert.Equal(t, testdata.CorsoPassphrase, flags.CorsoPassphraseFV)
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -39,7 +39,7 @@ func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
|
||||
|
||||
const (
|
||||
oneDriveServiceCommand = "onedrive"
|
||||
oneDriveServiceCommandUseSuffix = "--backup <backupId> <destination>"
|
||||
oneDriveServiceCommandUseSuffix = "<destination> --backup <backupId>"
|
||||
|
||||
//nolint:lll
|
||||
oneDriveServiceCommandExportExamples = `# Export file with ID 98765abcdef in Bob's last backup (1234abcd...) to my-exports directory
|
||||
@ -62,7 +62,7 @@ func oneDriveExportCmd() *cobra.Command {
|
||||
RunE: exportOneDriveCmd,
|
||||
Args: func(cmd *cobra.Command, args []string) error {
|
||||
if len(args) != 1 {
|
||||
return errors.New("missing restore destination")
|
||||
return errors.New("missing export destination")
|
||||
}
|
||||
|
||||
return nil
|
||||
|
||||
@ -39,7 +39,7 @@ func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
|
||||
|
||||
const (
|
||||
sharePointServiceCommand = "sharepoint"
|
||||
sharePointServiceCommandUseSuffix = "--backup <backupId> <destination>"
|
||||
sharePointServiceCommandUseSuffix = "<destination> --backup <backupId>"
|
||||
|
||||
//nolint:lll
|
||||
sharePointServiceCommandExportExamples = `# Export file with ID 98765abcdef in Bob's latest backup (1234abcd...) to my-exports directory
|
||||
@ -66,7 +66,7 @@ func sharePointExportCmd() *cobra.Command {
|
||||
RunE: exportSharePointCmd,
|
||||
Args: func(cmd *cobra.Command, args []string) error {
|
||||
if len(args) != 1 {
|
||||
return errors.New("missing restore destination")
|
||||
return errors.New("missing export destination")
|
||||
}
|
||||
|
||||
return nil
|
||||
|
||||
84
src/cli/export/teams.go
Normal file
84
src/cli/export/teams.go
Normal file
@ -0,0 +1,84 @@
|
||||
package export
|
||||
|
||||
import (
|
||||
"github.com/pkg/errors"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/pflag"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
. "github.com/alcionai/corso/src/cli/print"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
)
|
||||
|
||||
// called by export.go to map subcommands to provider-specific handling.
|
||||
func addTeamsCommands(cmd *cobra.Command) *cobra.Command {
|
||||
var (
|
||||
c *cobra.Command
|
||||
fs *pflag.FlagSet
|
||||
)
|
||||
|
||||
switch cmd.Use {
|
||||
case exportCommand:
|
||||
c, fs = utils.AddCommand(cmd, teamsExportCmd(), utils.MarkPreReleaseCommand())
|
||||
|
||||
c.Use = c.Use + " " + teamsServiceCommandUseSuffix
|
||||
|
||||
// Flags addition ordering should follow the order we want them to appear in help and docs:
|
||||
// More generic (ex: --user) and more frequently used flags take precedence.
|
||||
fs.SortFlags = false
|
||||
|
||||
flags.AddBackupIDFlag(c, true)
|
||||
flags.AddExportConfigFlags(c)
|
||||
flags.AddFailFastFlag(c)
|
||||
flags.AddCorsoPassphaseFlags(c)
|
||||
flags.AddAWSCredsFlags(c)
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
// TODO: correct examples
|
||||
const (
|
||||
teamsServiceCommand = "teams"
|
||||
teamsServiceCommandUseSuffix = "<destination> --backup <backupId>"
|
||||
|
||||
//nolint:lll
|
||||
teamsServiceCommandExportExamples = `# Export file with ID 98765abcdef in Bob's last backup (1234abcd...) to my-exports directory
|
||||
corso export teams my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --file 98765abcdef
|
||||
|
||||
# Export files named "FY2021 Planning.xlsx" in "Documents/Finance Reports" to current directory
|
||||
corso export teams . --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--file "FY2021 Planning.xlsx" --folder "Documents/Finance Reports"
|
||||
|
||||
# Export all files and folders in folder "Documents/Finance Reports" that were created before 2020 to my-exports
|
||||
corso export teams my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd
|
||||
--folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00`
|
||||
)
|
||||
|
||||
// `corso export teams [<flag>...] <destination>`
|
||||
func teamsExportCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: teamsServiceCommand,
|
||||
Short: "Export M365 Teams service data",
|
||||
RunE: exportTeamsCmd,
|
||||
Args: func(cmd *cobra.Command, args []string) error {
|
||||
if len(args) != 1 {
|
||||
return errors.New("missing export destination")
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
Example: teamsServiceCommandExportExamples,
|
||||
}
|
||||
}
|
||||
|
||||
// processes an teams service export.
|
||||
func exportTeamsCmd(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
return Only(ctx, utils.ErrNotYetImplemented)
|
||||
}
|
||||
94
src/cli/export/teams_test.go
Normal file
94
src/cli/export/teams_test.go
Normal file
@ -0,0 +1,94 @@
|
||||
package export
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"testing"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/cli/utils/testdata"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
)
|
||||
|
||||
type TeamsUnitSuite struct {
|
||||
tester.Suite
|
||||
}
|
||||
|
||||
func TestTeamsUnitSuite(t *testing.T) {
|
||||
suite.Run(t, &TeamsUnitSuite{Suite: tester.NewUnitSuite(t)})
|
||||
}
|
||||
|
||||
func (suite *TeamsUnitSuite) TestAddTeamsCommands() {
|
||||
expectUse := teamsServiceCommand + " " + teamsServiceCommandUseSuffix
|
||||
|
||||
table := []struct {
|
||||
name string
|
||||
use string
|
||||
expectUse string
|
||||
expectShort string
|
||||
expectRunE func(*cobra.Command, []string) error
|
||||
}{
|
||||
{"export teams", exportCommand, expectUse, teamsExportCmd().Short, exportTeamsCmd},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := &cobra.Command{Use: test.use}
|
||||
|
||||
// normally a persistent flag from the root.
|
||||
// required to ensure a dry run.
|
||||
flags.AddRunModeFlag(cmd, true)
|
||||
|
||||
c := addTeamsCommands(cmd)
|
||||
require.NotNil(t, c)
|
||||
|
||||
cmds := cmd.Commands()
|
||||
require.Len(t, cmds, 1)
|
||||
|
||||
child := cmds[0]
|
||||
assert.Equal(t, test.expectUse, child.Use)
|
||||
assert.Equal(t, test.expectShort, child.Short)
|
||||
tester.AreSameFunc(t, test.expectRunE, child.RunE)
|
||||
|
||||
cmd.SetArgs([]string{
|
||||
"teams",
|
||||
testdata.RestoreDestination,
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.BackupFN, testdata.BackupInput,
|
||||
|
||||
"--" + flags.AWSAccessKeyFN, testdata.AWSAccessKeyID,
|
||||
"--" + flags.AWSSecretAccessKeyFN, testdata.AWSSecretAccessKey,
|
||||
"--" + flags.AWSSessionTokenFN, testdata.AWSSessionToken,
|
||||
|
||||
"--" + flags.CorsoPassphraseFN, testdata.CorsoPassphrase,
|
||||
|
||||
// bool flags
|
||||
"--" + flags.ArchiveFN,
|
||||
})
|
||||
|
||||
cmd.SetOut(new(bytes.Buffer)) // drop output
|
||||
cmd.SetErr(new(bytes.Buffer)) // drop output
|
||||
err := cmd.Execute()
|
||||
// assert.NoError(t, err, clues.ToCore(err))
|
||||
assert.ErrorIs(t, err, utils.ErrNotYetImplemented, clues.ToCore(err))
|
||||
|
||||
opts := utils.MakeTeamsOpts(cmd)
|
||||
assert.Equal(t, testdata.BackupInput, flags.BackupIDFV)
|
||||
|
||||
assert.Equal(t, testdata.Archive, opts.ExportCfg.Archive)
|
||||
|
||||
assert.Equal(t, testdata.AWSAccessKeyID, flags.AWSAccessKeyFV)
|
||||
assert.Equal(t, testdata.AWSSecretAccessKey, flags.AWSSecretAccessKeyFV)
|
||||
assert.Equal(t, testdata.AWSSessionToken, flags.AWSSessionTokenFV)
|
||||
|
||||
assert.Equal(t, testdata.CorsoPassphrase, flags.CorsoPassphraseFV)
|
||||
})
|
||||
}
|
||||
}
|
||||
28
src/cli/flags/groups.go
Normal file
28
src/cli/flags/groups.go
Normal file
@ -0,0 +1,28 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
const (
|
||||
GroupFN = "group"
|
||||
)
|
||||
|
||||
var GroupFV []string
|
||||
|
||||
func AddGroupDetailsAndRestoreFlags(cmd *cobra.Command) {
|
||||
// TODO: implement flags
|
||||
}
|
||||
|
||||
// AddGroupFlag adds the --group flag, which accepts id or name values.
|
||||
// TODO: need to decide what the appropriate "name" to accept here is.
|
||||
// keepers thinks its either DisplayName or MailNickname or Mail
|
||||
// Mail is most accurate, MailNickame is accurate and shorter, but the end user
|
||||
// may not see either one visibly.
|
||||
// https://learn.microsoft.com/en-us/graph/api/group-list?view=graph-rest-1.0&tabs=http
|
||||
func AddGroupFlag(cmd *cobra.Command) {
|
||||
cmd.Flags().StringSliceVar(
|
||||
&GroupFV,
|
||||
GroupFN, nil,
|
||||
"Backup data by group; accepts '"+Wildcard+"' to select all groups.")
|
||||
}
|
||||
28
src/cli/flags/teams.go
Normal file
28
src/cli/flags/teams.go
Normal file
@ -0,0 +1,28 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
const (
|
||||
TeamFN = "team"
|
||||
)
|
||||
|
||||
var TeamFV []string
|
||||
|
||||
func AddTeamDetailsAndRestoreFlags(cmd *cobra.Command) {
|
||||
// TODO: implement flags
|
||||
}
|
||||
|
||||
// AddTeamFlag adds the --team flag, which accepts id or name values.
|
||||
// TODO: need to decide what the appropriate "name" to accept here is.
|
||||
// keepers thinks its either DisplayName or MailNickname or Mail
|
||||
// Mail is most accurate, MailNickame is accurate and shorter, but the end user
|
||||
// may not see either one visibly.
|
||||
// https://learn.microsoft.com/en-us/graph/api/team-list?view=graph-rest-1.0&tabs=http
|
||||
func AddTeamFlag(cmd *cobra.Command) {
|
||||
cmd.Flags().StringSliceVar(
|
||||
&TeamFV,
|
||||
TeamFN, nil,
|
||||
"Backup data by team; accepts '"+Wildcard+"' to select all teams.")
|
||||
}
|
||||
81
src/cli/restore/groups.go
Normal file
81
src/cli/restore/groups.go
Normal file
@ -0,0 +1,81 @@
|
||||
package restore
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/pflag"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
. "github.com/alcionai/corso/src/cli/print"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
)
|
||||
|
||||
// called by restore.go to map subcommands to provider-specific handling.
|
||||
func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
|
||||
var (
|
||||
c *cobra.Command
|
||||
fs *pflag.FlagSet
|
||||
)
|
||||
|
||||
switch cmd.Use {
|
||||
case restoreCommand:
|
||||
c, fs = utils.AddCommand(cmd, groupsRestoreCmd(), utils.MarkPreReleaseCommand())
|
||||
|
||||
c.Use = c.Use + " " + groupsServiceCommandUseSuffix
|
||||
|
||||
// Flags addition ordering should follow the order we want them to appear in help and docs:
|
||||
// More generic (ex: --user) and more frequently used flags take precedence.
|
||||
fs.SortFlags = false
|
||||
|
||||
flags.AddBackupIDFlag(c, true)
|
||||
flags.AddRestorePermissionsFlag(c)
|
||||
flags.AddRestoreConfigFlags(c)
|
||||
flags.AddFailFastFlag(c)
|
||||
flags.AddCorsoPassphaseFlags(c)
|
||||
flags.AddAWSCredsFlags(c)
|
||||
flags.AddAzureCredsFlags(c)
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
// TODO: correct examples
|
||||
const (
|
||||
groupsServiceCommand = "groups"
|
||||
groupsServiceCommandUseSuffix = "--backup <backupId>"
|
||||
|
||||
groupsServiceCommandRestoreExamples = `# Restore file with ID 98765abcdef in Bob's last backup (1234abcd...)
|
||||
corso restore groups --backup 1234abcd-12ab-cd34-56de-1234abcd --file 98765abcdef
|
||||
|
||||
# Restore the file with ID 98765abcdef along with its associated permissions
|
||||
corso restore groups --backup 1234abcd-12ab-cd34-56de-1234abcd --file 98765abcdef --restore-permissions
|
||||
|
||||
# Restore files named "FY2021 Planning.xlsx" in "Documents/Finance Reports"
|
||||
corso restore groups --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--file "FY2021 Planning.xlsx" --folder "Documents/Finance Reports"
|
||||
|
||||
# Restore all files and folders in folder "Documents/Finance Reports" that were created before 2020
|
||||
corso restore groups --backup 1234abcd-12ab-cd34-56de-1234abcd
|
||||
--folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00`
|
||||
)
|
||||
|
||||
// `corso restore groups [<flag>...]`
|
||||
func groupsRestoreCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: groupsServiceCommand,
|
||||
Short: "Restore M365 Groups service data",
|
||||
RunE: restoreGroupsCmd,
|
||||
Args: cobra.NoArgs,
|
||||
Example: groupsServiceCommandRestoreExamples,
|
||||
}
|
||||
}
|
||||
|
||||
// processes an groups service restore.
|
||||
func restoreGroupsCmd(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
return Only(ctx, utils.ErrNotYetImplemented)
|
||||
}
|
||||
108
src/cli/restore/groups_test.go
Normal file
108
src/cli/restore/groups_test.go
Normal file
@ -0,0 +1,108 @@
|
||||
package restore
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"testing"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/cli/utils/testdata"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
)
|
||||
|
||||
type GroupsUnitSuite struct {
|
||||
tester.Suite
|
||||
}
|
||||
|
||||
func TestGroupsUnitSuite(t *testing.T) {
|
||||
suite.Run(t, &GroupsUnitSuite{Suite: tester.NewUnitSuite(t)})
|
||||
}
|
||||
|
||||
func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
|
||||
expectUse := groupsServiceCommand + " " + groupsServiceCommandUseSuffix
|
||||
|
||||
table := []struct {
|
||||
name string
|
||||
use string
|
||||
expectUse string
|
||||
expectShort string
|
||||
expectRunE func(*cobra.Command, []string) error
|
||||
}{
|
||||
{"restore groups", restoreCommand, expectUse, groupsRestoreCmd().Short, restoreGroupsCmd},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := &cobra.Command{Use: test.use}
|
||||
|
||||
// normally a persistent flag from the root.
|
||||
// required to ensure a dry run.
|
||||
flags.AddRunModeFlag(cmd, true)
|
||||
|
||||
c := addGroupsCommands(cmd)
|
||||
require.NotNil(t, c)
|
||||
|
||||
cmds := cmd.Commands()
|
||||
require.Len(t, cmds, 1)
|
||||
|
||||
child := cmds[0]
|
||||
assert.Equal(t, test.expectUse, child.Use)
|
||||
assert.Equal(t, test.expectShort, child.Short)
|
||||
tester.AreSameFunc(t, test.expectRunE, child.RunE)
|
||||
|
||||
cmd.SetArgs([]string{
|
||||
"groups",
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.BackupFN, testdata.BackupInput,
|
||||
|
||||
"--" + flags.CollisionsFN, testdata.Collisions,
|
||||
"--" + flags.DestinationFN, testdata.Destination,
|
||||
"--" + flags.ToResourceFN, testdata.ToResource,
|
||||
|
||||
"--" + flags.AWSAccessKeyFN, testdata.AWSAccessKeyID,
|
||||
"--" + flags.AWSSecretAccessKeyFN, testdata.AWSSecretAccessKey,
|
||||
"--" + flags.AWSSessionTokenFN, testdata.AWSSessionToken,
|
||||
|
||||
"--" + flags.AzureClientIDFN, testdata.AzureClientID,
|
||||
"--" + flags.AzureClientTenantFN, testdata.AzureTenantID,
|
||||
"--" + flags.AzureClientSecretFN, testdata.AzureClientSecret,
|
||||
|
||||
"--" + flags.CorsoPassphraseFN, testdata.CorsoPassphrase,
|
||||
|
||||
// bool flags
|
||||
"--" + flags.RestorePermissionsFN,
|
||||
})
|
||||
|
||||
cmd.SetOut(new(bytes.Buffer)) // drop output
|
||||
cmd.SetErr(new(bytes.Buffer)) // drop output
|
||||
err := cmd.Execute()
|
||||
// assert.NoError(t, err, clues.ToCore(err))
|
||||
assert.ErrorIs(t, err, utils.ErrNotYetImplemented, clues.ToCore(err))
|
||||
|
||||
opts := utils.MakeGroupsOpts(cmd)
|
||||
assert.Equal(t, testdata.BackupInput, flags.BackupIDFV)
|
||||
|
||||
assert.Equal(t, testdata.Collisions, opts.RestoreCfg.Collisions)
|
||||
assert.Equal(t, testdata.Destination, opts.RestoreCfg.Destination)
|
||||
assert.Equal(t, testdata.ToResource, opts.RestoreCfg.ProtectedResource)
|
||||
|
||||
assert.Equal(t, testdata.AWSAccessKeyID, flags.AWSAccessKeyFV)
|
||||
assert.Equal(t, testdata.AWSSecretAccessKey, flags.AWSSecretAccessKeyFV)
|
||||
assert.Equal(t, testdata.AWSSessionToken, flags.AWSSessionTokenFV)
|
||||
|
||||
assert.Equal(t, testdata.AzureClientID, flags.AzureClientIDFV)
|
||||
assert.Equal(t, testdata.AzureTenantID, flags.AzureClientTenantFV)
|
||||
assert.Equal(t, testdata.AzureClientSecret, flags.AzureClientSecretFV)
|
||||
|
||||
assert.Equal(t, testdata.CorsoPassphrase, flags.CorsoPassphraseFV)
|
||||
assert.True(t, flags.RestorePermissionsFV)
|
||||
})
|
||||
}
|
||||
}
|
||||
81
src/cli/restore/teams.go
Normal file
81
src/cli/restore/teams.go
Normal file
@ -0,0 +1,81 @@
|
||||
package restore
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/pflag"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
. "github.com/alcionai/corso/src/cli/print"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
)
|
||||
|
||||
// called by restore.go to map subcommands to provider-specific handling.
|
||||
func addTeamsCommands(cmd *cobra.Command) *cobra.Command {
|
||||
var (
|
||||
c *cobra.Command
|
||||
fs *pflag.FlagSet
|
||||
)
|
||||
|
||||
switch cmd.Use {
|
||||
case restoreCommand:
|
||||
c, fs = utils.AddCommand(cmd, teamsRestoreCmd(), utils.MarkPreReleaseCommand())
|
||||
|
||||
c.Use = c.Use + " " + teamsServiceCommandUseSuffix
|
||||
|
||||
// Flags addition ordering should follow the order we want them to appear in help and docs:
|
||||
// More generic (ex: --user) and more frequently used flags take precedence.
|
||||
fs.SortFlags = false
|
||||
|
||||
flags.AddBackupIDFlag(c, true)
|
||||
flags.AddRestorePermissionsFlag(c)
|
||||
flags.AddRestoreConfigFlags(c)
|
||||
flags.AddFailFastFlag(c)
|
||||
flags.AddCorsoPassphaseFlags(c)
|
||||
flags.AddAWSCredsFlags(c)
|
||||
flags.AddAzureCredsFlags(c)
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
// TODO: correct examples
|
||||
const (
|
||||
teamsServiceCommand = "teams"
|
||||
teamsServiceCommandUseSuffix = "--backup <backupId>"
|
||||
|
||||
teamsServiceCommandRestoreExamples = `# Restore file with ID 98765abcdef in Bob's last backup (1234abcd...)
|
||||
corso restore teams --backup 1234abcd-12ab-cd34-56de-1234abcd --file 98765abcdef
|
||||
|
||||
# Restore the file with ID 98765abcdef along with its associated permissions
|
||||
corso restore teams --backup 1234abcd-12ab-cd34-56de-1234abcd --file 98765abcdef --restore-permissions
|
||||
|
||||
# Restore files named "FY2021 Planning.xlsx" in "Documents/Finance Reports"
|
||||
corso restore teams --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--file "FY2021 Planning.xlsx" --folder "Documents/Finance Reports"
|
||||
|
||||
# Restore all files and folders in folder "Documents/Finance Reports" that were created before 2020
|
||||
corso restore teams --backup 1234abcd-12ab-cd34-56de-1234abcd
|
||||
--folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00`
|
||||
)
|
||||
|
||||
// `corso restore teams [<flag>...]`
|
||||
func teamsRestoreCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: teamsServiceCommand,
|
||||
Short: "Restore M365 Teams service data",
|
||||
RunE: restoreTeamsCmd,
|
||||
Args: cobra.NoArgs,
|
||||
Example: teamsServiceCommandRestoreExamples,
|
||||
}
|
||||
}
|
||||
|
||||
// processes an teams service restore.
|
||||
func restoreTeamsCmd(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
return Only(ctx, utils.ErrNotYetImplemented)
|
||||
}
|
||||
108
src/cli/restore/teams_test.go
Normal file
108
src/cli/restore/teams_test.go
Normal file
@ -0,0 +1,108 @@
|
||||
package restore
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"testing"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/cli/utils/testdata"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
)
|
||||
|
||||
type TeamsUnitSuite struct {
|
||||
tester.Suite
|
||||
}
|
||||
|
||||
func TestTeamsUnitSuite(t *testing.T) {
|
||||
suite.Run(t, &TeamsUnitSuite{Suite: tester.NewUnitSuite(t)})
|
||||
}
|
||||
|
||||
func (suite *TeamsUnitSuite) TestAddTeamsCommands() {
|
||||
expectUse := teamsServiceCommand + " " + teamsServiceCommandUseSuffix
|
||||
|
||||
table := []struct {
|
||||
name string
|
||||
use string
|
||||
expectUse string
|
||||
expectShort string
|
||||
expectRunE func(*cobra.Command, []string) error
|
||||
}{
|
||||
{"restore teams", restoreCommand, expectUse, teamsRestoreCmd().Short, restoreTeamsCmd},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := &cobra.Command{Use: test.use}
|
||||
|
||||
// normally a persistent flag from the root.
|
||||
// required to ensure a dry run.
|
||||
flags.AddRunModeFlag(cmd, true)
|
||||
|
||||
c := addTeamsCommands(cmd)
|
||||
require.NotNil(t, c)
|
||||
|
||||
cmds := cmd.Commands()
|
||||
require.Len(t, cmds, 1)
|
||||
|
||||
child := cmds[0]
|
||||
assert.Equal(t, test.expectUse, child.Use)
|
||||
assert.Equal(t, test.expectShort, child.Short)
|
||||
tester.AreSameFunc(t, test.expectRunE, child.RunE)
|
||||
|
||||
cmd.SetArgs([]string{
|
||||
"teams",
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.BackupFN, testdata.BackupInput,
|
||||
|
||||
"--" + flags.CollisionsFN, testdata.Collisions,
|
||||
"--" + flags.DestinationFN, testdata.Destination,
|
||||
"--" + flags.ToResourceFN, testdata.ToResource,
|
||||
|
||||
"--" + flags.AWSAccessKeyFN, testdata.AWSAccessKeyID,
|
||||
"--" + flags.AWSSecretAccessKeyFN, testdata.AWSSecretAccessKey,
|
||||
"--" + flags.AWSSessionTokenFN, testdata.AWSSessionToken,
|
||||
|
||||
"--" + flags.AzureClientIDFN, testdata.AzureClientID,
|
||||
"--" + flags.AzureClientTenantFN, testdata.AzureTenantID,
|
||||
"--" + flags.AzureClientSecretFN, testdata.AzureClientSecret,
|
||||
|
||||
"--" + flags.CorsoPassphraseFN, testdata.CorsoPassphrase,
|
||||
|
||||
// bool flags
|
||||
"--" + flags.RestorePermissionsFN,
|
||||
})
|
||||
|
||||
cmd.SetOut(new(bytes.Buffer)) // drop output
|
||||
cmd.SetErr(new(bytes.Buffer)) // drop output
|
||||
err := cmd.Execute()
|
||||
// assert.NoError(t, err, clues.ToCore(err))
|
||||
assert.ErrorIs(t, err, utils.ErrNotYetImplemented, clues.ToCore(err))
|
||||
|
||||
opts := utils.MakeTeamsOpts(cmd)
|
||||
assert.Equal(t, testdata.BackupInput, flags.BackupIDFV)
|
||||
|
||||
assert.Equal(t, testdata.Collisions, opts.RestoreCfg.Collisions)
|
||||
assert.Equal(t, testdata.Destination, opts.RestoreCfg.Destination)
|
||||
assert.Equal(t, testdata.ToResource, opts.RestoreCfg.ProtectedResource)
|
||||
|
||||
assert.Equal(t, testdata.AWSAccessKeyID, flags.AWSAccessKeyFV)
|
||||
assert.Equal(t, testdata.AWSSecretAccessKey, flags.AWSSecretAccessKeyFV)
|
||||
assert.Equal(t, testdata.AWSSessionToken, flags.AWSSessionTokenFV)
|
||||
|
||||
assert.Equal(t, testdata.AzureClientID, flags.AzureClientIDFV)
|
||||
assert.Equal(t, testdata.AzureTenantID, flags.AzureClientTenantFV)
|
||||
assert.Equal(t, testdata.AzureClientSecret, flags.AzureClientSecretFV)
|
||||
|
||||
assert.Equal(t, testdata.CorsoPassphrase, flags.CorsoPassphraseFV)
|
||||
assert.True(t, flags.RestorePermissionsFV)
|
||||
})
|
||||
}
|
||||
}
|
||||
30
src/cli/utils/groups.go
Normal file
30
src/cli/utils/groups.go
Normal file
@ -0,0 +1,30 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
)
|
||||
|
||||
type GroupsOpts struct {
|
||||
Groups []string
|
||||
|
||||
RestoreCfg RestoreCfgOpts
|
||||
ExportCfg ExportCfgOpts
|
||||
|
||||
Populated flags.PopulatedFlags
|
||||
}
|
||||
|
||||
func MakeGroupsOpts(cmd *cobra.Command) GroupsOpts {
|
||||
return GroupsOpts{
|
||||
Groups: flags.UserFV,
|
||||
|
||||
RestoreCfg: makeRestoreCfgOpts(cmd),
|
||||
ExportCfg: makeExportCfgOpts(cmd),
|
||||
|
||||
// populated contains the list of flags that appear in the
|
||||
// command, according to pflags. Use this to differentiate
|
||||
// between an "empty" and a "missing" value.
|
||||
Populated: flags.GetPopulatedFlags(cmd),
|
||||
}
|
||||
}
|
||||
30
src/cli/utils/teams.go
Normal file
30
src/cli/utils/teams.go
Normal file
@ -0,0 +1,30 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
)
|
||||
|
||||
type TeamsOpts struct {
|
||||
Teams []string
|
||||
|
||||
RestoreCfg RestoreCfgOpts
|
||||
ExportCfg ExportCfgOpts
|
||||
|
||||
Populated flags.PopulatedFlags
|
||||
}
|
||||
|
||||
func MakeTeamsOpts(cmd *cobra.Command) TeamsOpts {
|
||||
return TeamsOpts{
|
||||
Teams: flags.UserFV,
|
||||
|
||||
RestoreCfg: makeRestoreCfgOpts(cmd),
|
||||
ExportCfg: makeExportCfgOpts(cmd),
|
||||
|
||||
// populated contains the list of flags that appear in the
|
||||
// command, according to pflags. Use this to differentiate
|
||||
// between an "empty" and a "missing" value.
|
||||
Populated: flags.GetPopulatedFlags(cmd),
|
||||
}
|
||||
}
|
||||
@ -19,6 +19,8 @@ import (
|
||||
"github.com/alcionai/corso/src/pkg/storage"
|
||||
)
|
||||
|
||||
var ErrNotYetImplemented = clues.New("not yet implemented")
|
||||
|
||||
func GetAccountAndConnect(
|
||||
ctx context.Context,
|
||||
pst path.ServiceType,
|
||||
|
||||
@ -67,6 +67,9 @@ func deleteBackups(
// pitrListBackups connects to the repository at the given point in time and
// lists the backups for service. It then checks the list of backups contains
// the backups in backupIDs.
//
//nolint:unused
//lint:ignore U1000 Waiting for upstream fix tracked by 4031
func pitrListBackups(
ctx context.Context,
service path.ServiceType,
@ -156,16 +159,10 @@ func main() {
fatal(ctx, "invalid number of days provided", nil)
}

beforeDel := time.Now()

backups, err := deleteBackups(ctx, service, days)
_, err = deleteBackups(ctx, service, days)
if err != nil {
fatal(ctx, "deleting backups", clues.Stack(err))
}

if err := pitrListBackups(ctx, service, beforeDel, backups); err != nil {
fatal(ctx, "listing backups from point in time", clues.Stack(err))
}
}

func fatal(ctx context.Context, msg string, err error) {
@ -8,10 +8,10 @@ require (
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.1
github.com/alcionai/clues v0.0.0-20230728164842-7dc4795a43e4
github.com/armon/go-metrics v0.4.1
github.com/aws/aws-sdk-go v1.44.326
github.com/aws/aws-sdk-go v1.44.328
github.com/aws/aws-xray-sdk-go v1.8.1
github.com/cenkalti/backoff/v4 v4.2.1
github.com/google/uuid v1.3.0
github.com/google/uuid v1.3.1
github.com/h2non/gock v1.2.0
github.com/kopia/kopia v0.13.0
github.com/microsoft/kiota-abstractions-go v1.2.0
@ -19,10 +19,10 @@ require (
github.com/microsoft/kiota-http-go v1.1.0
github.com/microsoft/kiota-serialization-form-go v1.0.0
github.com/microsoft/kiota-serialization-json-go v1.0.4
github.com/microsoftgraph/msgraph-sdk-go v1.14.0
github.com/microsoftgraph/msgraph-sdk-go v1.15.0
github.com/microsoftgraph/msgraph-sdk-go-core v1.0.0
github.com/pkg/errors v0.9.1
github.com/puzpuzpuz/xsync/v2 v2.4.1
github.com/puzpuzpuz/xsync/v2 v2.5.0
github.com/rudderlabs/analytics-go v3.3.3+incompatible
github.com/spatialcurrent/go-lazy v0.0.0-20211115014721-47315cc003d1
github.com/spf13/cobra v1.7.0
@ -49,6 +49,7 @@ require (
github.com/hashicorp/go-immutable-radix v1.3.1 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/magiconair/properties v1.8.7 // indirect
github.com/microsoft/kiota-serialization-multipart-go v1.0.0 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/pelletier/go-toml/v2 v2.0.9 // indirect
github.com/spf13/afero v1.9.5 // indirect
18
src/go.sum
@ -66,8 +66,8 @@ github.com/andybalholm/brotli v1.0.5 h1:8uQZIdzKmjc/iuPu7O2ioW48L81FgatrcpfFmiq/
|
||||
github.com/andybalholm/brotli v1.0.5/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
|
||||
github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA=
|
||||
github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4=
|
||||
github.com/aws/aws-sdk-go v1.44.326 h1:/6xD/9mKZ2RMTDfbhh9qCxw+CaTbJRvfHJ/NHPFbI38=
|
||||
github.com/aws/aws-sdk-go v1.44.326/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
|
||||
github.com/aws/aws-sdk-go v1.44.328 h1:WBwlf8ym9SDQ/GTIBO9eXyvwappKJyOetWJKl4mT7ZU=
|
||||
github.com/aws/aws-sdk-go v1.44.328/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
|
||||
github.com/aws/aws-xray-sdk-go v1.8.1 h1:O4pXV+hnCskaamGsZnFpzHyAmgPGusBMN6i7nnsy0Fo=
|
||||
github.com/aws/aws-xray-sdk-go v1.8.1/go.mod h1:wMmVYzej3sykAttNBkXQHK/+clAPWTOrPiajEk7Cp3A=
|
||||
github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT8A=
|
||||
@ -192,8 +192,8 @@ github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLe
|
||||
github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
||||
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
|
||||
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
|
||||
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/google/uuid v1.3.1 h1:KjJaJ9iWZ3jOFZIf1Lqf4laDRCasjl0BCmnEGxkdLb4=
|
||||
github.com/google/uuid v1.3.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
|
||||
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
|
||||
github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g=
|
||||
@ -281,10 +281,12 @@ github.com/microsoft/kiota-serialization-form-go v1.0.0 h1:UNdrkMnLFqUCccQZerKjb
|
||||
github.com/microsoft/kiota-serialization-form-go v1.0.0/go.mod h1:h4mQOO6KVTNciMF6azi1J9QB19ujSw3ULKcSNyXXOMA=
|
||||
github.com/microsoft/kiota-serialization-json-go v1.0.4 h1:5TaISWwd2Me8clrK7SqNATo0tv9seOq59y4I5953egQ=
|
||||
github.com/microsoft/kiota-serialization-json-go v1.0.4/go.mod h1:rM4+FsAY+9AEpBsBzkFFis+b/LZLlNKKewuLwK9Q6Mg=
|
||||
github.com/microsoft/kiota-serialization-multipart-go v1.0.0 h1:3O5sb5Zj+moLBiJympbXNaeV07K0d46IfuEd5v9+pBs=
|
||||
github.com/microsoft/kiota-serialization-multipart-go v1.0.0/go.mod h1:yauLeBTpANk4L03XD985akNysG24SnRJGaveZf+p4so=
|
||||
github.com/microsoft/kiota-serialization-text-go v1.0.0 h1:XOaRhAXy+g8ZVpcq7x7a0jlETWnWrEum0RhmbYrTFnA=
|
||||
github.com/microsoft/kiota-serialization-text-go v1.0.0/go.mod h1:sM1/C6ecnQ7IquQOGUrUldaO5wj+9+v7G2W3sQ3fy6M=
|
||||
github.com/microsoftgraph/msgraph-sdk-go v1.14.0 h1:YdhMvzu8bXcfIQGRur6NkXnv4cPOsMBJ44XjfWLOt9Y=
|
||||
github.com/microsoftgraph/msgraph-sdk-go v1.14.0/go.mod h1:ccLv84FJFtwdSzYWM/HlTes5FLzkzzBsYh9kg93/WS8=
|
||||
github.com/microsoftgraph/msgraph-sdk-go v1.15.0 h1:cdz6Bs0T0Hl/NTdUAZq8TRJwidTmX741X2SnVIsn5l4=
|
||||
github.com/microsoftgraph/msgraph-sdk-go v1.15.0/go.mod h1:YfKdWdUwQWuS6E+Qg6+SZnHxJ/kvG2nYQutwzGa5NZs=
|
||||
github.com/microsoftgraph/msgraph-sdk-go-core v1.0.0 h1:7NWTfyXvOjoizW7PmxNp3+8wCKPgpODs/D1cUZ3fkAY=
|
||||
github.com/microsoftgraph/msgraph-sdk-go-core v1.0.0/go.mod h1:tQb4q3YMIj2dWhhXhQSJ4ELpol931ANKzHSYK5kX1qE=
|
||||
github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=
|
||||
@ -342,8 +344,8 @@ github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsT
|
||||
github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A=
|
||||
github.com/prometheus/procfs v0.11.1 h1:xRC8Iq1yyca5ypa9n1EZnWZkt7dwcoRPQwX/5gwaUuI=
|
||||
github.com/prometheus/procfs v0.11.1/go.mod h1:eesXgaPo1q7lBpVMoMy0ZOFTth9hBn4W/y0/p/ScXhY=
|
||||
github.com/puzpuzpuz/xsync/v2 v2.4.1 h1:aGdE1C/HaR/QC6YAFdtZXi60Df8/qBIrs8PKrzkItcM=
|
||||
github.com/puzpuzpuz/xsync/v2 v2.4.1/go.mod h1:gD2H2krq/w52MfPLE+Uy64TzJDVY7lP2znR9qmR35kU=
|
||||
github.com/puzpuzpuz/xsync/v2 v2.5.0 h1:2k4qrO/orvmEXZ3hmtHqIy9XaQtPTwzMZk1+iErpE8c=
|
||||
github.com/puzpuzpuz/xsync/v2 v2.5.0/go.mod h1:gD2H2krq/w52MfPLE+Uy64TzJDVY7lP2znR9qmR35kU=
|
||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis=
|
||||
github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||
|
||||
@ -13,10 +13,10 @@ var ErrNotFound = clues.New("not found")
type CollectionState int

const (
NewState = CollectionState(iota)
NotMovedState
MovedState
DeletedState
NewState CollectionState = 0
NotMovedState CollectionState = 1
MovedState CollectionState = 2
DeletedState CollectionState = 3
)

type FetchRestoreCollection struct {

@ -115,14 +115,6 @@ func (me ManifestEntry) GetTag(key string) (string, bool) {
return v, ok
}

type snapshotManager interface {
FindManifests(
ctx context.Context,
tags map[string]string,
) ([]*manifest.EntryMetadata, error)
LoadSnapshot(ctx context.Context, id manifest.ID) (*snapshot.Manifest, error)
}

func serviceCatString(s path.ServiceType, c path.CategoryType) string {
return s.String() + c.String()
}
191
src/internal/kopia/cleanup_backups.go
Normal file
@ -0,0 +1,191 @@
|
||||
package kopia
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"time"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/kopia/kopia/repo/manifest"
|
||||
"github.com/kopia/kopia/snapshot"
|
||||
"golang.org/x/exp/maps"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/data"
|
||||
"github.com/alcionai/corso/src/internal/model"
|
||||
"github.com/alcionai/corso/src/pkg/backup"
|
||||
"github.com/alcionai/corso/src/pkg/logger"
|
||||
"github.com/alcionai/corso/src/pkg/store"
|
||||
)
|
||||
|
||||
// cleanupOrphanedData uses bs and mf to lookup all models/snapshots for backups
|
||||
// and deletes items that are older than nowFunc() - gcBuffer (cutoff) that are
|
||||
// not "complete" backups with:
|
||||
// - a backup model
|
||||
// - an item data snapshot
|
||||
// - a details snapshot or details model
|
||||
//
|
||||
// We exclude all items younger than the cutoff to add some buffer so that even
|
||||
// if this is run concurrently with a backup it's not likely to delete models
|
||||
// just being created. For example, if there was no buffer period and this is
|
||||
// run when another corso instance has created an item data snapshot but hasn't
|
||||
// yet created the details snapshot or the backup model it would result in this
|
||||
// instance of corso marking the newly created item data snapshot for deletion
|
||||
// because it appears orphaned.
|
||||
//
|
||||
// The buffer duration should be longer than the difference in creation times
|
||||
// between the first item data snapshot/details/backup model made during a
|
||||
// backup operation and the last.
|
||||
//
|
||||
// We don't have hard numbers on the time right now, but if the order of
|
||||
// persistence is (item data snapshot, details snapshot, backup model) it should
|
||||
// be faster than creating the snapshot itself and probably happens O(minutes)
|
||||
// or O(hours) instead of O(days). Of course, that assumes a non-adversarial
|
||||
// setup where things such as machine hiberation, process freezing (i.e. paused
|
||||
// at the OS level), etc. don't occur.
|
||||
func cleanupOrphanedData(
|
||||
ctx context.Context,
|
||||
bs store.Storer,
|
||||
mf manifestFinder,
|
||||
gcBuffer time.Duration,
|
||||
nowFunc func() time.Time,
|
||||
) error {
|
||||
// Get all snapshot manifests.
|
||||
snaps, err := mf.FindManifests(
|
||||
ctx,
|
||||
map[string]string{
|
||||
manifest.TypeLabelKey: snapshot.ManifestType,
|
||||
})
|
||||
if err != nil {
|
||||
return clues.Wrap(err, "getting snapshots")
|
||||
}
|
||||
|
||||
var (
|
||||
// deets is a hash set of the ModelStoreID or snapshot IDs for backup
|
||||
// details. It contains the IDs for both legacy details stored in the model
|
||||
// store and newer details stored as a snapshot because it doesn't matter
|
||||
// what the storage format is. We only need to know the ID so we can:
|
||||
// 1. check if there's a corresponding backup for them
|
||||
// 2. delete the details if they're orphaned
|
||||
deets = map[manifest.ID]struct{}{}
|
||||
// dataSnaps is a hash set of the snapshot IDs for item data snapshots.
|
||||
dataSnaps = map[manifest.ID]struct{}{}
|
||||
)
|
||||
|
||||
cutoff := nowFunc().Add(-gcBuffer)
|
||||
|
||||
// Sort all the snapshots as either details snapshots or item data snapshots.
|
||||
for _, snap := range snaps {
|
||||
// Don't even try to see if this needs garbage collected because it's not
|
||||
// old enough and may correspond to an in-progress operation.
|
||||
if !cutoff.After(snap.ModTime) {
|
||||
continue
|
||||
}
|
||||
|
||||
k, _ := makeTagKV(TagBackupCategory)
|
||||
if _, ok := snap.Labels[k]; ok {
|
||||
dataSnaps[snap.ID] = struct{}{}
|
||||
continue
|
||||
}
|
||||
|
||||
deets[snap.ID] = struct{}{}
|
||||
}
|
||||
|
||||
// Get all legacy backup details models. The initial version of backup delete
|
||||
// didn't seem to delete them so they may also be orphaned if the repo is old
|
||||
// enough.
|
||||
deetsModels, err := bs.GetIDsForType(ctx, model.BackupDetailsSchema, nil)
|
||||
if err != nil {
|
||||
return clues.Wrap(err, "getting legacy backup details")
|
||||
}
|
||||
|
||||
for _, d := range deetsModels {
|
||||
// Don't even try to see if this needs garbage collected because it's not
|
||||
// old enough and may correspond to an in-progress operation.
|
||||
if !cutoff.After(d.ModTime) {
|
||||
continue
|
||||
}
|
||||
|
||||
deets[d.ModelStoreID] = struct{}{}
|
||||
}
|
||||
|
||||
// Get all backup models.
|
||||
bups, err := bs.GetIDsForType(ctx, model.BackupSchema, nil)
|
||||
if err != nil {
|
||||
return clues.Wrap(err, "getting all backup models")
|
||||
}
|
||||
|
||||
toDelete := maps.Clone(deets)
|
||||
maps.Copy(toDelete, dataSnaps)
|
||||
|
||||
for _, bup := range bups {
|
||||
// Don't even try to see if this needs garbage collected because it's not
|
||||
// old enough and may correspond to an in-progress operation.
|
||||
if !cutoff.After(bup.ModTime) {
|
||||
continue
|
||||
}
|
||||
|
||||
toDelete[manifest.ID(bup.ModelStoreID)] = struct{}{}
|
||||
|
||||
bm := backup.Backup{}
|
||||
|
||||
if err := bs.GetWithModelStoreID(
|
||||
ctx,
|
||||
model.BackupSchema,
|
||||
bup.ModelStoreID,
|
||||
&bm,
|
||||
); err != nil {
|
||||
if !errors.Is(err, data.ErrNotFound) {
|
||||
return clues.Wrap(err, "getting backup model").
|
||||
With("search_backup_id", bup.ID)
|
||||
}
|
||||
|
||||
// Probably safe to continue if the model wasn't found because that means
|
||||
// that the possible item data and details for the backup are now
|
||||
// orphaned. They'll be deleted since we won't remove them from the delete
|
||||
// set.
|
||||
//
|
||||
// The fact that we exclude all items younger than the cutoff should
|
||||
// already exclude items that are from concurrent corso backup operations.
|
||||
//
|
||||
// This isn't expected to really pop up, but it's possible if this
|
||||
// function is run concurrently with either a backup delete or another
|
||||
// instance of this function.
|
||||
logger.Ctx(ctx).Debugw(
|
||||
"backup model not found",
|
||||
"search_backup_id", bup.ModelStoreID)
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
ssid := bm.StreamStoreID
|
||||
if len(ssid) == 0 {
|
||||
ssid = bm.DetailsID
|
||||
}
|
||||
|
||||
_, dataOK := dataSnaps[manifest.ID(bm.SnapshotID)]
|
||||
_, deetsOK := deets[manifest.ID(ssid)]
|
||||
|
||||
// All data is present, we shouldn't garbage collect this backup.
|
||||
if deetsOK && dataOK {
|
||||
delete(toDelete, bup.ModelStoreID)
|
||||
delete(toDelete, manifest.ID(bm.SnapshotID))
|
||||
delete(toDelete, manifest.ID(ssid))
|
||||
}
|
||||
}
|
||||
|
||||
logger.Ctx(ctx).Infow(
|
||||
"garbage collecting orphaned items",
|
||||
"num_items", len(toDelete),
|
||||
"kopia_ids", maps.Keys(toDelete))
|
||||
|
||||
// Use single atomic batch delete operation to cleanup to keep from making a
|
||||
// bunch of manifest content blobs.
|
||||
if err := bs.DeleteWithModelStoreIDs(ctx, maps.Keys(toDelete)...); err != nil {
|
||||
return clues.Wrap(err, "deleting orphaned data")
|
||||
}
|
||||
|
||||
// TODO(ashmrtn): Do some pruning of assist backup models so we don't keep
|
||||
// them around forever.
|
||||
|
||||
return nil
|
||||
}
|
||||
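As the comment at the top of cleanup_backups.go explains, the buffer keeps models from an in-flight backup out of the deletion set. A hedged sketch of how the kopia wrapper might invoke this routine with the wall clock and a one-day buffer; the method name, the cleanupBuffer constant, and passing the conn as the manifestFinder are assumptions, not taken from this diff:

// Sketch only: wiring cleanupOrphanedData into repo maintenance.
const cleanupBuffer = 24 * time.Hour // assumed value

func (w *conn) cleanupOrphans(ctx context.Context, bs store.Storer) error {
	// time.Now is injected as nowFunc so tests can pin the cutoff deterministically.
	return cleanupOrphanedData(ctx, bs, w, cleanupBuffer, time.Now)
}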
540
src/internal/kopia/cleanup_backups_test.go
Normal file
@ -0,0 +1,540 @@
|
||||
package kopia
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/kopia/kopia/repo/manifest"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/data"
|
||||
"github.com/alcionai/corso/src/internal/model"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/pkg/backup"
|
||||
)
|
||||
|
||||
type BackupCleanupUnitSuite struct {
|
||||
tester.Suite
|
||||
}
|
||||
|
||||
func TestBackupCleanupUnitSuite(t *testing.T) {
|
||||
suite.Run(t, &BackupCleanupUnitSuite{Suite: tester.NewUnitSuite(t)})
|
||||
}
|
||||
|
||||
type mockManifestFinder struct {
|
||||
t *testing.T
|
||||
manifests []*manifest.EntryMetadata
|
||||
err error
|
||||
}
|
||||
|
||||
func (mmf mockManifestFinder) FindManifests(
|
||||
ctx context.Context,
|
||||
tags map[string]string,
|
||||
) ([]*manifest.EntryMetadata, error) {
|
||||
assert.Equal(
|
||||
mmf.t,
|
||||
map[string]string{"type": "snapshot"},
|
||||
tags,
|
||||
"snapshot search tags")
|
||||
|
||||
return mmf.manifests, clues.Stack(mmf.err).OrNil()
|
||||
}
|
||||
|
||||
type mockStorer struct {
|
||||
t *testing.T
|
||||
|
||||
details []*model.BaseModel
|
||||
detailsErr error
|
||||
|
||||
backups []backupRes
|
||||
backupListErr error
|
||||
|
||||
expectDeleteIDs []manifest.ID
|
||||
deleteErr error
|
||||
}
|
||||
|
||||
func (ms mockStorer) Delete(context.Context, model.Schema, model.StableID) error {
|
||||
return clues.New("not implemented")
|
||||
}
|
||||
|
||||
func (ms mockStorer) Get(context.Context, model.Schema, model.StableID, model.Model) error {
|
||||
return clues.New("not implemented")
|
||||
}
|
||||
|
||||
func (ms mockStorer) Put(context.Context, model.Schema, model.Model) error {
|
||||
return clues.New("not implemented")
|
||||
}
|
||||
|
||||
func (ms mockStorer) Update(context.Context, model.Schema, model.Model) error {
|
||||
return clues.New("not implemented")
|
||||
}
|
||||
|
||||
func (ms mockStorer) GetIDsForType(
|
||||
_ context.Context,
|
||||
s model.Schema,
|
||||
tags map[string]string,
|
||||
) ([]*model.BaseModel, error) {
|
||||
assert.Empty(ms.t, tags, "model search tags")
|
||||
|
||||
switch s {
|
||||
case model.BackupDetailsSchema:
|
||||
return ms.details, clues.Stack(ms.detailsErr).OrNil()
|
||||
|
||||
case model.BackupSchema:
|
||||
var bases []*model.BaseModel
|
||||
|
||||
for _, b := range ms.backups {
|
||||
bases = append(bases, &b.bup.BaseModel)
|
||||
}
|
||||
|
||||
return bases, clues.Stack(ms.backupListErr).OrNil()
|
||||
}
|
||||
|
||||
return nil, clues.New(fmt.Sprintf("unknown type: %s", s.String()))
|
||||
}
|
||||
|
||||
func (ms mockStorer) GetWithModelStoreID(
|
||||
_ context.Context,
|
||||
s model.Schema,
|
||||
id manifest.ID,
|
||||
m model.Model,
|
||||
) error {
|
||||
assert.Equal(ms.t, model.BackupSchema, s, "model get schema")
|
||||
|
||||
d := m.(*backup.Backup)
|
||||
|
||||
for _, b := range ms.backups {
|
||||
if id == b.bup.ModelStoreID {
|
||||
*d = *b.bup
|
||||
return clues.Stack(b.err).OrNil()
|
||||
}
|
||||
}
|
||||
|
||||
return clues.Stack(data.ErrNotFound)
|
||||
}
|
||||
|
||||
func (ms mockStorer) DeleteWithModelStoreIDs(
|
||||
_ context.Context,
|
||||
ids ...manifest.ID,
|
||||
) error {
|
||||
assert.ElementsMatch(ms.t, ms.expectDeleteIDs, ids, "model delete IDs")
|
||||
return clues.Stack(ms.deleteErr).OrNil()
|
||||
}
|
||||
|
||||
// backupRes represents an individual return value for an item in GetIDsForType
|
||||
// or the result of GetWithModelStoreID. err is used for GetWithModelStoreID
|
||||
// only.
|
||||
type backupRes struct {
|
||||
bup *backup.Backup
|
||||
err error
|
||||
}
|
||||
|
||||
func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
|
||||
backupTag, _ := makeTagKV(TagBackupCategory)
|
||||
|
||||
// Current backup and snapshots.
|
||||
bupCurrent := &backup.Backup{
|
||||
BaseModel: model.BaseModel{
|
||||
ID: model.StableID("current-bup-id"),
|
||||
ModelStoreID: manifest.ID("current-bup-msid"),
|
||||
},
|
||||
SnapshotID: "current-snap-msid",
|
||||
StreamStoreID: "current-deets-msid",
|
||||
}
|
||||
|
||||
snapCurrent := &manifest.EntryMetadata{
|
||||
ID: "current-snap-msid",
|
||||
Labels: map[string]string{
|
||||
backupTag: "0",
|
||||
},
|
||||
}
|
||||
|
||||
deetsCurrent := &manifest.EntryMetadata{
|
||||
ID: "current-deets-msid",
|
||||
}
|
||||
|
||||
// Legacy backup with details in separate model.
|
||||
bupLegacy := &backup.Backup{
|
||||
BaseModel: model.BaseModel{
|
||||
ID: model.StableID("legacy-bup-id"),
|
||||
ModelStoreID: manifest.ID("legacy-bup-msid"),
|
||||
},
|
||||
SnapshotID: "legacy-snap-msid",
|
||||
DetailsID: "legacy-deets-msid",
|
||||
}
|
||||
|
||||
snapLegacy := &manifest.EntryMetadata{
|
||||
ID: "legacy-snap-msid",
|
||||
Labels: map[string]string{
|
||||
backupTag: "0",
|
||||
},
|
||||
}
|
||||
|
||||
deetsLegacy := &model.BaseModel{
|
||||
ID: "legacy-deets-id",
|
||||
ModelStoreID: "legacy-deets-msid",
|
||||
}
|
||||
|
||||
// Incomplete backup missing data snapshot.
|
||||
bupNoSnapshot := &backup.Backup{
|
||||
BaseModel: model.BaseModel{
|
||||
ID: model.StableID("ns-bup-id"),
|
||||
ModelStoreID: manifest.ID("ns-bup-id-msid"),
|
||||
},
|
||||
StreamStoreID: "ns-deets-msid",
|
||||
}
|
||||
|
||||
deetsNoSnapshot := &manifest.EntryMetadata{
|
||||
ID: "ns-deets-msid",
|
||||
}
|
||||
|
||||
// Legacy incomplete backup missing data snapshot.
|
||||
bupLegacyNoSnapshot := &backup.Backup{
|
||||
BaseModel: model.BaseModel{
|
||||
ID: model.StableID("ns-legacy-bup-id"),
|
||||
ModelStoreID: manifest.ID("ns-legacy-bup-id-msid"),
|
||||
},
|
||||
DetailsID: "ns-legacy-deets-msid",
|
||||
}
|
||||
|
||||
deetsLegacyNoSnapshot := &model.BaseModel{
|
||||
ID: "ns-legacy-deets-id",
|
||||
ModelStoreID: "ns-legacy-deets-msid",
|
||||
}
|
||||
|
||||
// Incomplete backup missing details.
|
||||
bupNoDetails := &backup.Backup{
|
||||
BaseModel: model.BaseModel{
|
||||
ID: model.StableID("nssid-bup-id"),
|
||||
ModelStoreID: manifest.ID("nssid-bup-msid"),
|
||||
},
|
||||
SnapshotID: "nssid-snap-msid",
|
||||
}
|
||||
|
||||
snapNoDetails := &manifest.EntryMetadata{
|
||||
ID: "nssid-snap-msid",
|
||||
Labels: map[string]string{
|
||||
backupTag: "0",
|
||||
},
|
||||
}
|
||||
|
||||
// Get some stable time so that we can do everything relative to this in the
|
||||
// tests. Mostly just makes reasoning/viewing times easier because the only
|
||||
// differences will be the changes we make.
|
||||
baseTime := time.Now()
|
||||
|
||||
manifestWithTime := func(
|
||||
mt time.Time,
|
||||
m *manifest.EntryMetadata,
|
||||
) *manifest.EntryMetadata {
|
||||
res := *m
|
||||
res.ModTime = mt
|
||||
|
||||
return &res
|
||||
}
|
||||
|
||||
backupWithTime := func(mt time.Time, b *backup.Backup) *backup.Backup {
|
||||
res := *b
|
||||
res.ModTime = mt
|
||||
|
||||
return &res
|
||||
}
|
||||
|
||||
table := []struct {
|
||||
name string
|
||||
snapshots []*manifest.EntryMetadata
|
||||
snapshotFetchErr error
|
||||
// only need BaseModel here since we never look inside the details items.
|
||||
detailsModels []*model.BaseModel
|
||||
detailsModelListErr error
|
||||
backups []backupRes
|
||||
backupListErr error
|
||||
deleteErr error
|
||||
time time.Time
|
||||
buffer time.Duration
|
||||
|
||||
expectDeleteIDs []manifest.ID
|
||||
expectErr assert.ErrorAssertionFunc
|
||||
}{
|
||||
{
|
||||
name: "EmptyRepo",
|
||||
time: baseTime,
|
||||
expectErr: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "OnlyCompleteBackups Noops",
|
||||
snapshots: []*manifest.EntryMetadata{
|
||||
snapCurrent,
|
||||
deetsCurrent,
|
||||
snapLegacy,
|
||||
},
|
||||
detailsModels: []*model.BaseModel{
|
||||
deetsLegacy,
|
||||
},
|
||||
backups: []backupRes{
|
||||
{bup: bupCurrent},
|
||||
{bup: bupLegacy},
|
||||
},
|
||||
time: baseTime,
|
||||
expectErr: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "MissingFieldsInBackup CausesCleanup",
|
||||
snapshots: []*manifest.EntryMetadata{
|
||||
snapNoDetails,
|
||||
deetsNoSnapshot,
|
||||
},
|
||||
detailsModels: []*model.BaseModel{
|
||||
deetsLegacyNoSnapshot,
|
||||
},
|
||||
backups: []backupRes{
|
||||
{bup: bupNoSnapshot},
|
||||
{bup: bupLegacyNoSnapshot},
|
||||
{bup: bupNoDetails},
|
||||
},
|
||||
expectDeleteIDs: []manifest.ID{
|
||||
manifest.ID(bupNoSnapshot.ModelStoreID),
|
||||
manifest.ID(bupLegacyNoSnapshot.ModelStoreID),
|
||||
manifest.ID(bupNoDetails.ModelStoreID),
|
||||
manifest.ID(deetsLegacyNoSnapshot.ModelStoreID),
|
||||
snapNoDetails.ID,
|
||||
deetsNoSnapshot.ID,
|
||||
},
|
||||
time: baseTime,
|
||||
expectErr: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "MissingSnapshot CausesCleanup",
|
||||
snapshots: []*manifest.EntryMetadata{
|
||||
deetsCurrent,
|
||||
},
|
||||
detailsModels: []*model.BaseModel{
|
||||
deetsLegacy,
|
||||
},
|
||||
backups: []backupRes{
|
||||
{bup: bupCurrent},
|
||||
{bup: bupLegacy},
|
||||
},
|
||||
expectDeleteIDs: []manifest.ID{
|
||||
manifest.ID(bupCurrent.ModelStoreID),
|
||||
deetsCurrent.ID,
|
||||
manifest.ID(bupLegacy.ModelStoreID),
|
||||
manifest.ID(deetsLegacy.ModelStoreID),
|
||||
},
|
||||
time: baseTime,
|
||||
expectErr: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "MissingDetails CausesCleanup",
|
||||
snapshots: []*manifest.EntryMetadata{
|
||||
snapCurrent,
|
||||
snapLegacy,
|
||||
},
|
||||
backups: []backupRes{
|
||||
{bup: bupCurrent},
|
||||
{bup: bupLegacy},
|
||||
},
|
||||
expectDeleteIDs: []manifest.ID{
|
||||
manifest.ID(bupCurrent.ModelStoreID),
|
||||
manifest.ID(bupLegacy.ModelStoreID),
|
||||
snapCurrent.ID,
|
||||
snapLegacy.ID,
|
||||
},
|
||||
time: baseTime,
|
||||
expectErr: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "SnapshotsListError Fails",
|
||||
snapshotFetchErr: assert.AnError,
|
||||
backups: []backupRes{
|
||||
{bup: bupCurrent},
|
||||
},
|
||||
expectErr: assert.Error,
|
||||
},
|
||||
{
|
||||
name: "LegacyDetailsListError Fails",
|
||||
snapshots: []*manifest.EntryMetadata{
|
||||
snapCurrent,
|
||||
},
|
||||
detailsModelListErr: assert.AnError,
|
||||
backups: []backupRes{
|
||||
{bup: bupCurrent},
|
||||
},
|
||||
time: baseTime,
|
||||
expectErr: assert.Error,
|
||||
},
|
||||
{
|
||||
name: "BackupIDsListError Fails",
|
||||
snapshots: []*manifest.EntryMetadata{
|
||||
snapCurrent,
|
||||
deetsCurrent,
|
||||
},
|
||||
backupListErr: assert.AnError,
|
||||
time: baseTime,
|
||||
expectErr: assert.Error,
|
||||
},
|
||||
{
|
||||
name: "BackupModelGetErrorNotFound CausesCleanup",
|
||||
snapshots: []*manifest.EntryMetadata{
|
||||
snapCurrent,
|
||||
deetsCurrent,
|
||||
snapLegacy,
|
||||
snapNoDetails,
|
||||
},
|
||||
detailsModels: []*model.BaseModel{
|
||||
deetsLegacy,
|
||||
},
|
||||
backups: []backupRes{
|
||||
{bup: bupCurrent},
|
||||
{
|
||||
bup: bupLegacy,
|
||||
err: data.ErrNotFound,
|
||||
},
|
||||
{
|
||||
bup: bupNoDetails,
|
||||
err: data.ErrNotFound,
|
||||
},
|
||||
},
|
||||
// Backup IDs are still included in here because they're added to the
|
||||
// deletion set prior to attempting to fetch models. The model store
|
||||
// delete operation should ignore missing models though so there's no
|
||||
// issue.
|
||||
expectDeleteIDs: []manifest.ID{
|
||||
snapLegacy.ID,
|
||||
manifest.ID(deetsLegacy.ModelStoreID),
|
||||
manifest.ID(bupLegacy.ModelStoreID),
|
||||
snapNoDetails.ID,
|
||||
manifest.ID(bupNoDetails.ModelStoreID),
|
||||
},
|
||||
time: baseTime,
|
||||
expectErr: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "BackupModelGetError Fails",
|
||||
snapshots: []*manifest.EntryMetadata{
|
||||
snapCurrent,
|
||||
deetsCurrent,
|
||||
snapLegacy,
|
||||
snapNoDetails,
|
||||
},
|
||||
detailsModels: []*model.BaseModel{
|
||||
deetsLegacy,
|
||||
},
|
||||
backups: []backupRes{
|
||||
{bup: bupCurrent},
|
||||
{
|
||||
bup: bupLegacy,
|
||||
err: assert.AnError,
|
||||
},
|
||||
{bup: bupNoDetails},
|
||||
},
|
||||
time: baseTime,
|
||||
expectErr: assert.Error,
|
||||
},
|
||||
{
|
||||
name: "DeleteError Fails",
|
||||
snapshots: []*manifest.EntryMetadata{
|
||||
snapCurrent,
|
||||
deetsCurrent,
|
||||
snapLegacy,
|
||||
snapNoDetails,
|
||||
},
|
||||
detailsModels: []*model.BaseModel{
|
||||
deetsLegacy,
|
||||
},
|
||||
backups: []backupRes{
|
||||
{bup: bupCurrent},
|
||||
{bup: bupLegacy},
|
||||
{bup: bupNoDetails},
|
||||
},
|
||||
expectDeleteIDs: []manifest.ID{
|
||||
snapNoDetails.ID,
|
||||
manifest.ID(bupNoDetails.ModelStoreID),
|
||||
},
|
||||
deleteErr: assert.AnError,
|
||||
time: baseTime,
|
||||
expectErr: assert.Error,
|
||||
},
|
||||
{
|
||||
name: "MissingSnapshot BarelyTooYoungForCleanup Noops",
|
||||
snapshots: []*manifest.EntryMetadata{
|
||||
manifestWithTime(baseTime, deetsCurrent),
|
||||
},
|
||||
backups: []backupRes{
|
||||
{bup: backupWithTime(baseTime, bupCurrent)},
|
||||
},
|
||||
time: baseTime.Add(24 * time.Hour),
|
||||
buffer: 24 * time.Hour,
|
||||
expectErr: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "MissingSnapshot BarelyOldEnough CausesCleanup",
|
||||
snapshots: []*manifest.EntryMetadata{
|
||||
manifestWithTime(baseTime, deetsCurrent),
|
||||
},
|
||||
backups: []backupRes{
|
||||
{bup: backupWithTime(baseTime, bupCurrent)},
|
||||
},
|
||||
expectDeleteIDs: []manifest.ID{
|
||||
deetsCurrent.ID,
|
||||
manifest.ID(bupCurrent.ModelStoreID),
|
||||
},
|
||||
time: baseTime.Add((24 * time.Hour) + time.Second),
|
||||
buffer: 24 * time.Hour,
|
||||
expectErr: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "BackupGetErrorNotFound TooYoung Noops",
|
||||
snapshots: []*manifest.EntryMetadata{
|
||||
manifestWithTime(baseTime, snapCurrent),
|
||||
manifestWithTime(baseTime, deetsCurrent),
|
||||
},
|
||||
backups: []backupRes{
|
||||
{
|
||||
bup: backupWithTime(baseTime, bupCurrent),
|
||||
err: data.ErrNotFound,
|
||||
},
|
||||
},
|
||||
time: baseTime,
|
||||
buffer: 24 * time.Hour,
|
||||
expectErr: assert.NoError,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
mbs := mockStorer{
|
||||
t: t,
|
||||
details: test.detailsModels,
|
||||
detailsErr: test.detailsModelListErr,
|
||||
backups: test.backups,
|
||||
backupListErr: test.backupListErr,
|
||||
expectDeleteIDs: test.expectDeleteIDs,
|
||||
deleteErr: test.deleteErr,
|
||||
}
|
||||
|
||||
mmf := mockManifestFinder{
|
||||
t: t,
|
||||
manifests: test.snapshots,
|
||||
err: test.snapshotFetchErr,
|
||||
}
|
||||
|
||||
err := cleanupOrphanedData(
|
||||
ctx,
|
||||
mbs,
|
||||
mmf,
|
||||
test.buffer,
|
||||
func() time.Time { return test.time })
|
||||
test.expectErr(t, err, clues.ToCore(err))
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -52,9 +52,26 @@ var (
|
||||
}
|
||||
)
|
||||
|
||||
type snapshotLoader interface {
|
||||
SnapshotRoot(man *snapshot.Manifest) (fs.Entry, error)
|
||||
}
|
||||
type (
|
||||
manifestFinder interface {
|
||||
FindManifests(
|
||||
ctx context.Context,
|
||||
tags map[string]string,
|
||||
) ([]*manifest.EntryMetadata, error)
|
||||
}
|
||||
|
||||
snapshotManager interface {
|
||||
manifestFinder
|
||||
LoadSnapshot(
|
||||
ctx context.Context,
|
||||
id manifest.ID,
|
||||
) (*snapshot.Manifest, error)
|
||||
}
|
||||
|
||||
snapshotLoader interface {
|
||||
SnapshotRoot(man *snapshot.Manifest) (fs.Entry, error)
|
||||
}
|
||||
)
|
||||
|
||||
var (
|
||||
_ snapshotManager = &conn{}
|
||||
|
||||
@ -210,6 +210,7 @@ func (ms ModelStore) populateBaseModelFromMetadata(
|
||||
base.ID = model.StableID(id)
|
||||
base.ModelVersion = v
|
||||
base.Tags = m.Labels
|
||||
base.ModTime = m.ModTime
|
||||
|
||||
stripHiddenTags(base.Tags)
|
||||
|
||||
|
||||
@ -4,6 +4,7 @@ import (
|
||||
"context"
|
||||
"sync"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/google/uuid"
|
||||
@ -34,6 +35,18 @@ func getModelStore(t *testing.T, ctx context.Context) *ModelStore {
|
||||
return &ModelStore{c: c, modelVersion: globalModelVersion}
|
||||
}
|
||||
|
||||
func assertEqualNoModTime(t *testing.T, expected, got *fooModel) {
|
||||
t.Helper()
|
||||
|
||||
expectedClean := *expected
|
||||
gotClean := *got
|
||||
|
||||
expectedClean.ModTime = time.Time{}
|
||||
gotClean.ModTime = time.Time{}
|
||||
|
||||
assert.Equal(t, expectedClean, gotClean)
|
||||
}
|
||||
|
||||
// ---------------
|
||||
// unit tests
|
||||
// ---------------
|
||||
@ -259,6 +272,8 @@ func (suite *ModelStoreIntegrationSuite) TestPutGet() {
|
||||
// Avoid some silly test errors from comparing nil to empty map.
|
||||
foo.Tags = map[string]string{}
|
||||
|
||||
startTime := time.Now()
|
||||
|
||||
err := suite.m.Put(suite.ctx, test.s, foo)
|
||||
test.check(t, err, clues.ToCore(err))
|
||||
|
||||
@ -273,11 +288,17 @@ func (suite *ModelStoreIntegrationSuite) TestPutGet() {
|
||||
returned := &fooModel{}
|
||||
err = suite.m.Get(suite.ctx, test.s, foo.ID, returned)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
assert.Equal(t, foo, returned)
|
||||
|
||||
assertEqualNoModTime(t, foo, returned)
|
||||
assert.WithinDuration(t, startTime, returned.ModTime, 5*time.Second)
|
||||
|
||||
returned = &fooModel{}
|
||||
|
||||
err = suite.m.GetWithModelStoreID(suite.ctx, test.s, foo.ModelStoreID, returned)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
assert.Equal(t, foo, returned)
|
||||
|
||||
assertEqualNoModTime(t, foo, returned)
|
||||
assert.WithinDuration(t, startTime, returned.ModTime, 5*time.Second)
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -324,11 +345,11 @@ func (suite *ModelStoreIntegrationSuite) TestPutGet_PreSetID() {
|
||||
|
||||
err = suite.m.Get(suite.ctx, mdl, foo.ID, returned)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
assert.Equal(t, foo, returned)
|
||||
assertEqualNoModTime(t, foo, returned)
|
||||
|
||||
err = suite.m.GetWithModelStoreID(suite.ctx, mdl, foo.ModelStoreID, returned)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
assert.Equal(t, foo, returned)
|
||||
assertEqualNoModTime(t, foo, returned)
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -350,11 +371,11 @@ func (suite *ModelStoreIntegrationSuite) TestPutGet_WithTags() {
|
||||
returned := &fooModel{}
|
||||
err = suite.m.Get(suite.ctx, theModelType, foo.ID, returned)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
assert.Equal(t, foo, returned)
|
||||
assertEqualNoModTime(t, foo, returned)
|
||||
|
||||
err = suite.m.GetWithModelStoreID(suite.ctx, theModelType, foo.ModelStoreID, returned)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
assert.Equal(t, foo, returned)
|
||||
assertEqualNoModTime(t, foo, returned)
|
||||
}
|
||||
|
||||
func (suite *ModelStoreIntegrationSuite) TestGet_NotFoundErrors() {
|
||||
@ -559,7 +580,16 @@ func (suite *ModelStoreIntegrationSuite) TestGetOfTypeWithTags() {
|
||||
ids, err := suite.m.GetIDsForType(suite.ctx, test.s, test.tags)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
assert.ElementsMatch(t, expected, ids)
|
||||
cleanIDs := make([]*model.BaseModel, 0, len(ids))
|
||||
|
||||
for _, id := range ids {
|
||||
id2 := *id
|
||||
id2.ModTime = time.Time{}
|
||||
|
||||
cleanIDs = append(cleanIDs, &id2)
|
||||
}
|
||||
|
||||
assert.ElementsMatch(t, expected, cleanIDs)
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -627,7 +657,7 @@ func (suite *ModelStoreIntegrationSuite) TestPutUpdate() {
|
||||
|
||||
err = m.GetWithModelStoreID(ctx, theModelType, foo.ModelStoreID, returned)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
assert.Equal(t, foo, returned)
|
||||
assertEqualNoModTime(t, foo, returned)
|
||||
|
||||
ids, err := m.GetIDsForType(ctx, theModelType, nil)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
@ -822,7 +852,7 @@ func (suite *ModelStoreRegressionSuite) TestFailDuringWriteSessionHasNoVisibleEf
|
||||
|
||||
err = m.GetWithModelStoreID(ctx, theModelType, foo.ModelStoreID, returned)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
assert.Equal(t, foo, returned)
|
||||
assertEqualNoModTime(t, foo, returned)
|
||||
}
|
||||
|
||||
func openConnAndModelStore(
|
||||
|
||||
@ -31,13 +31,13 @@ type collectionScope int
const (
// CollectionScopeUnknown is used when we don't know and don't need
// to know the kind, like in the case of deletes
CollectionScopeUnknown collectionScope = iota
CollectionScopeUnknown collectionScope = 0

// CollectionScopeFolder is used for regular folder collections
CollectionScopeFolder
CollectionScopeFolder collectionScope = 1

// CollectionScopePackage is used to represent OneNote items
CollectionScopePackage
CollectionScopePackage collectionScope = 2
)

const restrictedDirectory = "Site Pages"
@ -14,8 +14,8 @@ import (
type SharingMode int

const (
SharingModeCustom = SharingMode(iota)
SharingModeInherited
SharingModeCustom SharingMode = 0
SharingModeInherited SharingMode = 1
)

type GV2Type string
@ -59,6 +59,7 @@ func CollectPages(
|
||||
bpc inject.BackupProducerConfig,
|
||||
creds account.M365Config,
|
||||
ac api.Client,
|
||||
scope selectors.SharePointScope,
|
||||
su support.StatusUpdater,
|
||||
errs *fault.Bus,
|
||||
) ([]data.BackupCollection, error) {
|
||||
@ -105,7 +106,7 @@ func CollectPages(
|
||||
collection := NewCollection(
|
||||
dir,
|
||||
ac,
|
||||
Pages,
|
||||
scope,
|
||||
su,
|
||||
bpc.Options)
|
||||
collection.SetBetaService(betaService)
|
||||
@ -122,6 +123,7 @@ func CollectLists(
|
||||
bpc inject.BackupProducerConfig,
|
||||
ac api.Client,
|
||||
tenantID string,
|
||||
scope selectors.SharePointScope,
|
||||
su support.StatusUpdater,
|
||||
errs *fault.Bus,
|
||||
) ([]data.BackupCollection, error) {
|
||||
@ -156,7 +158,7 @@ func CollectLists(
|
||||
collection := NewCollection(
|
||||
dir,
|
||||
ac,
|
||||
List,
|
||||
scope,
|
||||
su,
|
||||
bpc.Options)
|
||||
collection.AddJob(tuple.ID)
|
||||
|
||||
@ -16,6 +16,7 @@ import (
|
||||
"github.com/alcionai/corso/src/internal/version"
|
||||
"github.com/alcionai/corso/src/pkg/control"
|
||||
"github.com/alcionai/corso/src/pkg/fault"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
)
|
||||
|
||||
@ -61,11 +62,14 @@ func (suite *SharePointPagesSuite) TestCollectPages() {
|
||||
ProtectedResource: mock.NewProvider(siteID, siteID),
|
||||
}
|
||||
|
||||
sel := selectors.NewSharePointBackup([]string{siteID})
|
||||
|
||||
col, err := CollectPages(
|
||||
ctx,
|
||||
bpc,
|
||||
creds,
|
||||
ac,
|
||||
sel.Lists(selectors.Any())[0],
|
||||
(&MockGraphService{}).UpdateStatus,
|
||||
fault.New(true))
|
||||
assert.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
@ -21,19 +21,23 @@ import (
|
||||
"github.com/alcionai/corso/src/pkg/fault"
|
||||
"github.com/alcionai/corso/src/pkg/logger"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
)
|
||||
|
||||
type DataCategory int
|
||||
|
||||
// channel sizes
|
||||
const (
|
||||
collectionChannelBufferSize = 50
|
||||
fetchChannelSize = 5
|
||||
)
|
||||
|
||||
//go:generate stringer -type=DataCategory
|
||||
const (
|
||||
collectionChannelBufferSize = 50
|
||||
fetchChannelSize = 5
|
||||
Unknown DataCategory = iota
|
||||
List
|
||||
Drive
|
||||
Pages
|
||||
Unknown DataCategory = 0
|
||||
List DataCategory = 1
|
||||
Pages DataCategory = 2
|
||||
)
|
||||
|
||||
var (
|
||||
@ -53,7 +57,7 @@ type Collection struct {
|
||||
// jobs contain the SharePoint.Site.ListIDs for the associated list(s).
|
||||
jobs []string
|
||||
// M365 IDs of the items of this collection
|
||||
category DataCategory
|
||||
category path.CategoryType
|
||||
client api.Sites
|
||||
ctrl control.Options
|
||||
betaService *betaAPI.BetaService
|
||||
@ -64,7 +68,7 @@ type Collection struct {
|
||||
func NewCollection(
|
||||
folderPath path.Path,
|
||||
ac api.Client,
|
||||
category DataCategory,
|
||||
scope selectors.SharePointScope,
|
||||
statusUpdater support.StatusUpdater,
|
||||
ctrlOpts control.Options,
|
||||
) *Collection {
|
||||
@ -74,7 +78,7 @@ func NewCollection(
|
||||
data: make(chan data.Item, collectionChannelBufferSize),
|
||||
client: ac.Sites(),
|
||||
statusUpdater: statusUpdater,
|
||||
category: category,
|
||||
category: scope.Category().PathType(),
|
||||
ctrl: ctrlOpts,
|
||||
}
|
||||
|
||||
@ -198,9 +202,9 @@ func (sc *Collection) runPopulate(
|
||||
|
||||
// Switch retrieval function based on category
|
||||
switch sc.category {
|
||||
case List:
|
||||
case path.ListsCategory:
|
||||
metrics, err = sc.retrieveLists(ctx, writer, colProgress, errs)
|
||||
case Pages:
|
||||
case path.PagesCategory:
|
||||
metrics, err = sc.retrievePages(ctx, sc.client, writer, colProgress, errs)
|
||||
}
|
||||
|
||||
|
||||
@ -23,6 +23,7 @@ import (
|
||||
"github.com/alcionai/corso/src/pkg/control/testdata"
|
||||
"github.com/alcionai/corso/src/pkg/fault"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
)
|
||||
|
||||
@ -82,16 +83,18 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
|
||||
dirRoot = "directory"
|
||||
)
|
||||
|
||||
sel := selectors.NewSharePointBackup([]string{"site"})
|
||||
|
||||
tables := []struct {
|
||||
name, itemName string
|
||||
category DataCategory
|
||||
scope selectors.SharePointScope
|
||||
getDir func(t *testing.T) path.Path
|
||||
getItem func(t *testing.T, itemName string) *Item
|
||||
}{
|
||||
{
|
||||
name: "List",
|
||||
itemName: "MockListing",
|
||||
category: List,
|
||||
scope: sel.Lists(selectors.Any())[0],
|
||||
getDir: func(t *testing.T) path.Path {
|
||||
dir, err := path.Build(
|
||||
tenant,
|
||||
@ -127,7 +130,7 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
|
||||
{
|
||||
name: "Pages",
|
||||
itemName: "MockPages",
|
||||
category: Pages,
|
||||
scope: sel.Pages(selectors.Any())[0],
|
||||
getDir: func(t *testing.T) path.Path {
|
||||
dir, err := path.Build(
|
||||
tenant,
|
||||
@ -166,7 +169,7 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
|
||||
col := NewCollection(
|
||||
test.getDir(t),
|
||||
suite.ac,
|
||||
test.category,
|
||||
test.scope,
|
||||
nil,
|
||||
control.DefaultOptions())
|
||||
col.data <- test.getItem(t, test.itemName)
|
||||
|
||||
@ -1,27 +0,0 @@
|
||||
// Code generated by "stringer -type=DataCategory"; DO NOT EDIT.
|
||||
|
||||
package site
|
||||
|
||||
import "strconv"
|
||||
|
||||
func _() {
|
||||
// An "invalid array index" compiler error signifies that the constant values have changed.
|
||||
// Re-run the stringer command to generate them again.
|
||||
var x [1]struct{}
|
||||
_ = x[Unknown-2]
|
||||
_ = x[List-3]
|
||||
_ = x[Drive-4]
|
||||
_ = x[Pages-5]
|
||||
}
|
||||
|
||||
const _DataCategory_name = "UnknownListDrivePages"
|
||||
|
||||
var _DataCategory_index = [...]uint8{0, 7, 11, 16, 21}
|
||||
|
||||
func (i DataCategory) String() string {
|
||||
i -= 2
|
||||
if i < 0 || i >= DataCategory(len(_DataCategory_index)-1) {
|
||||
return "DataCategory(" + strconv.FormatInt(int64(i+2), 10) + ")"
|
||||
}
|
||||
return _DataCategory_name[_DataCategory_index[i]:_DataCategory_index[i+1]]
|
||||
}
|
||||
@ -63,6 +63,7 @@ func ProduceBackupCollections(
|
||||
bpc,
|
||||
ac,
|
||||
creds.AzureTenantID,
|
||||
scope,
|
||||
su,
|
||||
errs)
|
||||
if err != nil {
|
||||
@ -95,6 +96,7 @@ func ProduceBackupCollections(
|
||||
bpc,
|
||||
creds,
|
||||
ac,
|
||||
scope,
|
||||
su,
|
||||
errs)
|
||||
if err != nil {
|
||||
|
||||
@ -37,10 +37,10 @@ type Operation int

//go:generate stringer -type=Operation
const (
OpUnknown Operation = iota
Backup
Restore
Export
OpUnknown Operation = 0
Backup Operation = 1
Restore Operation = 2
Export Operation = 3
)

// Constructor for ConnectorOperationStatus. If the counts do not agree, an error is returned.
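This switch from iota to explicit values, repeated across the enums in this change, pins the numbers that get persisted or logged so that inserting a new constant later cannot silently renumber the ones already written to a repository. A small hypothetical illustration of the failure mode being avoided, not code from this repo:

// With iota, inserting a constant renumbers everything after it, so an old
// record that stored 1 (backup) would decode as the newly inserted value.
const (
	opUnknownOld = iota // 0
	backupOld           // 1
	restoreOld          // 2
)

const (
	opUnknownNew = iota // 0
	maintenanceNew      // 1 <- inserted later
	backupNew           // 2: persisted 1s now read back as maintenanceNew
	restoreNew          // 3
)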
@ -1,6 +1,8 @@
|
||||
package model
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/kopia/kopia/repo/manifest"
|
||||
)
|
||||
|
||||
@ -22,12 +24,12 @@ func (id StableID) String() string {
|
||||
//
|
||||
//go:generate go run golang.org/x/tools/cmd/stringer -type=Schema
|
||||
const (
|
||||
UnknownSchema = Schema(iota)
|
||||
BackupOpSchema
|
||||
RestoreOpSchema
|
||||
BackupSchema
|
||||
BackupDetailsSchema
|
||||
RepositorySchema
|
||||
UnknownSchema Schema = 0
|
||||
BackupOpSchema Schema = 1
|
||||
RestoreOpSchema Schema = 2
|
||||
BackupSchema Schema = 3
|
||||
BackupDetailsSchema Schema = 4
|
||||
RepositorySchema Schema = 5
|
||||
)
|
||||
|
||||
// common tags for filtering
|
||||
@ -38,7 +40,7 @@ const (
|
||||
MergeBackup = "merge-backup"
|
||||
)
|
||||
|
||||
// Valid returns true if the ModelType value fits within the iota range.
|
||||
// Valid returns true if the ModelType value fits within the const range.
|
||||
func (mt Schema) Valid() bool {
|
||||
return mt > 0 && mt < RepositorySchema+1
|
||||
}
|
||||
@ -68,7 +70,8 @@ type BaseModel struct {
|
||||
// Tags associated with this model in the store to facilitate lookup. Tags in
|
||||
// the struct are not serialized directly into the stored model, but are part
|
||||
// of the metadata for the model.
|
||||
Tags map[string]string `json:"-"`
|
||||
Tags map[string]string `json:"-"`
|
||||
ModTime time.Time `json:"-"`
|
||||
}
|
||||
|
||||
func (bm *BaseModel) Base() *BaseModel {
|
||||
|
||||
@ -33,11 +33,11 @@ type OpStatus int

//go:generate stringer -type=OpStatus -linecomment
const (
Unknown OpStatus = iota // Status Unknown
InProgress // In Progress
Completed // Completed
Failed // Failed
NoData // No Data
Unknown OpStatus = 0 // Status Unknown
InProgress OpStatus = 1 // In Progress
Completed OpStatus = 2 // Completed
Failed OpStatus = 3 // Failed
NoData OpStatus = 4 // No Data
)

// --------------------------------------------------------------------------------
@ -223,11 +223,11 @@ func UnlicensedM365UserID(t *testing.T) string {

// Teams

// M365TeamsID returns a teamID string representing the m365TeamsID described
// M365TeamID returns a teamID string representing the m365TeamsID described
// by either the env var CORSO_M365_TEST_TEAM_ID, the corso_test.toml config
// file or the default value (in that order of priority). The default is a
// last-attempt fallback that will only work on alcion's testing org.
func M365TeamsID(t *testing.T) string {
func M365TeamID(t *testing.T) string {
cfg, err := ReadTestConfig()
require.NoError(t, err, "retrieving m365 team id from test configuration: %+v", clues.ToCore(err))
@ -10,8 +10,8 @@ type accountProvider int

//go:generate stringer -type=accountProvider -linecomment
const (
ProviderUnknown accountProvider = iota // Unknown Provider
ProviderM365 // M365
ProviderUnknown accountProvider = 0 // Unknown Provider
ProviderM365 accountProvider = 1 // M365
)

// storage parsing errors
@ -20,16 +20,17 @@ type ItemType int
|
||||
// Additionally, any itemType directly assigned a number should not be altered.
|
||||
// This applies to OneDriveItem and FolderItem
|
||||
const (
|
||||
UnknownType ItemType = iota // 0, global unknown value
|
||||
UnknownType ItemType = 0
|
||||
|
||||
// Exchange (00x)
|
||||
ExchangeContact
|
||||
ExchangeEvent
|
||||
ExchangeMail
|
||||
ExchangeContact ItemType = 1
|
||||
ExchangeEvent ItemType = 2
|
||||
ExchangeMail ItemType = 3
|
||||
|
||||
// SharePoint (10x)
|
||||
SharePointLibrary ItemType = iota + 97 // 100
|
||||
SharePointList // 101...
|
||||
SharePointPage
|
||||
SharePointLibrary ItemType = 101
|
||||
SharePointList ItemType = 102
|
||||
SharePointPage ItemType = 103
|
||||
|
||||
// OneDrive (20x)
|
||||
OneDriveItem ItemType = 205
|
||||
|
||||
@ -25,12 +25,10 @@ type Maintenance struct {
|
||||
|
||||
type MaintenanceType int
|
||||
|
||||
// Can't be reordered as we rely on iota for numbering.
|
||||
//
|
||||
//go:generate stringer -type=MaintenanceType -linecomment
|
||||
const (
|
||||
CompleteMaintenance MaintenanceType = iota // complete
|
||||
MetadataMaintenance // metadata
|
||||
CompleteMaintenance MaintenanceType = 0 // complete
|
||||
MetadataMaintenance MaintenanceType = 1 // metadata
|
||||
)
|
||||
|
||||
var StringToMaintenanceType = map[string]MaintenanceType{
|
||||
@ -40,16 +38,14 @@ var StringToMaintenanceType = map[string]MaintenanceType{
|
||||
|
||||
type MaintenanceSafety int
|
||||
|
||||
// Can't be reordered as we rely on iota for numbering.
|
||||
//
|
||||
//go:generate stringer -type=MaintenanceSafety -linecomment
|
||||
const (
|
||||
FullMaintenanceSafety MaintenanceSafety = iota
|
||||
FullMaintenanceSafety MaintenanceSafety = 0
|
||||
//nolint:lll
|
||||
// Use only if there's no other kopia instances accessing the repo and the
|
||||
// storage backend is strongly consistent.
|
||||
// https://github.com/kopia/kopia/blob/f9de453efc198b6e993af8922f953a7e5322dc5f/repo/maintenance/maintenance_safety.go#L42
|
||||
NoMaintenanceSafety
|
||||
NoMaintenanceSafety MaintenanceSafety = 1
|
||||
)
|
||||
|
||||
type RetentionMode int
|
||||
|
||||
@ -17,15 +17,16 @@ type CategoryType int
|
||||
|
||||
//go:generate stringer -type=CategoryType -linecomment
|
||||
const (
|
||||
UnknownCategory CategoryType = iota
|
||||
EmailCategory // email
|
||||
ContactsCategory // contacts
|
||||
EventsCategory // events
|
||||
FilesCategory // files
|
||||
ListsCategory // lists
|
||||
LibrariesCategory // libraries
|
||||
PagesCategory // pages
|
||||
DetailsCategory // details
|
||||
UnknownCategory CategoryType = 0
|
||||
EmailCategory CategoryType = 1 // email
|
||||
ContactsCategory CategoryType = 2 // contacts
|
||||
EventsCategory CategoryType = 3 // events
|
||||
FilesCategory CategoryType = 4 // files
|
||||
ListsCategory CategoryType = 5 // lists
|
||||
LibrariesCategory CategoryType = 6 // libraries
|
||||
PagesCategory CategoryType = 7 // pages
|
||||
DetailsCategory CategoryType = 8 // details
|
||||
ChannelMessagesCategory CategoryType = 9 // channel messages
|
||||
)
|
||||
|
||||
func ToCategoryType(category string) CategoryType {
|
||||
@ -48,6 +49,8 @@ func ToCategoryType(category string) CategoryType {
|
||||
return PagesCategory
|
||||
case strings.ToLower(DetailsCategory.String()):
|
||||
return DetailsCategory
|
||||
case strings.ToLower(ChannelMessagesCategory.String()):
|
||||
return ChannelMessagesCategory
|
||||
default:
|
||||
return UnknownCategory
|
||||
}
|
||||
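ToCategoryType matches on the lower-cased -linecomment strings, so the new category round-trips through its display name. A short hedged usage sketch, assuming the package is imported as path:

// Sketch: round-tripping the new category through its string form.
cat := path.ToCategoryType("channel messages")
fmt.Println(cat == path.ChannelMessagesCategory) // true
fmt.Println(cat.String())                        // "channel messages"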
@ -73,6 +76,12 @@ var serviceCategories = map[ServiceType]map[CategoryType]struct{}{
|
||||
ListsCategory: {},
|
||||
PagesCategory: {},
|
||||
},
|
||||
GroupsService: {
|
||||
ChannelMessagesCategory: {},
|
||||
},
|
||||
TeamsService: {
|
||||
ChannelMessagesCategory: {},
|
||||
},
|
||||
}
|
||||
|
||||
func validateServiceAndCategoryStrings(s, c string) (ServiceType, CategoryType, error) {
|
||||
|
||||
@ -17,11 +17,12 @@ func _() {
|
||||
_ = x[LibrariesCategory-6]
|
||||
_ = x[PagesCategory-7]
|
||||
_ = x[DetailsCategory-8]
|
||||
_ = x[ChannelMessagesCategory-9]
|
||||
}
|
||||
|
||||
const _CategoryType_name = "UnknownCategoryemailcontactseventsfileslistslibrariespagesdetails"
|
||||
const _CategoryType_name = "UnknownCategoryemailcontactseventsfileslistslibrariespagesdetailschannel messages"
|
||||
|
||||
var _CategoryType_index = [...]uint8{0, 15, 20, 28, 34, 39, 44, 53, 58, 65}
|
||||
var _CategoryType_index = [...]uint8{0, 15, 20, 28, 34, 39, 44, 53, 58, 65, 81}
|
||||
|
||||
func (i CategoryType) String() string {
|
||||
if i < 0 || i >= CategoryType(len(_CategoryType_index)-1) {
|
||||
|
||||
@ -22,15 +22,17 @@ type ServiceType int
|
||||
|
||||
//go:generate stringer -type=ServiceType -linecomment
|
||||
const (
|
||||
UnknownService ServiceType = iota
|
||||
ExchangeService // exchange
|
||||
OneDriveService // onedrive
|
||||
SharePointService // sharepoint
|
||||
ExchangeMetadataService // exchangeMetadata
|
||||
OneDriveMetadataService // onedriveMetadata
|
||||
SharePointMetadataService // sharepointMetadata
|
||||
GroupsService // groups
|
||||
GroupsMetadataService // groupsMetadata
|
||||
UnknownService ServiceType = 0
|
||||
ExchangeService ServiceType = 1 // exchange
|
||||
OneDriveService ServiceType = 2 // onedrive
|
||||
SharePointService ServiceType = 3 // sharepoint
|
||||
ExchangeMetadataService ServiceType = 4 // exchangeMetadata
|
||||
OneDriveMetadataService ServiceType = 5 // onedriveMetadata
|
||||
SharePointMetadataService ServiceType = 6 // sharepointMetadata
|
||||
GroupsService ServiceType = 7 // groups
|
||||
GroupsMetadataService ServiceType = 8 // groupsMetadata
|
||||
TeamsService ServiceType = 9 // teams
|
||||
TeamsMetadataService ServiceType = 10 // teamsMetadata
|
||||
)
|
||||
|
||||
func toServiceType(service string) ServiceType {
|
||||
@ -43,12 +45,20 @@ func toServiceType(service string) ServiceType {
|
||||
return OneDriveService
|
||||
case strings.ToLower(SharePointService.String()):
|
||||
return SharePointService
|
||||
case strings.ToLower(GroupsService.String()):
|
||||
return GroupsService
|
||||
case strings.ToLower(TeamsService.String()):
|
||||
return TeamsService
|
||||
case strings.ToLower(ExchangeMetadataService.String()):
|
||||
return ExchangeMetadataService
|
||||
case strings.ToLower(OneDriveMetadataService.String()):
|
||||
return OneDriveMetadataService
|
||||
case strings.ToLower(SharePointMetadataService.String()):
|
||||
return SharePointMetadataService
|
||||
case strings.ToLower(GroupsMetadataService.String()):
|
||||
return GroupsMetadataService
|
||||
case strings.ToLower(TeamsMetadataService.String()):
|
||||
return TeamsMetadataService
|
||||
default:
|
||||
return UnknownService
|
||||
}
|
||||
|
||||
@ -17,11 +17,13 @@ func _() {
|
||||
_ = x[SharePointMetadataService-6]
|
||||
_ = x[GroupsService-7]
|
||||
_ = x[GroupsMetadataService-8]
|
||||
_ = x[TeamsService-9]
|
||||
_ = x[TeamsMetadataService-10]
|
||||
}
|
||||
|
||||
const _ServiceType_name = "UnknownServiceexchangeonedrivesharepointexchangeMetadataonedriveMetadatasharepointMetadatagroupsgroupsMetadata"
|
||||
const _ServiceType_name = "UnknownServiceexchangeonedrivesharepointexchangeMetadataonedriveMetadatasharepointMetadatagroupsgroupsMetadatateamsteamsMetadata"
|
||||
|
||||
var _ServiceType_index = [...]uint8{0, 14, 22, 30, 40, 56, 72, 90, 96, 110}
|
||||
var _ServiceType_index = [...]uint8{0, 14, 22, 30, 40, 56, 72, 90, 96, 110, 115, 128}
|
||||
|
||||
func (i ServiceType) String() string {
|
||||
if i < 0 || i >= ServiceType(len(_ServiceType_index)-1) {
|
||||
|
||||
@ -9,6 +9,7 @@ import (
|
||||
"github.com/alcionai/corso/src/pkg/backup/details"
|
||||
"github.com/alcionai/corso/src/pkg/backup/identity"
|
||||
"github.com/alcionai/corso/src/pkg/fault"
|
||||
"github.com/alcionai/corso/src/pkg/filters"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
)
|
||||
|
||||
@ -214,38 +215,42 @@ func (s *groups) AllData() []GroupsScope {
|
||||
|
||||
scopes = append(
|
||||
scopes,
|
||||
makeScope[GroupsScope](GroupsTODOContainer, Any()))
|
||||
makeScope[GroupsScope](GroupsChannel, Any()))
|
||||
|
||||
return scopes
|
||||
}
|
||||
|
||||
// TODO produces one or more Groups TODO scopes.
|
||||
// Channel produces one or more SharePoint channel scopes, where the channel
|
||||
// matches upon a given channel by ID or Name. In order to ensure channel selection
|
||||
// this should always be embedded within the Filter() set; include(channel()) will
|
||||
// select all items in the channel without further filtering.
|
||||
// If any slice contains selectors.Any, that slice is reduced to [selectors.Any]
|
||||
// If any slice contains selectors.None, that slice is reduced to [selectors.None]
|
||||
// Any empty slice defaults to [selectors.None]
|
||||
func (s *groups) TODO(lists []string, opts ...option) []GroupsScope {
|
||||
// If any slice is empty, it defaults to [selectors.None]
|
||||
func (s *groups) Channel(channel string) []GroupsScope {
|
||||
return []GroupsScope{
|
||||
makeInfoScope[GroupsScope](
|
||||
GroupsChannel,
|
||||
GroupsInfoChannel,
|
||||
[]string{channel},
|
||||
filters.Equal),
|
||||
}
|
||||
}
|
||||
|
||||
// ChannelMessages produces one or more Groups channel message scopes.
// If any slice contains selectors.Any, that slice is reduced to [selectors.Any]
// If any slice contains selectors.None, that slice is reduced to [selectors.None]
// If any slice is empty, it defaults to [selectors.None]
func (s *groups) ChannelMessages(channels, messages []string, opts ...option) []GroupsScope {
var (
scopes = []GroupsScope{}
os = append([]option{pathComparator()}, opts...)
)

scopes = append(scopes, makeScope[GroupsScope](GroupsTODOContainer, lists, os...))

return scopes
}

// TODOItems produces one or more Groups TODO item scopes.
// If any slice contains selectors.Any, that slice is reduced to [selectors.Any]
// If any slice contains selectors.None, that slice is reduced to [selectors.None]
// If any slice is empty, it defaults to [selectors.None]
// options are only applied to the list scopes.
func (s *groups) TODOItems(lists, items []string, opts ...option) []GroupsScope {
scopes := []GroupsScope{}

scopes = append(
scopes,
makeScope[GroupsScope](GroupsTODOItem, items, defaultItemOptions(s.Cfg)...).
set(GroupsTODOContainer, lists, opts...))
makeScope[GroupsScope](GroupsChannelMessage, messages, os...).
set(GroupsChannel, channels, opts...))

return scopes
}
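For reference, a short usage sketch of the new Groups selector scopes added above. It assumes the usual Include/Filter helpers that the Channel doc comment refers to; the group ID and channel name are placeholders.

```go
package main

import (
	"fmt"

	"github.com/alcionai/corso/src/pkg/selectors"
)

func main() {
	// Build a backup selector for one group (placeholder ID).
	sel := selectors.NewGroupsBackup([]string{"group-id"})

	// Include every message in every channel of that group.
	sel.Include(sel.ChannelMessages(selectors.Any(), selectors.Any()))

	// Narrow the selection to a single channel by ID or name, per the
	// Channel doc comment (assumes a Filter helper on the selector).
	sel.Filter(sel.Channel("General"))

	fmt.Println(len(sel.Scopes()), "scopes configured")
}
```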
@ -270,21 +275,22 @@ const (
GroupsCategoryUnknown groupsCategory = ""

// types of data in Groups
GroupsGroup groupsCategory = "GroupsGroup"
GroupsTODOContainer groupsCategory = "GroupsTODOContainer"
GroupsTODOItem groupsCategory = "GroupsTODOItem"
GroupsGroup groupsCategory = "GroupsGroup"
GroupsChannel groupsCategory = "GroupsChannel"
GroupsChannelMessage groupsCategory = "GroupsChannelMessage"

// details.itemInfo comparables

// library drive selection
// channel drive selection
GroupsInfoSiteLibraryDrive groupsCategory = "GroupsInfoSiteLibraryDrive"
GroupsInfoChannel groupsCategory = "GroupsInfoChannel"
)

// groupsLeafProperties describes common metadata of the leaf categories
var groupsLeafProperties = map[categorizer]leafProperty{
GroupsTODOItem: { // the root category must be represented, even though it isn't a leaf
pathKeys: []categorizer{GroupsTODOContainer, GroupsTODOItem},
pathType: path.UnknownCategory,
GroupsChannelMessage: { // the root category must be represented, even though it isn't a leaf
pathKeys: []categorizer{GroupsChannel, GroupsChannelMessage},
pathType: path.ChannelMessagesCategory,
},
GroupsGroup: { // the root category must be represented, even though it isn't a leaf
pathKeys: []categorizer{GroupsGroup},
@ -303,8 +309,10 @@ func (c groupsCategory) String() string {
// Ex: ServiceUser.leafCat() => ServiceUser
func (c groupsCategory) leafCat() categorizer {
switch c {
case GroupsTODOContainer, GroupsInfoSiteLibraryDrive:
return GroupsTODOItem
// TODO: if channels ever contain more than one type of item,
// we'll need to fix this up.
case GroupsChannel, GroupsChannelMessage, GroupsInfoSiteLibraryDrive:
return GroupsChannelMessage
}

return c
@ -348,12 +356,12 @@ func (c groupsCategory) pathValues(
)

switch c {
case GroupsTODOContainer, GroupsTODOItem:
case GroupsChannel, GroupsChannelMessage:
if ent.Groups == nil {
return nil, clues.New("no Groups ItemInfo in details")
}

folderCat, itemCat = GroupsTODOContainer, GroupsTODOItem
folderCat, itemCat = GroupsChannel, GroupsChannelMessage
rFld = ent.Groups.ParentPath

default:
@ -451,7 +459,7 @@ func (s GroupsScope) set(cat groupsCategory, v []string, opts ...option) GroupsS
os := []option{}

switch cat {
case GroupsTODOContainer:
case GroupsChannel:
os = append(os, pathComparator())
}

@ -462,10 +470,10 @@ func (s GroupsScope) set(cat groupsCategory, v []string, opts ...option) GroupsS
func (s GroupsScope) setDefaults() {
switch s.Category() {
case GroupsGroup:
s[GroupsTODOContainer.String()] = passAny
s[GroupsTODOItem.String()] = passAny
case GroupsTODOContainer:
s[GroupsTODOItem.String()] = passAny
s[GroupsChannel.String()] = passAny
s[GroupsChannelMessage.String()] = passAny
case GroupsChannel:
s[GroupsChannelMessage.String()] = passAny
}
}

@ -485,7 +493,7 @@ func (s groups) Reduce(
deets,
s.Selector,
map[path.CategoryType]groupsCategory{
path.UnknownCategory: GroupsTODOItem,
path.ChannelMessagesCategory: GroupsChannelMessage,
},
errs)
}
@ -516,6 +524,9 @@ func (s GroupsScope) matchesInfo(dii details.ItemInfo) bool {
}

return matchesAny(s, GroupsInfoSiteLibraryDrive, ds)
case GroupsInfoChannel:
ds := Any()
return matchesAny(s, GroupsInfoChannel, ds)
}

return s.Matches(infoCat, i)

421
src/pkg/selectors/groups_test.go
Normal file
@ -0,0 +1,421 @@
package selectors

import (
"testing"

"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"

"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/path"
)

type GroupsSelectorSuite struct {
tester.Suite
}

func TestGroupsSelectorSuite(t *testing.T) {
suite.Run(t, &GroupsSelectorSuite{Suite: tester.NewUnitSuite(t)})
}

func (suite *GroupsSelectorSuite) TestNewGroupsBackup() {
t := suite.T()
ob := NewGroupsBackup(nil)
assert.Equal(t, ob.Service, ServiceGroups)
assert.NotZero(t, ob.Scopes())
}

func (suite *GroupsSelectorSuite) TestToGroupsBackup() {
t := suite.T()
ob := NewGroupsBackup(nil)
s := ob.Selector
ob, err := s.ToGroupsBackup()
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, ob.Service, ServiceGroups)
assert.NotZero(t, ob.Scopes())
}

func (suite *GroupsSelectorSuite) TestNewGroupsRestore() {
t := suite.T()
or := NewGroupsRestore(nil)
assert.Equal(t, or.Service, ServiceGroups)
assert.NotZero(t, or.Scopes())
}

func (suite *GroupsSelectorSuite) TestToGroupsRestore() {
t := suite.T()
eb := NewGroupsRestore(nil)
s := eb.Selector
or, err := s.ToGroupsRestore()
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, or.Service, ServiceGroups)
assert.NotZero(t, or.Scopes())
}

// TODO(rkeepers): implement
|
||||
// func (suite *GroupsSelectorSuite) TestGroupsRestore_Reduce() {
|
||||
// toRR := func(cat path.CategoryType, siteID string, folders []string, item string) string {
|
||||
// folderElems := make([]string, 0, len(folders))
|
||||
|
||||
// for _, f := range folders {
|
||||
// folderElems = append(folderElems, f+".d")
|
||||
// }
|
||||
|
||||
// return stubRepoRef(
|
||||
// path.GroupsService,
|
||||
// cat,
|
||||
// siteID,
|
||||
// strings.Join(folderElems, "/"),
|
||||
// item)
|
||||
// }
|
||||
|
||||
// var (
|
||||
// prefixElems = []string{
|
||||
// odConsts.DrivesPathDir,
|
||||
// "drive!id",
|
||||
// odConsts.RootPathDir,
|
||||
// }
|
||||
// itemElems1 = []string{"folderA", "folderB"}
|
||||
// itemElems2 = []string{"folderA", "folderC"}
|
||||
// itemElems3 = []string{"folderD", "folderE"}
|
||||
// pairAC = "folderA/folderC"
|
||||
// pairGH = "folderG/folderH"
|
||||
// item = toRR(
|
||||
// path.LibrariesCategory,
|
||||
// "sid",
|
||||
// append(slices.Clone(prefixElems), itemElems1...),
|
||||
// "item")
|
||||
// item2 = toRR(
|
||||
// path.LibrariesCategory,
|
||||
// "sid",
|
||||
// append(slices.Clone(prefixElems), itemElems2...),
|
||||
// "item2")
|
||||
// item3 = toRR(
|
||||
// path.LibrariesCategory,
|
||||
// "sid",
|
||||
// append(slices.Clone(prefixElems), itemElems3...),
|
||||
// "item3")
|
||||
// item4 = stubRepoRef(path.GroupsService, path.PagesCategory, "sid", pairGH, "item4")
|
||||
// item5 = stubRepoRef(path.GroupsService, path.PagesCategory, "sid", pairGH, "item5")
|
||||
// )
|
||||
|
||||
// deets := &details.Details{
|
||||
// DetailsModel: details.DetailsModel{
|
||||
// Entries: []details.Entry{
|
||||
// {
|
||||
// RepoRef: item,
|
||||
// ItemRef: "item",
|
||||
// LocationRef: strings.Join(append([]string{odConsts.RootPathDir}, itemElems1...), "/"),
|
||||
// ItemInfo: details.ItemInfo{
|
||||
// Groups: &details.GroupsInfo{
|
||||
// ItemType: details.GroupsLibrary,
|
||||
// ItemName: "itemName",
|
||||
// ParentPath: strings.Join(itemElems1, "/"),
|
||||
// },
|
||||
// },
|
||||
// },
|
||||
// {
|
||||
// RepoRef: item2,
|
||||
// LocationRef: strings.Join(append([]string{odConsts.RootPathDir}, itemElems2...), "/"),
|
||||
// // ItemRef intentionally blank to test fallback case
|
||||
// ItemInfo: details.ItemInfo{
|
||||
// Groups: &details.GroupsInfo{
|
||||
// ItemType: details.GroupsLibrary,
|
||||
// ItemName: "itemName2",
|
||||
// ParentPath: strings.Join(itemElems2, "/"),
|
||||
// },
|
||||
// },
|
||||
// },
|
||||
// {
|
||||
// RepoRef: item3,
|
||||
// ItemRef: "item3",
|
||||
// LocationRef: strings.Join(append([]string{odConsts.RootPathDir}, itemElems3...), "/"),
|
||||
// ItemInfo: details.ItemInfo{
|
||||
// Groups: &details.GroupsInfo{
|
||||
// ItemType: details.GroupsLibrary,
|
||||
// ItemName: "itemName3",
|
||||
// ParentPath: strings.Join(itemElems3, "/"),
|
||||
// },
|
||||
// },
|
||||
// },
|
||||
// {
|
||||
// RepoRef: item4,
|
||||
// LocationRef: pairGH,
|
||||
// ItemRef: "item4",
|
||||
// ItemInfo: details.ItemInfo{
|
||||
// Groups: &details.GroupsInfo{
|
||||
// ItemType: details.GroupsPage,
|
||||
// ItemName: "itemName4",
|
||||
// ParentPath: pairGH,
|
||||
// },
|
||||
// },
|
||||
// },
|
||||
// {
|
||||
// RepoRef: item5,
|
||||
// LocationRef: pairGH,
|
||||
// // ItemRef intentionally blank to test fallback case
|
||||
// ItemInfo: details.ItemInfo{
|
||||
// Groups: &details.GroupsInfo{
|
||||
// ItemType: details.GroupsPage,
|
||||
// ItemName: "itemName5",
|
||||
// ParentPath: pairGH,
|
||||
// },
|
||||
// },
|
||||
// },
|
||||
// },
|
||||
// },
|
||||
// }
|
||||
|
||||
// arr := func(s ...string) []string {
|
||||
// return s
|
||||
// }
|
||||
|
||||
// table := []struct {
|
||||
// name string
|
||||
// makeSelector func() *GroupsRestore
|
||||
// expect []string
|
||||
// cfg Config
|
||||
// }{
|
||||
// {
|
||||
// name: "all",
|
||||
// makeSelector: func() *GroupsRestore {
|
||||
// odr := NewGroupsRestore(Any())
|
||||
// odr.Include(odr.AllData())
|
||||
// return odr
|
||||
// },
|
||||
// expect: arr(item, item2, item3, item4, item5),
|
||||
// },
|
||||
// {
|
||||
// name: "only match item",
|
||||
// makeSelector: func() *GroupsRestore {
|
||||
// odr := NewGroupsRestore(Any())
|
||||
// odr.Include(odr.LibraryItems(Any(), []string{"item2"}))
|
||||
// return odr
|
||||
// },
|
||||
// expect: arr(item2),
|
||||
// },
|
||||
// {
|
||||
// name: "id doesn't match name",
|
||||
// makeSelector: func() *GroupsRestore {
|
||||
// odr := NewGroupsRestore(Any())
|
||||
// odr.Include(odr.LibraryItems(Any(), []string{"item2"}))
|
||||
// return odr
|
||||
// },
|
||||
// expect: []string{},
|
||||
// cfg: Config{OnlyMatchItemNames: true},
|
||||
// },
|
||||
// {
|
||||
// name: "only match item name",
|
||||
// makeSelector: func() *GroupsRestore {
|
||||
// odr := NewGroupsRestore(Any())
|
||||
// odr.Include(odr.LibraryItems(Any(), []string{"itemName2"}))
|
||||
// return odr
|
||||
// },
|
||||
// expect: arr(item2),
|
||||
// cfg: Config{OnlyMatchItemNames: true},
|
||||
// },
|
||||
// {
|
||||
// name: "name doesn't match",
|
||||
// makeSelector: func() *GroupsRestore {
|
||||
// odr := NewGroupsRestore(Any())
|
||||
// odr.Include(odr.LibraryItems(Any(), []string{"itemName2"}))
|
||||
// return odr
|
||||
// },
|
||||
// expect: []string{},
|
||||
// },
|
||||
// {
|
||||
// name: "only match folder",
|
||||
// makeSelector: func() *GroupsRestore {
|
||||
// odr := NewGroupsRestore([]string{"sid"})
|
||||
// odr.Include(odr.LibraryFolders([]string{"folderA/folderB", pairAC}))
|
||||
// return odr
|
||||
// },
|
||||
// expect: arr(item, item2),
|
||||
// },
|
||||
// {
|
||||
// name: "pages match folder",
|
||||
// makeSelector: func() *GroupsRestore {
|
||||
// odr := NewGroupsRestore([]string{"sid"})
|
||||
// odr.Include(odr.Pages([]string{pairGH, pairAC}))
|
||||
// return odr
|
||||
// },
|
||||
// expect: arr(item4, item5),
|
||||
// },
|
||||
// }
|
||||
// for _, test := range table {
|
||||
// suite.Run(test.name, func() {
|
||||
// t := suite.T()
|
||||
|
||||
// ctx, flush := tester.NewContext(t)
|
||||
// defer flush()
|
||||
|
||||
// sel := test.makeSelector()
|
||||
// sel.Configure(test.cfg)
|
||||
// results := sel.Reduce(ctx, deets, fault.New(true))
|
||||
// paths := results.Paths()
|
||||
// assert.Equal(t, test.expect, paths)
|
||||
// })
|
||||
// }
|
||||
// }
|
||||
|
||||
func (suite *GroupsSelectorSuite) TestGroupsCategory_PathValues() {
|
||||
var (
|
||||
itemName = "item"
|
||||
itemID = "item-id"
|
||||
shortRef = "short"
|
||||
elems = []string{itemID}
|
||||
)
|
||||
|
||||
table := []struct {
|
||||
name string
|
||||
sc groupsCategory
|
||||
pathElems []string
|
||||
locRef string
|
||||
parentPath string
|
||||
expected map[categorizer][]string
|
||||
cfg Config
|
||||
}{
|
||||
{
|
||||
name: "Groups Channel Messages",
|
||||
sc: GroupsChannelMessage,
|
||||
pathElems: elems,
|
||||
locRef: "",
|
||||
expected: map[categorizer][]string{
|
||||
GroupsChannel: {""},
|
||||
GroupsChannelMessage: {itemID, shortRef},
|
||||
},
|
||||
cfg: Config{},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
|
||||
itemPath, err := path.Build(
|
||||
"tenant",
|
||||
"site",
|
||||
path.GroupsService,
|
||||
test.sc.PathType(),
|
||||
true,
|
||||
test.pathElems...)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
ent := details.Entry{
|
||||
RepoRef: itemPath.String(),
|
||||
ShortRef: shortRef,
|
||||
ItemRef: itemPath.Item(),
|
||||
LocationRef: test.locRef,
|
||||
ItemInfo: details.ItemInfo{
|
||||
Groups: &details.GroupsInfo{
|
||||
ItemName: itemName,
|
||||
ParentPath: test.parentPath,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
pv, err := test.sc.pathValues(itemPath, ent, test.cfg)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, test.expected, pv)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// TODO(abin): implement
|
||||
// func (suite *GroupsSelectorSuite) TestGroupsScope_MatchesInfo() {
|
||||
// var (
|
||||
// sel = NewGroupsRestore(Any())
|
||||
// host = "www.website.com"
|
||||
// pth = "/foo"
|
||||
// url = host + pth
|
||||
// epoch = time.Time{}
|
||||
// now = time.Now()
|
||||
// modification = now.Add(15 * time.Minute)
|
||||
// future = now.Add(45 * time.Minute)
|
||||
// )
|
||||
|
||||
// table := []struct {
|
||||
// name string
|
||||
// infoURL string
|
||||
// scope []GroupsScope
|
||||
// expect assert.BoolAssertionFunc
|
||||
// }{
|
||||
// {"host match", host, sel.WebURL([]string{host}), assert.True},
|
||||
// {"url match", url, sel.WebURL([]string{url}), assert.True},
|
||||
// {"host suffixes host", host, sel.WebURL([]string{host}, SuffixMatch()), assert.True},
|
||||
// {"url does not suffix host", url, sel.WebURL([]string{host}, SuffixMatch()), assert.False},
|
||||
// {"url has path suffix", url, sel.WebURL([]string{pth}, SuffixMatch()), assert.True},
|
||||
// {"host does not contain substring", host, sel.WebURL([]string{"website"}), assert.False},
|
||||
// {"url does not suffix substring", url, sel.WebURL([]string{"oo"}, SuffixMatch()), assert.False},
|
||||
// {"host mismatch", host, sel.WebURL([]string{"www.google.com"}), assert.False},
|
||||
// {"file create after the epoch", host, sel.CreatedAfter(dttm.Format(epoch)), assert.True},
|
||||
// {"file create after now", host, sel.CreatedAfter(dttm.Format(now)), assert.False},
|
||||
// {"file create after later", url, sel.CreatedAfter(dttm.Format(future)), assert.False},
|
||||
// {"file create before future", host, sel.CreatedBefore(dttm.Format(future)), assert.True},
|
||||
// {"file create before now", host, sel.CreatedBefore(dttm.Format(now)), assert.False},
|
||||
// {"file create before modification", host, sel.CreatedBefore(dttm.Format(modification)), assert.True},
|
||||
// {"file create before epoch", host, sel.CreatedBefore(dttm.Format(now)), assert.False},
|
||||
// {"file modified after the epoch", host, sel.ModifiedAfter(dttm.Format(epoch)), assert.True},
|
||||
// {"file modified after now", host, sel.ModifiedAfter(dttm.Format(now)), assert.True},
|
||||
// {"file modified after later", host, sel.ModifiedAfter(dttm.Format(future)), assert.False},
|
||||
// {"file modified before future", host, sel.ModifiedBefore(dttm.Format(future)), assert.True},
|
||||
// {"file modified before now", host, sel.ModifiedBefore(dttm.Format(now)), assert.False},
|
||||
// {"file modified before epoch", host, sel.ModifiedBefore(dttm.Format(now)), assert.False},
|
||||
// {"in library", host, sel.Library("included-library"), assert.True},
|
||||
// {"not in library", host, sel.Library("not-included-library"), assert.False},
|
||||
// {"library id", host, sel.Library("1234"), assert.True},
|
||||
// {"not library id", host, sel.Library("abcd"), assert.False},
|
||||
// }
|
||||
// for _, test := range table {
|
||||
// suite.Run(test.name, func() {
|
||||
// t := suite.T()
|
||||
|
||||
// itemInfo := details.ItemInfo{
|
||||
// Groups: &details.GroupsInfo{
|
||||
// ItemType: details.GroupsPage,
|
||||
// WebURL: test.infoURL,
|
||||
// Created: now,
|
||||
// Modified: modification,
|
||||
// DriveName: "included-library",
|
||||
// DriveID: "1234",
|
||||
// },
|
||||
// }
|
||||
|
||||
// scopes := setScopesToDefault(test.scope)
|
||||
// for _, scope := range scopes {
|
||||
// test.expect(t, scope.matchesInfo(itemInfo))
|
||||
// }
|
||||
// })
|
||||
// }
|
||||
// }
|
||||
|
||||
func (suite *GroupsSelectorSuite) TestCategory_PathType() {
|
||||
table := []struct {
|
||||
cat groupsCategory
|
||||
pathType path.CategoryType
|
||||
}{
|
||||
{
|
||||
cat: GroupsCategoryUnknown,
|
||||
pathType: path.UnknownCategory,
|
||||
},
|
||||
{
|
||||
cat: GroupsChannel,
|
||||
pathType: path.ChannelMessagesCategory,
|
||||
},
|
||||
{
|
||||
cat: GroupsChannelMessage,
|
||||
pathType: path.ChannelMessagesCategory,
|
||||
},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.Run(test.cat.String(), func() {
|
||||
assert.Equal(
|
||||
suite.T(),
|
||||
test.pathType.String(),
|
||||
test.cat.PathType().String())
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -20,11 +20,11 @@ type service int
|
||||
|
||||
//go:generate stringer -type=service -linecomment
|
||||
const (
|
||||
ServiceUnknown service = iota // Unknown Service
|
||||
ServiceExchange // Exchange
|
||||
ServiceOneDrive // OneDrive
|
||||
ServiceSharePoint // SharePoint
|
||||
ServiceGroups // Groups
|
||||
ServiceUnknown service = 0 // Unknown Service
|
||||
ServiceExchange service = 1 // Exchange
|
||||
ServiceOneDrive service = 2 // OneDrive
|
||||
ServiceSharePoint service = 3 // SharePoint
|
||||
ServiceGroups service = 4 // Groups
|
||||
)
|
||||
|
||||
var serviceToPathType = map[service]path.ServiceType{
|
||||
|
||||
@ -49,24 +49,6 @@ func (c Groups) GetAll(
|
||||
return getGroups(ctx, errs, service)
|
||||
}
|
||||
|
||||
// GetTeams retrieves all Teams.
|
||||
func (c Groups) GetTeams(
|
||||
ctx context.Context,
|
||||
errs *fault.Bus,
|
||||
) ([]models.Groupable, error) {
|
||||
service, err := c.Service()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
groups, err := getGroups(ctx, errs, service)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return OnlyTeams(ctx, groups), nil
|
||||
}
|
||||
|
||||
// getGroups retrieves all groups.
|
||||
func getGroups(
|
||||
ctx context.Context,
|
||||
@ -113,31 +95,6 @@ func getGroups(
|
||||
return groups, el.Failure()
|
||||
}
|
||||
|
||||
func OnlyTeams(ctx context.Context, groups []models.Groupable) []models.Groupable {
|
||||
log := logger.Ctx(ctx)
|
||||
|
||||
var teams []models.Groupable
|
||||
|
||||
for _, g := range groups {
|
||||
if g.GetAdditionalData()[ResourceProvisioningOptions] != nil {
|
||||
val, _ := tform.AnyValueToT[[]any](ResourceProvisioningOptions, g.GetAdditionalData())
|
||||
for _, v := range val {
|
||||
s, err := str.AnyToString(v)
|
||||
if err != nil {
|
||||
log.Debug("could not be converted to string value: ", ResourceProvisioningOptions)
|
||||
continue
|
||||
}
|
||||
|
||||
if s == teamsAdditionalDataLabel {
|
||||
teams = append(teams, g)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return teams
|
||||
}
|
||||
|
||||
// GetByID retrieves a group by its ID.
|
||||
func (c Groups) GetByID(
|
||||
ctx context.Context,
|
||||
@ -158,34 +115,6 @@ func (c Groups) GetByID(
|
||||
return resp, graph.Stack(ctx, err).OrNil()
|
||||
}
|
||||
|
||||
// GetTeamByID retrieves group by groupID.
|
||||
func (c Groups) GetTeamByID(
|
||||
ctx context.Context,
|
||||
identifier string,
|
||||
) (models.Groupable, error) {
|
||||
service, err := c.Service()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
resp, err := service.Client().Groups().ByGroupId(identifier).Get(ctx, nil)
|
||||
if err != nil {
|
||||
err := graph.Wrap(ctx, err, "getting group by id")
|
||||
|
||||
return nil, err
|
||||
}
|
||||
|
||||
groups := []models.Groupable{resp}
|
||||
|
||||
if len(OnlyTeams(ctx, groups)) == 0 {
|
||||
err := clues.New("given teamID is not related to any team")
|
||||
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return resp, graph.Stack(ctx, err).OrNil()
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
@ -203,3 +132,38 @@ func ValidateGroup(item models.Groupable) error {
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func OnlyTeams(ctx context.Context, groups []models.Groupable) []models.Groupable {
|
||||
var teams []models.Groupable
|
||||
|
||||
for _, g := range groups {
|
||||
if IsTeam(ctx, g) {
|
||||
teams = append(teams, g)
|
||||
}
|
||||
}
|
||||
|
||||
return teams
|
||||
}
|
||||
|
||||
func IsTeam(ctx context.Context, mg models.Groupable) bool {
|
||||
log := logger.Ctx(ctx)
|
||||
|
||||
if mg.GetAdditionalData()[ResourceProvisioningOptions] == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
val, _ := tform.AnyValueToT[[]any](ResourceProvisioningOptions, mg.GetAdditionalData())
|
||||
for _, v := range val {
|
||||
s, err := str.AnyToString(v)
|
||||
if err != nil {
|
||||
log.Debug("could not be converted to string value: ", ResourceProvisioningOptions)
|
||||
continue
|
||||
}
|
||||
|
||||
if s == teamsAdditionalDataLabel {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
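A sketch of how callers might use the relocated helpers: fetch every group through the Groups client, then keep only those provisioned as Teams via OnlyTeams. The wrapper function and its wiring are illustrative; only GetAll, OnlyTeams, ptr.Val, and fault.New come from the code shown in this diff.

```go
package example

import (
	"context"

	"github.com/alcionai/corso/src/internal/common/ptr"
	"github.com/alcionai/corso/src/pkg/fault"
	"github.com/alcionai/corso/src/pkg/services/m365/api"
)

// listTeamNames pulls all groups and returns the display names of those
// that qualify as Teams, per the IsTeam check above.
func listTeamNames(ctx context.Context, gc api.Groups) ([]string, error) {
	groups, err := gc.GetAll(ctx, fault.New(true))
	if err != nil {
		return nil, err
	}

	names := make([]string, 0, len(groups))

	for _, g := range api.OnlyTeams(ctx, groups) {
		names = append(names, ptr.Val(g.GetDisplayName()))
	}

	return names, nil
}
```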
|
||||
|
||||
@ -97,7 +97,7 @@ func (suite *GroupsIntgSuite) SetupSuite() {
|
||||
suite.its = newIntegrationTesterSetup(suite.T())
|
||||
}
|
||||
|
||||
func (suite *GroupsIntgSuite) TestGetAllGroups() {
|
||||
func (suite *GroupsIntgSuite) TestGetAll() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
@ -107,100 +107,15 @@ func (suite *GroupsIntgSuite) TestGetAllGroups() {
|
||||
Groups().
|
||||
GetAll(ctx, fault.New(true))
|
||||
require.NoError(t, err)
|
||||
require.NotZero(t, len(groups), "must have at least one group")
|
||||
}
|
||||
|
||||
func (suite *GroupsIntgSuite) TestGetAllTeams() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
teams, err := suite.its.ac.
|
||||
Groups().
|
||||
GetTeams(ctx, fault.New(true))
|
||||
require.NoError(t, err)
|
||||
require.NotZero(t, len(teams), "must have at least one team")
|
||||
|
||||
groups, err := suite.its.ac.
|
||||
Groups().
|
||||
GetAll(ctx, fault.New(true))
|
||||
require.NoError(t, err)
|
||||
require.NotZero(t, len(groups), "must have at least one group")
|
||||
|
||||
var isTeam bool
|
||||
|
||||
if len(groups) > len(teams) {
|
||||
isTeam = true
|
||||
}
|
||||
|
||||
assert.True(t, isTeam, "must only return teams")
|
||||
}
|
||||
|
||||
func (suite *GroupsIntgSuite) TestTeams_GetByID() {
|
||||
var (
|
||||
t = suite.T()
|
||||
teamID = tconfig.M365TeamsID(t)
|
||||
)
|
||||
|
||||
teamsAPI := suite.its.ac.Groups()
|
||||
|
||||
table := []struct {
|
||||
name string
|
||||
id string
|
||||
expectErr func(*testing.T, error)
|
||||
}{
|
||||
{
|
||||
name: "3 part id",
|
||||
id: teamID,
|
||||
expectErr: func(t *testing.T, err error) {
|
||||
assert.NoError(t, err, clues.ToCore(err))
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "malformed id",
|
||||
id: uuid.NewString(),
|
||||
expectErr: func(t *testing.T, err error) {
|
||||
assert.Error(t, err, clues.ToCore(err))
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "random id",
|
||||
id: uuid.NewString() + "," + uuid.NewString(),
|
||||
expectErr: func(t *testing.T, err error) {
|
||||
assert.Error(t, err, clues.ToCore(err))
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "malformed url",
|
||||
id: "barunihlda",
|
||||
expectErr: func(t *testing.T, err error) {
|
||||
assert.Error(t, err, clues.ToCore(err))
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
_, err := teamsAPI.GetTeamByID(ctx, test.id)
|
||||
test.expectErr(t, err)
|
||||
})
|
||||
}
|
||||
require.NotZero(t, len(groups), "must find at least one group")
|
||||
}
|
||||
|
||||
func (suite *GroupsIntgSuite) TestGroups_GetByID() {
|
||||
var (
|
||||
t = suite.T()
|
||||
groupID = tconfig.M365GroupID(t)
|
||||
groupID = suite.its.groupID
|
||||
groupsAPI = suite.its.ac.Groups()
|
||||
)
|
||||
|
||||
groupsAPI := suite.its.ac.Groups()
|
||||
|
||||
table := []struct {
|
||||
name string
|
||||
id string
|
||||
|
||||
@ -83,7 +83,7 @@ type intgTesterSetup struct {
|
||||
siteID string
|
||||
siteDriveID string
|
||||
siteDriveRootFolderID string
|
||||
teamID string
|
||||
groupID string
|
||||
}
|
||||
|
||||
func newIntegrationTesterSetup(t *testing.T) intgTesterSetup {
|
||||
@ -132,13 +132,16 @@ func newIntegrationTesterSetup(t *testing.T) intgTesterSetup {
|
||||
|
||||
its.siteDriveRootFolderID = ptr.Val(siteDriveRootFolder.GetId())
|
||||
|
||||
// teams
|
||||
its.teamID = tconfig.M365TeamsID(t)
|
||||
// group
|
||||
|
||||
team, err := its.ac.Groups().GetTeamByID(ctx, its.teamID)
|
||||
// use of the TeamID is intentional here, so that we are assured
|
||||
// the group has full usage of the teams api.
|
||||
its.groupID = tconfig.M365TeamID(t)
|
||||
|
||||
team, err := its.ac.Groups().GetByID(ctx, its.groupID)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
its.teamID = ptr.Val(team.GetId())
|
||||
its.groupID = ptr.Val(team.GetId())
|
||||
|
||||
return its
|
||||
}
|
||||
|
||||
97
src/pkg/services/m365/groups.go
Normal file
@ -0,0 +1,97 @@
|
||||
package m365
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/models"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||
"github.com/alcionai/corso/src/pkg/account"
|
||||
"github.com/alcionai/corso/src/pkg/fault"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
)
|
||||
|
||||
// Group is the minimal information required to identify and display a M365 Group.
|
||||
type Group struct {
|
||||
ID string
|
||||
|
||||
// DisplayName is the human-readable name of the group. Normally the plaintext name that the
|
||||
// user provided when they created the group, or the updated name if it was changed.
|
||||
// Ex: displayName: "My Group"
|
||||
DisplayName string
|
||||
|
||||
// IsTeam is true if the group qualifies as a Teams resource, and is able to backup and restore
|
||||
// teams data.
|
||||
IsTeam bool
|
||||
}
|
||||
|
||||
// GroupsCompat returns a list of groups in the specified M365 tenant.
|
||||
func GroupsCompat(ctx context.Context, acct account.Account) ([]*Group, error) {
|
||||
errs := fault.New(true)
|
||||
|
||||
us, err := Groups(ctx, acct, errs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return us, errs.Failure()
|
||||
}
|
||||
|
||||
// Groups returns a list of groups in the specified M365 tenant
|
||||
func Groups(
|
||||
ctx context.Context,
|
||||
acct account.Account,
|
||||
errs *fault.Bus,
|
||||
) ([]*Group, error) {
|
||||
ac, err := makeAC(ctx, acct, path.GroupsService)
|
||||
if err != nil {
|
||||
return nil, clues.Stack(err).WithClues(ctx)
|
||||
}
|
||||
|
||||
return getAllGroups(ctx, ac.Groups())
|
||||
}
|
||||
|
||||
func getAllGroups(
|
||||
ctx context.Context,
|
||||
ga getAller[models.Groupable],
|
||||
) ([]*Group, error) {
|
||||
groups, err := ga.GetAll(ctx, fault.New(true))
|
||||
if err != nil {
|
||||
return nil, clues.Wrap(err, "retrieving groups")
|
||||
}
|
||||
|
||||
ret := make([]*Group, 0, len(groups))
|
||||
|
||||
for _, g := range groups {
|
||||
t, err := parseGroup(ctx, g)
|
||||
if err != nil {
|
||||
return nil, clues.Wrap(err, "parsing groups")
|
||||
}
|
||||
|
||||
ret = append(ret, t)
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// parseGroup extracts the information we care about from `models.Groupable`
|
||||
func parseGroup(ctx context.Context, mg models.Groupable) (*Group, error) {
|
||||
if mg.GetDisplayName() == nil {
|
||||
return nil, clues.New("group missing display name").
|
||||
With("group_id", ptr.Val(mg.GetId()))
|
||||
}
|
||||
|
||||
u := &Group{
|
||||
ID: ptr.Val(mg.GetId()),
|
||||
DisplayName: ptr.Val(mg.GetDisplayName()),
|
||||
IsTeam: api.IsTeam(ctx, mg),
|
||||
}
|
||||
|
||||
return u, nil
|
||||
}
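A hypothetical caller of the new listing API, assuming an already-configured account.Account (e.g. built via account.NewAccount with an M365Config); the helper name and output formatting are illustrative only.

```go
package example

import (
	"context"
	"fmt"

	"github.com/alcionai/corso/src/pkg/account"
	"github.com/alcionai/corso/src/pkg/services/m365"
)

// printGroups lists every group in the tenant and whether it is a Team.
func printGroups(ctx context.Context, acct account.Account) error {
	groups, err := m365.GroupsCompat(ctx, acct)
	if err != nil {
		return err
	}

	for _, g := range groups {
		fmt.Printf("%s (%s) isTeam=%v\n", g.DisplayName, g.ID, g.IsTeam)
	}

	return nil
}
```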
|
||||
108
src/pkg/services/m365/groups_test.go
Normal file
@ -0,0 +1,108 @@
|
||||
package m365_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/m365/graph"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/internal/tester/tconfig"
|
||||
"github.com/alcionai/corso/src/pkg/account"
|
||||
"github.com/alcionai/corso/src/pkg/credentials"
|
||||
"github.com/alcionai/corso/src/pkg/fault"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365"
|
||||
)
|
||||
|
||||
type GroupsIntgSuite struct {
|
||||
tester.Suite
|
||||
acct account.Account
|
||||
}
|
||||
|
||||
func TestGroupsIntgSuite(t *testing.T) {
|
||||
suite.Run(t, &GroupsIntgSuite{
|
||||
Suite: tester.NewIntegrationSuite(
|
||||
t,
|
||||
[][]string{tconfig.M365AcctCredEnvs}),
|
||||
})
|
||||
}
|
||||
|
||||
func (suite *GroupsIntgSuite) SetupSuite() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
graph.InitializeConcurrencyLimiter(ctx, true, 4)
|
||||
|
||||
suite.acct = tconfig.NewM365Account(t)
|
||||
}
|
||||
|
||||
func (suite *GroupsIntgSuite) TestGroups() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
graph.InitializeConcurrencyLimiter(ctx, true, 4)
|
||||
|
||||
groups, err := m365.Groups(ctx, suite.acct, fault.New(true))
|
||||
assert.NoError(t, err, clues.ToCore(err))
|
||||
assert.NotEmpty(t, groups)
|
||||
|
||||
for _, group := range groups {
|
||||
suite.Run("group_"+group.ID, func() {
|
||||
t := suite.T()
|
||||
|
||||
assert.NotEmpty(t, group.ID)
|
||||
assert.NotEmpty(t, group.DisplayName)
|
||||
|
||||
// at least one known group should be a team
|
||||
if group.ID == tconfig.M365TeamID(t) {
|
||||
assert.True(t, group.IsTeam)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *GroupsIntgSuite) TestGroups_InvalidCredentials() {
|
||||
table := []struct {
|
||||
name string
|
||||
acct func(t *testing.T) account.Account
|
||||
}{
|
||||
{
|
||||
name: "Invalid Credentials",
|
||||
acct: func(t *testing.T) account.Account {
|
||||
a, err := account.NewAccount(
|
||||
account.ProviderM365,
|
||||
account.M365Config{
|
||||
M365: credentials.M365{
|
||||
AzureClientID: "Test",
|
||||
AzureClientSecret: "without",
|
||||
},
|
||||
AzureTenantID: "data",
|
||||
},
|
||||
)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
return a
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
groups, err := m365.Groups(ctx, test.acct(t), fault.New(true))
|
||||
assert.Empty(t, groups, "returned no groups")
|
||||
assert.NotNil(t, err)
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -24,6 +24,10 @@ type getDefaultDriver interface {
|
||||
GetDefaultDrive(ctx context.Context, userID string) (models.Driveable, error)
|
||||
}
|
||||
|
||||
type getAller[T any] interface {
|
||||
GetAll(ctx context.Context, errs *fault.Bus) ([]T, error)
|
||||
}
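One payoff of the generic getAller[T] interface is that test doubles no longer need to be written per model type. The sketch below is illustrative (the concrete mockGASites later in this diff serves the same purpose for sites only).

```go
package m365

import (
	"context"

	"github.com/alcionai/corso/src/pkg/fault"
)

// mockGetAller is an illustrative generic test double. With getAller[T] in
// place, a single mock can stand in for getAllSites, getAllGroups, or any
// other GetAll-style dependency.
type mockGetAller[T any] struct {
	response []T
	err      error
}

func (m mockGetAller[T]) GetAll(context.Context, *fault.Bus) ([]T, error) {
	return m.response, m.err
}

// e.g. in a test: getAllGroups(ctx, mockGetAller[models.Groupable]{response: stubGroups})
```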
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Users
|
||||
// ---------------------------------------------------------------------------
|
||||
@ -253,12 +257,11 @@ func Sites(ctx context.Context, acct account.Account, errs *fault.Bus) ([]*Site,
|
||||
return getAllSites(ctx, ac.Sites())
|
||||
}
|
||||
|
||||
type getAllSiteser interface {
|
||||
GetAll(ctx context.Context, errs *fault.Bus) ([]models.Siteable, error)
|
||||
}
|
||||
|
||||
func getAllSites(ctx context.Context, gas getAllSiteser) ([]*Site, error) {
|
||||
sites, err := gas.GetAll(ctx, fault.New(true))
|
||||
func getAllSites(
|
||||
ctx context.Context,
|
||||
ga getAller[models.Siteable],
|
||||
) ([]*Site, error) {
|
||||
sites, err := ga.GetAll(ctx, fault.New(true))
|
||||
if err != nil {
|
||||
if clues.HasLabel(err, graph.LabelsNoSharePointLicense) {
|
||||
return nil, clues.Stack(graph.ErrServiceNotEnabled, err)
|
||||
|
||||
@ -276,25 +276,25 @@ func (suite *m365UnitSuite) TestCheckUserHasDrives() {
|
||||
}
|
||||
}
|
||||
|
||||
type mockGAS struct {
|
||||
type mockGASites struct {
|
||||
response []models.Siteable
|
||||
err error
|
||||
}
|
||||
|
||||
func (m mockGAS) GetAll(context.Context, *fault.Bus) ([]models.Siteable, error) {
|
||||
func (m mockGASites) GetAll(context.Context, *fault.Bus) ([]models.Siteable, error) {
|
||||
return m.response, m.err
|
||||
}
|
||||
|
||||
func (suite *m365UnitSuite) TestGetAllSites() {
|
||||
table := []struct {
|
||||
name string
|
||||
mock func(context.Context) getAllSiteser
|
||||
mock func(context.Context) getAller[models.Siteable]
|
||||
expectErr func(*testing.T, error)
|
||||
}{
|
||||
{
|
||||
name: "ok",
|
||||
mock: func(ctx context.Context) getAllSiteser {
|
||||
return mockGAS{[]models.Siteable{}, nil}
|
||||
mock: func(ctx context.Context) getAller[models.Siteable] {
|
||||
return mockGASites{[]models.Siteable{}, nil}
|
||||
},
|
||||
expectErr: func(t *testing.T, err error) {
|
||||
assert.NoError(t, err, clues.ToCore(err))
|
||||
@ -302,14 +302,14 @@ func (suite *m365UnitSuite) TestGetAllSites() {
|
||||
},
|
||||
{
|
||||
name: "no sharepoint license",
|
||||
mock: func(ctx context.Context) getAllSiteser {
|
||||
mock: func(ctx context.Context) getAller[models.Siteable] {
|
||||
odErr := odataerrors.NewODataError()
|
||||
merr := odataerrors.NewMainError()
|
||||
merr.SetCode(ptr.To("code"))
|
||||
merr.SetMessage(ptr.To(string(graph.NoSPLicense)))
|
||||
odErr.SetErrorEscaped(merr)
|
||||
|
||||
return mockGAS{nil, graph.Stack(ctx, odErr)}
|
||||
return mockGASites{nil, graph.Stack(ctx, odErr)}
|
||||
},
|
||||
expectErr: func(t *testing.T, err error) {
|
||||
assert.ErrorIs(t, err, graph.ErrServiceNotEnabled, clues.ToCore(err))
|
||||
@ -317,14 +317,14 @@ func (suite *m365UnitSuite) TestGetAllSites() {
|
||||
},
|
||||
{
|
||||
name: "arbitrary error",
|
||||
mock: func(ctx context.Context) getAllSiteser {
|
||||
mock: func(ctx context.Context) getAller[models.Siteable] {
|
||||
odErr := odataerrors.NewODataError()
|
||||
merr := odataerrors.NewMainError()
|
||||
merr.SetCode(ptr.To("code"))
|
||||
merr.SetMessage(ptr.To("message"))
|
||||
odErr.SetErrorEscaped(merr)
|
||||
|
||||
return mockGAS{nil, graph.Stack(ctx, odErr)}
|
||||
return mockGASites{nil, graph.Stack(ctx, odErr)}
|
||||
},
|
||||
expectErr: func(t *testing.T, err error) {
|
||||
assert.Error(t, err, clues.ToCore(err))
|
||||
|
||||
@ -12,8 +12,8 @@ type storageProvider int
|
||||
|
||||
//go:generate stringer -type=storageProvider -linecomment
|
||||
const (
|
||||
ProviderUnknown storageProvider = iota // Unknown Provider
|
||||
ProviderS3 // S3
|
||||
ProviderUnknown storageProvider = 0 // Unknown Provider
|
||||
ProviderS3 storageProvider = 1 // S3
|
||||
)
|
||||
|
||||
// storage parsing errors
|
||||
|
||||
14
website/package-lock.json
generated
@ -24,7 +24,7 @@
|
||||
"prism-react-renderer": "^1.3.5",
|
||||
"react": "^17.0.2",
|
||||
"react-dom": "^17.0.2",
|
||||
"sass": "^1.65.1",
|
||||
"sass": "^1.66.1",
|
||||
"tiny-slider": "^2.9.4",
|
||||
"tw-elements": "^1.0.0-alpha13",
|
||||
"wow.js": "^1.2.2"
|
||||
@ -12639,9 +12639,9 @@
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/sass": {
|
||||
"version": "1.65.1",
|
||||
"resolved": "https://registry.npmjs.org/sass/-/sass-1.65.1.tgz",
|
||||
"integrity": "sha512-9DINwtHmA41SEd36eVPQ9BJKpn7eKDQmUHmpI0y5Zv2Rcorrh0zS+cFrt050hdNbmmCNKTW3hV5mWfuegNRsEA==",
|
||||
"version": "1.66.1",
|
||||
"resolved": "https://registry.npmjs.org/sass/-/sass-1.66.1.tgz",
|
||||
"integrity": "sha512-50c+zTsZOJVgFfTgwwEzkjA3/QACgdNsKueWPyAR0mRINIvLAStVQBbPg14iuqEQ74NPDbXzJARJ/O4SI1zftA==",
|
||||
"dependencies": {
|
||||
"chokidar": ">=3.0.0 <4.0.0",
|
||||
"immutable": "^4.0.0",
|
||||
@ -23932,9 +23932,9 @@
|
||||
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
|
||||
},
|
||||
"sass": {
|
||||
"version": "1.65.1",
|
||||
"resolved": "https://registry.npmjs.org/sass/-/sass-1.65.1.tgz",
|
||||
"integrity": "sha512-9DINwtHmA41SEd36eVPQ9BJKpn7eKDQmUHmpI0y5Zv2Rcorrh0zS+cFrt050hdNbmmCNKTW3hV5mWfuegNRsEA==",
|
||||
"version": "1.66.1",
|
||||
"resolved": "https://registry.npmjs.org/sass/-/sass-1.66.1.tgz",
|
||||
"integrity": "sha512-50c+zTsZOJVgFfTgwwEzkjA3/QACgdNsKueWPyAR0mRINIvLAStVQBbPg14iuqEQ74NPDbXzJARJ/O4SI1zftA==",
|
||||
"requires": {
|
||||
"chokidar": ">=3.0.0 <4.0.0",
|
||||
"immutable": "^4.0.0",
|
||||
|
||||
@ -30,7 +30,7 @@
|
||||
"prism-react-renderer": "^1.3.5",
|
||||
"react": "^17.0.2",
|
||||
"react-dom": "^17.0.2",
|
||||
"sass": "^1.65.1",
|
||||
"sass": "^1.66.1",
|
||||
"tiny-slider": "^2.9.4",
|
||||
"tw-elements": "^1.0.0-alpha13",
|
||||
"wow.js": "^1.2.2"
|
||||
|
||||