add export support to teamschats service layer (#5126)
more boilerplate adaptation
parent 7ab1276d61
commit 8badbdd146
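For orientation: this change wires a new preview `chats` subcommand into `corso export` and adds a TeamsChats service handler (export, plus a restore stub) with matching tests. A quick sketch of the new CLI surface, taken from the command examples added below (the backup ID is a placeholder):

    # export one chat from a backup to ./my-exports
    corso export chats my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --chat 98765abcdef

    # export every chat in the backup to the current directory
    corso export chats . --backup 1234abcd-12ab-cd34-56de-1234abcd --chat '*'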
@@ -102,7 +102,7 @@ func teamschatsCreateCmd() *cobra.Command {
 	return &cobra.Command{
 		Use:     teamschatsServiceCommand,
 		Aliases: []string{teamsServiceCommand},
-		Short:   "Backup M365 Chats service data",
+		Short:   "Backup M365 Chats data",
 		RunE:    createTeamsChatsCmd,
 		Args:    cobra.NoArgs,
 	}
@@ -170,7 +170,7 @@ func createTeamsChatsCmd(cmd *cobra.Command, args []string) error {
 func teamschatsListCmd() *cobra.Command {
 	return &cobra.Command{
 		Use:   teamschatsServiceCommand,
-		Short: "List the history of M365 TeamsChats service backups",
+		Short: "List the history of M365 Chats backups",
 		RunE:  listTeamsChatsCmd,
 		Args:  cobra.NoArgs,
 	}
@@ -189,7 +189,7 @@ func listTeamsChatsCmd(cmd *cobra.Command, args []string) error {
 func teamschatsDetailsCmd() *cobra.Command {
 	return &cobra.Command{
 		Use:   teamschatsServiceCommand,
-		Short: "Shows the details of a M365 TeamsChats service backup",
+		Short: "Shows the details of a M365 Chats backup",
 		RunE:  detailsTeamsChatsCmd,
 		Args:  cobra.NoArgs,
 	}
@@ -237,7 +237,7 @@ func runDetailsTeamsChatsCmd(cmd *cobra.Command) error {
 func teamschatsDeleteCmd() *cobra.Command {
 	return &cobra.Command{
 		Use:   teamschatsServiceCommand,
-		Short: "Delete backed-up M365 TeamsChats service data",
+		Short: "Delete backed-up M365 Chats data",
 		RunE:  deleteTeamsChatsCmd,
 		Args:  cobra.NoArgs,
 	}
@@ -25,6 +25,7 @@ var exportCommands = []func(cmd *cobra.Command) *cobra.Command{
 	addSharePointCommands,
 	addGroupsCommands,
 	addExchangeCommands,
+	addTeamsChatsCommands,
 }

 var defaultAcceptedFormatTypes = []string{string(control.DefaultFormat)}
src/cli/export/teamschats.go (new file, 101 lines)
@@ -0,0 +1,101 @@
package export

import (
    "github.com/pkg/errors"
    "github.com/spf13/cobra"

    "github.com/alcionai/corso/src/cli/flags"
    "github.com/alcionai/corso/src/cli/utils"
    "github.com/alcionai/corso/src/pkg/control"
)

// called by export.go to map subcommands to provider-specific handling.
func addTeamsChatsCommands(cmd *cobra.Command) *cobra.Command {
    var c *cobra.Command

    switch cmd.Use {
    case exportCommand:
        c, _ = utils.AddCommand(cmd, teamschatsExportCmd(), utils.MarkPreviewCommand())

        c.Use = c.Use + " " + teamschatsServiceCommandUseSuffix

        flags.AddBackupIDFlag(c, true)
        flags.AddTeamsChatsDetailsAndRestoreFlags(c)
        flags.AddExportConfigFlags(c)
        flags.AddFailFastFlag(c)
    }

    return c
}

const (
    teamschatsServiceCommand          = "chats"
    teamschatsServiceCommandUseSuffix = "<destination> --backup <backupId>"

    //nolint:lll
    teamschatsServiceCommandExportExamples = `# Export a specific chat from the last backup (1234abcd...) to /my-exports
corso export chats my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --chat 98765abcdef

# Export all of Bob's chats to the current directory
corso export chats . --backup 1234abcd-12ab-cd34-56de-1234abcd \
    --chat '*'

# Export all chats that were created before 2020 to /my-exports
corso export chats my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd \
    --chat-created-before 2020-01-01T00:00:00`
)

// `corso export chats [<flag>...] <destination>`
func teamschatsExportCmd() *cobra.Command {
    return &cobra.Command{
        Use:     teamschatsServiceCommand,
        Aliases: []string{teamsServiceCommand},
        Short:   "Export M365 Chats data",
        RunE:    exportTeamsChatsCmd,
        Args: func(cmd *cobra.Command, args []string) error {
            if len(args) != 1 {
                return errors.New("missing export destination")
            }

            return nil
        },
        Example: teamschatsServiceCommandExportExamples,
    }
}

// processes a teamschats service export.
func exportTeamsChatsCmd(cmd *cobra.Command, args []string) error {
    ctx := cmd.Context()

    if utils.HasNoFlagsAndShownHelp(cmd) {
        return nil
    }

    opts := utils.MakeTeamsChatsOpts(cmd)

    if flags.RunModeFV == flags.RunModeFlagTest {
        return nil
    }

    if err := utils.ValidateTeamsChatsRestoreFlags(flags.BackupIDFV, opts, false); err != nil {
        return err
    }

    sel := utils.IncludeTeamsChatsRestoreDataSelectors(ctx, opts)
    utils.FilterTeamsChatsRestoreInfoSelectors(sel, opts)

    acceptedTeamsChatsFormatTypes := []string{
        string(control.DefaultFormat),
        string(control.JSONFormat),
    }

    return runExport(
        ctx,
        cmd,
        args,
        opts.ExportCfg,
        sel.Selector,
        flags.BackupIDFV,
        "Chats",
        acceptedTeamsChatsFormatTypes)
}
src/cli/export/teamschats_test.go (new file, 78 lines)
@@ -0,0 +1,78 @@
package export

import (
    "testing"

    "github.com/spf13/cobra"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/suite"

    "github.com/alcionai/corso/src/cli/flags"
    flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
    cliTD "github.com/alcionai/corso/src/cli/testdata"
    "github.com/alcionai/corso/src/cli/utils"
    "github.com/alcionai/corso/src/internal/tester"
)

type TeamsChatsUnitSuite struct {
    tester.Suite
}

func TestTeamsChatsUnitSuite(t *testing.T) {
    suite.Run(t, &TeamsChatsUnitSuite{Suite: tester.NewUnitSuite(t)})
}

func (suite *TeamsChatsUnitSuite) TestAddTeamsChatsCommands() {
    expectUse := teamschatsServiceCommand + " " + teamschatsServiceCommandUseSuffix

    table := []struct {
        name        string
        use         string
        expectUse   string
        expectShort string
        expectRunE  func(*cobra.Command, []string) error
    }{
        {"export teamschats", exportCommand, expectUse, teamschatsExportCmd().Short, exportTeamsChatsCmd},
    }
    for _, test := range table {
        suite.Run(test.name, func() {
            t := suite.T()
            parent := &cobra.Command{Use: exportCommand}

            cmd := cliTD.SetUpCmdHasFlags(
                t,
                parent,
                addTeamsChatsCommands,
                []cliTD.UseCobraCommandFn{
                    flags.AddAllProviderFlags,
                    flags.AddAllStorageFlags,
                },
                flagsTD.WithFlags(
                    teamschatsServiceCommand,
                    []string{
                        flagsTD.RestoreDestination,
                        "--" + flags.RunModeFN, flags.RunModeFlagTest,
                        "--" + flags.BackupFN, flagsTD.BackupInput,
                        "--" + flags.FormatFN, flagsTD.FormatType,
                        "--" + flags.ArchiveFN,
                    },
                    flagsTD.PreparedProviderFlags(),
                    flagsTD.PreparedStorageFlags()))

            cliTD.CheckCmdChild(
                t,
                parent,
                3,
                test.expectUse,
                test.expectShort,
                test.expectRunE)

            opts := utils.MakeTeamsChatsOpts(cmd)

            assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
            assert.Equal(t, flagsTD.Archive, opts.ExportCfg.Archive)
            assert.Equal(t, flagsTD.FormatType, opts.ExportCfg.Format)
            flagsTD.AssertStorageFlags(t, cmd)
        })
    }
}
@@ -7,6 +7,7 @@ import (
 	"github.com/alcionai/corso/src/internal/m365/service/groups"
 	"github.com/alcionai/corso/src/internal/m365/service/onedrive"
 	"github.com/alcionai/corso/src/internal/m365/service/sharepoint"
+	"github.com/alcionai/corso/src/internal/m365/service/teamschats"
 	"github.com/alcionai/corso/src/internal/operations/inject"
 	"github.com/alcionai/corso/src/pkg/path"
 )
@@ -30,6 +31,9 @@ func (ctrl *Controller) NewServiceHandler(

 	case path.ExchangeService:
 		return exchange.NewExchangeHandler(ctrl.AC, ctrl.resourceHandler), nil
+
+	case path.TeamsChatsService:
+		return teamschats.NewTeamsChatsHandler(ctrl.AC, ctrl.resourceHandler), nil
 	}

 	return nil, clues.New("unrecognized service").
@@ -8,6 +8,7 @@ import (

 	"github.com/alcionai/clues"
 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"

 	"github.com/alcionai/corso/src/internal/data"
@@ -79,7 +80,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_messages() {
 	)

 	p, err := path.Build("t", "pr", path.GroupsService, path.ChannelMessagesCategory, false, containerName)
-	assert.NoError(t, err, "build path")
+	require.NoError(t, err, clues.ToCore(err))

 	dcs := []data.RestoreCollection{
 		data.FetchRestoreCollection{
@@ -106,7 +107,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_messages() {
 		dcs,
 		stats,
 		fault.New(true))
-	assert.NoError(t, err, "export collections error")
+	require.NoError(t, err, clues.ToCore(err))
 	assert.Len(t, ecs, 1, "num of collections")

 	assert.Equal(t, expectedPath, ecs[0].BasePath(), "base dir")
@@ -117,7 +118,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_messages() {

 	for item := range ecs[0].Items(ctx) {
 		b, err := io.ReadAll(item.Body)
-		assert.NoError(t, err, clues.ToCore(err))
+		require.NoError(t, err, clues.ToCore(err))

 		// count up size for tests
 		size += len(b)
@@ -181,7 +182,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_libraries() {
 		false,
 		odConsts.SitesPathDir,
 		siteID)
-	assert.NoError(t, err, "build path")
+	require.NoError(t, err, clues.ToCore(err))

 	dcs := []data.RestoreCollection{
 		data.FetchRestoreCollection{
@@ -210,7 +211,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_libraries() {
 		dcs,
 		stats,
 		fault.New(true))
-	assert.NoError(t, err, "export collections error")
+	require.NoError(t, err, clues.ToCore(err))
 	assert.Len(t, ecs, 1, "num of collections")

 	assert.Equal(t, expectedPath, ecs[0].BasePath(), "base dir")
@@ -222,7 +223,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_libraries() {
 	for item := range ecs[0].Items(ctx) {
 		// unwrap the body from stats reader
 		b, err := io.ReadAll(item.Body)
-		assert.NoError(t, err, clues.ToCore(err))
+		require.NoError(t, err, clues.ToCore(err))

 		size += len(b)
 		bitem := io.NopCloser(bytes.NewBuffer(b))
src/internal/m365/service/teamschats/export.go (new file, 119 lines)
@@ -0,0 +1,119 @@
package teamschats

import (
    "context"

    "github.com/alcionai/clues"

    "github.com/alcionai/corso/src/internal/common/idname"
    "github.com/alcionai/corso/src/internal/data"
    "github.com/alcionai/corso/src/internal/m365/collection/teamschats"
    "github.com/alcionai/corso/src/internal/m365/resource"
    "github.com/alcionai/corso/src/internal/operations/inject"
    "github.com/alcionai/corso/src/pkg/backup/details"
    "github.com/alcionai/corso/src/pkg/control"
    "github.com/alcionai/corso/src/pkg/export"
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/metrics"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
)

var _ inject.ServiceHandler = &teamsChatsHandler{}

func NewTeamsChatsHandler(
    apiClient api.Client,
    resourceGetter idname.GetResourceIDAndNamer,
) *teamsChatsHandler {
    return &teamsChatsHandler{
        baseTeamsChatsHandler: baseTeamsChatsHandler{},
        apiClient:             apiClient,
        resourceGetter:        resourceGetter,
    }
}

// ========================================================================== //
// baseTeamsChatsHandler
// ========================================================================== //

// baseTeamsChatsHandler contains logic for tracking data and doing operations
// (e.g. export) that don't require contact with external M365 services.
type baseTeamsChatsHandler struct{}

func (h *baseTeamsChatsHandler) CacheItemInfo(v details.ItemInfo) {}

// ProduceExportCollections will create the export collections for the
// given restore collections.
func (h *baseTeamsChatsHandler) ProduceExportCollections(
    ctx context.Context,
    backupVersion int,
    exportCfg control.ExportConfig,
    dcs []data.RestoreCollection,
    stats *metrics.ExportStats,
    errs *fault.Bus,
) ([]export.Collectioner, error) {
    var (
        el = errs.Local()
        ec = make([]export.Collectioner, 0, len(dcs))
    )

    for _, dc := range dcs {
        category := dc.FullPath().Category()

        switch category {
        case path.ChatsCategory:
            folders := dc.FullPath().Folders()
            pth := path.Builder{}.Append(category.HumanString()).Append(folders...)

            ec = append(
                ec,
                teamschats.NewExportCollection(
                    pth.String(),
                    []data.RestoreCollection{dc},
                    backupVersion,
                    exportCfg,
                    stats))
        default:
            return nil, clues.NewWC(ctx, "data category not supported").
                With("category", category)
        }
    }

    return ec, el.Failure()
}

// ========================================================================== //
// teamsChatsHandler
// ========================================================================== //

// teamsChatsHandler contains logic for handling data and performing operations
// (e.g. restore) regardless of whether they require contact with external M365
// services or not.
type teamsChatsHandler struct {
    baseTeamsChatsHandler
    apiClient      api.Client
    resourceGetter idname.GetResourceIDAndNamer
}

func (h *teamsChatsHandler) IsServiceEnabled(
    ctx context.Context,
    resourceID string,
) (bool, error) {
    // TODO(ashmrtn): Move free function implementation to this function.
    res, err := IsServiceEnabled(ctx, h.apiClient.Users(), resourceID)
    return res, clues.Stack(err).OrNil()
}

func (h *teamsChatsHandler) PopulateProtectedResourceIDAndName(
    ctx context.Context,
    resourceID string, // Can be either ID or name.
    ins idname.Cacher,
) (idname.Provider, error) {
    if h.resourceGetter == nil {
        return nil, clues.StackWC(ctx, resource.ErrNoResourceLookup)
    }

    pr, err := h.resourceGetter.GetResourceIDAndNameFrom(ctx, resourceID, ins)

    return pr, clues.Wrap(err, "identifying resource owner").OrNil()
}
src/internal/m365/service/teamschats/export_test.go (new file, 140 lines)
@@ -0,0 +1,140 @@
package teamschats

import (
    "bytes"
    "context"
    "io"
    "testing"

    "github.com/alcionai/clues"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
    "github.com/stretchr/testify/suite"

    "github.com/alcionai/corso/src/internal/data"
    dataMock "github.com/alcionai/corso/src/internal/data/mock"
    teamschatMock "github.com/alcionai/corso/src/internal/m365/service/teamschats/mock"
    "github.com/alcionai/corso/src/internal/tester"
    "github.com/alcionai/corso/src/internal/version"
    "github.com/alcionai/corso/src/pkg/control"
    "github.com/alcionai/corso/src/pkg/export"
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/metrics"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
)

type ExportUnitSuite struct {
    tester.Suite
}

func TestExportUnitSuite(t *testing.T) {
    suite.Run(t, &ExportUnitSuite{Suite: tester.NewUnitSuite(t)})
}

type finD struct {
    id   string
    key  string
    name string
    err  error
}

func (fd finD) FetchItemByName(ctx context.Context, name string) (data.Item, error) {
    if fd.err != nil {
        return nil, fd.err
    }

    if name == fd.id {
        return &dataMock.Item{
            ItemID: fd.id,
            Reader: io.NopCloser(bytes.NewBufferString(`{"` + fd.key + `": "` + fd.name + `"}`)),
        }, nil
    }

    return nil, assert.AnError
}

func (suite *ExportUnitSuite) TestExportRestoreCollections_chats() {
    t := suite.T()

    ctx, flush := tester.NewContext(t)
    defer flush()

    var (
        category      = path.ChatsCategory
        itemID        = "itemID"
        dii           = teamschatMock.ItemInfo()
        content       = `{"topic": "` + dii.TeamsChats.Chat.Topic + `"}`
        body          = io.NopCloser(bytes.NewBufferString(content))
        exportCfg     = control.ExportConfig{}
        expectedPath  = category.HumanString()
        expectedItems = []export.Item{
            {
                ID:   itemID,
                Name: itemID + ".json",
                // Body: body, not checked
            },
        }
    )

    p, err := path.BuildPrefix("t", "pr", path.TeamsChatsService, category)
    require.NoError(t, err, clues.ToCore(err))

    dcs := []data.RestoreCollection{
        data.FetchRestoreCollection{
            Collection: dataMock.Collection{
                Path: p,
                ItemData: []data.Item{
                    &dataMock.Item{
                        ItemID: itemID,
                        Reader: body,
                    },
                },
            },
            FetchItemByNamer: finD{
                id:   itemID,
                key:  "id",
                name: itemID,
            },
        },
    }

    stats := metrics.NewExportStats()

    ecs, err := NewTeamsChatsHandler(api.Client{}, nil).
        ProduceExportCollections(
            ctx,
            int(version.Backup),
            exportCfg,
            dcs,
            stats,
            fault.New(true))
    require.NoError(t, err, clues.ToCore(err))
    assert.Len(t, ecs, 1, "num of collections")

    assert.Equal(t, expectedPath, ecs[0].BasePath(), "base dir")

    fitems := []export.Item{}

    size := 0

    for item := range ecs[0].Items(ctx) {
        b, err := io.ReadAll(item.Body)
        require.NoError(t, err, clues.ToCore(err))

        // count up size for tests
        size += len(b)

        // have to nil out body, otherwise assert fails due to
        // pointer memory location differences
        item.Body = nil
        fitems = append(fitems, item)
    }

    assert.Equal(t, expectedItems, fitems, "items")

    expectedStats := metrics.NewExportStats()
    expectedStats.UpdateBytes(category, int64(size))
    expectedStats.UpdateResourceCount(category)
    assert.Equal(t, expectedStats.GetStats(), stats.GetStats(), "stats")
}
src/internal/m365/service/teamschats/restore.go (new file, 100 lines)
@@ -0,0 +1,100 @@
package teamschats

import (
    "context"
    "errors"

    "github.com/alcionai/clues"

    "github.com/alcionai/corso/src/internal/data"
    "github.com/alcionai/corso/src/internal/m365/support"
    "github.com/alcionai/corso/src/internal/operations/inject"
    "github.com/alcionai/corso/src/pkg/backup/details"
    "github.com/alcionai/corso/src/pkg/count"
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/logger"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

// ConsumeRestoreCollections will restore the specified data collections
func (h *teamsChatsHandler) ConsumeRestoreCollections(
    ctx context.Context,
    rcc inject.RestoreConsumerConfig,
    dcs []data.RestoreCollection,
    errs *fault.Bus,
    ctr *count.Bus,
) (*details.Details, *data.CollectionStats, error) {
    if len(dcs) == 0 {
        return nil, nil, clues.WrapWC(ctx, data.ErrNoData, "performing restore")
    }

    // TODO(ashmrtn): We should stop relying on the context for rate limiter stuff
    // and instead configure this when we make the handler instance. We can't
    // initialize it in the NewHandler call right now because those functions
    // aren't (and shouldn't be) returning a context along with the handler. Since
    // that call isn't directly calling into this function even if we did
    // initialize the rate limiter there it would be lost because it wouldn't get
    // stored in an ancestor of the context passed to this function.
    ctx = graph.BindRateLimiterConfig(
        ctx,
        graph.LimiterCfg{Service: path.TeamsChatsService})

    var (
        deets          = &details.Builder{}
        restoreMetrics support.CollectionMetrics
        el             = errs.Local()
    )

    // Reorder collections so that the parents directories are created
    // before the child directories; a requirement for permissions.
    data.SortRestoreCollections(dcs)

    // Iterate through the data collections and restore the contents of each
    for _, dc := range dcs {
        if el.Failure() != nil {
            break
        }

        var (
            err      error
            category = dc.FullPath().Category()
            metrics  support.CollectionMetrics
            ictx     = clues.Add(ctx,
                "category", category,
                "restore_location", clues.Hide(rcc.RestoreConfig.Location),
                "protected_resource", clues.Hide(dc.FullPath().ProtectedResource()),
                "full_path", dc.FullPath())
        )

        switch dc.FullPath().Category() {
        case path.ChatsCategory:
            // chats cannot be restored using Graph API.
            // a delegated token is required, and Corso has no
            // good way of obtaining such a token.
            logger.Ctx(ictx).Debug("Skipping restore for chats")
        default:
            return nil, nil, clues.NewWC(ictx, "data category not supported").
                With("category", category)
        }

        restoreMetrics = support.CombineMetrics(restoreMetrics, metrics)

        if err != nil {
            el.AddRecoverable(ictx, err)
        }

        if errors.Is(err, context.Canceled) {
            break
        }
    }

    status := support.CreateStatus(
        ctx,
        support.Restore,
        len(dcs),
        restoreMetrics,
        rcc.RestoreConfig.Location)

    return deets.Details(), status.ToCollectionStats(), el.Failure()
}
src/internal/m365/service/teamschats/restore_test.go (new file, 54 lines)
@@ -0,0 +1,54 @@
package teamschats

import (
    "testing"

    "github.com/alcionai/clues"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
    "github.com/stretchr/testify/suite"

    "github.com/alcionai/corso/src/internal/data"
    "github.com/alcionai/corso/src/internal/data/mock"
    "github.com/alcionai/corso/src/internal/operations/inject"
    "github.com/alcionai/corso/src/internal/tester"
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
)

type RestoreUnitSuite struct {
    tester.Suite
}

func TestRestoreUnitSuite(t *testing.T) {
    suite.Run(t, &RestoreUnitSuite{Suite: tester.NewUnitSuite(t)})
}

func (suite *RestoreUnitSuite) TestConsumeRestoreCollections_noErrorOnChats() {
    t := suite.T()

    ctx, flush := tester.NewContext(t)
    defer flush()

    rcc := inject.RestoreConsumerConfig{}
    pth, err := path.BuildPrefix(
        "t",
        "pr",
        path.TeamsChatsService,
        path.ChatsCategory)
    require.NoError(t, err, clues.ToCore(err))

    dcs := []data.RestoreCollection{
        mock.Collection{Path: pth},
    }

    _, _, err = NewTeamsChatsHandler(api.Client{}, nil).
        ConsumeRestoreCollections(
            ctx,
            rcc,
            dcs,
            fault.New(false),
            nil)
    assert.NoError(t, err, "Chats restore")
}
@@ -142,6 +142,7 @@ func makeRestorePathsForEntry(
 	// * OneDrive/SharePoint (needs drive information)
 	switch true {
 	case ent.Exchange != nil ||
+		ent.TeamsChats != nil ||
 		(ent.Groups != nil && ent.Groups.ItemType == details.GroupsChannelMessage) ||
 		(ent.SharePoint != nil && ent.SharePoint.ItemType == details.SharePointList):
 		// TODO(ashmrtn): Eventually make Events have it's own function to handle
@@ -399,6 +399,30 @@ func (suite *RestorePathTransformerUnitSuite) TestGetPaths() {
 				},
 			},
 		},
+		{
+			name:          "TeamsChats Chats",
+			backupVersion: version.Groups9Update,
+			input: []*details.Entry{
+				{
+					RepoRef:     testdata.ExchangeEmailItemPath3.RR.String(),
+					LocationRef: testdata.ExchangeEmailItemPath3.Loc.String(),
+					ItemInfo: details.ItemInfo{
+						Exchange: &details.ExchangeInfo{
+							ItemType: details.ExchangeMail,
+						},
+					},
+				},
+			},
+			expectErr: assert.NoError,
+			expected: []expectPaths{
+				{
+					storage: testdata.ExchangeEmailItemPath3.RR.String(),
+					restore: toRestore(
+						testdata.ExchangeEmailItemPath3.RR,
+						testdata.ExchangeEmailItemPath3.Loc.Elements()...),
+				},
+			},
+		},
 	}

 	for _, test := range table {
@@ -396,3 +396,120 @@ func RunMergeBaseGroupsUpdate(
 		"cached items")
 	})
 }
+
+func RunBasicBackupTest(
+	suite tester.Suite,
+	sel selectors.Selector,
+) {
+	t := suite.T()
+
+	ctx, flush := tester.NewContext(t)
+	defer flush()
+
+	var (
+		mb      = evmock.NewBus()
+		counter = count.New()
+		opts    = control.DefaultOptions()
+		whatSet = deeTD.CategoryFromRepoRef
+	)
+
+	bo, bod := PrepNewTestBackupOp(t, ctx, mb, sel, opts, version.Backup, counter)
+	defer bod.Close(t, ctx)
+
+	reasons, err := bod.Sel.Reasons(bod.Acct.ID(), false)
+	require.NoError(t, err, clues.ToCore(err))
+
+	RunAndCheckBackup(t, ctx, &bo, mb, false)
+
+	for _, reason := range reasons {
+		CheckBackupIsInManifests(
+			t,
+			ctx,
+			bod.KW,
+			bod.SW,
+			&bo,
+			bod.Sel,
+			bod.Sel.ID(),
+			reason.Category())
+	}
+
+	_, expectDeets := deeTD.GetDeetsInBackup(
+		t,
+		ctx,
+		bo.Results.BackupID,
+		bod.Acct.ID(),
+		bod.Sel.ID(),
+		sel.PathService(),
+		whatSet,
+		bod.KMS,
+		bod.SSS)
+	deeTD.CheckBackupDetails(
+		t,
+		ctx,
+		bo.Results.BackupID,
+		whatSet,
+		bod.KMS,
+		bod.SSS,
+		expectDeets,
+		false)
+
+	// Basic, happy path incremental test. No changes are dictated or expected.
+	// This only tests that an incremental backup is runnable at all, and that it
+	// produces fewer results than the last backup.
+	//
+	// Incremental testing for conversations is limited because of API restrictions.
+	// Since graph doesn't provide us a way to programmatically delete conversations,
+	// or create new conversations without a delegated token, we can't do incremental
+	// testing with newly added items.
+	incMB := evmock.NewBus()
+	incBO := NewTestBackupOp(
+		t,
+		ctx,
+		bod,
+		incMB,
+		opts,
+		count.New())
+
+	RunAndCheckBackup(t, ctx, &incBO, incMB, true)
+
+	for _, reason := range reasons {
+		CheckBackupIsInManifests(
+			t,
+			ctx,
+			bod.KW,
+			bod.SW,
+			&incBO,
+			bod.Sel,
+			bod.Sel.ID(),
+			reason.Category())
+	}
+
+	_, expectDeets = deeTD.GetDeetsInBackup(
+		t,
+		ctx,
+		incBO.Results.BackupID,
+		bod.Acct.ID(),
+		bod.Sel.ID(),
+		bod.Sel.PathService(),
+		whatSet,
+		bod.KMS,
+		bod.SSS)
+	deeTD.CheckBackupDetails(
+		t,
+		ctx,
+		incBO.Results.BackupID,
+		whatSet,
+		bod.KMS,
+		bod.SSS,
+		expectDeets,
+		false)
+
+	assert.NotZero(
+		t,
+		incBO.Results.Counts[string(count.PersistedCachedFiles)],
+		"cached items")
+	assert.Greater(t, bo.Results.ItemsWritten, incBO.Results.ItemsWritten, "incremental items written")
+	assert.Greater(t, bo.Results.BytesRead, incBO.Results.BytesRead, "incremental bytes read")
+	assert.Greater(t, bo.Results.BytesUploaded, incBO.Results.BytesUploaded, "incremental bytes uploaded")
+	assert.Equal(t, 1, incMB.TimesCalled[events.BackupEnd], "incremental backup-end events")
+}
@@ -70,7 +70,7 @@ func MetadataFileNames(cat path.CategoryType) [][]string {

 // TestBackup_Run ensures that Integration Testing works
 // for the following scopes: Contacts, Events, and Mail
-func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
+func (suite *ExchangeBackupIntgSuite) TestBackup_Run_basicBackup() {
 	tests := []struct {
 		name     string
 		selector func() *selectors.ExchangeBackup
@@ -112,124 +112,7 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_basicBackup() {
 	}
 	for _, test := range tests {
 		suite.Run(test.name, func() {
-			t := suite.T()
-
-			ctx, flush := tester.NewContext(t)
-			defer flush()
-
-			var (
-				mb      = evmock.NewBus()
-				counter = count.New()
-				sel     = test.selector().Selector
-				opts    = control.DefaultOptions()
-				whatSet = deeTD.CategoryFromRepoRef
-			)
-
-			bo, bod := PrepNewTestBackupOp(t, ctx, mb, sel, opts, version.Backup, counter)
-			defer bod.Close(t, ctx)
-
-			sel = bod.Sel
-
-			userID := sel.ID()
-
-			m365, err := bod.Acct.M365Config()
-			require.NoError(t, err, clues.ToCore(err))
-
-			// run the tests
-			RunAndCheckBackup(t, ctx, &bo, mb, false)
-			CheckBackupIsInManifests(
-				t,
-				ctx,
-				bod.KW,
-				bod.SW,
-				&bo,
-				sel,
-				userID,
-				test.category)
-			CheckMetadataFilesExist(
-				t,
-				ctx,
-				bo.Results.BackupID,
-				bod.KW,
-				bod.KMS,
-				m365.AzureTenantID,
-				userID,
-				path.ExchangeService,
-				map[path.CategoryType][][]string{test.category: test.metadataFiles})
-
-			_, expectDeets := deeTD.GetDeetsInBackup(
-				t,
-				ctx,
-				bo.Results.BackupID,
-				bod.Acct.ID(),
-				userID,
-				path.ExchangeService,
-				whatSet,
-				bod.KMS,
-				bod.SSS)
-			deeTD.CheckBackupDetails(
-				t,
-				ctx,
-				bo.Results.BackupID,
-				whatSet,
-				bod.KMS,
-				bod.SSS,
-				expectDeets,
-				false)
-
-			// Basic, happy path incremental test. No changes are dictated or expected.
-			// This only tests that an incremental backup is runnable at all, and that it
-			// produces fewer results than the last backup.
-			var (
-				incMB = evmock.NewBus()
-				incBO = NewTestBackupOp(
-					t,
-					ctx,
-					bod,
-					incMB,
-					opts,
-					counter)
-			)
-
-			RunAndCheckBackup(t, ctx, &incBO, incMB, true)
-			CheckBackupIsInManifests(
-				t,
-				ctx,
-				bod.KW,
-				bod.SW,
-				&incBO,
-				sel,
-				userID,
-				test.category)
-			CheckMetadataFilesExist(
-				t,
-				ctx,
-				incBO.Results.BackupID,
-				bod.KW,
-				bod.KMS,
-				m365.AzureTenantID,
-				userID,
-				path.ExchangeService,
-				map[path.CategoryType][][]string{test.category: test.metadataFiles})
-			deeTD.CheckBackupDetails(
-				t,
-				ctx,
-				incBO.Results.BackupID,
-				whatSet,
-				bod.KMS,
-				bod.SSS,
-				expectDeets,
-				false)
-
-			// do some additional checks to ensure the incremental dealt with fewer items.
-			assert.Greater(t, bo.Results.ItemsWritten, incBO.Results.ItemsWritten, "incremental items written")
-			assert.Greater(t, bo.Results.ItemsRead, incBO.Results.ItemsRead, "incremental items read")
-			assert.Greater(t, bo.Results.BytesRead, incBO.Results.BytesRead, "incremental bytes read")
-			assert.Greater(t, bo.Results.BytesUploaded, incBO.Results.BytesUploaded, "incremental bytes uploaded")
-			assert.Equal(t, bo.Results.ResourceOwners, incBO.Results.ResourceOwners, "incremental backup resource owner")
-			assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(bo.Errors.Failure()))
-			assert.Empty(t, incBO.Errors.Recovered(), "count incremental recoverable/iteration errors")
-			assert.Equal(t, 1, incMB.TimesCalled[events.BackupEnd], "incremental backup-end events")
+			RunBasicBackupTest(suite, test.selector().Selector)
 		})
 	}
 }
@@ -4,21 +4,13 @@ import (
 	"context"
 	"testing"

-	"github.com/alcionai/clues"
-	"github.com/stretchr/testify/assert"
-	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"

-	"github.com/alcionai/corso/src/internal/events"
-	evmock "github.com/alcionai/corso/src/internal/events/mock"
 	"github.com/alcionai/corso/src/internal/m365/collection/drive"
 	. "github.com/alcionai/corso/src/internal/operations/test/m365"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/internal/tester/tconfig"
-	"github.com/alcionai/corso/src/internal/version"
-	deeTD "github.com/alcionai/corso/src/pkg/backup/details/testdata"
 	"github.com/alcionai/corso/src/pkg/control"
-	"github.com/alcionai/corso/src/pkg/count"
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/selectors"
 	selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
@@ -190,124 +182,14 @@ func runGroupsIncrementalBackupTests(
 		true)
 }

-func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsBasic() {
-	t := suite.T()
-
-	ctx, flush := tester.NewContext(t)
-	defer flush()
-
-	var (
-		mb      = evmock.NewBus()
-		counter = count.New()
-		sel     = selectors.NewGroupsBackup([]string{suite.its.Group.ID})
-		opts    = control.DefaultOptions()
-		whatSet = deeTD.CategoryFromRepoRef
-	)
-
+func (suite *GroupsBackupIntgSuite) TestBackup_Run_basicBackup() {
+	sel := selectors.NewGroupsBackup([]string{suite.its.Group.ID})
 	sel.Include(
 		selTD.GroupsBackupLibraryFolderScope(sel),
 		selTD.GroupsBackupChannelScope(sel),
 		selTD.GroupsBackupConversationScope(sel))

-	bo, bod := PrepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter)
-	defer bod.Close(t, ctx)
-
-	reasons, err := bod.Sel.Reasons(bod.Acct.ID(), false)
-	require.NoError(t, err, clues.ToCore(err))
-
-	RunAndCheckBackup(t, ctx, &bo, mb, false)
-
-	for _, reason := range reasons {
-		CheckBackupIsInManifests(
-			t,
-			ctx,
-			bod.KW,
-			bod.SW,
-			&bo,
-			bod.Sel,
-			bod.Sel.ID(),
-			reason.Category())
-	}
-
-	_, expectDeets := deeTD.GetDeetsInBackup(
-		t,
-		ctx,
-		bo.Results.BackupID,
-		bod.Acct.ID(),
-		bod.Sel.ID(),
-		path.GroupsService,
-		whatSet,
-		bod.KMS,
-		bod.SSS)
-	deeTD.CheckBackupDetails(
-		t,
-		ctx,
-		bo.Results.BackupID,
-		whatSet,
-		bod.KMS,
-		bod.SSS,
-		expectDeets,
-		false)
-
-	// Basic, happy path incremental test. No changes are dictated or expected.
-	// This only tests that an incremental backup is runnable at all, and that it
-	// produces fewer results than the last backup.
-	//
-	// Incremental testing for conversations is limited because of API restrictions.
-	// Since graph doesn't provide us a way to programmatically delete conversations,
-	// or create new conversations without a delegated token, we can't do incremental
-	// testing with newly added items.
-	incMB := evmock.NewBus()
-	incBO := NewTestBackupOp(
-		t,
-		ctx,
-		bod,
-		incMB,
-		opts,
-		count.New())
-
-	RunAndCheckBackup(t, ctx, &incBO, incMB, true)
-
-	for _, reason := range reasons {
-		CheckBackupIsInManifests(
-			t,
-			ctx,
-			bod.KW,
-			bod.SW,
-			&incBO,
-			bod.Sel,
-			bod.Sel.ID(),
-			reason.Category())
-	}
-
-	_, expectDeets = deeTD.GetDeetsInBackup(
-		t,
-		ctx,
-		incBO.Results.BackupID,
-		bod.Acct.ID(),
-		bod.Sel.ID(),
-		bod.Sel.PathService(),
-		whatSet,
-		bod.KMS,
-		bod.SSS)
-	deeTD.CheckBackupDetails(
-		t,
-		ctx,
-		incBO.Results.BackupID,
-		whatSet,
-		bod.KMS,
-		bod.SSS,
-		expectDeets,
-		false)
-
-	assert.NotZero(
-		t,
-		incBO.Results.Counts[string(count.PersistedCachedFiles)],
-		"cached items")
-	assert.Greater(t, bo.Results.ItemsWritten, incBO.Results.ItemsWritten, "incremental items written")
-	assert.Greater(t, bo.Results.BytesRead, incBO.Results.BytesRead, "incremental bytes read")
-	assert.Greater(t, bo.Results.BytesUploaded, incBO.Results.BytesUploaded, "incremental bytes uploaded")
-	assert.Equal(t, 1, incMB.TimesCalled[events.BackupEnd], "incremental backup-end events")
+	RunBasicBackupTest(suite, sel.Selector)
 }

 type GroupsBackupNightlyIntgSuite struct {
@@ -0,0 +1,73 @@
package teamschats_test

import (
    "testing"

    "github.com/stretchr/testify/suite"

    . "github.com/alcionai/corso/src/internal/operations/test/m365"
    "github.com/alcionai/corso/src/internal/tester"
    "github.com/alcionai/corso/src/internal/tester/tconfig"
    "github.com/alcionai/corso/src/pkg/selectors"
    selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
    storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
)

type BackupIntgSuite struct {
    tester.Suite
    its IntgTesterSetup
}

func TestBackupIntgSuite(t *testing.T) {
    suite.Run(t, &BackupIntgSuite{
        Suite: tester.NewIntegrationSuite(
            t,
            [][]string{tconfig.M365AcctCredEnvs, storeTD.AWSStorageCredEnvs}),
    })
}

func (suite *BackupIntgSuite) SetupSuite() {
    suite.its = NewIntegrationTesterSetup(suite.T())
}

func (suite *BackupIntgSuite) TestBackup_Run_basicBackup() {
    sel := selectors.NewTeamsChatsBackup([]string{suite.its.User.ID})
    sel.Include(selTD.TeamsChatsBackupChatScope(sel))

    RunBasicBackupTest(suite, sel.Selector)
}

// ---------------------------------------------------------------------------
// nightly tests
// ---------------------------------------------------------------------------

type BackupNightlyIntgSuite struct {
    tester.Suite
    its IntgTesterSetup
}

func TestsBackupNightlyIntgSuite(t *testing.T) {
    suite.Run(t, &BackupNightlyIntgSuite{
        Suite: tester.NewNightlySuite(
            t,
            [][]string{tconfig.M365AcctCredEnvs, storeTD.AWSStorageCredEnvs}),
    })
}

func (suite *BackupNightlyIntgSuite) SetupSuite() {
    suite.its = NewIntegrationTesterSetup(suite.T())
}

func (suite *BackupNightlyIntgSuite) TestBackup_Run_vVersion9MergeBase() {
    sel := selectors.NewTeamsChatsBackup([]string{suite.its.User.ID})
    sel.Include(selTD.TeamsChatsBackupChatScope(sel))

    RunMergeBaseGroupsUpdate(suite, sel.Selector, true)
}

func (suite *BackupNightlyIntgSuite) TestBackup_Run_version9AssistBases() {
    sel := selectors.NewTeamsChatsBackup([]string{suite.its.User.ID})
    sel.Include(selTD.TeamsChatsBackupChatScope(sel))

    RunDriveAssistBaseGroupsUpdate(suite, sel.Selector, true)
}
src/pkg/backup/details/testdata/testdata.go (vendored, 84 lines changed)
@@ -16,11 +16,13 @@ import (
 // mustParsePath takes a string representing a resource path and returns a path
 // instance. Panics if the path cannot be parsed. Useful for simple variable
 // assignments.
-func mustParsePath(ref string, isItem, isSharepointList bool) path.Path {
-	var p path.Path
-	var err error
+func mustParsePath(ref string, isItem, allowPrefix bool) path.Path {
+	var (
+		p   path.Path
+		err error
+	)

-	if isSharepointList {
+	if allowPrefix {
 		p, err = path.PrefixOrPathFromDataLayerPath(ref, isItem)
 	} else {
 		p, err = path.FromDataLayerPath(ref, isItem)
@@ -126,9 +128,9 @@ func (p repoRefAndLocRef) locationAsRepoRef() path.Path {
 	return res
 }

-func mustPathRep(ref string, isItem, isSharepointList bool) repoRefAndLocRef {
+func mustPathRep(ref string, isItem, allowPrefix bool) repoRefAndLocRef {
 	res := repoRefAndLocRef{}
-	tmp := mustParsePath(ref, isItem, isSharepointList)
+	tmp := mustParsePath(ref, isItem, allowPrefix)

 	// Now append stuff to the RepoRef elements so we have distinct LocationRef
 	// and RepoRef elements to simulate using IDs in the path instead of display
@@ -969,6 +971,68 @@ var (
 			},
 		},
 	}
+
+	TeamsChatsRootPath = mustPathRep("tenant-id/"+path.TeamsChatsService.String()+"/user-id/chats", false, true)
+
+	TeamsChatsChatItemPath1 = TeamsChatsRootPath.MustAppend(ItemName1, true)
+	TeamsChatsChatItemPath2 = TeamsChatsRootPath.MustAppend(ItemName2, true)
+	TeamsChatsChatItemPath3 = TeamsChatsRootPath.MustAppend(ItemName3, true)
+
+	teamsChatsChatItemsByVersion = map[int][]details.Entry{
+		version.Groups9Update: {
+			{
+				RepoRef:     TeamsChatsChatItemPath1.locationAsRepoRef().String(),
+				ShortRef:    TeamsChatsChatItemPath1.locationAsRepoRef().ShortRef(),
+				ParentRef:   TeamsChatsChatItemPath1.locationAsRepoRef().ToBuilder().Dir().ShortRef(),
+				ItemRef:     TeamsChatsChatItemPath1.ItemLocation(),
+				LocationRef: "",
+				ItemInfo: details.ItemInfo{
+					TeamsChats: &details.TeamsChatsInfo{
+						ItemType:   details.TeamsChat,
+						Modified:   Time4,
+						ParentPath: "",
+						Chat: details.ChatInfo{
+							Topic: "item 1",
+						},
+					},
+				},
+			},
+			{
+				RepoRef:     TeamsChatsChatItemPath2.locationAsRepoRef().String(),
+				ShortRef:    TeamsChatsChatItemPath2.locationAsRepoRef().ShortRef(),
+				ParentRef:   TeamsChatsChatItemPath2.locationAsRepoRef().ToBuilder().Dir().ShortRef(),
+				ItemRef:     TeamsChatsChatItemPath2.ItemLocation(),
+				LocationRef: "",
+				ItemInfo: details.ItemInfo{
+					TeamsChats: &details.TeamsChatsInfo{
+						ItemType:   details.TeamsChat,
+						Modified:   Time3,
+						ParentPath: "",
+						Chat: details.ChatInfo{
+							Topic: "item 2",
+						},
+					},
+				},
+			},
+			{
+				RepoRef:     TeamsChatsChatItemPath3.locationAsRepoRef().String(),
+				ShortRef:    TeamsChatsChatItemPath3.locationAsRepoRef().ShortRef(),
+				ParentRef:   TeamsChatsChatItemPath3.locationAsRepoRef().ToBuilder().Dir().ShortRef(),
+				ItemRef:     TeamsChatsChatItemPath3.ItemLocation(),
+				LocationRef: "",
+				ItemInfo: details.ItemInfo{
+					TeamsChats: &details.TeamsChatsInfo{
+						ItemType:   details.TeamsChat,
+						ParentPath: "",
+						Modified:   Time4,
+						Chat: details.ChatInfo{
+							Topic: "item 3",
+						},
+					},
+				},
+			},
+		},
+	}
 )

 func GetDetailsSetForVersion(t *testing.T, wantedVersion int) *details.Details {
@@ -987,6 +1051,9 @@ func GetDetailsSetForVersion(t *testing.T, wantedVersion int) *details.Details {
 		path.SharePointService: {
 			path.LibrariesCategory,
 		},
+		path.TeamsChatsService: {
+			path.ChatsCategory,
+		},
 	}

 	for s, cats := range dataTypes {
@@ -1060,6 +1127,11 @@ func GetDeetsForVersion(
 		if cat == path.LibrariesCategory {
 			input = sharePointLibraryItemsByVersion
 		}
+
+	case path.TeamsChatsService:
+		if cat == path.ChatsCategory {
+			input = teamsChatsChatItemsByVersion
+		}
 	}

 	require.NotNil(