prep groups handler for addition of conversations (#4583)
Makes modifications to the groups handlers to facilitate the incoming introduction of conversations backup handling. Changes include: * new handler methods (canMakeDeltas and LocationRef) * binding handlers to generics * some naming normalization --- #### Does this PR need a docs update or release note? - [x] ⛔ No #### Type of change - [x] 🌻 Feature #### Issue(s) * #4536 #### Test Plan - [x] ⚡ Unit test - [x] 💚 E2E
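For readers skimming the diff, here is a rough sketch of what "binding handlers to generics" means in practice. Every name below is an illustrative assumption, simplified from the real `backupHandler`, `container`, and `CreateCollections` definitions further down in the diff; it is not the exact interface added by this PR:

```go
package sketch

import "context"

// container pairs a graph model (channel, conversation thread, etc.) with the
// storage-path folders and delta capability the collection pipeline needs.
// Field names here are hypothetical stand-ins for the real struct.
type container[C any] struct {
	storageDirFolders   []string
	canMakeDeltaQueries bool
	container           C
}

// backupHandler is parameterized over the container model C and the item model I,
// so channel backups and conversation backups can share one collection pipeline.
type backupHandler[C, I any] interface {
	getContainers(ctx context.Context) ([]container[C], error)
	getItem(ctx context.Context, containerIDs []string, itemID string) (I, error)
	canMakeDeltaQueries() bool
}

// createCollections depends only on the interface, never on the concrete service type.
func createCollections[C, I any](ctx context.Context, bh backupHandler[C, I]) error {
	containers, err := bh.getContainers(ctx)
	if err != nil {
		return err
	}

	for _, c := range containers {
		// enumerate item IDs per container, using deltas only when both the
		// handler and the individual container support them.
		_ = c.canMakeDeltaQueries && bh.canMakeDeltaQueries()
	}

	return nil
}
```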
This commit is contained in: parent d9c42f790c, commit 6307d8cbf6.
@@ -71,7 +71,7 @@ func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
		// Flags addition ordering should follow the order we want them to appear in help and docs:
		flags.AddGroupFlag(c)
		flags.AddDataFlag(c, []string{flags.DataLibraries, flags.DataMessages}, false)
		flags.AddDataFlag(c, []string{flags.DataLibraries, flags.DataMessages, flags.DataConversations}, false)
		flags.AddFetchParallelismFlag(c)
		flags.AddDisableDeltaFlag(c)
		flags.AddGenericBackupFlags(c)
@@ -121,7 +121,7 @@ func groupsCreateCmd() *cobra.Command {
	return &cobra.Command{
		Use:     groupsServiceCommand,
		Aliases: []string{teamsServiceCommand},
		Short:   "Backup M365 Group service data",
		Short:   "Backup M365 Groups & Teams service data",
		RunE:    createGroupsCmd,
		Args:    cobra.NoArgs,
	}
@@ -290,10 +290,16 @@ func validateGroupsBackupCreateFlags(groups, cats []string) error {
			flags.GroupFN + " *")
	}

	// TODO(keepers): release conversations support

	msg := fmt.Sprintf(
		" is an unrecognized data type; only %s and %s are supported",
		flags.DataLibraries, flags.DataMessages)

	// msg := fmt.Sprintf(
	// 	" is an unrecognized data type; only %s, %s and %s are supported",
	// 	flags.DataLibraries, flags.DataMessages, flags.DataConversations)

	allowedCats := utils.GroupsAllowedCategories()

	for _, d := range cats {
@ -28,11 +28,6 @@ import (
|
||||
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
|
||||
)
|
||||
|
||||
var (
|
||||
channelMessages = path.ChannelMessagesCategory
|
||||
libraries = path.LibrariesCategory
|
||||
)
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// tests that require no existing backups
|
||||
// ---------------------------------------------------------------------------
|
||||
@ -114,11 +109,15 @@ func (suite *BackupGroupsE2ESuite) SetupSuite() {
|
||||
}
|
||||
|
||||
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_channelMessages() {
|
||||
runGroupsBackupCategoryTest(suite, "messages")
|
||||
runGroupsBackupCategoryTest(suite, flags.DataMessages)
|
||||
}
|
||||
|
||||
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_conversations() {
|
||||
runGroupsBackupCategoryTest(suite, flags.DataConversations)
|
||||
}
|
||||
|
||||
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_libraries() {
|
||||
runGroupsBackupCategoryTest(suite, libraries.String())
|
||||
runGroupsBackupCategoryTest(suite, flags.DataLibraries)
|
||||
}
|
||||
|
||||
func runGroupsBackupCategoryTest(suite *BackupGroupsE2ESuite, category string) {
|
||||
@ -148,11 +147,15 @@ func runGroupsBackupCategoryTest(suite *BackupGroupsE2ESuite, category string) {
|
||||
}
|
||||
|
||||
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_groupNotFound_channelMessages() {
|
||||
runGroupsBackupGroupNotFoundTest(suite, "messages")
|
||||
runGroupsBackupGroupNotFoundTest(suite, flags.DataMessages)
|
||||
}
|
||||
|
||||
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_groupNotFound_conversations() {
|
||||
runGroupsBackupGroupNotFoundTest(suite, flags.DataConversations)
|
||||
}
|
||||
|
||||
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_groupNotFound_libraries() {
|
||||
runGroupsBackupGroupNotFoundTest(suite, libraries.String())
|
||||
runGroupsBackupGroupNotFoundTest(suite, flags.DataLibraries)
|
||||
}
|
||||
|
||||
func runGroupsBackupGroupNotFoundTest(suite *BackupGroupsE2ESuite, category string) {
|
||||
@ -293,19 +296,27 @@ func (suite *PreparedBackupGroupsE2ESuite) SetupSuite() {
|
||||
var (
|
||||
groups = []string{suite.its.group.ID}
|
||||
ins = idname.NewCache(map[string]string{suite.its.group.ID: suite.its.group.ID})
|
||||
cats = []path.CategoryType{
|
||||
path.ChannelMessagesCategory,
|
||||
path.ConversationPostsCategory,
|
||||
path.LibrariesCategory,
|
||||
}
|
||||
)
|
||||
|
||||
for _, set := range []path.CategoryType{channelMessages, libraries} {
|
||||
for _, set := range cats {
|
||||
var (
|
||||
sel = selectors.NewGroupsBackup(groups)
|
||||
scopes []selectors.GroupsScope
|
||||
)
|
||||
|
||||
switch set {
|
||||
case channelMessages:
|
||||
case path.ChannelMessagesCategory:
|
||||
scopes = selTD.GroupsBackupChannelScope(sel)
|
||||
|
||||
case libraries:
|
||||
case path.ConversationPostsCategory:
|
||||
scopes = selTD.GroupsBackupConversationScope(sel)
|
||||
|
||||
case path.LibrariesCategory:
|
||||
scopes = selTD.GroupsBackupLibraryFolderScope(sel)
|
||||
}
|
||||
|
||||
@ -334,11 +345,15 @@ func (suite *PreparedBackupGroupsE2ESuite) SetupSuite() {
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_channelMessages() {
|
||||
runGroupsListCmdTest(suite, channelMessages)
|
||||
runGroupsListCmdTest(suite, path.ChannelMessagesCategory)
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_conversations() {
|
||||
runGroupsListCmdTest(suite, path.ConversationPostsCategory)
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_libraries() {
|
||||
runGroupsListCmdTest(suite, libraries)
|
||||
runGroupsListCmdTest(suite, path.LibrariesCategory)
|
||||
}
|
||||
|
||||
func runGroupsListCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) {
|
||||
@ -369,11 +384,15 @@ func runGroupsListCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.Cat
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_singleID_channelMessages() {
|
||||
runGroupsListSingleCmdTest(suite, channelMessages)
|
||||
runGroupsListSingleCmdTest(suite, path.ChannelMessagesCategory)
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_singleID_conversations() {
|
||||
runGroupsListSingleCmdTest(suite, path.ConversationPostsCategory)
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_singleID_libraries() {
|
||||
runGroupsListSingleCmdTest(suite, libraries)
|
||||
runGroupsListSingleCmdTest(suite, path.LibrariesCategory)
|
||||
}
|
||||
|
||||
func runGroupsListSingleCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) {
|
||||
@ -429,11 +448,15 @@ func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_badID() {
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_channelMessages() {
|
||||
runGroupsDetailsCmdTest(suite, channelMessages)
|
||||
runGroupsDetailsCmdTest(suite, path.ChannelMessagesCategory)
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_conversations() {
|
||||
runGroupsDetailsCmdTest(suite, path.ConversationPostsCategory)
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_libraries() {
|
||||
runGroupsDetailsCmdTest(suite, libraries)
|
||||
runGroupsDetailsCmdTest(suite, path.LibrariesCategory)
|
||||
}
|
||||
|
||||
func runGroupsDetailsCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) {
|
||||
|
||||
@@ -107,8 +107,17 @@ func (suite *GroupsUnitSuite) TestValidateGroupsBackupCreateFlags() {
			expect: assert.NoError,
		},
		{
			name:   "all allowed",
			cats:   []string{flags.DataLibraries, flags.DataMessages},
			name:   "conversations",
			cats:   []string{flags.DataConversations},
			expect: assert.NoError,
		},
		{
			name: "all allowed",
			cats: []string{
				flags.DataLibraries,
				flags.DataMessages,
				flags.DataConversations,
			},
			expect: assert.NoError,
		},
		{
@@ -208,8 +217,11 @@ func (suite *GroupsUnitSuite) TestBackupDetailsFlags() {
				"--" + flags.BackupFN, flagsTD.BackupInput,
				"--" + flags.SkipReduceFN,
			},
			flagsTD.PreparedChannelFlags(),
			flagsTD.PreparedConversationFlags(),
			flagsTD.PreparedProviderFlags(),
			flagsTD.PreparedStorageFlags()))
			flagsTD.PreparedStorageFlags(),
			flagsTD.PreparedLibraryFlags()))

		co := utils.Control()

@@ -217,6 +229,9 @@ func (suite *GroupsUnitSuite) TestBackupDetailsFlags() {
		assert.True(t, co.SkipReduce)
		flagsTD.AssertProviderFlags(t, cmd)
		flagsTD.AssertStorageFlags(t, cmd)
		flagsTD.AssertChannelFlags(t, cmd)
		flagsTD.AssertConversationFlags(t, cmd)
		flagsTD.AssertLibraryFlags(t, cmd)
	}

func (suite *GroupsUnitSuite) TestBackupDeleteFlags() {
@@ -4,12 +4,17 @@ import (
	"github.com/spf13/cobra"
)

const DataMessages = "messages"
const (
	DataMessages      = "messages"
	DataConversations = "conversations"
)

const (
	ChannelFN = "channel"
	GroupFN   = "group"
	MessageFN = "message"
	ChannelFN      = "channel"
	ConversationFN = "conversation"
	GroupFN        = "group"
	MessageFN      = "message"
	PostFN         = "post"

	MessageCreatedAfterFN  = "message-created-after"
	MessageCreatedBeforeFN = "message-created-before"
@@ -18,9 +23,11 @@ (
)

var (
	ChannelFV []string
	GroupFV   []string
	MessageFV []string
	ChannelFV      []string
	ConversationFV []string
	GroupFV        []string
	MessageFV      []string
	PostFV         []string

	MessageCreatedAfterFV  string
	MessageCreatedBeforeFV string
@@ -60,14 +67,24 @@ func AddGroupDetailsAndRestoreFlags(cmd *cobra.Command) {
		&MessageLastReplyBeforeFV,
		MessageLastReplyBeforeFN, "",
		"Select messages with replies before this datetime.")

	fs.StringSliceVar(
		&ConversationFV,
		ConversationFN, nil,
		"Select data within a Group's Conversation.")

	fs.StringSliceVar(
		&PostFV,
		PostFN, nil,
		"Select Conversation Posts by reference.")
}

// AddGroupFlag adds the --group flag, which accepts id or name values.
// TODO: need to decide what the appropriate "name" to accept here is.
// keepers thinks it's either DisplayName or MailNickname or Mail.
// Mail is most accurate, MailNickname is accurate and shorter, but the end user
// may not see either one visibly.
// https://learn.microsoft.com/en-us/graph/api/group-list?view=graph-rest-1.0&tabs=http
// AddGroupFlag adds the --group flag, which accepts either the id,
// the display name, or the mailbox address as its values. Users are
// expected to supply the display name. The ID is supported because, well,
// IDs. The mailbox address is supported as a lookup fallback for certain
// SDK cases, therefore it's also supported here, though that support
// isn't exposed to end users.
func AddGroupFlag(cmd *cobra.Command) {
	cmd.Flags().StringSliceVar(
		&GroupFV,
src/cli/flags/testdata/flags.go (vendored, 10 changes)
@@ -22,10 +22,20 @@ var (
	SharepointCategoryDataInput = []string{"files", "lists", "pages"}
	GroupsCategoryDataInput     = []string{"files", "lists", "pages", "messages"}

	ChannelInput                = []string{"channel1", "channel2"}
	MessageInput                = []string{"message1", "message2"}
	MessageCreatedAfterInput    = "messageCreatedAfter"
	MessageCreatedBeforeInput   = "messageCreatedBefore"
	MessageLastReplyAfterInput  = "messageLastReplyAfter"
	MessageLastReplyBeforeInput = "messageLastReplyBefore"

	ContactInput     = []string{"contact1", "contact2"}
	ContactFldInput  = []string{"contactFld1", "contactFld2"}
	ContactNameInput = "contactName"

	ConversationInput = []string{"conversation1", "conversation2"}
	PostInput         = []string{"post1", "post2"}

	EmailInput              = []string{"mail1", "mail2"}
	EmailFldInput           = []string{"mailFld1", "mailFld2"}
	EmailReceivedAfterInput = "mailReceivedAfter"
src/cli/flags/testdata/groups.go (vendored, new file, 42 lines)
@@ -0,0 +1,42 @@
package testdata

import (
	"testing"

	"github.com/spf13/cobra"
	"github.com/stretchr/testify/assert"

	"github.com/alcionai/corso/src/cli/flags"
)

func PreparedChannelFlags() []string {
	return []string{
		"--" + flags.ChannelFN, FlgInputs(ChannelInput),
		"--" + flags.MessageFN, FlgInputs(MessageInput),
		"--" + flags.MessageCreatedAfterFN, MessageCreatedAfterInput,
		"--" + flags.MessageCreatedBeforeFN, MessageCreatedBeforeInput,
		"--" + flags.MessageLastReplyAfterFN, MessageLastReplyAfterInput,
		"--" + flags.MessageLastReplyBeforeFN, MessageLastReplyBeforeInput,
	}
}

func AssertChannelFlags(t *testing.T, cmd *cobra.Command) {
	assert.ElementsMatch(t, ChannelInput, flags.ChannelFV)
	assert.ElementsMatch(t, MessageInput, flags.MessageFV)
	assert.Equal(t, MessageCreatedAfterInput, flags.MessageCreatedAfterFV)
	assert.Equal(t, MessageCreatedBeforeInput, flags.MessageCreatedBeforeFV)
	assert.Equal(t, MessageLastReplyAfterInput, flags.MessageLastReplyAfterFV)
	assert.Equal(t, MessageLastReplyBeforeInput, flags.MessageLastReplyBeforeFV)
}

func PreparedConversationFlags() []string {
	return []string{
		"--" + flags.ConversationFN, FlgInputs(ConversationInput),
		"--" + flags.PostFN, FlgInputs(PostInput),
	}
}

func AssertConversationFlags(t *testing.T, cmd *cobra.Command) {
	assert.Equal(t, ConversationInput, flags.ConversationFV)
	assert.Equal(t, PostInput, flags.PostFV)
}
src/cli/flags/testdata/sharepoint.go (vendored, new file, 32 lines)
@@ -0,0 +1,32 @@
package testdata

import (
	"testing"

	"github.com/spf13/cobra"
	"github.com/stretchr/testify/assert"

	"github.com/alcionai/corso/src/cli/flags"
)

func PreparedLibraryFlags() []string {
	return []string{
		"--" + flags.LibraryFN, LibraryInput,
		"--" + flags.FolderFN, FlgInputs(FolderPathInput),
		"--" + flags.FileFN, FlgInputs(FileNameInput),
		"--" + flags.FileCreatedAfterFN, FileCreatedAfterInput,
		"--" + flags.FileCreatedBeforeFN, FileCreatedBeforeInput,
		"--" + flags.FileModifiedAfterFN, FileModifiedAfterInput,
		"--" + flags.FileModifiedBeforeFN, FileModifiedBeforeInput,
	}
}

func AssertLibraryFlags(t *testing.T, cmd *cobra.Command) {
	assert.Equal(t, LibraryInput, flags.LibraryFV)
	assert.Equal(t, FolderPathInput, flags.FolderPathFV)
	assert.Equal(t, FileNameInput, flags.FileNameFV)
	assert.Equal(t, FileCreatedAfterInput, flags.FileCreatedAfterFV)
	assert.Equal(t, FileCreatedBeforeInput, flags.FileCreatedBeforeFV)
	assert.Equal(t, FileModifiedAfterInput, flags.FileModifiedAfterFV)
	assert.Equal(t, FileModifiedBeforeInput, flags.FileModifiedBeforeFV)
}
@@ -11,9 +11,11 @@ import (
)

type GroupsOpts struct {
	Groups   []string
	Channels []string
	Messages []string
	Groups        []string
	Channels      []string
	Messages      []string
	Conversations []string
	Posts         []string

	MessageCreatedAfter  string
	MessageCreatedBefore string
@@ -44,8 +46,9 @@ type GroupsOpts struct {

func GroupsAllowedCategories() map[string]struct{} {
	return map[string]struct{}{
		flags.DataLibraries: {},
		flags.DataMessages:  {},
		flags.DataLibraries:     {},
		flags.DataMessages:      {},
		flags.DataConversations: {},
	}
}

@@ -60,6 +63,8 @@ func AddGroupsCategories(sel *selectors.GroupsBackup, cats []string) *selectors.
			sel.Include(sel.LibraryFolders(selectors.Any()))
		case flags.DataMessages:
			sel.Include(sel.ChannelMessages(selectors.Any(), selectors.Any()))
		case flags.DataConversations:
			sel.Include(sel.ConversationPosts(selectors.Any(), selectors.Any()))
		}
	}

@@ -68,11 +73,13 @@ func AddGroupsCategories(sel *selectors.GroupsBackup, cats []string) *selectors.

func MakeGroupsOpts(cmd *cobra.Command) GroupsOpts {
	return GroupsOpts{
		Groups:   flags.GroupFV,
		Channels: flags.ChannelFV,
		Messages: flags.MessageFV,
		WebURL:   flags.WebURLFV,
		SiteID:   flags.SiteIDFV,
		Groups:        flags.GroupFV,
		Channels:      flags.ChannelFV,
		Messages:      flags.MessageFV,
		Conversations: flags.ConversationFV,
		Posts:         flags.PostFV,
		WebURL:        flags.WebURLFV,
		SiteID:        flags.SiteIDFV,

		Library:  flags.LibraryFV,
		FileName: flags.FileNameFV,
@@ -172,29 +179,36 @@ func AddGroupsFilter(
// inclusions for Group commands.
func IncludeGroupsRestoreDataSelectors(ctx context.Context, opts GroupsOpts) *selectors.GroupsRestore {
	var (
		groups      = opts.Groups
		lfp, lfn    = len(opts.FolderPath), len(opts.FileName)
		llf, lli    = len(opts.ListFolder), len(opts.ListItem)
		lpf, lpi    = len(opts.PageFolder), len(opts.Page)
		lg, lch, lm = len(opts.Groups), len(opts.Channels), len(opts.Messages)
		groups                 = opts.Groups
		folderPaths, fileNames = len(opts.FolderPath), len(opts.FileName)
		listFolders, listItems = len(opts.ListFolder), len(opts.ListItem)
		pageFolders, pageItems = len(opts.PageFolder), len(opts.Page)
		chans, chanMsgs        = len(opts.Channels), len(opts.Messages)
		convs, convPosts       = len(opts.Conversations), len(opts.Posts)
	)

	if lg == 0 {
	if len(opts.Groups) == 0 {
		groups = selectors.Any()
	}

	sel := selectors.NewGroupsRestore(groups)

	if lfp+lfn+llf+lli+lpf+lpi+lch+lm == 0 {
	if folderPaths+fileNames+
		listFolders+listItems+
		pageFolders+pageItems+
		chans+chanMsgs+
		convs+convPosts == 0 {
		sel.Include(sel.AllData())
		return sel
	}

	// sharepoint site selectors

	if lfp+lfn+llf+lli+lpf+lpi > 0 {
		if lfp+lfn > 0 {
			if lfn == 0 {
	if folderPaths+fileNames+
		listFolders+listItems+
		pageFolders+pageItems > 0 {
		if folderPaths+fileNames > 0 {
			if fileNames == 0 {
				opts.FileName = selectors.Any()
			}

@@ -210,8 +224,8 @@ func IncludeGroupsRestoreDataSelectors(ctx context.Context, opts GroupsOpts) *se
			}
		}

		if llf+lli > 0 {
			if lli == 0 {
		if listFolders+listItems > 0 {
			if listItems == 0 {
				opts.ListItem = selectors.Any()
			}

@@ -227,8 +241,8 @@ func IncludeGroupsRestoreDataSelectors(ctx context.Context, opts GroupsOpts) *se
			}
		}

		if lpf+lpi > 0 {
			if lpi == 0 {
		if pageFolders+pageItems > 0 {
			if pageItems == 0 {
				opts.Page = selectors.Any()
			}

@@ -247,21 +261,38 @@ func IncludeGroupsRestoreDataSelectors(ctx context.Context, opts GroupsOpts) *se

	// channel and message selectors

	if lch+lm > 0 {
	if chans+chanMsgs > 0 {
		// if no channel is specified, include all channels
		if lch == 0 {
		if chans == 0 {
			opts.Channels = selectors.Any()
		}

		// if no message is specified, only select channels
		// otherwise, look for channel/message pairs
		if lm == 0 {
		if chanMsgs == 0 {
			sel.Include(sel.Channels(opts.Channels))
		} else {
			sel.Include(sel.ChannelMessages(opts.Channels, opts.Messages))
		}
	}

	// conversation and post selectors

	if convs+convPosts > 0 {
		// if no conversation is specified, include all conversations
		if convs == 0 {
			opts.Conversations = selectors.Any()
		}

		// if no post is specified, only select conversations;
		// otherwise, look for conversation/post pairs
		if convPosts == 0 {
			sel.Include(sel.Conversation(opts.Conversations))
		} else {
			sel.Include(sel.ConversationPosts(opts.Conversations, opts.Posts))
		}
	}

	return sel
}
@ -40,30 +40,34 @@ func (suite *GroupsUtilsSuite) TestIncludeGroupsRestoreDataSelectors() {
|
||||
}{
|
||||
// resource
|
||||
{
|
||||
name: "no inputs",
|
||||
opts: utils.GroupsOpts{},
|
||||
expectIncludeLen: 3,
|
||||
name: "no inputs",
|
||||
opts: utils.GroupsOpts{},
|
||||
// TODO: bump to 3 when we release conversations
|
||||
expectIncludeLen: 2,
|
||||
},
|
||||
{
|
||||
name: "empty",
|
||||
opts: utils.GroupsOpts{
|
||||
Groups: empty,
|
||||
},
|
||||
expectIncludeLen: 3,
|
||||
// TODO: bump to 3 when we release conversations
|
||||
expectIncludeLen: 2,
|
||||
},
|
||||
{
|
||||
name: "single inputs",
|
||||
opts: utils.GroupsOpts{
|
||||
Groups: single,
|
||||
},
|
||||
expectIncludeLen: 3,
|
||||
// TODO: bump to 3 when we release conversations
|
||||
expectIncludeLen: 2,
|
||||
},
|
||||
{
|
||||
name: "multi inputs",
|
||||
opts: utils.GroupsOpts{
|
||||
Groups: multi,
|
||||
},
|
||||
expectIncludeLen: 3,
|
||||
// TODO: bump to 3 when we release conversations
|
||||
expectIncludeLen: 2,
|
||||
},
|
||||
// sharepoint
|
||||
{
|
||||
@ -114,22 +118,6 @@ func (suite *GroupsUtilsSuite) TestIncludeGroupsRestoreDataSelectors() {
|
||||
},
|
||||
expectIncludeLen: 2,
|
||||
},
|
||||
{
|
||||
name: "library folder suffixes",
|
||||
opts: utils.GroupsOpts{
|
||||
FileName: empty,
|
||||
FolderPath: empty,
|
||||
},
|
||||
expectIncludeLen: 3,
|
||||
},
|
||||
{
|
||||
name: "library folder suffixes and contains",
|
||||
opts: utils.GroupsOpts{
|
||||
FileName: empty,
|
||||
FolderPath: empty,
|
||||
},
|
||||
expectIncludeLen: 3,
|
||||
},
|
||||
{
|
||||
name: "Page Folder",
|
||||
opts: utils.GroupsOpts{
|
||||
@ -203,6 +191,50 @@ func (suite *GroupsUtilsSuite) TestIncludeGroupsRestoreDataSelectors() {
|
||||
},
|
||||
expectIncludeLen: 1,
|
||||
},
|
||||
// conversations
|
||||
{
|
||||
name: "multiple conversations multiple posts",
|
||||
opts: utils.GroupsOpts{
|
||||
Groups: single,
|
||||
Conversations: multi,
|
||||
Posts: multi,
|
||||
},
|
||||
expectIncludeLen: 1,
|
||||
},
|
||||
{
|
||||
name: "single conversations multiple post",
|
||||
opts: utils.GroupsOpts{
|
||||
Groups: single,
|
||||
Conversations: single,
|
||||
Posts: multi,
|
||||
},
|
||||
expectIncludeLen: 1,
|
||||
},
|
||||
{
|
||||
name: "single conversations and post",
|
||||
opts: utils.GroupsOpts{
|
||||
Groups: single,
|
||||
Conversations: single,
|
||||
Posts: single,
|
||||
},
|
||||
expectIncludeLen: 1,
|
||||
},
|
||||
{
|
||||
name: "multiple conversations only",
|
||||
opts: utils.GroupsOpts{
|
||||
Groups: single,
|
||||
Conversations: multi,
|
||||
},
|
||||
expectIncludeLen: 1,
|
||||
},
|
||||
{
|
||||
name: "single conversations only",
|
||||
opts: utils.GroupsOpts{
|
||||
Groups: single,
|
||||
Conversations: single,
|
||||
},
|
||||
expectIncludeLen: 1,
|
||||
},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
@ -389,7 +421,7 @@ func (suite *GroupsUtilsSuite) TestAddGroupsCategories() {
|
||||
{
|
||||
name: "none",
|
||||
cats: []string{},
|
||||
expectScopeLen: 3,
|
||||
expectScopeLen: 2,
|
||||
},
|
||||
{
|
||||
name: "libraries",
|
||||
@ -402,8 +434,18 @@ func (suite *GroupsUtilsSuite) TestAddGroupsCategories() {
|
||||
expectScopeLen: 1,
|
||||
},
|
||||
{
|
||||
name: "all allowed",
|
||||
cats: []string{flags.DataLibraries, flags.DataMessages},
|
||||
name: "conversations",
|
||||
cats: []string{flags.DataConversations},
|
||||
expectScopeLen: 1,
|
||||
},
|
||||
{
|
||||
name: "all allowed",
|
||||
cats: []string{
|
||||
flags.DataLibraries,
|
||||
flags.DataMessages,
|
||||
// flags.DataConversations,
|
||||
},
|
||||
// TODO: bump to 3 when we include conversations in all data
|
||||
expectScopeLen: 2,
|
||||
},
|
||||
{
|
||||
|
||||
@ -4,7 +4,6 @@ import (
|
||||
"context"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/models"
|
||||
"golang.org/x/exp/maps"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/common/pii"
|
||||
@ -32,18 +31,16 @@ import (
|
||||
// it's simpler to comment them for tracking than to delete
|
||||
// and re-discover them later.
|
||||
|
||||
func CreateCollections(
|
||||
func CreateCollections[C graph.GetIDer, I groupsItemer](
|
||||
ctx context.Context,
|
||||
bpc inject.BackupProducerConfig,
|
||||
bh backupHandler,
|
||||
bh backupHandler[C, I],
|
||||
tenantID string,
|
||||
scope selectors.GroupsScope,
|
||||
su support.StatusUpdater,
|
||||
counter *count.Bus,
|
||||
errs *fault.Bus,
|
||||
) ([]data.BackupCollection, bool, error) {
|
||||
ctx = clues.Add(ctx, "category", scope.Category().PathType())
|
||||
|
||||
var (
|
||||
allCollections = make([]data.BackupCollection, 0)
|
||||
category = scope.Category().PathType()
|
||||
@ -61,19 +58,23 @@ func CreateCollections(
|
||||
|
||||
ctx = clues.Add(ctx, "can_use_previous_backup", canUsePreviousBackup)
|
||||
|
||||
channels, err := bh.getContainers(ctx)
|
||||
cc := api.CallConfig{
|
||||
CanMakeDeltaQueries: bh.canMakeDeltaQueries(),
|
||||
}
|
||||
|
||||
containers, err := bh.getContainers(ctx, cc)
|
||||
if err != nil {
|
||||
return nil, false, clues.Stack(err)
|
||||
}
|
||||
|
||||
counter.Add(count.Channels, int64(len(channels)))
|
||||
counter.Add(count.Channels, int64(len(containers)))
|
||||
|
||||
collections, err := populateCollections(
|
||||
ctx,
|
||||
qp,
|
||||
bh,
|
||||
su,
|
||||
channels,
|
||||
containers,
|
||||
scope,
|
||||
cdps[scope.Category().PathType()],
|
||||
bpc.Options,
|
||||
@ -90,12 +91,12 @@ func CreateCollections(
|
||||
return allCollections, canUsePreviousBackup, nil
|
||||
}
|
||||
|
||||
func populateCollections(
|
||||
func populateCollections[C graph.GetIDer, I groupsItemer](
|
||||
ctx context.Context,
|
||||
qp graph.QueryParams,
|
||||
bh backupHandler,
|
||||
bh backupHandler[C, I],
|
||||
statusUpdater support.StatusUpdater,
|
||||
channels []models.Channelable,
|
||||
containers []container[C],
|
||||
scope selectors.GroupsScope,
|
||||
dps metadata.DeltaPaths,
|
||||
ctrlOpts control.Options,
|
||||
@ -117,23 +118,22 @@ func populateCollections(
|
||||
|
||||
logger.Ctx(ctx).Infow("filling collections", "len_deltapaths", len(dps))
|
||||
|
||||
for _, c := range channels {
|
||||
for _, c := range containers {
|
||||
if el.Failure() != nil {
|
||||
return nil, el.Failure()
|
||||
}
|
||||
|
||||
var (
|
||||
cl = counter.Local()
|
||||
cID = ptr.Val(c.GetId())
|
||||
cName = ptr.Val(c.GetDisplayName())
|
||||
cID = ptr.Val(c.container.GetId())
|
||||
err error
|
||||
dp = dps[cID]
|
||||
dp = dps[c.storageDirFolders.String()]
|
||||
prevDelta = dp.Delta
|
||||
prevPathStr = dp.Path // do not log: pii; log prevPath instead
|
||||
prevPath path.Path
|
||||
ictx = clues.Add(
|
||||
ctx,
|
||||
"channel_id", cID,
|
||||
"collection_path", c,
|
||||
"previous_delta", pii.SafeURL{
|
||||
URL: prevDelta,
|
||||
SafePathElems: graph.SafeURLPathParams,
|
||||
@ -146,7 +146,7 @@ func populateCollections(
|
||||
delete(tombstones, cID)
|
||||
|
||||
// Only create a collection if the path matches the scope.
|
||||
if !bh.includeContainer(ictx, qp, c, scope) {
|
||||
if !bh.includeContainer(c.container, scope) {
|
||||
cl.Inc(count.SkippedContainers)
|
||||
continue
|
||||
}
|
||||
@ -165,10 +165,10 @@ func populateCollections(
|
||||
// if the channel has no email property, it is unable to process delta tokens
|
||||
// and will return an error if a delta token is queried.
|
||||
cc := api.CallConfig{
|
||||
CanMakeDeltaQueries: len(ptr.Val(c.GetEmail())) > 0,
|
||||
CanMakeDeltaQueries: bh.canMakeDeltaQueries() && c.canMakeDeltaQueries,
|
||||
}
|
||||
|
||||
addAndRem, err := bh.getContainerItemIDs(ctx, cID, prevDelta, cc)
|
||||
addAndRem, err := bh.getContainerItemIDs(ctx, c.storageDirFolders, prevDelta, cc)
|
||||
if err != nil {
|
||||
el.AddRecoverable(ctx, clues.Stack(err))
|
||||
continue
|
||||
@ -181,12 +181,12 @@ func populateCollections(
|
||||
cl.Add(count.ItemsRemoved, int64(len(removed)))
|
||||
|
||||
if len(addAndRem.DU.URL) > 0 {
|
||||
deltaURLs[cID] = addAndRem.DU.URL
|
||||
deltaURLs[c.storageDirFolders.String()] = addAndRem.DU.URL
|
||||
} else if !addAndRem.DU.Reset {
|
||||
logger.Ctx(ictx).Info("missing delta url")
|
||||
}
|
||||
|
||||
currPath, err := bh.canonicalPath(path.Builder{}.Append(cID), qp.TenantID)
|
||||
currPath, err := bh.canonicalPath(c.storageDirFolders, qp.TenantID)
|
||||
if err != nil {
|
||||
err = clues.StackWC(ctx, err).Label(count.BadCollPath)
|
||||
el.AddRecoverable(ctx, err)
|
||||
@ -205,7 +205,7 @@ func populateCollections(
|
||||
data.NewBaseCollection(
|
||||
currPath,
|
||||
prevPath,
|
||||
path.Builder{}.Append(cName),
|
||||
c.humanLocation.Builder(),
|
||||
ctrlOpts,
|
||||
addAndRem.DU.Reset,
|
||||
cl),
|
||||
@ -215,11 +215,11 @@ func populateCollections(
|
||||
removed,
|
||||
statusUpdater)
|
||||
|
||||
collections[cID] = &edc
|
||||
collections[c.storageDirFolders.String()] = &edc
|
||||
|
||||
// add the current path for the container ID to be used in the next backup
|
||||
// as the "previous path", for reference in case of a rename or relocation.
|
||||
currPaths[cID] = currPath.String()
|
||||
currPaths[c.storageDirFolders.String()] = currPath.String()
|
||||
}
|
||||
|
||||
// A tombstone is a channel that needs to be marked for deletion.
|
||||
|
||||
@ -37,11 +37,10 @@ import (
|
||||
// mocks
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
var _ backupHandler = &mockBackupHandler{}
|
||||
var _ backupHandler[models.Channelable, models.ChatMessageable] = &mockBackupHandler{}
|
||||
|
||||
type mockBackupHandler struct {
|
||||
channels []models.Channelable
|
||||
channelsErr error
|
||||
messageIDs []string
|
||||
deletedMsgIDs []string
|
||||
messagesErr error
|
||||
@ -51,13 +50,32 @@ type mockBackupHandler struct {
|
||||
doNotInclude bool
|
||||
}
|
||||
|
||||
func (bh mockBackupHandler) getContainers(context.Context) ([]models.Channelable, error) {
|
||||
return bh.channels, bh.channelsErr
|
||||
func (bh mockBackupHandler) canMakeDeltaQueries() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (bh mockBackupHandler) containers() []container[models.Channelable] {
|
||||
containers := make([]container[models.Channelable], 0, len(bh.channels))
|
||||
|
||||
for _, ch := range bh.channels {
|
||||
containers = append(containers, channelContainer(ch))
|
||||
}
|
||||
|
||||
return containers
|
||||
}
|
||||
|
||||
//lint:ignore U1000 required for interface compliance
|
||||
func (bh mockBackupHandler) getContainers(
|
||||
context.Context,
|
||||
api.CallConfig,
|
||||
) ([]container[models.Channelable], error) {
|
||||
return bh.containers(), nil
|
||||
}
|
||||
|
||||
func (bh mockBackupHandler) getContainerItemIDs(
|
||||
_ context.Context,
|
||||
_, _ string,
|
||||
_ path.Elements,
|
||||
_ string,
|
||||
_ api.CallConfig,
|
||||
) (pagers.AddedAndRemoved, error) {
|
||||
idRes := make(map[string]time.Time, len(bh.messageIDs))
|
||||
@ -76,9 +94,8 @@ func (bh mockBackupHandler) getContainerItemIDs(
|
||||
return aar, bh.messagesErr
|
||||
}
|
||||
|
||||
//lint:ignore U1000 required for interface compliance
|
||||
func (bh mockBackupHandler) includeContainer(
|
||||
context.Context,
|
||||
graph.QueryParams,
|
||||
models.Channelable,
|
||||
selectors.GroupsScope,
|
||||
) bool {
|
||||
@ -86,10 +103,11 @@ func (bh mockBackupHandler) includeContainer(
|
||||
}
|
||||
|
||||
func (bh mockBackupHandler) canonicalPath(
|
||||
folders *path.Builder,
|
||||
storageDirFolders path.Elements,
|
||||
tenantID string,
|
||||
) (path.Path, error) {
|
||||
return folders.
|
||||
return storageDirFolders.
|
||||
Builder().
|
||||
ToDataLayerPath(
|
||||
tenantID,
|
||||
"protectedResource",
|
||||
@ -98,9 +116,11 @@ func (bh mockBackupHandler) canonicalPath(
|
||||
false)
|
||||
}
|
||||
|
||||
func (bh mockBackupHandler) GetItemByID(
|
||||
func (bh mockBackupHandler) GetItem(
|
||||
_ context.Context,
|
||||
_, _, itemID string,
|
||||
_ string,
|
||||
_ path.Elements,
|
||||
itemID string,
|
||||
) (models.ChatMessageable, *details.GroupsInfo, error) {
|
||||
return bh.messages[itemID], bh.info[itemID], bh.getMessageErr[itemID]
|
||||
}
|
||||
@ -242,7 +262,7 @@ func (suite *BackupUnitSuite) TestPopulateCollections() {
|
||||
qp,
|
||||
test.mock,
|
||||
statusUpdater,
|
||||
test.mock.channels,
|
||||
test.mock.containers(),
|
||||
selectors.NewGroupsBackup(nil).Channels(selectors.Any())[0],
|
||||
nil,
|
||||
ctrlOpts,
|
||||
@ -402,7 +422,7 @@ func (suite *BackupUnitSuite) TestPopulateCollections_incremental() {
|
||||
qp,
|
||||
test.mock,
|
||||
statusUpdater,
|
||||
test.mock.channels,
|
||||
test.mock.containers(),
|
||||
allScope,
|
||||
test.deltaPaths,
|
||||
ctrlOpts,
|
||||
|
||||
@ -3,6 +3,7 @@ package groups
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/models"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||
@ -10,11 +11,10 @@ import (
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
|
||||
)
|
||||
|
||||
var _ backupHandler = &channelsBackupHandler{}
|
||||
var _ backupHandler[models.Channelable, models.ChatMessageable] = &channelsBackupHandler{}
|
||||
|
||||
type channelsBackupHandler struct {
|
||||
ac api.Channels
|
||||
@ -31,23 +31,41 @@ func NewChannelBackupHandler(
|
||||
}
|
||||
}
|
||||
|
||||
func (bh channelsBackupHandler) canMakeDeltaQueries() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
//lint:ignore U1000 required for interface compliance
|
||||
func (bh channelsBackupHandler) getContainers(
|
||||
ctx context.Context,
|
||||
) ([]models.Channelable, error) {
|
||||
return bh.ac.GetChannels(ctx, bh.protectedResource)
|
||||
_ api.CallConfig,
|
||||
) ([]container[models.Channelable], error) {
|
||||
chans, err := bh.ac.GetChannels(ctx, bh.protectedResource)
|
||||
results := make([]container[models.Channelable], 0, len(chans))
|
||||
|
||||
for _, ch := range chans {
|
||||
results = append(results, channelContainer(ch))
|
||||
}
|
||||
|
||||
return results, clues.Stack(err).OrNil()
|
||||
}
|
||||
|
||||
func (bh channelsBackupHandler) getContainerItemIDs(
|
||||
ctx context.Context,
|
||||
channelID, prevDelta string,
|
||||
containerPath path.Elements,
|
||||
prevDelta string,
|
||||
cc api.CallConfig,
|
||||
) (pagers.AddedAndRemoved, error) {
|
||||
return bh.ac.GetChannelMessageIDs(ctx, bh.protectedResource, channelID, prevDelta, cc)
|
||||
return bh.ac.GetChannelMessageIDs(
|
||||
ctx,
|
||||
bh.protectedResource,
|
||||
containerPath[0],
|
||||
prevDelta,
|
||||
cc)
|
||||
}
|
||||
|
||||
//lint:ignore U1000 required for interface compliance
|
||||
func (bh channelsBackupHandler) includeContainer(
|
||||
ctx context.Context,
|
||||
qp graph.QueryParams,
|
||||
ch models.Channelable,
|
||||
scope selectors.GroupsScope,
|
||||
) bool {
|
||||
@ -55,10 +73,11 @@ func (bh channelsBackupHandler) includeContainer(
|
||||
}
|
||||
|
||||
func (bh channelsBackupHandler) canonicalPath(
|
||||
folders *path.Builder,
|
||||
storageDirFolders path.Elements,
|
||||
tenantID string,
|
||||
) (path.Path, error) {
|
||||
return folders.
|
||||
return storageDirFolders.
|
||||
Builder().
|
||||
ToDataLayerPath(
|
||||
tenantID,
|
||||
bh.protectedResource,
|
||||
@ -76,9 +95,20 @@ func (bh channelsBackupHandler) PathPrefix(tenantID string) (path.Path, error) {
|
||||
false)
|
||||
}
|
||||
|
||||
func (bh channelsBackupHandler) GetItemByID(
|
||||
func (bh channelsBackupHandler) GetItem(
|
||||
ctx context.Context,
|
||||
groupID, channelID, itemID string,
|
||||
groupID string,
|
||||
containerIDs path.Elements,
|
||||
messageID string,
|
||||
) (models.ChatMessageable, *details.GroupsInfo, error) {
|
||||
return bh.ac.GetChannelMessage(ctx, groupID, channelID, itemID)
|
||||
return bh.ac.GetChannelMessage(ctx, groupID, containerIDs[0], messageID)
|
||||
}
|
||||
|
||||
func channelContainer(ch models.Channelable) container[models.Channelable] {
|
||||
return container[models.Channelable]{
|
||||
storageDirFolders: path.Elements{ptr.Val(ch.GetId())},
|
||||
humanLocation: path.Elements{ptr.Val(ch.GetDisplayName())},
|
||||
canMakeDeltaQueries: len(ptr.Val(ch.GetEmail())) > 0,
|
||||
container: ch,
|
||||
}
|
||||
}
|
||||
|
||||
@ -19,14 +19,14 @@ import (
|
||||
"github.com/alcionai/corso/src/pkg/logger"
|
||||
)
|
||||
|
||||
var _ data.BackupCollection = &Collection{}
|
||||
var _ data.BackupCollection = &Collection[groupsItemer]{}
|
||||
|
||||
const (
|
||||
collectionChannelBufferSize = 1000
|
||||
numberOfRetries = 4
|
||||
)
|
||||
|
||||
type Collection struct {
|
||||
type Collection[I groupsItemer] struct {
|
||||
data.BaseCollection
|
||||
protectedResource string
|
||||
stream chan data.Item
|
||||
@ -36,7 +36,7 @@ type Collection struct {
|
||||
// removed is a list of item IDs that were deleted from, or moved out, of a container
|
||||
removed map[string]struct{}
|
||||
|
||||
getter getItemByIDer
|
||||
getter getItemer[I]
|
||||
|
||||
statusUpdater support.StatusUpdater
|
||||
}
|
||||
@ -47,15 +47,15 @@ type Collection struct {
|
||||
// to be deleted. If the prev path is nil, it is assumed newly created.
|
||||
// If both are populated, then state is either moved (if they differ),
|
||||
// or notMoved (if they match).
|
||||
func NewCollection(
|
||||
func NewCollection[I groupsItemer](
|
||||
baseCol data.BaseCollection,
|
||||
getter getItemByIDer,
|
||||
getter getItemer[I],
|
||||
protectedResource string,
|
||||
added map[string]struct{},
|
||||
removed map[string]struct{},
|
||||
statusUpdater support.StatusUpdater,
|
||||
) Collection {
|
||||
collection := Collection{
|
||||
) Collection[I] {
|
||||
collection := Collection[I]{
|
||||
BaseCollection: baseCol,
|
||||
added: added,
|
||||
getter: getter,
|
||||
@ -70,7 +70,7 @@ func NewCollection(
|
||||
|
||||
// Items utility function to asynchronously execute process to fill data channel with
|
||||
// M365 exchange objects and returns the data channel
|
||||
func (col *Collection) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item {
|
||||
func (col *Collection[I]) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item {
|
||||
go col.streamItems(ctx, errs)
|
||||
return col.stream
|
||||
}
|
||||
@ -79,7 +79,7 @@ func (col *Collection) Items(ctx context.Context, errs *fault.Bus) <-chan data.I
|
||||
// items() production
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
func (col *Collection) streamItems(ctx context.Context, errs *fault.Bus) {
|
||||
func (col *Collection[I]) streamItems(ctx context.Context, errs *fault.Bus) {
|
||||
var (
|
||||
streamedItems int64
|
||||
totalBytes int64
|
||||
@ -145,13 +145,10 @@ func (col *Collection) streamItems(ctx context.Context, errs *fault.Bus) {
|
||||
writer := kjson.NewJsonSerializationWriter()
|
||||
defer writer.Close()
|
||||
|
||||
flds := col.FullPath().Folders()
|
||||
parentFolderID := flds[len(flds)-1]
|
||||
|
||||
item, info, err := col.getter.GetItemByID(
|
||||
item, info, err := col.getter.GetItem(
|
||||
ctx,
|
||||
col.protectedResource,
|
||||
parentFolderID,
|
||||
col.FullPath().Folders(),
|
||||
id)
|
||||
if err != nil {
|
||||
err = clues.Wrap(err, "getting channel message data").Label(fault.LabelForceNoBackupCreation)
|
||||
@ -210,7 +207,7 @@ func (col *Collection) streamItems(ctx context.Context, errs *fault.Bus) {
|
||||
|
||||
// finishPopulation is a utility function used to close a Collection's data channel
|
||||
// and to send the status update through the channel.
|
||||
func (col *Collection) finishPopulation(
|
||||
func (col *Collection[I]) finishPopulation(
|
||||
ctx context.Context,
|
||||
streamedItems, totalBytes int64,
|
||||
err error,
|
||||
|
||||
@ -7,6 +7,7 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/models"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
@ -116,7 +117,7 @@ func (suite *CollectionUnitSuite) TestNewCollection_state() {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
|
||||
c := NewCollection(
|
||||
c := NewCollection[models.ChatMessageable](
|
||||
data.NewBaseCollection(
|
||||
test.curr,
|
||||
test.prev,
|
||||
@ -198,7 +199,7 @@ func (suite *CollectionUnitSuite) TestCollection_streamItems() {
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
col := &Collection{
|
||||
col := &Collection[models.ChatMessageable]{
|
||||
BaseCollection: data.NewBaseCollection(
|
||||
fullPath,
|
||||
nil,
|
||||
|
||||
src/internal/m365/collection/groups/conversation_handler.go (new file, 145 lines)
@@ -0,0 +1,145 @@
|
||||
package groups
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/models"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||
"github.com/alcionai/corso/src/pkg/backup/details"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
|
||||
)
|
||||
|
||||
var _ backupHandler[models.Conversationable, models.Postable] = &conversationsBackupHandler{}
|
||||
|
||||
type conversationsBackupHandler struct {
|
||||
ac api.Conversations
|
||||
protectedResource string
|
||||
}
|
||||
|
||||
func NewConversationBackupHandler(
|
||||
protectedResource string,
|
||||
ac api.Conversations,
|
||||
) conversationsBackupHandler {
|
||||
return conversationsBackupHandler{
|
||||
ac: ac,
|
||||
protectedResource: protectedResource,
|
||||
}
|
||||
}
|
||||
|
||||
func (bh conversationsBackupHandler) canMakeDeltaQueries() bool {
|
||||
// not supported for conversations
|
||||
return false
|
||||
}
|
||||
|
||||
//lint:ignore U1000 required for interface compliance
|
||||
func (bh conversationsBackupHandler) getContainers(
|
||||
ctx context.Context,
|
||||
cc api.CallConfig,
|
||||
) ([]container[models.Conversationable], error) {
|
||||
convs, err := bh.ac.GetConversations(ctx, bh.protectedResource, cc)
|
||||
if err != nil {
|
||||
return nil, clues.Wrap(err, "getting conversations")
|
||||
}
|
||||
|
||||
results := []container[models.Conversationable]{}
|
||||
|
||||
for _, conv := range convs {
|
||||
ictx := clues.Add(ctx, "conversation_id", ptr.Val(conv.GetId()))
|
||||
|
||||
threads, err := bh.ac.GetConversationThreads(
|
||||
ictx,
|
||||
bh.protectedResource,
|
||||
ptr.Val(conv.GetId()),
|
||||
cc)
|
||||
if err != nil {
|
||||
return nil, clues.Wrap(err, "getting threads in conversation")
|
||||
}
|
||||
|
||||
for _, thread := range threads {
|
||||
results = append(results, conversationThreadContainer(conv, thread))
|
||||
}
|
||||
}
|
||||
|
||||
return results, nil
|
||||
}
|
||||
|
||||
func (bh conversationsBackupHandler) getContainerItemIDs(
|
||||
ctx context.Context,
|
||||
containerPath path.Elements,
|
||||
_ string,
|
||||
cc api.CallConfig,
|
||||
) (pagers.AddedAndRemoved, error) {
|
||||
return bh.ac.GetConversationThreadPostIDs(
|
||||
ctx,
|
||||
bh.protectedResource,
|
||||
containerPath[0],
|
||||
containerPath[1],
|
||||
cc)
|
||||
}
|
||||
|
||||
//lint:ignore U1000 required for interface compliance
|
||||
func (bh conversationsBackupHandler) includeContainer(
|
||||
conv models.Conversationable,
|
||||
scope selectors.GroupsScope,
|
||||
) bool {
|
||||
return scope.Matches(selectors.GroupsConversation, ptr.Val(conv.GetTopic()))
|
||||
}
|
||||
|
||||
func (bh conversationsBackupHandler) canonicalPath(
|
||||
storageDirFolders path.Elements,
|
||||
tenantID string,
|
||||
) (path.Path, error) {
|
||||
return storageDirFolders.
|
||||
Builder().
|
||||
ToDataLayerPath(
|
||||
tenantID,
|
||||
bh.protectedResource,
|
||||
path.GroupsService,
|
||||
path.ConversationPostsCategory,
|
||||
false)
|
||||
}
|
||||
|
||||
func (bh conversationsBackupHandler) PathPrefix(tenantID string) (path.Path, error) {
|
||||
return path.Build(
|
||||
tenantID,
|
||||
bh.protectedResource,
|
||||
path.GroupsService,
|
||||
path.ConversationPostsCategory,
|
||||
false)
|
||||
}
|
||||
|
||||
func (bh conversationsBackupHandler) GetItem(
|
||||
ctx context.Context,
|
||||
groupID string,
|
||||
containerIDs path.Elements, // expects: [conversationID, threadID]
|
||||
postID string,
|
||||
) (models.Postable, *details.GroupsInfo, error) {
|
||||
return bh.ac.GetConversationPost(
|
||||
ctx,
|
||||
groupID,
|
||||
containerIDs[0],
|
||||
containerIDs[1],
|
||||
postID,
|
||||
api.CallConfig{})
|
||||
}
|
||||
|
||||
func conversationThreadContainer(
|
||||
c models.Conversationable,
|
||||
t models.ConversationThreadable,
|
||||
) container[models.Conversationable] {
|
||||
return container[models.Conversationable]{
|
||||
storageDirFolders: path.Elements{ptr.Val(c.GetId()), ptr.Val(t.GetId())},
|
||||
// microsoft UX doesn't display any sort of container name that would make a reasonable
|
||||
// "location" for the posts in the conversation. We may need to revisit this, perhaps
|
||||
// the subject is sufficiently acceptable. But at this time it's left empty so that
|
||||
// we don't populate it with problematic data.
|
||||
humanLocation: path.Elements{},
|
||||
canMakeDeltaQueries: false,
|
||||
container: c,
|
||||
}
|
||||
}
|
||||
@@ -3,7 +3,7 @@ package groups
import (
	"context"

	"github.com/microsoftgraph/msgraph-sdk-go/models"
	"github.com/microsoft/kiota-abstractions-go/serialization"

	"github.com/alcionai/corso/src/pkg/backup/details"
	"github.com/alcionai/corso/src/pkg/path"
@@ -13,41 +13,81 @@ import (
	"github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
)

type backupHandler interface {
	getItemByIDer
// itemer standardizes common behavior that can be expected from all
// items within a groups collection backup.
type groupsItemer interface {
	serialization.Parsable
	graph.GetIDer
	graph.GetLastModifiedDateTimer
}

// gets all containers for the resource
type backupHandler[C graph.GetIDer, I groupsItemer] interface {
	getItemer[I]
	getContainerser[C]
	getContainerItemIDser
	includeContainerer[C]
	canonicalPather
	canMakeDeltaQuerieser
}

type getItemer[I groupsItemer] interface {
	GetItem(
		ctx context.Context,
		protectedResource string,
		containerIDs path.Elements,
		itemID string,
	) (I, *details.GroupsInfo, error)
}

// gets all containers for the resource
type getContainerser[C graph.GetIDer] interface {
	getContainers(
		ctx context.Context,
	) ([]models.Channelable, error)
		cc api.CallConfig,
	) ([]container[C], error)
}

// gets all item IDs (by delta, if possible) in the container
type getContainerItemIDser interface {
	getContainerItemIDs(
		ctx context.Context,
		containerID, prevDelta string,
		containerPath path.Elements,
		prevDelta string,
		cc api.CallConfig,
	) (pagers.AddedAndRemoved, error)
}

// includeContainer evaluates whether the container is included
// in the provided scope.
type includeContainerer[C graph.GetIDer] interface {
	includeContainer(
		ctx context.Context,
		qp graph.QueryParams,
		ch models.Channelable,
		c C,
		scope selectors.GroupsScope,
	) bool
}

// canonicalPath constructs the service and category specific path for
// the given builder.
type canonicalPather interface {
	canonicalPath(
		folders *path.Builder,
		storageDir path.Elements,
		tenantID string,
	) (path.Path, error)
}

type getItemByIDer interface {
	GetItemByID(
		ctx context.Context,
		resourceID, containerID, itemID string,
	) (models.ChatMessageable, *details.GroupsInfo, error)
// canMakeDeltaQueries evaluates whether the handler can support a
// delta query when enumerating its items.
type canMakeDeltaQuerieser interface {
	canMakeDeltaQueries() bool
}

// ---------------------------------------------------------------------------
// Container management
// ---------------------------------------------------------------------------

type container[C graph.GetIDer] struct {
	storageDirFolders   path.Elements
	humanLocation       path.Elements
	canMakeDeltaQueries bool
	container           C
}
@@ -7,18 +7,21 @@ import (

	"github.com/alcionai/corso/src/internal/common/ptr"
	"github.com/alcionai/corso/src/pkg/backup/details"
	"github.com/alcionai/corso/src/pkg/path"
)

type GetChannelMessage struct {
	Err error
}

func (m GetChannelMessage) GetItemByID(
	ctx context.Context,
	groupID, channelID, messageID string,
func (m GetChannelMessage) GetItem(
	_ context.Context,
	_ string,
	_ path.Elements,
	itemID string,
) (models.ChatMessageable, *details.GroupsInfo, error) {
	msg := models.NewChatMessage()
	msg.SetId(ptr.To(messageID))
	msg.SetId(ptr.To(itemID))

	return msg, &details.GroupsInfo{}, m.Err
}
@ -66,8 +66,6 @@ func ProduceBackupCollections(
|
||||
return nil, nil, clues.WrapWC(ctx, err, "getting group")
|
||||
}
|
||||
|
||||
isTeam := api.IsTeam(ctx, group)
|
||||
|
||||
for _, scope := range b.Scopes() {
|
||||
if el.Failure() != nil {
|
||||
break
|
||||
@ -75,12 +73,16 @@ func ProduceBackupCollections(
|
||||
|
||||
cl := counter.Local()
|
||||
ictx := clues.AddLabelCounter(ctx, cl.PlainAdder())
|
||||
ictx = clues.Add(ictx, "category", scope.Category().PathType())
|
||||
|
||||
var dbcs []data.BackupCollection
|
||||
|
||||
switch scope.Category().PathType() {
|
||||
case path.LibrariesCategory:
|
||||
sites, err := ac.Groups().GetAllSites(ictx, bpc.ProtectedResource.ID(), errs)
|
||||
sites, err := ac.Groups().GetAllSites(
|
||||
ictx,
|
||||
bpc.ProtectedResource.ID(),
|
||||
errs)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
@ -159,7 +161,7 @@ func ProduceBackupCollections(
|
||||
}
|
||||
progressBar := observe.MessageWithCompletion(ictx, pcfg, scope.Category().PathType().HumanString())
|
||||
|
||||
if !isTeam {
|
||||
if !api.IsTeam(ictx, group) {
|
||||
continue
|
||||
}
|
||||
|
||||
@ -190,6 +192,45 @@ func ProduceBackupCollections(
|
||||
|
||||
dbcs = append(dbcs, cs...)
|
||||
|
||||
close(progressBar)
|
||||
case path.ConversationPostsCategory:
|
||||
var (
|
||||
bh = groups.NewConversationBackupHandler(bpc.ProtectedResource.ID(), ac.Conversations())
|
||||
cs []data.BackupCollection
|
||||
err error
|
||||
)
|
||||
|
||||
pcfg := observe.ProgressCfg{
|
||||
Indent: 1,
|
||||
CompletionMessage: func() string { return fmt.Sprintf("(found %d conversations)", len(cs)) },
|
||||
}
|
||||
progressBar := observe.MessageWithCompletion(ictx, pcfg, scope.Category().PathType().HumanString())
|
||||
|
||||
cs, canUsePreviousBackup, err := groups.CreateCollections(
|
||||
ictx,
|
||||
bpc,
|
||||
bh,
|
||||
creds.AzureTenantID,
|
||||
scope,
|
||||
su,
|
||||
counter,
|
||||
errs)
|
||||
if err != nil {
|
||||
el.AddRecoverable(ictx, err)
|
||||
continue
|
||||
}
|
||||
|
||||
if !canUsePreviousBackup {
|
||||
tp, err := bh.PathPrefix(creds.AzureTenantID)
|
||||
if err != nil {
|
||||
return nil, nil, clues.Wrap(err, "getting conversations path")
|
||||
}
|
||||
|
||||
dbcs = append(dbcs, data.NewTombstoneCollection(tp, control.Options{}, counter))
|
||||
}
|
||||
|
||||
dbcs = append(dbcs, cs...)
|
||||
|
||||
close(progressBar)
|
||||
}
|
||||
|
||||
@ -236,6 +277,10 @@ func ProduceBackupCollections(
|
||||
return collections, ssmb.ToReader(), el.Failure()
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// metadata
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
func getSitesMetadataCollection(
|
||||
tenantID, groupID string,
|
||||
sites map[string]string,
|
||||
|
||||
@ -4,6 +4,7 @@ import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
evmock "github.com/alcionai/corso/src/internal/events/mock"
|
||||
@ -79,6 +80,115 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_incrementalGroups() {
|
||||
true)
|
||||
}
|
||||
|
||||
func (suite *GroupsBackupIntgSuite) TestBackup_Run_groups9VersionBumpBackup() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
var (
|
||||
mb = evmock.NewBus()
|
||||
sel = selectors.NewGroupsBackup([]string{suite.its.group.ID})
|
||||
opts = control.DefaultOptions()
|
||||
whatSet = deeTD.CategoryFromRepoRef
|
||||
)
|
||||
|
||||
sel.Include(
|
||||
selTD.GroupsBackupLibraryFolderScope(sel),
|
||||
selTD.GroupsBackupChannelScope(sel),
|
||||
sel.Conversation(selectors.Any()))
|
||||
|
||||
bo, bod := prepNewTestBackupOp(
|
||||
t,
|
||||
ctx,
|
||||
mb,
|
||||
sel.Selector,
|
||||
opts,
|
||||
version.All8MigrateUserPNToID,
|
||||
count.New())
|
||||
defer bod.close(t, ctx)
|
||||
|
||||
runAndCheckBackup(t, ctx, &bo, mb, false)
|
||||
checkBackupIsInManifests(
|
||||
t,
|
||||
ctx,
|
||||
bod.kw,
|
||||
bod.sw,
|
||||
&bo,
|
||||
bod.sel,
|
||||
bod.sel.ID(),
|
||||
path.ChannelMessagesCategory)
|
||||
|
||||
_, expectDeets := deeTD.GetDeetsInBackup(
|
||||
t,
|
||||
ctx,
|
||||
bo.Results.BackupID,
|
||||
bod.acct.ID(),
|
||||
bod.sel.ID(),
|
||||
path.GroupsService,
|
||||
whatSet,
|
||||
bod.kms,
|
||||
bod.sss)
|
||||
deeTD.CheckBackupDetails(
|
||||
t,
|
||||
ctx,
|
||||
bo.Results.BackupID,
|
||||
whatSet,
|
||||
bod.kms,
|
||||
bod.sss,
|
||||
expectDeets,
|
||||
false)
|
||||
|
||||
mb = evmock.NewBus()
|
||||
forcedFull := newTestBackupOp(
|
||||
t,
|
||||
ctx,
|
||||
bod,
|
||||
mb,
|
||||
opts,
|
||||
count.New())
|
||||
forcedFull.BackupVersion = version.Groups9Update
|
||||
|
||||
runAndCheckBackup(t, ctx, &forcedFull, mb, false)
|
||||
checkBackupIsInManifests(
|
||||
t,
|
||||
ctx,
|
||||
bod.kw,
|
||||
bod.sw,
|
||||
&forcedFull,
|
||||
bod.sel,
|
||||
bod.sel.ID(),
|
||||
path.ChannelMessagesCategory)
|
||||
|
||||
_, expectDeets = deeTD.GetDeetsInBackup(
|
||||
t,
|
||||
ctx,
|
||||
forcedFull.Results.BackupID,
|
||||
bod.acct.ID(),
|
||||
bod.sel.ID(),
|
||||
path.GroupsService,
|
||||
whatSet,
|
||||
bod.kms,
|
||||
bod.sss)
|
||||
deeTD.CheckBackupDetails(
|
||||
t,
|
||||
ctx,
|
||||
forcedFull.Results.BackupID,
|
||||
whatSet,
|
||||
bod.kms,
|
||||
bod.sss,
|
||||
expectDeets,
|
||||
false)
|
||||
|
||||
// The number of items backed up in the forced full backup should be roughly
|
||||
// the same as the number of items in the original backup.
|
||||
assert.Equal(
|
||||
t,
|
||||
bo.Results.Counts[string(count.PersistedNonCachedFiles)],
|
||||
forcedFull.Results.Counts[string(count.PersistedNonCachedFiles)],
|
||||
"items written")
|
||||
}
|
||||
|
||||
func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsBasic() {
|
||||
t := suite.T()
|
||||
|
||||
@ -95,7 +205,8 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsBasic() {
|
||||
|
||||
sel.Include(
|
||||
selTD.GroupsBackupLibraryFolderScope(sel),
|
||||
selTD.GroupsBackupChannelScope(sel))
|
||||
selTD.GroupsBackupChannelScope(sel),
|
||||
sel.Conversation(selectors.Any()))
|
||||
|
||||
bo, bod := prepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter)
|
||||
defer bod.close(t, ctx)
|
||||
|
||||
@@ -62,9 +62,10 @@ func (b *Builder) addFolderEntries(
		b.knownFolders = map[string]Entry{}
	}

	// Need a unique location because we want to have separate folders for
	// different drives and categories even if there's duplicate folder names in
	// them.
	// Unique location ensures that the location reference includes all
	// possible hierarchy. In many handlers, the location ref is only partially
	// constructed (ex: drive locations do not contain the drive ID). This
	// transformer ensures that the location is complete and fully populated.
	uniqueLoc, err := entry.uniqueLocation(locationRef)
	if err != nil {
		return clues.Wrap(err, "getting LocationIDer")
@@ -145,6 +145,8 @@ func (i *GroupsInfo) uniqueLocation(baseLoc *path.Builder) (*uniqueLoc, error) {
		loc, err = NewGroupsLocationIDer(path.LibrariesCategory, i.DriveID, baseLoc.Elements()...)
	case GroupsChannelMessage:
		loc, err = NewGroupsLocationIDer(path.ChannelMessagesCategory, "", baseLoc.Elements()...)
	case GroupsConversationPost:
		loc, err = NewGroupsLocationIDer(path.ConversationPostsCategory, "", baseLoc.Elements()...)
	}

	return &loc, err
@@ -156,7 +158,7 @@ func (i *GroupsInfo) updateFolder(f *FolderInfo) error {
	switch i.ItemType {
	case SharePointLibrary:
		return updateFolderWithinDrive(SharePointLibrary, i.DriveName, i.DriveID, f)
	case GroupsChannelMessage:
	case GroupsChannelMessage, GroupsConversationPost:
		return nil
	}

@@ -65,6 +65,11 @@ func NewElements(p string) Elements {
	return Split(p)
}

// Builder produces a *Builder{} containing the elements.
func (el Elements) Builder() *Builder {
	return Builder{}.Append(el...)
}

// Conceal produces a concealed representation of the elements, suitable for
// logging, storing in errors, and other output.
func (el Elements) Conceal() string {
@@ -217,8 +217,9 @@ func (s *groups) AllData() []GroupsScope {
	scopes = append(
		scopes,
		makeScope[GroupsScope](GroupsLibraryFolder, Any()),
		makeScope[GroupsScope](GroupsChannel, Any()),
		makeScope[GroupsScope](GroupsConversation, Any()))
		makeScope[GroupsScope](GroupsChannel, Any()))
	// TODO: enable conversations in all-data backups
	// makeScope[GroupsScope](GroupsConversation, Any()))

	return scopes
}
@@ -245,8 +245,9 @@ func (suite *GroupsSelectorSuite) TestGroupsRestore_Reduce() {
			},
			expect: arr(
				libItem, libItem2, libItem3,
				chanItem, chanItem2, chanItem3,
				convItem, convItem2, convItem3),
				chanItem, chanItem2, chanItem3),
			// TODO: re-add when we release conversations
			// convItem, convItem2, convItem3),
		},
		{
			name: "only match library item",
src/pkg/selectors/testdata/groups.go (vendored, 7 changes)
@@ -17,3 +17,10 @@ func GroupsBackupLibraryFolderScope(sel *selectors.GroupsBackup) []selectors.Gro
func GroupsBackupChannelScope(sel *selectors.GroupsBackup) []selectors.GroupsScope {
	return sel.Channels([]string{TestChannelName})
}

// GroupsBackupConversationScope is the standard folder scope that should be used
// in integration backups with groups when interacting with conversations.
func GroupsBackupConversationScope(sel *selectors.GroupsBackup) []selectors.GroupsScope {
	// there's no way to easily specify a test conversation by name.
	return sel.Conversation(selectors.Any())
}