prep groups handler for addition of conversations (#4583)

Makes modifications to the groups handlers to facilitate the incoming introduction of conversations backup handling. Changes include:
* new handler methods (canMakeDeltas and LocationRef)
* binding handlers to generics
* some naming normalization

---

#### Does this PR need a docs update or release note?

- [x]  No

#### Type of change

- [x] 🌻 Feature

#### Issue(s)

* #4536

#### Test Plan

- [x]  Unit test
- [x] 💚 E2E
This commit is contained in:
Keepers 2023-11-30 17:40:37 -07:00 committed by GitHub
parent d9c42f790c
commit 6307d8cbf6
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
25 changed files with 819 additions and 192 deletions

View File

@ -71,7 +71,7 @@ func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
// Flags addition ordering should follow the order we want them to appear in help and docs: // Flags addition ordering should follow the order we want them to appear in help and docs:
flags.AddGroupFlag(c) flags.AddGroupFlag(c)
flags.AddDataFlag(c, []string{flags.DataLibraries, flags.DataMessages}, false) flags.AddDataFlag(c, []string{flags.DataLibraries, flags.DataMessages, flags.DataConversations}, false)
flags.AddFetchParallelismFlag(c) flags.AddFetchParallelismFlag(c)
flags.AddDisableDeltaFlag(c) flags.AddDisableDeltaFlag(c)
flags.AddGenericBackupFlags(c) flags.AddGenericBackupFlags(c)
@ -121,7 +121,7 @@ func groupsCreateCmd() *cobra.Command {
return &cobra.Command{ return &cobra.Command{
Use: groupsServiceCommand, Use: groupsServiceCommand,
Aliases: []string{teamsServiceCommand}, Aliases: []string{teamsServiceCommand},
Short: "Backup M365 Group service data", Short: "Backup M365 Groups & Teams service data",
RunE: createGroupsCmd, RunE: createGroupsCmd,
Args: cobra.NoArgs, Args: cobra.NoArgs,
} }
@ -290,10 +290,16 @@ func validateGroupsBackupCreateFlags(groups, cats []string) error {
flags.GroupFN + " *") flags.GroupFN + " *")
} }
// TODO(keepers): release conversations support
msg := fmt.Sprintf( msg := fmt.Sprintf(
" is an unrecognized data type; only %s and %s are supported", " is an unrecognized data type; only %s and %s are supported",
flags.DataLibraries, flags.DataMessages) flags.DataLibraries, flags.DataMessages)
// msg := fmt.Sprintf(
// " is an unrecognized data type; only %s, %s and %s are supported",
// flags.DataLibraries, flags.DataMessages, flags.DataConversations)
allowedCats := utils.GroupsAllowedCategories() allowedCats := utils.GroupsAllowedCategories()
for _, d := range cats { for _, d := range cats {

View File

@ -28,11 +28,6 @@ import (
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata" storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
) )
var (
channelMessages = path.ChannelMessagesCategory
libraries = path.LibrariesCategory
)
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// tests that require no existing backups // tests that require no existing backups
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -114,11 +109,15 @@ func (suite *BackupGroupsE2ESuite) SetupSuite() {
} }
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_channelMessages() { func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_channelMessages() {
runGroupsBackupCategoryTest(suite, "messages") runGroupsBackupCategoryTest(suite, flags.DataMessages)
}
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_conversations() {
runGroupsBackupCategoryTest(suite, flags.DataConversations)
} }
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_libraries() { func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_libraries() {
runGroupsBackupCategoryTest(suite, libraries.String()) runGroupsBackupCategoryTest(suite, flags.DataLibraries)
} }
func runGroupsBackupCategoryTest(suite *BackupGroupsE2ESuite, category string) { func runGroupsBackupCategoryTest(suite *BackupGroupsE2ESuite, category string) {
@ -148,11 +147,15 @@ func runGroupsBackupCategoryTest(suite *BackupGroupsE2ESuite, category string) {
} }
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_groupNotFound_channelMessages() { func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_groupNotFound_channelMessages() {
runGroupsBackupGroupNotFoundTest(suite, "messages") runGroupsBackupGroupNotFoundTest(suite, flags.DataMessages)
}
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_groupNotFound_conversations() {
runGroupsBackupGroupNotFoundTest(suite, flags.DataConversations)
} }
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_groupNotFound_libraries() { func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_groupNotFound_libraries() {
runGroupsBackupGroupNotFoundTest(suite, libraries.String()) runGroupsBackupGroupNotFoundTest(suite, flags.DataLibraries)
} }
func runGroupsBackupGroupNotFoundTest(suite *BackupGroupsE2ESuite, category string) { func runGroupsBackupGroupNotFoundTest(suite *BackupGroupsE2ESuite, category string) {
@ -293,19 +296,27 @@ func (suite *PreparedBackupGroupsE2ESuite) SetupSuite() {
var ( var (
groups = []string{suite.its.group.ID} groups = []string{suite.its.group.ID}
ins = idname.NewCache(map[string]string{suite.its.group.ID: suite.its.group.ID}) ins = idname.NewCache(map[string]string{suite.its.group.ID: suite.its.group.ID})
cats = []path.CategoryType{
path.ChannelMessagesCategory,
path.ConversationPostsCategory,
path.LibrariesCategory,
}
) )
for _, set := range []path.CategoryType{channelMessages, libraries} { for _, set := range cats {
var ( var (
sel = selectors.NewGroupsBackup(groups) sel = selectors.NewGroupsBackup(groups)
scopes []selectors.GroupsScope scopes []selectors.GroupsScope
) )
switch set { switch set {
case channelMessages: case path.ChannelMessagesCategory:
scopes = selTD.GroupsBackupChannelScope(sel) scopes = selTD.GroupsBackupChannelScope(sel)
case libraries: case path.ConversationPostsCategory:
scopes = selTD.GroupsBackupConversationScope(sel)
case path.LibrariesCategory:
scopes = selTD.GroupsBackupLibraryFolderScope(sel) scopes = selTD.GroupsBackupLibraryFolderScope(sel)
} }
@ -334,11 +345,15 @@ func (suite *PreparedBackupGroupsE2ESuite) SetupSuite() {
} }
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_channelMessages() { func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_channelMessages() {
runGroupsListCmdTest(suite, channelMessages) runGroupsListCmdTest(suite, path.ChannelMessagesCategory)
}
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_conversations() {
runGroupsListCmdTest(suite, path.ConversationPostsCategory)
} }
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_libraries() { func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_libraries() {
runGroupsListCmdTest(suite, libraries) runGroupsListCmdTest(suite, path.LibrariesCategory)
} }
func runGroupsListCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) { func runGroupsListCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) {
@ -369,11 +384,15 @@ func runGroupsListCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.Cat
} }
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_singleID_channelMessages() { func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_singleID_channelMessages() {
runGroupsListSingleCmdTest(suite, channelMessages) runGroupsListSingleCmdTest(suite, path.ChannelMessagesCategory)
}
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_singleID_conversations() {
runGroupsListSingleCmdTest(suite, path.ConversationPostsCategory)
} }
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_singleID_libraries() { func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_singleID_libraries() {
runGroupsListSingleCmdTest(suite, libraries) runGroupsListSingleCmdTest(suite, path.LibrariesCategory)
} }
func runGroupsListSingleCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) { func runGroupsListSingleCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) {
@ -429,11 +448,15 @@ func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_badID() {
} }
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_channelMessages() { func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_channelMessages() {
runGroupsDetailsCmdTest(suite, channelMessages) runGroupsDetailsCmdTest(suite, path.ChannelMessagesCategory)
}
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_conversations() {
runGroupsDetailsCmdTest(suite, path.ConversationPostsCategory)
} }
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_libraries() { func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_libraries() {
runGroupsDetailsCmdTest(suite, libraries) runGroupsDetailsCmdTest(suite, path.LibrariesCategory)
} }
func runGroupsDetailsCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) { func runGroupsDetailsCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) {

View File

@ -106,9 +106,18 @@ func (suite *GroupsUnitSuite) TestValidateGroupsBackupCreateFlags() {
cats: []string{flags.DataMessages}, cats: []string{flags.DataMessages},
expect: assert.NoError, expect: assert.NoError,
}, },
{
name: "conversations",
cats: []string{flags.DataConversations},
expect: assert.NoError,
},
{ {
name: "all allowed", name: "all allowed",
cats: []string{flags.DataLibraries, flags.DataMessages}, cats: []string{
flags.DataLibraries,
flags.DataMessages,
flags.DataConversations,
},
expect: assert.NoError, expect: assert.NoError,
}, },
{ {
@ -208,8 +217,11 @@ func (suite *GroupsUnitSuite) TestBackupDetailsFlags() {
"--" + flags.BackupFN, flagsTD.BackupInput, "--" + flags.BackupFN, flagsTD.BackupInput,
"--" + flags.SkipReduceFN, "--" + flags.SkipReduceFN,
}, },
flagsTD.PreparedChannelFlags(),
flagsTD.PreparedConversationFlags(),
flagsTD.PreparedProviderFlags(), flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags())) flagsTD.PreparedStorageFlags(),
flagsTD.PreparedLibraryFlags()))
co := utils.Control() co := utils.Control()
@ -217,6 +229,9 @@ func (suite *GroupsUnitSuite) TestBackupDetailsFlags() {
assert.True(t, co.SkipReduce) assert.True(t, co.SkipReduce)
flagsTD.AssertProviderFlags(t, cmd) flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd) flagsTD.AssertStorageFlags(t, cmd)
flagsTD.AssertChannelFlags(t, cmd)
flagsTD.AssertConversationFlags(t, cmd)
flagsTD.AssertLibraryFlags(t, cmd)
} }
func (suite *GroupsUnitSuite) TestBackupDeleteFlags() { func (suite *GroupsUnitSuite) TestBackupDeleteFlags() {

View File

@ -4,12 +4,17 @@ import (
"github.com/spf13/cobra" "github.com/spf13/cobra"
) )
const DataMessages = "messages" const (
DataMessages = "messages"
DataConversations = "conversations"
)
const ( const (
ChannelFN = "channel" ChannelFN = "channel"
ConversationFN = "conversation"
GroupFN = "group" GroupFN = "group"
MessageFN = "message" MessageFN = "message"
PostFN = "post"
MessageCreatedAfterFN = "message-created-after" MessageCreatedAfterFN = "message-created-after"
MessageCreatedBeforeFN = "message-created-before" MessageCreatedBeforeFN = "message-created-before"
@ -19,8 +24,10 @@ const (
var ( var (
ChannelFV []string ChannelFV []string
ConversationFV []string
GroupFV []string GroupFV []string
MessageFV []string MessageFV []string
PostFV []string
MessageCreatedAfterFV string MessageCreatedAfterFV string
MessageCreatedBeforeFV string MessageCreatedBeforeFV string
@ -60,14 +67,24 @@ func AddGroupDetailsAndRestoreFlags(cmd *cobra.Command) {
&MessageLastReplyBeforeFV, &MessageLastReplyBeforeFV,
MessageLastReplyBeforeFN, "", MessageLastReplyBeforeFN, "",
"Select messages with replies before this datetime.") "Select messages with replies before this datetime.")
fs.StringSliceVar(
&ConversationFV,
ConversationFN, nil,
"Select data within a Group's Conversation.")
fs.StringSliceVar(
&PostFV,
PostFN, nil,
"Select Conversation Posts by reference.")
} }
// AddGroupFlag adds the --group flag, which accepts id or name values. // AddGroupFlag adds the --group flag, which accepts either the id,
// TODO: need to decide what the appropriate "name" to accept here is. // the display name, or the mailbox address as its values. Users are
// keepers thinks its either DisplayName or MailNickname or Mail // expected to supply the display name. The ID is supported because, well,
// Mail is most accurate, MailNickame is accurate and shorter, but the end user // IDs. The mailbox address is supported as a lookup fallback for certain
// may not see either one visibly. // SDK cases, therefore it's also supported here, though that support
// https://learn.microsoft.com/en-us/graph/api/group-list?view=graph-rest-1.0&tabs=http // isn't exposed to end users.
func AddGroupFlag(cmd *cobra.Command) { func AddGroupFlag(cmd *cobra.Command) {
cmd.Flags().StringSliceVar( cmd.Flags().StringSliceVar(
&GroupFV, &GroupFV,

View File

@ -22,10 +22,20 @@ var (
SharepointCategoryDataInput = []string{"files", "lists", "pages"} SharepointCategoryDataInput = []string{"files", "lists", "pages"}
GroupsCategoryDataInput = []string{"files", "lists", "pages", "messages"} GroupsCategoryDataInput = []string{"files", "lists", "pages", "messages"}
ChannelInput = []string{"channel1", "channel2"}
MessageInput = []string{"message1", "message2"}
MessageCreatedAfterInput = "messageCreatedAfter"
MessageCreatedBeforeInput = "messageCreatedBefore"
MessageLastReplyAfterInput = "messageLastReplyAfter"
MessageLastReplyBeforeInput = "messageLastReplyBefore"
ContactInput = []string{"contact1", "contact2"} ContactInput = []string{"contact1", "contact2"}
ContactFldInput = []string{"contactFld1", "contactFld2"} ContactFldInput = []string{"contactFld1", "contactFld2"}
ContactNameInput = "contactName" ContactNameInput = "contactName"
ConversationInput = []string{"conversation1", "conversation2"}
PostInput = []string{"post1", "post2"}
EmailInput = []string{"mail1", "mail2"} EmailInput = []string{"mail1", "mail2"}
EmailFldInput = []string{"mailFld1", "mailFld2"} EmailFldInput = []string{"mailFld1", "mailFld2"}
EmailReceivedAfterInput = "mailReceivedAfter" EmailReceivedAfterInput = "mailReceivedAfter"

42
src/cli/flags/testdata/groups.go vendored Normal file
View File

@ -0,0 +1,42 @@
package testdata
import (
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/alcionai/corso/src/cli/flags"
)
// PreparedChannelFlags returns the standard set of channel- and
// message-scoped CLI flag arguments, populated with the shared test
// inputs, ready to be appended to a command invocation.
func PreparedChannelFlags() []string {
	args := make([]string, 0, 12)

	args = append(args, "--"+flags.ChannelFN, FlgInputs(ChannelInput))
	args = append(args, "--"+flags.MessageFN, FlgInputs(MessageInput))
	args = append(args, "--"+flags.MessageCreatedAfterFN, MessageCreatedAfterInput)
	args = append(args, "--"+flags.MessageCreatedBeforeFN, MessageCreatedBeforeInput)
	args = append(args, "--"+flags.MessageLastReplyAfterFN, MessageLastReplyAfterInput)
	args = append(args, "--"+flags.MessageLastReplyBeforeFN, MessageLastReplyBeforeInput)

	return args
}
// AssertChannelFlags checks that the channel- and message-scoped flag
// values parsed from the command line match the prepared test inputs.
func AssertChannelFlags(t *testing.T, cmd *cobra.Command) {
	// slice-valued flags are compared without regard to ordering.
	assert.ElementsMatch(t, ChannelInput, flags.ChannelFV)
	assert.ElementsMatch(t, MessageInput, flags.MessageFV)

	// datetime filter flags must match exactly.
	timeFilters := []struct{ want, got string }{
		{MessageCreatedAfterInput, flags.MessageCreatedAfterFV},
		{MessageCreatedBeforeInput, flags.MessageCreatedBeforeFV},
		{MessageLastReplyAfterInput, flags.MessageLastReplyAfterFV},
		{MessageLastReplyBeforeInput, flags.MessageLastReplyBeforeFV},
	}

	for _, tf := range timeFilters {
		assert.Equal(t, tf.want, tf.got)
	}
}
// PreparedConversationFlags returns the standard set of conversation-
// and post-scoped CLI flag arguments, populated with the shared test
// inputs.
func PreparedConversationFlags() []string {
	args := make([]string, 0, 4)

	args = append(args, "--"+flags.ConversationFN, FlgInputs(ConversationInput))
	args = append(args, "--"+flags.PostFN, FlgInputs(PostInput))

	return args
}
// AssertConversationFlags checks that the conversation- and post-scoped
// flag values parsed from the command line match the prepared test
// inputs.
func AssertConversationFlags(t *testing.T, cmd *cobra.Command) {
	// ElementsMatch for parity with AssertChannelFlags: slice-valued
	// flags are compared without regard to ordering.
	assert.ElementsMatch(t, ConversationInput, flags.ConversationFV)
	assert.ElementsMatch(t, PostInput, flags.PostFV)
}

32
src/cli/flags/testdata/sharepoint.go vendored Normal file
View File

@ -0,0 +1,32 @@
package testdata
import (
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/alcionai/corso/src/cli/flags"
)
// PreparedLibraryFlags returns the standard set of library-, folder-,
// and file-scoped CLI flag arguments, populated with the shared test
// inputs.
func PreparedLibraryFlags() []string {
	args := make([]string, 0, 14)

	args = append(args, "--"+flags.LibraryFN, LibraryInput)
	args = append(args, "--"+flags.FolderFN, FlgInputs(FolderPathInput))
	args = append(args, "--"+flags.FileFN, FlgInputs(FileNameInput))
	args = append(args, "--"+flags.FileCreatedAfterFN, FileCreatedAfterInput)
	args = append(args, "--"+flags.FileCreatedBeforeFN, FileCreatedBeforeInput)
	args = append(args, "--"+flags.FileModifiedAfterFN, FileModifiedAfterInput)
	args = append(args, "--"+flags.FileModifiedBeforeFN, FileModifiedBeforeInput)

	return args
}
// AssertLibraryFlags checks that the library-, folder-, and file-scoped
// flag values parsed from the command line match the prepared test
// inputs.
func AssertLibraryFlags(t *testing.T, cmd *cobra.Command) {
	checks := []struct {
		want any
		got  any
	}{
		{LibraryInput, flags.LibraryFV},
		{FolderPathInput, flags.FolderPathFV},
		{FileNameInput, flags.FileNameFV},
		{FileCreatedAfterInput, flags.FileCreatedAfterFV},
		{FileCreatedBeforeInput, flags.FileCreatedBeforeFV},
		{FileModifiedAfterInput, flags.FileModifiedAfterFV},
		{FileModifiedBeforeInput, flags.FileModifiedBeforeFV},
	}

	for _, c := range checks {
		assert.Equal(t, c.want, c.got)
	}
}

View File

@ -14,6 +14,8 @@ type GroupsOpts struct {
Groups []string Groups []string
Channels []string Channels []string
Messages []string Messages []string
Conversations []string
Posts []string
MessageCreatedAfter string MessageCreatedAfter string
MessageCreatedBefore string MessageCreatedBefore string
@ -46,6 +48,7 @@ func GroupsAllowedCategories() map[string]struct{} {
return map[string]struct{}{ return map[string]struct{}{
flags.DataLibraries: {}, flags.DataLibraries: {},
flags.DataMessages: {}, flags.DataMessages: {},
flags.DataConversations: {},
} }
} }
@ -60,6 +63,8 @@ func AddGroupsCategories(sel *selectors.GroupsBackup, cats []string) *selectors.
sel.Include(sel.LibraryFolders(selectors.Any())) sel.Include(sel.LibraryFolders(selectors.Any()))
case flags.DataMessages: case flags.DataMessages:
sel.Include(sel.ChannelMessages(selectors.Any(), selectors.Any())) sel.Include(sel.ChannelMessages(selectors.Any(), selectors.Any()))
case flags.DataConversations:
sel.Include(sel.ConversationPosts(selectors.Any(), selectors.Any()))
} }
} }
@ -71,6 +76,8 @@ func MakeGroupsOpts(cmd *cobra.Command) GroupsOpts {
Groups: flags.GroupFV, Groups: flags.GroupFV,
Channels: flags.ChannelFV, Channels: flags.ChannelFV,
Messages: flags.MessageFV, Messages: flags.MessageFV,
Conversations: flags.ConversationFV,
Posts: flags.PostFV,
WebURL: flags.WebURLFV, WebURL: flags.WebURLFV,
SiteID: flags.SiteIDFV, SiteID: flags.SiteIDFV,
@ -173,28 +180,35 @@ func AddGroupsFilter(
func IncludeGroupsRestoreDataSelectors(ctx context.Context, opts GroupsOpts) *selectors.GroupsRestore { func IncludeGroupsRestoreDataSelectors(ctx context.Context, opts GroupsOpts) *selectors.GroupsRestore {
var ( var (
groups = opts.Groups groups = opts.Groups
lfp, lfn = len(opts.FolderPath), len(opts.FileName) folderPaths, fileNames = len(opts.FolderPath), len(opts.FileName)
llf, lli = len(opts.ListFolder), len(opts.ListItem) listFolders, listItems = len(opts.ListFolder), len(opts.ListItem)
lpf, lpi = len(opts.PageFolder), len(opts.Page) pageFolders, pageItems = len(opts.PageFolder), len(opts.Page)
lg, lch, lm = len(opts.Groups), len(opts.Channels), len(opts.Messages) chans, chanMsgs = len(opts.Channels), len(opts.Messages)
convs, convPosts = len(opts.Conversations), len(opts.Posts)
) )
if lg == 0 { if len(opts.Groups) == 0 {
groups = selectors.Any() groups = selectors.Any()
} }
sel := selectors.NewGroupsRestore(groups) sel := selectors.NewGroupsRestore(groups)
if lfp+lfn+llf+lli+lpf+lpi+lch+lm == 0 { if folderPaths+fileNames+
listFolders+listItems+
pageFolders+pageItems+
chans+chanMsgs+
convs+convPosts == 0 {
sel.Include(sel.AllData()) sel.Include(sel.AllData())
return sel return sel
} }
// sharepoint site selectors // sharepoint site selectors
if lfp+lfn+llf+lli+lpf+lpi > 0 { if folderPaths+fileNames+
if lfp+lfn > 0 { listFolders+listItems+
if lfn == 0 { pageFolders+pageItems > 0 {
if folderPaths+fileNames > 0 {
if fileNames == 0 {
opts.FileName = selectors.Any() opts.FileName = selectors.Any()
} }
@ -210,8 +224,8 @@ func IncludeGroupsRestoreDataSelectors(ctx context.Context, opts GroupsOpts) *se
} }
} }
if llf+lli > 0 { if listFolders+listItems > 0 {
if lli == 0 { if listItems == 0 {
opts.ListItem = selectors.Any() opts.ListItem = selectors.Any()
} }
@ -227,8 +241,8 @@ func IncludeGroupsRestoreDataSelectors(ctx context.Context, opts GroupsOpts) *se
} }
} }
if lpf+lpi > 0 { if pageFolders+pageItems > 0 {
if lpi == 0 { if pageItems == 0 {
opts.Page = selectors.Any() opts.Page = selectors.Any()
} }
@ -247,21 +261,38 @@ func IncludeGroupsRestoreDataSelectors(ctx context.Context, opts GroupsOpts) *se
// channel and message selectors // channel and message selectors
if lch+lm > 0 { if chans+chanMsgs > 0 {
// if no channel is specified, include all channels // if no channel is specified, include all channels
if lch == 0 { if chans == 0 {
opts.Channels = selectors.Any() opts.Channels = selectors.Any()
} }
// if no message is specified, only select channels // if no message is specified, only select channels
// otherwise, look for channel/message pairs // otherwise, look for channel/message pairs
if lm == 0 { if chanMsgs == 0 {
sel.Include(sel.Channels(opts.Channels)) sel.Include(sel.Channels(opts.Channels))
} else { } else {
sel.Include(sel.ChannelMessages(opts.Channels, opts.Messages)) sel.Include(sel.ChannelMessages(opts.Channels, opts.Messages))
} }
} }
// conversation and post selectors
if convs+convPosts > 0 {
	// if no conversation is specified, include all conversations
	if convs == 0 {
		opts.Conversations = selectors.Any()
	}

	// if no post is specified, only select conversations;
	// otherwise, look for conversation/post pairs.
	// BUG FIX: this previously gated on chanMsgs (the channel/message
	// flag count), which ignored any supplied --post values whenever no
	// channel-message flags were set, and conversely selected
	// conversation/post pairs with an empty post list when only
	// channel-message flags were present.
	if convPosts == 0 {
		sel.Include(sel.Conversation(opts.Conversations))
	} else {
		sel.Include(sel.ConversationPosts(opts.Conversations, opts.Posts))
	}
}
return sel return sel
} }

View File

@ -42,28 +42,32 @@ func (suite *GroupsUtilsSuite) TestIncludeGroupsRestoreDataSelectors() {
{ {
name: "no inputs", name: "no inputs",
opts: utils.GroupsOpts{}, opts: utils.GroupsOpts{},
expectIncludeLen: 3, // TODO: bump to 3 when we release conversations
expectIncludeLen: 2,
}, },
{ {
name: "empty", name: "empty",
opts: utils.GroupsOpts{ opts: utils.GroupsOpts{
Groups: empty, Groups: empty,
}, },
expectIncludeLen: 3, // TODO: bump to 3 when we release conversations
expectIncludeLen: 2,
}, },
{ {
name: "single inputs", name: "single inputs",
opts: utils.GroupsOpts{ opts: utils.GroupsOpts{
Groups: single, Groups: single,
}, },
expectIncludeLen: 3, // TODO: bump to 3 when we release conversations
expectIncludeLen: 2,
}, },
{ {
name: "multi inputs", name: "multi inputs",
opts: utils.GroupsOpts{ opts: utils.GroupsOpts{
Groups: multi, Groups: multi,
}, },
expectIncludeLen: 3, // TODO: bump to 3 when we release conversations
expectIncludeLen: 2,
}, },
// sharepoint // sharepoint
{ {
@ -114,22 +118,6 @@ func (suite *GroupsUtilsSuite) TestIncludeGroupsRestoreDataSelectors() {
}, },
expectIncludeLen: 2, expectIncludeLen: 2,
}, },
{
name: "library folder suffixes",
opts: utils.GroupsOpts{
FileName: empty,
FolderPath: empty,
},
expectIncludeLen: 3,
},
{
name: "library folder suffixes and contains",
opts: utils.GroupsOpts{
FileName: empty,
FolderPath: empty,
},
expectIncludeLen: 3,
},
{ {
name: "Page Folder", name: "Page Folder",
opts: utils.GroupsOpts{ opts: utils.GroupsOpts{
@ -203,6 +191,50 @@ func (suite *GroupsUtilsSuite) TestIncludeGroupsRestoreDataSelectors() {
}, },
expectIncludeLen: 1, expectIncludeLen: 1,
}, },
// conversations
{
name: "multiple conversations multiple posts",
opts: utils.GroupsOpts{
Groups: single,
Conversations: multi,
Posts: multi,
},
expectIncludeLen: 1,
},
{
name: "single conversations multiple post",
opts: utils.GroupsOpts{
Groups: single,
Conversations: single,
Posts: multi,
},
expectIncludeLen: 1,
},
{
name: "single conversations and post",
opts: utils.GroupsOpts{
Groups: single,
Conversations: single,
Posts: single,
},
expectIncludeLen: 1,
},
{
name: "multiple conversations only",
opts: utils.GroupsOpts{
Groups: single,
Conversations: multi,
},
expectIncludeLen: 1,
},
{
name: "single conversations only",
opts: utils.GroupsOpts{
Groups: single,
Conversations: single,
},
expectIncludeLen: 1,
},
} }
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
@ -389,7 +421,7 @@ func (suite *GroupsUtilsSuite) TestAddGroupsCategories() {
{ {
name: "none", name: "none",
cats: []string{}, cats: []string{},
expectScopeLen: 3, expectScopeLen: 2,
}, },
{ {
name: "libraries", name: "libraries",
@ -401,9 +433,19 @@ func (suite *GroupsUtilsSuite) TestAddGroupsCategories() {
cats: []string{flags.DataMessages}, cats: []string{flags.DataMessages},
expectScopeLen: 1, expectScopeLen: 1,
}, },
{
name: "conversations",
cats: []string{flags.DataConversations},
expectScopeLen: 1,
},
{ {
name: "all allowed", name: "all allowed",
cats: []string{flags.DataLibraries, flags.DataMessages}, cats: []string{
flags.DataLibraries,
flags.DataMessages,
// flags.DataConversations,
},
// TODO: bump to 3 when we include conversations in all data
expectScopeLen: 2, expectScopeLen: 2,
}, },
{ {

View File

@ -4,7 +4,6 @@ import (
"context" "context"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/common/pii" "github.com/alcionai/corso/src/internal/common/pii"
@ -32,18 +31,16 @@ import (
// it's simpler to comment them for tracking than to delete // it's simpler to comment them for tracking than to delete
// and re-discover them later. // and re-discover them later.
func CreateCollections( func CreateCollections[C graph.GetIDer, I groupsItemer](
ctx context.Context, ctx context.Context,
bpc inject.BackupProducerConfig, bpc inject.BackupProducerConfig,
bh backupHandler, bh backupHandler[C, I],
tenantID string, tenantID string,
scope selectors.GroupsScope, scope selectors.GroupsScope,
su support.StatusUpdater, su support.StatusUpdater,
counter *count.Bus, counter *count.Bus,
errs *fault.Bus, errs *fault.Bus,
) ([]data.BackupCollection, bool, error) { ) ([]data.BackupCollection, bool, error) {
ctx = clues.Add(ctx, "category", scope.Category().PathType())
var ( var (
allCollections = make([]data.BackupCollection, 0) allCollections = make([]data.BackupCollection, 0)
category = scope.Category().PathType() category = scope.Category().PathType()
@ -61,19 +58,23 @@ func CreateCollections(
ctx = clues.Add(ctx, "can_use_previous_backup", canUsePreviousBackup) ctx = clues.Add(ctx, "can_use_previous_backup", canUsePreviousBackup)
channels, err := bh.getContainers(ctx) cc := api.CallConfig{
CanMakeDeltaQueries: bh.canMakeDeltaQueries(),
}
containers, err := bh.getContainers(ctx, cc)
if err != nil { if err != nil {
return nil, false, clues.Stack(err) return nil, false, clues.Stack(err)
} }
counter.Add(count.Channels, int64(len(channels))) counter.Add(count.Channels, int64(len(containers)))
collections, err := populateCollections( collections, err := populateCollections(
ctx, ctx,
qp, qp,
bh, bh,
su, su,
channels, containers,
scope, scope,
cdps[scope.Category().PathType()], cdps[scope.Category().PathType()],
bpc.Options, bpc.Options,
@ -90,12 +91,12 @@ func CreateCollections(
return allCollections, canUsePreviousBackup, nil return allCollections, canUsePreviousBackup, nil
} }
func populateCollections( func populateCollections[C graph.GetIDer, I groupsItemer](
ctx context.Context, ctx context.Context,
qp graph.QueryParams, qp graph.QueryParams,
bh backupHandler, bh backupHandler[C, I],
statusUpdater support.StatusUpdater, statusUpdater support.StatusUpdater,
channels []models.Channelable, containers []container[C],
scope selectors.GroupsScope, scope selectors.GroupsScope,
dps metadata.DeltaPaths, dps metadata.DeltaPaths,
ctrlOpts control.Options, ctrlOpts control.Options,
@ -117,23 +118,22 @@ func populateCollections(
logger.Ctx(ctx).Infow("filling collections", "len_deltapaths", len(dps)) logger.Ctx(ctx).Infow("filling collections", "len_deltapaths", len(dps))
for _, c := range channels { for _, c := range containers {
if el.Failure() != nil { if el.Failure() != nil {
return nil, el.Failure() return nil, el.Failure()
} }
var ( var (
cl = counter.Local() cl = counter.Local()
cID = ptr.Val(c.GetId()) cID = ptr.Val(c.container.GetId())
cName = ptr.Val(c.GetDisplayName())
err error err error
dp = dps[cID] dp = dps[c.storageDirFolders.String()]
prevDelta = dp.Delta prevDelta = dp.Delta
prevPathStr = dp.Path // do not log: pii; log prevPath instead prevPathStr = dp.Path // do not log: pii; log prevPath instead
prevPath path.Path prevPath path.Path
ictx = clues.Add( ictx = clues.Add(
ctx, ctx,
"channel_id", cID, "collection_path", c,
"previous_delta", pii.SafeURL{ "previous_delta", pii.SafeURL{
URL: prevDelta, URL: prevDelta,
SafePathElems: graph.SafeURLPathParams, SafePathElems: graph.SafeURLPathParams,
@ -146,7 +146,7 @@ func populateCollections(
delete(tombstones, cID) delete(tombstones, cID)
// Only create a collection if the path matches the scope. // Only create a collection if the path matches the scope.
if !bh.includeContainer(ictx, qp, c, scope) { if !bh.includeContainer(c.container, scope) {
cl.Inc(count.SkippedContainers) cl.Inc(count.SkippedContainers)
continue continue
} }
@ -165,10 +165,10 @@ func populateCollections(
// if the channel has no email property, it is unable to process delta tokens // if the channel has no email property, it is unable to process delta tokens
// and will return an error if a delta token is queried. // and will return an error if a delta token is queried.
cc := api.CallConfig{ cc := api.CallConfig{
CanMakeDeltaQueries: len(ptr.Val(c.GetEmail())) > 0, CanMakeDeltaQueries: bh.canMakeDeltaQueries() && c.canMakeDeltaQueries,
} }
addAndRem, err := bh.getContainerItemIDs(ctx, cID, prevDelta, cc) addAndRem, err := bh.getContainerItemIDs(ctx, c.storageDirFolders, prevDelta, cc)
if err != nil { if err != nil {
el.AddRecoverable(ctx, clues.Stack(err)) el.AddRecoverable(ctx, clues.Stack(err))
continue continue
@ -181,12 +181,12 @@ func populateCollections(
cl.Add(count.ItemsRemoved, int64(len(removed))) cl.Add(count.ItemsRemoved, int64(len(removed)))
if len(addAndRem.DU.URL) > 0 { if len(addAndRem.DU.URL) > 0 {
deltaURLs[cID] = addAndRem.DU.URL deltaURLs[c.storageDirFolders.String()] = addAndRem.DU.URL
} else if !addAndRem.DU.Reset { } else if !addAndRem.DU.Reset {
logger.Ctx(ictx).Info("missing delta url") logger.Ctx(ictx).Info("missing delta url")
} }
currPath, err := bh.canonicalPath(path.Builder{}.Append(cID), qp.TenantID) currPath, err := bh.canonicalPath(c.storageDirFolders, qp.TenantID)
if err != nil { if err != nil {
err = clues.StackWC(ctx, err).Label(count.BadCollPath) err = clues.StackWC(ctx, err).Label(count.BadCollPath)
el.AddRecoverable(ctx, err) el.AddRecoverable(ctx, err)
@ -205,7 +205,7 @@ func populateCollections(
data.NewBaseCollection( data.NewBaseCollection(
currPath, currPath,
prevPath, prevPath,
path.Builder{}.Append(cName), c.humanLocation.Builder(),
ctrlOpts, ctrlOpts,
addAndRem.DU.Reset, addAndRem.DU.Reset,
cl), cl),
@ -215,11 +215,11 @@ func populateCollections(
removed, removed,
statusUpdater) statusUpdater)
collections[cID] = &edc collections[c.storageDirFolders.String()] = &edc
// add the current path for the container ID to be used in the next backup // add the current path for the container ID to be used in the next backup
// as the "previous path", for reference in case of a rename or relocation. // as the "previous path", for reference in case of a rename or relocation.
currPaths[cID] = currPath.String() currPaths[c.storageDirFolders.String()] = currPath.String()
} }
// A tombstone is a channel that needs to be marked for deletion. // A tombstone is a channel that needs to be marked for deletion.

View File

@ -37,11 +37,10 @@ import (
// mocks // mocks
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
var _ backupHandler = &mockBackupHandler{} var _ backupHandler[models.Channelable, models.ChatMessageable] = &mockBackupHandler{}
type mockBackupHandler struct { type mockBackupHandler struct {
channels []models.Channelable channels []models.Channelable
channelsErr error
messageIDs []string messageIDs []string
deletedMsgIDs []string deletedMsgIDs []string
messagesErr error messagesErr error
@ -51,13 +50,32 @@ type mockBackupHandler struct {
doNotInclude bool doNotInclude bool
} }
func (bh mockBackupHandler) getContainers(context.Context) ([]models.Channelable, error) { func (bh mockBackupHandler) canMakeDeltaQueries() bool {
return bh.channels, bh.channelsErr return true
}
func (bh mockBackupHandler) containers() []container[models.Channelable] {
containers := make([]container[models.Channelable], 0, len(bh.channels))
for _, ch := range bh.channels {
containers = append(containers, channelContainer(ch))
}
return containers
}
//lint:ignore U1000 required for interface compliance
func (bh mockBackupHandler) getContainers(
context.Context,
api.CallConfig,
) ([]container[models.Channelable], error) {
return bh.containers(), nil
} }
func (bh mockBackupHandler) getContainerItemIDs( func (bh mockBackupHandler) getContainerItemIDs(
_ context.Context, _ context.Context,
_, _ string, _ path.Elements,
_ string,
_ api.CallConfig, _ api.CallConfig,
) (pagers.AddedAndRemoved, error) { ) (pagers.AddedAndRemoved, error) {
idRes := make(map[string]time.Time, len(bh.messageIDs)) idRes := make(map[string]time.Time, len(bh.messageIDs))
@ -76,9 +94,8 @@ func (bh mockBackupHandler) getContainerItemIDs(
return aar, bh.messagesErr return aar, bh.messagesErr
} }
//lint:ignore U1000 required for interface compliance
func (bh mockBackupHandler) includeContainer( func (bh mockBackupHandler) includeContainer(
context.Context,
graph.QueryParams,
models.Channelable, models.Channelable,
selectors.GroupsScope, selectors.GroupsScope,
) bool { ) bool {
@ -86,10 +103,11 @@ func (bh mockBackupHandler) includeContainer(
} }
func (bh mockBackupHandler) canonicalPath( func (bh mockBackupHandler) canonicalPath(
folders *path.Builder, storageDirFolders path.Elements,
tenantID string, tenantID string,
) (path.Path, error) { ) (path.Path, error) {
return folders. return storageDirFolders.
Builder().
ToDataLayerPath( ToDataLayerPath(
tenantID, tenantID,
"protectedResource", "protectedResource",
@ -98,9 +116,11 @@ func (bh mockBackupHandler) canonicalPath(
false) false)
} }
func (bh mockBackupHandler) GetItemByID( func (bh mockBackupHandler) GetItem(
_ context.Context, _ context.Context,
_, _, itemID string, _ string,
_ path.Elements,
itemID string,
) (models.ChatMessageable, *details.GroupsInfo, error) { ) (models.ChatMessageable, *details.GroupsInfo, error) {
return bh.messages[itemID], bh.info[itemID], bh.getMessageErr[itemID] return bh.messages[itemID], bh.info[itemID], bh.getMessageErr[itemID]
} }
@ -242,7 +262,7 @@ func (suite *BackupUnitSuite) TestPopulateCollections() {
qp, qp,
test.mock, test.mock,
statusUpdater, statusUpdater,
test.mock.channels, test.mock.containers(),
selectors.NewGroupsBackup(nil).Channels(selectors.Any())[0], selectors.NewGroupsBackup(nil).Channels(selectors.Any())[0],
nil, nil,
ctrlOpts, ctrlOpts,
@ -402,7 +422,7 @@ func (suite *BackupUnitSuite) TestPopulateCollections_incremental() {
qp, qp,
test.mock, test.mock,
statusUpdater, statusUpdater,
test.mock.channels, test.mock.containers(),
allScope, allScope,
test.deltaPaths, test.deltaPaths,
ctrlOpts, ctrlOpts,

View File

@ -3,6 +3,7 @@ package groups
import ( import (
"context" "context"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
@ -10,11 +11,10 @@ import (
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api/pagers" "github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
) )
var _ backupHandler = &channelsBackupHandler{} var _ backupHandler[models.Channelable, models.ChatMessageable] = &channelsBackupHandler{}
type channelsBackupHandler struct { type channelsBackupHandler struct {
ac api.Channels ac api.Channels
@ -31,23 +31,41 @@ func NewChannelBackupHandler(
} }
} }
func (bh channelsBackupHandler) canMakeDeltaQueries() bool {
return true
}
//lint:ignore U1000 required for interface compliance
func (bh channelsBackupHandler) getContainers( func (bh channelsBackupHandler) getContainers(
ctx context.Context, ctx context.Context,
) ([]models.Channelable, error) { _ api.CallConfig,
return bh.ac.GetChannels(ctx, bh.protectedResource) ) ([]container[models.Channelable], error) {
chans, err := bh.ac.GetChannels(ctx, bh.protectedResource)
results := make([]container[models.Channelable], 0, len(chans))
for _, ch := range chans {
results = append(results, channelContainer(ch))
}
return results, clues.Stack(err).OrNil()
} }
func (bh channelsBackupHandler) getContainerItemIDs( func (bh channelsBackupHandler) getContainerItemIDs(
ctx context.Context, ctx context.Context,
channelID, prevDelta string, containerPath path.Elements,
prevDelta string,
cc api.CallConfig, cc api.CallConfig,
) (pagers.AddedAndRemoved, error) { ) (pagers.AddedAndRemoved, error) {
return bh.ac.GetChannelMessageIDs(ctx, bh.protectedResource, channelID, prevDelta, cc) return bh.ac.GetChannelMessageIDs(
ctx,
bh.protectedResource,
containerPath[0],
prevDelta,
cc)
} }
//lint:ignore U1000 required for interface compliance
func (bh channelsBackupHandler) includeContainer( func (bh channelsBackupHandler) includeContainer(
ctx context.Context,
qp graph.QueryParams,
ch models.Channelable, ch models.Channelable,
scope selectors.GroupsScope, scope selectors.GroupsScope,
) bool { ) bool {
@ -55,10 +73,11 @@ func (bh channelsBackupHandler) includeContainer(
} }
func (bh channelsBackupHandler) canonicalPath( func (bh channelsBackupHandler) canonicalPath(
folders *path.Builder, storageDirFolders path.Elements,
tenantID string, tenantID string,
) (path.Path, error) { ) (path.Path, error) {
return folders. return storageDirFolders.
Builder().
ToDataLayerPath( ToDataLayerPath(
tenantID, tenantID,
bh.protectedResource, bh.protectedResource,
@ -76,9 +95,20 @@ func (bh channelsBackupHandler) PathPrefix(tenantID string) (path.Path, error) {
false) false)
} }
func (bh channelsBackupHandler) GetItemByID( func (bh channelsBackupHandler) GetItem(
ctx context.Context, ctx context.Context,
groupID, channelID, itemID string, groupID string,
containerIDs path.Elements,
messageID string,
) (models.ChatMessageable, *details.GroupsInfo, error) { ) (models.ChatMessageable, *details.GroupsInfo, error) {
return bh.ac.GetChannelMessage(ctx, groupID, channelID, itemID) return bh.ac.GetChannelMessage(ctx, groupID, containerIDs[0], messageID)
}
func channelContainer(ch models.Channelable) container[models.Channelable] {
return container[models.Channelable]{
storageDirFolders: path.Elements{ptr.Val(ch.GetId())},
humanLocation: path.Elements{ptr.Val(ch.GetDisplayName())},
canMakeDeltaQueries: len(ptr.Val(ch.GetEmail())) > 0,
container: ch,
}
} }

View File

@ -19,14 +19,14 @@ import (
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
) )
var _ data.BackupCollection = &Collection{} var _ data.BackupCollection = &Collection[groupsItemer]{}
const ( const (
collectionChannelBufferSize = 1000 collectionChannelBufferSize = 1000
numberOfRetries = 4 numberOfRetries = 4
) )
type Collection struct { type Collection[I groupsItemer] struct {
data.BaseCollection data.BaseCollection
protectedResource string protectedResource string
stream chan data.Item stream chan data.Item
@ -36,7 +36,7 @@ type Collection struct {
// removed is a list of item IDs that were deleted from, or moved out, of a container // removed is a list of item IDs that were deleted from, or moved out, of a container
removed map[string]struct{} removed map[string]struct{}
getter getItemByIDer getter getItemer[I]
statusUpdater support.StatusUpdater statusUpdater support.StatusUpdater
} }
@ -47,15 +47,15 @@ type Collection struct {
// to be deleted. If the prev path is nil, it is assumed newly created. // to be deleted. If the prev path is nil, it is assumed newly created.
// If both are populated, then state is either moved (if they differ), // If both are populated, then state is either moved (if they differ),
// or notMoved (if they match). // or notMoved (if they match).
func NewCollection( func NewCollection[I groupsItemer](
baseCol data.BaseCollection, baseCol data.BaseCollection,
getter getItemByIDer, getter getItemer[I],
protectedResource string, protectedResource string,
added map[string]struct{}, added map[string]struct{},
removed map[string]struct{}, removed map[string]struct{},
statusUpdater support.StatusUpdater, statusUpdater support.StatusUpdater,
) Collection { ) Collection[I] {
collection := Collection{ collection := Collection[I]{
BaseCollection: baseCol, BaseCollection: baseCol,
added: added, added: added,
getter: getter, getter: getter,
@ -70,7 +70,7 @@ func NewCollection(
// Items utility function to asynchronously execute process to fill data channel with // Items utility function to asynchronously execute process to fill data channel with
// M365 exchange objects and returns the data channel // M365 exchange objects and returns the data channel
func (col *Collection) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item { func (col *Collection[I]) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item {
go col.streamItems(ctx, errs) go col.streamItems(ctx, errs)
return col.stream return col.stream
} }
@ -79,7 +79,7 @@ func (col *Collection) Items(ctx context.Context, errs *fault.Bus) <-chan data.I
// items() production // items() production
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
func (col *Collection) streamItems(ctx context.Context, errs *fault.Bus) { func (col *Collection[I]) streamItems(ctx context.Context, errs *fault.Bus) {
var ( var (
streamedItems int64 streamedItems int64
totalBytes int64 totalBytes int64
@ -145,13 +145,10 @@ func (col *Collection) streamItems(ctx context.Context, errs *fault.Bus) {
writer := kjson.NewJsonSerializationWriter() writer := kjson.NewJsonSerializationWriter()
defer writer.Close() defer writer.Close()
flds := col.FullPath().Folders() item, info, err := col.getter.GetItem(
parentFolderID := flds[len(flds)-1]
item, info, err := col.getter.GetItemByID(
ctx, ctx,
col.protectedResource, col.protectedResource,
parentFolderID, col.FullPath().Folders(),
id) id)
if err != nil { if err != nil {
err = clues.Wrap(err, "getting channel message data").Label(fault.LabelForceNoBackupCreation) err = clues.Wrap(err, "getting channel message data").Label(fault.LabelForceNoBackupCreation)
@ -210,7 +207,7 @@ func (col *Collection) streamItems(ctx context.Context, errs *fault.Bus) {
// finishPopulation is a utility function used to close a Collection's data channel // finishPopulation is a utility function used to close a Collection's data channel
// and to send the status update through the channel. // and to send the status update through the channel.
func (col *Collection) finishPopulation( func (col *Collection[I]) finishPopulation(
ctx context.Context, ctx context.Context,
streamedItems, totalBytes int64, streamedItems, totalBytes int64,
err error, err error,

View File

@ -7,6 +7,7 @@ import (
"time" "time"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
@ -116,7 +117,7 @@ func (suite *CollectionUnitSuite) TestNewCollection_state() {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
c := NewCollection( c := NewCollection[models.ChatMessageable](
data.NewBaseCollection( data.NewBaseCollection(
test.curr, test.curr,
test.prev, test.prev,
@ -198,7 +199,7 @@ func (suite *CollectionUnitSuite) TestCollection_streamItems() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
col := &Collection{ col := &Collection[models.ChatMessageable]{
BaseCollection: data.NewBaseCollection( BaseCollection: data.NewBaseCollection(
fullPath, fullPath,
nil, nil,

View File

@ -0,0 +1,145 @@
package groups
import (
"context"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
)
var _ backupHandler[models.Conversationable, models.Postable] = &conversationsBackupHandler{}
type conversationsBackupHandler struct {
ac api.Conversations
protectedResource string
}
func NewConversationBackupHandler(
protectedResource string,
ac api.Conversations,
) conversationsBackupHandler {
return conversationsBackupHandler{
ac: ac,
protectedResource: protectedResource,
}
}
func (bh conversationsBackupHandler) canMakeDeltaQueries() bool {
// not supported for conversations
return false
}
//lint:ignore U1000 required for interface compliance
func (bh conversationsBackupHandler) getContainers(
ctx context.Context,
cc api.CallConfig,
) ([]container[models.Conversationable], error) {
convs, err := bh.ac.GetConversations(ctx, bh.protectedResource, cc)
if err != nil {
return nil, clues.Wrap(err, "getting conversations")
}
results := []container[models.Conversationable]{}
for _, conv := range convs {
ictx := clues.Add(ctx, "conversation_id", ptr.Val(conv.GetId()))
threads, err := bh.ac.GetConversationThreads(
ictx,
bh.protectedResource,
ptr.Val(conv.GetId()),
cc)
if err != nil {
return nil, clues.Wrap(err, "getting threads in conversation")
}
for _, thread := range threads {
results = append(results, conversationThreadContainer(conv, thread))
}
}
return results, nil
}
func (bh conversationsBackupHandler) getContainerItemIDs(
ctx context.Context,
containerPath path.Elements,
_ string,
cc api.CallConfig,
) (pagers.AddedAndRemoved, error) {
return bh.ac.GetConversationThreadPostIDs(
ctx,
bh.protectedResource,
containerPath[0],
containerPath[1],
cc)
}
//lint:ignore U1000 required for interface compliance
func (bh conversationsBackupHandler) includeContainer(
conv models.Conversationable,
scope selectors.GroupsScope,
) bool {
return scope.Matches(selectors.GroupsConversation, ptr.Val(conv.GetTopic()))
}
func (bh conversationsBackupHandler) canonicalPath(
storageDirFolders path.Elements,
tenantID string,
) (path.Path, error) {
return storageDirFolders.
Builder().
ToDataLayerPath(
tenantID,
bh.protectedResource,
path.GroupsService,
path.ConversationPostsCategory,
false)
}
func (bh conversationsBackupHandler) PathPrefix(tenantID string) (path.Path, error) {
return path.Build(
tenantID,
bh.protectedResource,
path.GroupsService,
path.ConversationPostsCategory,
false)
}
func (bh conversationsBackupHandler) GetItem(
ctx context.Context,
groupID string,
containerIDs path.Elements, // expects: [conversationID, threadID]
postID string,
) (models.Postable, *details.GroupsInfo, error) {
return bh.ac.GetConversationPost(
ctx,
groupID,
containerIDs[0],
containerIDs[1],
postID,
api.CallConfig{})
}
func conversationThreadContainer(
c models.Conversationable,
t models.ConversationThreadable,
) container[models.Conversationable] {
return container[models.Conversationable]{
storageDirFolders: path.Elements{ptr.Val(c.GetId()), ptr.Val(t.GetId())},
// microsoft UX doesn't display any sort of container name that would make a reasonable
// "location" for the posts in the conversation. We may need to revisit this, perhaps
// the subject is sufficiently acceptable. But at this time it's left empty so that
// we don't populate it with problematic data.
humanLocation: path.Elements{},
canMakeDeltaQueries: false,
container: c,
}
}

View File

@ -3,7 +3,7 @@ package groups
import ( import (
"context" "context"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoft/kiota-abstractions-go/serialization"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -13,41 +13,81 @@ import (
"github.com/alcionai/corso/src/pkg/services/m365/api/pagers" "github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
) )
type backupHandler interface { // itemer standardizes common behavior that can be expected from all
getItemByIDer // items within a groups collection backup.
type groupsItemer interface {
serialization.Parsable
graph.GetIDer
graph.GetLastModifiedDateTimer
}
type backupHandler[C graph.GetIDer, I groupsItemer] interface {
getItemer[I]
getContainerser[C]
getContainerItemIDser
includeContainerer[C]
canonicalPather
canMakeDeltaQuerieser
}
type getItemer[I groupsItemer] interface {
GetItem(
ctx context.Context,
protectedResource string,
containerIDs path.Elements,
itemID string,
) (I, *details.GroupsInfo, error)
}
// gets all containers for the resource // gets all containers for the resource
type getContainerser[C graph.GetIDer] interface {
getContainers( getContainers(
ctx context.Context, ctx context.Context,
) ([]models.Channelable, error) cc api.CallConfig,
) ([]container[C], error)
}
// gets all item IDs (by delta, if possible) in the container // gets all item IDs (by delta, if possible) in the container
type getContainerItemIDser interface {
getContainerItemIDs( getContainerItemIDs(
ctx context.Context, ctx context.Context,
containerID, prevDelta string, containerPath path.Elements,
prevDelta string,
cc api.CallConfig, cc api.CallConfig,
) (pagers.AddedAndRemoved, error) ) (pagers.AddedAndRemoved, error)
}
// includeContainer evaluates whether the container is included // includeContainer evaluates whether the container is included
// in the provided scope. // in the provided scope.
type includeContainerer[C graph.GetIDer] interface {
includeContainer( includeContainer(
ctx context.Context, c C,
qp graph.QueryParams,
ch models.Channelable,
scope selectors.GroupsScope, scope selectors.GroupsScope,
) bool ) bool
}
// canonicalPath constructs the service and category specific path for // canonicalPath constructs the service and category specific path for
// the given builder. // the given builder.
type canonicalPather interface {
canonicalPath( canonicalPath(
folders *path.Builder, storageDir path.Elements,
tenantID string, tenantID string,
) (path.Path, error) ) (path.Path, error)
} }
type getItemByIDer interface { // canMakeDeltaQueries evaluates whether the handler can support a
GetItemByID( // delta query when enumerating its items.
ctx context.Context, type canMakeDeltaQuerieser interface {
resourceID, containerID, itemID string, canMakeDeltaQueries() bool
) (models.ChatMessageable, *details.GroupsInfo, error) }
// ---------------------------------------------------------------------------
// Container management
// ---------------------------------------------------------------------------
type container[C graph.GetIDer] struct {
storageDirFolders path.Elements
humanLocation path.Elements
canMakeDeltaQueries bool
container C
} }

View File

@ -7,18 +7,21 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/path"
) )
type GetChannelMessage struct { type GetChannelMessage struct {
Err error Err error
} }
func (m GetChannelMessage) GetItemByID( func (m GetChannelMessage) GetItem(
ctx context.Context, _ context.Context,
groupID, channelID, messageID string, _ string,
_ path.Elements,
itemID string,
) (models.ChatMessageable, *details.GroupsInfo, error) { ) (models.ChatMessageable, *details.GroupsInfo, error) {
msg := models.NewChatMessage() msg := models.NewChatMessage()
msg.SetId(ptr.To(messageID)) msg.SetId(ptr.To(itemID))
return msg, &details.GroupsInfo{}, m.Err return msg, &details.GroupsInfo{}, m.Err
} }

View File

@ -66,8 +66,6 @@ func ProduceBackupCollections(
return nil, nil, clues.WrapWC(ctx, err, "getting group") return nil, nil, clues.WrapWC(ctx, err, "getting group")
} }
isTeam := api.IsTeam(ctx, group)
for _, scope := range b.Scopes() { for _, scope := range b.Scopes() {
if el.Failure() != nil { if el.Failure() != nil {
break break
@ -75,12 +73,16 @@ func ProduceBackupCollections(
cl := counter.Local() cl := counter.Local()
ictx := clues.AddLabelCounter(ctx, cl.PlainAdder()) ictx := clues.AddLabelCounter(ctx, cl.PlainAdder())
ictx = clues.Add(ictx, "category", scope.Category().PathType())
var dbcs []data.BackupCollection var dbcs []data.BackupCollection
switch scope.Category().PathType() { switch scope.Category().PathType() {
case path.LibrariesCategory: case path.LibrariesCategory:
sites, err := ac.Groups().GetAllSites(ictx, bpc.ProtectedResource.ID(), errs) sites, err := ac.Groups().GetAllSites(
ictx,
bpc.ProtectedResource.ID(),
errs)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }
@ -159,7 +161,7 @@ func ProduceBackupCollections(
} }
progressBar := observe.MessageWithCompletion(ictx, pcfg, scope.Category().PathType().HumanString()) progressBar := observe.MessageWithCompletion(ictx, pcfg, scope.Category().PathType().HumanString())
if !isTeam { if !api.IsTeam(ictx, group) {
continue continue
} }
@ -190,6 +192,45 @@ func ProduceBackupCollections(
dbcs = append(dbcs, cs...) dbcs = append(dbcs, cs...)
close(progressBar)
case path.ConversationPostsCategory:
var (
bh = groups.NewConversationBackupHandler(bpc.ProtectedResource.ID(), ac.Conversations())
cs []data.BackupCollection
err error
)
pcfg := observe.ProgressCfg{
Indent: 1,
CompletionMessage: func() string { return fmt.Sprintf("(found %d conversations)", len(cs)) },
}
progressBar := observe.MessageWithCompletion(ictx, pcfg, scope.Category().PathType().HumanString())
cs, canUsePreviousBackup, err := groups.CreateCollections(
ictx,
bpc,
bh,
creds.AzureTenantID,
scope,
su,
counter,
errs)
if err != nil {
el.AddRecoverable(ictx, err)
continue
}
if !canUsePreviousBackup {
tp, err := bh.PathPrefix(creds.AzureTenantID)
if err != nil {
return nil, nil, clues.Wrap(err, "getting conversations path")
}
dbcs = append(dbcs, data.NewTombstoneCollection(tp, control.Options{}, counter))
}
dbcs = append(dbcs, cs...)
close(progressBar) close(progressBar)
} }
@ -236,6 +277,10 @@ func ProduceBackupCollections(
return collections, ssmb.ToReader(), el.Failure() return collections, ssmb.ToReader(), el.Failure()
} }
// ---------------------------------------------------------------------------
// metadata
// ---------------------------------------------------------------------------
func getSitesMetadataCollection( func getSitesMetadataCollection(
tenantID, groupID string, tenantID, groupID string,
sites map[string]string, sites map[string]string,

View File

@ -4,6 +4,7 @@ import (
"context" "context"
"testing" "testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
evmock "github.com/alcionai/corso/src/internal/events/mock" evmock "github.com/alcionai/corso/src/internal/events/mock"
@ -79,6 +80,115 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_incrementalGroups() {
true) true)
} }
func (suite *GroupsBackupIntgSuite) TestBackup_Run_groups9VersionBumpBackup() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var (
mb = evmock.NewBus()
sel = selectors.NewGroupsBackup([]string{suite.its.group.ID})
opts = control.DefaultOptions()
whatSet = deeTD.CategoryFromRepoRef
)
sel.Include(
selTD.GroupsBackupLibraryFolderScope(sel),
selTD.GroupsBackupChannelScope(sel),
sel.Conversation(selectors.Any()))
bo, bod := prepNewTestBackupOp(
t,
ctx,
mb,
sel.Selector,
opts,
version.All8MigrateUserPNToID,
count.New())
defer bod.close(t, ctx)
runAndCheckBackup(t, ctx, &bo, mb, false)
checkBackupIsInManifests(
t,
ctx,
bod.kw,
bod.sw,
&bo,
bod.sel,
bod.sel.ID(),
path.ChannelMessagesCategory)
_, expectDeets := deeTD.GetDeetsInBackup(
t,
ctx,
bo.Results.BackupID,
bod.acct.ID(),
bod.sel.ID(),
path.GroupsService,
whatSet,
bod.kms,
bod.sss)
deeTD.CheckBackupDetails(
t,
ctx,
bo.Results.BackupID,
whatSet,
bod.kms,
bod.sss,
expectDeets,
false)
mb = evmock.NewBus()
forcedFull := newTestBackupOp(
t,
ctx,
bod,
mb,
opts,
count.New())
forcedFull.BackupVersion = version.Groups9Update
runAndCheckBackup(t, ctx, &forcedFull, mb, false)
checkBackupIsInManifests(
t,
ctx,
bod.kw,
bod.sw,
&forcedFull,
bod.sel,
bod.sel.ID(),
path.ChannelMessagesCategory)
_, expectDeets = deeTD.GetDeetsInBackup(
t,
ctx,
forcedFull.Results.BackupID,
bod.acct.ID(),
bod.sel.ID(),
path.GroupsService,
whatSet,
bod.kms,
bod.sss)
deeTD.CheckBackupDetails(
t,
ctx,
forcedFull.Results.BackupID,
whatSet,
bod.kms,
bod.sss,
expectDeets,
false)
// The number of items backed up in the forced full backup should be roughly
// the same as the number of items in the original backup.
assert.Equal(
t,
bo.Results.Counts[string(count.PersistedNonCachedFiles)],
forcedFull.Results.Counts[string(count.PersistedNonCachedFiles)],
"items written")
}
func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsBasic() { func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsBasic() {
t := suite.T() t := suite.T()
@ -95,7 +205,8 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsBasic() {
sel.Include( sel.Include(
selTD.GroupsBackupLibraryFolderScope(sel), selTD.GroupsBackupLibraryFolderScope(sel),
selTD.GroupsBackupChannelScope(sel)) selTD.GroupsBackupChannelScope(sel),
sel.Conversation(selectors.Any()))
bo, bod := prepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter) bo, bod := prepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter)
defer bod.close(t, ctx) defer bod.close(t, ctx)

View File

@ -62,9 +62,10 @@ func (b *Builder) addFolderEntries(
b.knownFolders = map[string]Entry{} b.knownFolders = map[string]Entry{}
} }
// Need a unique location because we want to have separate folders for // Unique location ensures that the location reference includes all
// different drives and categories even if there's duplicate folder names in // possible hierarchy. In many handlers, the location ref is only partially
// them. // constructed (ex: drive locations do not contain the drive ID). This
// transformer ensures that the location is complete and fully populated.
uniqueLoc, err := entry.uniqueLocation(locationRef) uniqueLoc, err := entry.uniqueLocation(locationRef)
if err != nil { if err != nil {
return clues.Wrap(err, "getting LocationIDer") return clues.Wrap(err, "getting LocationIDer")

View File

@ -145,6 +145,8 @@ func (i *GroupsInfo) uniqueLocation(baseLoc *path.Builder) (*uniqueLoc, error) {
loc, err = NewGroupsLocationIDer(path.LibrariesCategory, i.DriveID, baseLoc.Elements()...) loc, err = NewGroupsLocationIDer(path.LibrariesCategory, i.DriveID, baseLoc.Elements()...)
case GroupsChannelMessage: case GroupsChannelMessage:
loc, err = NewGroupsLocationIDer(path.ChannelMessagesCategory, "", baseLoc.Elements()...) loc, err = NewGroupsLocationIDer(path.ChannelMessagesCategory, "", baseLoc.Elements()...)
case GroupsConversationPost:
loc, err = NewGroupsLocationIDer(path.ConversationPostsCategory, "", baseLoc.Elements()...)
} }
return &loc, err return &loc, err
@ -156,7 +158,7 @@ func (i *GroupsInfo) updateFolder(f *FolderInfo) error {
switch i.ItemType { switch i.ItemType {
case SharePointLibrary: case SharePointLibrary:
return updateFolderWithinDrive(SharePointLibrary, i.DriveName, i.DriveID, f) return updateFolderWithinDrive(SharePointLibrary, i.DriveName, i.DriveID, f)
case GroupsChannelMessage: case GroupsChannelMessage, GroupsConversationPost:
return nil return nil
} }

View File

@ -65,6 +65,11 @@ func NewElements(p string) Elements {
return Split(p) return Split(p)
} }
// Builder produces a *Builder{} containing the elements.
func (el Elements) Builder() *Builder {
return Builder{}.Append(el...)
}
// Conceal produces a concealed representation of the elements, suitable for // Conceal produces a concealed representation of the elements, suitable for
// logging, storing in errors, and other output. // logging, storing in errors, and other output.
func (el Elements) Conceal() string { func (el Elements) Conceal() string {

View File

@ -217,8 +217,9 @@ func (s *groups) AllData() []GroupsScope {
scopes = append( scopes = append(
scopes, scopes,
makeScope[GroupsScope](GroupsLibraryFolder, Any()), makeScope[GroupsScope](GroupsLibraryFolder, Any()),
makeScope[GroupsScope](GroupsChannel, Any()), makeScope[GroupsScope](GroupsChannel, Any()))
makeScope[GroupsScope](GroupsConversation, Any())) // TODO: enable conversations in all-data backups
// makeScope[GroupsScope](GroupsConversation, Any()))
return scopes return scopes
} }

View File

@ -245,8 +245,9 @@ func (suite *GroupsSelectorSuite) TestGroupsRestore_Reduce() {
}, },
expect: arr( expect: arr(
libItem, libItem2, libItem3, libItem, libItem2, libItem3,
chanItem, chanItem2, chanItem3, chanItem, chanItem2, chanItem3),
convItem, convItem2, convItem3), // TODO: re-add when we release conversations
// convItem, convItem2, convItem3),
}, },
{ {
name: "only match library item", name: "only match library item",

View File

@ -17,3 +17,10 @@ func GroupsBackupLibraryFolderScope(sel *selectors.GroupsBackup) []selectors.Gro
func GroupsBackupChannelScope(sel *selectors.GroupsBackup) []selectors.GroupsScope { func GroupsBackupChannelScope(sel *selectors.GroupsBackup) []selectors.GroupsScope {
return sel.Channels([]string{TestChannelName}) return sel.Channels([]string{TestChannelName})
} }
// GroupsBackupConversationScope is the standard folder scope that should be used
// in integration backups with groups when interacting with conversations.
func GroupsBackupConversationScope(sel *selectors.GroupsBackup) []selectors.GroupsScope {
// there's no way to easily specify a test conversation by name.
return sel.Conversation(selectors.Any())
}