enable groups integration tests (#4161)

Enables nightly CLI E2E tests, operations-layer integration tests, and sanity tests for Groups and Teams.

---

#### Does this PR need a docs update or release note?

- [x]  No

#### Type of change

- [x] 🤖 Supportability/Tests

#### Issue(s)

* #3989

#### Test Plan

- [x]  Unit test
- [x] 💚 E2E

View File

@ -330,7 +330,6 @@ jobs:
# generate new entries for test
- name: Groups - Create new data
if: false # TODO: enable when ready
id: new-data-creation-groups
working-directory: ./src/cmd/factory
run: |
@ -347,7 +346,6 @@ jobs:
echo result="${suffix}" >> $GITHUB_OUTPUT
- name: Groups - Backup
if: false # TODO: enable when ready
id: groups-backup
uses: ./.github/actions/backup-restore-test
with:
@ -358,7 +356,6 @@ jobs:
log-dir: ${{ env.CORSO_LOG_DIR }}
- name: Teams - Backup
if: false # TODO: enable when ready
id: teams-backup
uses: ./.github/actions/backup-restore-test
with:

View File

@ -0,0 +1,615 @@
package backup_test
import (
"context"
"fmt"
"strings"
"testing"
"github.com/alcionai/clues"
"github.com/google/uuid"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/print"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
)
var (
channelMessages = path.ChannelMessagesCategory
libraries = path.LibrariesCategory
)
// ---------------------------------------------------------------------------
// tests that require no existing backups
// ---------------------------------------------------------------------------
type NoBackupGroupsE2ESuite struct {
tester.Suite
dpnd dependencies
its intgTesterSetup
}
func TestNoBackupGroupsE2ESuite(t *testing.T) {
suite.Run(t, &NoBackupGroupsE2ESuite{Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs},
)})
}
func (suite *NoBackupGroupsE2ESuite) SetupSuite() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx)
}
func (suite *NoBackupGroupsE2ESuite) TestGroupsBackupListCmd_noBackups() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
suite.dpnd.recorder.Reset()
cmd := cliTD.StubRootCmd(
"backup", "list", "groups",
"--config-file", suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
cmd.SetErr(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := suite.dpnd.recorder.String()
// with no backups in the repo, the listing should report that none are available
assert.True(t, strings.HasSuffix(result, "No backups available\n"))
}
// ---------------------------------------------------------------------------
// tests with no prior backup
// ---------------------------------------------------------------------------
type BackupGroupsE2ESuite struct {
tester.Suite
dpnd dependencies
its intgTesterSetup
}
func TestBackupGroupsE2ESuite(t *testing.T) {
suite.Run(t, &BackupGroupsE2ESuite{Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs},
)})
}
func (suite *BackupGroupsE2ESuite) SetupSuite() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx)
}
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_channelMessages() {
runGroupsBackupCategoryTest(suite, channelMessages)
}
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_libraries() {
runGroupsBackupCategoryTest(suite, libraries)
}
func runGroupsBackupCategoryTest(suite *BackupGroupsE2ESuite, category path.CategoryType) {
recorder := strings.Builder{}
recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd, ctx := buildGroupsBackupCmd(
ctx,
suite.dpnd.configFilePath,
suite.its.group.ID,
category.String(),
&recorder)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := recorder.String()
t.Log("backup results", result)
// as an offhand check: the result should contain the m365 group id
assert.Contains(t, result, suite.its.group.ID)
}
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_groupNotFound_channelMessages() {
runGroupsBackupGroupNotFoundTest(suite, channelMessages)
}
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_groupNotFound_libraries() {
runGroupsBackupGroupNotFoundTest(suite, libraries)
}
func runGroupsBackupGroupNotFoundTest(suite *BackupGroupsE2ESuite, category path.CategoryType) {
recorder := strings.Builder{}
recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd, ctx := buildGroupsBackupCmd(
ctx,
suite.dpnd.configFilePath,
"foo@not-there.com",
category.String(),
&recorder)
// run the command
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
assert.Contains(
t,
err.Error(),
"not found in tenant", "error missing group not found")
assert.NotContains(t, err.Error(), "runtime error", "panic happened")
t.Logf("backup error message: %s", err.Error())
result := recorder.String()
t.Log("backup results", result)
}
func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAzureClientIDFlag() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.dpnd.recorder.Reset()
cmd := cliTD.StubRootCmd(
"backup", "create", "groups",
"--group", suite.its.group.ID,
"--azure-client-id", "invalid-value")
cli.BuildCommandTree(cmd)
cmd.SetErr(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_fromConfigFile() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
suite.dpnd.recorder.Reset()
cmd := cliTD.StubRootCmd(
"backup", "create", "groups",
"--group", suite.its.group.ID,
"--config-file", suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := suite.dpnd.recorder.String()
t.Log("backup results", result)
// as an offhand check: the result should contain the m365 group id
assert.Contains(t, result, suite.its.group.ID)
}
// AWS flags
func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAWSFlags() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.dpnd.recorder.Reset()
cmd := cliTD.StubRootCmd(
"backup", "create", "groups",
"--group", suite.its.group.ID,
"--aws-access-key", "invalid-value",
"--aws-secret-access-key", "some-invalid-value",
)
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
// since invalid aws creds are explicitly set, should see a failure
require.Error(t, err, clues.ToCore(err))
}
// ---------------------------------------------------------------------------
// tests prepared with a previous backup
// ---------------------------------------------------------------------------
type PreparedBackupGroupsE2ESuite struct {
tester.Suite
dpnd dependencies
backupOps map[path.CategoryType]string
its intgTesterSetup
}
func TestPreparedBackupGroupsE2ESuite(t *testing.T) {
suite.Run(t, &PreparedBackupGroupsE2ESuite{
Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
})
}
func (suite *PreparedBackupGroupsE2ESuite) SetupSuite() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx)
suite.backupOps = make(map[path.CategoryType]string)
var (
groups = []string{suite.its.group.ID}
ins = idname.NewCache(map[string]string{suite.its.group.ID: suite.its.group.ID})
)
for _, set := range []path.CategoryType{channelMessages, libraries} {
var (
sel = selectors.NewGroupsBackup(groups)
scopes []selectors.GroupsScope
)
switch set {
case channelMessages:
scopes = selTD.GroupsBackupChannelScope(sel)
case libraries:
scopes = selTD.GroupsBackupLibraryFolderScope(sel)
}
sel.Include(scopes)
bop, err := suite.dpnd.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
require.NoError(t, err, clues.ToCore(err))
err = bop.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
bIDs := string(bop.Results.BackupID)
// sanity check, ensure we can find the backup and its details immediately
b, err := suite.dpnd.repo.Backup(ctx, string(bop.Results.BackupID))
require.NoError(t, err, "retrieving recent backup by ID")
require.Equal(t, bIDs, string(b.ID), "repo backup matches results id")
_, b, errs := suite.dpnd.repo.GetBackupDetails(ctx, bIDs)
require.NoError(t, errs.Failure(), "retrieving recent backup details by ID")
require.Empty(t, errs.Recovered(), "retrieving recent backup details by ID")
require.Equal(t, bIDs, string(b.ID), "repo details matches results id")
suite.backupOps[set] = string(b.ID)
}
}
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_channelMessages() {
runGroupsListCmdTest(suite, channelMessages)
}
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_libraries() {
runGroupsListCmdTest(suite, libraries)
}
func runGroupsListCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) {
suite.dpnd.recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "list", "groups",
"--config-file", suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output
result := suite.dpnd.recorder.String()
assert.Contains(t, result, suite.backupOps[category])
}
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_singleID_channelMessages() {
runGroupsListSingleCmdTest(suite, channelMessages)
}
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_singleID_libraries() {
runGroupsListSingleCmdTest(suite, libraries)
}
func runGroupsListSingleCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) {
suite.dpnd.recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
bID := suite.backupOps[category]
cmd := cliTD.StubRootCmd(
"backup", "list", "groups",
"--config-file", suite.dpnd.configFilePath,
"--backup", string(bID))
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output
result := suite.dpnd.recorder.String()
assert.Contains(t, result, bID)
}
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_badID() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "list", "groups",
"--config-file", suite.dpnd.configFilePath,
"--backup", "smarfs")
cli.BuildCommandTree(cmd)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_channelMessages() {
runGroupsDetailsCmdTest(suite, channelMessages)
}
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_libraries() {
runGroupsDetailsCmdTest(suite, libraries)
}
func runGroupsDetailsCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) {
suite.dpnd.recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
bID := suite.backupOps[category]
// fetch the details from the repo first
deets, _, errs := suite.dpnd.repo.GetBackupDetails(ctx, string(bID))
require.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
require.Empty(t, errs.Recovered())
cmd := cliTD.StubRootCmd(
"backup", "details", "groups",
"--config-file", suite.dpnd.configFilePath,
"--"+flags.BackupFN, string(bID))
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output
result := suite.dpnd.recorder.String()
i := 0
foundFolders := 0
for _, ent := range deets.Entries {
// Skip folders as they don't mean anything to the end user.
if ent.Folder != nil {
foundFolders++
continue
}
suite.Run(fmt.Sprintf("detail %d", i), func() {
assert.Contains(suite.T(), result, ent.ShortRef)
})
i++
}
// We only back up the default folder for each category, so there should be at
// least that folder (we don't make details entries for prefix folders).
assert.GreaterOrEqual(t, foundFolders, 1)
}
// ---------------------------------------------------------------------------
// tests for deleting backups
// ---------------------------------------------------------------------------
type BackupDeleteGroupsE2ESuite struct {
tester.Suite
dpnd dependencies
backupOp operations.BackupOperation
}
func TestBackupDeleteGroupsE2ESuite(t *testing.T) {
suite.Run(t, &BackupDeleteGroupsE2ESuite{
Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs},
),
})
}
func (suite *BackupDeleteGroupsE2ESuite) SetupSuite() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.dpnd = prepM365Test(t, ctx)
m365GroupID := tconfig.M365GroupID(t)
groups := []string{m365GroupID}
// some tests require an existing backup
sel := selectors.NewGroupsBackup(groups)
sel.Include(selTD.GroupsBackupChannelScope(sel))
backupOp, err := suite.dpnd.repo.NewBackup(ctx, sel.Selector)
require.NoError(t, err, clues.ToCore(err))
suite.backupOp = backupOp
err = suite.backupOp.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
}
func (suite *BackupDeleteGroupsE2ESuite) TestGroupsBackupDeleteCmd() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "delete", "groups",
"--config-file", suite.dpnd.configFilePath,
"--"+flags.BackupFN, string(suite.backupOp.Results.BackupID))
cli.BuildCommandTree(cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// a follow-up details call should fail, since the backup was deleted
cmd = cliTD.StubRootCmd(
"backup", "details", "groups",
"--config-file", suite.dpnd.configFilePath,
"--backup", string(suite.backupOp.Results.BackupID))
cli.BuildCommandTree(cmd)
err = cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *BackupDeleteGroupsE2ESuite) TestGroupsBackupDeleteCmd_UnknownID() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "delete", "groups",
"--config-file", suite.dpnd.configFilePath,
"--"+flags.BackupFN, uuid.NewString())
cli.BuildCommandTree(cmd)
// unknown backupIDs should error since the modelStore can't find the backup
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------
func buildGroupsBackupCmd(
ctx context.Context,
configFile, group, category string,
recorder *strings.Builder,
) (*cobra.Command, context.Context) {
cmd := cliTD.StubRootCmd(
"backup", "create", "groups",
"--config-file", configFile,
"--"+flags.GroupFN, group,
"--"+flags.CategoryDataFN, category)
cli.BuildCommandTree(cmd)
cmd.SetOut(recorder)
return cmd, print.SetRootCmd(ctx, cmd)
}

View File

@ -0,0 +1,610 @@
package backup_test
import (
"context"
"fmt"
"strings"
"testing"
"github.com/alcionai/clues"
"github.com/google/uuid"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/print"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
)
// ---------------------------------------------------------------------------
// tests that require no existing backups
// ---------------------------------------------------------------------------
type NoBackupTeamsE2ESuite struct {
tester.Suite
dpnd dependencies
its intgTesterSetup
}
func TestNoBackupTeamsE2ESuite(t *testing.T) {
suite.Run(t, &NoBackupTeamsE2ESuite{Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs},
)})
}
func (suite *NoBackupTeamsE2ESuite) SetupSuite() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx)
}
func (suite *NoBackupTeamsE2ESuite) TestTeamsBackupListCmd_noBackups() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
suite.dpnd.recorder.Reset()
cmd := cliTD.StubRootCmd(
"backup", "list", "teams",
"--config-file", suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
cmd.SetErr(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := suite.dpnd.recorder.String()
// with no backups in the repo, the listing should report that none are available
assert.True(t, strings.HasSuffix(result, "No backups available\n"))
}
// ---------------------------------------------------------------------------
// tests with no prior backup
// ---------------------------------------------------------------------------
type BackupTeamsE2ESuite struct {
tester.Suite
dpnd dependencies
its intgTesterSetup
}
func TestBackupTeamsE2ESuite(t *testing.T) {
suite.Run(t, &BackupTeamsE2ESuite{Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs},
)})
}
func (suite *BackupTeamsE2ESuite) SetupSuite() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx)
}
func (suite *BackupTeamsE2ESuite) TestTeamsBackupCmd_channelMessages() {
runTeamsBackupCategoryTest(suite, channelMessages)
}
func (suite *BackupTeamsE2ESuite) TestTeamsBackupCmd_libraries() {
runTeamsBackupCategoryTest(suite, libraries)
}
func runTeamsBackupCategoryTest(suite *BackupTeamsE2ESuite, category path.CategoryType) {
recorder := strings.Builder{}
recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd, ctx := buildTeamsBackupCmd(
ctx,
suite.dpnd.configFilePath,
suite.its.team.ID,
category.String(),
&recorder)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := recorder.String()
t.Log("backup results", result)
// as an offhand check: the result should contain the m365 team id
assert.Contains(t, result, suite.its.team.ID)
}
func (suite *BackupTeamsE2ESuite) TestTeamsBackupCmd_teamNotFound_channelMessages() {
runTeamsBackupTeamNotFoundTest(suite, channelMessages)
}
func (suite *BackupTeamsE2ESuite) TestTeamsBackupCmd_teamNotFound_libraries() {
runTeamsBackupTeamNotFoundTest(suite, libraries)
}
func runTeamsBackupTeamNotFoundTest(suite *BackupTeamsE2ESuite, category path.CategoryType) {
recorder := strings.Builder{}
recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd, ctx := buildTeamsBackupCmd(
ctx,
suite.dpnd.configFilePath,
"foo@not-there.com",
category.String(),
&recorder)
// run the command
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
assert.Contains(
t,
err.Error(),
"not found in tenant", "error missing team not found")
assert.NotContains(t, err.Error(), "runtime error", "panic happened")
t.Logf("backup error message: %s", err.Error())
result := recorder.String()
t.Log("backup results", result)
}
func (suite *BackupTeamsE2ESuite) TestBackupCreateTeams_badAzureClientIDFlag() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.dpnd.recorder.Reset()
cmd := cliTD.StubRootCmd(
"backup", "create", "teams",
"--team", suite.its.team.ID,
"--azure-client-id", "invalid-value")
cli.BuildCommandTree(cmd)
cmd.SetErr(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *BackupTeamsE2ESuite) TestBackupCreateTeams_fromConfigFile() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
suite.dpnd.recorder.Reset()
cmd := cliTD.StubRootCmd(
"backup", "create", "teams",
"--team", suite.its.team.ID,
"--config-file", suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := suite.dpnd.recorder.String()
t.Log("backup results", result)
// as an offhand check: the result should contain the m365 team id
assert.Contains(t, result, suite.its.team.ID)
}
// AWS flags
func (suite *BackupTeamsE2ESuite) TestBackupCreateTeams_badAWSFlags() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.dpnd.recorder.Reset()
cmd := cliTD.StubRootCmd(
"backup", "create", "teams",
"--team", suite.its.team.ID,
"--aws-access-key", "invalid-value",
"--aws-secret-access-key", "some-invalid-value",
)
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
// since invalid aws creds are explicitly set, should see a failure
require.Error(t, err, clues.ToCore(err))
}
// ---------------------------------------------------------------------------
// tests prepared with a previous backup
// ---------------------------------------------------------------------------
type PreparedBackupTeamsE2ESuite struct {
tester.Suite
dpnd dependencies
backupOps map[path.CategoryType]string
its intgTesterSetup
}
func TestPreparedBackupTeamsE2ESuite(t *testing.T) {
suite.Run(t, &PreparedBackupTeamsE2ESuite{
Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
})
}
func (suite *PreparedBackupTeamsE2ESuite) SetupSuite() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx)
suite.backupOps = make(map[path.CategoryType]string)
var (
teams = []string{suite.its.team.ID}
ins = idname.NewCache(map[string]string{suite.its.team.ID: suite.its.team.ID})
)
for _, set := range []path.CategoryType{channelMessages, libraries} {
var (
sel = selectors.NewGroupsBackup(teams)
scopes []selectors.GroupsScope
)
switch set {
case channelMessages:
scopes = selTD.GroupsBackupChannelScope(sel)
case libraries:
scopes = selTD.GroupsBackupLibraryFolderScope(sel)
}
sel.Include(scopes)
bop, err := suite.dpnd.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
require.NoError(t, err, clues.ToCore(err))
err = bop.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
bIDs := string(bop.Results.BackupID)
// sanity check, ensure we can find the backup and its details immediately
b, err := suite.dpnd.repo.Backup(ctx, string(bop.Results.BackupID))
require.NoError(t, err, "retrieving recent backup by ID")
require.Equal(t, bIDs, string(b.ID), "repo backup matches results id")
_, b, errs := suite.dpnd.repo.GetBackupDetails(ctx, bIDs)
require.NoError(t, errs.Failure(), "retrieving recent backup details by ID")
require.Empty(t, errs.Recovered(), "retrieving recent backup details by ID")
require.Equal(t, bIDs, string(b.ID), "repo details matches results id")
suite.backupOps[set] = string(b.ID)
}
}
func (suite *PreparedBackupTeamsE2ESuite) TestTeamsListCmd_channelMessages() {
runTeamsListCmdTest(suite, channelMessages)
}
func (suite *PreparedBackupTeamsE2ESuite) TestTeamsListCmd_libraries() {
runTeamsListCmdTest(suite, libraries)
}
func runTeamsListCmdTest(suite *PreparedBackupTeamsE2ESuite, category path.CategoryType) {
suite.dpnd.recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "list", "teams",
"--config-file", suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output
result := suite.dpnd.recorder.String()
assert.Contains(t, result, suite.backupOps[category])
}
func (suite *PreparedBackupTeamsE2ESuite) TestTeamsListCmd_singleID_channelMessages() {
runTeamsListSingleCmdTest(suite, channelMessages)
}
func (suite *PreparedBackupTeamsE2ESuite) TestTeamsListCmd_singleID_libraries() {
runTeamsListSingleCmdTest(suite, libraries)
}
func runTeamsListSingleCmdTest(suite *PreparedBackupTeamsE2ESuite, category path.CategoryType) {
suite.dpnd.recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
bID := suite.backupOps[category]
cmd := cliTD.StubRootCmd(
"backup", "list", "teams",
"--config-file", suite.dpnd.configFilePath,
"--backup", string(bID))
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output
result := suite.dpnd.recorder.String()
assert.Contains(t, result, bID)
}
func (suite *PreparedBackupTeamsE2ESuite) TestTeamsListCmd_badID() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "list", "teams",
"--config-file", suite.dpnd.configFilePath,
"--backup", "smarfs")
cli.BuildCommandTree(cmd)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *PreparedBackupTeamsE2ESuite) TestTeamsDetailsCmd_channelMessages() {
runTeamsDetailsCmdTest(suite, channelMessages)
}
func (suite *PreparedBackupTeamsE2ESuite) TestTeamsDetailsCmd_libraries() {
runTeamsDetailsCmdTest(suite, libraries)
}
func runTeamsDetailsCmdTest(suite *PreparedBackupTeamsE2ESuite, category path.CategoryType) {
suite.dpnd.recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
bID := suite.backupOps[category]
// fetch the details from the repo first
deets, _, errs := suite.dpnd.repo.GetBackupDetails(ctx, string(bID))
require.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
require.Empty(t, errs.Recovered())
cmd := cliTD.StubRootCmd(
"backup", "details", "teams",
"--config-file", suite.dpnd.configFilePath,
"--"+flags.BackupFN, string(bID))
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output
result := suite.dpnd.recorder.String()
i := 0
foundFolders := 0
for _, ent := range deets.Entries {
// Skip folders as they don't mean anything to the end user.
if ent.Folder != nil {
foundFolders++
continue
}
suite.Run(fmt.Sprintf("detail %d", i), func() {
assert.Contains(suite.T(), result, ent.ShortRef)
})
i++
}
// We only back up the default folder for each category, so there should be at
// least that folder (we don't make details entries for prefix folders).
assert.GreaterOrEqual(t, foundFolders, 1)
}
// ---------------------------------------------------------------------------
// tests for deleting backups
// ---------------------------------------------------------------------------
type BackupDeleteTeamsE2ESuite struct {
tester.Suite
dpnd dependencies
backupOp operations.BackupOperation
}
func TestBackupDeleteTeamsE2ESuite(t *testing.T) {
suite.Run(t, &BackupDeleteTeamsE2ESuite{
Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs},
),
})
}
func (suite *BackupDeleteTeamsE2ESuite) SetupSuite() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.dpnd = prepM365Test(t, ctx)
m365TeamID := tconfig.M365TeamID(t)
teams := []string{m365TeamID}
// some tests require an existing backup
sel := selectors.NewGroupsBackup(teams)
sel.Include(selTD.GroupsBackupChannelScope(sel))
backupOp, err := suite.dpnd.repo.NewBackup(ctx, sel.Selector)
require.NoError(t, err, clues.ToCore(err))
suite.backupOp = backupOp
err = suite.backupOp.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
}
func (suite *BackupDeleteTeamsE2ESuite) TestTeamsBackupDeleteCmd() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "delete", "teams",
"--config-file", suite.dpnd.configFilePath,
"--"+flags.BackupFN, string(suite.backupOp.Results.BackupID))
cli.BuildCommandTree(cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// a follow-up details call should fail, since the backup was deleted
cmd = cliTD.StubRootCmd(
"backup", "details", "teams",
"--config-file", suite.dpnd.configFilePath,
"--backup", string(suite.backupOp.Results.BackupID))
cli.BuildCommandTree(cmd)
err = cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *BackupDeleteTeamsE2ESuite) TestTeamsBackupDeleteCmd_UnknownID() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "delete", "teams",
"--config-file", suite.dpnd.configFilePath,
"--"+flags.BackupFN, uuid.NewString())
cli.BuildCommandTree(cmd)
// unknown backupIDs should error since the modelStore can't find the backup
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------
func buildTeamsBackupCmd(
ctx context.Context,
configFile, team, category string,
recorder *strings.Builder,
) (*cobra.Command, context.Context) {
cmd := cliTD.StubRootCmd(
"backup", "create", "teams",
"--config-file", configFile,
"--"+flags.TeamFN, team,
"--"+flags.CategoryDataFN, category)
cli.BuildCommandTree(cmd)
cmd.SetOut(recorder)
return cmd, print.SetRootCmd(ctx, cmd)
}
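
Worth noting: the teams suites mirror the groups suites nearly line for line, and both build their scopes through selectors.NewGroupsBackup, since a Team's data lives in its backing M365 Group. A minimal sketch of that shared pattern, with a hypothetical helper name:

```go
import (
	"github.com/alcionai/corso/src/pkg/selectors"
	selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
)

// newChannelMessagesSelector is a hypothetical helper capturing the
// scope-building pattern repeated in both test files: the teams suites
// reuse the Groups selector because Teams content is stored in the
// backing M365 Group.
func newChannelMessagesSelector(resourceID string) selectors.Selector {
	sel := selectors.NewGroupsBackup([]string{resourceID})
	sel.Include(selTD.GroupsBackupChannelScope(sel))

	return sel.Selector
}
```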

View File

@ -19,6 +19,7 @@ import (
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
bupMD "github.com/alcionai/corso/src/pkg/backup/metadata"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
@ -120,10 +121,10 @@ func deserializeMetadata(
)
switch item.ID() {
case graph.PreviousPathFileName:
case bupMD.PreviousPathFileName:
err = deserializeMap(item.ToReader(), prevFolders)
case graph.DeltaURLsFileName:
case bupMD.DeltaURLsFileName:
err = deserializeMap(item.ToReader(), prevDeltas)
default:
@ -449,8 +450,8 @@ func (c *Collections) Get(
md, err := graph.MakeMetadataCollection(
pathPrefix,
[]graph.MetadataCollectionEntry{
graph.NewMetadataEntry(graph.PreviousPathFileName, folderPaths),
graph.NewMetadataEntry(graph.DeltaURLsFileName, deltaURLs),
graph.NewMetadataEntry(bupMD.PreviousPathFileName, folderPaths),
graph.NewMetadataEntry(bupMD.DeltaURLsFileName, deltaURLs),
},
c.statusUpdater)

View File

@ -23,6 +23,7 @@ import (
"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/tester"
bupMD "github.com/alcionai/corso/src/pkg/backup/metadata"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
@ -814,10 +815,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
func() []graph.MetadataCollectionEntry {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.DeltaURLsFileName,
bupMD.DeltaURLsFileName,
map[string]string{driveID1: deltaURL1}),
graph.NewMetadataEntry(
graph.PreviousPathFileName,
bupMD.PreviousPathFileName,
map[string]map[string]string{
driveID1: {
folderID1: path1,
@ -843,7 +844,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
func() []graph.MetadataCollectionEntry {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.DeltaURLsFileName,
bupMD.DeltaURLsFileName,
map[string]string{driveID1: deltaURL1}),
}
},
@ -859,7 +860,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
func() []graph.MetadataCollectionEntry {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.PreviousPathFileName,
bupMD.PreviousPathFileName,
map[string]map[string]string{
driveID1: {
folderID1: path1,
@ -886,10 +887,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
func() []graph.MetadataCollectionEntry {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.DeltaURLsFileName,
bupMD.DeltaURLsFileName,
map[string]string{driveID1: deltaURL1}),
graph.NewMetadataEntry(
graph.PreviousPathFileName,
bupMD.PreviousPathFileName,
map[string]map[string]string{
driveID1: {},
}),
@ -910,12 +911,12 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
func() []graph.MetadataCollectionEntry {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.DeltaURLsFileName,
bupMD.DeltaURLsFileName,
map[string]string{
driveID1: "",
}),
graph.NewMetadataEntry(
graph.PreviousPathFileName,
bupMD.PreviousPathFileName,
map[string]map[string]string{
driveID1: {
folderID1: path1,
@ -939,10 +940,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
func() []graph.MetadataCollectionEntry {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.DeltaURLsFileName,
bupMD.DeltaURLsFileName,
map[string]string{driveID1: deltaURL1}),
graph.NewMetadataEntry(
graph.PreviousPathFileName,
bupMD.PreviousPathFileName,
map[string]map[string]string{
driveID1: {
folderID1: path1,
@ -953,10 +954,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
func() []graph.MetadataCollectionEntry {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.DeltaURLsFileName,
bupMD.DeltaURLsFileName,
map[string]string{driveID2: deltaURL2}),
graph.NewMetadataEntry(
graph.PreviousPathFileName,
bupMD.PreviousPathFileName,
map[string]map[string]string{
driveID2: {
folderID2: path2,
@ -988,7 +989,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
func() []graph.MetadataCollectionEntry {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.PreviousPathFileName,
bupMD.PreviousPathFileName,
map[string]string{driveID1: deltaURL1}),
}
},
@ -1004,10 +1005,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
func() []graph.MetadataCollectionEntry {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.DeltaURLsFileName,
bupMD.DeltaURLsFileName,
map[string]string{driveID1: deltaURL1}),
graph.NewMetadataEntry(
graph.PreviousPathFileName,
bupMD.PreviousPathFileName,
map[string]map[string]string{
driveID1: {
folderID1: path1,
@ -1036,10 +1037,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
func() []graph.MetadataCollectionEntry {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.DeltaURLsFileName,
bupMD.DeltaURLsFileName,
map[string]string{driveID1: deltaURL1}),
graph.NewMetadataEntry(
graph.PreviousPathFileName,
bupMD.PreviousPathFileName,
map[string]map[string]string{
driveID1: {
folderID1: path1,
@ -1050,7 +1051,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
func() []graph.MetadataCollectionEntry {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.PreviousPathFileName,
bupMD.PreviousPathFileName,
map[string]map[string]string{
driveID1: {
folderID2: path2,
@ -1070,10 +1071,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
func() []graph.MetadataCollectionEntry {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.DeltaURLsFileName,
bupMD.DeltaURLsFileName,
map[string]string{driveID1: deltaURL1}),
graph.NewMetadataEntry(
graph.PreviousPathFileName,
bupMD.PreviousPathFileName,
map[string]map[string]string{
driveID1: {
folderID1: path1,
@ -1084,7 +1085,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
func() []graph.MetadataCollectionEntry {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.DeltaURLsFileName,
bupMD.DeltaURLsFileName,
map[string]string{driveID1: deltaURL2}),
}
},
@ -2281,13 +2282,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
pathPrefix,
[]graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.DeltaURLsFileName,
bupMD.DeltaURLsFileName,
map[string]string{
driveID1: prevDelta,
driveID2: prevDelta,
}),
graph.NewMetadataEntry(
graph.PreviousPathFileName,
bupMD.PreviousPathFileName,
test.prevFolderPaths),
},
func(*support.ControllerOperationStatus) {})

View File

@ -12,6 +12,7 @@ import (
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/pkg/backup/metadata"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
@ -29,7 +30,7 @@ func CreateCollections(
handlers map[path.CategoryType]backupHandler,
tenantID string,
scope selectors.ExchangeScope,
dps DeltaPaths,
dps metadata.DeltaPaths,
su support.StatusUpdater,
errs *fault.Bus,
) ([]data.BackupCollection, error) {
@ -98,7 +99,7 @@ func populateCollections(
statusUpdater support.StatusUpdater,
resolver graph.ContainerResolver,
scope selectors.ExchangeScope,
dps DeltaPaths,
dps metadata.DeltaPaths,
ctrlOpts control.Options,
errs *fault.Bus,
) (map[string]data.BackupCollection, error) {
@ -280,8 +281,8 @@ func populateCollections(
col, err := graph.MakeMetadataCollection(
pathPrefix,
[]graph.MetadataCollectionEntry{
graph.NewMetadataEntry(graph.PreviousPathFileName, currPaths),
graph.NewMetadataEntry(graph.DeltaURLsFileName, deltaURLs),
graph.NewMetadataEntry(metadata.PreviousPathFileName, currPaths),
graph.NewMetadataEntry(metadata.DeltaURLsFileName, deltaURLs),
},
statusUpdater)
if err != nil {
@ -296,7 +297,7 @@ func populateCollections(
// produces a set of id:path pairs from the deltapaths map.
// Each entry in the set will, if not removed, produce a collection
// that will delete the tombstone by path.
func makeTombstones(dps DeltaPaths) map[string]string {
func makeTombstones(dps metadata.DeltaPaths) map[string]string {
r := make(map[string]string, len(dps))
for id, v := range dps {

View File

@ -21,6 +21,7 @@ import (
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup/metadata"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
@ -150,24 +151,24 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
table := []struct {
name string
data []fileValues
expect map[string]DeltaPath
expect map[string]metadata.DeltaPath
canUsePreviousBackup bool
expectError assert.ErrorAssertionFunc
}{
{
name: "delta urls only",
data: []fileValues{
{graph.DeltaURLsFileName, "delta-link"},
{metadata.DeltaURLsFileName, "delta-link"},
},
expect: map[string]DeltaPath{},
expect: map[string]metadata.DeltaPath{},
canUsePreviousBackup: true,
expectError: assert.NoError,
},
{
name: "multiple delta urls",
data: []fileValues{
{graph.DeltaURLsFileName, "delta-link"},
{graph.DeltaURLsFileName, "delta-link-2"},
{metadata.DeltaURLsFileName, "delta-link"},
{metadata.DeltaURLsFileName, "delta-link-2"},
},
canUsePreviousBackup: false,
expectError: assert.Error,
@ -175,9 +176,9 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
{
name: "previous path only",
data: []fileValues{
{graph.PreviousPathFileName, "prev-path"},
{metadata.PreviousPathFileName, "prev-path"},
},
expect: map[string]DeltaPath{
expect: map[string]metadata.DeltaPath{
"key": {
Delta: "delta-link",
Path: "prev-path",
@ -189,8 +190,8 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
{
name: "multiple previous paths",
data: []fileValues{
{graph.PreviousPathFileName, "prev-path"},
{graph.PreviousPathFileName, "prev-path-2"},
{metadata.PreviousPathFileName, "prev-path"},
{metadata.PreviousPathFileName, "prev-path-2"},
},
canUsePreviousBackup: false,
expectError: assert.Error,
@ -198,10 +199,10 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
{
name: "delta urls and previous paths",
data: []fileValues{
{graph.DeltaURLsFileName, "delta-link"},
{graph.PreviousPathFileName, "prev-path"},
{metadata.DeltaURLsFileName, "delta-link"},
{metadata.PreviousPathFileName, "prev-path"},
},
expect: map[string]DeltaPath{
expect: map[string]metadata.DeltaPath{
"key": {
Delta: "delta-link",
Path: "prev-path",
@ -213,20 +214,20 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
{
name: "delta urls and empty previous paths",
data: []fileValues{
{graph.DeltaURLsFileName, "delta-link"},
{graph.PreviousPathFileName, ""},
{metadata.DeltaURLsFileName, "delta-link"},
{metadata.PreviousPathFileName, ""},
},
expect: map[string]DeltaPath{},
expect: map[string]metadata.DeltaPath{},
canUsePreviousBackup: true,
expectError: assert.NoError,
},
{
name: "empty delta urls and previous paths",
data: []fileValues{
{graph.DeltaURLsFileName, ""},
{graph.PreviousPathFileName, "prev-path"},
{metadata.DeltaURLsFileName, ""},
{metadata.PreviousPathFileName, "prev-path"},
},
expect: map[string]DeltaPath{
expect: map[string]metadata.DeltaPath{
"key": {
Delta: "delta-link",
Path: "prev-path",
@ -238,10 +239,10 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
{
name: "delta urls with special chars",
data: []fileValues{
{graph.DeltaURLsFileName, "`!@#$%^&*()_[]{}/\"\\"},
{graph.PreviousPathFileName, "prev-path"},
{metadata.DeltaURLsFileName, "`!@#$%^&*()_[]{}/\"\\"},
{metadata.PreviousPathFileName, "prev-path"},
},
expect: map[string]DeltaPath{
expect: map[string]metadata.DeltaPath{
"key": {
Delta: "`!@#$%^&*()_[]{}/\"\\",
Path: "prev-path",
@ -253,10 +254,10 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
{
name: "delta urls with escaped chars",
data: []fileValues{
{graph.DeltaURLsFileName, `\n\r\t\b\f\v\0\\`},
{graph.PreviousPathFileName, "prev-path"},
{metadata.DeltaURLsFileName, `\n\r\t\b\f\v\0\\`},
{metadata.PreviousPathFileName, "prev-path"},
},
expect: map[string]DeltaPath{
expect: map[string]metadata.DeltaPath{
"key": {
Delta: "\\n\\r\\t\\b\\f\\v\\0\\\\",
Path: "prev-path",
@ -271,10 +272,10 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
// rune(92) = \, rune(110) = n. Ensuring it's not possible to
// error in serializing/deserializing and produce a single newline
// character from those two runes.
{graph.DeltaURLsFileName, string([]rune{rune(92), rune(110)})},
{graph.PreviousPathFileName, "prev-path"},
{metadata.DeltaURLsFileName, string([]rune{rune(92), rune(110)})},
{metadata.PreviousPathFileName, "prev-path"},
},
expect: map[string]DeltaPath{
expect: map[string]metadata.DeltaPath{
"key": {
Delta: "\\n",
Path: "prev-path",
@ -485,7 +486,7 @@ func (suite *BackupIntgSuite) TestMailFetch() {
handlers,
suite.tenantID,
test.scope,
DeltaPaths{},
metadata.DeltaPaths{},
func(status *support.ControllerOperationStatus) {},
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
@ -565,7 +566,7 @@ func (suite *BackupIntgSuite) TestDelta() {
handlers,
suite.tenantID,
test.scope,
DeltaPaths{},
metadata.DeltaPaths{},
func(status *support.ControllerOperationStatus) {},
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
@ -649,7 +650,7 @@ func (suite *BackupIntgSuite) TestMailSerializationRegression() {
handlers,
suite.tenantID,
sel.Scopes()[0],
DeltaPaths{},
metadata.DeltaPaths{},
newStatusUpdater(t, &wg),
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
@ -730,7 +731,7 @@ func (suite *BackupIntgSuite) TestContactSerializationRegression() {
handlers,
suite.tenantID,
test.scope,
DeltaPaths{},
metadata.DeltaPaths{},
newStatusUpdater(t, &wg),
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
@ -858,7 +859,7 @@ func (suite *BackupIntgSuite) TestEventsSerializationRegression() {
handlers,
suite.tenantID,
test.scope,
DeltaPaths{},
metadata.DeltaPaths{},
newStatusUpdater(t, &wg),
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
@ -923,7 +924,7 @@ func (suite *CollectionPopulationSuite) TestPopulateCollections() {
}
statusUpdater = func(*support.ControllerOperationStatus) {}
allScope = selectors.NewExchangeBackup(nil).MailFolders(selectors.Any())[0]
dps = DeltaPaths{} // incrementals are tested separately
dps = metadata.DeltaPaths{} // incrementals are tested separately
commonResult = mockGetterResults{
added: []string{"a1", "a2", "a3"},
removed: []string{"r1", "r2", "r3"},
@ -1192,7 +1193,7 @@ func checkMetadata(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
cat path.CategoryType,
expect DeltaPaths,
expect metadata.DeltaPaths,
c data.BackupCollection,
) {
catPaths, _, err := ParseMetadataCollections(
@ -1313,10 +1314,10 @@ func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_D
name string
getter mockGetter
resolver graph.ContainerResolver
inputMetadata func(t *testing.T, cat path.CategoryType) DeltaPaths
inputMetadata func(t *testing.T, cat path.CategoryType) metadata.DeltaPaths
expectNewColls int
expectDeleted int
expectMetadata func(t *testing.T, cat path.CategoryType) DeltaPaths
expectMetadata func(t *testing.T, cat path.CategoryType) metadata.DeltaPaths
}{
{
name: "1 moved to duplicate",
@ -1327,25 +1328,25 @@ func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_D
},
},
resolver: newMockResolver(container1, container2),
inputMetadata: func(t *testing.T, cat path.CategoryType) DeltaPaths {
return DeltaPaths{
"1": DeltaPath{
inputMetadata: func(t *testing.T, cat path.CategoryType) metadata.DeltaPaths {
return metadata.DeltaPaths{
"1": metadata.DeltaPath{
Delta: "old_delta",
Path: oldPath1(t, cat).String(),
},
"2": DeltaPath{
"2": metadata.DeltaPath{
Delta: "old_delta",
Path: idPath2(t, cat).String(),
},
}
},
expectMetadata: func(t *testing.T, cat path.CategoryType) DeltaPaths {
return DeltaPaths{
"1": DeltaPath{
expectMetadata: func(t *testing.T, cat path.CategoryType) metadata.DeltaPaths {
return metadata.DeltaPaths{
"1": metadata.DeltaPath{
Delta: "delta_url",
Path: idPath1(t, cat).String(),
},
"2": DeltaPath{
"2": metadata.DeltaPath{
Delta: "delta_url2",
Path: idPath2(t, cat).String(),
},
@ -1361,25 +1362,25 @@ func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_D
},
},
resolver: newMockResolver(container1, container2),
inputMetadata: func(t *testing.T, cat path.CategoryType) DeltaPaths {
return DeltaPaths{
"1": DeltaPath{
inputMetadata: func(t *testing.T, cat path.CategoryType) metadata.DeltaPaths {
return metadata.DeltaPaths{
"1": metadata.DeltaPath{
Delta: "old_delta",
Path: oldPath1(t, cat).String(),
},
"2": DeltaPath{
"2": metadata.DeltaPath{
Delta: "old_delta",
Path: oldPath2(t, cat).String(),
},
}
},
expectMetadata: func(t *testing.T, cat path.CategoryType) DeltaPaths {
return DeltaPaths{
"1": DeltaPath{
expectMetadata: func(t *testing.T, cat path.CategoryType) metadata.DeltaPaths {
return metadata.DeltaPaths{
"1": metadata.DeltaPath{
Delta: "delta_url",
Path: idPath1(t, cat).String(),
},
"2": DeltaPath{
"2": metadata.DeltaPath{
Delta: "delta_url2",
Path: idPath2(t, cat).String(),
},
@ -1395,17 +1396,17 @@ func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_D
},
},
resolver: newMockResolver(container1, container2),
inputMetadata: func(t *testing.T, cat path.CategoryType) DeltaPaths {
return DeltaPaths{}
inputMetadata: func(t *testing.T, cat path.CategoryType) metadata.DeltaPaths {
return metadata.DeltaPaths{}
},
expectNewColls: 2,
expectMetadata: func(t *testing.T, cat path.CategoryType) DeltaPaths {
return DeltaPaths{
"1": DeltaPath{
expectMetadata: func(t *testing.T, cat path.CategoryType) metadata.DeltaPaths {
return metadata.DeltaPaths{
"1": metadata.DeltaPath{
Delta: "delta_url",
Path: idPath1(t, cat).String(),
},
"2": DeltaPath{
"2": metadata.DeltaPath{
Delta: "delta_url2",
Path: idPath2(t, cat).String(),
},
@ -1420,9 +1421,9 @@ func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_D
},
},
resolver: newMockResolver(container1),
inputMetadata: func(t *testing.T, cat path.CategoryType) DeltaPaths {
return DeltaPaths{
"2": DeltaPath{
inputMetadata: func(t *testing.T, cat path.CategoryType) metadata.DeltaPaths {
return metadata.DeltaPaths{
"2": metadata.DeltaPath{
Delta: "old_delta",
Path: idPath2(t, cat).String(),
},
@ -1430,9 +1431,9 @@ func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_D
},
expectNewColls: 1,
expectDeleted: 1,
expectMetadata: func(t *testing.T, cat path.CategoryType) DeltaPaths {
return DeltaPaths{
"1": DeltaPath{
expectMetadata: func(t *testing.T, cat path.CategoryType) metadata.DeltaPaths {
return metadata.DeltaPaths{
"1": metadata.DeltaPath{
Delta: "delta_url",
Path: idPath1(t, cat).String(),
},
@ -1604,7 +1605,7 @@ func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_r
}
statusUpdater = func(*support.ControllerOperationStatus) {}
allScope = selectors.NewExchangeBackup(nil).MailFolders(selectors.Any())[0]
dps = DeltaPaths{} // incrementals are tested separately
dps = metadata.DeltaPaths{} // incrementals are tested separately
container1 = mockContainer{
id: strPtr("1"),
displayName: strPtr("display_name_1"),
@ -1718,7 +1719,7 @@ func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_i
name string
getter mockGetter
resolver graph.ContainerResolver
dps DeltaPaths
dps metadata.DeltaPaths
expect map[string]endState
skipWhenForcedNoDelta bool
}{
@ -1735,7 +1736,7 @@ func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_i
p: path.Builder{}.Append("1", "new"),
l: path.Builder{}.Append("1", "new"),
}),
dps: DeltaPaths{},
dps: metadata.DeltaPaths{},
expect: map[string]endState{
"1": {data.NewState, false},
},
@ -1753,8 +1754,8 @@ func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_i
p: path.Builder{}.Append("1", "not_moved"),
l: path.Builder{}.Append("1", "not_moved"),
}),
dps: DeltaPaths{
"1": DeltaPath{
dps: metadata.DeltaPaths{
"1": metadata.DeltaPath{
Delta: "old_delta_url",
Path: prevPath(suite.T(), "1", "not_moved").String(),
},
@ -1776,8 +1777,8 @@ func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_i
p: path.Builder{}.Append("1", "moved"),
l: path.Builder{}.Append("1", "moved"),
}),
dps: DeltaPaths{
"1": DeltaPath{
dps: metadata.DeltaPaths{
"1": metadata.DeltaPath{
Delta: "old_delta_url",
Path: prevPath(suite.T(), "1", "prev").String(),
},
@ -1792,8 +1793,8 @@ func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_i
results: map[string]mockGetterResults{},
},
resolver: newMockResolver(),
dps: DeltaPaths{
"1": DeltaPath{
dps: metadata.DeltaPaths{
"1": metadata.DeltaPath{
Delta: "old_delta_url",
Path: prevPath(suite.T(), "1", "deleted").String(),
},
@ -1815,8 +1816,8 @@ func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_i
p: path.Builder{}.Append("2", "new"),
l: path.Builder{}.Append("2", "new"),
}),
dps: DeltaPaths{
"1": DeltaPath{
dps: metadata.DeltaPaths{
"1": metadata.DeltaPath{
Delta: "old_delta_url",
Path: prevPath(suite.T(), "1", "deleted").String(),
},
@ -1839,8 +1840,8 @@ func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_i
p: path.Builder{}.Append("2", "same"),
l: path.Builder{}.Append("2", "same"),
}),
dps: DeltaPaths{
"1": DeltaPath{
dps: metadata.DeltaPaths{
"1": metadata.DeltaPath{
Delta: "old_delta_url",
Path: prevPath(suite.T(), "1", "same").String(),
},
@ -1871,8 +1872,8 @@ func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_i
p: path.Builder{}.Append("2", "prev"),
l: path.Builder{}.Append("2", "prev"),
}),
dps: DeltaPaths{
"1": DeltaPath{
dps: metadata.DeltaPaths{
"1": metadata.DeltaPath{
Delta: "old_delta_url",
Path: prevPath(suite.T(), "1", "prev").String(),
},
@ -1895,12 +1896,12 @@ func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_i
p: path.Builder{}.Append("1", "not_moved"),
l: path.Builder{}.Append("1", "not_moved"),
}),
dps: DeltaPaths{
"1": DeltaPath{
dps: metadata.DeltaPaths{
"1": metadata.DeltaPath{
Delta: "old_delta_url",
Path: "1/fnords/mc/smarfs",
},
"2": DeltaPath{
"2": metadata.DeltaPath{
Delta: "old_delta_url",
Path: "2/fnords/mc/smarfs",
},
@ -1922,8 +1923,8 @@ func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_i
p: path.Builder{}.Append("1", "same"),
l: path.Builder{}.Append("1", "same"),
}),
dps: DeltaPaths{
"1": DeltaPath{
dps: metadata.DeltaPaths{
"1": metadata.DeltaPath{
Delta: "old_delta_url",
Path: prevPath(suite.T(), "1", "same").String(),
},
@ -1969,20 +1970,20 @@ func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_i
p: path.Builder{}.Append("4", "moved"),
l: path.Builder{}.Append("4", "moved"),
}),
dps: DeltaPaths{
"2": DeltaPath{
dps: metadata.DeltaPaths{
"2": metadata.DeltaPath{
Delta: "old_delta_url",
Path: prevPath(suite.T(), "2", "not_moved").String(),
},
"3": DeltaPath{
"3": metadata.DeltaPath{
Delta: "old_delta_url",
Path: prevPath(suite.T(), "3", "prev").String(),
},
"4": DeltaPath{
"4": metadata.DeltaPath{
Delta: "old_delta_url",
Path: prevPath(suite.T(), "4", "prev").String(),
},
"5": DeltaPath{
"5": metadata.DeltaPath{
Delta: "old_delta_url",
Path: prevPath(suite.T(), "5", "deleted").String(),
},

View File

@ -7,7 +7,7 @@ import (
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/backup/metadata"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
@ -17,50 +17,22 @@ import (
// store graph metadata such as delta tokens and folderID->path references.
func MetadataFileNames(cat path.CategoryType) []string {
switch cat {
// TODO: should this include events?
case path.EmailCategory, path.ContactsCategory:
return []string{graph.DeltaURLsFileName, graph.PreviousPathFileName}
return []string{metadata.DeltaURLsFileName, metadata.PreviousPathFileName}
default:
return []string{graph.PreviousPathFileName}
return []string{metadata.PreviousPathFileName}
}
}
type CatDeltaPaths map[path.CategoryType]DeltaPaths
type DeltaPaths map[string]DeltaPath
func (dps DeltaPaths) AddDelta(k, d string) {
dp, ok := dps[k]
if !ok {
dp = DeltaPath{}
}
dp.Delta = d
dps[k] = dp
}
func (dps DeltaPaths) AddPath(k, p string) {
dp, ok := dps[k]
if !ok {
dp = DeltaPath{}
}
dp.Path = p
dps[k] = dp
}
type DeltaPath struct {
Delta string
Path string
}
// ParseMetadataCollections produces a map of structs holding delta
// and path lookup maps.
func ParseMetadataCollections(
ctx context.Context,
colls []data.RestoreCollection,
) (CatDeltaPaths, bool, error) {
) (metadata.CatDeltaPaths, bool, error) {
// cdp stores metadata
cdp := CatDeltaPaths{
cdp := metadata.CatDeltaPaths{
path.ContactsCategory: {},
path.EmailCategory: {},
path.EventsCategory: {},
@ -107,8 +79,8 @@ func ParseMetadataCollections(
}
switch item.ID() {
case graph.PreviousPathFileName:
if _, ok := found[category]["path"]; ok {
case metadata.PreviousPathFileName:
if _, ok := found[category][metadata.PathKey]; ok {
return nil, false, clues.Wrap(clues.New(category.String()), "multiple versions of path metadata").WithClues(ctx)
}
@ -116,10 +88,10 @@ func ParseMetadataCollections(
cdps.AddPath(k, p)
}
found[category]["path"] = struct{}{}
found[category][metadata.PathKey] = struct{}{}
case graph.DeltaURLsFileName:
if _, ok := found[category]["delta"]; ok {
case metadata.DeltaURLsFileName:
if _, ok := found[category][metadata.DeltaKey]; ok {
return nil, false, clues.Wrap(clues.New(category.String()), "multiple versions of delta metadata").WithClues(ctx)
}
@ -127,7 +99,7 @@ func ParseMetadataCollections(
cdps.AddDelta(k, d)
}
found[category]["delta"] = struct{}{}
found[category][metadata.DeltaKey] = struct{}{}
}
cdp[category] = cdps
@ -142,7 +114,7 @@ func ParseMetadataCollections(
if errs.Failure() != nil {
logger.CtxErr(ctx, errs.Failure()).Info("reading metadata collection items")
return CatDeltaPaths{
return metadata.CatDeltaPaths{
path.ContactsCategory: {},
path.EmailCategory: {},
path.EventsCategory: {},

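The DeltaPaths types consumed above now live in pkg/backup/metadata. A minimal sketch of building and reading the same structure ParseMetadataCollections returns (the folder ID and token values are made up):

package main

import (
	"fmt"

	"github.com/alcionai/corso/src/pkg/backup/metadata"
	"github.com/alcionai/corso/src/pkg/path"
)

func main() {
	cdp := metadata.CatDeltaPaths{
		path.EmailCategory: {},
	}

	// AddDelta and AddPath upsert the DeltaPath entry for a container.
	cdp[path.EmailCategory].AddDelta("folder-1", "delta-token")
	cdp[path.EmailCategory].AddPath("folder-1", "tid/exchange/uid/email/folder-1")

	dp := cdp[path.EmailCategory]["folder-1"]
	fmt.Println(dp.Delta, dp.Path)
}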
View File

@ -5,14 +5,15 @@ import (
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/common/pii"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/pkg/backup/metadata"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
@ -34,10 +35,9 @@ func CreateCollections(
bh backupHandler,
tenantID string,
scope selectors.GroupsScope,
// dps DeltaPaths,
su support.StatusUpdater,
errs *fault.Bus,
) ([]data.BackupCollection, error) {
) ([]data.BackupCollection, bool, error) {
ctx = clues.Add(ctx, "category", scope.Category().PathType())
var (
@ -50,6 +50,13 @@ func CreateCollections(
}
)
cdps, canUsePreviousBackup, err := parseMetadataCollections(ctx, bpc.MetadataCollections)
if err != nil {
return nil, false, err
}
ctx = clues.Add(ctx, "can_use_previous_backup", canUsePreviousBackup)
catProgress := observe.MessageWithCompletion(
ctx,
observe.Bulletf("%s", qp.Category))
@ -57,7 +64,7 @@ func CreateCollections(
channels, err := bh.getChannels(ctx)
if err != nil {
return nil, clues.Stack(err)
return nil, false, clues.Stack(err)
}
collections, err := populateCollections(
@ -67,18 +74,18 @@ func CreateCollections(
su,
channels,
scope,
// dps,
cdps[scope.Category().PathType()],
bpc.Options,
errs)
if err != nil {
return nil, clues.Wrap(err, "filling collections")
return nil, false, clues.Wrap(err, "filling collections")
}
for _, coll := range collections {
allCollections = append(allCollections, coll)
}
return allCollections, nil
return allCollections, canUsePreviousBackup, nil
}
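
CreateCollections now reports whether the previous backup's metadata was usable. A stand-in sketch of the new three-value contract (produce here is hypothetical, not the real function):

package main

import "fmt"

// produce stands in for groups.CreateCollections, which now returns
// its collections, a canUsePreviousBackup flag, and an error.
func produce() ([]string, bool, error) {
	// pretend the metadata collections failed to parse cleanly: the
	// backup proceeds, but nothing may merge with prior snapshots.
	return []string{"chan-1"}, false, nil
}

func main() {
	colls, canUsePreviousBackup, err := produce()
	if err != nil {
		panic(err)
	}

	if !canUsePreviousBackup {
		fmt.Println("treating this run as a full, non-incremental backup")
	}

	fmt.Println(len(colls), "collections produced")
}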
func populateCollections(
@ -88,79 +95,76 @@ func populateCollections(
statusUpdater support.StatusUpdater,
channels []models.Channelable,
scope selectors.GroupsScope,
// dps DeltaPaths,
dps metadata.DeltaPaths,
ctrlOpts control.Options,
errs *fault.Bus,
) (map[string]data.BackupCollection, error) {
var (
// channel ID -> BackupCollection.
channelCollections := map[string]data.BackupCollection{}
collections = map[string]data.BackupCollection{}
// channel ID -> delta url or folder path lookups
// deltaURLs = map[string]string{}
// currPaths = map[string]string{}
deltaURLs = map[string]string{}
currPaths = map[string]string{}
// a copy of previousPaths. Every channel present in the slice param
// gets removed from this map; the channels remaining at the end of
// the process were deleted upstream.
// tombstones = makeTombstones(dps)
tombstones = makeTombstones(dps)
el = errs.Local()
)
logger.Ctx(ctx).Info("filling collections")
// , "len_deltapaths", len(dps))
el := errs.Local()
logger.Ctx(ctx).Info("filling collections", "len_deltapaths", len(dps))
for _, c := range channels {
if el.Failure() != nil {
return nil, el.Failure()
}
// delete(tombstones, cID)
var (
cID = ptr.Val(c.GetId())
cName = ptr.Val(c.GetDisplayName())
err error
// dp = dps[cID]
// prevDelta = dp.Delta
// prevPathStr = dp.Path // do not log: pii; log prevPath instead
// prevPath path.Path
dp = dps[cID]
prevDelta = dp.Delta
prevPathStr = dp.Path // do not log: pii; log prevPath instead
prevPath path.Path
ictx = clues.Add(
ctx,
"channel_id", cID)
// "previous_delta", pii.SafeURL{
// URL: prevDelta,
// SafePathElems: graph.SafeURLPathParams,
// SafeQueryKeys: graph.SafeURLQueryParams,
// })
"channel_id", cID,
"previous_delta", pii.SafeURL{
URL: prevDelta,
SafePathElems: graph.SafeURLPathParams,
SafeQueryKeys: graph.SafeURLQueryParams,
})
)
delete(tombstones, cID)
// Only create a collection if the path matches the scope.
if !bh.includeContainer(ictx, qp, c, scope) {
continue
}
// if len(prevPathStr) > 0 {
// if prevPath, err = pathFromPrevString(prevPathStr); err != nil {
// logger.CtxErr(ictx, err).Error("parsing prev path")
// // if the previous path is unusable, then the delta must be, too.
// prevDelta = ""
// }
// }
if len(prevPathStr) > 0 {
if prevPath, err = pathFromPrevString(prevPathStr); err != nil {
logger.CtxErr(ictx, err).Error("parsing prev path")
// if the previous path is unusable, then the delta must be, too.
prevDelta = ""
}
}
// ictx = clues.Add(ictx, "previous_path", prevPath)
ictx = clues.Add(ictx, "previous_path", prevPath)
items, _, err := bh.getChannelMessageIDsDelta(ctx, cID, "")
added, removed, du, err := bh.getChannelMessageIDsDelta(ctx, cID, prevDelta)
if err != nil {
el.AddRecoverable(ctx, clues.Stack(err))
continue
}
// if len(newDelta.URL) > 0 {
// deltaURLs[cID] = newDelta.URL
// } else if !newDelta.Reset {
// logger.Ctx(ictx).Info("missing delta url")
// }
var prevPath path.Path
if len(du.URL) > 0 {
deltaURLs[cID] = du.URL
} else if !du.Reset {
logger.Ctx(ictx).Info("missing delta url")
}
currPath, err := bh.canonicalPath(path.Builder{}.Append(cID), qp.TenantID)
if err != nil {
@ -168,6 +172,13 @@ func populateCollections(
continue
}
// Remove any deleted IDs from the set of added IDs because items that are
// deleted and then restored will have a different ID than they did
// originally.
for remove := range removed {
delete(added, remove)
}
edc := NewCollection(
bh,
qp.ProtectedResource.ID(),
@ -175,50 +186,93 @@ func populateCollections(
prevPath,
path.Builder{}.Append(cName),
qp.Category,
added,
removed,
statusUpdater,
ctrlOpts)
ctrlOpts,
du.Reset)
channelCollections[cID] = &edc
collections[cID] = &edc
// TODO: handle deleted items for v1 backup.
// // Remove any deleted IDs from the set of added IDs because items that are
// // deleted and then restored will have a different ID than they did
// // originally.
// for _, remove := range removed {
// delete(edc.added, remove)
// edc.removed[remove] = struct{}{}
// }
// // add the current path for the container ID to be used in the next backup
// // as the "previous path", for reference in case of a rename or relocation.
// currPaths[cID] = currPath.String()
// FIXME: normally this goes before removal, but the linters require no bottom comments
maps.Copy(edc.added, items)
// add the current path for the container ID to be used in the next backup
// as the "previous path", for reference in case of a rename or relocation.
currPaths[cID] = currPath.String()
}
// TODO: handle tombstones here
// A tombstone is a channel that needs to be marked for deletion.
// The only situation where a tombstone should appear is if the channel exists
// in the `previousPath` set, but does not exist in the enumeration.
for id, p := range tombstones {
if el.Failure() != nil {
return nil, el.Failure()
}
var (
err error
ictx = clues.Add(ctx, "tombstone_id", id)
)
if collections[id] != nil {
el.AddRecoverable(ctx, clues.New("conflict: tombstone exists for a live collection").WithClues(ictx))
continue
}
// an empty previous path only occurs if the channel was picked up as
// new during enumeration and then deleted in flight before this stage.
if len(p) == 0 {
continue
}
prevPath, err := pathFromPrevString(p)
if err != nil {
// technically shouldn't ever happen. But just in case...
logger.CtxErr(ictx, err).Error("parsing tombstone prev path")
continue
}
edc := NewCollection(
bh,
qp.ProtectedResource.ID(),
nil, // marks the collection as deleted
prevPath,
nil, // tombstones don't need a location
qp.Category,
nil, // no items added
nil, // this deletes a directory, so no items deleted either
statusUpdater,
ctrlOpts,
false)
collections[id] = &edc
}
logger.Ctx(ctx).Infow(
"adding metadata collection entries",
// "num_deltas_entries", len(deltaURLs),
"num_paths_entries", len(channelCollections))
"num_deltas_entries", len(deltaURLs),
"num_paths_entries", len(collections))
// col, err := graph.MakeMetadataCollection(
// qp.TenantID,
// qp.ProtectedResource.ID(),
// path.ExchangeService,
// qp.Category,
// []graph.MetadataCollectionEntry{
// graph.NewMetadataEntry(graph.PreviousPathFileName, currPaths),
// graph.NewMetadataEntry(graph.DeltaURLsFileName, deltaURLs),
// },
// statusUpdater)
// if err != nil {
// return nil, clues.Wrap(err, "making metadata collection")
// }
// channelCollections["metadata"] = col
return channelCollections, el.Failure()
pathPrefix, err := path.Builder{}.ToServiceCategoryMetadataPath(
qp.TenantID,
qp.ProtectedResource.ID(),
path.GroupsService,
qp.Category,
false)
if err != nil {
return nil, clues.Wrap(err, "making metadata path")
}
col, err := graph.MakeMetadataCollection(
pathPrefix,
[]graph.MetadataCollectionEntry{
graph.NewMetadataEntry(metadata.PreviousPathFileName, currPaths),
graph.NewMetadataEntry(metadata.DeltaURLsFileName, deltaURLs),
},
statusUpdater)
if err != nil {
return nil, clues.Wrap(err, "making metadata collection")
}
collections["metadata"] = col
return collections, el.Failure()
}

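The added/removed reconciliation above is worth calling out: an item that is deleted and later restored comes back with a new ID, so any ID flagged as removed is dropped from the added set. A self-contained sketch:

package main

import "fmt"

func main() {
	added := map[string]struct{}{"msg-1": {}, "msg-2": {}}
	removed := map[string]struct{}{"msg-2": {}}

	// mirror the loop in populateCollections: a removed ID can never
	// also be a valid addition, since restores mint fresh IDs.
	for remove := range removed {
		delete(added, remove)
	}

	fmt.Println(len(added), len(removed)) // 1 1
}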
View File

@ -22,6 +22,7 @@ import (
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/backup/metadata"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
@ -40,6 +41,7 @@ type mockBackupHandler struct {
channels []models.Channelable
channelsErr error
messageIDs map[string]struct{}
deletedMsgIDs map[string]struct{}
messagesErr error
messages map[string]models.ChatMessageable
info map[string]*details.GroupsInfo
@ -54,8 +56,8 @@ func (bh mockBackupHandler) getChannels(context.Context) ([]models.Channelable,
func (bh mockBackupHandler) getChannelMessageIDsDelta(
_ context.Context,
_, _ string,
) (map[string]struct{}, api.DeltaUpdate, error) {
return bh.messageIDs, api.DeltaUpdate{}, bh.messagesErr
) (map[string]struct{}, map[string]struct{}, api.DeltaUpdate, error) {
return bh.messageIDs, bh.deletedMsgIDs, api.DeltaUpdate{}, bh.messagesErr
}
func (bh mockBackupHandler) includeContainer(
@ -115,19 +117,15 @@ func (suite *BackupUnitSuite) TestPopulateCollections() {
TenantID: suite.creds.AzureTenantID,
}
statusUpdater = func(*support.ControllerOperationStatus) {}
allScope = selectors.NewGroupsBackup(nil).Channels(selectors.Any())[0]
)
table := []struct {
name string
mock mockBackupHandler
scope selectors.GroupsScope
failFast control.FailurePolicy
expectErr require.ErrorAssertionFunc
expectColls int
expectNewColls int
expectMetadataColls int
expectDoNotMergeColls int
}{
{
name: "happy path, one container",
@ -135,12 +133,21 @@ func (suite *BackupUnitSuite) TestPopulateCollections() {
channels: testdata.StubChannels("one"),
messageIDs: map[string]struct{}{"msg-one": {}},
},
scope: allScope,
expectErr: require.NoError,
expectColls: 1,
expectColls: 2,
expectNewColls: 1,
expectMetadataColls: 0,
expectDoNotMergeColls: 1,
expectMetadataColls: 1,
},
{
name: "happy path, one container, only deleted messages",
mock: mockBackupHandler{
channels: testdata.StubChannels("one"),
deletedMsgIDs: map[string]struct{}{"msg-one": {}},
},
expectErr: require.NoError,
expectColls: 2,
expectNewColls: 1,
expectMetadataColls: 1,
},
{
name: "happy path, many containers",
@ -148,12 +155,10 @@ func (suite *BackupUnitSuite) TestPopulateCollections() {
channels: testdata.StubChannels("one", "two"),
messageIDs: map[string]struct{}{"msg-one": {}},
},
scope: allScope,
expectErr: require.NoError,
expectColls: 2,
expectColls: 3,
expectNewColls: 2,
expectMetadataColls: 0,
expectDoNotMergeColls: 2,
expectMetadataColls: 1,
},
{
name: "no containers pass scope",
@ -161,34 +166,28 @@ func (suite *BackupUnitSuite) TestPopulateCollections() {
channels: testdata.StubChannels("one"),
doNotInclude: true,
},
scope: selectors.NewGroupsBackup(nil).Channels(selectors.None())[0],
expectErr: require.NoError,
expectColls: 0,
expectColls: 1,
expectNewColls: 0,
expectMetadataColls: 0,
expectDoNotMergeColls: 0,
expectMetadataColls: 1,
},
{
name: "no channels",
mock: mockBackupHandler{},
scope: allScope,
expectErr: require.NoError,
expectColls: 0,
expectColls: 1,
expectNewColls: 0,
expectMetadataColls: 0,
expectDoNotMergeColls: 0,
expectMetadataColls: 1,
},
{
name: "no channel messages",
mock: mockBackupHandler{
channels: testdata.StubChannels("one"),
},
scope: allScope,
expectErr: require.NoError,
expectColls: 1,
expectColls: 2,
expectNewColls: 1,
expectMetadataColls: 0,
expectDoNotMergeColls: 1,
expectMetadataColls: 1,
},
{
name: "err: deleted in flight",
@ -196,12 +195,10 @@ func (suite *BackupUnitSuite) TestPopulateCollections() {
channels: testdata.StubChannels("one"),
messagesErr: graph.ErrDeletedInFlight,
},
scope: allScope,
expectErr: require.Error,
expectColls: 0,
expectColls: 1,
expectNewColls: 0,
expectMetadataColls: 0,
expectDoNotMergeColls: 0,
expectMetadataColls: 1,
},
{
name: "err: other error",
@ -209,32 +206,20 @@ func (suite *BackupUnitSuite) TestPopulateCollections() {
channels: testdata.StubChannels("one"),
messagesErr: assert.AnError,
},
scope: allScope,
expectErr: require.Error,
expectColls: 0,
expectColls: 1,
expectNewColls: 0,
expectMetadataColls: 0,
expectDoNotMergeColls: 0,
expectMetadataColls: 1,
},
}
for _, test := range table {
// for _, canMakeDeltaQueries := range []bool{true, false} {
name := test.name
// if canMakeDeltaQueries {
// name += "-delta"
// } else {
// name += "-non-delta"
// }
suite.Run(name, func() {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
ctrlOpts := control.Options{FailureHandling: test.failFast}
// ctrlOpts.ToggleFeatures.DisableDelta = !canMakeDeltaQueries
ctrlOpts := control.Options{FailureHandling: control.FailFast}
collections, err := populateCollections(
ctx,
@ -242,7 +227,8 @@ func (suite *BackupUnitSuite) TestPopulateCollections() {
test.mock,
statusUpdater,
test.mock.channels,
test.scope,
selectors.NewGroupsBackup(nil).Channels(selectors.Any())[0],
nil,
ctrlOpts,
fault.New(true))
test.expectErr(t, err, clues.ToCore(err))
@ -273,12 +259,168 @@ func (suite *BackupUnitSuite) TestPopulateCollections() {
assert.Zero(t, deleteds, "deleted collections")
assert.Equal(t, test.expectNewColls, news, "new collections")
assert.Equal(t, test.expectMetadataColls, metadatas, "metadata collections")
assert.Equal(t, test.expectDoNotMergeColls, doNotMerges, "doNotMerge collections")
})
}
}
// }
func (suite *BackupUnitSuite) TestPopulateCollections_incremental() {
var (
qp = graph.QueryParams{
Category: path.ChannelMessagesCategory, // doesn't matter which one we use.
ProtectedResource: inMock.NewProvider("group_id", "user_name"),
TenantID: suite.creds.AzureTenantID,
}
statusUpdater = func(*support.ControllerOperationStatus) {}
allScope = selectors.NewGroupsBackup(nil).Channels(selectors.Any())[0]
)
chanPath, err := path.Build("tid", "grp", path.GroupsService, path.ChannelMessagesCategory, false, "chan")
require.NoError(suite.T(), err, clues.ToCore(err))
table := []struct {
name string
mock mockBackupHandler
deltaPaths metadata.DeltaPaths
expectErr require.ErrorAssertionFunc
expectColls int
expectNewColls int
expectTombstoneCols int
expectMetadataColls int
}{
{
name: "non incremental",
mock: mockBackupHandler{
channels: testdata.StubChannels("chan"),
messageIDs: map[string]struct{}{"msg": {}},
},
deltaPaths: metadata.DeltaPaths{},
expectErr: require.NoError,
expectColls: 2,
expectNewColls: 1,
expectTombstoneCols: 0,
expectMetadataColls: 1,
},
{
name: "incremental",
mock: mockBackupHandler{
channels: testdata.StubChannels("chan"),
deletedMsgIDs: map[string]struct{}{"msg": {}},
},
deltaPaths: metadata.DeltaPaths{
"chan": {
Delta: "chan",
Path: chanPath.String(),
},
},
expectErr: require.NoError,
expectColls: 2,
expectNewColls: 0,
expectTombstoneCols: 0,
expectMetadataColls: 1,
},
{
name: "incremental no new messages",
mock: mockBackupHandler{
channels: testdata.StubChannels("chan"),
},
deltaPaths: metadata.DeltaPaths{
"chan": {
Delta: "chan",
Path: chanPath.String(),
},
},
expectErr: require.NoError,
expectColls: 2,
expectNewColls: 0,
expectTombstoneCols: 0,
expectMetadataColls: 1,
},
{
name: "incremental deleted channel",
mock: mockBackupHandler{
channels: testdata.StubChannels(),
},
deltaPaths: metadata.DeltaPaths{
"chan": {
Delta: "chan",
Path: chanPath.String(),
},
},
expectErr: require.NoError,
expectColls: 2,
expectNewColls: 0,
expectTombstoneCols: 1,
expectMetadataColls: 1,
},
{
name: "incremental new and deleted channel",
mock: mockBackupHandler{
channels: testdata.StubChannels("chan2"),
messageIDs: map[string]struct{}{"msg": {}},
},
deltaPaths: metadata.DeltaPaths{
"chan": {
Delta: "chan",
Path: chanPath.String(),
},
},
expectErr: require.NoError,
expectColls: 3,
expectNewColls: 1,
expectTombstoneCols: 1,
expectMetadataColls: 1,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
ctrlOpts := control.Options{FailureHandling: control.FailFast}
collections, err := populateCollections(
ctx,
qp,
test.mock,
statusUpdater,
test.mock.channels,
allScope,
test.deltaPaths,
ctrlOpts,
fault.New(true))
test.expectErr(t, err, clues.ToCore(err))
assert.Len(t, collections, test.expectColls, "number of collections")
// collection assertions
tombstones, news, metadatas, doNotMerges := 0, 0, 0, 0
for _, c := range collections {
if c.FullPath() != nil && c.FullPath().Service() == path.GroupsMetadataService {
metadatas++
continue
}
if c.State() == data.DeletedState {
tombstones++
}
if c.State() == data.NewState {
news++
}
if c.DoNotMergeItems() {
doNotMerges++
}
}
assert.Equal(t, test.expectNewColls, news, "new collections")
assert.Equal(t, test.expectTombstoneCols, tombstones, "tombstone collections")
assert.Equal(t, test.expectMetadataColls, metadatas, "metadata collections")
})
}
}
// ---------------------------------------------------------------------------
// Integration tests
@ -330,15 +472,13 @@ func (suite *BackupIntgSuite) TestCreateCollections() {
name string
scope selectors.GroupsScope
channelNames map[string]struct{}
canMakeDeltaQueries bool
}{
{
name: "channel messages non-delta",
name: "channel messages",
scope: selTD.GroupsBackupChannelScope(selectors.NewGroupsBackup(resources))[0],
channelNames: map[string]struct{}{
selTD.TestChannelName: {},
},
canMakeDeltaQueries: false,
},
}
@ -350,7 +490,6 @@ func (suite *BackupIntgSuite) TestCreateCollections() {
defer flush()
ctrlOpts := control.DefaultOptions()
ctrlOpts.ToggleFeatures.DisableDelta = !test.canMakeDeltaQueries
sel := selectors.NewGroupsBackup([]string{protectedResource})
sel.Include(selTD.GroupsBackupChannelScope(sel))
@ -362,7 +501,7 @@ func (suite *BackupIntgSuite) TestCreateCollections() {
Selector: sel.Selector,
}
collections, err := CreateCollections(
collections, _, err := CreateCollections(
ctx,
bpc,
handler,

View File

@ -39,7 +39,7 @@ func (bh channelsBackupHandler) getChannels(
func (bh channelsBackupHandler) getChannelMessageIDsDelta(
ctx context.Context,
channelID, prevDelta string,
) (map[string]struct{}, api.DeltaUpdate, error) {
) (map[string]struct{}, map[string]struct{}, api.DeltaUpdate, error) {
return bh.ac.GetChannelMessageIDsDelta(ctx, bh.protectedResource, channelID, prevDelta)
}

View File

@ -64,7 +64,7 @@ type Collection struct {
state data.CollectionState
// doNotMergeItems should only be true if the old delta token expired.
// doNotMergeItems bool
doNotMergeItems bool
}
// NewExchangeDataCollection creates an ExchangeDataCollection.
@ -79,20 +79,22 @@ func NewCollection(
curr, prev path.Path,
location *path.Builder,
category path.CategoryType,
added map[string]struct{},
removed map[string]struct{},
statusUpdater support.StatusUpdater,
ctrlOpts control.Options,
// doNotMergeItems bool,
doNotMergeItems bool,
) Collection {
collection := Collection{
added: map[string]struct{}{},
added: added,
category: category,
ctrl: ctrlOpts,
// doNotMergeItems: doNotMergeItems,
doNotMergeItems: doNotMergeItems,
fullPath: curr,
getter: getter,
locationPath: location,
prevPath: prev,
removed: make(map[string]struct{}, 0),
removed: removed,
state: data.StateOf(prev, curr),
statusUpdater: statusUpdater,
stream: make(chan data.Item, collectionChannelBufferSize),

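NewCollection derives its state from the prev/curr path pair via data.StateOf, which is how the tombstone call above works: a nil current path marks the collection deleted. A sketch of the presumed decision table (an assumption based on the call sites; the real logic lives in internal/data):

package main

import "fmt"

// stateOf approximates what data.StateOf appears to do at the call
// sites in this diff; treat the exact precedence as an assumption.
func stateOf(prev, curr string) string {
	switch {
	case curr == "":
		return "deleted" // tombstone: NewCollection got a nil current path
	case prev == "":
		return "new" // first time this container has been seen
	case prev != curr:
		return "moved"
	default:
		return "not moved"
	}
}

func main() {
	fmt.Println(stateOf("", "grp/chan"))   // new
	fmt.Println(stateOf("grp/chan", ""))   // deleted
	fmt.Println(stateOf("grp/a", "grp/b")) // moved
	fmt.Println(stateOf("grp/a", "grp/a")) // not moved
}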
View File

@ -124,8 +124,10 @@ func (suite *CollectionSuite) TestNewCollection_state() {
"g",
test.curr, test.prev, test.loc,
0,
nil, nil,
nil,
control.DefaultOptions())
control.DefaultOptions(),
false)
assert.Equal(t, test.expect, c.State(), "collection state")
assert.Equal(t, test.curr, c.fullPath, "full path")
assert.Equal(t, test.prev, c.prevPath, "prev path")

View File

@ -24,7 +24,7 @@ type backupHandler interface {
getChannelMessageIDsDelta(
ctx context.Context,
channelID, prevDelta string,
) (map[string]struct{}, api.DeltaUpdate, error)
) (map[string]struct{}, map[string]struct{}, api.DeltaUpdate, error)
// includeContainer evaluates whether the channel is included
// in the provided scope.

View File

@ -0,0 +1,130 @@
package groups
import (
"context"
"encoding/json"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/pkg/backup/metadata"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
)
// ParseMetadataCollections produces a map of structs holding delta
// and path lookup maps.
func parseMetadataCollections(
ctx context.Context,
colls []data.RestoreCollection,
) (metadata.CatDeltaPaths, bool, error) {
// cdp stores metadata
cdp := metadata.CatDeltaPaths{
path.ChannelMessagesCategory: {},
}
// found tracks the metadata we've loaded, to make sure we don't
// fetch overlapping copies.
found := map[path.CategoryType]map[string]struct{}{
path.ChannelMessagesCategory: {},
}
// errors from metadata items should not stop the backup,
// but they should prevent us from using previous backups
errs := fault.New(true)
for _, coll := range colls {
var (
breakLoop bool
items = coll.Items(ctx, errs)
category = coll.FullPath().Category()
)
for {
select {
case <-ctx.Done():
return nil, false, clues.Wrap(ctx.Err(), "parsing collection metadata").WithClues(ctx)
case item, ok := <-items:
if !ok || errs.Failure() != nil {
breakLoop = true
break
}
var (
m = map[string]string{}
cdps, wantedCategory = cdp[category]
)
// avoid sharepoint site deltapaths
if !wantedCategory {
continue
}
err := json.NewDecoder(item.ToReader()).Decode(&m)
if err != nil {
return nil, false, clues.Wrap(err, "decoding metadata json").WithClues(ctx)
}
switch item.ID() {
case metadata.PreviousPathFileName:
// no-op at this time, previous paths not needed
case metadata.DeltaURLsFileName:
if _, ok := found[category][metadata.DeltaKey]; ok {
return nil, false, clues.Wrap(clues.New(category.String()), "multiple versions of delta metadata").WithClues(ctx)
}
for k, d := range m {
cdps.AddDelta(k, d)
}
found[category][metadata.DeltaKey] = struct{}{}
}
cdp[category] = cdps
}
if breakLoop {
break
}
}
}
if errs.Failure() != nil {
logger.CtxErr(ctx, errs.Failure()).Info("reading metadata collection items")
return metadata.CatDeltaPaths{
path.ChannelMessagesCategory: {},
}, false, nil
}
// Do not remove entries that contain only a path or a delta, but not both.
// This condition is expected. Channels only record their path. Messages
// only record their deltas.
return cdp, true, nil
}
// makeTombstones produces a set of id:path pairs from the deltapaths map.
// Each entry in the set that is not removed during enumeration produces
// a collection marking that path as deleted.
func makeTombstones(dps metadata.DeltaPaths) map[string]string {
r := make(map[string]string, len(dps))
for id, v := range dps {
r[id] = v.Path
}
return r
}
func pathFromPrevString(ps string) (path.Path, error) {
p, err := path.FromDataLayerPath(ps, false)
if err != nil {
return nil, clues.Wrap(err, "parsing previous path string")
}
return p, nil
}

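makeTombstones and the delete(tombstones, cID) call in populateCollections form a simple mark-and-sweep: seed the map with every previously-known channel, remove each one seen in the current enumeration, and whatever remains was deleted upstream. A self-contained sketch of that flow:

package main

import "fmt"

type deltaPath struct{ delta, path string }

func main() {
	previous := map[string]deltaPath{
		"chan-1": {"d1", "tid/groups/gid/channelMessages/chan-1"},
		"chan-2": {"d2", "tid/groups/gid/channelMessages/chan-2"},
	}

	// makeTombstones: channel ID -> previous path
	tombstones := make(map[string]string, len(previous))
	for id, v := range previous {
		tombstones[id] = v.path
	}

	// the current enumeration only sees chan-1
	for _, id := range []string{"chan-1"} {
		delete(tombstones, id)
	}

	// chan-2 survives the sweep, so it gets a deletion collection
	fmt.Println(tombstones)
}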
View File

@ -7,13 +7,13 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
)
func StubChannels(names ...string) []models.Channelable {
sl := make([]models.Channelable, 0, len(names))
func StubChannels(ids ...string) []models.Channelable {
sl := make([]models.Channelable, 0, len(ids))
for _, name := range names {
for _, id := range ids {
ch := models.NewChannel()
ch.SetDisplayName(ptr.To(name))
ch.SetId(ptr.To(uuid.NewString()))
ch.SetDisplayName(ptr.To(id))
ch.SetId(ptr.To(id))
sl = append(sl, ch)
}
@ -21,15 +21,15 @@ func StubChannels(names ...string) []models.Channelable {
return sl
}
func StubChatMessages(names ...string) []models.ChatMessageable {
sl := make([]models.ChatMessageable, 0, len(names))
func StubChatMessages(ids ...string) []models.ChatMessageable {
sl := make([]models.ChatMessageable, 0, len(ids))
for _, name := range names {
for _, id := range ids {
cm := models.NewChatMessage()
cm.SetId(ptr.To(uuid.NewString()))
body := models.NewItemBody()
body.SetContent(ptr.To(name))
body.SetContent(ptr.To(id))
cm.SetBody(body)

View File

@ -34,20 +34,6 @@ const (
AddtlDataRemoved = "@removed"
)
// ---------------------------------------------------------------------------
// Metadata Files
// ---------------------------------------------------------------------------
const (
// DeltaURLsFileName is the name of the file containing delta token(s) for a
// given endpoint. The endpoint granularity varies by service.
DeltaURLsFileName = "delta"
// PreviousPathFileName is the name of the file containing previous path(s) for a
// given endpoint.
PreviousPathFileName = "previouspath"
)
// ---------------------------------------------------------------------------
// Runtime Configuration
// ---------------------------------------------------------------------------

View File

@ -37,12 +37,6 @@ const (
defaultHTTPClientTimeout = 1 * time.Hour
)
// AllMetadataFileNames produces the standard set of filenames used to store graph
// metadata such as delta tokens and folderID->path references.
func AllMetadataFileNames() []string {
return []string{DeltaURLsFileName, PreviousPathFileName}
}
type QueryParams struct {
Category path.CategoryType
ProtectedResource idname.Provider

View File

@ -18,6 +18,7 @@ import (
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup/metadata"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
@ -107,7 +108,7 @@ func ProduceBackupCollections(
}
case path.ChannelMessagesCategory:
dbcs, err = groups.CreateCollections(
dbcs, canUsePreviousBackup, err = groups.CreateCollections(
ctx,
bpc,
groups.NewChannelBackupHandler(bpc.ProtectedResource.ID(), ac.Channels()),
@ -183,7 +184,7 @@ func getSitesMetadataCollection(
md, err := graph.MakeMetadataCollection(
p,
[]graph.MetadataCollectionEntry{
graph.NewMetadataEntry(graph.PreviousPathFileName, sites),
graph.NewMetadataEntry(metadata.PreviousPathFileName, sites),
},
su)

View File

@ -36,6 +36,7 @@ import (
"github.com/alcionai/corso/src/pkg/backup/details"
deeTD "github.com/alcionai/corso/src/pkg/backup/details/testdata"
"github.com/alcionai/corso/src/pkg/backup/identity"
"github.com/alcionai/corso/src/pkg/backup/metadata"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/repository"
"github.com/alcionai/corso/src/pkg/extensions"
@ -1608,10 +1609,10 @@ func makeMetadataCollectionEntries(
) []graph.MetadataCollectionEntry {
return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
graph.DeltaURLsFileName,
metadata.DeltaURLsFileName,
map[string]string{driveID: deltaURL}),
graph.NewMetadataEntry(
graph.PreviousPathFileName,
metadata.PreviousPathFileName,
map[string]map[string]string{
driveID: {
folderID: p.PlainString(),

View File

@ -9,8 +9,8 @@ import (
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/kopia/inject"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/backup/identity"
"github.com/alcionai/corso/src/pkg/backup/metadata"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
@ -63,7 +63,7 @@ func getManifestsAndMetadata(
) (kopia.BackupBases, []data.RestoreCollection, bool, error) {
var (
tags = map[string]string{kopia.TagBackupCategory: ""}
metadataFiles = graph.AllMetadataFileNames()
metadataFiles = metadata.AllMetadataFileNames()
collections []data.RestoreCollection
)

View File

@ -23,8 +23,6 @@ type GroupsBackupIntgSuite struct {
}
func TestGroupsBackupIntgSuite(t *testing.T) {
t.Skip("todo: enable")
suite.Run(t, &GroupsBackupIntgSuite{
Suite: tester.NewIntegrationSuite(
t,

View File

@ -193,7 +193,14 @@ func runAndCheckBackup(
acceptNoData bool,
) {
err := bo.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
if !assert.NoError(t, err, clues.ToCore(err)) {
for i, err := range bo.Errors.Recovered() {
t.Logf("recoverable err %d, %+v", i, err)
}
assert.Fail(t, "not allowed to error")
}
require.NotEmpty(t, bo.Results, "the backup had non-zero results")
require.NotEmpty(t, bo.Results.BackupID, "the backup generated an ID")

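The switch from require.NoError to the logging block surfaces the fault bus's recoverable errors before failing, which makes nightly CI failures diagnosable. A small sketch of the same pattern against the fault package (import paths taken from the diff):

package main

import (
	"context"
	"fmt"

	"github.com/alcionai/clues"
	"github.com/alcionai/corso/src/pkg/fault"
)

func main() {
	// false: recoverable errors accumulate instead of failing fast
	errs := fault.New(false)
	errs.AddRecoverable(context.Background(), clues.New("transient item failure"))

	// mirror runAndCheckBackup: print each recovered error with its index
	for i, err := range errs.Recovered() {
		fmt.Printf("recoverable err %d, %+v\n", i, err)
	}
}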
View File

@ -31,6 +31,7 @@ import (
"github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details"
deeTD "github.com/alcionai/corso/src/pkg/backup/details/testdata"
bupMD "github.com/alcionai/corso/src/pkg/backup/metadata"
"github.com/alcionai/corso/src/pkg/control"
ctrlTD "github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/count"
@ -173,7 +174,7 @@ func runDriveIncrementalTest(
now = dttm.FormatNow(dttm.SafeForTesting)
categories = map[path.CategoryType][]string{
category: {graph.DeltaURLsFileName, graph.PreviousPathFileName},
category: {bupMD.DeltaURLsFileName, bupMD.PreviousPathFileName},
}
container1 = fmt.Sprintf("%s%d_%s", incrementalsDestContainerPrefix, 1, now)
container2 = fmt.Sprintf("%s%d_%s", incrementalsDestContainerPrefix, 2, now)
@ -787,7 +788,7 @@ func (suite *OneDriveBackupIntgSuite) TestBackup_Run_oneDriveOwnerMigration() {
mb = evmock.NewBus()
categories = map[path.CategoryType][]string{
path.FilesCategory: {graph.DeltaURLsFileName, graph.PreviousPathFileName},
path.FilesCategory: {bupMD.DeltaURLsFileName, bupMD.PreviousPathFileName},
}
)

View File

@ -124,8 +124,13 @@ func (i *GroupsInfo) uniqueLocation(baseLoc *path.Builder) (*uniqueLoc, error) {
}
func (i *GroupsInfo) updateFolder(f *FolderInfo) error {
if i.ItemType == SharePointLibrary {
f.DataType = i.ItemType
switch i.ItemType {
case SharePointLibrary:
return updateFolderWithinDrive(SharePointLibrary, i.DriveName, i.DriveID, f)
case GroupsChannelMessage:
return nil
}
return clues.New("unsupported ItemType for GroupsInfo").With("item_type", i.ItemType)

View File

@ -0,0 +1,51 @@
package metadata
import "github.com/alcionai/corso/src/pkg/path"
const (
// DeltaURLsFileName is the name of the file containing delta token(s) for a
// given endpoint. The endpoint granularity varies by service.
DeltaURLsFileName = "delta"
// PreviousPathFileName is the name of the file containing previous path(s) for a
// given endpoint.
PreviousPathFileName = "previouspath"
PathKey = "path"
DeltaKey = "delta"
)
type (
CatDeltaPaths map[path.CategoryType]DeltaPaths
DeltaPaths map[string]DeltaPath
DeltaPath struct {
Delta string
Path string
}
)
func (dps DeltaPaths) AddDelta(k, d string) {
dp, ok := dps[k]
if !ok {
dp = DeltaPath{}
}
dp.Delta = d
dps[k] = dp
}
func (dps DeltaPaths) AddPath(k, p string) {
dp, ok := dps[k]
if !ok {
dp = DeltaPath{}
}
dp.Path = p
dps[k] = dp
}
// AllMetadataFileNames produces the standard set of filenames used to store graph
// metadata such as delta tokens and folderID->path references.
func AllMetadataFileNames() []string {
return []string{DeltaURLsFileName, PreviousPathFileName}
}

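AddDelta and AddPath exist because DeltaPaths maps to a struct value: dps[k].Delta = d does not compile in Go ("cannot assign to struct field in map"), so the read-modify-write must be spelled out. A stripped-down illustration:

package main

import "fmt"

type deltaPath struct{ Delta, Path string }

type deltaPaths map[string]deltaPath

// addDelta mimics metadata.DeltaPaths.AddDelta: fetch (or zero-init)
// the entry, mutate the copy, and store it back.
func (dps deltaPaths) addDelta(k, d string) {
	dp := dps[k] // zero value when absent, same as the ok-check upstream
	dp.Delta = d
	dps[k] = dp
}

func main() {
	dps := deltaPaths{}
	dps.addDelta("chan-1", "token")
	fmt.Println(dps["chan-1"].Delta) // token
}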
View File

@ -26,7 +26,7 @@ const (
LibrariesCategory CategoryType = 6 // libraries
PagesCategory CategoryType = 7 // pages
DetailsCategory CategoryType = 8 // details
ChannelMessagesCategory CategoryType = 9 // channel messages
ChannelMessagesCategory CategoryType = 9 // channelMessages
)
func ToCategoryType(category string) CategoryType {

View File

@ -20,9 +20,9 @@ func _() {
_ = x[ChannelMessagesCategory-9]
}
const _CategoryType_name = "UnknownCategoryemailcontactseventsfileslistslibrariespagesdetailschannel messages"
const _CategoryType_name = "UnknownCategoryemailcontactseventsfileslistslibrariespagesdetailschannelMessages"
var _CategoryType_index = [...]uint8{0, 15, 20, 28, 34, 39, 44, 53, 58, 65, 81}
var _CategoryType_index = [...]uint8{0, 15, 20, 28, 34, 39, 44, 53, 58, 65, 80}
func (i CategoryType) String() string {
if i < 0 || i >= CategoryType(len(_CategoryType_index)-1) {

View File
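The regenerated stringer constants follow from the rename: the generated code stores every name in one shared string and slices it with a boundary table, and "channelMessages" (15 chars) is one shorter than "channel messages" (16), so the final boundary drops from 81 to 80. A worked check:

package main

import "fmt"

const name = "UnknownCategoryemailcontactseventsfileslistslibrariespagesdetailschannelMessages"

var index = [...]uint8{0, 15, 20, 28, 34, 39, 44, 53, 58, 65, 80}

func main() {
	const channelMessagesCategory = 9
	// slice the shared string with the boundary table, as the
	// generated String() method does
	fmt.Println(name[index[channelMessagesCategory]:index[channelMessagesCategory+1]])
	// Output: channelMessages
}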

@ -159,6 +159,8 @@ func ChannelMessageInfo(
from := msg.GetFrom()
switch true {
case from == nil:
// not all messages have a populated 'from'. Namely, system messages do not.
case from.GetApplication() != nil:
msgCreator = ptr.Val(from.GetApplication().GetDisplayName())
case from.GetDevice() != nil:

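The nil case has to lead the switch because system-generated messages carry no 'from' at all. A stand-in sketch of the resolution order (the trailing user case is assumed from the truncated hunk):

package main

import "fmt"

type identity struct{ displayName string }

type sender struct {
	application *identity
	device      *identity
	user        *identity
}

// creatorOf mirrors the switch in ChannelMessageInfo: nil first
// (system messages), then application, device, and finally user.
func creatorOf(from *sender) string {
	switch {
	case from == nil:
		return "" // system message: no sender to record
	case from.application != nil:
		return from.application.displayName
	case from.device != nil:
		return from.device.displayName
	case from.user != nil:
		return from.user.displayName
	}

	return ""
}

func main() {
	fmt.Printf("%q\n", creatorOf(nil))
	fmt.Println(creatorOf(&sender{user: &identity{"example user"}}))
}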
View File

@ -86,12 +86,14 @@ func (c Channels) NewChannelMessageDeltaPager(
}
// GetChannelMessageIDsDelta fetches a delta of all messages in the channel.
// It returns two maps: added message IDs and deleted message IDs.
func (c Channels) GetChannelMessageIDsDelta(
ctx context.Context,
teamID, channelID, prevDelta string,
) (map[string]struct{}, DeltaUpdate, error) {
) (map[string]struct{}, map[string]struct{}, DeltaUpdate, error) {
var (
vs = map[string]struct{}{}
added = map[string]struct{}{}
deleted = map[string]struct{}{}
// select is not currently allowed on messages
// this func will still isolate to the ID, however,
// because we need the follow-up get request to gather
@ -109,7 +111,8 @@ func (c Channels) GetChannelMessageIDsDelta(
logger.Ctx(ctx).Infow("Invalid previous delta", "delta_link", prevDelta)
invalidPrevDelta = true
vs = map[string]struct{}{}
added = map[string]struct{}{}
deleted = map[string]struct{}{}
pager.Reset(ctx)
@ -117,16 +120,20 @@ func (c Channels) GetChannelMessageIDsDelta(
}
if err != nil {
return nil, DeltaUpdate{}, graph.Wrap(ctx, err, "retrieving page of channel messages")
return nil, nil, DeltaUpdate{}, graph.Wrap(ctx, err, "retrieving page of channel messages")
}
vals, err := pager.ValuesIn(page)
if err != nil {
return nil, DeltaUpdate{}, graph.Wrap(ctx, err, "extracting channel messages from response")
return nil, nil, DeltaUpdate{}, graph.Wrap(ctx, err, "extracting channel messages from response")
}
for _, v := range vals {
vs[ptr.Val(v.GetId())] = struct{}{}
if v.GetAdditionalData()[graph.AddtlDataRemoved] == nil {
added[ptr.Val(v.GetId())] = struct{}{}
} else {
deleted[ptr.Val(v.GetId())] = struct{}{}
}
}
nextLink, deltaLink := NextAndDeltaLink(page)
@ -142,14 +149,14 @@ func (c Channels) GetChannelMessageIDsDelta(
pager.SetNext(nextLink)
}
logger.Ctx(ctx).Debugf("retrieved %d channel messages", len(vs))
logger.Ctx(ctx).Debugf("retrieved %d channel messages", len(added))
du := DeltaUpdate{
URL: newDeltaLink,
Reset: invalidPrevDelta,
}
return vs, du, nil
return added, deleted, du, nil
}
// ---------------------------------------------------------------------------

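The added/deleted split keys off the "@removed" annotation (graph.AddtlDataRemoved) that Graph attaches to tombstoned items in delta responses. A self-contained sketch of the partitioning loop:

package main

import "fmt"

func main() {
	type message struct {
		id    string
		addtl map[string]any
	}

	// a fake delta page: m2 carries the "@removed" annotation that
	// Graph attaches to deleted items
	page := []message{
		{id: "m1", addtl: map[string]any{}},
		{id: "m2", addtl: map[string]any{"@removed": map[string]any{"reason": "deleted"}}},
	}

	added := map[string]struct{}{}
	deleted := map[string]struct{}{}

	for _, v := range page {
		if v.addtl["@removed"] == nil {
			added[v.id] = struct{}{}
		} else {
			deleted[v.id] = struct{}{}
		}
	}

	fmt.Println(len(added), len(deleted)) // 1 1
}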
View File

@ -56,27 +56,28 @@ func (suite *ChannelsPagerIntgSuite) TestEnumerateChannelMessages() {
ctx, flush := tester.NewContext(t)
defer flush()
msgIDs, du, err := ac.GetChannelMessageIDsDelta(
addedIDs, _, du, err := ac.GetChannelMessageIDsDelta(
ctx,
suite.its.group.id,
suite.its.group.testContainerID,
"")
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, msgIDs)
require.NotEmpty(t, addedIDs)
require.NotZero(t, du.URL, "delta link")
require.True(t, du.Reset, "reset due to empty prev delta link")
msgIDs, du, err = ac.GetChannelMessageIDsDelta(
addedIDs, deletedIDs, du, err := ac.GetChannelMessageIDsDelta(
ctx,
suite.its.group.id,
suite.its.group.testContainerID,
du.URL)
require.NoError(t, err, clues.ToCore(err))
require.Empty(t, msgIDs, "should have no new messages from delta")
require.Empty(t, addedIDs, "should have no new messages from delta")
require.Empty(t, deletedIDs, "should have no deleted messages from delta")
require.NotZero(t, du.URL, "delta link")
require.False(t, du.Reset, "prev delta link should be valid")
for id := range msgIDs {
for id := range addedIDs {
suite.Run(id+"-replies", func() {
testEnumerateChannelMessageReplies(
suite.T(),