Compare commits: main...2825-9-lis (11 commits)

| Author | SHA1 | Date |
|---|---|---|
| | b00559acc8 | |
| | ca2fbda260 | |
| | 702e25bcda | |
| | 9bb97f0ebd | |
| | 3531b3c84c | |
| | cdbf3910e8 | |
| | 547b8c767e | |
| | 945e4ecbd0 | |
| | 3da8bf86a4 | |
| | b5527e9ef2 | |
| | feddd9b183 | |
@@ -195,6 +195,8 @@ func runBackups(
 	r repository.Repository,
 	serviceName, resourceOwnerType string,
 	selectorSet []selectors.Selector,
+	resourceOwnersIDToName map[string]string,
+	resourceOwnersNameToID map[string]string,
 ) error {
 	var (
 		bIDs []model.StableID
@@ -204,21 +206,21 @@ func runBackups(
 	for _, discSel := range selectorSet {
 		var (
 			owner = discSel.DiscreteOwner
-			bctx  = clues.Add(ctx, "resource_owner", owner)
+			ictx  = clues.Add(ctx, "resource_owner", owner)
 		)

-		bo, err := r.NewBackup(bctx, discSel)
+		bo, err := r.NewBackup(ictx, discSel, resourceOwnersIDToName, resourceOwnersNameToID)
 		if err != nil {
-			errs = append(errs, clues.Wrap(err, owner).WithClues(bctx))
-			Errf(bctx, "%v\n", err)
+			errs = append(errs, clues.Wrap(err, owner).WithClues(ictx))
+			Errf(ictx, "%v\n", err)

 			continue
 		}

-		err = bo.Run(bctx)
+		err = bo.Run(ictx)
 		if err != nil {
-			errs = append(errs, clues.Wrap(err, owner).WithClues(bctx))
-			Errf(bctx, "%v\n", err)
+			errs = append(errs, clues.Wrap(err, owner).WithClues(ictx))
+			Errf(ictx, "%v\n", err)

 			continue
 		}
@@ -7,6 +7,7 @@ import (
 	"github.com/pkg/errors"
 	"github.com/spf13/cobra"
 	"github.com/spf13/pflag"
+	"golang.org/x/exp/maps"

 	"github.com/alcionai/corso/src/cli/options"
 	. "github.com/alcionai/corso/src/cli/print"
@@ -164,14 +165,14 @@ func createExchangeCmd(cmd *cobra.Command, args []string) error {
 	// TODO: log/print recoverable errors
 	errs := fault.New(false)

-	users, err := m365.UserPNs(ctx, *acct, errs)
+	idToPN, pnToID, err := m365.UsersMap(ctx, *acct, errs)
 	if err != nil {
-		return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 user(s)"))
+		return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 users"))
 	}

 	selectorSet := []selectors.Selector{}

-	for _, discSel := range sel.SplitByResourceOwner(users) {
+	for _, discSel := range sel.SplitByResourceOwner(maps.Keys(pnToID)) {
 		selectorSet = append(selectorSet, discSel.Selector)
 	}

@@ -180,7 +181,7 @@ func createExchangeCmd(cmd *cobra.Command, args []string) error {
 		r,
 		"Exchange", "user",
 		selectorSet,
-	)
+		idToPN, pnToID)
 }

 func exchangeBackupCreateSelectors(userIDs, cats []string) *selectors.ExchangeBackup {
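For orientation, here is a minimal sketch (not part of the diff) of how the new id/name maps flow through the Exchange create command, using only the names visible in the hunks above; the leading arguments to runBackups are assumed from context and error handling is trimmed.

```go
// Sketch only: m365.UsersMap, SplitByResourceOwner, and runBackups are taken
// from the hunks above; ctx, acct, sel, r, and errs are assumed to be in scope.
idToPN, pnToID, err := m365.UsersMap(ctx, *acct, errs) // user ID <-> principal name
if err != nil {
    return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 users"))
}

selectorSet := []selectors.Selector{}
for _, discSel := range sel.SplitByResourceOwner(maps.Keys(pnToID)) {
    selectorSet = append(selectorSet, discSel.Selector)
}

// Both maps now travel with every backup in the set.
return runBackups(ctx, r, "Exchange", "user", selectorSet, idToPN, pnToID)
```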
@@ -300,7 +300,11 @@ func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() {

 	suite.backupOps = make(map[path.CategoryType]string)

-	users := []string{suite.m365UserID}
+	var (
+		users    = []string{suite.m365UserID}
+		idToName = map[string]string{suite.m365UserID: "todo-name-" + suite.m365UserID}
+		nameToID = map[string]string{"todo-name-" + suite.m365UserID: suite.m365UserID}
+	)

 	for _, set := range backupDataSets {
 		var (
@@ -321,7 +325,7 @@ func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() {

 		sel.Include(scopes)

-		bop, err := suite.repo.NewBackup(ctx, sel.Selector)
+		bop, err := suite.repo.NewBackup(ctx, sel.Selector, idToName, nameToID)
 		require.NoError(t, err, clues.ToCore(err))

 		err = bop.Run(ctx)
@@ -546,7 +550,7 @@ func (suite *BackupDeleteExchangeE2ESuite) SetupSuite() {
 	sel := selectors.NewExchangeBackup(users)
 	sel.Include(sel.MailFolders([]string{exchange.DefaultMailFolder}, selectors.PrefixMatch()))

-	suite.backupOp, err = suite.repo.NewBackup(ctx, sel.Selector)
+	suite.backupOp, err = suite.repo.NewBackup(ctx, sel.Selector, nil, nil)
 	require.NoError(t, err, clues.ToCore(err))

 	err = suite.backupOp.Run(ctx)
@@ -7,6 +7,7 @@ import (
 	"github.com/pkg/errors"
 	"github.com/spf13/cobra"
 	"github.com/spf13/pflag"
+	"golang.org/x/exp/maps"

 	"github.com/alcionai/corso/src/cli/options"
 	. "github.com/alcionai/corso/src/cli/print"
@@ -148,14 +149,14 @@ func createOneDriveCmd(cmd *cobra.Command, args []string) error {
 	// TODO: log/print recoverable errors
 	errs := fault.New(false)

-	users, err := m365.UserPNs(ctx, *acct, errs)
+	idToName, nameToID, err := m365.UsersMap(ctx, *acct, errs)
 	if err != nil {
 		return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 users"))
 	}

 	selectorSet := []selectors.Selector{}

-	for _, discSel := range sel.SplitByResourceOwner(users) {
+	for _, discSel := range sel.SplitByResourceOwner(maps.Keys(idToName)) {
 		selectorSet = append(selectorSet, discSel.Selector)
 	}

@@ -164,7 +165,7 @@ func createOneDriveCmd(cmd *cobra.Command, args []string) error {
 		r,
 		"OneDrive", "user",
 		selectorSet,
-	)
+		idToName, nameToID)
 }

 func validateOneDriveBackupCreateFlags(users []string) error {
@@ -205,14 +205,18 @@ func (suite *BackupDeleteOneDriveE2ESuite) SetupSuite() {
 	})
 	require.NoError(t, err, clues.ToCore(err))

-	m365UserID := tester.M365UserID(t)
-	users := []string{m365UserID}
+	var (
+		m365UserID = tester.M365UserID(t)
+		users      = []string{m365UserID}
+		idToName   = map[string]string{m365UserID: "todo-name-" + m365UserID}
+		nameToID   = map[string]string{"todo-name-" + m365UserID: m365UserID}
+	)

 	// some tests require an existing backup
 	sel := selectors.NewOneDriveBackup(users)
 	sel.Include(sel.Folders(selectors.Any()))

-	suite.backupOp, err = suite.repo.NewBackup(ctx, sel.Selector)
+	suite.backupOp, err = suite.repo.NewBackup(ctx, sel.Selector, idToName, nameToID)
 	require.NoError(t, err, clues.ToCore(err))

 	err = suite.backupOp.Run(ctx)
@@ -7,18 +7,20 @@ import (
 	"github.com/pkg/errors"
 	"github.com/spf13/cobra"
 	"github.com/spf13/pflag"
+	"golang.org/x/exp/maps"
+	"golang.org/x/exp/slices"

 	"github.com/alcionai/corso/src/cli/options"
 	. "github.com/alcionai/corso/src/cli/print"
 	"github.com/alcionai/corso/src/cli/utils"
-	"github.com/alcionai/corso/src/internal/connector"
-	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/fault"
+	"github.com/alcionai/corso/src/pkg/filters"
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/repository"
 	"github.com/alcionai/corso/src/pkg/selectors"
+	"github.com/alcionai/corso/src/pkg/services/m365"
 )

 // ------------------------------------------------------------------------------------------------
@@ -154,19 +156,19 @@ func createSharePointCmd(cmd *cobra.Command, args []string) error {
 	// TODO: log/print recoverable errors
 	errs := fault.New(false)

-	gc, err := connector.NewGraphConnector(ctx, graph.HTTPClient(graph.NoTimeout()), *acct, connector.Sites, errs)
+	idToURL, urlToID, err := m365.SitesMap(ctx, *acct, errs)
 	if err != nil {
-		return Only(ctx, clues.Wrap(err, "Failed to connect to Microsoft APIs"))
+		return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 sites"))
 	}

-	sel, err := sharePointBackupCreateSelectors(ctx, utils.SiteID, utils.WebURL, utils.CategoryData, gc)
+	sel, err := sharePointBackupCreateSelectors(ctx, urlToID, utils.SiteID, utils.WebURL, utils.CategoryData)
 	if err != nil {
 		return Only(ctx, clues.Wrap(err, "Retrieving up sharepoint sites by ID and URL"))
 	}

 	selectorSet := []selectors.Selector{}

-	for _, discSel := range sel.SplitByResourceOwner(gc.GetSiteIDs()) {
+	for _, discSel := range sel.SplitByResourceOwner(maps.Keys(idToURL)) {
 		selectorSet = append(selectorSet, discSel.Selector)
 	}

@@ -175,7 +177,7 @@ func createSharePointCmd(cmd *cobra.Command, args []string) error {
 		r,
 		"SharePoint", "site",
 		selectorSet,
-	)
+		idToURL, urlToID)
 }

 func validateSharePointBackupCreateFlags(sites, weburls, cats []string) error {
@@ -201,44 +203,30 @@ func validateSharePointBackupCreateFlags(sites, weburls, cats []string) error {
 // TODO: users might specify a data type, this only supports AllData().
 func sharePointBackupCreateSelectors(
 	ctx context.Context,
+	urlToID map[string]string,
 	sites, weburls, cats []string,
-	gc *connector.GraphConnector,
 ) (*selectors.SharePointBackup, error) {
 	if len(sites) == 0 && len(weburls) == 0 {
 		return selectors.NewSharePointBackup(selectors.None()), nil
 	}

-	for _, site := range sites {
-		if site == utils.Wildcard {
-			return includeAllSitesWithCategories(cats), nil
-		}
+	if filters.PathContains(sites).Compare(utils.Wildcard) {
+		return includeAllSitesWithCategories(urlToID, cats), nil
 	}

-	for _, wURL := range weburls {
-		if wURL == utils.Wildcard {
-			return includeAllSitesWithCategories(cats), nil
-		}
+	if filters.PathContains(weburls).Compare(utils.Wildcard) {
+		return includeAllSitesWithCategories(urlToID, cats), nil
 	}

-	// TODO: log/print recoverable errors
-	errs := fault.New(false)
-
-	union, err := gc.UnionSiteIDsAndWebURLs(ctx, sites, weburls, errs)
-	if err != nil {
-		return nil, err
-	}
-
-	sel := selectors.NewSharePointBackup(union)
+	sel := selectors.NewSharePointBackup(append(slices.Clone(sites), weburls...))

 	return addCategories(sel, cats), nil
 }

-func includeAllSitesWithCategories(categories []string) *selectors.SharePointBackup {
-	sel := addCategories(
-		selectors.NewSharePointBackup(selectors.Any()),
+func includeAllSitesWithCategories(urlToID map[string]string, categories []string) *selectors.SharePointBackup {
+	return addCategories(
+		selectors.NewSharePointBackup(maps.Values(urlToID)),
 		categories)
-
-	return sel
 }

 func addCategories(sel *selectors.SharePointBackup, cats []string) *selectors.SharePointBackup {
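The selector construction above replaces hand-rolled wildcard loops with a filters comparison. A rough sketch of the equivalence, assuming (as the hunk's usage implies) that `filters.PathContains(xs).Compare(y)` reports whether y is contained in xs:

```go
// Old shape: scan the slice for the wildcard by hand.
hasWildcard := false
for _, site := range sites {
    if site == utils.Wildcard {
        hasWildcard = true
        break
    }
}

// New shape, per the hunk above: one filter expression does the same check.
hasWildcard = filters.PathContains(sites).Compare(utils.Wildcard)
```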
@@ -156,14 +156,18 @@ func (suite *BackupDeleteSharePointE2ESuite) SetupSuite() {
 	suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
 	require.NoError(t, err, clues.ToCore(err))

-	m365SiteID := tester.M365SiteID(t)
-	sites := []string{m365SiteID}
+	var (
+		m365SiteID = tester.M365SiteID(t)
+		sites      = []string{m365SiteID}
+		idToName   = map[string]string{m365SiteID: "todo-name-" + m365SiteID}
+		nameToID   = map[string]string{"todo-name-" + m365SiteID: m365SiteID}
+	)

 	// some tests require an existing backup
 	sel := selectors.NewSharePointBackup(sites)
 	sel.Include(sel.LibraryFolders(selectors.Any()))

-	suite.backupOp, err = suite.repo.NewBackup(ctx, sel.Selector)
+	suite.backupOp, err = suite.repo.NewBackup(ctx, sel.Selector, idToName, nameToID)
 	require.NoError(t, err, clues.ToCore(err))

 	err = suite.backupOp.Run(ctx)
@@ -11,7 +11,6 @@ import (

 	"github.com/alcionai/corso/src/cli/utils"
 	"github.com/alcionai/corso/src/cli/utils/testdata"
-	"github.com/alcionai/corso/src/internal/connector"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/selectors"
 )
@@ -108,13 +107,17 @@ func (suite *SharePointSuite) TestValidateSharePointBackupCreateFlags() {
 }

 func (suite *SharePointSuite) TestSharePointBackupCreateSelectors() {
-	comboString := []string{"id_1", "id_2"}
-	gc := &connector.GraphConnector{
-		Sites: map[string]string{
-			"url_1": "id_1",
-			"url_2": "id_2",
-		},
-	}
+	const (
+		id1  = "id_1"
+		id2  = "id_2"
+		url1 = "url_1/foo"
+		url2 = "url_2/bar"
+	)
+
+	var (
+		bothIDs = []string{id1, id2}
+		urlToID = map[string]string{url1: id1, url2: id2}
+	)

 	table := []struct {
 		name string
@@ -137,73 +140,72 @@ func (suite *SharePointSuite) TestSharePointBackupCreateSelectors() {
 		{
 			name: "site wildcard",
 			site: []string{utils.Wildcard},
-			expect: selectors.Any(),
+			expect: bothIDs,
 			expectScopesLen: 2,
 		},
 		{
 			name: "url wildcard",
 			weburl: []string{utils.Wildcard},
-			expect: selectors.Any(),
+			expect: bothIDs,
 			expectScopesLen: 2,
 		},
 		{
 			name: "sites",
-			site: []string{"id_1", "id_2"},
-			expect: []string{"id_1", "id_2"},
+			site: []string{id1, id2},
+			expect: []string{id1, id2},
 			expectScopesLen: 2,
 		},
 		{
 			name: "urls",
-			weburl: []string{"url_1", "url_2"},
-			expect: []string{"id_1", "id_2"},
+			weburl: []string{url1, url2},
+			expect: []string{url1, url2},
 			expectScopesLen: 2,
 		},
 		{
 			name: "mix sites and urls",
-			site: []string{"id_1"},
-			weburl: []string{"url_2"},
-			expect: []string{"id_1", "id_2"},
+			site: []string{id1},
+			weburl: []string{url2},
+			expect: []string{id1, url2},
 			expectScopesLen: 2,
 		},
 		{
 			name: "duplicate sites and urls",
-			site: []string{"id_1", "id_2"},
-			weburl: []string{"url_1", "url_2"},
-			expect: comboString,
+			site: []string{id1, id2},
+			weburl: []string{url1, url2},
+			expect: []string{id1, id2, url1, url2},
 			expectScopesLen: 2,
 		},
 		{
 			name: "unnecessary site wildcard",
-			site: []string{"id_1", utils.Wildcard},
-			weburl: []string{"url_1", "url_2"},
-			expect: selectors.Any(),
+			site: []string{id1, utils.Wildcard},
+			weburl: []string{url1, url2},
+			expect: bothIDs,
 			expectScopesLen: 2,
 		},
 		{
 			name: "unnecessary url wildcard",
-			site: comboString,
-			weburl: []string{"url_1", utils.Wildcard},
-			expect: selectors.Any(),
+			site: []string{id1},
+			weburl: []string{url1, utils.Wildcard},
+			expect: bothIDs,
 			expectScopesLen: 2,
 		},
 		{
 			name: "Pages",
-			site: comboString,
+			site: bothIDs,
 			data: []string{dataPages},
-			expect: comboString,
+			expect: bothIDs,
 			expectScopesLen: 1,
 		},
 	}
 	for _, test := range table {
 		suite.Run(test.name, func() {
-			t := suite.T()
-
 			ctx, flush := tester.NewContext()
 			defer flush()

-			sel, err := sharePointBackupCreateSelectors(ctx, test.site, test.weburl, test.data, gc)
-			require.NoError(t, err, clues.ToCore(err))
+			t := suite.T()
+
+			sel, err := sharePointBackupCreateSelectors(ctx, urlToID, test.site, test.weburl, test.data)
+			require.NoError(t, err, clues.ToCore(err))

 			assert.ElementsMatch(t, test.expect, sel.DiscreteResourceOwners())
 		})
 	}
@@ -73,7 +73,12 @@ func (suite *RestoreExchangeE2ESuite) SetupSuite() {
 	suite.vpr, suite.cfgFP = tester.MakeTempTestConfigClone(t, force)

 	suite.m365UserID = tester.M365UserID(t)
-	users := []string{suite.m365UserID}
+
+	var (
+		users    = []string{suite.m365UserID}
+		idToName = map[string]string{suite.m365UserID: "todo-name-" + suite.m365UserID}
+		nameToID = map[string]string{"todo-name-" + suite.m365UserID: suite.m365UserID}
+	)

 	// init the repo first
 	suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
@@ -100,7 +105,7 @@ func (suite *RestoreExchangeE2ESuite) SetupSuite() {

 	sel.Include(scopes)

-	bop, err := suite.repo.NewBackup(ctx, sel.Selector)
+	bop, err := suite.repo.NewBackup(ctx, sel.Selector, idToName, nameToID)
 	require.NoError(t, err, clues.ToCore(err))

 	err = bop.Run(ctx)
@@ -8,6 +8,7 @@ import (

 	"github.com/alcionai/clues"
 	"github.com/google/uuid"
+	"golang.org/x/exp/maps"

 	"github.com/alcionai/corso/src/cli/print"
 	"github.com/alcionai/corso/src/internal/common"
@@ -96,7 +97,7 @@ func generateAndRestoreItems(

 	print.Infof(ctx, "Generating %d %s items in %s\n", howMany, cat, Destination)

-	return gc.RestoreDataCollections(ctx, version.Backup, acct, sel, dest, opts, dataColls, errs)
+	return gc.ConsumeRestoreCollections(ctx, version.Backup, acct, sel, dest, opts, dataColls, errs)
 }

 // ------------------------------------------------------------------------------------------
@@ -126,12 +127,12 @@ func getGCAndVerifyUser(ctx context.Context, userID string) (*connector.GraphCon
 	errs := fault.New(false)
 	normUsers := map[string]struct{}{}

-	users, err := m365.UserPNs(ctx, acct, errs)
+	idToName, _, err := m365.UsersMap(ctx, acct, errs)
 	if err != nil {
 		return nil, account.Account{}, clues.Wrap(err, "getting tenant users")
 	}

-	for _, k := range users {
+	for _, k := range maps.Keys(idToName) {
 		normUsers[strings.ToLower(k)] = struct{}{}
 	}

src/internal/common/idname.go (new file, +6)
@@ -0,0 +1,6 @@
+package common
+
+type IDNamer interface {
+	ID() string
+	Name() string
+}
@@ -43,3 +43,10 @@ func OrNow(t *time.Time) time.Time {

 	return *t
 }
+
+// To generates a pointer from any value. Primarily useful
+// for generating pointers to strings and other primitives
+// without needing to store a second variable.
+func To[T any](t T) *T {
+	return &t
+}
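The two additions above work together throughout this change set: IDNamer abstracts anything that exposes an ID and a name, and To builds pointers inline. A self-contained, hypothetical sketch (the owner type below is illustrative only, not from the diff):

```go
package main

import "fmt"

// IDNamer mirrors the interface added in src/internal/common/idname.go.
type IDNamer interface {
	ID() string
	Name() string
}

// To mirrors the pointer helper added above.
func To[T any](t T) *T {
	return &t
}

// owner is a hypothetical example type satisfying IDNamer.
type owner struct{ id, name string }

func (o owner) ID() string   { return o.id }
func (o owner) Name() string { return o.name }

func main() {
	var n IDNamer = owner{id: "user-id", name: "user@example.com"}

	// To avoids declaring a temporary variable just to take its address.
	displayName := To(n.Name())

	fmt.Println(n.ID(), *displayName)
}
```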
@@ -5,7 +5,9 @@ import (
 	"strings"

 	"github.com/alcionai/clues"
+	"golang.org/x/exp/maps"

+	"github.com/alcionai/corso/src/internal/common"
 	"github.com/alcionai/corso/src/internal/connector/discovery"
 	"github.com/alcionai/corso/src/internal/connector/discovery/api"
 	"github.com/alcionai/corso/src/internal/connector/exchange"
@@ -27,29 +29,33 @@ import (
 // Data Collections
 // ---------------------------------------------------------------------------

-// DataCollections utility function to launch backup operations for exchange and
-// onedrive. metadataCols contains any collections with metadata files that may
-// be useful for the current backup. Metadata can include things like delta
-// tokens or the previous backup's folder hierarchy. The absence of metadataCols
-// results in all data being pulled.
-func (gc *GraphConnector) DataCollections(
+// ProduceBackupCollections generates a slice of backup collections for the service
+// specified in the selectors.
+// The metadata field can include things like delta tokens or the previous backup's
+// folder hierarchy. The absence of metadata causes the collection creation to ignore
+// prior history (ie, incrementals) and run a full backup.
+func (gc *GraphConnector) ProduceBackupCollections(
 	ctx context.Context,
+	owner common.IDNamer,
 	sels selectors.Selector,
 	metadata []data.RestoreCollection,
 	ctrlOpts control.Options,
 	errs *fault.Bus,
 ) ([]data.BackupCollection, map[string]map[string]struct{}, error) {
-	ctx, end := diagnostics.Span(ctx, "gc:dataCollections", diagnostics.Index("service", sels.Service.String()))
+	ctx, end := diagnostics.Span(
+		ctx,
+		"gc:produceBackupCollections",
+		diagnostics.Index("service", sels.Service.String()))
 	defer end()

-	err := verifyBackupInputs(sels, gc.GetSiteIDs())
+	err := verifyBackupInputs(sels, maps.Keys(gc.ResourceOwnerIDToName))
 	if err != nil {
 		return nil, nil, clues.Stack(err).WithClues(ctx)
 	}

 	serviceEnabled, err := checkServiceEnabled(
 		ctx,
-		gc.Owners.Users(),
+		gc.Discovery.Users(),
 		path.ServiceType(sels.Service),
 		sels.DiscreteOwner)
 	if err != nil {
@@ -188,10 +194,10 @@ func checkServiceEnabled(
 	return true, nil
 }

-// RestoreDataCollections restores data from the specified collections
+// ConsumeRestoreCollections restores data from the specified collections
 // into M365 using the GraphAPI.
 // SideEffect: gc.status is updated at the completion of operation
-func (gc *GraphConnector) RestoreDataCollections(
+func (gc *GraphConnector) ConsumeRestoreCollections(
 	ctx context.Context,
 	backupVersion int,
 	acct account.Account,
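A hedged sketch of calling the renamed producer, mirroring the call sites in the integration tests further down (there the selector itself is passed as the common.IDNamer owner argument); gc, ctx, and sel are assumed to exist in scope.

```go
// Sketch only: argument order follows the signature in the hunk above and the
// test call sites below.
cols, excludes, err := gc.ProduceBackupCollections(
    ctx,
    sel.Selector, // owner: any common.IDNamer
    sel.Selector, // selectors for the service being backed up
    nil,          // no prior metadata -> ignore incrementals, run a full backup
    control.Options{},
    fault.New(true))
if err != nil {
    // handle the error
}

_ = cols     // []data.BackupCollection handed to the storage layer
_ = excludes // map[string]map[string]struct{} of items to exclude downstream
```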
@@ -129,8 +129,8 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestExchangeDataCollection
 			}
 		}

-		status := connector.AwaitStatus()
-		assert.NotZero(t, status.Metrics.Successes)
+		status := connector.Wait()
+		assert.NotZero(t, status.Successes)
 		t.Log(status.String())
 	})
 }
@@ -205,9 +205,10 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestDataCollections_invali
 		suite.Run(test.name, func() {
 			t := suite.T()

-			collections, excludes, err := connector.DataCollections(
+			collections, excludes, err := connector.ProduceBackupCollections(
 				ctx,
 				test.getSelector(t),
+				test.getSelector(t),
 				nil,
 				control.Options{},
 				fault.New(true))
@@ -286,8 +287,8 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestSharePointDataCollecti
 			}
 		}

-		status := connector.AwaitStatus()
-		assert.NotZero(t, status.Metrics.Successes)
+		status := connector.Wait()
+		assert.NotZero(t, status.Successes)
 		t.Log(status.String())
 	})
 }
@@ -336,9 +337,10 @@ func (suite *ConnectorCreateSharePointCollectionIntegrationSuite) TestCreateShar
 	sel := selectors.NewSharePointBackup(siteIDs)
 	sel.Include(sel.LibraryFolders([]string{"foo"}, selectors.PrefixMatch()))

-	cols, excludes, err := gc.DataCollections(
+	cols, excludes, err := gc.ProduceBackupCollections(
 		ctx,
 		sel.Selector,
+		sel.Selector,
 		nil,
 		control.Options{},
 		fault.New(true))
@@ -374,9 +376,10 @@ func (suite *ConnectorCreateSharePointCollectionIntegrationSuite) TestCreateShar
 	sel := selectors.NewSharePointBackup(siteIDs)
 	sel.Include(sel.Lists(selectors.Any(), selectors.PrefixMatch()))

-	cols, excludes, err := gc.DataCollections(
+	cols, excludes, err := gc.ProduceBackupCollections(
 		ctx,
 		sel.Selector,
+		sel.Selector,
 		nil,
 		control.Options{},
 		fault.New(true))
src/internal/connector/discovery/api/sites.go (new file, +198)
@@ -0,0 +1,198 @@
+package api
+
+import (
+	"context"
+	"fmt"
+	"regexp"
+	"strings"
+
+	"github.com/alcionai/clues"
+	msgraphgocore "github.com/microsoftgraph/msgraph-sdk-go-core"
+	"github.com/microsoftgraph/msgraph-sdk-go/models"
+	"github.com/pkg/errors"
+
+	"github.com/alcionai/corso/src/internal/common/ptr"
+	"github.com/alcionai/corso/src/internal/connector/graph"
+	"github.com/alcionai/corso/src/internal/connector/graph/betasdk/sites"
+	"github.com/alcionai/corso/src/pkg/fault"
+)
+
+// ---------------------------------------------------------------------------
+// controller
+// ---------------------------------------------------------------------------
+
+func (c Client) Sites() Sites {
+	return Sites{c}
+}
+
+// Sites is an interface-compliant provider of the client.
+type Sites struct {
+	Client
+}
+
+// ---------------------------------------------------------------------------
+// methods
+// ---------------------------------------------------------------------------
+
+// GetAll retrieves all sites.
+func (c Sites) GetAll(ctx context.Context, errs *fault.Bus) ([]models.Siteable, error) {
+	service, err := c.service()
+	if err != nil {
+		return nil, err
+	}
+
+	var resp models.SiteCollectionResponseable
+
+	resp, err = service.Client().Sites().Get(ctx, nil)
+
+	if err != nil {
+		return nil, graph.Wrap(ctx, err, "getting all sites")
+	}
+
+	iter, err := msgraphgocore.NewPageIterator(
+		resp,
+		service.Adapter(),
+		models.CreateSiteCollectionResponseFromDiscriminatorValue)
+	if err != nil {
+		return nil, graph.Wrap(ctx, err, "creating sites iterator")
+	}
+
+	var (
+		us = make([]models.Siteable, 0)
+		el = errs.Local()
+	)
+
+	iterator := func(item any) bool {
+		if el.Failure() != nil {
+			return false
+		}
+
+		s, err := validateSite(item)
+		if errors.Is(err, errKnownSkippableCase) {
+			// safe to no-op
+			return true
+		}
+
+		if err != nil {
+			el.AddRecoverable(graph.Wrap(ctx, err, "validating site"))
+			return true
+		}
+
+		us = append(us, s)
+
+		return true
+	}
+
+	if err := iter.Iterate(ctx, iterator); err != nil {
+		return nil, graph.Wrap(ctx, err, "iterating all sites")
+	}
+
+	return us, el.Failure()
+}
+
+const uuidRE = "[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}"
+
+// matches a site ID, with or without a doman name. Ex, either one of:
+// 10rqc2.sharepoint.com,deadbeef-0000-0000-0000-000000000000,beefdead-0000-0000-0000-000000000000
+// deadbeef-0000-0000-0000-000000000000,beefdead-0000-0000-0000-000000000000
+var siteIDRE = regexp.MustCompile("(.+,)?" + uuidRE + "," + uuidRE)
+
+const webURLGetTemplate = "https://graph.microsoft.com/v1.0/sites/%s:/%s"
+
+// GetByID looks up the site matching the given ID. The ID can be either a
+// canonical site id or a webURL. Assumes the webURL is complete and well formed;
+// eg: https://10rqc2.sharepoint.com/sites/Example
+func (c Sites) GetByID(ctx context.Context, id string) (models.Siteable, error) {
+	var (
+		resp models.Siteable
+		err  error
+	)
+
+	ctx = clues.Add(ctx, "given_site_id", id)
+
+	if siteIDRE.MatchString(id) {
+		resp, err = c.stable.Client().SitesById(id).Get(ctx, nil)
+		if err != nil {
+			return nil, graph.Wrap(ctx, err, "getting site by id")
+		}
+	} else {
+		var (
+			url   = strings.TrimPrefix(id, "https://")
+			parts = strings.SplitN(url, "/", 1)
+			host  = parts[0]
+			path  string
+		)
+
+		if len(parts) > 1 {
+			path = parts[1]
+		}
+
+		rawURL := fmt.Sprintf(webURLGetTemplate, host, path)
+		resp, err = sites.
+			NewItemSitesSiteItemRequestBuilder(rawURL, c.stable.Adapter()).
+			Get(ctx, nil)
+		if err != nil {
+			return nil, graph.Wrap(ctx, err, "getting site by weburl")
+		}
+	}
+
+	return resp, err
+}
+
+// GetIDAndName looks up the site matching the given ID, and returns
+// its canonical ID and the webURL as the name. Accepts an ID or a
+// WebURL as an ID.
+func (c Sites) GetIDAndName(ctx context.Context, siteID string) (string, string, error) {
+	s, err := c.GetByID(ctx, siteID)
+	if err != nil {
+		return "", "", err
+	}
+
+	return ptr.Val(s.GetId()), ptr.Val(s.GetWebUrl()), nil
+}
+
+// ---------------------------------------------------------------------------
+// helpers
+// ---------------------------------------------------------------------------
+
+var errKnownSkippableCase = clues.New("case is known and skippable")
+
+const personalSitePath = "sharepoint.com/personal/"
+
+// validateSite ensures the item is a Siteable, and contains the necessary
+// identifiers that we handle with all users.
+// returns the item as a Siteable model.
+func validateSite(item any) (models.Siteable, error) {
+	m, ok := item.(models.Siteable)
+	if !ok {
+		return nil, clues.New(fmt.Sprintf("unexpected model: %T", item))
+	}
+
+	id, ok := ptr.ValOK(m.GetId())
+	if !ok || len(id) == 0 {
+		return nil, clues.New("missing ID")
+	}
+
+	url, ok := ptr.ValOK(m.GetWebUrl())
+	if !ok || len(url) == 0 {
+		return nil, clues.New("missing webURL").With("site_id", id) // TODO: pii
+	}
+
+	// personal (ie: oneDrive) sites have to be filtered out server-side.
+	if ok && strings.Contains(url, personalSitePath) {
+		return nil, clues.Stack(errKnownSkippableCase).
+			With("site_id", id, "site_url", url) // TODO: pii
+	}
+
+	if name, ok := ptr.ValOK(m.GetDisplayName()); !ok || len(name) == 0 {
+		// the built-in site at "https://{tenant-domain}/search" never has a name.
+		if strings.HasSuffix(url, "/search") {
+			return nil, clues.Stack(errKnownSkippableCase).
+				With("site_id", id, "site_url", url) // TODO: pii
+		}
+
+		return nil, clues.New("missing site display name").With("site_id", id)
+	}
+
+	return m, nil
+}
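A short usage sketch for the new Sites client, following the pattern used by the test file that comes next; acct and ctx are assumed to exist, and the webURL is the example given in the GetByID comment above.

```go
// Sketch only: mirrors the integration test below.
creds, err := acct.M365Config()
if err != nil {
    return err
}

client, err := api.NewClient(creds)
if err != nil {
    return err
}

// GetIDAndName accepts either a canonical "host,uuid,uuid" site ID or a full webURL.
id, webURL, err := client.Sites().GetIDAndName(ctx, "https://10rqc2.sharepoint.com/sites/Example")
if err != nil {
    return err
}

_ = id     // canonical site ID
_ = webURL // returned as the site's "name"
```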
src/internal/connector/discovery/api/sites_test.go (new file, +166)
@@ -0,0 +1,166 @@
+package api
+
+import (
+	"strings"
+	"testing"
+
+	"github.com/alcionai/clues"
+	"github.com/google/uuid"
+	"github.com/microsoftgraph/msgraph-sdk-go/models"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+	"github.com/stretchr/testify/suite"
+
+	"github.com/alcionai/corso/src/internal/common/ptr"
+	"github.com/alcionai/corso/src/internal/tester"
+)
+
+type SitesUnitSuite struct {
+	tester.Suite
+}
+
+func TestSitesUnitSuite(t *testing.T) {
+	suite.Run(t, &SitesUnitSuite{Suite: tester.NewUnitSuite(t)})
+}
+
+func (suite *SitesUnitSuite) TestValidateSite() {
+	site := models.NewSite()
+	site.SetWebUrl(ptr.To("sharepoint.com/sites/foo"))
+	site.SetDisplayName(ptr.To("testsite"))
+	site.SetId(ptr.To("testID"))
+
+	tests := []struct {
+		name           string
+		args           interface{}
+		want           models.Siteable
+		errCheck       assert.ErrorAssertionFunc
+		errIsSkippable bool
+	}{
+		{
+			name: "Invalid type",
+			args: string("invalid type"),
+			errCheck: assert.Error,
+		},
+		{
+			name: "No ID",
+			args: models.NewSite(),
+			errCheck: assert.Error,
+		},
+		{
+			name: "No WebURL",
+			args: func() *models.Site {
+				s := models.NewSite()
+				s.SetId(ptr.To("id"))
+				return s
+			}(),
+			errCheck: assert.Error,
+		},
+		{
+			name: "No name",
+			args: func() *models.Site {
+				s := models.NewSite()
+				s.SetId(ptr.To("id"))
+				s.SetWebUrl(ptr.To("sharepoint.com/sites/foo"))
+				return s
+			}(),
+			errCheck: assert.Error,
+		},
+		{
+			name: "Search site",
+			args: func() *models.Site {
+				s := models.NewSite()
+				s.SetId(ptr.To("id"))
+				s.SetWebUrl(ptr.To("sharepoint.com/search"))
+				return s
+			}(),
+			errCheck: assert.Error,
+			errIsSkippable: true,
+		},
+		{
+			name: "Personal OneDrive",
+			args: func() *models.Site {
+				s := models.NewSite()
+				s.SetId(ptr.To("id"))
+				s.SetWebUrl(ptr.To("https://" + personalSitePath + "/someone's/onedrive"))
+				return s
+			}(),
+			errCheck: assert.Error,
+			errIsSkippable: true,
+		},
+		{
+			name: "Valid Site",
+			args: site,
+			want: site,
+			errCheck: assert.NoError,
+		},
+	}
+	for _, test := range tests {
+		suite.Run(test.name, func() {
+			t := suite.T()
+
+			got, err := validateSite(test.args)
+			test.errCheck(t, err, clues.ToCore(err))
+
+			if test.errIsSkippable {
+				assert.ErrorIs(t, err, errKnownSkippableCase)
+			}
+
+			assert.Equal(t, test.want, got)
+		})
+	}
+}
+
+type SitesIntgSuite struct {
+	tester.Suite
+}
+
+func TestSitesIntgSuite(t *testing.T) {
+	suite.Run(t, &SitesIntgSuite{
+		Suite: tester.NewIntegrationSuite(t, [][]string{tester.M365AcctCredEnvs}),
+	})
+}
+
+func (suite *SitesIntgSuite) TestSites_GetByID() {
+	var (
+		t       = suite.T()
+		siteID  = tester.M365SiteID(t)
+		host    = strings.Split(siteID, ",")[0]
+		shortID = strings.TrimPrefix(siteID, host+",")
+		siteURL = tester.M365SiteURL(t)
+		acct    = tester.NewM365Account(t)
+	)
+
+	creds, err := acct.M365Config()
+	require.NoError(t, err, clues.ToCore(err))
+
+	client, err := NewClient(creds)
+	require.NoError(t, err, clues.ToCore(err))
+
+	sitesAPI := client.Sites()
+
+	table := []struct {
+		name      string
+		id        string
+		expectErr assert.ErrorAssertionFunc
+	}{
+		{"3 part id", siteID, assert.NoError},
+		{"2 part id", shortID, assert.NoError},
+		{"malformed id", uuid.NewString(), assert.Error},
+		{"random id", uuid.NewString() + "," + uuid.NewString(), assert.Error},
+		{"url", siteURL, assert.NoError},
+		{"host only", host, assert.NoError},
+		{"malformed url", "barunihlda", assert.Error},
+		{"non-matching url", "https://test/sites/testing", assert.Error},
+	}
+	for _, test := range table {
+		suite.Run(test.name, func() {
+			ctx, flush := tester.NewContext()
+			defer flush()
+
+			t := suite.T()
+
+			_, err := sitesAPI.GetByID(ctx, test.id)
+			test.expectErr(t, err, clues.ToCore(err))
+		})
+	}
+}
@@ -10,6 +10,7 @@ import (
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
 	"github.com/microsoftgraph/msgraph-sdk-go/users"

+	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
@@ -150,6 +151,17 @@ func (c Users) GetByID(ctx context.Context, userID string) (models.Userable, err
 	return resp, err
 }

+// GetIDAndName looks up the user matching the given ID, and returns
+// its canonical ID and the PrincipalName as the name.
+func (c Users) GetIDAndName(ctx context.Context, userID string) (string, string, error) {
+	u, err := c.GetByID(ctx, userID)
+	if err != nil {
+		return "", "", err
+	}
+
+	return ptr.Val(u.GetId()), ptr.Val(u.GetUserPrincipalName()), nil
+}
+
 func (c Users) GetInfo(ctx context.Context, userID string) (*UserInfo, error) {
 	// Assume all services are enabled
 	// then filter down to only services the user has enabled
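The CLI hunks earlier obtain id-to-name and name-to-id maps from m365.UsersMap; one plausible way to assemble such maps on top of the lookup added above (a hypothetical helper, not part of this diff; the real UsersMap implementation may differ):

```go
// buildUserMaps is a hypothetical illustration: it pairs each user ID with its
// principal name via Users.GetIDAndName and returns both lookup directions.
// Assumes the api package from this diff and a context.Context in scope.
func buildUserMaps(
    ctx context.Context,
    us api.Users,
    userIDs []string,
) (map[string]string, map[string]string, error) {
    idToName := map[string]string{}
    nameToID := map[string]string{}

    for _, uid := range userIDs {
        id, name, err := us.GetIDAndName(ctx, uid)
        if err != nil {
            return nil, nil, err
        }

        idToName[id] = name
        nameToID[name] = id
    }

    return idToName, nameToID, nil
}
```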
@@ -29,6 +29,24 @@ type getWithInfoer interface {
 	getInfoer
 }

+// ---------------------------------------------------------------------------
+// helpers
+// ---------------------------------------------------------------------------
+
+func apiClient(ctx context.Context, acct account.Account) (api.Client, error) {
+	m365, err := acct.M365Config()
+	if err != nil {
+		return api.Client{}, clues.Wrap(err, "retrieving m365 account configuration").WithClues(ctx)
+	}
+
+	client, err := api.NewClient(m365)
+	if err != nil {
+		return api.Client{}, clues.Wrap(err, "creating api client").WithClues(ctx)
+	}
+
+	return client, nil
+}
+
 // ---------------------------------------------------------------------------
 // api
 // ---------------------------------------------------------------------------
@@ -39,19 +57,15 @@ func Users(
 	acct account.Account,
 	errs *fault.Bus,
 ) ([]models.Userable, error) {
-	m365, err := acct.M365Config()
+	client, err := apiClient(ctx, acct)
 	if err != nil {
-		return nil, clues.Wrap(err, "retrieving m365 account configuration").WithClues(ctx)
-	}
-
-	client, err := api.NewClient(m365)
-	if err != nil {
-		return nil, clues.Wrap(err, "creating api client").WithClues(ctx)
+		return nil, err
 	}

 	return client.Users().GetAll(ctx, errs)
 }

+// User fetches a single user's data.
 func User(ctx context.Context, gwi getWithInfoer, userID string) (models.Userable, *api.UserInfo, error) {
 	u, err := gwi.GetByID(ctx, userID)
 	if err != nil {
@@ -69,3 +83,17 @@ func User(ctx context.Context, gwi getWithInfoer, userID string) (models.Userabl

 	return u, ui, nil
 }
+
+// Sites fetches all sharepoint sites in the tenant
+func Sites(
+	ctx context.Context,
+	acct account.Account,
+	errs *fault.Bus,
+) ([]models.Siteable, error) {
+	client, err := apiClient(ctx, acct)
+	if err != nil {
+		return nil, err
+	}
+
+	return client.Sites().GetAll(ctx, errs)
+}
@@ -31,10 +31,11 @@ func (suite *DiscoveryIntegrationSuite) TestUsers() {
 	ctx, flush := tester.NewContext()
 	defer flush()

-	t := suite.T()
-	acct := tester.NewM365Account(t)
-	errs := fault.New(true)
+	var (
+		t    = suite.T()
+		acct = tester.NewM365Account(t)
+		errs = fault.New(true)
+	)

 	users, err := discovery.Users(ctx, acct, errs)
 	assert.NoError(t, err, clues.ToCore(err))
@@ -42,8 +43,7 @@ func (suite *DiscoveryIntegrationSuite) TestUsers() {
 	ferrs := errs.Errors()
 	assert.Nil(t, ferrs.Failure)
 	assert.Empty(t, ferrs.Recovered)
-	assert.Less(t, 0, len(users))
+	assert.NotEmpty(t, users)
 }

 func (suite *DiscoveryIntegrationSuite) TestUsers_InvalidCredentials() {
@@ -84,16 +84,85 @@ func (suite *DiscoveryIntegrationSuite) TestUsers_InvalidCredentials() {

 	for _, test := range table {
 		suite.Run(test.name, func() {
-			t := suite.T()
+			var (
+				t    = suite.T()
+				a    = test.acct(t)
+				errs = fault.New(true)
+			)
+
+			users, err := discovery.Users(ctx, a, errs)
+			assert.Empty(t, users, "returned some users")
+			assert.NotNil(t, err)
+		})
+	}
+}
+
+func (suite *DiscoveryIntegrationSuite) TestSites() {
+	ctx, flush := tester.NewContext()
+	defer flush()
+
+	var (
+		t    = suite.T()
+		acct = tester.NewM365Account(t)
+		errs = fault.New(true)
+	)
+
+	sites, err := discovery.Sites(ctx, acct, errs)
+	assert.NoError(t, err, clues.ToCore(err))
+
+	ferrs := errs.Errors()
+	assert.Nil(t, ferrs.Failure)
+	assert.Empty(t, ferrs.Recovered)
+	assert.NotEmpty(t, sites)
+}
+
+func (suite *DiscoveryIntegrationSuite) TestSites_InvalidCredentials() {
+	ctx, flush := tester.NewContext()
+	defer flush()
+
+	table := []struct {
+		name string
+		acct func(t *testing.T) account.Account
+	}{
+		{
+			name: "Invalid Credentials",
+			acct: func(t *testing.T) account.Account {
+				a, err := account.NewAccount(
+					account.ProviderM365,
+					account.M365Config{
+						M365: credentials.M365{
+							AzureClientID:     "Test",
+							AzureClientSecret: "without",
+						},
+						AzureTenantID: "data",
+					},
+				)
+				require.NoError(t, err, clues.ToCore(err))
+
+				return a
+			},
+		},
+		{
+			name: "Empty Credentials",
+			acct: func(t *testing.T) account.Account {
+				// intentionally swallowing the error here
+				a, _ := account.NewAccount(account.ProviderM365)
+				return a
+			},
+		},
+	}
+
+	for _, test := range table {
+		suite.Run(test.name, func() {
+			var (
+				t    = suite.T()
+				a    = test.acct(t)
+				errs = fault.New(true)
+			)

-			a := test.acct(t)
-			errs := fault.New(true)
 			users, err := discovery.Users(ctx, a, errs)

 			assert.Empty(t, users, "returned some users")
 			assert.NotNil(t, err)
-			// TODO(ashmrtn): Uncomment when fault package is used in discovery API.
-			// assert.NotNil(t, errs.Err())
 		})
 	}
 }
@@ -4,25 +4,18 @@ package connector

 import (
 	"context"
-	"fmt"
 	"net/http"
 	"runtime/trace"
-	"strings"
 	"sync"

 	"github.com/alcionai/clues"
-	"github.com/microsoft/kiota-abstractions-go/serialization"
-	msgraphgocore "github.com/microsoftgraph/msgraph-sdk-go-core"
-	"github.com/microsoftgraph/msgraph-sdk-go/models"
 	"github.com/pkg/errors"
 	"golang.org/x/exp/maps"

-	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/connector/discovery/api"
 	"github.com/alcionai/corso/src/internal/connector/graph"
-	"github.com/alcionai/corso/src/internal/connector/sharepoint"
 	"github.com/alcionai/corso/src/internal/connector/support"
-	"github.com/alcionai/corso/src/internal/diagnostics"
+	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/filters"
@@ -37,13 +30,19 @@ import (
 // bookkeeping and interfacing with other component.
 type GraphConnector struct {
 	Service    graph.Servicer
-	Owners     api.Client
+	Discovery  api.Client
 	itemClient *http.Client // configured to handle large item downloads

 	tenant      string
-	Sites       map[string]string // webURL -> siteID and siteID -> webURL
 	credentials account.M365Config

+	ownerLookup getOwnerIDAndNamer
+	// maps of resource owner ids to names, and names to ids.
+	// not guaranteed to be populated, only here as a post-population
+	// reference for processes that choose to populate the values.
+	ResourceOwnerIDToName map[string]string
+	ResourceOwnerNameToID map[string]string
+
 	// wg is used to track completion of GC tasks
 	wg     *sync.WaitGroup
 	region *trace.Region
@@ -53,15 +52,6 @@ type GraphConnector struct {
 	status support.ConnectorOperationStatus // contains the status of the last run status
 }

-type resource int
-
-const (
-	UnknownResource resource = iota
-	AllResources
-	Users
-	Sites
-)
-
 func NewGraphConnector(
 	ctx context.Context,
 	itemClient *http.Client,
@@ -69,43 +59,93 @@ func NewGraphConnector(
 	r resource,
 	errs *fault.Bus,
 ) (*GraphConnector, error) {
-	m365, err := acct.M365Config()
+	creds, err := acct.M365Config()
 	if err != nil {
 		return nil, clues.Wrap(err, "retrieving m365 account configuration").WithClues(ctx)
 	}

-	gc := GraphConnector{
-		itemClient:  itemClient,
-		tenant:      m365.AzureTenantID,
-		wg:          &sync.WaitGroup{},
-		credentials: m365,
-	}
-
-	gc.Service, err = gc.createService()
+	service, err := createService(creds)
 	if err != nil {
 		return nil, clues.Wrap(err, "creating service connection").WithClues(ctx)
 	}

-	gc.Owners, err = api.NewClient(m365)
+	discovery, err := api.NewClient(creds)
 	if err != nil {
 		return nil, clues.Wrap(err, "creating api client").WithClues(ctx)
 	}

-	if r == AllResources || r == Sites {
-		if err = gc.setTenantSites(ctx, errs); err != nil {
-			return nil, clues.Wrap(err, "retrieveing tenant site list")
-		}
+	rc, err := r.resourceClient(discovery)
+	if err != nil {
+		return nil, clues.Wrap(err, "creating resource client").WithClues(ctx)
+	}
+
+	gc := GraphConnector{
+		itemClient:  itemClient,
+		Discovery:   discovery,
+		tenant:      acct.ID(),
+		wg:          &sync.WaitGroup{},
+		credentials: creds,
+		ownerLookup: rc,
+		Service:     service,
 	}

 	return &gc, nil
 }
+
+// ---------------------------------------------------------------------------
+// Owner Lookup
+// ---------------------------------------------------------------------------
+
+// PopulateOwnerIDAndNamesFrom takes the provided owner identifier and produces
+// the owner's name and ID from that value. Returns an error if the owner is
+// not recognized by the current tenant.
+//
+// The id-name maps are optional. Some processes will look up all owners in
+// the tenant before reaching this step. In that case, the data gets handed
+// down for this func to consume instead of performing further queries. The
+// maps get stored inside the gc instance for later re-use.
+//
+// TODO: If the maps are nil or empty, this func will perform a lookup on the given
+// owner, and populate each map with that owner's id and name for downstream
+// guarantees about that data being present. Optional performance enhancement
|
||||||
|
// idea: downstream from here, we should _only_ need the given user's id and name,
|
||||||
|
// and could store minimal map copies with that info instead of the whole tenant.
|
||||||
|
func (gc *GraphConnector) PopulateOwnerIDAndNamesFrom(
|
||||||
|
ctx context.Context,
|
||||||
|
owner string, // input value, can be either id or name
|
||||||
|
idToName, nameToID map[string]string, // optionally pre-populated lookups
|
||||||
|
) (string, string, error) {
|
||||||
|
// ensure the maps exist, even if they aren't populated so that
|
||||||
|
// getOwnerIDAndNameFrom can populate any values it looks up.
|
||||||
|
if len(idToName) == 0 {
|
||||||
|
idToName = map[string]string{}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(nameToID) == 0 {
|
||||||
|
nameToID = map[string]string{}
|
||||||
|
}
|
||||||
|
|
||||||
|
id, name, err := gc.ownerLookup.getOwnerIDAndNameFrom(ctx, gc.Discovery, owner, idToName, nameToID)
|
||||||
|
if err != nil {
|
||||||
|
return "", "", errors.Wrap(err, "resolving resource owner details")
|
||||||
|
}
|
||||||
|
|
||||||
|
gc.ResourceOwnerIDToName = idToName
|
||||||
|
gc.ResourceOwnerNameToID = nameToID
|
||||||
|
|
||||||
|
return id, name, nil
|
||||||
|
}
|
||||||
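As a reading aid (not part of this change set): a minimal sketch of how a caller might use the new method, assuming the helper lives alongside the connector package and that gc is a *GraphConnector built via NewGraphConnector. The helper name and its setup are hypothetical.

package connector

import "context"

// resolveOwner is a hypothetical helper illustrating the call pattern.
// The id/name maps are optional; nil or empty maps force a lookup through
// the connector's discovery client, and the results are cached on gc in
// ResourceOwnerIDToName / ResourceOwnerNameToID for later re-use.
func resolveOwner(
	ctx context.Context,
	gc *GraphConnector,
	owner string,
	idToName, nameToID map[string]string,
) (string, string, error) {
	id, name, err := gc.PopulateOwnerIDAndNamesFrom(ctx, owner, idToName, nameToID)
	if err != nil {
		return "", "", err
	}

	return id, name, nil
}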
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Service Client
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
// createService constructor for graphService component
|
// createService constructor for graphService component
|
||||||
func (gc *GraphConnector) createService() (*graph.Service, error) {
|
func createService(creds account.M365Config) (*graph.Service, error) {
|
||||||
adapter, err := graph.CreateAdapter(
|
adapter, err := graph.CreateAdapter(
|
||||||
gc.credentials.AzureTenantID,
|
creds.AzureTenantID,
|
||||||
gc.credentials.AzureClientID,
|
creds.AzureClientID,
|
||||||
gc.credentials.AzureClientSecret)
|
creds.AzureClientSecret)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return &graph.Service{}, err
|
return &graph.Service{}, err
|
||||||
}
|
}
|
||||||
@ -113,117 +153,12 @@ func (gc *GraphConnector) createService() (*graph.Service, error) {
|
|||||||
return graph.NewService(adapter), nil
|
return graph.NewService(adapter), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// setTenantSites queries the M365 to identify the sites in the
|
// ---------------------------------------------------------------------------
|
||||||
// workspace. The sites field is updated during this method
|
// Processing Status
|
||||||
// iff the returned error is nil.
|
// ---------------------------------------------------------------------------
|
||||||
func (gc *GraphConnector) setTenantSites(ctx context.Context, errs *fault.Bus) error {
|
|
||||||
gc.Sites = map[string]string{}
|
|
||||||
|
|
||||||
ctx, end := diagnostics.Span(ctx, "gc:setTenantSites")
|
|
||||||
defer end()
|
|
||||||
|
|
||||||
sites, err := getResources(
|
|
||||||
ctx,
|
|
||||||
gc.Service,
|
|
||||||
gc.tenant,
|
|
||||||
sharepoint.GetAllSitesForTenant,
|
|
||||||
models.CreateSiteCollectionResponseFromDiscriminatorValue,
|
|
||||||
identifySite,
|
|
||||||
errs)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
gc.Sites = sites
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
var errKnownSkippableCase = clues.New("case is known and skippable")
|
|
||||||
|
|
||||||
const personalSitePath = "sharepoint.com/personal/"
|
|
||||||
|
|
||||||
// Transforms an interface{} into a key,value pair representing
|
|
||||||
// siteName:siteID.
|
|
||||||
func identifySite(item any) (string, string, error) {
|
|
||||||
m, ok := item.(models.Siteable)
|
|
||||||
if !ok {
|
|
||||||
return "", "", clues.New("non-Siteable item").With("item_type", fmt.Sprintf("%T", item))
|
|
||||||
}
|
|
||||||
|
|
||||||
id := ptr.Val(m.GetId())
|
|
||||||
url, ok := ptr.ValOK(m.GetWebUrl())
|
|
||||||
|
|
||||||
if m.GetName() == nil {
|
|
||||||
// the built-in site at "https://{tenant-domain}/search" never has a name.
|
|
||||||
if ok && strings.HasSuffix(url, "/search") {
|
|
||||||
// TODO: pii siteID, on this and all following cases
|
|
||||||
return "", "", clues.Stack(errKnownSkippableCase).With("site_id", id)
|
|
||||||
}
|
|
||||||
|
|
||||||
return "", "", clues.New("site has no name").With("site_id", id)
|
|
||||||
}
|
|
||||||
|
|
||||||
// personal (ie: oneDrive) sites have to be filtered out server-side.
|
|
||||||
if ok && strings.Contains(url, personalSitePath) {
|
|
||||||
return "", "", clues.Stack(errKnownSkippableCase).With("site_id", id)
|
|
||||||
}
|
|
||||||
|
|
||||||
return url, id, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetSiteWebURLs returns the WebURLs of sharepoint sites within the tenant.
|
|
||||||
func (gc *GraphConnector) GetSiteWebURLs() []string {
|
|
||||||
return maps.Keys(gc.Sites)
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetSiteIds returns the canonical site IDs in the tenant
|
|
||||||
func (gc *GraphConnector) GetSiteIDs() []string {
|
|
||||||
return maps.Values(gc.Sites)
|
|
||||||
}
|
|
||||||
|
|
||||||
// UnionSiteIDsAndWebURLs reduces the id and url slices into a single slice of site IDs.
|
|
||||||
// WebURLs will run as a path-suffix style matcher. Callers may provide partial urls, though
|
|
||||||
// each element in the url must fully match. Ex: the webURL value "foo" will match "www.ex.com/foo",
|
|
||||||
// but not match "www.ex.com/foobar".
|
|
||||||
// The returned IDs are reduced to a set of unique values.
|
|
||||||
func (gc *GraphConnector) UnionSiteIDsAndWebURLs(
|
|
||||||
ctx context.Context,
|
|
||||||
ids, urls []string,
|
|
||||||
errs *fault.Bus,
|
|
||||||
) ([]string, error) {
|
|
||||||
if len(gc.Sites) == 0 {
|
|
||||||
if err := gc.setTenantSites(ctx, errs); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
idm := map[string]struct{}{}
|
|
||||||
|
|
||||||
for _, id := range ids {
|
|
||||||
idm[id] = struct{}{}
|
|
||||||
}
|
|
||||||
|
|
||||||
match := filters.PathSuffix(urls)
|
|
||||||
|
|
||||||
for url, id := range gc.Sites {
|
|
||||||
if !match.Compare(url) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
idm[id] = struct{}{}
|
|
||||||
}
|
|
||||||
|
|
||||||
idsl := make([]string, 0, len(idm))
|
|
||||||
for id := range idm {
|
|
||||||
idsl = append(idsl, id)
|
|
||||||
}
|
|
||||||
|
|
||||||
return idsl, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// AwaitStatus waits for all gc tasks to complete and then returns status
|
// AwaitStatus waits for all gc tasks to complete and then returns status
|
||||||
func (gc *GraphConnector) AwaitStatus() *support.ConnectorOperationStatus {
|
func (gc *GraphConnector) Wait() *data.CollectionStats {
|
||||||
defer func() {
|
defer func() {
|
||||||
if gc.region != nil {
|
if gc.region != nil {
|
||||||
gc.region.End()
|
gc.region.End()
|
||||||
@ -233,12 +168,18 @@ func (gc *GraphConnector) AwaitStatus() *support.ConnectorOperationStatus {
|
|||||||
gc.wg.Wait()
|
gc.wg.Wait()
|
||||||
|
|
||||||
// clean up and reset statefulness
|
// clean up and reset statefulness
|
||||||
status := gc.status
|
dcs := data.CollectionStats{
|
||||||
|
Folders: gc.status.Folders,
|
||||||
|
Objects: gc.status.Metrics.Objects,
|
||||||
|
Successes: gc.status.Metrics.Successes,
|
||||||
|
Bytes: gc.status.Metrics.Bytes,
|
||||||
|
Details: gc.status.String(),
|
||||||
|
}
|
||||||
|
|
||||||
gc.wg = &sync.WaitGroup{}
|
gc.wg = &sync.WaitGroup{}
|
||||||
gc.status = support.ConnectorOperationStatus{}
|
gc.status = support.ConnectorOperationStatus{}
|
||||||
|
|
||||||
return &status
|
return &dcs
|
||||||
}
|
}
|
||||||
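For orientation, a short sketch of consuming the reshaped Wait result, assuming a populated *GraphConnector; the summarize helper is illustrative only and not part of this diff.

package connector

import "fmt"

// summarize is a hypothetical helper: Wait blocks until outstanding gc tasks
// complete, then returns the flattened data.CollectionStats in place of the
// old support.ConnectorOperationStatus.
func summarize(gc *GraphConnector) string {
	stats := gc.Wait()

	return fmt.Sprintf(
		"folders=%d objects=%d successes=%d bytes=%d",
		stats.Folders, stats.Objects, stats.Successes, stats.Bytes)
}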
|
|
||||||
// UpdateStatus is used by gc initiated tasks to indicate completion
|
// UpdateStatus is used by gc initiated tasks to indicate completion
|
||||||
@ -273,57 +214,110 @@ func (gc *GraphConnector) incrementMessagesBy(num int) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
// Helper Funcs
|
// Resource Handling
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
func getResources(
|
type resource int
|
||||||
ctx context.Context,
|
|
||||||
gs graph.Servicer,
|
|
||||||
tenantID string,
|
|
||||||
query func(context.Context, graph.Servicer) (serialization.Parsable, error),
|
|
||||||
parser func(parseNode serialization.ParseNode) (serialization.Parsable, error),
|
|
||||||
identify func(any) (string, string, error),
|
|
||||||
errs *fault.Bus,
|
|
||||||
) (map[string]string, error) {
|
|
||||||
resources := map[string]string{}
|
|
||||||
|
|
||||||
response, err := query(ctx, gs)
|
const (
|
||||||
if err != nil {
|
UnknownResource resource = iota
|
||||||
return nil, graph.Wrap(ctx, err, "retrieving tenant's resources")
|
AllResources // unused
|
||||||
|
Users
|
||||||
|
Sites
|
||||||
|
)
|
||||||
|
|
||||||
|
func (r resource) resourceClient(discovery api.Client) (*resourceClient, error) {
|
||||||
|
switch r {
|
||||||
|
case Users:
|
||||||
|
return &resourceClient{enum: r, getter: discovery.Users()}, nil
|
||||||
|
case Sites:
|
||||||
|
return &resourceClient{enum: r, getter: discovery.Sites()}, nil
|
||||||
|
default:
|
||||||
|
return nil, clues.New("unrecognized owner resource enum").With("resource_enum", r)
|
||||||
}
|
}
|
||||||
|
}
|
||||||
iter, err := msgraphgocore.NewPageIterator(response, gs.Adapter(), parser)
|
|
||||||
if err != nil {
|
type resourceClient struct {
|
||||||
return nil, graph.Stack(ctx, err)
|
enum resource
|
||||||
}
|
getter getIDAndNamer
|
||||||
|
}
|
||||||
el := errs.Local()
|
|
||||||
|
type getIDAndNamer interface {
|
||||||
callbackFunc := func(item any) bool {
|
GetIDAndName(ctx context.Context, owner string) (
|
||||||
if el.Failure() != nil {
|
ownerID string,
|
||||||
return false
|
ownerName string,
|
||||||
}
|
err error,
|
||||||
|
)
|
||||||
k, v, err := identify(item)
|
}
|
||||||
if err != nil {
|
|
||||||
if !errors.Is(err, errKnownSkippableCase) {
|
var _ getOwnerIDAndNamer = &resourceClient{}
|
||||||
el.AddRecoverable(clues.Stack(err).
|
|
||||||
WithClues(ctx).
|
type getOwnerIDAndNamer interface {
|
||||||
With("query_url", gs.Adapter().GetBaseUrl()))
|
getOwnerIDAndNameFrom(
|
||||||
}
|
ctx context.Context,
|
||||||
|
discovery api.Client,
|
||||||
return true
|
owner string,
|
||||||
}
|
idToName, nameToID map[string]string,
|
||||||
|
) (
|
||||||
resources[k] = v
|
ownerID string,
|
||||||
resources[v] = k
|
ownerName string,
|
||||||
|
err error,
|
||||||
return true
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := iter.Iterate(ctx, callbackFunc); err != nil {
|
var ErrResourceOwnerNotFound = clues.New("resource owner not found in tenant")
|
||||||
return nil, graph.Stack(ctx, err)
|
|
||||||
}
|
// getOwnerIDAndNameFrom looks up the owner's canonical id and display name.
|
||||||
|
// if idToName and nameToID are populated, and the owner is a key of one of
|
||||||
return resources, el.Failure()
|
// those maps, then those values are returned.
|
||||||
|
//
|
||||||
|
// As a fallback, the resource calls the discovery api to fetch the user or
|
||||||
|
// site using the owner value. This fallback assumes that the owner is a well
|
||||||
|
// formed ID or display name of appropriate design (PrincipalName for users,
|
||||||
|
// WebURL for sites). If the fallback lookup is used, the maps are populated
|
||||||
|
// to contain the id and name references.
|
||||||
|
//
|
||||||
|
// Consumers are allowed to pass in a path suffix (eg: /sites/foo) as a site
|
||||||
|
// owner, but only if they also pass in a nameToID map. A nil map will cascade
|
||||||
|
// to the fallback, which will fail for having a malformed id value.
|
||||||
|
func (r resourceClient) getOwnerIDAndNameFrom(
|
||||||
|
ctx context.Context,
|
||||||
|
discovery api.Client,
|
||||||
|
owner string,
|
||||||
|
idToName, nameToID map[string]string,
|
||||||
|
) (string, string, error) {
|
||||||
|
if n, ok := idToName[owner]; ok {
|
||||||
|
return owner, n, nil
|
||||||
|
} else if id, ok := nameToID[owner]; ok {
|
||||||
|
return id, owner, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx = clues.Add(ctx, "owner_identifier", owner)
|
||||||
|
|
||||||
|
var (
|
||||||
|
id, name string
|
||||||
|
err error
|
||||||
|
)
|
||||||
|
|
||||||
|
// check if the provided owner is a suffix of a weburl in the lookup map
|
||||||
|
if r.enum == Sites {
|
||||||
|
url, _, ok := filters.PathSuffix([]string{owner}).CompareAny(maps.Keys(nameToID)...)
|
||||||
|
if ok {
|
||||||
|
return nameToID[url], url, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
id, name, err = r.getter.GetIDAndName(ctx, owner)
|
||||||
|
if err != nil {
|
||||||
|
return "", "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(id) == 0 || len(name) == 0 {
|
||||||
|
return "", "", clues.Stack(ErrResourceOwnerNotFound)
|
||||||
|
}
|
||||||
|
|
||||||
|
idToName[id] = name
|
||||||
|
nameToID[name] = id
|
||||||
|
|
||||||
|
return id, name, nil
|
||||||
}
|
}
|
||||||
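One nuance worth calling out from the comments above: a URL path suffix only resolves for Sites owners when the caller supplies a nameToID (webURL -> siteID) map; a nil map cascades to the discovery fallback, which expects a well-formed ID or webURL. A hedged sketch, with a hypothetical helper and map name:

package connector

import "context"

// lookupSiteBySuffix illustrates the documented constraint: gc is assumed to
// have been constructed with the Sites resource, and webURLToID must already
// map full site webURLs to IDs for a suffix such as "/sites/foo" to match;
// otherwise the fallback lookup rejects the malformed identifier.
func lookupSiteBySuffix(
	ctx context.Context,
	gc *GraphConnector,
	suffix string,
	webURLToID map[string]string,
) (string, string, error) {
	return gc.PopulateOwnerIDAndNamesFrom(ctx, suffix, nil, webURLToID)
}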
|
|||||||
@ -111,17 +111,16 @@ func (suite *DisconnectedGraphConnectorSuite) TestGraphConnector_Status() {
|
|||||||
go statusTestTask(&gc, 4, 1, 1)
|
go statusTestTask(&gc, 4, 1, 1)
|
||||||
go statusTestTask(&gc, 4, 1, 1)
|
go statusTestTask(&gc, 4, 1, 1)
|
||||||
|
|
||||||
status := gc.AwaitStatus()
|
stats := gc.Wait()
|
||||||
|
|
||||||
t := suite.T()
|
t := suite.T()
|
||||||
|
|
||||||
assert.NotEmpty(t, gc.PrintableStatus())
|
assert.NotEmpty(t, gc.PrintableStatus())
|
||||||
// Expect 8 objects
|
// Expect 8 objects
|
||||||
assert.Equal(t, 8, status.Metrics.Objects)
|
assert.Equal(t, 8, stats.Objects)
|
||||||
// Expect 2 success
|
// Expect 2 success
|
||||||
assert.Equal(t, 2, status.Metrics.Successes)
|
assert.Equal(t, 2, stats.Successes)
|
||||||
// Expect 2 folders
|
// Expect 2 folders
|
||||||
assert.Equal(t, 2, status.Folders)
|
assert.Equal(t, 2, stats.Folders)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs_allServices() {
|
func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs_allServices() {
|
||||||
|
|||||||
@ -452,11 +452,11 @@ func (suite *GraphConnectorSharePointIntegrationSuite) SetupSuite() {
|
|||||||
|
|
||||||
si.resourceOwner = tester.M365SiteID(suite.T())
|
si.resourceOwner = tester.M365SiteID(suite.T())
|
||||||
|
|
||||||
user, err := si.connector.Owners.Users().GetByID(ctx, si.user)
|
user, err := si.connector.Discovery.Users().GetByID(ctx, si.user)
|
||||||
require.NoError(suite.T(), err, "fetching user", si.user, clues.ToCore(err))
|
require.NoError(suite.T(), err, "fetching user", si.user, clues.ToCore(err))
|
||||||
si.userID = ptr.Val(user.GetId())
|
si.userID = ptr.Val(user.GetId())
|
||||||
|
|
||||||
secondaryUser, err := si.connector.Owners.Users().GetByID(ctx, si.secondaryUser)
|
secondaryUser, err := si.connector.Discovery.Users().GetByID(ctx, si.secondaryUser)
|
||||||
require.NoError(suite.T(), err, "fetching user", si.secondaryUser, clues.ToCore(err))
|
require.NoError(suite.T(), err, "fetching user", si.secondaryUser, clues.ToCore(err))
|
||||||
si.secondaryUserID = ptr.Val(secondaryUser.GetId())
|
si.secondaryUserID = ptr.Val(secondaryUser.GetId())
|
||||||
|
|
||||||
@ -499,11 +499,11 @@ func (suite *GraphConnectorOneDriveIntegrationSuite) SetupSuite() {
|
|||||||
|
|
||||||
si.resourceOwner = si.user
|
si.resourceOwner = si.user
|
||||||
|
|
||||||
user, err := si.connector.Owners.Users().GetByID(ctx, si.user)
|
user, err := si.connector.Discovery.Users().GetByID(ctx, si.user)
|
||||||
require.NoError(suite.T(), err, "fetching user", si.user, clues.ToCore(err))
|
require.NoError(suite.T(), err, "fetching user", si.user, clues.ToCore(err))
|
||||||
si.userID = ptr.Val(user.GetId())
|
si.userID = ptr.Val(user.GetId())
|
||||||
|
|
||||||
secondaryUser, err := si.connector.Owners.Users().GetByID(ctx, si.secondaryUser)
|
secondaryUser, err := si.connector.Discovery.Users().GetByID(ctx, si.secondaryUser)
|
||||||
require.NoError(suite.T(), err, "fetching user", si.secondaryUser, clues.ToCore(err))
|
require.NoError(suite.T(), err, "fetching user", si.secondaryUser, clues.ToCore(err))
|
||||||
si.secondaryUserID = ptr.Val(secondaryUser.GetId())
|
si.secondaryUserID = ptr.Val(secondaryUser.GetId())
|
||||||
|
|
||||||
@ -695,11 +695,11 @@ func (suite *GraphConnectorOneDriveNightlySuite) SetupSuite() {
|
|||||||
|
|
||||||
si.resourceOwner = si.user
|
si.resourceOwner = si.user
|
||||||
|
|
||||||
user, err := si.connector.Owners.Users().GetByID(ctx, si.user)
|
user, err := si.connector.Discovery.Users().GetByID(ctx, si.user)
|
||||||
require.NoError(suite.T(), err, "fetching user", si.user, clues.ToCore(err))
|
require.NoError(suite.T(), err, "fetching user", si.user, clues.ToCore(err))
|
||||||
si.userID = ptr.Val(user.GetId())
|
si.userID = ptr.Val(user.GetId())
|
||||||
|
|
||||||
secondaryUser, err := si.connector.Owners.Users().GetByID(ctx, si.secondaryUser)
|
secondaryUser, err := si.connector.Discovery.Users().GetByID(ctx, si.secondaryUser)
|
||||||
require.NoError(suite.T(), err, "fetching user", si.secondaryUser, clues.ToCore(err))
|
require.NoError(suite.T(), err, "fetching user", si.secondaryUser, clues.ToCore(err))
|
||||||
si.secondaryUserID = ptr.Val(secondaryUser.GetId())
|
si.secondaryUserID = ptr.Val(secondaryUser.GetId())
|
||||||
|
|
||||||
|
|||||||
@ -38,104 +38,175 @@ func TestGraphConnectorUnitSuite(t *testing.T) {
|
|||||||
suite.Run(t, &GraphConnectorUnitSuite{Suite: tester.NewUnitSuite(t)})
|
suite.Run(t, &GraphConnectorUnitSuite{Suite: tester.NewUnitSuite(t)})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (suite *GraphConnectorUnitSuite) TestUnionSiteIDsAndWebURLs() {
|
var _ getIDAndNamer = &mockNameIDGetter{}
|
||||||
|
|
||||||
|
type mockNameIDGetter struct {
|
||||||
|
id, name string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (mnig mockNameIDGetter) GetIDAndName(
|
||||||
|
_ context.Context,
|
||||||
|
_ string,
|
||||||
|
) (string, string, error) {
|
||||||
|
return mnig.id, mnig.name, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
|
||||||
const (
|
const (
|
||||||
url1 = "www.foo.com/bar"
|
ownerID = "owner-id"
|
||||||
url2 = "www.fnords.com/smarf"
|
ownerName = "owner-name"
|
||||||
path1 = "bar"
|
|
||||||
path2 = "/smarf"
|
|
||||||
id1 = "site-id-1"
|
|
||||||
id2 = "site-id-2"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
gc := &GraphConnector{
|
var (
|
||||||
// must be populated, else the func will try to make a graph call
|
itn = map[string]string{ownerID: ownerName}
|
||||||
// to retrieve site data.
|
nti = map[string]string{ownerName: ownerID}
|
||||||
Sites: map[string]string{
|
lookup = &resourceClient{
|
||||||
url1: id1,
|
enum: Users,
|
||||||
url2: id2,
|
getter: &mockNameIDGetter{id: ownerID, name: ownerName},
|
||||||
},
|
}
|
||||||
}
|
noLookup = &resourceClient{enum: Users, getter: &mockNameIDGetter{}}
|
||||||
|
siteLookup = &resourceClient{enum: Sites, getter: &mockNameIDGetter{}}
|
||||||
|
)
|
||||||
|
|
||||||
table := []struct {
|
table := []struct {
|
||||||
name string
|
name string
|
||||||
ids []string
|
owner string
|
||||||
urls []string
|
idToName map[string]string
|
||||||
expect []string
|
nameToID map[string]string
|
||||||
|
rc *resourceClient
|
||||||
|
expectID string
|
||||||
|
expectName string
|
||||||
|
expectErr assert.ErrorAssertionFunc
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
name: "nil",
|
name: "nil maps, getter lookup",
|
||||||
|
owner: ownerID,
|
||||||
|
rc: lookup,
|
||||||
|
idToName: nil,
|
||||||
|
nameToID: nil,
|
||||||
|
expectID: ownerID,
|
||||||
|
expectName: ownerName,
|
||||||
|
expectErr: assert.NoError,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "empty",
|
name: "only id map with owner id",
|
||||||
ids: []string{},
|
owner: ownerID,
|
||||||
urls: []string{},
|
rc: noLookup,
|
||||||
expect: []string{},
|
idToName: itn,
|
||||||
|
nameToID: nil,
|
||||||
|
expectID: ownerID,
|
||||||
|
expectName: ownerName,
|
||||||
|
expectErr: assert.NoError,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "ids only",
|
name: "only name map with owner id",
|
||||||
ids: []string{id1, id2},
|
owner: ownerID,
|
||||||
urls: []string{},
|
rc: lookup,
|
||||||
expect: []string{id1, id2},
|
idToName: nil,
|
||||||
|
nameToID: nti,
|
||||||
|
expectID: ownerID,
|
||||||
|
expectName: ownerName,
|
||||||
|
expectErr: assert.NoError,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "urls only",
|
name: "only id map with owner name",
|
||||||
ids: []string{},
|
owner: ownerName,
|
||||||
urls: []string{url1, url2},
|
rc: lookup,
|
||||||
expect: []string{id1, id2},
|
idToName: itn,
|
||||||
|
nameToID: nil,
|
||||||
|
expectID: ownerID,
|
||||||
|
expectName: ownerName,
|
||||||
|
expectErr: assert.NoError,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "url suffix only",
|
name: "only name map with owner name",
|
||||||
ids: []string{},
|
owner: ownerName,
|
||||||
urls: []string{path1, path2},
|
rc: lookup,
|
||||||
expect: []string{id1, id2},
|
idToName: nil,
|
||||||
|
nameToID: nti,
|
||||||
|
expectID: ownerID,
|
||||||
|
expectName: ownerName,
|
||||||
|
expectErr: assert.NoError,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "url and suffix overlap",
|
name: "both maps with owner id",
|
||||||
ids: []string{},
|
owner: ownerID,
|
||||||
urls: []string{url1, url2, path1, path2},
|
rc: noLookup,
|
||||||
expect: []string{id1, id2},
|
idToName: itn,
|
||||||
|
nameToID: nti,
|
||||||
|
expectID: ownerID,
|
||||||
|
expectName: ownerName,
|
||||||
|
expectErr: assert.NoError,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "ids and urls, no overlap",
|
name: "both maps with owner name",
|
||||||
ids: []string{id1},
|
owner: ownerName,
|
||||||
urls: []string{url2},
|
rc: noLookup,
|
||||||
expect: []string{id1, id2},
|
idToName: itn,
|
||||||
|
nameToID: nti,
|
||||||
|
expectID: ownerID,
|
||||||
|
expectName: ownerName,
|
||||||
|
expectErr: assert.NoError,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "ids and urls, overlap",
|
name: "non-matching maps with owner id",
|
||||||
ids: []string{id1, id2},
|
owner: ownerID,
|
||||||
urls: []string{url1, url2},
|
rc: noLookup,
|
||||||
expect: []string{id1, id2},
|
idToName: map[string]string{"foo": "bar"},
|
||||||
|
nameToID: map[string]string{"fnords": "smarf"},
|
||||||
|
expectID: "",
|
||||||
|
expectName: "",
|
||||||
|
expectErr: assert.Error,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "partial non-match on path",
|
name: "non-matching with owner name",
|
||||||
ids: []string{},
|
owner: ownerName,
|
||||||
urls: []string{path1[2:], path2[2:]},
|
rc: noLookup,
|
||||||
expect: []string{},
|
idToName: map[string]string{"foo": "bar"},
|
||||||
|
nameToID: map[string]string{"fnords": "smarf"},
|
||||||
|
expectID: "",
|
||||||
|
expectName: "",
|
||||||
|
expectErr: assert.Error,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "partial non-match on url",
|
name: "site suffix lookup",
|
||||||
ids: []string{},
|
owner: "/url/path",
|
||||||
urls: []string{url1[5:], url2[5:]},
|
rc: siteLookup,
|
||||||
expect: []string{},
|
idToName: nil,
|
||||||
|
nameToID: map[string]string{"http://some/site/url/path": ownerID},
|
||||||
|
expectID: ownerID,
|
||||||
|
expectName: "http://some/site/url/path",
|
||||||
|
expectErr: assert.NoError,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
for _, test := range table {
|
for _, test := range table {
|
||||||
suite.Run(test.name, func() {
|
suite.Run(test.name, func() {
|
||||||
t := suite.T()
|
|
||||||
|
|
||||||
ctx, flush := tester.NewContext()
|
ctx, flush := tester.NewContext()
|
||||||
defer flush()
|
defer flush()
|
||||||
|
|
||||||
result, err := gc.UnionSiteIDsAndWebURLs(ctx, test.ids, test.urls, fault.New(true))
|
var (
|
||||||
assert.NoError(t, err, clues.ToCore(err))
|
t = suite.T()
|
||||||
assert.ElementsMatch(t, test.expect, result)
|
gc = &GraphConnector{ownerLookup: test.rc}
|
||||||
|
)
|
||||||
|
|
||||||
|
id, name, err := gc.PopulateOwnerIDAndNamesFrom(
|
||||||
|
ctx,
|
||||||
|
test.owner,
|
||||||
|
test.idToName,
|
||||||
|
test.nameToID)
|
||||||
|
test.expectErr(t, err, clues.ToCore(err))
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
assert.Equal(t, test.expectID, id, "id")
|
||||||
|
assert.Equal(t, test.expectName, name, "name")
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (suite *GraphConnectorUnitSuite) TestGraphConnector_AwaitStatus() {
|
func (suite *GraphConnectorUnitSuite) TestGraphConnector_Wait() {
|
||||||
ctx, flush := tester.NewContext()
|
ctx, flush := tester.NewContext()
|
||||||
defer flush()
|
defer flush()
|
||||||
|
|
||||||
@ -156,14 +227,14 @@ func (suite *GraphConnectorUnitSuite) TestGraphConnector_AwaitStatus() {
|
|||||||
gc.wg.Add(1)
|
gc.wg.Add(1)
|
||||||
gc.UpdateStatus(status)
|
gc.UpdateStatus(status)
|
||||||
|
|
||||||
result := gc.AwaitStatus()
|
result := gc.Wait()
|
||||||
require.NotNil(t, result)
|
require.NotNil(t, result)
|
||||||
assert.Nil(t, gc.region, "region")
|
assert.Nil(t, gc.region, "region")
|
||||||
assert.Empty(t, gc.status, "status")
|
assert.Empty(t, gc.status, "status")
|
||||||
assert.Equal(t, 1, result.Folders)
|
assert.Equal(t, 1, result.Folders)
|
||||||
assert.Equal(t, 2, result.Metrics.Objects)
|
assert.Equal(t, 2, result.Objects)
|
||||||
assert.Equal(t, 3, result.Metrics.Successes)
|
assert.Equal(t, 3, result.Successes)
|
||||||
assert.Equal(t, int64(4), result.Metrics.Bytes)
|
assert.Equal(t, int64(4), result.Bytes)
|
||||||
}
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
@ -199,35 +270,6 @@ func (suite *GraphConnectorIntegrationSuite) SetupSuite() {
|
|||||||
tester.LogTimeOfTest(suite.T())
|
tester.LogTimeOfTest(suite.T())
|
||||||
}
|
}
|
||||||
|
|
||||||
// TestSetTenantSites verifies GraphConnector's ability to query
|
|
||||||
// the sites associated with the credentials
|
|
||||||
func (suite *GraphConnectorIntegrationSuite) TestSetTenantSites() {
|
|
||||||
newConnector := GraphConnector{
|
|
||||||
tenant: "test_tenant",
|
|
||||||
Sites: make(map[string]string, 0),
|
|
||||||
credentials: suite.connector.credentials,
|
|
||||||
}
|
|
||||||
|
|
||||||
ctx, flush := tester.NewContext()
|
|
||||||
defer flush()
|
|
||||||
|
|
||||||
t := suite.T()
|
|
||||||
|
|
||||||
service, err := newConnector.createService()
|
|
||||||
require.NoError(t, err, clues.ToCore(err))
|
|
||||||
|
|
||||||
newConnector.Service = service
|
|
||||||
assert.Equal(t, 0, len(newConnector.Sites))
|
|
||||||
|
|
||||||
err = newConnector.setTenantSites(ctx, fault.New(true))
|
|
||||||
assert.NoError(t, err, clues.ToCore(err))
|
|
||||||
assert.Less(t, 0, len(newConnector.Sites))
|
|
||||||
|
|
||||||
for _, site := range newConnector.Sites {
|
|
||||||
assert.NotContains(t, "sharepoint.com/personal/", site)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (suite *GraphConnectorIntegrationSuite) TestRestoreFailsBadService() {
|
func (suite *GraphConnectorIntegrationSuite) TestRestoreFailsBadService() {
|
||||||
ctx, flush := tester.NewContext()
|
ctx, flush := tester.NewContext()
|
||||||
defer flush()
|
defer flush()
|
||||||
@ -241,7 +283,7 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreFailsBadService() {
|
|||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
deets, err := suite.connector.RestoreDataCollections(
|
deets, err := suite.connector.ConsumeRestoreCollections(
|
||||||
ctx,
|
ctx,
|
||||||
version.Backup,
|
version.Backup,
|
||||||
acct,
|
acct,
|
||||||
@ -256,10 +298,10 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreFailsBadService() {
|
|||||||
assert.Error(t, err, clues.ToCore(err))
|
assert.Error(t, err, clues.ToCore(err))
|
||||||
assert.NotNil(t, deets)
|
assert.NotNil(t, deets)
|
||||||
|
|
||||||
status := suite.connector.AwaitStatus()
|
status := suite.connector.Wait()
|
||||||
assert.Equal(t, 0, status.Metrics.Objects)
|
assert.Equal(t, 0, status.Objects)
|
||||||
assert.Equal(t, 0, status.Folders)
|
assert.Equal(t, 0, status.Folders)
|
||||||
assert.Equal(t, 0, status.Metrics.Successes)
|
assert.Equal(t, 0, status.Successes)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
|
func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
|
||||||
@ -320,7 +362,7 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
|
|||||||
ctx, flush := tester.NewContext()
|
ctx, flush := tester.NewContext()
|
||||||
defer flush()
|
defer flush()
|
||||||
|
|
||||||
deets, err := suite.connector.RestoreDataCollections(
|
deets, err := suite.connector.ConsumeRestoreCollections(
|
||||||
ctx,
|
ctx,
|
||||||
version.Backup,
|
version.Backup,
|
||||||
suite.acct,
|
suite.acct,
|
||||||
@ -335,10 +377,10 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
|
|||||||
require.NoError(t, err, clues.ToCore(err))
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
assert.NotNil(t, deets)
|
assert.NotNil(t, deets)
|
||||||
|
|
||||||
stats := suite.connector.AwaitStatus()
|
stats := suite.connector.Wait()
|
||||||
assert.Zero(t, stats.Metrics.Objects)
|
assert.Zero(t, stats.Objects)
|
||||||
assert.Zero(t, stats.Folders)
|
assert.Zero(t, stats.Folders)
|
||||||
assert.Zero(t, stats.Metrics.Successes)
|
assert.Zero(t, stats.Successes)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -400,7 +442,7 @@ func runRestore(
|
|||||||
|
|
||||||
restoreGC := loadConnector(ctx, t, graph.HTTPClient(graph.NoTimeout()), config.resource)
|
restoreGC := loadConnector(ctx, t, graph.HTTPClient(graph.NoTimeout()), config.resource)
|
||||||
restoreSel := getSelectorWith(t, config.service, config.resourceOwners, true)
|
restoreSel := getSelectorWith(t, config.service, config.resourceOwners, true)
|
||||||
deets, err := restoreGC.RestoreDataCollections(
|
deets, err := restoreGC.ConsumeRestoreCollections(
|
||||||
ctx,
|
ctx,
|
||||||
backupVersion,
|
backupVersion,
|
||||||
config.acct,
|
config.acct,
|
||||||
@ -412,11 +454,11 @@ func runRestore(
|
|||||||
require.NoError(t, err, clues.ToCore(err))
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
assert.NotNil(t, deets)
|
assert.NotNil(t, deets)
|
||||||
|
|
||||||
status := restoreGC.AwaitStatus()
|
status := restoreGC.Wait()
|
||||||
runTime := time.Since(start)
|
runTime := time.Since(start)
|
||||||
|
|
||||||
assert.Equal(t, numRestoreItems, status.Metrics.Objects, "restored status.Metrics.Objects")
|
assert.Equal(t, numRestoreItems, status.Objects, "restored status.Objects")
|
||||||
assert.Equal(t, numRestoreItems, status.Metrics.Successes, "restored status.Metrics.Successes")
|
assert.Equal(t, numRestoreItems, status.Successes, "restored status.Successes")
|
||||||
assert.Len(
|
assert.Len(
|
||||||
t,
|
t,
|
||||||
deets.Entries,
|
deets.Entries,
|
||||||
@ -457,9 +499,10 @@ func runBackupAndCompare(
|
|||||||
t.Logf("Selective backup of %s\n", backupSel)
|
t.Logf("Selective backup of %s\n", backupSel)
|
||||||
|
|
||||||
start := time.Now()
|
start := time.Now()
|
||||||
dcs, excludes, err := backupGC.DataCollections(
|
dcs, excludes, err := backupGC.ProduceBackupCollections(
|
||||||
ctx,
|
ctx,
|
||||||
backupSel,
|
backupSel,
|
||||||
|
backupSel,
|
||||||
nil,
|
nil,
|
||||||
config.opts,
|
config.opts,
|
||||||
fault.New(true))
|
fault.New(true))
|
||||||
@ -480,12 +523,12 @@ func runBackupAndCompare(
|
|||||||
config.dest,
|
config.dest,
|
||||||
config.opts.RestorePermissions)
|
config.opts.RestorePermissions)
|
||||||
|
|
||||||
status := backupGC.AwaitStatus()
|
status := backupGC.Wait()
|
||||||
|
|
||||||
assert.Equalf(t, totalItems+skipped, status.Metrics.Objects,
|
assert.Equalf(t, totalItems+skipped, status.Objects,
|
||||||
"backup status.Metrics.Objects; wanted %d items + %d skipped", totalItems, skipped)
|
"backup status.Objects; wanted %d items + %d skipped", totalItems, skipped)
|
||||||
assert.Equalf(t, totalItems+skipped, status.Metrics.Successes,
|
assert.Equalf(t, totalItems+skipped, status.Successes,
|
||||||
"backup status.Metrics.Successes; wanted %d items + %d skipped", totalItems, skipped)
|
"backup status.Successes; wanted %d items + %d skipped", totalItems, skipped)
|
||||||
}
|
}
|
||||||
|
|
||||||
func runRestoreBackupTest(
|
func runRestoreBackupTest(
|
||||||
@ -964,7 +1007,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
|
|||||||
)
|
)
|
||||||
|
|
||||||
restoreGC := loadConnector(ctx, t, graph.HTTPClient(graph.NoTimeout()), test.resource)
|
restoreGC := loadConnector(ctx, t, graph.HTTPClient(graph.NoTimeout()), test.resource)
|
||||||
deets, err := restoreGC.RestoreDataCollections(
|
deets, err := restoreGC.ConsumeRestoreCollections(
|
||||||
ctx,
|
ctx,
|
||||||
version.Backup,
|
version.Backup,
|
||||||
suite.acct,
|
suite.acct,
|
||||||
@ -979,12 +1022,12 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
|
|||||||
require.NoError(t, err, clues.ToCore(err))
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
require.NotNil(t, deets)
|
require.NotNil(t, deets)
|
||||||
|
|
||||||
status := restoreGC.AwaitStatus()
|
status := restoreGC.Wait()
|
||||||
// Always just 1 because it's just 1 collection.
|
// Always just 1 because it's just 1 collection.
|
||||||
assert.Equal(t, totalItems, status.Metrics.Objects, "status.Metrics.Objects")
|
assert.Equal(t, totalItems, status.Objects, "status.Objects")
|
||||||
assert.Equal(t, totalItems, status.Metrics.Successes, "status.Metrics.Successes")
|
assert.Equal(t, totalItems, status.Successes, "status.Successes")
|
||||||
assert.Len(
|
assert.Equal(
|
||||||
t, deets.Entries, totalItems,
|
t, totalItems, len(deets.Entries),
|
||||||
"details entries contains same item count as total successful items restored")
|
"details entries contains same item count as total successful items restored")
|
||||||
|
|
||||||
t.Log("Restore complete")
|
t.Log("Restore complete")
|
||||||
@ -996,9 +1039,10 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
|
|||||||
backupSel := backupSelectorForExpected(t, test.service, expectedDests)
|
backupSel := backupSelectorForExpected(t, test.service, expectedDests)
|
||||||
t.Log("Selective backup of", backupSel)
|
t.Log("Selective backup of", backupSel)
|
||||||
|
|
||||||
dcs, excludes, err := backupGC.DataCollections(
|
dcs, excludes, err := backupGC.ProduceBackupCollections(
|
||||||
ctx,
|
ctx,
|
||||||
backupSel,
|
backupSel,
|
||||||
|
backupSel,
|
||||||
nil,
|
nil,
|
||||||
control.Options{
|
control.Options{
|
||||||
RestorePermissions: true,
|
RestorePermissions: true,
|
||||||
@ -1023,9 +1067,9 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
|
|||||||
control.RestoreDestination{},
|
control.RestoreDestination{},
|
||||||
true)
|
true)
|
||||||
|
|
||||||
status := backupGC.AwaitStatus()
|
status := backupGC.Wait()
|
||||||
assert.Equal(t, allItems+skipped, status.Metrics.Objects, "status.Metrics.Objects")
|
assert.Equal(t, allItems+skipped, status.Objects, "status.Objects")
|
||||||
assert.Equal(t, allItems+skipped, status.Metrics.Successes, "status.Metrics.Successes")
|
assert.Equal(t, allItems+skipped, status.Successes, "status.Successes")
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -1147,9 +1191,10 @@ func (suite *GraphConnectorIntegrationSuite) TestBackup_CreatesPrefixCollections
|
|||||||
start = time.Now()
|
start = time.Now()
|
||||||
)
|
)
|
||||||
|
|
||||||
dcs, excludes, err := backupGC.DataCollections(
|
dcs, excludes, err := backupGC.ProduceBackupCollections(
|
||||||
ctx,
|
ctx,
|
||||||
backupSel,
|
backupSel,
|
||||||
|
backupSel,
|
||||||
nil,
|
nil,
|
||||||
control.Options{
|
control.Options{
|
||||||
RestorePermissions: false,
|
RestorePermissions: false,
|
||||||
@ -1191,7 +1236,7 @@ func (suite *GraphConnectorIntegrationSuite) TestBackup_CreatesPrefixCollections
|
|||||||
|
|
||||||
assert.ElementsMatch(t, test.categories, foundCategories)
|
assert.ElementsMatch(t, test.categories, foundCategories)
|
||||||
|
|
||||||
backupGC.AwaitStatus()
|
backupGC.Wait()
|
||||||
|
|
||||||
assert.NoError(t, errs.Failure())
|
assert.NoError(t, errs.Failure())
|
||||||
})
|
})
|
||||||
|
|||||||
56
src/internal/connector/mockconnector/mock_data_connector.go
Normal file
@ -0,0 +1,56 @@
|
|||||||
|
package mockconnector
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
"github.com/alcionai/corso/src/internal/common"
|
||||||
|
"github.com/alcionai/corso/src/internal/data"
|
||||||
|
"github.com/alcionai/corso/src/pkg/account"
|
||||||
|
"github.com/alcionai/corso/src/pkg/backup/details"
|
||||||
|
"github.com/alcionai/corso/src/pkg/control"
|
||||||
|
"github.com/alcionai/corso/src/pkg/fault"
|
||||||
|
"github.com/alcionai/corso/src/pkg/selectors"
|
||||||
|
)
|
||||||
|
|
||||||
|
type GraphConnector struct {
|
||||||
|
Collections []data.BackupCollection
|
||||||
|
Exclude map[string]map[string]struct{}
|
||||||
|
|
||||||
|
Deets *details.Details
|
||||||
|
|
||||||
|
Err error
|
||||||
|
|
||||||
|
Stats data.CollectionStats
|
||||||
|
}
|
||||||
|
|
||||||
|
func (gc GraphConnector) ProduceBackupCollections(
|
||||||
|
_ context.Context,
|
||||||
|
_ common.IDNamer,
|
||||||
|
_ selectors.Selector,
|
||||||
|
_ []data.RestoreCollection,
|
||||||
|
_ control.Options,
|
||||||
|
_ *fault.Bus,
|
||||||
|
) (
|
||||||
|
[]data.BackupCollection,
|
||||||
|
map[string]map[string]struct{},
|
||||||
|
error,
|
||||||
|
) {
|
||||||
|
return gc.Collections, gc.Exclude, gc.Err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (gc GraphConnector) Wait() *data.CollectionStats {
|
||||||
|
return &gc.Stats
|
||||||
|
}
|
||||||
|
|
||||||
|
func (gc GraphConnector) ConsumeRestoreCollections(
|
||||||
|
_ context.Context,
|
||||||
|
_ int,
|
||||||
|
_ account.Account,
|
||||||
|
_ selectors.Selector,
|
||||||
|
_ control.RestoreDestination,
|
||||||
|
_ control.Options,
|
||||||
|
_ []data.RestoreCollection,
|
||||||
|
_ *fault.Bus,
|
||||||
|
) (*details.Details, error) {
|
||||||
|
return gc.Deets, gc.Err
|
||||||
|
}
|
||||||
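The new mock can stand in for the graph connector wherever the backup-producer seam (introduced later in this change set) is expected; a hedged sketch of seeding it in a test, with a hypothetical test name:

package mockconnector_test

import (
	"testing"

	"github.com/alcionai/corso/src/internal/connector/mockconnector"
	"github.com/alcionai/corso/src/internal/data"
)

// TestMockWait is illustrative only: the mock returns whatever collections,
// details, error, and stats it was seeded with.
func TestMockWait(t *testing.T) {
	mock := mockconnector.GraphConnector{
		Stats: data.CollectionStats{Folders: 1, Objects: 2, Successes: 2},
	}

	if got := mock.Wait(); got.Objects != 2 {
		t.Errorf("expected 2 objects, got %d", got.Objects)
	}
}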
16
src/internal/data/metrics.go
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
package data
|
||||||
|
|
||||||
|
type CollectionStats struct {
|
||||||
|
Folders int
|
||||||
|
Objects, Successes int
|
||||||
|
Bytes int64
|
||||||
|
Details string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (cs CollectionStats) IsZero() bool {
|
||||||
|
return cs.Folders+cs.Objects+cs.Successes+int(cs.Bytes) == 0
|
||||||
|
}
|
||||||
|
|
||||||
|
func (cs CollectionStats) String() string {
|
||||||
|
return cs.Details
|
||||||
|
}
|
||||||
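IsZero is what later lets the backup operation mark a run as NoData; a small runnable illustration of the zero check (the example function follows Go's testable-example convention and is not part of this diff):

package data_test

import (
	"fmt"

	"github.com/alcionai/corso/src/internal/data"
)

// ExampleCollectionStats: a zero-valued stats struct reports IsZero; any
// non-zero folder, object, success, or byte count flips it to false.
func ExampleCollectionStats() {
	var cs data.CollectionStats
	fmt.Println(cs.IsZero())

	cs.Successes = 3
	fmt.Println(cs.IsZero())
	// Output:
	// true
	// false
}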
@ -124,13 +124,13 @@ type PrevRefs struct {
|
|||||||
Location path.Path
|
Location path.Path
|
||||||
}
|
}
|
||||||
|
|
||||||
// BackupCollections takes a set of collections and creates a kopia snapshot
|
// ConsumeBackupCollections takes a set of collections and creates a kopia snapshot
|
||||||
// with the data that they contain. previousSnapshots is used for incremental
|
// with the data that they contain. previousSnapshots is used for incremental
|
||||||
// backups and should represent the base snapshot from which metadata is sourced
|
// backups and should represent the base snapshot from which metadata is sourced
|
||||||
// from as well as any incomplete snapshot checkpoints that may contain more
|
// from as well as any incomplete snapshot checkpoints that may contain more
|
||||||
// recent data than the base snapshot. The absence of previousSnapshots causes a
|
// recent data than the base snapshot. The absence of previousSnapshots causes a
|
||||||
// complete backup of all data.
|
// complete backup of all data.
|
||||||
func (w Wrapper) BackupCollections(
|
func (w Wrapper) ConsumeBackupCollections(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
previousSnapshots []IncrementalBase,
|
previousSnapshots []IncrementalBase,
|
||||||
collections []data.BackupCollection,
|
collections []data.BackupCollection,
|
||||||
@ -143,7 +143,7 @@ func (w Wrapper) BackupCollections(
|
|||||||
return nil, nil, nil, clues.Stack(errNotConnected).WithClues(ctx)
|
return nil, nil, nil, clues.Stack(errNotConnected).WithClues(ctx)
|
||||||
}
|
}
|
||||||
|
|
||||||
ctx, end := diagnostics.Span(ctx, "kopia:backupCollections")
|
ctx, end := diagnostics.Span(ctx, "kopia:consumeBackupCollections")
|
||||||
defer end()
|
defer end()
|
||||||
|
|
||||||
if len(collections) == 0 && len(globalExcludeSet) == 0 {
|
if len(collections) == 0 && len(globalExcludeSet) == 0 {
|
||||||
|
|||||||
@ -276,7 +276,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
|
|||||||
suite.Run(test.name, func() {
|
suite.Run(test.name, func() {
|
||||||
t := suite.T()
|
t := suite.T()
|
||||||
|
|
||||||
stats, deets, _, err := suite.w.BackupCollections(
|
stats, deets, _, err := suite.w.ConsumeBackupCollections(
|
||||||
suite.ctx,
|
suite.ctx,
|
||||||
prevSnaps,
|
prevSnaps,
|
||||||
collections,
|
collections,
|
||||||
@ -423,7 +423,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_NoDetailsForMeta() {
|
|||||||
t := suite.T()
|
t := suite.T()
|
||||||
collections := test.cols()
|
collections := test.cols()
|
||||||
|
|
||||||
stats, deets, prevShortRefs, err := suite.w.BackupCollections(
|
stats, deets, prevShortRefs, err := suite.w.ConsumeBackupCollections(
|
||||||
suite.ctx,
|
suite.ctx,
|
||||||
prevSnaps,
|
prevSnaps,
|
||||||
collections,
|
collections,
|
||||||
@ -525,7 +525,7 @@ func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() {
|
|||||||
fp2, err := suite.storePath2.Append(dc2.Names[0], true)
|
fp2, err := suite.storePath2.Append(dc2.Names[0], true)
|
||||||
require.NoError(t, err, clues.ToCore(err))
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
|
|
||||||
stats, _, _, err := w.BackupCollections(
|
stats, _, _, err := w.ConsumeBackupCollections(
|
||||||
ctx,
|
ctx,
|
||||||
nil,
|
nil,
|
||||||
[]data.BackupCollection{dc1, dc2},
|
[]data.BackupCollection{dc1, dc2},
|
||||||
@ -644,7 +644,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
stats, deets, _, err := suite.w.BackupCollections(
|
stats, deets, _, err := suite.w.ConsumeBackupCollections(
|
||||||
suite.ctx,
|
suite.ctx,
|
||||||
nil,
|
nil,
|
||||||
collections,
|
collections,
|
||||||
@ -706,7 +706,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollectionsHandlesNoCollections()
|
|||||||
ctx, flush := tester.NewContext()
|
ctx, flush := tester.NewContext()
|
||||||
defer flush()
|
defer flush()
|
||||||
|
|
||||||
s, d, _, err := suite.w.BackupCollections(
|
s, d, _, err := suite.w.ConsumeBackupCollections(
|
||||||
ctx,
|
ctx,
|
||||||
nil,
|
nil,
|
||||||
test.collections,
|
test.collections,
|
||||||
@ -866,7 +866,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) SetupTest() {
|
|||||||
tags[k] = ""
|
tags[k] = ""
|
||||||
}
|
}
|
||||||
|
|
||||||
stats, deets, _, err := suite.w.BackupCollections(
|
stats, deets, _, err := suite.w.ConsumeBackupCollections(
|
||||||
suite.ctx,
|
suite.ctx,
|
||||||
nil,
|
nil,
|
||||||
collections,
|
collections,
|
||||||
@ -1018,7 +1018,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
stats, _, _, err := suite.w.BackupCollections(
|
stats, _, _, err := suite.w.ConsumeBackupCollections(
|
||||||
suite.ctx,
|
suite.ctx,
|
||||||
[]IncrementalBase{
|
[]IncrementalBase{
|
||||||
{
|
{
|
||||||
|
|||||||
@ -9,8 +9,6 @@ import (
|
|||||||
|
|
||||||
"github.com/alcionai/corso/src/internal/common"
|
"github.com/alcionai/corso/src/internal/common"
|
||||||
"github.com/alcionai/corso/src/internal/common/crash"
|
"github.com/alcionai/corso/src/internal/common/crash"
|
||||||
"github.com/alcionai/corso/src/internal/connector"
|
|
||||||
"github.com/alcionai/corso/src/internal/connector/support"
|
|
||||||
"github.com/alcionai/corso/src/internal/data"
|
"github.com/alcionai/corso/src/internal/data"
|
||||||
"github.com/alcionai/corso/src/internal/diagnostics"
|
"github.com/alcionai/corso/src/internal/diagnostics"
|
||||||
"github.com/alcionai/corso/src/internal/events"
|
"github.com/alcionai/corso/src/internal/events"
|
||||||
@ -34,14 +32,14 @@ import (
|
|||||||
type BackupOperation struct {
|
type BackupOperation struct {
|
||||||
operation
|
operation
|
||||||
|
|
||||||
ResourceOwner string `json:"resourceOwner"`
|
ResourceOwner common.IDNamer
|
||||||
ResourceOwnerName string `json:"resourceOwnerName"`
|
|
||||||
|
|
||||||
Results BackupResults `json:"results"`
|
Results BackupResults `json:"results"`
|
||||||
Selectors selectors.Selector `json:"selectors"`
|
Selectors selectors.Selector `json:"selectors"`
|
||||||
Version string `json:"version"`
|
Version string `json:"version"`
|
||||||
|
|
||||||
account account.Account
|
account account.Account
|
||||||
|
bp BackupProducer
|
||||||
|
|
||||||
// when true, this allows for incremental backups instead of full data pulls
|
// when true, this allows for incremental backups instead of full data pulls
|
||||||
incremental bool
|
incremental bool
|
||||||
@ -60,24 +58,19 @@ func NewBackupOperation(
|
|||||||
opts control.Options,
|
opts control.Options,
|
||||||
kw *kopia.Wrapper,
|
kw *kopia.Wrapper,
|
||||||
sw *store.Wrapper,
|
sw *store.Wrapper,
|
||||||
gc *connector.GraphConnector,
|
bp BackupProducer,
|
||||||
acct account.Account,
|
acct account.Account,
|
||||||
selector selectors.Selector,
|
selector selectors.Selector,
|
||||||
ownerName string,
|
owner common.IDNamer,
|
||||||
bus events.Eventer,
|
bus events.Eventer,
|
||||||
) (BackupOperation, error) {
|
) (BackupOperation, error) {
|
||||||
op := BackupOperation{
|
op := BackupOperation{
|
||||||
operation: newOperation(opts, bus, kw, sw, gc),
|
operation: newOperation(opts, bus, kw, sw),
|
||||||
ResourceOwner: selector.DiscreteOwner,
|
ResourceOwner: owner,
|
||||||
ResourceOwnerName: ownerName,
|
Selectors: selector,
|
||||||
Selectors: selector,
|
Version: "v0",
|
||||||
Version: "v0",
|
account: acct,
|
||||||
account: acct,
|
incremental: useIncrementalBackup(selector, opts),
|
||||||
incremental: useIncrementalBackup(selector, opts),
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(ownerName) == 0 {
|
|
||||||
op.ResourceOwnerName = op.ResourceOwner
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := op.validate(); err != nil {
|
if err := op.validate(); err != nil {
|
||||||
@ -88,10 +81,18 @@ func NewBackupOperation(
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (op BackupOperation) validate() error {
|
func (op BackupOperation) validate() error {
|
||||||
if len(op.ResourceOwner) == 0 {
|
if op.ResourceOwner == nil {
|
||||||
return clues.New("backup requires a resource owner")
|
return clues.New("backup requires a resource owner")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if len(op.ResourceOwner.ID()) == 0 {
|
||||||
|
return clues.New("backup requires a resource owner with a populated ID")
|
||||||
|
}
|
||||||
|
|
||||||
|
if op.bp == nil {
|
||||||
|
return clues.New("missing backup producer")
|
||||||
|
}
|
||||||
|
|
||||||
return op.operation.validate()
|
return op.operation.validate()
|
||||||
}
|
}
|
||||||
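Taken together with the signature change above, constructing an operation now looks roughly like the test wiring further down, where the selector doubles as the owner and a mock fills the producer seam. A hedged sketch: kw, sw, acct, sel, and bus are assumed to be prepared by the caller, and the package name plus the kopia, store, and events import paths are assumptions.

package operations

import (
	"context"

	"github.com/alcionai/corso/src/internal/connector/mockconnector"
	"github.com/alcionai/corso/src/internal/events"
	"github.com/alcionai/corso/src/internal/kopia"
	"github.com/alcionai/corso/src/pkg/account"
	"github.com/alcionai/corso/src/pkg/control"
	"github.com/alcionai/corso/src/pkg/selectors"
	"github.com/alcionai/corso/src/pkg/store"
)

// newOpSketch is hypothetical wiring: the mock connector satisfies the
// BackupProducer parameter, and the selector is passed twice, once as the
// backup scope and once as the resource owner (it satisfies common.IDNamer).
func newOpSketch(
	ctx context.Context,
	kw *kopia.Wrapper,
	sw *store.Wrapper,
	acct account.Account,
	sel selectors.Selector,
	bus events.Eventer,
) (BackupOperation, error) {
	bp := &mockconnector.GraphConnector{}

	return NewBackupOperation(ctx, control.Options{}, kw, sw, bp, acct, sel, sel, bus)
}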
|
|
||||||
@ -101,7 +102,7 @@ func (op BackupOperation) validate() error {
|
|||||||
// get populated asynchronously.
|
// get populated asynchronously.
|
||||||
type backupStats struct {
|
type backupStats struct {
|
||||||
k *kopia.BackupStats
|
k *kopia.BackupStats
|
||||||
gc *support.ConnectorOperationStatus
|
gc *data.CollectionStats
|
||||||
resourceCount int
|
resourceCount int
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -160,7 +161,7 @@ func (op *BackupOperation) Run(ctx context.Context) (err error) {
|
|||||||
// Execution
|
// Execution
|
||||||
// -----
|
// -----
|
||||||
|
|
||||||
observe.Message(ctx, observe.Safe("Backing Up"), observe.Bullet, observe.PII(op.ResourceOwner))
|
observe.Message(ctx, observe.Safe("Backing Up"), observe.Bullet, observe.PII(op.ResourceOwner.Name()))
|
||||||
|
|
||||||
deets, err := op.do(
|
deets, err := op.do(
|
||||||
ctx,
|
ctx,
|
||||||
@ -243,14 +244,21 @@ func (op *BackupOperation) do(
|
|||||||
return nil, clues.Wrap(err, "producing manifests and metadata")
|
return nil, clues.Wrap(err, "producing manifests and metadata")
|
||||||
}
|
}
|
||||||
|
|
||||||
cs, excludes, err := produceBackupDataCollections(ctx, op.gc, op.Selectors, mdColls, op.Options, op.Errors)
|
cs, excludes, err := produceBackupDataCollections(
|
||||||
|
ctx,
|
||||||
|
op.bp,
|
||||||
|
op.ResourceOwner,
|
||||||
|
op.Selectors,
|
||||||
|
mdColls,
|
||||||
|
op.Options,
|
||||||
|
op.Errors)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, clues.Wrap(err, "producing backup data collections")
|
return nil, clues.Wrap(err, "producing backup data collections")
|
||||||
}
|
}
|
||||||
|
|
||||||
ctx = clues.Add(ctx, "coll_count", len(cs))
|
ctx = clues.Add(ctx, "coll_count", len(cs))
|
||||||
|
|
||||||
writeStats, deets, toMerge, err := consumeBackupDataCollections(
|
writeStats, deets, toMerge, err := consumeBackupCollections(
|
||||||
ctx,
|
ctx,
|
||||||
op.kopia,
|
op.kopia,
|
 		op.account.ID(),
@@ -279,9 +287,9 @@ func (op *BackupOperation) do(
 		return nil, clues.Wrap(err, "merging details")
 	}
 
-	opStats.gc = op.gc.AwaitStatus()
+	opStats.gc = op.bp.Wait()
 
-	logger.Ctx(ctx).Debug(op.gc.PrintableStatus())
+	logger.Ctx(ctx).Debug(opStats.gc)
 
 	return deets, nil
 }
@@ -309,10 +317,25 @@ func useIncrementalBackup(sel selectors.Selector, opts control.Options) bool {
 // Producer funcs
 // ---------------------------------------------------------------------------
 
+type BackupProducer interface {
+	ProduceBackupCollections(
+		ctx context.Context,
+		resourceOwner common.IDNamer,
+		sels selectors.Selector,
+		metadata []data.RestoreCollection,
+		ctrlOpts control.Options,
+		errs *fault.Bus,
+	) ([]data.BackupCollection, map[string]map[string]struct{}, error)
+	// TODO: ConnectorOperationStatus should be replaced with something
+	// more generic.
+	Wait() *data.CollectionStats
+}
+
 // calls the producer to generate collections of data to backup
 func produceBackupDataCollections(
 	ctx context.Context,
-	gc *connector.GraphConnector,
+	bp BackupProducer,
+	resourceOwner common.IDNamer,
 	sel selectors.Selector,
 	metadata []data.RestoreCollection,
 	ctrlOpts control.Options,
@@ -325,15 +348,15 @@ func produceBackupDataCollections(
 		closer()
 	}()
 
-	return gc.DataCollections(ctx, sel, metadata, ctrlOpts, errs)
+	return bp.ProduceBackupCollections(ctx, resourceOwner, sel, metadata, ctrlOpts, errs)
 }
 
 // ---------------------------------------------------------------------------
 // Consumer funcs
 // ---------------------------------------------------------------------------
 
-type backuper interface {
-	BackupCollections(
+type BackupConsumer interface {
+	ConsumeBackupCollections(
 		ctx context.Context,
 		bases []kopia.IncrementalBase,
 		cs []data.BackupCollection,
@@ -389,9 +412,9 @@ func builderFromReason(ctx context.Context, tenant string, r kopia.Reason) (*pat
 }
 
 // calls kopia to backup the collections of data
-func consumeBackupDataCollections(
+func consumeBackupCollections(
 	ctx context.Context,
-	bu backuper,
+	bc BackupConsumer,
 	tenantID string,
 	reasons []kopia.Reason,
 	mans []*kopia.ManifestEntry,
@@ -465,7 +488,7 @@ func consumeBackupDataCollections(
 			"base_backup_id", mbID)
 	}
 
-	kopiaStats, deets, itemsSourcedFromBase, err := bu.BackupCollections(
+	kopiaStats, deets, itemsSourcedFromBase, err := bc.ConsumeBackupCollections(
 		ctx,
 		bases,
 		cs,
@@ -663,11 +686,11 @@ func (op *BackupOperation) persistResults(
 		return clues.New("backup population never completed")
 	}
 
-	if op.Status != Failed && opStats.gc.Metrics.Successes == 0 {
+	if op.Status != Failed && opStats.gc.IsZero() {
 		op.Status = NoData
 	}
 
-	op.Results.ItemsRead = opStats.gc.Metrics.Successes
+	op.Results.ItemsRead = opStats.gc.Successes
 
 	return op.Errors.Failure()
 }
@@ -714,8 +737,8 @@ func (op *BackupOperation) createBackupModels(
 		op.Status.String(),
 		backupID,
 		op.Selectors,
-		op.ResourceOwner,
-		op.ResourceOwnerName,
+		op.ResourceOwner.ID(),
+		op.ResourceOwner.Name(),
 		op.Results.ReadWrites,
 		op.Results.StartAndEndTime,
 		op.Errors.Errors())
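The new `BackupProducer` interface above is the seam that lets a backup operation run against anything that can enumerate collections, not only `*connector.GraphConnector`. As a hedged illustration only — the real test double lives in the `mockconnector` package these commits reference; the package clause, import paths, and zero-value returns below are assumptions, not part of this change — a minimal conforming fake could look like this:

```go
package operations_test

import (
	"context"

	"github.com/alcionai/corso/src/internal/common"
	"github.com/alcionai/corso/src/internal/data"
	"github.com/alcionai/corso/src/pkg/control"
	"github.com/alcionai/corso/src/pkg/fault"
	"github.com/alcionai/corso/src/pkg/selectors"
)

// fakeBackupProducer is a no-op stand-in for the BackupProducer seam,
// usable where a *connector.GraphConnector was previously required.
type fakeBackupProducer struct{}

func (fakeBackupProducer) ProduceBackupCollections(
	ctx context.Context,
	resourceOwner common.IDNamer,
	sels selectors.Selector,
	metadata []data.RestoreCollection,
	ctrlOpts control.Options,
	errs *fault.Bus,
) ([]data.BackupCollection, map[string]map[string]struct{}, error) {
	// a real producer enumerates M365 data here; the fake produces nothing
	return nil, nil, nil
}

func (fakeBackupProducer) Wait() *data.CollectionStats {
	// zero stats: persistResults will mark the run as NoData via IsZero()
	return &data.CollectionStats{}
}
```

Whatever `Wait` returns flows straight into `persistResults`, which now reads `Successes` and `IsZero` off `*data.CollectionStats` instead of the old connector status.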
@@ -152,7 +152,7 @@ func newTestBackupOp(
 
 	opts.ToggleFeatures = featureToggles
 
-	bo, err := NewBackupOperation(ctx, opts, kw, sw, gc, acct, sel, sel.DiscreteOwner, bus)
+	bo, err := NewBackupOperation(ctx, opts, kw, sw, gc, acct, sel, sel, bus)
 	if !assert.NoError(t, err, clues.ToCore(err)) {
 		closer()
 		t.FailNow()
@@ -383,7 +383,7 @@ func generateContainerOfItems(
 		dest,
 		collections)
 
-	deets, err := gc.RestoreDataCollections(
+	deets, err := gc.ConsumeRestoreCollections(
 		ctx,
 		backupVersion,
 		acct,
@@ -394,7 +394,9 @@ func generateContainerOfItems(
 		fault.New(true))
 	require.NoError(t, err, clues.ToCore(err))
 
-	gc.AwaitStatus()
+	// have to wait here, both to ensure the process
+	// finishes, and also to clean up the gc status
+	gc.Wait()
 
 	return deets
 }
@@ -539,7 +541,7 @@ func (suite *BackupOpIntegrationSuite) SetupSuite() {
 func (suite *BackupOpIntegrationSuite) TestNewBackupOperation() {
 	kw := &kopia.Wrapper{}
 	sw := &store.Wrapper{}
-	gc := &connector.GraphConnector{}
+	gc := &mockconnector.GraphConnector{}
 	acct := tester.NewM365Account(suite.T())
 
 	table := []struct {
@@ -547,7 +549,7 @@ func (suite *BackupOpIntegrationSuite) TestNewBackupOperation() {
 		opts     control.Options
 		kw       *kopia.Wrapper
 		sw       *store.Wrapper
-		gc       *connector.GraphConnector
+		bp       BackupProducer
 		acct     account.Account
 		targets  []string
 		errCheck assert.ErrorAssertionFunc
@@ -555,22 +557,24 @@ func (suite *BackupOpIntegrationSuite) TestNewBackupOperation() {
 		{"good", control.Options{}, kw, sw, gc, acct, nil, assert.NoError},
 		{"missing kopia", control.Options{}, nil, sw, gc, acct, nil, assert.Error},
 		{"missing modelstore", control.Options{}, kw, nil, gc, acct, nil, assert.Error},
-		{"missing graphconnector", control.Options{}, kw, sw, nil, acct, nil, assert.Error},
+		{"missing backup producer", control.Options{}, kw, sw, nil, acct, nil, assert.Error},
 	}
 	for _, test := range table {
 		suite.Run(test.name, func() {
 			ctx, flush := tester.NewContext()
 			defer flush()
 
+			sel := selectors.Selector{DiscreteOwner: "test"}
+
 			_, err := NewBackupOperation(
 				ctx,
 				test.opts,
 				test.kw,
 				test.sw,
-				test.gc,
+				test.bp,
 				test.acct,
-				selectors.Selector{DiscreteOwner: "test"},
-				"test-name",
+				sel,
+				sel,
 				evmock.NewBus())
 			test.errCheck(suite.T(), err, clues.ToCore(err))
 		})
@@ -14,8 +14,7 @@ import (
 	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"
 
-	"github.com/alcionai/corso/src/internal/connector"
-	"github.com/alcionai/corso/src/internal/connector/support"
+	"github.com/alcionai/corso/src/internal/connector/mockconnector"
 	"github.com/alcionai/corso/src/internal/data"
 	evmock "github.com/alcionai/corso/src/internal/events/mock"
 	"github.com/alcionai/corso/src/internal/kopia"
@@ -85,9 +84,9 @@ func checkPaths(t *testing.T, expected, got []path.Path) {
 	assert.ElementsMatch(t, expected, got)
 }
 
-// ----- backup producer
+// ----- backup consumer
 
-type mockBackuper struct {
+type mockBackupConsumer struct {
 	checkFunc func(
 		bases []kopia.IncrementalBase,
 		cs []data.BackupCollection,
@@ -95,7 +94,7 @@ type mockBackuper struct {
 		buildTreeWithBase bool)
 }
 
-func (mbu mockBackuper) BackupCollections(
+func (mbu mockBackupConsumer) ConsumeBackupCollections(
 	ctx context.Context,
 	bases []kopia.IncrementalBase,
 	cs []data.BackupCollection,
@@ -360,7 +359,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_PersistResults() {
 	var (
 		kw   = &kopia.Wrapper{}
 		sw   = &store.Wrapper{}
-		gc   = &connector.GraphConnector{}
+		gc   = &mockconnector.GraphConnector{}
 		acct = account.Account{}
 		now  = time.Now()
 	)
@@ -381,9 +380,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_PersistResults() {
 					TotalHashedBytes:   1,
 					TotalUploadedBytes: 1,
 				},
-				gc: &support.ConnectorOperationStatus{
-					Metrics: support.CollectionMetrics{Successes: 1},
-				},
+				gc: &data.CollectionStats{Successes: 1},
 			},
 		},
 		{
@@ -392,7 +389,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_PersistResults() {
 			fail: assert.AnError,
 			stats: backupStats{
 				k:  &kopia.BackupStats{},
-				gc: &support.ConnectorOperationStatus{},
+				gc: &data.CollectionStats{},
 			},
 		},
 		{
@@ -400,7 +397,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_PersistResults() {
 			expectErr: assert.NoError,
 			stats: backupStats{
 				k:  &kopia.BackupStats{},
-				gc: &support.ConnectorOperationStatus{},
+				gc: &data.CollectionStats{},
 			},
 		},
 	}
@@ -418,7 +415,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_PersistResults() {
 				gc,
 				acct,
 				sel,
-				sel.DiscreteOwner,
+				sel,
 				evmock.NewBus())
 			require.NoError(t, err, clues.ToCore(err))
 
@@ -427,7 +424,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_PersistResults() {
 			test.expectErr(t, op.persistResults(now, &test.stats))
 
 			assert.Equal(t, test.expectStatus.String(), op.Status.String(), "status")
-			assert.Equal(t, test.stats.gc.Metrics.Successes, op.Results.ItemsRead, "items read")
+			assert.Equal(t, test.stats.gc.Successes, op.Results.ItemsRead, "items read")
 			assert.Equal(t, test.stats.k.TotalFileCount, op.Results.ItemsWritten, "items written")
 			assert.Equal(t, test.stats.k.TotalHashedBytes, op.Results.BytesRead, "bytes read")
 			assert.Equal(t, test.stats.k.TotalUploadedBytes, op.Results.BytesUploaded, "bytes written")
@@ -564,7 +561,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_ConsumeBackupDataCollections
 	ctx, flush := tester.NewContext()
 	defer flush()
 
-	mbu := &mockBackuper{
+	mbu := &mockBackupConsumer{
 		checkFunc: func(
 			bases []kopia.IncrementalBase,
 			cs []data.BackupCollection,
@@ -576,7 +573,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_ConsumeBackupDataCollections
 	}
 
 	//nolint:errcheck
-	consumeBackupDataCollections(
+	consumeBackupCollections(
 		ctx,
 		mbu,
 		tenant,
@@ -5,7 +5,6 @@ import (
 
 	"github.com/alcionai/clues"
 
-	"github.com/alcionai/corso/src/internal/connector"
 	"github.com/alcionai/corso/src/internal/events"
 	"github.com/alcionai/corso/src/internal/kopia"
 	"github.com/alcionai/corso/src/pkg/control"
@@ -57,7 +56,6 @@ type operation struct {
 	bus   events.Eventer
 	kopia *kopia.Wrapper
 	store *store.Wrapper
-	gc    *connector.GraphConnector
 }
 
 func newOperation(
@@ -65,7 +63,6 @@ func newOperation(
 	bus events.Eventer,
 	kw *kopia.Wrapper,
 	sw *store.Wrapper,
-	gc *connector.GraphConnector,
 ) operation {
 	return operation{
 		CreatedAt: time.Now(),
@@ -75,7 +72,6 @@ func newOperation(
 		bus:       bus,
 		kopia:     kw,
 		store:     sw,
-		gc:        gc,
 
 		Status: InProgress,
 	}
@@ -90,9 +86,5 @@ func (op operation) validate() error {
 		return clues.New("missing modelstore")
 	}
 
-	if op.gc == nil {
-		return clues.New("missing graph connector")
-	}
-
 	return nil
 }
@@ -8,7 +8,6 @@ import (
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/suite"
 
-	"github.com/alcionai/corso/src/internal/connector"
 	"github.com/alcionai/corso/src/internal/events"
 	"github.com/alcionai/corso/src/internal/kopia"
 	"github.com/alcionai/corso/src/internal/tester"
@@ -26,30 +25,27 @@ func TestOperationSuite(t *testing.T) {
 
 func (suite *OperationSuite) TestNewOperation() {
 	t := suite.T()
-	op := newOperation(control.Options{}, events.Bus{}, nil, nil, nil)
+	op := newOperation(control.Options{}, events.Bus{}, nil, nil)
 	assert.Greater(t, op.CreatedAt, time.Time{})
 }
 
func (suite *OperationSuite) TestOperation_Validate() {
 	kwStub := &kopia.Wrapper{}
 	swStub := &store.Wrapper{}
-	gcStub := &connector.GraphConnector{}
 
 	table := []struct {
 		name     string
 		kw       *kopia.Wrapper
 		sw       *store.Wrapper
-		gc       *connector.GraphConnector
 		errCheck assert.ErrorAssertionFunc
 	}{
-		{"good", kwStub, swStub, gcStub, assert.NoError},
-		{"missing kopia wrapper", nil, swStub, gcStub, assert.Error},
-		{"missing store wrapper", kwStub, nil, gcStub, assert.Error},
-		{"missing graph connector", kwStub, swStub, nil, assert.Error},
+		{"good", kwStub, swStub, assert.NoError},
+		{"missing kopia wrapper", nil, swStub, assert.Error},
+		{"missing store wrapper", kwStub, nil, assert.Error},
 	}
 	for _, test := range table {
 		suite.Run(test.name, func() {
-			err := newOperation(control.Options{}, events.Bus{}, test.kw, test.sw, test.gc).validate()
+			err := newOperation(control.Options{}, events.Bus{}, test.kw, test.sw).validate()
 			test.errCheck(suite.T(), err, clues.ToCore(err))
 		})
 	}
@@ -7,12 +7,11 @@ import (
 
 	"github.com/alcionai/clues"
 	"github.com/google/uuid"
+	"github.com/pkg/errors"
 
 	"github.com/alcionai/corso/src/internal/common"
 	"github.com/alcionai/corso/src/internal/common/crash"
-	"github.com/alcionai/corso/src/internal/connector"
 	"github.com/alcionai/corso/src/internal/connector/onedrive"
-	"github.com/alcionai/corso/src/internal/connector/support"
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/diagnostics"
 	"github.com/alcionai/corso/src/internal/events"
@@ -42,6 +41,7 @@ type RestoreOperation struct {
 	Version string `json:"version"`
 
 	account account.Account
+	rc      RestoreConsumer
 }
 
 // RestoreResults aggregate the details of the results of the operation.
@@ -56,7 +56,7 @@ func NewRestoreOperation(
 	opts control.Options,
 	kw *kopia.Wrapper,
 	sw *store.Wrapper,
-	gc *connector.GraphConnector,
+	rc RestoreConsumer,
 	acct account.Account,
 	backupID model.StableID,
 	sel selectors.Selector,
@@ -64,12 +64,13 @@ func NewRestoreOperation(
 	bus events.Eventer,
 ) (RestoreOperation, error) {
 	op := RestoreOperation{
-		operation:   newOperation(opts, bus, kw, sw, gc),
+		operation:   newOperation(opts, bus, kw, sw),
 		BackupID:    backupID,
 		Selectors:   sel,
 		Destination: dest,
 		Version:     "v0",
 		account:     acct,
+		rc:          rc,
 	}
 	if err := op.validate(); err != nil {
 		return RestoreOperation{}, err
@@ -79,6 +80,10 @@ func NewRestoreOperation(
 }
 
 func (op RestoreOperation) validate() error {
+	if op.rc == nil {
+		return clues.New("missing restore consumer")
+	}
+
 	return op.operation.validate()
 }
 
@@ -88,7 +93,7 @@ func (op RestoreOperation) validate() error {
 // get populated asynchronously.
 type restoreStats struct {
 	cs            []data.RestoreCollection
-	gc            *support.ConnectorOperationStatus
+	gc            *data.CollectionStats
 	bytesRead     *stats.ByteCounter
 	resourceCount int
 
@@ -235,12 +240,9 @@ func (op *RestoreOperation) do(
 	opStats.resourceCount = 1
 	opStats.cs = dcs
 
-	restoreComplete, closer := observe.MessageWithCompletion(ctx, observe.Safe("Restoring data"))
-	defer closer()
-	defer close(restoreComplete)
-
-	restoreDetails, err := op.gc.RestoreDataCollections(
+	deets, err = consumeRestoreCollections(
 		ctx,
+		op.rc,
 		bup.Version,
 		op.account,
 		op.Selectors,
@@ -252,13 +254,11 @@ func (op *RestoreOperation) do(
 		return nil, clues.Wrap(err, "restoring collections")
 	}
 
-	restoreComplete <- struct{}{}
+	opStats.gc = op.rc.Wait()
 
-	opStats.gc = op.gc.AwaitStatus()
+	logger.Ctx(ctx).Debug(opStats.gc)
 
-	logger.Ctx(ctx).Debug(op.gc.PrintableStatus())
-
-	return restoreDetails, nil
+	return deets, nil
 }
 
 // persists details and statistics about the restore operation.
@@ -285,11 +285,11 @@ func (op *RestoreOperation) persistResults(
 		return clues.New("restoration never completed")
 	}
 
-	if op.Status != Failed && opStats.gc.Metrics.Successes == 0 {
+	if op.Status != Failed && opStats.gc.IsZero() {
 		op.Status = NoData
 	}
 
-	op.Results.ItemsWritten = opStats.gc.Metrics.Successes
+	op.Results.ItemsWritten = opStats.gc.Successes
 
 	op.bus.Event(
 		ctx,
@@ -312,6 +312,60 @@ func (op *RestoreOperation) persistResults(
 	return op.Errors.Failure()
 }
 
+// ---------------------------------------------------------------------------
+// Restorer funcs
+// ---------------------------------------------------------------------------
+
+type RestoreConsumer interface {
+	ConsumeRestoreCollections(
+		ctx context.Context,
+		backupVersion int,
+		acct account.Account,
+		selector selectors.Selector,
+		dest control.RestoreDestination,
+		opts control.Options,
+		dcs []data.RestoreCollection,
+		errs *fault.Bus,
+	) (*details.Details, error)
+	// TODO: ConnectorOperationStatus should be replaced with something
+	// more generic.
+	Wait() *data.CollectionStats
+}
+
+func consumeRestoreCollections(
+	ctx context.Context,
+	rc RestoreConsumer,
+	backupVersion int,
+	acct account.Account,
+	sel selectors.Selector,
+	dest control.RestoreDestination,
+	opts control.Options,
+	dcs []data.RestoreCollection,
+	errs *fault.Bus,
+) (*details.Details, error) {
+	complete, closer := observe.MessageWithCompletion(ctx, observe.Safe("Restoring data"))
+	defer func() {
+		complete <- struct{}{}
+		close(complete)
+		closer()
+	}()
+
+	deets, err := rc.ConsumeRestoreCollections(
+		ctx,
+		backupVersion,
+		acct,
+		sel,
+		dest,
+		opts,
+		dcs,
+		errs)
+	if err != nil {
+		return nil, errors.Wrap(err, "restoring collections")
+	}
+
+	return deets, nil
+}
+
 // formatDetailsForRestoration reduces the provided detail entries according to the
 // selector specifications.
 func formatDetailsForRestoration(
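`RestoreConsumer` mirrors the producer seam on the restore side, so restore operations can be constructed and validated without a GraphConnector. A minimal sketch of a conforming fake — the package clause and the `details` import path are assumptions drawn from the repo layout, and the nil returns are illustrative only, not part of this change:

```go
package operations_test

import (
	"context"

	"github.com/alcionai/corso/src/internal/data"
	"github.com/alcionai/corso/src/pkg/account"
	"github.com/alcionai/corso/src/pkg/backup/details"
	"github.com/alcionai/corso/src/pkg/control"
	"github.com/alcionai/corso/src/pkg/fault"
	"github.com/alcionai/corso/src/pkg/selectors"
)

// fakeRestoreConsumer satisfies the RestoreConsumer seam so that
// NewRestoreOperation and validate() can be exercised in isolation.
type fakeRestoreConsumer struct{}

func (fakeRestoreConsumer) ConsumeRestoreCollections(
	ctx context.Context,
	backupVersion int,
	acct account.Account,
	selector selectors.Selector,
	dest control.RestoreDestination,
	opts control.Options,
	dcs []data.RestoreCollection,
	errs *fault.Bus,
) (*details.Details, error) {
	// a real consumer writes the collections back into M365
	return nil, nil
}

func (fakeRestoreConsumer) Wait() *data.CollectionStats {
	// zero stats: persistResults reports NoData for this run
	return &data.CollectionStats{}
}
```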
@@ -16,7 +16,6 @@ import (
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/connector/mockconnector"
 	"github.com/alcionai/corso/src/internal/connector/onedrive/api"
-	"github.com/alcionai/corso/src/internal/connector/support"
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/events"
 	evmock "github.com/alcionai/corso/src/internal/events/mock"
@@ -50,7 +49,7 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() {
 	var (
 		kw   = &kopia.Wrapper{}
 		sw   = &store.Wrapper{}
-		gc   = &connector.GraphConnector{}
+		gc   = &mockconnector.GraphConnector{}
 		acct = account.Account{}
 		now  = time.Now()
 		dest = tester.DefaultTestRestoreDestination()
@@ -75,11 +74,9 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() {
 						Collection: &mockconnector.MockExchangeDataCollection{},
 					},
 				},
-				gc: &support.ConnectorOperationStatus{
-					Metrics: support.CollectionMetrics{
-						Objects:   1,
-						Successes: 1,
-					},
+				gc: &data.CollectionStats{
+					Objects:   1,
+					Successes: 1,
 				},
 			},
 		},
@@ -89,7 +86,7 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() {
 			fail: assert.AnError,
 			stats: restoreStats{
 				bytesRead: &stats.ByteCounter{},
-				gc:        &support.ConnectorOperationStatus{},
+				gc:        &data.CollectionStats{},
 			},
 		},
 		{
@@ -98,7 +95,7 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() {
 			stats: restoreStats{
 				bytesRead: &stats.ByteCounter{},
 				cs:        []data.RestoreCollection{},
-				gc:        &support.ConnectorOperationStatus{},
+				gc:        &data.CollectionStats{},
 			},
 		},
 	}
@@ -126,7 +123,7 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() {
 
 			assert.Equal(t, test.expectStatus.String(), op.Status.String(), "status")
 			assert.Equal(t, len(test.stats.cs), op.Results.ItemsRead, "items read")
-			assert.Equal(t, test.stats.gc.Metrics.Successes, op.Results.ItemsWritten, "items written")
+			assert.Equal(t, test.stats.gc.Successes, op.Results.ItemsWritten, "items written")
 			assert.Equal(t, test.stats.bytesRead.NumBytes, op.Results.BytesRead, "resource owners")
 			assert.Equal(t, test.stats.resourceCount, op.Results.ResourceOwners, "resource owners")
 			assert.Equal(t, now, op.Results.StartedAt, "started at")
@@ -217,7 +214,7 @@ func (suite *RestoreOpIntegrationSuite) TearDownSuite() {
 func (suite *RestoreOpIntegrationSuite) TestNewRestoreOperation() {
 	kw := &kopia.Wrapper{}
 	sw := &store.Wrapper{}
-	gc := &connector.GraphConnector{}
+	gc := &mockconnector.GraphConnector{}
 	acct := tester.NewM365Account(suite.T())
 	dest := tester.DefaultTestRestoreDestination()
 
@@ -226,7 +223,7 @@ func (suite *RestoreOpIntegrationSuite) TestNewRestoreOperation() {
 		opts     control.Options
 		kw       *kopia.Wrapper
 		sw       *store.Wrapper
-		gc       *connector.GraphConnector
+		rc       RestoreConsumer
 		acct     account.Account
 		targets  []string
 		errCheck assert.ErrorAssertionFunc
@@ -234,7 +231,7 @@ func (suite *RestoreOpIntegrationSuite) TestNewRestoreOperation() {
 		{"good", control.Options{}, kw, sw, gc, acct, nil, assert.NoError},
 		{"missing kopia", control.Options{}, nil, sw, gc, acct, nil, assert.Error},
 		{"missing modelstore", control.Options{}, kw, nil, gc, acct, nil, assert.Error},
-		{"missing graphConnector", control.Options{}, kw, sw, nil, acct, nil, assert.Error},
+		{"missing restore consumer", control.Options{}, kw, sw, nil, acct, nil, assert.Error},
 	}
 	for _, test := range table {
 		suite.Run(test.name, func() {
@@ -246,7 +243,7 @@ func (suite *RestoreOpIntegrationSuite) TestNewRestoreOperation() {
 				test.opts,
 				test.kw,
 				test.sw,
-				test.gc,
+				test.rc,
 				test.acct,
 				"backup-id",
 				selectors.Selector{DiscreteOwner: "test"},
@@ -295,7 +292,7 @@ func setupExchangeBackup(
 		gc,
 		acct,
 		bsel.Selector,
-		bsel.Selector.DiscreteOwner,
+		bsel.Selector,
 		evmock.NewBus())
 	require.NoError(t, err, clues.ToCore(err))
 
@@ -352,7 +349,7 @@ func setupSharePointBackup(
 		gc,
 		acct,
 		spsel.Selector,
-		spsel.Selector.DiscreteOwner,
+		spsel.Selector,
 		evmock.NewBus())
 	require.NoError(t, err, clues.ToCore(err))
 
@@ -222,7 +222,7 @@ func collect(
 }
 
 type backuper interface {
-	BackupCollections(
+	ConsumeBackupCollections(
 		ctx context.Context,
 		bases []kopia.IncrementalBase,
 		cs []data.BackupCollection,
@@ -240,7 +240,7 @@ func write(
 	dbcs []data.BackupCollection,
 	errs *fault.Bus,
 ) (string, error) {
-	backupStats, _, _, err := bup.BackupCollections(
+	backupStats, _, _, err := bup.ConsumeBackupCollections(
 		ctx,
 		nil,
 		dbcs,
@@ -24,6 +24,7 @@ const (
 	// M365 config
 	TestCfgAzureTenantID   = "azure_tenantid"
 	TestCfgSiteID          = "m365siteid"
+	TestCfgSiteURL         = "m365siteurl"
 	TestCfgUserID          = "m365userid"
 	TestCfgSecondaryUserID = "secondarym365userid"
 	TestCfgLoadTestUserID  = "loadtestm365userid"
@@ -34,6 +35,7 @@ const (
 // test specific env vars
 const (
 	EnvCorsoM365TestSiteID          = "CORSO_M365_TEST_SITE_ID"
+	EnvCorsoM365TestSiteURL         = "CORSO_M365_TEST_SITE_URL"
 	EnvCorsoM365TestUserID          = "CORSO_M365_TEST_USER_ID"
 	EnvCorsoSecondaryM365TestUserID = "CORSO_SECONDARY_M365_TEST_USER_ID"
 	EnvCorsoM365LoadTestUserID      = "CORSO_M365_LOAD_TEST_USER_ID"
@@ -136,6 +138,12 @@ func readTestConfig() (map[string]string, error) {
 		os.Getenv(EnvCorsoM365TestSiteID),
 		vpr.GetString(TestCfgSiteID),
 		"10rqc2.sharepoint.com,4892edf5-2ebf-46be-a6e5-a40b2cbf1c1a,38ab6d06-fc82-4417-af93-22d8733c22be")
+	fallbackTo(
+		testEnv,
+		TestCfgSiteURL,
+		os.Getenv(EnvCorsoM365TestSiteURL),
+		vpr.GetString(TestCfgSiteURL),
+		"https://10rqc2.sharepoint.com/sites/CorsoCI")
 
 	testEnv[EnvCorsoTestConfigFilePath] = os.Getenv(EnvCorsoTestConfigFilePath)
 	testConfig = testEnv
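The new site-URL entry follows the same precedence the `M365SiteURL` helper (added later in this change) documents: env var first, then the corso_test.toml value, then a hard-coded default. `fallbackTo` itself is not shown in this diff, so the sketch below only illustrates the first-non-empty behavior its call site implies; the helper name and signature here are hypothetical.

```go
// firstNonEmpty is a hypothetical stand-in for fallbackTo: it records the
// first non-empty candidate under key, giving env vars priority over the
// toml config, and the config priority over the baked-in default.
func firstNonEmpty(into map[string]string, key string, candidates ...string) {
	for _, c := range candidates {
		if len(c) > 0 {
			into[key] = c
			return
		}
	}
}

// mirroring the call site above (illustrative):
// firstNonEmpty(testEnv, TestCfgSiteURL,
//     os.Getenv(EnvCorsoM365TestSiteURL),
//     vpr.GetString(TestCfgSiteURL),
//     "https://10rqc2.sharepoint.com/sites/CorsoCI")
```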
@@ -52,7 +52,6 @@ func LoadTestM365SiteID(t *testing.T) string {
 	cfg, err := readTestConfig()
 	require.NoError(t, err, "retrieving load test m365 site id from test configuration", clues.ToCore(err))
 
-	// TODO: load test site id, not standard test site id
 	return cfg[TestCfgSiteID]
 }
 
@@ -133,3 +132,14 @@ func M365SiteID(t *testing.T) string {
 
 	return cfg[TestCfgSiteID]
 }
+
+// M365SiteURL returns a site webURL string representing the m365SiteURL described
+// by either the env var CORSO_M365_TEST_SITE_URL, the corso_test.toml config
+// file or the default value (in that order of priority). The default is a
+// last-attempt fallback that will only work on alcion's testing org.
+func M365SiteURL(t *testing.T) string {
+	cfg, err := readTestConfig()
+	require.NoError(t, err, "retrieving m365 site url from test configuration", clues.ToCore(err))
+
+	return cfg[TestCfgSiteURL]
+}
@@ -165,6 +165,11 @@ type Printable struct {
 
 // MinimumPrintable reduces the Backup to its minimally printable details.
 func (b Backup) MinimumPrintable() any {
+	name := b.ResourceOwnerName
+	if len(name) == 0 {
+		name = b.Selector.Name()
+	}
+
 	return Printable{
 		ID:         b.ID,
 		ErrorCount: b.ErrorCount,
@@ -173,7 +178,7 @@ func (b Backup) MinimumPrintable() any {
 		Version:       "0",
 		BytesRead:     b.BytesRead,
 		BytesUploaded: b.BytesUploaded,
-		Owner:         b.Selector.DiscreteOwner,
+		Owner:         name,
 	}
 }
 
@@ -232,10 +237,15 @@ func (b Backup) Values() []string {
 		status += (")")
 	}
 
+	name := b.ResourceOwnerName
+	if len(name) == 0 {
+		name = b.Selector.Name()
+	}
+
 	return []string{
 		common.FormatTabularDisplayTime(b.StartedAt),
 		string(b.ID),
 		status,
-		b.Selector.DiscreteOwner,
+		name,
 	}
 }
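`MinimumPrintable` and `Values` now resolve the displayed owner the same way. Written out as a standalone helper — hypothetical, purely for illustration, as the diff keeps this logic inlined and declared as if inside the `backup` package — the fallback reads:

```go
// ownerDisplayName prefers the persisted ResourceOwnerName; otherwise it
// asks the selector, whose Name() itself falls back to the owner ID.
func ownerDisplayName(b Backup) string {
	if len(b.ResourceOwnerName) > 0 {
		return b.ResourceOwnerName
	}

	return b.Selector.Name()
}
```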
@@ -24,8 +24,10 @@ func TestBackupUnitSuite(t *testing.T) {
 	suite.Run(t, &BackupUnitSuite{Suite: tester.NewUnitSuite(t)})
 }
 
+const testDomainOwner = "test-domain-owner"
+
 func stubBackup(t time.Time) backup.Backup {
-	sel := selectors.NewExchangeBackup([]string{"test"})
+	sel := selectors.NewExchangeBackup([]string{testDomainOwner})
 	sel.Include(sel.AllData())
 
 	return backup.Backup{
@@ -61,30 +63,41 @@ func stubBackup(t time.Time) backup.Backup {
 
 func (suite *BackupUnitSuite) TestBackup_HeadersValues() {
 	var (
-		t        = suite.T()
-		now      = time.Now()
-		b        = stubBackup(now)
-		expectHs = []string{
-			"Started At",
-			"ID",
-			"Status",
-			"Resource Owner",
-		}
-		nowFmt   = common.FormatTabularDisplayTime(now)
-		expectVs = []string{
-			nowFmt,
-			"id",
-			"status (2 errors, 1 skipped: 1 malware)",
-			"test",
-		}
+		now          = time.Now()
+		nowFmt       = common.FormatTabularDisplayTime(now)
+		expectHs     = []string{"Started At", "ID", "Status", "Resource Owner"}
+		expectVsBase = []string{nowFmt, "id", "status (2 errors, 1 skipped: 1 malware)"}
 	)
 
-	// single skipped malware
-	hs := b.Headers()
-	assert.Equal(t, expectHs, hs)
+	table := []struct {
+		name     string
+		bup      func() backup.Backup
+		expectVs []string
+	}{
+		{
+			name:     "owner from selectors",
+			bup:      func() backup.Backup { return stubBackup(now) },
+			expectVs: append(expectVsBase, testDomainOwner),
+		},
+		{
+			name: "owner from backup",
+			bup: func() backup.Backup {
+				b := stubBackup(now)
+				b.ResourceOwnerName = "test ro name"
+				return b
+			},
+			expectVs: append(expectVsBase, "test ro name"),
+		},
+	}
+	for _, test := range table {
+		suite.Run(test.name, func() {
+			t := suite.T()
+			b := test.bup()
 
-	vs := b.Values()
-	assert.Equal(t, expectVs, vs)
+			assert.Equal(t, expectHs, b.Headers())     // not elementsMatch, order matters
+			assert.Equal(t, test.expectVs, b.Values()) // not elementsMatch, order matters
+		})
+	}
 }
 
 func (suite *BackupUnitSuite) TestBackup_Values_statusVariations() {
@@ -359,21 +359,23 @@ func newSliceFilter(c comparator, targets, normTargets []string, negate bool) Fi
 // ----------------------------------------------------------------------------------------------------
 
 // CompareAny checks whether any one of all the provided
-// inputs passes the filter.
+// inputs passes the filter. If one passes, that value is
+// returned, as well as its index in the input range.
+// If nothing matches, returns ("", -1, false)
 //
 // Note that, as a gotcha, CompareAny can resolve truthily
 // for both the standard and negated versions of a filter.
 // Ex: consider the input CompareAny(true, false), which
 // will return true for both Equals(true) and NotEquals(true),
 // because at least one element matches for both filters.
-func (f Filter) CompareAny(inputs ...string) bool {
-	for _, in := range inputs {
+func (f Filter) CompareAny(inputs ...string) (string, int, bool) {
+	for i, in := range inputs {
 		if f.Compare(in) {
-			return true
+			return in, i, true
 		}
 	}
 
-	return false
+	return "", -1, false
 }
 
 // Compare checks whether the input passes the filter.
@@ -442,6 +444,48 @@ func (f Filter) Compare(input string) bool {
 	return res
 }
 
+// Matches extends Compare by not only checking if
+// the input passes the filter, but if it passes, the
+// target which matched and its index are returned as well.
+// If more than one value matches the input, only the
+// first is returned.
+// returns ("", -1, false) if no match is found.
+// TODO: only partially implemented.
+// func (f Filter) Matches(input string) (string, int, bool) {
+// 	var (
+// 		cmp     func(string, string) bool
+// 		res     bool
+// 		targets = f.NormalizedTargets
+// 	)
+
+// 	switch f.Comparator {
+// 	case TargetPathPrefix:
+// 		cmp = pathPrefix
+// 	case TargetPathContains:
+// 		cmp = pathContains
+// 	case TargetPathSuffix:
+// 		cmp = pathSuffix
+// 	case TargetPathEquals:
+// 		cmp = pathEquals
+// 	default:
+// 		return "", -1, false
+// 	}

+// 	for i, tgt := range targets {
+// 		res = cmp(norm(tgt), norm(input))

+// 		if !f.Negate && res {
+// 			return f.Targets[i], i, true
+// 		}

+// 		if f.Negate && !res {
+// 			return f.Targets[i], i, true
+// 		}
+// 	}

+// 	return "", -1, false
+// }
+
 // true if t == i
 func equals(target, input string) bool {
 	return target == input
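Callers of `CompareAny` now get back the matching input and its position, not just a bool; the selectors package below simply discards the extra values. A usage sketch — the `Equal` constructor is an assumption (the visible tests only show `filters.NotEqual`), and the import path is inferred from the repo layout:

```go
package main

import (
	"fmt"

	"github.com/alcionai/corso/src/pkg/filters"
)

func main() {
	// Equal is assumed to exist alongside NotEqual.
	f := filters.Equal("foo")

	// CompareAny reports which input matched and where it sat in the
	// argument list, instead of a bare bool.
	match, idx, ok := f.CompareAny("bar", "foo", "baz")
	if ok {
		fmt.Printf("matched %q at index %d\n", match, idx) // "foo" at index 1
	}
}
```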
@@ -45,20 +45,49 @@ func (suite *FiltersSuite) TestEquals_any() {
 	nf := filters.NotEqual("foo")
 
 	table := []struct {
 		name     string
 		input    []string
 		expectF  assert.BoolAssertionFunc
-		expectNF assert.BoolAssertionFunc
+		expectFVal  string
+		expectFIdx  int
+		expectNF    assert.BoolAssertionFunc
+		expectNFVal string
+		expectNFIdx int
 	}{
-		{"includes target", []string{"foo", "bar"}, assert.True, assert.True},
-		{"not includes target", []string{"baz", "qux"}, assert.False, assert.True},
+		{
+			name:        "includes target",
+			input:       []string{"foo", "bar"},
+			expectF:     assert.True,
+			expectFVal:  "foo",
+			expectFIdx:  0,
+			expectNF:    assert.True,
+			expectNFVal: "bar",
+			expectNFIdx: 1,
+		},
+		{
+			name:        "not includes target",
+			input:       []string{"baz", "qux"},
+			expectF:     assert.False,
+			expectFVal:  "",
+			expectFIdx:  -1,
+			expectNF:    assert.True,
+			expectNFVal: "baz",
+			expectNFIdx: 0,
+		},
 	}
 	for _, test := range table {
 		suite.Run(test.name, func() {
 			t := suite.T()
 
-			test.expectF(t, f.CompareAny(test.input...), "filter")
-			test.expectNF(t, nf.CompareAny(test.input...), "negated filter")
+			v, i, b := f.CompareAny(test.input...)
+			test.expectF(t, b, "filter")
+			assert.Equal(t, test.expectFIdx, i, "index")
+			assert.Equal(t, test.expectFVal, v, "value")
+
+			v, i, b = nf.CompareAny(test.input...)
+			test.expectNF(t, b, "neg-filter")
+			assert.Equal(t, test.expectNFIdx, i, "neg-index")
+			assert.Equal(t, test.expectNFVal, v, "neg-value")
 		})
 	}
 }
@@ -120,7 +120,7 @@ func runLoadTest(
 ) {
 	//revive:enable:context-as-argument
 	t.Run(prefix+"_load_test_main", func(t *testing.T) {
-		b, err := r.NewBackup(ctx, bupSel)
+		b, err := r.NewBackup(ctx, bupSel, nil, nil)
 		require.NoError(t, err, clues.ToCore(err))
 
 		runBackupLoadTest(t, ctx, &b, service, usersUnderTest)
@@ -447,8 +447,7 @@ func (suite *LoadExchangeSuite) TestExchange() {
 		"all_users", "exchange",
 		suite.usersUnderTest,
 		sel, sel, // same selection for backup and restore
-		true,
-	)
+		true)
 }
 
 // single user, lots of data
@@ -500,8 +499,7 @@ func (suite *IndividualLoadExchangeSuite) TestExchange() {
 		"single_user", "exchange",
 		suite.usersUnderTest,
 		sel, sel, // same selection for backup and restore
-		true,
-	)
+		true)
 }
 
 // ------------------------------------------------------------------------------------------------
@@ -553,8 +551,7 @@ func (suite *LoadOneDriveSuite) TestOneDrive() {
 		"all_users", "one_drive",
 		suite.usersUnderTest,
 		sel, sel, // same selection for backup and restore
-		false,
-	)
+		false)
 }
 
 type IndividualLoadOneDriveSuite struct {
@@ -601,8 +598,7 @@ func (suite *IndividualLoadOneDriveSuite) TestOneDrive() {
 		"single_user", "one_drive",
 		suite.usersUnderTest,
 		sel, sel, // same selection for backup and restore
-		false,
-	)
+		false)
 }
 
 // ------------------------------------------------------------------------------------------------
@@ -654,8 +650,7 @@ func (suite *LoadSharePointSuite) TestSharePoint() {
 		"all_sites", "share_point",
 		suite.sitesUnderTest,
 		sel, sel, // same selection for backup and restore
-		false,
-	)
+		false)
 }
 
 type IndividualLoadSharePointSuite struct {
@@ -703,6 +698,5 @@ func (suite *IndividualLoadSharePointSuite) TestSharePoint() {
 		"single_site", "share_point",
 		suite.sitesUnderTest,
 		sel, sel, // same selection for backup and restore
-		false,
-	)
+		false)
 }
@@ -54,6 +54,7 @@ type Repository interface {
 	NewBackup(
 		ctx context.Context,
 		self selectors.Selector,
+		ownerIDToName, ownerNameToID map[string]string,
 	) (operations.BackupOperation, error)
 	NewRestore(
 		ctx context.Context,
@@ -283,15 +284,30 @@ func (r *repository) Close(ctx context.Context) error {
 }
 
 // NewBackup generates a BackupOperation runner.
+// ownerIDToName and ownerNameToID are optional populations, in case the caller has
+// already generated those values.
 func (r repository) NewBackup(
 	ctx context.Context,
 	sel selectors.Selector,
+	ownerIDToName, ownerNameToID map[string]string,
 ) (operations.BackupOperation, error) {
 	gc, err := connectToM365(ctx, sel, r.Account, fault.New(true))
 	if err != nil {
 		return operations.BackupOperation{}, errors.Wrap(err, "connecting to m365")
 	}
 
+	ownerID, ownerName, err := gc.PopulateOwnerIDAndNamesFrom(
+		ctx,
+		sel.DiscreteOwner,
+		ownerIDToName,
+		ownerNameToID)
+	if err != nil {
+		return operations.BackupOperation{}, errors.Wrap(err, "resolving resource owner details")
+	}
+
+	// TODO: retrieve display name from gc
+	sel = sel.SetDiscreteOwnerIDName(ownerID, ownerName)
+
 	return operations.NewBackupOperation(
 		ctx,
 		r.Opts,
@@ -300,7 +316,7 @@ func (r repository) NewBackup(
 		gc,
 		r.Account,
 		sel,
-		sel.DiscreteOwner,
+		sel,
 		r.Bus)
 }
 
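With the widened `NewBackup` signature, a caller that backs up many owners can resolve the tenant's id/name mappings once and hand them to every operation, while passing nil for both maps leaves resolution to the repository via `PopulateOwnerIDAndNamesFrom` (as the integration test below does). A hedged sketch — the map contents, function name, and import paths are illustrative assumptions:

```go
package main

import (
	"context"

	"github.com/alcionai/corso/src/pkg/repository"
	"github.com/alcionai/corso/src/pkg/selectors"
)

// runOneBackup assumes an already-connected repository and a selector
// whose DiscreteOwner is either an ID or an alias found in the maps.
func runOneBackup(
	ctx context.Context,
	r repository.Repository,
	sel selectors.Selector,
	idToName, nameToID map[string]string, // e.g. {"owner-id": "owner@example.com"} and the inverse
) error {
	bo, err := r.NewBackup(ctx, sel, idToName, nameToID)
	if err != nil {
		return err
	}

	return bo.Run(ctx)
}
```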
@@ -198,7 +198,7 @@ func (suite *RepositoryIntegrationSuite) TestNewBackup() {
 	r, err := repository.Initialize(ctx, acct, st, control.Options{})
 	require.NoError(t, err, clues.ToCore(err))
 
-	bo, err := r.NewBackup(ctx, selectors.Selector{DiscreteOwner: "test"})
+	bo, err := r.NewBackup(ctx, selectors.Selector{DiscreteOwner: "test"}, nil, nil)
 	require.NoError(t, err, clues.ToCore(err))
 	require.NotNil(t, bo)
 }
@@ -223,7 +223,9 @@ func matchesAny[T scopeT, C categoryT](s T, cat C, inpts []string) bool {
 		return false
 	}
 
-	return s[cat.String()].CompareAny(inpts...)
+	_, _, pass := s[cat.String()].CompareAny(inpts...)
+
+	return pass
 }
 
 // getCategory returns the scope's category value.
@@ -112,6 +112,8 @@ type Selector struct {
 	// iterate over the results, where each one will populate this field
 	// with a different owner.
 	DiscreteOwner string `json:"discreteOwner,omitempty"`
+	// display name for the DiscreteOwner.
+	DiscreteOwnerName string `json:"discreteOwnerName,omitempty"`
 
 	// A slice of exclusion scopes. Exclusions apply globally to all
 	// inclusions/filters, with any-match behavior.
@@ -146,6 +148,46 @@ func (s Selector) DiscreteResourceOwners() []string {
 	return split(s.ResourceOwners.Target)
 }
 
+// SetDiscreteOwnerIDName ensures the selector has the correct discrete owner
+// id and name. It is assumed that these values are sourced using the current
+// s.DiscreteOwner as input. The reason for taking in both the id and name, and
+// not just the name, is so that constructors can input owner aliases in place
+// of ids, with the expectation that the two will get sorted and re-written
+// later on with this setter.
+func (s Selector) SetDiscreteOwnerIDName(id, name string) Selector {
+	r := s
+
+	if len(id) == 0 {
+		// assume a the discreteOwner is already set, and don't replace anything.
+		r.DiscreteOwnerName = s.DiscreteOwner
+		return r
+	}
+
+	r.DiscreteOwner = id
+	r.DiscreteOwnerName = name
+
+	if len(name) == 0 {
+		r.DiscreteOwnerName = id
+	}
+
+	return r
+}
+
+// ID returns s.discreteOwner, which is assumed to be a stable ID.
+func (s Selector) ID() string {
+	return s.DiscreteOwner
+}
+
+// Name returns s.discreteOwnerName. If that value is empty, it returns
+// s.DiscreteOwner instead.
+func (s Selector) Name() string {
+	if len(s.DiscreteOwnerName) == 0 {
+		return s.DiscreteOwner
+	}
+
+	return s.DiscreteOwnerName
+}
+
 // isAnyResourceOwner returns true if the selector includes all resource owners.
 func isAnyResourceOwner(s Selector) bool {
 	return s.ResourceOwners.Comparator == filters.Passes
@@ -336,7 +378,7 @@ func pathCategoriesIn[T scopeT, C categoryT](ss []scope) []path.CategoryType {
 }
 
 // ---------------------------------------------------------------------------
-// scope helpers
+// scope constructors
 // ---------------------------------------------------------------------------
 
 type scopeConfig struct {
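A short usage sketch for the new setter and accessors — the owner values are hypothetical, and the import path is inferred from the repo layout:

```go
package main

import (
	"fmt"

	"github.com/alcionai/corso/src/pkg/selectors"
)

func main() {
	// constructors may seed the selector with an alias (e.g. a UPN)
	sel := selectors.Selector{DiscreteOwner: "adele@contoso.onmicrosoft.com"}

	// once the stable ID is known, rewrite both fields; if the name were
	// empty, Name() would fall back to the ID.
	sel = sel.SetDiscreteOwnerIDName("user-stable-id", "Adele Vance")

	fmt.Println(sel.ID())   // user-stable-id
	fmt.Println(sel.Name()) // Adele Vance
}
```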
@@ -248,6 +248,47 @@ func (suite *SelectorSuite) TestSplitByResourceOnwer() {
 	}
 }
 
+func (suite *SelectorSuite) TestIDName() {
+	table := []struct {
+		title                string
+		id, name             string
+		expectID, expectName string
+	}{
+		{"empty", "", "", "", ""},
+		{"only id", "id", "", "id", "id"},
+		{"only name", "", "name", "", "name"},
+		{"both", "id", "name", "id", "name"},
+	}
+	for _, test := range table {
+		suite.Run(test.title, func() {
+			sel := Selector{DiscreteOwner: test.id, DiscreteOwnerName: test.name}
+			assert.Equal(suite.T(), test.expectID, sel.ID())
+			assert.Equal(suite.T(), test.expectName, sel.Name())
+		})
+	}
+}
+
+func (suite *SelectorSuite) TestSetDiscreteOwnerIDName() {
+	table := []struct {
+		title                string
+		id, name             string
+		expectID, expectName string
+	}{
+		{"empty", "", "", "", ""},
+		{"only id", "id", "", "id", "id"},
+		{"only name", "", "", "", ""},
+		{"both", "id", "name", "id", "name"},
+	}
+	for _, test := range table {
+		suite.Run(test.title, func() {
+			sel := Selector{}
+			sel = sel.SetDiscreteOwnerIDName(test.id, test.name)
+			assert.Equal(suite.T(), test.expectID, sel.ID())
+			assert.Equal(suite.T(), test.expectName, sel.Name())
+		})
+	}
+}
+
 // TestPathCategories verifies that no scope produces a `path.UnknownCategory`
 func (suite *SelectorSuite) TestPathCategories_includes() {
 	users := []string{"someuser@onmicrosoft.com"}
@@ -6,9 +6,8 @@ import (
 	"github.com/alcionai/clues"
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
 
-	"github.com/alcionai/corso/src/internal/connector"
+	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/connector/discovery"
-	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/fault"
 )
@@ -55,84 +54,6 @@ func Users(ctx context.Context, acct account.Account, errs *fault.Bus) ([]*User,
 	return ret, nil
 }
 
-func UserIDs(ctx context.Context, acct account.Account, errs *fault.Bus) ([]string, error) {
-	users, err := Users(ctx, acct, errs)
-	if err != nil {
-		return nil, err
-	}
-
-	ret := make([]string, 0, len(users))
-	for _, u := range users {
-		ret = append(ret, u.ID)
-	}
-
-	return ret, nil
-}
-
-// UserPNs retrieves all user principleNames in the tenant. Principle Names
-// can be used analogous userIDs in graph API queries.
-func UserPNs(ctx context.Context, acct account.Account, errs *fault.Bus) ([]string, error) {
-	users, err := Users(ctx, acct, errs)
-	if err != nil {
-		return nil, err
-	}
-
-	ret := make([]string, 0, len(users))
-	for _, u := range users {
-		ret = append(ret, u.PrincipalName)
-	}
-
-	return ret, nil
-}
-
-type Site struct {
-	// WebURL that displays the item in the browser
-	WebURL string
-
-	// ID is of the format: <site collection hostname>.<site collection unique id>.<site unique id>
-	// for example: contoso.sharepoint.com,abcdeab3-0ccc-4ce1-80ae-b32912c9468d,xyzud296-9f7c-44e1-af81-3c06d0d43007
-	ID string
-}
-
-// Sites returns a list of Sites in a specified M365 tenant
-func Sites(ctx context.Context, acct account.Account, errs *fault.Bus) ([]*Site, error) {
-	gc, err := connector.NewGraphConnector(ctx, graph.HTTPClient(graph.NoTimeout()), acct, connector.Sites, errs)
-	if err != nil {
-		return nil, clues.Wrap(err, "initializing M365 graph connection")
-	}
-
-	// gc.Sites is a map with keys: SiteURL, values: ID
-	ret := make([]*Site, 0, len(gc.Sites))
-	for k, v := range gc.Sites {
-		ret = append(ret, &Site{
-			WebURL: k,
-			ID:     v,
-		})
-	}
-
-	return ret, nil
-}
-
-// SiteURLs returns a list of SharePoint site WebURLs in the specified M365 tenant
-func SiteURLs(ctx context.Context, acct account.Account, errs *fault.Bus) ([]string, error) {
-	gc, err := connector.NewGraphConnector(ctx, graph.HTTPClient(graph.NoTimeout()), acct, connector.Sites, errs)
-	if err != nil {
-		return nil, clues.Wrap(err, "initializing M365 graph connection")
-	}
-
-	return gc.GetSiteWebURLs(), nil
-}
-
-// SiteIDs returns a list of SharePoint sites IDs in the specified M365 tenant
-func SiteIDs(ctx context.Context, acct account.Account, errs *fault.Bus) ([]string, error) {
-	gc, err := connector.NewGraphConnector(ctx, graph.HTTPClient(graph.NoTimeout()), acct, connector.Sites, errs)
-	if err != nil {
-		return nil, clues.Wrap(err, "initializing graph connection")
-	}
-
-	return gc.GetSiteIDs(), nil
-}
-
 // parseUser extracts information from `models.Userable` we care about
 func parseUser(item models.Userable) (*User, error) {
 	if item.GetUserPrincipalName() == nil {
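The slice-returning helpers removed here (UserIDs, UserPNs, SiteURLs, SiteIDs) are superseded by the map-returning UsersMap and SitesMap added further down. Callers that still want a plain slice can likely derive one from those maps; a small sketch using golang.org/x/exp/maps, with made-up sample contents standing in for the real maps:

package main

import (
	"fmt"

	"golang.org/x/exp/maps"
)

func main() {
	// Hypothetical stand-ins for the two maps UsersMap would return:
	// id -> principalName and principalName -> id.
	idToPN := map[string]string{
		"id-1": "alice@contoso.com",
		"id-2": "bob@contoso.com",
	}
	pnToID := map[string]string{
		"alice@contoso.com": "id-1",
		"bob@contoso.com":   "id-2",
	}

	// The old UserIDs/UserPNs style slices can be recovered on demand.
	// Note that map iteration order is unspecified.
	ids := maps.Keys(idToPN) // []string of user IDs
	pns := maps.Keys(pnToID) // []string of principal names

	fmt.Println(ids)
	fmt.Println(pns)
}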
@@ -148,3 +69,93 @@ func parseUser(item models.Userable) (*User, error) {
 
 	return u, nil
 }
+
+// UsersMap retrieves all users in the tenant, and returns two maps: one id-to-principalName,
+// and one principalName-to-id.
+func UsersMap(
+	ctx context.Context,
+	acct account.Account,
+	errs *fault.Bus,
+) (map[string]string, map[string]string, error) {
+	users, err := Users(ctx, acct, errs)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	var (
+		idToName = make(map[string]string, len(users))
+		nameToID = make(map[string]string, len(users))
+	)
+
+	for _, u := range users {
+		idToName[u.ID] = u.PrincipalName
+		nameToID[u.PrincipalName] = u.ID
+	}
+
+	return idToName, nameToID, nil
+}
+
+type Site struct {
+	// WebURL that displays the item in the browser
+	WebURL string
+
+	// ID is of the format: <site collection hostname>.<site collection unique id>.<site unique id>
+	// for example: contoso.sharepoint.com,abcdeab3-0ccc-4ce1-80ae-b32912c9468d,xyzud296-9f7c-44e1-af81-3c06d0d43007
+	ID string
+}
+
+// Sites returns a list of Sites in a specified M365 tenant
+func Sites(ctx context.Context, acct account.Account, errs *fault.Bus) ([]*Site, error) {
+	sites, err := discovery.Sites(ctx, acct, errs)
+	if err != nil {
+		return nil, clues.Wrap(err, "initializing M365 graph connection")
+	}
+
+	ret := make([]*Site, 0, len(sites))
+
+	for _, s := range sites {
+		ps, err := parseSite(s)
+		if err != nil {
+			return nil, clues.Wrap(err, "parsing siteable")
+		}
+
+		ret = append(ret, ps)
+	}
+
+	return ret, nil
+}
+
+// parseSite extracts the information from `models.Siteable` we care about
+func parseSite(item models.Siteable) (*Site, error) {
+	s := &Site{
+		ID:     ptr.Val(item.GetId()),
+		WebURL: ptr.Val(item.GetWebUrl()),
+	}
+
+	return s, nil
+}
+
+// SitesMap retrieves all sites in the tenant, and returns two maps: one id-to-webURL,
+// and one webURL-to-id.
+func SitesMap(
+	ctx context.Context,
+	acct account.Account,
+	errs *fault.Bus,
+) (map[string]string, map[string]string, error) {
+	sites, err := Sites(ctx, acct, errs)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	var (
+		idToName = make(map[string]string, len(sites))
+		nameToID = make(map[string]string, len(sites))
+	)
+
+	for _, s := range sites {
+		idToName[s.ID] = s.WebURL
+		nameToID[s.WebURL] = s.ID
+	}
+
+	return idToName, nameToID, nil
+}
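Returning both directions of the mapping lets callers resolve a resource owner whether it was supplied as an ID or as a name. A hedged sketch of how such a lookup might be written on top of the UsersMap or SitesMap output (resolveOwner is a hypothetical helper, not part of this change, and the sample data is made up):

package main

import "fmt"

// resolveOwner is a hypothetical helper (not part of this change): given the
// paired maps produced by UsersMap or SitesMap, it resolves an owner supplied
// as either an ID or a name and returns both forms.
func resolveOwner(owner string, idToName, nameToID map[string]string) (id, name string, ok bool) {
	if n, found := idToName[owner]; found {
		return owner, n, true
	}

	if i, found := nameToID[owner]; found {
		return i, owner, true
	}

	return "", "", false
}

func main() {
	// Made-up sample data for illustration.
	idToName := map[string]string{"id-1": "alice@contoso.com"}
	nameToID := map[string]string{"alice@contoso.com": "id-1"}

	id, name, ok := resolveOwner("alice@contoso.com", idToName, nameToID)
	fmt.Println(id, name, ok) // id-1 alice@contoso.com true
}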