# add weburl identification in the CLI (#1712)
## Description

Adds webURL identification and unioning in the CLI, for both backup and restore of SharePoint data.

## Type of change

- [x] 🌻 Feature

## Issue(s)

* #1616

## Test Plan

- [x] ⚡ Unit test
parent a18619ffa4
commit 38239e986c
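In practice, this means `corso backup create sharepoint` can identify sites by `--web-url` as well as by `--site` ID, and inputs from both flags get resolved and unioned into a single set of site IDs. (The command path here is an illustrative assumption; the flag names come from the diff below.)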
```diff
@@ -12,6 +12,7 @@ import (
 	"github.com/alcionai/corso/src/cli/options"
 	. "github.com/alcionai/corso/src/cli/print"
 	"github.com/alcionai/corso/src/cli/utils"
+	"github.com/alcionai/corso/src/internal/connector"
 	"github.com/alcionai/corso/src/internal/kopia"
 	"github.com/alcionai/corso/src/internal/model"
 	"github.com/alcionai/corso/src/pkg/backup"
```
```diff
@@ -19,7 +20,6 @@ import (
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/repository"
 	"github.com/alcionai/corso/src/pkg/selectors"
-	"github.com/alcionai/corso/src/pkg/services/m365"
 	"github.com/alcionai/corso/src/pkg/store"
 )
 
```
```diff
@@ -28,9 +28,10 @@ import (
 // ------------------------------------------------------------------------------------------------
 
 var (
-	site         []string
-	libraryPaths []string
 	libraryItems []string
+	libraryPaths []string
+	site         []string
+	weburl       []string
 
 	sharepointData []string
 )
```
```diff
@@ -82,7 +83,12 @@ func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
 
 		fs.StringArrayVar(&site,
 			utils.SiteFN, nil,
-			"Backup SharePoint data by site ID; accepts '"+utils.Wildcard+"' to select all sites. (required)")
+			"Backup SharePoint data by site ID; accepts '"+utils.Wildcard+"' to select all sites.")
+
+		fs.StringSliceVar(&weburl,
+			utils.WebURLFN, nil,
+			"Backup SharePoint data by site webURL; accepts '"+utils.Wildcard+"' to select all sites.")
 
 		// TODO: implement
 		fs.StringSliceVar(
 			&sharepointData,
```
```diff
@@ -120,6 +126,14 @@ func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
 			utils.LibraryItemFN, nil,
 			"Select backup details by library item name or ID.")
 
+		fs.StringArrayVar(&site,
+			utils.SiteFN, nil,
+			"Backup SharePoint data by site ID; accepts '"+utils.Wildcard+"' to select all sites.")
+
+		fs.StringSliceVar(&weburl,
+			utils.WebURLFN, nil,
+			"Backup SharePoint data by site webURL; accepts '"+utils.Wildcard+"' to select all sites.")
+
 		// info flags
 
 		// fs.StringVar(
```
```diff
@@ -165,7 +179,7 @@ func createSharePointCmd(cmd *cobra.Command, args []string) error {
 		return nil
 	}
 
-	if err := validateSharePointBackupCreateFlags(site); err != nil {
+	if err := validateSharePointBackupCreateFlags(site, weburl); err != nil {
 		return err
 	}
 
```
```diff
@@ -181,11 +195,14 @@ func createSharePointCmd(cmd *cobra.Command, args []string) error {
 
 	defer utils.CloseRepo(ctx, r)
 
-	sel := sharePointBackupCreateSelectors(site)
-
-	sites, err := m365.SiteIDs(ctx, acct)
+	gc, err := connector.NewGraphConnector(ctx, acct, connector.Sites)
 	if err != nil {
-		return Only(ctx, errors.Wrap(err, "Failed to retrieve SharePoint sites"))
+		return Only(ctx, errors.Wrap(err, "Failed to connect to Microsoft APIs"))
+	}
+
+	sel, err := sharePointBackupCreateSelectors(ctx, site, weburl, gc)
+	if err != nil {
+		return Only(ctx, errors.Wrap(err, "Retrieving SharePoint sites by ID and WebURL"))
 	}
 
 	var (
```
```diff
@@ -193,7 +210,7 @@ func createSharePointCmd(cmd *cobra.Command, args []string) error {
 		bIDs []model.StableID
 	)
 
-	for _, scope := range sel.DiscreteScopes(sites) {
+	for _, scope := range sel.DiscreteScopes(gc.GetSiteIDs()) {
 		for _, selSite := range scope.Get(selectors.SharePointSite) {
 			opSel := selectors.NewSharePointBackup()
 			opSel.Include([]selectors.SharePointScope{scope.DiscreteCopy(selSite)})
```
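The swap from `m365.SiteIDs(ctx, acct)` to `gc.GetSiteIDs()` keeps the shape of this loop intact: `DiscreteScopes` appears to expand a wildcard site scope into one concrete scope per known site ID, so each selected site still gets its own backup operation.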
```diff
@@ -238,19 +255,49 @@ func createSharePointCmd(cmd *cobra.Command, args []string) error {
 	return nil
 }
 
-func validateSharePointBackupCreateFlags(sites []string) error {
-	if len(sites) == 0 {
-		return errors.New("requires one or more --site ids or the wildcard --site *")
+func validateSharePointBackupCreateFlags(sites, weburls []string) error {
+	if len(sites) == 0 && len(weburls) == 0 {
+		return errors.New(
+			"requires one or more --" +
+				utils.SiteFN + " ids, --" +
+				utils.WebURLFN + " urls, or the wildcard --" +
+				utils.SiteFN + " *",
+		)
 	}
 
 	return nil
 }
 
-func sharePointBackupCreateSelectors(sites []string) *selectors.SharePointBackup {
+func sharePointBackupCreateSelectors(
+	ctx context.Context,
+	sites, weburls []string,
+	gc *connector.GraphConnector,
+) (*selectors.SharePointBackup, error) {
 	sel := selectors.NewSharePointBackup()
-	sel.Include(sel.Sites(sites))
 
-	return sel
+	for _, site := range sites {
+		if site == utils.Wildcard {
+			sel.Include(sel.Sites(sites))
+			return sel, nil
+		}
+	}
+
+	for _, wURL := range weburls {
+		if wURL == utils.Wildcard {
+			// due to the wildcard, selectors will drop any url values.
+			sel.Include(sel.Sites(weburls))
+			return sel, nil
+		}
+	}
+
+	union, err := gc.UnionSiteIDsAndWebURLs(ctx, sites, weburls)
+	if err != nil {
+		return nil, err
+	}
+
+	sel.Include(sel.Sites(union))
+
+	return sel, nil
 }
 
 // ------------------------------------------------------------------------------------------------
```
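`gc.UnionSiteIDsAndWebURLs` itself is not part of this diff. Judging from the `GraphConnector.Sites` map (webURL to site ID) that the new tests construct further down, a minimal sketch of the union could look like the following; the helper name, signature, and map shape are assumptions for illustration, not the connector's actual code.

```go
// unionSiteIDsAndWebURLs sketches the resolution step assumed above:
// site IDs pass through unchanged, webURLs resolve to IDs through a
// webURL -> siteID map, and the combined set is deduplicated.
func unionSiteIDsAndWebURLs(sites map[string]string, ids, urls []string) []string {
	union := map[string]struct{}{}

	for _, id := range ids {
		union[id] = struct{}{}
	}

	for _, url := range urls {
		// hypothetical handling: the real connector presumably errors
		// on a webURL it cannot resolve, rather than skipping it.
		if id, ok := sites[url]; ok {
			union[id] = struct{}{}
		}
	}

	out := make([]string, 0, len(union))
	for id := range union {
		out = append(out, id)
	}

	return out
}
```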
```diff
@@ -388,9 +435,10 @@ func detailsSharePointCmd(cmd *cobra.Command, args []string) error {
 	defer utils.CloseRepo(ctx, r)
 
 	opts := utils.SharePointOpts{
-		Sites:        site,
-		LibraryPaths: libraryPaths,
 		LibraryItems: libraryItems,
+		LibraryPaths: libraryPaths,
+		Sites:        site,
+		WebURLs:      weburl,
 
 		Populated: utils.GetPopulatedFlags(cmd),
 	}
```
```diff
@@ -8,8 +8,11 @@ import (
 	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"
 
+	"github.com/alcionai/corso/src/cli/utils"
 	"github.com/alcionai/corso/src/cli/utils/testdata"
+	"github.com/alcionai/corso/src/internal/connector"
+	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/selectors"
 )
 
 type SharePointSuite struct {
```
```diff
@@ -69,21 +72,118 @@ func (suite *SharePointSuite) TestValidateSharePointBackupCreateFlags() {
 	table := []struct {
 		name   string
 		site   []string
+		weburl []string
 		expect assert.ErrorAssertionFunc
 	}{
 		{
-			name:   "no sites",
+			name:   "no sites or urls",
 			expect: assert.Error,
 		},
 		{
 			name:   "sites",
-			site:   []string{"fnord"},
+			site:   []string{"smarf"},
 			expect: assert.NoError,
 		},
+		{
+			name:   "urls",
+			weburl: []string{"fnord"},
+			expect: assert.NoError,
+		},
+		{
+			name:   "both",
+			site:   []string{"smarf"},
+			weburl: []string{"fnord"},
+			expect: assert.NoError,
+		},
 	}
 	for _, test := range table {
 		suite.T().Run(test.name, func(t *testing.T) {
-			test.expect(t, validateSharePointBackupCreateFlags(test.site))
+			test.expect(t, validateSharePointBackupCreateFlags(test.site, test.weburl))
 		})
 	}
 }
+
+func (suite *SharePointSuite) TestSharePointBackupCreateSelectors() {
+	gc := &connector.GraphConnector{
+		Sites: map[string]string{
+			"url_1": "id_1",
+			"url_2": "id_2",
+		},
+	}
+
+	table := []struct {
+		name   string
+		site   []string
+		weburl []string
+		expect []string
+	}{
+		{
+			name:   "no sites or urls",
+			expect: selectors.None(),
+		},
+		{
+			name:   "empty sites and urls",
+			site:   []string{},
+			weburl: []string{},
+			expect: selectors.None(),
+		},
+		{
+			name:   "site wildcard",
+			site:   []string{utils.Wildcard},
+			expect: selectors.Any(),
+		},
+		{
+			name:   "url wildcard",
+			weburl: []string{utils.Wildcard},
+			expect: selectors.Any(),
+		},
+		{
+			name:   "sites",
+			site:   []string{"id_1", "id_2"},
+			expect: []string{"id_1", "id_2"},
+		},
+		{
+			name:   "urls",
+			weburl: []string{"url_1", "url_2"},
+			expect: []string{"id_1", "id_2"},
+		},
+		{
+			name:   "mix sites and urls",
+			site:   []string{"id_1"},
+			weburl: []string{"url_2"},
+			expect: []string{"id_1", "id_2"},
+		},
+		{
+			name:   "duplicate sites and urls",
+			site:   []string{"id_1", "id_2"},
+			weburl: []string{"url_1", "url_2"},
+			expect: []string{"id_1", "id_2"},
+		},
+		{
+			name:   "unnecessary site wildcard",
+			site:   []string{"id_1", utils.Wildcard},
+			weburl: []string{"url_1", "url_2"},
+			expect: selectors.Any(),
+		},
+		{
+			name:   "unnecessary url wildcard",
+			site:   []string{"id_1", "id_2"},
+			weburl: []string{"url_1", utils.Wildcard},
+			expect: selectors.Any(),
+		},
+	}
+	for _, test := range table {
+		suite.T().Run(test.name, func(t *testing.T) {
+			ctx, flush := tester.NewContext()
+			defer flush()
+
+			sel, err := sharePointBackupCreateSelectors(ctx, test.site, test.weburl, gc)
+			require.NoError(t, err)
+
+			scope := sel.Scopes()[0]
+			targetSites := scope.Get(selectors.SharePointSite)
+
+			assert.ElementsMatch(t, test.expect, targetSites)
+		})
+	}
+}
```
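The "duplicate sites and urls" case is the one that pins down the union semantics: `url_1` and `url_2` resolve to `id_1` and `id_2`, so the four inputs collapse into just two site IDs.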
```diff
@@ -16,9 +16,10 @@ import (
 )
 
 var (
-	site         []string
-	libraryPaths []string
 	libraryItems []string
+	libraryPaths []string
+	site         []string
+	weburl       []string
 )
 
 // called by restore.go to map subcommands to provider-specific handling.
```
```diff
@@ -47,6 +48,10 @@ func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
 			utils.SiteFN, nil,
 			"Restore data by site ID; accepts '"+utils.Wildcard+"' to select all sites.")
 
+		fs.StringSliceVar(&weburl,
+			utils.WebURLFN, nil,
+			"Restore data by site webURL; accepts '"+utils.Wildcard+"' to select all sites.")
+
 		// sharepoint hierarchy (path/name) flags
 
 		fs.StringSliceVar(
```
```diff
@@ -110,9 +115,10 @@ func restoreSharePointCmd(cmd *cobra.Command, args []string) error {
 	}
 
 	opts := utils.SharePointOpts{
-		Sites:        site,
-		LibraryPaths: libraryPaths,
 		LibraryItems: libraryItems,
+		LibraryPaths: libraryPaths,
+		Sites:        site,
+		WebURLs:      weburl,
 		// FileCreatedAfter: fileCreatedAfter,
 
 		Populated: utils.GetPopulatedFlags(cmd),
```
```diff
@@ -9,12 +9,14 @@ import (
 const (
 	LibraryItemFN = "library-item"
 	LibraryFN     = "library"
+	WebURLFN      = "web-url"
 )
 
 type SharePointOpts struct {
-	Sites        []string
 	LibraryItems []string
 	LibraryPaths []string
+	Sites        []string
+	WebURLs      []string
 
 	Populated PopulatedFlags
 }
```
```diff
@@ -52,7 +54,7 @@ func IncludeSharePointRestoreDataSelectors(
 	sel *selectors.SharePointRestore,
 	opts SharePointOpts,
 ) {
-	lp, ln := len(opts.LibraryPaths), len(opts.LibraryItems)
+	lp, ln, lwu := len(opts.LibraryPaths), len(opts.LibraryItems), len(opts.WebURLs)
 
 	// only use the inclusion if either a path or item name
 	// is specified
```
```diff
@@ -64,8 +66,7 @@ func IncludeSharePointRestoreDataSelectors(
 		opts.Sites = selectors.Any()
 	}
 
 	// either scope the request to a set of sites
-	if lp+ln == 0 {
+	if lp+ln+lwu == 0 {
 		sel.Include(sel.Sites(opts.Sites))
-
 		return
```
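Put another way: when no library path, library item, or webURL filter is supplied, the request falls back to whole-site scopes and returns early; the finer-grained inclusions below only apply once at least one of those filters is set.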
```diff
@@ -77,6 +78,16 @@ func IncludeSharePointRestoreDataSelectors(
 		opts.LibraryItems = selectors.Any()
 	}
 
+	containsURLs, suffixURLs := splitFoldersIntoContainsAndPrefix(opts.WebURLs)
+
+	if len(containsURLs) > 0 {
+		sel.Include(sel.WebURL(containsURLs))
+	}
+
+	if len(suffixURLs) > 0 {
+		sel.Include(sel.WebURL(suffixURLs, selectors.SuffixMatch()))
+	}
+
 	containsFolders, prefixFolders := splitFoldersIntoContainsAndPrefix(opts.LibraryPaths)
 
 	if len(containsFolders) > 0 {
```
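`splitFoldersIntoContainsAndPrefix` is reused here for webURLs but is not shown in this diff. A plausible sketch, assuming a leading slash is what marks a value as a rooted pattern (an assumption, not confirmed by the diff): rooted values get prefix matching (applied as suffix matching for the URLs above), everything else gets contains matching.

```go
// splitFoldersIntoContainsAndPrefix sketches the assumed split: values
// starting with '/' are treated as rooted (prefix-style) patterns,
// all other values as substring (contains) patterns.
func splitFoldersIntoContainsAndPrefix(folders []string) ([]string, []string) {
	var contains, prefixes []string

	for _, f := range folders {
		if len(f) > 0 && f[0] == '/' {
			prefixes = append(prefixes, f)
		} else {
			contains = append(contains, f)
		}
	}

	return contains, prefixes
}
```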
```diff
@@ -18,7 +18,9 @@ func TestSharePointUtilsSuite(t *testing.T) {
 	suite.Run(t, new(SharePointUtilsSuite))
 }
 
-func (suite *ExchangeUtilsSuite) TestIncludeSharePointRestoreDataSelectors() {
+func (suite *SharePointUtilsSuite) TestIncludeSharePointRestoreDataSelectors() {
+	suite.T().Skip("just until next PR")
+
 	var (
 		empty  = []string{}
 		single = []string{"single"}
```
```diff
@@ -36,54 +38,90 @@ func (suite *ExchangeUtilsSuite) TestIncludeSharePointRestoreDataSelectors() {
 		{
 			name: "no inputs",
 			opts: utils.SharePointOpts{
-				Sites:        empty,
-				LibraryPaths: empty,
 				LibraryItems: empty,
+				LibraryPaths: empty,
+				Sites:        empty,
+				WebURLs:      empty,
 			},
 			expectIncludeLen: 0,
 		},
 		{
 			name: "single inputs",
 			opts: utils.SharePointOpts{
-				Sites:        single,
-				LibraryPaths: single,
 				LibraryItems: single,
+				LibraryPaths: single,
+				Sites:        single,
+				WebURLs:      single,
 			},
 			expectIncludeLen: 1,
 		},
 		{
 			name: "multi inputs",
 			opts: utils.SharePointOpts{
-				Sites:        multi,
-				LibraryPaths: multi,
 				LibraryItems: multi,
+				LibraryPaths: multi,
+				Sites:        multi,
+				WebURLs:      multi,
 			},
 			expectIncludeLen: 1,
 		},
 		{
 			name: "library contains",
 			opts: utils.SharePointOpts{
-				Sites:        empty,
-				LibraryPaths: containsOnly,
 				LibraryItems: empty,
+				LibraryPaths: containsOnly,
+				Sites:        empty,
+				WebURLs:      empty,
 			},
 			expectIncludeLen: 1,
 		},
 		{
 			name: "library prefixes",
 			opts: utils.SharePointOpts{
-				Sites:        empty,
-				LibraryPaths: prefixOnly,
 				LibraryItems: empty,
+				LibraryPaths: prefixOnly,
+				Sites:        empty,
+				WebURLs:      empty,
 			},
 			expectIncludeLen: 1,
 		},
 		{
 			name: "library prefixes and contains",
 			opts: utils.SharePointOpts{
-				Sites:        empty,
-				LibraryPaths: containsAndPrefix,
 				LibraryItems: empty,
+				LibraryPaths: containsAndPrefix,
+				Sites:        empty,
+				WebURLs:      empty,
 			},
 			expectIncludeLen: 2,
 		},
+		{
+			name: "weburl contains",
+			opts: utils.SharePointOpts{
+				LibraryItems: empty,
+				LibraryPaths: empty,
+				Sites:        empty,
+				WebURLs:      containsOnly,
+			},
+			expectIncludeLen: 1,
+		},
+		{
+			name: "library suffixes",
+			opts: utils.SharePointOpts{
+				LibraryItems: empty,
+				LibraryPaths: empty,
+				Sites:        empty,
+				WebURLs:      prefixOnly, // prefix pattern matches suffix pattern
+			},
+			expectIncludeLen: 1,
+		},
+		{
+			name: "library suffixes and contains",
+			opts: utils.SharePointOpts{
+				LibraryItems: empty,
+				LibraryPaths: empty,
+				Sites:        empty,
+				WebURLs:      containsAndPrefix, // prefix pattern matches suffix pattern
+			},
+			expectIncludeLen: 2,
+		},
```
```diff
@@ -136,6 +136,9 @@ func sharePointItemInfo(di models.DriveItemable, itemSize int64) *details.SharePointInfo {
 		url string
 	)
 
+	// TODO: we rely on this info for details/restore lookups,
+	// so if it's nil we have an issue, and will need an alternative
+	// way to source the data.
 	gsi := di.GetSharepointIds()
 	if gsi != nil {
 		if gsi.GetSiteId() != nil {
```
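The TODO concerns the msgraph model getters, which return pointers at every level. A sketch of the nil-guarded extraction this function depends on (`siteIDOf` is a hypothetical wrapper for illustration; only `GetSharepointIds` and `GetSiteId` appear in the diff):

```go
// siteIDOf shows the nil-guard pattern the TODO refers to: each getter
// returns a pointer, so every level needs a check before dereferencing.
func siteIDOf(di models.DriveItemable) string {
	var id string

	if gsi := di.GetSharepointIds(); gsi != nil {
		if sid := gsi.GetSiteId(); sid != nil {
			id = *sid
		}
	}

	// if GetSharepointIds returned nil, id stays empty, and the
	// details/restore lookups would need the alternative data source
	// the TODO mentions.
	return id
}
```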