CLI: Backup: SharePoint: Pages Enable (#2213)

## Description
Adds the ability to select SharePoint Pages when creating a backup.
<!-- Insert PR description-->
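For context, a minimal usage sketch of the new selection (the `--site` and `--data` flag names are assumed from `utils.SiteFN` and `utils.DataFN`; they are not spelled out in this diff):

```sh
# back up only Pages for a site (illustrative site ID)
corso backup create sharepoint --site <siteID> --data pages

# back up libraries and pages together
corso backup create sharepoint --site <siteID> --data libraries,pages
```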

## Does this PR need a docs update or release note?
The Pages options on the SharePoint commands remain hidden for now.
- [x] 🕐 Yes, but in a later PR

## Type of change

<!--- Please check the type of change your PR introduces: --->
- [x] 🌻 Feature

## Issue(s)

<!-- Can reference multiple issues. Use one of the following "magic words" - "closes, fixes" to auto-close the Github issue. -->
* closes #2216
* closes #2107

## Test Plan

<!-- How will this be tested prior to merging.-->
- [x] Unit test
Danny 2023-01-21 00:50:23 -05:00 committed by GitHub
parent ea2d9ceceb
commit 6d615810cd
2 changed files with 57 additions and 13 deletions


@@ -27,9 +27,12 @@ import (
 // setup and globals
 // ------------------------------------------------------------------------------------------------
+// sharePoint bucket info from flags
 var (
 	libraryItems []string
 	libraryPaths []string
+	pageFolders []string
+	page []string
 	site []string
 	weburl []string
@@ -38,6 +41,7 @@ var (
 const (
 	dataLibraries = "libraries"
+	dataPages = "pages"
 )
 const (
@@ -89,11 +93,10 @@ func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
 			utils.WebURLFN, nil,
 			"Restore data by site webURL; accepts '"+utils.Wildcard+"' to select all sites.")
-		// TODO: implement
 		fs.StringSliceVar(
 			&sharepointData,
 			utils.DataFN, nil,
-			"Select one or more types of data to backup: "+dataLibraries+".")
+			"Select one or more types of data to backup: "+dataLibraries+" or "+dataPages+".")
 		options.AddOperationFlags(c)
 	case listCommand:
@@ -128,11 +131,22 @@ func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
 		fs.StringArrayVar(&site,
 			utils.SiteFN, nil,
-			"Backup SharePoint data by site ID; accepts '"+utils.Wildcard+"' to select all sites.")
+			"Select backup details by site ID; accepts '"+utils.Wildcard+"' to select all sites.")
 		fs.StringSliceVar(&weburl,
 			utils.WebURLFN, nil,
-			"Restore data by site webURL; accepts '"+utils.Wildcard+"' to select all sites.")
+			"Select backup data by site webURL; accepts '"+utils.Wildcard+"' to select all sites.")
+		fs.StringSliceVar(
+			&pageFolders,
+			utils.PageFN, nil,
+			"Select backup data by site ID; accepts '"+utils.Wildcard+"' to select all sites.")
+		fs.StringSliceVar(
+			&page,
+			utils.PageItemFN, nil,
+			"Select backup data by file name; accepts '"+utils.Wildcard+"' to select all pages within the site.",
+		)
 		// info flags
@@ -179,7 +193,7 @@ func createSharePointCmd(cmd *cobra.Command, args []string) error {
 		return nil
 	}
-	if err := validateSharePointBackupCreateFlags(site, weburl); err != nil {
+	if err := validateSharePointBackupCreateFlags(site, weburl, sharepointData); err != nil {
 		return err
 	}
@@ -200,7 +214,7 @@ func createSharePointCmd(cmd *cobra.Command, args []string) error {
 		return Only(ctx, errors.Wrap(err, "Failed to connect to Microsoft APIs"))
 	}
-	sel, err := sharePointBackupCreateSelectors(ctx, site, weburl, gc)
+	sel, err := sharePointBackupCreateSelectors(ctx, site, weburl, sharepointData, gc)
 	if err != nil {
 		return Only(ctx, errors.Wrap(err, "Retrieving up sharepoint sites by ID and WebURL"))
 	}
@@ -250,7 +264,7 @@ func createSharePointCmd(cmd *cobra.Command, args []string) error {
 	return nil
 }
-func validateSharePointBackupCreateFlags(sites, weburls []string) error {
+func validateSharePointBackupCreateFlags(sites, weburls, data []string) error {
 	if len(sites) == 0 && len(weburls) == 0 {
 		return errors.New(
 			"requires one or more --" +
@@ -260,13 +274,21 @@ func validateSharePointBackupCreateFlags(sites, weburls []string) error {
 		)
 	}
+	for _, d := range data {
+		if d != dataLibraries && d != dataPages {
+			return errors.New(
+				d + " is an unrecognized data type; either " + dataLibraries + "or " + dataPages,
+			)
+		}
+	}
 	return nil
 }
-// TODO: users might specify a data type, this only supports AllData().
 func sharePointBackupCreateSelectors(
 	ctx context.Context,
-	sites, weburls []string,
+	sites, weburls, data []string,
 	gc *connector.GraphConnector,
 ) (*selectors.SharePointBackup, error) {
 	if len(sites) == 0 && len(weburls) == 0 {
@@ -297,7 +319,20 @@ func sharePointBackupCreateSelectors(
 	}
 	sel := selectors.NewSharePointBackup(union)
-	sel.Include(sel.AllData())
+	if len(data) == 0 {
+		sel.Include(sel.AllData())
+		return sel, nil
+	}
+	for _, d := range data {
+		switch d {
+		case dataLibraries:
+			sel.Include(sel.Libraries(selectors.Any()))
+		case dataPages:
+			sel.Include(sel.Pages(selectors.Any()))
+		}
+	}
 	return sel, nil
 }

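With this change, an empty `--data` list still falls back to `AllData()`, while explicit values narrow the backup to the requested categories. A minimal standalone sketch of the selector produced for a pages-only request, assuming the usual Corso import path for the `selectors` package (only the package name is visible in the diff):

```go
package main

import (
	"fmt"

	// Assumed import path; the diff only references the package by name.
	"github.com/alcionai/corso/src/pkg/selectors"
)

func main() {
	// Mirrors sharePointBackupCreateSelectors for `--data pages`:
	// a single Pages scope spanning every page in the chosen sites,
	// instead of the previous unconditional AllData() include.
	sel := selectors.NewSharePointBackup([]string{"id_1", "id_2"})
	sel.Include(sel.Pages(selectors.Any()))

	fmt.Println(sel.DiscreteResourceOwners())
}
```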

@@ -98,12 +98,13 @@ func (suite *SharePointSuite) TestValidateSharePointBackupCreateFlags() {
 	}
 	for _, test := range table {
 		suite.T().Run(test.name, func(t *testing.T) {
-			test.expect(t, validateSharePointBackupCreateFlags(test.site, test.weburl))
+			test.expect(t, validateSharePointBackupCreateFlags(test.site, test.weburl, nil))
 		})
 	}
 }
 func (suite *SharePointSuite) TestSharePointBackupCreateSelectors() {
+	comboString := []string{"id_1", "id_2"}
 	gc := &connector.GraphConnector{
 		Sites: map[string]string{
 			"url_1": "id_1",
@@ -115,6 +116,7 @@ func (suite *SharePointSuite) TestSharePointBackupCreateSelectors() {
 		name            string
 		site            []string
 		weburl          []string
+		data            []string
 		expect          []string
 		expectScopesLen int
 	}{
@@ -163,7 +165,7 @@ func (suite *SharePointSuite) TestSharePointBackupCreateSelectors() {
 			name:            "duplicate sites and urls",
 			site:            []string{"id_1", "id_2"},
 			weburl:          []string{"url_1", "url_2"},
-			expect:          []string{"id_1", "id_2"},
+			expect:          comboString,
 			expectScopesLen: 2,
 		},
 		{
@@ -175,18 +177,25 @@ func (suite *SharePointSuite) TestSharePointBackupCreateSelectors() {
 		},
 		{
 			name:            "unnecessary url wildcard",
-			site:            []string{"id_1", "id_2"},
+			site:            comboString,
 			weburl:          []string{"url_1", utils.Wildcard},
 			expect:          selectors.Any(),
 			expectScopesLen: 2,
 		},
+		{
+			name:            "Pages",
+			site:            comboString,
+			data:            []string{dataPages},
+			expect:          comboString,
+			expectScopesLen: 1,
+		},
 	}
 	for _, test := range table {
 		suite.T().Run(test.name, func(t *testing.T) {
			ctx, flush := tester.NewContext()
			defer flush()
-			sel, err := sharePointBackupCreateSelectors(ctx, test.site, test.weburl, gc)
+			sel, err := sharePointBackupCreateSelectors(ctx, test.site, test.weburl, test.data, gc)
			require.NoError(t, err)
			assert.ElementsMatch(t, test.expect, sel.DiscreteResourceOwners())