Remove webURL suffix support from SharePoint (#3082)

We're dropping support for selecting SharePoint sites by webURL suffix. WebURLs passed via `--site` are now normalized (any scheme is stripped and re-added as `https://`, except for wildcard and path-only values) and matched with an exact-equals check by default, so `https://host/sites/foo` no longer matches `https://host/sites/foo/bar`. Suffix matching remains available inside the selectors package via `SuffixMatch()`, but the CLI no longer produces suffix scopes.
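
For a sense of the change at the selectors level, a minimal sketch (it mirrors the API in the diff below; the `NewSharePointRestore` constructor call is an assumption based on the package's tests):

```go
package main

import (
	"github.com/alcionai/corso/src/pkg/selectors"
)

func buildSelector() *selectors.SharePointRestore {
	// Assumed constructor; mirrors usage in the selectors package tests.
	sel := selectors.NewSharePointRestore(selectors.Any())

	// New default: exact match. https://host/sites/foo selects only itself,
	// not https://host/sites/foo/bar.
	sel.Include(sel.WebURL([]string{"https://host/sites/foo"}))

	// Suffix matching remains available, but only by explicit opt-in;
	// the CLI no longer produces suffix scopes.
	sel.Include(sel.WebURL([]string{"/sites/foo"}, selectors.SuffixMatch()))

	return sel
}
```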

---

#### Does this PR need a docs update or release note?

- [x] No

#### Type of change

- [x] 🧹 Tech Debt/Cleanup

#### Issue(s)

* #2825

#### Test Plan

- [x] Unit test
- [x] 💚 E2E
Keepers committed 2023-04-11 10:09:58 -06:00 (committed by GitHub)
parent e1e83e5bcd · commit 21cd7210cb
9 changed files with 232 additions and 27 deletions

View File

@@ -360,7 +360,7 @@ func runDetailsSharePointCmd(
 	ctx = clues.Add(ctx, "details_entries", len(d.Entries))
 
 	if !skipReduce {
-		sel := utils.IncludeSharePointRestoreDataSelectors(opts)
+		sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts)
 		utils.FilterSharePointRestoreInfoSelectors(sel, opts)
 		d = sel.Reduce(ctx, d, errs)
 	}

View File

@@ -106,7 +106,7 @@ func restoreSharePointCmd(cmd *cobra.Command, args []string) error {
 	dest := control.DefaultRestoreDestination(common.SimpleDateTimeOneDrive)
 	Infof(ctx, "Restoring to folder %s", dest.ContainerName)
 
-	sel := utils.IncludeSharePointRestoreDataSelectors(opts)
+	sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts)
 	utils.FilterSharePointRestoreInfoSelectors(sel, opts)
 
 	ro, err := r.NewRestore(ctx, utils.BackupIDFV, sel.Selector, dest)

View File

@@ -1,9 +1,14 @@
 package utils
 
 import (
+	"context"
+	"net/url"
+	"strings"
+
 	"github.com/alcionai/clues"
 	"github.com/spf13/cobra"
 
+	"github.com/alcionai/corso/src/pkg/logger"
 	"github.com/alcionai/corso/src/pkg/selectors"
 )
@@ -135,6 +140,15 @@ func ValidateSharePointRestoreFlags(backupID string, opts SharePointOpts) error
 		return clues.New("a backup ID is required")
 	}
 
+	// ensure url.Parse can parse all weburls provided by --site.
+	if _, ok := opts.Populated[SiteFN]; ok {
+		for _, wu := range opts.WebURL {
+			if _, err := url.Parse(wu); err != nil {
+				return clues.New("invalid site url: " + wu)
+			}
+		}
+	}
+
 	if _, ok := opts.Populated[FileCreatedAfterFN]; ok && !IsValidTimeFormat(opts.FileCreatedAfter) {
 		return clues.New("invalid time format for " + FileCreatedAfterFN)
 	}
@@ -170,7 +184,7 @@ func AddSharePointInfo(
 
 // IncludeSharePointRestoreDataSelectors builds the common data-selector
 // inclusions for SharePoint commands.
-func IncludeSharePointRestoreDataSelectors(opts SharePointOpts) *selectors.SharePointRestore {
+func IncludeSharePointRestoreDataSelectors(ctx context.Context, opts SharePointOpts) *selectors.SharePointRestore {
 	sites := opts.SiteID
 	lfp, lfn := len(opts.FolderPath), len(opts.FileName)
@@ -241,16 +255,29 @@ func IncludeSharePointRestoreDataSelectors(opts SharePointOpts) *selectors.SharePointRestore {
 	}
 
 	if lwu > 0 {
-		opts.WebURL = trimFolderSlash(opts.WebURL)
-		containsURLs, suffixURLs := splitFoldersIntoContainsAndPrefix(opts.WebURL)
+		urls := make([]string, 0, len(opts.WebURL))
 
-		if len(containsURLs) > 0 {
-			sel.Include(sel.WebURL(containsURLs))
-		}
+		for _, wu := range opts.WebURL {
+			// for normalization, ensure the site has a https:// prefix.
+			wu = strings.TrimPrefix(wu, "https://")
+			wu = strings.TrimPrefix(wu, "http://")
 
-		if len(suffixURLs) > 0 {
-			sel.Include(sel.WebURL(suffixURLs, selectors.SuffixMatch()))
-		}
+			// don't add a prefix to path-only values
+			if len(wu) > 0 && wu != "*" && !strings.HasPrefix(wu, "/") {
+				wu = "https://" + wu
+			}
+
+			u, err := url.Parse(wu)
+			if err != nil {
+				// shouldn't be possible to err, if we called validation first.
+				logger.Ctx(ctx).With("web_url", wu).Error("malformed web url")
+				continue
+			}
+
+			urls = append(urls, u.String())
+		}
+
+		sel.Include(sel.WebURL(urls))
 	}
 
 	return sel
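
The normalization above can also be read as a standalone rule. A minimal sketch (the `normalizeWebURL` helper is hypothetical, not part of this commit) that reproduces the input/output pairs asserted by the new unit test below:

```go
package main

import (
	"fmt"
	"net/url"
	"strings"
)

// normalizeWebURL strips any scheme, then re-adds https:// unless the
// value is empty, a wildcard, or path-only, matching the loop above.
func normalizeWebURL(wu string) (string, error) {
	wu = strings.TrimPrefix(wu, "https://")
	wu = strings.TrimPrefix(wu, "http://")

	if len(wu) > 0 && wu != "*" && !strings.HasPrefix(wu, "/") {
		wu = "https://" + wu
	}

	u, err := url.Parse(wu)
	if err != nil {
		return "", err
	}

	return u.String(), nil
}

func main() {
	for _, in := range []string{"www.corsobackup.io/path", "http://www.corsobackup.io", "/path", "*"} {
		out, _ := normalizeWebURL(in)
		fmt.Printf("%q -> %q\n", in, out) // e.g. "/path" -> "/path"
	}
}
```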

View File

@@ -7,7 +7,9 @@ import (
 	"github.com/stretchr/testify/suite"
 
 	"github.com/alcionai/corso/src/cli/utils"
+	"github.com/alcionai/corso/src/internal/common"
 	"github.com/alcionai/corso/src/internal/tester"
+	"github.com/alcionai/corso/src/pkg/selectors"
 )
 
 type SharePointUtilsSuite struct {
@@ -156,7 +158,7 @@ func (suite *SharePointUtilsSuite) TestIncludeSharePointRestoreDataSelectors() {
 				SiteID: empty,
 				WebURL: containsAndPrefix, // prefix pattern matches suffix pattern
 			},
-			expectIncludeLen: 6,
+			expectIncludeLen: 3,
 		},
 		{
 			name: "Page Folder",
@@ -183,8 +185,181 @@ func (suite *SharePointUtilsSuite) TestIncludeSharePointRestoreDataSelectors() {
 	}
 
 	for _, test := range table {
 		suite.Run(test.name, func() {
-			sel := utils.IncludeSharePointRestoreDataSelectors(test.opts)
+			ctx, flush := tester.NewContext()
+			defer flush()
+
+			sel := utils.IncludeSharePointRestoreDataSelectors(ctx, test.opts)
 			assert.Len(suite.T(), sel.Includes, test.expectIncludeLen)
 		})
 	}
 }
+
+func (suite *SharePointUtilsSuite) TestIncludeSharePointRestoreDataSelectors_normalizedWebURLs() {
+	table := []struct {
+		name   string
+		weburl string
+		expect []string
+	}{
+		{
+			name:   "blank",
+			weburl: "",
+			expect: []string{""},
+		},
+		{
+			name:   "wildcard",
+			weburl: "*",
+			expect: []string{"*"},
+		},
+		{
+			name:   "no scheme",
+			weburl: "www.corsobackup.io/path",
+			expect: []string{"https://www.corsobackup.io/path"},
+		},
+		{
+			name:   "no path",
+			weburl: "https://www.corsobackup.io",
+			expect: []string{"https://www.corsobackup.io"},
+		},
+		{
+			name:   "http",
+			weburl: "http://www.corsobackup.io/path",
+			expect: []string{"https://www.corsobackup.io/path"},
+		},
+		{
+			name:   "https",
+			weburl: "https://www.corsobackup.io/path",
+			expect: []string{"https://www.corsobackup.io/path"},
+		},
+		{
+			name:   "path only",
+			weburl: "/path",
+			expect: []string{"/path"},
+		},
+		{
+			name:   "host only",
+			weburl: "www.corsobackup.io",
+			expect: []string{"https://www.corsobackup.io"},
+		},
+	}
+	for _, test := range table {
+		suite.Run(test.name, func() {
+			ctx, flush := tester.NewContext()
+			defer flush()
+
+			var (
+				opts = utils.SharePointOpts{WebURL: []string{test.weburl}}
+				sel  = utils.IncludeSharePointRestoreDataSelectors(ctx, opts)
+			)
+
+			for _, scope := range sel.Scopes() {
+				if scope.InfoCategory() != selectors.SharePointWebURL {
+					continue
+				}
+
+				assert.ElementsMatch(suite.T(), test.expect, scope.Get(selectors.SharePointWebURL))
+			}
+		})
+	}
+}
+
+func (suite *SharePointUtilsSuite) TestValidateSharePointRestoreFlags() {
+	table := []struct {
+		name     string
+		backupID string
+		opts     utils.SharePointOpts
+		expect   assert.ErrorAssertionFunc
+	}{
+		{
+			name:     "no opts",
+			backupID: "id",
+			opts:     utils.SharePointOpts{},
+			expect:   assert.NoError,
+		},
+		{
+			name:     "all valid",
+			backupID: "id",
+			opts: utils.SharePointOpts{
+				WebURL:             []string{"www.corsobackup.io/sites/foo"},
+				FileCreatedAfter:   common.Now(),
+				FileCreatedBefore:  common.Now(),
+				FileModifiedAfter:  common.Now(),
+				FileModifiedBefore: common.Now(),
+				Populated: utils.PopulatedFlags{
+					utils.SiteFN:               {},
+					utils.FileCreatedAfterFN:   {},
+					utils.FileCreatedBeforeFN:  {},
+					utils.FileModifiedAfterFN:  {},
+					utils.FileModifiedBeforeFN: {},
+				},
+			},
+			expect: assert.NoError,
+		},
+		{
+			name:     "no backupID",
+			backupID: "",
+			opts:     utils.SharePointOpts{},
+			expect:   assert.Error,
+		},
+		{
+			name:     "invalid weburl",
+			backupID: "id",
+			opts: utils.SharePointOpts{
+				WebURL: []string{"slander://:vree.garbles/:"},
+				Populated: utils.PopulatedFlags{
+					utils.SiteFN: {},
+				},
+			},
+			expect: assert.Error,
+		},
+		{
+			name:     "invalid file created after",
+			backupID: "id",
+			opts: utils.SharePointOpts{
+				FileCreatedAfter: "1235",
+				Populated: utils.PopulatedFlags{
+					utils.FileCreatedAfterFN: {},
+				},
+			},
+			expect: assert.Error,
+		},
+		{
+			name:     "invalid file created before",
+			backupID: "id",
+			opts: utils.SharePointOpts{
+				FileCreatedBefore: "1235",
+				Populated: utils.PopulatedFlags{
+					utils.FileCreatedBeforeFN: {},
+				},
+			},
+			expect: assert.Error,
+		},
+		{
+			name:     "invalid file modified after",
+			backupID: "id",
+			opts: utils.SharePointOpts{
+				FileModifiedAfter: "1235",
+				Populated: utils.PopulatedFlags{
+					utils.FileModifiedAfterFN: {},
+				},
+			},
+			expect: assert.Error,
+		},
+		{
+			name:     "invalid file modified before",
+			backupID: "id",
+			opts: utils.SharePointOpts{
+				FileModifiedBefore: "1235",
+				Populated: utils.PopulatedFlags{
+					utils.FileModifiedBeforeFN: {},
+				},
+			},
+			expect: assert.Error,
+		},
+	}
+	for _, test := range table {
+		suite.Run(test.name, func() {
+			t := suite.T()
+
+			test.expect(t, utils.ValidateSharePointRestoreFlags(test.backupID, test.opts))
+		})
+	}
+}

View File

@@ -122,6 +122,7 @@ func (c Sites) GetByID(ctx context.Context, identifier string) (models.Siteable,
 	// ensure it has a prefix https://
 	if !strings.HasPrefix(identifier, "/") {
 		identifier = strings.TrimPrefix(identifier, "https://")
+		identifier = strings.TrimPrefix(identifier, "http://")
 		identifier = "https://" + identifier
 	}

View File

@@ -254,10 +254,6 @@ func (r resourceClient) getOwnerIDAndNameFrom(
 		err error
 	)
 
-	// if r.enum == Sites {
-	//   TODO: check all suffixes in nameToID
-	// }
-
 	id, name, err = r.getter.GetIDAndName(ctx, owner)
 	if err != nil {
 		if graph.IsErrUserNotFound(err) {

View File

@@ -537,6 +537,8 @@ func pathFilterFactory(opts ...option) sliceFilterFunc {
 		ff = filters.PathPrefix
 	case sc.useSuffixFilter:
 		ff = filters.PathSuffix
+	case sc.useEqualsFilter:
+		ff = filters.PathEquals
 	default:
 		ff = filters.PathContains
 	}

View File

@@ -189,29 +189,35 @@ func (s *sharePoint) Scopes() []SharePointScope {
 // Produces one or more SharePoint webURL scopes.
 // One scope is created per webURL entry.
+// Defaults to equals check, on the assumption we identify fully qualified
+// urls, and do not want to default to contains. ie: https://host/sites/foo
+// should not match https://host/sites/foo/bar.
 // If any slice contains selectors.Any, that slice is reduced to [selectors.Any]
 // If any slice contains selectors.None, that slice is reduced to [selectors.None]
 // If any slice is empty, it defaults to [selectors.None]
-func (s *SharePointRestore) WebURL(urlSuffixes []string, opts ...option) []SharePointScope {
-	scopes := []SharePointScope{}
+func (s *SharePointRestore) WebURL(urls []string, opts ...option) []SharePointScope {
+	var (
+		scopes = []SharePointScope{}
+		os     = append([]option{ExactMatch()}, opts...)
+	)
 
 	scopes = append(
 		scopes,
 		makeInfoScope[SharePointScope](
 			SharePointLibraryItem,
 			SharePointWebURL,
-			urlSuffixes,
-			pathFilterFactory(opts...)),
+			urls,
+			pathFilterFactory(os...)),
 		makeInfoScope[SharePointScope](
 			SharePointListItem,
 			SharePointWebURL,
-			urlSuffixes,
-			pathFilterFactory(opts...)),
+			urls,
+			pathFilterFactory(os...)),
 		makeInfoScope[SharePointScope](
 			SharePointPage,
 			SharePointWebURL,
-			urlSuffixes,
-			pathFilterFactory(opts...)),
+			urls,
+			pathFilterFactory(os...)),
 	)
 
 	return scopes
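
To spell out the equals-by-default reasoning from the doc comment above, a short worked example (the values are hypothetical, and this fragment reads as code inside the selectors package, like the tests that follow; the real assertions live in the selector test diff below):

```go
// Default (equals): a fully qualified scope matches only itself.
scopes := sel.WebURL([]string{"https://host/sites/foo"})
// matches:        https://host/sites/foo
// does NOT match: https://host/sites/foo/bar (previously matched via contains)

// Partial urls still match, but only through explicit opt-in:
scopes = sel.WebURL([]string{"/sites/foo"}, SuffixMatch())
// matches: https://host/sites/foo, https://www.example.com/sites/foo
```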

View File

@@ -411,13 +411,11 @@ func (suite *SharePointSelectorSuite) TestSharePointScope_MatchesInfo() {
 	}{
 		{"host match", host, sel.WebURL([]string{host}), assert.True},
 		{"url match", url, sel.WebURL([]string{url}), assert.True},
-		{"url contains host", url, sel.WebURL([]string{host}), assert.True},
 		{"host suffixes host", host, sel.WebURL([]string{host}, SuffixMatch()), assert.True},
 		{"url does not suffix host", url, sel.WebURL([]string{host}, SuffixMatch()), assert.False},
-		{"url contains path", url, sel.WebURL([]string{pth}), assert.True},
 		{"url has path suffix", url, sel.WebURL([]string{pth}, SuffixMatch()), assert.True},
 		{"host does not contain substring", host, sel.WebURL([]string{"website"}), assert.False},
-		{"url does not suffix substring", url, sel.WebURL([]string{"oo"}), assert.False},
+		{"url does not suffix substring", url, sel.WebURL([]string{"oo"}, SuffixMatch()), assert.False},
 		{"host mismatch", host, sel.WebURL([]string{"www.google.com"}), assert.False},
 		{"file create after the epoch", host, sel.CreatedAfter(common.FormatTime(epoch)), assert.True},
 		{"file create after now", host, sel.CreatedAfter(common.FormatTime(now)), assert.False},