remove weburl suffix support from sharepoint (#3082)
We're dropping support for selection-by-url-suffix in sharepoint.

---

#### Does this PR need a docs update or release note?

- [x] ⛔ No

#### Type of change

- [x] 🧹 Tech Debt/Cleanup

#### Issue(s)

* #2825

#### Test Plan

- [x] ⚡ Unit test
- [x] 💚 E2E
This commit is contained in: parent e1e83e5bcd, commit 21cd7210cb
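To make the selection change concrete: previously, a `--site` value could select any site whose weburl merely ended with the given value; after this change, inputs are normalized into full urls and matched exactly. A rough standalone sketch of the two behaviors (plain Go, not corso code; the urls are made up):

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	weburl := "https://host/sites/foo/bar"

	// old behavior: suffix matching selects this site for the input "foo/bar"
	fmt.Println(strings.HasSuffix(weburl, "foo/bar")) // true

	// new behavior: the input is normalized to a full url and compared for
	// equality, so only the exact weburl selects the site
	fmt.Println(weburl == "https://host/sites/foo")     // false
	fmt.Println(weburl == "https://host/sites/foo/bar") // true
}
```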
@@ -360,7 +360,7 @@ func runDetailsSharePointCmd(
 	ctx = clues.Add(ctx, "details_entries", len(d.Entries))

 	if !skipReduce {
-		sel := utils.IncludeSharePointRestoreDataSelectors(opts)
+		sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts)
 		utils.FilterSharePointRestoreInfoSelectors(sel, opts)
 		d = sel.Reduce(ctx, d, errs)
 	}
@@ -106,7 +106,7 @@ func restoreSharePointCmd(cmd *cobra.Command, args []string) error {
 	dest := control.DefaultRestoreDestination(common.SimpleDateTimeOneDrive)
 	Infof(ctx, "Restoring to folder %s", dest.ContainerName)

-	sel := utils.IncludeSharePointRestoreDataSelectors(opts)
+	sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts)
 	utils.FilterSharePointRestoreInfoSelectors(sel, opts)

 	ro, err := r.NewRestore(ctx, utils.BackupIDFV, sel.Selector, dest)
@@ -1,9 +1,14 @@
 package utils

 import (
+	"context"
+	"net/url"
+	"strings"
+
 	"github.com/alcionai/clues"
 	"github.com/spf13/cobra"

+	"github.com/alcionai/corso/src/pkg/logger"
 	"github.com/alcionai/corso/src/pkg/selectors"
 )
@@ -135,6 +140,15 @@ func ValidateSharePointRestoreFlags(backupID string, opts SharePointOpts) error
 		return clues.New("a backup ID is required")
 	}

+	// ensure url can parse all weburls provided by --site.
+	if _, ok := opts.Populated[SiteFN]; ok {
+		for _, wu := range opts.WebURL {
+			if _, err := url.Parse(wu); err != nil {
+				return clues.New("invalid site url: " + wu)
+			}
+		}
+	}
+
 	if _, ok := opts.Populated[FileCreatedAfterFN]; ok && !IsValidTimeFormat(opts.FileCreatedAfter) {
 		return clues.New("invalid time format for " + FileCreatedAfterFN)
 	}
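Worth noting about the validation above: `url.Parse` is lenient and rejects very little, so this check only catches grossly malformed values such as bad ports or control characters. A quick standalone probe of what actually fails (stdlib only; the inputs are made-up examples):

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	for _, wu := range []string{
		"www.corsobackup.io/sites/foo", // fine: parses as a path-only url
		"https://host:badport/x",       // fails: non-numeric port
		"bad\ncontrol",                 // fails: control character in url
	} {
		if _, err := url.Parse(wu); err != nil {
			fmt.Printf("invalid site url: %q (%v)\n", wu, err)
			continue
		}

		fmt.Printf("ok: %q\n", wu)
	}
}
```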
@@ -170,7 +184,7 @@ func AddSharePointInfo(

 // IncludeSharePointRestoreDataSelectors builds the common data-selector
 // inclusions for SharePoint commands.
-func IncludeSharePointRestoreDataSelectors(opts SharePointOpts) *selectors.SharePointRestore {
+func IncludeSharePointRestoreDataSelectors(ctx context.Context, opts SharePointOpts) *selectors.SharePointRestore {
 	sites := opts.SiteID

 	lfp, lfn := len(opts.FolderPath), len(opts.FileName)
@@ -241,16 +255,29 @@ func IncludeSharePointRestoreDataSelectors(opts SharePointOpts) *selectors.Share
 	}

 	if lwu > 0 {
 		opts.WebURL = trimFolderSlash(opts.WebURL)
-		containsURLs, suffixURLs := splitFoldersIntoContainsAndPrefix(opts.WebURL)
+		urls := make([]string, 0, len(opts.WebURL))

-		if len(containsURLs) > 0 {
-			sel.Include(sel.WebURL(containsURLs))
+		for _, wu := range opts.WebURL {
+			// for normalization, ensure the site has a https:// prefix.
+			wu = strings.TrimPrefix(wu, "https://")
+			wu = strings.TrimPrefix(wu, "http://")
+
+			// don't add a prefix to path-only values
+			if len(wu) > 0 && wu != "*" && !strings.HasPrefix(wu, "/") {
+				wu = "https://" + wu
+			}
+
+			u, err := url.Parse(wu)
+			if err != nil {
+				// shouldn't be possible to err, if we called validation first.
+				logger.Ctx(ctx).With("web_url", wu).Error("malformed web url")
+				continue
+			}
+
+			urls = append(urls, u.String())
 		}

-		if len(suffixURLs) > 0 {
-			sel.Include(sel.WebURL(suffixURLs, selectors.SuffixMatch()))
-		}
+		sel.Include(sel.WebURL(urls))
 	}

 	return sel
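A condensed, self-contained version of the normalization loop above (my own reduction for illustration, mirroring the hunk rather than calling corso):

```go
package main

import (
	"fmt"
	"net/url"
	"strings"
)

// normalizeWebURL mirrors the loop in the hunk above: strip any scheme,
// then re-add https:// unless the value is blank, a wildcard, or path-only.
func normalizeWebURL(wu string) (string, error) {
	wu = strings.TrimPrefix(wu, "https://")
	wu = strings.TrimPrefix(wu, "http://")

	if len(wu) > 0 && wu != "*" && !strings.HasPrefix(wu, "/") {
		wu = "https://" + wu
	}

	u, err := url.Parse(wu)
	if err != nil {
		return "", err
	}

	return u.String(), nil
}

func main() {
	for _, wu := range []string{"www.corsobackup.io/path", "http://www.corsobackup.io", "/path", "*", ""} {
		n, _ := normalizeWebURL(wu)
		fmt.Printf("%q -> %q\n", wu, n)
	}
}
```

Running this reproduces the expectations in the normalization unit test below: schemeless hosts gain `https://`, `http://` is upgraded, and path-only, wildcard, and blank values pass through untouched.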
@@ -7,7 +7,9 @@ import (
 	"github.com/stretchr/testify/suite"

 	"github.com/alcionai/corso/src/cli/utils"
+	"github.com/alcionai/corso/src/internal/common"
+	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/selectors"
 )

 type SharePointUtilsSuite struct {
@@ -156,7 +158,7 @@ func (suite *SharePointUtilsSuite) TestIncludeSharePointRestoreDataSelectors() {
 				SiteID: empty,
 				WebURL: containsAndPrefix, // prefix pattern matches suffix pattern
 			},
-			expectIncludeLen: 6,
+			expectIncludeLen: 3,
 		},
 		{
 			name: "Page Folder",
@@ -183,8 +185,181 @@ func (suite *SharePointUtilsSuite) TestIncludeSharePointRestoreDataSelectors() {
 	}
 	for _, test := range table {
 		suite.Run(test.name, func() {
-			sel := utils.IncludeSharePointRestoreDataSelectors(test.opts)
+			ctx, flush := tester.NewContext()
+			defer flush()
+
+			sel := utils.IncludeSharePointRestoreDataSelectors(ctx, test.opts)
 			assert.Len(suite.T(), sel.Includes, test.expectIncludeLen)
 		})
 	}
 }
+
+func (suite *SharePointUtilsSuite) TestIncludeSharePointRestoreDataSelectors_normalizedWebURLs() {
+	table := []struct {
+		name   string
+		weburl string
+		expect []string
+	}{
+		{
+			name:   "blank",
+			weburl: "",
+			expect: []string{""},
+		},
+		{
+			name:   "wildcard",
+			weburl: "*",
+			expect: []string{"*"},
+		},
+		{
+			name:   "no scheme",
+			weburl: "www.corsobackup.io/path",
+			expect: []string{"https://www.corsobackup.io/path"},
+		},
+		{
+			name:   "no path",
+			weburl: "https://www.corsobackup.io",
+			expect: []string{"https://www.corsobackup.io"},
+		},
+		{
+			name:   "http",
+			weburl: "http://www.corsobackup.io/path",
+			expect: []string{"https://www.corsobackup.io/path"},
+		},
+		{
+			name:   "https",
+			weburl: "https://www.corsobackup.io/path",
+			expect: []string{"https://www.corsobackup.io/path"},
+		},
+		{
+			name:   "path only",
+			weburl: "/path",
+			expect: []string{"/path"},
+		},
+		{
+			name:   "host only",
+			weburl: "www.corsobackup.io",
+			expect: []string{"https://www.corsobackup.io"},
+		},
+	}
+	for _, test := range table {
+		suite.Run(test.name, func() {
+			ctx, flush := tester.NewContext()
+			defer flush()
+
+			var (
+				opts = utils.SharePointOpts{WebURL: []string{test.weburl}}
+				sel  = utils.IncludeSharePointRestoreDataSelectors(ctx, opts)
+			)
+
+			for _, scope := range sel.Scopes() {
+				if scope.InfoCategory() != selectors.SharePointWebURL {
+					continue
+				}
+
+				assert.ElementsMatch(suite.T(), test.expect, scope.Get(selectors.SharePointWebURL))
+			}
+		})
+	}
+}
+
+func (suite *SharePointUtilsSuite) TestValidateSharePointRestoreFlags() {
+	table := []struct {
+		name     string
+		backupID string
+		opts     utils.SharePointOpts
+		expect   assert.ErrorAssertionFunc
+	}{
+		{
+			name:     "no opts",
+			backupID: "id",
+			opts:     utils.SharePointOpts{},
+			expect:   assert.NoError,
+		},
+		{
+			name:     "all valid",
+			backupID: "id",
+			opts: utils.SharePointOpts{
+				WebURL:             []string{"www.corsobackup.io/sites/foo"},
+				FileCreatedAfter:   common.Now(),
+				FileCreatedBefore:  common.Now(),
+				FileModifiedAfter:  common.Now(),
+				FileModifiedBefore: common.Now(),
+				Populated: utils.PopulatedFlags{
+					utils.SiteFN:               {},
+					utils.FileCreatedAfterFN:   {},
+					utils.FileCreatedBeforeFN:  {},
+					utils.FileModifiedAfterFN:  {},
+					utils.FileModifiedBeforeFN: {},
+				},
+			},
+			expect: assert.NoError,
+		},
+		{
+			name:     "no backupID",
+			backupID: "",
+			opts:     utils.SharePointOpts{},
+			expect:   assert.Error,
+		},
+		{
+			name:     "invalid weburl",
+			backupID: "id",
+			opts: utils.SharePointOpts{
+				WebURL: []string{"slander://:vree.garbles/:"},
+				Populated: utils.PopulatedFlags{
+					utils.SiteFN: {},
+				},
+			},
+			expect: assert.Error,
+		},
+		{
+			name:     "invalid file created after",
+			backupID: "id",
+			opts: utils.SharePointOpts{
+				FileCreatedAfter: "1235",
+				Populated: utils.PopulatedFlags{
+					utils.FileCreatedAfterFN: {},
+				},
+			},
+			expect: assert.Error,
+		},
+		{
+			name:     "invalid file created before",
+			backupID: "id",
+			opts: utils.SharePointOpts{
+				FileCreatedBefore: "1235",
+				Populated: utils.PopulatedFlags{
+					utils.FileCreatedBeforeFN: {},
+				},
+			},
+			expect: assert.Error,
+		},
+		{
+			name:     "invalid file modified after",
+			backupID: "id",
+			opts: utils.SharePointOpts{
+				FileModifiedAfter: "1235",
+				Populated: utils.PopulatedFlags{
+					utils.FileModifiedAfterFN: {},
+				},
+			},
+			expect: assert.Error,
+		},
+		{
+			name:     "invalid file modified before",
+			backupID: "id",
+			opts: utils.SharePointOpts{
+				FileModifiedBefore: "1235",
+				Populated: utils.PopulatedFlags{
+					utils.FileModifiedBeforeFN: {},
+				},
+			},
+			expect: assert.Error,
+		},
+	}
+	for _, test := range table {
+		suite.Run(test.name, func() {
+			t := suite.T()
+			test.expect(t, utils.ValidateSharePointRestoreFlags(test.backupID, test.opts))
+		})
+	}
+}
@@ -122,6 +122,7 @@ func (c Sites) GetByID(ctx context.Context, identifier string) (models.Siteable,
 	// ensure it has a prefix https://
 	if !strings.HasPrefix(identifier, "/") {
 		identifier = strings.TrimPrefix(identifier, "https://")
+		identifier = strings.TrimPrefix(identifier, "http://")
 		identifier = "https://" + identifier
 	}
@@ -254,10 +254,6 @@ func (r resourceClient) getOwnerIDAndNameFrom(
 		err  error
 	)

-	// if r.enum == Sites {
-	// TODO: check all suffixes in nameToID
-	// }
-
 	id, name, err = r.getter.GetIDAndName(ctx, owner)
 	if err != nil {
 		if graph.IsErrUserNotFound(err) {
@@ -537,6 +537,8 @@ func pathFilterFactory(opts ...option) sliceFilterFunc {
 		ff = filters.PathPrefix
 	case sc.useSuffixFilter:
 		ff = filters.PathSuffix
+	case sc.useEqualsFilter:
+		ff = filters.PathEquals
 	default:
 		ff = filters.PathContains
 	}
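pathFilterFactory maps scope options onto path filters, and the new equals case slots in ahead of the contains default. A minimal analogue of that dispatch, with illustrative names (the `scopeConfig` struct and matcher funcs here are stand-ins, not corso's actual types):

```go
package main

import (
	"fmt"
	"strings"
)

// scopeConfig is an illustrative stand-in for option-derived flags.
type scopeConfig struct {
	usePrefixFilter bool
	useSuffixFilter bool
	useEqualsFilter bool
}

// matcherFor mimics the factory's switch: each flag selects a different
// string comparison, falling through to contains by default.
func matcherFor(sc scopeConfig) func(s, substr string) bool {
	switch {
	case sc.usePrefixFilter:
		return strings.HasPrefix
	case sc.useSuffixFilter:
		return strings.HasSuffix
	case sc.useEqualsFilter:
		return func(s, substr string) bool { return s == substr }
	default:
		return strings.Contains
	}
}

func main() {
	eq := matcherFor(scopeConfig{useEqualsFilter: true})
	fmt.Println(eq("https://host/sites/foo", "https://host/sites/foo"))     // true
	fmt.Println(eq("https://host/sites/foo/bar", "https://host/sites/foo")) // false
}
```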
@@ -189,29 +189,35 @@ func (s *sharePoint) Scopes() []SharePointScope {

 // Produces one or more SharePoint webURL scopes.
 // One scope is created per webURL entry.
+// Defaults to equals check, on the assumption we identify fully qualified
+// urls, and do not want to default to contains. ie: https://host/sites/foo
+// should not match https://host/sites/foo/bar.
 // If any slice contains selectors.Any, that slice is reduced to [selectors.Any]
 // If any slice contains selectors.None, that slice is reduced to [selectors.None]
 // If any slice is empty, it defaults to [selectors.None]
-func (s *SharePointRestore) WebURL(urlSuffixes []string, opts ...option) []SharePointScope {
-	scopes := []SharePointScope{}
+func (s *SharePointRestore) WebURL(urls []string, opts ...option) []SharePointScope {
+	var (
+		scopes = []SharePointScope{}
+		os     = append([]option{ExactMatch()}, opts...)
+	)

 	scopes = append(
 		scopes,
 		makeInfoScope[SharePointScope](
 			SharePointLibraryItem,
 			SharePointWebURL,
-			urlSuffixes,
-			pathFilterFactory(opts...)),
+			urls,
+			pathFilterFactory(os...)),
 		makeInfoScope[SharePointScope](
 			SharePointListItem,
 			SharePointWebURL,
-			urlSuffixes,
-			pathFilterFactory(opts...)),
+			urls,
+			pathFilterFactory(os...)),
 		makeInfoScope[SharePointScope](
 			SharePointPage,
 			SharePointWebURL,
-			urlSuffixes,
-			pathFilterFactory(opts...)),
+			urls,
+			pathFilterFactory(os...)),
 	)

 	return scopes
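The defaults-first trick above (`os = append([]option{ExactMatch()}, opts...)`) works together with the factory's switch ordering: suffix is checked before equals, so a caller-supplied SuffixMatch still takes effect over the prepended default. A toy sketch of the pattern (all names invented for illustration):

```go
package main

import "fmt"

// config and option are illustrative stand-ins for corso's scope options.
type config struct{ suffix, equals bool }

type option func(*config)

func suffixMatch() option { return func(c *config) { c.suffix = true } }

func exactMatch() option { return func(c *config) { c.equals = true } }

func describe(opts ...option) string {
	// prepend the default, mirroring: os := append([]option{ExactMatch()}, opts...)
	os := append([]option{exactMatch()}, opts...)

	c := config{}
	for _, o := range os {
		o(&c)
	}

	// suffix is checked before equals, so a caller option wins over the default
	switch {
	case c.suffix:
		return "suffix"
	case c.equals:
		return "equals"
	default:
		return "contains"
	}
}

func main() {
	fmt.Println(describe())              // equals (the prepended default)
	fmt.Println(describe(suffixMatch())) // suffix (caller override)
}
```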
@@ -411,13 +411,11 @@ func (suite *SharePointSelectorSuite) TestSharePointScope_MatchesInfo() {
 	}{
 		{"host match", host, sel.WebURL([]string{host}), assert.True},
 		{"url match", url, sel.WebURL([]string{url}), assert.True},
-		{"url contains host", url, sel.WebURL([]string{host}), assert.True},
 		{"host suffixes host", host, sel.WebURL([]string{host}, SuffixMatch()), assert.True},
 		{"url does not suffix host", url, sel.WebURL([]string{host}, SuffixMatch()), assert.False},
-		{"url contains path", url, sel.WebURL([]string{pth}), assert.True},
 		{"url has path suffix", url, sel.WebURL([]string{pth}, SuffixMatch()), assert.True},
 		{"host does not contain substring", host, sel.WebURL([]string{"website"}), assert.False},
-		{"url does not suffix substring", url, sel.WebURL([]string{"oo"}), assert.False},
+		{"url does not suffix substring", url, sel.WebURL([]string{"oo"}, SuffixMatch()), assert.False},
 		{"host mismatch", host, sel.WebURL([]string{"www.google.com"}), assert.False},
 		{"file create after the epoch", host, sel.CreatedAfter(common.FormatTime(epoch)), assert.True},
 		{"file create after now", host, sel.CreatedAfter(common.FormatTime(now)), assert.False},