add clues/fault to sharepoint api (#2507)

## Does this PR need a docs update or release note?

- [x]  No 

## Type of change

- [x] 🧹 Tech Debt/Cleanup

## Issue(s)

* #1970

## Test Plan

- [x]  Unit test
- [x] 💚 E2E
This commit is contained in:
Keepers 2023-02-18 13:42:48 -07:00 committed by GitHub
parent b1ff20d36c
commit 5707036b7c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 56 additions and 145 deletions

View File

@ -1,6 +1,6 @@
package api
type Tuple struct {
// NameID pairs a human-readable display name with its unique identifier.
// It replaces the former Tuple type; callers that only have an ID may
// presumably store the ID in both fields — TODO confirm against FetchPages.
type NameID struct {
Name string
ID string
}

View File

@ -5,17 +5,20 @@ import (
"fmt"
"io"
"sync"
"time"
"github.com/pkg/errors"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr"
discover "github.com/alcionai/corso/src/internal/connector/discovery/api"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
"github.com/alcionai/corso/src/internal/connector/graph/betasdk/sites"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
D "github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault"
)
// GetSitePages retrieves a collection of Pages related to the given Site.
@ -25,30 +28,31 @@ func GetSitePages(
serv *discover.BetaService,
siteID string,
pages []string,
errs *fault.Errors,
) ([]models.SitePageable, error) {
var (
col = make([]models.SitePageable, 0)
semaphoreCh = make(chan struct{}, fetchChannelSize)
opts = retrieveSitePageOptions()
err, errs error
err error
wg sync.WaitGroup
m sync.Mutex
)
defer close(semaphoreCh)
errUpdater := func(id string, err error) {
m.Lock()
errs = support.WrapAndAppend(id, err, errs)
m.Unlock()
}
updatePages := func(page models.SitePageable) {
m.Lock()
defer m.Unlock()
col = append(col, page)
m.Unlock()
}
for _, entry := range pages {
if errs.Err() != nil {
break
}
semaphoreCh <- struct{}{}
wg.Add(1)
@ -61,47 +65,47 @@ func GetSitePages(
page, err = serv.Client().SitesById(siteID).PagesById(pageID).Get(ctx, opts)
if err != nil {
errUpdater(pageID, errors.Wrap(err, support.ConnectorStackErrorTrace(err)+" fetching page"))
} else {
updatePages(page)
errs.Add(clues.Wrap(err, "fetching page").WithClues(ctx).With(graph.ErrData(err)...))
return
}
updatePages(page)
}(entry)
}
wg.Wait()
if errs != nil {
return nil, errs
}
return col, nil
return col, errs.Err()
}
// FetchPages is a utility function that returns the name/ID pairs of a site's pages.
func FetchPages(ctx context.Context, bs *discover.BetaService, siteID string) ([]Tuple, error) {
func FetchPages(ctx context.Context, bs *discover.BetaService, siteID string) ([]NameID, error) {
var (
builder = bs.Client().SitesById(siteID).Pages()
opts = fetchPageOptions()
pageTuples = make([]Tuple, 0)
resp models.SitePageCollectionResponseable
err error
builder = bs.Client().SitesById(siteID).Pages()
opts = fetchPageOptions()
pages = make([]NameID, 0)
resp models.SitePageCollectionResponseable
err error
)
for {
resp, err = builder.Get(ctx, opts)
if err != nil {
return nil, support.ConnectorStackErrorTraceWrap(err, "failed fetching site page")
return nil, clues.Wrap(err, "fetching site page").WithClues(ctx).With(graph.ErrData(err)...)
}
for _, entry := range resp.GetValue() {
pid := *entry.GetId()
temp := Tuple{pid, pid}
var (
pid = *entry.GetId()
temp = NameID{pid, pid}
)
if entry.GetName() != nil {
temp.Name = *entry.GetName()
name, ok := ptr.ValOK(entry.GetName())
if ok {
temp.Name = name
}
pageTuples = append(pageTuples, temp)
pages = append(pages, temp)
}
if resp.GetOdataNextLink() == nil {
@ -111,7 +115,7 @@ func FetchPages(ctx context.Context, bs *discover.BetaService, siteID string) ([
builder = sites.NewItemPagesRequestBuilder(*resp.GetOdataNextLink(), bs.Client().Adapter())
}
return pageTuples, nil
return pages, nil
}
// fetchPageOptions is used to return minimal information relating to Site Pages
@ -136,7 +140,7 @@ func DeleteSitePage(
) error {
err := serv.Client().SitesById(siteID).PagesById(pageID).Delete(ctx, nil)
if err != nil {
return support.ConnectorStackErrorTraceWrap(err, "deleting page: "+pageID)
return clues.Wrap(err, "deleting page").WithClues(ctx).With(graph.ErrData(err)...)
}
return nil
@ -169,9 +173,11 @@ func RestoreSitePage(
pageName = pageID
)
ctx = clues.Add(ctx, "page_id", pageID)
byteArray, err := io.ReadAll(itemData.ToReader())
if err != nil {
return dii, errors.Wrap(err, "reading sharepoint page bytes from stream")
return dii, clues.Wrap(err, "reading sharepoint data").WithClues(ctx)
}
// Hydrate Page
@ -180,9 +186,9 @@ func RestoreSitePage(
return dii, errors.Wrapf(err, "creating Page object %s", pageID)
}
pageNamePtr := page.GetName()
if pageNamePtr != nil {
pageName = *pageNamePtr
name, ok := ptr.ValOK(page.GetName())
if ok {
pageName = name
}
newName := fmt.Sprintf("%s_%s", destName, pageName)
@ -194,19 +200,16 @@ func RestoreSitePage(
// See: https://learn.microsoft.com/en-us/graph/api/sitepage-create?view=graph-rest-beta
restoredPage, err := service.Client().SitesById(siteID).Pages().Post(ctx, page, nil)
if err != nil {
sendErr := support.ConnectorStackErrorTraceWrap(
err,
"creating page from ID: %s"+pageName+" API Error Details",
)
return dii, sendErr
return dii, clues.Wrap(err, "creating page").WithClues(ctx).With(graph.ErrData(err)...)
}
pageID = *restoredPage.GetId()
pageID = ptr.Val(restoredPage.GetId())
ctx = clues.Add(ctx, "restored_page_id", pageID)
// Publish page to make visible
// See https://learn.microsoft.com/en-us/graph/api/sitepage-publish?view=graph-rest-beta
if restoredPage.GetWebUrl() == nil {
return dii, fmt.Errorf("creating page %s incomplete. Field `webURL` not populated", pageID)
return dii, clues.New("webURL not populated during page creation").WithClues(ctx)
}
err = service.Client().
@ -215,10 +218,7 @@ func RestoreSitePage(
Publish().
Post(ctx, nil)
if err != nil {
return dii, support.ConnectorStackErrorTraceWrap(
err,
"publishing page ID: "+*restoredPage.GetId()+" API Error Details",
)
return dii, clues.Wrap(err, "publishing page").WithClues(ctx).With(graph.ErrData(err)...)
}
dii.SharePoint = PageInfo(restoredPage, int64(len(byteArray)))
@ -234,26 +234,12 @@ func RestoreSitePage(
// PageInfo extracts useful metadata into a struct for bookkeeping
func PageInfo(page models.SitePageable, size int64) *details.SharePointInfo {
var (
name, webURL string
created, modified time.Time
name = ptr.Val(page.GetTitle())
webURL = ptr.Val(page.GetWebUrl())
created = ptr.Val(page.GetCreatedDateTime())
modified = ptr.Val(page.GetLastModifiedDateTime())
)
if page.GetTitle() != nil {
name = *page.GetTitle()
}
if page.GetWebUrl() != nil {
webURL = *page.GetWebUrl()
}
if page.GetCreatedDateTime() != nil {
created = *page.GetCreatedDateTime()
}
if page.GetLastModifiedDateTime() != nil {
modified = *page.GetLastModifiedDateTime()
}
return &details.SharePointInfo{
ItemType: details.SharePointItem,
ItemName: name,

View File

@ -16,6 +16,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/sharepoint/api"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault"
)
type SharePointPageSuite struct {
@ -71,7 +72,7 @@ func (suite *SharePointPageSuite) TestGetSitePages() {
require.NotNil(t, tuples)
jobs := []string{tuples[0].ID}
pages, err := api.GetSitePages(ctx, suite.service, suite.siteID, jobs)
pages, err := api.GetSitePages(ctx, suite.service, suite.siteID, jobs, fault.New(true))
assert.NoError(t, err)
assert.NotEmpty(t, pages)
}

View File

@ -282,7 +282,7 @@ func (sc *Collection) retrievePages(
return metrics, clues.New("beta service required").WithClues(ctx)
}
pages, err := sapi.GetSitePages(ctx, betaService, sc.fullPath.ResourceOwner(), sc.jobs)
pages, err := sapi.GetSitePages(ctx, betaService, sc.fullPath.ResourceOwner(), sc.jobs, errs)
if err != nil {
return metrics, err
}
@ -310,7 +310,7 @@ func (sc *Collection) retrievePages(
sc.data <- &Item{
id: *pg.GetId(),
data: io.NopCloser(bytes.NewReader(byteArray)),
info: sharePointPageInfo(pg, size),
info: sapi.PageInfo(pg, size),
modTime: ptr.OrNow(pg.GetLastModifiedDateTime()),
}

View File

@ -1,28 +0,0 @@
package sharepoint
import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
"github.com/alcionai/corso/src/pkg/backup/details"
)
// sharePointPageInfo propagates metadata from the SharePoint Page data type
// into searchable content.
// Page Details: https://learn.microsoft.com/en-us/graph/api/resources/sitepage?view=graph-rest-beta
func sharePointPageInfo(page models.SitePageable, size int64) *details.SharePointInfo {
// Extract the indexable page fields. ptr.Val presumably maps a nil
// pointer to the type's zero value ("" / time.Time{}), so absent
// fields are safe — TODO confirm against internal/common/ptr.
var (
name = ptr.Val(page.GetTitle())
webURL = ptr.Val(page.GetWebUrl())
created = ptr.Val(page.GetCreatedDateTime())
modified = ptr.Val(page.GetLastModifiedDateTime())
)
// Package the values into the shared details struct consumed by
// backup/details bookkeeping; size is passed through unmodified.
return &details.SharePointInfo{
ItemType: details.SharePointItem,
ItemName: name,
Created: created,
Modified: modified,
WebURL: webURL,
Size: size,
}
}

View File

@ -1,48 +0,0 @@
package sharepoint
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
"github.com/alcionai/corso/src/pkg/backup/details"
)
// TestSharePointInfo_Pages verifies that sharePointPageInfo copies the
// SitePage title into the details struct and tolerates an empty page.
func (suite *SharePointInfoSuite) TestSharePointInfo_Pages() {
// Table of input-builder functions paired with the expected details.
tests := []struct {
name string
pageAndDeets func() (models.SitePageable, *details.SharePointInfo)
}{
{
// A freshly constructed page has no title/URL; only ItemType
// should be populated in the result.
name: "Empty Page",
pageAndDeets: func() (models.SitePageable, *details.SharePointInfo) {
deets := &details.SharePointInfo{ItemType: details.SharePointItem}
return models.NewSitePage(), deets
},
},
{
// A page with only a title set should surface it as ItemName.
name: "Only Name",
pageAndDeets: func() (models.SitePageable, *details.SharePointInfo) {
title := "Blank Page"
sPage := models.NewSitePage()
sPage.SetTitle(&title)
deets := &details.SharePointInfo{
ItemType: details.SharePointItem,
ItemName: title,
}
return sPage, deets
},
},
}
for _, test := range tests {
suite.T().Run(test.name, func(t *testing.T) {
paged, expected := test.pageAndDeets()
// Size 0: only type/name/URL are asserted here, not Size.
info := sharePointPageInfo(paged, 0)
assert.Equal(t, expected.ItemType, info.ItemType)
assert.Equal(t, expected.ItemName, info.ItemName)
assert.Equal(t, expected.WebURL, info.WebURL)
})
}
}

View File

@ -308,7 +308,7 @@ func RestorePageCollection(
service := discover.NewBetaService(adpt)
// Restore items from collection
items := dc.Items(ctx, nil) // TODO: fault.Errors instead of nil
items := dc.Items(ctx, errs)
for {
if errs.Err() != nil {