add clues/fault to sharepoint api (#2507)

## Does this PR need a docs update or release note?

- [x] No

## Type of change

- [x] 🧹 Tech Debt/Cleanup

## Issue(s)

* #1970

## Test Plan

- [x]  Unit test
- [x] 💚 E2E
This commit is contained in:
Keepers 2023-02-18 13:42:48 -07:00 committed by GitHub
parent b1ff20d36c
commit 5707036b7c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 56 additions and 145 deletions

View File

@ -1,6 +1,6 @@
package api package api
type Tuple struct { type NameID struct {
Name string Name string
ID string ID string
} }

View File

@ -5,17 +5,20 @@ import (
"fmt" "fmt"
"io" "io"
"sync" "sync"
"time"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr"
discover "github.com/alcionai/corso/src/internal/connector/discovery/api" discover "github.com/alcionai/corso/src/internal/connector/discovery/api"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/graph/betasdk/models" "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
"github.com/alcionai/corso/src/internal/connector/graph/betasdk/sites" "github.com/alcionai/corso/src/internal/connector/graph/betasdk/sites"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
D "github.com/alcionai/corso/src/internal/diagnostics" D "github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault"
) )
// GetSitePages retrieves a collection of Pages related to the given Site. // GetSitePages retrieves a collection of Pages related to the given Site.
@ -25,30 +28,31 @@ func GetSitePages(
serv *discover.BetaService, serv *discover.BetaService,
siteID string, siteID string,
pages []string, pages []string,
errs *fault.Errors,
) ([]models.SitePageable, error) { ) ([]models.SitePageable, error) {
var ( var (
col = make([]models.SitePageable, 0) col = make([]models.SitePageable, 0)
semaphoreCh = make(chan struct{}, fetchChannelSize) semaphoreCh = make(chan struct{}, fetchChannelSize)
opts = retrieveSitePageOptions() opts = retrieveSitePageOptions()
err, errs error err error
wg sync.WaitGroup wg sync.WaitGroup
m sync.Mutex m sync.Mutex
) )
defer close(semaphoreCh) defer close(semaphoreCh)
errUpdater := func(id string, err error) {
m.Lock()
errs = support.WrapAndAppend(id, err, errs)
m.Unlock()
}
updatePages := func(page models.SitePageable) { updatePages := func(page models.SitePageable) {
m.Lock() m.Lock()
defer m.Unlock()
col = append(col, page) col = append(col, page)
m.Unlock()
} }
for _, entry := range pages { for _, entry := range pages {
if errs.Err() != nil {
break
}
semaphoreCh <- struct{}{} semaphoreCh <- struct{}{}
wg.Add(1) wg.Add(1)
@ -61,47 +65,47 @@ func GetSitePages(
page, err = serv.Client().SitesById(siteID).PagesById(pageID).Get(ctx, opts) page, err = serv.Client().SitesById(siteID).PagesById(pageID).Get(ctx, opts)
if err != nil { if err != nil {
errUpdater(pageID, errors.Wrap(err, support.ConnectorStackErrorTrace(err)+" fetching page")) errs.Add(clues.Wrap(err, "fetching page").WithClues(ctx).With(graph.ErrData(err)...))
} else { return
updatePages(page)
} }
updatePages(page)
}(entry) }(entry)
} }
wg.Wait() wg.Wait()
if errs != nil { return col, errs.Err()
return nil, errs
}
return col, nil
} }
// fetchPages utility function to return the tuple of items // FetchPages is a utility function that returns the NameID pair for each site page
func FetchPages(ctx context.Context, bs *discover.BetaService, siteID string) ([]Tuple, error) { func FetchPages(ctx context.Context, bs *discover.BetaService, siteID string) ([]NameID, error) {
var ( var (
builder = bs.Client().SitesById(siteID).Pages() builder = bs.Client().SitesById(siteID).Pages()
opts = fetchPageOptions() opts = fetchPageOptions()
pageTuples = make([]Tuple, 0) pages = make([]NameID, 0)
resp models.SitePageCollectionResponseable resp models.SitePageCollectionResponseable
err error err error
) )
for { for {
resp, err = builder.Get(ctx, opts) resp, err = builder.Get(ctx, opts)
if err != nil { if err != nil {
return nil, support.ConnectorStackErrorTraceWrap(err, "failed fetching site page") return nil, clues.Wrap(err, "fetching site page").WithClues(ctx).With(graph.ErrData(err)...)
} }
for _, entry := range resp.GetValue() { for _, entry := range resp.GetValue() {
pid := *entry.GetId() var (
temp := Tuple{pid, pid} pid = *entry.GetId()
temp = NameID{pid, pid}
)
if entry.GetName() != nil { name, ok := ptr.ValOK(entry.GetName())
temp.Name = *entry.GetName() if ok {
temp.Name = name
} }
pageTuples = append(pageTuples, temp) pages = append(pages, temp)
} }
if resp.GetOdataNextLink() == nil { if resp.GetOdataNextLink() == nil {
@ -111,7 +115,7 @@ func FetchPages(ctx context.Context, bs *discover.BetaService, siteID string) ([
builder = sites.NewItemPagesRequestBuilder(*resp.GetOdataNextLink(), bs.Client().Adapter()) builder = sites.NewItemPagesRequestBuilder(*resp.GetOdataNextLink(), bs.Client().Adapter())
} }
return pageTuples, nil return pages, nil
} }
// fetchPageOptions is used to return minimal information relating to Site Pages // fetchPageOptions is used to return minimal information relating to Site Pages
@ -136,7 +140,7 @@ func DeleteSitePage(
) error { ) error {
err := serv.Client().SitesById(siteID).PagesById(pageID).Delete(ctx, nil) err := serv.Client().SitesById(siteID).PagesById(pageID).Delete(ctx, nil)
if err != nil { if err != nil {
return support.ConnectorStackErrorTraceWrap(err, "deleting page: "+pageID) return clues.Wrap(err, "deleting page").WithClues(ctx).With(graph.ErrData(err)...)
} }
return nil return nil
@ -169,9 +173,11 @@ func RestoreSitePage(
pageName = pageID pageName = pageID
) )
ctx = clues.Add(ctx, "page_id", pageID)
byteArray, err := io.ReadAll(itemData.ToReader()) byteArray, err := io.ReadAll(itemData.ToReader())
if err != nil { if err != nil {
return dii, errors.Wrap(err, "reading sharepoint page bytes from stream") return dii, clues.Wrap(err, "reading sharepoint data").WithClues(ctx)
} }
// Hydrate Page // Hydrate Page
@ -180,9 +186,9 @@ func RestoreSitePage(
return dii, errors.Wrapf(err, "creating Page object %s", pageID) return dii, errors.Wrapf(err, "creating Page object %s", pageID)
} }
pageNamePtr := page.GetName() name, ok := ptr.ValOK(page.GetName())
if pageNamePtr != nil { if ok {
pageName = *pageNamePtr pageName = name
} }
newName := fmt.Sprintf("%s_%s", destName, pageName) newName := fmt.Sprintf("%s_%s", destName, pageName)
@ -194,19 +200,16 @@ func RestoreSitePage(
// See: https://learn.microsoft.com/en-us/graph/api/sitepage-create?view=graph-rest-beta // See: https://learn.microsoft.com/en-us/graph/api/sitepage-create?view=graph-rest-beta
restoredPage, err := service.Client().SitesById(siteID).Pages().Post(ctx, page, nil) restoredPage, err := service.Client().SitesById(siteID).Pages().Post(ctx, page, nil)
if err != nil { if err != nil {
sendErr := support.ConnectorStackErrorTraceWrap( return dii, clues.Wrap(err, "creating page").WithClues(ctx).With(graph.ErrData(err)...)
err,
"creating page from ID: %s"+pageName+" API Error Details",
)
return dii, sendErr
} }
pageID = *restoredPage.GetId() pageID = ptr.Val(restoredPage.GetId())
ctx = clues.Add(ctx, "restored_page_id", pageID)
// Publish page to make visible // Publish page to make visible
// See https://learn.microsoft.com/en-us/graph/api/sitepage-publish?view=graph-rest-beta // See https://learn.microsoft.com/en-us/graph/api/sitepage-publish?view=graph-rest-beta
if restoredPage.GetWebUrl() == nil { if restoredPage.GetWebUrl() == nil {
return dii, fmt.Errorf("creating page %s incomplete. Field `webURL` not populated", pageID) return dii, clues.New("webURL not populated during page creation").WithClues(ctx)
} }
err = service.Client(). err = service.Client().
@ -215,10 +218,7 @@ func RestoreSitePage(
Publish(). Publish().
Post(ctx, nil) Post(ctx, nil)
if err != nil { if err != nil {
return dii, support.ConnectorStackErrorTraceWrap( return dii, clues.Wrap(err, "publishing page").WithClues(ctx).With(graph.ErrData(err)...)
err,
"publishing page ID: "+*restoredPage.GetId()+" API Error Details",
)
} }
dii.SharePoint = PageInfo(restoredPage, int64(len(byteArray))) dii.SharePoint = PageInfo(restoredPage, int64(len(byteArray)))
@ -234,26 +234,12 @@ func RestoreSitePage(
// PageInfo extracts useful metadata into struct for book keeping // PageInfo extracts useful metadata into struct for book keeping
func PageInfo(page models.SitePageable, size int64) *details.SharePointInfo { func PageInfo(page models.SitePageable, size int64) *details.SharePointInfo {
var ( var (
name, webURL string name = ptr.Val(page.GetTitle())
created, modified time.Time webURL = ptr.Val(page.GetWebUrl())
created = ptr.Val(page.GetCreatedDateTime())
modified = ptr.Val(page.GetLastModifiedDateTime())
) )
if page.GetTitle() != nil {
name = *page.GetTitle()
}
if page.GetWebUrl() != nil {
webURL = *page.GetWebUrl()
}
if page.GetCreatedDateTime() != nil {
created = *page.GetCreatedDateTime()
}
if page.GetLastModifiedDateTime() != nil {
modified = *page.GetLastModifiedDateTime()
}
return &details.SharePointInfo{ return &details.SharePointInfo{
ItemType: details.SharePointItem, ItemType: details.SharePointItem,
ItemName: name, ItemName: name,

View File

@ -16,6 +16,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/sharepoint/api" "github.com/alcionai/corso/src/internal/connector/sharepoint/api"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault"
) )
type SharePointPageSuite struct { type SharePointPageSuite struct {
@ -71,7 +72,7 @@ func (suite *SharePointPageSuite) TestGetSitePages() {
require.NotNil(t, tuples) require.NotNil(t, tuples)
jobs := []string{tuples[0].ID} jobs := []string{tuples[0].ID}
pages, err := api.GetSitePages(ctx, suite.service, suite.siteID, jobs) pages, err := api.GetSitePages(ctx, suite.service, suite.siteID, jobs, fault.New(true))
assert.NoError(t, err) assert.NoError(t, err)
assert.NotEmpty(t, pages) assert.NotEmpty(t, pages)
} }

View File

@ -282,7 +282,7 @@ func (sc *Collection) retrievePages(
return metrics, clues.New("beta service required").WithClues(ctx) return metrics, clues.New("beta service required").WithClues(ctx)
} }
pages, err := sapi.GetSitePages(ctx, betaService, sc.fullPath.ResourceOwner(), sc.jobs) pages, err := sapi.GetSitePages(ctx, betaService, sc.fullPath.ResourceOwner(), sc.jobs, errs)
if err != nil { if err != nil {
return metrics, err return metrics, err
} }
@ -310,7 +310,7 @@ func (sc *Collection) retrievePages(
sc.data <- &Item{ sc.data <- &Item{
id: *pg.GetId(), id: *pg.GetId(),
data: io.NopCloser(bytes.NewReader(byteArray)), data: io.NopCloser(bytes.NewReader(byteArray)),
info: sharePointPageInfo(pg, size), info: sapi.PageInfo(pg, size),
modTime: ptr.OrNow(pg.GetLastModifiedDateTime()), modTime: ptr.OrNow(pg.GetLastModifiedDateTime()),
} }

View File

@ -1,28 +0,0 @@
package sharepoint
import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
"github.com/alcionai/corso/src/pkg/backup/details"
)
// sharePointPageInfo propagates metadata from the SharePoint Page data type
// into searchable content.
// Page Details: https://learn.microsoft.com/en-us/graph/api/resources/sitepage?view=graph-rest-beta
func sharePointPageInfo(page models.SitePageable, size int64) *details.SharePointInfo {
var (
name = ptr.Val(page.GetTitle())
webURL = ptr.Val(page.GetWebUrl())
created = ptr.Val(page.GetCreatedDateTime())
modified = ptr.Val(page.GetLastModifiedDateTime())
)
return &details.SharePointInfo{
ItemType: details.SharePointItem,
ItemName: name,
Created: created,
Modified: modified,
WebURL: webURL,
Size: size,
}
}

View File

@ -1,48 +0,0 @@
package sharepoint
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
"github.com/alcionai/corso/src/pkg/backup/details"
)
func (suite *SharePointInfoSuite) TestSharePointInfo_Pages() {
tests := []struct {
name string
pageAndDeets func() (models.SitePageable, *details.SharePointInfo)
}{
{
name: "Empty Page",
pageAndDeets: func() (models.SitePageable, *details.SharePointInfo) {
deets := &details.SharePointInfo{ItemType: details.SharePointItem}
return models.NewSitePage(), deets
},
},
{
name: "Only Name",
pageAndDeets: func() (models.SitePageable, *details.SharePointInfo) {
title := "Blank Page"
sPage := models.NewSitePage()
sPage.SetTitle(&title)
deets := &details.SharePointInfo{
ItemType: details.SharePointItem,
ItemName: title,
}
return sPage, deets
},
},
}
for _, test := range tests {
suite.T().Run(test.name, func(t *testing.T) {
paged, expected := test.pageAndDeets()
info := sharePointPageInfo(paged, 0)
assert.Equal(t, expected.ItemType, info.ItemType)
assert.Equal(t, expected.ItemName, info.ItemName)
assert.Equal(t, expected.WebURL, info.WebURL)
})
}
}

View File

@ -308,7 +308,7 @@ func RestorePageCollection(
service := discover.NewBetaService(adpt) service := discover.NewBetaService(adpt)
// Restore items from collection // Restore items from collection
items := dc.Items(ctx, nil) // TODO: fault.Errors instead of nil items := dc.Items(ctx, errs)
for { for {
if errs.Err() != nil { if errs.Err() != nil {