add clues/fault to sharepoint collections (#2506)
## Does this PR need a docs update or release note?

- [x] ⛔ No

## Type of change

- [x] 🧹 Tech Debt/Cleanup

## Issue(s)

* #1970

## Test Plan

- [x] ⚡ Unit test
- [x] 💚 E2E
parent 7e3532832e
commit b1ff20d36c
```diff
@@ -101,7 +101,8 @@ func (gc *GraphConnector) DataCollections(
 		gc.credentials,
 		gc.Service,
 		gc,
-		ctrlOpts)
+		ctrlOpts,
+		errs)
 	if err != nil {
 		return nil, nil, err
 	}
```
```diff
@@ -258,7 +258,8 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestSharePointDataCollecti
 		connector.credentials,
 		connector.Service,
 		connector,
-		control.Options{})
+		control.Options{},
+		fault.New(true))
 	require.NoError(t, err)
 	// Not expecting excludes as this isn't an incremental backup.
 	assert.Empty(t, excludes)
```
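A note on the new parameter: the tests construct the error bus with `fault.New(true)` and hand it down through `DataCollections`. The sketch below is a minimal stand-in for `*fault.Errors` inferred purely from the call sites in this diff (`New`, `Add`, `Err`); it is not the real `pkg/fault` implementation.

```go
package main

import (
	"errors"
	"fmt"
)

// errBus is an illustrative stand-in for *fault.Errors as used in this PR.
// Field names and fail-fast behavior are inferred from the call sites
// (fault.New(true), errs.Add(...), errs.Err()), not copied from corso.
type errBus struct {
	failFast  bool
	failure   error
	recovered []error
}

func newBus(failFast bool) *errBus { return &errBus{failFast: failFast} }

// Add records a recoverable error. Under fail-fast, the first recoverable
// error is promoted to the hard failure that Err() reports.
func (b *errBus) Add(err error) {
	if err == nil {
		return
	}

	if b.failFast && b.failure == nil {
		b.failure = err
		return
	}

	b.recovered = append(b.recovered, err)
}

// Err reports the fatal error, if any; the loops in this diff poll it
// (if errs.Err() != nil { break }) to stop doing new work.
func (b *errBus) Err() error { return b.failure }

func main() {
	errs := newBus(true) // fail-fast, mirroring fault.New(true) in the tests
	errs.Add(errors.New("serializing list"))
	fmt.Println(errs.Err() != nil) // true: downstream loops would now break
}
```

This replaces the older pattern of threading an accumulated `error` plus a `FailFast` flag through every helper.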
```diff
@@ -3,14 +3,14 @@ package sharepoint
 import (
 	"bytes"
 	"context"
-	"fmt"
 	"io"
 	"time"
 
+	"github.com/alcionai/clues"
 	absser "github.com/microsoft/kiota-abstractions-go/serialization"
 	kw "github.com/microsoft/kiota-serialization-json-go"
-	"github.com/pkg/errors"
 
+	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/connector/discovery/api"
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	sapi "github.com/alcionai/corso/src/internal/connector/sharepoint/api"
```
```diff
@@ -114,7 +114,7 @@ func (sc *Collection) Items(
 	ctx context.Context,
+	errs *fault.Errors,
 ) <-chan data.Stream {
-	go sc.populate(ctx)
+	go sc.populate(ctx, errs)
 	return sc.data
 }
```
```diff
@@ -129,12 +129,10 @@ type Item struct {
 }
 
 func NewItem(name string, d io.ReadCloser) *Item {
-	item := &Item{
+	return &Item{
 		id:   name,
 		data: d,
 	}
-
-	return item
 }
 
 func (sd *Item) UUID() string {
```
```diff
@@ -157,7 +155,12 @@ func (sd *Item) ModTime() time.Time {
 	return sd.modTime
 }
 
-func (sc *Collection) finishPopulation(ctx context.Context, attempts, success int, totalBytes int64, errs error) {
+func (sc *Collection) finishPopulation(
+	ctx context.Context,
+	attempts, success int,
+	totalBytes int64,
+	err error,
+) {
 	close(sc.data)
 
 	attempted := attempts
```
```diff
@@ -170,7 +173,7 @@ func (sc *Collection) finishPopulation(ctx context.Context, attempts, success in
 			Successes:  success,
 			TotalBytes: totalBytes,
 		},
-		errs,
+		err,
 		sc.fullPath.Folder(false))
 	logger.Ctx(ctx).Debug(status.String())
 
```
```diff
@@ -180,16 +183,17 @@ func (sc *Collection) finishPopulation(ctx context.Context, attempts, success in
 }
 
 // populate utility function to retrieve data from back store for a given collection
-func (sc *Collection) populate(ctx context.Context) {
+func (sc *Collection) populate(ctx context.Context, errs *fault.Errors) {
 	var (
 		metrics numMetrics
-		errs    error
 		writer  = kw.NewJsonSerializationWriter()
+		err     error
 	)
 
 	defer func() {
-		sc.finishPopulation(ctx, metrics.attempts, metrics.success, int64(metrics.totalBytes), errs)
+		sc.finishPopulation(ctx, metrics.attempts, metrics.success, int64(metrics.totalBytes), err)
 	}()
 
 	// TODO: Insert correct ID for CollectionProgress
 	colProgress, closer := observe.CollectionProgress(
 		ctx,
```
```diff
@@ -205,9 +209,9 @@ func (sc *Collection) populate(ctx context.Context) {
 	// Switch retrieval function based on category
 	switch sc.category {
 	case List:
-		metrics, errs = sc.retrieveLists(ctx, writer, colProgress)
+		metrics, err = sc.retrieveLists(ctx, writer, colProgress, errs)
 	case Pages:
-		metrics, errs = sc.retrievePages(ctx, writer, colProgress)
+		metrics, err = sc.retrievePages(ctx, writer, colProgress, errs)
 	}
 }
```
```diff
@@ -217,46 +221,44 @@ func (sc *Collection) retrieveLists(
 	ctx context.Context,
 	wtr *kw.JsonSerializationWriter,
 	progress chan<- struct{},
+	errs *fault.Errors,
 ) (numMetrics, error) {
-	var (
-		errs    error
-		metrics numMetrics
-	)
+	var metrics numMetrics
 
-	lists, err := loadSiteLists(ctx, sc.service, sc.fullPath.ResourceOwner(), sc.jobs)
+	lists, err := loadSiteLists(ctx, sc.service, sc.fullPath.ResourceOwner(), sc.jobs, errs)
 	if err != nil {
-		return metrics, errors.Wrap(err, sc.fullPath.ResourceOwner())
+		return metrics, err
 	}
 
 	metrics.attempts += len(lists)
 	// For each models.Listable, object is serialized and the metrics are collected.
 	// The progress is objected via the passed in channel.
 	for _, lst := range lists {
+		if errs.Err() != nil {
+			break
+		}
+
 		byteArray, err := serializeContent(wtr, lst)
 		if err != nil {
-			errs = support.WrapAndAppend(*lst.GetId(), err, errs)
-			if sc.ctrl.FailFast {
-				return metrics, errs
-			}
-
+			errs.Add(clues.Wrap(err, "serializing list").WithClues(ctx))
 			continue
 		}
 
-		arrayLength := int64(len(byteArray))
+		size := int64(len(byteArray))
 
-		if arrayLength > 0 {
+		if size > 0 {
 			t := time.Now()
 			if t1 := lst.GetLastModifiedDateTime(); t1 != nil {
 				t = *t1
 			}
 
-			metrics.totalBytes += arrayLength
+			metrics.totalBytes += size
 
 			metrics.success++
 			sc.data <- &Item{
 				id:      *lst.GetId(),
 				data:    io.NopCloser(bytes.NewReader(byteArray)),
-				info:    sharePointListInfo(lst, arrayLength),
+				info:    sharePointListInfo(lst, size),
 				modTime: t,
 			}
 
```
```diff
@@ -264,27 +266,25 @@ func (sc *Collection) retrieveLists(
 		}
 	}
 
-	return metrics, nil
+	return metrics, errs.Err()
 }
 
 func (sc *Collection) retrievePages(
 	ctx context.Context,
 	wtr *kw.JsonSerializationWriter,
 	progress chan<- struct{},
+	errs *fault.Errors,
 ) (numMetrics, error) {
-	var (
-		errs    error
-		metrics numMetrics
-	)
+	var metrics numMetrics
 
 	betaService := sc.betaService
 	if betaService == nil {
-		return metrics, fmt.Errorf("beta service not found in collection")
+		return metrics, clues.New("beta service required").WithClues(ctx)
 	}
 
 	pages, err := sapi.GetSitePages(ctx, betaService, sc.fullPath.ResourceOwner(), sc.jobs)
 	if err != nil {
-		return metrics, errors.Wrap(err, sc.fullPath.ResourceOwner())
+		return metrics, err
 	}
 
 	metrics.attempts = len(pages)
```
```diff
@@ -292,38 +292,33 @@ func (sc *Collection) retrievePages(
 	// Pageable objects are not supported in v1.0 of msgraph at this time.
 	// TODO: Verify Parsable interface supported with modified-Pageable
 	for _, pg := range pages {
+		if errs.Err() != nil {
+			break
+		}
+
 		byteArray, err := serializeContent(wtr, pg)
 		if err != nil {
-			errs = support.WrapAndAppend(*pg.GetId(), err, errs)
-			if sc.ctrl.FailFast {
-				return metrics, errs
-			}
-
+			errs.Add(clues.Wrap(err, "serializing page").WithClues(ctx))
 			continue
 		}
 
-		arrayLength := int64(len(byteArray))
+		size := int64(len(byteArray))
 
-		if arrayLength > 0 {
-			t := time.Now()
-			if t1 := pg.GetLastModifiedDateTime(); t1 != nil {
-				t = *t1
-			}
-
-			metrics.totalBytes += arrayLength
+		if size > 0 {
+			metrics.totalBytes += size
 			metrics.success++
 			sc.data <- &Item{
 				id:      *pg.GetId(),
 				data:    io.NopCloser(bytes.NewReader(byteArray)),
-				info:    sharePointPageInfo(pg, arrayLength),
-				modTime: t,
+				info:    sharePointPageInfo(pg, size),
+				modTime: ptr.OrNow(pg.GetLastModifiedDateTime()),
 			}
 
 			progress <- struct{}{}
 		}
 	}
 
-	return metrics, nil
+	return metrics, errs.Err()
 }
 
 func serializeContent(writer *kw.JsonSerializationWriter, obj absser.Parsable) ([]byte, error) {
```
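The `modTime: ptr.OrNow(pg.GetLastModifiedDateTime())` change above, and the `ptr.Val(entry.GetId())` swaps later in this diff, trade raw dereferences of Graph SDK pointers for nil-safe accessors. A hypothetical re-implementation of the two helpers shows why: a nil from the SDK becomes a zero value or `time.Now()` instead of a panic. This is illustrative, not the actual `internal/common/ptr` package.

```go
package main

import (
	"fmt"
	"time"
)

// orNow stands in for ptr.OrNow at the modTime call site: dereference a
// possibly-nil *time.Time, falling back to the current time.
func orNow(t *time.Time) time.Time {
	if t != nil {
		return *t
	}

	return time.Now()
}

// val stands in for ptr.Val on strings: nil yields the zero value.
func val(s *string) string {
	if s == nil {
		return ""
	}

	return *s
}

func main() {
	var lastModified *time.Time // e.g. pg.GetLastModifiedDateTime() returned nil
	fmt.Println(!orNow(lastModified).IsZero()) // true: safe fallback to now

	var id *string // e.g. entry.GetId() returned nil
	fmt.Println(val(id) == "") // true: zero value instead of a nil dereference
}
```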
```diff
@@ -331,12 +326,12 @@ func serializeContent(writer *kw.JsonSerializationWriter, obj absser.Parsable) (
 	err := writer.WriteObjectValue("", obj)
 	if err != nil {
-		return nil, err
+		return nil, clues.Wrap(err, "writing object").With(graph.ErrData(err)...)
 	}
 
 	byteArray, err := writer.GetSerializedContent()
 	if err != nil {
-		return nil, err
+		return nil, clues.Wrap(err, "getting content from writer").With(graph.ErrData(err)...)
 	}
 
 	return byteArray, nil
```
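`serializeContent` illustrates the new error shape: instead of returning the bare `err`, it is wrapped with a short message and structured key/values (`clues.Wrap(err, ...).With(graph.ErrData(err)...)`); elsewhere `WithClues(ctx)` also folds in values previously attached to the context. A toy model of that shape, assuming only what these call sites show:

```go
package main

import "fmt"

// annotated is a toy model of a clues-style error: a message chained onto a
// cause, plus structured key/value pairs carried alongside it.
type annotated struct {
	msg   string
	cause error
	kvs   []any
}

func (e *annotated) Error() string { return fmt.Sprintf("%s: %v %v", e.msg, e.cause, e.kvs) }
func (e *annotated) Unwrap() error { return e.cause }

// wrap plays the role of clues.Wrap(err, msg).With(kvs...). The real package
// can additionally harvest values stored on a context via WithClues(ctx).
func wrap(err error, msg string, kvs ...any) error {
	return &annotated{msg: msg, cause: err, kvs: kvs}
}

func main() {
	base := fmt.Errorf("409 conflict")
	err := wrap(base, "writing object", "site", "root", "category", "lists")
	fmt.Println(err)
	// Output: writing object: 409 conflict [site root category lists]
}
```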
```diff
@@ -6,6 +6,7 @@ import (
 	"github.com/pkg/errors"
 
+	"github.com/alcionai/clues"
 	"github.com/alcionai/corso/src/internal/connector/discovery/api"
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/connector/onedrive"
```
```diff
@@ -15,6 +16,7 @@ import (
 	"github.com/alcionai/corso/src/internal/observe"
 	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/control"
+	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/logger"
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/selectors"
```
```diff
@@ -34,6 +36,7 @@ func DataCollections(
 	serv graph.Servicer,
 	su statusUpdater,
 	ctrlOpts control.Options,
+	errs *fault.Errors,
 ) ([]data.BackupCollection, map[string]struct{}, error) {
 	b, err := selector.ToSharePointBackup()
 	if err != nil {
```
```diff
@@ -43,10 +46,13 @@ func DataCollections(
 	var (
 		site        = b.DiscreteOwner
 		collections = []data.BackupCollection{}
-		errs        error
 	)
 
 	for _, scope := range b.Scopes() {
+		if errs.Err() != nil {
+			break
+		}
+
 		foldersComplete, closer := observe.MessageWithCompletion(ctx, observe.Bulletf(
 			"%s - %s",
 			observe.Safe(scope.Category().PathType().String()),
```
```diff
@@ -64,9 +70,11 @@
 				creds.AzureTenantID,
 				site,
 				su,
-				ctrlOpts)
+				ctrlOpts,
+				errs)
 			if err != nil {
-				return nil, nil, support.WrapAndAppend(site, err, errs)
+				errs.Add(err)
+				continue
 			}
 
 		case path.LibrariesCategory:
```
```diff
@@ -80,8 +88,10 @@
 				su,
 				ctrlOpts)
 			if err != nil {
-				return nil, nil, support.WrapAndAppend(site, err, errs)
+				errs.Add(err)
+				continue
 			}
 
 		case path.PagesCategory:
 			spcs, err = collectPages(
 				ctx,
```
```diff
@@ -89,9 +99,11 @@
 				serv,
 				site,
 				su,
-				ctrlOpts)
+				ctrlOpts,
+				errs)
 			if err != nil {
-				return nil, nil, support.WrapAndAppend(site, err, errs)
+				errs.Add(err)
+				continue
 			}
 		}
 
```
```diff
@@ -99,7 +111,7 @@
 		foldersComplete <- struct{}{}
 	}
 
-	return collections, nil, errs
+	return collections, nil, errs.Err()
 }
 
 func collectLists(
```
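The control-flow change in `DataCollections` is the heart of the PR: a failing category used to abort the whole backup via `return nil, nil, support.WrapAndAppend(...)`; now it is recorded with `errs.Add(err)` and the loop continues to the next scope, while `errs.Err()` still short-circuits on a fatal failure. A sketch of that shape, with a stand-in accumulator in place of `*fault.Errors`:

```go
package main

import (
	"errors"
	"fmt"
)

// adder captures just the two *fault.Errors methods this loop relies on.
type adder interface {
	Add(error)
	Err() error
}

// bestEffort is a stand-in collector that never escalates to fatal.
type bestEffort struct{ errs []error }

func (b *bestEffort) Add(err error) { b.errs = append(b.errs, err) }
func (b *bestEffort) Err() error    { return nil }

// processScopes mirrors the new loop shape: a fatal error breaks out early,
// a per-scope failure is recorded and the remaining scopes still run.
func processScopes(scopes []string, errs adder) []string {
	out := []string{}

	for _, s := range scopes {
		if errs.Err() != nil {
			break // fatal: stop producing collections
		}

		if s == "bad" {
			errs.Add(errors.New("collecting " + s))
			continue // recoverable: skip this scope only
		}

		out = append(out, s)
	}

	return out
}

func main() {
	errs := &bestEffort{}
	fmt.Println(processScopes([]string{"lists", "bad", "pages"}, errs)) // [lists pages]
	fmt.Println(errs.errs)                                             // [collecting bad]
}
```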
```diff
@@ -108,17 +120,22 @@ func collectLists(
 	tenantID, siteID string,
 	updater statusUpdater,
 	ctrlOpts control.Options,
+	errs *fault.Errors,
 ) ([]data.BackupCollection, error) {
 	logger.Ctx(ctx).With("site", siteID).Debug("Creating SharePoint List Collections")
 
 	spcs := make([]data.BackupCollection, 0)
 
-	tuples, err := preFetchLists(ctx, serv, siteID)
+	lists, err := preFetchLists(ctx, serv, siteID)
 	if err != nil {
 		return nil, err
 	}
 
-	for _, tuple := range tuples {
+	for _, tuple := range lists {
+		if errs.Err() != nil {
+			break
+		}
+
 		dir, err := path.Builder{}.Append(tuple.name).
 			ToDataLayerSharePointPath(
 				tenantID,
```
```diff
@@ -126,7 +143,7 @@ func collectLists(
 				path.ListsCategory,
 				false)
 		if err != nil {
-			return nil, errors.Wrapf(err, "failed to create collection path for site: %s", siteID)
+			errs.Add(clues.Wrap(err, "creating list collection path").WithClues(ctx))
 		}
 
 		collection := NewCollection(dir, serv, List, updater.UpdateStatus, ctrlOpts)
```
```diff
@@ -135,7 +152,7 @@ func collectLists(
 		spcs = append(spcs, collection)
 	}
 
-	return spcs, nil
+	return spcs, errs.Err()
 }
 
 // collectLibraries constructs a onedrive Collections struct and Get()s
```
```diff
@@ -149,31 +166,29 @@ func collectLibraries(
 	updater statusUpdater,
 	ctrlOpts control.Options,
 ) ([]data.BackupCollection, map[string]struct{}, error) {
-	logger.Ctx(ctx).Debug("creating SharePoint Library collections")
-
 	var (
 		collections = []data.BackupCollection{}
-		errs        error
-		colls       = onedrive.NewCollections(
-			itemClient,
-			tenantID,
-			siteID,
-			onedrive.SharePointSource,
-			folderMatcher{scope},
-			serv,
-			updater.UpdateStatus,
-			ctrlOpts)
 	)
 
+	logger.Ctx(ctx).With("site", siteID).Debug("Creating SharePoint Library collections")
+
+	colls := onedrive.NewCollections(
+		itemClient,
+		tenantID,
+		siteID,
+		onedrive.SharePointSource,
+		folderMatcher{scope},
+		serv,
+		updater.UpdateStatus,
+		ctrlOpts)
+
 	// TODO(ashmrtn): Pass previous backup metadata when SharePoint supports delta
 	// token-based incrementals.
 	odcs, excludes, err := colls.Get(ctx, nil)
 	if err != nil {
-		return nil, nil, support.WrapAndAppend(siteID, err, errs)
+		return nil, nil, clues.Wrap(err, "getting library").WithClues(ctx).With(graph.ErrData(err)...)
 	}
 
-	return append(collections, odcs...), excludes, errs
+	return append(collections, odcs...), excludes, nil
 }
 
 // collectPages constructs a sharepoint Collections struct and Get()s the associated
```
```diff
@@ -185,8 +200,9 @@ func collectPages(
 	siteID string,
 	updater statusUpdater,
 	ctrlOpts control.Options,
+	errs *fault.Errors,
 ) ([]data.BackupCollection, error) {
-	logger.Ctx(ctx).With("site", siteID).Debug("Creating SharePoint Pages collections")
+	logger.Ctx(ctx).Debug("creating SharePoint Pages collections")
 
 	spcs := make([]data.BackupCollection, 0)
 
```
```diff
@@ -194,7 +210,7 @@
 	// Need to receive From DataCollection Call
 	adpt, err := graph.CreateAdapter(creds.AzureTenantID, creds.AzureClientID, creds.AzureClientSecret)
 	if err != nil {
-		return nil, errors.New("unable to create adapter w/ env credentials")
+		return nil, clues.Wrap(err, "creating azure client adapter")
 	}
 
 	betaService := api.NewBetaService(adpt)
```
```diff
@@ -205,6 +221,10 @@
 	}
 
 	for _, tuple := range tuples {
+		if errs.Err() != nil {
+			break
+		}
+
 		dir, err := path.Builder{}.Append(tuple.Name).
 			ToDataLayerSharePointPath(
 				creds.AzureTenantID,
```
```diff
@@ -212,7 +232,7 @@
 				path.PagesCategory,
 				false)
 		if err != nil {
-			return nil, errors.Wrapf(err, "failed to create collection path for site: %s", siteID)
+			errs.Add(clues.Wrap(err, "creating page collection path").WithClues(ctx))
 		}
 
 		collection := NewCollection(dir, serv, Pages, updater.UpdateStatus, ctrlOpts)
```
```diff
@@ -222,7 +242,7 @@
 		spcs = append(spcs, collection)
 	}
 
-	return spcs, nil
+	return spcs, errs.Err()
 }
 
 type folderMatcher struct {
```
```diff
@@ -12,6 +12,7 @@ import (
 	"github.com/alcionai/corso/src/internal/connector/onedrive"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/control"
+	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/selectors"
 )
 
```
```diff
@@ -177,7 +178,7 @@ func (suite *SharePointPagesSuite) TestCollectPages() {
 		siteID,
 		&MockGraphService{},
 		control.Defaults(),
-	)
+		fault.New(true))
 	assert.NoError(t, err)
 	assert.NotEmpty(t, col)
 }
```
```diff
@@ -2,15 +2,17 @@ package sharepoint
 
 import (
 	"context"
-	"fmt"
 	"sync"
 
+	"github.com/alcionai/clues"
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
 	mssite "github.com/microsoftgraph/msgraph-sdk-go/sites"
 	"github.com/pkg/errors"
 
+	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/connector/support"
+	"github.com/alcionai/corso/src/pkg/fault"
 )
 
 type listTuple struct {
```
```diff
@@ -39,23 +41,23 @@ func preFetchLists(
 		builder    = gs.Client().SitesById(siteID).Lists()
 		options    = preFetchListOptions()
 		listTuples = make([]listTuple, 0)
-		errs       error
 	)
 
 	for {
 		resp, err := builder.Get(ctx, options)
 		if err != nil {
-			return nil, support.WrapAndAppend(support.ConnectorStackErrorTrace(err), err, errs)
+			return nil, clues.Wrap(err, "getting lists").WithClues(ctx).With(graph.ErrData(err)...)
 		}
 
 		for _, entry := range resp.GetValue() {
-			temp := listTuple{id: *entry.GetId()}
+			var (
+				id   = ptr.Val(entry.GetId())
+				name = ptr.Val(entry.GetDisplayName())
+				temp = listTuple{id: id, name: name}
+			)
 
-			name := entry.GetDisplayName()
-			if name != nil {
-				temp.name = *name
-			} else {
-				temp.name = *entry.GetId()
+			if len(name) == 0 {
+				temp.name = id
 			}
 
 			listTuples = append(listTuples, temp)
```
```diff
@@ -65,7 +67,7 @@ func preFetchLists(
 			break
 		}
 
-		builder = mssite.NewItemListsRequestBuilder(*resp.GetOdataNextLink(), gs.Adapter())
+		builder = mssite.NewItemListsRequestBuilder(ptr.Val(resp.GetOdataNextLink()), gs.Adapter())
 	}
 
 	return listTuples, nil
```
```diff
@@ -90,30 +92,29 @@ func loadSiteLists(
 	gs graph.Servicer,
 	siteID string,
 	listIDs []string,
+	errs *fault.Errors,
 ) ([]models.Listable, error) {
 	var (
 		results     = make([]models.Listable, 0)
 		semaphoreCh = make(chan struct{}, fetchChannelSize)
-		errs        error
 		wg          sync.WaitGroup
 		m           sync.Mutex
 	)
 
 	defer close(semaphoreCh)
 
-	errUpdater := func(id string, err error) {
-		m.Lock()
-		errs = support.WrapAndAppend(id, err, errs)
-		m.Unlock()
-	}
-
 	updateLists := func(list models.Listable) {
 		m.Lock()
+		defer m.Unlock()
+
 		results = append(results, list)
-		m.Unlock()
 	}
 
 	for _, listID := range listIDs {
+		if errs.Err() != nil {
+			return nil, errs.Err()
+		}
+
 		semaphoreCh <- struct{}{}
 
 		wg.Add(1)
```
```diff
@@ -129,13 +130,13 @@ func loadSiteLists(
 
 			entry, err = gs.Client().SitesById(siteID).ListsById(id).Get(ctx, nil)
 			if err != nil {
-				errUpdater(id, support.ConnectorStackErrorTraceWrap(err, ""))
+				errs.Add(clues.Wrap(err, "getting site list").WithClues(ctx).With(graph.ErrData(err)...))
 				return
 			}
 
-			cols, cTypes, lItems, err := fetchListContents(ctx, gs, siteID, id)
+			cols, cTypes, lItems, err := fetchListContents(ctx, gs, siteID, id, errs)
 			if err != nil {
-				errUpdater(id, errors.Wrap(err, "unable to fetchRelationships during loadSiteLists"))
+				errs.Add(clues.Wrap(err, "getting list contents"))
 				return
 			}
 
```
```diff
@@ -148,11 +149,7 @@ func loadSiteLists(
 
 	wg.Wait()
 
-	if errs != nil {
-		return nil, errs
-	}
-
-	return results, nil
+	return results, errs.Err()
 }
 
 // fetchListContents utility function to retrieve associated M365 relationships
```
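`loadSiteLists` also loses its hand-rolled `errUpdater` closure and the mutex around the shared error, which only makes sense if `*fault.Errors` tolerates concurrent `Add`s; the `results` slice keeps a local mutex, now released with `defer`. A sketch of the resulting fan-out, with a stand-in collector and an assumed semaphore release inside the worker (that release is not visible in this hunk):

```go
package main

import (
	"fmt"
	"sync"
)

// collector is a concurrency-safe stand-in for *fault.Errors.
type collector struct {
	mu   sync.Mutex
	errs []error
}

func (c *collector) Add(err error) {
	c.mu.Lock()
	defer c.mu.Unlock()

	c.errs = append(c.errs, err)
}

// loadAll mirrors the loadSiteLists shape: a semaphore bounds concurrency
// (fetchChannelSize in the diff), workers append results under a deferred
// unlock, and all failures funnel into one shared collector.
func loadAll(ids []string, fetch func(string) (string, error)) ([]string, *collector) {
	var (
		results []string
		sem     = make(chan struct{}, 4)
		wg      sync.WaitGroup
		mu      sync.Mutex
		errs    = &collector{}
	)

	defer close(sem)

	for _, id := range ids {
		sem <- struct{}{}

		wg.Add(1)

		go func(id string) {
			defer wg.Done()
			defer func() { <-sem }() // assumed release; not shown in the hunk

			v, err := fetch(id)
			if err != nil {
				errs.Add(err) // no bespoke errUpdater/mutex needed
				return
			}

			mu.Lock()
			defer mu.Unlock() // defer-style unlock, as updateLists now does

			results = append(results, v)
		}(id)
	}

	wg.Wait()

	return results, errs
}

func main() {
	res, errs := loadAll([]string{"a", "b"}, func(id string) (string, error) {
		return "list-" + id, nil
	})
	fmt.Println(len(res), len(errs.errs)) // 2 0 (result order may vary)
}
```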
```diff
@@ -162,31 +159,26 @@ func fetchListContents(
 	ctx context.Context,
 	service graph.Servicer,
 	siteID, listID string,
+	errs *fault.Errors,
 ) (
 	[]models.ColumnDefinitionable,
 	[]models.ContentTypeable,
 	[]models.ListItemable,
 	error,
 ) {
-	var errs error
-
 	cols, err := fetchColumns(ctx, service, siteID, listID, "")
 	if err != nil {
-		errs = support.WrapAndAppend(siteID, err, errs)
+		return nil, nil, nil, err
 	}
 
-	cTypes, err := fetchContentTypes(ctx, service, siteID, listID)
+	cTypes, err := fetchContentTypes(ctx, service, siteID, listID, errs)
 	if err != nil {
-		errs = support.WrapAndAppend(siteID, err, errs)
+		return nil, nil, nil, err
 	}
 
-	lItems, err := fetchListItems(ctx, service, siteID, listID)
+	lItems, err := fetchListItems(ctx, service, siteID, listID, errs)
 	if err != nil {
-		errs = support.WrapAndAppend(siteID, err, errs)
-	}
-
-	if errs != nil {
-		return nil, nil, nil, errs
+		return nil, nil, nil, err
 	}
 
 	return cols, cTypes, lItems, nil
```
```diff
@@ -200,26 +192,35 @@ func fetchListItems(
 	ctx context.Context,
 	gs graph.Servicer,
 	siteID, listID string,
+	errs *fault.Errors,
 ) ([]models.ListItemable, error) {
 	var (
 		prefix  = gs.Client().SitesById(siteID).ListsById(listID)
 		builder = prefix.Items()
 		itms    = make([]models.ListItemable, 0)
-		errs    error
 	)
 
 	for {
+		if errs.Err() != nil {
+			break
+		}
+
 		resp, err := builder.Get(ctx, nil)
 		if err != nil {
 			return nil, errors.Wrap(err, support.ConnectorStackErrorTrace(err))
 		}
 
 		for _, itm := range resp.GetValue() {
+			if errs.Err() != nil {
+				break
+			}
+
 			newPrefix := prefix.ItemsById(*itm.GetId())
 
 			fields, err := newPrefix.Fields().Get(ctx, nil)
 			if err != nil {
-				errs = errors.Wrap(err, support.ConnectorStackErrorTrace(err))
+				errs.Add(clues.Wrap(err, "getting list fields").WithClues(ctx).With(graph.ErrData(err)...))
 				continue
 			}
 
 			itm.SetFields(fields)
```
```diff
@@ -234,11 +235,7 @@ func fetchListItems(
 		builder = mssite.NewItemListsItemItemsRequestBuilder(*resp.GetOdataNextLink(), gs.Adapter())
 	}
 
-	if errs != nil {
-		return nil, errors.Wrap(errs, "fetchListItem unsuccessful")
-	}
-
-	return itms, nil
+	return itms, errs.Err()
 }
 
 // fetchColumns utility function to return columns from a site.
```
```diff
@@ -258,7 +255,7 @@ func fetchColumns(
 	for {
 		resp, err := builder.Get(ctx, nil)
 		if err != nil {
-			return nil, support.WrapAndAppend(support.ConnectorStackErrorTrace(err), err, nil)
+			return nil, clues.Wrap(err, "getting list columns").WithClues(ctx).With(graph.ErrData(err)...)
 		}
 
 		cs = append(cs, resp.GetValue()...)
```
```diff
@@ -275,7 +272,7 @@ func fetchColumns(
 	for {
 		resp, err := builder.Get(ctx, nil)
 		if err != nil {
-			return nil, errors.Wrap(err, support.ConnectorStackErrorTrace(err))
+			return nil, clues.Wrap(err, "getting content columns").WithClues(ctx).With(graph.ErrData(err)...)
 		}
 
 		cs = append(cs, resp.GetValue()...)
```
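All the `fetch*` helpers in this file share one pagination idiom: `Get` a page, append `resp.GetValue()`, then follow `GetOdataNextLink()` by building a fresh request builder until the link comes back nil. A self-contained sketch of that loop; the types are illustrative stand-ins, not the msgraph-sdk-go API:

```go
package main

import "fmt"

// page mimics the shape of a Graph list response: a batch of values plus an
// optional next-link.
type page struct {
	values []string
	next   *string
}

// fetchAll mirrors the loop in fetchColumns and friends: get a page, append
// its values, follow the next-link until none remains.
func fetchAll(get func(url string) (page, error)) ([]string, error) {
	var (
		cs  = []string{}
		url = "" // first request goes through the builder's default URL
	)

	for {
		resp, err := get(url)
		if err != nil {
			// the diff wraps this as clues.Wrap(err, "getting list columns")...
			return nil, fmt.Errorf("getting list columns: %w", err)
		}

		cs = append(cs, resp.values...)

		if resp.next == nil {
			break
		}

		// the real code builds a new request builder from the link here
		url = *resp.next
	}

	return cs, nil
}

func main() {
	second := "page2"
	pages := map[string]page{
		"":      {values: []string{"Title"}, next: &second},
		"page2": {values: []string{"Owner"}},
	}

	cols, _ := fetchAll(func(u string) (page, error) { return pages[u], nil })
	fmt.Println(cols) // [Title Owner]
}
```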
```diff
@@ -301,33 +298,42 @@ func fetchContentTypes(
 	ctx context.Context,
 	gs graph.Servicer,
 	siteID, listID string,
+	errs *fault.Errors,
 ) ([]models.ContentTypeable, error) {
 	var (
 		cTypes  = make([]models.ContentTypeable, 0)
 		builder = gs.Client().SitesById(siteID).ListsById(listID).ContentTypes()
-		errs    error
 	)
 
 	for {
+		if errs.Err() != nil {
+			break
+		}
+
 		resp, err := builder.Get(ctx, nil)
 		if err != nil {
-			return nil, support.WrapAndAppend(support.ConnectorStackErrorTrace(err), err, errs)
+			return nil, errors.Wrap(err, support.ConnectorStackErrorTrace(err))
 		}
 
 		for _, cont := range resp.GetValue() {
-			id := *cont.GetId()
+			if errs.Err() != nil {
+				break
+			}
+
+			id := ptr.Val(cont.GetId())
 
 			links, err := fetchColumnLinks(ctx, gs, siteID, listID, id)
 			if err != nil {
-				errs = support.WrapAndAppend("unable to add column links to list", err, errs)
-				break
+				errs.Add(err)
+				continue
 			}
 
 			cont.SetColumnLinks(links)
 
 			cs, err := fetchColumns(ctx, gs, siteID, listID, id)
 			if err != nil {
-				errs = support.WrapAndAppend("unable to populate columns for contentType", err, errs)
+				errs.Add(err)
+				continue
 			}
 
 			cont.SetColumns(cs)
```
```diff
@@ -342,11 +348,7 @@ func fetchContentTypes(
 		builder = mssite.NewItemListsItemContentTypesRequestBuilder(*resp.GetOdataNextLink(), gs.Adapter())
 	}
 
-	if errs != nil {
-		return nil, errs
-	}
-
-	return cTypes, nil
+	return cTypes, errs.Err()
 }
 
 func fetchColumnLinks(
```
```diff
@@ -362,7 +364,7 @@ func fetchColumnLinks(
 	for {
 		resp, err := builder.Get(ctx, nil)
 		if err != nil {
-			return nil, errors.Wrap(err, support.ConnectorStackErrorTrace(err))
+			return nil, clues.Wrap(err, "getting column links").WithClues(ctx).With(graph.ErrData(err)...)
 		}
 
 		links = append(links, resp.GetValue()...)
```
```diff
@@ -388,11 +390,9 @@ func DeleteList(
 	siteID, listID string,
 ) error {
 	err := gs.Client().SitesById(siteID).ListsById(listID).Delete(ctx, nil)
-	errorMsg := fmt.Sprintf("failure deleting listID %s from site %s. Details: %s",
-		listID,
-		siteID,
-		support.ConnectorStackErrorTrace(err),
-	)
+	if err != nil {
+		return clues.Wrap(err, "deleting list").WithClues(ctx).With(graph.ErrData(err)...)
+	}
 
-	return errors.Wrap(err, errorMsg)
+	return nil
 }
```
```diff
@@ -9,6 +9,7 @@ import (
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/account"
+	"github.com/alcionai/corso/src/pkg/fault"
 )
 
 type SharePointSuite struct {
```
```diff
@@ -54,7 +55,7 @@ func (suite *SharePointSuite) TestLoadList() {
 	require.NoError(t, err)
 
 	job := []string{tuples[0].id}
-	lists, err := loadSiteLists(ctx, service, "root", job)
+	lists, err := loadSiteLists(ctx, service, "root", job, fault.New(true))
 	assert.NoError(t, err)
 	assert.Greater(t, len(lists), 0)
 	t.Logf("Length: %d\n", len(lists))
```