# add fault/clues throughout exchange backup (#2382)
## Does this PR need a docs update or release note?

- [x] ⛔ No

## Type of change

- [x] 🧹 Tech Debt/Cleanup

## Issue(s)

* #1970

## Test Plan

- [x] ⚡ Unit test
- [x] 💚 E2E
parent 634076328c
commit 43e5ccdb5b
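Taken together, the hunks below apply one pattern across the exchange backup producers: ad-hoc `errors.Wrap`/`fmt.Errorf` wrapping and `support.WrapAndAppend` aggregation give way to `clues` wrapping (which carries the key/value pairs stored on the context) plus a shared `*fault.Errors` collector threaded through the call chain. Below is a minimal sketch of the resulting call-site shape, using the `clues` and `fault` calls only as they appear in this diff; `processContainers` and `handleOne` are hypothetical stand-ins, not functions from the repository:

```go
package example

import (
	"context"

	"github.com/alcionai/clues"

	"github.com/alcionai/corso/src/pkg/fault"
)

// processContainers is a hypothetical stand-in for the producers touched in
// this commit (DataCollections, createCollections, filterContainersAndFillCollections).
func processContainers(ctx context.Context, ids []string, errs *fault.Errors) error {
	for _, id := range ids {
		// a fatal error recorded elsewhere ends iteration early
		if errs.Err() != nil {
			return errs.Err()
		}

		// annotate the context so wrapped errors carry the container id
		ictx := clues.Add(ctx, "container_id", id)

		if err := handleOne(ictx, id); err != nil {
			// recoverable: record on the collector and keep going,
			// instead of support.WrapAndAppend-ing onto a local error value
			errs.Add(clues.Wrap(err, "handling container").WithClues(ictx))
			continue
		}
	}

	return errs.Err()
}

// handleOne is a placeholder for the per-container work.
func handleOne(ctx context.Context, id string) error {
	return nil
}
```

Callers construct the collector once (`fault.New(true)` in the updated tests, where the boolean appears to be the fail-fast flag) and inspect `errs.Err()` after the producer returns.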
```diff
@@ -114,7 +114,7 @@ func (c Users) GetAll(ctx context.Context, errs *fault.Errors) ([]models.Userabl
     us := make([]models.Userable, 0)

     iterator := func(item any) bool {
-        if errs.Failed() {
+        if errs.Err() != nil {
             return false
         }

```
```diff
@@ -3,10 +3,10 @@ package exchange
 import (
     "context"

+    "github.com/alcionai/clues"
     "github.com/pkg/errors"

     "github.com/alcionai/corso/src/internal/connector/graph"
-    "github.com/alcionai/corso/src/internal/connector/support"
     "github.com/alcionai/corso/src/pkg/path"
 )

@@ -26,7 +26,7 @@ func (cfc *contactFolderCache) populateContactRoot(
 ) error {
     f, err := cfc.getter.GetContainerByID(ctx, cfc.userID, directoryID)
     if err != nil {
-        return support.ConnectorStackErrorTraceWrap(err, "fetching root folder")
+        return clues.Wrap(err, "fetching root folder")
     }

     temp := graph.NewCacheFolder(
@@ -34,7 +34,7 @@ func (cfc *contactFolderCache) populateContactRoot(
         path.Builder{}.Append(baseContainerPath...), // storage path
         path.Builder{}.Append(baseContainerPath...)) // display location
     if err := cfc.addFolder(temp); err != nil {
-        return errors.Wrap(err, "adding resolver dir")
+        return clues.Wrap(err, "adding resolver dir").WithClues(ctx)
     }

     return nil
@@ -71,7 +71,7 @@ func (cfc *contactFolderCache) init(
     baseContainerPath []string,
 ) error {
     if len(baseNode) == 0 {
-        return errors.New("m365 folderID required for base folder")
+        return clues.New("m365 folderID required for base folder").WithClues(ctx)
     }

     if cfc.containerResolver == nil {
```
```diff
@@ -3,6 +3,7 @@ package exchange
 import (
     "context"

+    "github.com/alcionai/clues"
     "github.com/hashicorp/go-multierror"
     "github.com/pkg/errors"

@@ -62,13 +63,15 @@ func (cr *containerResolver) idToPath(
     depth int,
     useIDInPath bool,
 ) (*path.Builder, *path.Builder, error) {
+    ctx = clues.Add(ctx, "container_id", folderID)
+
     if depth >= maxIterations {
-        return nil, nil, errors.New("path contains cycle or is too tall")
+        return nil, nil, clues.New("path contains cycle or is too tall").WithClues(ctx)
     }

     c, ok := cr.cache[folderID]
     if !ok {
-        return nil, nil, errors.Errorf("folder %s not cached", folderID)
+        return nil, nil, clues.New("folder not cached").WithClues(ctx)
     }

     p := c.Path()
@@ -164,7 +167,7 @@ func (cr *containerResolver) AddToCache(
         Container: f,
     }
     if err := cr.addFolder(temp); err != nil {
-        return errors.Wrap(err, "adding cache folder")
+        return clues.Wrap(err, "adding cache folder").WithClues(ctx)
     }

     // Populate the path for this entry so calls to PathInCache succeed no matter
```
```diff
@@ -185,7 +185,7 @@ func DataCollections(
     }

     for _, scope := range eb.Scopes() {
-        if errs.Failed() {
+        if errs.Err() != nil {
             break
         }

@@ -196,7 +196,8 @@ func DataCollections(
             scope,
             cdps[scope.Category().PathType()],
             ctrlOpts,
-            su)
+            su,
+            errs)
         if err != nil {
             errs.Add(err)
             continue
@@ -217,7 +218,7 @@ func getterByType(ac api.Client, category path.CategoryType) (addedAndRemovedIte
     case path.ContactsCategory:
         return ac.Contacts(), nil
     default:
-        return nil, clues.Wrap(clues.New(category.String()), "category not supported")
+        return nil, clues.New("no api client registered for category")
     }
 }

@@ -232,6 +233,7 @@ func createCollections(
     dps DeltaPaths,
     ctrlOpts control.Options,
     su support.StatusUpdater,
+    errs *fault.Errors,
 ) ([]data.BackupCollection, error) {
     var (
         allCollections = make([]data.BackupCollection, 0)
@@ -239,6 +241,8 @@ func createCollections(
         category = scope.Category().PathType()
     )

+    ctx = clues.Add(ctx, "category", category)
+
     getter, err := getterByType(ac, category)
     if err != nil {
         return nil, clues.Stack(err).WithClues(ctx)
@@ -274,7 +278,8 @@ func createCollections(
             resolver,
             scope,
             dps,
-            ctrlOpts)
+            ctrlOpts,
+            errs)
         if err != nil {
             return nil, errors.Wrap(err, "filling collections")
         }
```
```diff
@@ -15,6 +15,7 @@ import (
     "github.com/alcionai/corso/src/internal/data"
     "github.com/alcionai/corso/src/internal/tester"
     "github.com/alcionai/corso/src/pkg/control"
+    "github.com/alcionai/corso/src/pkg/fault"
     "github.com/alcionai/corso/src/pkg/path"
     "github.com/alcionai/corso/src/pkg/selectors"
 )
@@ -267,7 +268,8 @@ func (suite *DataCollectionsIntegrationSuite) TestMailFetch() {
                 test.scope,
                 DeltaPaths{},
                 control.Options{},
-                func(status *support.ConnectorOperationStatus) {})
+                func(status *support.ConnectorOperationStatus) {},
+                fault.New(true))
             require.NoError(t, err)

             for _, c := range collections {
@@ -334,7 +336,8 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
                 test.scope,
                 DeltaPaths{},
                 control.Options{},
-                func(status *support.ConnectorOperationStatus) {})
+                func(status *support.ConnectorOperationStatus) {},
+                fault.New(true))
             require.NoError(t, err)
             assert.Less(t, 1, len(collections), "retrieved metadata and data collections")

@@ -364,7 +367,8 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
                 test.scope,
                 dps,
                 control.Options{},
-                func(status *support.ConnectorOperationStatus) {})
+                func(status *support.ConnectorOperationStatus) {},
+                fault.New(true))
             require.NoError(t, err)

             // TODO(keepers): this isn't a very useful test at the moment. It needs to
@@ -409,7 +413,8 @@ func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression()
         sel.Scopes()[0],
         DeltaPaths{},
         control.Options{},
-        newStatusUpdater(t, &wg))
+        newStatusUpdater(t, &wg),
+        fault.New(true))
     require.NoError(t, err)

     wg.Add(len(collections))
@@ -476,7 +481,8 @@ func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression
         test.scope,
         DeltaPaths{},
         control.Options{},
-        newStatusUpdater(t, &wg))
+        newStatusUpdater(t, &wg),
+        fault.New(true))
     require.NoError(t, err)

     wg.Add(len(edcs))
@@ -583,7 +589,8 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
         test.scope,
         DeltaPaths{},
         control.Options{},
-        newStatusUpdater(t, &wg))
+        newStatusUpdater(t, &wg),
+        fault.New(true))
     require.NoError(t, err)
     require.Len(t, collections, 2)

```
```diff
@@ -3,10 +3,10 @@ package exchange
 import (
     "context"

+    "github.com/alcionai/clues"
     "github.com/pkg/errors"

     "github.com/alcionai/corso/src/internal/connector/graph"
-    "github.com/alcionai/corso/src/internal/connector/support"
     "github.com/alcionai/corso/src/pkg/path"
 )

@@ -42,7 +42,7 @@ func (ecc *eventCalendarCache) populateEventRoot(ctx context.Context) error {

     f, err := ecc.getter.GetContainerByID(ctx, ecc.userID, container)
     if err != nil {
-        return errors.Wrap(err, "fetching calendar "+support.ConnectorStackErrorTrace(err))
+        return errors.Wrap(err, "fetching calendar")
     }

     temp := graph.NewCacheFolder(
@@ -50,7 +50,7 @@ func (ecc *eventCalendarCache) populateEventRoot(ctx context.Context) error {
         path.Builder{}.Append(*f.GetId()), // storage path
         path.Builder{}.Append(*f.GetDisplayName())) // display location
     if err := ecc.addFolder(temp); err != nil {
-        return errors.Wrap(err, "initializing calendar resolver")
+        return clues.Wrap(err, "initializing calendar resolver").WithClues(ctx)
     }

     return nil
@@ -88,7 +88,7 @@ func (ecc *eventCalendarCache) Populate(
 // @returns error iff the required values are not accessible.
 func (ecc *eventCalendarCache) AddToCache(ctx context.Context, f graph.Container, useIDInPath bool) error {
     if err := checkIDAndName(f); err != nil {
-        return errors.Wrap(err, "validating container")
+        return clues.Wrap(err, "validating container").WithClues(ctx)
     }

     temp := graph.NewCacheFolder(
@@ -104,7 +104,7 @@ func (ecc *eventCalendarCache) AddToCache(ctx context.Context, f graph.Container

     if err := ecc.addFolder(temp); err != nil {
         delete(ecc.newAdditions, *f.GetDisplayName())
-        return errors.Wrap(err, "adding container")
+        return clues.Wrap(err, "adding container").WithClues(ctx)
     }

     // Populate the path for this entry so calls to PathInCache succeed no matter
```
```diff
@@ -3,10 +3,10 @@ package exchange
 import (
     "context"

+    "github.com/alcionai/clues"
     "github.com/pkg/errors"

     "github.com/alcionai/corso/src/internal/connector/graph"
-    "github.com/alcionai/corso/src/internal/connector/support"
     "github.com/alcionai/corso/src/pkg/path"
 )

@@ -46,7 +46,7 @@ func (mc *mailFolderCache) populateMailRoot(ctx context.Context) error {

     f, err := mc.getter.GetContainerByID(ctx, mc.userID, fldr)
     if err != nil {
-        return support.ConnectorStackErrorTraceWrap(err, "fetching root folder")
+        return clues.Wrap(err, "fetching root folder")
     }

     if fldr == DefaultMailFolder {
@@ -57,7 +57,7 @@ func (mc *mailFolderCache) populateMailRoot(ctx context.Context) error {
             path.Builder{}.Append(directory), // storage path
             path.Builder{}.Append(directory)) // display location
         if err := mc.addFolder(temp); err != nil {
-            return errors.Wrap(err, "adding resolver dir")
+            return clues.Wrap(err, "adding resolver dir").WithClues(ctx)
         }
     }

```
```diff
@@ -2,8 +2,8 @@ package exchange

 import (
     "context"
-    "fmt"

+    "github.com/alcionai/clues"
     "github.com/pkg/errors"

     "github.com/alcionai/corso/src/internal/connector/exchange/api"
@@ -75,11 +75,11 @@ func PopulateExchangeContainerResolver(
         cacheRoot = DefaultCalendar

     default:
-        return nil, fmt.Errorf("ContainerResolver not present for %s type", qp.Category)
+        return nil, clues.New("no container resolver registered for category").WithClues(ctx)
     }

     if err := res.Populate(ctx, cacheRoot); err != nil {
-        return nil, errors.Wrap(err, "populating directory resolver")
+        return nil, clues.Wrap(err, "populating directory resolver").WithClues(ctx)
     }

     return res, nil
```
```diff
@@ -2,8 +2,8 @@ package exchange

 import (
     "context"
-    "fmt"

+    "github.com/alcionai/clues"
     "github.com/pkg/errors"

     "github.com/alcionai/corso/src/internal/connector/exchange/api"
@@ -11,6 +11,7 @@ import (
     "github.com/alcionai/corso/src/internal/connector/support"
     "github.com/alcionai/corso/src/internal/data"
     "github.com/alcionai/corso/src/pkg/control"
+    "github.com/alcionai/corso/src/pkg/fault"
     "github.com/alcionai/corso/src/pkg/logger"
     "github.com/alcionai/corso/src/pkg/path"
     "github.com/alcionai/corso/src/pkg/selectors"
@@ -38,9 +39,9 @@ func filterContainersAndFillCollections(
     scope selectors.ExchangeScope,
     dps DeltaPaths,
     ctrlOpts control.Options,
+    errs *fault.Errors,
 ) error {
     var (
-        errs error
         // folder ID -> delta url or folder path lookups
         deltaURLs = map[string]string{}
         currPaths = map[string]string{}
@@ -63,8 +64,8 @@ func filterContainersAndFillCollections(
     }

     for _, c := range resolver.Items() {
-        if ctrlOpts.FailFast && errs != nil {
-            return errs
+        if errs.Err() != nil {
+            return errs.Err()
         }

         cID := *c.GetId()
@@ -85,7 +86,7 @@ func filterContainersAndFillCollections(

         if len(prevPathStr) > 0 {
             if prevPath, err = pathFromPrevString(prevPathStr); err != nil {
-                logger.Ctx(ctx).Error(err)
+                logger.Ctx(ctx).With("err", err).Errorw("parsing prev path", clues.InErr(err).Slice()...)
                 // if the previous path is unusable, then the delta must be, too.
                 prevDelta = ""
             }
@@ -94,7 +95,7 @@ func filterContainersAndFillCollections(
         added, removed, newDelta, err := getter.GetAddedAndRemovedItemIDs(ctx, qp.ResourceOwner, cID, prevDelta)
         if err != nil {
             if !graph.IsErrDeletedInFlight(err) {
-                errs = support.WrapAndAppend(qp.ResourceOwner, err, errs)
+                errs.Add(err)
                 continue
             }

@@ -150,7 +151,7 @@ func filterContainersAndFillCollections(
     // resolver (which contains all the resource owners' current containers).
     for id, p := range tombstones {
         if collections[id] != nil {
-            errs = support.WrapAndAppend(p, errors.New("conflict: tombstone exists for a live collection"), errs)
+            errs.Add(clues.Wrap(err, "conflict: tombstone exists for a live collection").WithClues(ctx))
             continue
         }

@@ -162,9 +163,8 @@ func filterContainersAndFillCollections(

         prevPath, err := pathFromPrevString(p)
         if err != nil {
-            // technically shouldn't ever happen. But just in case, we need to catch
-            // it for protection.
-            logger.Ctx(ctx).Errorw("parsing tombstone path", "err", err)
+            // technically shouldn't ever happen. But just in case...
+            logger.Ctx(ctx).With("err", err).Errorw("parsing tombstone prev path", clues.InErr(err).Slice()...)
             continue
         }

@@ -189,20 +189,21 @@ func filterContainersAndFillCollections(
         entries = append(entries, graph.NewMetadataEntry(graph.DeltaURLsFileName, deltaURLs))
     }

-    if col, err := graph.MakeMetadataCollection(
+    col, err := graph.MakeMetadataCollection(
         qp.Credentials.AzureTenantID,
         qp.ResourceOwner,
         path.ExchangeService,
         qp.Category,
         entries,
         statusUpdater,
-    ); err != nil {
-        errs = support.WrapAndAppend("making metadata collection", err, errs)
-    } else if col != nil {
-        collections["metadata"] = col
+    )
+    if err != nil {
+        return clues.Wrap(err, "making metadata collection")
     }

-    return errs
+    collections["metadata"] = col
+
+    return errs.Err()
 }

 // produces a set of id:path pairs from the deltapaths map.
@@ -236,6 +237,6 @@ func itemerByType(ac api.Client, category path.CategoryType) (itemer, error) {
     case path.ContactsCategory:
         return ac.Contacts(), nil
    default:
-        return nil, fmt.Errorf("category %s not supported by getFetchIDFunc", category)
+        return nil, clues.New("category not registered in getFetchIDFunc")
     }
 }
```
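The `logger.Ctx(ctx).With("err", err).Errorw(..., clues.InErr(err).Slice()...)` lines above pair the structured logger with the clues annotations, so a recoverable failure that is only logged still carries the same key/value context as one returned as an error. Below is a small sketch of that pattern in isolation; `logRecoverable` is a hypothetical helper, not part of the repository:

```go
package example

import (
	"context"

	"github.com/alcionai/clues"

	"github.com/alcionai/corso/src/pkg/logger"
)

// logRecoverable mirrors the logging pattern used above: clues.InErr extracts
// the key/value pairs that WithClues attached to err, and Errorw emits them
// as structured log fields alongside the error itself.
func logRecoverable(ctx context.Context, err error) {
	logger.Ctx(ctx).With("err", err).Errorw("recoverable failure", clues.InErr(err).Slice()...)
}
```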
```diff
@@ -17,6 +17,7 @@ import (
     "github.com/alcionai/corso/src/internal/tester"
     "github.com/alcionai/corso/src/pkg/account"
     "github.com/alcionai/corso/src/pkg/control"
+    "github.com/alcionai/corso/src/pkg/fault"
     "github.com/alcionai/corso/src/pkg/path"
     "github.com/alcionai/corso/src/pkg/selectors"
 )
@@ -230,7 +231,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
             },
             resolver: newMockResolver(container1),
             scope: allScope,
-            expectErr: assert.Error,
+            expectErr: assert.NoError,
             expectNewColls: 0,
             expectMetadataColls: 1,
         },
@@ -255,7 +256,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
             },
             resolver: newMockResolver(container1, container2),
             scope: allScope,
-            expectErr: assert.Error,
+            expectErr: assert.NoError,
             expectNewColls: 1,
             expectMetadataColls: 1,
         },
@@ -304,7 +305,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
                 test.scope,
                 dps,
                 control.Options{FailFast: test.failFast},
-            )
+                fault.New(test.failFast))
             test.expectErr(t, err)

             // collection assertions
@@ -457,7 +458,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_repea
                 allScope,
                 dps,
                 control.Options{FailFast: true},
-            )
+                fault.New(true))
             require.NoError(t, err)

             // collection assertions
@@ -809,7 +810,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_incre
                 allScope,
                 test.dps,
                 control.Options{},
-            )
+                fault.New(true))
             assert.NoError(t, err)

             metadatas := 0
```
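The test updates show the calling convention for the new parameter: a collector is built with `fault.New` at the call site (the boolean appears to be the fail-fast flag, mirroring `control.Options.FailFast`) and passed as the final argument. Here is a hedged sketch of a driver in that style; `runBackupStep` and the `fill` callback are hypothetical, standing in for the real producer signatures:

```go
package example

import (
	"context"

	"github.com/alcionai/corso/src/pkg/fault"
)

// runBackupStep builds the fault collector the way the updated tests do,
// hands it to the producer, and surfaces any fatal error afterwards.
func runBackupStep(ctx context.Context, failFast bool, fill func(context.Context, *fault.Errors) error) error {
	errs := fault.New(failFast) // presumably: true => treat recorded errors as fatal

	if err := fill(ctx, errs); err != nil {
		return err
	}

	// a fatal error recorded via the collector during the fill shows up here
	return errs.Err()
}
```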
```diff
@@ -331,7 +331,7 @@ func getResources(
     }

     callbackFunc := func(item any) bool {
-        if errs.Failed() {
+        if errs.Err() != nil {
             return false
         }

```
```diff
@@ -142,7 +142,7 @@ func verifyDistinctBases(ctx context.Context, mans []*kopia.ManifestEntry, errs
     )

     for _, man := range mans {
-        if errs.Failed() {
+        if errs.Err() != nil {
             break
         }

```
```diff
@@ -84,11 +84,6 @@ func (e *Errors) Fail(err error) *Errors {
     return e.setErr(err)
 }

-// Failed returns true if e.err != nil, signifying a catastrophic failure.
-func (e *Errors) Failed() bool {
-    return e.err != nil
-}
-
 // setErr handles setting errors.err. Sync locking gets
 // handled upstream of this call.
 func (e *Errors) setErr(err error) *Errors {
```
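With every call site now checking `errs.Err() != nil`, the dedicated `Failed()` accessor removed above was redundant. The following is an illustrative, simplified collector, not the actual `pkg/fault` implementation; it only shows why `Err() != nil` carries the same information `Failed()` did, under the assumption that fail-fast mode promotes the first recorded error to fatal:

```go
package faultsketch

import "sync"

// collector approximates the behavior the updated call sites rely on.
type collector struct {
	mu       sync.Mutex
	failFast bool
	err      error   // the fatal error, if any
	recorded []error // recoverable errors
}

func newCollector(failFast bool) *collector {
	return &collector{failFast: failFast}
}

// Add records a recoverable error; in fail-fast mode the first one becomes fatal.
func (c *collector) Add(err error) {
	if err == nil {
		return
	}

	c.mu.Lock()
	defer c.mu.Unlock()

	if c.failFast && c.err == nil {
		c.err = err
		return
	}

	c.recorded = append(c.recorded, err)
}

// Fail records a fatal error.
func (c *collector) Fail(err error) *collector {
	c.mu.Lock()
	defer c.mu.Unlock()

	if c.err == nil {
		c.err = err
	}

	return c
}

// Err returns the fatal error, if any. A Failed() accessor would simply be
// Err() != nil, which is what callers in this commit now write inline.
func (c *collector) Err() error {
	c.mu.Lock()
	defer c.mu.Unlock()

	return c.err
}
```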