Set DoNotMerge on OneDrive collections if delta token expired (#2401)
## Description

Wires up setting DoNotMerge on OneDrive collections when the previous delta token has expired.

## Does this PR need a docs update or release note?

- [ ] ✅ Yes, it's included
- [x] 🕐 Yes, but in a later PR
- [ ] ⛔ No

## Type of change

- [x] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Test
- [ ] 💻 CI/Deployment
- [ ] 🧹 Tech Debt/Cleanup

## Issue(s)

* https://github.com/alcionai/corso/issues/2123
* https://github.com/alcionai/corso/issues/2124

## Test Plan

- [ ] 💪 Manual
- [x] ⚡ Unit test
- [ ] 💚 E2E
Parent: 2c6ccb70b2
Commit: 45291ebaea
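For orientation, here is a minimal sketch of the behavior this change wires up. The types below are simplified stand-ins, not the real Corso `Collection`, pager, or collector: the intent is that when the previously stored delta token is rejected, enumeration restarts from scratch and every collection produced for that drive carries the `DoNotMergeItems` flag, signalling consumers not to merge its contents with items from an earlier backup.

```go
package main

import "fmt"

// collection is a simplified stand-in for the OneDrive backup collection in
// this PR; only the piece relevant here (the DoNotMergeItems flag) is kept.
type collection struct {
	folderPath      string
	doNotMergeItems bool
}

func (c *collection) DoNotMergeItems() bool { return c.doNotMergeItems }

// collectDrive sketches the flow: if the previous delta token is no longer
// valid, enumeration restarts and each resulting collection is marked so its
// items are not merged with state from prior backups.
func collectDrive(prevDeltaValid bool, folders []string) []*collection {
	invalidPrevDelta := !prevDeltaValid // mirrors the flag threaded through collectItems

	cols := make([]*collection, 0, len(folders))
	for _, f := range folders {
		cols = append(cols, &collection{
			folderPath:      f,
			doNotMergeItems: invalidPrevDelta, // the new NewCollection argument
		})
	}

	return cols
}

func main() {
	for _, c := range collectDrive(false, []string{"root:", "root:/folder"}) {
		fmt.Printf("%s DoNotMergeItems=%v\n", c.folderPath, c.DoNotMergeItems())
	}
}
```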
@@ -8,7 +8,6 @@ import (
     "time"
 
     "github.com/Azure/azure-sdk-for-go/sdk/azidentity"
-    "github.com/alcionai/corso/src/internal/connector/support"
     "github.com/microsoft/kiota-abstractions-go/serialization"
     ka "github.com/microsoft/kiota-authentication-azure-go"
     khttp "github.com/microsoft/kiota-http-go"
@@ -16,6 +15,7 @@ import (
     msgraphgocore "github.com/microsoftgraph/msgraph-sdk-go-core"
     "github.com/pkg/errors"
 
+    "github.com/alcionai/corso/src/internal/connector/support"
     "github.com/alcionai/corso/src/pkg/account"
     "github.com/alcionai/corso/src/pkg/logger"
    "github.com/alcionai/corso/src/pkg/path"
@@ -97,17 +97,19 @@ func NewCollection(
     statusUpdater support.StatusUpdater,
     source driveSource,
     ctrlOpts control.Options,
+    doNotMergeItems bool,
 ) *Collection {
     c := &Collection{
-        itemClient:    itemClient,
-        folderPath:    folderPath,
-        driveItems:    map[string]models.DriveItemable{},
-        driveID:       driveID,
-        source:        source,
-        service:       service,
-        data:          make(chan data.Stream, collectionChannelBufferSize),
-        statusUpdater: statusUpdater,
-        ctrl:          ctrlOpts,
+        itemClient:      itemClient,
+        folderPath:      folderPath,
+        driveItems:      map[string]models.DriveItemable{},
+        driveID:         driveID,
+        source:          source,
+        service:         service,
+        data:            make(chan data.Stream, collectionChannelBufferSize),
+        statusUpdater:   statusUpdater,
+        ctrl:            ctrlOpts,
+        doNotMergeItems: doNotMergeItems,
     }
 
     // Allows tests to set a mock populator
@@ -168,7 +168,8 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
                 suite,
                 suite.testStatusUpdater(&wg, &collStatus),
                 test.source,
-                control.Options{ToggleFeatures: control.Toggles{EnablePermissionsBackup: true}})
+                control.Options{ToggleFeatures: control.Toggles{EnablePermissionsBackup: true}},
+                true)
             require.NotNil(t, coll)
             assert.Equal(t, folderPath, coll.FullPath())
 
@@ -301,7 +302,8 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadError() {
                 suite,
                 suite.testStatusUpdater(&wg, &collStatus),
                 test.source,
-                control.Options{ToggleFeatures: control.Toggles{EnablePermissionsBackup: true}})
+                control.Options{ToggleFeatures: control.Toggles{EnablePermissionsBackup: true}},
+                true)
 
             mockItem := models.NewDriveItem()
             mockItem.SetId(&testItemID)
@@ -372,7 +374,8 @@ func (suite *CollectionUnitTestSuite) TestCollectionDisablePermissionsBackup() {
                 suite,
                 suite.testStatusUpdater(&wg, &collStatus),
                 test.source,
-                control.Options{ToggleFeatures: control.Toggles{}})
+                control.Options{ToggleFeatures: control.Toggles{}},
+                true)
 
             now := time.Now()
             mockItem := models.NewDriveItem()
@@ -374,6 +374,7 @@ func (c *Collections) UpdateCollections(
     oldPaths map[string]string,
     newPaths map[string]string,
     excluded map[string]struct{},
+    invalidPrevDelta bool,
 ) error {
     for _, item := range items {
         if item.GetRoot() != nil {
@@ -465,7 +466,9 @@ func (c *Collections) UpdateCollections(
                 c.service,
                 c.statusUpdater,
                 c.source,
-                c.ctrl)
+                c.ctrl,
+                invalidPrevDelta,
+            )
 
             c.CollectionMap[collectionPath.String()] = col
             c.NumContainers++
@@ -646,6 +646,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
                 tt.inputFolderMap,
                 outputFolderMap,
                 excludes,
+                false,
             )
             tt.expect(t, err)
             assert.Equal(t, len(tt.expectedCollectionPaths), len(c.CollectionMap), "collection paths")
@@ -1133,6 +1134,7 @@ func (suite *OneDriveCollectionsSuite) TestGet() {
         expectedDeltaURLs   map[string]string
         expectedFolderPaths map[string]map[string]string
         expectedDelList     map[string]struct{}
+        doNotMergeItems     bool
     }{
         {
             name:   "OneDrive_OneItemPage_DelFileOnly_NoFolders_NoErrors",
@@ -1344,6 +1346,135 @@ func (suite *OneDriveCollectionsSuite) TestGet() {
             expectedFolderPaths: nil,
             expectedDelList:     nil,
         },
+        {
+            name:   "OneDrive_OneItemPage_DeltaError",
+            drives: []models.Driveable{drive1},
+            items: map[string][]deltaPagerResult{
+                driveID1: {
+                    {
+                        err: getDeltaError(),
+                    },
+                    {
+                        items: []models.DriveItemable{
+                            driveItem("file", "file", testBaseDrivePath, true, false, false),
+                        },
+                        deltaLink: &delta,
+                    },
+                },
+            },
+            errCheck: assert.NoError,
+            expectedCollections: map[string][]string{
+                expectedPathAsSlice(
+                    suite.T(),
+                    tenant,
+                    user,
+                    testBaseDrivePath,
+                )[0]: {"file"},
+            },
+            expectedDeltaURLs: map[string]string{
+                driveID1: delta,
+            },
+            expectedFolderPaths: map[string]map[string]string{
+                // We need an empty map here so deserializing metadata knows the delta
+                // token for this drive is valid.
+                driveID1: {},
+            },
+            expectedDelList: map[string]struct{}{},
+            doNotMergeItems: true,
+        },
+        {
+            name:   "OneDrive_MultipleCollections_DeltaError",
+            drives: []models.Driveable{drive1},
+            items: map[string][]deltaPagerResult{
+                driveID1: {
+                    {
+                        err: getDeltaError(),
+                    },
+                    {
+                        items: []models.DriveItemable{
+                            driveItem("file", "file", testBaseDrivePath, true, false, false),
+                        },
+                        nextLink: &next,
+                    },
+                    {
+                        items: []models.DriveItemable{
+                            driveItem("file", "file", testBaseDrivePath+"/folder", true, false, false),
+                        },
+                        deltaLink: &delta,
+                    },
+                },
+            },
+            errCheck: assert.NoError,
+            expectedCollections: map[string][]string{
+                expectedPathAsSlice(
+                    suite.T(),
+                    tenant,
+                    user,
+                    testBaseDrivePath,
+                )[0]: {"file"},
+                expectedPathAsSlice(
+                    suite.T(),
+                    tenant,
+                    user,
+                    testBaseDrivePath+"/folder",
+                )[0]: {"file"},
+            },
+            expectedDeltaURLs: map[string]string{
+                driveID1: delta,
+            },
+            expectedFolderPaths: map[string]map[string]string{
+                // We need an empty map here so deserializing metadata knows the delta
+                // token for this drive is valid.
+                driveID1: {},
+            },
+            expectedDelList: map[string]struct{}{},
+            doNotMergeItems: true,
+        },
+        {
+            name:   "OneDrive_MultipleCollections_NoDeltaError",
+            drives: []models.Driveable{drive1},
+            items: map[string][]deltaPagerResult{
+                driveID1: {
+                    {
+                        items: []models.DriveItemable{
+                            driveItem("file", "file", testBaseDrivePath, true, false, false),
+                        },
+                        nextLink: &next,
+                    },
+                    {
+                        items: []models.DriveItemable{
+                            driveItem("file", "file", testBaseDrivePath+"/folder", true, false, false),
+                        },
+                        deltaLink: &delta,
+                    },
+                },
+            },
+            errCheck: assert.NoError,
+            expectedCollections: map[string][]string{
+                expectedPathAsSlice(
+                    suite.T(),
+                    tenant,
+                    user,
+                    testBaseDrivePath,
+                )[0]: {"file"},
+                expectedPathAsSlice(
+                    suite.T(),
+                    tenant,
+                    user,
+                    testBaseDrivePath+"/folder",
+                )[0]: {"file"},
+            },
+            expectedDeltaURLs: map[string]string{
+                driveID1: delta,
+            },
+            expectedFolderPaths: map[string]map[string]string{
+                // We need an empty map here so deserializing metadata knows the delta
+                // token for this drive is valid.
+                driveID1: {},
+            },
+            expectedDelList: map[string]struct{}{},
+            doNotMergeItems: false,
+        },
     }
     for _, test := range table {
         suite.T().Run(test.name, func(t *testing.T) {
@@ -1423,6 +1554,7 @@ func (suite *OneDriveCollectionsSuite) TestGet() {
                 }
 
                 assert.ElementsMatch(t, test.expectedCollections[folderPath], itemIDs)
+                assert.Equal(t, test.doNotMergeItems, baseCol.DoNotMergeItems(), "DoNotMergeItems")
             }
 
             assert.Equal(t, test.expectedDelList, delList)
@@ -1483,10 +1615,7 @@ func delItem(
     return item
 }
 
-func (suite *OneDriveCollectionsSuite) TestCollectItems() {
-    next := "next"
-    delta := "delta"
-
+func getDeltaError() error {
     syncStateNotFound := "SyncStateNotFound" // TODO(meain): export graph.errCodeSyncStateNotFound
     me := odataerrors.NewMainError()
     me.SetCode(&syncStateNotFound)
@@ -1494,6 +1623,13 @@ func (suite *OneDriveCollectionsSuite) TestCollectItems() {
     deltaError := odataerrors.NewODataError()
     deltaError.SetError(me)
 
+    return deltaError
+}
+
+func (suite *OneDriveCollectionsSuite) TestCollectItems() {
+    next := "next"
+    delta := "delta"
+
     table := []struct {
         name  string
         items []deltaPagerResult
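The `getDeltaError` helper above only fabricates the error the delta pager returns. As an assumption-level illustration (not the Corso implementation), a caller might recognize that condition roughly as follows; the `errCodeSyncStateNotFound` constant is assumed here (the TODO in the test notes it is not yet exported from `graph`), and the getter names are inferred from the setters used above.

```go
package main

import (
	"errors"
	"fmt"

	"github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
)

// Assumed constant; graph.errCodeSyncStateNotFound is not exported yet.
const errCodeSyncStateNotFound = "SyncStateNotFound"

// isExpiredDelta reports whether err is a Graph OData error whose code marks
// the stored delta token as unusable, the trigger for setting DoNotMergeItems
// on the drive's collections.
func isExpiredDelta(err error) bool {
	var oerr *odataerrors.ODataError
	if !errors.As(err, &oerr) {
		return false
	}

	mainErr := oerr.GetError()
	if mainErr == nil || mainErr.GetCode() == nil {
		return false
	}

	return *mainErr.GetCode() == errCodeSyncStateNotFound
}

func main() {
	// Build the same shape of error the test helper returns.
	code := errCodeSyncStateNotFound
	me := odataerrors.NewMainError()
	me.SetCode(&code)

	oerr := odataerrors.NewODataError()
	oerr.SetError(me)

	fmt.Println(isExpiredDelta(oerr)) // true
}
```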
@@ -1522,7 +1658,7 @@ func (suite *OneDriveCollectionsSuite) TestCollectItems() {
             name:     "invalid prev delta",
             deltaURL: delta,
             items: []deltaPagerResult{
-                {err: deltaError},
+                {err: getDeltaError()},
                 {deltaLink: &delta}, // works on retry
             },
             prevDeltaSuccess: false,
@@ -1553,6 +1689,7 @@ func (suite *OneDriveCollectionsSuite) TestCollectItems() {
             oldPaths map[string]string,
             newPaths map[string]string,
             excluded map[string]struct{},
+            doNotMergeItems bool,
         ) error {
             return nil
         }
@@ -143,6 +143,7 @@ type itemCollector func(
     oldPaths map[string]string,
     newPaths map[string]string,
     excluded map[string]struct{},
+    validPrevDelta bool,
 ) error
 
 type itemPager interface {
@@ -228,7 +229,7 @@ func collectItems(
         return DeltaUpdate{}, nil, nil, errors.Wrap(err, "extracting items from response")
     }
 
-    err = collector(ctx, driveID, driveName, vals, oldPaths, newPaths, excluded)
+    err = collector(ctx, driveID, driveName, vals, oldPaths, newPaths, excluded, invalidPrevDelta)
     if err != nil {
         return DeltaUpdate{}, nil, nil, err
     }
@@ -380,6 +381,7 @@ func GetAllFolders(
     oldPaths map[string]string,
     newPaths map[string]string,
     excluded map[string]struct{},
+    doNotMergeItems bool,
 ) error {
     for _, item := range items {
         // Skip the root item.
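The `itemCollector` signature above gains a trailing boolean. Judging by the call in `collectItems`, it carries `invalidPrevDelta` (true once enumeration has restarted from an empty token), even though the type declaration names the parameter `validPrevDelta`. Below is a minimal sketch of a conforming callback; the leading parameters are reconstructed from the `collector(...)` call shown in the hunk, so treat the exact signature as an assumption rather than the project's definition.

```go
package main

import (
	"context"
	"fmt"

	"github.com/microsoftgraph/msgraph-sdk-go/models"
)

// itemCollector mirrors the callback shape implied by the collectItems call
// above; the earlier parameters are inferred, and the trailing bool is the
// invalid-previous-delta signal added in this PR.
type itemCollector func(
	ctx context.Context,
	driveID, driveName string,
	items []models.DriveItemable,
	oldPaths map[string]string,
	newPaths map[string]string,
	excluded map[string]struct{},
	invalidPrevDelta bool,
) error

// countingCollector is a toy collector: it tallies non-root items per drive
// and reports when merging should be disabled for the drive's collections.
func countingCollector(counts map[string]int) itemCollector {
	return func(
		ctx context.Context,
		driveID, driveName string,
		items []models.DriveItemable,
		oldPaths, newPaths map[string]string,
		excluded map[string]struct{},
		invalidPrevDelta bool,
	) error {
		for _, item := range items {
			if item.GetRoot() != nil {
				continue // skip the drive root, as the real collectors do
			}

			counts[driveID]++
		}

		if invalidPrevDelta {
			fmt.Printf("drive %s: delta reset, collections get DoNotMergeItems\n", driveID)
		}

		return nil
	}
}

func main() {
	counts := map[string]int{}

	item := models.NewDriveItem()
	name := "file"
	item.SetName(&name)

	_ = countingCollector(counts)(
		context.Background(),
		"drive-1", "Documents",
		[]models.DriveItemable{item},
		nil, nil, nil,
		true,
	)

	fmt.Println("items:", counts["drive-1"]) // items: 1
}
```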
@@ -106,6 +106,7 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
         oldPaths map[string]string,
         newPaths map[string]string,
         excluded map[string]struct{},
+        doNotMergeItems bool,
     ) error {
         for _, item := range items {
             if item.GetFile() != nil {
@@ -4,9 +4,10 @@ import (
     "testing"
 
     discover "github.com/alcionai/corso/src/internal/connector/discovery/api"
+    "github.com/stretchr/testify/require"
+
     "github.com/alcionai/corso/src/internal/connector/graph"
     "github.com/alcionai/corso/src/pkg/account"
-    "github.com/stretchr/testify/require"
 )
 
 func createTestBetaService(t *testing.T, credentials account.M365Config) *discover.BetaService {
@@ -100,7 +100,7 @@ func (suite *SharePointLibrariesSuite) TestUpdateCollections() {
                 &MockGraphService{},
                 nil,
                 control.Options{})
-            err := c.UpdateCollections(ctx, "driveID", "General", test.items, paths, newPaths, excluded)
+            err := c.UpdateCollections(ctx, "driveID", "General", test.items, paths, newPaths, excluded, true)
             test.expect(t, err)
             assert.Equal(t, len(test.expectedCollectionPaths), len(c.CollectionMap), "collection paths")
             assert.Equal(t, test.expectedItemCount, c.NumItems, "item count")
@@ -3,11 +3,12 @@ package support
 import (
     "strings"
 
-    bmodels "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
     absser "github.com/microsoft/kiota-abstractions-go/serialization"
     js "github.com/microsoft/kiota-serialization-json-go"
     "github.com/microsoftgraph/msgraph-sdk-go/models"
     "github.com/pkg/errors"
+
+    bmodels "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
 )
 
 // CreateFromBytes helper function to initialize m365 object form bytes.