diff --git a/CHANGELOG.md b/CHANGELOG.md index 2765f90c4..b7d786587 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,11 +10,15 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Fixed - Support for item.Attachment:Mail restore +- Errors from duplicate names in Exchange Calendars ### Changed +- When using Restore and Details on Exchange Calendars, the `--event-calendar` flag can now identify calendars by either a Display Name or a Microsoft 365 ID. +- Exchange Calendars storage entries now construct their paths using container IDs instead of display names. This fixes cases where duplicate display names caused system failures. ### Known Issues - Nested attachments are currently not restored due to an [issue](https://github.com/microsoft/kiota-serialization-json-go/issues/61) discovered in the Graph APIs +- Breaking changes to Exchange Calendar backups. ## [v0.3.0] (alpha) - 2023-2-07 diff --git a/src/cli/utils/testdata/opts.go b/src/cli/utils/testdata/opts.go index e958fc5c5..bcb05ca4d 100644 --- a/src/cli/utils/testdata/opts.go +++ b/src/cli/utils/testdata/opts.go @@ -137,14 +137,14 @@ var ( Name: "EmailsFolderPrefixMatch", Expected: testdata.ExchangeEmailItems, Opts: utils.ExchangeOpts{ - EmailFolder: []string{testdata.ExchangeEmailInboxPath.Folder()}, + EmailFolder: []string{testdata.ExchangeEmailInboxPath.Folder(false)}, }, }, { Name: "EmailsFolderPrefixMatchTrailingSlash", Expected: testdata.ExchangeEmailItems, Opts: utils.ExchangeOpts{ - EmailFolder: []string{testdata.ExchangeEmailInboxPath.Folder() + "/"}, + EmailFolder: []string{testdata.ExchangeEmailInboxPath.Folder(false) + "/"}, }, }, { @@ -154,7 +154,7 @@ var ( testdata.ExchangeEmailItems[2], }, Opts: utils.ExchangeOpts{ - EmailFolder: []string{testdata.ExchangeEmailBasePath2.Folder()}, + EmailFolder: []string{testdata.ExchangeEmailBasePath2.Folder(false)}, }, }, { @@ -164,7 +164,7 @@ var ( testdata.ExchangeEmailItems[2], }, Opts: utils.ExchangeOpts{ - EmailFolder: []string{testdata.ExchangeEmailBasePath2.Folder() + "/"}, + EmailFolder: []string{testdata.ExchangeEmailBasePath2.Folder(false) + "/"}, }, }, { diff --git a/src/cmd/factory/impl/common.go b/src/cmd/factory/impl/common.go index 307ea65e0..66efe5f39 100644 --- a/src/cmd/factory/impl/common.go +++ b/src/cmd/factory/impl/common.go @@ -172,7 +172,7 @@ func buildCollections( return nil, err } - mc := mockconnector.NewMockExchangeCollection(pth, len(c.items)) + mc := mockconnector.NewMockExchangeCollection(pth, pth, len(c.items)) for i := 0; i < len(c.items); i++ { mc.Names[i] = c.items[i].name diff --git a/src/go.mod b/src/go.mod index 4056edfbc..69c5eadd7 100644 --- a/src/go.mod +++ b/src/go.mod @@ -5,7 +5,7 @@ go 1.19 require ( github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0 github.com/alcionai/clues v0.0.0-20230202001016-cbda58c9de9e - github.com/aws/aws-sdk-go v1.44.197 + github.com/aws/aws-sdk-go v1.44.199 github.com/aws/aws-xray-sdk-go v1.8.0 github.com/google/uuid v1.3.0 github.com/hashicorp/go-multierror v1.1.1 @@ -29,7 +29,7 @@ require ( github.com/vbauerster/mpb/v8 v8.1.6 go.uber.org/zap v1.24.0 golang.org/x/exp v0.0.0-20221217163422-3c43f8badb15 - golang.org/x/tools v0.5.0 + golang.org/x/tools v0.6.0 gopkg.in/resty.v1 v1.12.0 ) @@ -111,11 +111,11 @@ require ( go.uber.org/atomic v1.10.0 // indirect go.uber.org/multierr v1.8.0 // indirect golang.org/x/crypto v0.5.0 // indirect - golang.org/x/mod v0.7.0 // indirect - golang.org/x/net v0.5.0 // indirect + golang.org/x/mod v0.8.0 // indirect + golang.org/x/net v0.6.0 // 
indirect golang.org/x/sync v0.1.0 // indirect golang.org/x/sys v0.5.0 // indirect - golang.org/x/text v0.6.0 // indirect + golang.org/x/text v0.7.0 // indirect google.golang.org/genproto v0.0.0-20221227171554-f9683d7f8bef // indirect google.golang.org/grpc v1.52.0 // indirect google.golang.org/protobuf v1.28.1 // indirect diff --git a/src/go.sum b/src/go.sum index da19f8a2e..b4e49d219 100644 --- a/src/go.sum +++ b/src/go.sum @@ -62,8 +62,8 @@ github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk5 github.com/alessio/shellescape v1.4.1 h1:V7yhSDDn8LP4lc4jS8pFkt0zCnzVJlG5JXy9BVKJUX0= github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY= github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= -github.com/aws/aws-sdk-go v1.44.197 h1:pkg/NZsov9v/CawQWy+qWVzJMIZRQypCtYjUBXFomF8= -github.com/aws/aws-sdk-go v1.44.197/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= +github.com/aws/aws-sdk-go v1.44.199 h1:hYuQmS4zLMJR9v2iOp2UOD6Vi/0V+nwyR/Uhrkrtlbc= +github.com/aws/aws-sdk-go v1.44.199/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= github.com/aws/aws-xray-sdk-go v1.8.0 h1:0xncHZ588wB/geLjbM/esoW3FOEThWy2TJyb4VXfLFY= github.com/aws/aws-xray-sdk-go v1.8.0/go.mod h1:7LKe47H+j3evfvS1+q0wzpoaGXGrF3mUsfM+thqVO+A= github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= @@ -487,8 +487,8 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.7.0 h1:LapD9S96VoQRhi/GrNTqeBJFrUjs5UHCAtTlgwA5oZA= -golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.8.0 h1:LUYupSeNrTNCGzR/hVBk2NHZO4hXcVaW1k4Qx7rjPx8= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -529,8 +529,8 @@ golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= -golang.org/x/net v0.5.0 h1:GyT4nK/YDHSqa1c4753ouYCDajOYKTja9Xb/OHtgvSw= -golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= +golang.org/x/net v0.6.0 h1:L4ZwwTvKW9gr0ZMS1yrHD9GZhIuVjOBBnaKH+SPQK0Q= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -625,8 +625,8 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text 
v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
-golang.org/x/text v0.6.0 h1:3XmdazWV+ubf7QgHSTWeykHOci5oeekaGJBLkrkaw4k=
-golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo=
+golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -678,8 +678,8 @@ golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4f
golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
-golang.org/x/tools v0.5.0 h1:+bSpV5HIeWkuvgaMfI3UmKRThoTA5ODJTUd8T17NO+4=
-golang.org/x/tools v0.5.0/go.mod h1:N+Kgy78s5I24c24dU8OfWNEotWjutIs8SnJvn5IDq+k=
+golang.org/x/tools v0.6.0 h1:BOw41kyTf3PuCW1pVQf8+Cyg8pMlkYB1oo9iJ6D/lKM=
+golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
diff --git a/src/internal/common/ptr/pointer.go b/src/internal/common/ptr/pointer.go
index 68d15b109..7dbf9052f 100644
--- a/src/internal/common/ptr/pointer.go
+++ b/src/internal/common/ptr/pointer.go
@@ -1,13 +1,20 @@
 package ptr

-// Val helper method for unwrapping strings
+// Package ptr is a common package for safe pointer
+// access and dereferencing.
+
+// Val is a generic function for dereferencing pointers.
 // Microsoft Graph saves many variables as string pointers.
 // Function will safely check if the point is nil prior to
 // dereferencing the pointer. If the pointer is nil,
-// an empty string is returned.
-func Val(ptr *string) string {
+// the zero value of the type is returned.
+// The operation does not protect nested accessors.
+// For example:
+// *evt.GetEnd().GetDateTime() will still cause a panic
+// if evt is nil or GetEnd() returns nil.
+func Val[T any](ptr *T) T {
 	if ptr == nil {
-		return ""
+		return *new(T)
 	}

 	return *ptr
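To make the new generic behavior concrete, here is a minimal usage sketch (the variables are hypothetical; only `ptr.Val` itself comes from this change):

```go
package main

import (
	"fmt"
	"time"

	"github.com/alcionai/corso/src/internal/common/ptr"
)

func main() {
	var subject *string    // nil
	var created *time.Time // nil

	fmt.Printf("%q\n", ptr.Val(subject))   // "" — the zero value for string
	fmt.Println(ptr.Val(created).IsZero()) // true — the zero time.Time

	s := "hello"
	fmt.Printf("%q\n", ptr.Val(&s)) // "hello"

	// Nested accessors still need guarding by the caller:
	// ptr.Val(evt.GetEnd().GetDateTime()) panics if evt or
	// evt.GetEnd() is nil; Val only protects the final pointer.
}
```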
diff --git a/src/internal/common/ptr/pointer_test.go b/src/internal/common/ptr/pointer_test.go
new file mode 100644
index 000000000..9cf24c5cb
--- /dev/null
+++ b/src/internal/common/ptr/pointer_test.go
@@ -0,0 +1,99 @@
+package ptr_test
+
+import (
+	"testing"
+	"time"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/suite"
+
+	"github.com/alcionai/corso/src/internal/common/ptr"
+)
+
+type PointerSuite struct {
+	suite.Suite
+}
+
+func TestPointerSuite(t *testing.T) {
+	suite.Run(t, new(PointerSuite))
+}
+
+// TestVal checks ptr dereferencing for the
+// following types:
+// - *string
+// - *bool
+// - *time.Time, *int, *int32, *int64
+func (suite *PointerSuite) TestVal() {
+	var (
+		t          = suite.T()
+		created    *time.Time
+		testString *string
+		testBool   *bool
+		testInt    *int
+		testInt32  *int32
+		testInt64  *int64
+	)
+
+	// String Checks
+	subject := ptr.Val(testString)
+	assert.Empty(t, subject)
+
+	hello := "Hello World"
+	testString = &hello
+	subject = ptr.Val(testString)
+
+	t.Logf("Received: %s", subject)
+	assert.NotEmpty(t, subject)
+
+	// Time Checks
+
+	myTime := ptr.Val(created)
+	assert.Empty(t, myTime)
+	assert.NotNil(t, myTime)
+
+	now := time.Now()
+	created = &now
+	myTime = ptr.Val(created)
+	assert.NotEmpty(t, myTime)
+
+	// Bool Checks
+	truth := true
+	myBool := ptr.Val(testBool)
+	assert.NotNil(t, myBool)
+	assert.False(t, myBool)
+
+	testBool = &truth
+	myBool = ptr.Val(testBool)
+	assert.NotNil(t, myBool)
+	assert.True(t, myBool)
+
+	// Int checks
+	myInt := ptr.Val(testInt)
+	myInt32 := ptr.Val(testInt32)
+	myInt64 := ptr.Val(testInt64)
+
+	assert.NotNil(t, myInt)
+	assert.NotNil(t, myInt32)
+	assert.NotNil(t, myInt64)
+	assert.Empty(t, myInt)
+	assert.Empty(t, myInt32)
+	assert.Empty(t, myInt64)
+
+	num := 4071
+	num32 := int32(num * 32)
+	num64 := int64(num * 2048)
+	testInt = &num
+	testInt32 = &num32
+	testInt64 = &num64
+
+	myInt = ptr.Val(testInt)
+	myInt32 = ptr.Val(testInt32)
+	myInt64 = ptr.Val(testInt64)
+
+	assert.NotNil(t, myInt)
+	assert.NotNil(t, myInt32)
+	assert.NotNil(t, myInt64)
+	assert.NotEmpty(t, myInt)
+	assert.NotEmpty(t, myInt32)
+	assert.NotEmpty(t, myInt64)
+}
diff --git a/src/internal/connector/data_collections.go b/src/internal/connector/data_collections.go
index 4c1bd4461..06b7bec6a 100644
--- a/src/internal/connector/data_collections.go
+++ b/src/internal/connector/data_collections.go
@@ -19,6 +19,7 @@ import (
 	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/control"
+	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/logger"
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/selectors"
@@ -38,6 +39,7 @@ func (gc *GraphConnector) DataCollections(
 	sels selectors.Selector,
 	metadata []data.RestoreCollection,
 	ctrlOpts control.Options,
+	errs *fault.Errors,
 ) ([]data.BackupCollection, map[string]struct{}, error) {
 	ctx, end := D.Span(ctx, "gc:dataCollections", D.Index("service", sels.Service.String()))
 	defer end()
@@ -65,7 +67,8 @@ func (gc *GraphConnector) DataCollections(
 			gc.credentials,
 			// gc.Service,
 			gc.UpdateStatus,
-			ctrlOpts)
+			ctrlOpts,
+			errs)
 		if err != nil {
 			return nil, nil, err
 		}
diff --git a/src/internal/connector/data_collections_test.go b/src/internal/connector/data_collections_test.go
index 
4484a92aa..c2038f006 100644 --- a/src/internal/connector/data_collections_test.go +++ b/src/internal/connector/data_collections_test.go @@ -14,6 +14,7 @@ import ( "github.com/alcionai/corso/src/internal/connector/sharepoint" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/control" + "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/selectors" ) @@ -105,7 +106,8 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestExchangeDataCollection nil, connector.credentials, connector.UpdateStatus, - control.Options{}) + control.Options{}, + fault.New(true)) require.NoError(t, err) assert.Empty(t, excludes) @@ -201,7 +203,12 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestDataCollections_invali for _, test := range tests { suite.T().Run(test.name, func(t *testing.T) { - collections, excludes, err := connector.DataCollections(ctx, test.getSelector(t), nil, control.Options{}) + collections, excludes, err := connector.DataCollections( + ctx, + test.getSelector(t), + nil, + control.Options{}, + fault.New(true)) assert.Error(t, err) assert.Empty(t, collections) assert.Empty(t, excludes) @@ -325,7 +332,12 @@ func (suite *ConnectorCreateSharePointCollectionIntegrationSuite) TestCreateShar sel := selectors.NewSharePointBackup(siteIDs) sel.Include(sel.Libraries([]string{"foo"}, selectors.PrefixMatch())) - cols, excludes, err := gc.DataCollections(ctx, sel.Selector, nil, control.Options{}) + cols, excludes, err := gc.DataCollections( + ctx, + sel.Selector, + nil, + control.Options{}, + fault.New(true)) require.NoError(t, err) assert.Len(t, cols, 1) // No excludes yet as this isn't an incremental backup. @@ -351,7 +363,12 @@ func (suite *ConnectorCreateSharePointCollectionIntegrationSuite) TestCreateShar sel := selectors.NewSharePointBackup(siteIDs) sel.Include(sel.Lists(selectors.Any(), selectors.PrefixMatch())) - cols, excludes, err := gc.DataCollections(ctx, sel.Selector, nil, control.Options{}) + cols, excludes, err := gc.DataCollections( + ctx, + sel.Selector, + nil, + control.Options{}, + fault.New(true)) require.NoError(t, err) assert.Less(t, 0, len(cols)) // No excludes yet as this isn't an incremental backup. diff --git a/src/internal/connector/discovery/api/users.go b/src/internal/connector/discovery/api/users.go index c08297a9e..b1a37b683 100644 --- a/src/internal/connector/discovery/api/users.go +++ b/src/internal/connector/discovery/api/users.go @@ -3,6 +3,7 @@ package api import ( "context" + "github.com/alcionai/clues" absser "github.com/microsoft/kiota-abstractions-go" msgraphgocore "github.com/microsoftgraph/msgraph-sdk-go-core" "github.com/microsoftgraph/msgraph-sdk-go/models" @@ -10,7 +11,7 @@ import ( "github.com/pkg/errors" "github.com/alcionai/corso/src/internal/connector/graph" - "github.com/alcionai/corso/src/internal/connector/support" + "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/path" ) @@ -85,7 +86,7 @@ func userOptions(fs *string) *users.UsersRequestBuilderGetRequestConfiguration { } // GetAll retrieves all users. 
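The hunks below replace support.ConnectorStackErrorTraceWrap with the clues-based decoration used throughout this change. A sketch of the pattern's shape (`ctx` and `err` are assumed to be in scope; the chain itself is taken from this diff):

```go
if err != nil {
	// Wrap adds a message, WithClues attaches the values
	// annotated on the context, and WithAll appends the Graph
	// error payload extracted by graph.ErrData.
	return nil, clues.Wrap(err, "getting all users").
		WithClues(ctx).
		WithAll(graph.ErrData(err)...)
}
```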
-func (c Users) GetAll(ctx context.Context) ([]models.Userable, error) { +func (c Users) GetAll(ctx context.Context, errs *fault.Errors) ([]models.Userable, error) { service, err := c.service() if err != nil { return nil, err @@ -99,7 +100,7 @@ func (c Users) GetAll(ctx context.Context) ([]models.Userable, error) { }) if err != nil { - return nil, support.ConnectorStackErrorTraceWrap(err, "getting all users") + return nil, clues.Wrap(err, "getting all users").WithClues(ctx).WithAll(graph.ErrData(err)...) } iter, err := msgraphgocore.NewPageIterator( @@ -107,18 +108,19 @@ func (c Users) GetAll(ctx context.Context) ([]models.Userable, error) { service.Adapter(), models.CreateUserCollectionResponseFromDiscriminatorValue) if err != nil { - return nil, support.ConnectorStackErrorTraceWrap(err, "constructing user iterator") + return nil, clues.Wrap(err, "creating users iterator").WithClues(ctx).WithAll(graph.ErrData(err)...) } - var ( - iterErrs error - us = make([]models.Userable, 0) - ) + us := make([]models.Userable, 0) iterator := func(item any) bool { + if errs.Failed() { + return false + } + u, err := validateUser(item) if err != nil { - iterErrs = support.WrapAndAppend("validating user", err, iterErrs) + errs.Add(clues.Wrap(err, "validating user").WithClues(ctx).WithAll(graph.ErrData(err)...)) } else { us = append(us, u) } @@ -127,10 +129,10 @@ func (c Users) GetAll(ctx context.Context) ([]models.Userable, error) { } if err := iter.Iterate(ctx, iterator); err != nil { - return nil, support.ConnectorStackErrorTraceWrap(err, "iterating all users") + return nil, clues.Wrap(err, "iterating all users").WithClues(ctx).WithAll(graph.ErrData(err)...) } - return us, iterErrs + return us, errs.Err() } func (c Users) GetByID(ctx context.Context, userID string) (models.Userable, error) { @@ -145,7 +147,7 @@ func (c Users) GetByID(ctx context.Context, userID string) (models.Userable, err }) if err != nil { - return nil, support.ConnectorStackErrorTraceWrap(err, "getting user by id") + return nil, clues.Wrap(err, "getting user").WithClues(ctx).WithAll(graph.ErrData(err)...) } return resp, err @@ -167,7 +169,7 @@ func (c Users) GetInfo(ctx context.Context, userID string) (*UserInfo, error) { if err != nil { if !graph.IsErrExchangeMailFolderNotFound(err) { - return nil, support.ConnectorStackErrorTraceWrap(err, "getting user's exchange mailfolders") + return nil, clues.Wrap(err, "getting user's mail folder").WithClues(ctx).WithAll(graph.ErrData(err)...) 
 		}

 		delete(userInfo.DiscoveredServices, path.ExchangeService)
@@ -186,15 +188,15 @@
 func validateUser(item any) (models.Userable, error) {
 	m, ok := item.(models.Userable)
 	if !ok {
-		return nil, errors.Errorf("expected Userable, got %T", item)
+		return nil, clues.Stack(clues.New("unexpected model"), errors.Errorf("%T", item))
 	}

 	if m.GetId() == nil {
-		return nil, errors.Errorf("missing ID")
+		return nil, clues.New("missing ID")
 	}

 	if m.GetUserPrincipalName() == nil {
-		return nil, errors.New("missing principalName")
+		return nil, clues.New("missing principalName")
 	}

 	return m, nil
diff --git a/src/internal/connector/discovery/discovery.go b/src/internal/connector/discovery/discovery.go
index 3f75f74c4..f8a6f27c7 100644
--- a/src/internal/connector/discovery/discovery.go
+++ b/src/internal/connector/discovery/discovery.go
@@ -7,6 +7,7 @@ import (
 	"github.com/pkg/errors"

 	"github.com/alcionai/corso/src/internal/connector/discovery/api"
+	"github.com/alcionai/corso/src/pkg/fault"
 )

 // ---------------------------------------------------------------------------
@@ -14,7 +15,7 @@ import (
 // ---------------------------------------------------------------------------

 type getAller interface {
-	GetAll(context.Context) ([]models.Userable, error)
+	GetAll(context.Context, *fault.Errors) ([]models.Userable, error)
 }

 type getter interface {
@@ -35,8 +36,8 @@ type getWithInfoer interface {
 // ---------------------------------------------------------------------------

 // Users fetches all users in the tenant.
-func Users(ctx context.Context, ga getAller) ([]models.Userable, error) {
-	return ga.GetAll(ctx)
+func Users(ctx context.Context, ga getAller, errs *fault.Errors) ([]models.Userable, error) {
+	return ga.GetAll(ctx, errs)
 }

 func User(ctx context.Context, gwi getWithInfoer, userID string) (models.Userable, *api.UserInfo, error) {
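With discovery.Users now threading a *fault.Errors through GetAll, callers construct the aggregator once and inspect it afterward. A minimal sketch of the calling pattern (the `ua` getAller value is illustrative; `fault.New(true)` mirrors the construction used in the updated tests):

```go
errs := fault.New(true) // fail-fast: the first recorded error halts iteration

// Users passes errs down to GetAll, which records per-item
// validation problems via errs.Add and stops iterating early
// once errs.Failed reports a failure.
us, err := discovery.Users(ctx, ua, errs)
if err != nil {
	// err is errs.Err() when item validation failed, or a
	// non-recoverable Graph/iterator error.
	return nil, err
}
```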
diff --git a/src/internal/connector/exchange/api/api_test.go b/src/internal/connector/exchange/api/api_test.go
index 4fe842452..14693f22c 100644
--- a/src/internal/connector/exchange/api/api_test.go
+++ b/src/internal/connector/exchange/api/api_test.go
@@ -161,39 +161,6 @@ func (suite *ExchangeServiceSuite) TestOptionsForContacts() {
 	}
 }

-// TestGraphQueryFunctions verifies if Query functions APIs
-// through Microsoft Graph are functional
-func (suite *ExchangeServiceSuite) TestGraphQueryFunctions() {
-	ctx, flush := tester.NewContext()
-	defer flush()
-
-	c, err := NewClient(suite.credentials)
-	require.NoError(suite.T(), err)
-
-	userID := tester.M365UserID(suite.T())
-	tests := []struct {
-		name     string
-		function GraphQuery
-	}{
-		{
-			name:     "GraphQuery: Get All ContactFolders",
-			function: c.Contacts().GetAllContactFolderNamesForUser,
-		},
-		{
-			name:     "GraphQuery: Get All Calendars for User",
-			function: c.Events().GetAllCalendarNamesForUser,
-		},
-	}
-
-	for _, test := range tests {
-		suite.T().Run(test.name, func(t *testing.T) {
-			response, err := test.function(ctx, userID)
-			assert.NoError(t, err)
-			assert.NotNil(t, response)
-		})
-	}
-}
-
 //nolint:lll
 var stubHTMLContent = "\r\nHappy New Year,\r\n\r\nIn accordance with TPS report guidelines, there have been questions about how to address our activities SharePoint Cover page. Do you believe this is the best picture? \r\n\r\n\r\n\r\nLet me know if this meets our culture requirements.\r\n\r\nWarm Regards,\r\n\r\nDustin\r\n
" diff --git a/src/internal/connector/exchange/api/contacts.go b/src/internal/connector/exchange/api/contacts.go index 8457c94d1..78db13c08 100644 --- a/src/internal/connector/exchange/api/contacts.go +++ b/src/internal/connector/exchange/api/contacts.go @@ -3,7 +3,6 @@ package api import ( "context" "fmt" - "time" "github.com/alcionai/clues" "github.com/hashicorp/go-multierror" @@ -13,6 +12,7 @@ import ( "github.com/microsoftgraph/msgraph-sdk-go/users" "github.com/pkg/errors" + "github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph/api" "github.com/alcionai/corso/src/internal/connector/support" @@ -80,28 +80,6 @@ func (c Contacts) GetItem( return cont, ContactInfo(cont), nil } -// GetAllContactFolderNamesForUser is a GraphQuery function for getting -// ContactFolderId and display names for contacts. All other information is omitted. -// Does not return the default Contact Folder -func (c Contacts) GetAllContactFolderNamesForUser( - ctx context.Context, - user string, -) (serialization.Parsable, error) { - options, err := optionsForContactFolders([]string{"displayName", "parentFolderId"}) - if err != nil { - return nil, err - } - - var resp models.ContactFolderCollectionResponseable - - err = graph.RunWithRetry(func() error { - resp, err = c.stable.Client().UsersById(user).ContactFolders().Get(ctx, options) - return err - }) - - return resp, err -} - func (c Contacts) GetContainerByID( ctx context.Context, userID, dirID string, @@ -169,10 +147,8 @@ func (c Contacts) EnumerateContainers( continue } - temp := graph.NewCacheFolder(fold, nil) - - err = fn(temp) - if err != nil { + temp := graph.NewCacheFolder(fold, nil, nil) + if err := fn(temp); err != nil { errs = multierror.Append(err, errs) continue } @@ -317,16 +293,8 @@ func (c Contacts) Serialize( // --------------------------------------------------------------------------- func ContactInfo(contact models.Contactable) *details.ExchangeInfo { - name := "" - created := time.Time{} - - if contact.GetDisplayName() != nil { - name = *contact.GetDisplayName() - } - - if contact.GetCreatedDateTime() != nil { - created = *contact.GetCreatedDateTime() - } + name := ptr.Val(contact.GetDisplayName()) + created := ptr.Val(contact.GetCreatedDateTime()) return &details.ExchangeInfo{ ItemType: details.ExchangeContact, diff --git a/src/internal/connector/exchange/api/events.go b/src/internal/connector/exchange/api/events.go index adf218685..cc719f7fc 100644 --- a/src/internal/connector/exchange/api/events.go +++ b/src/internal/connector/exchange/api/events.go @@ -14,6 +14,7 @@ import ( "github.com/pkg/errors" "github.com/alcionai/corso/src/internal/common" + "github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph/api" "github.com/alcionai/corso/src/internal/connector/support" @@ -143,25 +144,6 @@ func (c Events) GetItem( return event, EventInfo(event), nil } -func (c Client) GetAllCalendarNamesForUser( - ctx context.Context, - user string, -) (serialization.Parsable, error) { - options, err := optionsForCalendars([]string{"name", "owner"}) - if err != nil { - return nil, err - } - - var resp models.CalendarCollectionResponseable - - err = graph.RunWithRetry(func() error { - resp, err = c.stable.Client().UsersById(user).Calendars().Get(ctx, options) - return err - }) - - return resp, err -} - // EnumerateContainers iterates through all 
of the users current // calendars, converting each to a graph.CacheFolder, and // calling fn(cf) on each one. If fn(cf) errors, the error is @@ -209,10 +191,11 @@ func (c Events) EnumerateContainers( continue } - temp := graph.NewCacheFolder(cd, path.Builder{}.Append(*cd.GetDisplayName())) - - err = fn(temp) - if err != nil { + temp := graph.NewCacheFolder( + cd, + path.Builder{}.Append(*cd.GetId()), // storage path + path.Builder{}.Append(*cd.GetDisplayName())) // display location + if err := fn(temp); err != nil { errs = multierror.Append(err, errs) continue } @@ -390,11 +373,12 @@ func (c CalendarDisplayable) GetParentFolderId() *string { func EventInfo(evt models.Eventable) *details.ExchangeInfo { var ( - organizer, subject string - recurs bool - start = time.Time{} - end = time.Time{} - created = time.Time{} + organizer string + subject = ptr.Val(evt.GetSubject()) + recurs bool + start = time.Time{} + end = time.Time{} + created = ptr.Val(evt.GetCreatedDateTime()) ) if evt.GetOrganizer() != nil && @@ -405,10 +389,6 @@ func EventInfo(evt models.Eventable) *details.ExchangeInfo { GetAddress() } - if evt.GetSubject() != nil { - subject = *evt.GetSubject() - } - if evt.GetRecurrence() != nil { recurs = true } @@ -437,10 +417,6 @@ func EventInfo(evt models.Eventable) *details.ExchangeInfo { } } - if evt.GetCreatedDateTime() != nil { - created = *evt.GetCreatedDateTime() - } - return &details.ExchangeInfo{ ItemType: details.ExchangeEvent, Organizer: organizer, diff --git a/src/internal/connector/exchange/api/mail.go b/src/internal/connector/exchange/api/mail.go index 5ac96b93a..f94ea0fca 100644 --- a/src/internal/connector/exchange/api/mail.go +++ b/src/internal/connector/exchange/api/mail.go @@ -3,7 +3,6 @@ package api import ( "context" "fmt" - "time" "github.com/alcionai/clues" "github.com/hashicorp/go-multierror" @@ -13,6 +12,7 @@ import ( "github.com/microsoftgraph/msgraph-sdk-go/users" "github.com/pkg/errors" + "github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph/api" "github.com/alcionai/corso/src/internal/connector/support" @@ -198,8 +198,7 @@ func (c Mail) EnumerateContainers( } for _, v := range resp.GetValue() { - temp := graph.NewCacheFolder(v, nil) - + temp := graph.NewCacheFolder(v, nil, nil) if err := fn(temp); err != nil { errs = multierror.Append(errs, errors.Wrap(err, "iterating mail folders delta")) continue @@ -348,9 +347,9 @@ func (c Mail) Serialize( func MailInfo(msg models.Messageable) *details.ExchangeInfo { sender := "" - subject := "" - received := time.Time{} - created := time.Time{} + subject := ptr.Val(msg.GetSubject()) + received := ptr.Val(msg.GetReceivedDateTime()) + created := ptr.Val(msg.GetCreatedDateTime()) if msg.GetSender() != nil && msg.GetSender().GetEmailAddress() != nil && @@ -358,18 +357,6 @@ func MailInfo(msg models.Messageable) *details.ExchangeInfo { sender = *msg.GetSender().GetEmailAddress().GetAddress() } - if msg.GetSubject() != nil { - subject = *msg.GetSubject() - } - - if msg.GetReceivedDateTime() != nil { - received = *msg.GetReceivedDateTime() - } - - if msg.GetCreatedDateTime() != nil { - created = *msg.GetCreatedDateTime() - } - return &details.ExchangeInfo{ ItemType: details.ExchangeMail, Sender: sender, diff --git a/src/internal/connector/exchange/api/options.go b/src/internal/connector/exchange/api/options.go index 67725225f..3c2c39c0e 100644 --- a/src/internal/connector/exchange/api/options.go +++ 
b/src/internal/connector/exchange/api/options.go @@ -135,27 +135,6 @@ func optionsForCalendarsByID(moreOps []string) ( return options, nil } -// optionsForContactFolders places allowed options for exchange.ContactFolder object -// @return is first call in ContactFolders().GetWithRequestConfigurationAndResponseHandler -func optionsForContactFolders(moreOps []string) ( - *users.ItemContactFoldersRequestBuilderGetRequestConfiguration, - error, -) { - selecting, err := buildOptions(moreOps, fieldsForFolders) - if err != nil { - return nil, err - } - - requestParameters := &users.ItemContactFoldersRequestBuilderGetQueryParameters{ - Select: selecting, - } - options := &users.ItemContactFoldersRequestBuilderGetRequestConfiguration{ - QueryParameters: requestParameters, - } - - return options, nil -} - func optionsForContactFolderByID(moreOps []string) ( *users.ItemContactFoldersContactFolderItemRequestBuilderGetRequestConfiguration, error, diff --git a/src/internal/connector/exchange/attachment.go b/src/internal/connector/exchange/attachment.go index ed8828930..b8a29dfe2 100644 --- a/src/internal/connector/exchange/attachment.go +++ b/src/internal/connector/exchange/attachment.go @@ -25,7 +25,8 @@ const ( ) func attachmentType(attachment models.Attachmentable) models.AttachmentType { - switch *attachment.GetOdataType() { + attachmentType := ptr.Val(attachment.GetOdataType()) + switch attachmentType { case fileAttachmentOdataValue: return models.FILE_ATTACHMENTTYPE case itemAttachmentOdataValue: diff --git a/src/internal/connector/exchange/contact_folder_cache.go b/src/internal/connector/exchange/contact_folder_cache.go index b2c077a2e..5ba03172c 100644 --- a/src/internal/connector/exchange/contact_folder_cache.go +++ b/src/internal/connector/exchange/contact_folder_cache.go @@ -29,8 +29,10 @@ func (cfc *contactFolderCache) populateContactRoot( return support.ConnectorStackErrorTraceWrap(err, "fetching root folder") } - temp := graph.NewCacheFolder(f, path.Builder{}.Append(baseContainerPath...)) - + temp := graph.NewCacheFolder( + f, + path.Builder{}.Append(baseContainerPath...), // storage path + path.Builder{}.Append(baseContainerPath...)) // display location if err := cfc.addFolder(temp); err != nil { return errors.Wrap(err, "adding resolver dir") } @@ -56,7 +58,7 @@ func (cfc *contactFolderCache) Populate( return errors.Wrap(err, "enumerating containers") } - if err := cfc.populatePaths(ctx); err != nil { + if err := cfc.populatePaths(ctx, false); err != nil { return errors.Wrap(err, "populating paths") } diff --git a/src/internal/connector/exchange/container_resolver.go b/src/internal/connector/exchange/container_resolver.go index 3541a3914..248f7cd4f 100644 --- a/src/internal/connector/exchange/container_resolver.go +++ b/src/internal/connector/exchange/container_resolver.go @@ -51,38 +51,52 @@ type containerResolver struct { func (cr *containerResolver) IDToPath( ctx context.Context, folderID string, -) (*path.Builder, error) { - return cr.idToPath(ctx, folderID, 0) + useIDInPath bool, +) (*path.Builder, *path.Builder, error) { + return cr.idToPath(ctx, folderID, 0, useIDInPath) } func (cr *containerResolver) idToPath( ctx context.Context, folderID string, depth int, -) (*path.Builder, error) { + useIDInPath bool, +) (*path.Builder, *path.Builder, error) { if depth >= maxIterations { - return nil, errors.New("path contains cycle or is too tall") + return nil, nil, errors.New("path contains cycle or is too tall") } c, ok := cr.cache[folderID] if !ok { - return nil, errors.Errorf("folder %s 
not cached", folderID) + return nil, nil, errors.Errorf("folder %s not cached", folderID) } p := c.Path() if p != nil { - return p, nil + return p, c.Location(), nil } - parentPath, err := cr.idToPath(ctx, *c.GetParentFolderId(), depth+1) + parentPath, parentLoc, err := cr.idToPath(ctx, *c.GetParentFolderId(), depth+1, useIDInPath) if err != nil { - return nil, errors.Wrap(err, "retrieving parent folder") + return nil, nil, errors.Wrap(err, "retrieving parent folder") } - fullPath := parentPath.Append(*c.GetDisplayName()) + toAppend := *c.GetDisplayName() + if useIDInPath { + toAppend = *c.GetId() + } + + fullPath := parentPath.Append(toAppend) c.SetPath(fullPath) - return fullPath, nil + var locPath *path.Builder + + if parentLoc != nil { + locPath = parentLoc.Append(*c.GetDisplayName()) + c.SetLocation(locPath) + } + + return fullPath, locPath, nil } // PathInCache utility function to return m365ID of folder if the path.Folders @@ -93,13 +107,13 @@ func (cr *containerResolver) PathInCache(pathString string) (string, bool) { return "", false } - for _, contain := range cr.cache { - if contain.Path() == nil { + for _, cc := range cr.cache { + if cc.Path() == nil { continue } - if contain.Path().String() == pathString { - return *contain.GetId(), true + if cc.Path().String() == pathString { + return *cc.GetId(), true } } @@ -141,18 +155,21 @@ func (cr *containerResolver) Items() []graph.CachedContainer { // AddToCache adds container to map in field 'cache' // @returns error iff the required values are not accessible. -func (cr *containerResolver) AddToCache(ctx context.Context, f graph.Container) error { +func (cr *containerResolver) AddToCache( + ctx context.Context, + f graph.Container, + useIDInPath bool, +) error { temp := graph.CacheFolder{ Container: f, } - if err := cr.addFolder(temp); err != nil { return errors.Wrap(err, "adding cache folder") } // Populate the path for this entry so calls to PathInCache succeed no matter // when they're made. - _, err := cr.IDToPath(ctx, *f.GetId()) + _, _, err := cr.IDToPath(ctx, *f.GetId(), useIDInPath) if err != nil { return errors.Wrap(err, "adding cache entry") } @@ -160,12 +177,18 @@ func (cr *containerResolver) AddToCache(ctx context.Context, f graph.Container) return nil } -func (cr *containerResolver) populatePaths(ctx context.Context) error { +// DestinationNameToID returns an empty string. This is only supported by exchange +// calendars at this time. +func (cr *containerResolver) DestinationNameToID(dest string) string { + return "" +} + +func (cr *containerResolver) populatePaths(ctx context.Context, useIDInPath bool) error { var errs *multierror.Error // Populate all folder paths. 
for _, f := range cr.Items() { - _, err := cr.IDToPath(ctx, *f.GetId()) + _, _, err := cr.IDToPath(ctx, *f.GetId(), useIDInPath) if err != nil { errs = multierror.Append(errs, errors.Wrap(err, "populating path")) } diff --git a/src/internal/connector/exchange/container_resolver_test.go b/src/internal/connector/exchange/container_resolver_test.go index be0704f46..5bc4fe317 100644 --- a/src/internal/connector/exchange/container_resolver_test.go +++ b/src/internal/connector/exchange/container_resolver_test.go @@ -1,6 +1,7 @@ package exchange import ( + "fmt" stdpath "path" "testing" @@ -26,16 +27,19 @@ type mockContainer struct { displayName *string parentID *string p *path.Builder + l *path.Builder } //nolint:revive func (m mockContainer) GetId() *string { return m.id } //nolint:revive -func (m mockContainer) GetParentFolderId() *string { return m.parentID } -func (m mockContainer) GetDisplayName() *string { return m.displayName } -func (m mockContainer) Path() *path.Builder { return m.p } -func (m mockContainer) SetPath(p *path.Builder) {} +func (m mockContainer) GetParentFolderId() *string { return m.parentID } +func (m mockContainer) GetDisplayName() *string { return m.displayName } +func (m mockContainer) Location() *path.Builder { return m.l } +func (m mockContainer) SetLocation(p *path.Builder) {} +func (m mockContainer) Path() *path.Builder { return m.p } +func (m mockContainer) SetPath(p *path.Builder) {} func strPtr(s string) *string { return &s @@ -168,7 +172,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() { parentID: nil, }, nil, - ), + nil), check: assert.Error, }, { @@ -180,7 +184,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() { parentID: nil, }, path.Builder{}.Append("foo"), - ), + path.Builder{}.Append("loc")), check: assert.NoError, }, { @@ -192,7 +196,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() { parentID: &testParentID, }, path.Builder{}.Append("foo"), - ), + path.Builder{}.Append("loc")), check: assert.Error, }, { @@ -204,7 +208,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() { parentID: &testParentID, }, path.Builder{}.Append("foo"), - ), + path.Builder{}.Append("loc")), check: assert.Error, }, { @@ -216,7 +220,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() { parentID: &testParentID, }, nil, - ), + nil), check: assert.NoError, }, } @@ -238,52 +242,57 @@ func newMockCachedContainer(name string) *mockCachedContainer { } type mockCachedContainer struct { - id string - parentID string - displayName string - p *path.Builder - expectedPath string + id string + parentID string + displayName string + l *path.Builder + p *path.Builder + expectedPath string + expectedLocation string } //nolint:revive -func (m mockCachedContainer) GetId() *string { - return &m.id -} +func (m mockCachedContainer) GetId() *string { return &m.id } //nolint:revive -func (m mockCachedContainer) GetParentFolderId() *string { - return &m.parentID -} +func (m mockCachedContainer) GetParentFolderId() *string { return &m.parentID } +func (m mockCachedContainer) GetDisplayName() *string { return &m.displayName } +func (m mockCachedContainer) Location() *path.Builder { return m.l } +func (m *mockCachedContainer) SetLocation(newLoc *path.Builder) { m.l = newLoc } +func (m mockCachedContainer) Path() *path.Builder { return m.p } +func (m *mockCachedContainer) SetPath(newPath *path.Builder) { m.p = newPath } -func (m mockCachedContainer) GetDisplayName() *string { - return &m.displayName -} - -func (m mockCachedContainer) Path() *path.Builder { - return m.p -} - -func 
(m *mockCachedContainer) SetPath(newPath *path.Builder) { - m.p = newPath -} - -func resolverWithContainers(numContainers int) (*containerResolver, []*mockCachedContainer) { +func resolverWithContainers(numContainers int, useIDInPath bool) (*containerResolver, []*mockCachedContainer) { containers := make([]*mockCachedContainer, 0, numContainers) for i := 0; i < numContainers; i++ { - containers = append(containers, newMockCachedContainer("a")) + containers = append(containers, newMockCachedContainer(fmt.Sprintf("%d", i))) } // Base case for the recursive lookup. - containers[0].p = path.Builder{}.Append(containers[0].displayName) - containers[0].expectedPath = containers[0].displayName + dn := containers[0].displayName + + apndP := dn + if useIDInPath { + apndP = containers[0].id + } + + containers[0].p = path.Builder{}.Append(apndP) + containers[0].expectedPath = apndP + containers[0].l = path.Builder{}.Append(dn) + containers[0].expectedLocation = dn for i := 1; i < len(containers); i++ { + dn := containers[i].displayName + + apndP := dn + if useIDInPath { + apndP = containers[i].id + } + containers[i].parentID = containers[i-1].id - containers[i].expectedPath = stdpath.Join( - containers[i-1].expectedPath, - containers[i].displayName, - ) + containers[i].expectedPath = stdpath.Join(containers[i-1].expectedPath, apndP) + containers[i].expectedLocation = stdpath.Join(containers[i-1].expectedLocation, dn) } resolver := newContainerResolver() @@ -303,13 +312,16 @@ func resolverWithContainers(numContainers int) (*containerResolver, []*mockCache type ConfiguredFolderCacheUnitSuite struct { suite.Suite - fc *containerResolver + fc *containerResolver + fcWithID *containerResolver - allContainers []*mockCachedContainer + allContainers []*mockCachedContainer + containersWithID []*mockCachedContainer } func (suite *ConfiguredFolderCacheUnitSuite) SetupTest() { - suite.fc, suite.allContainers = resolverWithContainers(4) + suite.fc, suite.allContainers = resolverWithContainers(4, false) + suite.fcWithID, suite.containersWithID = resolverWithContainers(4, true) } func TestConfiguredFolderCacheUnitSuite(t *testing.T) { @@ -339,8 +351,8 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestDepthLimit() { for _, test := range table { suite.T().Run(test.name, func(t *testing.T) { - resolver, containers := resolverWithContainers(test.numContainers) - _, err := resolver.IDToPath(ctx, containers[len(containers)-1].id) + resolver, containers := resolverWithContainers(test.numContainers, false) + _, _, err := resolver.IDToPath(ctx, containers[len(containers)-1].id, false) test.check(t, err) }) } @@ -352,7 +364,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestPopulatePaths() { t := suite.T() - require.NoError(t, suite.fc.populatePaths(ctx)) + require.NoError(t, suite.fc.populatePaths(ctx, false)) items := suite.fc.Items() gotPaths := make([]string, 0, len(items)) @@ -375,10 +387,24 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderNoPathsCached for _, c := range suite.allContainers { suite.T().Run(*c.GetDisplayName(), func(t *testing.T) { - p, err := suite.fc.IDToPath(ctx, c.id) + p, l, err := suite.fc.IDToPath(ctx, c.id, false) require.NoError(t, err) - assert.Equal(t, c.expectedPath, p.String()) + assert.Equal(t, c.expectedLocation, l.String()) + }) + } +} + +func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderNoPathsCached_useID() { + ctx, flush := tester.NewContext() + defer flush() + + for _, c := range suite.containersWithID { + suite.T().Run(*c.GetDisplayName(), 
func(t *testing.T) { + p, l, err := suite.fcWithID.IDToPath(ctx, c.id, true) + require.NoError(t, err) + assert.Equal(t, c.expectedPath, p.String()) + assert.Equal(t, c.expectedLocation, l.String()) }) } } @@ -390,17 +416,37 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderCachesPaths() t := suite.T() c := suite.allContainers[len(suite.allContainers)-1] - p, err := suite.fc.IDToPath(ctx, c.id) + p, l, err := suite.fc.IDToPath(ctx, c.id, false) require.NoError(t, err) - assert.Equal(t, c.expectedPath, p.String()) + assert.Equal(t, c.expectedLocation, l.String()) c.parentID = "foo" - p, err = suite.fc.IDToPath(ctx, c.id) + p, l, err = suite.fc.IDToPath(ctx, c.id, false) require.NoError(t, err) - assert.Equal(t, c.expectedPath, p.String()) + assert.Equal(t, c.expectedLocation, l.String()) +} + +func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderCachesPaths_useID() { + ctx, flush := tester.NewContext() + defer flush() + + t := suite.T() + c := suite.containersWithID[len(suite.containersWithID)-1] + + p, l, err := suite.fcWithID.IDToPath(ctx, c.id, true) + require.NoError(t, err) + assert.Equal(t, c.expectedPath, p.String()) + assert.Equal(t, c.expectedLocation, l.String()) + + c.parentID = "foo" + + p, l, err = suite.fcWithID.IDToPath(ctx, c.id, true) + require.NoError(t, err) + assert.Equal(t, c.expectedPath, p.String()) + assert.Equal(t, c.expectedLocation, l.String()) } func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsParentNotFound() { @@ -413,7 +459,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsParentN delete(suite.fc.cache, almostLast.id) - _, err := suite.fc.IDToPath(ctx, last.id) + _, _, err := suite.fc.IDToPath(ctx, last.id, false) assert.Error(t, err) } @@ -423,7 +469,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsNotFoun t := suite.T() - _, err := suite.fc.IDToPath(ctx, "foo") + _, _, err := suite.fc.IDToPath(ctx, "foo", false) assert.Error(t, err) } @@ -431,20 +477,26 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestAddToCache() { ctx, flush := tester.NewContext() defer flush() - t := suite.T() - - last := suite.allContainers[len(suite.allContainers)-1] - - m := newMockCachedContainer("testAddFolder") + var ( + dest = "testAddFolder" + t = suite.T() + last = suite.allContainers[len(suite.allContainers)-1] + m = newMockCachedContainer(dest) + ) m.parentID = last.id m.expectedPath = stdpath.Join(last.expectedPath, m.displayName) + m.expectedLocation = stdpath.Join(last.expectedPath, m.displayName) - require.NoError(t, suite.fc.AddToCache(ctx, m)) + require.Empty(t, suite.fc.DestinationNameToID(dest), "destination not yet added to cache") + require.NoError(t, suite.fc.AddToCache(ctx, m, false)) + require.Empty(t, suite.fc.DestinationNameToID(dest), + "destination id from cache, still empty, because this is not a calendar") - p, err := suite.fc.IDToPath(ctx, m.id) + p, l, err := suite.fc.IDToPath(ctx, m.id, false) require.NoError(t, err) assert.Equal(t, m.expectedPath, p.String()) + assert.Equal(t, m.expectedLocation, l.String()) } // --------------------------------------------------------------------------- @@ -506,32 +558,35 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() { pathFunc2 func(t *testing.T) path.Path category path.CategoryType folderPrefix string + useIDForPath bool }{ { name: "Mail Cache Test", category: path.EmailCategory, pathFunc1: func(t *testing.T) path.Path { - pth, err := path.Builder{}.Append("Griffindor"). 
- Append("Croix").ToDataLayerExchangePathForCategory( - suite.credentials.AzureTenantID, - user, - path.EmailCategory, - false, - ) - + pth, err := path.Builder{}. + Append("Griffindor"). + Append("Croix"). + ToDataLayerExchangePathForCategory( + suite.credentials.AzureTenantID, + user, + path.EmailCategory, + false) require.NoError(t, err) + return pth }, pathFunc2: func(t *testing.T) path.Path { - pth, err := path.Builder{}.Append("Griffindor"). - Append("Felicius").ToDataLayerExchangePathForCategory( - suite.credentials.AzureTenantID, - user, - path.EmailCategory, - false, - ) - + pth, err := path.Builder{}. + Append("Griffindor"). + Append("Felicius"). + ToDataLayerExchangePathForCategory( + suite.credentials.AzureTenantID, + user, + path.EmailCategory, + false) require.NoError(t, err) + return pth }, }, @@ -539,63 +594,65 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() { name: "Contact Cache Test", category: path.ContactsCategory, pathFunc1: func(t *testing.T) path.Path { - aPath, err := path.Builder{}.Append("HufflePuff"). + aPath, err := path.Builder{}. + Append("HufflePuff"). ToDataLayerExchangePathForCategory( suite.credentials.AzureTenantID, user, path.ContactsCategory, - false, - ) - + false) require.NoError(t, err) + return aPath }, pathFunc2: func(t *testing.T) path.Path { - aPath, err := path.Builder{}.Append("Ravenclaw"). + aPath, err := path.Builder{}. + Append("Ravenclaw"). ToDataLayerExchangePathForCategory( suite.credentials.AzureTenantID, user, path.ContactsCategory, - false, - ) - + false) require.NoError(t, err) + return aPath }, }, { - name: "Event Cache Test", - category: path.EventsCategory, + name: "Event Cache Test", + category: path.EventsCategory, + useIDForPath: true, pathFunc1: func(t *testing.T) path.Path { - aPath, err := path.Builder{}.Append("Durmstrang"). + aPath, err := path.Builder{}. + Append("Durmstrang"). ToDataLayerExchangePathForCategory( suite.credentials.AzureTenantID, user, path.EventsCategory, - false, - ) + false) require.NoError(t, err) + return aPath }, pathFunc2: func(t *testing.T) path.Path { - aPath, err := path.Builder{}.Append("Beauxbatons"). + aPath, err := path.Builder{}. + Append("Beauxbatons"). 
ToDataLayerExchangePathForCategory( suite.credentials.AzureTenantID, user, path.EventsCategory, - false, - ) + false) require.NoError(t, err) + return aPath }, - folderPrefix: calendarOthersFolder, }, } ) for _, test := range tests { suite.T().Run(test.name, func(t *testing.T) { - folderID, err := CreateContainerDestinaion( + folderID, err := CreateContainerDestination( ctx, m365, test.pathFunc1(t), @@ -605,21 +662,26 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() { resolver := directoryCaches[test.category] - _, err = resolver.IDToPath(ctx, folderID) + _, _, err = resolver.IDToPath(ctx, folderID, test.useIDForPath) assert.NoError(t, err) - secondID, err := CreateContainerDestinaion( + parentContainer := folderName + if test.useIDForPath { + parentContainer = folderID + } + + secondID, err := CreateContainerDestination( ctx, m365, test.pathFunc2(t), - folderName, + parentContainer, directoryCaches) require.NoError(t, err) - _, err = resolver.IDToPath(ctx, secondID) + _, _, err = resolver.IDToPath(ctx, secondID, test.useIDForPath) require.NoError(t, err) - p := stdpath.Join(test.folderPrefix, folderName) + p := stdpath.Join(test.folderPrefix, parentContainer) _, ok := resolver.PathInCache(p) require.True(t, ok, "looking for path in cache: %s", p) }) diff --git a/src/internal/connector/exchange/data_collections.go b/src/internal/connector/exchange/data_collections.go index 92e826f3d..7fd180281 100644 --- a/src/internal/connector/exchange/data_collections.go +++ b/src/internal/connector/exchange/data_collections.go @@ -3,9 +3,8 @@ package exchange import ( "context" "encoding/json" - "fmt" - "github.com/hashicorp/go-multierror" + "github.com/alcionai/clues" "github.com/pkg/errors" "github.com/alcionai/corso/src/internal/connector/exchange/api" @@ -15,6 +14,7 @@ import ( "github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/control" + "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/selectors" ) @@ -90,7 +90,7 @@ func parseMetadataCollections( for { select { case <-ctx.Done(): - return nil, errors.Wrap(ctx.Err(), "parsing collection metadata") + return nil, clues.Wrap(ctx.Err(), "parsing collection metadata").WithClues(ctx) case item, ok := <-items: if !ok { @@ -105,13 +105,13 @@ func parseMetadataCollections( err := json.NewDecoder(item.ToReader()).Decode(&m) if err != nil { - return nil, errors.New("decoding metadata json") + return nil, clues.New("decoding metadata json").WithClues(ctx) } switch item.UUID() { case graph.PreviousPathFileName: if _, ok := found[category]["path"]; ok { - return nil, errors.Errorf("multiple versions of %s path metadata", category) + return nil, clues.Wrap(clues.New(category.String()), "multiple versions of path metadata").WithClues(ctx) } for k, p := range m { @@ -122,7 +122,7 @@ func parseMetadataCollections( case graph.DeltaURLsFileName: if _, ok := found[category]["delta"]; ok { - return nil, errors.Errorf("multiple versions of %s delta metadata", category) + return nil, clues.Wrap(clues.New(category.String()), "multiple versions of delta metadata").WithClues(ctx) } for k, d := range m { @@ -167,16 +167,16 @@ func DataCollections( acct account.M365Config, su support.StatusUpdater, ctrlOpts control.Options, + errs *fault.Errors, ) ([]data.BackupCollection, map[string]struct{}, error) { eb, err := selector.ToExchangeBackup() if err != nil { - return nil, nil, errors.Wrap(err, 
"exchangeDataCollection: parsing selector") + return nil, nil, clues.Wrap(err, "exchange dataCollection selector").WithClues(ctx) } var ( user = selector.DiscreteOwner collections = []data.BackupCollection{} - errs error ) cdps, err := parseMetadataCollections(ctx, metadata) @@ -185,26 +185,27 @@ func DataCollections( } for _, scope := range eb.Scopes() { - dps := cdps[scope.Category().PathType()] + if errs.Failed() { + break + } dcs, err := createCollections( ctx, acct, user, scope, - dps, + cdps[scope.Category().PathType()], ctrlOpts, su) if err != nil { - return nil, nil, support.WrapAndAppend(user, err, errs) + errs.Add(err) + continue } collections = append(collections, dcs...) } - // Exchange does not require adding items to the global exclude list so always - // return nil. - return collections, nil, errs + return collections, nil, errs.Err() } func getterByType(ac api.Client, category path.CategoryType) (addedAndRemovedItemIDsGetter, error) { @@ -216,7 +217,7 @@ func getterByType(ac api.Client, category path.CategoryType) (addedAndRemovedIte case path.ContactsCategory: return ac.Contacts(), nil default: - return nil, fmt.Errorf("category %s not supported by getFetchIDFunc", category) + return nil, clues.Wrap(clues.New(category.String()), "category not supported") } } @@ -233,7 +234,6 @@ func createCollections( su support.StatusUpdater, ) ([]data.BackupCollection, error) { var ( - errs *multierror.Error allCollections = make([]data.BackupCollection, 0) ac = api.Client{Credentials: creds} category = scope.Category().PathType() @@ -241,7 +241,7 @@ func createCollections( getter, err := getterByType(ac, category) if err != nil { - return nil, err + return nil, clues.Stack(err).WithClues(ctx) } // Create collection of ExchangeDataCollection @@ -262,7 +262,7 @@ func createCollections( resolver, err := PopulateExchangeContainerResolver(ctx, qp) if err != nil { - return nil, errors.Wrap(err, "getting folder cache") + return nil, errors.Wrap(err, "populating container cache") } err = filterContainersAndFillCollections( @@ -275,7 +275,6 @@ func createCollections( scope, dps, ctrlOpts) - if err != nil { return nil, errors.Wrap(err, "filling collections") } @@ -286,5 +285,5 @@ func createCollections( allCollections = append(allCollections, coll) } - return allCollections, errs.ErrorOrNil() + return allCollections, nil } diff --git a/src/internal/connector/exchange/data_collections_test.go b/src/internal/connector/exchange/data_collections_test.go index 3df22d030..d337b9d32 100644 --- a/src/internal/connector/exchange/data_collections_test.go +++ b/src/internal/connector/exchange/data_collections_test.go @@ -9,6 +9,7 @@ import ( "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" + "github.com/alcionai/corso/src/internal/connector/exchange/api" "github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/data" @@ -274,8 +275,8 @@ func (suite *DataCollectionsIntegrationSuite) TestMailFetch() { continue } - require.NotEmpty(t, c.FullPath().Folder()) - folder := c.FullPath().Folder() + require.NotEmpty(t, c.FullPath().Folder(false)) + folder := c.FullPath().Folder(false) delete(test.folderNames, folder) } @@ -507,7 +508,7 @@ func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression continue } - assert.Equal(t, edc.FullPath().Folder(), DefaultContactFolder) + assert.Equal(t, edc.FullPath().Folder(false), DefaultContactFolder) assert.NotZero(t, count) } @@ 
-527,13 +528,35 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression( users := []string{suite.user} + ac, err := api.NewClient(acct) + require.NoError(suite.T(), err, "creating client") + + var ( + calID string + bdayID string + ) + + fn := func(gcf graph.CacheFolder) error { + if *gcf.GetDisplayName() == DefaultCalendar { + calID = *gcf.GetId() + } + + if *gcf.GetDisplayName() == "Birthdays" { + bdayID = *gcf.GetId() + } + + return nil + } + + require.NoError(suite.T(), ac.Events().EnumerateContainers(ctx, suite.user, DefaultCalendar, fn)) + tests := []struct { name, expected string scope selectors.ExchangeScope }{ { name: "Default Event Calendar", - expected: DefaultCalendar, + expected: calID, scope: selectors.NewExchangeBackup(users).EventCalendars( []string{DefaultCalendar}, selectors.PrefixMatch(), @@ -541,9 +564,9 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression( }, { name: "Birthday Calendar", - expected: calendarOthersFolder + "/Birthdays", + expected: bdayID, scope: selectors.NewExchangeBackup(users).EventCalendars( - []string{calendarOthersFolder + "/Birthdays"}, + []string{"Birthdays"}, selectors.PrefixMatch(), )[0], }, @@ -571,9 +594,9 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression( if edc.FullPath().Service() != path.ExchangeMetadataService { isMetadata = true - assert.Equal(t, test.expected, edc.FullPath().Folder()) + assert.Equal(t, test.expected, edc.FullPath().Folder(false)) } else { - assert.Equal(t, "", edc.FullPath().Folder()) + assert.Equal(t, "", edc.FullPath().Folder(false)) } for item := range edc.Items() { diff --git a/src/internal/connector/exchange/event_calendar_cache.go b/src/internal/connector/exchange/event_calendar_cache.go index 0377433ee..c7736970e 100644 --- a/src/internal/connector/exchange/event_calendar_cache.go +++ b/src/internal/connector/exchange/event_calendar_cache.go @@ -14,9 +14,10 @@ var _ graph.ContainerResolver = &eventCalendarCache{} type eventCalendarCache struct { *containerResolver - enumer containersEnumerator - getter containerGetter - userID string + enumer containersEnumerator + getter containerGetter + userID string + newAdditions map[string]string } // init ensures that the structure's fields are initialized. @@ -44,7 +45,10 @@ func (ecc *eventCalendarCache) populateEventRoot(ctx context.Context) error { return errors.Wrap(err, "fetching calendar "+support.ConnectorStackErrorTrace(err)) } - temp := graph.NewCacheFolder(f, path.Builder{}.Append(container)) + temp := graph.NewCacheFolder( + f, + path.Builder{}.Append(*f.GetId()), // storage path + path.Builder{}.Append(*f.GetDisplayName())) // display location if err := ecc.addFolder(temp); err != nil { return errors.Wrap(err, "initializing calendar resolver") } @@ -68,16 +72,12 @@ func (ecc *eventCalendarCache) Populate( ctx, ecc.userID, "", - func(cf graph.CacheFolder) error { - cf.SetPath(path.Builder{}.Append(calendarOthersFolder, *cf.GetDisplayName())) - return ecc.addFolder(cf) - }, - ) + ecc.addFolder) if err != nil { return errors.Wrap(err, "enumerating containers") } - if err := ecc.populatePaths(ctx); err != nil { + if err := ecc.populatePaths(ctx, true); err != nil { return errors.Wrap(err, "establishing calendar paths") } @@ -86,23 +86,40 @@ func (ecc *eventCalendarCache) Populate( // AddToCache adds container to map in field 'cache' // @returns error iff the required values are not accessible. 
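The AddToCache rework below also records newly created calendars in a display-name-to-ID map, so a restore destination named by the user can be translated into the ID used for storage paths. A rough sketch of the intended flow (the variables are illustrative; both method signatures are from this diff):

```go
// Register the freshly created destination calendar; true selects
// ID-based storage paths, as calendars require.
if err := ecc.AddToCache(ctx, createdCalendar, true); err != nil {
	return err
}

// Map the user-facing display name back to its M365 container ID.
// Resolvers other than exchange calendars always return "".
destID := ecc.DestinationNameToID(destinationName)
```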
-func (ecc *eventCalendarCache) AddToCache(ctx context.Context, f graph.Container) error {
+func (ecc *eventCalendarCache) AddToCache(ctx context.Context, f graph.Container, useIDInPath bool) error {
 	if err := checkIDAndName(f); err != nil {
 		return errors.Wrap(err, "validating container")
 	}
 
-	temp := graph.NewCacheFolder(f, path.Builder{}.Append(calendarOthersFolder, *f.GetDisplayName()))
+	temp := graph.NewCacheFolder(
+		f,
+		path.Builder{}.Append(*f.GetId()),          // storage path
+		path.Builder{}.Append(*f.GetDisplayName())) // display location
+
+	if len(ecc.newAdditions) == 0 {
+		ecc.newAdditions = map[string]string{}
+	}
+
+	ecc.newAdditions[*f.GetDisplayName()] = *f.GetId()
 
 	if err := ecc.addFolder(temp); err != nil {
+		delete(ecc.newAdditions, *f.GetDisplayName())
 		return errors.Wrap(err, "adding container")
 	}
 
 	// Populate the path for this entry so calls to PathInCache succeed no matter
 	// when they're made.
-	_, err := ecc.IDToPath(ctx, *f.GetId())
+	_, _, err := ecc.IDToPath(ctx, *f.GetId(), true)
 	if err != nil {
+		delete(ecc.newAdditions, *f.GetDisplayName())
 		return errors.Wrap(err, "setting path to container id")
 	}
 
 	return nil
 }
+
+// DestinationNameToID returns the M365 ID recorded for a container that was
+// added to this cache with the given display name, or an empty string if no
+// such container was added. Only exchange calendars support this at this time.
+func (ecc *eventCalendarCache) DestinationNameToID(dest string) string {
+	return ecc.newAdditions[dest]
+}
diff --git a/src/internal/connector/exchange/exchange_data_collection.go b/src/internal/connector/exchange/exchange_data_collection.go
index 07ce33b52..fd37a1f1f 100644
--- a/src/internal/connector/exchange/exchange_data_collection.go
+++ b/src/internal/connector/exchange/exchange_data_collection.go
@@ -77,6 +77,11 @@ type Collection struct {
 	// moved. It will be empty on its first retrieval.
 	prevPath path.Path
 
+	// locationPath contains the path with human-readable display names,
+	// i.e. "/Inbox/Important" instead of "/abcdxyz123/algha=lgkhal=t".
+	// Currently only implemented for Exchange Calendars.
+	locationPath path.Path
+
 	state data.CollectionState
 
 	// doNotMergeItems should only be true if the old delta token expired.
@@ -91,7 +96,7 @@ type Collection struct {
 // or notMoved (if they match).
 func NewCollection(
 	user string,
-	curr, prev path.Path,
+	curr, prev, location path.Path,
 	category path.CategoryType,
 	items itemer,
 	statusUpdater support.StatusUpdater,
@@ -99,18 +104,19 @@ func NewCollection(
 	doNotMergeItems bool,
 ) Collection {
 	collection := Collection{
+		added:           make(map[string]struct{}, 0),
 		category:        category,
 		ctrl:            ctrlOpts,
 		data:            make(chan data.Stream, collectionChannelBufferSize),
 		doNotMergeItems: doNotMergeItems,
 		fullPath:        curr,
-		added:           make(map[string]struct{}, 0),
-		removed:         make(map[string]struct{}, 0),
+		items:           items,
+		locationPath:    location,
 		prevPath:        prev,
+		removed:         make(map[string]struct{}, 0),
 		state:           data.StateOf(prev, curr),
 		statusUpdater:   statusUpdater,
 		user:            user,
-		items:           items,
 	}
 
 	return collection
@@ -128,6 +134,12 @@ func (col *Collection) FullPath() path.Path {
 	return col.fullPath
 }
 
+// LocationPath produces the Collection's full path, but with display names
+// instead of IDs in the folders. Only populated for Calendars.
+func (col *Collection) LocationPath() path.Path {
+	return col.locationPath
+}
+
 // TODO(ashmrtn): Fill in with previous path once GraphConnector compares old
 // and new folder hierarchies.
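The two `path.Builder` arguments to `graph.NewCacheFolder` above are the crux of the duplicate-name fix: storage paths are keyed by immutable container IDs, while a parallel location keeps the display names for human consumption. A minimal, dependency-free sketch of that bookkeeping (the ID and names below are invented for illustration, not real M365 values):

```go
package main

import (
	"fmt"
	"strings"
)

// cacheFolder mirrors the dual-path idea: the storage path is built from
// immutable container IDs, the location from display names.
type cacheFolder struct {
	id, displayName   string
	storage, location []string
}

func main() {
	cal := cacheFolder{
		id:          "AAMkAGZ-abc=", // hypothetical M365 container ID
		displayName: "Team Events",
		storage:     []string{"AAMkAGZ-abc="},
		location:    []string{"Team Events"},
	}

	// Renaming the calendar only changes the location; the storage path,
	// and therefore the backup entries keyed by it, stay stable. Duplicate
	// display names no longer collide because IDs are unique.
	cal.displayName = "Team Events (2023)"
	cal.location = []string{cal.displayName}

	fmt.Println("storage: ", strings.Join(cal.storage, "/"))
	fmt.Println("location:", strings.Join(cal.location, "/"))
}
```

Because the storage key never changes on rename, two calendars that share a display name can no longer overwrite each other's entries, which is exactly the failure mode the changelog calls out.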
func (col Collection) PreviousPath() path.Path { @@ -172,7 +184,7 @@ func (col *Collection) streamItems(ctx context.Context) { ctx, col.fullPath.Category().String(), observe.PII(user), - observe.PII(col.fullPath.Folder())) + observe.PII(col.fullPath.Folder(false))) go closer() @@ -331,7 +343,7 @@ func (col *Collection) finishPopulation(ctx context.Context, success int, totalB TotalBytes: totalBytes, }, errs, - col.fullPath.Folder()) + col.fullPath.Folder(false)) logger.Ctx(ctx).Debugw("done streaming items", "status", status.String()) col.statusUpdater(status) } diff --git a/src/internal/connector/exchange/exchange_data_collection_test.go b/src/internal/connector/exchange/exchange_data_collection_test.go index b327c70c8..8644a0523 100644 --- a/src/internal/connector/exchange/exchange_data_collection_test.go +++ b/src/internal/connector/exchange/exchange_data_collection_test.go @@ -12,8 +12,10 @@ import ( "github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/connector/graph" + "github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/backup/details" + "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/path" ) @@ -116,6 +118,70 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataCollection_NewExchange suite.Equal(fullPath, edc.FullPath()) } +func (suite *ExchangeDataCollectionSuite) TestNewCollection_state() { + fooP, err := path.Builder{}. + Append("foo"). + ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false) + require.NoError(suite.T(), err) + barP, err := path.Builder{}. + Append("bar"). + ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false) + require.NoError(suite.T(), err) + locP, err := path.Builder{}. + Append("human-readable"). 
+ ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false) + require.NoError(suite.T(), err) + + table := []struct { + name string + prev path.Path + curr path.Path + loc path.Path + expect data.CollectionState + }{ + { + name: "new", + curr: fooP, + loc: locP, + expect: data.NewState, + }, + { + name: "not moved", + prev: fooP, + curr: fooP, + loc: locP, + expect: data.NotMovedState, + }, + { + name: "moved", + prev: fooP, + curr: barP, + loc: locP, + expect: data.MovedState, + }, + { + name: "deleted", + prev: fooP, + expect: data.DeletedState, + }, + } + for _, test := range table { + suite.T().Run(test.name, func(t *testing.T) { + c := NewCollection( + "u", + test.curr, test.prev, test.loc, + 0, + &mockItemer{}, nil, + control.Options{}, + false) + assert.Equal(t, test.expect, c.State(), "collection state") + assert.Equal(t, test.curr, c.fullPath, "full path") + assert.Equal(t, test.prev, c.prevPath, "prev path") + assert.Equal(t, test.loc, c.locationPath, "location path") + }) + } +} + func (suite *ExchangeDataCollectionSuite) TestGetItemWithRetries() { table := []struct { name string diff --git a/src/internal/connector/exchange/exchange_vars.go b/src/internal/connector/exchange/exchange_vars.go index 988d20330..e45de0bf0 100644 --- a/src/internal/connector/exchange/exchange_vars.go +++ b/src/internal/connector/exchange/exchange_vars.go @@ -38,5 +38,4 @@ const ( rootFolderAlias = "msgfolderroot" DefaultContactFolder = "Contacts" DefaultCalendar = "Calendar" - calendarOthersFolder = "Other Calendars" ) diff --git a/src/internal/connector/exchange/folder_resolver_test.go b/src/internal/connector/exchange/folder_resolver_test.go index ea7ccb995..8caab5c87 100644 --- a/src/internal/connector/exchange/folder_resolver_test.go +++ b/src/internal/connector/exchange/folder_resolver_test.go @@ -47,6 +47,9 @@ func (suite *CacheResolverSuite) TestPopulate() { ac, err := api.NewClient(suite.credentials) require.NoError(suite.T(), err) + cal, err := ac.Events().GetContainerByID(ctx, tester.M365UserID(suite.T()), DefaultCalendar) + require.NoError(suite.T(), err) + eventFunc := func(t *testing.T) graph.ContainerResolver { return &eventCalendarCache{ userID: tester.M365UserID(t), @@ -64,61 +67,61 @@ func (suite *CacheResolverSuite) TestPopulate() { } tests := []struct { - name, folderName, root, basePath string - resolverFunc func(t *testing.T) graph.ContainerResolver - canFind assert.BoolAssertionFunc + name, folderInCache, root, basePath string + resolverFunc func(t *testing.T) graph.ContainerResolver + canFind assert.BoolAssertionFunc }{ { - name: "Default Event Cache", - folderName: DefaultCalendar, - root: DefaultCalendar, - basePath: DefaultCalendar, - resolverFunc: eventFunc, - canFind: assert.True, + name: "Default Event Cache", + folderInCache: *cal.GetId(), + root: DefaultCalendar, + basePath: DefaultCalendar, + resolverFunc: eventFunc, + canFind: assert.True, }, { - name: "Default Event Folder Hidden", - root: DefaultCalendar, - folderName: DefaultContactFolder, - canFind: assert.False, - resolverFunc: eventFunc, + name: "Default Event Folder Hidden", + folderInCache: DefaultContactFolder, + root: DefaultCalendar, + canFind: assert.False, + resolverFunc: eventFunc, }, { - name: "Name Not in Cache", - folderName: "testFooBarWhoBar", - root: DefaultCalendar, - canFind: assert.False, - resolverFunc: eventFunc, + name: "Name Not in Cache", + folderInCache: "testFooBarWhoBar", + root: DefaultCalendar, + canFind: assert.False, + resolverFunc: eventFunc, }, { - name: "Default 
Contact Cache", - folderName: DefaultContactFolder, - root: DefaultContactFolder, - basePath: DefaultContactFolder, - canFind: assert.True, - resolverFunc: contactFunc, + name: "Default Contact Cache", + folderInCache: DefaultContactFolder, + root: DefaultContactFolder, + basePath: DefaultContactFolder, + canFind: assert.True, + resolverFunc: contactFunc, }, { - name: "Default Contact Hidden", - folderName: DefaultContactFolder, - root: DefaultContactFolder, - canFind: assert.False, - resolverFunc: contactFunc, + name: "Default Contact Hidden", + folderInCache: DefaultContactFolder, + root: DefaultContactFolder, + canFind: assert.False, + resolverFunc: contactFunc, }, { - name: "Name Not in Cache", - folderName: "testFooBarWhoBar", - root: DefaultContactFolder, - canFind: assert.False, - resolverFunc: contactFunc, + name: "Name Not in Cache", + folderInCache: "testFooBarWhoBar", + root: DefaultContactFolder, + canFind: assert.False, + resolverFunc: contactFunc, }, } for _, test := range tests { suite.T().Run(test.name, func(t *testing.T) { resolver := test.resolverFunc(t) - require.NoError(t, resolver.Populate(ctx, test.root, test.basePath)) - _, isFound := resolver.PathInCache(test.folderName) + + _, isFound := resolver.PathInCache(test.folderInCache) test.canFind(t, isFound) }) } diff --git a/src/internal/connector/exchange/mail_folder_cache.go b/src/internal/connector/exchange/mail_folder_cache.go index 565f10736..f9f4cf687 100644 --- a/src/internal/connector/exchange/mail_folder_cache.go +++ b/src/internal/connector/exchange/mail_folder_cache.go @@ -53,7 +53,9 @@ func (mc *mailFolderCache) populateMailRoot(ctx context.Context) error { directory = DefaultMailFolder } - temp := graph.NewCacheFolder(f, path.Builder{}.Append(directory)) + temp := graph.NewCacheFolder(f, + path.Builder{}.Append(directory), // storage path + path.Builder{}.Append(directory)) // display location if err := mc.addFolder(temp); err != nil { return errors.Wrap(err, "adding resolver dir") } @@ -81,7 +83,7 @@ func (mc *mailFolderCache) Populate( return errors.Wrap(err, "enumerating containers") } - if err := mc.populatePaths(ctx); err != nil { + if err := mc.populatePaths(ctx, false); err != nil { return errors.Wrap(err, "populating paths") } diff --git a/src/internal/connector/exchange/mail_folder_cache_test.go b/src/internal/connector/exchange/mail_folder_cache_test.go index 6a55df0bc..e671a1d3b 100644 --- a/src/internal/connector/exchange/mail_folder_cache_test.go +++ b/src/internal/connector/exchange/mail_folder_cache_test.go @@ -18,9 +18,9 @@ const ( // top-level folders right now. //nolint:lll testFolderID = "AAMkAGZmNjNlYjI3LWJlZWYtNGI4Mi04YjMyLTIxYThkNGQ4NmY1MwAuAAAAAADCNgjhM9QmQYWNcI7hCpPrAQDSEBNbUIB9RL6ePDeF3FIYAABl7AqpAAA=" - //nolint:lll topFolderID = "AAMkAGZmNjNlYjI3LWJlZWYtNGI4Mi04YjMyLTIxYThkNGQ4NmY1MwAuAAAAAADCNgjhM9QmQYWNcI7hCpPrAQDSEBNbUIB9RL6ePDeF3FIYAAAAAAEIAAA=" + //nolint:lll // Full folder path for the folder above. expectedFolderPath = "toplevel/subFolder/subsubfolder" ) @@ -94,9 +94,10 @@ func (suite *MailFolderCacheIntegrationSuite) TestDeltaFetch() { require.NoError(t, mfc.Populate(ctx, test.root, test.path...)) - p, err := mfc.IDToPath(ctx, testFolderID) + p, l, err := mfc.IDToPath(ctx, testFolderID, true) require.NoError(t, err) t.Logf("Path: %s\n", p.String()) + t.Logf("Location: %s\n", l.String()) expectedPath := stdpath.Join(append(test.path, expectedFolderPath)...) 
assert.Equal(t, expectedPath, p.String()) diff --git a/src/internal/connector/exchange/service_functions.go b/src/internal/connector/exchange/service_functions.go index 9d996f01c..16b81c8a8 100644 --- a/src/internal/connector/exchange/service_functions.go +++ b/src/internal/connector/exchange/service_functions.go @@ -86,44 +86,70 @@ func PopulateExchangeContainerResolver( } // Returns true if the container passes the scope comparison and should be included. -// Also returns the path representing the directory. +// Returns: +// - the path representing the directory as it should be stored in the repository. +// - the human-readable path using display names. +// - true if the path passes the scope comparison. func includeContainer( qp graph.QueryParams, c graph.CachedContainer, scope selectors.ExchangeScope, -) (path.Path, bool) { +) (path.Path, path.Path, bool) { var ( - category = scope.Category().PathType() directory string + locPath path.Path + category = scope.Category().PathType() pb = c.Path() + loc = c.Location() ) // Clause ensures that DefaultContactFolder is inspected properly if category == path.ContactsCategory && *c.GetDisplayName() == DefaultContactFolder { - pb = c.Path().Append(DefaultContactFolder) + pb = pb.Append(DefaultContactFolder) + + if loc != nil { + loc = loc.Append(DefaultContactFolder) + } } dirPath, err := pb.ToDataLayerExchangePathForCategory( qp.Credentials.AzureTenantID, qp.ResourceOwner, category, - false, - ) + false) // Containers without a path (e.g. Root mail folder) always err here. if err != nil { - return nil, false + return nil, nil, false } - directory = pb.String() + directory = dirPath.Folder(false) + + if loc != nil { + locPath, err = loc.ToDataLayerExchangePathForCategory( + qp.Credentials.AzureTenantID, + qp.ResourceOwner, + category, + false) + // Containers without a path (e.g. Root mail folder) always err here. + if err != nil { + return nil, nil, false + } + + directory = locPath.Folder(false) + } + + var ok bool switch category { case path.EmailCategory: - return dirPath, scope.Matches(selectors.ExchangeMailFolder, directory) + ok = scope.Matches(selectors.ExchangeMailFolder, directory) case path.ContactsCategory: - return dirPath, scope.Matches(selectors.ExchangeContactFolder, directory) + ok = scope.Matches(selectors.ExchangeContactFolder, directory) case path.EventsCategory: - return dirPath, scope.Matches(selectors.ExchangeEventCalendar, directory) + ok = scope.Matches(selectors.ExchangeEventCalendar, directory) default: - return dirPath, false + return nil, nil, false } + + return dirPath, locPath, ok } diff --git a/src/internal/connector/exchange/service_iterators.go b/src/internal/connector/exchange/service_iterators.go index d4b059664..f5ba34b53 100644 --- a/src/internal/connector/exchange/service_iterators.go +++ b/src/internal/connector/exchange/service_iterators.go @@ -70,7 +70,7 @@ func filterContainersAndFillCollections( cID := *c.GetId() delete(tombstones, cID) - currPath, ok := includeContainer(qp, c, scope) + currPath, locPath, ok := includeContainer(qp, c, scope) // Only create a collection if the path matches the scope. 
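`includeContainer` above now matches the selector scope against the human-readable location when one exists, while still returning the ID-based path for storage. A rough, self-contained sketch of that split, with invented values (`matchScope` stands in for `scope.Matches`; it is not the corso selector API):

```go
package main

import (
	"fmt"
	"strings"
)

// matchScope stands in for scope.Matches: a simple prefix test against
// whatever directory string the caller compares.
func matchScope(scopePrefix, directory string) bool {
	return strings.HasPrefix(directory, scopePrefix)
}

func main() {
	// Invented container values: the ID path is what gets persisted; the
	// location, when present, is what the user-facing selector sees.
	idPath := "AAMkAGZ-abc=" // hypothetical container ID
	location := "Birthdays"  // display-name location (calendars only)

	// Mirror includeContainer: default to the storage folder, but prefer
	// the human-readable location when one exists, since that is what
	// users type into --event-calendar and friends.
	directory := idPath
	if location != "" {
		directory = location
	}

	if matchScope("Birthdays", directory) {
		fmt.Println("included; stored under:", idPath)
	}
}
```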
if !ok { continue @@ -110,10 +110,15 @@ func filterContainersAndFillCollections( deltaURLs[cID] = newDelta.URL } + if qp.Category != path.EventsCategory { + locPath = nil + } + edc := NewCollection( qp.ResourceOwner, currPath, prevPath, + locPath, scope.Category().PathType(), ibt, statusUpdater, @@ -167,6 +172,7 @@ func filterContainersAndFillCollections( qp.ResourceOwner, nil, // marks the collection as deleted prevPath, + nil, // tombstones don't need a location scope.Category().PathType(), ibt, statusUpdater, diff --git a/src/internal/connector/exchange/service_iterators_test.go b/src/internal/connector/exchange/service_iterators_test.go index 1b54aa803..cd1237b2e 100644 --- a/src/internal/connector/exchange/service_iterators_test.go +++ b/src/internal/connector/exchange/service_iterators_test.go @@ -59,6 +59,7 @@ var _ graph.ContainerResolver = &mockResolver{} type ( mockResolver struct { items []graph.CachedContainer + added map[string]string } ) @@ -76,10 +77,21 @@ func (m mockResolver) Items() []graph.CachedContainer { return m.items } -func (m mockResolver) AddToCache(context.Context, graph.Container) error { return nil } -func (m mockResolver) IDToPath(context.Context, string) (*path.Builder, error) { return nil, nil } -func (m mockResolver) PathInCache(string) (string, bool) { return "", false } -func (m mockResolver) Populate(context.Context, string, ...string) error { return nil } +func (m mockResolver) AddToCache(ctx context.Context, gc graph.Container, b bool) error { + if len(m.added) == 0 { + m.added = map[string]string{} + } + + m.added[*gc.GetDisplayName()] = *gc.GetId() + + return nil +} +func (m mockResolver) DestinationNameToID(dest string) string { return m.added[dest] } +func (m mockResolver) IDToPath(context.Context, string, bool) (*path.Builder, *path.Builder, error) { + return nil, nil, nil +} +func (m mockResolver) PathInCache(string) (string, bool) { return "", false } +func (m mockResolver) Populate(context.Context, string, ...string) error { return nil } // --------------------------------------------------------------------------- // tests diff --git a/src/internal/connector/exchange/service_restore.go b/src/internal/connector/exchange/service_restore.go index bb0179c76..f9ed59823 100644 --- a/src/internal/connector/exchange/service_restore.go +++ b/src/internal/connector/exchange/service_restore.go @@ -11,6 +11,7 @@ import ( "github.com/pkg/errors" "github.com/alcionai/corso/src/internal/common" + "github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/connector/exchange/api" "github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/support" @@ -71,7 +72,7 @@ func RestoreExchangeContact( response, err := service.Client().UsersById(user).ContactFoldersById(destination).Contacts().Post(ctx, contact, nil) if err != nil { - name := *contact.GetGivenName() + name := ptr.Val(contact.GetGivenName()) return nil, errors.Wrap( err, @@ -146,7 +147,8 @@ func RestoreExchangeEvent( errs = support.WrapAndAppend( fmt.Sprintf( "uploading attachment for message %s: %s", - *transformedEvent.GetId(), support.ConnectorStackErrorTrace(err), + ptr.Val(transformedEvent.GetId()), + support.ConnectorStackErrorTrace(err), ), err, errs, @@ -283,12 +285,8 @@ func SendMailToBackStore( for _, attachment := range attached { if err := uploadAttachment(ctx, uploader, attachment); err != nil { - if attachment.GetOdataType() != nil && - *attachment.GetOdataType() == "#microsoft.graph.itemAttachment" { - var 
name string - if attachment.GetName() != nil { - name = *attachment.GetName() - } + if ptr.Val(attachment.GetOdataType()) == "#microsoft.graph.itemAttachment" { + name := ptr.Val(attachment.GetName()) logger.Ctx(ctx).Infow( "item attachment upload not successful. content not accepted by M365 server", @@ -344,7 +342,7 @@ func RestoreExchangeDataCollections( userCaches = directoryCaches[userID] } - containerID, err := CreateContainerDestinaion( + containerID, err := CreateContainerDestination( ctx, creds, dc.FullPath(), @@ -400,7 +398,7 @@ func restoreCollection( ctx, category.String(), observe.PII(user), - observe.PII(directory.Folder())) + observe.PII(directory.Folder(false))) defer closer() defer close(colProgress) @@ -447,10 +445,16 @@ func restoreCollection( continue } + var locationRef string + if category == path.ContactsCategory { + locationRef = itemPath.Folder(false) + } + deets.Add( itemPath.String(), itemPath.ShortRef(), "", + locationRef, true, details.ItemInfo{ Exchange: info, @@ -461,12 +465,12 @@ func restoreCollection( } } -// CreateContainerDestinaion builds the destination into the container +// CreateContainerDestination builds the destination into the container // at the provided path. As a precondition, the destination cannot // already exist. If it does then an error is returned. The provided // containerResolver is updated with the new destination. // @ returns the container ID of the new destination container. -func CreateContainerDestinaion( +func CreateContainerDestination( ctx context.Context, creds account.M365Config, directory path.Path, @@ -478,7 +482,6 @@ func CreateContainerDestinaion( user = directory.ResourceOwner() category = directory.Category() directoryCache = caches[category] - newPathFolders = append([]string{destination}, directory.Folders()...) ) // TODO(rkeepers): pass the api client into this func, rather than generating one. @@ -489,6 +492,8 @@ func CreateContainerDestinaion( switch category { case path.EmailCategory: + folders := append([]string{destination}, directory.Folders()...) + if directoryCache == nil { acm := ac.Mail() mfc := &mailFolderCache{ @@ -505,12 +510,14 @@ func CreateContainerDestinaion( return establishMailRestoreLocation( ctx, ac, - newPathFolders, + folders, directoryCache, user, newCache) case path.ContactsCategory: + folders := append([]string{destination}, directory.Folders()...) + if directoryCache == nil { acc := ac.Contacts() cfc := &contactFolderCache{ @@ -526,12 +533,14 @@ func CreateContainerDestinaion( return establishContactsRestoreLocation( ctx, ac, - newPathFolders, + folders, directoryCache, user, newCache) case path.EventsCategory: + dest := destination + if directoryCache == nil { ace := ac.Events() ecc := &eventCalendarCache{ @@ -542,16 +551,23 @@ func CreateContainerDestinaion( caches[category] = ecc newCache = true directoryCache = ecc + } else if did := directoryCache.DestinationNameToID(dest); len(did) > 0 { + // calendars are cached by ID in the resolver, not name, so once we have + // created the destination calendar, we need to look up its id and use + // that for resolver lookups instead of the display name. + dest = did } + folders := append([]string{dest}, directory.Folders()...) + return establishEventsRestoreLocation( ctx, ac, - newPathFolders, + folders, directoryCache, user, - newCache, - ) + newCache) + default: return "", fmt.Errorf("category: %s not support for exchange cache", category) } @@ -604,7 +620,7 @@ func establishMailRestoreLocation( } // NOOP if the folder is already in the cache. 
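The `establish*RestoreLocation` helpers below are idempotent: they consult the resolver cache first and only create a container when the lookup misses, with calendars additionally swapping the destination's display name for the created container's ID via `DestinationNameToID`. A toy sketch of that flow, assuming nothing about the real Graph calls (`restoreCache`, `ensureCalendar`, and the folder name are invented stand-ins):

```go
package main

import "fmt"

// restoreCache is a toy stand-in for the container resolver used during
// restore: pathToID caches established folders, nameToID records
// destinations added by display name (as DestinationNameToID does).
type restoreCache struct {
	pathToID map[string]string
	nameToID map[string]string
}

// ensureCalendar returns the cached container ID for dest if it exists,
// and otherwise "creates" the calendar and caches both lookups. createFn
// stands in for the Graph API call that makes the folder.
func (rc *restoreCache) ensureCalendar(dest string, createFn func(string) string) string {
	// Later lookups must use the ID, since calendars are cached by ID.
	if id, ok := rc.nameToID[dest]; ok {
		dest = id
	}

	if id, ok := rc.pathToID[dest]; ok {
		return id // NOOP: destination already established
	}

	id := createFn(dest)
	rc.pathToID[id] = id
	rc.nameToID[dest] = id

	return id
}

func main() {
	rc := &restoreCache{pathToID: map[string]string{}, nameToID: map[string]string{}}
	create := func(name string) string { return "id-for-" + name } // fake Graph call

	first := rc.ensureCalendar("Restored_Calendar", create)
	second := rc.ensureCalendar("Restored_Calendar", create) // hits the cache

	fmt.Println(first == second) // true: the calendar is only created once
}
```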
-	if err = mfc.AddToCache(ctx, temp); err != nil {
+	if err = mfc.AddToCache(ctx, temp, false); err != nil {
 		return "", errors.Wrap(err, "adding folder to cache")
 	}
 }
@@ -643,7 +659,7 @@ func establishContactsRestoreLocation(
 		return "", errors.Wrap(err, "populating contact cache")
 	}
 
-	if err = cfc.AddToCache(ctx, temp); err != nil {
+	if err = cfc.AddToCache(ctx, temp, false); err != nil {
 		return "", errors.Wrap(err, "adding contact folder to cache")
 	}
 }
@@ -660,10 +676,8 @@ func establishEventsRestoreLocation(
 	isNewCache bool,
 ) (string, error) {
-	// Need to prefix with the "Other Calendars" folder so lookup happens properly.
-	cached, ok := ecc.PathInCache(path.Builder{}.Append(
-		calendarOthersFolder,
-		folders[0],
-	).String())
+	// Calendars are now cached by container ID, so the lookup no longer
+	// needs the "Other Calendars" prefix.
+	cached, ok := ecc.PathInCache(folders[0])
 	if ok {
 		return cached, nil
 	}
@@ -681,7 +694,7 @@ func establishEventsRestoreLocation(
 	}
 
 	displayable := api.CalendarDisplayable{Calendarable: temp}
-	if err = ecc.AddToCache(ctx, displayable); err != nil {
+	if err = ecc.AddToCache(ctx, displayable, true); err != nil {
 		return "", errors.Wrap(err, "adding new calendar to cache")
 	}
 }
diff --git a/src/internal/connector/graph/cache_container.go b/src/internal/connector/graph/cache_container.go
index e792c235e..8de403867 100644
--- a/src/internal/connector/graph/cache_container.go
+++ b/src/internal/connector/graph/cache_container.go
@@ -1,40 +1,84 @@
 package graph
 
 import (
+	"context"
+
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
 	"github.com/pkg/errors"
 
 	"github.com/alcionai/corso/src/pkg/path"
 )
 
+// Idable represents objects that implement msgraph-sdk-go/models.entityable
+// and have the concept of an ID.
+type Idable interface {
+	GetId() *string
+}
+
+// Descendable represents objects that implement msgraph-sdk-go/models.entityable
+// and have the concept of a "parent folder".
+type Descendable interface {
+	Idable
+	GetParentFolderId() *string
+}
+
+// Displayable represents objects that implement msgraph-sdk-go/models.entityable
+// and have the concept of a display name.
+type Displayable interface {
+	Idable
+	GetDisplayName() *string
+}
+
+type Container interface {
+	Descendable
+	Displayable
+}
+
 // CachedContainer is used for local unit tests but also makes it so that this
 // code can be broken into generic- and service-specific chunks later on to
 // reuse logic in IDToPath.
 type CachedContainer interface {
 	Container
+	// Location returns the display names for the dirs (if this is a calendar),
+	// or nil otherwise.
+	Location() *path.Builder
+	SetLocation(*path.Builder)
+	// Path returns the IDs for the dirs (if this is a calendar), or the
+	// display names for the dirs otherwise.
 	Path() *path.Builder
 	SetPath(*path.Builder)
 }
 
-// checkRequiredValues is a helper function to ensure that
-// all the pointers are set prior to being called.
-func CheckRequiredValues(c Container) error {
-	idPtr := c.GetId()
-	if idPtr == nil || len(*idPtr) == 0 {
-		return errors.New("folder without ID")
-	}
+// ContainerResolver houses functions for getting information about containers
+// from remote APIs (i.e. resolve folder paths with Graph API). Resolvers may
+// cache information about containers.
+type ContainerResolver interface {
+	// IDToPath takes an m365 container ID and converts it to a hierarchical path
+	// to that container. The path has a similar format to paths on the local
+	// file system.
+	IDToPath(ctx context.Context, m365ID string, useIDInPath bool) (*path.Builder, *path.Builder, error)
 
-	ptr := c.GetDisplayName()
-	if ptr == nil || len(*ptr) == 0 {
-		return errors.Errorf("folder %s without display name", *idPtr)
-	}
+	// Populate performs initialization steps for the resolver
+	// @param ctx is a necessary param for Graph API tracing
+	// @param baseFolderID represents the M365 ID at which the resolver will
+	// conclude its search. Default input is "".
+	Populate(ctx context.Context, baseFolderID string, baseContainerPather ...string) error
 
-	ptr = c.GetParentFolderId()
-	if ptr == nil || len(*ptr) == 0 {
-		return errors.Errorf("folder %s without parent ID", *idPtr)
-	}
+	// PathInCache performs a look up of a path representation
+	// and returns the m365ID of the directory iff the pathString
	// matches the path of a container within the cache.
+	// @returns bool represents if m365ID was found.
+	PathInCache(pathString string) (string, bool)
 
-	return nil
+	AddToCache(ctx context.Context, m365Container Container, useIDInPath bool) error
+
+	// DestinationNameToID returns the ID of the destination container. Dest is
+	// assumed to be a display name. The ID is only populated if the destination
+	// was added using `AddToCache()`. Returns an empty string if not found.
+	DestinationNameToID(dest string) string
+
+	// Items returns the containers in the cache.
+	Items() []CachedContainer
 }
 
 // ======================================
@@ -45,13 +89,15 @@ var _ CachedContainer = &CacheFolder{}
 
 type CacheFolder struct {
 	Container
+	l *path.Builder
 	p *path.Builder
 }
 
 // NewCacheFolder public constructor for struct
-func NewCacheFolder(c Container, pb *path.Builder) CacheFolder {
+func NewCacheFolder(c Container, pb, lpb *path.Builder) CacheFolder {
 	cf := CacheFolder{
 		Container: c,
+		l:         lpb,
 		p:         pb,
 	}
 
@@ -62,6 +108,14 @@ func NewCacheFolder(c Container, pb *path.Builder) CacheFolder {
 // Required Functions to satisfy interfaces
 // =========================================
 
+func (cf CacheFolder) Location() *path.Builder {
+	return cf.l
+}
+
+func (cf *CacheFolder) SetLocation(newLocation *path.Builder) {
+	cf.l = newLocation
+}
+
 func (cf CacheFolder) Path() *path.Builder {
 	return cf.p
 }
@@ -108,3 +162,28 @@ func CreateCalendarDisplayable(entry any, parentID string) *CalendarDisplayable
 		parentID: parentID,
 	}
 }
+
+// =========================================
+// helper funcs
+// =========================================
+
+// CheckRequiredValues is a helper function to ensure that
+// all the pointers are set prior to being called.
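The new `IDToPath` signature returns two builders: the ID-keyed storage path and the display-name location. A self-contained sketch of how such a resolver can derive both by walking parent links (the `folder` type and the IDs here are assumptions for illustration, not the corso implementation):

```go
package main

import (
	"fmt"
	"strings"
)

// folder is a toy container: ID, display name, and parent ID, mirroring the
// Descendable/Displayable shape above.
type folder struct {
	id, name, parent string
}

// idToPath walks parent links up to the root and returns the ID-based
// storage path and the display-name location side by side, the same dual
// result the new IDToPath signature returns via two *path.Builder values.
func idToPath(byID map[string]folder, id string) (storage, location string) {
	var ids, names []string

	for cur, ok := byID[id]; ok; cur, ok = byID[cur.parent] {
		ids = append([]string{cur.id}, ids...)
		names = append([]string{cur.name}, names...)
	}

	return strings.Join(ids, "/"), strings.Join(names, "/")
}

func main() {
	// Invented IDs and names for illustration.
	byID := map[string]folder{
		"id1": {id: "id1", name: "Calendar", parent: ""},
		"id2": {id: "id2", name: "Team Events", parent: "id1"},
	}

	s, l := idToPath(byID, "id2")
	fmt.Println(s) // id1/id2
	fmt.Println(l) // Calendar/Team Events
}
```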
+func CheckRequiredValues(c Container) error { + idPtr := c.GetId() + if idPtr == nil || len(*idPtr) == 0 { + return errors.New("folder without ID") + } + + ptr := c.GetDisplayName() + if ptr == nil || len(*ptr) == 0 { + return errors.Errorf("folder %s without display name", *idPtr) + } + + ptr = c.GetParentFolderId() + if ptr == nil || len(*ptr) == 0 { + return errors.Errorf("folder %s without parent ID", *idPtr) + } + + return nil +} diff --git a/src/internal/connector/graph/metadata_collection.go b/src/internal/connector/graph/metadata_collection.go index 9506dce86..0274b082c 100644 --- a/src/internal/connector/graph/metadata_collection.go +++ b/src/internal/connector/graph/metadata_collection.go @@ -151,7 +151,7 @@ func (md MetadataCollection) Items() <-chan data.Stream { TotalBytes: totalBytes, }, nil, - md.fullPath.Folder(), + md.fullPath.Folder(false), ) md.statusUpdater(status) diff --git a/src/internal/connector/graph/service.go b/src/internal/connector/graph/service.go index aa5a19f5b..992010c1a 100644 --- a/src/internal/connector/graph/service.go +++ b/src/internal/connector/graph/service.go @@ -1,7 +1,6 @@ package graph import ( - "context" "net/http" "net/http/httputil" "os" @@ -173,57 +172,6 @@ type Servicer interface { Adapter() *msgraphsdk.GraphRequestAdapter } -// Idable represents objects that implement msgraph-sdk-go/models.entityable -// and have the concept of an ID. -type Idable interface { - GetId() *string -} - -// Descendable represents objects that implement msgraph-sdk-go/models.entityable -// and have the concept of a "parent folder". -type Descendable interface { - Idable - GetParentFolderId() *string -} - -// Displayable represents objects that implement msgraph-sdk-go/models.entityable -// and have the concept of a display name. -type Displayable interface { - Idable - GetDisplayName() *string -} - -type Container interface { - Descendable - Displayable -} - -// ContainerResolver houses functions for getting information about containers -// from remote APIs (i.e. resolve folder paths with Graph API). Resolvers may -// cache information about containers. -type ContainerResolver interface { - // IDToPath takes an m365 container ID and converts it to a hierarchical path - // to that container. The path has a similar format to paths on the local - // file system. - IDToPath(ctx context.Context, m365ID string) (*path.Builder, error) - // Populate performs initialization steps for the resolver - // @param ctx is necessary param for Graph API tracing - // @param baseFolderID represents the M365ID base that the resolver will - // conclude its search. Default input is "". - Populate(ctx context.Context, baseFolderID string, baseContainerPather ...string) error - - // PathInCache performs a look up of a path reprensentation - // and returns the m365ID of directory iff the pathString - // matches the path of a container within the cache. - // @returns bool represents if m365ID was found. - PathInCache(pathString string) (string, bool) - - AddToCache(ctx context.Context, m365Container Container) error - - // Items returns the containers in the cache. 
- Items() []CachedContainer -} - // --------------------------------------------------------------------------- // Client Middleware // --------------------------------------------------------------------------- diff --git a/src/internal/connector/graph_connector.go b/src/internal/connector/graph_connector.go index 06fb86188..45461e6d1 100644 --- a/src/internal/connector/graph_connector.go +++ b/src/internal/connector/graph_connector.go @@ -98,7 +98,7 @@ func NewGraphConnector( // For now this keeps things functioning if callers do pass in a selector like // "*" instead of. if r == AllResources || r == Users { - if err = gc.setTenantUsers(ctx); err != nil { + if err = gc.setTenantUsers(ctx, errs); err != nil { return nil, errors.Wrap(err, "retrieving tenant user list") } } @@ -129,11 +129,11 @@ func (gc *GraphConnector) createService() (*graph.Service, error) { // setTenantUsers queries the M365 to identify the users in the // workspace. The users field is updated during this method // iff the returned error is nil -func (gc *GraphConnector) setTenantUsers(ctx context.Context) error { +func (gc *GraphConnector) setTenantUsers(ctx context.Context, errs *fault.Errors) error { ctx, end := D.Span(ctx, "gc:setTenantUsers") defer end() - users, err := discovery.Users(ctx, gc.Owners.Users()) + users, err := discovery.Users(ctx, gc.Owners.Users(), errs) if err != nil { return err } diff --git a/src/internal/connector/graph_connector_helper_test.go b/src/internal/connector/graph_connector_helper_test.go index 7e49f2c08..48f6d5f7e 100644 --- a/src/internal/connector/graph_connector_helper_test.go +++ b/src/internal/connector/graph_connector_helper_test.go @@ -1013,9 +1013,9 @@ func collectionsForInfo( user, info.category, info.pathElements, - false, - ) - mc := mockconnector.NewMockExchangeCollection(pth, len(info.items)) + false) + + mc := mockconnector.NewMockExchangeCollection(pth, pth, len(info.items)) baseDestPath := backupOutputPathFromRestore(t, dest, pth) baseExpected := expectedData[baseDestPath.String()] @@ -1076,7 +1076,7 @@ func collectionsForInfoVersion0( info.pathElements, false, ) - c := mockconnector.NewMockExchangeCollection(pth, len(info.items)) + c := mockconnector.NewMockExchangeCollection(pth, pth, len(info.items)) baseDestPath := backupOutputPathFromRestore(t, dest, pth) baseExpected := expectedData[baseDestPath.String()] diff --git a/src/internal/connector/graph_connector_test.go b/src/internal/connector/graph_connector_test.go index 8a2a2a39e..6a7620dca 100644 --- a/src/internal/connector/graph_connector_test.go +++ b/src/internal/connector/graph_connector_test.go @@ -129,12 +129,8 @@ func (suite *GraphConnectorUnitSuite) TestUnionSiteIDsAndWebURLs() { ctx, flush := tester.NewContext() defer flush() - errs := fault.New(true) - - result, err := gc.UnionSiteIDsAndWebURLs(ctx, test.ids, test.urls, errs) + result, err := gc.UnionSiteIDsAndWebURLs(ctx, test.ids, test.urls, fault.New(true)) assert.NoError(t, err) - assert.NoError(t, errs.Err()) - assert.Empty(t, errs.Errs()) assert.ElementsMatch(t, test.expect, result) }) } @@ -192,9 +188,11 @@ func (suite *GraphConnectorIntegrationSuite) TestSetTenantUsers() { require.NoError(suite.T(), err) newConnector.Owners = owners - suite.Empty(len(newConnector.Users)) - err = newConnector.setTenantUsers(ctx) + + errs := fault.New(true) + + err = newConnector.setTenantUsers(ctx, errs) suite.NoError(err) suite.Less(0, len(newConnector.Users)) } @@ -219,12 +217,8 @@ func (suite *GraphConnectorIntegrationSuite) TestSetTenantSites() { 
newConnector.Service = service assert.Equal(t, 0, len(newConnector.Sites)) - errs := fault.New(true) - - err = newConnector.setTenantSites(ctx, errs) + err = newConnector.setTenantSites(ctx, fault.New(true)) assert.NoError(t, err) - assert.NoError(t, errs.Err()) - assert.Empty(t, errs.Errs()) assert.Less(t, 0, len(newConnector.Sites)) for _, site := range newConnector.Sites { @@ -475,7 +469,7 @@ func runRestoreBackupTest( RestorePermissions: true, ToggleFeatures: control.Toggles{EnablePermissionsBackup: true}, }, - ) + fault.New(true)) require.NoError(t, err) // No excludes yet because this isn't an incremental backup. assert.Empty(t, excludes) @@ -603,7 +597,7 @@ func runRestoreBackupTestVersion0( RestorePermissions: true, ToggleFeatures: control.Toggles{EnablePermissionsBackup: true}, }, - ) + fault.New(true)) require.NoError(t, err) // No excludes yet because this isn't an incremental backup. assert.Empty(t, excludes) @@ -1550,7 +1544,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames RestorePermissions: true, ToggleFeatures: control.Toggles{EnablePermissionsBackup: true}, }, - ) + fault.New(true)) require.NoError(t, err) // No excludes yet because this isn't an incremental backup. assert.Empty(t, excludes) diff --git a/src/internal/connector/mockconnector/mock_data_collection.go b/src/internal/connector/mockconnector/mock_data_collection.go index 0b643a699..53d663f08 100644 --- a/src/internal/connector/mockconnector/mock_data_collection.go +++ b/src/internal/connector/mockconnector/mock_data_collection.go @@ -16,6 +16,7 @@ import ( // MockExchangeDataCollection represents a mock exchange mailbox type MockExchangeDataCollection struct { fullPath path.Path + LocPath path.Path messageCount int Data [][]byte Names []string @@ -35,9 +36,14 @@ var ( // NewMockExchangeDataCollection creates an data collection that will return the specified number of // mock messages when iterated. Exchange type mail -func NewMockExchangeCollection(pathRepresentation path.Path, numMessagesToReturn int) *MockExchangeDataCollection { +func NewMockExchangeCollection( + storagePath path.Path, + locationPath path.Path, + numMessagesToReturn int, +) *MockExchangeDataCollection { c := &MockExchangeDataCollection{ - fullPath: pathRepresentation, + fullPath: storagePath, + LocPath: locationPath, messageCount: numMessagesToReturn, Data: [][]byte{}, Names: []string{}, @@ -93,21 +99,11 @@ func NewMockContactCollection(pathRepresentation path.Path, numMessagesToReturn return c } -func (medc *MockExchangeDataCollection) FullPath() path.Path { - return medc.fullPath -} - -func (medc MockExchangeDataCollection) PreviousPath() path.Path { - return medc.PrevPath -} - -func (medc MockExchangeDataCollection) State() data.CollectionState { - return medc.ColState -} - -func (medc MockExchangeDataCollection) DoNotMergeItems() bool { - return medc.DoNotMerge -} +func (medc MockExchangeDataCollection) FullPath() path.Path { return medc.fullPath } +func (medc MockExchangeDataCollection) LocationPath() path.Path { return medc.LocPath } +func (medc MockExchangeDataCollection) PreviousPath() path.Path { return medc.PrevPath } +func (medc MockExchangeDataCollection) State() data.CollectionState { return medc.ColState } +func (medc MockExchangeDataCollection) DoNotMergeItems() bool { return medc.DoNotMerge } // Items returns a channel that has the next items in the collection. The // channel is closed when there are no more items available. 
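Several call sites in this patch now thread a `fault.New(true)` error bus through collection and discovery functions, and `DataCollections` breaks out of its scope loop once `errs.Failed()` reports a failure, otherwise accumulating via `errs.Add(err)` and surfacing the result with `errs.Err()`. A minimal analogue of that accumulate-or-fail-fast pattern (`bus` is an invented stand-in under the assumption that the boolean toggles fail-fast behavior; it is not corso's actual fault API):

```go
package main

import (
	"errors"
	"fmt"
)

// bus is a minimal analogue of the fault-accumulation pattern used above:
// failFast controls whether the first error should stop further scopes,
// and Err surfaces the first recorded failure once the pass completes.
type bus struct {
	failFast bool
	errs     []error
}

func (b *bus) Add(err error) { b.errs = append(b.errs, err) }

func (b *bus) Failed() bool { return b.failFast && len(b.errs) > 0 }

func (b *bus) Err() error {
	if len(b.errs) > 0 {
		return b.errs[0]
	}

	return nil
}

func main() {
	scopes := []string{"email", "contacts", "events"}
	errs := &bus{failFast: false}

	for _, s := range scopes {
		if errs.Failed() {
			break // mirrors the early break added to DataCollections
		}

		// Pretend the contacts scope fails; the other scopes still run.
		if s == "contacts" {
			errs.Add(errors.New("contacts: enumeration failed"))
			continue
		}

		fmt.Println("collected:", s)
	}

	fmt.Println("err:", errs.Err())
}
```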
diff --git a/src/internal/connector/mockconnector/mock_data_collection_test.go b/src/internal/connector/mockconnector/mock_data_collection_test.go
index f2ba4d08e..1fcf3e652 100644
--- a/src/internal/connector/mockconnector/mock_data_collection_test.go
+++ b/src/internal/connector/mockconnector/mock_data_collection_test.go
@@ -25,7 +25,7 @@ func TestMockExchangeCollectionSuite(t *testing.T) {
 }
 
 func (suite *MockExchangeCollectionSuite) TestMockExchangeCollection() {
-	mdc := mockconnector.NewMockExchangeCollection(nil, 2)
+	mdc := mockconnector.NewMockExchangeCollection(nil, nil, 2)
 
 	messagesRead := 0
 
@@ -40,7 +40,7 @@ func (suite *MockExchangeCollectionSuite) TestMockExchangeCollection() {
 func (suite *MockExchangeCollectionSuite) TestMockExchangeCollectionItemSize() {
 	t := suite.T()
 
-	mdc := mockconnector.NewMockExchangeCollection(nil, 2)
+	mdc := mockconnector.NewMockExchangeCollection(nil, nil, 2)
 
 	mdc.Data[1] = []byte("This is some buffer of data so that the size is different than the default")
 
@@ -58,7 +58,7 @@ func (suite *MockExchangeCollectionSuite) TestMockExchangeCollectionItemSize() {
 // functions by verifying no failures on (de)serializing steps using kiota serialization library
 func (suite *MockExchangeCollectionSuite) TestMockExchangeCollection_NewExchangeCollectionMail_Hydration() {
 	t := suite.T()
-	mdc := mockconnector.NewMockExchangeCollection(nil, 3)
+	mdc := mockconnector.NewMockExchangeCollection(nil, nil, 3)
 	buf := &bytes.Buffer{}
 
 	for stream := range mdc.Items() {
diff --git a/src/internal/connector/onedrive/collection.go b/src/internal/connector/onedrive/collection.go
index 4aff95ac1..54f66fda3 100644
--- a/src/internal/connector/onedrive/collection.go
+++ b/src/internal/connector/onedrive/collection.go
@@ -69,6 +69,14 @@ type Collection struct {
 	itemMetaReader itemMetaReaderFunc
 	ctrl           control.Options
 
+	// prevPath is the previous hierarchical path used by this collection.
+	// It may be the same as folderPath, if the folder was not renamed or
+	// moved. It will be empty on its first retrieval.
+	prevPath path.Path
+
+	// state specifies whether the collection is new, moved/renamed, or deleted.
+	state data.CollectionState
+
 	// should only be true if the old delta token expired
 	doNotMergeItems bool
 }
@@ -92,6 +100,7 @@ type itemMetaReaderFunc func(
 func NewCollection(
 	itemClient *http.Client,
 	folderPath path.Path,
+	prevPath path.Path,
 	driveID string,
 	service graph.Servicer,
 	statusUpdater support.StatusUpdater,
@@ -102,6 +111,7 @@ func NewCollection(
 	c := &Collection{
 		itemClient:    itemClient,
 		folderPath:    folderPath,
+		prevPath:      prevPath,
 		driveItems:    map[string]models.DriveItemable{},
 		driveID:       driveID,
 		source:        source,
@@ -109,6 +119,7 @@ func NewCollection(
 		data:          make(chan data.Stream, collectionChannelBufferSize),
 		statusUpdater: statusUpdater,
 		ctrl:          ctrlOpts,
+		state:         data.StateOf(prevPath, folderPath),
 		doNotMergeItems: doNotMergeItems,
 	}
 
@@ -140,16 +151,12 @@ func (oc *Collection) FullPath() path.Path {
 	return oc.folderPath
 }
 
-// TODO(ashmrtn): Fill in with previous path once GraphConnector compares old
-// and new folder hierarchies.
 func (oc Collection) PreviousPath() path.Path {
-	return nil
+	return oc.prevPath
 }
 
-// TODO(ashmrtn): Fill in once GraphConnector compares old and new folder
-// hierarchies.
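With `prevPath` stored on the collection, `State()` can be derived purely from the previous/current path pair via `data.StateOf`, matching the table in `TestNewCollection_state`. A small sketch of that comparison (`stateOf` and the state constants are assumptions inferred from the tests in this patch, not the actual `data` package):

```go
package main

import "fmt"

type state int

const (
	newState state = iota
	notMovedState
	movedState
	deletedState
)

// stateOf mirrors the prev/curr comparison that data.StateOf performs for
// collections: no previous path means new, no current path means deleted,
// equal paths mean not moved, and anything else is a move/rename.
func stateOf(prev, curr string) state {
	switch {
	case prev == "":
		return newState
	case curr == "":
		return deletedState
	case prev == curr:
		return notMovedState
	default:
		return movedState
	}
}

func main() {
	fmt.Println(stateOf("", "/foo"))     // 0: new
	fmt.Println(stateOf("/foo", "/foo")) // 1: not moved
	fmt.Println(stateOf("/foo", "/bar")) // 2: moved
	fmt.Println(stateOf("/foo", ""))     // 3: deleted
}
```

This is the same derivation the OneDrive tombstone path below relies on: a collection built with a previous path and a nil current path reports itself as deleted.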
func (oc Collection) State() data.CollectionState { - return data.NewState + return oc.state } func (oc Collection) DoNotMergeItems() bool { @@ -432,7 +439,7 @@ func (oc *Collection) reportAsCompleted(ctx context.Context, itemsFound, itemsRe TotalBytes: byteCount, // Number of bytes read in the operation, }, errs, - oc.folderPath.Folder(), // Additional details + oc.folderPath.Folder(false), // Additional details ) logger.Ctx(ctx).Debugw("done streaming items", "status", status.String()) oc.statusUpdater(status) diff --git a/src/internal/connector/onedrive/collection_test.go b/src/internal/connector/onedrive/collection_test.go index 39c19c097..4ca6a9850 100644 --- a/src/internal/connector/onedrive/collection_test.go +++ b/src/internal/connector/onedrive/collection_test.go @@ -164,6 +164,7 @@ func (suite *CollectionUnitTestSuite) TestCollection() { coll := NewCollection( graph.HTTPClient(graph.NoTimeout()), folderPath, + nil, "drive-id", suite, suite.testStatusUpdater(&wg, &collStatus), @@ -298,6 +299,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadError() { coll := NewCollection( graph.HTTPClient(graph.NoTimeout()), folderPath, + nil, "fakeDriveID", suite, suite.testStatusUpdater(&wg, &collStatus), @@ -370,6 +372,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionDisablePermissionsBackup() { coll := NewCollection( graph.HTTPClient(graph.NoTimeout()), folderPath, + nil, "fakeDriveID", suite, suite.testStatusUpdater(&wg, &collStatus), diff --git a/src/internal/connector/onedrive/collections.go b/src/internal/connector/onedrive/collections.go index a951d682e..11e0d9a30 100644 --- a/src/internal/connector/onedrive/collections.go +++ b/src/internal/connector/onedrive/collections.go @@ -28,7 +28,11 @@ const ( OneDriveSource SharePointSource ) -const restrictedDirectory = "Site Pages" + +const ( + restrictedDirectory = "Site Pages" + rootDrivePattern = "/drives/%s/root:" +) func (ds driveSource) toPathServiceCat() (path.ServiceType, path.CategoryType) { switch ds { @@ -382,11 +386,15 @@ func (c *Collections) UpdateCollections( continue } - if item.GetParentReference() == nil || item.GetParentReference().GetPath() == nil { + if item.GetParentReference() == nil || + item.GetParentReference().GetPath() == nil || + item.GetParentReference().GetId() == nil { return errors.Errorf("item does not have a parent reference. item name : %s", *item.GetName()) } // Create a collection for the parent of this item + collectionID := *item.GetParentReference().GetId() + collectionPath, err := GetCanonicalPath( *item.GetParentReference().GetPath(), c.tenant, @@ -411,7 +419,34 @@ func (c *Collections) UpdateCollections( // the deleted folder/package. delete(newPaths, *item.GetId()) - // TODO(ashmrtn): Create a collection with state Deleted. + prevColPath, ok := oldPaths[*item.GetId()] + if !ok { + // It is possible that an item was created and + // deleted between two delta invocations. In + // that case, it will only produce a single + // delete entry in the delta response. 
+ continue + } + + prevPath, err := path.FromDataLayerPath(prevColPath, false) + if err != nil { + logger.Ctx(ctx).Errorw("invalid previous path for deleted item", "error", err) + return err + } + + col := NewCollection( + c.itemClient, + nil, + prevPath, + driveID, + c.service, + c.statusUpdater, + c.source, + c.ctrl, + invalidPrevDelta, + ) + + c.CollectionMap[*item.GetId()] = col break } @@ -454,14 +489,14 @@ func (c *Collections) UpdateCollections( // TODO(ashmrtn): Figure what when an item was moved (maybe) and add it to // the exclude list. - col, found := c.CollectionMap[collectionPath.String()] - + col, found := c.CollectionMap[collectionID] if !found { // TODO(ashmrtn): Compare old and new path and set collection state // accordingly. col = NewCollection( c.itemClient, collectionPath, + nil, driveID, c.service, c.statusUpdater, @@ -470,7 +505,7 @@ func (c *Collections) UpdateCollections( invalidPrevDelta, ) - c.CollectionMap[collectionPath.String()] = col + c.CollectionMap[collectionID] = col c.NumContainers++ } diff --git a/src/internal/connector/onedrive/collections_test.go b/src/internal/connector/onedrive/collections_test.go index 3cc5dbcb5..12c2cb1e1 100644 --- a/src/internal/connector/onedrive/collections_test.go +++ b/src/internal/connector/onedrive/collections_test.go @@ -2,6 +2,7 @@ package onedrive import ( "context" + "fmt" "strings" "testing" @@ -23,21 +24,48 @@ import ( "github.com/alcionai/corso/src/pkg/selectors" ) -const ( - testBaseDrivePath = "drive/driveID1/root:" -) +type statePath struct { + state data.CollectionState + curPath path.Path + prevPath path.Path +} -func expectedPathAsSlice(t *testing.T, tenant, user string, rest ...string) []string { - res := make([]string, 0, len(rest)) - - for _, r := range rest { - p, err := GetCanonicalPath(r, tenant, user, OneDriveSource) +func getExpectedStatePathGenerator( + t *testing.T, + tenant, user, base string, +) func(data.CollectionState, string) statePath { + return func(state data.CollectionState, pth string) statePath { + p, err := GetCanonicalPath(base+pth, tenant, user, OneDriveSource) require.NoError(t, err) - res = append(res, p.String()) - } + var ( + cp path.Path + pp path.Path + ) - return res + if state == data.NewState { + cp = p + } else { + pp = p + } + + return statePath{ + state: state, + curPath: cp, + prevPath: pp, + } + } +} + +func getExpectedPathGenerator(t *testing.T, + tenant, user, base string, +) func(string) string { + return func(path string) string { + p, err := GetCanonicalPath(base+path, tenant, user, OneDriveSource) + require.NoError(t, err) + + return p.String() + } } type OneDriveCollectionsSuite struct { @@ -102,23 +130,28 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() { pkg = "/package" ) + testBaseDrivePath := fmt.Sprintf(rootDrivePattern, "driveID1") + expectedPath := getExpectedPathGenerator(suite.T(), tenant, user, testBaseDrivePath) + expectedStatePath := getExpectedStatePathGenerator(suite.T(), tenant, user, testBaseDrivePath) + tests := []struct { - testCase string - items []models.DriveItemable - inputFolderMap map[string]string - scope selectors.OneDriveScope - expect assert.ErrorAssertionFunc - expectedCollectionPaths []string - expectedItemCount int - expectedContainerCount int - expectedFileCount int - expectedMetadataPaths map[string]string - expectedExcludes map[string]struct{} + testCase string + items []models.DriveItemable + inputFolderMap map[string]string + scope selectors.OneDriveScope + expect assert.ErrorAssertionFunc + 
expectedCollectionIDs map[string]statePath + expectedItemCount int + expectedContainerCount int + expectedFileCount int + expectedMetadataPaths map[string]string + expectedExcludes map[string]struct{} }{ { testCase: "Invalid item", items: []models.DriveItemable{ - driveItem("item", "item", testBaseDrivePath, false, false, false), + driveRootItem("root"), + driveItem("item", "item", testBaseDrivePath, "root", false, false, false), }, inputFolderMap: map[string]string{}, scope: anyFolder, @@ -129,17 +162,15 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() { { testCase: "Single File", items: []models.DriveItemable{ - driveItem("file", "file", testBaseDrivePath, true, false, false), + driveRootItem("root"), + driveItem("file", "file", testBaseDrivePath, "root", true, false, false), }, inputFolderMap: map[string]string{}, scope: anyFolder, expect: assert.NoError, - expectedCollectionPaths: expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath, - ), + expectedCollectionIDs: map[string]statePath{ + "root": expectedStatePath(data.NewState, ""), + }, expectedItemCount: 1, expectedFileCount: 1, expectedContainerCount: 1, @@ -150,24 +181,17 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() { { testCase: "Single Folder", items: []models.DriveItemable{ - driveItem("folder", "folder", testBaseDrivePath, false, true, false), + driveRootItem("root"), + driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), }, inputFolderMap: map[string]string{}, scope: anyFolder, expect: assert.NoError, - expectedCollectionPaths: expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath, - ), + expectedCollectionIDs: map[string]statePath{ + "root": expectedStatePath(data.NewState, ""), + }, expectedMetadataPaths: map[string]string{ - "folder": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/folder", - )[0], + "folder": expectedPath("/folder"), }, expectedItemCount: 1, expectedContainerCount: 1, @@ -176,24 +200,17 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() { { testCase: "Single Package", items: []models.DriveItemable{ - driveItem("package", "package", testBaseDrivePath, false, false, true), + driveRootItem("root"), + driveItem("package", "package", testBaseDrivePath, "root", false, false, true), }, inputFolderMap: map[string]string{}, scope: anyFolder, expect: assert.NoError, - expectedCollectionPaths: expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath, - ), + expectedCollectionIDs: map[string]statePath{ + "root": expectedStatePath(data.NewState, ""), + }, expectedMetadataPaths: map[string]string{ - "package": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/package", - )[0], + "package": expectedPath("/package"), }, expectedItemCount: 1, expectedContainerCount: 1, @@ -202,142 +219,109 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() { { testCase: "1 root file, 1 folder, 1 package, 2 files, 3 collections", items: []models.DriveItemable{ - driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, true, false, false), - driveItem("folder", "folder", testBaseDrivePath, false, true, false), - driveItem("package", "package", testBaseDrivePath, false, false, true), - driveItem("fileInFolder", "fileInFolder", testBaseDrivePath+folder, true, false, false), - driveItem("fileInPackage", "fileInPackage", testBaseDrivePath+pkg, true, false, false), + driveRootItem("root"), + driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", 
true, false, false), + driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), + driveItem("package", "package", testBaseDrivePath, "root", false, false, true), + driveItem("fileInFolder", "fileInFolder", testBaseDrivePath+folder, "folder", true, false, false), + driveItem("fileInPackage", "fileInPackage", testBaseDrivePath+pkg, "package", true, false, false), }, inputFolderMap: map[string]string{}, scope: anyFolder, expect: assert.NoError, - expectedCollectionPaths: expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath, - testBaseDrivePath+folder, - testBaseDrivePath+pkg, - ), + expectedCollectionIDs: map[string]statePath{ + "root": expectedStatePath(data.NewState, ""), + "folder": expectedStatePath(data.NewState, folder), + "package": expectedStatePath(data.NewState, pkg), + }, expectedItemCount: 5, expectedFileCount: 3, expectedContainerCount: 3, expectedMetadataPaths: map[string]string{ - "folder": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/folder", - )[0], - "package": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/package", - )[0], + "folder": expectedPath("/folder"), + "package": expectedPath("/package"), }, expectedExcludes: map[string]struct{}{}, }, { testCase: "contains folder selector", items: []models.DriveItemable{ - driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, true, false, false), - driveItem("folder", "folder", testBaseDrivePath, false, true, false), - driveItem("subfolder", "subfolder", testBaseDrivePath+folder, false, true, false), - driveItem("folder2", "folder", testBaseDrivePath+folderSub, false, true, false), - driveItem("package", "package", testBaseDrivePath, false, false, true), - driveItem("fileInFolder", "fileInFolder", testBaseDrivePath+folder, true, false, false), - driveItem("fileInFolder2", "fileInFolder2", testBaseDrivePath+folderSub+folder, true, false, false), - driveItem("fileInFolderPackage", "fileInPackage", testBaseDrivePath+pkg, true, false, false), + driveRootItem("root"), + driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false), + driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), + driveItem("subfolder", "subfolder", testBaseDrivePath+folder, "folder", false, true, false), + driveItem("folder2", "folder", testBaseDrivePath+folderSub, "subfolder", false, true, false), + driveItem("package", "package", testBaseDrivePath, "root", false, false, true), + driveItem("fileInFolder", "fileInFolder", testBaseDrivePath+folder, "folder", true, false, false), + driveItem("fileInFolder2", "fileInFolder2", testBaseDrivePath+folderSub+folder, "folder2", true, false, false), + driveItem("fileInFolderPackage", "fileInPackage", testBaseDrivePath+pkg, "package", true, false, false), }, inputFolderMap: map[string]string{}, scope: (&selectors.OneDriveBackup{}).Folders([]string{"folder"})[0], expect: assert.NoError, - expectedCollectionPaths: expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/folder", - testBaseDrivePath+folderSub, - testBaseDrivePath+folderSub+folder, - ), + expectedCollectionIDs: map[string]statePath{ + "folder": expectedStatePath(data.NewState, folder), + "subfolder": expectedStatePath(data.NewState, folderSub), + "folder2": expectedStatePath(data.NewState, folderSub+folder), + }, expectedItemCount: 4, expectedFileCount: 2, expectedContainerCount: 3, // just "folder" isn't added here because the include check is done on the // parent path since we only check later 
if something is a folder or not. expectedMetadataPaths: map[string]string{ - "subfolder": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/folder/subfolder", - )[0], - "folder2": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/folder/subfolder/folder", - )[0], + "subfolder": expectedPath("/folder/subfolder"), + "folder2": expectedPath("/folder/subfolder/folder"), }, expectedExcludes: map[string]struct{}{}, }, { testCase: "prefix subfolder selector", items: []models.DriveItemable{ - driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, true, false, false), - driveItem("folder", "folder", testBaseDrivePath, false, true, false), - driveItem("subfolder", "subfolder", testBaseDrivePath+folder, false, true, false), - driveItem("folder2", "folder", testBaseDrivePath+folderSub, false, true, false), - driveItem("package", "package", testBaseDrivePath, false, false, true), - driveItem("fileInFolder", "fileInFolder", testBaseDrivePath+folder, true, false, false), - driveItem("fileInFolder2", "fileInFolder2", testBaseDrivePath+folderSub+folder, true, false, false), - driveItem("fileInPackage", "fileInPackage", testBaseDrivePath+pkg, true, false, false), + driveRootItem("root"), + driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false), + driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), + driveItem("subfolder", "subfolder", testBaseDrivePath+folder, "folder", false, true, false), + driveItem("folder2", "folder", testBaseDrivePath+folderSub, "subfolder", false, true, false), + driveItem("package", "package", testBaseDrivePath, "root", false, false, true), + driveItem("fileInFolder", "fileInFolder", testBaseDrivePath+folder, "folder", true, false, false), + driveItem("fileInFolder2", "fileInFolder2", testBaseDrivePath+folderSub+folder, "folder2", true, false, false), + driveItem("fileInPackage", "fileInPackage", testBaseDrivePath+pkg, "package", true, false, false), }, inputFolderMap: map[string]string{}, scope: (&selectors.OneDriveBackup{}). 
Folders([]string{"/folder/subfolder"}, selectors.PrefixMatch())[0], expect: assert.NoError, - expectedCollectionPaths: expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+folderSub, - testBaseDrivePath+folderSub+folder, - ), + expectedCollectionIDs: map[string]statePath{ + "subfolder": expectedStatePath(data.NewState, folderSub), + "folder2": expectedStatePath(data.NewState, folderSub+folder), + }, expectedItemCount: 2, expectedFileCount: 1, expectedContainerCount: 2, expectedMetadataPaths: map[string]string{ - "folder2": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/folder/subfolder/folder", - )[0], + "folder2": expectedPath("/folder/subfolder/folder"), }, expectedExcludes: map[string]struct{}{}, }, { testCase: "match subfolder selector", items: []models.DriveItemable{ - driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, true, false, false), - driveItem("folder", "folder", testBaseDrivePath, false, true, false), - driveItem("subfolder", "subfolder", testBaseDrivePath+folder, false, true, false), - driveItem("package", "package", testBaseDrivePath, false, false, true), - driveItem("fileInFolder", "fileInFolder", testBaseDrivePath+folder, true, false, false), - driveItem("fileInSubfolder", "fileInSubfolder", testBaseDrivePath+folderSub, true, false, false), - driveItem("fileInPackage", "fileInPackage", testBaseDrivePath+pkg, true, false, false), + driveRootItem("root"), + driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false), + driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), + driveItem("subfolder", "subfolder", testBaseDrivePath+folder, "folder", false, true, false), + driveItem("package", "package", testBaseDrivePath, "root", false, false, true), + driveItem("fileInFolder", "fileInFolder", testBaseDrivePath+folder, "folder", true, false, false), + driveItem("fileInSubfolder", "fileInSubfolder", testBaseDrivePath+folderSub, "subfolder", true, false, false), + driveItem("fileInPackage", "fileInPackage", testBaseDrivePath+pkg, "package", true, false, false), }, inputFolderMap: map[string]string{}, scope: (&selectors.OneDriveBackup{}).Folders([]string{"folder/subfolder"})[0], expect: assert.NoError, - expectedCollectionPaths: expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+folderSub, - ), + expectedCollectionIDs: map[string]statePath{ + "subfolder": expectedStatePath(data.NewState, folderSub), + }, expectedItemCount: 1, expectedFileCount: 1, expectedContainerCount: 1, @@ -348,272 +332,161 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() { { testCase: "not moved folder tree", items: []models.DriveItemable{ - driveItem("folder", "folder", testBaseDrivePath, false, true, false), + driveRootItem("root"), + driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), }, inputFolderMap: map[string]string{ - "folder": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/folder", - )[0], - "subfolder": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/folder/subfolder", - )[0], + "folder": expectedPath("/folder"), + "subfolder": expectedPath("/folder/subfolder"), }, scope: anyFolder, expect: assert.NoError, - expectedCollectionPaths: expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath, - ), + expectedCollectionIDs: map[string]statePath{ + "root": expectedStatePath(data.NewState, ""), + }, expectedItemCount: 1, expectedFileCount: 0, expectedContainerCount: 1, 
expectedMetadataPaths: map[string]string{ - "folder": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/folder", - )[0], - "subfolder": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/folder/subfolder", - )[0], + "folder": expectedPath("/folder"), + "subfolder": expectedPath("/folder/subfolder"), }, expectedExcludes: map[string]struct{}{}, }, { testCase: "moved folder tree", items: []models.DriveItemable{ - driveItem("folder", "folder", testBaseDrivePath, false, true, false), + driveRootItem("root"), + driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), }, inputFolderMap: map[string]string{ - "folder": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/a-folder", - )[0], - "subfolder": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/a-folder/subfolder", - )[0], + "folder": expectedPath("/a-folder"), + "subfolder": expectedPath("/a-folder/subfolder"), }, scope: anyFolder, expect: assert.NoError, - expectedCollectionPaths: expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath, - ), + expectedCollectionIDs: map[string]statePath{ + "root": expectedStatePath(data.NewState, ""), + }, expectedItemCount: 1, expectedFileCount: 0, expectedContainerCount: 1, expectedMetadataPaths: map[string]string{ - "folder": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/folder", - )[0], - "subfolder": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/folder/subfolder", - )[0], + "folder": expectedPath("/folder"), + "subfolder": expectedPath("/folder/subfolder"), }, expectedExcludes: map[string]struct{}{}, }, { testCase: "moved folder tree and subfolder 1", items: []models.DriveItemable{ - driveItem("folder", "folder", testBaseDrivePath, false, true, false), - driveItem("subfolder", "subfolder", testBaseDrivePath, false, true, false), + driveRootItem("root"), + driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), + driveItem("subfolder", "subfolder", testBaseDrivePath, "root", false, true, false), }, inputFolderMap: map[string]string{ - "folder": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/a-folder", - )[0], - "subfolder": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/a-folder/subfolder", - )[0], + "folder": expectedPath("/a-folder"), + "subfolder": expectedPath("/a-folder/subfolder"), }, scope: anyFolder, expect: assert.NoError, - expectedCollectionPaths: expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath, - ), + expectedCollectionIDs: map[string]statePath{ + "root": expectedStatePath(data.NewState, ""), + }, expectedItemCount: 2, expectedFileCount: 0, expectedContainerCount: 1, expectedMetadataPaths: map[string]string{ - "folder": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/folder", - )[0], - "subfolder": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/subfolder", - )[0], + "folder": expectedPath("/folder"), + "subfolder": expectedPath("/subfolder"), }, expectedExcludes: map[string]struct{}{}, }, { testCase: "moved folder tree and subfolder 2", items: []models.DriveItemable{ - driveItem("subfolder", "subfolder", testBaseDrivePath, false, true, false), - driveItem("folder", "folder", testBaseDrivePath, false, true, false), + driveRootItem("root"), + driveItem("subfolder", "subfolder", testBaseDrivePath, "root", false, true, false), + driveItem("folder", "folder", 
testBaseDrivePath, "root", false, true, false), }, inputFolderMap: map[string]string{ - "folder": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/a-folder", - )[0], - "subfolder": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/a-folder/subfolder", - )[0], + "folder": expectedPath("/a-folder"), + "subfolder": expectedPath("/a-folder/subfolder"), }, scope: anyFolder, expect: assert.NoError, - expectedCollectionPaths: expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath, - ), + expectedCollectionIDs: map[string]statePath{ + "root": expectedStatePath(data.NewState, ""), + }, expectedItemCount: 2, expectedFileCount: 0, expectedContainerCount: 1, expectedMetadataPaths: map[string]string{ - "folder": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/folder", - )[0], - "subfolder": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/subfolder", - )[0], + "folder": expectedPath("/folder"), + "subfolder": expectedPath("/subfolder"), }, expectedExcludes: map[string]struct{}{}, }, { testCase: "deleted folder and package", items: []models.DriveItemable{ - delItem("folder", testBaseDrivePath, false, true, false), - delItem("package", testBaseDrivePath, false, false, true), + driveRootItem("root"), // root is always present, but not necessary here + delItem("folder", testBaseDrivePath, "root", false, true, false), + delItem("package", testBaseDrivePath, "root", false, false, true), }, inputFolderMap: map[string]string{ - "folder": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/folder", - )[0], - "package": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/package", - )[0], + "folder": expectedPath("/folder"), + "package": expectedPath("/package"), }, - scope: anyFolder, - expect: assert.NoError, - expectedCollectionPaths: []string{}, - expectedItemCount: 0, - expectedFileCount: 0, - expectedContainerCount: 0, - expectedMetadataPaths: map[string]string{}, - expectedExcludes: map[string]struct{}{}, + scope: anyFolder, + expect: assert.NoError, + expectedCollectionIDs: map[string]statePath{ + "folder": expectedStatePath(data.DeletedState, folder), + "package": expectedStatePath(data.DeletedState, pkg), + }, + expectedItemCount: 0, + expectedFileCount: 0, + expectedContainerCount: 0, + expectedMetadataPaths: map[string]string{}, + expectedExcludes: map[string]struct{}{}, }, { testCase: "delete folder tree move subfolder", items: []models.DriveItemable{ - delItem("folder", testBaseDrivePath, false, true, false), - driveItem("subfolder", "subfolder", testBaseDrivePath, false, true, false), + driveRootItem("root"), + delItem("folder", testBaseDrivePath, "root", false, true, false), + driveItem("subfolder", "subfolder", testBaseDrivePath, "root", false, true, false), }, inputFolderMap: map[string]string{ - "folder": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/folder", - )[0], - "subfolder": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/folder/subfolder", - )[0], + "folder": expectedPath("/folder"), + "subfolder": expectedPath("/folder/subfolder"), }, scope: anyFolder, expect: assert.NoError, - expectedCollectionPaths: expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath, - ), + expectedCollectionIDs: map[string]statePath{ + "root": expectedStatePath(data.NewState, ""), + "folder": expectedStatePath(data.DeletedState, folder), + }, expectedItemCount: 1, expectedFileCount: 
0, expectedContainerCount: 1, expectedMetadataPaths: map[string]string{ - "subfolder": expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/subfolder", - )[0], + "subfolder": expectedPath("/subfolder"), }, expectedExcludes: map[string]struct{}{}, }, { testCase: "delete file", items: []models.DriveItemable{ - delItem("item", testBaseDrivePath, true, false, false), + delItem("item", testBaseDrivePath, "root", true, false, false), }, - inputFolderMap: map[string]string{}, - scope: anyFolder, - expect: assert.NoError, - expectedCollectionPaths: []string{}, - expectedItemCount: 1, - expectedFileCount: 1, - expectedContainerCount: 0, - expectedMetadataPaths: map[string]string{}, + inputFolderMap: map[string]string{}, + scope: anyFolder, + expect: assert.NoError, + expectedItemCount: 1, + expectedFileCount: 1, + expectedContainerCount: 0, + expectedMetadataPaths: map[string]string{}, expectedExcludes: map[string]struct{}{ "item": {}, }, @@ -640,7 +513,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() { err := c.UpdateCollections( ctx, - "driveID", + "driveID1", "General", tt.items, tt.inputFolderMap, @@ -649,12 +522,16 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() { false, ) tt.expect(t, err) - assert.Equal(t, len(tt.expectedCollectionPaths), len(c.CollectionMap), "collection paths") + assert.Equal(t, len(tt.expectedCollectionIDs), len(c.CollectionMap), "total collections") assert.Equal(t, tt.expectedItemCount, c.NumItems, "item count") assert.Equal(t, tt.expectedFileCount, c.NumFiles, "file count") assert.Equal(t, tt.expectedContainerCount, c.NumContainers, "container count") - for _, collPath := range tt.expectedCollectionPaths { - assert.Contains(t, c.CollectionMap, collPath) + + for id, sp := range tt.expectedCollectionIDs { + assert.Contains(t, c.CollectionMap, id, "contains collection with id") + assert.Equal(t, sp.state, c.CollectionMap[id].State(), "state for collection") + assert.Equal(t, sp.curPath, c.CollectionMap[id].FullPath(), "current path for collection") + assert.Equal(t, sp.prevPath, c.CollectionMap[id].PreviousPath(), "prev path for collection") } assert.Equal(t, tt.expectedMetadataPaths, outputFolderMap) @@ -1080,19 +957,6 @@ func (suite *OneDriveCollectionsSuite) TestGet() { ) require.NoError(suite.T(), err, "making metadata path") - rootFolderPath := expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath, - )[0] - folderPath := expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/folder", - )[0] - empty := "" next := "next" delta := "delta1" @@ -1108,20 +972,17 @@ func (suite *OneDriveCollectionsSuite) TestGet() { drive2.SetId(&driveID2) drive2.SetName(&driveID2) - driveBasePath2 := "drive/driveID2/root:" + driveBasePath1 := fmt.Sprintf(rootDrivePattern, driveID1) + driveBasePath2 := fmt.Sprintf(rootDrivePattern, driveID2) - rootFolderPath2 := expectedPathAsSlice( - suite.T(), - tenant, - user, - driveBasePath2, - )[0] - folderPath2 := expectedPathAsSlice( - suite.T(), - tenant, - user, - driveBasePath2+"/folder", - )[0] + expectedPath1 := getExpectedPathGenerator(suite.T(), tenant, user, driveBasePath1) + expectedPath2 := getExpectedPathGenerator(suite.T(), tenant, user, driveBasePath2) + + rootFolderPath1 := expectedPath1("") + folderPath1 := expectedPath1("/folder") + + rootFolderPath2 := expectedPath2("") + folderPath2 := expectedPath2("/folder") table := []struct { name string @@ -1130,7 +991,7 @@ func (suite *OneDriveCollectionsSuite) TestGet() { errCheck 
assert.ErrorAssertionFunc // Collection name -> set of item IDs. We can't check item data because // that's not mocked out. Metadata is checked separately. - expectedCollections map[string][]string + expectedCollections map[string]map[data.CollectionState][]string expectedDeltaURLs map[string]string expectedFolderPaths map[string]map[string]string expectedDelList map[string]struct{} @@ -1143,14 +1004,15 @@ func (suite *OneDriveCollectionsSuite) TestGet() { driveID1: { { items: []models.DriveItemable{ - delItem("file", testBaseDrivePath, true, false, false), + driveRootItem("root"), // will be present, not needed + delItem("file", driveBasePath1, "root", true, false, false), }, deltaLink: &delta, }, }, }, errCheck: assert.NoError, - expectedCollections: map[string][]string{}, + expectedCollections: map[string]map[data.CollectionState][]string{}, expectedDeltaURLs: map[string]string{ driveID1: delta, }, @@ -1170,20 +1032,16 @@ func (suite *OneDriveCollectionsSuite) TestGet() { driveID1: { { items: []models.DriveItemable{ - driveItem("file", "file", testBaseDrivePath, true, false, false), + driveRootItem("root"), + driveItem("file", "file", driveBasePath1, "root", true, false, false), }, deltaLink: &delta, }, }, }, errCheck: assert.NoError, - expectedCollections: map[string][]string{ - expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath, - )[0]: {"file"}, + expectedCollections: map[string]map[data.CollectionState][]string{ + expectedPath1(""): {data.NewState: {"file"}}, }, expectedDeltaURLs: map[string]string{ driveID1: delta, @@ -1202,24 +1060,25 @@ func (suite *OneDriveCollectionsSuite) TestGet() { driveID1: { { items: []models.DriveItemable{ - driveItem("folder", "folder", testBaseDrivePath, false, true, false), - driveItem("file", "file", testBaseDrivePath+"/folder", true, false, false), + driveRootItem("root"), + driveItem("folder", "folder", driveBasePath1, "root", false, true, false), + driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), }, deltaLink: &delta, }, }, }, errCheck: assert.NoError, - expectedCollections: map[string][]string{ - folderPath: {"file"}, - rootFolderPath: {"folder"}, + expectedCollections: map[string]map[data.CollectionState][]string{ + folderPath1: {data.NewState: {"file"}}, + rootFolderPath1: {data.NewState: {"folder"}}, }, expectedDeltaURLs: map[string]string{ driveID1: delta, }, expectedFolderPaths: map[string]map[string]string{ driveID1: { - "folder": folderPath, + "folder": folderPath1, }, }, expectedDelList: map[string]struct{}{}, @@ -1231,17 +1090,18 @@ func (suite *OneDriveCollectionsSuite) TestGet() { driveID1: { { items: []models.DriveItemable{ - driveItem("folder", "folder", testBaseDrivePath, false, true, false), - driveItem("file", "file", testBaseDrivePath+"/folder", true, false, false), + driveRootItem("root"), + driveItem("folder", "folder", driveBasePath1, "root", false, true, false), + driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), }, deltaLink: &empty, }, }, }, errCheck: assert.NoError, - expectedCollections: map[string][]string{ - folderPath: {"file"}, - rootFolderPath: {"folder"}, + expectedCollections: map[string]map[data.CollectionState][]string{ + folderPath1: {data.NewState: {"file"}}, + rootFolderPath1: {data.NewState: {"folder"}}, }, expectedDeltaURLs: map[string]string{}, expectedFolderPaths: map[string]map[string]string{}, @@ -1254,31 +1114,33 @@ func (suite *OneDriveCollectionsSuite) TestGet() { driveID1: { { items: []models.DriveItemable{ - 
driveItem("folder", "folder", testBaseDrivePath, false, true, false), - driveItem("file", "file", testBaseDrivePath+"/folder", true, false, false), + driveRootItem("root"), + driveItem("folder", "folder", driveBasePath1, "root", false, true, false), + driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), }, nextLink: &next, }, { items: []models.DriveItemable{ - driveItem("folder", "folder", testBaseDrivePath, false, true, false), - driveItem("file2", "file2", testBaseDrivePath+"/folder", true, false, false), + driveRootItem("root"), + driveItem("folder", "folder", driveBasePath1, "root", false, true, false), + driveItem("file2", "file2", driveBasePath1+"/folder", "folder", true, false, false), }, deltaLink: &delta, }, }, }, errCheck: assert.NoError, - expectedCollections: map[string][]string{ - folderPath: {"file", "file2"}, - rootFolderPath: {"folder"}, + expectedCollections: map[string]map[data.CollectionState][]string{ + folderPath1: {data.NewState: {"file", "file2"}}, + rootFolderPath1: {data.NewState: {"folder"}}, }, expectedDeltaURLs: map[string]string{ driveID1: delta, }, expectedFolderPaths: map[string]map[string]string{ driveID1: { - "folder": folderPath, + "folder": folderPath1, }, }, expectedDelList: map[string]struct{}{}, @@ -1293,8 +1155,9 @@ func (suite *OneDriveCollectionsSuite) TestGet() { driveID1: { { items: []models.DriveItemable{ - driveItem("folder", "folder", testBaseDrivePath, false, true, false), - driveItem("file", "file", testBaseDrivePath+"/folder", true, false, false), + driveRootItem("root"), + driveItem("folder", "folder", driveBasePath1, "root", false, true, false), + driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), }, deltaLink: &delta, }, @@ -1302,19 +1165,20 @@ func (suite *OneDriveCollectionsSuite) TestGet() { driveID2: { { items: []models.DriveItemable{ - driveItem("folder", "folder", driveBasePath2, false, true, false), - driveItem("file", "file", driveBasePath2+"/folder", true, false, false), + driveRootItem("root"), + driveItem("folder", "folder", driveBasePath2, "root", false, true, false), + driveItem("file", "file", driveBasePath2+"/folder", "folder", true, false, false), }, deltaLink: &delta2, }, }, }, errCheck: assert.NoError, - expectedCollections: map[string][]string{ - folderPath: {"file"}, - folderPath2: {"file"}, - rootFolderPath: {"folder"}, - rootFolderPath2: {"folder"}, + expectedCollections: map[string]map[data.CollectionState][]string{ + folderPath1: {data.NewState: {"file"}}, + folderPath2: {data.NewState: {"file"}}, + rootFolderPath1: {data.NewState: {"folder"}}, + rootFolderPath2: {data.NewState: {"folder"}}, }, expectedDeltaURLs: map[string]string{ driveID1: delta, @@ -1322,7 +1186,7 @@ func (suite *OneDriveCollectionsSuite) TestGet() { }, expectedFolderPaths: map[string]map[string]string{ driveID1: { - "folder": folderPath, + "folder": folderPath1, }, driveID2: { "folder": folderPath2, @@ -1356,20 +1220,16 @@ func (suite *OneDriveCollectionsSuite) TestGet() { }, { items: []models.DriveItemable{ - driveItem("file", "file", testBaseDrivePath, true, false, false), + driveRootItem("root"), + driveItem("file", "file", driveBasePath1, "root", true, false, false), }, deltaLink: &delta, }, }, }, errCheck: assert.NoError, - expectedCollections: map[string][]string{ - expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath, - )[0]: {"file"}, + expectedCollections: map[string]map[data.CollectionState][]string{ + expectedPath1(""): {data.NewState: {"file"}}, }, 
expectedDeltaURLs: map[string]string{ driveID1: delta, @@ -1383,7 +1243,7 @@ func (suite *OneDriveCollectionsSuite) TestGet() { doNotMergeItems: true, }, { - name: "OneDrive_MultipleCollections_DeltaError", + name: "OneDrive_TwoItemPage_DeltaError", drives: []models.Driveable{drive1}, items: map[string][]deltaPagerResult{ driveID1: { @@ -1392,85 +1252,67 @@ func (suite *OneDriveCollectionsSuite) TestGet() { }, { items: []models.DriveItemable{ - driveItem("file", "file", testBaseDrivePath, true, false, false), + driveRootItem("root"), + driveItem("file", "file", driveBasePath1, "root", true, false, false), }, nextLink: &next, }, { items: []models.DriveItemable{ - driveItem("file", "file", testBaseDrivePath+"/folder", true, false, false), + driveRootItem("root"), + driveItem("folder", "folder", driveBasePath1, "root", false, true, false), + driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), }, deltaLink: &delta, }, }, }, errCheck: assert.NoError, - expectedCollections: map[string][]string{ - expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath, - )[0]: {"file"}, - expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/folder", - )[0]: {"file"}, + expectedCollections: map[string]map[data.CollectionState][]string{ + expectedPath1(""): {data.NewState: {"file", "folder"}}, + expectedPath1("/folder"): {data.NewState: {"file"}}, }, expectedDeltaURLs: map[string]string{ driveID1: delta, }, expectedFolderPaths: map[string]map[string]string{ - // We need an empty map here so deserializing metadata knows the delta - // token for this drive is valid. - driveID1: {}, + driveID1: {"folder": folderPath1}, }, expectedDelList: map[string]struct{}{}, doNotMergeItems: true, }, { - name: "OneDrive_MultipleCollections_NoDeltaError", + name: "OneDrive_TwoItemPage_NoDeltaError", drives: []models.Driveable{drive1}, items: map[string][]deltaPagerResult{ driveID1: { { items: []models.DriveItemable{ - driveItem("file", "file", testBaseDrivePath, true, false, false), + driveRootItem("root"), + driveItem("file", "file", driveBasePath1, "root", true, false, false), }, nextLink: &next, }, { items: []models.DriveItemable{ - driveItem("file", "file", testBaseDrivePath+"/folder", true, false, false), + driveRootItem("root"), + driveItem("folder", "folder", driveBasePath1, "root", false, true, false), + driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), }, deltaLink: &delta, }, }, }, errCheck: assert.NoError, - expectedCollections: map[string][]string{ - expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath, - )[0]: {"file"}, - expectedPathAsSlice( - suite.T(), - tenant, - user, - testBaseDrivePath+"/folder", - )[0]: {"file"}, + expectedCollections: map[string]map[data.CollectionState][]string{ + expectedPath1(""): {data.NewState: {"file", "folder"}}, + expectedPath1("/folder"): {data.NewState: {"file"}}, }, expectedDeltaURLs: map[string]string{ driveID1: delta, }, expectedFolderPaths: map[string]map[string]string{ - // We need an empty map here so deserializing metadata knows the delta - // token for this drive is valid. - driveID1: {}, + driveID1: {"folder": folderPath1}, }, expectedDelList: map[string]struct{}{}, doNotMergeItems: false, @@ -1518,8 +1360,33 @@ func (suite *OneDriveCollectionsSuite) TestGet() { c.drivePagerFunc = drivePagerFunc c.itemPagerFunc = itemPagerFunc - // TODO(ashmrtn): Allow passing previous metadata. 
- cols, delList, err := c.Get(ctx, nil) + mc, err := graph.MakeMetadataCollection( + tenant, + user, + path.OneDriveService, + path.FilesCategory, + []graph.MetadataCollectionEntry{ + graph.NewMetadataEntry( + graph.DeltaURLsFileName, + map[string]string{ + driveID1: "prev-delta", + driveID2: "prev-delta", + }, + ), + graph.NewMetadataEntry( + graph.PreviousPathFileName, + map[string]map[string]string{ + driveID1: {}, + driveID2: {}, + }, + ), + }, + func(*support.ConnectorOperationStatus) {}, + ) + assert.NoError(t, err, "creating metadata collection") + + prevMetadata := []data.RestoreCollection{data.NotFoundRestoreCollection{Collection: mc}} + cols, delList, err := c.Get(ctx, prevMetadata) test.errCheck(t, err) if err != nil { @@ -1555,7 +1422,7 @@ func (suite *OneDriveCollectionsSuite) TestGet() { itemIDs = append(itemIDs, id) } - assert.ElementsMatch(t, test.expectedCollections[folderPath], itemIDs) + assert.ElementsMatch(t, test.expectedCollections[folderPath][baseCol.State()], itemIDs) assert.Equal(t, test.doNotMergeItems, baseCol.DoNotMergeItems(), "DoNotMergeItems") } @@ -1568,6 +1435,7 @@ func driveItem( id string, name string, parentPath string, + parentID string, isFile, isFolder, isPackage bool, ) models.DriveItemable { item := models.NewDriveItem() @@ -1576,6 +1444,7 @@ func driveItem( parentReference := models.NewItemReference() parentReference.SetPath(&parentPath) + parentReference.SetId(&parentID) item.SetParentReference(parentReference) switch { @@ -1590,11 +1459,22 @@ func driveItem( return item } +func driveRootItem(id string) models.DriveItemable { + name := "root" + item := models.NewDriveItem() + item.SetName(&name) + item.SetId(&id) + item.SetRoot(models.NewRoot()) + + return item +} + // delItem creates a DriveItemable that is marked as deleted. path must be set // to the base drive path. 
func delItem( id string, parentPath string, + parentID string, isFile, isFolder, isPackage bool, ) models.DriveItemable { item := models.NewDriveItem() @@ -1603,6 +1483,7 @@ func delItem( parentReference := models.NewItemReference() parentReference.SetPath(&parentPath) + parentReference.SetId(&parentID) item.SetParentReference(parentReference) switch { @@ -1631,12 +1512,14 @@ func getDeltaError() error { func (suite *OneDriveCollectionsSuite) TestCollectItems() { next := "next" delta := "delta" + prevDelta := "prev-delta" table := []struct { name string items []deltaPagerResult deltaURL string prevDeltaSuccess bool + prevDelta string err error }{ { @@ -1646,6 +1529,16 @@ func (suite *OneDriveCollectionsSuite) TestCollectItems() { {deltaLink: &delta}, }, prevDeltaSuccess: true, + prevDelta: prevDelta, + }, + { + name: "empty prev delta", + deltaURL: delta, + items: []deltaPagerResult{ + {deltaLink: &delta}, + }, + prevDeltaSuccess: false, + prevDelta: "", }, { name: "next then delta", @@ -1655,6 +1548,7 @@ func (suite *OneDriveCollectionsSuite) TestCollectItems() { {deltaLink: &delta}, }, prevDeltaSuccess: true, + prevDelta: prevDelta, }, { name: "invalid prev delta", @@ -1663,6 +1557,7 @@ func (suite *OneDriveCollectionsSuite) TestCollectItems() { {err: getDeltaError()}, {deltaLink: &delta}, // works on retry }, + prevDelta: prevDelta, prevDeltaSuccess: false, }, { @@ -1671,6 +1566,7 @@ func (suite *OneDriveCollectionsSuite) TestCollectItems() { {nextLink: &next}, {err: assert.AnError}, }, + prevDelta: prevDelta, prevDeltaSuccess: true, err: assert.AnError, }, @@ -1702,7 +1598,7 @@ func (suite *OneDriveCollectionsSuite) TestCollectItems() { "", "General", collectorFunc, - "", + test.prevDelta, ) require.ErrorIs(suite.T(), err, test.err, "delta fetch err") diff --git a/src/internal/connector/onedrive/drive.go b/src/internal/connector/onedrive/drive.go index 471a42aad..0418262ae 100644 --- a/src/internal/connector/onedrive/drive.go +++ b/src/internal/connector/onedrive/drive.go @@ -194,7 +194,7 @@ func collectItems( oldPaths = map[string]string{} newPaths = map[string]string{} excluded = map[string]struct{}{} - invalidPrevDelta = false + invalidPrevDelta = len(prevDelta) == 0 ) maps.Copy(newPaths, oldPaths) diff --git a/src/internal/connector/onedrive/restore.go b/src/internal/connector/onedrive/restore.go index d5df9dedc..ed098ad6a 100644 --- a/src/internal/connector/onedrive/restore.go +++ b/src/internal/connector/onedrive/restore.go @@ -58,6 +58,45 @@ func getParentPermissions( return parentPerms, nil } +func getParentAndCollectionPermissions( + drivePath *path.DrivePath, + collectionPath path.Path, + permissions map[string][]UserPermission, + restorePerms bool, +) ([]UserPermission, []UserPermission, error) { + if !restorePerms { + return nil, nil, nil + } + + var ( + err error + parentPerms []UserPermission + colPerms []UserPermission + ) + + // Only get parent permissions if we're not restoring the root. + if len(drivePath.Folders) > 0 { + parentPath, err := collectionPath.Dir() + if err != nil { + return nil, nil, clues.Wrap(err, "getting parent path") + } + + parentPerms, err = getParentPermissions(parentPath, permissions) + if err != nil { + return nil, nil, clues.Wrap(err, "getting parent permissions") + } + } + + // TODO(ashmrtn): For versions after this pull the permissions from the + // current collection with Fetch(). 
+ colPerms, err = getParentPermissions(collectionPath, permissions) + if err != nil { + return nil, nil, clues.Wrap(err, "getting collection permissions") + } + + return parentPerms, colPerms, nil +} + // RestoreCollections will restore the specified data collections into OneDrive func RestoreCollections( ctx context.Context, @@ -94,24 +133,12 @@ func RestoreCollections( // Iterate through the data collections and restore the contents of each for _, dc := range dcs { - var ( - parentPerms []UserPermission - err error - ) - - if opts.RestorePermissions { - parentPerms, err = getParentPermissions(dc.FullPath(), parentPermissions) - if err != nil { - errUpdater(dc.FullPath().String(), err) - } - } - metrics, folderPerms, permissionIDMappings, canceled = RestoreCollection( ctx, backupVersion, service, dc, - parentPerms, + parentPermissions, OneDriveSource, dest.ContainerName, deets, @@ -150,7 +177,7 @@ func RestoreCollection( backupVersion int, service graph.Servicer, dc data.RestoreCollection, - parentPerms []UserPermission, + parentPermissions map[string][]UserPermission, source driveSource, restoreContainerName string, deets *details.Builder, @@ -186,11 +213,28 @@ func RestoreCollection( trace.Log(ctx, "gc:oneDrive:restoreCollection", directory.String()) logger.Ctx(ctx).Infow( "restoring to destination", - "origin", dc.FullPath().Folder(), + "origin", dc.FullPath().Folder(false), "destination", restoreFolderElements) + parentPerms, colPerms, err := getParentAndCollectionPermissions( + drivePath, + dc.FullPath(), + parentPermissions, + restorePerms) + if err != nil { + errUpdater(directory.String(), err) + return metrics, folderPerms, permissionIDMappings, false + } + // Create restore folders and get the folder ID of the folder the data stream will be restored in - restoreFolderID, err := CreateRestoreFolders(ctx, service, drivePath.DriveID, restoreFolderElements) + restoreFolderID, permissionIDMappings, err := createRestoreFoldersWithPermissions( + ctx, + service, + drivePath.DriveID, + restoreFolderElements, + parentPerms, + colPerms, + permissionIDMappings) if err != nil { errUpdater(directory.String(), errors.Wrapf(err, "failed to create folders %v", restoreFolderElements)) return metrics, folderPerms, permissionIDMappings, false @@ -240,7 +284,13 @@ func RestoreCollection( continue } - deets.Add(itemPath.String(), itemPath.ShortRef(), "", true, itemInfo) + deets.Add( + itemPath.String(), + itemPath.ShortRef(), + "", + "", // TODO: implement locationRef + true, + itemInfo) // Mark it as success without processing .meta // file if we are not restoring permissions @@ -272,7 +322,7 @@ func RestoreCollection( service, drivePath.DriveID, itemID, - parentPerms, + colPerms, meta.Permissions, permissionIDMappings, ) @@ -288,40 +338,16 @@ func RestoreCollection( // RestoreOp, so we still need to handle them in some way. 
continue } else if strings.HasSuffix(name, DirMetaFileSuffix) { - trimmedName := strings.TrimSuffix(name, DirMetaFileSuffix) - folderID, err := createRestoreFolder( - ctx, - service, - drivePath.DriveID, - trimmedName, - restoreFolderID, - ) - if err != nil { - errUpdater(itemData.UUID(), err) - continue - } - if !restorePerms { continue } - meta, err := getMetadata(itemData.ToReader()) - if err != nil { - errUpdater(itemData.UUID(), err) - continue - } + metaReader := itemData.ToReader() + meta, err := getMetadata(metaReader) + metaReader.Close() - permissionIDMappings, err = restorePermissions( - ctx, - service, - drivePath.DriveID, - folderID, - parentPerms, - meta.Permissions, - permissionIDMappings, - ) if err != nil { - errUpdater(itemData.UUID(), err) + errUpdater(itemData.UUID(), clues.Wrap(err, "folder metadata")) continue } @@ -351,36 +377,56 @@ func RestoreCollection( continue } - deets.Add(itemPath.String(), itemPath.ShortRef(), "", true, itemInfo) + deets.Add( + itemPath.String(), + itemPath.ShortRef(), + "", + "", // TODO: implement locationRef + true, + itemInfo) metrics.Successes++ } } } } -// Creates a folder with its permissions -func createRestoreFolder( +// createRestoreFoldersWithPermissions creates the restore folder hierarchy in +// the specified drive and returns the folder ID of the last folder entry in the +// hierarchy. Permissions are only applied to the last folder in the hierarchy. +// Passing nil for the permissions results in just creating the folder(s). +func createRestoreFoldersWithPermissions( ctx context.Context, service graph.Servicer, - driveID, folder, parentFolderID string, -) (string, error) { - folderItem, err := createItem(ctx, service, driveID, parentFolderID, newItem(folder, true)) + driveID string, + restoreFolders []string, + parentPermissions []UserPermission, + folderPermissions []UserPermission, + permissionIDMappings map[string]string, +) (string, map[string]string, error) { + id, err := CreateRestoreFolders(ctx, service, driveID, restoreFolders) if err != nil { - return "", errors.Wrapf( - err, - "failed to create folder %s/%s. details: %s", parentFolderID, folder, - support.ConnectorStackErrorTrace(err), - ) + return "", permissionIDMappings, err } - logger.Ctx(ctx).Debugf("Resolved %s in %s to %s", folder, parentFolderID, *folderItem.GetId()) + permissionIDMappings, err = restorePermissions( + ctx, + service, + driveID, + id, + parentPermissions, + folderPermissions, + permissionIDMappings) - return *folderItem.GetId(), nil + return id, permissionIDMappings, err } -// createRestoreFolders creates the restore folder hierarchy in the specified drive and returns the folder ID -// of the last folder entry in the hierarchy -func CreateRestoreFolders(ctx context.Context, service graph.Servicer, driveID string, restoreFolders []string, +// CreateRestoreFolders creates the restore folder hierarchy in the specified +// drive and returns the folder ID of the last folder entry in the hierarchy. 
+func CreateRestoreFolders( + ctx context.Context, + service graph.Servicer, + driveID string, + restoreFolders []string, ) (string, error) { driveRoot, err := service.Client().DrivesById(driveID).Root().Get(ctx, nil) if err != nil { diff --git a/src/internal/connector/sharepoint/collection.go b/src/internal/connector/sharepoint/collection.go index 91ebf5d65..d4150fec0 100644 --- a/src/internal/connector/sharepoint/collection.go +++ b/src/internal/connector/sharepoint/collection.go @@ -167,7 +167,7 @@ func (sc *Collection) finishPopulation(ctx context.Context, attempts, success in TotalBytes: totalBytes, }, errs, - sc.fullPath.Folder()) + sc.fullPath.Folder(false)) logger.Ctx(ctx).Debug(status.String()) if sc.statusUpdater != nil { @@ -191,7 +191,7 @@ func (sc *Collection) populate(ctx context.Context) { ctx, sc.fullPath.Category().String(), observe.Safe("name"), - observe.PII(sc.fullPath.Folder())) + observe.PII(sc.fullPath.Folder(false))) go closer() defer func() { diff --git a/src/internal/connector/sharepoint/data_collections_test.go b/src/internal/connector/sharepoint/data_collections_test.go index 623b5c2e7..75710bc24 100644 --- a/src/internal/connector/sharepoint/data_collections_test.go +++ b/src/internal/connector/sharepoint/data_collections_test.go @@ -20,7 +20,7 @@ import ( // --------------------------------------------------------------------------- const ( - testBaseDrivePath = "drive/driveID1/root:" + testBaseDrivePath = "drives/driveID1/root:" ) type testFolderMatcher struct { @@ -60,6 +60,7 @@ func (suite *SharePointLibrariesSuite) TestUpdateCollections() { items []models.DriveItemable scope selectors.SharePointScope expect assert.ErrorAssertionFunc + expectedCollectionIDs []string expectedCollectionPaths []string expectedItemCount int expectedContainerCount int @@ -68,10 +69,12 @@ func (suite *SharePointLibrariesSuite) TestUpdateCollections() { { testCase: "Single File", items: []models.DriveItemable{ - driveItem("file", testBaseDrivePath, true), + driveRootItem("root"), + driveItem("file", testBaseDrivePath, "root", true), }, - scope: anyFolder, - expect: assert.NoError, + scope: anyFolder, + expect: assert.NoError, + expectedCollectionIDs: []string{"root"}, expectedCollectionPaths: expectedPathAsSlice( suite.T(), tenant, @@ -101,26 +104,30 @@ func (suite *SharePointLibrariesSuite) TestUpdateCollections() { &MockGraphService{}, nil, control.Options{}) - err := c.UpdateCollections(ctx, "driveID", "General", test.items, paths, newPaths, excluded, true) + err := c.UpdateCollections(ctx, "driveID1", "General", test.items, paths, newPaths, excluded, true) test.expect(t, err) - assert.Equal(t, len(test.expectedCollectionPaths), len(c.CollectionMap), "collection paths") + assert.Equal(t, len(test.expectedCollectionIDs), len(c.CollectionMap), "collection paths") assert.Equal(t, test.expectedItemCount, c.NumItems, "item count") assert.Equal(t, test.expectedFileCount, c.NumFiles, "file count") assert.Equal(t, test.expectedContainerCount, c.NumContainers, "container count") - for _, collPath := range test.expectedCollectionPaths { + for _, collPath := range test.expectedCollectionIDs { assert.Contains(t, c.CollectionMap, collPath) } + for _, col := range c.CollectionMap { + assert.Contains(t, test.expectedCollectionPaths, col.FullPath().String()) + } }) } } -func driveItem(name string, path string, isFile bool) models.DriveItemable { +func driveItem(name, parentPath, parentID string, isFile bool) models.DriveItemable { item := models.NewDriveItem() item.SetName(&name) 
item.SetId(&name) parentReference := models.NewItemReference() - parentReference.SetPath(&path) + parentReference.SetPath(&parentPath) + parentReference.SetId(&parentID) item.SetParentReference(parentReference) if isFile { @@ -130,6 +137,16 @@ func driveItem(name string, path string, isFile bool) models.DriveItemable { return item } +func driveRootItem(id string) models.DriveItemable { + name := "root" + item := models.NewDriveItem() + item.SetName(&name) + item.SetId(&id) + item.SetRoot(models.NewRoot()) + + return item +} + type SharePointPagesSuite struct { suite.Suite } diff --git a/src/internal/connector/sharepoint/restore.go b/src/internal/connector/sharepoint/restore.go index c2c92249f..3173d5778 100644 --- a/src/internal/connector/sharepoint/restore.go +++ b/src/internal/connector/sharepoint/restore.go @@ -69,7 +69,7 @@ func RestoreCollections( backupVersion, service, dc, - []onedrive.UserPermission{}, // Currently permission data is not stored for sharepoint + map[string][]onedrive.UserPermission{}, // Currently permission data is not stored for sharepoint onedrive.OneDriveSource, dest.ContainerName, deets, @@ -276,6 +276,7 @@ func RestoreListCollection( itemPath.String(), itemPath.ShortRef(), "", + "", // TODO: implement locationRef true, itemInfo) @@ -355,6 +356,7 @@ func RestorePageCollection( itemPath.String(), itemPath.ShortRef(), "", + "", // TODO: implement locationRef true, itemInfo, ) diff --git a/src/internal/connector/support/status.go b/src/internal/connector/support/status.go index dcf5f32c5..849fee9bc 100644 --- a/src/internal/connector/support/status.go +++ b/src/internal/connector/support/status.go @@ -6,8 +6,6 @@ import ( "github.com/dustin/go-humanize" multierror "github.com/hashicorp/go-multierror" - - "github.com/alcionai/corso/src/pkg/logger" ) // ConnectorOperationStatus is a data type used to describe the state of @@ -80,15 +78,6 @@ func CreateStatus( additionalDetails: details, } - if status.ObjectCount != status.ErrorCount+status.Successful { - logger.Ctx(ctx).Errorw( - "status object count does not match errors + successes", - "objects", cm.Objects, - "successes", cm.Successes, - "numErrors", numErr, - "errors", err) - } - return &status } @@ -114,10 +103,11 @@ func MergeStatus(one, two ConnectorOperationStatus) ConnectorOperationStatus { } status := ConnectorOperationStatus{ - lastOperation: one.lastOperation, - ObjectCount: one.ObjectCount + two.ObjectCount, - FolderCount: one.FolderCount + two.FolderCount, - Successful: one.Successful + two.Successful, + lastOperation: one.lastOperation, + ObjectCount: one.ObjectCount + two.ObjectCount, + FolderCount: one.FolderCount + two.FolderCount, + Successful: one.Successful + two.Successful, + // TODO: remove in favor of fault.Errors ErrorCount: one.ErrorCount + two.ErrorCount, Err: multierror.Append(one.Err, two.Err).ErrorOrNil(), bytes: one.bytes + two.bytes, @@ -144,14 +134,11 @@ func (cos *ConnectorOperationStatus) String() string { cos.Successful, cos.ObjectCount, humanize.Bytes(uint64(cos.bytes)), - cos.FolderCount, - ) + cos.FolderCount) if cos.incomplete { message += " " + cos.incompleteReason } - message += " " + operationStatement + cos.additionalDetails + "\n" - - return message + return message + " " + operationStatement + cos.additionalDetails } diff --git a/src/internal/connector/uploadsession/uploadsession.go b/src/internal/connector/uploadsession/uploadsession.go index 818159e0b..5dc04388c 100644 --- a/src/internal/connector/uploadsession/uploadsession.go +++ 
b/src/internal/connector/uploadsession/uploadsession.go @@ -5,7 +5,7 @@ import ( "context" "fmt" - "github.com/pkg/errors" + "github.com/alcionai/clues" "gopkg.in/resty.v1" "github.com/alcionai/corso/src/pkg/logger" @@ -38,7 +38,7 @@ func NewWriter(id, url string, size int64) *writer { // Write will upload the provided data to M365. It sets the `Content-Length` and `Content-Range` headers based on // https://docs.microsoft.com/en-us/graph/api/driveitem-createuploadsession -func (iw *writer) Write(p []byte) (n int, err error) { +func (iw *writer) Write(p []byte) (int, error) { rangeLength := len(p) logger.Ctx(context.Background()).Debugf("WRITE for %s. Size:%d, Offset: %d, TotalSize: %d", iw.id, rangeLength, iw.lastWrittenOffset, iw.contentLength) @@ -47,7 +47,7 @@ func (iw *writer) Write(p []byte) (n int, err error) { // PUT the request - set headers `Content-Range`to describe total size and `Content-Length` to describe size of // data in the current request - resp, err := iw.client.R(). + _, err := iw.client.R(). SetHeaders(map[string]string{ contentRangeHeaderKey: fmt.Sprintf(contentRangeHeaderValueFmt, iw.lastWrittenOffset, @@ -57,15 +57,15 @@ func (iw *writer) Write(p []byte) (n int, err error) { }). SetBody(bytes.NewReader(p)).Put(iw.url) if err != nil { - return 0, errors.Wrapf(err, - "failed to upload item %s. Upload failed at Size:%d, Offset: %d, TotalSize: %d ", - iw.id, rangeLength, iw.lastWrittenOffset, iw.contentLength) + return 0, clues.Wrap(err, "uploading item").WithAll( + "upload_id", iw.id, + "upload_chunk_size", rangeLength, + "upload_offset", iw.lastWrittenOffset, + "upload_size", iw.contentLength) } // Update last offset iw.lastWrittenOffset = endOffset - logger.Ctx(context.Background()).Debugf("Response: %s", resp.String()) - return rangeLength, nil } diff --git a/src/internal/data/data_collection.go b/src/internal/data/data_collection.go index 764a7b886..3f72bed2e 100644 --- a/src/internal/data/data_collection.go +++ b/src/internal/data/data_collection.go @@ -96,6 +96,12 @@ type Stream interface { Deleted() bool } +// LocationPather provides a LocationPath describing the path with Display Names +// instead of canonical IDs +type LocationPather interface { + LocationPath() path.Path +} + // StreamInfo is used to provide service specific // information about the Stream type StreamInfo interface { @@ -124,7 +130,7 @@ func StateOf(prev, curr path.Path) CollectionState { return NewState } - if curr.Folder() != prev.Folder() { + if curr.Folder(false) != prev.Folder(false) { return MovedState } diff --git a/src/internal/kopia/upload.go b/src/internal/kopia/upload.go index e25e4ed0f..64feed06c 100644 --- a/src/internal/kopia/upload.go +++ b/src/internal/kopia/upload.go @@ -123,10 +123,11 @@ func (rw *restoreStreamReader) Read(p []byte) (n int, err error) { } type itemDetails struct { - info *details.ItemInfo - repoPath path.Path - prevPath path.Path - cached bool + info *details.ItemInfo + repoPath path.Path + prevPath path.Path + locationPath path.Path + cached bool } type corsoProgress struct { @@ -135,7 +136,7 @@ type corsoProgress struct { deets *details.Builder // toMerge represents items that we don't have in-memory item info for. The // item info for these items should be sourced from a base snapshot later on. 
- toMerge map[string]path.Path + toMerge map[string]PrevRefs mu sync.RWMutex totalBytes int64 errs *fault.Errors @@ -180,27 +181,45 @@ func (cp *corsoProgress) FinishedFile(relativePath string, err error) { cp.mu.Lock() defer cp.mu.Unlock() - cp.toMerge[d.prevPath.ShortRef()] = d.repoPath + cp.toMerge[d.prevPath.ShortRef()] = PrevRefs{ + Repo: d.repoPath, + Location: d.locationPath, + } return } - parent := d.repoPath.ToBuilder().Dir() + var ( + locationFolders string + locPB *path.Builder + parent = d.repoPath.ToBuilder().Dir() + ) + + if d.locationPath != nil { + locationFolders = d.locationPath.Folder(true) + + locPB = d.locationPath.ToBuilder() + + // folderEntriesForPath assumes the location will + // not have an item element appended + if len(d.locationPath.Item()) > 0 { + locPB = locPB.Dir() + } + } cp.deets.Add( d.repoPath.String(), d.repoPath.ShortRef(), parent.ShortRef(), + locationFolders, !d.cached, - *d.info, - ) + *d.info) - folders := details.FolderEntriesForPath(parent) + folders := details.FolderEntriesForPath(parent, locPB) cp.deets.AddFoldersForItem( folders, *d.info, - !d.cached, - ) + !d.cached) } // Kopia interface function used as a callback when kopia finishes hashing a file. @@ -263,12 +282,17 @@ func collectionEntries( } var ( + locationPath path.Path // Track which items have already been seen so we can skip them if we see // them again in the data from the base snapshot. seen = map[string]struct{}{} items = streamedEnts.Items() ) + if lp, ok := streamedEnts.(data.LocationPather); ok { + locationPath = lp.LocationPath() + } + for { select { case <-ctx.Done(): @@ -328,7 +352,11 @@ func collectionEntries( // previous snapshot then we should populate prevPath here and leave // info nil. itemInfo := ei.Info() - d := &itemDetails{info: &itemInfo, repoPath: itemPath} + d := &itemDetails{ + info: &itemInfo, + repoPath: itemPath, + locationPath: locationPath, + } progress.put(encodeAsPath(itemPath.PopFront().Elements()...), d) } @@ -356,6 +384,7 @@ func streamBaseEntries( cb func(context.Context, fs.Entry) error, curPath path.Path, prevPath path.Path, + locationPath path.Path, dir fs.Directory, encodedSeen map[string]struct{}, globalExcludeSet map[string]struct{}, @@ -411,7 +440,12 @@ func streamBaseEntries( // All items have item info in the base backup. However, we need to make // sure we have enough metadata to find those entries. To do that we add the // item to progress and having progress aggregate everything for later. - d := &itemDetails{info: nil, repoPath: itemPath, prevPath: prevItemPath} + d := &itemDetails{ + info: nil, + repoPath: itemPath, + prevPath: prevItemPath, + locationPath: locationPath, + } progress.put(encodeAsPath(itemPath.PopFront().Elements()...), d) if err := cb(ctx, entry); err != nil { @@ -455,6 +489,12 @@ func getStreamItemFunc( } } + var locationPath path.Path + + if lp, ok := streamedEnts.(data.LocationPather); ok { + locationPath = lp.LocationPath() + } + seen, err := collectionEntries(ctx, cb, streamedEnts, progress) if err != nil { return errors.Wrap(err, "streaming collection entries") @@ -465,6 +505,7 @@ func getStreamItemFunc( cb, curPath, prevPath, + locationPath, baseDir, seen, globalExcludeSet, @@ -533,6 +574,7 @@ type treeMap struct { // Previous path this directory may have resided at if it is sourced from a // base snapshot. prevPath path.Path + // Child directories of this directory. childDirs map[string]*treeMap // Reference to data pulled from the external service. 
Contains only items in diff --git a/src/internal/kopia/upload_test.go b/src/internal/kopia/upload_test.go index e284d4d67..3a27834d0 100644 --- a/src/internal/kopia/upload_test.go +++ b/src/internal/kopia/upload_test.go @@ -535,7 +535,7 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFileBuildsHierarchyNewItem() { UploadProgress: &snapshotfs.NullUploadProgress{}, deets: bd, pending: map[string]*itemDetails{}, - toMerge: map[string]path.Path{}, + toMerge: map[string]PrevRefs{}, errs: fault.New(true), } @@ -598,30 +598,34 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFileBaseItemDoesntBuildHierarch true, ) - expectedToMerge := map[string]path.Path{ - prevPath.ShortRef(): suite.targetFilePath, + expectedToMerge := map[string]PrevRefs{ + prevPath.ShortRef(): { + Repo: suite.targetFilePath, + Location: suite.targetFilePath, + }, } // Setup stuff. - bd := &details.Builder{} + db := &details.Builder{} cp := corsoProgress{ UploadProgress: &snapshotfs.NullUploadProgress{}, - deets: bd, + deets: db, pending: map[string]*itemDetails{}, - toMerge: map[string]path.Path{}, + toMerge: map[string]PrevRefs{}, errs: fault.New(true), } deets := &itemDetails{ - info: nil, - repoPath: suite.targetFilePath, - prevPath: prevPath, + info: nil, + repoPath: suite.targetFilePath, + prevPath: prevPath, + locationPath: suite.targetFilePath, } + cp.put(suite.targetFileName, deets) require.Len(t, cp.pending, 1) cp.FinishedFile(suite.targetFileName, nil) - assert.Equal(t, expectedToMerge, cp.toMerge) assert.Empty(t, cp.deets) } @@ -651,15 +655,19 @@ func (suite *CorsoProgressUnitSuite) TestFinishedHashingFile() { type HierarchyBuilderUnitSuite struct { suite.Suite - testPath path.Path + testStoragePath path.Path + testLocationPath path.Path } func (suite *HierarchyBuilderUnitSuite) SetupSuite() { - suite.testPath = makePath( + suite.testStoragePath = makePath( + suite.T(), + []string{testTenant, service, testUser, category, testInboxID}, + false) + suite.testLocationPath = makePath( suite.T(), []string{testTenant, service, testUser, category, testInboxDir}, - false, - ) + false) } func TestHierarchyBuilderUnitSuite(t *testing.T) { @@ -672,14 +680,16 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree() { defer flush() - t := suite.T() - tenant := "a-tenant" - user1 := testUser - user1Encoded := encodeAsPath(user1) - user2 := "user2" - user2Encoded := encodeAsPath(user2) - - p2 := makePath(t, []string{tenant, service, user2, category, testInboxDir}, false) + var ( + t = suite.T() + tenant = "a-tenant" + user1 = testUser + user1Encoded = encodeAsPath(user1) + user2 = "user2" + user2Encoded = encodeAsPath(user2) + storeP2 = makePath(t, []string{tenant, service, user2, category, testInboxID}, false) + locP2 = makePath(t, []string{tenant, service, user2, category, testInboxDir}, false) + ) // Encode user names here so we don't have to decode things later. 
expectedFileCount := map[string]int{ @@ -694,13 +704,13 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree() { collections := []data.BackupCollection{ mockconnector.NewMockExchangeCollection( - suite.testPath, - expectedFileCount[user1Encoded], - ), + suite.testStoragePath, + suite.testLocationPath, + expectedFileCount[user1Encoded]), mockconnector.NewMockExchangeCollection( - p2, - expectedFileCount[user2Encoded], - ), + storeP2, + locP2, + expectedFileCount[user2Encoded]), } // Returned directory structure should look like: @@ -734,7 +744,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree() { expectDirs(t, entries, encodeElements(category), true) entries = getDirEntriesForEntry(t, ctx, entries[0]) - expectDirs(t, entries, encodeElements(testInboxDir), true) + expectDirs(t, entries, encodeElements(testInboxID), true) entries = getDirEntriesForEntry(t, ctx, entries[0]) assert.Len(t, entries, expectedFileCount[userName]) @@ -752,9 +762,12 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_MixedDirectory() ctx, flush := tester.NewContext() defer flush() - subdir := "subfolder" - - p2 := makePath(suite.T(), append(suite.testPath.Elements(), subdir), false) + var ( + subfldID = "subfolder_ID" + subfldDir = "subfolder" + storeP2 = makePath(suite.T(), append(suite.testStoragePath.Elements(), subfldID), false) + locP2 = makePath(suite.T(), append(suite.testLocationPath.Elements(), subfldDir), false) + ) // Test multiple orders of items because right now order can matter. Both // orders result in a directory structure like: @@ -762,8 +775,8 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_MixedDirectory() // - exchange // - user1 // - emails - // - Inbox - // - subfolder + // - Inbox_ID + // - subfolder_ID // - 5 separate files // - 42 separate files table := []struct { @@ -774,26 +787,26 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_MixedDirectory() name: "SubdirFirst", layout: []data.BackupCollection{ mockconnector.NewMockExchangeCollection( - p2, - 5, - ), + storeP2, + locP2, + 5), mockconnector.NewMockExchangeCollection( - suite.testPath, - 42, - ), + suite.testStoragePath, + suite.testLocationPath, + 42), }, }, { name: "SubdirLast", layout: []data.BackupCollection{ mockconnector.NewMockExchangeCollection( - suite.testPath, - 42, - ), + suite.testStoragePath, + suite.testLocationPath, + 42), mockconnector.NewMockExchangeCollection( - p2, - 5, - ), + storeP2, + locP2, + 5), }, }, } @@ -822,7 +835,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_MixedDirectory() expectDirs(t, entries, encodeElements(category), true) entries = getDirEntriesForEntry(t, ctx, entries[0]) - expectDirs(t, entries, encodeElements(testInboxDir), true) + expectDirs(t, entries, encodeElements(testInboxID), true) entries = getDirEntriesForEntry(t, ctx, entries[0]) // 42 files and 1 subdirectory. 
@@ -837,7 +850,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_MixedDirectory() } subDirs = append(subDirs, d) - assert.Equal(t, encodeAsPath(subdir), d.Name()) + assert.Equal(t, encodeAsPath(subfldID), d.Name()) } require.Len(t, subDirs, 1) @@ -849,11 +862,14 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_MixedDirectory() } func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_Fails() { - p2 := makePath( + storeP2 := makePath( + suite.T(), + []string{"tenant2", service, "user2", category, testInboxID}, + false) + locP2 := makePath( suite.T(), []string{"tenant2", service, "user2", category, testInboxDir}, - false, - ) + false) table := []struct { name string @@ -876,13 +892,13 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_Fails() { // - 42 separate files []data.BackupCollection{ mockconnector.NewMockExchangeCollection( - suite.testPath, - 5, - ), + suite.testStoragePath, + suite.testLocationPath, + 5), mockconnector.NewMockExchangeCollection( - p2, - 42, - ), + storeP2, + locP2, + 42), }, }, { @@ -890,8 +906,8 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_Fails() { []data.BackupCollection{ mockconnector.NewMockExchangeCollection( nil, - 5, - ), + nil, + 5), }, }, } @@ -931,15 +947,19 @@ func mockIncrementalBase( } func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeErrors() { - dirPath := makePath( - suite.T(), - []string{testTenant, service, testUser, category, testInboxDir}, - false, - ) - dirPath2 := makePath( - suite.T(), - []string{testTenant, service, testUser, category, testArchiveDir}, - false, + var ( + storePath = makePath( + suite.T(), + []string{testTenant, service, testUser, category, testInboxID}, + false) + storePath2 = makePath( + suite.T(), + []string{testTenant, service, testUser, category, testArchiveID}, + false) + locPath = makePath( + suite.T(), + []string{testTenant, service, testUser, category, testArchiveDir}, + false) ) table := []struct { @@ -990,17 +1010,17 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeErrors() { cols := []data.BackupCollection{} for _, s := range test.states { - prevPath := dirPath - nowPath := dirPath + prevPath := storePath + nowPath := storePath switch s { case data.DeletedState: nowPath = nil case data.MovedState: - nowPath = dirPath2 + nowPath = storePath2 } - mc := mockconnector.NewMockExchangeCollection(nowPath, 0) + mc := mockconnector.NewMockExchangeCollection(nowPath, locPath, 0) mc.ColState = s mc.PrevPath = prevPath @@ -1014,15 +1034,23 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeErrors() { } func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() { - dirPath := makePath( - suite.T(), - []string{testTenant, service, testUser, category, testInboxDir}, - false, - ) - dirPath2 := makePath( - suite.T(), - []string{testTenant, service, testUser, category, testArchiveDir}, - false, + var ( + storePath = makePath( + suite.T(), + []string{testTenant, service, testUser, category, testInboxID}, + false) + storePath2 = makePath( + suite.T(), + []string{testTenant, service, testUser, category, testArchiveID}, + false) + locPath = makePath( + suite.T(), + []string{testTenant, service, testUser, category, testInboxDir}, + false) + locPath2 = makePath( + suite.T(), + []string{testTenant, service, testUser, category, testArchiveDir}, + false) ) // Must be a function that returns a new instance each time as StreamingFile @@ -1037,7 +1065,7 @@ func (suite *HierarchyBuilderUnitSuite) 
TestBuildDirectoryTreeSingleSubtree() { }, []fs.Entry{ virtualfs.NewStaticDirectory( - encodeElements(testInboxDir)[0], + encodeElements(testInboxID)[0], []fs.Entry{ virtualfs.StreamingFileWithModTimeFromReader( encodeElements(testFileName)[0], @@ -1058,7 +1086,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() { { name: "SkipsDeletedItems", inputCollections: func() []data.BackupCollection { - mc := mockconnector.NewMockExchangeCollection(dirPath, 1) + mc := mockconnector.NewMockExchangeCollection(storePath, locPath, 1) mc.Names[0] = testFileName mc.DeletedItems[0] = true @@ -1073,7 +1101,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() { }, []*expectedNode{ { - name: testInboxDir, + name: testInboxID, children: []*expectedNode{}, }, }, @@ -1082,7 +1110,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() { { name: "AddsNewItems", inputCollections: func() []data.BackupCollection { - mc := mockconnector.NewMockExchangeCollection(dirPath, 1) + mc := mockconnector.NewMockExchangeCollection(storePath, locPath, 1) mc.Names[0] = testFileName2 mc.Data[0] = testFileData2 mc.ColState = data.NotMovedState @@ -1098,7 +1126,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() { }, []*expectedNode{ { - name: testInboxDir, + name: testInboxID, children: []*expectedNode{ { name: testFileName, @@ -1117,7 +1145,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() { { name: "SkipsUpdatedItems", inputCollections: func() []data.BackupCollection { - mc := mockconnector.NewMockExchangeCollection(dirPath, 1) + mc := mockconnector.NewMockExchangeCollection(storePath, locPath, 1) mc.Names[0] = testFileName mc.Data[0] = testFileData2 mc.ColState = data.NotMovedState @@ -1133,7 +1161,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() { }, []*expectedNode{ { - name: testInboxDir, + name: testInboxID, children: []*expectedNode{ { name: testFileName, @@ -1148,11 +1176,11 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() { { name: "DeleteAndNew", inputCollections: func() []data.BackupCollection { - mc1 := mockconnector.NewMockExchangeCollection(dirPath, 0) + mc1 := mockconnector.NewMockExchangeCollection(storePath, locPath, 0) mc1.ColState = data.DeletedState - mc1.PrevPath = dirPath + mc1.PrevPath = storePath - mc2 := mockconnector.NewMockExchangeCollection(dirPath, 1) + mc2 := mockconnector.NewMockExchangeCollection(storePath, locPath, 1) mc2.ColState = data.NewState mc2.Names[0] = testFileName2 mc2.Data[0] = testFileData2 @@ -1168,7 +1196,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() { }, []*expectedNode{ { - name: testInboxDir, + name: testInboxID, children: []*expectedNode{ { name: testFileName2, @@ -1183,11 +1211,11 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() { { name: "MovedAndNew", inputCollections: func() []data.BackupCollection { - mc1 := mockconnector.NewMockExchangeCollection(dirPath2, 0) + mc1 := mockconnector.NewMockExchangeCollection(storePath2, locPath2, 0) mc1.ColState = data.MovedState - mc1.PrevPath = dirPath + mc1.PrevPath = storePath - mc2 := mockconnector.NewMockExchangeCollection(dirPath, 1) + mc2 := mockconnector.NewMockExchangeCollection(storePath, locPath, 1) mc2.ColState = data.NewState mc2.Names[0] = testFileName2 mc2.Data[0] = testFileData2 @@ -1203,7 +1231,7 @@ func (suite *HierarchyBuilderUnitSuite) 
TestBuildDirectoryTreeSingleSubtree() { }, []*expectedNode{ { - name: testInboxDir, + name: testInboxID, children: []*expectedNode{ { name: testFileName2, @@ -1213,7 +1241,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() { }, }, { - name: testArchiveDir, + name: testArchiveID, children: []*expectedNode{ { name: testFileName, @@ -1227,7 +1255,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() { { name: "NewDoesntMerge", inputCollections: func() []data.BackupCollection { - mc1 := mockconnector.NewMockExchangeCollection(dirPath, 1) + mc1 := mockconnector.NewMockExchangeCollection(storePath, locPath, 1) mc1.ColState = data.NewState mc1.Names[0] = testFileName2 mc1.Data[0] = testFileData2 @@ -1243,7 +1271,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() { }, []*expectedNode{ { - name: testInboxDir, + name: testInboxID, children: []*expectedNode{ { name: testFileName2, @@ -1291,37 +1319,49 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() { func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirectories() { const ( + personalID = "personal_ID" + workID = "work_ID" personalDir = "personal" workDir = "work" ) - inboxPath := makePath( - suite.T(), - []string{testTenant, service, testUser, category, testInboxDir}, - false, - ) - inboxFileName1 := testFileName - inboxFileData1 := testFileData4 - inboxFileName2 := testFileName5 - inboxFileData2 := testFileData5 + var ( + inboxStorePath = makePath( + suite.T(), + []string{testTenant, service, testUser, category, testInboxID}, + false) + inboxLocPath = makePath( + suite.T(), + []string{testTenant, service, testUser, category, testInboxDir}, + false) + inboxFileName1 = testFileName + inboxFileData1 = testFileData4 + inboxFileName2 = testFileName5 + inboxFileData2 = testFileData5 - personalPath := makePath( - suite.T(), - append(inboxPath.Elements(), personalDir), - false, - ) - personalFileName1 := inboxFileName1 - personalFileName2 := testFileName2 + personalStorePath = makePath( + suite.T(), + append(inboxStorePath.Elements(), personalID), + false) + personalLocPath = makePath( + suite.T(), + append(inboxLocPath.Elements(), personalDir), + false) + personalFileName1 = inboxFileName1 + personalFileName2 = testFileName2 - workPath := makePath( - suite.T(), - append(inboxPath.Elements(), workDir), - false, + workStorePath = makePath( + suite.T(), + append(inboxStorePath.Elements(), workID), + false) + workLocPath = makePath( + suite.T(), + append(inboxLocPath.Elements(), workDir), + false) + workFileName1 = testFileName3 + workFileName2 = testFileName4 + workFileData2 = testFileData ) - workFileName1 := testFileName3 - workFileName2 := testFileName4 - - workFileData2 := testFileData // Must be a function that returns a new instance each time as StreamingFile // can only return its Reader once. 
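// The table cases below exercise this same mock API with explicit collection
// states. A sketch of a moved folder, assuming illustrative paths
// (oldStorePath, newStorePath, and newLocPath are hypothetical names):
//
//	mc := mockconnector.NewMockExchangeCollection(newStorePath, newLocPath, 1)
//	mc.ColState = data.MovedState // NewState, DeletedState, and NotMovedState also appear below
//	mc.PrevPath = oldStorePath    // storage path of the folder in the base snapshot
//	mc.Names[0] = testFileName    // item staged alongside the move
//	mc.Data[0] = testFileData
//	mc.DoNotMerge = true          // drop unlisted base items instead of merging them in
//
// The hierarchy builder reconciles these collections against the base snapshot
// to produce the expectedNode trees asserted in each case.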
@@ -1330,12 +1370,12 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto // - exchange // - user1 // - email - // - Inbox + // - Inbox_ID // - file1 - // - personal + // - personal_ID // - file1 // - file2 - // - work + // - work_ID // - file3 getBaseSnapshot := func() fs.Entry { return baseWithChildren( @@ -1347,7 +1387,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto }, []fs.Entry{ virtualfs.NewStaticDirectory( - encodeElements(testInboxDir)[0], + encodeElements(testInboxID)[0], []fs.Entry{ virtualfs.StreamingFileWithModTimeFromReader( encodeElements(inboxFileName1)[0], @@ -1355,7 +1395,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto io.NopCloser(bytes.NewReader(inboxFileData1)), ), virtualfs.NewStaticDirectory( - encodeElements(personalDir)[0], + encodeElements(personalID)[0], []fs.Entry{ virtualfs.StreamingFileWithModTimeFromReader( encodeElements(personalFileName1)[0], @@ -1370,7 +1410,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto }, ), virtualfs.NewStaticDirectory( - encodeElements(workDir)[0], + encodeElements(workID)[0], []fs.Entry{ virtualfs.StreamingFileWithModTimeFromReader( encodeElements(workFileName1)[0], @@ -1408,10 +1448,10 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto }, []*expectedNode{ { - name: testInboxDir, + name: testInboxID, children: []*expectedNode{ { - name: personalDir, + name: personalID, children: []*expectedNode{ { name: personalFileName2, @@ -1420,7 +1460,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto }, }, { - name: workDir, + name: workID, children: []*expectedNode{ { name: workFileName1, @@ -1436,14 +1476,17 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto { name: "MovesSubtree", inputCollections: func(t *testing.T) []data.BackupCollection { - newPath := makePath( + newStorePath := makePath( + t, + []string{testTenant, service, testUser, category, testInboxID + "2"}, + false) + newLocPath := makePath( t, []string{testTenant, service, testUser, category, testInboxDir + "2"}, - false, - ) + false) - mc := mockconnector.NewMockExchangeCollection(newPath, 0) - mc.PrevPath = inboxPath + mc := mockconnector.NewMockExchangeCollection(newStorePath, newLocPath, 0) + mc.PrevPath = inboxStorePath mc.ColState = data.MovedState return []data.BackupCollection{mc} @@ -1457,14 +1500,14 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto }, []*expectedNode{ { - name: testInboxDir + "2", + name: testInboxID + "2", children: []*expectedNode{ { name: inboxFileName1, children: []*expectedNode{}, }, { - name: personalDir, + name: personalID, children: []*expectedNode{ { name: personalFileName1, @@ -1477,7 +1520,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto }, }, { - name: workDir, + name: workID, children: []*expectedNode{ { name: workFileName1, @@ -1493,23 +1536,29 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto { name: "MovesChildAfterAncestorMove", inputCollections: func(t *testing.T) []data.BackupCollection { - newInboxPath := makePath( + newInboxStorePath := makePath( + t, + []string{testTenant, service, testUser, category, testInboxID + "2"}, + false) + newWorkStorePath := makePath( + t, + []string{testTenant, service, testUser, category, workID}, + false) + newInboxLocPath := makePath( t, []string{testTenant, 
service, testUser, category, testInboxDir + "2"}, - false, - ) - newWorkPath := makePath( + false) + newWorkLocPath := makePath( t, []string{testTenant, service, testUser, category, workDir}, - false, - ) + false) - inbox := mockconnector.NewMockExchangeCollection(newInboxPath, 0) - inbox.PrevPath = inboxPath + inbox := mockconnector.NewMockExchangeCollection(newInboxStorePath, newInboxLocPath, 0) + inbox.PrevPath = inboxStorePath inbox.ColState = data.MovedState - work := mockconnector.NewMockExchangeCollection(newWorkPath, 0) - work.PrevPath = workPath + work := mockconnector.NewMockExchangeCollection(newWorkStorePath, newWorkLocPath, 0) + work.PrevPath = workStorePath work.ColState = data.MovedState return []data.BackupCollection{inbox, work} @@ -1523,14 +1572,14 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto }, []*expectedNode{ { - name: testInboxDir + "2", + name: testInboxID + "2", children: []*expectedNode{ { name: inboxFileName1, children: []*expectedNode{}, }, { - name: personalDir, + name: personalID, children: []*expectedNode{ { name: personalFileName1, @@ -1545,7 +1594,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto }, }, { - name: workDir, + name: workID, children: []*expectedNode{ { name: workFileName1, @@ -1559,18 +1608,21 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto { name: "MovesChildAfterAncestorDelete", inputCollections: func(t *testing.T) []data.BackupCollection { - newWorkPath := makePath( + newWorkStorePath := makePath( + t, + []string{testTenant, service, testUser, category, workID}, + false) + newWorkLocPath := makePath( t, []string{testTenant, service, testUser, category, workDir}, - false, - ) + false) - inbox := mockconnector.NewMockExchangeCollection(inboxPath, 0) - inbox.PrevPath = inboxPath + inbox := mockconnector.NewMockExchangeCollection(inboxStorePath, inboxLocPath, 0) + inbox.PrevPath = inboxStorePath inbox.ColState = data.DeletedState - work := mockconnector.NewMockExchangeCollection(newWorkPath, 0) - work.PrevPath = workPath + work := mockconnector.NewMockExchangeCollection(newWorkStorePath, newWorkLocPath, 0) + work.PrevPath = workStorePath work.ColState = data.MovedState return []data.BackupCollection{inbox, work} @@ -1584,7 +1636,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto }, []*expectedNode{ { - name: workDir, + name: workID, children: []*expectedNode{ { name: workFileName1, @@ -1598,12 +1650,12 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto { name: "ReplaceDeletedDirectory", inputCollections: func(t *testing.T) []data.BackupCollection { - personal := mockconnector.NewMockExchangeCollection(personalPath, 0) - personal.PrevPath = personalPath + personal := mockconnector.NewMockExchangeCollection(personalStorePath, personalLocPath, 0) + personal.PrevPath = personalStorePath personal.ColState = data.DeletedState - work := mockconnector.NewMockExchangeCollection(personalPath, 0) - work.PrevPath = workPath + work := mockconnector.NewMockExchangeCollection(personalStorePath, personalLocPath, 0) + work.PrevPath = workStorePath work.ColState = data.MovedState return []data.BackupCollection{personal, work} @@ -1617,14 +1669,14 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto }, []*expectedNode{ { - name: testInboxDir, + name: testInboxID, children: []*expectedNode{ { name:
inboxFileName1, children: []*expectedNode{}, }, { - name: personalDir, + name: personalID, children: []*expectedNode{ { name: workFileName1, @@ -1639,11 +1691,11 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto { name: "ReplaceDeletedDirectoryWithNew", inputCollections: func(t *testing.T) []data.BackupCollection { - personal := mockconnector.NewMockExchangeCollection(personalPath, 0) - personal.PrevPath = personalPath + personal := mockconnector.NewMockExchangeCollection(personalStorePath, personalLocPath, 0) + personal.PrevPath = personalStorePath personal.ColState = data.DeletedState - newCol := mockconnector.NewMockExchangeCollection(personalPath, 1) + newCol := mockconnector.NewMockExchangeCollection(personalStorePath, personalLocPath, 1) newCol.ColState = data.NewState newCol.Names[0] = workFileName2 newCol.Data[0] = workFileData2 @@ -1659,14 +1711,14 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto }, []*expectedNode{ { - name: testInboxDir, + name: testInboxID, children: []*expectedNode{ { name: inboxFileName1, children: []*expectedNode{}, }, { - name: personalDir, + name: personalID, children: []*expectedNode{ { name: workFileName2, @@ -1675,7 +1727,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto }, }, { - name: workDir, + name: workID, children: []*expectedNode{ { name: workFileName1, @@ -1690,18 +1742,21 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto { name: "ReplaceMovedDirectory", inputCollections: func(t *testing.T) []data.BackupCollection { - newPersonalPath := makePath( + newPersonalStorePath := makePath( + t, + []string{testTenant, service, testUser, category, personalID}, + false) + newPersonalLocPath := makePath( t, []string{testTenant, service, testUser, category, personalDir}, - false, - ) + false) - personal := mockconnector.NewMockExchangeCollection(newPersonalPath, 0) - personal.PrevPath = personalPath + personal := mockconnector.NewMockExchangeCollection(newPersonalStorePath, newPersonalLocPath, 0) + personal.PrevPath = personalStorePath personal.ColState = data.MovedState - work := mockconnector.NewMockExchangeCollection(personalPath, 0) - work.PrevPath = workPath + work := mockconnector.NewMockExchangeCollection(personalStorePath, personalLocPath, 0) + work.PrevPath = workStorePath work.ColState = data.MovedState return []data.BackupCollection{personal, work} @@ -1715,14 +1770,14 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto }, []*expectedNode{ { - name: testInboxDir, + name: testInboxID, children: []*expectedNode{ { name: inboxFileName1, children: []*expectedNode{}, }, { - name: personalDir, + name: personalID, children: []*expectedNode{ { name: workFileName1, @@ -1732,7 +1787,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto }, }, { - name: personalDir, + name: personalID, children: []*expectedNode{ { name: personalFileName1, @@ -1748,14 +1803,17 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto { name: "MoveDirectoryAndMergeItems", inputCollections: func(t *testing.T) []data.BackupCollection { - newPersonalPath := makePath( + newPersonalStorePath := makePath( + t, + []string{testTenant, service, testUser, category, workID}, + false) + newPersonalLocPath := makePath( t, []string{testTenant, service, testUser, category, workDir}, - false, - ) + false) - personal := 
mockconnector.NewMockExchangeCollection(newPersonalPath, 2) - personal.PrevPath = personalPath + personal := mockconnector.NewMockExchangeCollection(newPersonalStorePath, newPersonalLocPath, 2) + personal.PrevPath = personalStorePath personal.ColState = data.MovedState personal.Names[0] = personalFileName2 personal.Data[0] = testFileData5 @@ -1773,14 +1831,14 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto }, []*expectedNode{ { - name: testInboxDir, + name: testInboxID, children: []*expectedNode{ { name: inboxFileName1, children: []*expectedNode{}, }, { - name: workDir, + name: workID, children: []*expectedNode{ { name: workFileName1, @@ -1791,7 +1849,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto }, }, { - name: workDir, + name: workID, children: []*expectedNode{ { name: personalFileName1, @@ -1812,23 +1870,29 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto { name: "MoveParentDeleteFileNoMergeSubtreeMerge", inputCollections: func(t *testing.T) []data.BackupCollection { - newInboxPath := makePath( + newInboxStorePath := makePath( + t, + []string{testTenant, service, testUser, category, personalID}, + false) + newInboxLocPath := makePath( t, []string{testTenant, service, testUser, category, personalDir}, - false, - ) + false) // This path is implicitly updated because we update the inbox path. If // we didn't update it here then it would end up at the old location // still. - newWorkPath := makePath( + newWorkStorePath := makePath( + t, + []string{testTenant, service, testUser, category, personalID, workID}, + false) + newWorkLocPath := makePath( t, []string{testTenant, service, testUser, category, personalDir, workDir}, - false, - ) + false) - inbox := mockconnector.NewMockExchangeCollection(newInboxPath, 1) - inbox.PrevPath = inboxPath + inbox := mockconnector.NewMockExchangeCollection(newInboxStorePath, newInboxLocPath, 1) + inbox.PrevPath = inboxStorePath inbox.ColState = data.MovedState inbox.DoNotMerge = true // First file in inbox is implicitly deleted as we're not merging items @@ -1836,8 +1900,8 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto inbox.Names[0] = inboxFileName2 inbox.Data[0] = inboxFileData2 - work := mockconnector.NewMockExchangeCollection(newWorkPath, 1) - work.PrevPath = workPath + work := mockconnector.NewMockExchangeCollection(newWorkStorePath, newWorkLocPath, 1) + work.PrevPath = workStorePath work.ColState = data.MovedState work.Names[0] = testFileName6 work.Data[0] = testFileData6 @@ -1853,7 +1917,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto }, []*expectedNode{ { - name: personalDir, + name: personalID, children: []*expectedNode{ { name: inboxFileName2, @@ -1861,7 +1925,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto data: inboxFileData2, }, { - name: personalDir, + name: personalID, children: []*expectedNode{ { name: personalFileName1, @@ -1874,7 +1938,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto }, }, { - name: workDir, + name: workID, children: []*expectedNode{ { name: workFileName1, @@ -1895,8 +1959,8 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto { name: "NoMoveParentDeleteFileNoMergeSubtreeMerge", inputCollections: func(t *testing.T) []data.BackupCollection { - inbox := mockconnector.NewMockExchangeCollection(inboxPath, 1) - inbox.PrevPath = inboxPath + 
inbox := mockconnector.NewMockExchangeCollection(inboxStorePath, inboxLocPath, 1) + inbox.PrevPath = inboxStorePath inbox.ColState = data.NotMovedState inbox.DoNotMerge = true // First file in inbox is implicitly deleted as we're not merging items @@ -1904,8 +1968,8 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto inbox.Names[0] = inboxFileName2 inbox.Data[0] = inboxFileData2 - work := mockconnector.NewMockExchangeCollection(workPath, 1) - work.PrevPath = workPath + work := mockconnector.NewMockExchangeCollection(workStorePath, workLocPath, 1) + work.PrevPath = workStorePath work.ColState = data.NotMovedState work.Names[0] = testFileName6 work.Data[0] = testFileData6 @@ -1921,7 +1985,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto }, []*expectedNode{ { - name: testInboxDir, + name: testInboxID, children: []*expectedNode{ { name: inboxFileName2, @@ -1929,7 +1993,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto data: inboxFileData2, }, { - name: personalDir, + name: personalID, children: []*expectedNode{ { name: personalFileName1, @@ -1942,7 +2006,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto }, }, { - name: workDir, + name: workID, children: []*expectedNode{ { name: workFileName1, @@ -1985,8 +2049,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto }, test.inputCollections(t), test.inputExcludes, - progress, - ) + progress) require.NoError(t, err) expectTree(t, ctx, test.expected, dirTree) @@ -2031,7 +2094,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSkipsDeletedSubtre }, []fs.Entry{ virtualfs.NewStaticDirectory( - encodeElements(testInboxDir)[0], + encodeElements(testInboxID)[0], []fs.Entry{ virtualfs.NewStaticDirectory( encodeElements(personalDir)[0], @@ -2056,7 +2119,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSkipsDeletedSubtre }, ), virtualfs.NewStaticDirectory( - encodeElements(testArchiveDir)[0], + encodeElements(testArchiveID)[0], []fs.Entry{ virtualfs.NewStaticDirectory( encodeElements(personalDir)[0], @@ -2093,7 +2156,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSkipsDeletedSubtre }, []*expectedNode{ { - name: testArchiveDir, + name: testArchiveID, children: []*expectedNode{ { name: personalDir, @@ -2122,7 +2185,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSkipsDeletedSubtre pending: map[string]*itemDetails{}, errs: fault.New(true), } - mc := mockconnector.NewMockExchangeCollection(suite.testPath, 1) + mc := mockconnector.NewMockExchangeCollection(suite.testStoragePath, suite.testStoragePath, 1) mc.PrevPath = mc.FullPath() mc.ColState = data.DeletedState msw := &mockSnapshotWalker{ @@ -2149,8 +2212,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSkipsDeletedSubtre }, collections, nil, - progress, - ) + progress) require.NoError(t, err) expectTree(t, ctx, expected, dirTree) @@ -2177,25 +2239,26 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsCorrectSubt const contactsDir = "contacts" - inboxPath := makePath( - suite.T(), - []string{testTenant, service, testUser, category, testInboxDir}, - false, + var ( + inboxPath = makePath( + suite.T(), + []string{testTenant, service, testUser, category, testInboxID}, + false) + + inboxFileName1 = testFileName + inboxFileName2 = testFileName2 + + inboxFileData1 = testFileData + inboxFileData1v2 = testFileData5 + inboxFileData2 = 
testFileData2 + + contactsFileName1 = testFileName3 + contactsFileData1 = testFileData3 + + eventsFileName1 = testFileName5 + eventsFileData1 = testFileData ) - inboxFileName1 := testFileName - inboxFileName2 := testFileName2 - - inboxFileData1 := testFileData - inboxFileData1v2 := testFileData5 - inboxFileData2 := testFileData2 - - contactsFileName1 := testFileName3 - contactsFileData1 := testFileData3 - - eventsFileName1 := testFileName5 - eventsFileData1 := testFileData - // Must be a function that returns a new instance each time as StreamingFile // can only return its Reader once. // baseSnapshot with the following layout: @@ -2220,7 +2283,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsCorrectSubt encodeElements(category)[0], []fs.Entry{ virtualfs.NewStaticDirectory( - encodeElements(testInboxDir)[0], + encodeElements(testInboxID)[0], []fs.Entry{ virtualfs.StreamingFileWithModTimeFromReader( encodeElements(inboxFileName1)[0], @@ -2274,7 +2337,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsCorrectSubt encodeElements(category)[0], []fs.Entry{ virtualfs.NewStaticDirectory( - encodeElements(testInboxDir)[0], + encodeElements(testInboxID)[0], []fs.Entry{ virtualfs.StreamingFileWithModTimeFromReader( encodeElements(inboxFileName1)[0], @@ -2337,7 +2400,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsCorrectSubt name: category, children: []*expectedNode{ { - name: testInboxDir, + name: testInboxID, children: []*expectedNode{ { name: inboxFileName1, @@ -2375,7 +2438,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsCorrectSubt errs: fault.New(true), } - mc := mockconnector.NewMockExchangeCollection(inboxPath, 1) + mc := mockconnector.NewMockExchangeCollection(inboxPath, inboxPath, 1) mc.PrevPath = mc.FullPath() mc.ColState = data.NotMovedState mc.Names[0] = inboxFileName2 diff --git a/src/internal/kopia/wrapper.go b/src/internal/kopia/wrapper.go index c829b5ccf..0c4f690fa 100644 --- a/src/internal/kopia/wrapper.go +++ b/src/internal/kopia/wrapper.go @@ -114,6 +114,13 @@ type IncrementalBase struct { SubtreePaths []*path.Builder } +// PrevRefs holds the repoRef and locationRef from the items +// that need to be merged in from prior snapshots. +type PrevRefs struct { + Repo path.Path + Location path.Path +} + // BackupCollections takes a set of collections and creates a kopia snapshot // with the data that they contain.
previousSnapshots is used for incremental // backups and should represent the base snapshot from which metadata is sourced @@ -128,7 +135,7 @@ func (w Wrapper) BackupCollections( tags map[string]string, buildTreeWithBase bool, errs *fault.Errors, -) (*BackupStats, *details.Builder, map[string]path.Path, error) { +) (*BackupStats, *details.Builder, map[string]PrevRefs, error) { if w.c == nil { return nil, nil, nil, clues.Stack(errNotConnected).WithClues(ctx) } @@ -143,7 +150,7 @@ func (w Wrapper) BackupCollections( progress := &corsoProgress{ pending: map[string]*itemDetails{}, deets: &details.Builder{}, - toMerge: map[string]path.Path{}, + toMerge: map[string]PrevRefs{}, errs: errs, } diff --git a/src/internal/kopia/wrapper_test.go b/src/internal/kopia/wrapper_test.go index e754ff9cf..0f698c365 100644 --- a/src/internal/kopia/wrapper_test.go +++ b/src/internal/kopia/wrapper_test.go @@ -27,7 +27,9 @@ import ( const ( testTenant = "a-tenant" testUser = "user1" + testInboxID = "Inbox_ID" testInboxDir = "Inbox" + testArchiveID = "Archive_ID" testArchiveDir = "Archive" testFileName = "file1" testFileName2 = "file2" @@ -144,8 +146,10 @@ type KopiaIntegrationSuite struct { ctx context.Context flush func() - testPath1 path.Path - testPath2 path.Path + storePath1 path.Path + storePath2 path.Path + locPath1 path.Path + locPath2 path.Path } func TestKopiaIntegrationSuite(t *testing.T) { @@ -164,21 +168,21 @@ func (suite *KopiaIntegrationSuite) SetupSuite() { testTenant, testUser, path.EmailCategory, - false, - ) + false) require.NoError(suite.T(), err) - suite.testPath1 = tmp + suite.storePath1 = tmp + suite.locPath1 = tmp tmp, err = path.Builder{}.Append(testArchiveDir).ToDataLayerExchangePathForCategory( testTenant, testUser, path.EmailCategory, - false, - ) + false) require.NoError(suite.T(), err) - suite.testPath2 = tmp + suite.storePath2 = tmp + suite.locPath2 = tmp } func (suite *KopiaIntegrationSuite) SetupTest() { @@ -199,13 +203,13 @@ func (suite *KopiaIntegrationSuite) TearDownTest() { func (suite *KopiaIntegrationSuite) TestBackupCollections() { collections := []data.BackupCollection{ mockconnector.NewMockExchangeCollection( - suite.testPath1, - 5, - ), + suite.storePath1, + suite.locPath1, + 5), mockconnector.NewMockExchangeCollection( - suite.testPath2, - 42, - ), + suite.storePath2, + suite.locPath2, + 42), } // tags that are supplied by the caller. 
This includes basic tags to support @@ -217,14 +221,14 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() { reasons := []Reason{ { - ResourceOwner: suite.testPath1.ResourceOwner(), - Service: suite.testPath1.Service(), - Category: suite.testPath1.Category(), + ResourceOwner: suite.storePath1.ResourceOwner(), + Service: suite.storePath1.Service(), + Category: suite.storePath1.Category(), }, { - ResourceOwner: suite.testPath2.ResourceOwner(), - Service: suite.testPath2.Service(), - Category: suite.testPath2.Category(), + ResourceOwner: suite.storePath2.ResourceOwner(), + Service: suite.storePath2.Service(), + Category: suite.storePath2.Category(), }, } @@ -311,7 +315,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() { prevSnaps = append(prevSnaps, IncrementalBase{ Manifest: snap, SubtreePaths: []*path.Builder{ - suite.testPath1.ToBuilder().Dir(), + suite.storePath1.ToBuilder().Dir(), }, }) }) @@ -342,13 +346,13 @@ func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() { tags[k] = "" } - dc1 := mockconnector.NewMockExchangeCollection(suite.testPath1, 1) - dc2 := mockconnector.NewMockExchangeCollection(suite.testPath2, 1) + dc1 := mockconnector.NewMockExchangeCollection(suite.storePath1, suite.locPath1, 1) + dc2 := mockconnector.NewMockExchangeCollection(suite.storePath2, suite.locPath2, 1) - fp1, err := suite.testPath1.Append(dc1.Names[0], true) + fp1, err := suite.storePath1.Append(dc1.Names[0], true) require.NoError(t, err) - fp2, err := suite.testPath2.Append(dc2.Names[0], true) + fp2, err := suite.storePath2.Append(dc2.Names[0], true) require.NoError(t, err) stats, _, _, err := w.BackupCollections( @@ -434,7 +438,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() { collections := []data.BackupCollection{ &mockBackupCollection{ - path: suite.testPath1, + path: suite.storePath1, streams: []data.Stream{ &mockconnector.MockExchangeData{ ID: testFileName, @@ -447,7 +451,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() { }, }, &mockBackupCollection{ - path: suite.testPath2, + path: suite.storePath2, streams: []data.Stream{ &mockconnector.MockExchangeData{ ID: testFileName3, @@ -487,7 +491,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() { // 5 file and 6 folder entries. 
assert.Len(t, deets.Details().Entries, 5+6) - failedPath, err := suite.testPath2.Append(testFileName4, true) + failedPath, err := suite.storePath2.Append(testFileName4, true) require.NoError(t, err) ic := i64counter{} @@ -792,8 +796,8 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() { cols: func() []data.BackupCollection { c := mockconnector.NewMockExchangeCollection( suite.testPath1, - 1, - ) + suite.testPath1, + 1) c.ColState = data.NotMovedState return []data.BackupCollection{c} diff --git a/src/internal/operations/backup.go b/src/internal/operations/backup.go index 5ffec2456..64debf159 100644 --- a/src/internal/operations/backup.go +++ b/src/internal/operations/backup.go @@ -244,7 +244,7 @@ func (op *BackupOperation) do( return nil, errors.Wrap(err, "connecting to m365") } - cs, excludes, err := produceBackupDataCollections(ctx, gc, op.Selectors, mdColls, op.Options) + cs, excludes, err := produceBackupDataCollections(ctx, gc, op.Selectors, mdColls, op.Options, op.Errors) if err != nil { return nil, errors.Wrap(err, "producing backup data collections") } @@ -313,6 +313,7 @@ func produceBackupDataCollections( sel selectors.Selector, metadata []data.RestoreCollection, ctrlOpts control.Options, + errs *fault.Errors, ) ([]data.BackupCollection, map[string]struct{}, error) { complete, closer := observe.MessageWithCompletion(ctx, observe.Safe("Discovering items to backup")) defer func() { @@ -321,9 +322,7 @@ func produceBackupDataCollections( closer() }() - cols, excludes, errs := gc.DataCollections(ctx, sel, metadata, ctrlOpts) - - return cols, excludes, errs + return gc.DataCollections(ctx, sel, metadata, ctrlOpts, errs) } // --------------------------------------------------------------------------- @@ -339,7 +338,7 @@ type backuper interface { tags map[string]string, buildTreeWithBase bool, errs *fault.Errors, - ) (*kopia.BackupStats, *details.Builder, map[string]path.Path, error) + ) (*kopia.BackupStats, *details.Builder, map[string]kopia.PrevRefs, error) } func selectorToReasons(sel selectors.Selector) []kopia.Reason { @@ -398,7 +397,7 @@ func consumeBackupDataCollections( backupID model.StableID, isIncremental bool, errs *fault.Errors, -) (*kopia.BackupStats, *details.Builder, map[string]path.Path, error) { +) (*kopia.BackupStats, *details.Builder, map[string]kopia.PrevRefs, error) { complete, closer := observe.MessageWithCompletion(ctx, observe.Safe("Backing up data")) defer func() { complete <- struct{}{} @@ -504,7 +503,7 @@ func mergeDetails( ms *store.Wrapper, detailsStore detailsReader, mans []*kopia.ManifestEntry, - shortRefsFromPrevBackup map[string]path.Path, + shortRefsFromPrevBackup map[string]kopia.PrevRefs, deets *details.Builder, errs *fault.Errors, ) error { @@ -560,13 +559,16 @@ func mergeDetails( continue } - newPath := shortRefsFromPrevBackup[rr.ShortRef()] - if newPath == nil { + prev, ok := shortRefsFromPrevBackup[rr.ShortRef()] + if !ok { // This entry was not sourced from a base snapshot or cached from a // previous backup, skip it. continue } + newPath := prev.Repo + newLoc := prev.Location + // Fixup paths in the item. item := entry.ItemInfo if err := details.UpdateItem(&item, newPath); err != nil { @@ -575,16 +577,27 @@ func mergeDetails( // TODO(ashmrtn): This may need updating if we start using this merge // strategy for items that were cached in kopia.
- itemUpdated := newPath.String() != rr.String() + var ( + itemUpdated = newPath.String() != rr.String() + newLocStr string + locBuilder *path.Builder + ) + + if newLoc != nil { + locBuilder = newLoc.ToBuilder() + newLocStr = newLoc.Folder(true) + itemUpdated = itemUpdated || newLocStr != entry.LocationRef + } deets.Add( newPath.String(), newPath.ShortRef(), newPath.ToBuilder().Dir().ShortRef(), + newLocStr, itemUpdated, item) - folders := details.FolderEntriesForPath(newPath.ToBuilder().Dir()) + folders := details.FolderEntriesForPath(newPath.ToBuilder().Dir(), locBuilder) deets.AddFoldersForItem(folders, item, itemUpdated) // Track how many entries we added so that we know if we got them all when diff --git a/src/internal/operations/backup_integration_test.go b/src/internal/operations/backup_integration_test.go index 9c3edb4f2..f1b03298f 100644 --- a/src/internal/operations/backup_integration_test.go +++ b/src/internal/operations/backup_integration_test.go @@ -402,7 +402,7 @@ func buildCollections( c.pathFolders, false) - mc := mockconnector.NewMockExchangeCollection(pth, len(c.items)) + mc := mockconnector.NewMockExchangeCollection(pth, pth, len(c.items)) for i := 0; i < len(c.items); i++ { mc.Names[i] = c.items[i].name @@ -777,8 +777,8 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() { p, err := path.FromDataLayerPath(dest.deets.Entries[0].RepoRef, true) require.NoError(t, err) - id, ok := cr.PathInCache(p.Folder()) - require.True(t, ok, "dir %s found in %s cache", p.Folder(), category) + id, ok := cr.PathInCache(p.Folder(false)) + require.True(t, ok, "dir %s found in %s cache", p.Folder(false), category) d := dataset[category].dests[destName] d.containerID = id @@ -895,8 +895,8 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() { p, err := path.FromDataLayerPath(deets.Entries[0].RepoRef, true) require.NoError(t, err) - id, ok := cr.PathInCache(p.Folder()) - require.True(t, ok, "dir %s found in %s cache", p.Folder(), category) + id, ok := cr.PathInCache(p.Folder(false)) + require.True(t, ok, "dir %s found in %s cache", p.Folder(false), category) dataset[category].dests[container3] = contDeets{id, deets} } diff --git a/src/internal/operations/backup_test.go b/src/internal/operations/backup_test.go index 12ebcef6c..6613f116b 100644 --- a/src/internal/operations/backup_test.go +++ b/src/internal/operations/backup_test.go @@ -89,8 +89,7 @@ type mockBackuper struct { bases []kopia.IncrementalBase, cs []data.BackupCollection, tags map[string]string, - buildTreeWithBase bool, - ) + buildTreeWithBase bool) } func (mbu mockBackuper) BackupCollections( @@ -101,7 +100,7 @@ func (mbu mockBackuper) BackupCollections( tags map[string]string, buildTreeWithBase bool, errs *fault.Errors, -) (*kopia.BackupStats, *details.Builder, map[string]path.Path, error) { +) (*kopia.BackupStats, *details.Builder, map[string]kopia.PrevRefs, error) { if mbu.checkFunc != nil { mbu.checkFunc(bases, cs, tags, buildTreeWithBase) } @@ -249,9 +248,10 @@ func makeFolderEntry( t.Helper() return &details.DetailsEntry{ - RepoRef: pb.String(), - ShortRef: pb.ShortRef(), - ParentRef: pb.Dir().ShortRef(), + RepoRef: pb.String(), + ShortRef: pb.ShortRef(), + ParentRef: pb.Dir().ShortRef(), + LocationRef: pb.PopFront().PopFront().PopFront().PopFront().Dir().String(), ItemInfo: details.ItemInfo{ Folder: &details.FolderInfo{ ItemType: details.FolderItem, @@ -277,17 +277,24 @@ func makePath(t *testing.T, elements []string, isItem bool) path.Path { func makeDetailsEntry( t 
*testing.T, p path.Path, + l path.Path, size int, updated bool, ) *details.DetailsEntry { t.Helper() + var lr string + if l != nil { + lr = l.PopFront().PopFront().PopFront().PopFront().Dir().String() + } + res := &details.DetailsEntry{ - RepoRef: p.String(), - ShortRef: p.ShortRef(), - ParentRef: p.ToBuilder().Dir().ShortRef(), - ItemInfo: details.ItemInfo{}, - Updated: updated, + RepoRef: p.String(), + ShortRef: p.ShortRef(), + ParentRef: p.ToBuilder().Dir().ShortRef(), + LocationRef: lr, + ItemInfo: details.ItemInfo{}, + Updated: updated, } switch p.Service() { @@ -607,6 +614,21 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() { }, true, ) + locationPath1 = makePath( + suite.T(), + []string{ + tenant, + path.OneDriveService.String(), + ro, + path.FilesCategory.String(), + "drives", + "drive-id", + "root:", + "work-display-name", + "item1", + }, + true, + ) itemPath2 = makePath( suite.T(), []string{ @@ -622,6 +644,21 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() { }, true, ) + locationPath2 = makePath( + suite.T(), + []string{ + tenant, + path.OneDriveService.String(), + ro, + path.FilesCategory.String(), + "drives", + "drive-id", + "root:", + "personal-display-name", + "item2", + }, + true, + ) itemPath3 = makePath( suite.T(), []string{ @@ -634,6 +671,18 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() { }, true, ) + locationPath3 = makePath( + suite.T(), + []string{ + tenant, + path.ExchangeService.String(), + ro, + path.EmailCategory.String(), + "personal-display-name", + "item3", + }, + true, + ) backup1 = backup.Backup{ BaseModel: model.BaseModel{ @@ -669,7 +718,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() { populatedModels map[model.StableID]backup.Backup populatedDetails map[string]*details.Details inputMans []*kopia.ManifestEntry - inputShortRefsFromPrevBackup map[string]path.Path + inputShortRefsFromPrevBackup map[string]kopia.PrevRefs errCheck assert.ErrorAssertionFunc expectedEntries []*details.DetailsEntry @@ -682,15 +731,18 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() { }, { name: "EmptyShortRefsFromPrevBackup", - inputShortRefsFromPrevBackup: map[string]path.Path{}, + inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{}, errCheck: assert.NoError, // Use empty slice so we don't error out on nil != empty. 
expectedEntries: []*details.DetailsEntry{}, }, { name: "BackupIDNotFound", - inputShortRefsFromPrevBackup: map[string]path.Path{ - itemPath1.ShortRef(): itemPath1, + inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{ + itemPath1.ShortRef(): { + Repo: itemPath1, + Location: locationPath1, + }, }, inputMans: []*kopia.ManifestEntry{ { @@ -704,8 +756,11 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() { }, { name: "DetailsIDNotFound", - inputShortRefsFromPrevBackup: map[string]path.Path{ - itemPath1.ShortRef(): itemPath1, + inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{ + itemPath1.ShortRef(): { + Repo: itemPath1, + Location: locationPath1, + }, }, inputMans: []*kopia.ManifestEntry{ { @@ -727,9 +782,15 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() { }, { name: "BaseMissingItems", - inputShortRefsFromPrevBackup: map[string]path.Path{ - itemPath1.ShortRef(): itemPath1, - itemPath2.ShortRef(): itemPath2, + inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{ + itemPath1.ShortRef(): { + Repo: itemPath1, + Location: locationPath1, + }, + itemPath2.ShortRef(): { + Repo: itemPath2, + Location: locationPath2, + }, }, inputMans: []*kopia.ManifestEntry{ { @@ -746,7 +807,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() { backup1.DetailsID: { DetailsModel: details.DetailsModel{ Entries: []details.DetailsEntry{ - *makeDetailsEntry(suite.T(), itemPath1, 42, false), + *makeDetailsEntry(suite.T(), itemPath1, itemPath1, 42, false), }, }, }, @@ -755,8 +816,11 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() { }, { name: "TooManyItems", - inputShortRefsFromPrevBackup: map[string]path.Path{ - itemPath1.ShortRef(): itemPath1, + inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{ + itemPath1.ShortRef(): { + Repo: itemPath1, + Location: locationPath1, + }, }, inputMans: []*kopia.ManifestEntry{ { @@ -779,7 +843,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() { backup1.DetailsID: { DetailsModel: details.DetailsModel{ Entries: []details.DetailsEntry{ - *makeDetailsEntry(suite.T(), itemPath1, 42, false), + *makeDetailsEntry(suite.T(), itemPath1, itemPath1, 42, false), }, }, }, @@ -788,8 +852,11 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() { }, { name: "BadBaseRepoRef", - inputShortRefsFromPrevBackup: map[string]path.Path{ - itemPath1.ShortRef(): itemPath1, + inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{ + itemPath1.ShortRef(): { + Repo: itemPath2, + Location: locationPath2, + }, }, inputMans: []*kopia.ManifestEntry{ { @@ -834,19 +901,21 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() { }, { name: "BadOneDrivePath", - inputShortRefsFromPrevBackup: map[string]path.Path{ - itemPath1.ShortRef(): makePath( - suite.T(), - []string{ - itemPath1.Tenant(), - path.OneDriveService.String(), - itemPath1.ResourceOwner(), - path.FilesCategory.String(), - "personal", - "item1", - }, - true, - ), + inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{ + itemPath1.ShortRef(): { + Repo: makePath( + suite.T(), + []string{ + itemPath1.Tenant(), + path.OneDriveService.String(), + itemPath1.ResourceOwner(), + path.FilesCategory.String(), + "personal", + "item1", + }, + true, + ), + }, }, inputMans: []*kopia.ManifestEntry{ { @@ -863,7 +932,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() { backup1.DetailsID: { DetailsModel: 
details.DetailsModel{ Entries: []details.DetailsEntry{ - *makeDetailsEntry(suite.T(), itemPath1, 42, false), + *makeDetailsEntry(suite.T(), itemPath1, itemPath1, 42, false), }, }, }, @@ -872,8 +941,11 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() { }, { name: "ItemMerged", - inputShortRefsFromPrevBackup: map[string]path.Path{ - itemPath1.ShortRef(): itemPath1, + inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{ + itemPath1.ShortRef(): { + Repo: itemPath1, + Location: locationPath1, + }, }, inputMans: []*kopia.ManifestEntry{ { @@ -890,20 +962,88 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() { backup1.DetailsID: { DetailsModel: details.DetailsModel{ Entries: []details.DetailsEntry{ - *makeDetailsEntry(suite.T(), itemPath1, 42, false), + *makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false), }, }, }, }, errCheck: assert.NoError, expectedEntries: []*details.DetailsEntry{ - makeDetailsEntry(suite.T(), itemPath1, 42, false), + makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false), + }, + }, + { + name: "ItemMergedNoLocation", + inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{ + itemPath1.ShortRef(): { + Repo: itemPath1, + }, + }, + inputMans: []*kopia.ManifestEntry{ + { + Manifest: makeManifest(suite.T(), backup1.ID, ""), + Reasons: []kopia.Reason{ + pathReason1, + }, + }, + }, + populatedModels: map[model.StableID]backup.Backup{ + backup1.ID: backup1, + }, + populatedDetails: map[string]*details.Details{ + backup1.DetailsID: { + DetailsModel: details.DetailsModel{ + Entries: []details.DetailsEntry{ + *makeDetailsEntry(suite.T(), itemPath1, nil, 42, false), + }, + }, + }, + }, + errCheck: assert.NoError, + expectedEntries: []*details.DetailsEntry{ + makeDetailsEntry(suite.T(), itemPath1, nil, 42, false), + }, + }, + { + name: "ItemMergedSameLocation", + inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{ + itemPath1.ShortRef(): { + Repo: itemPath1, + Location: itemPath1, + }, + }, + inputMans: []*kopia.ManifestEntry{ + { + Manifest: makeManifest(suite.T(), backup1.ID, ""), + Reasons: []kopia.Reason{ + pathReason1, + }, + }, + }, + populatedModels: map[model.StableID]backup.Backup{ + backup1.ID: backup1, + }, + populatedDetails: map[string]*details.Details{ + backup1.DetailsID: { + DetailsModel: details.DetailsModel{ + Entries: []details.DetailsEntry{ + *makeDetailsEntry(suite.T(), itemPath1, itemPath1, 42, false), + }, + }, + }, + }, + errCheck: assert.NoError, + expectedEntries: []*details.DetailsEntry{ + makeDetailsEntry(suite.T(), itemPath1, itemPath1, 42, false), }, }, { name: "ItemMergedExtraItemsInBase", - inputShortRefsFromPrevBackup: map[string]path.Path{ - itemPath1.ShortRef(): itemPath1, + inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{ + itemPath1.ShortRef(): { + Repo: itemPath1, + Location: locationPath1, + }, }, inputMans: []*kopia.ManifestEntry{ { @@ -920,21 +1060,24 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() { backup1.DetailsID: { DetailsModel: details.DetailsModel{ Entries: []details.DetailsEntry{ - *makeDetailsEntry(suite.T(), itemPath1, 42, false), - *makeDetailsEntry(suite.T(), itemPath2, 84, false), + *makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false), + *makeDetailsEntry(suite.T(), itemPath2, locationPath2, 84, false), }, }, }, }, errCheck: assert.NoError, expectedEntries: []*details.DetailsEntry{ - makeDetailsEntry(suite.T(), itemPath1, 42, false), + makeDetailsEntry(suite.T(), itemPath1, 
locationPath1, 42, false), }, }, { name: "ItemMoved", - inputShortRefsFromPrevBackup: map[string]path.Path{ - itemPath1.ShortRef(): itemPath2, + inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{ + itemPath1.ShortRef(): { + Repo: itemPath2, + Location: locationPath2, + }, }, inputMans: []*kopia.ManifestEntry{ { @@ -951,21 +1094,27 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() { backup1.DetailsID: { DetailsModel: details.DetailsModel{ Entries: []details.DetailsEntry{ - *makeDetailsEntry(suite.T(), itemPath1, 42, false), + *makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false), }, }, }, }, errCheck: assert.NoError, expectedEntries: []*details.DetailsEntry{ - makeDetailsEntry(suite.T(), itemPath2, 42, true), + makeDetailsEntry(suite.T(), itemPath2, locationPath2, 42, true), }, }, { name: "MultipleBases", - inputShortRefsFromPrevBackup: map[string]path.Path{ - itemPath1.ShortRef(): itemPath1, - itemPath3.ShortRef(): itemPath3, + inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{ + itemPath1.ShortRef(): { + Repo: itemPath1, + Location: locationPath1, + }, + itemPath3.ShortRef(): { + Repo: itemPath3, + Location: locationPath3, + }, }, inputMans: []*kopia.ManifestEntry{ { @@ -989,7 +1138,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() { backup1.DetailsID: { DetailsModel: details.DetailsModel{ Entries: []details.DetailsEntry{ - *makeDetailsEntry(suite.T(), itemPath1, 42, false), + *makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false), }, }, }, @@ -997,23 +1146,26 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() { DetailsModel: details.DetailsModel{ Entries: []details.DetailsEntry{ // This entry should not be picked due to a mismatch on Reasons. - *makeDetailsEntry(suite.T(), itemPath1, 84, false), + *makeDetailsEntry(suite.T(), itemPath1, locationPath1, 84, false), // This item should be picked. - *makeDetailsEntry(suite.T(), itemPath3, 37, false), + *makeDetailsEntry(suite.T(), itemPath3, locationPath3, 37, false), }, }, }, }, errCheck: assert.NoError, expectedEntries: []*details.DetailsEntry{ - makeDetailsEntry(suite.T(), itemPath1, 42, false), - makeDetailsEntry(suite.T(), itemPath3, 37, false), + makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false), + makeDetailsEntry(suite.T(), itemPath3, locationPath3, 37, false), }, }, { name: "SomeBasesIncomplete", - inputShortRefsFromPrevBackup: map[string]path.Path{ - itemPath1.ShortRef(): itemPath1, + inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{ + itemPath1.ShortRef(): { + Repo: itemPath1, + Location: locationPath1, + }, }, inputMans: []*kopia.ManifestEntry{ { @@ -1037,7 +1189,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() { backup1.DetailsID: { DetailsModel: details.DetailsModel{ Entries: []details.DetailsEntry{ - *makeDetailsEntry(suite.T(), itemPath1, 42, false), + *makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false), }, }, }, @@ -1045,14 +1197,14 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() { DetailsModel: details.DetailsModel{ Entries: []details.DetailsEntry{ // This entry should not be picked due to being incomplete. 
- *makeDetailsEntry(suite.T(), itemPath1, 84, false), + *makeDetailsEntry(suite.T(), itemPath1, locationPath1, 84, false), }, }, }, }, errCheck: assert.NoError, expectedEntries: []*details.DetailsEntry{ - makeDetailsEntry(suite.T(), itemPath1, 42, false), + makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false), }, }, } @@ -1075,6 +1227,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() { &deets, fault.New(true)) test.errCheck(t, err) + if err != nil { return } @@ -1103,8 +1256,12 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsFolders() itemPath1 = makePath( t, pathElems, - true, - ) + true) + + locPath1 = makePath( + t, + pathElems[:len(pathElems)-1], + false) backup1 = backup.Backup{ BaseModel: model.BaseModel{ @@ -1119,8 +1276,11 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsFolders() Category: itemPath1.Category(), } - inputToMerge = map[string]path.Path{ - itemPath1.ShortRef(): itemPath1, + inputToMerge = map[string]kopia.PrevRefs{ + itemPath1.ShortRef(): { + Repo: itemPath1, + Location: locPath1, + }, } inputMans = []*kopia.ManifestEntry{ @@ -1137,7 +1297,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsFolders() } itemSize = 42 - itemDetails = makeDetailsEntry(t, itemPath1, itemSize, false) + itemDetails = makeDetailsEntry(t, itemPath1, itemPath1, itemSize, false) populatedDetails = map[string]*details.Details{ backup1.DetailsID: { diff --git a/src/internal/operations/manifests.go b/src/internal/operations/manifests.go index a1d7997ed..38ef60e0c 100644 --- a/src/internal/operations/manifests.go +++ b/src/internal/operations/manifests.go @@ -135,13 +135,17 @@ func produceManifestsAndMetadata( // of manifests, that each manifest's Reason (owner, service, category) is only // included once. If a reason is duplicated by any two manifests, an error is // returned. -func verifyDistinctBases(ctx context.Context, mans []*kopia.ManifestEntry, errs fault.Adder) error { +func verifyDistinctBases(ctx context.Context, mans []*kopia.ManifestEntry, errs *fault.Errors) error { var ( failed bool reasons = map[string]manifest.ID{} ) for _, man := range mans { + if errs.Failed() { + break + } + // Incomplete snapshots are used only for kopia-assisted incrementals. The // fact that we need this check here makes it seem like this should live in // the kopia code. However, keeping it here allows for better debugging as @@ -173,7 +177,7 @@ func verifyDistinctBases(ctx context.Context, mans []*kopia.ManifestEntry, errs return clues.New("multiple base snapshots qualify").WithClues(ctx) } - return nil + return errs.Err() } // collectMetadata retrieves all metadata files associated with the manifest. 
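The verifyDistinctBases change above swaps the fault.Adder interface for the concrete *fault.Errors bus: the loop now breaks as soon as the bus reports failure, and the accumulated error, rather than a bare nil, is surfaced at the end. A minimal sketch of that control-flow pattern, assuming only the Failed() and Err() methods used in the hunk; checkAll and validateOne are hypothetical names:

package sketch

import (
	"context"

	"github.com/alcionai/corso/src/pkg/fault"
)

// checkAll mirrors the verifyDistinctBases control flow: iterate, bail out
// early once the error bus has already failed, and return whatever the bus
// accumulated instead of nil.
func checkAll(ctx context.Context, items []string, errs *fault.Errors) error {
	for _, item := range items {
		// Stop iterating if a prior item (or another caller sharing the
		// bus) has already marked the run as failed.
		if errs.Failed() {
			break
		}

		validateOne(ctx, item, errs)
	}

	// Nil when nothing failed; mirrors the `return errs.Err()` added above.
	return errs.Err()
}

func validateOne(ctx context.Context, item string, errs *fault.Errors) {
	// Per-item validation would go here; recoverable problems get recorded
	// on errs rather than returned.
}

The updated tests construct the bus with fault.New(true), which appears to enable fail-fast behavior, so the first recorded error also flips Failed().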
diff --git a/src/internal/operations/manifests_test.go b/src/internal/operations/manifests_test.go index 24c948320..a234b81e8 100644 --- a/src/internal/operations/manifests_test.go +++ b/src/internal/operations/manifests_test.go @@ -15,7 +15,6 @@ import ( "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/backup" "github.com/alcionai/corso/src/pkg/fault" - "github.com/alcionai/corso/src/pkg/fault/mock" "github.com/alcionai/corso/src/pkg/path" ) @@ -392,7 +391,7 @@ func (suite *OperationsManifestsUnitSuite) TestVerifyDistinctBases() { ctx, flush := tester.NewContext() defer flush() - err := verifyDistinctBases(ctx, test.mans, mock.NewAdder()) + err := verifyDistinctBases(ctx, test.mans, fault.New(true)) test.expect(t, err) }) } @@ -834,7 +833,7 @@ func (suite *BackupManifestSuite) TestBackupOperation_VerifyDistinctBases() { ctx, flush := tester.NewContext() defer flush() - test.errCheck(t, verifyDistinctBases(ctx, test.input, mock.NewAdder())) + test.errCheck(t, verifyDistinctBases(ctx, test.input, fault.New(true))) }) } } diff --git a/src/internal/streamstore/streamstore_test.go b/src/internal/streamstore/streamstore_test.go index cc3309a11..c49dad147 100644 --- a/src/internal/streamstore/streamstore_test.go +++ b/src/internal/streamstore/streamstore_test.go @@ -44,7 +44,7 @@ func (suite *StreamStoreIntegrationSuite) TestDetails() { deetsBuilder := &details.Builder{} - deetsBuilder.Add("ref", "shortref", "parentref", true, + deetsBuilder.Add("ref", "shortref", "parentref", "locationRef", true, details.ItemInfo{ Exchange: &details.ExchangeInfo{ Subject: "hello world", @@ -66,6 +66,7 @@ func (suite *StreamStoreIntegrationSuite) TestDetails() { assert.Equal(t, deets.Entries[0].ParentRef, readDeets.Entries[0].ParentRef) assert.Equal(t, deets.Entries[0].ShortRef, readDeets.Entries[0].ShortRef) assert.Equal(t, deets.Entries[0].RepoRef, readDeets.Entries[0].RepoRef) + assert.Equal(t, deets.Entries[0].LocationRef, readDeets.Entries[0].LocationRef) assert.Equal(t, deets.Entries[0].Updated, readDeets.Entries[0].Updated) assert.NotNil(t, readDeets.Entries[0].Exchange) assert.Equal(t, *deets.Entries[0].Exchange, *readDeets.Entries[0].Exchange) diff --git a/src/pkg/backup/backup.go b/src/pkg/backup/backup.go index 4422b3a47..36e4b16ff 100644 --- a/src/pkg/backup/backup.go +++ b/src/pkg/backup/backup.go @@ -14,7 +14,7 @@ import ( "github.com/alcionai/corso/src/pkg/selectors" ) -const Version = 1 +const Version = 2 // Backup represents the result of a backup operation type Backup struct { diff --git a/src/pkg/backup/details/details.go b/src/pkg/backup/details/details.go index bb392c223..9374c429d 100644 --- a/src/pkg/backup/details/details.go +++ b/src/pkg/backup/details/details.go @@ -15,11 +15,12 @@ import ( ) type folderEntry struct { - RepoRef string - ShortRef string - ParentRef string - Updated bool - Info ItemInfo + RepoRef string + ShortRef string + ParentRef string + LocationRef string + Updated bool + Info ItemInfo } // -------------------------------------------------------------------------------- @@ -110,10 +111,14 @@ type Builder struct { knownFolders map[string]folderEntry `json:"-"` } -func (b *Builder) Add(repoRef, shortRef, parentRef string, updated bool, info ItemInfo) { +func (b *Builder) Add( + repoRef, shortRef, parentRef, locationRef string, + updated bool, + info ItemInfo, +) { b.mu.Lock() defer b.mu.Unlock() - b.d.add(repoRef, shortRef, parentRef, updated, info) + b.d.add(repoRef, shortRef, parentRef, locationRef, updated, info) } func (b *Builder) 
Details() *Details { @@ -131,30 +136,65 @@ func (b *Builder) Details() *Details { // TODO(ashmrtn): If we never need to pre-populate the modified time of a folder // we should just merge this with AddFoldersForItem, have Add call // AddFoldersForItem, and unexport AddFoldersForItem. -func FolderEntriesForPath(parent *path.Builder) []folderEntry { +func FolderEntriesForPath(parent, location *path.Builder) []folderEntry { folders := []folderEntry{} + lfs := locationRefOf(location) for len(parent.Elements()) > 0 { - nextParent := parent.Dir() + var ( + nextParent = parent.Dir() + lr string + dn = parent.LastElem() + ) + + // TODO: We may have future cases where the storage hierarchy + // doesn't match the location hierarchy. + if lfs != nil { + lr = lfs.String() + + if len(lfs.Elements()) > 0 { + dn = lfs.LastElem() + } + } folders = append(folders, folderEntry{ - RepoRef: parent.String(), - ShortRef: parent.ShortRef(), - ParentRef: nextParent.ShortRef(), + RepoRef: parent.String(), + ShortRef: parent.ShortRef(), + ParentRef: nextParent.ShortRef(), + LocationRef: lr, Info: ItemInfo{ Folder: &FolderInfo{ ItemType: FolderItem, - DisplayName: parent.Elements()[len(parent.Elements())-1], + DisplayName: dn, }, }, }) parent = nextParent + + if lfs != nil { + lfs = lfs.Dir() + } } return folders } +// assumes the pb contains a path like: +// <tenant>/<service>/<owner>/<category>/... +// and returns a builder with only <folders>/... +func locationRefOf(pb *path.Builder) *path.Builder { + if pb == nil { + return nil + } + + for i := 0; i < 4; i++ { + pb = pb.PopFront() + } + + return pb +} + // AddFoldersForItem adds entries for the given folders. It skips adding entries that // have been added by previous calls. func (b *Builder) AddFoldersForItem(folders []folderEntry, itemInfo ItemInfo, updated bool) { @@ -202,13 +242,18 @@ type Details struct { DetailsModel } -func (d *Details) add(repoRef, shortRef, parentRef string, updated bool, info ItemInfo) { +func (d *Details) add( + repoRef, shortRef, parentRef, locationRef string, + updated bool, + info ItemInfo, +) { d.Entries = append(d.Entries, DetailsEntry{ - RepoRef: repoRef, - ShortRef: shortRef, - ParentRef: parentRef, - Updated: updated, - ItemInfo: info, + RepoRef: repoRef, + ShortRef: shortRef, + ParentRef: parentRef, + LocationRef: locationRef, + Updated: updated, + ItemInfo: info, }) } @@ -233,9 +278,21 @@ type DetailsEntry struct { RepoRef string `json:"repoRef"` ShortRef string `json:"shortRef"` ParentRef string `json:"parentRef,omitempty"` + + // LocationRef contains the logical path structure by its human-readable + // display names. IE: If an item is located at "/Inbox/Important", we + // hold that string in the LocationRef, while the actual IDs of each + // container are used for the RepoRef. + // LocationRef only holds the container values, and does not include + // the metadata prefixes (tenant, service, owner, etc) found in the + // repoRef. + // Currently only implemented for Exchange Calendars. + LocationRef string `json:"locationRef,omitempty"` + + // Indicates the item was added or updated in this backup // Always `true` for full backups Updated bool `json:"updated"` + ItemInfo } @@ -316,18 +373,21 @@ const ( FolderItem ItemType = iota + 300 ) -func UpdateItem(item *ItemInfo, newPath path.Path) error { +func UpdateItem(item *ItemInfo, repoPath path.Path) error { // Only OneDrive and SharePoint have information about parent folders // contained in them.
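Note: locationRefOf's four-element pop mirrors the metadata prefix (tenant, service, owner, category) described in the LocationRef comment above. A minimal, hedged sketch of the effect, using only Builder methods that appear elsewhere in this diff; all segment values are illustrative:

    package main

    import (
        "fmt"

        "github.com/alcionai/corso/src/pkg/path"
    )

    func main() {
        // A repoRef-style builder: four metadata segments, then containers.
        pb := path.Builder{}.Append(
            "tenantID", "exchange", "userPN", "events", // metadata prefix
            "calendarID", "subCalendarID") // container folders

        // locationRefOf pops the metadata prefix, leaving the containers.
        for i := 0; i < 4; i++ {
            pb = pb.PopFront()
        }

        fmt.Println(pb.String()) // calendarID/subCalendarID
    }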
+ var updatePath func(path.Path) error + switch item.infoType() { case SharePointItem: - return item.SharePoint.UpdateParentPath(newPath) - + updatePath = item.SharePoint.UpdateParentPath case OneDriveItem: - return item.OneDrive.UpdateParentPath(newPath) + updatePath = item.OneDrive.UpdateParentPath + default: + return nil } - return nil + return updatePath(repoPath) } // ItemInfo is a oneOf that contains service specific diff --git a/src/pkg/backup/details/details_test.go b/src/pkg/backup/details/details_test.go index efc654246..cf4cdcbb5 100644 --- a/src/pkg/backup/details/details_test.go +++ b/src/pkg/backup/details/details_test.go @@ -39,8 +39,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() { { name: "no info", entry: DetailsEntry{ - RepoRef: "reporef", - ShortRef: "deadbeef", + RepoRef: "reporef", + ShortRef: "deadbeef", + LocationRef: "locationref", }, expectHs: []string{"ID"}, expectVs: []string{"deadbeef"}, @@ -48,8 +49,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() { { name: "exchange event info", entry: DetailsEntry{ - RepoRef: "reporef", - ShortRef: "deadbeef", + RepoRef: "reporef", + ShortRef: "deadbeef", + LocationRef: "locationref", ItemInfo: ItemInfo{ Exchange: &ExchangeInfo{ ItemType: ExchangeEvent, @@ -67,8 +69,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() { { name: "exchange contact info", entry: DetailsEntry{ - RepoRef: "reporef", - ShortRef: "deadbeef", + RepoRef: "reporef", + ShortRef: "deadbeef", + LocationRef: "locationref", ItemInfo: ItemInfo{ Exchange: &ExchangeInfo{ ItemType: ExchangeContact, @@ -82,8 +85,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() { { name: "exchange mail info", entry: DetailsEntry{ - RepoRef: "reporef", - ShortRef: "deadbeef", + RepoRef: "reporef", + ShortRef: "deadbeef", + LocationRef: "locationref", ItemInfo: ItemInfo{ Exchange: &ExchangeInfo{ ItemType: ExchangeMail, @@ -99,8 +103,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() { { name: "sharepoint info", entry: DetailsEntry{ - RepoRef: "reporef", - ShortRef: "deadbeef", + RepoRef: "reporef", + ShortRef: "deadbeef", + LocationRef: "locationref", ItemInfo: ItemInfo{ SharePoint: &SharePointInfo{ ItemName: "itemName", @@ -128,8 +133,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() { { name: "oneDrive info", entry: DetailsEntry{ - RepoRef: "reporef", - ShortRef: "deadbeef", + RepoRef: "reporef", + ShortRef: "deadbeef", + LocationRef: "locationref", ItemInfo: ItemInfo{ OneDrive: &OneDriveInfo{ ItemName: "itemName", @@ -157,37 +163,57 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() { } var pathItemsTable = []struct { - name string - ents []DetailsEntry - expectRefs []string + name string + ents []DetailsEntry + expectRepoRefs []string + expectLocationRefs []string }{ { - name: "nil entries", - ents: nil, - expectRefs: []string{}, + name: "nil entries", + ents: nil, + expectRepoRefs: []string{}, + expectLocationRefs: []string{}, }, { name: "single entry", ents: []DetailsEntry{ - {RepoRef: "abcde"}, + { + RepoRef: "abcde", + LocationRef: "locationref", + }, }, - expectRefs: []string{"abcde"}, + expectRepoRefs: []string{"abcde"}, + expectLocationRefs: []string{"locationref"}, }, { name: "multiple entries", ents: []DetailsEntry{ - {RepoRef: "abcde"}, - {RepoRef: "12345"}, + { + RepoRef: "abcde", + LocationRef: "locationref", + }, + { + RepoRef: "12345", + LocationRef: "locationref2", + }, }, - expectRefs: []string{"abcde", "12345"}, + expectRepoRefs: 
[]string{"abcde", "12345"}, + expectLocationRefs: []string{"locationref", "locationref2"}, }, { name: "multiple entries with folder", ents: []DetailsEntry{ - {RepoRef: "abcde"}, - {RepoRef: "12345"}, { - RepoRef: "deadbeef", + RepoRef: "abcde", + LocationRef: "locationref", + }, + { + RepoRef: "12345", + LocationRef: "locationref2", + }, + { + RepoRef: "deadbeef", + LocationRef: "locationref3", ItemInfo: ItemInfo{ Folder: &FolderInfo{ DisplayName: "test folder", @@ -195,7 +221,8 @@ var pathItemsTable = []struct { }, }, }, - expectRefs: []string{"abcde", "12345"}, + expectRepoRefs: []string{"abcde", "12345"}, + expectLocationRefs: []string{"locationref", "locationref2"}, }, } @@ -207,7 +234,7 @@ func (suite *DetailsUnitSuite) TestDetailsModel_Path() { Entries: test.ents, }, } - assert.Equal(t, test.expectRefs, d.Paths()) + assert.ElementsMatch(t, test.expectRepoRefs, d.Paths()) }) } } @@ -222,10 +249,11 @@ func (suite *DetailsUnitSuite) TestDetailsModel_Items() { } ents := d.Items() - assert.Len(t, ents, len(test.expectRefs)) + assert.Len(t, ents, len(test.expectRepoRefs)) for _, e := range ents { - assert.Contains(t, test.expectRefs, e.RepoRef) + assert.Contains(t, test.expectRepoRefs, e.RepoRef) + assert.Contains(t, test.expectLocationRefs, e.LocationRef) } }) } @@ -253,9 +281,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() { name: "MultipleFolders", folders: []folderEntry{ { - RepoRef: "rr1", - ShortRef: "sr1", - ParentRef: "pr1", + RepoRef: "rr1", + ShortRef: "sr1", + ParentRef: "pr1", + LocationRef: "lr1", Info: ItemInfo{ Folder: &FolderInfo{ Modified: folderTimeOlderThanItem, @@ -263,9 +292,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() { }, }, { - RepoRef: "rr2", - ShortRef: "sr2", - ParentRef: "pr2", + RepoRef: "rr2", + ShortRef: "sr2", + ParentRef: "pr2", + LocationRef: "lr2", Info: ItemInfo{ Folder: &FolderInfo{ Modified: folderTimeNewerThanItem, @@ -283,9 +313,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() { name: "MultipleFoldersWithRepeats", folders: []folderEntry{ { - RepoRef: "rr1", - ShortRef: "sr1", - ParentRef: "pr1", + RepoRef: "rr1", + ShortRef: "sr1", + ParentRef: "pr1", + LocationRef: "lr1", Info: ItemInfo{ Folder: &FolderInfo{ Modified: folderTimeOlderThanItem, @@ -293,9 +324,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() { }, }, { - RepoRef: "rr2", - ShortRef: "sr2", - ParentRef: "pr2", + RepoRef: "rr2", + ShortRef: "sr2", + ParentRef: "pr2", + LocationRef: "lr2", Info: ItemInfo{ Folder: &FolderInfo{ Modified: folderTimeOlderThanItem, @@ -303,9 +335,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() { }, }, { - RepoRef: "rr1", - ShortRef: "sr1", - ParentRef: "pr1", + RepoRef: "rr1", + ShortRef: "sr1", + ParentRef: "pr1", + LocationRef: "lr1", Info: ItemInfo{ Folder: &FolderInfo{ Modified: folderTimeOlderThanItem, @@ -313,9 +346,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() { }, }, { - RepoRef: "rr3", - ShortRef: "sr3", - ParentRef: "pr3", + RepoRef: "rr3", + ShortRef: "sr3", + ParentRef: "pr3", + LocationRef: "lr3", Info: ItemInfo{ Folder: &FolderInfo{ Modified: folderTimeNewerThanItem, @@ -363,18 +397,20 @@ func (suite *DetailsUnitSuite) TestDetails_AddFoldersUpdate() { name: "ItemNotUpdated_NoChange", folders: []folderEntry{ { - RepoRef: "rr1", - ShortRef: "sr1", - ParentRef: "pr1", + RepoRef: "rr1", + ShortRef: "sr1", + ParentRef: "pr1", + LocationRef: "lr1", Info: ItemInfo{ Folder: &FolderInfo{}, }, Updated: true, }, { - RepoRef: "rr2", - ShortRef: "sr2", - ParentRef: "pr2", + 
RepoRef: "rr2", + ShortRef: "sr2", + ParentRef: "pr2", + LocationRef: "lr2", Info: ItemInfo{ Folder: &FolderInfo{}, }, @@ -390,17 +426,19 @@ func (suite *DetailsUnitSuite) TestDetails_AddFoldersUpdate() { name: "ItemUpdated", folders: []folderEntry{ { - RepoRef: "rr1", - ShortRef: "sr1", - ParentRef: "pr1", + RepoRef: "rr1", + ShortRef: "sr1", + ParentRef: "pr1", + LocationRef: "lr1", Info: ItemInfo{ Folder: &FolderInfo{}, }, }, { - RepoRef: "rr2", - ShortRef: "sr2", - ParentRef: "pr2", + RepoRef: "rr2", + ShortRef: "sr2", + ParentRef: "pr2", + LocationRef: "lr2", Info: ItemInfo{ Folder: &FolderInfo{}, }, @@ -482,9 +520,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFoldersDifferentServices() { for _, test := range table { suite.T().Run(test.name, func(t *testing.T) { folder := folderEntry{ - RepoRef: "rr1", - ShortRef: "sr1", - ParentRef: "pr1", + RepoRef: "rr1", + ShortRef: "sr1", + ParentRef: "pr1", + LocationRef: "lr1", Info: ItemInfo{ Folder: &FolderInfo{}, }, @@ -562,7 +601,8 @@ func (suite *DetailsUnitSuite) TestUpdateItem() { table := []struct { name string input ItemInfo - newPath path.Path + repoPath path.Path + locPath path.Path errCheck assert.ErrorAssertionFunc expectedItem ItemInfo }{ @@ -616,7 +656,8 @@ func (suite *DetailsUnitSuite) TestUpdateItem() { ParentPath: folder1, }, }, - newPath: newOneDrivePath, + repoPath: newOneDrivePath, + locPath: newOneDrivePath, errCheck: assert.NoError, expectedItem: ItemInfo{ OneDrive: &OneDriveInfo{ @@ -633,7 +674,8 @@ func (suite *DetailsUnitSuite) TestUpdateItem() { ParentPath: folder1, }, }, - newPath: newOneDrivePath, + repoPath: newOneDrivePath, + locPath: newOneDrivePath, errCheck: assert.NoError, expectedItem: ItemInfo{ SharePoint: &SharePointInfo{ @@ -650,7 +692,8 @@ func (suite *DetailsUnitSuite) TestUpdateItem() { ParentPath: folder1, }, }, - newPath: badOneDrivePath, + repoPath: badOneDrivePath, + locPath: badOneDrivePath, errCheck: assert.Error, }, { @@ -661,7 +704,8 @@ func (suite *DetailsUnitSuite) TestUpdateItem() { ParentPath: folder1, }, }, - newPath: badOneDrivePath, + repoPath: badOneDrivePath, + locPath: badOneDrivePath, errCheck: assert.Error, }, } @@ -669,7 +713,7 @@ func (suite *DetailsUnitSuite) TestUpdateItem() { for _, test := range table { suite.T().Run(test.name, func(t *testing.T) { item := test.input - err := UpdateItem(&item, test.newPath) + err := UpdateItem(&item, test.repoPath) test.errCheck(t, err) if err != nil { @@ -680,3 +724,162 @@ func (suite *DetailsUnitSuite) TestUpdateItem() { }) } } + +var ( + basePath = path.Builder{}.Append("ten", "serv", "user", "type") + baseFolderEnts = []folderEntry{ + { + RepoRef: basePath.String(), + ShortRef: basePath.ShortRef(), + ParentRef: basePath.Dir().ShortRef(), + LocationRef: "", + Info: ItemInfo{ + Folder: &FolderInfo{ + ItemType: FolderItem, + DisplayName: "type", + }, + }, + }, + { + RepoRef: basePath.Dir().String(), + ShortRef: basePath.Dir().ShortRef(), + ParentRef: basePath.Dir().Dir().ShortRef(), + LocationRef: "", + Info: ItemInfo{ + Folder: &FolderInfo{ + ItemType: FolderItem, + DisplayName: "user", + }, + }, + }, + { + RepoRef: basePath.Dir().Dir().String(), + ShortRef: basePath.Dir().Dir().ShortRef(), + ParentRef: basePath.Dir().Dir().Dir().ShortRef(), + LocationRef: "", + Info: ItemInfo{ + Folder: &FolderInfo{ + ItemType: FolderItem, + DisplayName: "serv", + }, + }, + }, + { + RepoRef: basePath.Dir().Dir().Dir().String(), + ShortRef: basePath.Dir().Dir().Dir().ShortRef(), + ParentRef: "", + LocationRef: "", + Info: ItemInfo{ + Folder: 
&FolderInfo{ + ItemType: FolderItem, + DisplayName: "ten", + }, + }, + }, + } +) + +func folderEntriesFor(pathElems []string, locElems []string) []folderEntry { + p := basePath.Append(pathElems...) + l := path.Builder{}.Append(locElems...) + + ents := make([]folderEntry, 0, len(pathElems)+4) + + for range pathElems { + dn := p.LastElem() + if l != nil && len(l.Elements()) > 0 { + dn = l.LastElem() + } + + fe := folderEntry{ + RepoRef: p.String(), + ShortRef: p.ShortRef(), + ParentRef: p.Dir().ShortRef(), + LocationRef: l.String(), + Info: ItemInfo{ + Folder: &FolderInfo{ + ItemType: FolderItem, + DisplayName: dn, + }, + }, + } + + l = l.Dir() + p = p.Dir() + + ents = append(ents, fe) + } + + return append(ents, baseFolderEnts...) +} + +func (suite *DetailsUnitSuite) TestFolderEntriesForPath() { + var ( + fnords = []string{"fnords"} + smarf = []string{"fnords", "smarf"} + beau = []string{"beau"} + regard = []string{"beau", "regard"} + ) + + table := []struct { + name string + parent *path.Builder + location *path.Builder + expect []folderEntry + }{ + { + name: "base path, parent only", + parent: basePath, + expect: baseFolderEnts, + }, + { + name: "base path with location", + parent: basePath, + location: basePath, + expect: baseFolderEnts, + }, + { + name: "single depth parent only", + parent: basePath.Append(fnords...), + expect: folderEntriesFor(fnords, nil), + }, + { + name: "single depth with location", + parent: basePath.Append(fnords...), + location: basePath.Append(beau...), + expect: folderEntriesFor(fnords, beau), + }, + { + name: "two depth parent only", + parent: basePath.Append(smarf...), + expect: folderEntriesFor(smarf, nil), + }, + { + name: "two depth with location", + parent: basePath.Append(smarf...), + location: basePath.Append(regard...), + expect: folderEntriesFor(smarf, regard), + }, + { + name: "mismatched depth, parent longer", + parent: basePath.Append(smarf...), + location: basePath.Append(beau...), + expect: folderEntriesFor(smarf, beau), + }, + // We can't handle this right now. But we don't have any cases + // which immediately require it, either. Keeping in the test + // as a reminder that this might be required at some point. + // { + // name: "mismatched depth, location longer", + // parent: basePath.Append(fnords...), + // location: basePath.Append(regard...), + // expect: folderEntriesFor(fnords, regard), + // }, + } + for _, test := range table { + suite.T().Run(test.name, func(t *testing.T) { + result := FolderEntriesForPath(test.parent, test.location) + assert.ElementsMatch(t, test.expect, result) + }) + } +} diff --git a/src/pkg/fault/fault.go b/src/pkg/fault/fault.go index d060e969b..ee560965b 100644 --- a/src/pkg/fault/fault.go +++ b/src/pkg/fault/fault.go @@ -102,11 +102,6 @@ func (e *Errors) setErr(err error) *Errors { return e } -type Adder interface { - Add(err error) *Errors - Failed() bool -} - // Add appends the error to the slice of recoverable and // iterated errors (ie: errors.errs). If failFast is true, // the first Added error will get copied to errors.err, diff --git a/src/pkg/fault/mock/mock.go b/src/pkg/fault/mock/mock.go deleted file mode 100644 index 7076f134c..000000000 --- a/src/pkg/fault/mock/mock.go +++ /dev/null @@ -1,22 +0,0 @@ -package mock - -import "github.com/alcionai/corso/src/pkg/fault" - -// Adder mocks an adder interface for testing. 
-type Adder struct { - FailFast bool - Errs []error -} - -func NewAdder() *Adder { - return &Adder{Errs: []error{}} -} - -func (ma *Adder) Add(err error) *fault.Errors { - ma.Errs = append(ma.Errs, err) - return fault.New(true) -} - -func (ma *Adder) Failed() bool { - return ma.FailFast && len(ma.Errs) > 0 -} diff --git a/src/pkg/logger/logger.go index 6438f3563..31ec1025f 100644 --- a/src/pkg/logger/logger.go +++ b/src/pkg/logger/logger.go @@ -162,8 +162,9 @@ func genLogger(level logLevel, logfile string) (*zapcore.Core, *zap.SugaredLogge // then try to set up a logger directly var ( - lgr *zap.Logger - err error + lgr *zap.Logger + err error + opts = []zap.Option{zap.AddStacktrace(zapcore.PanicLevel)} ) if level != Production { @@ -178,12 +179,13 @@ func genLogger(level logLevel, logfile string) (*zapcore.Core, *zap.SugaredLogge cfg.Level = zap.NewAtomicLevelAt(zapcore.FatalLevel) } - opts := []zap.Option{} - if readableOutput { - opts = append(opts, zap.WithCaller(false), zap.AddStacktrace(zapcore.DPanicLevel)) + opts = append(opts, zap.WithCaller(false)) cfg.EncoderConfig.EncodeTime = zapcore.TimeEncoderOfLayout("15:04:05.00") - cfg.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder + + if logfile == "stderr" || logfile == "stdout" { + cfg.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder + } } cfg.OutputPaths = []string{logfile} @@ -191,7 +193,7 @@ func genLogger(level logLevel, logfile string) (*zapcore.Core, *zap.SugaredLogge } else { cfg := zap.NewProductionConfig() cfg.OutputPaths = []string{logfile} - lgr, err = cfg.Build() + lgr, err = cfg.Build(opts...) } // fall back to the core config if the default creation fails diff --git a/src/pkg/path/onedrive.go index 35738289c..5f2adf2db 100644 --- a/src/pkg/path/onedrive.go +++ b/src/pkg/path/onedrive.go @@ -20,7 +20,7 @@ func ToOneDrivePath(p Path) (*DrivePath, error) { if len(folders) < 3 { return nil, clues. New("folder path doesn't match expected format for OneDrive items"). - With("path_folders", p.Folder()) + With("path_folders", p.Folder(false)) } return &DrivePath{DriveID: folders[1], Folders: folders[3:]}, nil diff --git a/src/pkg/path/path.go index b0c4456c4..6db2ae0e9 100644 --- a/src/pkg/path/path.go +++ b/src/pkg/path/path.go @@ -86,7 +86,7 @@ type Path interface { Category() CategoryType Tenant() string ResourceOwner() string - Folder() string + Folder(bool) string Folders() []string Item() string // PopFront returns a Builder object with the first element (left-side) @@ -140,6 +140,14 @@ func (pb Builder) UnescapeAndAppend(elements ...string) (*Builder, error) { return res, nil } +// SplitUnescapeAppend takes in an escaped string representing a directory +// path, splits the string, and appends it to the current builder. +func (pb Builder) SplitUnescapeAppend(s string) (*Builder, error) { + elems := Split(TrimTrailingSlash(s)) + + return pb.UnescapeAndAppend(elems...) +} + // Append creates a copy of this Builder and adds the given elements to the // end of the new Builder. Elements are added in the order they are passed. func (pb Builder) Append(elements ...string) *Builder { @@ -205,6 +213,14 @@ func (pb Builder) Dir() *Builder { } } +func (pb Builder) LastElem() string { + if len(pb.elements) == 0 { + return "" + } + + return pb.elements[len(pb.elements)-1] +} + // String returns a string that contains all path elements joined together. // Elements of the path that need escaping are escaped.
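Note: a minimal sketch of the two Builder helpers added above; the input string is illustrative, and Split and TrimTrailingSlash are the package helpers named in the hunk:

    pb, err := path.Builder{}.SplitUnescapeAppend("Inbox/Important/")
    if err != nil {
        // malformed escape sequences surface here via UnescapeAndAppend
    }

    // pb.Elements()        -> ["Inbox", "Important"] (trailing slash trimmed)
    // pb.LastElem()        -> "Important"
    // pb.Dir().LastElem()  -> "Inbox"
    // An empty builder's LastElem() returns "".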
func (pb Builder) String() string { @@ -247,11 +263,6 @@ func (pb Builder) Elements() []string { return append([]string{}, pb.elements...) } -//nolint:unused -func (pb Builder) join(start, end int) string { - return join(pb.elements[start:end]) -} - func verifyInputValues(tenant, resourceOwner string) error { if len(tenant) == 0 { return clues.Stack(errMissingSegment, errors.New("tenant")) diff --git a/src/pkg/path/path_test.go b/src/pkg/path/path_test.go index 05f491e7f..ec8a6cc9e 100644 --- a/src/pkg/path/path_test.go +++ b/src/pkg/path/path_test.go @@ -480,13 +480,87 @@ func (suite *PathUnitSuite) TestFromStringErrors() { } } +func (suite *PathUnitSuite) TestFolder() { + table := []struct { + name string + p func(t *testing.T) Path + escape bool + expectFolder string + expectSplit []string + }{ + { + name: "clean path", + p: func(t *testing.T) Path { + p, err := Builder{}. + Append("a", "b", "c"). + ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false) + require.NoError(t, err) + + return p + }, + expectFolder: "a/b/c", + expectSplit: []string{"a", "b", "c"}, + }, + { + name: "clean path escaped", + p: func(t *testing.T) Path { + p, err := Builder{}. + Append("a", "b", "c"). + ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false) + require.NoError(t, err) + + return p + }, + escape: true, + expectFolder: "a/b/c", + expectSplit: []string{"a", "b", "c"}, + }, + { + name: "escapable path", + p: func(t *testing.T) Path { + p, err := Builder{}. + Append("a/", "b", "c"). + ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false) + require.NoError(t, err) + + return p + }, + expectFolder: "a//b/c", + expectSplit: []string{"a", "b", "c"}, + }, + { + name: "escapable path escaped", + p: func(t *testing.T) Path { + p, err := Builder{}. + Append("a/", "b", "c"). + ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false) + require.NoError(t, err) + + return p + }, + escape: true, + expectFolder: "a\\//b/c", + expectSplit: []string{"a\\/", "b", "c"}, + }, + } + for _, test := range table { + suite.T().Run(test.name, func(t *testing.T) { + p := test.p(t) + result := p.Folder(test.escape) + assert.Equal(t, test.expectFolder, result) + assert.Equal(t, test.expectSplit, Split(result)) + }) + } +} + func (suite *PathUnitSuite) TestFromString() { const ( - testTenant = "tenant" - testUser = "user" - testElement1 = "folder" - testElement2 = "folder2" - testElement3 = "other" + testTenant = "tenant" + testUser = "user" + testElement1 = "folder/" + testElementTrimmed = "folder" + testElement2 = "folder2" + testElement3 = "other" ) isItem := []struct { @@ -509,9 +583,13 @@ func (suite *PathUnitSuite) TestFromString() { // Expected result for Folder() if path is marked as a folder. expectedFolder string // Expected result for Item() if path is marked as an item. - expectedItem string + // Expected result for Split(Folder()) if path is marked as a folder. + expectedSplit []string + expectedItem string // Expected result for Folder() if path is marked as an item. expectedItemFolder string + // Expected result for Split(Folder()) if path is marked as an item. 
+ expectedItemSplit []string }{ { name: "BasicPath", @@ -525,16 +603,25 @@ func (suite *PathUnitSuite) TestFromString() { ), expectedFolder: fmt.Sprintf( "%s/%s/%s", - testElement1, + testElementTrimmed, testElement2, testElement3, ), + expectedSplit: []string{ + testElementTrimmed, + testElement2, + testElement3, + }, expectedItem: testElement3, expectedItemFolder: fmt.Sprintf( "%s/%s", - testElement1, + testElementTrimmed, testElement2, ), + expectedItemSplit: []string{ + testElementTrimmed, + testElement2, + }, }, { name: "PathWithEmptyElements", @@ -542,22 +629,31 @@ func (suite *PathUnitSuite) TestFromString() { "/%s//%%s//%s//%%s//%s///%s//%s//", testTenant, testUser, - testElement1, + testElementTrimmed, testElement2, testElement3, ), expectedFolder: fmt.Sprintf( "%s/%s/%s", - testElement1, + testElementTrimmed, testElement2, testElement3, ), + expectedSplit: []string{ + testElementTrimmed, + testElement2, + testElement3, + }, expectedItem: testElement3, expectedItemFolder: fmt.Sprintf( "%s/%s", - testElement1, + testElementTrimmed, testElement2, ), + expectedItemSplit: []string{ + testElementTrimmed, + testElement2, + }, }, } @@ -572,16 +668,25 @@ func (suite *PathUnitSuite) TestFromString() { p, err := FromDataLayerPath(testPath, item.isItem) require.NoError(t, err) - assert.Equal(t, service, p.Service()) - assert.Equal(t, cat, p.Category()) - assert.Equal(t, testTenant, p.Tenant()) - assert.Equal(t, testUser, p.ResourceOwner()) + assert.Equal(t, service, p.Service(), "service") + assert.Equal(t, cat, p.Category(), "category") + assert.Equal(t, testTenant, p.Tenant(), "tenant") + assert.Equal(t, testUser, p.ResourceOwner(), "resource owner") - if !item.isItem { - assert.Equal(t, test.expectedFolder, p.Folder()) + fld := p.Folder(false) + escfld := p.Folder(true) + + if item.isItem { + assert.Equal(t, test.expectedItemFolder, fld, "item folder") + assert.Equal(t, test.expectedItemSplit, Split(fld), "item split") + assert.Equal(t, test.expectedItemFolder, escfld, "escaped item folder") + assert.Equal(t, test.expectedItemSplit, Split(escfld), "escaped item split") + assert.Equal(t, test.expectedItem, p.Item(), "item") } else { - assert.Equal(t, test.expectedItemFolder, p.Folder()) - assert.Equal(t, test.expectedItem, p.Item()) + assert.Equal(t, test.expectedFolder, fld, "dir folder") + assert.Equal(t, test.expectedSplit, Split(fld), "dir split") + assert.Equal(t, test.expectedFolder, escfld, "escaped dir folder") + assert.Equal(t, test.expectedSplit, Split(escfld), "escaped dir split") } }) } diff --git a/src/pkg/path/resource_path.go b/src/pkg/path/resource_path.go index 57f41c6ff..f5384a4ff 100644 --- a/src/pkg/path/resource_path.go +++ b/src/pkg/path/resource_path.go @@ -201,13 +201,20 @@ func (rp dataLayerResourcePath) lastFolderIdx() int { } // Folder returns the folder segment embedded in the dataLayerResourcePath. -func (rp dataLayerResourcePath) Folder() string { +func (rp dataLayerResourcePath) Folder(escape bool) string { endIdx := rp.lastFolderIdx() if endIdx == 4 { return "" } - return rp.Builder.join(4, endIdx) + fs := rp.Folders() + + if !escape { + return join(fs) + } + + // builder.String() will escape all individual elements. 
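Note: the TestFolder cases earlier in this diff pin down the new escape flag; a condensed sketch, with the constructor and category borrowed from that test:

    p, _ := path.Builder{}.
        Append("a/", "b", "c").
        ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false)

    p.Folder(false) // "a//b/c"  -- unescaped; Split sees ["a", "b", "c"]
    p.Folder(true)  // "a\//b/c" -- escaped; Split keeps "a\/" as one element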
+ return Builder{}.Append(fs...).String() } // Folders returns the individual folder elements embedded in the diff --git a/src/pkg/path/resource_path_test.go b/src/pkg/path/resource_path_test.go index c3655d19c..644eb4592 100644 --- a/src/pkg/path/resource_path_test.go +++ b/src/pkg/path/resource_path_test.go @@ -172,7 +172,7 @@ func (suite *DataLayerResourcePath) TestMailItemNoFolder() { ) require.NoError(t, err) - assert.Empty(t, p.Folder()) + assert.Empty(t, p.Folder(false)) assert.Empty(t, p.Folders()) assert.Equal(t, item, p.Item()) }) @@ -391,7 +391,7 @@ func (suite *DataLayerResourcePath) TestToExchangePathForCategory() { assert.Equal(t, path.ExchangeService, p.Service()) assert.Equal(t, test.category, p.Category()) assert.Equal(t, testUser, p.ResourceOwner()) - assert.Equal(t, strings.Join(m.expectedFolders, "/"), p.Folder()) + assert.Equal(t, strings.Join(m.expectedFolders, "/"), p.Folder(false)) assert.Equal(t, m.expectedFolders, p.Folders()) assert.Equal(t, m.expectedItem, p.Item()) }) @@ -465,7 +465,7 @@ func (suite *PopulatedDataLayerResourcePath) TestFolder() { assert.Equal( t, strings.Join(m.expectedFolders, "/"), - suite.paths[m.isItem].Folder(), + suite.paths[m.isItem].Folder(false), ) }) } @@ -525,7 +525,7 @@ func (suite *PopulatedDataLayerResourcePath) TestAppend() { return } - assert.Equal(t, test.expectedFolder, newPath.Folder()) + assert.Equal(t, test.expectedFolder, newPath.Folder(false)) assert.Equal(t, test.expectedItem, newPath.Item()) }) } diff --git a/src/pkg/selectors/exchange.go b/src/pkg/selectors/exchange.go index a5547749f..d47e67b3f 100644 --- a/src/pkg/selectors/exchange.go +++ b/src/pkg/selectors/exchange.go @@ -575,12 +575,12 @@ func (ec exchangeCategory) isLeaf() bool { return ec == ec.leafCat() } -// pathValues transforms a path to a map of identified properties. +// pathValues transforms the two paths to maps of identified properties. // // Example: // [tenantID, service, userPN, category, mailFolder, mailID] -// => {exchUser: userPN, exchMailFolder: mailFolder, exchMail: mailID} -func (ec exchangeCategory) pathValues(p path.Path) map[categorizer]string { +// => {exchMailFolder: mailFolder, exchMail: mailID} +func (ec exchangeCategory) pathValues(repo, location path.Path) (map[categorizer]string, map[categorizer]string) { var folderCat, itemCat categorizer switch ec { @@ -594,13 +594,24 @@ func (ec exchangeCategory) pathValues(p path.Path) map[categorizer]string { folderCat, itemCat = ExchangeMailFolder, ExchangeMail default: - return map[categorizer]string{} + return map[categorizer]string{}, map[categorizer]string{} } - return map[categorizer]string{ - folderCat: p.Folder(), - itemCat: p.Item(), + rv := map[categorizer]string{ + folderCat: repo.Folder(false), + itemCat: repo.Item(), } + + lv := map[categorizer]string{} + + if location != nil { + lv = map[categorizer]string{ + folderCat: location.Folder(false), + itemCat: location.Item(), + } + } + + return rv, lv } // pathKeys returns the path keys recognized by the receiver's leaf type. 
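Note: a hedged sketch of the values the two-map contract now produces for mail, callable only from inside package selectors since pathValues is unexported; the folder and item strings mirror the test stubs below (IDs in the repoRef, display names in the location):

    repoVals, locVals := ExchangeMail.pathValues(repo, loc)

    // repoVals -> {ExchangeMailFolder: "mf_id_1/mf_id_2", ExchangeMail: "mailID"}
    // locVals  -> {ExchangeMailFolder: "mailFolder/subFolder", ExchangeMail: "mailID"}
    // A nil location yields an empty locVals map.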
@@ -708,7 +719,7 @@ func (s ExchangeScope) setDefaults() { func (s exchange) Reduce( ctx context.Context, deets *details.Details, - errs fault.Adder, + errs *fault.Errors, ) *details.Details { return reduce[ExchangeScope]( ctx, diff --git a/src/pkg/selectors/exchange_test.go b/src/pkg/selectors/exchange_test.go index 830cde0c0..875cba426 100644 --- a/src/pkg/selectors/exchange_test.go +++ b/src/pkg/selectors/exchange_test.go @@ -11,7 +11,7 @@ import ( "github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/backup/details" - "github.com/alcionai/corso/src/pkg/fault/mock" + "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/filters" "github.com/alcionai/corso/src/pkg/path" ) @@ -705,13 +705,16 @@ func (suite *ExchangeSelectorSuite) TestExchangeScope_MatchesInfo() { func (suite *ExchangeSelectorSuite) TestExchangeScope_MatchesPath() { const ( usr = "userID" + fID1 = "mf_id_1" fld1 = "mailFolder" + fID2 = "mf_id_2" fld2 = "subFolder" mail = "mailID" ) var ( - pth = stubPath(suite.T(), usr, []string{fld1, fld2, mail}, path.EmailCategory) + repo = stubPath(suite.T(), usr, []string{fID1, fID2, mail}, path.EmailCategory) + loc = stubPath(suite.T(), usr, []string{fld1, fld2, mail}, path.EmailCategory) short = "thisisahashofsomekind" es = NewExchangeRestore(Any()) ) @@ -726,13 +729,18 @@ func (suite *ExchangeSelectorSuite) TestExchangeScope_MatchesPath() { {"all folders", es.MailFolders(Any()), "", assert.True}, {"no folders", es.MailFolders(None()), "", assert.False}, {"matching folder", es.MailFolders([]string{fld1}), "", assert.True}, + {"matching folder id", es.MailFolders([]string{fID1}), "", assert.True}, {"incomplete matching folder", es.MailFolders([]string{"mail"}), "", assert.False}, + {"incomplete matching folder ID", es.MailFolders([]string{"mf_id"}), "", assert.False}, {"non-matching folder", es.MailFolders([]string{"smarf"}), "", assert.False}, {"non-matching folder substring", es.MailFolders([]string{fld1 + "_suffix"}), "", assert.False}, + {"non-matching folder id substring", es.MailFolders([]string{fID1 + "_suffix"}), "", assert.False}, {"matching folder prefix", es.MailFolders([]string{fld1}, PrefixMatch()), "", assert.True}, + {"matching folder ID prefix", es.MailFolders([]string{fID1}, PrefixMatch()), "", assert.True}, {"incomplete folder prefix", es.MailFolders([]string{"mail"}, PrefixMatch()), "", assert.False}, {"matching folder substring", es.MailFolders([]string{"Folder"}), "", assert.False}, {"one of multiple folders", es.MailFolders([]string{"smarf", fld2}), "", assert.True}, + {"one of multiple folders by ID", es.MailFolders([]string{"smarf", fID2}), "", assert.True}, {"all mail", es.Mails(Any(), Any()), "", assert.True}, {"no mail", es.Mails(Any(), None()), "", assert.False}, {"matching mail", es.Mails(Any(), []string{mail}), "", assert.True}, @@ -746,8 +754,12 @@ func (suite *ExchangeSelectorSuite) TestExchangeScope_MatchesPath() { scopes := setScopesToDefault(test.scope) var aMatch bool for _, scope := range scopes { - pv := ExchangeMail.pathValues(pth) - if matchesPathValues(scope, ExchangeMail, pv, short) { + repoVals, locVals := ExchangeMail.pathValues(repo, loc) + if matchesPathValues(scope, ExchangeMail, repoVals, short) { + aMatch = true + break + } + if matchesPathValues(scope, ExchangeMail, locVals, short) { aMatch = true break } @@ -833,6 +845,256 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() { return deets } + table := []struct { + name 
string + deets *details.Details + makeSelector func() *ExchangeRestore + expect []string + }{ + { + "no refs", + makeDeets(), + func() *ExchangeRestore { + er := NewExchangeRestore(Any()) + er.Include(er.AllData()) + return er + }, + []string{}, + }, + { + "contact only", + makeDeets(contact), + func() *ExchangeRestore { + er := NewExchangeRestore(Any()) + er.Include(er.AllData()) + return er + }, + []string{contact}, + }, + { + "event only", + makeDeets(event), + func() *ExchangeRestore { + er := NewExchangeRestore(Any()) + er.Include(er.AllData()) + return er + }, + []string{event}, + }, + { + "mail only", + makeDeets(mail), + func() *ExchangeRestore { + er := NewExchangeRestore(Any()) + er.Include(er.AllData()) + return er + }, + []string{mail}, + }, + { + "all", + makeDeets(contact, event, mail), + func() *ExchangeRestore { + er := NewExchangeRestore(Any()) + er.Include(er.AllData()) + return er + }, + []string{contact, event, mail}, + }, + { + "only match contact", + makeDeets(contact, event, mail), + func() *ExchangeRestore { + er := NewExchangeRestore([]string{"uid"}) + er.Include(er.Contacts([]string{"cfld"}, []string{"cid"})) + return er + }, + []string{contact}, + }, + { + "only match contactInSubFolder", + makeDeets(contactInSubFolder, contact, event, mail), + func() *ExchangeRestore { + er := NewExchangeRestore([]string{"uid"}) + er.Include(er.ContactFolders([]string{"cfld1/cfld2"})) + return er + }, + []string{contactInSubFolder}, + }, + { + "only match contactInSubFolder by prefix", + makeDeets(contactInSubFolder, contact, event, mail), + func() *ExchangeRestore { + er := NewExchangeRestore([]string{"uid"}) + er.Include(er.ContactFolders([]string{"cfld1/cfld2"}, PrefixMatch())) + return er + }, + []string{contactInSubFolder}, + }, + { + "only match contactInSubFolder by leaf folder", + makeDeets(contactInSubFolder, contact, event, mail), + func() *ExchangeRestore { + er := NewExchangeRestore([]string{"uid"}) + er.Include(er.ContactFolders([]string{"cfld2"})) + return er + }, + []string{contactInSubFolder}, + }, + { + "only match event", + makeDeets(contact, event, mail), + func() *ExchangeRestore { + er := NewExchangeRestore([]string{"uid"}) + er.Include(er.Events([]string{"ecld"}, []string{"eid"})) + return er + }, + []string{event}, + }, + { + "only match mail", + makeDeets(contact, event, mail), + func() *ExchangeRestore { + er := NewExchangeRestore([]string{"uid"}) + er.Include(er.Mails([]string{"mfld"}, []string{"mid"})) + return er + }, + []string{mail}, + }, + { + "exclude contact", + makeDeets(contact, event, mail), + func() *ExchangeRestore { + er := NewExchangeRestore(Any()) + er.Include(er.AllData()) + er.Exclude(er.Contacts([]string{"cfld"}, []string{"cid"})) + return er + }, + []string{event, mail}, + }, + { + "exclude event", + makeDeets(contact, event, mail), + func() *ExchangeRestore { + er := NewExchangeRestore(Any()) + er.Include(er.AllData()) + er.Exclude(er.Events([]string{"ecld"}, []string{"eid"})) + return er + }, + []string{contact, mail}, + }, + { + "exclude mail", + makeDeets(contact, event, mail), + func() *ExchangeRestore { + er := NewExchangeRestore(Any()) + er.Include(er.AllData()) + er.Exclude(er.Mails([]string{"mfld"}, []string{"mid"})) + return er + }, + []string{contact, event}, + }, + { + "filter on mail subject", + func() *details.Details { + ds := makeDeets(mail) + for i := range ds.Entries { + ds.Entries[i].Exchange.Subject = "has a subject" + } + return ds + }(), + func() *ExchangeRestore { + er := NewExchangeRestore(Any()) + 
er.Include(er.AllData()) + er.Filter(er.MailSubject("subj")) + return er + }, + []string{mail}, + }, + { + "filter on mail subject multiple input categories", + func() *details.Details { + mds := makeDeets(mail) + for i := range mds.Entries { + mds.Entries[i].Exchange.Subject = "has a subject" + } + + ds := makeDeets(contact, event) + ds.Entries = append(ds.Entries, mds.Entries...) + + return ds + }(), + func() *ExchangeRestore { + er := NewExchangeRestore(Any()) + er.Include(er.AllData()) + er.Filter(er.MailSubject("subj")) + return er + }, + []string{mail}, + }, + } + for _, test := range table { + suite.T().Run(test.name, func(t *testing.T) { + ctx, flush := tester.NewContext() + defer flush() + + sel := test.makeSelector() + results := sel.Reduce(ctx, test.deets, fault.New(true)) + paths := results.Paths() + assert.Equal(t, test.expect, paths) + }) + } +} + +func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce_locationRef() { + var ( + contact = stubRepoRef(path.ExchangeService, path.ContactsCategory, "uid", "id5/id6", "cid") + contactLocation = "conts/my_cont" + event = stubRepoRef(path.ExchangeService, path.EventsCategory, "uid", "id1/id2", "eid") + eventLocation = "cal/my_cal" + mail = stubRepoRef(path.ExchangeService, path.EmailCategory, "uid", "id3/id4", "mid") + mailLocation = "inbx/my_mail" + ) + + makeDeets := func(refs ...string) *details.Details { + deets := &details.Details{ + DetailsModel: details.DetailsModel{ + Entries: []details.DetailsEntry{}, + }, + } + + for _, r := range refs { + var ( + location string + itype = details.UnknownType + ) + + switch r { + case contact: + itype = details.ExchangeContact + location = contactLocation + case event: + itype = details.ExchangeEvent + location = eventLocation + case mail: + itype = details.ExchangeMail + location = mailLocation + } + + deets.Entries = append(deets.Entries, details.DetailsEntry{ + RepoRef: r, + LocationRef: location, + ItemInfo: details.ItemInfo{ + Exchange: &details.ExchangeInfo{ + ItemType: itype, + }, + }, + }) + } + + return deets + } + arr := func(s ...string) []string { return s } @@ -898,47 +1160,17 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() { makeDeets(contact, event, mail), func() *ExchangeRestore { er := NewExchangeRestore([]string{"uid"}) - er.Include(er.Contacts([]string{"cfld"}, []string{"cid"})) + er.Include(er.Contacts([]string{contactLocation}, []string{"cid"})) return er }, arr(contact), }, - { - "only match contactInSubFolder", - makeDeets(contactInSubFolder, contact, event, mail), - func() *ExchangeRestore { - er := NewExchangeRestore([]string{"uid"}) - er.Include(er.ContactFolders([]string{"cfld1/cfld2"})) - return er - }, - arr(contactInSubFolder), - }, - { - "only match contactInSubFolder by prefix", - makeDeets(contactInSubFolder, contact, event, mail), - func() *ExchangeRestore { - er := NewExchangeRestore([]string{"uid"}) - er.Include(er.ContactFolders([]string{"cfld1/cfld2"}, PrefixMatch())) - return er - }, - arr(contactInSubFolder), - }, - { - "only match contactInSubFolder by leaf folder", - makeDeets(contactInSubFolder, contact, event, mail), - func() *ExchangeRestore { - er := NewExchangeRestore([]string{"uid"}) - er.Include(er.ContactFolders([]string{"cfld2"})) - return er - }, - arr(contactInSubFolder), - }, { "only match event", makeDeets(contact, event, mail), func() *ExchangeRestore { er := NewExchangeRestore([]string{"uid"}) - er.Include(er.Events([]string{"ecld"}, []string{"eid"})) + er.Include(er.Events([]string{eventLocation}, 
[]string{"eid"})) return er }, arr(event), @@ -948,7 +1180,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() { makeDeets(contact, event, mail), func() *ExchangeRestore { er := NewExchangeRestore([]string{"uid"}) - er.Include(er.Mails([]string{"mfld"}, []string{"mid"})) + er.Include(er.Mails([]string{mailLocation}, []string{"mid"})) return er }, arr(mail), @@ -959,7 +1191,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() { func() *ExchangeRestore { er := NewExchangeRestore(Any()) er.Include(er.AllData()) - er.Exclude(er.Contacts([]string{"cfld"}, []string{"cid"})) + er.Exclude(er.Contacts([]string{contactLocation}, []string{"cid"})) return er }, arr(event, mail), @@ -970,7 +1202,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() { func() *ExchangeRestore { er := NewExchangeRestore(Any()) er.Include(er.AllData()) - er.Exclude(er.Events([]string{"ecld"}, []string{"eid"})) + er.Exclude(er.Events([]string{eventLocation}, []string{"eid"})) return er }, arr(contact, mail), @@ -981,7 +1213,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() { func() *ExchangeRestore { er := NewExchangeRestore(Any()) er.Include(er.AllData()) - er.Exclude(er.Mails([]string{"mfld"}, []string{"mid"})) + er.Exclude(er.Mails([]string{mailLocation}, []string{"mid"})) return er }, arr(contact, event), @@ -1030,13 +1262,10 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() { ctx, flush := tester.NewContext() defer flush() - errs := mock.NewAdder() - sel := test.makeSelector() - results := sel.Reduce(ctx, test.deets, errs) + results := sel.Reduce(ctx, test.deets, fault.New(true)) paths := results.Paths() assert.Equal(t, test.expect, paths) - assert.Empty(t, errs.Errs) }) } } @@ -1131,9 +1360,12 @@ func (suite *ExchangeSelectorSuite) TestPasses() { } for _, test := range table { suite.T().Run(test.name, func(t *testing.T) { + repoVals, locVals := cat.pathValues(pth, pth) + result := passes( cat, - cat.pathValues(pth), + repoVals, + locVals, entry, test.excludes, test.filters, @@ -1236,17 +1468,17 @@ func (suite *ExchangeSelectorSuite) TestExchangeCategory_PathValues() { contactPath := stubPath(t, "user", []string{"cfolder", "contactitem"}, path.ContactsCategory) contactMap := map[categorizer]string{ - ExchangeContactFolder: contactPath.Folder(), + ExchangeContactFolder: contactPath.Folder(false), ExchangeContact: contactPath.Item(), } eventPath := stubPath(t, "user", []string{"ecalendar", "eventitem"}, path.EventsCategory) eventMap := map[categorizer]string{ - ExchangeEventCalendar: eventPath.Folder(), + ExchangeEventCalendar: eventPath.Folder(false), ExchangeEvent: eventPath.Item(), } mailPath := stubPath(t, "user", []string{"mfolder", "mailitem"}, path.EmailCategory) mailMap := map[categorizer]string{ - ExchangeMailFolder: mailPath.Folder(), + ExchangeMailFolder: mailPath.Folder(false), ExchangeMail: mailPath.Item(), } @@ -1261,7 +1493,9 @@ func (suite *ExchangeSelectorSuite) TestExchangeCategory_PathValues() { } for _, test := range table { suite.T().Run(string(test.cat), func(t *testing.T) { - assert.Equal(t, test.cat.pathValues(test.path), test.expect) + r, l := test.cat.pathValues(test.path, test.path) + assert.Equal(t, test.expect, r) + assert.Equal(t, test.expect, l) }) } } diff --git a/src/pkg/selectors/helpers_test.go b/src/pkg/selectors/helpers_test.go index 6b28ea443..5ab0360e5 100644 --- a/src/pkg/selectors/helpers_test.go +++ b/src/pkg/selectors/helpers_test.go @@ -55,11 +55,13 @@ func (mc mockCategorizer) isLeaf() 
bool { return mc == leafCatStub } -func (mc mockCategorizer) pathValues(pth path.Path) map[categorizer]string { - return map[categorizer]string{ +func (mc mockCategorizer) pathValues(repo, location path.Path) (map[categorizer]string, map[categorizer]string) { + pv := map[categorizer]string{ rootCatStub: "root", leafCatStub: "leaf", } + + return pv, pv } func (mc mockCategorizer) pathKeys() []categorizer { diff --git a/src/pkg/selectors/onedrive.go index f4d924a3b..5fe942518 100644 --- a/src/pkg/selectors/onedrive.go +++ b/src/pkg/selectors/onedrive.go @@ -371,19 +371,30 @@ func (c oneDriveCategory) isLeaf() bool { return c == OneDriveItem } -// pathValues transforms a path to a map of identified properties. +// pathValues transforms the two paths to maps of identified properties. // // Example: // [tenantID, service, userPN, category, folder, fileID] -// => {odUser: userPN, odFolder: folder, odFileID: fileID} -func (c oneDriveCategory) pathValues(p path.Path) map[categorizer]string { +// => {odFolder: folder, odFileID: fileID} +func (c oneDriveCategory) pathValues(repo, location path.Path) (map[categorizer]string, map[categorizer]string) { // Ignore `drives/<driveID>/root:` for folder comparison - folder := path.Builder{}.Append(p.Folders()...).PopFront().PopFront().PopFront().String() - - return map[categorizer]string{ - OneDriveFolder: folder, - OneDriveItem: p.Item(), + rFld := path.Builder{}.Append(repo.Folders()...).PopFront().PopFront().PopFront().String() + rv := map[categorizer]string{ + OneDriveFolder: rFld, + OneDriveItem: repo.Item(), } + + lv := map[categorizer]string{} + + if location != nil { + lFld := path.Builder{}.Append(location.Folders()...).PopFront().PopFront().PopFront().String() + lv = map[categorizer]string{ + OneDriveFolder: lFld, + OneDriveItem: location.Item(), + } + } + + return rv, lv } // pathKeys returns the path keys recognized by the receiver's leaf type.
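Note: the three PopFront calls drop the `drives/<driveID>/root:` prefix, matching the DrivePath layout in src/pkg/path/onedrive.go earlier in this diff. A minimal sketch with illustrative folder names:

    fld := path.Builder{}.
        Append("drives", "driveID", "root:", "docs", "q1").
        PopFront().PopFront().PopFront().
        String()

    // fld == "docs/q1" -- only the user-visible folders take part in matching.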
@@ -487,7 +498,7 @@ func (s OneDriveScope) DiscreteCopy(user string) OneDriveScope { func (s oneDrive) Reduce( ctx context.Context, deets *details.Details, - errs fault.Adder, + errs *fault.Errors, ) *details.Details { return reduce[OneDriveScope]( ctx, diff --git a/src/pkg/selectors/onedrive_test.go b/src/pkg/selectors/onedrive_test.go index 273019519..6fc36f601 100644 --- a/src/pkg/selectors/onedrive_test.go +++ b/src/pkg/selectors/onedrive_test.go @@ -11,7 +11,7 @@ import ( "github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/backup/details" - "github.com/alcionai/corso/src/pkg/fault/mock" + "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/path" ) @@ -242,13 +242,10 @@ func (suite *OneDriveSelectorSuite) TestOneDriveRestore_Reduce() { ctx, flush := tester.NewContext() defer flush() - errs := mock.NewAdder() - sel := test.makeSelector() - results := sel.Reduce(ctx, test.deets, errs) + results := sel.Reduce(ctx, test.deets, fault.New(true)) paths := results.Paths() assert.Equal(t, test.expect, paths) - assert.Empty(t, errs.Errs) }) } } @@ -265,7 +262,9 @@ func (suite *OneDriveSelectorSuite) TestOneDriveCategory_PathValues() { OneDriveItem: "file", } - assert.Equal(t, expected, OneDriveItem.pathValues(filePath)) + r, l := OneDriveItem.pathValues(filePath, filePath) + assert.Equal(t, expected, r) + assert.Equal(t, expected, l) } func (suite *OneDriveSelectorSuite) TestOneDriveScope_MatchesInfo() { diff --git a/src/pkg/selectors/scopes.go b/src/pkg/selectors/scopes.go index 5fc05e789..ff24ca845 100644 --- a/src/pkg/selectors/scopes.go +++ b/src/pkg/selectors/scopes.go @@ -77,17 +77,18 @@ type ( // eg: in a resourceOwner/folder/item structure, the item is the leaf. isLeaf() bool - // pathValues should produce a map of category:string pairs populated by extracting - // values out of the path.Path struct. + // pathValues takes in two paths, both variants of the repoRef, one containing the standard + // repoRef, and the other amended to include the locationRef directories (if available). It + // should produce two maps of category:string pairs populated by extracting the values out of + // each path.Path. // // Ex: given a path builder like ["tenant", "service", "resource", "dataType", "folder", "itemID"], // the func should use the path to construct a map similar to this: // { - // rootCat: resource, // folderCat: folder, // itemCat: itemID, // } - pathValues(path.Path) map[categorizer]string + pathValues(path.Path, path.Path) (map[categorizer]string, map[categorizer]string) // pathKeys produces a list of categorizers that can be used as keys in the pathValues // map. The combination of the two funcs generically interprets the context of the @@ -287,7 +288,7 @@ func reduce[T scopeT, C categoryT]( deets *details.Details, s Selector, dataCategories map[path.CategoryType]C, - errs fault.Adder, + errs *fault.Errors, ) *details.Details { ctx, end := D.Span(ctx, "selectors:reduce") defer end() @@ -317,6 +318,31 @@ func reduce[T scopeT, C categoryT]( continue } + var locationPath path.Path + + // if the details entry has a locationRef specified, use those folders in place + // of the repoRef folders, so that scopes can match against the display names + // instead of container IDs. 
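Note: the locationRef branch added in the next hunk turns an entry's display-name folders back into a full data-layer path before matching. A rough, runnable sketch under illustrative values (the exact rendered segments depend on the path package):

    package main

    import (
        "fmt"

        "github.com/alcionai/corso/src/pkg/path"
    )

    func main() {
        // Display-name folders taken from a details entry's LocationRef.
        pb, err := path.Builder{}.SplitUnescapeAppend("Inbox/Important")
        if err != nil {
            panic(err)
        }

        // Re-attach the item and the repoRef's metadata, as reduce() does.
        loc, err := pb.Append("mailID").ToDataLayerPath(
            "tenantID", "userPN", path.ExchangeService, path.EmailCategory, true)
        if err != nil {
            panic(err)
        }

        // e.g. tenantID/exchange/userPN/email/Inbox/Important/mailID
        fmt.Println(loc.String())
    }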
+		if len(ent.LocationRef) > 0 {
+			pb, err := path.Builder{}.SplitUnescapeAppend(ent.LocationRef)
+			if err != nil {
+				errs.Add(clues.Wrap(err, "transforming locationRef to path").WithClues(ctx))
+				continue
+			}
+
+			locationPath, err = pb.Append(repoPath.Item()).
+				ToDataLayerPath(
+					repoPath.Tenant(),
+					repoPath.ResourceOwner(),
+					repoPath.Service(),
+					repoPath.Category(),
+					true)
+			if err != nil {
+				errs.Add(clues.Wrap(err, "transforming locationRef to path").WithClues(ctx))
+				continue
+			}
+		}
+
 		// first check, every entry needs to match the selector's resource owners.
 		if !matchesResourceOwner.Compare(repoPath.ResourceOwner()) {
 			continue
@@ -334,7 +360,9 @@ func reduce[T scopeT, C categoryT](
 			continue
 		}
 
-		passed := passes(dc, dc.pathValues(repoPath), *ent, e, f, i)
+		rv, lv := dc.pathValues(repoPath, locationPath)
+
+		passed := passes(dc, rv, lv, *ent, e, f, i)
 		if passed {
 			ents = append(ents, *ent)
 		}
@@ -379,7 +407,7 @@ func scopesByCategory[T scopeT, C categoryT](
 // if the path is included, passes filters, and not excluded.
 func passes[T scopeT, C categoryT](
 	cat C,
-	pathValues map[categorizer]string,
+	repoValues, locationValues map[categorizer]string,
 	entry details.DetailsEntry,
 	excs, filts, incs []T,
 ) bool {
@@ -395,7 +423,7 @@ func passes[T scopeT, C categoryT](
 	var included bool
 
 	for _, inc := range incs {
-		if matchesEntry(inc, cat, pathValues, entry) {
+		if matchesEntry(inc, cat, repoValues, locationValues, entry) {
 			included = true
 			break
 		}
@@ -408,14 +436,14 @@ func passes[T scopeT, C categoryT](
 
 	// all filters must pass
 	for _, filt := range filts {
-		if !matchesEntry(filt, cat, pathValues, entry) {
+		if !matchesEntry(filt, cat, repoValues, locationValues, entry) {
 			return false
 		}
 	}
 
 	// any matching exclusion means failure
 	for _, exc := range excs {
-		if matchesEntry(exc, cat, pathValues, entry) {
+		if matchesEntry(exc, cat, repoValues, locationValues, entry) {
 			return false
 		}
 	}
@@ -428,7 +456,7 @@ func passes[T scopeT, C categoryT](
 func matchesEntry[T scopeT, C categoryT](
 	sc T,
 	cat C,
-	pathValues map[categorizer]string,
+	repoValues, locationValues map[categorizer]string,
 	entry details.DetailsEntry,
 ) bool {
 	// filterCategory requires matching against service-specific info values
@@ -436,7 +464,11 @@ func matchesEntry[T scopeT, C categoryT](
 		return sc.matchesInfo(entry.ItemInfo)
 	}
 
-	return matchesPathValues(sc, cat, pathValues, entry.ShortRef)
+	if len(locationValues) > 0 && matchesPathValues(sc, cat, locationValues, entry.ShortRef) {
+		return true
+	}
+
+	return matchesPathValues(sc, cat, repoValues, entry.ShortRef)
 }
 
 // matchesPathValues will check whether the pathValues have matching entries
diff --git a/src/pkg/selectors/scopes_test.go b/src/pkg/selectors/scopes_test.go
index 848d55767..e8b4a4cc0 100644
--- a/src/pkg/selectors/scopes_test.go
+++ b/src/pkg/selectors/scopes_test.go
@@ -9,7 +9,7 @@ import (
 
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/backup/details"
-	"github.com/alcionai/corso/src/pkg/fault/mock"
+	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/filters"
 	"github.com/alcionai/corso/src/pkg/path"
 )
@@ -274,7 +274,7 @@ func (suite *SelectorScopesSuite) TestReduce() {
 			ctx, flush := tester.NewContext()
 			defer flush()
 
-			errs := mock.NewAdder()
+			errs := fault.New(true)
 			ds := deets()
 
 			result := reduce[mockScope](
@@ -284,7 +284,48 @@ func (suite *SelectorScopesSuite) TestReduce() {
 				dataCats,
 				errs)
 			require.NotNil(t, result)
-			require.Empty(t, errs.Errs, "iteration errors")
+			require.NoError(t, errs.Err(), "no recoverable errors")
+			assert.Len(t, result.Entries, test.expectLen)
+		})
+	}
+}
+
+func (suite *SelectorScopesSuite) TestReduce_locationRef() {
+	deets := func() details.Details {
+		return details.Details{
+			DetailsModel: details.DetailsModel{
+				Entries: []details.DetailsEntry{
+					{
+						RepoRef: stubRepoRef(
+							pathServiceStub,
+							pathCatStub,
+							rootCatStub.String(),
+							"stub",
+							leafCatStub.String(),
+						),
+						LocationRef: "a/b/c//defg",
+					},
+				},
+			},
+		}
+	}
+	dataCats := map[path.CategoryType]mockCategorizer{
+		pathCatStub: rootCatStub,
+	}
+
+	for _, test := range reduceTestTable {
+		suite.T().Run(test.name, func(t *testing.T) {
+			ctx, flush := tester.NewContext()
+			defer flush()
+
+			ds := deets()
+			result := reduce[mockScope](
+				ctx,
+				&ds,
+				test.sel().Selector,
+				dataCats,
+				fault.New(true))
+			require.NotNil(t, result)
 			assert.Len(t, result.Entries, test.expectLen)
 		})
 	}
@@ -309,7 +350,7 @@ func (suite *SelectorScopesSuite) TestScopesByCategory() {
 func (suite *SelectorScopesSuite) TestPasses() {
 	cat := rootCatStub
 	pth := stubPath(suite.T(), "uid", []string{"fld"}, path.EventsCategory)
-	pathVals := cat.pathValues(pth)
+	repoVals, locVals := cat.pathValues(pth, pth)
 	entry := details.DetailsEntry{}
 
 	for _, test := range reduceTestTable {
@@ -320,7 +361,8 @@ func (suite *SelectorScopesSuite) TestPasses() {
 			incl := toMockScope(sel.Includes)
 			result := passes(
 				cat,
-				pathVals,
+				repoVals,
+				locVals,
 				entry,
 				excl, filt, incl)
 			test.expectPasses(t, result)
diff --git a/src/pkg/selectors/selectors.go b/src/pkg/selectors/selectors.go
index 8a9c02337..41bfecbcd 100644
--- a/src/pkg/selectors/selectors.go
+++ b/src/pkg/selectors/selectors.go
@@ -70,7 +70,7 @@ var (
 const All = "All"
 
 type Reducer interface {
-	Reduce(context.Context, *details.Details, fault.Adder) *details.Details
+	Reduce(context.Context, *details.Details, *fault.Errors) *details.Details
 }
 
 // selectorResourceOwners aggregates all discrete path category types described
@@ -240,7 +240,7 @@ func (s Selector) PathService() path.ServiceType {
 func (s Selector) Reduce(
 	ctx context.Context,
 	deets *details.Details,
-	errs fault.Adder,
+	errs *fault.Errors,
 ) (*details.Details, error) {
 	r, err := selectorAsIface[Reducer](s)
 	if err != nil {
diff --git a/src/pkg/selectors/selectors_reduce_test.go b/src/pkg/selectors/selectors_reduce_test.go
index 3748f793a..49dc7ac6f 100644
--- a/src/pkg/selectors/selectors_reduce_test.go
+++ b/src/pkg/selectors/selectors_reduce_test.go
@@ -10,7 +10,7 @@ import (
 	"github.com/alcionai/corso/src/internal/common"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/backup/details"
-	"github.com/alcionai/corso/src/pkg/fault/mock"
+	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/selectors"
 	"github.com/alcionai/corso/src/pkg/selectors/testdata"
 )
@@ -48,7 +48,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
 			selFunc: func() selectors.Reducer {
 				sel := selectors.NewExchangeRestore(selectors.Any())
 				sel.Include(sel.MailFolders(
-					[]string{testdata.ExchangeEmailInboxPath.Folder()},
+					[]string{testdata.ExchangeEmailInboxPath.Folder(false)},
 				))
 
 				return sel
@@ -177,7 +177,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
 			selFunc: func() selectors.Reducer {
 				sel := selectors.NewExchangeRestore(selectors.Any())
 				sel.Include(sel.MailFolders(
-					[]string{testdata.ExchangeEmailBasePath.Folder()},
+					[]string{testdata.ExchangeEmailBasePath.Folder(false)},
 				))
 
 				return sel
@@ -192,7 +192,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
 			selFunc: func() selectors.Reducer {
 				sel := selectors.NewExchangeRestore(selectors.Any())
 				sel.Include(sel.MailFolders(
-					[]string{testdata.ExchangeEmailBasePath.Folder()},
+					[]string{testdata.ExchangeEmailBasePath.Folder(false)},
 					selectors.PrefixMatch(), // force prefix matching
 				))
 
@@ -205,7 +205,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
 			selFunc: func() selectors.Reducer {
 				sel := selectors.NewExchangeRestore(selectors.Any())
 				sel.Include(sel.MailFolders(
-					[]string{testdata.ExchangeEmailInboxPath.Folder()},
+					[]string{testdata.ExchangeEmailInboxPath.Folder(false)},
 				))
 
 				return sel
@@ -217,7 +217,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
 			selFunc: func() selectors.Reducer {
 				sel := selectors.NewExchangeRestore(selectors.Any())
 				sel.Include(sel.ContactFolders(
-					[]string{testdata.ExchangeContactsBasePath.Folder()},
+					[]string{testdata.ExchangeContactsBasePath.Folder(false)},
 				))
 
 				return sel
@@ -229,7 +229,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
 			selFunc: func() selectors.Reducer {
 				sel := selectors.NewExchangeRestore(selectors.Any())
 				sel.Include(sel.ContactFolders(
-					[]string{testdata.ExchangeContactsRootPath.Folder()},
+					[]string{testdata.ExchangeContactsRootPath.Folder(false)},
 				))
 
 				return sel
@@ -242,7 +242,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
 			selFunc: func() selectors.Reducer {
 				sel := selectors.NewExchangeRestore(selectors.Any())
 				sel.Include(sel.EventCalendars(
-					[]string{testdata.ExchangeEventsBasePath.Folder()},
+					[]string{testdata.ExchangeEventsBasePath.Folder(false)},
 				))
 
 				return sel
@@ -254,7 +254,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
 			selFunc: func() selectors.Reducer {
 				sel := selectors.NewExchangeRestore(selectors.Any())
 				sel.Include(sel.EventCalendars(
-					[]string{testdata.ExchangeEventsRootPath.Folder()},
+					[]string{testdata.ExchangeEventsRootPath.Folder(false)},
 				))
 
 				return sel
@@ -265,11 +265,8 @@ func (suite *SelectorReduceSuite) TestReduce() {
 
 	for _, test := range table {
 		suite.T().Run(test.name, func(t *testing.T) {
-			errs := mock.NewAdder()
-
-			output := test.selFunc().Reduce(ctx, allDetails, errs)
+			output := test.selFunc().Reduce(ctx, allDetails, fault.New(true))
 			assert.ElementsMatch(t, test.expected, output.Entries)
-			assert.Empty(t, errs.Errs)
 		})
 	}
 }
diff --git a/src/pkg/selectors/sharepoint.go b/src/pkg/selectors/sharepoint.go
index 1df132e93..cfefeea82 100644
--- a/src/pkg/selectors/sharepoint.go
+++ b/src/pkg/selectors/sharepoint.go
@@ -423,12 +423,12 @@ func (c sharePointCategory) isLeaf() bool {
 	return c == c.leafCat()
 }
 
-// pathValues transforms a path to a map of identified properties.
+// pathValues transforms the two paths to maps of identified properties.
 //
 // Example:
 // [tenantID, service, siteID, category, folder, itemID]
-// => {spSite: siteID, spFolder: folder, spItemID: itemID}
-func (c sharePointCategory) pathValues(p path.Path) map[categorizer]string {
+// => {spFolder: folder, spItemID: itemID}
+func (c sharePointCategory) pathValues(repo, location path.Path) (map[categorizer]string, map[categorizer]string) {
 	var folderCat, itemCat categorizer
 
 	switch c {
@@ -439,13 +439,24 @@ func (c sharePointCategory) pathValues(p path.Path) map[categorizer]string {
 	case SharePointPage, SharePointPageFolder:
 		folderCat, itemCat = SharePointPageFolder, SharePointPage
 	default:
-		return map[categorizer]string{}
+		return map[categorizer]string{}, map[categorizer]string{}
 	}
 
-	return map[categorizer]string{
-		folderCat: p.Folder(),
-		itemCat:   p.Item(),
+	rv := map[categorizer]string{
+		folderCat: repo.Folder(false),
+		itemCat:   repo.Item(),
 	}
+
+	lv := map[categorizer]string{}
+
+	if location != nil {
+		lv = map[categorizer]string{
+			folderCat: location.Folder(false),
+			itemCat:   location.Item(),
+		}
+	}
+
+	return rv, lv
 }
 
 // pathKeys returns the path keys recognized by the receiver's leaf type.
@@ -559,7 +570,7 @@ func (s SharePointScope) DiscreteCopy(site string) SharePointScope {
 func (s sharePoint) Reduce(
 	ctx context.Context,
 	deets *details.Details,
-	errs fault.Adder,
+	errs *fault.Errors,
 ) *details.Details {
 	return reduce[SharePointScope](
 		ctx,
diff --git a/src/pkg/selectors/sharepoint_test.go b/src/pkg/selectors/sharepoint_test.go
index 2bf3f585c..f0cd958d2 100644
--- a/src/pkg/selectors/sharepoint_test.go
+++ b/src/pkg/selectors/sharepoint_test.go
@@ -9,7 +9,7 @@ import (
 
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/backup/details"
-	"github.com/alcionai/corso/src/pkg/fault/mock"
+	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
 )
@@ -306,13 +306,10 @@ func (suite *SharePointSelectorSuite) TestSharePointRestore_Reduce() {
 			ctx, flush := tester.NewContext()
 			defer flush()
 
-			errs := mock.NewAdder()
-
 			sel := test.makeSelector()
-			results := sel.Reduce(ctx, test.deets, errs)
+			results := sel.Reduce(ctx, test.deets, fault.New(true))
 			paths := results.Paths()
 			assert.Equal(t, test.expect, paths)
-			assert.Empty(t, errs.Errs)
 		})
 	}
 }
@@ -349,10 +346,11 @@ func (suite *SharePointSelectorSuite) TestSharePointCategory_PathValues() {
 				"tenant",
 				"site",
 				test.sc.PathType(),
-				true,
-			)
+				true)
 			require.NoError(t, err)
-			assert.Equal(t, test.expected, test.sc.pathValues(itemPath))
+			r, l := test.sc.pathValues(itemPath, itemPath)
+			assert.Equal(t, test.expected, r)
+			assert.Equal(t, test.expected, l)
 		})
 	}
 }
diff --git a/src/pkg/services/m365/m365.go b/src/pkg/services/m365/m365.go
index 9379cc028..b6564ea4d 100644
--- a/src/pkg/services/m365/m365.go
+++ b/src/pkg/services/m365/m365.go
@@ -28,7 +28,7 @@ func Users(ctx context.Context, acct account.Account, errs *fault.Errors) ([]*Us
 		return nil, errors.Wrap(err, "initializing M365 graph connection")
 	}
 
-	users, err := discovery.Users(ctx, gc.Owners.Users())
+	users, err := discovery.Users(ctx, gc.Owners.Users(), errs)
 	if err != nil {
 		return nil, err
 	}
diff --git a/src/pkg/services/m365/m365_test.go b/src/pkg/services/m365/m365_test.go
index f5f040dfa..503e220fe 100644
--- a/src/pkg/services/m365/m365_test.go
+++ b/src/pkg/services/m365/m365_test.go
@@ -31,13 +31,10 @@ func (suite *M365IntegrationSuite) TestUsers() {
 	var (
 		t    = suite.T()
 		acct = tester.NewM365Account(suite.T())
-		errs = fault.New(true)
 	)
 
-	users, err := Users(ctx, acct, errs)
+	users, err := Users(ctx, acct, fault.New(true))
 	require.NoError(t, err)
-	require.NoError(t, errs.Err())
-	require.Empty(t, errs.Errs())
 	require.NotNil(t, users)
 	require.Greater(t, len(users), 0)
diff --git a/website/blog/2023-2-13-zune.md b/website/blog/2023-2-13-zune.md
new file mode 100644
index 000000000..67477a5e4
--- /dev/null
+++ b/website/blog/2023-2-13-zune.md
@@ -0,0 +1,59 @@
+---
+slug: zune
+title: "Corso is giving away a 30GB Zune!"
+description: "I swear to god this is not a joke"
+authors: nica
+tags: [corso, retro, backups, zune]
+date: 2023-2-13
+image: ./images/zune.png
+---
+
+![image of a Microsoft Zune](./images/zune.png)
+
+The Corso team is all about making sure that your data never goes away.
+We've worked hard making the industry's only free and open-source tool for backing up Microsoft 365 data.
+And that's why we're not letting anything from Microsoft die, certainly not 2006's best personal media player, the Zune.
+
+Ummm… look, this all made sense when I bought the Zune and found someone to refurbish it. Just play along with me here, okay?
+
+![A gif from the show 30 Rock with a character insisting 'beepers are about to make a big comeback, technology is cyclical'](./images/cyclical.gif)
+
+## A technology whose time has come
+
+In many ways, the Zune was ahead of its time. It could stream music over WiFi and share your own media with other
+Zunes on the local network.
+
+It was also, to the best of my knowledge, the first personal media player to come in brown. This Zune isn't brown; it's black,
+but they did make brown ones.
+
+![image of a Microsoft Zune By BulbousSum - Own work, CC BY-SA 4.0, https://commons.wikimedia.org/w/index.php?curid=120341960](./images/brown_zune.jpeg)
+
+*Look at that, brown. What a time to be alive.*
+
+And not only can you load a Zune with MP3 or WMA files, it's also got an FM radio.
+
+![image of a zune tuning in FM radio](./images/radio_zune.jpeg)
+*FM radio, certainly nothing will replace that!*
+
+This is a good thing, since you'll have to run Windows Vista or earlier to load any media files onto the actual player.
+
+## All right Nica, I can't possibly get more excited. How can I win this prize?
+
+It will take you about 15 minutes (or less!). Just:
+
+* [Give Corso a try](https://corsobackup.io/docs/quickstart/)
+* Fill out the [Corso feedback form](https://forms.microsoft.com/r/mRVNKqeKDp)
+
+That's it! If you do that, I will hold a drawing in a few weeks, and send the lucky winner the working Zune that
+I have on my desk at this moment.
diff --git a/website/blog/images/brown_zune.jpeg b/website/blog/images/brown_zune.jpeg
new file mode 100644
index 000000000..1652c7798
Binary files /dev/null and b/website/blog/images/brown_zune.jpeg differ
diff --git a/website/blog/images/cyclical.gif b/website/blog/images/cyclical.gif
new file mode 100644
index 000000000..5de5b1c7f
Binary files /dev/null and b/website/blog/images/cyclical.gif differ
diff --git a/website/blog/images/radio_zune.jpeg b/website/blog/images/radio_zune.jpeg
new file mode 100644
index 000000000..35e0d076e
Binary files /dev/null and b/website/blog/images/radio_zune.jpeg differ
diff --git a/website/blog/images/zune.png b/website/blog/images/zune.png
new file mode 100644
index 000000000..aa4ee20de
Binary files /dev/null and b/website/blog/images/zune.png differ
diff --git a/website/styles/Vocab/Base/accept.txt b/website/styles/Vocab/Base/accept.txt
index 8831b2ca6..04495804a 100644
--- a/website/styles/Vocab/Base/accept.txt
+++ b/website/styles/Vocab/Base/accept.txt
@@ -39,4 +39,7 @@ stdout
 stderr
 backoff
 Greenlake
-subfolder
\ No newline at end of file
+subfolder
+[zZ]une
+Nica
+gif
\ No newline at end of file