Merge branch 'main' into sanitize-page
commit 4f25447b3e
@@ -10,11 +10,15 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ### Fixed
 - Support for item.Attachment:Mail restore
+- Errors from duplicate names in Exchange Calendars
 
 ### Changed
+- When using Restore and Details on Exchange Calendars, the `--event-calendar` flag can now identify calendars by either a Display Name or a Microsoft 365 ID.
+- Exchange Calendars storage entries now construct their paths using container IDs instead of display names. This fixes cases where duplicate display names caused system failures.
 
 ### Known Issues
 - Nested attachments are currently not restored due to an [issue](https://github.com/microsoft/kiota-serialization-json-go/issues/61) discovered in the Graph APIs
+- Breaking changes to Exchange Calendar backups.
 
 ## [v0.3.0] (alpha) - 2023-2-07
 
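The calendar fixes above all stem from one change: Exchange Calendar storage entries are now keyed by immutable container IDs rather than display names. A minimal, self-contained Go sketch (not Corso code; the IDs are hypothetical) of why name-keyed lookups collapse duplicates while ID-keyed lookups do not:

```go
package main

import "fmt"

// Two calendars may share a display name, but their Microsoft 365
// container IDs are unique. Keying storage by name silently merges
// (or clobbers) entries; keying by ID keeps them distinct.
func main() {
	type calendar struct{ id, displayName string }

	cals := []calendar{
		{id: "AAMkAGU1-hypothetical-1", displayName: "Birthdays"},
		{id: "AAMkAGU1-hypothetical-2", displayName: "Birthdays"}, // duplicate name
	}

	byName := map[string]calendar{}
	byID := map[string]calendar{}

	for _, c := range cals {
		byName[c.displayName] = c // second entry overwrites the first
		byID[c.id] = c            // both entries survive
	}

	fmt.Println(len(byName), len(byID)) // 1 2
}
```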
src/cli/utils/testdata/opts.go (vendored): 8 changes

@@ -137,14 +137,14 @@ var (
 			Name:     "EmailsFolderPrefixMatch",
 			Expected: testdata.ExchangeEmailItems,
 			Opts: utils.ExchangeOpts{
-				EmailFolder: []string{testdata.ExchangeEmailInboxPath.Folder()},
+				EmailFolder: []string{testdata.ExchangeEmailInboxPath.Folder(false)},
 			},
 		},
 		{
 			Name:     "EmailsFolderPrefixMatchTrailingSlash",
 			Expected: testdata.ExchangeEmailItems,
 			Opts: utils.ExchangeOpts{
-				EmailFolder: []string{testdata.ExchangeEmailInboxPath.Folder() + "/"},
+				EmailFolder: []string{testdata.ExchangeEmailInboxPath.Folder(false) + "/"},
 			},
 		},
 		{
@@ -154,7 +154,7 @@ var (
 			testdata.ExchangeEmailItems[2],
 		},
 		Opts: utils.ExchangeOpts{
-			EmailFolder: []string{testdata.ExchangeEmailBasePath2.Folder()},
+			EmailFolder: []string{testdata.ExchangeEmailBasePath2.Folder(false)},
 		},
 	},
 	{
@@ -164,7 +164,7 @@ var (
 			testdata.ExchangeEmailItems[2],
 		},
 		Opts: utils.ExchangeOpts{
-			EmailFolder: []string{testdata.ExchangeEmailBasePath2.Folder() + "/"},
+			EmailFolder: []string{testdata.ExchangeEmailBasePath2.Folder(false) + "/"},
 		},
 	},
 	{
@@ -172,7 +172,7 @@ func buildCollections(
 		return nil, err
 	}
 
-	mc := mockconnector.NewMockExchangeCollection(pth, len(c.items))
+	mc := mockconnector.NewMockExchangeCollection(pth, pth, len(c.items))
 
 	for i := 0; i < len(c.items); i++ {
 		mc.Names[i] = c.items[i].name
src/go.mod: 10 changes

@@ -5,7 +5,7 @@ go 1.19
 require (
 	github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0
 	github.com/alcionai/clues v0.0.0-20230202001016-cbda58c9de9e
-	github.com/aws/aws-sdk-go v1.44.197
+	github.com/aws/aws-sdk-go v1.44.199
 	github.com/aws/aws-xray-sdk-go v1.8.0
 	github.com/google/uuid v1.3.0
 	github.com/hashicorp/go-multierror v1.1.1
@@ -29,7 +29,7 @@ require (
 	github.com/vbauerster/mpb/v8 v8.1.6
 	go.uber.org/zap v1.24.0
 	golang.org/x/exp v0.0.0-20221217163422-3c43f8badb15
-	golang.org/x/tools v0.5.0
+	golang.org/x/tools v0.6.0
 	gopkg.in/resty.v1 v1.12.0
 )
 
@@ -111,11 +111,11 @@ require (
 	go.uber.org/atomic v1.10.0 // indirect
 	go.uber.org/multierr v1.8.0 // indirect
 	golang.org/x/crypto v0.5.0 // indirect
-	golang.org/x/mod v0.7.0 // indirect
-	golang.org/x/net v0.5.0 // indirect
+	golang.org/x/mod v0.8.0 // indirect
+	golang.org/x/net v0.6.0 // indirect
 	golang.org/x/sync v0.1.0 // indirect
 	golang.org/x/sys v0.5.0 // indirect
-	golang.org/x/text v0.6.0 // indirect
+	golang.org/x/text v0.7.0 // indirect
 	google.golang.org/genproto v0.0.0-20221227171554-f9683d7f8bef // indirect
 	google.golang.org/grpc v1.52.0 // indirect
 	google.golang.org/protobuf v1.28.1 // indirect
src/go.sum: 20 changes

@@ -62,8 +62,8 @@ github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk5
 github.com/alessio/shellescape v1.4.1 h1:V7yhSDDn8LP4lc4jS8pFkt0zCnzVJlG5JXy9BVKJUX0=
 github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY=
 github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
-github.com/aws/aws-sdk-go v1.44.197 h1:pkg/NZsov9v/CawQWy+qWVzJMIZRQypCtYjUBXFomF8=
-github.com/aws/aws-sdk-go v1.44.197/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
+github.com/aws/aws-sdk-go v1.44.199 h1:hYuQmS4zLMJR9v2iOp2UOD6Vi/0V+nwyR/Uhrkrtlbc=
+github.com/aws/aws-sdk-go v1.44.199/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
 github.com/aws/aws-xray-sdk-go v1.8.0 h1:0xncHZ588wB/geLjbM/esoW3FOEThWy2TJyb4VXfLFY=
 github.com/aws/aws-xray-sdk-go v1.8.0/go.mod h1:7LKe47H+j3evfvS1+q0wzpoaGXGrF3mUsfM+thqVO+A=
 github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8=
@@ -487,8 +487,8 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
-golang.org/x/mod v0.7.0 h1:LapD9S96VoQRhi/GrNTqeBJFrUjs5UHCAtTlgwA5oZA=
-golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
+golang.org/x/mod v0.8.0 h1:LUYupSeNrTNCGzR/hVBk2NHZO4hXcVaW1k4Qx7rjPx8=
+golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
 golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -529,8 +529,8 @@ golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su
 golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
 golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
 golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco=
-golang.org/x/net v0.5.0 h1:GyT4nK/YDHSqa1c4753ouYCDajOYKTja9Xb/OHtgvSw=
-golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws=
+golang.org/x/net v0.6.0 h1:L4ZwwTvKW9gr0ZMS1yrHD9GZhIuVjOBBnaKH+SPQK0Q=
+golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
 golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
 golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
 golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@@ -625,8 +625,8 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
 golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
-golang.org/x/text v0.6.0 h1:3XmdazWV+ubf7QgHSTWeykHOci5oeekaGJBLkrkaw4k=
-golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo=
+golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
 golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -678,8 +678,8 @@ golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4f
 golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
 golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
 golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
-golang.org/x/tools v0.5.0 h1:+bSpV5HIeWkuvgaMfI3UmKRThoTA5ODJTUd8T17NO+4=
-golang.org/x/tools v0.5.0/go.mod h1:N+Kgy78s5I24c24dU8OfWNEotWjutIs8SnJvn5IDq+k=
+golang.org/x/tools v0.6.0 h1:BOw41kyTf3PuCW1pVQf8+Cyg8pMlkYB1oo9iJ6D/lKM=
+golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -1,13 +1,20 @@
 package ptr
 
-// Val helper method for unwrapping strings
+// ptr package is a common package used for pointer
+// access and deserialization.
+
+// Val Generic function for dereferencing pointers.
 // Microsoft Graph saves many variables as string pointers.
 // Function will safely check if the point is nil prior to
 // dereferencing the pointer. If the pointer is nil,
-// an empty string is returned.
-func Val(ptr *string) string {
+// an empty version of the object is returned.
+// Operation does not work on Nested objects.
+// For example:
+// *evt.GetEnd().GetDateTime() will still cause a panic
+// if evt is nil or GetEnd() is nil
+func Val[T any](ptr *T) T {
 	if ptr == nil {
-		return ""
+		return *new(T)
 	}
 
 	return *ptr
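Since `Val` is now generic, it unwraps any pointer type to that type's zero value. A minimal standalone usage sketch (the values are illustrative; `evt` refers to the doc comment above):

```go
package main

import (
	"fmt"
	"time"

	"github.com/alcionai/corso/src/internal/common/ptr"
)

func main() {
	var s *string
	var ts *time.Time

	// Nil pointers dereference to the type's zero value instead of panicking.
	fmt.Println(ptr.Val(s) == "")     // true
	fmt.Println(ptr.Val(ts).IsZero()) // true

	v := "hello"
	s = &v
	fmt.Println(ptr.Val(s)) // hello

	// Per the doc comment above, nested accessors remain unsafe:
	// ptr.Val(evt.GetEnd().GetDateTime()) still panics if evt or GetEnd() is nil.
}
```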
src/internal/common/ptr/pointer_test.go (new file): 99 lines

@@ -0,0 +1,99 @@
+package ptr_test
+
+import (
+	"testing"
+	"time"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/suite"
+
+	"github.com/alcionai/corso/src/internal/common/ptr"
+)
+
+type PointerSuite struct {
+	suite.Suite
+}
+
+func TestPointerSuite(t *testing.T) {
+	suite.Run(t, new(PointerSuite))
+}
+
+// TestVal checks to ptr derefencing for the
+// following types:
+// - *string
+// - *bool
+// - *time.Time
+func (suite *PointerSuite) TestVal() {
+	var (
+		t          = suite.T()
+		created    *time.Time
+		testString *string
+		testBool   *bool
+		testInt    *int
+		testInt32  *int32
+		testInt64  *int64
+	)
+
+	// String Checks
+	subject := ptr.Val(testString)
+	assert.Empty(t, subject)
+
+	hello := "Hello World"
+	testString = &hello
+	subject = ptr.Val(testString)
+
+	t.Logf("Received: %s", subject)
+	assert.NotEmpty(t, subject)
+
+	// Time Checks
+
+	myTime := ptr.Val(created)
+	assert.Empty(t, myTime)
+	assert.NotNil(t, myTime)
+
+	now := time.Now()
+	created = &now
+	myTime = ptr.Val(created)
+	assert.NotEmpty(t, myTime)
+
+	// Bool Checks
+	truth := true
+	myBool := ptr.Val(testBool)
+	assert.NotNil(t, myBool)
+	assert.False(t, myBool)
+
+	testBool = &truth
+	myBool = ptr.Val(testBool)
+	assert.NotNil(t, myBool)
+	assert.True(t, myBool)
+
+	// Int checks
+	myInt := ptr.Val(testInt)
+	myInt32 := ptr.Val(testInt32)
+	myInt64 := ptr.Val(testInt64)
+
+	assert.NotNil(t, myInt)
+	assert.NotNil(t, myInt32)
+	assert.NotNil(t, myInt64)
+	assert.Empty(t, myInt)
+	assert.Empty(t, myInt32)
+	assert.Empty(t, myInt64)
+
+	num := 4071
+	num32 := int32(num * 32)
+	num64 := int64(num * 2048)
+	testInt = &num
+	testInt32 = &num32
+	testInt64 = &num64
+
+	myInt = ptr.Val(testInt)
+	myInt32 = ptr.Val(testInt32)
+	myInt64 = ptr.Val(testInt64)
+
+	assert.NotNil(t, myInt)
+	assert.NotNil(t, myInt32)
+	assert.NotNil(t, myInt64)
+	assert.NotEmpty(t, myInt)
+	assert.NotEmpty(t, myInt32)
+	assert.NotEmpty(t, myInt64)
+}
@@ -19,6 +19,7 @@ import (
 	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/control"
+	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/logger"
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/selectors"
@@ -38,6 +39,7 @@ func (gc *GraphConnector) DataCollections(
 	sels selectors.Selector,
 	metadata []data.RestoreCollection,
 	ctrlOpts control.Options,
+	errs *fault.Errors,
 ) ([]data.BackupCollection, map[string]struct{}, error) {
 	ctx, end := D.Span(ctx, "gc:dataCollections", D.Index("service", sels.Service.String()))
 	defer end()
@@ -65,7 +67,8 @@ func (gc *GraphConnector) DataCollections(
 		gc.credentials,
 		// gc.Service,
 		gc.UpdateStatus,
-		ctrlOpts)
+		ctrlOpts,
+		errs)
 	if err != nil {
 		return nil, nil, err
 	}
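With the signature change above, callers of `DataCollections` construct the fault bus themselves and inspect it after the call. A hedged sketch of the new call shape; the `dataCollections` function type is a local stand-in for the method, with the metadata and collection types elided to `any` since only the error flow is shown:

```go
package example

import (
	"context"

	"github.com/alcionai/corso/src/pkg/control"
	"github.com/alcionai/corso/src/pkg/fault"
	"github.com/alcionai/corso/src/pkg/selectors"
)

// dataCollections mirrors the new method signature with the data
// types elided; only the error plumbing matters here.
type dataCollections func(
	ctx context.Context,
	sels selectors.Selector,
	metadata any,
	ctrlOpts control.Options,
	errs *fault.Errors,
) (any, map[string]struct{}, error)

// runBackup shows the new call shape: the caller owns the *fault.Errors
// bus, passes it as the final argument, and checks errs.Err() afterward
// for recoverable, item-level failures.
func runBackup(ctx context.Context, collect dataCollections, sel selectors.Selector) (any, error) {
	errs := fault.New(true) // same constructor the tests in this diff use

	cols, _, err := collect(ctx, sel, nil, control.Options{}, errs)
	if err != nil {
		return nil, err // hard failure: no collections produced
	}

	return cols, errs.Err()
}
```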
@@ -14,6 +14,7 @@ import (
 	"github.com/alcionai/corso/src/internal/connector/sharepoint"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/control"
+	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/selectors"
 )
@@ -105,7 +106,8 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestExchangeDataCollection
 		nil,
 		connector.credentials,
 		connector.UpdateStatus,
-		control.Options{})
+		control.Options{},
+		fault.New(true))
 	require.NoError(t, err)
 
 	assert.Empty(t, excludes)
@@ -201,7 +203,12 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestDataCollections_invali
 
 	for _, test := range tests {
 		suite.T().Run(test.name, func(t *testing.T) {
-			collections, excludes, err := connector.DataCollections(ctx, test.getSelector(t), nil, control.Options{})
+			collections, excludes, err := connector.DataCollections(
+				ctx,
+				test.getSelector(t),
+				nil,
+				control.Options{},
+				fault.New(true))
 			assert.Error(t, err)
 			assert.Empty(t, collections)
 			assert.Empty(t, excludes)
@@ -325,7 +332,12 @@ func (suite *ConnectorCreateSharePointCollectionIntegrationSuite) TestCreateShar
 	sel := selectors.NewSharePointBackup(siteIDs)
 	sel.Include(sel.Libraries([]string{"foo"}, selectors.PrefixMatch()))
 
-	cols, excludes, err := gc.DataCollections(ctx, sel.Selector, nil, control.Options{})
+	cols, excludes, err := gc.DataCollections(
+		ctx,
+		sel.Selector,
+		nil,
+		control.Options{},
+		fault.New(true))
 	require.NoError(t, err)
 	assert.Len(t, cols, 1)
 	// No excludes yet as this isn't an incremental backup.
@@ -351,7 +363,12 @@ func (suite *ConnectorCreateSharePointCollectionIntegrationSuite) TestCreateShar
 	sel := selectors.NewSharePointBackup(siteIDs)
 	sel.Include(sel.Lists(selectors.Any(), selectors.PrefixMatch()))
 
-	cols, excludes, err := gc.DataCollections(ctx, sel.Selector, nil, control.Options{})
+	cols, excludes, err := gc.DataCollections(
+		ctx,
+		sel.Selector,
+		nil,
+		control.Options{},
+		fault.New(true))
 	require.NoError(t, err)
 	assert.Less(t, 0, len(cols))
 	// No excludes yet as this isn't an incremental backup.
@@ -3,6 +3,7 @@ package api
 import (
 	"context"
 
+	"github.com/alcionai/clues"
 	absser "github.com/microsoft/kiota-abstractions-go"
 	msgraphgocore "github.com/microsoftgraph/msgraph-sdk-go-core"
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
@@ -10,7 +11,7 @@ import (
 	"github.com/pkg/errors"
 
 	"github.com/alcionai/corso/src/internal/connector/graph"
-	"github.com/alcionai/corso/src/internal/connector/support"
+	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
 )
 
@@ -85,7 +86,7 @@ func userOptions(fs *string) *users.UsersRequestBuilderGetRequestConfiguration {
 }
 
 // GetAll retrieves all users.
-func (c Users) GetAll(ctx context.Context) ([]models.Userable, error) {
+func (c Users) GetAll(ctx context.Context, errs *fault.Errors) ([]models.Userable, error) {
 	service, err := c.service()
 	if err != nil {
 		return nil, err
@@ -99,7 +100,7 @@ func (c Users) GetAll(ctx context.Context) ([]models.Userable, error) {
 	})
 
 	if err != nil {
-		return nil, support.ConnectorStackErrorTraceWrap(err, "getting all users")
+		return nil, clues.Wrap(err, "getting all users").WithClues(ctx).WithAll(graph.ErrData(err)...)
 	}
 
 	iter, err := msgraphgocore.NewPageIterator(
@@ -107,18 +108,19 @@ func (c Users) GetAll(ctx context.Context) ([]models.Userable, error) {
 		service.Adapter(),
 		models.CreateUserCollectionResponseFromDiscriminatorValue)
 	if err != nil {
-		return nil, support.ConnectorStackErrorTraceWrap(err, "constructing user iterator")
+		return nil, clues.Wrap(err, "creating users iterator").WithClues(ctx).WithAll(graph.ErrData(err)...)
 	}
 
-	var (
-		iterErrs error
-		us       = make([]models.Userable, 0)
-	)
+	us := make([]models.Userable, 0)
 
 	iterator := func(item any) bool {
+		if errs.Failed() {
+			return false
+		}
+
 		u, err := validateUser(item)
 		if err != nil {
-			iterErrs = support.WrapAndAppend("validating user", err, iterErrs)
+			errs.Add(clues.Wrap(err, "validating user").WithClues(ctx).WithAll(graph.ErrData(err)...))
 		} else {
 			us = append(us, u)
 		}
@@ -127,10 +129,10 @@ func (c Users) GetAll(ctx context.Context) ([]models.Userable, error) {
 	}
 
 	if err := iter.Iterate(ctx, iterator); err != nil {
-		return nil, support.ConnectorStackErrorTraceWrap(err, "iterating all users")
+		return nil, clues.Wrap(err, "iterating all users").WithClues(ctx).WithAll(graph.ErrData(err)...)
 	}
 
-	return us, iterErrs
+	return us, errs.Err()
 }
 
 func (c Users) GetByID(ctx context.Context, userID string) (models.Userable, error) {
@@ -145,7 +147,7 @@ func (c Users) GetByID(ctx context.Context, userID string) (models.Userable, err
 	})
 
 	if err != nil {
-		return nil, support.ConnectorStackErrorTraceWrap(err, "getting user by id")
+		return nil, clues.Wrap(err, "getting user").WithClues(ctx).WithAll(graph.ErrData(err)...)
 	}
 
 	return resp, err
@@ -167,7 +169,7 @@ func (c Users) GetInfo(ctx context.Context, userID string) (*UserInfo, error) {
 
 	if err != nil {
 		if !graph.IsErrExchangeMailFolderNotFound(err) {
-			return nil, support.ConnectorStackErrorTraceWrap(err, "getting user's exchange mailfolders")
+			return nil, clues.Wrap(err, "getting user's mail folder").WithClues(ctx).WithAll(graph.ErrData(err)...)
 		}
 
 		delete(userInfo.DiscoveredServices, path.ExchangeService)
@@ -186,15 +188,15 @@ func (c Users) GetInfo(ctx context.Context, userID string) (*UserInfo, error) {
 func validateUser(item any) (models.Userable, error) {
 	m, ok := item.(models.Userable)
 	if !ok {
-		return nil, errors.Errorf("expected Userable, got %T", item)
+		return nil, clues.Stack(clues.New("unexpected model"), errors.Errorf("%T", item))
 	}
 
 	if m.GetId() == nil {
-		return nil, errors.Errorf("missing ID")
+		return nil, clues.New("missing ID")
 	}
 
 	if m.GetUserPrincipalName() == nil {
-		return nil, errors.New("missing principalName")
+		return nil, clues.New("missing principalName")
 	}
 
 	return m, nil
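The `GetAll` rewrite above follows a reusable loop shape: consult `errs.Failed()` to stop early, record recoverable per-item errors with `errs.Add`, and surface the aggregate through `errs.Err()`. A standalone sketch of that pattern (the string items and validator are stand-ins for the Graph models and `validateUser`):

```go
package example

import (
	"github.com/alcionai/clues"
	"github.com/alcionai/corso/src/pkg/fault"
)

// processAll sketches the iteration pattern GetAll now uses.
func processAll(items []string, validate func(string) error) ([]string, error) {
	errs := fault.New(true) // same constructor the tests in this diff use

	out := make([]string, 0, len(items))

	for _, item := range items {
		if errs.Failed() {
			break // a previous error already marked the run as failed
		}

		if err := validate(item); err != nil {
			errs.Add(clues.Wrap(err, "validating item"))
			continue
		}

		out = append(out, item)
	}

	return out, errs.Err()
}
```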
@@ -7,6 +7,7 @@ import (
 	"github.com/pkg/errors"
 
 	"github.com/alcionai/corso/src/internal/connector/discovery/api"
+	"github.com/alcionai/corso/src/pkg/fault"
 )
 
 // ---------------------------------------------------------------------------
@@ -14,7 +15,7 @@ import (
 // ---------------------------------------------------------------------------
 
 type getAller interface {
-	GetAll(context.Context) ([]models.Userable, error)
+	GetAll(context.Context, *fault.Errors) ([]models.Userable, error)
 }
 
 type getter interface {
@@ -35,8 +36,8 @@ type getWithInfoer interface {
 // ---------------------------------------------------------------------------
 
 // Users fetches all users in the tenant.
-func Users(ctx context.Context, ga getAller) ([]models.Userable, error) {
-	return ga.GetAll(ctx)
+func Users(ctx context.Context, ga getAller, errs *fault.Errors) ([]models.Userable, error) {
+	return ga.GetAll(ctx, errs)
 }
 
 func User(ctx context.Context, gwi getWithInfoer, userID string) (models.Userable, *api.UserInfo, error) {
@@ -161,39 +161,6 @@ func (suite *ExchangeServiceSuite) TestOptionsForContacts() {
 	}
 }
 
-// TestGraphQueryFunctions verifies if Query functions APIs
-// through Microsoft Graph are functional
-func (suite *ExchangeServiceSuite) TestGraphQueryFunctions() {
-	ctx, flush := tester.NewContext()
-	defer flush()
-
-	c, err := NewClient(suite.credentials)
-	require.NoError(suite.T(), err)
-
-	userID := tester.M365UserID(suite.T())
-	tests := []struct {
-		name     string
-		function GraphQuery
-	}{
-		{
-			name:     "GraphQuery: Get All ContactFolders",
-			function: c.Contacts().GetAllContactFolderNamesForUser,
-		},
-		{
-			name:     "GraphQuery: Get All Calendars for User",
-			function: c.Events().GetAllCalendarNamesForUser,
-		},
-	}
-
-	for _, test := range tests {
-		suite.T().Run(test.name, func(t *testing.T) {
-			response, err := test.function(ctx, userID)
-			assert.NoError(t, err)
-			assert.NotNil(t, response)
-		})
-	}
-}
-
 //nolint:lll
 var stubHTMLContent = "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none\">\r\n<!--\r\np\r\n\t{margin-top:0;\r\n\tmargin-bottom:0}\r\n-->\r\n</style></head><body dir=\"ltr\"><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\">Happy New Year,</div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><br></div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\">In accordance with TPS report guidelines, there have been questions about how to address our activities SharePoint Cover page. Do you believe this is the best picture? </div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><br></div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><img class=\"FluidPluginCopy ContentPasted0 w-2070 h-1380\" size=\"5854817\" data-outlook-trace=\"F:1|T:1\" src=\"cid:85f4faa3-9851-40c7-ba0a-e63dce1185f9\" style=\"max-width:100%\"><br></div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><br></div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\">Let me know if this meets our culture requirements.</div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><br></div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\">Warm Regards,</div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\"><br></div><div class=\"elementToProof\" style=\"font-family:Calibri,Arial,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0); background-color:rgb(255,255,255)\">Dustin</div></body></html>"
 
@@ -3,7 +3,6 @@ package api
 import (
 	"context"
 	"fmt"
-	"time"
 
 	"github.com/alcionai/clues"
 	"github.com/hashicorp/go-multierror"
@@ -13,6 +12,7 @@ import (
 	"github.com/microsoftgraph/msgraph-sdk-go/users"
 	"github.com/pkg/errors"
 
+	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/connector/graph/api"
 	"github.com/alcionai/corso/src/internal/connector/support"
@@ -80,28 +80,6 @@ func (c Contacts) GetItem(
 	return cont, ContactInfo(cont), nil
 }
 
-// GetAllContactFolderNamesForUser is a GraphQuery function for getting
-// ContactFolderId and display names for contacts. All other information is omitted.
-// Does not return the default Contact Folder
-func (c Contacts) GetAllContactFolderNamesForUser(
-	ctx context.Context,
-	user string,
-) (serialization.Parsable, error) {
-	options, err := optionsForContactFolders([]string{"displayName", "parentFolderId"})
-	if err != nil {
-		return nil, err
-	}
-
-	var resp models.ContactFolderCollectionResponseable
-
-	err = graph.RunWithRetry(func() error {
-		resp, err = c.stable.Client().UsersById(user).ContactFolders().Get(ctx, options)
-		return err
-	})
-
-	return resp, err
-}
-
 func (c Contacts) GetContainerByID(
 	ctx context.Context,
 	userID, dirID string,
@@ -169,10 +147,8 @@ func (c Contacts) EnumerateContainers(
 			continue
 		}
 
-		temp := graph.NewCacheFolder(fold, nil)
-
-		err = fn(temp)
-		if err != nil {
+		temp := graph.NewCacheFolder(fold, nil, nil)
+		if err := fn(temp); err != nil {
 			errs = multierror.Append(err, errs)
 			continue
 		}
@@ -317,16 +293,8 @@ func (c Contacts) Serialize(
 // ---------------------------------------------------------------------------
 
 func ContactInfo(contact models.Contactable) *details.ExchangeInfo {
-	name := ""
-	created := time.Time{}
-
-	if contact.GetDisplayName() != nil {
-		name = *contact.GetDisplayName()
-	}
-
-	if contact.GetCreatedDateTime() != nil {
-		created = *contact.GetCreatedDateTime()
-	}
+	name := ptr.Val(contact.GetDisplayName())
+	created := ptr.Val(contact.GetCreatedDateTime())
 
 	return &details.ExchangeInfo{
 		ItemType: details.ExchangeContact,
@@ -14,6 +14,7 @@ import (
 	"github.com/pkg/errors"
 
 	"github.com/alcionai/corso/src/internal/common"
+	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/connector/graph/api"
 	"github.com/alcionai/corso/src/internal/connector/support"
@@ -143,25 +144,6 @@ func (c Events) GetItem(
 	return event, EventInfo(event), nil
 }
 
-func (c Client) GetAllCalendarNamesForUser(
-	ctx context.Context,
-	user string,
-) (serialization.Parsable, error) {
-	options, err := optionsForCalendars([]string{"name", "owner"})
-	if err != nil {
-		return nil, err
-	}
-
-	var resp models.CalendarCollectionResponseable
-
-	err = graph.RunWithRetry(func() error {
-		resp, err = c.stable.Client().UsersById(user).Calendars().Get(ctx, options)
-		return err
-	})
-
-	return resp, err
-}
-
 // EnumerateContainers iterates through all of the users current
 // calendars, converting each to a graph.CacheFolder, and
 // calling fn(cf) on each one. If fn(cf) errors, the error is
@@ -209,10 +191,11 @@ func (c Events) EnumerateContainers(
 			continue
 		}
 
-		temp := graph.NewCacheFolder(cd, path.Builder{}.Append(*cd.GetDisplayName()))
-
-		err = fn(temp)
-		if err != nil {
+		temp := graph.NewCacheFolder(
+			cd,
+			path.Builder{}.Append(*cd.GetId()),          // storage path
+			path.Builder{}.Append(*cd.GetDisplayName())) // display location
+		if err := fn(temp); err != nil {
 			errs = multierror.Append(err, errs)
 			continue
 		}
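This hunk is where the changelog's ID-based calendar paths take effect: each cached calendar now carries an ID-keyed storage path plus a name-keyed display location. A standalone sketch of the two path flavors (the container ID and name are hypothetical):

```go
package main

import (
	"fmt"

	"github.com/alcionai/corso/src/pkg/path"
)

func main() {
	// Hypothetical calendar container values.
	id := "AAMkAGVmMDEz-hypothetical"
	displayName := "Birthdays"

	storage := path.Builder{}.Append(id)           // stable across renames and duplicate names
	location := path.Builder{}.Append(displayName) // human-readable, for display only

	fmt.Println("storage: ", storage.String())
	fmt.Println("location:", location.String())
}
```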
@@ -390,11 +373,12 @@ func (c CalendarDisplayable) GetParentFolderId() *string {
 
 func EventInfo(evt models.Eventable) *details.ExchangeInfo {
 	var (
-		organizer, subject string
-		recurs             bool
-		start              = time.Time{}
-		end                = time.Time{}
-		created            = time.Time{}
+		organizer string
+		subject   = ptr.Val(evt.GetSubject())
+		recurs    bool
+		start     = time.Time{}
+		end       = time.Time{}
+		created   = ptr.Val(evt.GetCreatedDateTime())
 	)
 
 	if evt.GetOrganizer() != nil &&
@@ -405,10 +389,6 @@ func EventInfo(evt models.Eventable) *details.ExchangeInfo {
 			GetAddress()
 	}
 
-	if evt.GetSubject() != nil {
-		subject = *evt.GetSubject()
-	}
-
 	if evt.GetRecurrence() != nil {
 		recurs = true
 	}
@@ -437,10 +417,6 @@ func EventInfo(evt models.Eventable) *details.ExchangeInfo {
 		}
 	}
 
-	if evt.GetCreatedDateTime() != nil {
-		created = *evt.GetCreatedDateTime()
-	}
-
 	return &details.ExchangeInfo{
 		ItemType:  details.ExchangeEvent,
 		Organizer: organizer,
@@ -3,7 +3,6 @@ package api
 import (
 	"context"
 	"fmt"
-	"time"
 
 	"github.com/alcionai/clues"
 	"github.com/hashicorp/go-multierror"
@@ -13,6 +12,7 @@ import (
 	"github.com/microsoftgraph/msgraph-sdk-go/users"
 	"github.com/pkg/errors"
 
+	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/connector/graph/api"
 	"github.com/alcionai/corso/src/internal/connector/support"
@@ -198,8 +198,7 @@ func (c Mail) EnumerateContainers(
 	}
 
 	for _, v := range resp.GetValue() {
-		temp := graph.NewCacheFolder(v, nil)
-
+		temp := graph.NewCacheFolder(v, nil, nil)
 		if err := fn(temp); err != nil {
 			errs = multierror.Append(errs, errors.Wrap(err, "iterating mail folders delta"))
 			continue
@@ -348,9 +347,9 @@ func (c Mail) Serialize(
 
 func MailInfo(msg models.Messageable) *details.ExchangeInfo {
 	sender := ""
-	subject := ""
-	received := time.Time{}
-	created := time.Time{}
+	subject := ptr.Val(msg.GetSubject())
+	received := ptr.Val(msg.GetReceivedDateTime())
+	created := ptr.Val(msg.GetCreatedDateTime())
 
 	if msg.GetSender() != nil &&
 		msg.GetSender().GetEmailAddress() != nil &&
@@ -358,18 +357,6 @@ func MailInfo(msg models.Messageable) *details.ExchangeInfo {
 		sender = *msg.GetSender().GetEmailAddress().GetAddress()
 	}
 
-	if msg.GetSubject() != nil {
-		subject = *msg.GetSubject()
-	}
-
-	if msg.GetReceivedDateTime() != nil {
-		received = *msg.GetReceivedDateTime()
-	}
-
-	if msg.GetCreatedDateTime() != nil {
-		created = *msg.GetCreatedDateTime()
-	}
-
 	return &details.ExchangeInfo{
 		ItemType: details.ExchangeMail,
 		Sender:   sender,
@@ -135,27 +135,6 @@ func optionsForCalendarsByID(moreOps []string) (
 	return options, nil
 }
 
-// optionsForContactFolders places allowed options for exchange.ContactFolder object
-// @return is first call in ContactFolders().GetWithRequestConfigurationAndResponseHandler
-func optionsForContactFolders(moreOps []string) (
-	*users.ItemContactFoldersRequestBuilderGetRequestConfiguration,
-	error,
-) {
-	selecting, err := buildOptions(moreOps, fieldsForFolders)
-	if err != nil {
-		return nil, err
-	}
-
-	requestParameters := &users.ItemContactFoldersRequestBuilderGetQueryParameters{
-		Select: selecting,
-	}
-	options := &users.ItemContactFoldersRequestBuilderGetRequestConfiguration{
-		QueryParameters: requestParameters,
-	}
-
-	return options, nil
-}
-
 func optionsForContactFolderByID(moreOps []string) (
 	*users.ItemContactFoldersContactFolderItemRequestBuilderGetRequestConfiguration,
 	error,
@@ -25,7 +25,8 @@ const (
 )
 
 func attachmentType(attachment models.Attachmentable) models.AttachmentType {
-	switch *attachment.GetOdataType() {
+	attachmentType := ptr.Val(attachment.GetOdataType())
+	switch attachmentType {
 	case fileAttachmentOdataValue:
 		return models.FILE_ATTACHMENTTYPE
 	case itemAttachmentOdataValue:
@@ -29,8 +29,10 @@ func (cfc *contactFolderCache) populateContactRoot(
 		return support.ConnectorStackErrorTraceWrap(err, "fetching root folder")
 	}
 
-	temp := graph.NewCacheFolder(f, path.Builder{}.Append(baseContainerPath...))
+	temp := graph.NewCacheFolder(
+		f,
+		path.Builder{}.Append(baseContainerPath...), // storage path
+		path.Builder{}.Append(baseContainerPath...)) // display location
 	if err := cfc.addFolder(temp); err != nil {
 		return errors.Wrap(err, "adding resolver dir")
 	}
@@ -56,7 +58,7 @@ func (cfc *contactFolderCache) Populate(
 		return errors.Wrap(err, "enumerating containers")
 	}
 
-	if err := cfc.populatePaths(ctx); err != nil {
+	if err := cfc.populatePaths(ctx, false); err != nil {
 		return errors.Wrap(err, "populating paths")
 	}
 
@@ -51,38 +51,52 @@ type containerResolver struct {
 func (cr *containerResolver) IDToPath(
 	ctx context.Context,
 	folderID string,
-) (*path.Builder, error) {
-	return cr.idToPath(ctx, folderID, 0)
+	useIDInPath bool,
+) (*path.Builder, *path.Builder, error) {
+	return cr.idToPath(ctx, folderID, 0, useIDInPath)
 }
 
 func (cr *containerResolver) idToPath(
 	ctx context.Context,
 	folderID string,
 	depth int,
-) (*path.Builder, error) {
+	useIDInPath bool,
+) (*path.Builder, *path.Builder, error) {
 	if depth >= maxIterations {
-		return nil, errors.New("path contains cycle or is too tall")
+		return nil, nil, errors.New("path contains cycle or is too tall")
 	}
 
 	c, ok := cr.cache[folderID]
 	if !ok {
-		return nil, errors.Errorf("folder %s not cached", folderID)
+		return nil, nil, errors.Errorf("folder %s not cached", folderID)
 	}
 
 	p := c.Path()
 	if p != nil {
-		return p, nil
+		return p, c.Location(), nil
 	}
 
-	parentPath, err := cr.idToPath(ctx, *c.GetParentFolderId(), depth+1)
+	parentPath, parentLoc, err := cr.idToPath(ctx, *c.GetParentFolderId(), depth+1, useIDInPath)
 	if err != nil {
-		return nil, errors.Wrap(err, "retrieving parent folder")
+		return nil, nil, errors.Wrap(err, "retrieving parent folder")
 	}
 
-	fullPath := parentPath.Append(*c.GetDisplayName())
+	toAppend := *c.GetDisplayName()
+	if useIDInPath {
+		toAppend = *c.GetId()
+	}
+
+	fullPath := parentPath.Append(toAppend)
 	c.SetPath(fullPath)
 
-	return fullPath, nil
+	var locPath *path.Builder
+
+	if parentLoc != nil {
+		locPath = parentLoc.Append(*c.GetDisplayName())
+		c.SetLocation(locPath)
+	}
+
+	return fullPath, locPath, nil
 }
 
 // PathInCache utility function to return m365ID of folder if the path.Folders
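Callers of the resolver now receive both the storage path and the display location from `IDToPath`. A hedged consumer sketch; `idPathResolver` is a local stand-in interface for the unexported `containerResolver`:

```go
package example

import (
	"context"
	"fmt"

	"github.com/alcionai/corso/src/pkg/path"
)

// idPathResolver mirrors the new IDToPath contract: storage path first,
// display location second.
type idPathResolver interface {
	IDToPath(ctx context.Context, folderID string, useIDInPath bool) (*path.Builder, *path.Builder, error)
}

// describeFolder resolves both path flavors for a cached folder ID.
func describeFolder(ctx context.Context, cr idPathResolver, folderID string) error {
	storage, location, err := cr.IDToPath(ctx, folderID, true) // true: key segments by container ID
	if err != nil {
		return err
	}

	// The ID-keyed storage path is stable across renames; the location
	// keeps display names for presentation. location may be nil when no
	// display info was cached for an ancestor.
	fmt.Println("storage:", storage.String())
	if location != nil {
		fmt.Println("location:", location.String())
	}

	return nil
}
```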
@@ -93,13 +107,13 @@ func (cr *containerResolver) PathInCache(pathString string) (string, bool) {
 		return "", false
 	}
 
-	for _, contain := range cr.cache {
-		if contain.Path() == nil {
+	for _, cc := range cr.cache {
+		if cc.Path() == nil {
 			continue
 		}
 
-		if contain.Path().String() == pathString {
-			return *contain.GetId(), true
+		if cc.Path().String() == pathString {
+			return *cc.GetId(), true
 		}
 	}
 
@@ -141,18 +155,21 @@ func (cr *containerResolver) Items() []graph.CachedContainer {
 
 // AddToCache adds container to map in field 'cache'
 // @returns error iff the required values are not accessible.
-func (cr *containerResolver) AddToCache(ctx context.Context, f graph.Container) error {
+func (cr *containerResolver) AddToCache(
+	ctx context.Context,
+	f graph.Container,
+	useIDInPath bool,
+) error {
 	temp := graph.CacheFolder{
 		Container: f,
 	}
 
 	if err := cr.addFolder(temp); err != nil {
 		return errors.Wrap(err, "adding cache folder")
 	}
 
 	// Populate the path for this entry so calls to PathInCache succeed no matter
 	// when they're made.
-	_, err := cr.IDToPath(ctx, *f.GetId())
+	_, _, err := cr.IDToPath(ctx, *f.GetId(), useIDInPath)
 	if err != nil {
 		return errors.Wrap(err, "adding cache entry")
 	}
@@ -160,12 +177,18 @@ func (cr *containerResolver) AddToCache(ctx context.Context, f graph.Container)
 	return nil
 }
 
-func (cr *containerResolver) populatePaths(ctx context.Context) error {
+// DestinationNameToID returns an empty string. This is only supported by exchange
+// calendars at this time.
+func (cr *containerResolver) DestinationNameToID(dest string) string {
+	return ""
+}
+
+func (cr *containerResolver) populatePaths(ctx context.Context, useIDInPath bool) error {
 	var errs *multierror.Error
 
 	// Populate all folder paths.
 	for _, f := range cr.Items() {
-		_, err := cr.IDToPath(ctx, *f.GetId())
+		_, _, err := cr.IDToPath(ctx, *f.GetId(), useIDInPath)
 		if err != nil {
 			errs = multierror.Append(errs, errors.Wrap(err, "populating path"))
 		}
|||||||
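populatePaths keeps the existing hashicorp/go-multierror accumulation: one bad folder should not abort path population for the rest of the cache. A quick runnable refresher on the two calls used above (toy loop, not corso code):

```go
package main

import (
	"errors"
	"fmt"

	"github.com/hashicorp/go-multierror"
)

func main() {
	// A nil *multierror.Error is a valid starting point for Append.
	var errs *multierror.Error

	for _, id := range []string{"idA", "missing", "idB"} {
		if id == "missing" {
			errs = multierror.Append(errs, errors.New("populating path: "+id))
			continue // keep resolving the remaining folders
		}
	}

	// ErrorOrNil reports every accumulated failure at once, or nil if none.
	fmt.Println(errs.ErrorOrNil())
}
```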
@@ -1,6 +1,7 @@
 package exchange
 
 import (
+    "fmt"
     stdpath "path"
     "testing"
 
@@ -26,16 +27,19 @@ type mockContainer struct {
     displayName *string
     parentID    *string
     p           *path.Builder
+    l           *path.Builder
 }
 
 //nolint:revive
 func (m mockContainer) GetId() *string { return m.id }
 
 //nolint:revive
 func (m mockContainer) GetParentFolderId() *string { return m.parentID }
 func (m mockContainer) GetDisplayName() *string    { return m.displayName }
-func (m mockContainer) Path() *path.Builder        { return m.p }
-func (m mockContainer) SetPath(p *path.Builder)    {}
+func (m mockContainer) Location() *path.Builder     { return m.l }
+func (m mockContainer) SetLocation(p *path.Builder) {}
+func (m mockContainer) Path() *path.Builder         { return m.p }
+func (m mockContainer) SetPath(p *path.Builder)     {}
 
 func strPtr(s string) *string {
     return &s
@@ -168,7 +172,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
                 parentID:    nil,
             },
             nil,
-        ),
+            nil),
         check: assert.Error,
     },
     {
@@ -180,7 +184,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
                 parentID:    nil,
             },
             path.Builder{}.Append("foo"),
-        ),
+            path.Builder{}.Append("loc")),
         check: assert.NoError,
     },
     {
@@ -192,7 +196,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
                 parentID:    &testParentID,
             },
             path.Builder{}.Append("foo"),
-        ),
+            path.Builder{}.Append("loc")),
         check: assert.Error,
     },
     {
@@ -204,7 +208,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
                 parentID:    &testParentID,
             },
             path.Builder{}.Append("foo"),
-        ),
+            path.Builder{}.Append("loc")),
         check: assert.Error,
     },
     {
@@ -216,7 +220,7 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() {
                 parentID:    &testParentID,
             },
             nil,
-        ),
+            nil),
         check: assert.NoError,
     },
 }
@@ -238,52 +242,57 @@ func newMockCachedContainer(name string) *mockCachedContainer {
 }
 
 type mockCachedContainer struct {
     id          string
     parentID    string
     displayName string
-    p            *path.Builder
-    expectedPath string
+    l                *path.Builder
+    p                *path.Builder
+    expectedPath     string
+    expectedLocation string
 }
 
 //nolint:revive
-func (m mockCachedContainer) GetId() *string {
-    return &m.id
-}
+func (m mockCachedContainer) GetId() *string { return &m.id }
 
 //nolint:revive
-func (m mockCachedContainer) GetParentFolderId() *string {
-    return &m.parentID
-}
-
-func (m mockCachedContainer) GetDisplayName() *string {
-    return &m.displayName
-}
-
-func (m mockCachedContainer) Path() *path.Builder {
-    return m.p
-}
-
-func (m *mockCachedContainer) SetPath(newPath *path.Builder) {
-    m.p = newPath
-}
+func (m mockCachedContainer) GetParentFolderId() *string        { return &m.parentID }
+func (m mockCachedContainer) GetDisplayName() *string           { return &m.displayName }
+func (m mockCachedContainer) Location() *path.Builder           { return m.l }
+func (m *mockCachedContainer) SetLocation(newLoc *path.Builder) { m.l = newLoc }
+func (m mockCachedContainer) Path() *path.Builder               { return m.p }
+func (m *mockCachedContainer) SetPath(newPath *path.Builder)    { m.p = newPath }
 
-func resolverWithContainers(numContainers int) (*containerResolver, []*mockCachedContainer) {
+func resolverWithContainers(numContainers int, useIDInPath bool) (*containerResolver, []*mockCachedContainer) {
     containers := make([]*mockCachedContainer, 0, numContainers)
 
     for i := 0; i < numContainers; i++ {
-        containers = append(containers, newMockCachedContainer("a"))
+        containers = append(containers, newMockCachedContainer(fmt.Sprintf("%d", i)))
     }
 
     // Base case for the recursive lookup.
-    containers[0].p = path.Builder{}.Append(containers[0].displayName)
-    containers[0].expectedPath = containers[0].displayName
+    dn := containers[0].displayName
+
+    apndP := dn
+    if useIDInPath {
+        apndP = containers[0].id
+    }
+
+    containers[0].p = path.Builder{}.Append(apndP)
+    containers[0].expectedPath = apndP
+    containers[0].l = path.Builder{}.Append(dn)
+    containers[0].expectedLocation = dn
 
     for i := 1; i < len(containers); i++ {
+        dn := containers[i].displayName
+
+        apndP := dn
+        if useIDInPath {
+            apndP = containers[i].id
+        }
+
         containers[i].parentID = containers[i-1].id
-        containers[i].expectedPath = stdpath.Join(
-            containers[i-1].expectedPath,
-            containers[i].displayName,
-        )
+        containers[i].expectedPath = stdpath.Join(containers[i-1].expectedPath, apndP)
+        containers[i].expectedLocation = stdpath.Join(containers[i-1].expectedLocation, dn)
     }
 
     resolver := newContainerResolver()
@@ -303,13 +312,16 @@ func resolverWithContainers(numContainers int) (*containerResolver, []*mockCache
 type ConfiguredFolderCacheUnitSuite struct {
     suite.Suite
 
     fc *containerResolver
+    fcWithID *containerResolver
 
     allContainers []*mockCachedContainer
+    containersWithID []*mockCachedContainer
 }
 
 func (suite *ConfiguredFolderCacheUnitSuite) SetupTest() {
-    suite.fc, suite.allContainers = resolverWithContainers(4)
+    suite.fc, suite.allContainers = resolverWithContainers(4, false)
+    suite.fcWithID, suite.containersWithID = resolverWithContainers(4, true)
 }
 
 func TestConfiguredFolderCacheUnitSuite(t *testing.T) {
@@ -339,8 +351,8 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestDepthLimit() {
 
     for _, test := range table {
         suite.T().Run(test.name, func(t *testing.T) {
-            resolver, containers := resolverWithContainers(test.numContainers)
-            _, err := resolver.IDToPath(ctx, containers[len(containers)-1].id)
+            resolver, containers := resolverWithContainers(test.numContainers, false)
+            _, _, err := resolver.IDToPath(ctx, containers[len(containers)-1].id, false)
             test.check(t, err)
         })
     }
@@ -352,7 +364,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestPopulatePaths() {
 
     t := suite.T()
 
-    require.NoError(t, suite.fc.populatePaths(ctx))
+    require.NoError(t, suite.fc.populatePaths(ctx, false))
 
     items := suite.fc.Items()
     gotPaths := make([]string, 0, len(items))
@@ -375,10 +387,24 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderNoPathsCached
 
     for _, c := range suite.allContainers {
         suite.T().Run(*c.GetDisplayName(), func(t *testing.T) {
-            p, err := suite.fc.IDToPath(ctx, c.id)
+            p, l, err := suite.fc.IDToPath(ctx, c.id, false)
             require.NoError(t, err)
 
             assert.Equal(t, c.expectedPath, p.String())
+            assert.Equal(t, c.expectedLocation, l.String())
+        })
+    }
+}
+
+func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderNoPathsCached_useID() {
+    ctx, flush := tester.NewContext()
+    defer flush()
+
+    for _, c := range suite.containersWithID {
+        suite.T().Run(*c.GetDisplayName(), func(t *testing.T) {
+            p, l, err := suite.fcWithID.IDToPath(ctx, c.id, true)
+            require.NoError(t, err)
+            assert.Equal(t, c.expectedPath, p.String())
+            assert.Equal(t, c.expectedLocation, l.String())
         })
     }
 }
@@ -390,17 +416,37 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderCachesPaths()
     t := suite.T()
     c := suite.allContainers[len(suite.allContainers)-1]
 
-    p, err := suite.fc.IDToPath(ctx, c.id)
+    p, l, err := suite.fc.IDToPath(ctx, c.id, false)
     require.NoError(t, err)
 
     assert.Equal(t, c.expectedPath, p.String())
+    assert.Equal(t, c.expectedLocation, l.String())
 
     c.parentID = "foo"
 
-    p, err = suite.fc.IDToPath(ctx, c.id)
+    p, l, err = suite.fc.IDToPath(ctx, c.id, false)
     require.NoError(t, err)
 
     assert.Equal(t, c.expectedPath, p.String())
+    assert.Equal(t, c.expectedLocation, l.String())
+}
+
+func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderCachesPaths_useID() {
+    ctx, flush := tester.NewContext()
+    defer flush()
+
+    t := suite.T()
+    c := suite.containersWithID[len(suite.containersWithID)-1]
+
+    p, l, err := suite.fcWithID.IDToPath(ctx, c.id, true)
+    require.NoError(t, err)
+    assert.Equal(t, c.expectedPath, p.String())
+    assert.Equal(t, c.expectedLocation, l.String())
+
+    c.parentID = "foo"
+
+    p, l, err = suite.fcWithID.IDToPath(ctx, c.id, true)
+    require.NoError(t, err)
+    assert.Equal(t, c.expectedPath, p.String())
+    assert.Equal(t, c.expectedLocation, l.String())
 }
 
 func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsParentNotFound() {
@@ -413,7 +459,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsParentN
 
     delete(suite.fc.cache, almostLast.id)
 
-    _, err := suite.fc.IDToPath(ctx, last.id)
+    _, _, err := suite.fc.IDToPath(ctx, last.id, false)
     assert.Error(t, err)
 }
 
@@ -423,7 +469,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsNotFoun
 
     t := suite.T()
 
-    _, err := suite.fc.IDToPath(ctx, "foo")
+    _, _, err := suite.fc.IDToPath(ctx, "foo", false)
     assert.Error(t, err)
 }
 
@@ -431,20 +477,26 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestAddToCache() {
     ctx, flush := tester.NewContext()
     defer flush()
 
-    t := suite.T()
-
-    last := suite.allContainers[len(suite.allContainers)-1]
-
-    m := newMockCachedContainer("testAddFolder")
+    var (
+        dest = "testAddFolder"
+        t    = suite.T()
+        last = suite.allContainers[len(suite.allContainers)-1]
+        m    = newMockCachedContainer(dest)
+    )
 
     m.parentID = last.id
     m.expectedPath = stdpath.Join(last.expectedPath, m.displayName)
+    m.expectedLocation = stdpath.Join(last.expectedPath, m.displayName)
 
-    require.NoError(t, suite.fc.AddToCache(ctx, m))
+    require.Empty(t, suite.fc.DestinationNameToID(dest), "destination not yet added to cache")
+    require.NoError(t, suite.fc.AddToCache(ctx, m, false))
+    require.Empty(t, suite.fc.DestinationNameToID(dest),
+        "destination id from cache, still empty, because this is not a calendar")
 
-    p, err := suite.fc.IDToPath(ctx, m.id)
+    p, l, err := suite.fc.IDToPath(ctx, m.id, false)
     require.NoError(t, err)
     assert.Equal(t, m.expectedPath, p.String())
+    assert.Equal(t, m.expectedLocation, l.String())
 }
 
 // ---------------------------------------------------------------------------
@@ -506,32 +558,35 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
         pathFunc2    func(t *testing.T) path.Path
         category     path.CategoryType
         folderPrefix string
+        useIDForPath bool
     }{
         {
             name:     "Mail Cache Test",
             category: path.EmailCategory,
             pathFunc1: func(t *testing.T) path.Path {
-                pth, err := path.Builder{}.Append("Griffindor").
-                    Append("Croix").ToDataLayerExchangePathForCategory(
-                    suite.credentials.AzureTenantID,
-                    user,
-                    path.EmailCategory,
-                    false,
-                )
+                pth, err := path.Builder{}.
+                    Append("Griffindor").
+                    Append("Croix").
+                    ToDataLayerExchangePathForCategory(
+                        suite.credentials.AzureTenantID,
+                        user,
+                        path.EmailCategory,
+                        false)
                 require.NoError(t, err)
 
                 return pth
             },
             pathFunc2: func(t *testing.T) path.Path {
-                pth, err := path.Builder{}.Append("Griffindor").
-                    Append("Felicius").ToDataLayerExchangePathForCategory(
-                    suite.credentials.AzureTenantID,
-                    user,
-                    path.EmailCategory,
-                    false,
-                )
+                pth, err := path.Builder{}.
+                    Append("Griffindor").
+                    Append("Felicius").
+                    ToDataLayerExchangePathForCategory(
+                        suite.credentials.AzureTenantID,
+                        user,
+                        path.EmailCategory,
+                        false)
                 require.NoError(t, err)
 
                 return pth
             },
         },
@@ -539,63 +594,65 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
             name:     "Contact Cache Test",
             category: path.ContactsCategory,
             pathFunc1: func(t *testing.T) path.Path {
-                aPath, err := path.Builder{}.Append("HufflePuff").
+                aPath, err := path.Builder{}.
+                    Append("HufflePuff").
                     ToDataLayerExchangePathForCategory(
                         suite.credentials.AzureTenantID,
                         user,
                         path.ContactsCategory,
-                        false,
-                    )
-
+                        false)
                 require.NoError(t, err)
 
                 return aPath
             },
             pathFunc2: func(t *testing.T) path.Path {
-                aPath, err := path.Builder{}.Append("Ravenclaw").
+                aPath, err := path.Builder{}.
+                    Append("Ravenclaw").
                     ToDataLayerExchangePathForCategory(
                         suite.credentials.AzureTenantID,
                         user,
                         path.ContactsCategory,
-                        false,
-                    )
-
+                        false)
                 require.NoError(t, err)
 
                 return aPath
             },
         },
         {
             name:         "Event Cache Test",
             category:     path.EventsCategory,
+            useIDForPath: true,
             pathFunc1: func(t *testing.T) path.Path {
-                aPath, err := path.Builder{}.Append("Durmstrang").
+                aPath, err := path.Builder{}.
+                    Append("Durmstrang").
                     ToDataLayerExchangePathForCategory(
                         suite.credentials.AzureTenantID,
                         user,
                         path.EventsCategory,
-                        false,
-                    )
+                        false)
                 require.NoError(t, err)
 
                 return aPath
             },
             pathFunc2: func(t *testing.T) path.Path {
-                aPath, err := path.Builder{}.Append("Beauxbatons").
+                aPath, err := path.Builder{}.
+                    Append("Beauxbatons").
                     ToDataLayerExchangePathForCategory(
                         suite.credentials.AzureTenantID,
                         user,
                         path.EventsCategory,
-                        false,
-                    )
+                        false)
                 require.NoError(t, err)
 
                 return aPath
             },
-            folderPrefix: calendarOthersFolder,
         },
     }
 )
 
     for _, test := range tests {
         suite.T().Run(test.name, func(t *testing.T) {
-            folderID, err := CreateContainerDestinaion(
+            folderID, err := CreateContainerDestination(
                 ctx,
                 m365,
                 test.pathFunc1(t),
@@ -605,21 +662,26 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
 
             resolver := directoryCaches[test.category]
 
-            _, err = resolver.IDToPath(ctx, folderID)
+            _, _, err = resolver.IDToPath(ctx, folderID, test.useIDForPath)
             assert.NoError(t, err)
 
-            secondID, err := CreateContainerDestinaion(
+            parentContainer := folderName
+            if test.useIDForPath {
+                parentContainer = folderID
+            }
+
+            secondID, err := CreateContainerDestination(
                 ctx,
                 m365,
                 test.pathFunc2(t),
-                folderName,
+                parentContainer,
                 directoryCaches)
             require.NoError(t, err)
 
-            _, err = resolver.IDToPath(ctx, secondID)
+            _, _, err = resolver.IDToPath(ctx, secondID, test.useIDForPath)
             require.NoError(t, err)
 
-            p := stdpath.Join(test.folderPrefix, folderName)
+            p := stdpath.Join(test.folderPrefix, parentContainer)
             _, ok := resolver.PathInCache(p)
             require.True(t, ok, "looking for path in cache: %s", p)
         })
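The test change above captures the restore-side consequence: once a resolver stores calendars by ID, anything that re-derives a cache path — like nesting a second restore destination under the first — must key off the container ID rather than the display name the user supplied. A small self-contained model of that branch (hypothetical names and values, not the repo's API):

```go
package main

import (
	"fmt"
	stdpath "path"
)

// pickParentContainer mirrors the integration test above: when a resolver
// stores folders by ID, lookups into the path cache must also use the ID,
// not the display name the user typed.
func pickParentContainer(folderName, folderID string, useIDForPath bool) string {
	if useIDForPath {
		return folderID
	}
	return folderName
}

func main() {
	// Toy path cache: storage path -> container ID.
	cache := map[string]string{
		stdpath.Join("AAMkAparent", "AAMkAchild"): "AAMkAchild",
	}

	parent := pickParentContainer("Corso_Restore", "AAMkAparent", true)
	p := stdpath.Join(parent, "AAMkAchild")

	id, ok := cache[p]
	fmt.Println(id, ok) // AAMkAchild true
}
```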
@@ -3,9 +3,8 @@ package exchange
 import (
     "context"
     "encoding/json"
-    "fmt"
 
-    "github.com/hashicorp/go-multierror"
+    "github.com/alcionai/clues"
     "github.com/pkg/errors"
 
     "github.com/alcionai/corso/src/internal/connector/exchange/api"
@@ -15,6 +14,7 @@ import (
     "github.com/alcionai/corso/src/internal/observe"
     "github.com/alcionai/corso/src/pkg/account"
     "github.com/alcionai/corso/src/pkg/control"
+    "github.com/alcionai/corso/src/pkg/fault"
     "github.com/alcionai/corso/src/pkg/path"
     "github.com/alcionai/corso/src/pkg/selectors"
 )
@@ -90,7 +90,7 @@ func parseMetadataCollections(
     for {
         select {
         case <-ctx.Done():
-            return nil, errors.Wrap(ctx.Err(), "parsing collection metadata")
+            return nil, clues.Wrap(ctx.Err(), "parsing collection metadata").WithClues(ctx)
 
         case item, ok := <-items:
             if !ok {
@@ -105,13 +105,13 @@ func parseMetadataCollections(
 
             err := json.NewDecoder(item.ToReader()).Decode(&m)
             if err != nil {
-                return nil, errors.New("decoding metadata json")
+                return nil, clues.New("decoding metadata json").WithClues(ctx)
             }
 
             switch item.UUID() {
             case graph.PreviousPathFileName:
                 if _, ok := found[category]["path"]; ok {
-                    return nil, errors.Errorf("multiple versions of %s path metadata", category)
+                    return nil, clues.Wrap(clues.New(category.String()), "multiple versions of path metadata").WithClues(ctx)
                 }
 
                 for k, p := range m {
@@ -122,7 +122,7 @@ func parseMetadataCollections(
 
             case graph.DeltaURLsFileName:
                 if _, ok := found[category]["delta"]; ok {
-                    return nil, errors.Errorf("multiple versions of %s delta metadata", category)
+                    return nil, clues.Wrap(clues.New(category.String()), "multiple versions of delta metadata").WithClues(ctx)
                 }
 
                 for k, d := range m {
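The error plumbing above moves from pkg/errors to github.com/alcionai/clues, which folds structured context carried on the ctx into the error itself. A minimal sketch limited to the call shapes that appear in this hunk — the shapes only, not a tour of the library:

```go
package main

import (
	"context"
	"errors"
	"fmt"

	"github.com/alcionai/clues"
)

// wrapStyle shows the three call shapes adopted above.
func wrapStyle(ctx context.Context) []error {
	base := errors.New("graph call failed")

	return []error{
		clues.New("decoding metadata json").WithClues(ctx),             // fresh error + ctx values
		clues.Wrap(base, "parsing collection metadata").WithClues(ctx), // wrap with a message
		clues.Stack(base),                                              // stack without a new message
	}
}

func main() {
	for _, err := range wrapStyle(context.Background()) {
		fmt.Println(err)
	}
}
```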
@@ -167,16 +167,16 @@ func DataCollections(
     acct account.M365Config,
     su support.StatusUpdater,
     ctrlOpts control.Options,
+    errs *fault.Errors,
 ) ([]data.BackupCollection, map[string]struct{}, error) {
     eb, err := selector.ToExchangeBackup()
     if err != nil {
-        return nil, nil, errors.Wrap(err, "exchangeDataCollection: parsing selector")
+        return nil, nil, clues.Wrap(err, "exchange dataCollection selector").WithClues(ctx)
     }
 
     var (
         user        = selector.DiscreteOwner
         collections = []data.BackupCollection{}
-        errs        error
     )
 
     cdps, err := parseMetadataCollections(ctx, metadata)
@@ -185,26 +185,27 @@ func DataCollections(
     }
 
     for _, scope := range eb.Scopes() {
-        dps := cdps[scope.Category().PathType()]
+        if errs.Failed() {
+            break
+        }
 
         dcs, err := createCollections(
             ctx,
             acct,
             user,
             scope,
-            dps,
+            cdps[scope.Category().PathType()],
             ctrlOpts,
             su)
         if err != nil {
-            return nil, nil, support.WrapAndAppend(user, err, errs)
+            errs.Add(err)
+            continue
         }
 
         collections = append(collections, dcs...)
     }
 
-    // Exchange does not require adding items to the global exclude list so always
-    // return nil.
-    return collections, nil, errs
+    return collections, nil, errs.Err()
 }
 
@@ -216,7 +217,7 @@ func getterByType(ac api.Client, category path.CategoryType) (addedAndRemovedIte
     case path.ContactsCategory:
         return ac.Contacts(), nil
     default:
-        return nil, fmt.Errorf("category %s not supported by getFetchIDFunc", category)
+        return nil, clues.Wrap(clues.New(category.String()), "category not supported")
     }
 }
 
@@ -233,7 +234,6 @@ func createCollections(
     su support.StatusUpdater,
 ) ([]data.BackupCollection, error) {
     var (
-        errs           *multierror.Error
         allCollections = make([]data.BackupCollection, 0)
         ac             = api.Client{Credentials: creds}
         category       = scope.Category().PathType()
@@ -241,7 +241,7 @@ func createCollections(
 
     getter, err := getterByType(ac, category)
     if err != nil {
-        return nil, err
+        return nil, clues.Stack(err).WithClues(ctx)
     }
 
     // Create collection of ExchangeDataCollection
@@ -262,7 +262,7 @@ func createCollections(
 
     resolver, err := PopulateExchangeContainerResolver(ctx, qp)
     if err != nil {
-        return nil, errors.Wrap(err, "getting folder cache")
+        return nil, errors.Wrap(err, "populating container cache")
     }
 
     err = filterContainersAndFillCollections(
@@ -275,7 +275,6 @@ func createCollections(
         scope,
         dps,
         ctrlOpts)
-
     if err != nil {
         return nil, errors.Wrap(err, "filling collections")
     }
@@ -286,5 +285,5 @@ func createCollections(
         allCollections = append(allCollections, coll)
     }
 
-    return allCollections, errs.ErrorOrNil()
+    return allCollections, nil
 }
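DataCollections now threads a *fault.Errors bus through the scope loop: recoverable failures are recorded with Add and the remaining scopes still run, while Failed short-circuits the loop once a hard failure is present. The toy accumulator below models only that control flow — it is not corso's fault package:

```go
package main

import (
	"errors"
	"fmt"
)

// bus is a stand-in for the fault.Errors accumulator used above.
type bus struct{ errs []error }

func (b *bus) Failed() bool  { return false } // toy: never records a hard failure
func (b *bus) Add(err error) { b.errs = append(b.errs, err) }
func (b *bus) Err() error    { return errors.Join(b.errs...) }

func main() {
	scopes := []string{"email", "contacts", "events"}
	collected := []string{}
	errs := &bus{}

	for _, s := range scopes {
		if errs.Failed() {
			break // a hard failure stops further scopes
		}

		if s == "contacts" { // pretend this scope errors
			errs.Add(fmt.Errorf("creating %s collections", s))
			continue // best effort: keep processing the rest
		}

		collected = append(collected, s)
	}

	fmt.Println(collected, errs.Err()) // [email events] creating contacts collections
}
```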
@@ -9,6 +9,7 @@ import (
     "github.com/stretchr/testify/require"
     "github.com/stretchr/testify/suite"
 
+    "github.com/alcionai/corso/src/internal/connector/exchange/api"
     "github.com/alcionai/corso/src/internal/connector/graph"
     "github.com/alcionai/corso/src/internal/connector/support"
     "github.com/alcionai/corso/src/internal/data"
@@ -274,8 +275,8 @@ func (suite *DataCollectionsIntegrationSuite) TestMailFetch() {
                 continue
             }
 
-            require.NotEmpty(t, c.FullPath().Folder())
-            folder := c.FullPath().Folder()
+            require.NotEmpty(t, c.FullPath().Folder(false))
+            folder := c.FullPath().Folder(false)
 
             delete(test.folderNames, folder)
         }
@@ -507,7 +508,7 @@ func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression
             continue
         }
 
-        assert.Equal(t, edc.FullPath().Folder(), DefaultContactFolder)
+        assert.Equal(t, edc.FullPath().Folder(false), DefaultContactFolder)
         assert.NotZero(t, count)
     }
 
@@ -527,13 +528,35 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
 
     users := []string{suite.user}
 
+    ac, err := api.NewClient(acct)
+    require.NoError(suite.T(), err, "creating client")
+
+    var (
+        calID  string
+        bdayID string
+    )
+
+    fn := func(gcf graph.CacheFolder) error {
+        if *gcf.GetDisplayName() == DefaultCalendar {
+            calID = *gcf.GetId()
+        }
+
+        if *gcf.GetDisplayName() == "Birthdays" {
+            bdayID = *gcf.GetId()
+        }
+
+        return nil
+    }
+
+    require.NoError(suite.T(), ac.Events().EnumerateContainers(ctx, suite.user, DefaultCalendar, fn))
+
     tests := []struct {
         name, expected string
         scope          selectors.ExchangeScope
     }{
         {
             name:     "Default Event Calendar",
-            expected: DefaultCalendar,
+            expected: calID,
             scope: selectors.NewExchangeBackup(users).EventCalendars(
                 []string{DefaultCalendar},
                 selectors.PrefixMatch(),
@@ -541,9 +564,9 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
         },
         {
             name:     "Birthday Calendar",
-            expected: calendarOthersFolder + "/Birthdays",
+            expected: bdayID,
             scope: selectors.NewExchangeBackup(users).EventCalendars(
-                []string{calendarOthersFolder + "/Birthdays"},
+                []string{"Birthdays"},
                 selectors.PrefixMatch(),
             )[0],
         },
@@ -571,9 +594,9 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
 
             if edc.FullPath().Service() != path.ExchangeMetadataService {
                 isMetadata = true
-                assert.Equal(t, test.expected, edc.FullPath().Folder())
+                assert.Equal(t, test.expected, edc.FullPath().Folder(false))
             } else {
-                assert.Equal(t, "", edc.FullPath().Folder())
+                assert.Equal(t, "", edc.FullPath().Folder(false))
             }
 
             for item := range edc.Items() {
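Because storage folders are now IDs, the regression test above first enumerates containers to translate well-known display names into their IDs. That callback-index pattern, in a self-contained form (stand-in types; EnumerateContainers itself is the repo's API and is only modeled here):

```go
package main

import "fmt"

// cacheFolder is a simplified stand-in for graph.CacheFolder.
type cacheFolder struct{ id, displayName string }

// enumerate models a container enumerator: it walks every calendar and
// hands each one to the callback.
func enumerate(all []cacheFolder, fn func(cacheFolder) error) error {
	for _, cf := range all {
		if err := fn(cf); err != nil {
			return err
		}
	}
	return nil
}

func main() {
	calendars := []cacheFolder{
		{id: "idCal", displayName: "Calendar"},
		{id: "idBday", displayName: "Birthdays"},
	}

	// The test uses this pattern to translate the well-known display names
	// into the IDs that now appear in storage paths.
	index := map[string]string{}
	_ = enumerate(calendars, func(cf cacheFolder) error {
		index[cf.displayName] = cf.id
		return nil
	})

	fmt.Println(index["Calendar"], index["Birthdays"]) // idCal idBday
}
```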
@@ -14,9 +14,10 @@ var _ graph.ContainerResolver = &eventCalendarCache{}
 
 type eventCalendarCache struct {
     *containerResolver
     enumer containersEnumerator
     getter containerGetter
     userID string
+    newAdditions map[string]string
 }
 
 // init ensures that the structure's fields are initialized.
@@ -44,7 +45,10 @@ func (ecc *eventCalendarCache) populateEventRoot(ctx context.Context) error {
         return errors.Wrap(err, "fetching calendar "+support.ConnectorStackErrorTrace(err))
     }
 
-    temp := graph.NewCacheFolder(f, path.Builder{}.Append(container))
+    temp := graph.NewCacheFolder(
+        f,
+        path.Builder{}.Append(*f.GetId()),          // storage path
+        path.Builder{}.Append(*f.GetDisplayName())) // display location
     if err := ecc.addFolder(temp); err != nil {
         return errors.Wrap(err, "initializing calendar resolver")
     }
@@ -68,16 +72,12 @@ func (ecc *eventCalendarCache) Populate(
         ctx,
         ecc.userID,
         "",
-        func(cf graph.CacheFolder) error {
-            cf.SetPath(path.Builder{}.Append(calendarOthersFolder, *cf.GetDisplayName()))
-            return ecc.addFolder(cf)
-        },
-    )
+        ecc.addFolder)
     if err != nil {
         return errors.Wrap(err, "enumerating containers")
     }
 
-    if err := ecc.populatePaths(ctx); err != nil {
+    if err := ecc.populatePaths(ctx, true); err != nil {
         return errors.Wrap(err, "establishing calendar paths")
     }
 
@@ -86,23 +86,40 @@ func (ecc *eventCalendarCache) Populate(
 
 // AddToCache adds container to map in field 'cache'
 // @returns error iff the required values are not accessible.
-func (ecc *eventCalendarCache) AddToCache(ctx context.Context, f graph.Container) error {
+func (ecc *eventCalendarCache) AddToCache(ctx context.Context, f graph.Container, useIDInPath bool) error {
     if err := checkIDAndName(f); err != nil {
         return errors.Wrap(err, "validating container")
     }
 
-    temp := graph.NewCacheFolder(f, path.Builder{}.Append(calendarOthersFolder, *f.GetDisplayName()))
+    temp := graph.NewCacheFolder(
+        f,
+        path.Builder{}.Append(*f.GetId()),          // storage path
+        path.Builder{}.Append(*f.GetDisplayName())) // display location
+
+    if len(ecc.newAdditions) == 0 {
+        ecc.newAdditions = map[string]string{}
+    }
+
+    ecc.newAdditions[*f.GetDisplayName()] = *f.GetId()
+
     if err := ecc.addFolder(temp); err != nil {
+        delete(ecc.newAdditions, *f.GetDisplayName())
         return errors.Wrap(err, "adding container")
     }
 
     // Populate the path for this entry so calls to PathInCache succeed no matter
     // when they're made.
-    _, err := ecc.IDToPath(ctx, *f.GetId())
+    _, _, err := ecc.IDToPath(ctx, *f.GetId(), true)
     if err != nil {
+        delete(ecc.newAdditions, *f.GetDisplayName())
        return errors.Wrap(err, "setting path to container id")
     }
 
     return nil
 }
+
+// DestinationNameToID returns the m365 ID of a calendar added to the cache
+// during this session, keyed by its display name.
+func (ecc *eventCalendarCache) DestinationNameToID(dest string) string {
+    return ecc.newAdditions[dest]
+}
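The calendar cache's newAdditions map is what backs DestinationNameToID: entries are recorded up front and rolled back if any later cache step fails, so a name-to-ID lookup never reports a calendar that didn't actually land in the cache. The pattern in isolation (simplified types, not the repo's):

```go
package main

import (
	"errors"
	"fmt"
)

// calendarAdditions models eventCalendarCache.newAdditions.
type calendarAdditions struct {
	newAdditions map[string]string
}

// addToCache records the display-name -> ID mapping before the fallible
// cache work, and deletes the entry again if any later step fails.
func (c *calendarAdditions) addToCache(name, id string, addFolder func() error) error {
	if c.newAdditions == nil {
		c.newAdditions = map[string]string{}
	}

	c.newAdditions[name] = id

	if err := addFolder(); err != nil {
		delete(c.newAdditions, name) // roll back; lookup must not see it
		return err
	}

	return nil
}

func (c *calendarAdditions) destinationNameToID(dest string) string {
	return c.newAdditions[dest]
}

func main() {
	cc := &calendarAdditions{}

	_ = cc.addToCache("Restored", "id1", func() error { return nil })
	err := cc.addToCache("Broken", "id2", func() error { return errors.New("boom") })

	fmt.Println(cc.destinationNameToID("Restored"), err) // id1 boom
	fmt.Println(cc.destinationNameToID("Broken") == "")  // true
}
```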
@@ -77,6 +77,11 @@ type Collection struct {
     // moved. It will be empty on its first retrieval.
     prevPath path.Path
 
+    // locationPath contains the path with human-readable display names,
+    // e.g. "/Inbox/Important" instead of "/abcdxyz123/algha=lgkhal=t".
+    // Currently only implemented for Exchange Calendars.
+    locationPath path.Path
+
     state data.CollectionState
 
     // doNotMergeItems should only be true if the old delta token expired.
@@ -91,7 +96,7 @@ type Collection struct {
 // or notMoved (if they match).
 func NewCollection(
     user string,
-    curr, prev path.Path,
+    curr, prev, location path.Path,
     category path.CategoryType,
     items itemer,
     statusUpdater support.StatusUpdater,
@@ -99,18 +104,19 @@ func NewCollection(
     doNotMergeItems bool,
 ) Collection {
     collection := Collection{
+        added:           make(map[string]struct{}, 0),
         category:        category,
         ctrl:            ctrlOpts,
         data:            make(chan data.Stream, collectionChannelBufferSize),
         doNotMergeItems: doNotMergeItems,
         fullPath:        curr,
-        added:           make(map[string]struct{}, 0),
-        removed:         make(map[string]struct{}, 0),
+        items:           items,
+        locationPath:    location,
         prevPath:        prev,
+        removed:         make(map[string]struct{}, 0),
         state:           data.StateOf(prev, curr),
         statusUpdater:   statusUpdater,
         user:            user,
-        items:           items,
     }
 
     return collection
@@ -128,6 +134,12 @@ func (col *Collection) FullPath() path.Path {
     return col.fullPath
 }
 
+// LocationPath produces the Collection's full path, but with display names
+// instead of IDs in the folders. Only populated for Calendars.
+func (col *Collection) LocationPath() path.Path {
+    return col.locationPath
+}
+
 // TODO(ashmrtn): Fill in with previous path once GraphConnector compares old
 // and new folder hierarchies.
 func (col Collection) PreviousPath() path.Path {
@@ -172,7 +184,7 @@ func (col *Collection) streamItems(ctx context.Context) {
         ctx,
         col.fullPath.Category().String(),
         observe.PII(user),
-        observe.PII(col.fullPath.Folder()))
+        observe.PII(col.fullPath.Folder(false)))
 
     go closer()
 
@@ -331,7 +343,7 @@ func (col *Collection) finishPopulation(ctx context.Context, success int, totalB
             TotalBytes: totalBytes,
         },
         errs,
-        col.fullPath.Folder())
+        col.fullPath.Folder(false))
     logger.Ctx(ctx).Debugw("done streaming items", "status", status.String())
     col.statusUpdater(status)
 }
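Consumers of a Collection now have two views of the same folder: FullPath addresses storage, LocationPath is for showing people. A sketch of the intended split (toy type; per the comment above, only calendars populate the location today):

```go
package main

import "fmt"

// collection is a simplified stand-in for the exchange Collection type.
type collection struct {
	fullPath     string // ID-based storage path
	locationPath string // display-name path; empty except for calendars
}

func (c collection) FullPath() string     { return c.fullPath }
func (c collection) LocationPath() string { return c.locationPath }

// displayFolder picks what to show a person: prefer the human-readable
// location when the producer populated one, else fall back to storage.
func displayFolder(c collection) string {
	if c.LocationPath() != "" {
		return c.LocationPath()
	}
	return c.FullPath()
}

func main() {
	cal := collection{
		fullPath:     "tenant/user/events/AAMkAexample",
		locationPath: "tenant/user/events/Birthdays",
	}
	mail := collection{fullPath: "tenant/user/email/Inbox"}

	fmt.Println(displayFolder(cal))  // tenant/user/events/Birthdays
	fmt.Println(displayFolder(mail)) // tenant/user/email/Inbox
}
```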
@@ -12,8 +12,10 @@ import (
 
     "github.com/alcionai/corso/src/internal/common"
     "github.com/alcionai/corso/src/internal/connector/graph"
+    "github.com/alcionai/corso/src/internal/data"
     "github.com/alcionai/corso/src/internal/tester"
     "github.com/alcionai/corso/src/pkg/backup/details"
+    "github.com/alcionai/corso/src/pkg/control"
     "github.com/alcionai/corso/src/pkg/path"
 )
 
@@ -116,6 +118,70 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataCollection_NewExchange
     suite.Equal(fullPath, edc.FullPath())
 }
 
+func (suite *ExchangeDataCollectionSuite) TestNewCollection_state() {
+    fooP, err := path.Builder{}.
+        Append("foo").
+        ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false)
+    require.NoError(suite.T(), err)
+    barP, err := path.Builder{}.
+        Append("bar").
+        ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false)
+    require.NoError(suite.T(), err)
+    locP, err := path.Builder{}.
+        Append("human-readable").
+        ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false)
+    require.NoError(suite.T(), err)
+
+    table := []struct {
+        name   string
+        prev   path.Path
+        curr   path.Path
+        loc    path.Path
+        expect data.CollectionState
+    }{
+        {
+            name:   "new",
+            curr:   fooP,
+            loc:    locP,
+            expect: data.NewState,
+        },
+        {
+            name:   "not moved",
+            prev:   fooP,
+            curr:   fooP,
+            loc:    locP,
+            expect: data.NotMovedState,
+        },
+        {
+            name:   "moved",
+            prev:   fooP,
+            curr:   barP,
+            loc:    locP,
+            expect: data.MovedState,
+        },
+        {
+            name:   "deleted",
+            prev:   fooP,
+            expect: data.DeletedState,
+        },
+    }
+    for _, test := range table {
+        suite.T().Run(test.name, func(t *testing.T) {
+            c := NewCollection(
+                "u",
+                test.curr, test.prev, test.loc,
+                0,
+                &mockItemer{}, nil,
+                control.Options{},
+                false)
+            assert.Equal(t, test.expect, c.State(), "collection state")
+            assert.Equal(t, test.curr, c.fullPath, "full path")
+            assert.Equal(t, test.prev, c.prevPath, "prev path")
+            assert.Equal(t, test.loc, c.locationPath, "location path")
+        })
+    }
+}
+
 func (suite *ExchangeDataCollectionSuite) TestGetItemWithRetries() {
     table := []struct {
         name string
@@ -38,5 +38,4 @@ const (
     rootFolderAlias      = "msgfolderroot"
     DefaultContactFolder = "Contacts"
     DefaultCalendar      = "Calendar"
-    calendarOthersFolder = "Other Calendars"
 )
@@ -47,6 +47,9 @@ func (suite *CacheResolverSuite) TestPopulate() {
     ac, err := api.NewClient(suite.credentials)
     require.NoError(suite.T(), err)
 
+    cal, err := ac.Events().GetContainerByID(ctx, tester.M365UserID(suite.T()), DefaultCalendar)
+    require.NoError(suite.T(), err)
+
     eventFunc := func(t *testing.T) graph.ContainerResolver {
         return &eventCalendarCache{
             userID: tester.M365UserID(t),
@@ -64,61 +67,61 @@ func (suite *CacheResolverSuite) TestPopulate() {
     }
 
     tests := []struct {
-        name, folderName, root, basePath string
+        name, folderInCache, root, basePath string
         resolverFunc func(t *testing.T) graph.ContainerResolver
         canFind      assert.BoolAssertionFunc
     }{
         {
             name:          "Default Event Cache",
-            folderName:    DefaultCalendar,
+            folderInCache: *cal.GetId(),
             root:          DefaultCalendar,
             basePath:      DefaultCalendar,
             resolverFunc:  eventFunc,
             canFind:       assert.True,
         },
         {
             name:          "Default Event Folder Hidden",
-            root:          DefaultCalendar,
-            folderName:    DefaultContactFolder,
+            folderInCache: DefaultContactFolder,
+            root:          DefaultCalendar,
             canFind:       assert.False,
             resolverFunc:  eventFunc,
         },
         {
             name:          "Name Not in Cache",
-            folderName:    "testFooBarWhoBar",
+            folderInCache: "testFooBarWhoBar",
             root:          DefaultCalendar,
             canFind:       assert.False,
             resolverFunc:  eventFunc,
         },
         {
             name:          "Default Contact Cache",
-            folderName:    DefaultContactFolder,
+            folderInCache: DefaultContactFolder,
             root:          DefaultContactFolder,
             basePath:      DefaultContactFolder,
             canFind:       assert.True,
             resolverFunc:  contactFunc,
         },
         {
             name:          "Default Contact Hidden",
-            folderName:    DefaultContactFolder,
+            folderInCache: DefaultContactFolder,
             root:          DefaultContactFolder,
             canFind:       assert.False,
             resolverFunc:  contactFunc,
         },
         {
             name:          "Name Not in Cache",
-            folderName:    "testFooBarWhoBar",
+            folderInCache: "testFooBarWhoBar",
             root:          DefaultContactFolder,
             canFind:       assert.False,
             resolverFunc:  contactFunc,
         },
     }
     for _, test := range tests {
         suite.T().Run(test.name, func(t *testing.T) {
             resolver := test.resolverFunc(t)
 
             require.NoError(t, resolver.Populate(ctx, test.root, test.basePath))
-            _, isFound := resolver.PathInCache(test.folderName)
+
+            _, isFound := resolver.PathInCache(test.folderInCache)
             test.canFind(t, isFound)
         })
     }
@@ -53,7 +53,9 @@ func (mc *mailFolderCache) populateMailRoot(ctx context.Context) error {
             directory = DefaultMailFolder
         }
 
-        temp := graph.NewCacheFolder(f, path.Builder{}.Append(directory))
+        temp := graph.NewCacheFolder(f,
+            path.Builder{}.Append(directory), // storage path
+            path.Builder{}.Append(directory)) // display location
         if err := mc.addFolder(temp); err != nil {
             return errors.Wrap(err, "adding resolver dir")
         }
@@ -81,7 +83,7 @@ func (mc *mailFolderCache) Populate(
         return errors.Wrap(err, "enumerating containers")
     }
 
-    if err := mc.populatePaths(ctx); err != nil {
+    if err := mc.populatePaths(ctx, false); err != nil {
         return errors.Wrap(err, "populating paths")
     }
 
|
|||||||
// top-level folders right now.
|
// top-level folders right now.
|
||||||
//nolint:lll
|
//nolint:lll
|
||||||
testFolderID = "AAMkAGZmNjNlYjI3LWJlZWYtNGI4Mi04YjMyLTIxYThkNGQ4NmY1MwAuAAAAAADCNgjhM9QmQYWNcI7hCpPrAQDSEBNbUIB9RL6ePDeF3FIYAABl7AqpAAA="
|
testFolderID = "AAMkAGZmNjNlYjI3LWJlZWYtNGI4Mi04YjMyLTIxYThkNGQ4NmY1MwAuAAAAAADCNgjhM9QmQYWNcI7hCpPrAQDSEBNbUIB9RL6ePDeF3FIYAABl7AqpAAA="
|
||||||
|
|
||||||
//nolint:lll
|
//nolint:lll
|
||||||
topFolderID = "AAMkAGZmNjNlYjI3LWJlZWYtNGI4Mi04YjMyLTIxYThkNGQ4NmY1MwAuAAAAAADCNgjhM9QmQYWNcI7hCpPrAQDSEBNbUIB9RL6ePDeF3FIYAAAAAAEIAAA="
|
topFolderID = "AAMkAGZmNjNlYjI3LWJlZWYtNGI4Mi04YjMyLTIxYThkNGQ4NmY1MwAuAAAAAADCNgjhM9QmQYWNcI7hCpPrAQDSEBNbUIB9RL6ePDeF3FIYAAAAAAEIAAA="
|
||||||
|
//nolint:lll
|
||||||
// Full folder path for the folder above.
|
// Full folder path for the folder above.
|
||||||
expectedFolderPath = "toplevel/subFolder/subsubfolder"
|
expectedFolderPath = "toplevel/subFolder/subsubfolder"
|
||||||
)
|
)
|
||||||
@ -94,9 +94,10 @@ func (suite *MailFolderCacheIntegrationSuite) TestDeltaFetch() {
|
|||||||
|
|
||||||
require.NoError(t, mfc.Populate(ctx, test.root, test.path...))
|
require.NoError(t, mfc.Populate(ctx, test.root, test.path...))
|
||||||
|
|
||||||
p, err := mfc.IDToPath(ctx, testFolderID)
|
p, l, err := mfc.IDToPath(ctx, testFolderID, true)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
t.Logf("Path: %s\n", p.String())
|
t.Logf("Path: %s\n", p.String())
|
||||||
|
t.Logf("Location: %s\n", l.String())
|
||||||
|
|
||||||
expectedPath := stdpath.Join(append(test.path, expectedFolderPath)...)
|
expectedPath := stdpath.Join(append(test.path, expectedFolderPath)...)
|
||||||
assert.Equal(t, expectedPath, p.String())
|
assert.Equal(t, expectedPath, p.String())
|
||||||
|
|||||||
@ -86,44 +86,70 @@ func PopulateExchangeContainerResolver(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Returns true if the container passes the scope comparison and should be included.
|
// Returns true if the container passes the scope comparison and should be included.
|
||||||
// Also returns the path representing the directory.
|
// Returns:
|
||||||
|
// - the path representing the directory as it should be stored in the repository.
|
||||||
|
// - the human-readable path using display names.
|
||||||
|
// - true if the path passes the scope comparison.
|
||||||
func includeContainer(
|
func includeContainer(
|
||||||
qp graph.QueryParams,
|
qp graph.QueryParams,
|
||||||
c graph.CachedContainer,
|
c graph.CachedContainer,
|
||||||
scope selectors.ExchangeScope,
|
scope selectors.ExchangeScope,
|
||||||
) (path.Path, bool) {
|
) (path.Path, path.Path, bool) {
|
||||||
var (
|
var (
|
||||||
category = scope.Category().PathType()
|
|
||||||
directory string
|
directory string
|
||||||
|
locPath path.Path
|
||||||
|
category = scope.Category().PathType()
|
||||||
pb = c.Path()
|
pb = c.Path()
|
||||||
|
loc = c.Location()
|
||||||
)
|
)
|
||||||
|
|
||||||
// Clause ensures that DefaultContactFolder is inspected properly
|
// Clause ensures that DefaultContactFolder is inspected properly
|
||||||
if category == path.ContactsCategory && *c.GetDisplayName() == DefaultContactFolder {
|
if category == path.ContactsCategory && *c.GetDisplayName() == DefaultContactFolder {
|
||||||
pb = c.Path().Append(DefaultContactFolder)
|
pb = pb.Append(DefaultContactFolder)
|
||||||
|
|
||||||
|
if loc != nil {
|
||||||
|
loc = loc.Append(DefaultContactFolder)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
dirPath, err := pb.ToDataLayerExchangePathForCategory(
|
dirPath, err := pb.ToDataLayerExchangePathForCategory(
|
||||||
qp.Credentials.AzureTenantID,
|
qp.Credentials.AzureTenantID,
|
||||||
qp.ResourceOwner,
|
qp.ResourceOwner,
|
||||||
category,
|
category,
|
||||||
false,
|
false)
|
||||||
)
|
|
||||||
// Containers without a path (e.g. Root mail folder) always err here.
|
// Containers without a path (e.g. Root mail folder) always err here.
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, false
|
return nil, nil, false
|
||||||
}
|
}
|
||||||
|
|
||||||
directory = pb.String()
|
directory = dirPath.Folder(false)
|
||||||
|
|
||||||
|
if loc != nil {
|
||||||
|
locPath, err = loc.ToDataLayerExchangePathForCategory(
|
||||||
|
qp.Credentials.AzureTenantID,
|
||||||
|
qp.ResourceOwner,
|
||||||
|
category,
|
||||||
|
false)
|
||||||
|
// Containers without a path (e.g. Root mail folder) always err here.
|
||||||
|
if err != nil {
|
||||||
|
return nil, nil, false
|
||||||
|
}
|
||||||
|
|
||||||
|
directory = locPath.Folder(false)
|
||||||
|
}
|
||||||
|
|
||||||
|
var ok bool
|
||||||
|
|
||||||
switch category {
|
switch category {
|
||||||
case path.EmailCategory:
|
case path.EmailCategory:
|
||||||
return dirPath, scope.Matches(selectors.ExchangeMailFolder, directory)
|
ok = scope.Matches(selectors.ExchangeMailFolder, directory)
|
||||||
case path.ContactsCategory:
|
case path.ContactsCategory:
|
||||||
return dirPath, scope.Matches(selectors.ExchangeContactFolder, directory)
|
ok = scope.Matches(selectors.ExchangeContactFolder, directory)
|
||||||
case path.EventsCategory:
|
case path.EventsCategory:
|
||||||
return dirPath, scope.Matches(selectors.ExchangeEventCalendar, directory)
|
ok = scope.Matches(selectors.ExchangeEventCalendar, directory)
|
||||||
default:
|
default:
|
||||||
return dirPath, false
|
return nil, nil, false
|
||||||
}
|
}
|
||||||
|
|
||||||
|
return dirPath, locPath, ok
|
||||||
}
|
}
|
||||||
|
|||||||
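The selection logic above is why the location path exists at all: selectors are written against what users see, so scope matching prefers the display-name folder and only falls back to the storage folder when no location is cached. Roughly, with a self-contained stand-in for scope.Matches:

```go
package main

import (
	"fmt"
	"strings"
)

// matchScope models the includeContainer selection: scope patterns are
// written against display names, so matching runs on the location folder
// when one exists, falling back to the storage folder otherwise.
func matchScope(prefixes []string, storageFolder, locationFolder string) bool {
	directory := storageFolder
	if locationFolder != "" {
		directory = locationFolder
	}

	for _, p := range prefixes {
		if strings.HasPrefix(directory, p) {
			return true
		}
	}
	return false
}

func main() {
	// The calendar's storage folder is an opaque ID, but the selector
	// "Birthdays" still matches via the location folder.
	fmt.Println(matchScope([]string{"Birthdays"}, "AAMkAexample", "Birthdays")) // true
	fmt.Println(matchScope([]string{"Birthdays"}, "AAMkAexample", ""))          // false
}
```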
@@ -70,7 +70,7 @@ func filterContainersAndFillCollections(
         cID := *c.GetId()
         delete(tombstones, cID)
 
-        currPath, ok := includeContainer(qp, c, scope)
+        currPath, locPath, ok := includeContainer(qp, c, scope)
         // Only create a collection if the path matches the scope.
         if !ok {
             continue
@@ -110,10 +110,15 @@ func filterContainersAndFillCollections(
             deltaURLs[cID] = newDelta.URL
         }
 
+        if qp.Category != path.EventsCategory {
+            locPath = nil
+        }
+
         edc := NewCollection(
             qp.ResourceOwner,
             currPath,
             prevPath,
+            locPath,
             scope.Category().PathType(),
             ibt,
             statusUpdater,
@@ -167,6 +172,7 @@ func filterContainersAndFillCollections(
             qp.ResourceOwner,
             nil, // marks the collection as deleted
             prevPath,
+            nil, // tombstones don't need a location
             scope.Category().PathType(),
             ibt,
             statusUpdater,
@ -59,6 +59,7 @@ var _ graph.ContainerResolver = &mockResolver{}
|
|||||||
type (
|
type (
|
||||||
mockResolver struct {
|
mockResolver struct {
|
||||||
items []graph.CachedContainer
|
items []graph.CachedContainer
|
||||||
|
added map[string]string
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -76,10 +77,21 @@ func (m mockResolver) Items() []graph.CachedContainer {
|
|||||||
return m.items
|
return m.items
|
||||||
}
|
}
|
||||||
|
|
||||||
func (m mockResolver) AddToCache(context.Context, graph.Container) error { return nil }
|
func (m mockResolver) AddToCache(ctx context.Context, gc graph.Container, b bool) error {
|
||||||
func (m mockResolver) IDToPath(context.Context, string) (*path.Builder, error) { return nil, nil }
|
if len(m.added) == 0 {
|
||||||
func (m mockResolver) PathInCache(string) (string, bool) { return "", false }
|
m.added = map[string]string{}
|
||||||
func (m mockResolver) Populate(context.Context, string, ...string) error { return nil }
|
}
|
||||||
|
|
||||||
|
m.added[*gc.GetDisplayName()] = *gc.GetId()
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
func (m mockResolver) DestinationNameToID(dest string) string { return m.added[dest] }
|
||||||
|
func (m mockResolver) IDToPath(context.Context, string, bool) (*path.Builder, *path.Builder, error) {
|
||||||
|
return nil, nil, nil
|
||||||
|
}
|
||||||
|
func (m mockResolver) PathInCache(string) (string, bool) { return "", false }
|
||||||
|
func (m mockResolver) Populate(context.Context, string, ...string) error { return nil }
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
// tests
|
// tests
|
||||||
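The mock resolver above now records display-name-to-ID pairs on `AddToCache` so that `DestinationNameToID` can resolve them later. A standalone sketch of that cache pattern (toy names, not the corso types):

```go
package main

import "fmt"

// resolver keeps a lazily-created name->ID map, in the spirit of
// mockResolver.added.
type resolver struct {
	added map[string]string
}

// AddToCache records the container's display name against its ID. A pointer
// receiver keeps the lazily-created map attached to the caller's resolver.
func (r *resolver) AddToCache(displayName, id string) {
	if r.added == nil {
		r.added = map[string]string{}
	}

	r.added[displayName] = id
}

// DestinationNameToID returns "" when the name was never cached.
func (r *resolver) DestinationNameToID(dest string) string { return r.added[dest] }

func main() {
	r := &resolver{}
	r.AddToCache("Corso_Restore", "cal-id-42")

	fmt.Println(r.DestinationNameToID("Corso_Restore"))  // cal-id-42
	fmt.Println(r.DestinationNameToID("missing") == "") // true
}
```

The pointer receiver matters: with a value receiver, a map created inside the method would be lost when the method returns.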
@ -11,6 +11,7 @@ import (
 	"github.com/pkg/errors"

 	"github.com/alcionai/corso/src/internal/common"
+	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/connector/exchange/api"
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/connector/support"
@ -71,7 +72,7 @@ func RestoreExchangeContact(

 	response, err := service.Client().UsersById(user).ContactFoldersById(destination).Contacts().Post(ctx, contact, nil)
 	if err != nil {
-		name := *contact.GetGivenName()
+		name := ptr.Val(contact.GetGivenName())

 		return nil, errors.Wrap(
 			err,
@ -146,7 +147,8 @@ func RestoreExchangeEvent(
 			errs = support.WrapAndAppend(
 				fmt.Sprintf(
 					"uploading attachment for message %s: %s",
-					*transformedEvent.GetId(), support.ConnectorStackErrorTrace(err),
+					ptr.Val(transformedEvent.GetId()),
+					support.ConnectorStackErrorTrace(err),
 				),
 				err,
 				errs,
@ -283,12 +285,8 @@ func SendMailToBackStore(

 	for _, attachment := range attached {
 		if err := uploadAttachment(ctx, uploader, attachment); err != nil {
-			if attachment.GetOdataType() != nil &&
-				*attachment.GetOdataType() == "#microsoft.graph.itemAttachment" {
-				var name string
-
-				if attachment.GetName() != nil {
-					name = *attachment.GetName()
-				}
+			if ptr.Val(attachment.GetOdataType()) == "#microsoft.graph.itemAttachment" {
+				name := ptr.Val(attachment.GetName())

 				logger.Ctx(ctx).Infow(
 					"item attachment upload not successful. content not accepted by M365 server",
@ -344,7 +342,7 @@ func RestoreExchangeDataCollections(
 			userCaches = directoryCaches[userID]
 		}

-		containerID, err := CreateContainerDestinaion(
+		containerID, err := CreateContainerDestination(
 			ctx,
 			creds,
 			dc.FullPath(),
@ -400,7 +398,7 @@ func restoreCollection(
 		ctx,
 		category.String(),
 		observe.PII(user),
-		observe.PII(directory.Folder()))
+		observe.PII(directory.Folder(false)))
 	defer closer()
 	defer close(colProgress)

@ -447,10 +445,16 @@ func restoreCollection(
 				continue
 			}

+			var locationRef string
+			if category == path.ContactsCategory {
+				locationRef = itemPath.Folder(false)
+			}
+
 			deets.Add(
 				itemPath.String(),
 				itemPath.ShortRef(),
 				"",
+				locationRef,
 				true,
 				details.ItemInfo{
 					Exchange: info,
@ -461,12 +465,12 @@ func restoreCollection(
 		}
 	}

-// CreateContainerDestinaion builds the destination into the container
+// CreateContainerDestination builds the destination into the container
 // at the provided path. As a precondition, the destination cannot
 // already exist. If it does then an error is returned. The provided
 // containerResolver is updated with the new destination.
 // @ returns the container ID of the new destination container.
-func CreateContainerDestinaion(
+func CreateContainerDestination(
 	ctx context.Context,
 	creds account.M365Config,
 	directory path.Path,
@ -478,7 +482,6 @@ func CreateContainerDestinaion(
 		user           = directory.ResourceOwner()
 		category       = directory.Category()
 		directoryCache = caches[category]
-		newPathFolders = append([]string{destination}, directory.Folders()...)
 	)

 	// TODO(rkeepers): pass the api client into this func, rather than generating one.
@ -489,6 +492,8 @@ func CreateContainerDestinaion(

 	switch category {
 	case path.EmailCategory:
+		folders := append([]string{destination}, directory.Folders()...)
+
 		if directoryCache == nil {
 			acm := ac.Mail()
 			mfc := &mailFolderCache{
@ -505,12 +510,14 @@ func CreateContainerDestinaion(
 		return establishMailRestoreLocation(
 			ctx,
 			ac,
-			newPathFolders,
+			folders,
 			directoryCache,
 			user,
 			newCache)

 	case path.ContactsCategory:
+		folders := append([]string{destination}, directory.Folders()...)
+
 		if directoryCache == nil {
 			acc := ac.Contacts()
 			cfc := &contactFolderCache{
@ -526,12 +533,14 @@ func CreateContainerDestinaion(
 		return establishContactsRestoreLocation(
 			ctx,
 			ac,
-			newPathFolders,
+			folders,
 			directoryCache,
 			user,
 			newCache)

 	case path.EventsCategory:
+		dest := destination
+
 		if directoryCache == nil {
 			ace := ac.Events()
 			ecc := &eventCalendarCache{
@ -542,16 +551,23 @@ func CreateContainerDestinaion(
 			caches[category] = ecc
 			newCache = true
 			directoryCache = ecc
+		} else if did := directoryCache.DestinationNameToID(dest); len(did) > 0 {
+			// calendars are cached by ID in the resolver, not name, so once we have
+			// created the destination calendar, we need to look up its id and use
+			// that for resolver lookups instead of the display name.
+			dest = did
 		}

+		folders := append([]string{dest}, directory.Folders()...)
+
 		return establishEventsRestoreLocation(
 			ctx,
 			ac,
-			newPathFolders,
+			folders,
 			directoryCache,
 			user,
-			newCache,
-		)
+			newCache)

 	default:
 		return "", fmt.Errorf("category: %s not support for exchange cache", category)
 	}
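For the events case above, the destination display name is swapped for the calendar's ID when the resolver already knows it, and the folders slice is built from that. The surrounding `establish*RestoreLocation` helpers share a lookup-else-create shape; a standalone sketch of it, with stand-in types rather than the corso caches:

```go
package main

import (
	"fmt"
)

// cache is a stand-in for the folder/calendar caches: key -> container ID.
type cache map[string]string

func (c cache) PathInCache(key string) (string, bool) {
	id, ok := c[key]
	return id, ok
}

// establishRestoreLocation returns the cached container ID when the key is
// already known, otherwise creates the container and records it.
func establishRestoreLocation(
	c cache,
	key string,
	create func(string) (string, error),
) (string, error) {
	if id, ok := c.PathInCache(key); ok {
		// NOOP if the folder is already in the cache.
		return id, nil
	}

	id, err := create(key)
	if err != nil {
		return "", fmt.Errorf("creating restore container: %w", err)
	}

	c[key] = id

	return id, nil
}

func main() {
	c := cache{}
	create := func(name string) (string, error) { return "id-for-" + name, nil }

	id1, _ := establishRestoreLocation(c, "Corso_Restore", create)
	id2, _ := establishRestoreLocation(c, "Corso_Restore", create) // cache hit
	fmt.Println(id1 == id2)                                        // true
}
```

Caching the created ID is what makes repeated restores into the same destination idempotent.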
@ -604,7 +620,7 @@ func establishMailRestoreLocation(
 	}

 		// NOOP if the folder is already in the cache.
-		if err = mfc.AddToCache(ctx, temp); err != nil {
+		if err = mfc.AddToCache(ctx, temp, false); err != nil {
 			return "", errors.Wrap(err, "adding folder to cache")
 		}
 	}
@ -643,7 +659,7 @@ func establishContactsRestoreLocation(
 			return "", errors.Wrap(err, "populating contact cache")
 		}

-		if err = cfc.AddToCache(ctx, temp); err != nil {
+		if err = cfc.AddToCache(ctx, temp, false); err != nil {
 			return "", errors.Wrap(err, "adding contact folder to cache")
 		}
 	}
@ -660,10 +676,7 @@ func establishEventsRestoreLocation(
 	isNewCache bool,
 ) (string, error) {
 	// Need to prefix with the "Other Calendars" folder so lookup happens properly.
-	cached, ok := ecc.PathInCache(path.Builder{}.Append(
-		calendarOthersFolder,
-		folders[0],
-	).String())
+	cached, ok := ecc.PathInCache(folders[0])
 	if ok {
 		return cached, nil
 	}
@ -681,7 +694,7 @@ func establishEventsRestoreLocation(
 	}

 	displayable := api.CalendarDisplayable{Calendarable: temp}
-	if err = ecc.AddToCache(ctx, displayable); err != nil {
+	if err = ecc.AddToCache(ctx, displayable, true); err != nil {
 		return "", errors.Wrap(err, "adding new calendar to cache")
 	}
 }
@ -1,40 +1,84 @@
 package graph

 import (
+	"context"
+
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
 	"github.com/pkg/errors"

 	"github.com/alcionai/corso/src/pkg/path"
 )

+// Idable represents objects that implement msgraph-sdk-go/models.entityable
+// and have the concept of an ID.
+type Idable interface {
+	GetId() *string
+}
+
+// Descendable represents objects that implement msgraph-sdk-go/models.entityable
+// and have the concept of a "parent folder".
+type Descendable interface {
+	Idable
+	GetParentFolderId() *string
+}
+
+// Displayable represents objects that implement msgraph-sdk-go/models.entityable
+// and have the concept of a display name.
+type Displayable interface {
+	Idable
+	GetDisplayName() *string
+}
+
+type Container interface {
+	Descendable
+	Displayable
+}
+
 // CachedContainer is used for local unit tests but also makes it so that this
 // code can be broken into generic- and service-specific chunks later on to
 // reuse logic in IDToPath.
 type CachedContainer interface {
 	Container
+	// Location contains either the display names for the dirs (if this is a calendar)
+	// or nil
+	Location() *path.Builder
+	SetLocation(*path.Builder)
+	// Path contains either the ids for the dirs (if this is a calendar)
+	// or the display names for the dirs
 	Path() *path.Builder
 	SetPath(*path.Builder)
 }

-// checkRequiredValues is a helper function to ensure that
-// all the pointers are set prior to being called.
-func CheckRequiredValues(c Container) error {
-	idPtr := c.GetId()
-	if idPtr == nil || len(*idPtr) == 0 {
-		return errors.New("folder without ID")
-	}
-
-	ptr := c.GetDisplayName()
-	if ptr == nil || len(*ptr) == 0 {
-		return errors.Errorf("folder %s without display name", *idPtr)
-	}
-
-	ptr = c.GetParentFolderId()
-	if ptr == nil || len(*ptr) == 0 {
-		return errors.Errorf("folder %s without parent ID", *idPtr)
-	}
-
-	return nil
-}
+// ContainerResolver houses functions for getting information about containers
+// from remote APIs (i.e. resolve folder paths with Graph API). Resolvers may
+// cache information about containers.
+type ContainerResolver interface {
+	// IDToPath takes an m365 container ID and converts it to a hierarchical path
+	// to that container. The path has a similar format to paths on the local
+	// file system.
+	IDToPath(ctx context.Context, m365ID string, useIDInPath bool) (*path.Builder, *path.Builder, error)
+
+	// Populate performs initialization steps for the resolver
+	// @param ctx is necessary param for Graph API tracing
+	// @param baseFolderID represents the M365ID base that the resolver will
+	// conclude its search. Default input is "".
+	Populate(ctx context.Context, baseFolderID string, baseContainerPather ...string) error
+
+	// PathInCache performs a look up of a path representation
+	// and returns the m365ID of directory iff the pathString
+	// matches the path of a container within the cache.
+	// @returns bool represents if m365ID was found.
+	PathInCache(pathString string) (string, bool)
+
+	AddToCache(ctx context.Context, m365Container Container, useIDInPath bool) error
+
+	// DestinationNameToID returns the ID of the destination container. Dest is
+	// assumed to be a display name. The ID is only populated if the destination
+	// was added using `AddToCache()`. Returns an empty string if not found.
+	DestinationNameToID(dest string) string
+
+	// Items returns the containers in the cache.
+	Items() []CachedContainer
+}

 // ======================================
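The hunk above moves the `Idable`/`Descendable`/`Displayable`/`Container` interfaces into this file and grows `ContainerResolver` with location-aware methods. A standalone sketch of how a type satisfies the composed `Container` interface (local interface copies and a stub folder; the real implementers are msgraph-sdk-go models):

```go
package main

import "fmt"

// Local copies of the composed graph interfaces.
type Idable interface{ GetId() *string }

type Descendable interface {
	Idable
	GetParentFolderId() *string
}

type Displayable interface {
	Idable
	GetDisplayName() *string
}

type Container interface {
	Descendable
	Displayable
}

// stubFolder satisfies Container by providing all three accessors.
type stubFolder struct{ id, parentID, name string }

func (f stubFolder) GetId() *string             { return &f.id }
func (f stubFolder) GetParentFolderId() *string { return &f.parentID }
func (f stubFolder) GetDisplayName() *string    { return &f.name }

// Compile-time check, mirroring the `var _ graph.ContainerResolver = ...`
// assertions used in the tests above.
var _ Container = stubFolder{}

func main() {
	var c Container = stubFolder{id: "1", parentID: "0", name: "Inbox"}
	fmt.Println(*c.GetId(), *c.GetParentFolderId(), *c.GetDisplayName())
}
```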
@ -45,13 +89,15 @@ var _ CachedContainer = &CacheFolder{}

 type CacheFolder struct {
 	Container
+	l *path.Builder
 	p *path.Builder
 }

 // NewCacheFolder public constructor for struct
-func NewCacheFolder(c Container, pb *path.Builder) CacheFolder {
+func NewCacheFolder(c Container, pb, lpb *path.Builder) CacheFolder {
 	cf := CacheFolder{
 		Container: c,
+		l:         lpb,
 		p:         pb,
 	}
@ -62,6 +108,14 @@ func NewCacheFolder(c Container, pb, lpb *path.Builder) CacheFolder {
 // Required Functions to satisfy interfaces
 // =========================================

+func (cf CacheFolder) Location() *path.Builder {
+	return cf.l
+}
+
+func (cf *CacheFolder) SetLocation(newLocation *path.Builder) {
+	cf.l = newLocation
+}
+
 func (cf CacheFolder) Path() *path.Builder {
 	return cf.p
 }
@ -108,3 +162,28 @@ func CreateCalendarDisplayable(entry any, parentID string) *CalendarDisplayable
 		parentID: parentID,
 	}
 }
+
+// =========================================
+// helper funcs
+// =========================================
+
+// CheckRequiredValues is a helper function to ensure that
+// all the pointers are set prior to being called.
+func CheckRequiredValues(c Container) error {
+	idPtr := c.GetId()
+	if idPtr == nil || len(*idPtr) == 0 {
+		return errors.New("folder without ID")
+	}
+
+	ptr := c.GetDisplayName()
+	if ptr == nil || len(*ptr) == 0 {
+		return errors.Errorf("folder %s without display name", *idPtr)
+	}
+
+	ptr = c.GetParentFolderId()
+	if ptr == nil || len(*ptr) == 0 {
+		return errors.Errorf("folder %s without parent ID", *idPtr)
+	}
+
+	return nil
+}
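`CheckRequiredValues` (relocated above) guards against nil or empty pointers before a container enters a cache. A self-contained sketch of that validation, with a local `folder` type and a `ptrEmpty` helper that are illustrative only:

```go
package main

import (
	"errors"
	"fmt"
)

// ptrEmpty reports whether a string pointer is nil or points at "".
func ptrEmpty(s *string) bool { return s == nil || len(*s) == 0 }

type folder struct{ id, parent, name *string }

func (f folder) GetId() *string             { return f.id }
func (f folder) GetParentFolderId() *string { return f.parent }
func (f folder) GetDisplayName() *string    { return f.name }

// checkRequired mirrors CheckRequiredValues: ID first, then display name,
// then parent ID, each error naming the folder by its ID where possible.
func checkRequired(f folder) error {
	if ptrEmpty(f.GetId()) {
		return errors.New("folder without ID")
	}

	if ptrEmpty(f.GetDisplayName()) {
		return fmt.Errorf("folder %s without display name", *f.GetId())
	}

	if ptrEmpty(f.GetParentFolderId()) {
		return fmt.Errorf("folder %s without parent ID", *f.GetId())
	}

	return nil
}

func main() {
	id, parent := "fid", "root"
	// Missing display name is caught after the ID check passes.
	fmt.Println(checkRequired(folder{id: &id, parent: &parent}))
}
```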
@ -151,7 +151,7 @@ func (md MetadataCollection) Items() <-chan data.Stream {
 			TotalBytes: totalBytes,
 		},
 		nil,
-		md.fullPath.Folder(),
+		md.fullPath.Folder(false),
 	)

 	md.statusUpdater(status)
@ -1,7 +1,6 @@
 package graph

 import (
-	"context"
 	"net/http"
 	"net/http/httputil"
 	"os"
@ -173,57 +172,6 @@ type Servicer interface {
 	Adapter() *msgraphsdk.GraphRequestAdapter
 }

-// Idable represents objects that implement msgraph-sdk-go/models.entityable
-// and have the concept of an ID.
-type Idable interface {
-	GetId() *string
-}
-
-// Descendable represents objects that implement msgraph-sdk-go/models.entityable
-// and have the concept of a "parent folder".
-type Descendable interface {
-	Idable
-	GetParentFolderId() *string
-}
-
-// Displayable represents objects that implement msgraph-sdk-go/models.entityable
-// and have the concept of a display name.
-type Displayable interface {
-	Idable
-	GetDisplayName() *string
-}
-
-type Container interface {
-	Descendable
-	Displayable
-}
-
-// ContainerResolver houses functions for getting information about containers
-// from remote APIs (i.e. resolve folder paths with Graph API). Resolvers may
-// cache information about containers.
-type ContainerResolver interface {
-	// IDToPath takes an m365 container ID and converts it to a hierarchical path
-	// to that container. The path has a similar format to paths on the local
-	// file system.
-	IDToPath(ctx context.Context, m365ID string) (*path.Builder, error)
-	// Populate performs initialization steps for the resolver
-	// @param ctx is necessary param for Graph API tracing
-	// @param baseFolderID represents the M365ID base that the resolver will
-	// conclude its search. Default input is "".
-	Populate(ctx context.Context, baseFolderID string, baseContainerPather ...string) error
-
-	// PathInCache performs a look up of a path reprensentation
-	// and returns the m365ID of directory iff the pathString
-	// matches the path of a container within the cache.
-	// @returns bool represents if m365ID was found.
-	PathInCache(pathString string) (string, bool)
-
-	AddToCache(ctx context.Context, m365Container Container) error
-
-	// Items returns the containers in the cache.
-	Items() []CachedContainer
-}
-
 // ---------------------------------------------------------------------------
 // Client Middleware
 // ---------------------------------------------------------------------------
@ -98,7 +98,7 @@ func NewGraphConnector(
 	// For now this keeps things functioning if callers do pass in a selector like
 	// "*" instead of.
 	if r == AllResources || r == Users {
-		if err = gc.setTenantUsers(ctx); err != nil {
+		if err = gc.setTenantUsers(ctx, errs); err != nil {
 			return nil, errors.Wrap(err, "retrieving tenant user list")
 		}
 	}
@ -129,11 +129,11 @@ func (gc *GraphConnector) createService() (*graph.Service, error) {
 // setTenantUsers queries the M365 to identify the users in the
 // workspace. The users field is updated during this method
 // iff the returned error is nil
-func (gc *GraphConnector) setTenantUsers(ctx context.Context) error {
+func (gc *GraphConnector) setTenantUsers(ctx context.Context, errs *fault.Errors) error {
 	ctx, end := D.Span(ctx, "gc:setTenantUsers")
 	defer end()

-	users, err := discovery.Users(ctx, gc.Owners.Users())
+	users, err := discovery.Users(ctx, gc.Owners.Users(), errs)
 	if err != nil {
 		return err
 	}
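`setTenantUsers` now threads a `*fault.Errors` bus through to `discovery.Users`. The diff only shows the bus being constructed with `fault.New(true)` and inspected via `Err()`/`Errs()`, so here is a toy error-bus sketch of that collect-and-optionally-fail-fast pattern, with assumed semantics and illustrative names rather than the real fault package:

```go
package main

import "fmt"

// errBus collects per-item failures; with failFast set, the first failure is
// also surfaced as the bus's primary error.
type errBus struct {
	failFast bool
	failure  error
	errs     []error
}

func (b *errBus) Add(err error) {
	if b.failFast && b.failure == nil {
		b.failure = err
	}

	b.errs = append(b.errs, err)
}

func (b *errBus) Err() error    { return b.failure }
func (b *errBus) Errs() []error { return b.errs }

func main() {
	bus := &errBus{failFast: true}

	for _, u := range []string{"ok@corp.example", "bad@corp.example"} {
		if u == "bad@corp.example" {
			bus.Add(fmt.Errorf("resolving user %s", u))
		}
	}

	fmt.Println(bus.Err(), len(bus.Errs()))
}
```

Passing the bus down lets enumeration record individual user failures without aborting the whole discovery call.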
@ -1013,9 +1013,9 @@ func collectionsForInfo(
 			user,
 			info.category,
 			info.pathElements,
-			false,
-		)
+			false)

-		mc := mockconnector.NewMockExchangeCollection(pth, len(info.items))
+		mc := mockconnector.NewMockExchangeCollection(pth, pth, len(info.items))
 		baseDestPath := backupOutputPathFromRestore(t, dest, pth)

 		baseExpected := expectedData[baseDestPath.String()]
@ -1076,7 +1076,7 @@ func collectionsForInfoVersion0(
 			info.pathElements,
 			false,
 		)
-		c := mockconnector.NewMockExchangeCollection(pth, len(info.items))
+		c := mockconnector.NewMockExchangeCollection(pth, pth, len(info.items))
 		baseDestPath := backupOutputPathFromRestore(t, dest, pth)

 		baseExpected := expectedData[baseDestPath.String()]
@ -129,12 +129,8 @@ func (suite *GraphConnectorUnitSuite) TestUnionSiteIDsAndWebURLs() {
 			ctx, flush := tester.NewContext()
 			defer flush()

-			errs := fault.New(true)
-
-			result, err := gc.UnionSiteIDsAndWebURLs(ctx, test.ids, test.urls, errs)
+			result, err := gc.UnionSiteIDsAndWebURLs(ctx, test.ids, test.urls, fault.New(true))
 			assert.NoError(t, err)
-			assert.NoError(t, errs.Err())
-			assert.Empty(t, errs.Errs())
 			assert.ElementsMatch(t, test.expect, result)
 		})
 	}
@ -192,9 +188,11 @@ func (suite *GraphConnectorIntegrationSuite) TestSetTenantUsers() {
 	require.NoError(suite.T(), err)

 	newConnector.Owners = owners

 	suite.Empty(len(newConnector.Users))
-	err = newConnector.setTenantUsers(ctx)
+
+	errs := fault.New(true)
+
+	err = newConnector.setTenantUsers(ctx, errs)
 	suite.NoError(err)
 	suite.Less(0, len(newConnector.Users))
 }
@ -219,12 +217,8 @@ func (suite *GraphConnectorIntegrationSuite) TestSetTenantSites() {
 	newConnector.Service = service
 	assert.Equal(t, 0, len(newConnector.Sites))

-	errs := fault.New(true)
-
-	err = newConnector.setTenantSites(ctx, errs)
+	err = newConnector.setTenantSites(ctx, fault.New(true))
 	assert.NoError(t, err)
-	assert.NoError(t, errs.Err())
-	assert.Empty(t, errs.Errs())
 	assert.Less(t, 0, len(newConnector.Sites))

 	for _, site := range newConnector.Sites {
@ -475,7 +469,7 @@ func runRestoreBackupTest(
 			RestorePermissions: true,
 			ToggleFeatures:     control.Toggles{EnablePermissionsBackup: true},
 		},
-	)
+		fault.New(true))
 	require.NoError(t, err)
 	// No excludes yet because this isn't an incremental backup.
 	assert.Empty(t, excludes)
@ -603,7 +597,7 @@ func runRestoreBackupTestVersion0(
 			RestorePermissions: true,
 			ToggleFeatures:     control.Toggles{EnablePermissionsBackup: true},
 		},
-	)
+		fault.New(true))
 	require.NoError(t, err)
 	// No excludes yet because this isn't an incremental backup.
 	assert.Empty(t, excludes)
@ -1550,7 +1544,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
 			RestorePermissions: true,
 			ToggleFeatures:     control.Toggles{EnablePermissionsBackup: true},
 		},
-	)
+		fault.New(true))
 	require.NoError(t, err)
 	// No excludes yet because this isn't an incremental backup.
 	assert.Empty(t, excludes)
@ -16,6 +16,7 @@ import (
 // MockExchangeDataCollection represents a mock exchange mailbox
 type MockExchangeDataCollection struct {
 	fullPath     path.Path
+	LocPath      path.Path
 	messageCount int
 	Data         [][]byte
 	Names        []string
@ -35,9 +36,14 @@ var (

 // NewMockExchangeDataCollection creates an data collection that will return the specified number of
 // mock messages when iterated. Exchange type mail
-func NewMockExchangeCollection(pathRepresentation path.Path, numMessagesToReturn int) *MockExchangeDataCollection {
+func NewMockExchangeCollection(
+	storagePath path.Path,
+	locationPath path.Path,
+	numMessagesToReturn int,
+) *MockExchangeDataCollection {
 	c := &MockExchangeDataCollection{
-		fullPath:     pathRepresentation,
+		fullPath:     storagePath,
+		LocPath:      locationPath,
 		messageCount: numMessagesToReturn,
 		Data:         [][]byte{},
 		Names:        []string{},
@ -93,21 +99,11 @@ func NewMockContactCollection(pathRepresentation path.Path, numMessagesToReturn
 	return c
 }

-func (medc *MockExchangeDataCollection) FullPath() path.Path {
-	return medc.fullPath
-}
-
-func (medc MockExchangeDataCollection) PreviousPath() path.Path {
-	return medc.PrevPath
-}
-
-func (medc MockExchangeDataCollection) State() data.CollectionState {
-	return medc.ColState
-}
-
-func (medc MockExchangeDataCollection) DoNotMergeItems() bool {
-	return medc.DoNotMerge
-}
+func (medc MockExchangeDataCollection) FullPath() path.Path { return medc.fullPath }
+func (medc MockExchangeDataCollection) LocationPath() path.Path { return medc.LocPath }
+func (medc MockExchangeDataCollection) PreviousPath() path.Path { return medc.PrevPath }
+func (medc MockExchangeDataCollection) State() data.CollectionState { return medc.ColState }
+func (medc MockExchangeDataCollection) DoNotMergeItems() bool { return medc.DoNotMerge }

 // Items returns a channel that has the next items in the collection. The
 // channel is closed when there are no more items available.
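`NewMockExchangeCollection` now takes both a storage path and a location path, which is why the updated call sites pass `pth, pth` when the two coincide. A cut-down, self-contained version of that constructor shape (strings standing in for `path.Path`):

```go
package main

import "fmt"

// mockCollection mirrors the new two-path mock: fullPath is the storage
// path, locPath the human-readable location.
type mockCollection struct {
	fullPath string
	locPath  string
	count    int
}

func newMockCollection(storagePath, locationPath string, n int) *mockCollection {
	return &mockCollection{fullPath: storagePath, locPath: locationPath, count: n}
}

func (m *mockCollection) FullPath() string     { return m.fullPath }
func (m *mockCollection) LocationPath() string { return m.locPath }

func main() {
	// Mail folders: storage and location coincide, so the same path is
	// passed twice, as in the updated tests.
	mail := newMockCollection("tenant/user/email/Inbox", "tenant/user/email/Inbox", 2)

	// Calendars: IDs in the storage path, display names in the location.
	cal := newMockCollection("tenant/user/events/AAMkAD123", "tenant/user/events/Birthdays", 2)

	fmt.Println(mail.FullPath() == mail.LocationPath()) // true
	fmt.Println(cal.FullPath() == cal.LocationPath())   // false
}
```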
@ -25,7 +25,7 @@ func TestMockExchangeCollectionSuite(t *testing.T) {
 }

 func (suite *MockExchangeCollectionSuite) TestMockExchangeCollection() {
-	mdc := mockconnector.NewMockExchangeCollection(nil, 2)
+	mdc := mockconnector.NewMockExchangeCollection(nil, nil, 2)

 	messagesRead := 0

@ -40,7 +40,7 @@ func (suite *MockExchangeCollectionSuite) TestMockExchangeCollection() {

 func (suite *MockExchangeCollectionSuite) TestMockExchangeCollectionItemSize() {
 	t := suite.T()
-	mdc := mockconnector.NewMockExchangeCollection(nil, 2)
+	mdc := mockconnector.NewMockExchangeCollection(nil, nil, 2)

 	mdc.Data[1] = []byte("This is some buffer of data so that the size is different than the default")

@ -58,7 +58,7 @@ func (suite *MockExchangeCollectionSuite) TestMockExchangeCollectionItemSize() {
 // functions by verifying no failures on (de)serializing steps using kiota serialization library
 func (suite *MockExchangeCollectionSuite) TestMockExchangeCollection_NewExchangeCollectionMail_Hydration() {
 	t := suite.T()
-	mdc := mockconnector.NewMockExchangeCollection(nil, 3)
+	mdc := mockconnector.NewMockExchangeCollection(nil, nil, 3)
 	buf := &bytes.Buffer{}

 	for stream := range mdc.Items() {
@ -69,6 +69,14 @@ type Collection struct {
 	itemMetaReader itemMetaReaderFunc
 	ctrl           control.Options

+	// prevPath is the previous hierarchical path used by this collection.
+	// It may be the same as fullPath, if the folder was not renamed or
+	// moved. It will be empty on its first retrieval.
+	prevPath path.Path
+
+	// Specifies if the collection is new, moved/renamed, or deleted.
+	state data.CollectionState
+
 	// should only be true if the old delta token expired
 	doNotMergeItems bool
 }
@ -92,6 +100,7 @@ type itemMetaReaderFunc func(
 func NewCollection(
 	itemClient *http.Client,
 	folderPath path.Path,
+	prevPath path.Path,
 	driveID string,
 	service graph.Servicer,
 	statusUpdater support.StatusUpdater,
@ -102,6 +111,7 @@ func NewCollection(
 	c := &Collection{
 		itemClient: itemClient,
 		folderPath: folderPath,
+		prevPath:   prevPath,
 		driveItems: map[string]models.DriveItemable{},
 		driveID:    driveID,
 		source:     source,
@ -109,6 +119,7 @@ func NewCollection(
 		data:          make(chan data.Stream, collectionChannelBufferSize),
 		statusUpdater: statusUpdater,
 		ctrl:          ctrlOpts,
+		state:         data.StateOf(prevPath, folderPath),
 		doNotMergeItems: doNotMergeItems,
 	}

@ -140,16 +151,12 @@ func (oc *Collection) FullPath() path.Path {
 	return oc.folderPath
 }

-// TODO(ashmrtn): Fill in with previous path once GraphConnector compares old
-// and new folder hierarchies.
 func (oc Collection) PreviousPath() path.Path {
-	return nil
+	return oc.prevPath
 }

-// TODO(ashmrtn): Fill in once GraphConnector compares old and new folder
-// hierarchies.
 func (oc Collection) State() data.CollectionState {
-	return data.NewState
+	return oc.state
 }

 func (oc Collection) DoNotMergeItems() bool {
@ -432,7 +439,7 @@ func (oc *Collection) reportAsCompleted(ctx context.Context, itemsFound, itemsRe
 			TotalBytes: byteCount, // Number of bytes read in the operation,
 		},
 		errs,
-		oc.folderPath.Folder(false), // Additional details
+		oc.folderPath.Folder(false), // Additional details
 	)
 	logger.Ctx(ctx).Debugw("done streaming items", "status", status.String())
 	oc.statusUpdater(status)
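`Collection.State()` is now derived from the previous and current paths via `data.StateOf(prevPath, folderPath)`. The exact semantics live in the data package; a standalone sketch of the comparison it is assumed to perform (names and ordering are illustrative):

```go
package main

import "fmt"

type collectionState int

const (
	newState collectionState = iota
	notMovedState
	movedState
	deletedState
)

// stateOf sketches the assumed prev-vs-current comparison: no previous path
// means the collection is new, no current path means deleted, differing
// paths mean moved/renamed, identical paths mean unchanged.
func stateOf(prev, curr string) collectionState {
	switch {
	case prev == "" && curr != "":
		return newState
	case curr == "":
		return deletedState
	case prev != curr:
		return movedState
	default:
		return notMovedState
	}
}

func main() {
	fmt.Println(stateOf("", "a/b"))    // newState
	fmt.Println(stateOf("a/b", ""))    // deletedState
	fmt.Println(stateOf("a/b", "a/c")) // movedState
	fmt.Println(stateOf("a/b", "a/b")) // notMovedState
}
```

This pairs with `doNotMergeItems`: state drives merge decisions, while the flag force-disables merging when the delta token expired.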
@ -164,6 +164,7 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
 	coll := NewCollection(
 		graph.HTTPClient(graph.NoTimeout()),
 		folderPath,
+		nil,
 		"drive-id",
 		suite,
 		suite.testStatusUpdater(&wg, &collStatus),
@ -298,6 +299,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadError() {
 	coll := NewCollection(
 		graph.HTTPClient(graph.NoTimeout()),
 		folderPath,
+		nil,
 		"fakeDriveID",
 		suite,
 		suite.testStatusUpdater(&wg, &collStatus),
@ -370,6 +372,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionDisablePermissionsBackup() {
 	coll := NewCollection(
 		graph.HTTPClient(graph.NoTimeout()),
 		folderPath,
+		nil,
 		"fakeDriveID",
 		suite,
 		suite.testStatusUpdater(&wg, &collStatus),
@ -28,7 +28,11 @@ const (
 	OneDriveSource
 	SharePointSource
 )

-const restrictedDirectory = "Site Pages"
+const (
+	restrictedDirectory = "Site Pages"
+	rootDrivePattern    = "/drives/%s/root:"
+)

 func (ds driveSource) toPathServiceCat() (path.ServiceType, path.CategoryType) {
 	switch ds {
@ -382,11 +386,15 @@ func (c *Collections) UpdateCollections(
 			continue
 		}

-		if item.GetParentReference() == nil || item.GetParentReference().GetPath() == nil {
+		if item.GetParentReference() == nil ||
+			item.GetParentReference().GetPath() == nil ||
+			item.GetParentReference().GetId() == nil {
 			return errors.Errorf("item does not have a parent reference. item name : %s", *item.GetName())
 		}

 		// Create a collection for the parent of this item
+		collectionID := *item.GetParentReference().GetId()
+
 		collectionPath, err := GetCanonicalPath(
 			*item.GetParentReference().GetPath(),
 			c.tenant,
@ -411,7 +419,34 @@ func (c *Collections) UpdateCollections(
 			// the deleted folder/package.
 			delete(newPaths, *item.GetId())

-			// TODO(ashmrtn): Create a collection with state Deleted.
+			prevColPath, ok := oldPaths[*item.GetId()]
+			if !ok {
+				// It is possible that an item was created and
+				// deleted between two delta invocations. In
+				// that case, it will only produce a single
+				// delete entry in the delta response.
+				continue
+			}
+
+			prevPath, err := path.FromDataLayerPath(prevColPath, false)
+			if err != nil {
+				logger.Ctx(ctx).Errorw("invalid previous path for deleted item", "error", err)
+				return err
+			}
+
+			col := NewCollection(
+				c.itemClient,
+				nil,
+				prevPath,
+				driveID,
+				c.service,
+				c.statusUpdater,
+				c.source,
+				c.ctrl,
+				invalidPrevDelta,
+			)
+
+			c.CollectionMap[*item.GetId()] = col
+
 			break
 		}
@ -454,14 +489,14 @@ func (c *Collections) UpdateCollections(
 		// TODO(ashmrtn): Figure what when an item was moved (maybe) and add it to
 		// the exclude list.

-		col, found := c.CollectionMap[collectionPath.String()]
+		col, found := c.CollectionMap[collectionID]

 		if !found {
 			// TODO(ashmrtn): Compare old and new path and set collection state
 			// accordingly.
 			col = NewCollection(
 				c.itemClient,
 				collectionPath,
+				nil,
 				driveID,
 				c.service,
 				c.statusUpdater,
@ -470,7 +505,7 @@ func (c *Collections) UpdateCollections(
 				invalidPrevDelta,
 			)

-			c.CollectionMap[collectionPath.String()] = col
+			c.CollectionMap[collectionID] = col
 			c.NumContainers++
 		}
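The hunks above rekey `CollectionMap` from the container's path string to its container ID. A short standalone illustration of why that helps across delta pages:

```go
package main

import "fmt"

func main() {
	// Keying collections by container ID rather than by path string keeps a
	// renamed or moved folder attached to the same collection entry.
	type col struct{ path string }

	byID := map[string]*col{}
	byID["folder-id-1"] = &col{path: "root:/Documents"}

	// A later rename only updates the stored path; the map key is stable,
	// so no duplicate collection is created.
	byID["folder-id-1"].path = "root:/Docs"

	fmt.Println(len(byID), byID["folder-id-1"].path) // 1 root:/Docs
}
```

With path-string keys, the same rename would have produced two entries: one under the old path and one under the new.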
File diff suppressed because it is too large
@ -194,7 +194,7 @@ func collectItems(
 		oldPaths         = map[string]string{}
 		newPaths         = map[string]string{}
 		excluded         = map[string]struct{}{}
-		invalidPrevDelta = false
+		invalidPrevDelta = len(prevDelta) == 0
 	)

 	maps.Copy(newPaths, oldPaths)
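The one-line change above treats a missing previous delta token as invalid up front, which forces a full enumeration instead of attempting an incremental merge against nothing. A trivial runnable illustration:

```go
package main

import "fmt"

func main() {
	// An empty previous delta token now marks the prior delta invalid,
	// switching the walk from incremental to full enumeration.
	for _, prevDelta := range []string{"", "deltatoken123"} {
		invalidPrevDelta := len(prevDelta) == 0
		fmt.Printf("prevDelta=%q -> full enumeration: %v\n", prevDelta, invalidPrevDelta)
	}
}
```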
@ -58,6 +58,45 @@ func getParentPermissions(
 	return parentPerms, nil
 }

+func getParentAndCollectionPermissions(
+	drivePath *path.DrivePath,
+	collectionPath path.Path,
+	permissions map[string][]UserPermission,
+	restorePerms bool,
+) ([]UserPermission, []UserPermission, error) {
+	if !restorePerms {
+		return nil, nil, nil
+	}
+
+	var (
+		err         error
+		parentPerms []UserPermission
+		colPerms    []UserPermission
+	)
+
+	// Only get parent permissions if we're not restoring the root.
+	if len(drivePath.Folders) > 0 {
+		parentPath, err := collectionPath.Dir()
+		if err != nil {
+			return nil, nil, clues.Wrap(err, "getting parent path")
+		}
+
+		parentPerms, err = getParentPermissions(parentPath, permissions)
+		if err != nil {
+			return nil, nil, clues.Wrap(err, "getting parent permissions")
+		}
+	}
+
+	// TODO(ashmrtn): For versions after this pull the permissions from the
+	// current collection with Fetch().
+	colPerms, err = getParentPermissions(collectionPath, permissions)
+	if err != nil {
+		return nil, nil, clues.Wrap(err, "getting collection permissions")
+	}
+
+	return parentPerms, colPerms, nil
+}
+
 // RestoreCollections will restore the specified data collections into OneDrive
 func RestoreCollections(
 	ctx context.Context,
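`getParentAndCollectionPermissions` splits permission lookup into parent and collection sets, skipping the parent when the collection is at the drive root. A self-contained sketch of that split (toy map lookup standing in for the real permission resolution):

```go
package main

import "fmt"

type perm struct{ user, role string }

// parentAndCollectionPerms mirrors the shape above: nothing when permission
// restore is off, and parent permissions only when at least one folder sits
// between the drive root and the collection.
func parentAndCollectionPerms(
	folders []string,
	lookup map[string][]perm,
	collectionKey, parentKey string,
	restorePerms bool,
) ([]perm, []perm) {
	if !restorePerms {
		return nil, nil
	}

	var parent []perm
	if len(folders) > 0 {
		// Only get parent permissions if we're not restoring the root.
		parent = lookup[parentKey]
	}

	return parent, lookup[collectionKey]
}

func main() {
	lookup := map[string][]perm{
		"root:/a":   {{user: "u1", role: "read"}},
		"root:/a/b": {{user: "u2", role: "write"}},
	}

	pp, cp := parentAndCollectionPerms(
		[]string{"a", "b"}, lookup, "root:/a/b", "root:/a", true)
	fmt.Println(len(pp), len(cp)) // 1 1
}
```

Separating the two sets lets inherited (parent) and directly-assigned (collection) permissions be diffed when restoring, instead of blindly reapplying everything.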
@ -94,24 +133,12 @@ func RestoreCollections(

 	// Iterate through the data collections and restore the contents of each
 	for _, dc := range dcs {
-		var (
-			parentPerms []UserPermission
-			err         error
-		)
-
-		if opts.RestorePermissions {
-			parentPerms, err = getParentPermissions(dc.FullPath(), parentPermissions)
-			if err != nil {
-				errUpdater(dc.FullPath().String(), err)
-			}
-		}
-
 		metrics, folderPerms, permissionIDMappings, canceled = RestoreCollection(
 			ctx,
 			backupVersion,
 			service,
 			dc,
-			parentPerms,
+			parentPermissions,
 			OneDriveSource,
 			dest.ContainerName,
 			deets,
@ -150,7 +177,7 @@ func RestoreCollection(
 	backupVersion int,
 	service graph.Servicer,
 	dc data.RestoreCollection,
-	parentPerms []UserPermission,
+	parentPermissions map[string][]UserPermission,
 	source driveSource,
 	restoreContainerName string,
 	deets *details.Builder,
@ -186,11 +213,28 @@ func RestoreCollection(
 	trace.Log(ctx, "gc:oneDrive:restoreCollection", directory.String())
 	logger.Ctx(ctx).Infow(
 		"restoring to destination",
-		"origin", dc.FullPath().Folder(),
+		"origin", dc.FullPath().Folder(false),
 		"destination", restoreFolderElements)

+	parentPerms, colPerms, err := getParentAndCollectionPermissions(
+		drivePath,
+		dc.FullPath(),
+		parentPermissions,
+		restorePerms)
+	if err != nil {
+		errUpdater(directory.String(), err)
+		return metrics, folderPerms, permissionIDMappings, false
+	}
+
 	// Create restore folders and get the folder ID of the folder the data stream will be restored in
-	restoreFolderID, err := CreateRestoreFolders(ctx, service, drivePath.DriveID, restoreFolderElements)
+	restoreFolderID, permissionIDMappings, err := createRestoreFoldersWithPermissions(
+		ctx,
+		service,
+		drivePath.DriveID,
+		restoreFolderElements,
+		parentPerms,
+		colPerms,
+		permissionIDMappings)
 	if err != nil {
 		errUpdater(directory.String(), errors.Wrapf(err, "failed to create folders %v", restoreFolderElements))
 		return metrics, folderPerms, permissionIDMappings, false
@ -240,7 +284,13 @@ func RestoreCollection(
 				continue
 			}

-			deets.Add(itemPath.String(), itemPath.ShortRef(), "", true, itemInfo)
+			deets.Add(
+				itemPath.String(),
+				itemPath.ShortRef(),
+				"",
+				"", // TODO: implement locationRef
+				true,
+				itemInfo)

 			// Mark it as success without processing .meta
 			// file if we are not restoring permissions
@ -272,7 +322,7 @@ func RestoreCollection(
 				service,
 				drivePath.DriveID,
 				itemID,
-				parentPerms,
+				colPerms,
 				meta.Permissions,
 				permissionIDMappings,
 			)
@ -288,40 +338,16 @@ func RestoreCollection(
 			// RestoreOp, so we still need to handle them in some way.
 			continue
 		} else if strings.HasSuffix(name, DirMetaFileSuffix) {
-			trimmedName := strings.TrimSuffix(name, DirMetaFileSuffix)
-			folderID, err := createRestoreFolder(
-				ctx,
-				service,
-				drivePath.DriveID,
-				trimmedName,
-				restoreFolderID,
-			)
-			if err != nil {
-				errUpdater(itemData.UUID(), err)
-				continue
-			}
-
 			if !restorePerms {
 				continue
 			}

-			meta, err := getMetadata(itemData.ToReader())
-			if err != nil {
-				errUpdater(itemData.UUID(), err)
-				continue
-			}
-
-			permissionIDMappings, err = restorePermissions(
-				ctx,
-				service,
-				drivePath.DriveID,
-				folderID,
-				parentPerms,
-				meta.Permissions,
-				permissionIDMappings,
-			)
+			metaReader := itemData.ToReader()
+			meta, err := getMetadata(metaReader)
+			metaReader.Close()
 			if err != nil {
-				errUpdater(itemData.UUID(), err)
+				errUpdater(itemData.UUID(), clues.Wrap(err, "folder metadata"))
 				continue
 			}

@ -351,36 +377,56 @@ func RestoreCollection(
 			continue
 		}

-		deets.Add(itemPath.String(), itemPath.ShortRef(), "", true, itemInfo)
+		deets.Add(
+			itemPath.String(),
+			itemPath.ShortRef(),
+			"",
+			"", // TODO: implement locationRef
+			true,
+			itemInfo)
 				metrics.Successes++
 			}
 		}
 	}
 }

-// Creates a folder with its permissions
-func createRestoreFolder(
+// createRestoreFoldersWithPermissions creates the restore folder hierarchy in
+// the specified drive and returns the folder ID of the last folder entry in the
+// hierarchy. Permissions are only applied to the last folder in the hierarchy.
+// Passing nil for the permissions results in just creating the folder(s).
+func createRestoreFoldersWithPermissions(
 	ctx context.Context,
 	service graph.Servicer,
-	driveID, folder, parentFolderID string,
-) (string, error) {
-	folderItem, err := createItem(ctx, service, driveID, parentFolderID, newItem(folder, true))
+	driveID string,
+	restoreFolders []string,
+	parentPermissions []UserPermission,
+	folderPermissions []UserPermission,
+	permissionIDMappings map[string]string,
+) (string, map[string]string, error) {
+	id, err := CreateRestoreFolders(ctx, service, driveID, restoreFolders)
 	if err != nil {
-		return "", errors.Wrapf(
-			err,
-			"failed to create folder %s/%s. details: %s", parentFolderID, folder,
-			support.ConnectorStackErrorTrace(err),
-		)
+		return "", permissionIDMappings, err
 	}

-	logger.Ctx(ctx).Debugf("Resolved %s in %s to %s", folder, parentFolderID, *folderItem.GetId())
+	permissionIDMappings, err = restorePermissions(
+		ctx,
+		service,
+		driveID,
+		id,
+		parentPermissions,
+		folderPermissions,
+		permissionIDMappings)

-	return *folderItem.GetId(), nil
+	return id, permissionIDMappings, err
 }

-// createRestoreFolders creates the restore folder hierarchy in the specified drive and returns the folder ID
-// of the last folder entry in the hierarchy
-func CreateRestoreFolders(ctx context.Context, service graph.Servicer, driveID string, restoreFolders []string,
+// CreateRestoreFolders creates the restore folder hierarchy in the specified
+// drive and returns the folder ID of the last folder entry in the hierarchy.
+func CreateRestoreFolders(
+	ctx context.Context,
+	service graph.Servicer,
+	driveID string,
+	restoreFolders []string,
 ) (string, error) {
 	driveRoot, err := service.Client().DrivesById(driveID).Root().Get(ctx, nil)
 	if err != nil {
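`createRestoreFoldersWithPermissions` above first creates the folder chain, then applies permissions only to the last folder, threading the permission-ID mapping through every return path. A standalone sketch of that flow with stand-in helpers (the real `CreateRestoreFolders` and `restorePermissions` call Graph):

```go
package main

import "fmt"

// createFolders is a stand-in for CreateRestoreFolders: it "creates" the
// chain and returns the ID of the deepest folder.
func createFolders(folders []string) (string, error) {
	return "id-of-" + folders[len(folders)-1], nil
}

// restorePerms is a stand-in for restorePermissions: it records each
// permission against the leaf folder in the ID mapping.
func restorePerms(folderID string, perms []string, idMap map[string]string) (map[string]string, error) {
	for _, p := range perms {
		idMap[p] = "applied-on-" + folderID
	}

	return idMap, nil
}

// createFoldersWithPerms mirrors the new helper's shape: the mapping is
// returned even on the error path so callers never lose it.
func createFoldersWithPerms(
	folders, perms []string,
	idMap map[string]string,
) (string, map[string]string, error) {
	id, err := createFolders(folders)
	if err != nil {
		return "", idMap, err
	}

	idMap, err = restorePerms(id, perms, idMap)

	return id, idMap, err
}

func main() {
	id, idMap, err := createFoldersWithPerms(
		[]string{"Corso_Restore", "Documents"},
		[]string{"perm-1"},
		map[string]string{})
	fmt.Println(id, idMap["perm-1"], err)
}
```

Applying permissions only at the leaf matches OneDrive's inheritance model: intermediate folders pick up their permissions from the parent chain.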
@@ -167,7 +167,7 @@ func (sc *Collection) finishPopulation(ctx context.Context, attempts, success in
 			TotalBytes: totalBytes,
 		},
 		errs,
-		sc.fullPath.Folder())
+		sc.fullPath.Folder(false))
 	logger.Ctx(ctx).Debug(status.String())

 	if sc.statusUpdater != nil {
@@ -191,7 +191,7 @@ func (sc *Collection) populate(ctx context.Context) {
 		ctx,
 		sc.fullPath.Category().String(),
 		observe.Safe("name"),
-		observe.PII(sc.fullPath.Folder()))
+		observe.PII(sc.fullPath.Folder(false)))
 	go closer()

 	defer func() {
@@ -20,7 +20,7 @@ import (
 // ---------------------------------------------------------------------------

 const (
-	testBaseDrivePath = "drive/driveID1/root:"
+	testBaseDrivePath = "drives/driveID1/root:"
 )

 type testFolderMatcher struct {
@@ -60,6 +60,7 @@ func (suite *SharePointLibrariesSuite) TestUpdateCollections() {
 		items                   []models.DriveItemable
 		scope                   selectors.SharePointScope
 		expect                  assert.ErrorAssertionFunc
+		expectedCollectionIDs   []string
 		expectedCollectionPaths []string
 		expectedItemCount       int
 		expectedContainerCount  int
@@ -68,10 +69,12 @@ func (suite *SharePointLibrariesSuite) TestUpdateCollections() {
 		{
 			testCase: "Single File",
 			items: []models.DriveItemable{
-				driveItem("file", testBaseDrivePath, true),
+				driveRootItem("root"),
+				driveItem("file", testBaseDrivePath, "root", true),
 			},
 			scope:  anyFolder,
 			expect: assert.NoError,
+			expectedCollectionIDs: []string{"root"},
 			expectedCollectionPaths: expectedPathAsSlice(
 				suite.T(),
 				tenant,
@@ -101,26 +104,30 @@ func (suite *SharePointLibrariesSuite) TestUpdateCollections() {
 				&MockGraphService{},
 				nil,
 				control.Options{})
-			err := c.UpdateCollections(ctx, "driveID", "General", test.items, paths, newPaths, excluded, true)
+			err := c.UpdateCollections(ctx, "driveID1", "General", test.items, paths, newPaths, excluded, true)
 			test.expect(t, err)
-			assert.Equal(t, len(test.expectedCollectionPaths), len(c.CollectionMap), "collection paths")
+			assert.Equal(t, len(test.expectedCollectionIDs), len(c.CollectionMap), "collection paths")
 			assert.Equal(t, test.expectedItemCount, c.NumItems, "item count")
 			assert.Equal(t, test.expectedFileCount, c.NumFiles, "file count")
 			assert.Equal(t, test.expectedContainerCount, c.NumContainers, "container count")
-			for _, collPath := range test.expectedCollectionPaths {
+			for _, collPath := range test.expectedCollectionIDs {
 				assert.Contains(t, c.CollectionMap, collPath)
 			}
+			for _, col := range c.CollectionMap {
+				assert.Contains(t, test.expectedCollectionPaths, col.FullPath().String())
+			}
 		})
 	}
 }

-func driveItem(name string, path string, isFile bool) models.DriveItemable {
+func driveItem(name, parentPath, parentID string, isFile bool) models.DriveItemable {
 	item := models.NewDriveItem()
 	item.SetName(&name)
 	item.SetId(&name)

 	parentReference := models.NewItemReference()
-	parentReference.SetPath(&path)
+	parentReference.SetPath(&parentPath)
+	parentReference.SetId(&parentID)
 	item.SetParentReference(parentReference)

 	if isFile {
@@ -130,6 +137,16 @@ func driveItem(name string, path string, isFile bool) models.DriveItemable {
 	return item
 }

+func driveRootItem(id string) models.DriveItemable {
+	name := "root"
+	item := models.NewDriveItem()
+	item.SetName(&name)
+	item.SetId(&id)
+	item.SetRoot(models.NewRoot())
+
+	return item
+}
+
 type SharePointPagesSuite struct {
 	suite.Suite
 }
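With collections now keyed by container ID, each test case seeds a synthetic root container and parents its items to that ID. A small sketch of the pattern, using the helpers from the hunk above (values illustrative):

    items := []models.DriveItemable{
        driveRootItem("root"), // container with ID "root"
        driveItem("file", testBaseDrivePath, "root", true),
    }
    // After UpdateCollections runs, the file's collection is found under
    // c.CollectionMap["root"], while its display path is still checked
    // against expectedCollectionPaths.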
@@ -69,7 +69,7 @@ func RestoreCollections(
 		backupVersion,
 		service,
 		dc,
-		[]onedrive.UserPermission{}, // Currently permission data is not stored for sharepoint
+		map[string][]onedrive.UserPermission{}, // Currently permission data is not stored for sharepoint
 		onedrive.OneDriveSource,
 		dest.ContainerName,
 		deets,
@@ -276,6 +276,7 @@ func RestoreListCollection(
 		itemPath.String(),
 		itemPath.ShortRef(),
 		"",
+		"", // TODO: implement locationRef
 		true,
 		itemInfo)

@@ -355,6 +356,7 @@ func RestorePageCollection(
 		itemPath.String(),
 		itemPath.ShortRef(),
 		"",
+		"", // TODO: implement locationRef
 		true,
 		itemInfo,
 	)
@@ -6,8 +6,6 @@ import (

 	"github.com/dustin/go-humanize"
 	multierror "github.com/hashicorp/go-multierror"
-
-	"github.com/alcionai/corso/src/pkg/logger"
 )

 // ConnectorOperationStatus is a data type used to describe the state of
@@ -80,15 +78,6 @@ func CreateStatus(
 		additionalDetails: details,
 	}

-	if status.ObjectCount != status.ErrorCount+status.Successful {
-		logger.Ctx(ctx).Errorw(
-			"status object count does not match errors + successes",
-			"objects", cm.Objects,
-			"successes", cm.Successes,
-			"numErrors", numErr,
-			"errors", err)
-	}
-
 	return &status
 }

@@ -114,10 +103,11 @@ func MergeStatus(one, two ConnectorOperationStatus) ConnectorOperationStatus {
 	}

 	status := ConnectorOperationStatus{
 		lastOperation: one.lastOperation,
 		ObjectCount:   one.ObjectCount + two.ObjectCount,
 		FolderCount:   one.FolderCount + two.FolderCount,
 		Successful:    one.Successful + two.Successful,
+		// TODO: remove in favor of fault.Errors
 		ErrorCount:    one.ErrorCount + two.ErrorCount,
 		Err:           multierror.Append(one.Err, two.Err).ErrorOrNil(),
 		bytes:         one.bytes + two.bytes,
@@ -144,14 +134,11 @@ func (cos *ConnectorOperationStatus) String() string {
 		cos.Successful,
 		cos.ObjectCount,
 		humanize.Bytes(uint64(cos.bytes)),
-		cos.FolderCount,
-	)
+		cos.FolderCount)

 	if cos.incomplete {
 		message += " " + cos.incompleteReason
 	}

-	message += " " + operationStatement + cos.additionalDetails + "\n"
-
-	return message
+	return message + " " + operationStatement + cos.additionalDetails
 }
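MergeStatus remains purely additive after the cleanup above: counts sum and errors append. A minimal sketch (field values made up):

    a := ConnectorOperationStatus{ObjectCount: 10, Successful: 9, ErrorCount: 1}
    b := ConnectorOperationStatus{ObjectCount: 5, Successful: 5}

    merged := MergeStatus(a, b)
    // merged.ObjectCount == 15, merged.Successful == 14, merged.ErrorCount == 1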
@@ -5,7 +5,7 @@ import (
 	"context"
 	"fmt"

-	"github.com/pkg/errors"
+	"github.com/alcionai/clues"
 	"gopkg.in/resty.v1"

 	"github.com/alcionai/corso/src/pkg/logger"
@@ -38,7 +38,7 @@ func NewWriter(id, url string, size int64) *writer {

 // Write will upload the provided data to M365. It sets the `Content-Length` and `Content-Range` headers based on
 // https://docs.microsoft.com/en-us/graph/api/driveitem-createuploadsession
-func (iw *writer) Write(p []byte) (n int, err error) {
+func (iw *writer) Write(p []byte) (int, error) {
 	rangeLength := len(p)
 	logger.Ctx(context.Background()).Debugf("WRITE for %s. Size:%d, Offset: %d, TotalSize: %d",
 		iw.id, rangeLength, iw.lastWrittenOffset, iw.contentLength)
@@ -47,7 +47,7 @@ func (iw *writer) Write(p []byte) (n int, err error) {

 	// PUT the request - set headers `Content-Range`to describe total size and `Content-Length` to describe size of
 	// data in the current request
-	resp, err := iw.client.R().
+	_, err := iw.client.R().
 		SetHeaders(map[string]string{
 			contentRangeHeaderKey: fmt.Sprintf(contentRangeHeaderValueFmt,
 				iw.lastWrittenOffset,
@@ -57,15 +57,15 @@ func (iw *writer) Write(p []byte) (n int, err error) {
 		}).
 		SetBody(bytes.NewReader(p)).Put(iw.url)
 	if err != nil {
-		return 0, errors.Wrapf(err,
-			"failed to upload item %s. Upload failed at Size:%d, Offset: %d, TotalSize: %d ",
-			iw.id, rangeLength, iw.lastWrittenOffset, iw.contentLength)
+		return 0, clues.Wrap(err, "uploading item").WithAll(
+			"upload_id", iw.id,
+			"upload_chunk_size", rangeLength,
+			"upload_offset", iw.lastWrittenOffset,
+			"upload_size", iw.contentLength)
 	}

 	// Update last offset
 	iw.lastWrittenOffset = endOffset

-	logger.Ctx(context.Background()).Debugf("Response: %s", resp.String())
-
 	return rangeLength, nil
 }
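For reference, the Content-Range arithmetic the writer relies on, per the linked upload-session docs; this assumes contentRangeHeaderValueFmt expands to the standard "bytes first-last/total" form with an inclusive last byte:

    package main

    import "fmt"

    // contentRange renders the header value for a chunk of length n
    // written at offset off within a file of size total.
    func contentRange(off, n, total int64) string {
        return fmt.Sprintf("bytes %d-%d/%d", off, off+n-1, total)
    }

    func main() {
        fmt.Println(contentRange(0, 320, 1024)) // bytes 0-319/1024
    }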
@@ -96,6 +96,12 @@ type Stream interface {
 	Deleted() bool
 }

+// LocationPather provides a LocationPath describing the path with Display Names
+// instead of canonical IDs
+type LocationPather interface {
+	LocationPath() path.Path
+}
+
 // StreamInfo is used to provide service specific
 // information about the Stream
 type StreamInfo interface {
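Because LocationPather is an optional interface, consumers are expected to type-assert and tolerate its absence. A sketch of the lookup (col stands in for any backup collection):

    var locationPath path.Path

    if lp, ok := col.(data.LocationPather); ok {
        locationPath = lp.LocationPath() // display-name path
    }
    // locationPath stays nil for collections that only expose ID-based paths.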
@@ -124,7 +130,7 @@ func StateOf(prev, curr path.Path) CollectionState {
 		return NewState
 	}

-	if curr.Folder() != prev.Folder() {
+	if curr.Folder(false) != prev.Folder(false) {
 		return MovedState
 	}

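A rough decision sketch for StateOf, reconstructed only from the branches visible in this hunk; the boolean passed to Folder is presumably an escaping toggle, so false compares the raw folder segments:

    // prev unset                               -> NewState
    // curr.Folder(false) != prev.Folder(false) -> MovedState
    // otherwise                                -> handled by the remaining branches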
@@ -123,10 +123,11 @@ func (rw *restoreStreamReader) Read(p []byte) (n int, err error) {
 }

 type itemDetails struct {
 	info         *details.ItemInfo
 	repoPath     path.Path
 	prevPath     path.Path
-	cached       bool
+	locationPath path.Path
+	cached       bool
 }

 type corsoProgress struct {
@@ -135,7 +136,7 @@ type corsoProgress struct {
 	deets   *details.Builder
 	// toMerge represents items that we don't have in-memory item info for. The
 	// item info for these items should be sourced from a base snapshot later on.
-	toMerge    map[string]path.Path
+	toMerge    map[string]PrevRefs
 	mu         sync.RWMutex
 	totalBytes int64
 	errs       *fault.Errors
@@ -180,27 +181,45 @@ func (cp *corsoProgress) FinishedFile(relativePath string, err error) {
 		cp.mu.Lock()
 		defer cp.mu.Unlock()

-		cp.toMerge[d.prevPath.ShortRef()] = d.repoPath
+		cp.toMerge[d.prevPath.ShortRef()] = PrevRefs{
+			Repo:     d.repoPath,
+			Location: d.locationPath,
+		}

 		return
 	}

-	parent := d.repoPath.ToBuilder().Dir()
+	var (
+		locationFolders string
+		locPB           *path.Builder
+		parent          = d.repoPath.ToBuilder().Dir()
+	)
+
+	if d.locationPath != nil {
+		locationFolders = d.locationPath.Folder(true)
+
+		locPB = d.locationPath.ToBuilder()
+
+		// folderEntriesForPath assumes the location will
+		// not have an item element appended
+		if len(d.locationPath.Item()) > 0 {
+			locPB = locPB.Dir()
+		}
+	}

 	cp.deets.Add(
 		d.repoPath.String(),
 		d.repoPath.ShortRef(),
 		parent.ShortRef(),
+		locationFolders,
 		!d.cached,
-		*d.info,
-	)
+		*d.info)

-	folders := details.FolderEntriesForPath(parent)
+	folders := details.FolderEntriesForPath(parent, locPB)
 	cp.deets.AddFoldersForItem(
 		folders,
 		*d.info,
-		!d.cached,
-	)
+		!d.cached)
 }

 // Kopia interface function used as a callback when kopia finishes hashing a file.
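The trimming in FinishedFile matters because FolderEntriesForPath wants a folder-only builder. A compressed sketch with a hypothetical Exchange location:

    // locationPath: tenant/exchange/user/email/Inbox/item-id
    locPB := locationPath.ToBuilder()
    if len(locationPath.Item()) > 0 {
        locPB = locPB.Dir() // drop the trailing item element
    }
    // locPB now ends at .../Inbox, which FolderEntriesForPath accepts.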
@@ -263,12 +282,17 @@ func collectionEntries(
 	}

 	var (
+		locationPath path.Path
 		// Track which items have already been seen so we can skip them if we see
 		// them again in the data from the base snapshot.
 		seen  = map[string]struct{}{}
 		items = streamedEnts.Items()
 	)

+	if lp, ok := streamedEnts.(data.LocationPather); ok {
+		locationPath = lp.LocationPath()
+	}
+
 	for {
 		select {
 		case <-ctx.Done():
@@ -328,7 +352,11 @@ func collectionEntries(
 			// previous snapshot then we should populate prevPath here and leave
 			// info nil.
 			itemInfo := ei.Info()
-			d := &itemDetails{info: &itemInfo, repoPath: itemPath}
+			d := &itemDetails{
+				info:         &itemInfo,
+				repoPath:     itemPath,
+				locationPath: locationPath,
+			}
 			progress.put(encodeAsPath(itemPath.PopFront().Elements()...), d)
 		}

@@ -356,6 +384,7 @@ func streamBaseEntries(
 	cb func(context.Context, fs.Entry) error,
 	curPath path.Path,
 	prevPath path.Path,
+	locationPath path.Path,
 	dir fs.Directory,
 	encodedSeen map[string]struct{},
 	globalExcludeSet map[string]struct{},
@@ -411,7 +440,12 @@ func streamBaseEntries(
 		// All items have item info in the base backup. However, we need to make
 		// sure we have enough metadata to find those entries. To do that we add the
 		// item to progress and having progress aggregate everything for later.
-		d := &itemDetails{info: nil, repoPath: itemPath, prevPath: prevItemPath}
+		d := &itemDetails{
+			info:         nil,
+			repoPath:     itemPath,
+			prevPath:     prevItemPath,
+			locationPath: locationPath,
+		}
 		progress.put(encodeAsPath(itemPath.PopFront().Elements()...), d)

 		if err := cb(ctx, entry); err != nil {
@@ -455,6 +489,12 @@ func getStreamItemFunc(
 			}
 		}

+		var locationPath path.Path
+
+		if lp, ok := streamedEnts.(data.LocationPather); ok {
+			locationPath = lp.LocationPath()
+		}
+
 		seen, err := collectionEntries(ctx, cb, streamedEnts, progress)
 		if err != nil {
 			return errors.Wrap(err, "streaming collection entries")
@@ -465,6 +505,7 @@ func getStreamItemFunc(
 			cb,
 			curPath,
 			prevPath,
+			locationPath,
 			baseDir,
 			seen,
 			globalExcludeSet,
@@ -533,6 +574,7 @@ type treeMap struct {
 	// Previous path this directory may have resided at if it is sourced from a
 	// base snapshot.
 	prevPath path.Path
+
 	// Child directories of this directory.
 	childDirs map[string]*treeMap
 	// Reference to data pulled from the external service. Contains only items in
File diff suppressed because it is too large
@@ -114,6 +114,13 @@ type IncrementalBase struct {
 	SubtreePaths []*path.Builder
 }

+// PrevRefs hold the repoRef and locationRef from the items
+// that need to be merged in from prior snapshots.
+type PrevRefs struct {
+	Repo     path.Path
+	Location path.Path
+}
+
 // BackupCollections takes a set of collections and creates a kopia snapshot
 // with the data that they contain. previousSnapshots is used for incremental
 // backups and should represent the base snapshot from which metadata is sourced
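A sketch of what a toMerge entry looks like once PrevRefs is in place: the prior item's ShortRef keys both references, so the merge step can rewrite either path independently (Location may be nil for services without display-name paths):

    toMerge := map[string]PrevRefs{
        prevPath.ShortRef(): {
            Repo:     repoPath,     // canonical, ID-based path
            Location: locationPath, // display-name path; may be nil
        },
    }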
@@ -128,7 +135,7 @@ func (w Wrapper) BackupCollections(
 	tags map[string]string,
 	buildTreeWithBase bool,
 	errs *fault.Errors,
-) (*BackupStats, *details.Builder, map[string]path.Path, error) {
+) (*BackupStats, *details.Builder, map[string]PrevRefs, error) {
 	if w.c == nil {
 		return nil, nil, nil, clues.Stack(errNotConnected).WithClues(ctx)
 	}
@@ -143,7 +150,7 @@ func (w Wrapper) BackupCollections(
 	progress := &corsoProgress{
 		pending: map[string]*itemDetails{},
 		deets:   &details.Builder{},
-		toMerge: map[string]path.Path{},
+		toMerge: map[string]PrevRefs{},
 		errs:    errs,
 	}

@@ -27,7 +27,9 @@ import (
 const (
 	testTenant = "a-tenant"
 	testUser   = "user1"
+	testInboxID    = "Inbox_ID"
 	testInboxDir   = "Inbox"
+	testArchiveID  = "Archive_ID"
 	testArchiveDir = "Archive"
 	testFileName   = "file1"
 	testFileName2  = "file2"
@@ -144,8 +146,10 @@ type KopiaIntegrationSuite struct {
 	ctx   context.Context
 	flush func()

-	testPath1 path.Path
-	testPath2 path.Path
+	storePath1 path.Path
+	storePath2 path.Path
+	locPath1   path.Path
+	locPath2   path.Path
 }

 func TestKopiaIntegrationSuite(t *testing.T) {
@@ -164,21 +168,21 @@ func (suite *KopiaIntegrationSuite) SetupSuite() {
 		testTenant,
 		testUser,
 		path.EmailCategory,
-		false,
-	)
+		false)
 	require.NoError(suite.T(), err)

-	suite.testPath1 = tmp
+	suite.storePath1 = tmp
+	suite.locPath1 = tmp

 	tmp, err = path.Builder{}.Append(testArchiveDir).ToDataLayerExchangePathForCategory(
 		testTenant,
 		testUser,
 		path.EmailCategory,
-		false,
-	)
+		false)
 	require.NoError(suite.T(), err)

-	suite.testPath2 = tmp
+	suite.storePath2 = tmp
+	suite.locPath2 = tmp
 }

 func (suite *KopiaIntegrationSuite) SetupTest() {
@@ -199,13 +203,13 @@ func (suite *KopiaIntegrationSuite) TearDownTest() {
 func (suite *KopiaIntegrationSuite) TestBackupCollections() {
 	collections := []data.BackupCollection{
 		mockconnector.NewMockExchangeCollection(
-			suite.testPath1,
-			5,
-		),
+			suite.storePath1,
+			suite.locPath1,
+			5),
 		mockconnector.NewMockExchangeCollection(
-			suite.testPath2,
-			42,
-		),
+			suite.storePath2,
+			suite.locPath2,
+			42),
 	}

 	// tags that are supplied by the caller. This includes basic tags to support
@@ -217,14 +221,14 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {

 	reasons := []Reason{
 		{
-			ResourceOwner: suite.testPath1.ResourceOwner(),
-			Service:       suite.testPath1.Service(),
-			Category:      suite.testPath1.Category(),
+			ResourceOwner: suite.storePath1.ResourceOwner(),
+			Service:       suite.storePath1.Service(),
+			Category:      suite.storePath1.Category(),
 		},
 		{
-			ResourceOwner: suite.testPath2.ResourceOwner(),
-			Service:       suite.testPath2.Service(),
-			Category:      suite.testPath2.Category(),
+			ResourceOwner: suite.storePath2.ResourceOwner(),
+			Service:       suite.storePath2.Service(),
+			Category:      suite.storePath2.Category(),
 		},
 	}

@@ -311,7 +315,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
 			prevSnaps = append(prevSnaps, IncrementalBase{
 				Manifest: snap,
 				SubtreePaths: []*path.Builder{
-					suite.testPath1.ToBuilder().Dir(),
+					suite.storePath1.ToBuilder().Dir(),
 				},
 			})
 		})
@@ -342,13 +346,13 @@ func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() {
 		tags[k] = ""
 	}

-	dc1 := mockconnector.NewMockExchangeCollection(suite.testPath1, 1)
-	dc2 := mockconnector.NewMockExchangeCollection(suite.testPath2, 1)
+	dc1 := mockconnector.NewMockExchangeCollection(suite.storePath1, suite.locPath1, 1)
+	dc2 := mockconnector.NewMockExchangeCollection(suite.storePath2, suite.locPath2, 1)

-	fp1, err := suite.testPath1.Append(dc1.Names[0], true)
+	fp1, err := suite.storePath1.Append(dc1.Names[0], true)
 	require.NoError(t, err)

-	fp2, err := suite.testPath2.Append(dc2.Names[0], true)
+	fp2, err := suite.storePath2.Append(dc2.Names[0], true)
 	require.NoError(t, err)

 	stats, _, _, err := w.BackupCollections(
@@ -434,7 +438,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {

 	collections := []data.BackupCollection{
 		&mockBackupCollection{
-			path: suite.testPath1,
+			path: suite.storePath1,
 			streams: []data.Stream{
 				&mockconnector.MockExchangeData{
 					ID: testFileName,
@@ -447,7 +451,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
 			},
 		},
 		&mockBackupCollection{
-			path: suite.testPath2,
+			path: suite.storePath2,
 			streams: []data.Stream{
 				&mockconnector.MockExchangeData{
 					ID: testFileName3,
@@ -487,7 +491,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
 	// 5 file and 6 folder entries.
 	assert.Len(t, deets.Details().Entries, 5+6)

-	failedPath, err := suite.testPath2.Append(testFileName4, true)
+	failedPath, err := suite.storePath2.Append(testFileName4, true)
 	require.NoError(t, err)

 	ic := i64counter{}
@@ -792,8 +796,8 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
 			cols: func() []data.BackupCollection {
 				c := mockconnector.NewMockExchangeCollection(
 					suite.testPath1,
-					1,
-				)
+					suite.testPath1,
+					1)
 				c.ColState = data.NotMovedState

 				return []data.BackupCollection{c}
@@ -244,7 +244,7 @@ func (op *BackupOperation) do(
 		return nil, errors.Wrap(err, "connectng to m365")
 	}

-	cs, excludes, err := produceBackupDataCollections(ctx, gc, op.Selectors, mdColls, op.Options)
+	cs, excludes, err := produceBackupDataCollections(ctx, gc, op.Selectors, mdColls, op.Options, op.Errors)
 	if err != nil {
 		return nil, errors.Wrap(err, "producing backup data collections")
 	}
@@ -313,6 +313,7 @@ func produceBackupDataCollections(
 	sel selectors.Selector,
 	metadata []data.RestoreCollection,
 	ctrlOpts control.Options,
+	errs *fault.Errors,
 ) ([]data.BackupCollection, map[string]struct{}, error) {
 	complete, closer := observe.MessageWithCompletion(ctx, observe.Safe("Discovering items to backup"))
 	defer func() {
@@ -321,9 +322,7 @@ func produceBackupDataCollections(
 		closer()
 	}()

-	cols, excludes, errs := gc.DataCollections(ctx, sel, metadata, ctrlOpts)
-
-	return cols, excludes, errs
+	return gc.DataCollections(ctx, sel, metadata, ctrlOpts, errs)
 }

 // ---------------------------------------------------------------------------
@@ -339,7 +338,7 @@ type backuper interface {
 		tags map[string]string,
 		buildTreeWithBase bool,
 		errs *fault.Errors,
-	) (*kopia.BackupStats, *details.Builder, map[string]path.Path, error)
+	) (*kopia.BackupStats, *details.Builder, map[string]kopia.PrevRefs, error)
 }

 func selectorToReasons(sel selectors.Selector) []kopia.Reason {
@@ -398,7 +397,7 @@ func consumeBackupDataCollections(
 	backupID model.StableID,
 	isIncremental bool,
 	errs *fault.Errors,
-) (*kopia.BackupStats, *details.Builder, map[string]path.Path, error) {
+) (*kopia.BackupStats, *details.Builder, map[string]kopia.PrevRefs, error) {
 	complete, closer := observe.MessageWithCompletion(ctx, observe.Safe("Backing up data"))
 	defer func() {
 		complete <- struct{}{}
@@ -504,7 +503,7 @@ func mergeDetails(
 	ms *store.Wrapper,
 	detailsStore detailsReader,
 	mans []*kopia.ManifestEntry,
-	shortRefsFromPrevBackup map[string]path.Path,
+	shortRefsFromPrevBackup map[string]kopia.PrevRefs,
 	deets *details.Builder,
 	errs *fault.Errors,
 ) error {
@@ -560,13 +559,16 @@ func mergeDetails(
 				continue
 			}

-			newPath := shortRefsFromPrevBackup[rr.ShortRef()]
-			if newPath == nil {
+			prev, ok := shortRefsFromPrevBackup[rr.ShortRef()]
+			if !ok {
 				// This entry was not sourced from a base snapshot or cached from a
 				// previous backup, skip it.
 				continue
 			}

+			newPath := prev.Repo
+			newLoc := prev.Location
+
 			// Fixup paths in the item.
 			item := entry.ItemInfo
 			if err := details.UpdateItem(&item, newPath); err != nil {
@@ -575,16 +577,27 @@ func mergeDetails(

 			// TODO(ashmrtn): This may need updated if we start using this merge
 			// strategry for items that were cached in kopia.
-			itemUpdated := newPath.String() != rr.String()
+			var (
+				itemUpdated = newPath.String() != rr.String()
+				newLocStr   string
+				locBuilder  *path.Builder
+			)
+
+			if newLoc != nil {
+				locBuilder = newLoc.ToBuilder()
+				newLocStr = newLoc.Folder(true)
+				itemUpdated = itemUpdated || newLocStr != entry.LocationRef
+			}

 			deets.Add(
 				newPath.String(),
 				newPath.ShortRef(),
 				newPath.ToBuilder().Dir().ShortRef(),
+				newLocStr,
 				itemUpdated,
 				item)

-			folders := details.FolderEntriesForPath(newPath.ToBuilder().Dir())
+			folders := details.FolderEntriesForPath(newPath.ToBuilder().Dir(), locBuilder)
 			deets.AddFoldersForItem(folders, item, itemUpdated)

 			// Track how many entries we added so that we know if we got them all when
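The switch to a comma-ok lookup in mergeDetails is forced by the type change: PrevRefs is a struct value, so the old nil-check sentinel no longer compiles. The pattern in isolation:

    prev, ok := shortRefsFromPrevBackup[rr.ShortRef()]
    if !ok {
        continue // not sourced from a base snapshot
    }

    newPath := prev.Repo    // always populated for merged items
    newLoc := prev.Location // may be nil; guarded before use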
@@ -402,7 +402,7 @@ func buildCollections(
 		c.pathFolders,
 		false)

-	mc := mockconnector.NewMockExchangeCollection(pth, len(c.items))
+	mc := mockconnector.NewMockExchangeCollection(pth, pth, len(c.items))

 	for i := 0; i < len(c.items); i++ {
 		mc.Names[i] = c.items[i].name
@@ -777,8 +777,8 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
 		p, err := path.FromDataLayerPath(dest.deets.Entries[0].RepoRef, true)
 		require.NoError(t, err)

-		id, ok := cr.PathInCache(p.Folder())
-		require.True(t, ok, "dir %s found in %s cache", p.Folder(), category)
+		id, ok := cr.PathInCache(p.Folder(false))
+		require.True(t, ok, "dir %s found in %s cache", p.Folder(false), category)

 		d := dataset[category].dests[destName]
 		d.containerID = id
@@ -895,8 +895,8 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
 		p, err := path.FromDataLayerPath(deets.Entries[0].RepoRef, true)
 		require.NoError(t, err)

-		id, ok := cr.PathInCache(p.Folder())
-		require.True(t, ok, "dir %s found in %s cache", p.Folder(), category)
+		id, ok := cr.PathInCache(p.Folder(false))
+		require.True(t, ok, "dir %s found in %s cache", p.Folder(false), category)

 		dataset[category].dests[container3] = contDeets{id, deets}
 	}
@@ -89,8 +89,7 @@ type mockBackuper struct {
 		bases []kopia.IncrementalBase,
 		cs []data.BackupCollection,
 		tags map[string]string,
-		buildTreeWithBase bool,
-	)
+		buildTreeWithBase bool)
 }

 func (mbu mockBackuper) BackupCollections(
@@ -101,7 +100,7 @@ func (mbu mockBackuper) BackupCollections(
 	tags map[string]string,
 	buildTreeWithBase bool,
 	errs *fault.Errors,
-) (*kopia.BackupStats, *details.Builder, map[string]path.Path, error) {
+) (*kopia.BackupStats, *details.Builder, map[string]kopia.PrevRefs, error) {
 	if mbu.checkFunc != nil {
 		mbu.checkFunc(bases, cs, tags, buildTreeWithBase)
 	}
@@ -249,9 +248,10 @@ func makeFolderEntry(
 	t.Helper()

 	return &details.DetailsEntry{
 		RepoRef:   pb.String(),
 		ShortRef:  pb.ShortRef(),
 		ParentRef: pb.Dir().ShortRef(),
+		LocationRef: pb.PopFront().PopFront().PopFront().PopFront().Dir().String(),
 		ItemInfo: details.ItemInfo{
 			Folder: &details.FolderInfo{
 				ItemType: details.FolderItem,
@@ -277,17 +277,24 @@ func makePath(t *testing.T, elements []string, isItem bool) path.Path {
 func makeDetailsEntry(
 	t *testing.T,
 	p path.Path,
+	l path.Path,
 	size int,
 	updated bool,
 ) *details.DetailsEntry {
 	t.Helper()

+	var lr string
+	if l != nil {
+		lr = l.PopFront().PopFront().PopFront().PopFront().Dir().String()
+	}
+
 	res := &details.DetailsEntry{
 		RepoRef:   p.String(),
 		ShortRef:  p.ShortRef(),
 		ParentRef: p.ToBuilder().Dir().ShortRef(),
-		ItemInfo:  details.ItemInfo{},
-		Updated:   updated,
+		LocationRef: lr,
+		ItemInfo:    details.ItemInfo{},
+		Updated:     updated,
 	}

 	switch p.Service() {
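The chained PopFront calls in these helpers strip the four leading path elements (tenant, service, resource owner, category), and Dir drops the item, leaving the human-readable folder chain used as LocationRef. With a hypothetical OneDrive path:

    // p: tenant/onedrive/user/files/drives/drive-id/root:/work-display-name/item1
    lr := p.PopFront().PopFront().PopFront().PopFront().Dir().String()
    // lr: "drives/drive-id/root:/work-display-name"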
@@ -607,6 +614,21 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
 			},
 			true,
 		)
+		locationPath1 = makePath(
+			suite.T(),
+			[]string{
+				tenant,
+				path.OneDriveService.String(),
+				ro,
+				path.FilesCategory.String(),
+				"drives",
+				"drive-id",
+				"root:",
+				"work-display-name",
+				"item1",
+			},
+			true,
+		)
 		itemPath2 = makePath(
 			suite.T(),
 			[]string{
@@ -622,6 +644,21 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
 			},
 			true,
 		)
+		locationPath2 = makePath(
+			suite.T(),
+			[]string{
+				tenant,
+				path.OneDriveService.String(),
+				ro,
+				path.FilesCategory.String(),
+				"drives",
+				"drive-id",
+				"root:",
+				"personal-display-name",
+				"item2",
+			},
+			true,
+		)
 		itemPath3 = makePath(
 			suite.T(),
 			[]string{
@@ -634,6 +671,18 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
 			},
 			true,
 		)
+		locationPath3 = makePath(
+			suite.T(),
+			[]string{
+				tenant,
+				path.ExchangeService.String(),
+				ro,
+				path.EmailCategory.String(),
+				"personal-display-name",
+				"item3",
+			},
+			true,
+		)

 		backup1 = backup.Backup{
 			BaseModel: model.BaseModel{
@@ -669,7 +718,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
 		populatedModels  map[model.StableID]backup.Backup
 		populatedDetails map[string]*details.Details
 		inputMans        []*kopia.ManifestEntry
-		inputShortRefsFromPrevBackup map[string]path.Path
+		inputShortRefsFromPrevBackup map[string]kopia.PrevRefs

 		errCheck        assert.ErrorAssertionFunc
 		expectedEntries []*details.DetailsEntry
@@ -682,15 +731,18 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
 		},
 		{
 			name:                         "EmptyShortRefsFromPrevBackup",
-			inputShortRefsFromPrevBackup: map[string]path.Path{},
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{},
 			errCheck:                     assert.NoError,
 			// Use empty slice so we don't error out on nil != empty.
 			expectedEntries: []*details.DetailsEntry{},
 		},
 		{
 			name: "BackupIDNotFound",
-			inputShortRefsFromPrevBackup: map[string]path.Path{
-				itemPath1.ShortRef(): itemPath1,
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo:     itemPath1,
+					Location: locationPath1,
+				},
 			},
 			inputMans: []*kopia.ManifestEntry{
 				{
@@ -704,8 +756,11 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
 		},
 		{
 			name: "DetailsIDNotFound",
-			inputShortRefsFromPrevBackup: map[string]path.Path{
-				itemPath1.ShortRef(): itemPath1,
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo:     itemPath1,
+					Location: locationPath1,
+				},
 			},
 			inputMans: []*kopia.ManifestEntry{
 				{
@@ -727,9 +782,15 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
 		},
 		{
 			name: "BaseMissingItems",
-			inputShortRefsFromPrevBackup: map[string]path.Path{
-				itemPath1.ShortRef(): itemPath1,
-				itemPath2.ShortRef(): itemPath2,
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo:     itemPath1,
+					Location: locationPath1,
+				},
+				itemPath2.ShortRef(): {
+					Repo:     itemPath2,
+					Location: locationPath2,
+				},
 			},
 			inputMans: []*kopia.ManifestEntry{
 				{
@@ -746,7 +807,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
 				backup1.DetailsID: {
 					DetailsModel: details.DetailsModel{
 						Entries: []details.DetailsEntry{
-							*makeDetailsEntry(suite.T(), itemPath1, 42, false),
+							*makeDetailsEntry(suite.T(), itemPath1, itemPath1, 42, false),
 						},
 					},
 				},
@@ -755,8 +816,11 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
 		},
 		{
 			name: "TooManyItems",
-			inputShortRefsFromPrevBackup: map[string]path.Path{
-				itemPath1.ShortRef(): itemPath1,
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo:     itemPath1,
+					Location: locationPath1,
+				},
 			},
 			inputMans: []*kopia.ManifestEntry{
 				{
@@ -779,7 +843,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
 				backup1.DetailsID: {
 					DetailsModel: details.DetailsModel{
 						Entries: []details.DetailsEntry{
-							*makeDetailsEntry(suite.T(), itemPath1, 42, false),
+							*makeDetailsEntry(suite.T(), itemPath1, itemPath1, 42, false),
 						},
 					},
 				},
@@ -788,8 +852,11 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
 		},
 		{
 			name: "BadBaseRepoRef",
-			inputShortRefsFromPrevBackup: map[string]path.Path{
-				itemPath1.ShortRef(): itemPath1,
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo:     itemPath2,
+					Location: locationPath2,
+				},
 			},
 			inputMans: []*kopia.ManifestEntry{
 				{
@@ -834,19 +901,21 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
 		},
 		{
 			name: "BadOneDrivePath",
-			inputShortRefsFromPrevBackup: map[string]path.Path{
-				itemPath1.ShortRef(): makePath(
-					suite.T(),
-					[]string{
-						itemPath1.Tenant(),
-						path.OneDriveService.String(),
-						itemPath1.ResourceOwner(),
-						path.FilesCategory.String(),
-						"personal",
-						"item1",
-					},
-					true,
-				),
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo: makePath(
+						suite.T(),
+						[]string{
+							itemPath1.Tenant(),
+							path.OneDriveService.String(),
+							itemPath1.ResourceOwner(),
+							path.FilesCategory.String(),
+							"personal",
+							"item1",
+						},
+						true,
+					),
+				},
 			},
 			inputMans: []*kopia.ManifestEntry{
 				{
@@ -863,7 +932,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
 				backup1.DetailsID: {
 					DetailsModel: details.DetailsModel{
 						Entries: []details.DetailsEntry{
-							*makeDetailsEntry(suite.T(), itemPath1, 42, false),
+							*makeDetailsEntry(suite.T(), itemPath1, itemPath1, 42, false),
 						},
 					},
 				},
@@ -872,8 +941,11 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
 		},
 		{
 			name: "ItemMerged",
-			inputShortRefsFromPrevBackup: map[string]path.Path{
-				itemPath1.ShortRef(): itemPath1,
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo:     itemPath1,
+					Location: locationPath1,
+				},
 			},
 			inputMans: []*kopia.ManifestEntry{
 				{
@@ -890,20 +962,88 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
 				backup1.DetailsID: {
 					DetailsModel: details.DetailsModel{
 						Entries: []details.DetailsEntry{
-							*makeDetailsEntry(suite.T(), itemPath1, 42, false),
+							*makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
 						},
 					},
 				},
 			},
 			errCheck: assert.NoError,
 			expectedEntries: []*details.DetailsEntry{
-				makeDetailsEntry(suite.T(), itemPath1, 42, false),
+				makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
+			},
+		},
+		{
+			name: "ItemMergedNoLocation",
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo: itemPath1,
+				},
+			},
+			inputMans: []*kopia.ManifestEntry{
+				{
+					Manifest: makeManifest(suite.T(), backup1.ID, ""),
+					Reasons: []kopia.Reason{
+						pathReason1,
+					},
+				},
+			},
+			populatedModels: map[model.StableID]backup.Backup{
+				backup1.ID: backup1,
+			},
+			populatedDetails: map[string]*details.Details{
+				backup1.DetailsID: {
+					DetailsModel: details.DetailsModel{
+						Entries: []details.DetailsEntry{
+							*makeDetailsEntry(suite.T(), itemPath1, nil, 42, false),
+						},
+					},
+				},
+			},
+			errCheck: assert.NoError,
+			expectedEntries: []*details.DetailsEntry{
+				makeDetailsEntry(suite.T(), itemPath1, nil, 42, false),
+			},
+		},
+		{
+			name: "ItemMergedSameLocation",
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo:     itemPath1,
+					Location: itemPath1,
+				},
+			},
+			inputMans: []*kopia.ManifestEntry{
+				{
+					Manifest: makeManifest(suite.T(), backup1.ID, ""),
+					Reasons: []kopia.Reason{
+						pathReason1,
+					},
+				},
+			},
+			populatedModels: map[model.StableID]backup.Backup{
+				backup1.ID: backup1,
+			},
+			populatedDetails: map[string]*details.Details{
+				backup1.DetailsID: {
+					DetailsModel: details.DetailsModel{
+						Entries: []details.DetailsEntry{
+							*makeDetailsEntry(suite.T(), itemPath1, itemPath1, 42, false),
+						},
+					},
+				},
+			},
+			errCheck: assert.NoError,
+			expectedEntries: []*details.DetailsEntry{
+				makeDetailsEntry(suite.T(), itemPath1, itemPath1, 42, false),
 			},
 		},
 		{
 			name: "ItemMergedExtraItemsInBase",
-			inputShortRefsFromPrevBackup: map[string]path.Path{
-				itemPath1.ShortRef(): itemPath1,
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo:     itemPath1,
+					Location: locationPath1,
+				},
 			},
 			inputMans: []*kopia.ManifestEntry{
 				{
@@ -920,21 +1060,24 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
 				backup1.DetailsID: {
 					DetailsModel: details.DetailsModel{
 						Entries: []details.DetailsEntry{
-							*makeDetailsEntry(suite.T(), itemPath1, 42, false),
-							*makeDetailsEntry(suite.T(), itemPath2, 84, false),
+							*makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
+							*makeDetailsEntry(suite.T(), itemPath2, locationPath2, 84, false),
 						},
 					},
 				},
 			},
 			errCheck: assert.NoError,
 			expectedEntries: []*details.DetailsEntry{
-				makeDetailsEntry(suite.T(), itemPath1, 42, false),
+				makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
 			},
 		},
 		{
 			name: "ItemMoved",
-			inputShortRefsFromPrevBackup: map[string]path.Path{
-				itemPath1.ShortRef(): itemPath2,
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo:     itemPath2,
+					Location: locationPath2,
+				},
 			},
 			inputMans: []*kopia.ManifestEntry{
 				{
@@ -951,21 +1094,27 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
 				backup1.DetailsID: {
 					DetailsModel: details.DetailsModel{
 						Entries: []details.DetailsEntry{
-							*makeDetailsEntry(suite.T(), itemPath1, 42, false),
+							*makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
 						},
 					},
 				},
 			},
 			errCheck: assert.NoError,
 			expectedEntries: []*details.DetailsEntry{
-				makeDetailsEntry(suite.T(), itemPath2, 42, true),
+				makeDetailsEntry(suite.T(), itemPath2, locationPath2, 42, true),
 			},
 		},
 		{
 			name: "MultipleBases",
-			inputShortRefsFromPrevBackup: map[string]path.Path{
-				itemPath1.ShortRef(): itemPath1,
-				itemPath3.ShortRef(): itemPath3,
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo:     itemPath1,
+					Location: locationPath1,
+				},
+				itemPath3.ShortRef(): {
+					Repo:     itemPath3,
+					Location: locationPath3,
+				},
 			},
 			inputMans: []*kopia.ManifestEntry{
 				{
@@ -989,7 +1138,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
 				backup1.DetailsID: {
 					DetailsModel: details.DetailsModel{
 						Entries: []details.DetailsEntry{
-							*makeDetailsEntry(suite.T(), itemPath1, 42, false),
+							*makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
 						},
 					},
 				},
@@ -997,23 +1146,26 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
 					DetailsModel: details.DetailsModel{
 						Entries: []details.DetailsEntry{
 							// This entry should not be picked due to a mismatch on Reasons.
-							*makeDetailsEntry(suite.T(), itemPath1, 84, false),
+							*makeDetailsEntry(suite.T(), itemPath1, locationPath1, 84, false),
 							// This item should be picked.
-							*makeDetailsEntry(suite.T(), itemPath3, 37, false),
+							*makeDetailsEntry(suite.T(), itemPath3, locationPath3, 37, false),
 						},
 					},
 				},
 			},
 			errCheck: assert.NoError,
 			expectedEntries: []*details.DetailsEntry{
-				makeDetailsEntry(suite.T(), itemPath1, 42, false),
-				makeDetailsEntry(suite.T(), itemPath3, 37, false),
+				makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
+				makeDetailsEntry(suite.T(), itemPath3, locationPath3, 37, false),
 			},
 		},
 		{
 			name: "SomeBasesIncomplete",
-			inputShortRefsFromPrevBackup: map[string]path.Path{
-				itemPath1.ShortRef(): itemPath1,
+			inputShortRefsFromPrevBackup: map[string]kopia.PrevRefs{
+				itemPath1.ShortRef(): {
+					Repo:     itemPath1,
+					Location: locationPath1,
+				},
 			},
 			inputMans: []*kopia.ManifestEntry{
 				{
@@ -1037,7 +1189,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
 				backup1.DetailsID: {
 					DetailsModel: details.DetailsModel{
 						Entries: []details.DetailsEntry{
-							*makeDetailsEntry(suite.T(), itemPath1, 42, false),
+							*makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
 						},
 					},
 				},
@@ -1045,14 +1197,14 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
 					DetailsModel: details.DetailsModel{
 						Entries: []details.DetailsEntry{
 							// This entry should not be picked due to being incomplete.
-							*makeDetailsEntry(suite.T(), itemPath1, 84, false),
+							*makeDetailsEntry(suite.T(), itemPath1, locationPath1, 84, false),
 						},
 					},
 				},
 			},
 			errCheck: assert.NoError,
 			expectedEntries: []*details.DetailsEntry{
-				makeDetailsEntry(suite.T(), itemPath1, 42, false),
+				makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false),
 			},
 		},
 	}
@@ -1075,6 +1227,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
 				&deets,
 				fault.New(true))
 			test.errCheck(t, err)

 			if err != nil {
 				return
 			}
@@ -1103,8 +1256,12 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsFolders() {
 		itemPath1 = makePath(
 			t,
 			pathElems,
-			true,
-		)
+			true)
+
+		locPath1 = makePath(
+			t,
+			pathElems[:len(pathElems)-1],
+			false)

 		backup1 = backup.Backup{
 			BaseModel: model.BaseModel{
@@ -1119,8 +1276,11 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsFolders() {
 			Category: itemPath1.Category(),
 		}

-		inputToMerge = map[string]path.Path{
-			itemPath1.ShortRef(): itemPath1,
+		inputToMerge = map[string]kopia.PrevRefs{
+			itemPath1.ShortRef(): {
+				Repo:     itemPath1,
+				Location: locPath1,
+			},
 		}

 		inputMans = []*kopia.ManifestEntry{
@@ -1137,7 +1297,7 @@ func (suite *BackupOpSuite) TestBackupOperation_MergeBackupDetails_AddsFolders() {
 		}

 		itemSize    = 42
-		itemDetails = makeDetailsEntry(t, itemPath1, itemSize, false)
+		itemDetails = makeDetailsEntry(t, itemPath1, itemPath1, itemSize, false)

 		populatedDetails = map[string]*details.Details{
 			backup1.DetailsID: {
|
backup1.DetailsID: {
|
||||||
|
|||||||
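A note on the shape change above: `inputShortRefsFromPrevBackup` now maps each ShortRef to a `kopia.PrevRefs` value instead of a bare `path.Path`. A minimal sketch of building one entry, assuming corso's internal `kopia` and `path` packages; `itemPath` and `locPath` are hypothetical stand-ins for paths produced by `makePath` in the test setup:

```go
// Sketch: pairing an ID-based repo path with its display-name location.
// itemPath and locPath are placeholders, not values from this commit.
prev := map[string]kopia.PrevRefs{
	itemPath.ShortRef(): {
		Repo:     itemPath, // storage path, constructed from container IDs
		Location: locPath,  // logical path, constructed from display names
	},
}
```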
@ -135,13 +135,17 @@ func produceManifestsAndMetadata(
// of manifests, that each manifest's Reason (owner, service, category) is only
// included once. If a reason is duplicated by any two manifests, an error is
// returned.
func verifyDistinctBases(ctx context.Context, mans []*kopia.ManifestEntry, errs fault.Adder) error {
func verifyDistinctBases(ctx context.Context, mans []*kopia.ManifestEntry, errs *fault.Errors) error {
var (
failed bool
reasons = map[string]manifest.ID{}
)

for _, man := range mans {
if errs.Failed() {
break
}

// Incomplete snapshots are used only for kopia-assisted incrementals. The
// fact that we need this check here makes it seem like this should live in
// the kopia code. However, keeping it here allows for better debugging as
@ -173,7 +177,7 @@ func verifyDistinctBases(ctx context.Context, mans []*kopia.ManifestEntry, errs
return clues.New("multiple base snapshots qualify").WithClues(ctx)
}

return nil
return errs.Err()
}

// collectMetadata retrieves all metadata files associated with the manifest.
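The `verifyDistinctBases` change swaps the `fault.Adder` interface for a concrete `*fault.Errors`. A rough sketch of the fail-fast loop pattern it adopts, using only the calls shown in this diff (`fault.New`, `Failed`, `Add`, `Err`); `mans` is a placeholder slice:

```go
// Sketch: short-circuiting iteration with the fault package.
errs := fault.New(true) // failFast: the first Add also marks a hard failure

for _, man := range mans {
	// stop doing work once a hard failure has been recorded
	if errs.Failed() {
		break
	}

	// ... per-manifest checks, calling errs.Add(err) on recoverable errors ...
}

return errs.Err() // nil unless a hard failure occurred
```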
@ -15,7 +15,6 @@ import (
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/fault/mock"
"github.com/alcionai/corso/src/pkg/path"
)

@ -392,7 +391,7 @@ func (suite *OperationsManifestsUnitSuite) TestVerifyDistinctBases() {
ctx, flush := tester.NewContext()
defer flush()

err := verifyDistinctBases(ctx, test.mans, mock.NewAdder())
err := verifyDistinctBases(ctx, test.mans, fault.New(true))
test.expect(t, err)
})
}
@ -834,7 +833,7 @@ func (suite *BackupManifestSuite) TestBackupOperation_VerifyDistinctBases() {
ctx, flush := tester.NewContext()
defer flush()

test.errCheck(t, verifyDistinctBases(ctx, test.input, mock.NewAdder()))
test.errCheck(t, verifyDistinctBases(ctx, test.input, fault.New(true)))
})
}
}
@ -44,7 +44,7 @@ func (suite *StreamStoreIntegrationSuite) TestDetails() {

deetsBuilder := &details.Builder{}

deetsBuilder.Add("ref", "shortref", "parentref", true,
deetsBuilder.Add("ref", "shortref", "parentref", "locationRef", true,
details.ItemInfo{
Exchange: &details.ExchangeInfo{
Subject: "hello world",
@ -66,6 +66,7 @@ func (suite *StreamStoreIntegrationSuite) TestDetails() {
assert.Equal(t, deets.Entries[0].ParentRef, readDeets.Entries[0].ParentRef)
assert.Equal(t, deets.Entries[0].ShortRef, readDeets.Entries[0].ShortRef)
assert.Equal(t, deets.Entries[0].RepoRef, readDeets.Entries[0].RepoRef)
assert.Equal(t, deets.Entries[0].LocationRef, readDeets.Entries[0].LocationRef)
assert.Equal(t, deets.Entries[0].Updated, readDeets.Entries[0].Updated)
assert.NotNil(t, readDeets.Entries[0].Exchange)
assert.Equal(t, *deets.Entries[0].Exchange, *readDeets.Entries[0].Exchange)
@ -14,7 +14,7 @@ import (
"github.com/alcionai/corso/src/pkg/selectors"
)

const Version = 1
const Version = 2

// Backup represents the result of a backup operation
type Backup struct {
@ -15,11 +15,12 @@ import (
)

type folderEntry struct {
RepoRef string
ShortRef string
ParentRef string
Updated bool
LocationRef string
Info ItemInfo
Updated bool
Info ItemInfo
}

// --------------------------------------------------------------------------------
@ -110,10 +111,14 @@ type Builder struct {
knownFolders map[string]folderEntry `json:"-"`
}

func (b *Builder) Add(repoRef, shortRef, parentRef string, updated bool, info ItemInfo) {
func (b *Builder) Add(
repoRef, shortRef, parentRef, locationRef string,
updated bool,
info ItemInfo,
) {
b.mu.Lock()
defer b.mu.Unlock()
b.d.add(repoRef, shortRef, parentRef, updated, info)
b.d.add(repoRef, shortRef, parentRef, locationRef, updated, info)
}

func (b *Builder) Details() *Details {
@ -131,30 +136,65 @@ func (b *Builder) Details() *Details {
// TODO(ashmrtn): If we never need to pre-populate the modified time of a folder
// we should just merge this with AddFoldersForItem, have Add call
// AddFoldersForItem, and unexport AddFoldersForItem.
func FolderEntriesForPath(parent *path.Builder) []folderEntry {
func FolderEntriesForPath(parent, location *path.Builder) []folderEntry {
folders := []folderEntry{}
lfs := locationRefOf(location)

for len(parent.Elements()) > 0 {
nextParent := parent.Dir()
var (
nextParent = parent.Dir()
lr string
dn = parent.LastElem()
)

// TODO: We may have future cases where the storage hierarchy
// doesn't match the location hierarchy.
if lfs != nil {
lr = lfs.String()

if len(lfs.Elements()) > 0 {
dn = lfs.LastElem()
}
}

folders = append(folders, folderEntry{
RepoRef: parent.String(),
ShortRef: parent.ShortRef(),
ParentRef: nextParent.ShortRef(),
LocationRef: lr,
Info: ItemInfo{
Folder: &FolderInfo{
ItemType: FolderItem,
DisplayName: parent.Elements()[len(parent.Elements())-1],
DisplayName: dn,
},
},
})

parent = nextParent

if lfs != nil {
lfs = lfs.Dir()
}
}

return folders
}

// assumes the pb contains a path like:
// <tenant>/<service>/<owner>/<category>/<logical_containers>...
// and returns a string with only <logical_containers>/...
func locationRefOf(pb *path.Builder) *path.Builder {
if pb == nil {
return nil
}

for i := 0; i < 4; i++ {
pb = pb.PopFront()
}

return pb
}

// AddFoldersForItem adds entries for the given folders. It skips adding entries that
// have been added by previous calls.
func (b *Builder) AddFoldersForItem(folders []folderEntry, itemInfo ItemInfo, updated bool) {
@ -202,13 +242,18 @@ type Details struct {
DetailsModel
}

func (d *Details) add(repoRef, shortRef, parentRef string, updated bool, info ItemInfo) {
func (d *Details) add(
repoRef, shortRef, parentRef, locationRef string,
updated bool,
info ItemInfo,
) {
d.Entries = append(d.Entries, DetailsEntry{
RepoRef: repoRef,
ShortRef: shortRef,
ParentRef: parentRef,
Updated: updated,
LocationRef: locationRef,
ItemInfo: info,
Updated: updated,
ItemInfo: info,
})
}

@ -233,9 +278,21 @@ type DetailsEntry struct {
RepoRef string `json:"repoRef"`
ShortRef string `json:"shortRef"`
ParentRef string `json:"parentRef,omitempty"`

// LocationRef contains the logical path structure by its human-readable
// display names. IE: If an item is located at "/Inbox/Important", we
// hold that string in the LocationRef, while the actual IDs of each
// container are used for the RepoRef.
// LocationRef only holds the container values, and does not include
// the metadata prefixes (tenant, service, owner, etc) found in the
// repoRef.
// Currently only implemented for Exchange Calendars.
LocationRef string `json:"locationRef,omitempty"`

// Indicates the item was added or updated in this backup
// Always `true` for full backups
Updated bool `json:"updated"`

ItemInfo
}

@ -316,18 +373,21 @@ const (
FolderItem ItemType = iota + 300
)

func UpdateItem(item *ItemInfo, newPath path.Path) error {
func UpdateItem(item *ItemInfo, repoPath path.Path) error {
// Only OneDrive and SharePoint have information about parent folders
// contained in them.
var updatePath func(path.Path) error

switch item.infoType() {
case SharePointItem:
return item.SharePoint.UpdateParentPath(newPath)
updatePath = item.SharePoint.UpdateParentPath

case OneDriveItem:
return item.OneDrive.UpdateParentPath(newPath)
updatePath = item.OneDrive.UpdateParentPath
default:
return nil
}

return nil
return updatePath(repoPath)
}

// ItemInfo is a oneOf that contains service specific
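For reference, the widened `Builder.Add` signature in use. The literal values are placeholders and mirror the streamstore test earlier in this commit:

```go
// Sketch: adding an entry with the new locationRef argument.
b := &details.Builder{}

b.Add(
	"repoRef",     // ID-based storage reference
	"shortRef",    // hash identifying the entry
	"parentRef",   // shortRef of the parent container
	"locationRef", // display-name-based container path, e.g. "Inbox/Important"
	true,          // updated in this backup
	details.ItemInfo{
		Exchange: &details.ExchangeInfo{Subject: "hello world"},
	},
)

deets := b.Details() // entries now carry LocationRef alongside RepoRef
_ = deets
```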
@ -39,8 +39,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{
name: "no info",
entry: DetailsEntry{
RepoRef: "reporef",
ShortRef: "deadbeef",
LocationRef: "locationref",
},
expectHs: []string{"ID"},
expectVs: []string{"deadbeef"},
@ -48,8 +49,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{
name: "exchange event info",
entry: DetailsEntry{
RepoRef: "reporef",
ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{
Exchange: &ExchangeInfo{
ItemType: ExchangeEvent,
@ -67,8 +69,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{
name: "exchange contact info",
entry: DetailsEntry{
RepoRef: "reporef",
ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{
Exchange: &ExchangeInfo{
ItemType: ExchangeContact,
@ -82,8 +85,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{
name: "exchange mail info",
entry: DetailsEntry{
RepoRef: "reporef",
ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{
Exchange: &ExchangeInfo{
ItemType: ExchangeMail,
@ -99,8 +103,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{
name: "sharepoint info",
entry: DetailsEntry{
RepoRef: "reporef",
ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{
SharePoint: &SharePointInfo{
ItemName: "itemName",
@ -128,8 +133,9 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
{
name: "oneDrive info",
entry: DetailsEntry{
RepoRef: "reporef",
ShortRef: "deadbeef",
LocationRef: "locationref",
ItemInfo: ItemInfo{
OneDrive: &OneDriveInfo{
ItemName: "itemName",
@ -157,37 +163,57 @@ func (suite *DetailsUnitSuite) TestDetailsEntry_HeadersValues() {
}

var pathItemsTable = []struct {
name string
ents []DetailsEntry
expectRefs []string
expectRepoRefs []string
expectLocationRefs []string
}{
{
name: "nil entries",
ents: nil,
expectRefs: []string{},
expectRepoRefs: []string{},
expectLocationRefs: []string{},
},
{
name: "single entry",
ents: []DetailsEntry{
{RepoRef: "abcde"},
{
RepoRef: "abcde",
LocationRef: "locationref",
},
},
expectRefs: []string{"abcde"},
expectRepoRefs: []string{"abcde"},
expectLocationRefs: []string{"locationref"},
},
{
name: "multiple entries",
ents: []DetailsEntry{
{RepoRef: "abcde"},
{RepoRef: "12345"},
{
RepoRef: "abcde",
LocationRef: "locationref",
},
{
RepoRef: "12345",
LocationRef: "locationref2",
},
},
expectRefs: []string{"abcde", "12345"},
expectRepoRefs: []string{"abcde", "12345"},
expectLocationRefs: []string{"locationref", "locationref2"},
},
{
name: "multiple entries with folder",
ents: []DetailsEntry{
{RepoRef: "abcde"},
{RepoRef: "12345"},
{
RepoRef: "deadbeef",
RepoRef: "abcde",
LocationRef: "locationref",
},
{
RepoRef: "12345",
LocationRef: "locationref2",
},
{
RepoRef: "deadbeef",
LocationRef: "locationref3",
ItemInfo: ItemInfo{
Folder: &FolderInfo{
DisplayName: "test folder",
@ -195,7 +221,8 @@ var pathItemsTable = []struct {
},
},
},
expectRefs: []string{"abcde", "12345"},
expectRepoRefs: []string{"abcde", "12345"},
expectLocationRefs: []string{"locationref", "locationref2"},
},
}

@ -207,7 +234,7 @@ func (suite *DetailsUnitSuite) TestDetailsModel_Path() {
Entries: test.ents,
},
}
assert.Equal(t, test.expectRefs, d.Paths())
assert.ElementsMatch(t, test.expectRepoRefs, d.Paths())
})
}
}
@ -222,10 +249,11 @@ func (suite *DetailsUnitSuite) TestDetailsModel_Items() {
}

ents := d.Items()
assert.Len(t, ents, len(test.expectRefs))
assert.Len(t, ents, len(test.expectRepoRefs))

for _, e := range ents {
assert.Contains(t, test.expectRefs, e.RepoRef)
assert.Contains(t, test.expectRepoRefs, e.RepoRef)
assert.Contains(t, test.expectLocationRefs, e.LocationRef)
}
})
}
@ -253,9 +281,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
name: "MultipleFolders",
folders: []folderEntry{
{
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{
Folder: &FolderInfo{
Modified: folderTimeOlderThanItem,
@ -263,9 +292,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
},
},
{
RepoRef: "rr2",
ShortRef: "sr2",
ParentRef: "pr2",
LocationRef: "lr2",
Info: ItemInfo{
Folder: &FolderInfo{
Modified: folderTimeNewerThanItem,
@ -283,9 +313,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
name: "MultipleFoldersWithRepeats",
folders: []folderEntry{
{
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{
Folder: &FolderInfo{
Modified: folderTimeOlderThanItem,
@ -293,9 +324,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
},
},
{
RepoRef: "rr2",
ShortRef: "sr2",
ParentRef: "pr2",
LocationRef: "lr2",
Info: ItemInfo{
Folder: &FolderInfo{
Modified: folderTimeOlderThanItem,
@ -303,9 +335,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
},
},
{
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{
Folder: &FolderInfo{
Modified: folderTimeOlderThanItem,
@ -313,9 +346,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFolders() {
},
},
{
RepoRef: "rr3",
ShortRef: "sr3",
ParentRef: "pr3",
LocationRef: "lr3",
Info: ItemInfo{
Folder: &FolderInfo{
Modified: folderTimeNewerThanItem,
@ -363,18 +397,20 @@ func (suite *DetailsUnitSuite) TestDetails_AddFoldersUpdate() {
name: "ItemNotUpdated_NoChange",
folders: []folderEntry{
{
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{
Folder: &FolderInfo{},
},
Updated: true,
},
{
RepoRef: "rr2",
ShortRef: "sr2",
ParentRef: "pr2",
LocationRef: "lr2",
Info: ItemInfo{
Folder: &FolderInfo{},
},
@ -390,17 +426,19 @@ func (suite *DetailsUnitSuite) TestDetails_AddFoldersUpdate() {
name: "ItemUpdated",
folders: []folderEntry{
{
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{
Folder: &FolderInfo{},
},
},
{
RepoRef: "rr2",
ShortRef: "sr2",
ParentRef: "pr2",
LocationRef: "lr2",
Info: ItemInfo{
Folder: &FolderInfo{},
},
@ -482,9 +520,10 @@ func (suite *DetailsUnitSuite) TestDetails_AddFoldersDifferentServices() {
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
folder := folderEntry{
RepoRef: "rr1",
ShortRef: "sr1",
ParentRef: "pr1",
LocationRef: "lr1",
Info: ItemInfo{
Folder: &FolderInfo{},
},
@ -562,7 +601,8 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
table := []struct {
name string
input ItemInfo
newPath path.Path
repoPath path.Path
locPath path.Path
errCheck assert.ErrorAssertionFunc
expectedItem ItemInfo
}{
@ -616,7 +656,8 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
ParentPath: folder1,
},
},
newPath: newOneDrivePath,
repoPath: newOneDrivePath,
locPath: newOneDrivePath,
errCheck: assert.NoError,
expectedItem: ItemInfo{
OneDrive: &OneDriveInfo{
@ -633,7 +674,8 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
ParentPath: folder1,
},
},
newPath: newOneDrivePath,
repoPath: newOneDrivePath,
locPath: newOneDrivePath,
errCheck: assert.NoError,
expectedItem: ItemInfo{
SharePoint: &SharePointInfo{
@ -650,7 +692,8 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
ParentPath: folder1,
},
},
newPath: badOneDrivePath,
repoPath: badOneDrivePath,
locPath: badOneDrivePath,
errCheck: assert.Error,
},
{
@ -661,7 +704,8 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
ParentPath: folder1,
},
},
newPath: badOneDrivePath,
repoPath: badOneDrivePath,
locPath: badOneDrivePath,
errCheck: assert.Error,
},
}
@ -669,7 +713,7 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
item := test.input
err := UpdateItem(&item, test.newPath)
err := UpdateItem(&item, test.repoPath)
test.errCheck(t, err)

if err != nil {
@ -680,3 +724,162 @@ func (suite *DetailsUnitSuite) TestUpdateItem() {
})
}
}

var (
basePath = path.Builder{}.Append("ten", "serv", "user", "type")
baseFolderEnts = []folderEntry{
{
RepoRef: basePath.String(),
ShortRef: basePath.ShortRef(),
ParentRef: basePath.Dir().ShortRef(),
LocationRef: "",
Info: ItemInfo{
Folder: &FolderInfo{
ItemType: FolderItem,
DisplayName: "type",
},
},
},
{
RepoRef: basePath.Dir().String(),
ShortRef: basePath.Dir().ShortRef(),
ParentRef: basePath.Dir().Dir().ShortRef(),
LocationRef: "",
Info: ItemInfo{
Folder: &FolderInfo{
ItemType: FolderItem,
DisplayName: "user",
},
},
},
{
RepoRef: basePath.Dir().Dir().String(),
ShortRef: basePath.Dir().Dir().ShortRef(),
ParentRef: basePath.Dir().Dir().Dir().ShortRef(),
LocationRef: "",
Info: ItemInfo{
Folder: &FolderInfo{
ItemType: FolderItem,
DisplayName: "serv",
},
},
},
{
RepoRef: basePath.Dir().Dir().Dir().String(),
ShortRef: basePath.Dir().Dir().Dir().ShortRef(),
ParentRef: "",
LocationRef: "",
Info: ItemInfo{
Folder: &FolderInfo{
ItemType: FolderItem,
DisplayName: "ten",
},
},
},
}
)

func folderEntriesFor(pathElems []string, locElems []string) []folderEntry {
p := basePath.Append(pathElems...)
l := path.Builder{}.Append(locElems...)

ents := make([]folderEntry, 0, len(pathElems)+4)

for range pathElems {
dn := p.LastElem()
if l != nil && len(l.Elements()) > 0 {
dn = l.LastElem()
}

fe := folderEntry{
RepoRef: p.String(),
ShortRef: p.ShortRef(),
ParentRef: p.Dir().ShortRef(),
LocationRef: l.String(),
Info: ItemInfo{
Folder: &FolderInfo{
ItemType: FolderItem,
DisplayName: dn,
},
},
}

l = l.Dir()
p = p.Dir()

ents = append(ents, fe)
}

return append(ents, baseFolderEnts...)
}

func (suite *DetailsUnitSuite) TestFolderEntriesForPath() {
var (
fnords = []string{"fnords"}
smarf = []string{"fnords", "smarf"}
beau = []string{"beau"}
regard = []string{"beau", "regard"}
)

table := []struct {
name string
parent *path.Builder
location *path.Builder
expect []folderEntry
}{
{
name: "base path, parent only",
parent: basePath,
expect: baseFolderEnts,
},
{
name: "base path with location",
parent: basePath,
location: basePath,
expect: baseFolderEnts,
},
{
name: "single depth parent only",
parent: basePath.Append(fnords...),
expect: folderEntriesFor(fnords, nil),
},
{
name: "single depth with location",
parent: basePath.Append(fnords...),
location: basePath.Append(beau...),
expect: folderEntriesFor(fnords, beau),
},
{
name: "two depth parent only",
parent: basePath.Append(smarf...),
expect: folderEntriesFor(smarf, nil),
},
{
name: "two depth with location",
parent: basePath.Append(smarf...),
location: basePath.Append(regard...),
expect: folderEntriesFor(smarf, regard),
},
{
name: "mismatched depth, parent longer",
parent: basePath.Append(smarf...),
location: basePath.Append(beau...),
expect: folderEntriesFor(smarf, beau),
},
// We can't handle this right now. But we don't have any cases
// which immediately require it, either. Keeping in the test
// as a reminder that this might be required at some point.
// {
// name: "mismatched depth, location longer",
// parent: basePath.Append(fnords...),
// location: basePath.Append(regard...),
// expect: folderEntriesFor(fnords, regard),
// },
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
result := FolderEntriesForPath(test.parent, test.location)
assert.ElementsMatch(t, test.expect, result)
})
}
}
@ -102,11 +102,6 @@ func (e *Errors) setErr(err error) *Errors {
return e
}

type Adder interface {
Add(err error) *Errors
Failed() bool
}

// Add appends the error to the slice of recoverable and
// iterated errors (ie: errors.errs). If failFast is true,
// the first Added error will get copied to errors.err,
@ -1,22 +0,0 @@
package mock

import "github.com/alcionai/corso/src/pkg/fault"

// Adder mocks an adder interface for testing.
type Adder struct {
FailFast bool
Errs []error
}

func NewAdder() *Adder {
return &Adder{Errs: []error{}}
}

func (ma *Adder) Add(err error) *fault.Errors {
ma.Errs = append(ma.Errs, err)
return fault.New(true)
}

func (ma *Adder) Failed() bool {
return ma.FailFast && len(ma.Errs) > 0
}
@ -162,8 +162,9 @@ func genLogger(level logLevel, logfile string) (*zapcore.Core, *zap.SugaredLogge

// then try to set up a logger directly
var (
lgr *zap.Logger
err error
opts = []zap.Option{zap.AddStacktrace(zapcore.PanicLevel)}
)

if level != Production {
@ -178,12 +179,13 @@ func genLogger(level logLevel, logfile string) (*zapcore.Core, *zap.SugaredLogge
cfg.Level = zap.NewAtomicLevelAt(zapcore.FatalLevel)
}

opts := []zap.Option{}

if readableOutput {
opts = append(opts, zap.WithCaller(false), zap.AddStacktrace(zapcore.DPanicLevel))
opts = append(opts, zap.WithCaller(false))
cfg.EncoderConfig.EncodeTime = zapcore.TimeEncoderOfLayout("15:04:05.00")
cfg.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder

if logfile == "stderr" || logfile == "stdout" {
cfg.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder
}
}

cfg.OutputPaths = []string{logfile}
@ -191,7 +193,7 @@ func genLogger(level logLevel, logfile string) (*zapcore.Core, *zap.SugaredLogge
} else {
cfg := zap.NewProductionConfig()
cfg.OutputPaths = []string{logfile}
lgr, err = cfg.Build()
lgr, err = cfg.Build(opts...)
}

// fall back to the core config if the default creation fails
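The logger hunk moves option construction ahead of both config branches so the production path also receives the options, and gates the color encoder on terminal outputs. A condensed sketch of the resulting flow, assuming `readableOutput` and `logfile` are defined as in the surrounding function; all zap calls shown are standard zap/zapcore APIs:

```go
// Sketch: zap options shared by both development and production configs.
opts := []zap.Option{zap.AddStacktrace(zapcore.PanicLevel)}

cfg := zap.NewDevelopmentConfig()

if readableOutput {
	opts = append(opts, zap.WithCaller(false))
	cfg.EncoderConfig.EncodeTime = zapcore.TimeEncoderOfLayout("15:04:05.00")

	// colorized levels only make sense on a terminal stream
	if logfile == "stderr" || logfile == "stdout" {
		cfg.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder
	}
}

cfg.OutputPaths = []string{logfile}
lgr, err := cfg.Build(opts...)
```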
@ -20,7 +20,7 @@ func ToOneDrivePath(p Path) (*DrivePath, error) {
if len(folders) < 3 {
return nil, clues.
New("folder path doesn't match expected format for OneDrive items").
With("path_folders", p.Folder())
With("path_folders", p.Folder(false))
}

return &DrivePath{DriveID: folders[1], Folders: folders[3:]}, nil
@ -86,7 +86,7 @@ type Path interface {
Category() CategoryType
Tenant() string
ResourceOwner() string
Folder() string
Folder(bool) string
Folders() []string
Item() string
// PopFront returns a Builder object with the first element (left-side)
@ -140,6 +140,14 @@ func (pb Builder) UnescapeAndAppend(elements ...string) (*Builder, error) {
return res, nil
}

// SplitUnescapeAppend takes in an escaped string representing a directory
// path, splits the string, and appends it to the current builder.
func (pb Builder) SplitUnescapeAppend(s string) (*Builder, error) {
elems := Split(TrimTrailingSlash(s))

return pb.UnescapeAndAppend(elems...)
}

// Append creates a copy of this Builder and adds the given elements them to the
// end of the new Builder. Elements are added in the order they are passed.
func (pb Builder) Append(elements ...string) *Builder {
@ -205,6 +213,14 @@ func (pb Builder) Dir() *Builder {
}
}

func (pb Builder) LastElem() string {
if len(pb.elements) == 0 {
return ""
}

return pb.elements[len(pb.elements)-1]
}

// String returns a string that contains all path elements joined together.
// Elements of the path that need escaping are escaped.
func (pb Builder) String() string {
@ -247,11 +263,6 @@ func (pb Builder) Elements() []string {
return append([]string{}, pb.elements...)
}

//nolint:unused
func (pb Builder) join(start, end int) string {
return join(pb.elements[start:end])
}

func verifyInputValues(tenant, resourceOwner string) error {
if len(tenant) == 0 {
return clues.Stack(errMissingSegment, errors.New("tenant"))
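Two small `Builder` helpers land here. A usage sketch, assuming corso's `path` package; `Split` and `TrimTrailingSlash` already exist in that package:

```go
// Sketch: LastElem reads the final element; SplitUnescapeAppend rebuilds
// a builder from an escaped folder string such as one kept in a LocationRef.
pb := path.Builder{}.Append("Inbox", "Important")

last := pb.LastElem() // "Important"

rebuilt, err := path.Builder{}.SplitUnescapeAppend("Inbox/Important/")
if err != nil {
	// malformed escape sequences surface here
}

_, _ = last, rebuilt
```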
@ -480,13 +480,87 @@ func (suite *PathUnitSuite) TestFromStringErrors() {
}
}

func (suite *PathUnitSuite) TestFolder() {
table := []struct {
name string
p func(t *testing.T) Path
escape bool
expectFolder string
expectSplit []string
}{
{
name: "clean path",
p: func(t *testing.T) Path {
p, err := Builder{}.
Append("a", "b", "c").
ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false)
require.NoError(t, err)

return p
},
expectFolder: "a/b/c",
expectSplit: []string{"a", "b", "c"},
},
{
name: "clean path escaped",
p: func(t *testing.T) Path {
p, err := Builder{}.
Append("a", "b", "c").
ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false)
require.NoError(t, err)

return p
},
escape: true,
expectFolder: "a/b/c",
expectSplit: []string{"a", "b", "c"},
},
{
name: "escapable path",
p: func(t *testing.T) Path {
p, err := Builder{}.
Append("a/", "b", "c").
ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false)
require.NoError(t, err)

return p
},
expectFolder: "a//b/c",
expectSplit: []string{"a", "b", "c"},
},
{
name: "escapable path escaped",
p: func(t *testing.T) Path {
p, err := Builder{}.
Append("a/", "b", "c").
ToDataLayerExchangePathForCategory("t", "u", EmailCategory, false)
require.NoError(t, err)

return p
},
escape: true,
expectFolder: "a\\//b/c",
expectSplit: []string{"a\\/", "b", "c"},
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
p := test.p(t)
result := p.Folder(test.escape)
assert.Equal(t, test.expectFolder, result)
assert.Equal(t, test.expectSplit, Split(result))
})
}
}

func (suite *PathUnitSuite) TestFromString() {
const (
testTenant = "tenant"
testUser = "user"
testElement1 = "folder"
testElement2 = "folder2"
testElement3 = "other"
testElement1 = "folder/"
testElementTrimmed = "folder"
testElement2 = "folder2"
testElement3 = "other"
)

isItem := []struct {
@ -509,9 +583,13 @@ func (suite *PathUnitSuite) TestFromString() {
// Expected result for Folder() if path is marked as a folder.
expectedFolder string
// Expected result for Item() if path is marked as an item.
expectedItem string
// Expected result for Split(Folder()) if path is marked as a folder.
expectedSplit []string
expectedItem string
// Expected result for Folder() if path is marked as an item.
expectedItemFolder string
// Expected result for Split(Folder()) if path is marked as an item.
expectedItemSplit []string
}{
{
name: "BasicPath",
@ -525,16 +603,25 @@ func (suite *PathUnitSuite) TestFromString() {
),
expectedFolder: fmt.Sprintf(
"%s/%s/%s",
testElement1,
testElementTrimmed,
testElement2,
testElement3,
),
expectedSplit: []string{
testElementTrimmed,
testElement2,
testElement3,
},
expectedItem: testElement3,
expectedItemFolder: fmt.Sprintf(
"%s/%s",
testElement1,
testElementTrimmed,
testElement2,
),
expectedItemSplit: []string{
testElementTrimmed,
testElement2,
},
},
{
name: "PathWithEmptyElements",
@ -542,22 +629,31 @@ func (suite *PathUnitSuite) TestFromString() {
"/%s//%%s//%s//%%s//%s///%s//%s//",
testTenant,
testUser,
testElement1,
testElementTrimmed,
testElement2,
testElement3,
),
expectedFolder: fmt.Sprintf(
"%s/%s/%s",
testElement1,
testElementTrimmed,
testElement2,
testElement3,
),
expectedSplit: []string{
testElementTrimmed,
testElement2,
testElement3,
},
expectedItem: testElement3,
expectedItemFolder: fmt.Sprintf(
"%s/%s",
testElement1,
testElementTrimmed,
testElement2,
),
expectedItemSplit: []string{
testElementTrimmed,
testElement2,
},
},
}

@ -572,16 +668,25 @@ func (suite *PathUnitSuite) TestFromString() {
p, err := FromDataLayerPath(testPath, item.isItem)
require.NoError(t, err)

assert.Equal(t, service, p.Service())
assert.Equal(t, service, p.Service(), "service")
assert.Equal(t, cat, p.Category())
assert.Equal(t, cat, p.Category(), "category")
assert.Equal(t, testTenant, p.Tenant())
assert.Equal(t, testTenant, p.Tenant(), "tenant")
assert.Equal(t, testUser, p.ResourceOwner())
assert.Equal(t, testUser, p.ResourceOwner(), "resource owner")

if !item.isItem {
assert.Equal(t, test.expectedFolder, p.Folder())
fld := p.Folder(false)
escfld := p.Folder(true)

if item.isItem {
assert.Equal(t, test.expectedItemFolder, fld, "item folder")
assert.Equal(t, test.expectedItemSplit, Split(fld), "item split")
assert.Equal(t, test.expectedItemFolder, escfld, "escaped item folder")
assert.Equal(t, test.expectedItemSplit, Split(escfld), "escaped item split")
assert.Equal(t, test.expectedItem, p.Item(), "item")
} else {
assert.Equal(t, test.expectedItemFolder, p.Folder())
assert.Equal(t, test.expectedItem, p.Item())
assert.Equal(t, test.expectedFolder, fld, "dir folder")
assert.Equal(t, test.expectedSplit, Split(fld), "dir split")
assert.Equal(t, test.expectedFolder, escfld, "escaped dir folder")
assert.Equal(t, test.expectedSplit, Split(escfld), "escaped dir split")
}
})
}
@ -201,13 +201,20 @@ func (rp dataLayerResourcePath) lastFolderIdx() int {
}

// Folder returns the folder segment embedded in the dataLayerResourcePath.
func (rp dataLayerResourcePath) Folder() string {
func (rp dataLayerResourcePath) Folder(escape bool) string {
endIdx := rp.lastFolderIdx()
if endIdx == 4 {
return ""
}

return rp.Builder.join(4, endIdx)
fs := rp.Folders()

if !escape {
return join(fs)
}

// builder.String() will escape all individual elements.
return Builder{}.Append(fs...).String()
}

// Folders returns the individual folder elements embedded in the
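The escape flag determines whether folder elements are rejoined raw or escaped. A self-contained sketch using the expectations from the `TestFolder` cases added in this commit:

```go
package main

import (
	"fmt"

	"github.com/alcionai/corso/src/pkg/path"
)

func main() {
	// "a/" contains the path separator, so escaping matters.
	p, err := path.Builder{}.
		Append("a/", "b", "c").
		ToDataLayerExchangePathForCategory("t", "u", path.EmailCategory, false)
	if err != nil {
		panic(err)
	}

	fmt.Println(p.Folder(false)) // a//b/c   (raw display names)
	fmt.Println(p.Folder(true))  // a\//b/c  (escaped; safe to Split)
}
```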
@ -172,7 +172,7 @@ func (suite *DataLayerResourcePath) TestMailItemNoFolder() {
)
require.NoError(t, err)

assert.Empty(t, p.Folder())
assert.Empty(t, p.Folder(false))
assert.Empty(t, p.Folders())
assert.Equal(t, item, p.Item())
})
@ -391,7 +391,7 @@ func (suite *DataLayerResourcePath) TestToExchangePathForCategory() {
assert.Equal(t, path.ExchangeService, p.Service())
assert.Equal(t, test.category, p.Category())
assert.Equal(t, testUser, p.ResourceOwner())
assert.Equal(t, strings.Join(m.expectedFolders, "/"), p.Folder())
assert.Equal(t, strings.Join(m.expectedFolders, "/"), p.Folder(false))
assert.Equal(t, m.expectedFolders, p.Folders())
assert.Equal(t, m.expectedItem, p.Item())
})
@ -465,7 +465,7 @@ func (suite *PopulatedDataLayerResourcePath) TestFolder() {
assert.Equal(
t,
strings.Join(m.expectedFolders, "/"),
suite.paths[m.isItem].Folder(),
suite.paths[m.isItem].Folder(false),
)
})
}
@ -525,7 +525,7 @@ func (suite *PopulatedDataLayerResourcePath) TestAppend() {
return
}

assert.Equal(t, test.expectedFolder, newPath.Folder())
assert.Equal(t, test.expectedFolder, newPath.Folder(false))
assert.Equal(t, test.expectedItem, newPath.Item())
})
}
@ -575,12 +575,12 @@ func (ec exchangeCategory) isLeaf() bool {
return ec == ec.leafCat()
}

// pathValues transforms a path to a map of identified properties.
// pathValues transforms the two paths to maps of identified properties.
//
// Example:
// [tenantID, service, userPN, category, mailFolder, mailID]
// => {exchUser: userPN, exchMailFolder: mailFolder, exchMail: mailID}
// => {exchMailFolder: mailFolder, exchMail: mailID}
func (ec exchangeCategory) pathValues(p path.Path) map[categorizer]string {
func (ec exchangeCategory) pathValues(repo, location path.Path) (map[categorizer]string, map[categorizer]string) {
var folderCat, itemCat categorizer

switch ec {
@ -594,13 +594,24 @@ func (ec exchangeCategory) pathValues(p path.Path) map[categorizer]string {
folderCat, itemCat = ExchangeMailFolder, ExchangeMail

default:
return map[categorizer]string{}
return map[categorizer]string{}, map[categorizer]string{}
}

return map[categorizer]string{
rv := map[categorizer]string{
folderCat: p.Folder(),
folderCat: repo.Folder(false),
itemCat: p.Item(),
itemCat: repo.Item(),
}

lv := map[categorizer]string{}

if location != nil {
lv = map[categorizer]string{
folderCat: location.Folder(false),
itemCat: location.Item(),
}
}

return rv, lv
}

// pathKeys returns the path keys recognized by the receiver's leaf type.
@ -708,7 +719,7 @@ func (s ExchangeScope) setDefaults() {
func (s exchange) Reduce(
ctx context.Context,
deets *details.Details,
errs fault.Adder,
errs *fault.Errors,
) *details.Details {
return reduce[ExchangeScope](
ctx,
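With `pathValues` returning both maps, scope matching can consult either the ID-based repo hierarchy or the display-name location, which is what the selector test below exercises. A rough sketch of a caller using both, where `matchesFolder` is a hypothetical stand-in for the selector's comparator:

```go
// Sketch: match a folder scope against either hierarchy.
// repoPath and locPath are placeholder path.Path values.
repoVals, locVals := ec.pathValues(repoPath, locPath)

matched := matchesFolder(repoVals[ExchangeMailFolder]) ||
	matchesFolder(locVals[ExchangeMailFolder])
```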
@ -11,7 +11,7 @@ import (
  "github.com/alcionai/corso/src/internal/common"
  "github.com/alcionai/corso/src/internal/tester"
  "github.com/alcionai/corso/src/pkg/backup/details"
-  "github.com/alcionai/corso/src/pkg/fault/mock"
+  "github.com/alcionai/corso/src/pkg/fault"
  "github.com/alcionai/corso/src/pkg/filters"
  "github.com/alcionai/corso/src/pkg/path"
)
@ -705,13 +705,16 @@ func (suite *ExchangeSelectorSuite) TestExchangeScope_MatchesInfo() {
func (suite *ExchangeSelectorSuite) TestExchangeScope_MatchesPath() {
  const (
    usr  = "userID"
+    fID1 = "mf_id_1"
    fld1 = "mailFolder"
+    fID2 = "mf_id_2"
    fld2 = "subFolder"
    mail = "mailID"
  )

  var (
-    pth   = stubPath(suite.T(), usr, []string{fld1, fld2, mail}, path.EmailCategory)
+    repo  = stubPath(suite.T(), usr, []string{fID1, fID2, mail}, path.EmailCategory)
+    loc   = stubPath(suite.T(), usr, []string{fld1, fld2, mail}, path.EmailCategory)
    short = "thisisahashofsomekind"
    es    = NewExchangeRestore(Any())
  )
@ -726,13 +729,18 @@ func (suite *ExchangeSelectorSuite) TestExchangeScope_MatchesPath() {
    {"all folders", es.MailFolders(Any()), "", assert.True},
    {"no folders", es.MailFolders(None()), "", assert.False},
    {"matching folder", es.MailFolders([]string{fld1}), "", assert.True},
+    {"matching folder id", es.MailFolders([]string{fID1}), "", assert.True},
    {"incomplete matching folder", es.MailFolders([]string{"mail"}), "", assert.False},
+    {"incomplete matching folder ID", es.MailFolders([]string{"mf_id"}), "", assert.False},
    {"non-matching folder", es.MailFolders([]string{"smarf"}), "", assert.False},
    {"non-matching folder substring", es.MailFolders([]string{fld1 + "_suffix"}), "", assert.False},
+    {"non-matching folder id substring", es.MailFolders([]string{fID1 + "_suffix"}), "", assert.False},
    {"matching folder prefix", es.MailFolders([]string{fld1}, PrefixMatch()), "", assert.True},
+    {"matching folder ID prefix", es.MailFolders([]string{fID1}, PrefixMatch()), "", assert.True},
    {"incomplete folder prefix", es.MailFolders([]string{"mail"}, PrefixMatch()), "", assert.False},
    {"matching folder substring", es.MailFolders([]string{"Folder"}), "", assert.False},
    {"one of multiple folders", es.MailFolders([]string{"smarf", fld2}), "", assert.True},
+    {"one of multiple folders by ID", es.MailFolders([]string{"smarf", fID2}), "", assert.True},
    {"all mail", es.Mails(Any(), Any()), "", assert.True},
    {"no mail", es.Mails(Any(), None()), "", assert.False},
    {"matching mail", es.Mails(Any(), []string{mail}), "", assert.True},
@ -746,8 +754,12 @@ func (suite *ExchangeSelectorSuite) TestExchangeScope_MatchesPath() {
      scopes := setScopesToDefault(test.scope)
      var aMatch bool
      for _, scope := range scopes {
-        pv := ExchangeMail.pathValues(pth)
-        if matchesPathValues(scope, ExchangeMail, pv, short) {
+        repoVals, locVals := ExchangeMail.pathValues(repo, loc)
+        if matchesPathValues(scope, ExchangeMail, repoVals, short) {
+          aMatch = true
+          break
+        }
+        if matchesPathValues(scope, ExchangeMail, locVals, short) {
          aMatch = true
          break
        }
@ -833,6 +845,256 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() {
    return deets
  }

+  table := []struct {
+    name         string
+    deets        *details.Details
+    makeSelector func() *ExchangeRestore
+    expect       []string
+  }{
+    {
+      "no refs",
+      makeDeets(),
+      func() *ExchangeRestore {
+        er := NewExchangeRestore(Any())
+        er.Include(er.AllData())
+        return er
+      },
+      []string{},
+    },
+    {
+      "contact only",
+      makeDeets(contact),
+      func() *ExchangeRestore {
+        er := NewExchangeRestore(Any())
+        er.Include(er.AllData())
+        return er
+      },
+      []string{contact},
+    },
+    {
+      "event only",
+      makeDeets(event),
+      func() *ExchangeRestore {
+        er := NewExchangeRestore(Any())
+        er.Include(er.AllData())
+        return er
+      },
+      []string{event},
+    },
+    {
+      "mail only",
+      makeDeets(mail),
+      func() *ExchangeRestore {
+        er := NewExchangeRestore(Any())
+        er.Include(er.AllData())
+        return er
+      },
+      []string{mail},
+    },
+    {
+      "all",
+      makeDeets(contact, event, mail),
+      func() *ExchangeRestore {
+        er := NewExchangeRestore(Any())
+        er.Include(er.AllData())
+        return er
+      },
+      []string{contact, event, mail},
+    },
+    {
+      "only match contact",
+      makeDeets(contact, event, mail),
+      func() *ExchangeRestore {
+        er := NewExchangeRestore([]string{"uid"})
+        er.Include(er.Contacts([]string{"cfld"}, []string{"cid"}))
+        return er
+      },
+      []string{contact},
+    },
+    {
+      "only match contactInSubFolder",
+      makeDeets(contactInSubFolder, contact, event, mail),
+      func() *ExchangeRestore {
+        er := NewExchangeRestore([]string{"uid"})
+        er.Include(er.ContactFolders([]string{"cfld1/cfld2"}))
+        return er
+      },
+      []string{contactInSubFolder},
+    },
+    {
+      "only match contactInSubFolder by prefix",
+      makeDeets(contactInSubFolder, contact, event, mail),
+      func() *ExchangeRestore {
+        er := NewExchangeRestore([]string{"uid"})
+        er.Include(er.ContactFolders([]string{"cfld1/cfld2"}, PrefixMatch()))
+        return er
+      },
+      []string{contactInSubFolder},
+    },
+    {
+      "only match contactInSubFolder by leaf folder",
+      makeDeets(contactInSubFolder, contact, event, mail),
+      func() *ExchangeRestore {
+        er := NewExchangeRestore([]string{"uid"})
+        er.Include(er.ContactFolders([]string{"cfld2"}))
+        return er
+      },
+      []string{contactInSubFolder},
+    },
+    {
+      "only match event",
+      makeDeets(contact, event, mail),
+      func() *ExchangeRestore {
+        er := NewExchangeRestore([]string{"uid"})
+        er.Include(er.Events([]string{"ecld"}, []string{"eid"}))
+        return er
+      },
+      []string{event},
+    },
+    {
+      "only match mail",
+      makeDeets(contact, event, mail),
+      func() *ExchangeRestore {
+        er := NewExchangeRestore([]string{"uid"})
+        er.Include(er.Mails([]string{"mfld"}, []string{"mid"}))
+        return er
+      },
+      []string{mail},
+    },
+    {
+      "exclude contact",
+      makeDeets(contact, event, mail),
+      func() *ExchangeRestore {
+        er := NewExchangeRestore(Any())
+        er.Include(er.AllData())
+        er.Exclude(er.Contacts([]string{"cfld"}, []string{"cid"}))
+        return er
+      },
+      []string{event, mail},
+    },
+    {
+      "exclude event",
+      makeDeets(contact, event, mail),
+      func() *ExchangeRestore {
+        er := NewExchangeRestore(Any())
+        er.Include(er.AllData())
+        er.Exclude(er.Events([]string{"ecld"}, []string{"eid"}))
+        return er
+      },
+      []string{contact, mail},
+    },
+    {
+      "exclude mail",
+      makeDeets(contact, event, mail),
+      func() *ExchangeRestore {
+        er := NewExchangeRestore(Any())
+        er.Include(er.AllData())
+        er.Exclude(er.Mails([]string{"mfld"}, []string{"mid"}))
+        return er
+      },
+      []string{contact, event},
+    },
+    {
+      "filter on mail subject",
+      func() *details.Details {
+        ds := makeDeets(mail)
+        for i := range ds.Entries {
+          ds.Entries[i].Exchange.Subject = "has a subject"
+        }
+        return ds
+      }(),
+      func() *ExchangeRestore {
+        er := NewExchangeRestore(Any())
+        er.Include(er.AllData())
+        er.Filter(er.MailSubject("subj"))
+        return er
+      },
+      []string{mail},
+    },
+    {
+      "filter on mail subject multiple input categories",
+      func() *details.Details {
+        mds := makeDeets(mail)
+        for i := range mds.Entries {
+          mds.Entries[i].Exchange.Subject = "has a subject"
+        }
+
+        ds := makeDeets(contact, event)
+        ds.Entries = append(ds.Entries, mds.Entries...)
+
+        return ds
+      }(),
+      func() *ExchangeRestore {
+        er := NewExchangeRestore(Any())
+        er.Include(er.AllData())
+        er.Filter(er.MailSubject("subj"))
+        return er
+      },
+      []string{mail},
+    },
+  }
+  for _, test := range table {
+    suite.T().Run(test.name, func(t *testing.T) {
+      ctx, flush := tester.NewContext()
+      defer flush()
+
+      sel := test.makeSelector()
+      results := sel.Reduce(ctx, test.deets, fault.New(true))
+      paths := results.Paths()
+      assert.Equal(t, test.expect, paths)
+    })
+  }
+}
+
+func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce_locationRef() {
+  var (
+    contact         = stubRepoRef(path.ExchangeService, path.ContactsCategory, "uid", "id5/id6", "cid")
+    contactLocation = "conts/my_cont"
+    event           = stubRepoRef(path.ExchangeService, path.EventsCategory, "uid", "id1/id2", "eid")
+    eventLocation   = "cal/my_cal"
+    mail            = stubRepoRef(path.ExchangeService, path.EmailCategory, "uid", "id3/id4", "mid")
+    mailLocation    = "inbx/my_mail"
+  )
+
+  makeDeets := func(refs ...string) *details.Details {
+    deets := &details.Details{
+      DetailsModel: details.DetailsModel{
+        Entries: []details.DetailsEntry{},
+      },
+    }
+
+    for _, r := range refs {
+      var (
+        location string
+        itype    = details.UnknownType
+      )
+
+      switch r {
+      case contact:
+        itype = details.ExchangeContact
+        location = contactLocation
+      case event:
+        itype = details.ExchangeEvent
+        location = eventLocation
+      case mail:
+        itype = details.ExchangeMail
+        location = mailLocation
+      }
+
+      deets.Entries = append(deets.Entries, details.DetailsEntry{
+        RepoRef:     r,
+        LocationRef: location,
+        ItemInfo: details.ItemInfo{
+          Exchange: &details.ExchangeInfo{
+            ItemType: itype,
+          },
+        },
+      })
+    }
+
+    return deets
+  }
+
  arr := func(s ...string) []string {
    return s
  }
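The test above leans on entries that carry both references. As a quick illustration of why both matter (a sketch, not the package's actual wiring): the repoRef stores stable container IDs, while the locationRef records the human-readable folder chain, so a selector given a display name can still find an item stored under IDs.

```go
// Sketch: the dual-reference idea behind the locationRef tests. The entry
// type is trimmed down; only RepoRef/LocationRef matter here.
package main

import (
    "fmt"
    "strings"
)

type detailsEntry struct {
    RepoRef     string // ID-based storage path, e.g. "tenant/exchange/uid/email/id3/id4/mid"
    LocationRef string // display-name folder chain, e.g. "inbx/my_mail"
}

// matchesFolder reports whether a user-supplied folder scope matches the
// entry, preferring the display-name location when one is present.
func matchesFolder(ent detailsEntry, scope string) bool {
    if len(ent.LocationRef) > 0 && strings.Contains(ent.LocationRef, scope) {
        return true
    }
    return strings.Contains(ent.RepoRef, scope)
}

func main() {
    ent := detailsEntry{
        RepoRef:     "tenant/exchange/uid/email/id3/id4/mid",
        LocationRef: "inbx/my_mail",
    }
    fmt.Println(matchesFolder(ent, "my_mail")) // true, via locationRef
    fmt.Println(matchesFolder(ent, "id3"))     // true, via repoRef
}
```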
@ -898,47 +1160,17 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() {
      makeDeets(contact, event, mail),
      func() *ExchangeRestore {
        er := NewExchangeRestore([]string{"uid"})
-        er.Include(er.Contacts([]string{"cfld"}, []string{"cid"}))
+        er.Include(er.Contacts([]string{contactLocation}, []string{"cid"}))
        return er
      },
      arr(contact),
    },
-    {
-      "only match contactInSubFolder",
-      makeDeets(contactInSubFolder, contact, event, mail),
-      func() *ExchangeRestore {
-        er := NewExchangeRestore([]string{"uid"})
-        er.Include(er.ContactFolders([]string{"cfld1/cfld2"}))
-        return er
-      },
-      arr(contactInSubFolder),
-    },
-    {
-      "only match contactInSubFolder by prefix",
-      makeDeets(contactInSubFolder, contact, event, mail),
-      func() *ExchangeRestore {
-        er := NewExchangeRestore([]string{"uid"})
-        er.Include(er.ContactFolders([]string{"cfld1/cfld2"}, PrefixMatch()))
-        return er
-      },
-      arr(contactInSubFolder),
-    },
-    {
-      "only match contactInSubFolder by leaf folder",
-      makeDeets(contactInSubFolder, contact, event, mail),
-      func() *ExchangeRestore {
-        er := NewExchangeRestore([]string{"uid"})
-        er.Include(er.ContactFolders([]string{"cfld2"}))
-        return er
-      },
-      arr(contactInSubFolder),
-    },
    {
      "only match event",
      makeDeets(contact, event, mail),
      func() *ExchangeRestore {
        er := NewExchangeRestore([]string{"uid"})
-        er.Include(er.Events([]string{"ecld"}, []string{"eid"}))
+        er.Include(er.Events([]string{eventLocation}, []string{"eid"}))
        return er
      },
      arr(event),
@ -948,7 +1180,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() {
      makeDeets(contact, event, mail),
      func() *ExchangeRestore {
        er := NewExchangeRestore([]string{"uid"})
-        er.Include(er.Mails([]string{"mfld"}, []string{"mid"}))
+        er.Include(er.Mails([]string{mailLocation}, []string{"mid"}))
        return er
      },
      arr(mail),
@ -959,7 +1191,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() {
      func() *ExchangeRestore {
        er := NewExchangeRestore(Any())
        er.Include(er.AllData())
-        er.Exclude(er.Contacts([]string{"cfld"}, []string{"cid"}))
+        er.Exclude(er.Contacts([]string{contactLocation}, []string{"cid"}))
        return er
      },
      arr(event, mail),
@ -970,7 +1202,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() {
      func() *ExchangeRestore {
        er := NewExchangeRestore(Any())
        er.Include(er.AllData())
-        er.Exclude(er.Events([]string{"ecld"}, []string{"eid"}))
+        er.Exclude(er.Events([]string{eventLocation}, []string{"eid"}))
        return er
      },
      arr(contact, mail),
@ -981,7 +1213,7 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() {
      func() *ExchangeRestore {
        er := NewExchangeRestore(Any())
        er.Include(er.AllData())
-        er.Exclude(er.Mails([]string{"mfld"}, []string{"mid"}))
+        er.Exclude(er.Mails([]string{mailLocation}, []string{"mid"}))
        return er
      },
      arr(contact, event),
@ -1030,13 +1262,10 @@ func (suite *ExchangeSelectorSuite) TestExchangeRestore_Reduce() {
      ctx, flush := tester.NewContext()
      defer flush()

-      errs := mock.NewAdder()
-
      sel := test.makeSelector()
-      results := sel.Reduce(ctx, test.deets, errs)
+      results := sel.Reduce(ctx, test.deets, fault.New(true))
      paths := results.Paths()
      assert.Equal(t, test.expect, paths)
-      assert.Empty(t, errs.Errs)
    })
  }
}
@ -1131,9 +1360,12 @@ func (suite *ExchangeSelectorSuite) TestPasses() {
  }
  for _, test := range table {
    suite.T().Run(test.name, func(t *testing.T) {
+      repoVals, locVals := cat.pathValues(pth, pth)
+
      result := passes(
        cat,
-        cat.pathValues(pth),
+        repoVals,
+        locVals,
        entry,
        test.excludes,
        test.filters,
@ -1236,17 +1468,17 @@ func (suite *ExchangeSelectorSuite) TestExchangeCategory_PathValues() {

  contactPath := stubPath(t, "user", []string{"cfolder", "contactitem"}, path.ContactsCategory)
  contactMap := map[categorizer]string{
-    ExchangeContactFolder: contactPath.Folder(),
+    ExchangeContactFolder: contactPath.Folder(false),
    ExchangeContact:       contactPath.Item(),
  }
  eventPath := stubPath(t, "user", []string{"ecalendar", "eventitem"}, path.EventsCategory)
  eventMap := map[categorizer]string{
-    ExchangeEventCalendar: eventPath.Folder(),
+    ExchangeEventCalendar: eventPath.Folder(false),
    ExchangeEvent:         eventPath.Item(),
  }
  mailPath := stubPath(t, "user", []string{"mfolder", "mailitem"}, path.EmailCategory)
  mailMap := map[categorizer]string{
-    ExchangeMailFolder: mailPath.Folder(),
+    ExchangeMailFolder: mailPath.Folder(false),
    ExchangeMail:       mailPath.Item(),
  }

@ -1261,7 +1493,9 @@ func (suite *ExchangeSelectorSuite) TestExchangeCategory_PathValues() {
  }
  for _, test := range table {
    suite.T().Run(string(test.cat), func(t *testing.T) {
-      assert.Equal(t, test.cat.pathValues(test.path), test.expect)
+      r, l := test.cat.pathValues(test.path, test.path)
+      assert.Equal(t, test.expect, r)
+      assert.Equal(t, test.expect, l)
    })
  }
}
@ -55,11 +55,13 @@ func (mc mockCategorizer) isLeaf() bool {
  return mc == leafCatStub
}

-func (mc mockCategorizer) pathValues(pth path.Path) map[categorizer]string {
-  return map[categorizer]string{
+func (mc mockCategorizer) pathValues(repo, location path.Path) (map[categorizer]string, map[categorizer]string) {
+  pv := map[categorizer]string{
    rootCatStub: "root",
    leafCatStub: "leaf",
  }
+
+  return pv, pv
}

func (mc mockCategorizer) pathKeys() []categorizer {
@ -371,19 +371,30 @@ func (c oneDriveCategory) isLeaf() bool {
  return c == OneDriveItem
}

-// pathValues transforms a path to a map of identified properties.
+// pathValues transforms the two paths to maps of identified properties.
//
// Example:
// [tenantID, service, userPN, category, folder, fileID]
-// => {odUser: userPN, odFolder: folder, odFileID: fileID}
+// => {odFolder: folder, odFileID: fileID}
-func (c oneDriveCategory) pathValues(p path.Path) map[categorizer]string {
+func (c oneDriveCategory) pathValues(repo, location path.Path) (map[categorizer]string, map[categorizer]string) {
  // Ignore `drives/<driveID>/root:` for folder comparison
-  folder := path.Builder{}.Append(p.Folders()...).PopFront().PopFront().PopFront().String()
+  rFld := path.Builder{}.Append(repo.Folders()...).PopFront().PopFront().PopFront().String()

-  return map[categorizer]string{
-    OneDriveFolder: folder,
-    OneDriveItem:   p.Item(),
+  rv := map[categorizer]string{
+    OneDriveFolder: rFld,
+    OneDriveItem:   repo.Item(),
  }
+
+  lv := map[categorizer]string{}
+
+  if location != nil {
+    lFld := path.Builder{}.Append(location.Folders()...).PopFront().PopFront().PopFront().String()
+    lv = map[categorizer]string{
+      OneDriveFolder: lFld,
+      OneDriveItem:   location.Item(),
+    }
+  }
+
+  return rv, lv
}

// pathKeys returns the path keys recognized by the receiver's leaf type.
@ -487,7 +498,7 @@ func (s OneDriveScope) DiscreteCopy(user string) OneDriveScope {
func (s oneDrive) Reduce(
  ctx context.Context,
  deets *details.Details,
-  errs fault.Adder,
+  errs *fault.Errors,
) *details.Details {
  return reduce[OneDriveScope](
    ctx,
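The `PopFront().PopFront().PopFront()` chain above drops the `drives/<driveID>/root:` prefix before folders are compared. A small sketch of that trimming with a plain slice, since the real path.Builder behavior here amounts to discarding the first three folder elements (an assumption drawn from the comment in the diff):

```go
// Sketch of the OneDrive folder trimming: drop the three leading elements
// ("drives", "<driveID>", "root:") and join the rest for comparison.
package main

import (
    "fmt"
    "strings"
)

func trimDrivePrefix(folders []string) string {
    // mirrors Builder.PopFront() called three times
    for i := 0; i < 3 && len(folders) > 0; i++ {
        folders = folders[1:]
    }
    return strings.Join(folders, "/")
}

func main() {
    folders := []string{"drives", "driveID", "root:", "Documents", "Reports"}
    fmt.Println(trimDrivePrefix(folders)) // Documents/Reports
}
```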
@ -11,7 +11,7 @@ import (
  "github.com/alcionai/corso/src/internal/common"
  "github.com/alcionai/corso/src/internal/tester"
  "github.com/alcionai/corso/src/pkg/backup/details"
-  "github.com/alcionai/corso/src/pkg/fault/mock"
+  "github.com/alcionai/corso/src/pkg/fault"
  "github.com/alcionai/corso/src/pkg/path"
)

@ -242,13 +242,10 @@ func (suite *OneDriveSelectorSuite) TestOneDriveRestore_Reduce() {
      ctx, flush := tester.NewContext()
      defer flush()

-      errs := mock.NewAdder()
-
      sel := test.makeSelector()
-      results := sel.Reduce(ctx, test.deets, errs)
+      results := sel.Reduce(ctx, test.deets, fault.New(true))
      paths := results.Paths()
      assert.Equal(t, test.expect, paths)
-      assert.Empty(t, errs.Errs)
    })
  }
}
@ -265,7 +262,9 @@ func (suite *OneDriveSelectorSuite) TestOneDriveCategory_PathValues() {
    OneDriveItem: "file",
  }

-  assert.Equal(t, expected, OneDriveItem.pathValues(filePath))
+  r, l := OneDriveItem.pathValues(filePath, filePath)
+  assert.Equal(t, expected, r)
+  assert.Equal(t, expected, l)
}

func (suite *OneDriveSelectorSuite) TestOneDriveScope_MatchesInfo() {
@ -77,17 +77,18 @@ type (
  // eg: in a resourceOwner/folder/item structure, the item is the leaf.
  isLeaf() bool

-  // pathValues should produce a map of category:string pairs populated by extracting
-  // values out of the path.Path struct.
+  // pathValues takes in two paths, both variants of the repoRef, one containing the standard
+  // repoRef, and the other amended to include the locationRef directories (if available). It
+  // should produce two maps of category:string pairs populated by extracting the values out of
+  // each path.Path.
  //
  // Ex: given a path builder like ["tenant", "service", "resource", "dataType", "folder", "itemID"],
  // the func should use the path to construct a map similar to this:
  // {
-  //   rootCat: resource,
  //   folderCat: folder,
  //   itemCat: itemID,
  // }
-  pathValues(path.Path) map[categorizer]string
+  pathValues(path.Path, path.Path) (map[categorizer]string, map[categorizer]string)

  // pathKeys produces a list of categorizers that can be used as keys in the pathValues
  // map. The combination of the two funcs generically interprets the context of the
@ -287,7 +288,7 @@ func reduce[T scopeT, C categoryT](
  deets *details.Details,
  s Selector,
  dataCategories map[path.CategoryType]C,
-  errs fault.Adder,
+  errs *fault.Errors,
) *details.Details {
  ctx, end := D.Span(ctx, "selectors:reduce")
  defer end()
@ -317,6 +318,31 @@ func reduce[T scopeT, C categoryT](
      continue
    }

+    var locationPath path.Path
+
+    // if the details entry has a locationRef specified, use those folders in place
+    // of the repoRef folders, so that scopes can match against the display names
+    // instead of container IDs.
+    if len(ent.LocationRef) > 0 {
+      pb, err := path.Builder{}.SplitUnescapeAppend(ent.LocationRef)
+      if err != nil {
+        errs.Add(clues.Wrap(err, "transforming locationRef to path").WithClues(ctx))
+        continue
+      }
+
+      locationPath, err = pb.Append(repoPath.Item()).
+        ToDataLayerPath(
+          repoPath.Tenant(),
+          repoPath.ResourceOwner(),
+          repoPath.Service(),
+          repoPath.Category(),
+          true)
+      if err != nil {
+        errs.Add(clues.Wrap(err, "transforming locationRef to path").WithClues(ctx))
+        continue
+      }
+    }
+
    // first check, every entry needs to match the selector's resource owners.
    if !matchesResourceOwner.Compare(repoPath.ResourceOwner()) {
      continue
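To make the new branch above concrete: it rebuilds a full data-layer path by splicing the display-name folders from the locationRef into the tenant/owner/service/category frame of the repoRef, then appends the original item. A rough sketch of that splice with plain strings (the real code goes through path.Builder and ToDataLayerPath; this stand-in only shows the shape of the result):

```go
// Sketch: rebuild a location-flavored path from a repoRef frame plus a
// locationRef folder chain. Plain strings stand in for path.Path.
package main

import (
    "fmt"
    "strings"
)

type repoRef struct {
    tenant, owner, service, category string
    folders                          []string
    item                             string
}

// locationPath swaps the ID-based folders for the display-name folders
// while keeping every other path segment from the repoRef.
func locationPath(r repoRef, locationRef string) string {
    segments := []string{r.tenant, r.service, r.owner, r.category}
    segments = append(segments, strings.Split(locationRef, "/")...)
    segments = append(segments, r.item)
    return strings.Join(segments, "/")
}

func main() {
    r := repoRef{
        tenant: "tenant", owner: "uid", service: "exchange", category: "email",
        folders: []string{"id3", "id4"}, item: "mid",
    }
    fmt.Println(locationPath(r, "inbx/my_mail"))
    // tenant/exchange/uid/email/inbx/my_mail/mid
}
```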
@ -334,7 +360,9 @@ func reduce[T scopeT, C categoryT](
      continue
    }

-    passed := passes(dc, dc.pathValues(repoPath), *ent, e, f, i)
+    rv, lv := dc.pathValues(repoPath, locationPath)
+
+    passed := passes(dc, rv, lv, *ent, e, f, i)
    if passed {
      ents = append(ents, *ent)
    }
@ -379,7 +407,7 @@ func scopesByCategory[T scopeT, C categoryT](
// if the path is included, passes filters, and not excluded.
func passes[T scopeT, C categoryT](
  cat C,
-  pathValues map[categorizer]string,
+  repoValues, locationValues map[categorizer]string,
  entry details.DetailsEntry,
  excs, filts, incs []T,
) bool {
@ -395,7 +423,7 @@ func passes[T scopeT, C categoryT](
  var included bool

  for _, inc := range incs {
-    if matchesEntry(inc, cat, pathValues, entry) {
+    if matchesEntry(inc, cat, repoValues, locationValues, entry) {
      included = true
      break
    }
@ -408,14 +436,14 @@ func passes[T scopeT, C categoryT](

  // all filters must pass
  for _, filt := range filts {
-    if !matchesEntry(filt, cat, pathValues, entry) {
+    if !matchesEntry(filt, cat, repoValues, locationValues, entry) {
      return false
    }
  }

  // any matching exclusion means failure
  for _, exc := range excs {
-    if matchesEntry(exc, cat, pathValues, entry) {
+    if matchesEntry(exc, cat, repoValues, locationValues, entry) {
      return false
    }
  }
@ -428,7 +456,7 @@ func passes[T scopeT, C categoryT](
func matchesEntry[T scopeT, C categoryT](
  sc T,
  cat C,
-  pathValues map[categorizer]string,
+  repoValues, locationValues map[categorizer]string,
  entry details.DetailsEntry,
) bool {
  // filterCategory requires matching against service-specific info values
@ -436,7 +464,11 @@ func matchesEntry[T scopeT, C categoryT](
    return sc.matchesInfo(entry.ItemInfo)
  }

-  return matchesPathValues(sc, cat, pathValues, entry.ShortRef)
+  if len(locationValues) > 0 && matchesPathValues(sc, cat, locationValues, entry.ShortRef) {
+    return true
+  }
+
+  return matchesPathValues(sc, cat, repoValues, entry.ShortRef)
}

// matchesPathValues will check whether the pathValues have matching entries
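The net effect of the `matchesEntry` change is a two-step check: try the display-name (location) values first when they exist, then fall back to the ID-based repo values. A compact sketch of that precedence, with stand-in types in place of the package's scope and categorizer generics:

```go
// Sketch of the matchesEntry precedence: location values win when present,
// repo values remain the fallback.
package main

import "fmt"

func matchesPathValues(vals map[string]string, want string) bool {
    return vals["folder"] == want
}

func matchesEntry(repoVals, locationVals map[string]string, want string) bool {
    if len(locationVals) > 0 && matchesPathValues(locationVals, want) {
        return true
    }
    return matchesPathValues(repoVals, want)
}

func main() {
    repoVals := map[string]string{"folder": "mf_id_1"}
    locVals := map[string]string{"folder": "Inbox"}
    fmt.Println(matchesEntry(repoVals, locVals, "Inbox"))   // true via location
    fmt.Println(matchesEntry(repoVals, locVals, "mf_id_1")) // true via repo fallback
    fmt.Println(matchesEntry(repoVals, nil, "Inbox"))       // false
}
```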
@ -9,7 +9,7 @@ import (

  "github.com/alcionai/corso/src/internal/tester"
  "github.com/alcionai/corso/src/pkg/backup/details"
-  "github.com/alcionai/corso/src/pkg/fault/mock"
+  "github.com/alcionai/corso/src/pkg/fault"
  "github.com/alcionai/corso/src/pkg/filters"
  "github.com/alcionai/corso/src/pkg/path"
)
@ -274,7 +274,7 @@ func (suite *SelectorScopesSuite) TestReduce() {
      ctx, flush := tester.NewContext()
      defer flush()

-      errs := mock.NewAdder()
+      errs := fault.New(true)

      ds := deets()
      result := reduce[mockScope](
@ -284,7 +284,48 @@ func (suite *SelectorScopesSuite) TestReduce() {
        dataCats,
        errs)
      require.NotNil(t, result)
-      require.Empty(t, errs.Errs, "iteration errors")
+      require.NoError(t, errs.Err(), "no recoverable errors")
+      assert.Len(t, result.Entries, test.expectLen)
+    })
+  }
+}
+
+func (suite *SelectorScopesSuite) TestReduce_locationRef() {
+  deets := func() details.Details {
+    return details.Details{
+      DetailsModel: details.DetailsModel{
+        Entries: []details.DetailsEntry{
+          {
+            RepoRef: stubRepoRef(
+              pathServiceStub,
+              pathCatStub,
+              rootCatStub.String(),
+              "stub",
+              leafCatStub.String(),
+            ),
+            LocationRef: "a/b/c//defg",
+          },
+        },
+      },
+    }
+  }
+  dataCats := map[path.CategoryType]mockCategorizer{
+    pathCatStub: rootCatStub,
+  }
+
+  for _, test := range reduceTestTable {
+    suite.T().Run(test.name, func(t *testing.T) {
+      ctx, flush := tester.NewContext()
+      defer flush()
+
+      ds := deets()
+      result := reduce[mockScope](
+        ctx,
+        &ds,
+        test.sel().Selector,
+        dataCats,
+        fault.New(true))
+      require.NotNil(t, result)
      assert.Len(t, result.Entries, test.expectLen)
    })
  }
@ -309,7 +350,7 @@ func (suite *SelectorScopesSuite) TestScopesByCategory() {
func (suite *SelectorScopesSuite) TestPasses() {
  cat := rootCatStub
  pth := stubPath(suite.T(), "uid", []string{"fld"}, path.EventsCategory)
-  pathVals := cat.pathValues(pth)
+  repoVals, locVals := cat.pathValues(pth, pth)
  entry := details.DetailsEntry{}

  for _, test := range reduceTestTable {
@ -320,7 +361,8 @@ func (suite *SelectorScopesSuite) TestPasses() {
    incl := toMockScope(sel.Includes)
    result := passes(
      cat,
-      pathVals,
+      repoVals,
+      locVals,
      entry,
      excl, filt, incl)
    test.expectPasses(t, result)
@ -70,7 +70,7 @@ var (
const All = "All"

type Reducer interface {
-  Reduce(context.Context, *details.Details, fault.Adder) *details.Details
+  Reduce(context.Context, *details.Details, *fault.Errors) *details.Details
}

// selectorResourceOwners aggregates all discrete path category types described
@ -240,7 +240,7 @@ func (s Selector) PathService() path.ServiceType {
func (s Selector) Reduce(
  ctx context.Context,
  deets *details.Details,
-  errs fault.Adder,
+  errs *fault.Errors,
) (*details.Details, error) {
  r, err := selectorAsIface[Reducer](s)
  if err != nil {
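Across these files, the loose `fault.Adder` interface gives way to the concrete `*fault.Errors` bus: tests construct one with `fault.New(true)`, pass it through `Reduce`, and check `errs.Err()` afterwards. A hedged sketch of that pattern as the diff uses it, with a stand-in type rather than the real corso fault package (the constructor flag is assumed to mean fail-fast):

```go
// Sketch of the fault-bus usage pattern seen in the diff: create the bus,
// pass it down, add recoverable errors, inspect afterwards.
package main

import (
    "errors"
    "fmt"
)

type faultErrors struct {
    failFast bool
    errs     []error
}

func newFault(failFast bool) *faultErrors { return &faultErrors{failFast: failFast} }

// Add records a recoverable error on the bus.
func (f *faultErrors) Add(err error) { f.errs = append(f.errs, err) }

// Err reports the first recorded error, mirroring the errs.Err() checks
// the tests switched to.
func (f *faultErrors) Err() error {
    if len(f.errs) > 0 {
        return f.errs[0]
    }
    return nil
}

func main() {
    errs := newFault(true)
    errs.Add(errors.New("transforming locationRef to path"))
    fmt.Println(errs.Err()) // transforming locationRef to path
}
```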
@ -10,7 +10,7 @@ import (
  "github.com/alcionai/corso/src/internal/common"
  "github.com/alcionai/corso/src/internal/tester"
  "github.com/alcionai/corso/src/pkg/backup/details"
-  "github.com/alcionai/corso/src/pkg/fault/mock"
+  "github.com/alcionai/corso/src/pkg/fault"
  "github.com/alcionai/corso/src/pkg/selectors"
  "github.com/alcionai/corso/src/pkg/selectors/testdata"
)
@ -48,7 +48,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
      selFunc: func() selectors.Reducer {
        sel := selectors.NewExchangeRestore(selectors.Any())
        sel.Include(sel.MailFolders(
-          []string{testdata.ExchangeEmailInboxPath.Folder()},
+          []string{testdata.ExchangeEmailInboxPath.Folder(false)},
        ))

        return sel
@ -177,7 +177,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
      selFunc: func() selectors.Reducer {
        sel := selectors.NewExchangeRestore(selectors.Any())
        sel.Include(sel.MailFolders(
-          []string{testdata.ExchangeEmailBasePath.Folder()},
+          []string{testdata.ExchangeEmailBasePath.Folder(false)},
        ))

        return sel
@ -192,7 +192,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
      selFunc: func() selectors.Reducer {
        sel := selectors.NewExchangeRestore(selectors.Any())
        sel.Include(sel.MailFolders(
-          []string{testdata.ExchangeEmailBasePath.Folder()},
+          []string{testdata.ExchangeEmailBasePath.Folder(false)},
          selectors.PrefixMatch(), // force prefix matching
        ))

@ -205,7 +205,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
      selFunc: func() selectors.Reducer {
        sel := selectors.NewExchangeRestore(selectors.Any())
        sel.Include(sel.MailFolders(
-          []string{testdata.ExchangeEmailInboxPath.Folder()},
+          []string{testdata.ExchangeEmailInboxPath.Folder(false)},
        ))

        return sel
@ -217,7 +217,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
      selFunc: func() selectors.Reducer {
        sel := selectors.NewExchangeRestore(selectors.Any())
        sel.Include(sel.ContactFolders(
-          []string{testdata.ExchangeContactsBasePath.Folder()},
+          []string{testdata.ExchangeContactsBasePath.Folder(false)},
        ))

        return sel
@ -229,7 +229,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
      selFunc: func() selectors.Reducer {
        sel := selectors.NewExchangeRestore(selectors.Any())
        sel.Include(sel.ContactFolders(
-          []string{testdata.ExchangeContactsRootPath.Folder()},
+          []string{testdata.ExchangeContactsRootPath.Folder(false)},
        ))

        return sel
@ -242,7 +242,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
      selFunc: func() selectors.Reducer {
        sel := selectors.NewExchangeRestore(selectors.Any())
        sel.Include(sel.EventCalendars(
-          []string{testdata.ExchangeEventsBasePath.Folder()},
+          []string{testdata.ExchangeEventsBasePath.Folder(false)},
        ))

        return sel
@ -254,7 +254,7 @@ func (suite *SelectorReduceSuite) TestReduce() {
      selFunc: func() selectors.Reducer {
        sel := selectors.NewExchangeRestore(selectors.Any())
        sel.Include(sel.EventCalendars(
-          []string{testdata.ExchangeEventsRootPath.Folder()},
+          []string{testdata.ExchangeEventsRootPath.Folder(false)},
        ))

        return sel
@ -265,11 +265,8 @@ func (suite *SelectorReduceSuite) TestReduce() {

  for _, test := range table {
    suite.T().Run(test.name, func(t *testing.T) {
-      errs := mock.NewAdder()
-
-      output := test.selFunc().Reduce(ctx, allDetails, errs)
+      output := test.selFunc().Reduce(ctx, allDetails, fault.New(true))
      assert.ElementsMatch(t, test.expected, output.Entries)
-      assert.Empty(t, errs.Errs)
    })
  }
}
@ -423,12 +423,12 @@ func (c sharePointCategory) isLeaf() bool {
  return c == c.leafCat()
}

-// pathValues transforms a path to a map of identified properties.
+// pathValues transforms the two paths to maps of identified properties.
//
// Example:
// [tenantID, service, siteID, category, folder, itemID]
-// => {spSite: siteID, spFolder: folder, spItemID: itemID}
+// => {spFolder: folder, spItemID: itemID}
-func (c sharePointCategory) pathValues(p path.Path) map[categorizer]string {
+func (c sharePointCategory) pathValues(repo, location path.Path) (map[categorizer]string, map[categorizer]string) {
  var folderCat, itemCat categorizer

  switch c {
@ -439,13 +439,24 @@ func (c sharePointCategory) pathValues(p path.Path) map[categorizer]string {
  case SharePointPage, SharePointPageFolder:
    folderCat, itemCat = SharePointPageFolder, SharePointPage
  default:
-    return map[categorizer]string{}
+    return map[categorizer]string{}, map[categorizer]string{}
  }

-  return map[categorizer]string{
-    folderCat: p.Folder(),
-    itemCat:   p.Item(),
+  rv := map[categorizer]string{
+    folderCat: repo.Folder(false),
+    itemCat:   repo.Item(),
  }
+
+  lv := map[categorizer]string{}
+
+  if location != nil {
+    lv = map[categorizer]string{
+      folderCat: location.Folder(false),
+      itemCat:   location.Item(),
+    }
+  }
+
+  return rv, lv
}

// pathKeys returns the path keys recognized by the receiver's leaf type.
@ -559,7 +570,7 @@ func (s SharePointScope) DiscreteCopy(site string) SharePointScope {
func (s sharePoint) Reduce(
  ctx context.Context,
  deets *details.Details,
-  errs fault.Adder,
+  errs *fault.Errors,
) *details.Details {
  return reduce[SharePointScope](
    ctx,
@ -9,7 +9,7 @@ import (

  "github.com/alcionai/corso/src/internal/tester"
  "github.com/alcionai/corso/src/pkg/backup/details"
-  "github.com/alcionai/corso/src/pkg/fault/mock"
+  "github.com/alcionai/corso/src/pkg/fault"
  "github.com/alcionai/corso/src/pkg/path"
)

@ -306,13 +306,10 @@ func (suite *SharePointSelectorSuite) TestSharePointRestore_Reduce() {
      ctx, flush := tester.NewContext()
      defer flush()

-      errs := mock.NewAdder()
-
      sel := test.makeSelector()
-      results := sel.Reduce(ctx, test.deets, errs)
+      results := sel.Reduce(ctx, test.deets, fault.New(true))
      paths := results.Paths()
      assert.Equal(t, test.expect, paths)
-      assert.Empty(t, errs.Errs)
    })
  }
}
@ -349,10 +346,11 @@ func (suite *SharePointSelectorSuite) TestSharePointCategory_PathValues() {
        "tenant",
        "site",
        test.sc.PathType(),
-        true,
-      )
+        true)
      require.NoError(t, err)
-      assert.Equal(t, test.expected, test.sc.pathValues(itemPath))
+      r, l := test.sc.pathValues(itemPath, itemPath)
+      assert.Equal(t, test.expected, r)
+      assert.Equal(t, test.expected, l)
    })
  }
}
@ -28,7 +28,7 @@ func Users(ctx context.Context, acct account.Account, errs *fault.Errors) ([]*Us
    return nil, errors.Wrap(err, "initializing M365 graph connection")
  }

-  users, err := discovery.Users(ctx, gc.Owners.Users())
+  users, err := discovery.Users(ctx, gc.Owners.Users(), errs)
  if err != nil {
    return nil, err
  }

@ -31,13 +31,10 @@ func (suite *M365IntegrationSuite) TestUsers() {
  var (
    t    = suite.T()
    acct = tester.NewM365Account(suite.T())
-    errs = fault.New(true)
  )

-  users, err := Users(ctx, acct, errs)
+  users, err := Users(ctx, acct, fault.New(true))
  require.NoError(t, err)
-  require.NoError(t, errs.Err())
-  require.Empty(t, errs.Errs())
  require.NotNil(t, users)
  require.Greater(t, len(users), 0)
website/blog/2023-2-13-zune.md (new file, 59 lines)
@ -0,0 +1,59 @@
+---
+slug: zune
+title: "Corso is giving away a 30GB Zune!"
+description: "I swear to god this is not a joke"
+authors: nica
+tags: [corso, retro, backups, zune]
+date: 2023-2-13
+image: ./images/zune.png
+---
+
+![](./images/zune.png)
+
+The Corso team is all about making sure that your data never goes away.
+We've worked hard making the industry's only free and open-source tool for backing up Microsoft 365 data.
+And that's why we're not letting anything from Microsoft die, certainly not 2006's best personal media player, the Zune.
+
+Ummm… look, this all made sense when I bought the Zune and found someone to refurbish it. Just play along with me here, okay?
+
+<!-- truncate -->
+
+<div style={{textAlign: 'center'}}>
+
+![](./images/cyclical.gif)
+
+</div>
+
+## A technology whose time has come
+
+In many ways, the Zune was ahead of its time. It could stream music over WiFi, and share your own media with other
+Zunes on the local network.
+<!-- vale proselint.Cliches = NO -->
+It was also, to the best of my knowledge, the first personal media player to come in brown. This Zune isn't brown, it's black,
+but they did make brown ones.
+<!-- vale proselint.Cliches = YES -->
+
+<div style={{textAlign: 'center'}}>
+
+![](./images/brown_zune.jpeg)
+
+</div>
+
+*Look at that, brown. What a time to be alive.*
+
+And not only can you load a Zune with MP3 or WMA files, but it's also got an FM radio.
+
+![](./images/radio_zune.jpeg)
+*FM radio, certainly nothing will replace that!*
+
+This is a good thing since you'll have to run Windows Vista or earlier to load any media files onto the actual player.
+
+## All right Nica, I can't possibly get more excited, how can I win this prize?
+
+It will take you about 15 minutes (or less!). Just:
+
+* [Give Corso a try](https://corsobackup.io/docs/quickstart/)
+* Fill out the [Corso feedback form](https://forms.microsoft.com/r/mRVNKqeKDp)
+
+That's it! If you do that I will hold a drawing in a few weeks, and send the lucky winner the working Zune that
+I have on my desk at this moment.

BIN website/blog/images/brown_zune.jpeg (new file, 32 KiB; binary file not shown)
BIN website/blog/images/cyclical.gif (new file, 481 KiB; binary file not shown)
BIN website/blog/images/radio_zune.jpeg (new file, 134 KiB; binary file not shown)
BIN website/blog/images/zune.png (new file, 581 KiB; binary file not shown)
@ -39,4 +39,7 @@ stdout
stderr
backoff
Greenlake
subfolder
+[zZ]une
+Nica
+gif