From 754796e9921e6574bb8948ef3648996fe397ee0d Mon Sep 17 00:00:00 2001 From: Niraj Tolia Date: Tue, 6 Jun 2023 20:00:03 -0700 Subject: [PATCH 01/41] Add structured data for the Corso website (#3573) Adds a website name for Google to pick up. --- #### Does this PR need a docs update or release note? - [x] :no_entry: No #### Type of change - [x] :world_map: Documentation --- website/src/pages/index.js | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/website/src/pages/index.js b/website/src/pages/index.js index 134379a34..44869ff4d 100644 --- a/website/src/pages/index.js +++ b/website/src/pages/index.js @@ -1,5 +1,6 @@ import React, { useEffect } from "react"; import Layout from "@theme/Layout"; +import Head from "@docusaurus/Head" import { MainComp } from "@site/src/components/parts/MainComp"; import { useColorMode } from "@docusaurus/theme-common"; @@ -24,8 +25,17 @@ export default function Home() { return ( + description="Intro, docs, and blog for Corso, an open-source tool, that protects Microsoft 365 data by securely and efficiently backing up all business-critical data to object storage."> + + + From 4545feeb802f1fb9fb0e9faeb891a4a9d592d070 Mon Sep 17 00:00:00 2001 From: Niraj Tolia Date: Tue, 6 Jun 2023 20:04:32 -0700 Subject: [PATCH 02/41] Fix broken blog link (#3572) Remove a reference to localhost #### Does this PR need a docs update or release note? - [x] :no_entry: No #### Type of change - [x] :bug: Bugfix - [x] :world_map: Documentation --- website/blog/2023-05-12-incrementals-pt1.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/website/blog/2023-05-12-incrementals-pt1.md b/website/blog/2023-05-12-incrementals-pt1.md index 10305a013..fcb475014 100644 --- a/website/blog/2023-05-12-incrementals-pt1.md +++ b/website/blog/2023-05-12-incrementals-pt1.md @@ -154,5 +154,5 @@ accurately reflect all updates. If you have feedback, questions, or want more in ## Try Corso Today Corso implements compression, deduplication *and* incremental backups to give -you the best backup performance. Check -[our quickstart guide](http://localhost:3000/docs/quickstart/) to see how to get started. +you the best backup performance. Check out +[our quickstart guide](../../docs/quickstart/) to see how to get started. From 3e1226a78620e8707281904202ba88e97aea1b93 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 8 Jun 2023 05:56:35 +0000 Subject: [PATCH 03/41] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Bump=20github.com/aw?= =?UTF-8?q?s/aws-sdk-go=20from=201.44.275=20to=201.44.276=20in=20/src=20(#?= =?UTF-8?q?3571)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [github.com/aws/aws-sdk-go](https://github.com/aws/aws-sdk-go) from 1.44.275 to 1.44.276.
Release notes

Sourced from github.com/aws/aws-sdk-go's releases.

Release v1.44.276 (2023-06-05)

Service Client Updates

  • service/cloudformation: Updates service API and documentation
    • AWS CloudFormation StackSets provides customers with three new APIs to activate, deactivate, and describe AWS Organizations trusted access which is needed to get started with service-managed StackSets.
  • service/ec2: Updates service API
    • Making InstanceTagAttribute as the required parameter for the DeregisterInstanceEventNotificationAttributes and RegisterInstanceEventNotificationAttributes APIs.
  • service/finspace: Updates service API, documentation, and paginators
  • service/frauddetector: Updates service API and documentation
  • service/keyspaces: Updates service API and documentation
  • service/kms: Updates service API, documentation, and examples
    • This release includes feature to import customer's asymmetric (RSA and ECC) and HMAC keys into KMS. It also includes feature to allow customers to specify number of days to schedule a KMS key deletion as a policy condition key.
  • service/lambda: Updates service API and documentation
    • Add Ruby 3.2 (ruby3.2) Runtime support to AWS Lambda.
  • service/mwaa: Updates service API and documentation
[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=github.com/aws/aws-sdk-go&package-manager=go_modules&previous-version=1.44.275&new-version=1.44.276)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) You can trigger a rebase of this PR by commenting `@dependabot rebase`. ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:

- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
--- src/go.mod | 2 +- src/go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/go.mod b/src/go.mod index 00926e9c6..11e2deee1 100644 --- a/src/go.mod +++ b/src/go.mod @@ -8,7 +8,7 @@ require ( github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0 github.com/alcionai/clues v0.0.0-20230406223931-f48777f4773c github.com/armon/go-metrics v0.4.1 - github.com/aws/aws-sdk-go v1.44.275 + github.com/aws/aws-sdk-go v1.44.276 github.com/aws/aws-xray-sdk-go v1.8.1 github.com/cenkalti/backoff/v4 v4.2.1 github.com/google/uuid v1.3.0 diff --git a/src/go.sum b/src/go.sum index 935ff6230..f064f821b 100644 --- a/src/go.sum +++ b/src/go.sum @@ -66,8 +66,8 @@ github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA= github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4= -github.com/aws/aws-sdk-go v1.44.275 h1:VqRULgqrigvQLll4e4hXuc568EQAtZQ6jmBzLlQHzSI= -github.com/aws/aws-sdk-go v1.44.275/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= +github.com/aws/aws-sdk-go v1.44.276 h1:ywPlx9C5Yc482dUgAZ9bHpQ6onVvJvYE9FJWsNDCEy0= +github.com/aws/aws-sdk-go v1.44.276/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= github.com/aws/aws-xray-sdk-go v1.8.1 h1:O4pXV+hnCskaamGsZnFpzHyAmgPGusBMN6i7nnsy0Fo= github.com/aws/aws-xray-sdk-go v1.8.1/go.mod h1:wMmVYzej3sykAttNBkXQHK/+clAPWTOrPiajEk7Cp3A= github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= From 1b9722555532e31fb6c1d3a248fabaccebef6d43 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 8 Jun 2023 06:13:39 +0000 Subject: [PATCH 04/41] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Bump=20sass=20from?= =?UTF-8?q?=201.62.1=20to=201.63.2=20in=20/website=20(#3576)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- website/package-lock.json | 14 +++++++------- website/package.json | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/website/package-lock.json b/website/package-lock.json index a16b9ad50..81cac0618 100644 --- a/website/package-lock.json +++ b/website/package-lock.json @@ -24,7 +24,7 @@ "prism-react-renderer": "^1.3.5", "react": "^17.0.2", "react-dom": "^17.0.2", - "sass": "^1.62.1", + "sass": "^1.63.2", "tiny-slider": "^2.9.4", "tw-elements": "^1.0.0-alpha13", "wow.js": "^1.2.2" @@ -12522,9 +12522,9 @@ "license": "MIT" }, "node_modules/sass": { - "version": "1.62.1", - "resolved": "https://registry.npmjs.org/sass/-/sass-1.62.1.tgz", - "integrity": "sha512-NHpxIzN29MXvWiuswfc1W3I0N8SXBd8UR26WntmDlRYf0bSADnwnOjsyMZ3lMezSlArD33Vs3YFhp7dWvL770A==", + "version": "1.63.2", + "resolved": "https://registry.npmjs.org/sass/-/sass-1.63.2.tgz", + "integrity": "sha512-u56TU0AIFqMtauKl/OJ1AeFsXqRHkgO7nCWmHaDwfxDo9GUMSqBA4NEh6GMuh1CYVM7zuROYtZrHzPc2ixK+ww==", "dependencies": { "chokidar": ">=3.0.0 <4.0.0", "immutable": "^4.0.0", @@ -23699,9 +23699,9 @@ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, "sass": { - "version": "1.62.1", - "resolved": "https://registry.npmjs.org/sass/-/sass-1.62.1.tgz", - "integrity": "sha512-NHpxIzN29MXvWiuswfc1W3I0N8SXBd8UR26WntmDlRYf0bSADnwnOjsyMZ3lMezSlArD33Vs3YFhp7dWvL770A==", + "version": "1.63.2", + "resolved": "https://registry.npmjs.org/sass/-/sass-1.63.2.tgz", + 
"integrity": "sha512-u56TU0AIFqMtauKl/OJ1AeFsXqRHkgO7nCWmHaDwfxDo9GUMSqBA4NEh6GMuh1CYVM7zuROYtZrHzPc2ixK+ww==", "requires": { "chokidar": ">=3.0.0 <4.0.0", "immutable": "^4.0.0", diff --git a/website/package.json b/website/package.json index c17a46655..c60b476be 100644 --- a/website/package.json +++ b/website/package.json @@ -30,7 +30,7 @@ "prism-react-renderer": "^1.3.5", "react": "^17.0.2", "react-dom": "^17.0.2", - "sass": "^1.62.1", + "sass": "^1.63.2", "tiny-slider": "^2.9.4", "tw-elements": "^1.0.0-alpha13", "wow.js": "^1.2.2" From 4464ec7fcec6840d25df94178ecab9433bd3bde7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 9 Jun 2023 06:05:52 +0000 Subject: [PATCH 05/41] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Bump=20mermaid=20fro?= =?UTF-8?q?m=2010.2.2=20to=2010.2.3=20in=20/website=20(#3580)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- website/package-lock.json | 14 +++++++------- website/package.json | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/website/package-lock.json b/website/package-lock.json index 81cac0618..916ee132b 100644 --- a/website/package-lock.json +++ b/website/package-lock.json @@ -20,7 +20,7 @@ "feather-icons": "^4.29.0", "jarallax": "^2.1.3", "mdx-mermaid": "^1.3.2", - "mermaid": "^10.2.2", + "mermaid": "^10.2.3", "prism-react-renderer": "^1.3.5", "react": "^17.0.2", "react-dom": "^17.0.2", @@ -9259,9 +9259,9 @@ } }, "node_modules/mermaid": { - "version": "10.2.2", - "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-10.2.2.tgz", - "integrity": "sha512-ifYKlCcZKYq48hxC1poJXnvk/PbCdgqqbg5B4qsybb8nIItPM1ATKqVEDkyde6BBJxVFhVJr9hoUjipzniQJZg==", + "version": "10.2.3", + "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-10.2.3.tgz", + "integrity": "sha512-cMVE5s9PlQvOwfORkyVpr5beMsLdInrycAosdr+tpZ0WFjG4RJ/bUHST7aTgHNJbujHkdBRAm+N50P3puQOfPw==", "dependencies": { "@braintree/sanitize-url": "^6.0.2", "cytoscape": "^3.23.0", @@ -21647,9 +21647,9 @@ "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==" }, "mermaid": { - "version": "10.2.2", - "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-10.2.2.tgz", - "integrity": "sha512-ifYKlCcZKYq48hxC1poJXnvk/PbCdgqqbg5B4qsybb8nIItPM1ATKqVEDkyde6BBJxVFhVJr9hoUjipzniQJZg==", + "version": "10.2.3", + "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-10.2.3.tgz", + "integrity": "sha512-cMVE5s9PlQvOwfORkyVpr5beMsLdInrycAosdr+tpZ0WFjG4RJ/bUHST7aTgHNJbujHkdBRAm+N50P3puQOfPw==", "requires": { "@braintree/sanitize-url": "^6.0.2", "cytoscape": "^3.23.0", diff --git a/website/package.json b/website/package.json index c60b476be..aa28b9a1b 100644 --- a/website/package.json +++ b/website/package.json @@ -26,7 +26,7 @@ "feather-icons": "^4.29.0", "jarallax": "^2.1.3", "mdx-mermaid": "^1.3.2", - "mermaid": "^10.2.2", + "mermaid": "^10.2.3", "prism-react-renderer": "^1.3.5", "react": "^17.0.2", "react-dom": "^17.0.2", From ea5eaf3aaf2f5fc3cb0721cde699a8fa4870686a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Jun 2023 06:08:40 +0000 Subject: [PATCH 06/41] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Bump=20sass=20from?= =?UTF-8?q?=201.63.2=20to=201.63.3=20in=20/website=20(#3587)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- website/package-lock.json | 14 +++++++------- website/package.json | 2 +- 2 files changed, 8 insertions(+), 8 
deletions(-) diff --git a/website/package-lock.json b/website/package-lock.json index 916ee132b..9cffb7e0a 100644 --- a/website/package-lock.json +++ b/website/package-lock.json @@ -24,7 +24,7 @@ "prism-react-renderer": "^1.3.5", "react": "^17.0.2", "react-dom": "^17.0.2", - "sass": "^1.63.2", + "sass": "^1.63.3", "tiny-slider": "^2.9.4", "tw-elements": "^1.0.0-alpha13", "wow.js": "^1.2.2" @@ -12522,9 +12522,9 @@ "license": "MIT" }, "node_modules/sass": { - "version": "1.63.2", - "resolved": "https://registry.npmjs.org/sass/-/sass-1.63.2.tgz", - "integrity": "sha512-u56TU0AIFqMtauKl/OJ1AeFsXqRHkgO7nCWmHaDwfxDo9GUMSqBA4NEh6GMuh1CYVM7zuROYtZrHzPc2ixK+ww==", + "version": "1.63.3", + "resolved": "https://registry.npmjs.org/sass/-/sass-1.63.3.tgz", + "integrity": "sha512-ySdXN+DVpfwq49jG1+hmtDslYqpS7SkOR5GpF6o2bmb1RL/xS+wvPmegMvMywyfsmAV6p7TgwXYGrCZIFFbAHg==", "dependencies": { "chokidar": ">=3.0.0 <4.0.0", "immutable": "^4.0.0", @@ -23699,9 +23699,9 @@ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, "sass": { - "version": "1.63.2", - "resolved": "https://registry.npmjs.org/sass/-/sass-1.63.2.tgz", - "integrity": "sha512-u56TU0AIFqMtauKl/OJ1AeFsXqRHkgO7nCWmHaDwfxDo9GUMSqBA4NEh6GMuh1CYVM7zuROYtZrHzPc2ixK+ww==", + "version": "1.63.3", + "resolved": "https://registry.npmjs.org/sass/-/sass-1.63.3.tgz", + "integrity": "sha512-ySdXN+DVpfwq49jG1+hmtDslYqpS7SkOR5GpF6o2bmb1RL/xS+wvPmegMvMywyfsmAV6p7TgwXYGrCZIFFbAHg==", "requires": { "chokidar": ">=3.0.0 <4.0.0", "immutable": "^4.0.0", diff --git a/website/package.json b/website/package.json index aa28b9a1b..5704d651f 100644 --- a/website/package.json +++ b/website/package.json @@ -30,7 +30,7 @@ "prism-react-renderer": "^1.3.5", "react": "^17.0.2", "react-dom": "^17.0.2", - "sass": "^1.63.2", + "sass": "^1.63.3", "tiny-slider": "^2.9.4", "tw-elements": "^1.0.0-alpha13", "wow.js": "^1.2.2" From bc7792744e70a83d73187e89cfeb694ea68727b0 Mon Sep 17 00:00:00 2001 From: Abhishek Pandey Date: Mon, 12 Jun 2023 11:15:19 -0700 Subject: [PATCH 07/41] Don't dump response body while checking for malware (#3592) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Corso detects malware in a few places for OD/SP:

1. During item enumeration, using the [malware property](https://learn.microsoft.com/en-us/graph/api/resources/driveitem?view=graph-rest-1.0#properties) which is set by Graph - [code](https://github.com/alcionai/corso/blob/ed47c134b09a19dd6b4869f605f3a384f5763c06/src/internal/connector/onedrive/collections.go#L658)
2. During item fetch using the download URL - [code](https://github.com/alcionai/corso/blob/ed47c134b09a19dd6b4869f605f3a384f5763c06/src/internal/connector/onedrive/item.go#LL53C18-L53C18). Graph seems to return a 403 with `{"error":{"code":"malwareDetected","message":"Malware detected"}}`. Tested with eicar.
3. By analyzing HTTP response headers in `IsMalwareResp`. These headers don't appear to be documented by Graph; this seems to exist for [sharepoint protocol compatibility](https://learn.microsoft.com/en-us/openspecs/sharepoint_protocols/ms-wdvmoduu/6fa6d4a9-ac18-4cd7-b696-8a3b14a98291).
4. By analyzing the response body in `IsMalwareResp`, checking for the malwareDetected string inside the HTTP response body.

We are accidentally dumping the entire file (the full HTTP response dump) while checking for malware in item 4. This leads to high memory utilization/OOMs, especially while processing very large files. I don't think we need item 4 at all.
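For reference, here is a self-contained sketch (illustrative, not part of the patch) of the check that remains once item 4 is dropped; `isMalwareResp` is a local stand-in mirroring the new `IsMalwareResp` body in the diff below:

```go
package main

import (
	"fmt"
	"net/http"
)

// isMalwareResp mirrors the post-patch graph.IsMalwareResp: only the
// X-Virus-Infected response header is consulted, so the (potentially
// very large) response body is never buffered into memory.
func isMalwareResp(resp *http.Response) bool {
	return resp != nil &&
		len(resp.Header) > 0 &&
		resp.Header.Get("X-Virus-Infected") == "true"
}

func main() {
	resp := &http.Response{
		StatusCode: http.StatusForbidden,
		Header:     http.Header{"X-Virus-Infected": []string{"true"}},
	}

	fmt.Println(isMalwareResp(resp)) // true
}
```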
If Graph is the entity that detects malware, I think we will never get past item 2. This PR removes item 4. --- #### Does this PR need a docs update or release note? - [ ] :white_check_mark: Yes, it's included - [ ] :clock1: Yes, but in a later PR - [x] :no_entry: No #### Type of change - [ ] :sunflower: Feature - [x] :bug: Bugfix - [ ] :world_map: Documentation - [ ] :robot: Supportability/Tests - [ ] :computer: CI/Deployment - [ ] :broom: Tech Debt/Cleanup #### Issue(s) * # #### Test Plan - [x] :muscle: Manual - [ ] :zap: Unit test - [ ] :green_heart: E2E --- src/internal/connector/graph/errors.go | 24 +++--------------------- 1 file changed, 3 insertions(+), 21 deletions(-) diff --git a/src/internal/connector/graph/errors.go b/src/internal/connector/graph/errors.go index e72c6dd29..2f2427b6c 100644 --- a/src/internal/connector/graph/errors.go +++ b/src/internal/connector/graph/errors.go @@ -4,7 +4,6 @@ import ( "context" "fmt" "net/http" - "net/http/httputil" "net/url" "os" "strings" @@ -18,7 +17,6 @@ import ( "github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/filters" - "github.com/alcionai/corso/src/pkg/logger" ) // --------------------------------------------------------------------------- @@ -169,27 +167,11 @@ func IsMalware(err error) bool { } func IsMalwareResp(ctx context.Context, resp *http.Response) bool { - if resp == nil { - return false - } - // https://learn.microsoft.com/en-us/openspecs/sharepoint_protocols/ms-wsshp/ba4ee7a8-704c-4e9c-ab14-fa44c574bdf4 // https://learn.microsoft.com/en-us/openspecs/sharepoint_protocols/ms-wdvmoduu/6fa6d4a9-ac18-4cd7-b696-8a3b14a98291 - if len(resp.Header) > 0 && resp.Header.Get("X-Virus-Infected") == "true" { - return true - } - - respDump, err := httputil.DumpResponse(resp, true) - if err != nil { - logger.Ctx(ctx).Errorw("dumping http response", "error", err) - return false - } - - if strings.Contains(string(respDump), string(malwareDetected)) { - return true - } - - return false + return resp != nil && + len(resp.Header) > 0 && + resp.Header.Get("X-Virus-Infected") == "true" } func IsErrFolderExists(err error) bool { From 083f1b18e23b108b085afab57674691bcc48e3cd Mon Sep 17 00:00:00 2001 From: Abin Simon Date: Tue, 13 Jun 2023 00:14:29 +0530 Subject: [PATCH 08/41] Fix time format for M365DateTimeTimeZone (#3577) This was only used in tests and so should not affect any code, but let's fix it.
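As a quick illustration (hypothetical snippet, not part of the patch), Graph's datetimetimezone values carry seven fractional-second digits, so the Go layout needs seven digits to round-trip them:

```go
package main

import (
	"fmt"
	"time"
)

// The corrected layout: seven fractional-second digits, matching the
// precision the Graph datetimetimezone resource emits.
const m365DateTimeTimeZone = "2006-01-02T15:04:05.0000000"

func main() {
	// A representative Graph timestamp with a seven-digit fraction.
	ts, err := time.Parse(m365DateTimeTimeZone, "2023-06-12T18:30:00.1234567")
	if err != nil {
		panic(err)
	}

	// Round-trips with all seven digits intact; the old six-digit
	// layout ("...05.000000") fails to parse this value.
	fmt.Println(ts.Format(m365DateTimeTimeZone))
}
```

--- #### Does this PR need a docs update or release note?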
- [ ] :white_check_mark: Yes, it's included - [ ] :clock1: Yes, but in a later PR - [x] :no_entry: No #### Type of change - [ ] :sunflower: Feature - [x] :bug: Bugfix - [ ] :world_map: Documentation - [ ] :robot: Supportability/Tests - [ ] :computer: CI/Deployment - [ ] :broom: Tech Debt/Cleanup #### Issue(s) * # #### Test Plan - [ ] :muscle: Manual - [x] :zap: Unit test - [ ] :green_heart: E2E --- src/internal/common/dttm/dttm.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/internal/common/dttm/dttm.go b/src/internal/common/dttm/dttm.go index ae9e084c5..d91f7596c 100644 --- a/src/internal/common/dttm/dttm.go +++ b/src/internal/common/dttm/dttm.go @@ -41,7 +41,7 @@ const ( // M365dateTimeTimeZoneTimeFormat is the format used by M365 for datetimetimezone resource // https://learn.microsoft.com/en-us/graph/api/resources/datetimetimezone?view=graph-rest-1.0 - M365DateTimeTimeZone TimeFormat = "2006-01-02T15:04:05.000000" + M365DateTimeTimeZone TimeFormat = "2006-01-02T15:04:05.0000000" ) // these regexes are used to extract time formats from strings. Their primary purpose is to From 3f79d790aac2e03fa1e5f290a408adb1b4757fd4 Mon Sep 17 00:00:00 2001 From: Abhishek Pandey Date: Mon, 12 Jun 2023 12:05:03 -0700 Subject: [PATCH 09/41] Retry on econnreset regardless of http response (#3560) Retry on `ECONNRESET` even if HTTP response is set to 2xx. This is unlikely but it can happen. Currently we fail the backup for this scenario with `connection reset by peer` error. --- #### Does this PR need a docs update or release note? - [ ] :white_check_mark: Yes, it's included - [ ] :clock1: Yes, but in a later PR - [x] :no_entry: No #### Type of change - [ ] :sunflower: Feature - [x] :bug: Bugfix - [ ] :world_map: Documentation - [ ] :robot: Supportability/Tests - [ ] :computer: CI/Deployment - [ ] :broom: Tech Debt/Cleanup #### Issue(s) * # #### Test Plan - [ ] :muscle: Manual - [x] :zap: Unit test - [ ] :green_heart: E2E --- src/internal/connector/graph/middleware.go | 17 ++++-- .../connector/graph/middleware_test.go | 60 +++++++++++++++++-- 2 files changed, 65 insertions(+), 12 deletions(-) diff --git a/src/internal/connector/graph/middleware.go b/src/internal/connector/graph/middleware.go index bc9aabe2d..2e053f9d9 100644 --- a/src/internal/connector/graph/middleware.go +++ b/src/internal/connector/graph/middleware.go @@ -209,12 +209,16 @@ func (mw RetryMiddleware) Intercept( ctx := req.Context() resp, err := pipeline.Next(req, middlewareIndex) - if err != nil && !IsErrTimeout(err) && !IsErrConnectionReset(err) { - return resp, stackReq(ctx, req, resp, err) - } - if resp != nil && resp.StatusCode/100 != 4 && resp.StatusCode/100 != 5 { - return resp, err + retriable := IsErrTimeout(err) || IsErrConnectionReset(err) || + (resp != nil && (resp.StatusCode/100 == 4 || resp.StatusCode/100 == 5)) + + if !retriable { + if err != nil { + return resp, stackReq(ctx, req, resp, err) + } + + return resp, nil } exponentialBackOff := backoff.NewExponentialBackOff() @@ -304,7 +308,8 @@ func (mw RetryMiddleware) retryRequest( return nextResp, stackReq(ctx, req, nextResp, err) } - return mw.retryRequest(ctx, + return mw.retryRequest( + ctx, pipeline, middlewareIndex, req, diff --git a/src/internal/connector/graph/middleware_test.go b/src/internal/connector/graph/middleware_test.go index 15faf7a7a..f122cdd72 100644 --- a/src/internal/connector/graph/middleware_test.go +++ b/src/internal/connector/graph/middleware_test.go @@ -4,6 +4,7 @@ import ( "bytes" "io" "net/http" + "syscall" "testing" "time" 
@@ -37,12 +38,18 @@ func newMWReturns(code int, body []byte, err error) mwReturns { brc = io.NopCloser(bytes.NewBuffer(body)) } + resp := &http.Response{ + StatusCode: code, + Body: brc, + } + + if code == 0 { + resp = nil + } + return mwReturns{ - err: err, - resp: &http.Response{ - StatusCode: code, - Body: brc, - }, + err: err, + resp: resp, } } @@ -142,6 +149,7 @@ func (suite *RetryMWIntgSuite) TestRetryMiddleware_Intercept_byStatusCode() { tests := []struct { name string status int + providedErr error expectRetryCount int mw testMW expectErr assert.ErrorAssertionFunc @@ -149,12 +157,14 @@ func (suite *RetryMWIntgSuite) TestRetryMiddleware_Intercept_byStatusCode() { { name: "200, no retries", status: http.StatusOK, + providedErr: nil, expectRetryCount: 0, expectErr: assert.NoError, }, { name: "400, no retries", status: http.StatusBadRequest, + providedErr: nil, expectRetryCount: 0, expectErr: assert.Error, }, @@ -162,9 +172,47 @@ func (suite *RetryMWIntgSuite) TestRetryMiddleware_Intercept_byStatusCode() { // don't test 504: gets intercepted by graph client for long waits. name: "502", status: http.StatusBadGateway, + providedErr: nil, expectRetryCount: defaultMaxRetries, expectErr: assert.Error, }, + { + name: "conn reset with 5xx", + status: http.StatusBadGateway, + providedErr: syscall.ECONNRESET, + expectRetryCount: defaultMaxRetries, + expectErr: assert.Error, + }, + { + name: "conn reset with 2xx", + status: http.StatusOK, + providedErr: syscall.ECONNRESET, + expectRetryCount: defaultMaxRetries, + expectErr: assert.Error, + }, + { + name: "conn reset with nil resp", + providedErr: syscall.ECONNRESET, + // Use 0 to denote nil http response + status: 0, + expectRetryCount: 3, + expectErr: assert.Error, + }, + { + // Unlikely but check if connection reset error takes precedence + name: "conn reset with 400 resp", + providedErr: syscall.ECONNRESET, + status: http.StatusBadRequest, + expectRetryCount: 3, + expectErr: assert.Error, + }, + { + name: "http timeout", + providedErr: http.ErrHandlerTimeout, + status: 0, + expectRetryCount: 3, + expectErr: assert.Error, + }, } for _, test := range tests { @@ -177,7 +225,7 @@ func (suite *RetryMWIntgSuite) TestRetryMiddleware_Intercept_byStatusCode() { called := 0 mw := newTestMW( func(*http.Request) { called++ }, - newMWReturns(test.status, nil, nil)) + newMWReturns(test.status, nil, test.providedErr)) mw.repeatReturn0 = true adpt, err := mockAdapter(suite.creds, mw) From de589a457154b777b4d4c649e041314c5ff6d4cf Mon Sep 17 00:00:00 2001 From: Vaibhav Kamra Date: Mon, 12 Jun 2023 12:49:41 -0700 Subject: [PATCH 10/41] Use mergequeue for dependabot auto merges (#3594) Keeps the same auto-merge behavior (for `semver-minor` updates) but changes it to use our mergequeue. This improves DX because it prevents dependabot merges from bypassing the queue and also means that sanity tests for these PRs don't fail (they run with the correct env) --- #### Does this PR need a docs update or release note? 
- [ ] :white_check_mark: Yes, it's included - [ ] :clock1: Yes, but in a later PR - [ ] :no_entry: No #### Type of change - [ ] :sunflower: Feature - [ ] :bug: Bugfix - [ ] :world_map: Documentation - [ ] :robot: Supportability/Tests - [ ] :computer: CI/Deployment - [ ] :broom: Tech Debt/Cleanup #### Issue(s) * #3591 #### Test Plan - [ ] :muscle: Manual - [ ] :zap: Unit test - [ ] :green_heart: E2E --- .github/workflows/auto-merge.yml | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/.github/workflows/auto-merge.yml b/.github/workflows/auto-merge.yml index f92ca86c9..6e09338f1 100644 --- a/.github/workflows/auto-merge.yml +++ b/.github/workflows/auto-merge.yml @@ -1,3 +1,4 @@ +# See https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#common-dependabot-automations name: auto-merge on: @@ -5,11 +6,24 @@ on: paths-ignore: - "src/**" # prevent auto-merge for go dependencies +permissions: + pull-requests: write + jobs: - auto-merge: + auto-approve-label: runs-on: ubuntu-latest + if: ${{ github.actor == 'dependabot[bot]' }} steps: - - uses: actions/checkout@v3 - - uses: ahmadnassri/action-dependabot-auto-merge@v2 # https://github.com/marketplace/actions/dependabot-auto-merge with: - github-token: ${{ secrets.DEPENDABOT_TOKEN }} + - name: Dependabot metadata + id: metadata + uses: dependabot/fetch-metadata@v1 with: + github-token: "${{ secrets.GITHUB_TOKEN }}" + - name: Enable auto-merge for Dependabot PRs + if: ${{steps.metadata.outputs.update-type == 'version-update:semver-minor'}} + run: | + gh pr edit "$PR_URL" --add-label "mergequeue" + gh pr review --approve "$PR_URL" + env: + PR_URL: ${{github.event.pull_request.html_url}} + GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} From 960e8b79a0f2bb76bb23053a3c5affb0848a6bbd Mon Sep 17 00:00:00 2001 From: Keepers Date: Mon, 12 Jun 2023 17:33:22 -0600 Subject: [PATCH 11/41] Centralize restoreConfig (#3563) Centralizes all restore configuration management within a restoreConfig struct. This struct is owned by the control package, which allows it to be utilized by both CLI and SDK consumers.
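From the call sites in this diff, the new type looks roughly like the following (a hedged sketch inferred from usage; the real struct likely carries more fields, and the default-location prefix here is illustrative):

```go
package control

import "time"

// RestoreConfig centralizes restore configuration that used to live in
// RestoreDestination; the old ContainerName field is now Location.
type RestoreConfig struct {
	// Location is the container/folder restored items are written into.
	Location string
}

// DefaultRestoreConfig stamps a default restore location with the
// current time in the given layout (callers pass formats such as
// dttm.HumanReadable). The signature and prefix are assumptions for
// illustration only.
func DefaultRestoreConfig(timeLayout string) RestoreConfig {
	return RestoreConfig{
		Location: "Corso_Restore_" + time.Now().UTC().Format(timeLayout),
	}
}
```

--- #### Does this PR need a docs update or release note?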
- [x] :no_entry: No #### Type of change - [x] :broom: Tech Debt/Cleanup #### Issue(s) * #3562 #### Test Plan - [x] :zap: Unit test - [x] :green_heart: E2E --- src/cli/restore/exchange.go | 6 +- src/cli/restore/onedrive.go | 6 +- src/cli/restore/sharepoint.go | 6 +- src/cmd/factory/impl/common.go | 16 ++-- src/internal/connector/data_collections.go | 8 +- .../exchange/contacts_restore_test.go | 2 +- .../connector/exchange/events_restore_test.go | 2 +- .../connector/exchange/mail_restore_test.go | 2 +- .../connector/exchange/restore_test.go | 32 ++++---- .../exchange/service_iterators_test.go | 2 +- .../connector/exchange/service_restore.go | 6 +- .../connector/graph_connector_helper_test.go | 2 +- .../graph_connector_onedrive_test_helper.go | 2 +- .../connector/graph_connector_test.go | 36 ++++----- .../connector/graph_connector_test_helper.go | 12 +-- src/internal/connector/mock/connector.go | 2 +- src/internal/connector/onedrive/item_test.go | 2 +- src/internal/connector/onedrive/restore.go | 10 +-- .../connector/onedrive/url_cache_test.go | 2 +- .../connector/sharepoint/api/pages_test.go | 2 +- .../connector/sharepoint/collection_test.go | 2 +- src/internal/connector/sharepoint/restore.go | 12 +-- .../operations/backup_integration_test.go | 10 +-- src/internal/operations/inject/inject.go | 2 +- src/internal/operations/restore.go | 34 ++++---- src/internal/operations/restore_test.go | 46 +++++------ src/internal/tester/restore_destination.go | 10 +-- src/pkg/control/collisionpolicy_string.go | 26 ------ src/pkg/control/options.go | 79 ++++++++++--------- .../loadtest/repository_load_test.go | 4 +- src/pkg/repository/repository.go | 6 +- src/pkg/repository/repository_test.go | 4 +- 32 files changed, 186 insertions(+), 207 deletions(-) delete mode 100644 src/pkg/control/collisionpolicy_string.go diff --git a/src/cli/restore/exchange.go b/src/cli/restore/exchange.go index 3ad22fbb2..f4390ef4c 100644 --- a/src/cli/restore/exchange.go +++ b/src/cli/restore/exchange.go @@ -96,13 +96,13 @@ func restoreExchangeCmd(cmd *cobra.Command, args []string) error { defer utils.CloseRepo(ctx, r) - dest := control.DefaultRestoreDestination(dttm.HumanReadable) - Infof(ctx, "Restoring to folder %s", dest.ContainerName) + restoreCfg := control.DefaultRestoreConfig(dttm.HumanReadable) + Infof(ctx, "Restoring to folder %s", restoreCfg.Location) sel := utils.IncludeExchangeRestoreDataSelectors(opts) utils.FilterExchangeRestoreInfoSelectors(sel, opts) - ro, err := r.NewRestore(ctx, utils.BackupIDFV, sel.Selector, dest) + ro, err := r.NewRestore(ctx, utils.BackupIDFV, sel.Selector, restoreCfg) if err != nil { return Only(ctx, clues.Wrap(err, "Failed to initialize Exchange restore")) } diff --git a/src/cli/restore/onedrive.go b/src/cli/restore/onedrive.go index 90caf57df..008ac18fd 100644 --- a/src/cli/restore/onedrive.go +++ b/src/cli/restore/onedrive.go @@ -97,13 +97,13 @@ func restoreOneDriveCmd(cmd *cobra.Command, args []string) error { defer utils.CloseRepo(ctx, r) - dest := control.DefaultRestoreDestination(dttm.HumanReadableDriveItem) - Infof(ctx, "Restoring to folder %s", dest.ContainerName) + restoreCfg := control.DefaultRestoreConfig(dttm.HumanReadableDriveItem) + Infof(ctx, "Restoring to folder %s", restoreCfg.Location) sel := utils.IncludeOneDriveRestoreDataSelectors(opts) utils.FilterOneDriveRestoreInfoSelectors(sel, opts) - ro, err := r.NewRestore(ctx, utils.BackupIDFV, sel.Selector, dest) + ro, err := r.NewRestore(ctx, utils.BackupIDFV, sel.Selector, restoreCfg) if err != nil { return Only(ctx, 
clues.Wrap(err, "Failed to initialize OneDrive restore")) } diff --git a/src/cli/restore/sharepoint.go b/src/cli/restore/sharepoint.go index 332805d8d..c9b47b6bc 100644 --- a/src/cli/restore/sharepoint.go +++ b/src/cli/restore/sharepoint.go @@ -102,13 +102,13 @@ func restoreSharePointCmd(cmd *cobra.Command, args []string) error { defer utils.CloseRepo(ctx, r) - dest := control.DefaultRestoreDestination(dttm.HumanReadableDriveItem) - Infof(ctx, "Restoring to folder %s", dest.ContainerName) + restoreCfg := control.DefaultRestoreConfig(dttm.HumanReadableDriveItem) + Infof(ctx, "Restoring to folder %s", restoreCfg.Location) sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts) utils.FilterSharePointRestoreInfoSelectors(sel, opts) - ro, err := r.NewRestore(ctx, utils.BackupIDFV, sel.Selector, dest) + ro, err := r.NewRestore(ctx, utils.BackupIDFV, sel.Selector, restoreCfg) if err != nil { return Only(ctx, clues.Wrap(err, "Failed to initialize SharePoint restore")) } diff --git a/src/cmd/factory/impl/common.go b/src/cmd/factory/impl/common.go index 15c00f672..d2ccb76f5 100644 --- a/src/cmd/factory/impl/common.go +++ b/src/cmd/factory/impl/common.go @@ -83,9 +83,9 @@ func generateAndRestoreItems( items: items, }} - dest := control.DefaultRestoreDestination(dttm.SafeForTesting) - dest.ContainerName = destFldr - print.Infof(ctx, "Restoring to folder %s", dest.ContainerName) + dest := control.DefaultRestoreConfig(dttm.SafeForTesting) + dest.Location = destFldr + print.Infof(ctx, "Restoring to folder %s", dest.Location) dataColls, err := buildCollections( service, @@ -163,7 +163,7 @@ type collection struct { func buildCollections( service path.ServiceType, tenant, user string, - dest control.RestoreDestination, + dest control.RestoreConfig, colls []collection, ) ([]data.RestoreCollection, error) { collections := make([]data.RestoreCollection, 0, len(colls)) @@ -224,9 +224,9 @@ func generateAndRestoreDriveItems( ctx, flush := tester.NewContext(nil) defer flush() - dest := control.DefaultRestoreDestination(dttm.SafeForTesting) - dest.ContainerName = destFldr - print.Infof(ctx, "Restoring to folder %s", dest.ContainerName) + dest := control.DefaultRestoreConfig(dttm.SafeForTesting) + dest.Location = destFldr + print.Infof(ctx, "Restoring to folder %s", dest.Location) var driveID string @@ -394,7 +394,7 @@ func generateAndRestoreDriveItems( Service: service, Tenant: tenantID, ResourceOwners: []string{resourceOwner}, - Dest: tester.DefaultTestRestoreDestination(""), + RestoreCfg: tester.DefaultTestRestoreConfig(""), } _, _, collections, _, err := connector.GetCollectionsAndExpected( diff --git a/src/internal/connector/data_collections.go b/src/internal/connector/data_collections.go index 6e3c65d95..0d836820e 100644 --- a/src/internal/connector/data_collections.go +++ b/src/internal/connector/data_collections.go @@ -233,7 +233,7 @@ func (gc *GraphConnector) ConsumeRestoreCollections( ctx context.Context, backupVersion int, sels selectors.Selector, - dest control.RestoreDestination, + restoreCfg control.RestoreConfig, opts control.Options, dcs []data.RestoreCollection, errs *fault.Bus, @@ -251,13 +251,13 @@ func (gc *GraphConnector) ConsumeRestoreCollections( switch sels.Service { case selectors.ServiceExchange: - status, err = exchange.RestoreCollections(ctx, gc.AC, dest, dcs, deets, errs) + status, err = exchange.RestoreCollections(ctx, gc.AC, restoreCfg, dcs, deets, errs) case selectors.ServiceOneDrive: status, err = onedrive.RestoreCollections( ctx, onedrive.NewRestoreHandler(gc.AC), 
backupVersion, - dest, + restoreCfg, opts, dcs, deets, @@ -267,7 +267,7 @@ func (gc *GraphConnector) ConsumeRestoreCollections( ctx, backupVersion, gc.AC, - dest, + restoreCfg, opts, dcs, deets, diff --git a/src/internal/connector/exchange/contacts_restore_test.go b/src/internal/connector/exchange/contacts_restore_test.go index d33e9fb61..2eff0d6a6 100644 --- a/src/internal/connector/exchange/contacts_restore_test.go +++ b/src/internal/connector/exchange/contacts_restore_test.go @@ -51,7 +51,7 @@ func (suite *ContactsRestoreIntgSuite) TestCreateContainerDestination() { path.EmailCategory, suite.creds.AzureTenantID, suite.userID, - tester.DefaultTestRestoreDestination("").ContainerName, + tester.DefaultTestRestoreConfig("").Location, []string{"Hufflepuff"}, []string{"Ravenclaw"}) } diff --git a/src/internal/connector/exchange/events_restore_test.go b/src/internal/connector/exchange/events_restore_test.go index 2060bf21a..dcce8c5f3 100644 --- a/src/internal/connector/exchange/events_restore_test.go +++ b/src/internal/connector/exchange/events_restore_test.go @@ -51,7 +51,7 @@ func (suite *EventsRestoreIntgSuite) TestCreateContainerDestination() { path.EmailCategory, suite.creds.AzureTenantID, suite.userID, - tester.DefaultTestRestoreDestination("").ContainerName, + tester.DefaultTestRestoreConfig("").Location, []string{"Durmstrang"}, []string{"Beauxbatons"}) } diff --git a/src/internal/connector/exchange/mail_restore_test.go b/src/internal/connector/exchange/mail_restore_test.go index 8edcedd4c..74ac9e87a 100644 --- a/src/internal/connector/exchange/mail_restore_test.go +++ b/src/internal/connector/exchange/mail_restore_test.go @@ -51,7 +51,7 @@ func (suite *MailRestoreIntgSuite) TestCreateContainerDestination() { path.EmailCategory, suite.creds.AzureTenantID, suite.userID, - tester.DefaultTestRestoreDestination("").ContainerName, + tester.DefaultTestRestoreConfig("").Location, []string{"Griffindor", "Croix"}, []string{"Griffindor", "Felicius"}) } diff --git a/src/internal/connector/exchange/restore_test.go b/src/internal/connector/exchange/restore_test.go index 5b77733ce..eb308f2dc 100644 --- a/src/internal/connector/exchange/restore_test.go +++ b/src/internal/connector/exchange/restore_test.go @@ -54,7 +54,7 @@ func (suite *RestoreIntgSuite) TestRestoreContact() { var ( userID = tester.M365UserID(t) - folderName = tester.DefaultTestRestoreDestination("contact").ContainerName + folderName = tester.DefaultTestRestoreConfig("contact").Location handler = newContactRestoreHandler(suite.ac) ) @@ -88,7 +88,7 @@ func (suite *RestoreIntgSuite) TestRestoreEvent() { var ( userID = tester.M365UserID(t) - subject = tester.DefaultTestRestoreDestination("event").ContainerName + subject = tester.DefaultTestRestoreConfig("event").Location handler = newEventRestoreHandler(suite.ac) ) @@ -154,7 +154,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { bytes: exchMock.MessageBytes("Restore Exchange Object"), category: path.EmailCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreDestination("mailobj").ContainerName + folderName := tester.DefaultTestRestoreConfig("mailobj").Location folder, err := handlers[path.EmailCategory]. 
CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -167,7 +167,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { bytes: exchMock.MessageWithDirectAttachment("Restore 1 Attachment"), category: path.EmailCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreDestination("mailwattch").ContainerName + folderName := tester.DefaultTestRestoreConfig("mailwattch").Location folder, err := handlers[path.EmailCategory]. CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -180,7 +180,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { bytes: exchMock.MessageWithItemAttachmentEvent("Event Item Attachment"), category: path.EmailCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreDestination("eventwattch").ContainerName + folderName := tester.DefaultTestRestoreConfig("eventwattch").Location folder, err := handlers[path.EmailCategory]. CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -193,7 +193,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { bytes: exchMock.MessageWithItemAttachmentMail("Mail Item Attachment"), category: path.EmailCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreDestination("mailitemattch").ContainerName + folderName := tester.DefaultTestRestoreConfig("mailitemattch").Location folder, err := handlers[path.EmailCategory]. CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -209,7 +209,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { ), category: path.EmailCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreDestination("mailbasicattch").ContainerName + folderName := tester.DefaultTestRestoreConfig("mailbasicattch").Location folder, err := handlers[path.EmailCategory]. CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -225,7 +225,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { ), category: path.EmailCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreDestination("mailnestattch").ContainerName + folderName := tester.DefaultTestRestoreConfig("mailnestattch").Location folder, err := handlers[path.EmailCategory]. CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -241,7 +241,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { ), category: path.EmailCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreDestination("mailcontactattch").ContainerName + folderName := tester.DefaultTestRestoreConfig("mailcontactattch").Location folder, err := handlers[path.EmailCategory]. CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -254,7 +254,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { bytes: exchMock.MessageWithNestedItemAttachmentEvent("Nested Item Attachment"), category: path.EmailCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreDestination("nestedattch").ContainerName + folderName := tester.DefaultTestRestoreConfig("nestedattch").Location folder, err := handlers[path.EmailCategory]. 
CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -267,7 +267,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { bytes: exchMock.MessageWithLargeAttachment("Restore Large Attachment"), category: path.EmailCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreDestination("maillargeattch").ContainerName + folderName := tester.DefaultTestRestoreConfig("maillargeattch").Location folder, err := handlers[path.EmailCategory]. CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -280,7 +280,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { bytes: exchMock.MessageWithTwoAttachments("Restore 2 Attachments"), category: path.EmailCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreDestination("mailtwoattch").ContainerName + folderName := tester.DefaultTestRestoreConfig("mailtwoattch").Location folder, err := handlers[path.EmailCategory]. CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -293,7 +293,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { bytes: exchMock.MessageWithOneDriveAttachment("Restore Reference(OneDrive) Attachment"), category: path.EmailCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreDestination("mailrefattch").ContainerName + folderName := tester.DefaultTestRestoreConfig("mailrefattch").Location folder, err := handlers[path.EmailCategory]. CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -306,7 +306,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { bytes: exchMock.ContactBytes("Test_Omega"), category: path.ContactsCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreDestination("contact").ContainerName + folderName := tester.DefaultTestRestoreConfig("contact").Location folder, err := handlers[path.ContactsCategory]. CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -319,7 +319,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { bytes: exchMock.EventBytes("Restored Event Object"), category: path.EventsCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreDestination("event").ContainerName + folderName := tester.DefaultTestRestoreConfig("event").Location calendar, err := handlers[path.EventsCategory]. CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -332,7 +332,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { bytes: exchMock.EventWithAttachment("Restored Event Attachment"), category: path.EventsCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreDestination("eventobj").ContainerName + folderName := tester.DefaultTestRestoreConfig("eventobj").Location calendar, err := handlers[path.EventsCategory]. 
CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) diff --git a/src/internal/connector/exchange/service_iterators_test.go b/src/internal/connector/exchange/service_iterators_test.go index ce56eb0b8..102031ffe 100644 --- a/src/internal/connector/exchange/service_iterators_test.go +++ b/src/internal/connector/exchange/service_iterators_test.go @@ -190,7 +190,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() { getter mockGetter resolver graph.ContainerResolver scope selectors.ExchangeScope - failFast control.FailureBehavior + failFast control.FailurePolicy expectErr assert.ErrorAssertionFunc expectNewColls int expectMetadataColls int diff --git a/src/internal/connector/exchange/service_restore.go b/src/internal/connector/exchange/service_restore.go index a44edf26b..b55c2c318 100644 --- a/src/internal/connector/exchange/service_restore.go +++ b/src/internal/connector/exchange/service_restore.go @@ -27,7 +27,7 @@ import ( func RestoreCollections( ctx context.Context, ac api.Client, - dest control.RestoreDestination, + restoreCfg control.RestoreConfig, dcs []data.RestoreCollection, deets *details.Builder, errs *fault.Bus, @@ -76,7 +76,7 @@ func RestoreCollections( containerID, gcr, err := createDestination( ictx, handler, - handler.formatRestoreDestination(dest.ContainerName, dc.FullPath()), + handler.formatRestoreDestination(restoreCfg.Location, dc.FullPath()), userID, directoryCache[category], isNewCache, @@ -116,7 +116,7 @@ func RestoreCollections( support.Restore, len(dcs), metrics, - dest.ContainerName) + restoreCfg.Location) return status, el.Failure() } diff --git a/src/internal/connector/graph_connector_helper_test.go b/src/internal/connector/graph_connector_helper_test.go index b5907497b..5a80c1cda 100644 --- a/src/internal/connector/graph_connector_helper_test.go +++ b/src/internal/connector/graph_connector_helper_test.go @@ -936,7 +936,7 @@ func checkCollections( category = returned.FullPath().Category() expectedColData = expected[returned.FullPath().String()] folders = returned.FullPath().Elements() - rootDir = folders[len(folders)-1] == config.Dest.ContainerName + rootDir = folders[len(folders)-1] == config.RestoreCfg.Location ) // Need to iterate through all items even if we don't expect to find a match diff --git a/src/internal/connector/graph_connector_onedrive_test_helper.go b/src/internal/connector/graph_connector_onedrive_test_helper.go index 38b760b1a..b70543019 100644 --- a/src/internal/connector/graph_connector_onedrive_test_helper.go +++ b/src/internal/connector/graph_connector_onedrive_test_helper.go @@ -339,7 +339,7 @@ func GetCollectionsAndExpected( config.Service, config.Tenant, owner, - config.Dest, + config.RestoreCfg, testCollections, backupVersion, ) diff --git a/src/internal/connector/graph_connector_test.go b/src/internal/connector/graph_connector_test.go index f5f966287..ccc9e25e7 100644 --- a/src/internal/connector/graph_connector_test.go +++ b/src/internal/connector/graph_connector_test.go @@ -293,8 +293,8 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreFailsBadService() { defer flush() var ( - dest = tester.DefaultTestRestoreDestination("") - sel = selectors.Selector{ + restoreCfg = tester.DefaultTestRestoreConfig("") + sel = selectors.Selector{ Service: selectors.ServiceUnknown, } ) @@ -303,7 +303,7 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreFailsBadService() { ctx, version.Backup, sel, - dest, + restoreCfg, control.Options{ RestorePermissions: true, 
ToggleFeatures: control.Toggles{}, @@ -320,7 +320,7 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreFailsBadService() { } func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() { - dest := tester.DefaultTestRestoreDestination("") + restoreCfg := tester.DefaultTestRestoreConfig("") table := []struct { name string col []data.RestoreCollection @@ -381,7 +381,7 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() { ctx, version.Backup, test.sel, - dest, + restoreCfg, control.Options{ RestorePermissions: true, ToggleFeatures: control.Toggles{}, @@ -413,7 +413,7 @@ func runRestore( ) { t.Logf( "Restoring collections to %s for resourceOwners(s) %v\n", - config.Dest.ContainerName, + config.RestoreCfg.Location, config.ResourceOwners) start := time.Now() @@ -424,7 +424,7 @@ func runRestore( ctx, backupVersion, restoreSel, - config.Dest, + config.RestoreCfg, config.Opts, collections, fault.New(true)) @@ -472,7 +472,7 @@ func runBackupAndCompare( for _, ro := range config.ResourceOwners { expectedDests = append(expectedDests, destAndCats{ resourceOwner: ro, - dest: config.Dest.ContainerName, + dest: config.RestoreCfg.Location, cats: cats, }) @@ -536,7 +536,7 @@ func runRestoreBackupTest( Service: test.service, Tenant: tenant, ResourceOwners: resourceOwners, - Dest: tester.DefaultTestRestoreDestination(""), + RestoreCfg: tester.DefaultTestRestoreConfig(""), } totalItems, totalKopiaItems, collections, expectedData, err := GetCollectionsAndExpected( @@ -581,7 +581,7 @@ func runRestoreTestWithVersion( Service: test.service, Tenant: tenant, ResourceOwners: resourceOwners, - Dest: tester.DefaultTestRestoreDestination(""), + RestoreCfg: tester.DefaultTestRestoreConfig(""), } totalItems, _, collections, _, err := GetCollectionsAndExpected( @@ -618,7 +618,7 @@ func runRestoreBackupTestVersions( Service: test.service, Tenant: tenant, ResourceOwners: resourceOwners, - Dest: tester.DefaultTestRestoreDestination(""), + RestoreCfg: tester.DefaultTestRestoreConfig(""), } totalItems, _, collections, _, err := GetCollectionsAndExpected( @@ -993,11 +993,11 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames allExpectedData := map[string]map[string][]byte{} for i, collection := range test.collections { - // Get a dest per collection so they're independent. - dest := tester.DefaultTestRestoreDestination("") + // Get a restoreCfg per collection so they're independent. 
+ restoreCfg := tester.DefaultTestRestoreConfig("") expectedDests = append(expectedDests, destAndCats{ resourceOwner: suite.user, - dest: dest.ContainerName, + dest: restoreCfg.Location, cats: map[path.CategoryType]struct{}{ collection.Category: {}, }, @@ -1007,7 +1007,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames test.service, suite.connector.tenant, suite.user, - dest, + restoreCfg, []ColInfo{collection}, version.Backup, ) @@ -1023,7 +1023,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames "Restoring %v/%v collections to %s\n", i+1, len(test.collections), - dest.ContainerName, + restoreCfg.Location, ) restoreGC := loadConnector(ctx, t, test.resource) @@ -1031,7 +1031,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames ctx, version.Backup, restoreSel, - dest, + restoreCfg, control.Options{ RestorePermissions: true, ToggleFeatures: control.Toggles{}, @@ -1081,7 +1081,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames ci := ConfigInfo{ Opts: control.Options{RestorePermissions: true}, // Alright to be empty, needed for OneDrive. - Dest: control.RestoreDestination{}, + RestoreCfg: control.RestoreConfig{}, } // Pull the data prior to waiting for the status as otherwise it will diff --git a/src/internal/connector/graph_connector_test_helper.go b/src/internal/connector/graph_connector_test_helper.go index ef6c11201..8ea552190 100644 --- a/src/internal/connector/graph_connector_test_helper.go +++ b/src/internal/connector/graph_connector_test_helper.go @@ -43,7 +43,7 @@ type ConfigInfo struct { Service path.ServiceType Tenant string ResourceOwners []string - Dest control.RestoreDestination + RestoreCfg control.RestoreConfig } func mustToDataLayerPath( @@ -66,15 +66,15 @@ func mustToDataLayerPath( // combination of the location the data was recently restored to and where the // data was originally in the hierarchy. func backupOutputPathFromRestore( - restoreDest control.RestoreDestination, + restoreCfg control.RestoreConfig, inputPath path.Path, ) (path.Path, error) { - base := []string{restoreDest.ContainerName} + base := []string{restoreCfg.Location} // OneDrive has leading information like the drive ID. if inputPath.Service() == path.OneDriveService || inputPath.Service() == path.SharePointService { folders := inputPath.Folders() - base = append(append([]string{}, folders[:3]...), restoreDest.ContainerName) + base = append(append([]string{}, folders[:3]...), restoreCfg.Location) if len(folders) > 3 { base = append(base, folders[3:]...) 
@@ -117,7 +117,7 @@ func (rc mockRestoreCollection) FetchItemByName( func collectionsForInfo( service path.ServiceType, tenant, user string, - dest control.RestoreDestination, + restoreCfg control.RestoreConfig, allInfo []ColInfo, backupVersion int, ) (int, int, []data.RestoreCollection, map[string]map[string][]byte, error) { @@ -142,7 +142,7 @@ func collectionsForInfo( mc := exchMock.NewCollection(pth, pth, len(info.Items)) - baseDestPath, err := backupOutputPathFromRestore(dest, pth) + baseDestPath, err := backupOutputPathFromRestore(restoreCfg, pth) if err != nil { return totalItems, kopiaEntries, collections, expectedData, err } diff --git a/src/internal/connector/mock/connector.go b/src/internal/connector/mock/connector.go index b34296283..2e2faec77 100644 --- a/src/internal/connector/mock/connector.go +++ b/src/internal/connector/mock/connector.go @@ -60,7 +60,7 @@ func (gc GraphConnector) ConsumeRestoreCollections( _ context.Context, _ int, _ selectors.Selector, - _ control.RestoreDestination, + _ control.RestoreConfig, _ control.Options, _ []data.RestoreCollection, _ *fault.Bus, diff --git a/src/internal/connector/onedrive/item_test.go b/src/internal/connector/onedrive/item_test.go index 8c1af9ca7..fd3497fc4 100644 --- a/src/internal/connector/onedrive/item_test.go +++ b/src/internal/connector/onedrive/item_test.go @@ -155,7 +155,7 @@ func (suite *ItemIntegrationSuite) TestItemWriter() { root, err := suite.service.ac.Drives().GetRootFolder(ctx, test.driveID) require.NoError(t, err, clues.ToCore(err)) - newFolderName := tester.DefaultTestRestoreDestination("folder").ContainerName + newFolderName := tester.DefaultTestRestoreConfig("folder").Location t.Logf("creating folder %s", newFolderName) newFolder, err := rh.PostItemInContainer( diff --git a/src/internal/connector/onedrive/restore.go b/src/internal/connector/onedrive/restore.go index c606389b6..67c5f53bf 100644 --- a/src/internal/connector/onedrive/restore.go +++ b/src/internal/connector/onedrive/restore.go @@ -64,7 +64,7 @@ func RestoreCollections( ctx context.Context, rh RestoreHandler, backupVersion int, - dest control.RestoreDestination, + restoreCfg control.RestoreConfig, opts control.Options, dcs []data.RestoreCollection, deets *details.Builder, @@ -79,7 +79,7 @@ func RestoreCollections( ctx = clues.Add( ctx, "backup_version", backupVersion, - "destination", dest.ContainerName) + "restore_location", restoreCfg.Location) // Reorder collections so that the parents directories are created // before the child directories; a requirement for permissions. 
@@ -97,7 +97,7 @@ func RestoreCollections( ictx = clues.Add( ctx, "category", dc.FullPath().Category(), - "destination", clues.Hide(dest.ContainerName), + "destination", clues.Hide(restoreCfg.Location), "resource_owner", clues.Hide(dc.FullPath().ResourceOwner()), "full_path", dc.FullPath()) ) @@ -108,7 +108,7 @@ func RestoreCollections( backupVersion, dc, caches, - dest.ContainerName, + restoreCfg.Location, deets, opts.RestorePermissions, errs) @@ -128,7 +128,7 @@ func RestoreCollections( support.Restore, len(dcs), restoreMetrics, - dest.ContainerName) + restoreCfg.Location) return status, el.Failure() } diff --git a/src/internal/connector/onedrive/url_cache_test.go b/src/internal/connector/onedrive/url_cache_test.go index 4a4da5c4a..97a7b87b1 100644 --- a/src/internal/connector/onedrive/url_cache_test.go +++ b/src/internal/connector/onedrive/url_cache_test.go @@ -63,7 +63,7 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() { t = suite.T() ac = suite.ac.Drives() driveID = suite.driveID - newFolderName = tester.DefaultTestRestoreDestination("folder").ContainerName + newFolderName = tester.DefaultTestRestoreConfig("folder").Location driveItemPager = suite.ac.Drives().NewItemPager(driveID, "", api.DriveItemSelectDefault()) ) diff --git a/src/internal/connector/sharepoint/api/pages_test.go b/src/internal/connector/sharepoint/api/pages_test.go index cb10f6022..ae9635ff8 100644 --- a/src/internal/connector/sharepoint/api/pages_test.go +++ b/src/internal/connector/sharepoint/api/pages_test.go @@ -92,7 +92,7 @@ func (suite *SharePointPageSuite) TestRestoreSinglePage() { ctx, flush := tester.NewContext(t) defer flush() - destName := tester.DefaultTestRestoreDestination("").ContainerName + destName := tester.DefaultTestRestoreConfig("").Location testName := "MockPage" // Create Test Page diff --git a/src/internal/connector/sharepoint/collection_test.go b/src/internal/connector/sharepoint/collection_test.go index 74220ae10..4d66a33c0 100644 --- a/src/internal/connector/sharepoint/collection_test.go +++ b/src/internal/connector/sharepoint/collection_test.go @@ -208,7 +208,7 @@ func (suite *SharePointCollectionSuite) TestListCollection_Restore() { info: sharePointListInfo(listing, int64(len(byteArray))), } - destName := tester.DefaultTestRestoreDestination("").ContainerName + destName := tester.DefaultTestRestoreConfig("").Location deets, err := restoreListItem(ctx, service, listData, suite.siteID, destName) assert.NoError(t, err, clues.ToCore(err)) diff --git a/src/internal/connector/sharepoint/restore.go b/src/internal/connector/sharepoint/restore.go index ef040356d..be307fb86 100644 --- a/src/internal/connector/sharepoint/restore.go +++ b/src/internal/connector/sharepoint/restore.go @@ -43,7 +43,7 @@ func RestoreCollections( ctx context.Context, backupVersion int, ac api.Client, - dest control.RestoreDestination, + restoreCfg control.RestoreConfig, opts control.Options, dcs []data.RestoreCollection, deets *details.Builder, @@ -71,7 +71,7 @@ func RestoreCollections( metrics support.CollectionMetrics ictx = clues.Add(ctx, "category", category, - "destination", clues.Hide(dest.ContainerName), + "restore_location", restoreCfg.Location, "resource_owner", clues.Hide(dc.FullPath().ResourceOwner()), "full_path", dc.FullPath()) ) @@ -84,7 +84,7 @@ func RestoreCollections( backupVersion, dc, caches, - dest.ContainerName, + restoreCfg.Location, deets, opts.RestorePermissions, errs) @@ -94,7 +94,7 @@ func RestoreCollections( ictx, ac.Stable, dc, - dest.ContainerName, + restoreCfg.Location, deets, 
errs) @@ -103,7 +103,7 @@ func RestoreCollections( ictx, ac.Stable, dc, - dest.ContainerName, + restoreCfg.Location, deets, errs) @@ -127,7 +127,7 @@ func RestoreCollections( support.Restore, len(dcs), restoreMetrics, - dest.ContainerName) + restoreCfg.Location) return status, el.Failure() } diff --git a/src/internal/operations/backup_integration_test.go b/src/internal/operations/backup_integration_test.go index 2f24eb23f..8fbd9d2ce 100644 --- a/src/internal/operations/backup_integration_test.go +++ b/src/internal/operations/backup_integration_test.go @@ -381,14 +381,14 @@ func generateContainerOfItems( items: items, }} - dest := control.DefaultRestoreDestination(dttm.SafeForTesting) - dest.ContainerName = destFldr + restoreCfg := control.DefaultRestoreConfig(dttm.SafeForTesting) + restoreCfg.Location = destFldr dataColls := buildCollections( t, service, tenantID, resourceOwner, - dest, + restoreCfg, collections) opts := control.Defaults() @@ -398,7 +398,7 @@ func generateContainerOfItems( ctx, backupVersion, sel, - dest, + restoreCfg, opts, dataColls, fault.New(true)) @@ -443,7 +443,7 @@ func buildCollections( t *testing.T, service path.ServiceType, tenant, user string, - dest control.RestoreDestination, + restoreCfg control.RestoreConfig, colls []incrementalCollection, ) []data.RestoreCollection { t.Helper() diff --git a/src/internal/operations/inject/inject.go b/src/internal/operations/inject/inject.go index 4514ce646..7bcbaf67b 100644 --- a/src/internal/operations/inject/inject.go +++ b/src/internal/operations/inject/inject.go @@ -37,7 +37,7 @@ type ( ctx context.Context, backupVersion int, selector selectors.Selector, - dest control.RestoreDestination, + restoreCfg control.RestoreConfig, opts control.Options, dcs []data.RestoreCollection, errs *fault.Bus, diff --git a/src/internal/operations/restore.go b/src/internal/operations/restore.go index ac67666c3..02f406e0a 100644 --- a/src/internal/operations/restore.go +++ b/src/internal/operations/restore.go @@ -35,11 +35,11 @@ import ( type RestoreOperation struct { operation - BackupID model.StableID `json:"backupID"` - Destination control.RestoreDestination `json:"destination"` - Results RestoreResults `json:"results"` - Selectors selectors.Selector `json:"selectors"` - Version string `json:"version"` + BackupID model.StableID + Results RestoreResults + Selectors selectors.Selector + RestoreCfg control.RestoreConfig + Version string acct account.Account rc inject.RestoreConsumer @@ -61,17 +61,17 @@ func NewRestoreOperation( acct account.Account, backupID model.StableID, sel selectors.Selector, - dest control.RestoreDestination, + restoreCfg control.RestoreConfig, bus events.Eventer, ) (RestoreOperation, error) { op := RestoreOperation{ - operation: newOperation(opts, bus, kw, sw), - acct: acct, - BackupID: backupID, - Destination: dest, - Selectors: sel, - Version: "v0", - rc: rc, + operation: newOperation(opts, bus, kw, sw), + acct: acct, + BackupID: backupID, + RestoreCfg: restoreCfg, + Selectors: sel, + Version: "v0", + rc: rc, } if err := op.validate(); err != nil { return RestoreOperation{}, err @@ -138,7 +138,7 @@ func (op *RestoreOperation) Run(ctx context.Context) (restoreDetails *details.De "tenant_id", clues.Hide(op.acct.ID()), "backup_id", op.BackupID, "service", op.Selectors.Service, - "destination_container", clues.Hide(op.Destination.ContainerName)) + "destination_container", clues.Hide(op.RestoreCfg.Location)) defer func() { op.bus.Event( @@ -257,7 +257,7 @@ func (op *RestoreOperation) do( op.rc, bup.Version, op.Selectors, 
- op.Destination, + op.RestoreCfg, op.Options, dcs, op.Errors) @@ -314,7 +314,7 @@ func consumeRestoreCollections( rc inject.RestoreConsumer, backupVersion int, sel selectors.Selector, - dest control.RestoreDestination, + restoreCfg control.RestoreConfig, opts control.Options, dcs []data.RestoreCollection, errs *fault.Bus, @@ -329,7 +329,7 @@ func consumeRestoreCollections( ctx, backupVersion, sel, - dest, + restoreCfg, opts, dcs, errs) diff --git a/src/internal/operations/restore_test.go b/src/internal/operations/restore_test.go index d43a6c7b2..63c92c208 100644 --- a/src/internal/operations/restore_test.go +++ b/src/internal/operations/restore_test.go @@ -46,11 +46,11 @@ func TestRestoreOpSuite(t *testing.T) { func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() { var ( - kw = &kopia.Wrapper{} - sw = &store.Wrapper{} - gc = &mock.GraphConnector{} - now = time.Now() - dest = tester.DefaultTestRestoreDestination("") + kw = &kopia.Wrapper{} + sw = &store.Wrapper{} + gc = &mock.GraphConnector{} + now = time.Now() + restoreCfg = tester.DefaultTestRestoreConfig("") ) table := []struct { @@ -113,7 +113,7 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() { account.Account{}, "foo", selectors.Selector{DiscreteOwner: "test"}, - dest, + restoreCfg, evmock.NewBus()) require.NoError(t, err, clues.ToCore(err)) @@ -215,11 +215,11 @@ func (suite *RestoreOpIntegrationSuite) TearDownSuite() { func (suite *RestoreOpIntegrationSuite) TestNewRestoreOperation() { var ( - kw = &kopia.Wrapper{} - sw = &store.Wrapper{} - gc = &mock.GraphConnector{} - dest = tester.DefaultTestRestoreDestination("") - opts = control.Defaults() + kw = &kopia.Wrapper{} + sw = &store.Wrapper{} + gc = &mock.GraphConnector{} + restoreCfg = tester.DefaultTestRestoreConfig("") + opts = control.Defaults() ) table := []struct { @@ -251,7 +251,7 @@ func (suite *RestoreOpIntegrationSuite) TestNewRestoreOperation() { tester.NewM365Account(t), "backup-id", selectors.Selector{DiscreteOwner: "test"}, - dest, + restoreCfg, evmock.NewBus()) test.errCheck(t, err, clues.ToCore(err)) }) @@ -370,14 +370,14 @@ func (suite *RestoreOpIntegrationSuite) TestRestore_Run() { tables := []struct { name string owner string - dest control.RestoreDestination + restoreCfg control.RestoreConfig getSelector func(t *testing.T, owners []string) selectors.Selector setup func(t *testing.T, kw *kopia.Wrapper, sw *store.Wrapper, acct account.Account, owner string) bupResults }{ { - name: "Exchange_Restore", - owner: tester.M365UserID(suite.T()), - dest: tester.DefaultTestRestoreDestination(""), + name: "Exchange_Restore", + owner: tester.M365UserID(suite.T()), + restoreCfg: tester.DefaultTestRestoreConfig(""), getSelector: func(t *testing.T, owners []string) selectors.Selector { rsel := selectors.NewExchangeRestore(owners) rsel.Include(rsel.AllData()) @@ -387,9 +387,9 @@ func (suite *RestoreOpIntegrationSuite) TestRestore_Run() { setup: setupExchangeBackup, }, { - name: "SharePoint_Restore", - owner: tester.M365SiteID(suite.T()), - dest: control.DefaultRestoreDestination(dttm.SafeForTesting), + name: "SharePoint_Restore", + owner: tester.M365SiteID(suite.T()), + restoreCfg: control.DefaultRestoreConfig(dttm.SafeForTesting), getSelector: func(t *testing.T, owners []string) selectors.Selector { rsel := selectors.NewSharePointRestore(owners) rsel.Include(rsel.AllData()) @@ -423,7 +423,7 @@ func (suite *RestoreOpIntegrationSuite) TestRestore_Run() { tester.NewM365Account(t), bup.backupID, test.getSelector(t, bup.selectorResourceOwners), - 
test.dest, + test.restoreCfg, mb) require.NoError(t, err, clues.ToCore(err)) @@ -453,8 +453,8 @@ func (suite *RestoreOpIntegrationSuite) TestRestore_Run_errorNoBackup() { defer flush() var ( - dest = tester.DefaultTestRestoreDestination("") - mb = evmock.NewBus() + restoreCfg = tester.DefaultTestRestoreConfig("") + mb = evmock.NewBus() ) rsel := selectors.NewExchangeRestore(selectors.None()) @@ -475,7 +475,7 @@ func (suite *RestoreOpIntegrationSuite) TestRestore_Run_errorNoBackup() { tester.NewM365Account(t), "backupID", rsel.Selector, - dest, + restoreCfg, mb) require.NoError(t, err, clues.ToCore(err)) diff --git a/src/internal/tester/restore_destination.go b/src/internal/tester/restore_destination.go index af247258d..5b1c667c3 100644 --- a/src/internal/tester/restore_destination.go +++ b/src/internal/tester/restore_destination.go @@ -9,10 +9,10 @@ import ( const RestoreFolderPrefix = "Corso_Test" -func DefaultTestRestoreDestination(namespace string) control.RestoreDestination { +func DefaultTestRestoreConfig(namespace string) control.RestoreConfig { var ( - dest = control.DefaultRestoreDestination(dttm.SafeForTesting) - sft = dttm.FormatNow(dttm.SafeForTesting) + restoreCfg = control.DefaultRestoreConfig(dttm.SafeForTesting) + sft = dttm.FormatNow(dttm.SafeForTesting) ) parts := []string{RestoreFolderPrefix, namespace, sft} @@ -20,7 +20,7 @@ func DefaultTestRestoreDestination(namespace string) control.RestoreDestination parts = []string{RestoreFolderPrefix, sft} } - dest.ContainerName = strings.Join(parts, "_") + restoreCfg.Location = strings.Join(parts, "_") - return dest + return restoreCfg } diff --git a/src/pkg/control/collisionpolicy_string.go b/src/pkg/control/collisionpolicy_string.go deleted file mode 100644 index da2c0b50b..000000000 --- a/src/pkg/control/collisionpolicy_string.go +++ /dev/null @@ -1,26 +0,0 @@ -// Code generated by "stringer -type=CollisionPolicy"; DO NOT EDIT. - -package control - -import "strconv" - -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - // Re-run the stringer command to generate them again. - var x [1]struct{} - _ = x[Unknown-0] - _ = x[Copy-1] - _ = x[Skip-2] - _ = x[Replace-3] -} - -const _CollisionPolicy_name = "UnknownCopySkipReplace" - -var _CollisionPolicy_index = [...]uint8{0, 7, 11, 15, 22} - -func (i CollisionPolicy) String() string { - if i < 0 || i >= CollisionPolicy(len(_CollisionPolicy_index)-1) { - return "CollisionPolicy(" + strconv.FormatInt(int64(i), 10) + ")" - } - return _CollisionPolicy_name[_CollisionPolicy_index[i]:_CollisionPolicy_index[i+1]] -} diff --git a/src/pkg/control/options.go b/src/pkg/control/options.go index 3bda48854..4f592787e 100644 --- a/src/pkg/control/options.go +++ b/src/pkg/control/options.go @@ -7,9 +7,8 @@ import ( // Options holds the optional configurations for a process type Options struct { - Collision CollisionPolicy `json:"-"` DisableMetrics bool `json:"disableMetrics"` - FailureHandling FailureBehavior `json:"failureHandling"` + FailureHandling FailurePolicy `json:"failureHandling"` RestorePermissions bool `json:"restorePermissions"` SkipReduce bool `json:"skipReduce"` ToggleFeatures Toggles `json:"toggleFeatures"` @@ -17,8 +16,6 @@ type Options struct { Repo repository.Options `json:"repo"` } -type FailureBehavior string - type Parallelism struct { // sets the collection buffer size before blocking. 
	CollectionBuffer int
@@ -26,13 +23,15 @@
	ItemFetch int
}

+type FailurePolicy string
+
const (
	// fails and exits the run immediately
-	FailFast FailureBehavior = "fail-fast"
+	FailFast FailurePolicy = "fail-fast"
	// recovers whenever possible, reports non-zero recoveries as a failure
-	FailAfterRecovery FailureBehavior = "fail-after-recovery"
+	FailAfterRecovery FailurePolicy = "fail-after-recovery"
	// recovers whenever possible, does not report recovery as failure
-	BestEffort FailureBehavior = "best-effort"
+	BestEffort FailurePolicy = "best-effort"
)

// Defaults provides an Options with the default values set.
@@ -48,44 +47,50 @@ func Defaults() Options {
}

// ---------------------------------------------------------------------------
-// Restore Item Collision Policy
-// ---------------------------------------------------------------------------
-
-// CollisionPolicy describes how the datalayer behaves in case of a collision.
-type CollisionPolicy int
-
-//go:generate stringer -type=CollisionPolicy
-const (
-	Unknown CollisionPolicy = iota
-	Copy
-	Skip
-	Replace
-)
-
-// ---------------------------------------------------------------------------
-// Restore Destination
+// Restore Configuration
// ---------------------------------------------------------------------------

const (
	defaultRestoreLocation = "Corso_Restore_"
)

-// RestoreDestination is a POD that contains an override of the resource owner
-// to restore data under and the name of the root of the restored container
-// hierarchy.
-type RestoreDestination struct {
-	// ResourceOwnerOverride overrides the default resource owner to restore to.
-	// If it is not populated items should be restored under the previous resource
-	// owner of the item.
-	ResourceOwnerOverride string
-	// ContainerName is the name of the root of the restored container hierarchy.
-	// This field must be populated for a restore.
-	ContainerName string
+// CollisionPolicy describes how the datalayer behaves in case of a collision.
+type CollisionPolicy string
+
+const (
+	Unknown CollisionPolicy = ""
+	Skip    CollisionPolicy = "skip"
+	Copy    CollisionPolicy = "copy"
+	Replace CollisionPolicy = "replace"
+)
+
+// RestoreConfig contains the user-provided options that control how a restore
+// operation behaves.
+type RestoreConfig struct {
+	// Defines the per-item collision handling policy.
+	// Defaults to Skip.
+	OnCollision CollisionPolicy
+
+	// ProtectedResource specifies which resource the data will be restored to.
+	// If empty, restores to the same resource that was backed up.
+	// Defaults to empty.
+	ProtectedResource string
+
+	// Location specifies the container into which the data will be restored.
+	// Only accepts container names, does not accept IDs.
+	// If empty or "/", data will get restored in place, beginning at the root.
+	// Defaults to "Corso_Restore_" followed by the current timestamp.
+	Location string
+
+	// Drive specifies the drive into which the data will be restored.
+	// If empty, data is restored to the same drive that was backed up.
+	// Defaults to empty.
+ Drive string } -func DefaultRestoreDestination(timeFormat dttm.TimeFormat) RestoreDestination { - return RestoreDestination{ - ContainerName: defaultRestoreLocation + dttm.FormatNow(timeFormat), +func DefaultRestoreConfig(timeFormat dttm.TimeFormat) RestoreConfig { + return RestoreConfig{ + OnCollision: Skip, + Location: defaultRestoreLocation + dttm.FormatNow(timeFormat), } } diff --git a/src/pkg/repository/loadtest/repository_load_test.go b/src/pkg/repository/loadtest/repository_load_test.go index 22138f6c9..9f586f648 100644 --- a/src/pkg/repository/loadtest/repository_load_test.go +++ b/src/pkg/repository/loadtest/repository_load_test.go @@ -151,9 +151,9 @@ func runRestoreLoadTest( t.Skip("restore load test is toggled off") } - dest := tester.DefaultTestRestoreDestination("") + restoreCfg := tester.DefaultTestRestoreConfig("") - rst, err := r.NewRestore(ctx, backupID, restSel, dest) + rst, err := r.NewRestore(ctx, backupID, restSel, restoreCfg) require.NoError(t, err, clues.ToCore(err)) doRestoreLoadTest(t, ctx, rst, service, bup.Results.ItemsWritten, usersUnderTest) diff --git a/src/pkg/repository/repository.go b/src/pkg/repository/repository.go index fbe374223..13385cbb8 100644 --- a/src/pkg/repository/repository.go +++ b/src/pkg/repository/repository.go @@ -69,7 +69,7 @@ type Repository interface { ctx context.Context, backupID string, sel selectors.Selector, - dest control.RestoreDestination, + restoreCfg control.RestoreConfig, ) (operations.RestoreOperation, error) NewMaintenance( ctx context.Context, @@ -336,7 +336,7 @@ func (r repository) NewRestore( ctx context.Context, backupID string, sel selectors.Selector, - dest control.RestoreDestination, + restoreCfg control.RestoreConfig, ) (operations.RestoreOperation, error) { gc, err := connectToM365(ctx, sel, r.Account) if err != nil { @@ -352,7 +352,7 @@ func (r repository) NewRestore( r.Account, model.StableID(backupID), sel, - dest, + restoreCfg, r.Bus) } diff --git a/src/pkg/repository/repository_test.go b/src/pkg/repository/repository_test.go index 09c4b8f14..f3a06cc9e 100644 --- a/src/pkg/repository/repository_test.go +++ b/src/pkg/repository/repository_test.go @@ -242,7 +242,7 @@ func (suite *RepositoryIntegrationSuite) TestNewRestore() { defer flush() acct := tester.NewM365Account(t) - dest := tester.DefaultTestRestoreDestination("") + restoreCfg := tester.DefaultTestRestoreConfig("") // need to initialize the repository before we can test connecting to it. st := tester.NewPrefixedS3Storage(t) @@ -250,7 +250,7 @@ func (suite *RepositoryIntegrationSuite) TestNewRestore() { r, err := repository.Initialize(ctx, acct, st, control.Defaults()) require.NoError(t, err, clues.ToCore(err)) - ro, err := r.NewRestore(ctx, "backup-id", selectors.Selector{DiscreteOwner: "test"}, dest) + ro, err := r.NewRestore(ctx, "backup-id", selectors.Selector{DiscreteOwner: "test"}, restoreCfg) require.NoError(t, err, clues.ToCore(err)) require.NotNil(t, ro) } From 7120164db6b15df9ea503aab36e35aaff6f90371 Mon Sep 17 00:00:00 2001 From: ashmrtn <3891298+ashmrtn@users.noreply.github.com> Date: Mon, 12 Jun 2023 17:16:07 -0700 Subject: [PATCH 12/41] Add struct functions for backup bases (#3595) Move most of the stuff that was acting on backup bases to be functions that are defined for backup bases. Other code can be removed at a later point New functions aren't called yet in other code --- #### Does this PR need a docs update or release note? 
- [ ] :white_check_mark: Yes, it's included - [ ] :clock1: Yes, but in a later PR - [x] :no_entry: No #### Type of change - [ ] :sunflower: Feature - [ ] :bug: Bugfix - [ ] :world_map: Documentation - [ ] :robot: Supportability/Tests - [ ] :computer: CI/Deployment - [x] :broom: Tech Debt/Cleanup #### Issue(s) * #3525 #### Test Plan - [ ] :muscle: Manual - [x] :zap: Unit test - [ ] :green_heart: E2E --- src/internal/kopia/backup_bases.go | 387 +++++++++++++ src/internal/kopia/backup_bases_test.go | 705 ++++++++++++++++++++++++ src/internal/kopia/base_finder.go | 6 - src/internal/kopia/base_finder_test.go | 18 +- src/internal/kopia/mock_backup_base.go | 63 +++ 5 files changed, 1164 insertions(+), 15 deletions(-) create mode 100644 src/internal/kopia/backup_bases.go create mode 100644 src/internal/kopia/backup_bases_test.go create mode 100644 src/internal/kopia/mock_backup_base.go diff --git a/src/internal/kopia/backup_bases.go b/src/internal/kopia/backup_bases.go new file mode 100644 index 000000000..0505fc829 --- /dev/null +++ b/src/internal/kopia/backup_bases.go @@ -0,0 +1,387 @@ +package kopia + +import ( + "context" + + "github.com/alcionai/clues" + "github.com/kopia/kopia/repo/manifest" + "golang.org/x/exp/slices" + + "github.com/alcionai/corso/src/internal/version" + "github.com/alcionai/corso/src/pkg/logger" +) + +// TODO(ashmrtn): Move this into some inject package. Here to avoid import +// cycles. +type BackupBases interface { + RemoveMergeBaseByManifestID(manifestID manifest.ID) + Backups() []BackupEntry + MinBackupVersion() int + MergeBases() []ManifestEntry + ClearMergeBases() + AssistBases() []ManifestEntry + ClearAssistBases() + MergeBackupBases( + ctx context.Context, + other BackupBases, + reasonToKey func(Reason) string, + ) BackupBases +} + +type backupBases struct { + // backups and mergeBases should be modified together as they relate similar + // data. + backups []BackupEntry + mergeBases []ManifestEntry + assistBases []ManifestEntry +} + +func (bb *backupBases) RemoveMergeBaseByManifestID(manifestID manifest.ID) { + idx := slices.IndexFunc( + bb.mergeBases, + func(man ManifestEntry) bool { + return man.ID == manifestID + }) + if idx >= 0 { + bb.mergeBases = slices.Delete(bb.mergeBases, idx, idx+1) + } + + // TODO(ashmrtn): This may not be strictly necessary but is at least easier to + // reason about. + idx = slices.IndexFunc( + bb.assistBases, + func(man ManifestEntry) bool { + return man.ID == manifestID + }) + if idx >= 0 { + bb.assistBases = slices.Delete(bb.assistBases, idx, idx+1) + } + + idx = slices.IndexFunc( + bb.backups, + func(bup BackupEntry) bool { + return bup.SnapshotID == string(manifestID) + }) + if idx >= 0 { + bb.backups = slices.Delete(bb.backups, idx, idx+1) + } +} + +func (bb backupBases) Backups() []BackupEntry { + return slices.Clone(bb.backups) +} + +func (bb *backupBases) MinBackupVersion() int { + min := version.NoBackup + + if bb == nil { + return min + } + + for _, bup := range bb.backups { + if min == version.NoBackup || bup.Version < min { + min = bup.Version + } + } + + return min +} + +func (bb backupBases) MergeBases() []ManifestEntry { + return slices.Clone(bb.mergeBases) +} + +func (bb *backupBases) ClearMergeBases() { + bb.mergeBases = nil + bb.backups = nil +} + +func (bb backupBases) AssistBases() []ManifestEntry { + return slices.Clone(bb.assistBases) +} + +func (bb *backupBases) ClearAssistBases() { + bb.assistBases = nil +} + +// MergeBackupBases reduces the two BackupBases into a single BackupBase. 
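+// The receiver's entries win whenever both sides cover the same reason; see
+// the priority rules below.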
+// Assumes the passed in BackupBases represents a prior backup version (across
+// some migration that disrupts lookup), and that the BackupBases used to call
+// this function contains the current version.
+//
+// reasonToKey should be a function that, given a Reason, will produce some
+// string that represents Reason in the context of the merge operation. For
+// example, to merge BackupBases across a ResourceOwner migration, the Reason's
+// service and category can be used as the key.
+//
+// Selection priority, for each reason key generated by reasonToKey, follows
+// these rules:
+// 1. If the called BackupBases has an entry for a given reason, ignore the
+// other BackupBases matching that reason.
+// 2. If the receiver BackupBases has only AssistBases, look for a matching
+// MergeBase manifest in the passed in BackupBases.
+// 3. If the called BackupBases has no entry for a reason, look for both
+// AssistBases and MergeBases in the passed in BackupBases.
+func (bb *backupBases) MergeBackupBases(
+	ctx context.Context,
+	other BackupBases,
+	reasonToKey func(reason Reason) string,
+) BackupBases {
+	if other == nil || (len(other.MergeBases()) == 0 && len(other.AssistBases()) == 0) {
+		return bb
+	}
+
+	if bb == nil || (len(bb.MergeBases()) == 0 && len(bb.AssistBases()) == 0) {
+		return other
+	}
+
+	toMerge := map[string]struct{}{}
+	assist := map[string]struct{}{}
+
+	// Track the bases in bb.
+	for _, m := range bb.mergeBases {
+		for _, r := range m.Reasons {
+			k := reasonToKey(r)
+
+			toMerge[k] = struct{}{}
+			assist[k] = struct{}{}
+		}
+	}
+
+	for _, m := range bb.assistBases {
+		for _, r := range m.Reasons {
+			k := reasonToKey(r)
+			assist[k] = struct{}{}
+		}
+	}
+
+	var toAdd []ManifestEntry
+
+	// Calculate the set of mergeBases to pull from other into this one.
+	for _, m := range other.MergeBases() {
+		useReasons := []Reason{}
+
+		for _, r := range m.Reasons {
+			k := reasonToKey(r)
+			if _, ok := toMerge[k]; ok {
+				// Assume other contains prior manifest versions.
+				// We don't want to stack a prior version incomplete onto
+				// a current version's complete snapshot.
+				continue
+			}
+
+			useReasons = append(useReasons, r)
+		}
+
+		if len(useReasons) > 0 {
+			m.Reasons = useReasons
+			toAdd = append(toAdd, m)
+		}
+	}
+
+	res := &backupBases{
+		backups:     bb.Backups(),
+		mergeBases:  bb.MergeBases(),
+		assistBases: bb.AssistBases(),
+	}
+
+	// Add new mergeBases and backups.
+	for _, man := range toAdd {
+		// Will get empty string if not found which is fine, it'll fail one of the
+		// other checks.
+		bID, _ := man.GetTag(TagBackupID)
+
+		bup, ok := getBackupByID(other.Backups(), bID)
+		if !ok {
+			logger.Ctx(ctx).Infow(
+				"not unioning snapshot missing backup",
+				"other_manifest_id", man.ID,
+				"other_backup_id", bID)
+
+			continue
+		}
+
+		bup.Reasons = man.Reasons
+
+		res.backups = append(res.backups, bup)
+		res.mergeBases = append(res.mergeBases, man)
+		res.assistBases = append(res.assistBases, man)
+	}
+
+	// Add assistBases from other to this one as needed.
+	for _, m := range other.AssistBases() {
+		useReasons := []Reason{}
+
+		// Assume that all complete manifests in assist overlap with MergeBases.
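+		//
+		// For example: if bb holds a complete email manifest, "email" is already
+		// in both toMerge and assist, so an email checkpoint coming from other
+		// gets skipped below, while a contacts-only checkpoint from other is
+		// kept with its reasons narrowed to contacts.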
+ if len(m.IncompleteReason) == 0 { + continue + } + + for _, r := range m.Reasons { + k := reasonToKey(r) + if _, ok := assist[k]; ok { + // This reason is already covered by either: + // * complete manifest in bb + // * incomplete manifest in bb + // + // If it was already in the assist set then it must be the case that + // it's newer than any complete manifests in other for the same reason. + continue + } + + useReasons = append(useReasons, r) + } + + if len(useReasons) > 0 { + m.Reasons = useReasons + res.assistBases = append(res.assistBases, m) + } + } + + return res +} + +func findNonUniqueManifests( + ctx context.Context, + manifests []ManifestEntry, +) map[manifest.ID]struct{} { + // ReasonKey -> manifests with that reason. + reasons := map[string][]ManifestEntry{} + toDrop := map[manifest.ID]struct{}{} + + for _, man := range manifests { + // Incomplete snapshots are used only for kopia-assisted incrementals. The + // fact that we need this check here makes it seem like this should live in + // the kopia code. However, keeping it here allows for better debugging as + // the kopia code only has access to a path builder which means it cannot + // remove the resource owner from the error/log output. That is also below + // the point where we decide if we should do a full backup or an incremental. + if len(man.IncompleteReason) > 0 { + logger.Ctx(ctx).Infow( + "dropping incomplete manifest", + "manifest_id", man.ID) + + toDrop[man.ID] = struct{}{} + + continue + } + + for _, reason := range man.Reasons { + reasonKey := reason.ResourceOwner + reason.Service.String() + reason.Category.String() + reasons[reasonKey] = append(reasons[reasonKey], man) + } + } + + for reason, mans := range reasons { + ictx := clues.Add(ctx, "reason", reason) + + if len(mans) == 0 { + // Not sure how this would happen but just in case... + continue + } else if len(mans) > 1 { + mIDs := make([]manifest.ID, 0, len(mans)) + for _, m := range mans { + toDrop[m.ID] = struct{}{} + mIDs = append(mIDs, m.ID) + } + + // TODO(ashmrtn): We should actually just remove this reason from the + // manifests and then if they have no reasons remaining drop them from the + // set. + logger.Ctx(ictx).Infow( + "dropping manifests with duplicate reason", + "manifest_ids", mIDs) + + continue + } + } + + return toDrop +} + +func getBackupByID(backups []BackupEntry, bID string) (BackupEntry, bool) { + if len(bID) == 0 { + return BackupEntry{}, false + } + + idx := slices.IndexFunc(backups, func(b BackupEntry) bool { + return string(b.ID) == bID + }) + + if idx < 0 || idx >= len(backups) { + return BackupEntry{}, false + } + + return backups[idx], true +} + +// fixupAndVerify goes through the set of backups and snapshots used for merging +// and ensures: +// - the reasons for selecting merge snapshots are distinct +// - all bases used for merging have a backup model with item and details +// snapshot ID +// +// Backups that have overlapping reasons or that are not complete are removed +// from the set. Dropping these is safe because it only affects how much data we +// pull. On the other hand, *not* dropping them is unsafe as it will muck up +// merging when we add stuff to kopia (possibly multiple entries for the same +// item etc). 
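+//
+// For example, a merge base whose backup model lacks a SnapshotID, or has
+// neither a StreamStoreID nor a legacy DetailsID, is dropped from the backup,
+// merge, and assist sets.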
+func (bb *backupBases) fixupAndVerify(ctx context.Context) { + toDrop := findNonUniqueManifests(ctx, bb.mergeBases) + + var ( + backupsToKeep []BackupEntry + mergeToKeep []ManifestEntry + ) + + for _, man := range bb.mergeBases { + if _, ok := toDrop[man.ID]; ok { + continue + } + + bID, _ := man.GetTag(TagBackupID) + + bup, ok := getBackupByID(bb.backups, bID) + if !ok { + toDrop[man.ID] = struct{}{} + + logger.Ctx(ctx).Info( + "dropping manifest due to missing backup", + "manifest_id", man.ID) + + continue + } + + deetsID := bup.StreamStoreID + if len(deetsID) == 0 { + deetsID = bup.DetailsID + } + + if len(bup.SnapshotID) == 0 || len(deetsID) == 0 { + toDrop[man.ID] = struct{}{} + + logger.Ctx(ctx).Info( + "dropping manifest due to invalid backup", + "manifest_id", man.ID) + + continue + } + + backupsToKeep = append(backupsToKeep, bup) + mergeToKeep = append(mergeToKeep, man) + } + + var assistToKeep []ManifestEntry + + for _, man := range bb.assistBases { + if _, ok := toDrop[man.ID]; ok { + continue + } + + assistToKeep = append(assistToKeep, man) + } + + bb.backups = backupsToKeep + bb.mergeBases = mergeToKeep + bb.assistBases = assistToKeep +} diff --git a/src/internal/kopia/backup_bases_test.go b/src/internal/kopia/backup_bases_test.go new file mode 100644 index 000000000..f902d4e37 --- /dev/null +++ b/src/internal/kopia/backup_bases_test.go @@ -0,0 +1,705 @@ +package kopia + +import ( + "fmt" + "testing" + + "github.com/kopia/kopia/repo/manifest" + "github.com/kopia/kopia/snapshot" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" + + "github.com/alcionai/corso/src/internal/model" + "github.com/alcionai/corso/src/internal/tester" + "github.com/alcionai/corso/src/internal/version" + "github.com/alcionai/corso/src/pkg/backup" + "github.com/alcionai/corso/src/pkg/path" +) + +func makeManifest(id, incmpl, bID string, reasons ...Reason) ManifestEntry { + bIDKey, _ := makeTagKV(TagBackupID) + + return ManifestEntry{ + Manifest: &snapshot.Manifest{ + ID: manifest.ID(id), + IncompleteReason: incmpl, + Tags: map[string]string{bIDKey: bID}, + }, + Reasons: reasons, + } +} + +type BackupBasesUnitSuite struct { + tester.Suite +} + +func TestBackupBasesUnitSuite(t *testing.T) { + suite.Run(t, &BackupBasesUnitSuite{Suite: tester.NewUnitSuite(t)}) +} + +func (suite *BackupBasesUnitSuite) TestMinBackupVersion() { + table := []struct { + name string + bb *backupBases + expectedVersion int + }{ + { + name: "Nil BackupBase", + expectedVersion: version.NoBackup, + }, + { + name: "No Backups", + bb: &backupBases{}, + expectedVersion: version.NoBackup, + }, + { + name: "Unsorted Backups", + bb: &backupBases{ + backups: []BackupEntry{ + { + Backup: &backup.Backup{ + Version: 4, + }, + }, + { + Backup: &backup.Backup{ + Version: 0, + }, + }, + { + Backup: &backup.Backup{ + Version: 2, + }, + }, + }, + }, + expectedVersion: 0, + }, + } + for _, test := range table { + suite.Run(test.name, func() { + assert.Equal(suite.T(), test.expectedVersion, test.bb.MinBackupVersion()) + }) + } +} + +func (suite *BackupBasesUnitSuite) TestRemoveMergeBaseByManifestID() { + backups := []BackupEntry{ + {Backup: &backup.Backup{SnapshotID: "1"}}, + {Backup: &backup.Backup{SnapshotID: "2"}}, + {Backup: &backup.Backup{SnapshotID: "3"}}, + } + + merges := []ManifestEntry{ + makeManifest("1", "", ""), + makeManifest("2", "", ""), + makeManifest("3", "", ""), + } + + expected := &backupBases{ + backups: []BackupEntry{backups[0], backups[1]}, + mergeBases: []ManifestEntry{merges[0], merges[1]}, + 
assistBases: []ManifestEntry{merges[0], merges[1]}, + } + + delID := manifest.ID("3") + + table := []struct { + name string + // Below indices specify which items to add from the defined sets above. + backup []int + merge []int + assist []int + }{ + { + name: "Not In Bases", + backup: []int{0, 1}, + merge: []int{0, 1}, + assist: []int{0, 1}, + }, + { + name: "Different Indexes", + backup: []int{2, 0, 1}, + merge: []int{0, 2, 1}, + assist: []int{0, 1, 2}, + }, + { + name: "First Item", + backup: []int{2, 0, 1}, + merge: []int{2, 0, 1}, + assist: []int{2, 0, 1}, + }, + { + name: "Middle Item", + backup: []int{0, 2, 1}, + merge: []int{0, 2, 1}, + assist: []int{0, 2, 1}, + }, + { + name: "Final Item", + backup: []int{0, 1, 2}, + merge: []int{0, 1, 2}, + assist: []int{0, 1, 2}, + }, + { + name: "Only In Backups", + backup: []int{0, 1, 2}, + merge: []int{0, 1}, + assist: []int{0, 1}, + }, + { + name: "Only In Merges", + backup: []int{0, 1}, + merge: []int{0, 1, 2}, + assist: []int{0, 1}, + }, + { + name: "Only In Assists", + backup: []int{0, 1}, + merge: []int{0, 1}, + assist: []int{0, 1, 2}, + }, + } + + for _, test := range table { + suite.Run(test.name, func() { + t := suite.T() + bb := &backupBases{} + + for _, i := range test.backup { + bb.backups = append(bb.backups, backups[i]) + } + + for _, i := range test.merge { + bb.mergeBases = append(bb.mergeBases, merges[i]) + } + + for _, i := range test.assist { + bb.assistBases = append(bb.assistBases, merges[i]) + } + + bb.RemoveMergeBaseByManifestID(delID) + AssertBackupBasesEqual(t, expected, bb) + }) + } +} + +func (suite *BackupBasesUnitSuite) TestClearMergeBases() { + bb := &backupBases{ + backups: make([]BackupEntry, 2), + mergeBases: make([]ManifestEntry, 2), + } + + bb.ClearMergeBases() + assert.Empty(suite.T(), bb.Backups()) + assert.Empty(suite.T(), bb.MergeBases()) +} + +func (suite *BackupBasesUnitSuite) TestClearAssistBases() { + bb := &backupBases{assistBases: make([]ManifestEntry, 2)} + + bb.ClearAssistBases() + assert.Empty(suite.T(), bb.AssistBases()) +} + +func (suite *BackupBasesUnitSuite) TestMergeBackupBases() { + ro := "resource_owner" + + type testInput struct { + id int + incomplete bool + cat []path.CategoryType + } + + // Make a function so tests can modify things without messing with each other. + makeBackupBases := func(ti []testInput) *backupBases { + res := &backupBases{} + + for _, i := range ti { + baseID := fmt.Sprintf("id%d", i.id) + ir := "" + + if i.incomplete { + ir = "checkpoint" + } + + reasons := make([]Reason, 0, len(i.cat)) + + for _, c := range i.cat { + reasons = append(reasons, Reason{ + ResourceOwner: ro, + Service: path.ExchangeService, + Category: c, + }) + } + + m := makeManifest(baseID, ir, "b"+baseID, reasons...) 
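+
+			// Every generated manifest becomes an assist base; complete ones
+			// (no "checkpoint" marker) also gain a merge base entry and a
+			// matching backup model below.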
+ res.assistBases = append(res.assistBases, m) + + if i.incomplete { + continue + } + + b := BackupEntry{ + Backup: &backup.Backup{ + BaseModel: model.BaseModel{ID: model.StableID("b" + baseID)}, + SnapshotID: baseID, + StreamStoreID: "ss" + baseID, + }, + Reasons: reasons, + } + + res.backups = append(res.backups, b) + res.mergeBases = append(res.mergeBases, m) + } + + return res + } + + table := []struct { + name string + bb []testInput + other []testInput + expect []testInput + }{ + { + name: "Other Empty", + bb: []testInput{ + {cat: []path.CategoryType{path.EmailCategory}}, + }, + expect: []testInput{ + {cat: []path.CategoryType{path.EmailCategory}}, + }, + }, + { + name: "BB Empty", + other: []testInput{ + {cat: []path.CategoryType{path.EmailCategory}}, + }, + expect: []testInput{ + {cat: []path.CategoryType{path.EmailCategory}}, + }, + }, + { + name: "Other overlaps Complete And Incomplete", + bb: []testInput{ + {cat: []path.CategoryType{path.EmailCategory}}, + { + id: 1, + cat: []path.CategoryType{path.EmailCategory}, + incomplete: true, + }, + }, + other: []testInput{ + { + id: 2, + cat: []path.CategoryType{path.EmailCategory}, + }, + { + id: 3, + cat: []path.CategoryType{path.EmailCategory}, + incomplete: true, + }, + }, + expect: []testInput{ + {cat: []path.CategoryType{path.EmailCategory}}, + { + id: 1, + cat: []path.CategoryType{path.EmailCategory}, + incomplete: true, + }, + }, + }, + { + name: "Other Overlaps Complete", + bb: []testInput{ + {cat: []path.CategoryType{path.EmailCategory}}, + }, + other: []testInput{ + { + id: 2, + cat: []path.CategoryType{path.EmailCategory}, + }, + }, + expect: []testInput{ + {cat: []path.CategoryType{path.EmailCategory}}, + }, + }, + { + name: "Other Overlaps Incomplete", + bb: []testInput{ + { + id: 1, + cat: []path.CategoryType{path.EmailCategory}, + incomplete: true, + }, + }, + other: []testInput{ + { + id: 2, + cat: []path.CategoryType{path.EmailCategory}, + }, + { + id: 3, + cat: []path.CategoryType{path.EmailCategory}, + incomplete: true, + }, + }, + expect: []testInput{ + { + id: 1, + cat: []path.CategoryType{path.EmailCategory}, + incomplete: true, + }, + { + id: 2, + cat: []path.CategoryType{path.EmailCategory}, + }, + }, + }, + { + name: "Other Disjoint", + bb: []testInput{ + {cat: []path.CategoryType{path.EmailCategory}}, + { + id: 1, + cat: []path.CategoryType{path.EmailCategory}, + incomplete: true, + }, + }, + other: []testInput{ + { + id: 2, + cat: []path.CategoryType{path.ContactsCategory}, + }, + { + id: 3, + cat: []path.CategoryType{path.ContactsCategory}, + incomplete: true, + }, + }, + expect: []testInput{ + {cat: []path.CategoryType{path.EmailCategory}}, + { + id: 1, + cat: []path.CategoryType{path.EmailCategory}, + incomplete: true, + }, + { + id: 2, + cat: []path.CategoryType{path.ContactsCategory}, + }, + { + id: 3, + cat: []path.CategoryType{path.ContactsCategory}, + incomplete: true, + }, + }, + }, + { + name: "Other Reduced Reasons", + bb: []testInput{ + {cat: []path.CategoryType{path.EmailCategory}}, + { + id: 1, + cat: []path.CategoryType{path.EmailCategory}, + incomplete: true, + }, + }, + other: []testInput{ + { + id: 2, + cat: []path.CategoryType{ + path.EmailCategory, + path.ContactsCategory, + }, + }, + { + id: 3, + cat: []path.CategoryType{ + path.EmailCategory, + path.ContactsCategory, + }, + incomplete: true, + }, + }, + expect: []testInput{ + {cat: []path.CategoryType{path.EmailCategory}}, + { + id: 1, + cat: []path.CategoryType{path.EmailCategory}, + incomplete: true, + }, + { + id: 2, + cat: 
[]path.CategoryType{path.ContactsCategory}, + }, + { + id: 3, + cat: []path.CategoryType{path.ContactsCategory}, + incomplete: true, + }, + }, + }, + } + + for _, test := range table { + suite.Run(test.name, func() { + t := suite.T() + + bb := makeBackupBases(test.bb) + other := makeBackupBases(test.other) + expect := makeBackupBases(test.expect) + + ctx, flush := tester.NewContext(t) + defer flush() + + got := bb.MergeBackupBases( + ctx, + other, + func(reason Reason) string { + return reason.Service.String() + reason.Category.String() + }) + AssertBackupBasesEqual(t, expect, got) + }) + } +} + +func (suite *BackupBasesUnitSuite) TestFixupAndVerify() { + ro := "resource_owner" + + makeMan := func(pct path.CategoryType, id, incmpl, bID string) ManifestEntry { + reason := Reason{ + ResourceOwner: ro, + Service: path.ExchangeService, + Category: pct, + } + + return makeManifest(id, incmpl, bID, reason) + } + + // Make a function so tests can modify things without messing with each other. + validMail1 := func() *backupBases { + return &backupBases{ + backups: []BackupEntry{ + { + Backup: &backup.Backup{ + BaseModel: model.BaseModel{ + ID: "bid1", + }, + SnapshotID: "id1", + StreamStoreID: "ssid1", + }, + }, + }, + mergeBases: []ManifestEntry{ + makeMan(path.EmailCategory, "id1", "", "bid1"), + }, + assistBases: []ManifestEntry{ + makeMan(path.EmailCategory, "id1", "", "bid1"), + }, + } + } + + table := []struct { + name string + bb *backupBases + expect BackupBases + }{ + { + name: "empty BaseBackups", + bb: &backupBases{}, + }, + { + name: "Merge Base Without Backup", + bb: func() *backupBases { + res := validMail1() + res.backups = nil + + return res + }(), + }, + { + name: "Backup Missing Snapshot ID", + bb: func() *backupBases { + res := validMail1() + res.backups[0].SnapshotID = "" + + return res + }(), + }, + { + name: "Backup Missing Deets ID", + bb: func() *backupBases { + res := validMail1() + res.backups[0].StreamStoreID = "" + + return res + }(), + }, + { + name: "Incomplete Snapshot", + bb: func() *backupBases { + res := validMail1() + res.mergeBases[0].IncompleteReason = "ir" + res.assistBases[0].IncompleteReason = "ir" + + return res + }(), + }, + { + name: "Duplicate Reason", + bb: func() *backupBases { + res := validMail1() + res.mergeBases[0].Reasons = append( + res.mergeBases[0].Reasons, + res.mergeBases[0].Reasons[0]) + res.assistBases = res.mergeBases + + return res + }(), + }, + { + name: "Single Valid Entry", + bb: validMail1(), + expect: validMail1(), + }, + { + name: "Single Valid Entry With Incomplete Assist With Same Reason", + bb: func() *backupBases { + res := validMail1() + res.assistBases = append( + res.assistBases, + makeMan(path.EmailCategory, "id2", "checkpoint", "bid2")) + + return res + }(), + expect: func() *backupBases { + res := validMail1() + res.assistBases = append( + res.assistBases, + makeMan(path.EmailCategory, "id2", "checkpoint", "bid2")) + + return res + }(), + }, + { + name: "Single Valid Entry With Backup With Old Deets ID", + bb: func() *backupBases { + res := validMail1() + res.backups[0].DetailsID = res.backups[0].StreamStoreID + res.backups[0].StreamStoreID = "" + + return res + }(), + expect: func() *backupBases { + res := validMail1() + res.backups[0].DetailsID = res.backups[0].StreamStoreID + res.backups[0].StreamStoreID = "" + + return res + }(), + }, + { + name: "Single Valid Entry With Multiple Reasons", + bb: func() *backupBases { + res := validMail1() + res.mergeBases[0].Reasons = append( + res.mergeBases[0].Reasons, + Reason{ + 
ResourceOwner: ro, + Service: path.ExchangeService, + Category: path.ContactsCategory, + }) + res.assistBases = res.mergeBases + + return res + }(), + expect: func() *backupBases { + res := validMail1() + res.mergeBases[0].Reasons = append( + res.mergeBases[0].Reasons, + Reason{ + ResourceOwner: ro, + Service: path.ExchangeService, + Category: path.ContactsCategory, + }) + res.assistBases = res.mergeBases + + return res + }(), + }, + { + name: "Two Entries Overlapping Reasons", + bb: func() *backupBases { + res := validMail1() + res.mergeBases = append( + res.mergeBases, + makeMan(path.EmailCategory, "id2", "", "bid2")) + res.assistBases = res.mergeBases + + return res + }(), + }, + { + name: "Three Entries One Invalid", + bb: func() *backupBases { + res := validMail1() + res.backups = append( + res.backups, + BackupEntry{ + Backup: &backup.Backup{ + BaseModel: model.BaseModel{ + ID: "bid2", + }, + }, + }, + BackupEntry{ + Backup: &backup.Backup{ + BaseModel: model.BaseModel{ + ID: "bid3", + }, + SnapshotID: "id3", + StreamStoreID: "ssid3", + }, + }) + res.mergeBases = append( + res.mergeBases, + makeMan(path.ContactsCategory, "id2", "checkpoint", "bid2"), + makeMan(path.EventsCategory, "id3", "", "bid3")) + res.assistBases = res.mergeBases + + return res + }(), + expect: func() *backupBases { + res := validMail1() + res.backups = append( + res.backups, + BackupEntry{ + Backup: &backup.Backup{ + BaseModel: model.BaseModel{ + ID: "bid3", + }, + SnapshotID: "id3", + StreamStoreID: "ssid3", + }, + }) + res.mergeBases = append( + res.mergeBases, + makeMan(path.EventsCategory, "id3", "", "bid3")) + res.assistBases = res.mergeBases + + return res + }(), + }, + } + for _, test := range table { + suite.Run(test.name, func() { + ctx, flush := tester.NewContext(suite.T()) + defer flush() + + test.bb.fixupAndVerify(ctx) + AssertBackupBasesEqual(suite.T(), test.expect, test.bb) + }) + } +} diff --git a/src/internal/kopia/base_finder.go b/src/internal/kopia/base_finder.go index b01c4401a..ebe8f3287 100644 --- a/src/internal/kopia/base_finder.go +++ b/src/internal/kopia/base_finder.go @@ -47,12 +47,6 @@ func (r Reason) Key() string { return r.ResourceOwner + r.Service.String() + r.Category.String() } -type backupBases struct { - backups []BackupEntry - mergeBases []ManifestEntry - assistBases []ManifestEntry -} - type BackupEntry struct { *backup.Backup Reasons []Reason diff --git a/src/internal/kopia/base_finder_test.go b/src/internal/kopia/base_finder_test.go index 2382063cd..ca84f5d94 100644 --- a/src/internal/kopia/base_finder_test.go +++ b/src/internal/kopia/base_finder_test.go @@ -342,8 +342,8 @@ func (suite *BaseFinderUnitSuite) TestNoResult_NoBackupsOrSnapshots() { bb, err := bf.findBases(ctx, reasons, nil) assert.NoError(t, err, "getting bases: %v", clues.ToCore(err)) - assert.Empty(t, bb.mergeBases) - assert.Empty(t, bb.assistBases) + assert.Empty(t, bb.MergeBases()) + assert.Empty(t, bb.AssistBases()) } func (suite *BaseFinderUnitSuite) TestNoResult_ErrorListingSnapshots() { @@ -366,8 +366,8 @@ func (suite *BaseFinderUnitSuite) TestNoResult_ErrorListingSnapshots() { bb, err := bf.findBases(ctx, reasons, nil) assert.NoError(t, err, "getting bases: %v", clues.ToCore(err)) - assert.Empty(t, bb.mergeBases) - assert.Empty(t, bb.assistBases) + assert.Empty(t, bb.MergeBases()) + assert.Empty(t, bb.AssistBases()) } func (suite *BaseFinderUnitSuite) TestGetBases() { @@ -831,24 +831,24 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { checkBackupEntriesMatch( t, - bb.backups, + bb.Backups(), 
				test.backupData,
				test.expectedBaseReasons)

			checkManifestEntriesMatch(
				t,
-				bb.mergeBases,
+				bb.MergeBases(),
				test.manifestData,
				test.expectedBaseReasons)

			checkManifestEntriesMatch(
				t,
-				bb.assistBases,
+				bb.AssistBases(),
				test.manifestData,
				test.expectedAssistManifestReasons)
		})
	}
}

-func (suite *BaseFinderUnitSuite) TestFetchPrevSnapshots_CustomTags() {
+func (suite *BaseFinderUnitSuite) TestFindBases_CustomTags() {
	manifestData := []manifestInfo{
		newManifestInfo2(
			testID1,
@@ -926,7 +926,7 @@

			checkManifestEntriesMatch(
				t,
-				bb.mergeBases,
+				bb.MergeBases(),
				manifestData,
				test.expectedIdxs)
		})
diff --git a/src/internal/kopia/mock_backup_base.go b/src/internal/kopia/mock_backup_base.go
new file mode 100644
index 000000000..84743486e
--- /dev/null
+++ b/src/internal/kopia/mock_backup_base.go
@@ -0,0 +1,63 @@
+package kopia
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func AssertBackupBasesEqual(t *testing.T, expect, got BackupBases) {
+	if expect == nil && got == nil {
+		return
+	}
+
+	if expect == nil {
+		assert.Empty(t, got.Backups(), "backups")
+		assert.Empty(t, got.MergeBases(), "merge bases")
+		assert.Empty(t, got.AssistBases(), "assist bases")
+
+		return
+	}
+
+	if got == nil {
+		if len(expect.Backups()) > 0 || len(expect.MergeBases()) > 0 || len(expect.AssistBases()) > 0 {
+			assert.Fail(t, "got was nil but expected non-nil result", expect)
+		}
+
+		return
+	}
+
+	assert.ElementsMatch(t, expect.Backups(), got.Backups(), "backups")
+	assert.ElementsMatch(t, expect.MergeBases(), got.MergeBases(), "merge bases")
+	assert.ElementsMatch(t, expect.AssistBases(), got.AssistBases(), "assist bases")
+}
+
+func NewMockBackupBases() *MockBackupBases {
+	return &MockBackupBases{backupBases: &backupBases{}}
+}
+
+type MockBackupBases struct {
+	*backupBases
+}
+
+func (bb *MockBackupBases) WithBackups(b ...BackupEntry) *MockBackupBases {
+	bb.backupBases.backups = append(bb.Backups(), b...)
+	return bb
+}
+
+func (bb *MockBackupBases) WithMergeBases(m ...ManifestEntry) *MockBackupBases {
+	bb.backupBases.mergeBases = append(bb.MergeBases(), m...)
+	bb.backupBases.assistBases = append(bb.AssistBases(), m...)
+
+	return bb
+}
+
+func (bb *MockBackupBases) WithAssistBases(m ...ManifestEntry) *MockBackupBases {
+	bb.backupBases.assistBases = append(bb.AssistBases(), m...)
+	return bb
+}
+
+func (bb *MockBackupBases) ClearMockAssistBases() *MockBackupBases {
+	bb.backupBases.ClearAssistBases()
+	return bb
+}
From e7d0eea32b57c2e4c103626238d07857eb4e9598 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 13 Jun 2023 11:48:17 +0000
Subject: [PATCH 13/41] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Bump=20github.com/aw?=
 =?UTF-8?q?s/aws-sdk-go=20from=201.44.276=20to=201.44.280=20in=20/src=20(#?=
 =?UTF-8?q?3588)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Bumps [github.com/aws/aws-sdk-go](https://github.com/aws/aws-sdk-go) from 1.44.276 to 1.44.280.
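The same bump can be reproduced by hand, assuming the standard Go toolchain: run `go get github.com/aws/aws-sdk-go@v1.44.280` followed by `go mod tidy` inside /src; the resulting go.mod and go.sum deltas appear in the diff below.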
Release notes

Sourced from github.com/aws/aws-sdk-go's releases.

Release v1.44.280 (2023-06-09)

Service Client Updates

  • service/acm-pca: Updates service documentation
  • service/connect: Updates service API, documentation, and paginators

Release v1.44.279 (2023-06-08)

Service Client Updates

  • service/athena: Updates service API and documentation
    • You can now define custom spark properties at start of the session for use cases like cluster encryption, table formats, and general Spark tuning.
  • service/comprehendmedical: Updates service API
  • service/payment-cryptography: Adds new service
  • service/payment-cryptography-data: Adds new service
  • service/servicecatalog: Updates service API and documentation
    • New parameter added in ServiceCatalog DescribeProvisioningArtifact api - IncludeProvisioningArtifactParameters. This parameter can be used to return information about the parameters used to provision the product
  • service/timestream-write: Updates service API and documentation

Release v1.44.278 (2023-06-07)

Service Client Updates

  • service/cloudformation: Updates service API and documentation
    • AWS CloudFormation StackSets is updating the deployment experience for all stackset operations to skip suspended AWS accounts during deployments. StackSets will skip target AWS accounts that are suspended and set the Detailed Status of the corresponding stack instances as SKIPPED_SUSPENDED_ACCOUNT
  • service/customer-profiles: Updates service API, documentation, and paginators
  • service/directconnect: Updates service documentation
    • This update corrects the jumbo frames mtu values from 9100 to 8500 for transit virtual interfaces.
  • service/emr-containers: Updates service API and documentation
  • service/iotdeviceadvisor: Updates service API and documentation
  • service/logs: Updates service API and documentation
    • This change adds support for account level data protection policies using 3 new APIs, PutAccountPolicy, DeleteAccountPolicy and DescribeAccountPolicy. DescribeLogGroup API has been modified to indicate if account level policy is applied to the LogGroup via "inheritedProperties" list in the response.

Release v1.44.277 (2023-06-06)

Service Client Updates

  • service/connect: Updates service documentation
  • service/elasticmapreduce: Updates service API and documentation
  • This release provides customers the ability to specify an allocation strategy amongst PRICE_CAPACITY_OPTIMIZED, CAPACITY_OPTIMIZED, LOWEST_PRICE, DIVERSIFIED for Spot instances in an Instance Fleet cluster. This enables customers to choose an allocation strategy best suited for their workload.
  • service/iam: Updates service API and documentation
    • This release updates the AccountAlias regex pattern with the same length restrictions enforced by the length constraint.
  • service/inspector2: Updates service API and documentation
  • service/iot: Updates service API, documentation, and paginators
    • Adding IoT Device Management Software Package Catalog APIs to register, store, and report system software packages, along with their versions and metadata in a centralized location.
  • service/iot-data: Updates service API
  • service/models.lex.v2: Updates service API, documentation, and paginators
  • service/quicksight: Updates service API and documentation
    • QuickSight support for pivot table field collapse state, radar chart range scale and multiple scope options in conditional formatting.

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=github.com/aws/aws-sdk-go&package-manager=go_modules&previous-version=1.44.276&new-version=1.44.280)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) You can trigger a rebase of this PR by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
---
 src/go.mod | 2 +-
 src/go.sum | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/go.mod b/src/go.mod
index 11e2deee1..a31bc701d 100644
--- a/src/go.mod
+++ b/src/go.mod
@@ -8,7 +8,7 @@ require (
	github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0
	github.com/alcionai/clues v0.0.0-20230406223931-f48777f4773c
	github.com/armon/go-metrics v0.4.1
-	github.com/aws/aws-sdk-go v1.44.276
+	github.com/aws/aws-sdk-go v1.44.280
	github.com/aws/aws-xray-sdk-go v1.8.1
	github.com/cenkalti/backoff/v4 v4.2.1
	github.com/google/uuid v1.3.0
diff --git a/src/go.sum b/src/go.sum
index f064f821b..06257750a 100644
--- a/src/go.sum
+++ b/src/go.sum
@@ -66,8 +66,8 @@
github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY
github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA=
github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4=
-github.com/aws/aws-sdk-go v1.44.276 h1:ywPlx9C5Yc482dUgAZ9bHpQ6onVvJvYE9FJWsNDCEy0=
-github.com/aws/aws-sdk-go v1.44.276/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
+github.com/aws/aws-sdk-go v1.44.280 h1:UYl/yxhDxP8naok6ftWyQ9/9ZzNwjC9dvEs/j8BkGhw=
+github.com/aws/aws-sdk-go v1.44.280/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
github.com/aws/aws-xray-sdk-go v1.8.1 h1:O4pXV+hnCskaamGsZnFpzHyAmgPGusBMN6i7nnsy0Fo=
github.com/aws/aws-xray-sdk-go v1.8.1/go.mod h1:wMmVYzej3sykAttNBkXQHK/+clAPWTOrPiajEk7Cp3A=
github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8=
From b9766d8bafe94d90f1c04ab1e9565c21bfe0c50c Mon Sep 17 00:00:00 2001
From: Abin Simon
Date: Tue, 13 Jun 2023 17:56:30 +0530
Subject: [PATCH 14/41] Add note about sharing links in known issues (#3574)

Surface the note about link shares mentioned in the
[changelog](https://github.com/alcionai/corso/blob/4545feeb802f1fb9fb0e9faeb891a4a9d592d070/CHANGELOG.md?plain=1#L163)
in known issues.

---

#### Does this PR need a docs update or release note?

- [ ] :white_check_mark: Yes, it's included
- [ ] :clock1: Yes, but in a later PR
- [ ] :no_entry: No

#### Type of change

- [ ] :sunflower: Feature
- [ ] :bug: Bugfix
- [ ] :world_map: Documentation
- [ ] :robot: Supportability/Tests
- [ ] :computer: CI/Deployment
- [ ] :broom: Tech Debt/Cleanup

#### Issue(s)

* #

#### Test Plan

- [ ] :muscle: Manual
- [ ] :zap: Unit test
- [ ] :green_heart: E2E
---
 website/docs/support/known-issues.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/website/docs/support/known-issues.md b/website/docs/support/known-issues.md
index 04190b723..6f98071fb 100644
--- a/website/docs/support/known-issues.md
+++ b/website/docs/support/known-issues.md
@@ -24,3 +24,5 @@ Below is a list of known Corso issues and limitations:
  included in backup and restore.

* SharePoint document library data can't be restored after the library has been deleted.
+
+* Sharing information of items in OneDrive/SharePoint that use sharing links isn't backed up or restored.
From c74539338e93d9998d21eba838a112c75e0c82a6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 13 Jun 2023 18:00:17 +0000 Subject: [PATCH 15/41] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Bump=20github.com/aw?= =?UTF-8?q?s/aws-sdk-go=20from=201.44.280=20to=201.44.281=20in=20/src=20(#?= =?UTF-8?q?3608)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [github.com/aws/aws-sdk-go](https://github.com/aws/aws-sdk-go) from 1.44.280 to 1.44.281.
Release notes

Sourced from github.com/aws/aws-sdk-go's releases.

Release v1.44.281 (2023-06-12)

Service Client Updates

  • service/amplifyuibuilder: Updates service API, documentation, and paginators
  • service/dynamodb: Updates service API, documentation, waiters, paginators, and examples
    • Documentation updates for DynamoDB
  • service/fsx: Updates service API and documentation
  • service/opensearch: Updates service API and documentation
  • service/rekognition: Updates service API, documentation, paginators, and examples
    • This release adds support for improved accuracy with user vector in Amazon Rekognition Face Search. Adds new APIs: AssociateFaces, CreateUser, DeleteUser, DisassociateFaces, ListUsers, SearchUsers, SearchUsersByImage. Also adds new face metadata that can be stored: user vector.
  • service/sagemaker: Updates service API and documentation
    • Sagemaker Neo now supports compilation for inferentia2 (ML_INF2) and Trainium1 (ML_TRN1) as available targets. With these devices, you can run your workloads at highest performance with lowest cost. inferentia2 (ML_INF2) is available in CMH and Trainium1 (ML_TRN1) is available in IAD currently
  • service/streams.dynamodb: Updates service documentation
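The Rekognition entry above introduces new user-vector APIs. As a rough illustration only (not part of this dependency bump), a CreateUser call through the v1 SDK might look like the sketch below; the region, collection ID, and user ID are placeholder assumptions.

```go
package sketch

import (
	"context"
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/rekognition"
)

// createFaceUser creates a user entity inside an existing face collection
// using CreateUser, one of the APIs added in this SDK release.
func createFaceUser(ctx context.Context) error {
	sess, err := session.NewSession(aws.NewConfig().WithRegion("us-east-1")) // placeholder region
	if err != nil {
		return err
	}

	svc := rekognition.New(sess)

	out, err := svc.CreateUserWithContext(ctx, &rekognition.CreateUserInput{
		CollectionId: aws.String("example-collection"), // placeholder
		UserId:       aws.String("example-user"),       // placeholder
	})
	if err != nil {
		return err
	}

	fmt.Println(out)

	return nil
}
```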
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=github.com/aws/aws-sdk-go&package-manager=go_modules&previous-version=1.44.280&new-version=1.44.281)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)

You can trigger a rebase of this PR by commenting `@dependabot rebase`.

[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)

---

Dependabot commands and options

You can trigger Dependabot actions by commenting on this PR:

- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
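Since these commands are ordinary PR comments, they can also be posted programmatically. Below is a minimal sketch, assuming a `GITHUB_TOKEN` environment variable with permission to comment; the PR number (#3608) is taken from this patch's subject, and PR comments go through GitHub's issues comments endpoint.

```go
package main

import (
	"bytes"
	"encoding/json"
	"log"
	"net/http"
	"os"
)

func main() {
	// The comment body is the Dependabot command itself.
	payload, err := json.Marshal(map[string]string{"body": "@dependabot rebase"})
	if err != nil {
		log.Fatal(err)
	}

	// PR comments are created through the issues comments endpoint.
	req, err := http.NewRequest(
		http.MethodPost,
		"https://api.github.com/repos/alcionai/corso/issues/3608/comments",
		bytes.NewReader(payload))
	if err != nil {
		log.Fatal(err)
	}

	req.Header.Set("Authorization", "Bearer "+os.Getenv("GITHUB_TOKEN"))
	req.Header.Set("Accept", "application/vnd.github+json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	log.Println("status:", resp.Status)
}
```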
---
 src/go.mod | 2 +-
 src/go.sum | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/go.mod b/src/go.mod
index a31bc701d..71d8dc685 100644
--- a/src/go.mod
+++ b/src/go.mod
@@ -8,7 +8,7 @@ require (
 	github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0
 	github.com/alcionai/clues v0.0.0-20230406223931-f48777f4773c
 	github.com/armon/go-metrics v0.4.1
-	github.com/aws/aws-sdk-go v1.44.280
+	github.com/aws/aws-sdk-go v1.44.281
 	github.com/aws/aws-xray-sdk-go v1.8.1
 	github.com/cenkalti/backoff/v4 v4.2.1
 	github.com/google/uuid v1.3.0
diff --git a/src/go.sum b/src/go.sum
index 06257750a..b392709cc 100644
--- a/src/go.sum
+++ b/src/go.sum
@@ -66,8 +66,8 @@ github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY
 github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
 github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA=
 github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4=
-github.com/aws/aws-sdk-go v1.44.280 h1:UYl/yxhDxP8naok6ftWyQ9/9ZzNwjC9dvEs/j8BkGhw=
-github.com/aws/aws-sdk-go v1.44.280/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
+github.com/aws/aws-sdk-go v1.44.281 h1:z/ptheJvINaIAsKXthxONM+toTKw2pxyk700Hfm6yUw=
+github.com/aws/aws-sdk-go v1.44.281/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
 github.com/aws/aws-xray-sdk-go v1.8.1 h1:O4pXV+hnCskaamGsZnFpzHyAmgPGusBMN6i7nnsy0Fo=
 github.com/aws/aws-xray-sdk-go v1.8.1/go.mod h1:wMmVYzej3sykAttNBkXQHK/+clAPWTOrPiajEk7Cp3A=
 github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8=

From 2f6d7319931245cfc327f41787a703bdaabf71cf Mon Sep 17 00:00:00 2001
From: Keepers
Date: Tue, 13 Jun 2023 12:35:39 -0600
Subject: [PATCH 16/41] rename connector -> m365 (#3600)

Renames /internal/connector to /internal/m365. No logic changes in this
PR; only the directory rename, import renames, and one linter shadowing
rename.

---

#### Does this PR need a docs update or release note?
- [x] :no_entry: No #### Type of change - [x] :broom: Tech Debt/Cleanup #### Issue(s) * #1996 #### Test Plan - [x] :zap: Unit test - [x] :green_heart: E2E --- src/.golangci.yml | 2 +- src/cli/backup/backup.go | 2 +- src/cli/backup/exchange_e2e_test.go | 2 +- src/cli/restore/exchange_e2e_test.go | 2 +- src/cmd/factory/impl/common.go | 69 +- src/cmd/factory/impl/exchange.go | 16 +- src/cmd/factory/impl/onedrive.go | 6 +- src/cmd/factory/impl/sharepoint.go | 6 +- src/cmd/getM365/onedrive/get_item.go | 2 +- src/cmd/sanity_test/sanity_tests.go | 2 +- .../connector/exchange/data_collections.go | 317 ------- .../exchange/data_collections_test.go | 761 ----------------- .../exchange/folder_resolver_test.go | 129 --- .../connector/exchange/iterators_test.go | 47 -- src/internal/kopia/data_collection_test.go | 2 +- src/internal/kopia/merge_collection_test.go | 2 +- src/internal/kopia/upload.go | 4 +- src/internal/kopia/upload_test.go | 2 +- src/internal/kopia/wrapper_test.go | 4 +- .../data_collections.go => m365/backup.go} | 108 +-- .../backup_test.go} | 60 +- .../graph_connector.go => m365/controller.go} | 126 ++- .../controller_test.go} | 169 ++-- .../discovery/discovery.go | 2 +- .../discovery/discovery_test.go | 2 +- .../exchange/attachment.go | 0 .../{connector => m365}/exchange/attendees.go | 0 .../exchange/backup.go} | 308 ++++++- .../exchange/backup_test.go} | 774 +++++++++++++++++- .../exchange/cache_container.go | 2 +- .../exchange/collection.go} | 6 +- .../exchange/collection_test.go} | 21 +- .../{connector => m365}/exchange/consts.go | 0 .../exchange/contact_container_cache.go} | 12 +- .../exchange/contacts_backup.go | 4 +- .../exchange/contacts_restore.go | 4 +- .../exchange/contacts_restore_test.go | 0 .../exchange/container_resolver.go | 2 +- .../exchange/container_resolver_test.go | 134 ++- .../exchange/event_container_cache.go} | 14 +- .../exchange/events_backup.go | 4 +- .../exchange/events_restore.go | 4 +- .../exchange/events_restore_test.go | 0 .../{connector => m365}/exchange/handlers.go | 2 +- .../exchange/mail_backup.go | 4 +- .../exchange/mail_container_cache.go} | 14 +- .../exchange/mail_container_cache_test.go} | 2 +- .../exchange/mail_restore.go | 4 +- .../exchange/mail_restore_test.go | 0 .../exchange/mock/collections.go | 0 .../exchange/mock/contact.go | 0 .../exchange/mock/event.go | 0 .../{connector => m365}/exchange/mock/mail.go | 0 .../exchange/mock/mock_test.go | 0 .../exchange/restore.go} | 18 +- .../exchange/restore_test.go | 2 +- .../exchange/testdata/handlers.go | 4 +- .../{connector => m365}/exchange/transform.go | 0 .../exchange/transform_test.go | 2 +- .../graph/betasdk/beta_client.go | 2 +- .../graph/betasdk/beta_client_test.go | 2 +- .../graph/betasdk/kiota-lock.json | 2 +- .../graph/betasdk/models/base_item.go | 0 .../graph/betasdk/models/canvas_layout.go | 0 .../graph/betasdk/models/canvas_layoutable.go | 0 .../betasdk/models/horizontal_section.go | 0 .../horizontal_section_collection_response.go | 0 ...izontal_section_collection_responseable.go | 0 .../models/horizontal_section_column.go | 0 ...ntal_section_column_collection_response.go | 0 ..._section_column_collection_responseable.go | 0 .../models/horizontal_section_columnable.go | 0 .../models/horizontal_section_layout_type.go | 0 .../betasdk/models/horizontal_sectionable.go | 0 .../models/meta_data_key_string_pair.go | 0 ...ata_key_string_pair_collection_response.go | 0 ...key_string_pair_collection_responseable.go | 0 .../models/meta_data_key_string_pairable.go | 0 
.../models/meta_data_key_value_pair.go | 0 ...data_key_value_pair_collection_response.go | 0 ..._key_value_pair_collection_responseable.go | 0 .../models/meta_data_key_value_pairable.go | 0 .../graph/betasdk/models/page_layout_type.go | 0 .../betasdk/models/page_promotion_type.go | 0 .../graph/betasdk/models/publication_facet.go | 0 .../betasdk/models/publication_facetable.go | 0 .../graph/betasdk/models/reactions_facet.go | 0 .../betasdk/models/reactions_facetable.go | 0 .../betasdk/models/section_emphasis_type.go | 0 .../models/server_processed_content.go | 0 .../models/server_processed_contentable.go | 0 .../graph/betasdk/models/site_access_type.go | 0 .../graph/betasdk/models/site_page.go | 0 .../models/site_page_collection_response.go | 0 .../site_page_collection_responseable.go | 0 .../graph/betasdk/models/site_pageable.go | 0 .../betasdk/models/site_security_level.go | 0 .../graph/betasdk/models/site_settings.go | 0 .../graph/betasdk/models/site_settingsable.go | 0 .../graph/betasdk/models/standard_web_part.go | 0 .../standard_web_part_collection_response.go | 0 ...andard_web_part_collection_responseable.go | 0 .../betasdk/models/standard_web_partable.go | 0 .../graph/betasdk/models/text_web_part.go | 0 .../text_web_part_collection_response.go | 0 .../text_web_part_collection_responseable.go | 0 .../graph/betasdk/models/text_web_partable.go | 0 .../graph/betasdk/models/title_area.go | 0 .../betasdk/models/title_area_layout_type.go | 0 .../models/title_area_text_alignment_type.go | 0 .../graph/betasdk/models/title_areaable.go | 0 .../graph/betasdk/models/vertical_section.go | 0 .../betasdk/models/vertical_sectionable.go | 0 .../graph/betasdk/models/web_part.go | 0 .../models/web_part_collection_response.go | 0 .../web_part_collection_responseable.go | 0 .../graph/betasdk/models/web_part_data.go | 0 .../graph/betasdk/models/web_part_dataable.go | 0 .../graph/betasdk/models/web_part_position.go | 0 .../betasdk/models/web_part_positionable.go | 0 .../graph/betasdk/models/web_partable.go | 0 .../betasdk/sites/count_request_builder.go | 0 .../sites/item_pages_count_request_builder.go | 0 ...rizontal_sections_count_request_builder.go | 0 ...horizontal_section_item_request_builder.go | 2 +- ...ions_item_columns_count_request_builder.go | 0 ...tal_section_column_item_request_builder.go | 2 +- ...mns_item_webparts_count_request_builder.go | 0 ...et_position_of_web_part_request_builder.go | 2 +- ...m_columns_item_webparts_request_builder.go | 2 +- ..._webparts_web_part_item_request_builder.go | 2 +- ...l_sections_item_columns_request_builder.go | 2 +- ...out_horizontal_sections_request_builder.go | 2 +- ...ages_item_canvas_layout_request_builder.go | 2 +- ...layout_vertical_section_request_builder.go | 2 +- ..._section_webparts_count_request_builder.go | 0 ...et_position_of_web_part_request_builder.go | 2 +- ...rtical_section_webparts_request_builder.go | 2 +- ..._webparts_web_part_item_request_builder.go | 2 +- ...web_parts_by_position_post_request_body.go | 0 ...parts_by_position_post_request_bodyable.go | 0 ...t_web_parts_by_position_request_builder.go | 0 ...item_get_web_parts_by_position_response.go | 2 +- ..._get_web_parts_by_position_responseable.go | 2 +- ...item_pages_item_publish_request_builder.go | 0 ...es_item_web_parts_count_request_builder.go | 0 ...et_position_of_web_part_request_builder.go | 2 +- ...em_pages_item_web_parts_request_builder.go | 2 +- ...web_parts_web_part_item_request_builder.go | 2 +- .../sites/item_pages_request_builder.go | 2 +- 
...em_pages_site_page_item_request_builder.go | 2 +- .../sites/item_sites_count_request_builder.go | 0 .../item_sites_site_item_request_builder.go | 0 .../sites/site_item_request_builder.go | 0 .../graph/cache_container.go | 0 .../{connector => m365}/graph/collections.go | 2 +- .../graph/collections_test.go | 0 .../graph/concurrency_middleware.go | 0 .../graph/concurrency_middleware_test.go | 0 .../{connector => m365}/graph/consts.go | 0 .../{connector => m365}/graph/consts_test.go | 0 .../{connector => m365}/graph/errors.go | 0 .../{connector => m365}/graph/errors_test.go | 0 .../{connector => m365}/graph/http_wrapper.go | 0 .../graph/http_wrapper_test.go | 0 .../graph/metadata/metadata.go | 2 +- .../graph/metadata/metadata_test.go | 4 +- .../graph/metadata_collection.go | 6 +- .../graph/metadata_collection_test.go | 6 +- .../{connector => m365}/graph/middleware.go | 0 .../graph/middleware_test.go | 0 .../{connector => m365}/graph/mock/service.go | 2 +- .../{connector => m365}/graph/service.go | 0 .../{connector => m365}/graph/service_test.go | 0 .../graph/uploadsession.go | 0 .../graph/uploadsession_test.go | 0 .../graph_connector_disconnected_test.go | 32 +- .../graph_connector_helper_test.go | 19 +- .../graph_connector_onedrive_test.go | 105 +-- .../graph_connector_onedrive_test_helper.go | 6 +- .../graph_connector_test_helper.go | 9 +- .../{connector => m365}/mock/connector.go | 20 +- .../mock/id_name_getter.go | 0 .../onedrive/backup.go} | 8 +- .../onedrive/backup_test.go} | 8 +- .../onedrive/collection.go | 6 +- .../onedrive/collection_test.go | 24 +- .../onedrive/collections.go | 8 +- .../onedrive/collections_test.go | 16 +- .../onedrive/consts/consts.go | 0 .../{connector => m365}/onedrive/drive.go | 2 +- .../onedrive/drive_test.go | 2 +- .../onedrive/folder_cache.go | 0 .../{connector => m365}/onedrive/handlers.go | 0 .../{connector => m365}/onedrive/item.go | 4 +- .../onedrive/item_handler.go | 2 +- .../onedrive/item_handler_test.go | 0 .../{connector => m365}/onedrive/item_test.go | 0 .../onedrive/metadata/consts.go | 0 .../onedrive/metadata/metadata.go | 0 .../onedrive/metadata/permissions.go | 0 .../onedrive/metadata/permissions_test.go | 0 .../onedrive/metadata/testdata/permissions.go | 2 +- .../onedrive/mock/handlers.go | 2 +- .../{connector => m365}/onedrive/mock/item.go | 0 .../onedrive/permission.go | 2 +- .../onedrive/permission_test.go | 4 +- .../{connector => m365}/onedrive/restore.go | 14 +- .../onedrive/restore_test.go | 0 .../onedrive/service_test.go | 6 +- .../onedrive/testdata/item.go | 0 .../{connector => m365}/onedrive/url_cache.go | 0 .../onedrive/url_cache_test.go | 2 +- src/internal/m365/resource/resource.go | 9 + src/internal/m365/restore.go | 75 ++ .../sharepoint/api/beta_service.go | 2 +- .../sharepoint/api/beta_service_test.go | 4 +- .../sharepoint/api/pages.go | 6 +- .../sharepoint/api/pages_test.go | 8 +- .../sharepoint/api/serialization.go | 2 +- .../sharepoint/api/serialization_test.go | 4 +- .../sharepoint/backup.go} | 14 +- .../sharepoint/backup_test.go} | 12 +- .../sharepoint/collection.go | 8 +- .../sharepoint/collection_test.go | 4 +- .../sharepoint/datacategory_string.go | 0 .../sharepoint/helper_test.go | 10 +- .../sharepoint/library_handler.go | 4 +- .../sharepoint/library_handler_test.go | 0 .../{connector => m365}/sharepoint/list.go | 2 +- .../sharepoint/list_info.go} | 0 .../sharepoint/list_info_test.go} | 0 .../sharepoint/list_test.go | 0 .../sharepoint/mock/list.go | 0 .../sharepoint/mock/mock_test.go | 2 +- .../sharepoint/mock/page.go | 
0 .../sharepoint/pageInfo.go | 2 +- .../sharepoint/pageInfo_test.go | 2 +- .../{connector => m365}/sharepoint/restore.go | 29 +- .../support/operation_string.go | 0 .../{connector => m365}/support/status.go | 16 +- .../support/status_test.go | 10 +- src/internal/operations/backup.go | 14 +- .../operations/backup_integration_test.go | 115 +-- src/internal/operations/backup_test.go | 20 +- src/internal/operations/help_test.go | 17 +- src/internal/operations/manifests.go | 2 +- src/internal/operations/restore.go | 14 +- src/internal/operations/restore_test.go | 53 +- src/internal/tester/account.go | 2 +- src/pkg/backup/details/details.go | 2 +- src/pkg/backup/details/details_test.go | 4 +- src/pkg/errs/errs.go | 2 +- src/pkg/errs/errs_test.go | 2 +- src/pkg/logger/example_logger_test.go | 6 +- src/pkg/path/drive_test.go | 2 +- src/pkg/repository/repository.go | 27 +- src/pkg/selectors/onedrive_test.go | 2 +- src/pkg/selectors/sharepoint_test.go | 2 +- src/pkg/services/m365/api/client.go | 2 +- src/pkg/services/m365/api/client_test.go | 2 +- src/pkg/services/m365/api/contacts.go | 2 +- src/pkg/services/m365/api/contacts_test.go | 2 +- src/pkg/services/m365/api/drive.go | 2 +- src/pkg/services/m365/api/drive_pager.go | 4 +- src/pkg/services/m365/api/events.go | 2 +- src/pkg/services/m365/api/events_test.go | 2 +- src/pkg/services/m365/api/item_pager.go | 2 +- src/pkg/services/m365/api/item_pager_test.go | 2 +- src/pkg/services/m365/api/mail.go | 2 +- src/pkg/services/m365/api/mail_test.go | 2 +- src/pkg/services/m365/api/mock/mail.go | 4 +- src/pkg/services/m365/api/sites.go | 4 +- src/pkg/services/m365/api/users.go | 2 +- src/pkg/services/m365/m365.go | 6 +- 275 files changed, 1999 insertions(+), 2108 deletions(-) delete mode 100644 src/internal/connector/exchange/data_collections.go delete mode 100644 src/internal/connector/exchange/data_collections_test.go delete mode 100644 src/internal/connector/exchange/folder_resolver_test.go delete mode 100644 src/internal/connector/exchange/iterators_test.go rename src/internal/{connector/data_collections.go => m365/backup.go} (67%) rename src/internal/{connector/data_collections_test.go => m365/backup_test.go} (88%) rename src/internal/{connector/graph_connector.go => m365/controller.go} (62%) rename src/internal/{connector/graph_connector_test.go => m365/controller_test.go} (87%) rename src/internal/{connector => m365}/discovery/discovery.go (98%) rename src/internal/{connector => m365}/discovery/discovery_test.go (99%) rename src/internal/{connector => m365}/exchange/attachment.go (100%) rename src/internal/{connector => m365}/exchange/attendees.go (100%) rename src/internal/{connector/exchange/service_iterators.go => m365/exchange/backup.go} (54%) rename src/internal/{connector/exchange/service_iterators_test.go => m365/exchange/backup_test.go} (62%) rename src/internal/{connector => m365}/exchange/cache_container.go (93%) rename src/internal/{connector/exchange/exchange_data_collection.go => m365/exchange/collection.go} (98%) rename src/internal/{connector/exchange/exchange_data_collection_test.go => m365/exchange/collection_test.go} (85%) rename src/internal/{connector => m365}/exchange/consts.go (100%) rename src/internal/{connector/exchange/contact_folder_cache.go => m365/exchange/contact_container_cache.go} (89%) rename src/internal/{connector => m365}/exchange/contacts_backup.go (85%) rename src/internal/{connector => m365}/exchange/contacts_restore.go (95%) rename src/internal/{connector => m365}/exchange/contacts_restore_test.go (100%) 
rename src/internal/{connector => m365}/exchange/container_resolver.go (99%) rename src/internal/{connector => m365}/exchange/container_resolver_test.go (84%) rename src/internal/{connector/exchange/event_calendar_cache.go => m365/exchange/event_container_cache.go} (87%) rename src/internal/{connector => m365}/exchange/events_backup.go (86%) rename src/internal/{connector => m365}/exchange/events_restore.go (96%) rename src/internal/{connector => m365}/exchange/events_restore_test.go (100%) rename src/internal/{connector => m365}/exchange/handlers.go (98%) rename src/internal/{connector => m365}/exchange/mail_backup.go (86%) rename src/internal/{connector/exchange/mail_folder_cache.go => m365/exchange/mail_container_cache.go} (87%) rename src/internal/{connector/exchange/mail_folder_cache_test.go => m365/exchange/mail_container_cache_test.go} (98%) rename src/internal/{connector => m365}/exchange/mail_restore.go (97%) rename src/internal/{connector => m365}/exchange/mail_restore_test.go (100%) rename src/internal/{connector => m365}/exchange/mock/collections.go (100%) rename src/internal/{connector => m365}/exchange/mock/contact.go (100%) rename src/internal/{connector => m365}/exchange/mock/event.go (100%) rename src/internal/{connector => m365}/exchange/mock/mail.go (100%) rename src/internal/{connector => m365}/exchange/mock/mock_test.go (100%) rename src/internal/{connector/exchange/service_restore.go => m365/exchange/restore.go} (93%) rename src/internal/{connector => m365}/exchange/restore_test.go (99%) rename src/internal/{connector => m365}/exchange/testdata/handlers.go (86%) rename src/internal/{connector => m365}/exchange/transform.go (100%) rename src/internal/{connector => m365}/exchange/transform_test.go (98%) rename src/internal/{connector => m365}/graph/betasdk/beta_client.go (98%) rename src/internal/{connector => m365}/graph/betasdk/beta_client_test.go (97%) rename src/internal/{connector => m365}/graph/betasdk/kiota-lock.json (99%) rename src/internal/{connector => m365}/graph/betasdk/models/base_item.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/canvas_layout.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/canvas_layoutable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/horizontal_section.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/horizontal_section_collection_response.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/horizontal_section_collection_responseable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/horizontal_section_column.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/horizontal_section_column_collection_response.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/horizontal_section_column_collection_responseable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/horizontal_section_columnable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/horizontal_section_layout_type.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/horizontal_sectionable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/meta_data_key_string_pair.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/meta_data_key_string_pair_collection_response.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/meta_data_key_string_pair_collection_responseable.go (100%) rename src/internal/{connector => 
m365}/graph/betasdk/models/meta_data_key_string_pairable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/meta_data_key_value_pair.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/meta_data_key_value_pair_collection_response.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/meta_data_key_value_pair_collection_responseable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/meta_data_key_value_pairable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/page_layout_type.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/page_promotion_type.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/publication_facet.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/publication_facetable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/reactions_facet.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/reactions_facetable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/section_emphasis_type.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/server_processed_content.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/server_processed_contentable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/site_access_type.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/site_page.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/site_page_collection_response.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/site_page_collection_responseable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/site_pageable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/site_security_level.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/site_settings.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/site_settingsable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/standard_web_part.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/standard_web_part_collection_response.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/standard_web_part_collection_responseable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/standard_web_partable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/text_web_part.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/text_web_part_collection_response.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/text_web_part_collection_responseable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/text_web_partable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/title_area.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/title_area_layout_type.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/title_area_text_alignment_type.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/title_areaable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/vertical_section.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/vertical_sectionable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/web_part.go (100%) rename src/internal/{connector => 
m365}/graph/betasdk/models/web_part_collection_response.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/web_part_collection_responseable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/web_part_data.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/web_part_dataable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/web_part_position.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/web_part_positionable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/models/web_partable.go (100%) rename src/internal/{connector => m365}/graph/betasdk/sites/count_request_builder.go (100%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_count_request_builder.go (100%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_count_request_builder.go (100%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_horizontal_section_item_request_builder.go (99%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_count_request_builder.go (100%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_horizontal_section_column_item_request_builder.go (99%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_count_request_builder.go (100%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_item_get_position_of_web_part_request_builder.go (98%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_request_builder.go (99%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_web_part_item_request_builder.go (99%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_request_builder.go (99%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_request_builder.go (99%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_canvas_layout_request_builder.go (99%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_request_builder.go (99%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_count_request_builder.go (100%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_item_get_position_of_web_part_request_builder.go (98%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_request_builder.go (99%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_web_part_item_request_builder.go (99%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_post_request_body.go (100%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_post_request_bodyable.go (100%) rename 
src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_request_builder.go (100%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_response.go (97%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_responseable.go (90%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_publish_request_builder.go (100%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_web_parts_count_request_builder.go (100%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_web_parts_item_get_position_of_web_part_request_builder.go (98%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_web_parts_request_builder.go (99%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_item_web_parts_web_part_item_request_builder.go (99%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_request_builder.go (99%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_pages_site_page_item_request_builder.go (99%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_sites_count_request_builder.go (100%) rename src/internal/{connector => m365}/graph/betasdk/sites/item_sites_site_item_request_builder.go (100%) rename src/internal/{connector => m365}/graph/betasdk/sites/site_item_request_builder.go (100%) rename src/internal/{connector => m365}/graph/cache_container.go (100%) rename src/internal/{connector => m365}/graph/collections.go (98%) rename src/internal/{connector => m365}/graph/collections_test.go (100%) rename src/internal/{connector => m365}/graph/concurrency_middleware.go (100%) rename src/internal/{connector => m365}/graph/concurrency_middleware_test.go (100%) rename src/internal/{connector => m365}/graph/consts.go (100%) rename src/internal/{connector => m365}/graph/consts_test.go (100%) rename src/internal/{connector => m365}/graph/errors.go (100%) rename src/internal/{connector => m365}/graph/errors_test.go (100%) rename src/internal/{connector => m365}/graph/http_wrapper.go (100%) rename src/internal/{connector => m365}/graph/http_wrapper_test.go (100%) rename src/internal/{connector => m365}/graph/metadata/metadata.go (83%) rename src/internal/{connector => m365}/graph/metadata/metadata_test.go (95%) rename src/internal/{connector => m365}/graph/metadata_collection.go (96%) rename src/internal/{connector => m365}/graph/metadata_collection_test.go (96%) rename src/internal/{connector => m365}/graph/middleware.go (100%) rename src/internal/{connector => m365}/graph/middleware_test.go (100%) rename src/internal/{connector => m365}/graph/mock/service.go (95%) rename src/internal/{connector => m365}/graph/service.go (100%) rename src/internal/{connector => m365}/graph/service_test.go (100%) rename src/internal/{connector => m365}/graph/uploadsession.go (100%) rename src/internal/{connector => m365}/graph/uploadsession_test.go (100%) rename src/internal/{connector => m365}/graph_connector_disconnected_test.go (88%) rename src/internal/{connector => m365}/graph_connector_helper_test.go (98%) rename src/internal/{connector => m365}/graph_connector_onedrive_test.go (87%) rename src/internal/{connector => m365}/graph_connector_onedrive_test_helper.go (98%) rename src/internal/{connector => m365}/graph_connector_test_helper.go (95%) rename src/internal/{connector => m365}/mock/connector.go (73%) rename src/internal/{connector => 
m365}/mock/id_name_getter.go (100%) rename src/internal/{connector/onedrive/data_collections.go => m365/onedrive/backup.go} (94%) rename src/internal/{connector/onedrive/data_collections_test.go => m365/onedrive/backup_test.go} (91%) rename src/internal/{connector => m365}/onedrive/collection.go (98%) rename src/internal/{connector => m365}/onedrive/collection_test.go (96%) rename src/internal/{connector => m365}/onedrive/collections.go (98%) rename src/internal/{connector => m365}/onedrive/collections_test.go (99%) rename src/internal/{connector => m365}/onedrive/consts/consts.go (100%) rename src/internal/{connector => m365}/onedrive/drive.go (99%) rename src/internal/{connector => m365}/onedrive/drive_test.go (99%) rename src/internal/{connector => m365}/onedrive/folder_cache.go (100%) rename src/internal/{connector => m365}/onedrive/handlers.go (100%) rename src/internal/{connector => m365}/onedrive/item.go (96%) rename src/internal/{connector => m365}/onedrive/item_handler.go (98%) rename src/internal/{connector => m365}/onedrive/item_handler_test.go (100%) rename src/internal/{connector => m365}/onedrive/item_test.go (100%) rename src/internal/{connector => m365}/onedrive/metadata/consts.go (100%) rename src/internal/{connector => m365}/onedrive/metadata/metadata.go (100%) rename src/internal/{connector => m365}/onedrive/metadata/permissions.go (100%) rename src/internal/{connector => m365}/onedrive/metadata/permissions_test.go (100%) rename src/internal/{connector => m365}/onedrive/metadata/testdata/permissions.go (94%) rename src/internal/{connector => m365}/onedrive/mock/handlers.go (98%) rename src/internal/{connector => m365}/onedrive/mock/item.go (100%) rename src/internal/{connector => m365}/onedrive/permission.go (98%) rename src/internal/{connector => m365}/onedrive/permission_test.go (96%) rename src/internal/{connector => m365}/onedrive/restore.go (98%) rename src/internal/{connector => m365}/onedrive/restore_test.go (100%) rename src/internal/{connector => m365}/onedrive/service_test.go (84%) rename src/internal/{connector => m365}/onedrive/testdata/item.go (100%) rename src/internal/{connector => m365}/onedrive/url_cache.go (100%) rename src/internal/{connector => m365}/onedrive/url_cache_test.go (98%) create mode 100644 src/internal/m365/resource/resource.go create mode 100644 src/internal/m365/restore.go rename src/internal/{connector => m365}/sharepoint/api/beta_service.go (94%) rename src/internal/{connector => m365}/sharepoint/api/beta_service_test.go (88%) rename src/internal/{connector => m365}/sharepoint/api/pages.go (97%) rename src/internal/{connector => m365}/sharepoint/api/pages_test.go (91%) rename src/internal/{connector => m365}/sharepoint/api/serialization.go (98%) rename src/internal/{connector => m365}/sharepoint/api/serialization_test.go (94%) rename src/internal/{connector/sharepoint/data_collections.go => m365/sharepoint/backup.go} (94%) rename src/internal/{connector/sharepoint/data_collections_test.go => m365/sharepoint/backup_test.go} (93%) rename src/internal/{connector => m365}/sharepoint/collection.go (96%) rename src/internal/{connector => m365}/sharepoint/collection_test.go (97%) rename src/internal/{connector => m365}/sharepoint/datacategory_string.go (100%) rename src/internal/{connector => m365}/sharepoint/helper_test.go (81%) rename src/internal/{connector => m365}/sharepoint/library_handler.go (98%) rename src/internal/{connector => m365}/sharepoint/library_handler_test.go (100%) rename src/internal/{connector => m365}/sharepoint/list.go 
(99%) rename src/internal/{connector/sharepoint/listInfo.go => m365/sharepoint/list_info.go} (100%) rename src/internal/{connector/sharepoint/listInfo_test.go => m365/sharepoint/list_info_test.go} (100%) rename src/internal/{connector => m365}/sharepoint/list_test.go (100%) rename src/internal/{connector => m365}/sharepoint/mock/list.go (100%) rename src/internal/{connector => m365}/sharepoint/mock/mock_test.go (96%) rename src/internal/{connector => m365}/sharepoint/mock/page.go (100%) rename src/internal/{connector => m365}/sharepoint/pageInfo.go (93%) rename src/internal/{connector => m365}/sharepoint/pageInfo_test.go (94%) rename src/internal/{connector => m365}/sharepoint/restore.go (88%) rename src/internal/{connector => m365}/support/operation_string.go (100%) rename src/internal/{connector => m365}/support/status.go (84%) rename src/internal/{connector => m365}/support/status_test.go (93%) diff --git a/src/.golangci.yml b/src/.golangci.yml index 377c140d8..0949053cc 100644 --- a/src/.golangci.yml +++ b/src/.golangci.yml @@ -118,7 +118,7 @@ issues: linters: - forbidigo text: "context.(Background|TODO)" - - path: internal/connector/graph/betasdk + - path: internal/m365/graph/betasdk linters: - wsl - revive diff --git a/src/cli/backup/backup.go b/src/cli/backup/backup.go index 59809ef97..25a6d22e7 100644 --- a/src/cli/backup/backup.go +++ b/src/cli/backup/backup.go @@ -12,8 +12,8 @@ import ( . "github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/internal/common/idname" - "github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/data" + "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/pkg/backup" "github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/path" diff --git a/src/cli/backup/exchange_e2e_test.go b/src/cli/backup/exchange_e2e_test.go index 39437de20..9400f0d90 100644 --- a/src/cli/backup/exchange_e2e_test.go +++ b/src/cli/backup/exchange_e2e_test.go @@ -19,7 +19,7 @@ import ( "github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/internal/common/idname" - "github.com/alcionai/corso/src/internal/connector/exchange" + "github.com/alcionai/corso/src/internal/m365/exchange" "github.com/alcionai/corso/src/internal/operations" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/account" diff --git a/src/cli/restore/exchange_e2e_test.go b/src/cli/restore/exchange_e2e_test.go index c35001f29..1f4f93601 100644 --- a/src/cli/restore/exchange_e2e_test.go +++ b/src/cli/restore/exchange_e2e_test.go @@ -14,7 +14,7 @@ import ( "github.com/alcionai/corso/src/cli/config" "github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/internal/common/idname" - "github.com/alcionai/corso/src/internal/connector/exchange" + "github.com/alcionai/corso/src/internal/m365/exchange" "github.com/alcionai/corso/src/internal/operations" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/account" diff --git a/src/cmd/factory/impl/common.go b/src/cmd/factory/impl/common.go index d2ccb76f5..f1b863275 100644 --- a/src/cmd/factory/impl/common.go +++ b/src/cmd/factory/impl/common.go @@ -15,9 +15,10 @@ import ( "github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/str" - "github.com/alcionai/corso/src/internal/connector" - 
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock" "github.com/alcionai/corso/src/internal/data" + "github.com/alcionai/corso/src/internal/m365" + exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock" + "github.com/alcionai/corso/src/internal/m365/resource" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/pkg/account" @@ -50,7 +51,7 @@ type dataBuilderFunc func(id, now, subject, body string) []byte func generateAndRestoreItems( ctx context.Context, - gc *connector.GraphConnector, + ctrl *m365.Controller, service path.ServiceType, cat path.CategoryType, sel selectors.Selector, @@ -98,19 +99,19 @@ func generateAndRestoreItems( print.Infof(ctx, "Generating %d %s items in %s\n", howMany, cat, Destination) - return gc.ConsumeRestoreCollections(ctx, version.Backup, sel, dest, opts, dataColls, errs) + return ctrl.ConsumeRestoreCollections(ctx, version.Backup, sel, dest, opts, dataColls, errs) } // ------------------------------------------------------------------------------------------ // Common Helpers // ------------------------------------------------------------------------------------------ -func getGCAndVerifyResourceOwner( +func getControllerAndVerifyResourceOwner( ctx context.Context, - resource connector.Resource, + rc resource.Category, resourceOwner string, ) ( - *connector.GraphConnector, + *m365.Controller, account.Account, idname.Provider, error, @@ -132,17 +133,17 @@ func getGCAndVerifyResourceOwner( return nil, account.Account{}, nil, clues.Wrap(err, "finding m365 account details") } - gc, err := connector.NewGraphConnector(ctx, acct, resource) + ctrl, err := m365.NewController(ctx, acct, rc) if err != nil { return nil, account.Account{}, nil, clues.Wrap(err, "connecting to graph api") } - id, _, err := gc.PopulateOwnerIDAndNamesFrom(ctx, resourceOwner, nil) + id, _, err := ctrl.PopulateOwnerIDAndNamesFrom(ctx, resourceOwner, nil) if err != nil { return nil, account.Account{}, nil, clues.Wrap(err, "verifying user") } - return gc, acct, gc.IDNameLookup.ProviderForID(id), nil + return ctrl, acct, ctrl.IDNameLookup.ProviderForID(id), nil } type item struct { @@ -208,7 +209,7 @@ var ( ) func generateAndRestoreDriveItems( - gc *connector.GraphConnector, + ctrl *m365.Controller, resourceOwner, secondaryUserID, secondaryUserName string, acct account.Account, service path.ServiceType, @@ -232,14 +233,14 @@ func generateAndRestoreDriveItems( switch service { case path.SharePointService: - d, err := gc.AC.Stable.Client().Sites().BySiteId(resourceOwner).Drive().Get(ctx, nil) + d, err := ctrl.AC.Stable.Client().Sites().BySiteId(resourceOwner).Drive().Get(ctx, nil) if err != nil { return nil, clues.Wrap(err, "getting site's default drive") } driveID = ptr.Val(d.GetId()) default: - d, err := gc.AC.Stable.Client().Users().ByUserId(resourceOwner).Drive().Get(ctx, nil) + d, err := ctrl.AC.Stable.Client().Users().ByUserId(resourceOwner).Drive().Get(ctx, nil) if err != nil { return nil, clues.Wrap(err, "getting user's default drive") } @@ -248,7 +249,7 @@ func generateAndRestoreDriveItems( } var ( - cols []connector.OnedriveColInfo + cols []m365.OnedriveColInfo rootPath = []string{"drives", driveID, "root:"} folderAPath = []string{"drives", driveID, "root:", folderAName} @@ -262,15 +263,15 @@ func generateAndRestoreDriveItems( ) for i := 0; i < count; i++ { - col := []connector.OnedriveColInfo{ + col := []m365.OnedriveColInfo{ // basic folder and file creation { PathElements: 
rootPath, - Files: []connector.ItemData{ + Files: []m365.ItemData{ { Name: fmt.Sprintf("file-1st-count-%d-at-%s", i, currentTime), Data: fileAData, - Perms: connector.PermData{ + Perms: m365.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: writePerm, @@ -281,13 +282,13 @@ func generateAndRestoreDriveItems( Data: fileBData, }, }, - Folders: []connector.ItemData{ + Folders: []m365.ItemData{ { Name: folderBName, }, { Name: folderAName, - Perms: connector.PermData{ + Perms: m365.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: readPerm, @@ -295,7 +296,7 @@ func generateAndRestoreDriveItems( }, { Name: folderCName, - Perms: connector.PermData{ + Perms: m365.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: readPerm, @@ -307,18 +308,18 @@ func generateAndRestoreDriveItems( // a folder that has permissions with an item in the folder with // the different permissions. PathElements: folderAPath, - Files: []connector.ItemData{ + Files: []m365.ItemData{ { Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime), Data: fileEData, - Perms: connector.PermData{ + Perms: m365.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: writePerm, }, }, }, - Perms: connector.PermData{ + Perms: m365.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: readPerm, @@ -328,13 +329,13 @@ func generateAndRestoreDriveItems( // a folder that has permissions with an item in the folder with // no permissions. PathElements: folderCPath, - Files: []connector.ItemData{ + Files: []m365.ItemData{ { Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime), Data: fileAData, }, }, - Perms: connector.PermData{ + Perms: m365.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: readPerm, @@ -342,23 +343,23 @@ func generateAndRestoreDriveItems( }, { PathElements: folderBPath, - Files: []connector.ItemData{ + Files: []m365.ItemData{ { // restoring a file in a non-root folder that doesn't inherit // permissions. Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime), Data: fileBData, - Perms: connector.PermData{ + Perms: m365.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: writePerm, }, }, }, - Folders: []connector.ItemData{ + Folders: []m365.ItemData{ { Name: folderAName, - Perms: connector.PermData{ + Perms: m365.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: readPerm, @@ -371,7 +372,7 @@ func generateAndRestoreDriveItems( cols = append(cols, col...) 
} - input, err := connector.DataForInfo(service, cols, version.Backup) + input, err := m365.DataForInfo(service, cols, version.Backup) if err != nil { return nil, err } @@ -388,16 +389,16 @@ func generateAndRestoreDriveItems( ToggleFeatures: control.Toggles{}, } - config := connector.ConfigInfo{ + config := m365.ConfigInfo{ Opts: opts, - Resource: connector.Users, + Resource: resource.Users, Service: service, Tenant: tenantID, ResourceOwners: []string{resourceOwner}, RestoreCfg: tester.DefaultTestRestoreConfig(""), } - _, _, collections, _, err := connector.GetCollectionsAndExpected( + _, _, collections, _, err := m365.GetCollectionsAndExpected( config, input, version.Backup) @@ -405,5 +406,5 @@ func generateAndRestoreDriveItems( return nil, err } - return gc.ConsumeRestoreCollections(ctx, version.Backup, sel, dest, opts, collections, errs) + return ctrl.ConsumeRestoreCollections(ctx, version.Backup, sel, dest, opts, collections, errs) } diff --git a/src/cmd/factory/impl/exchange.go b/src/cmd/factory/impl/exchange.go index bc6b666be..dd304e2e9 100644 --- a/src/cmd/factory/impl/exchange.go +++ b/src/cmd/factory/impl/exchange.go @@ -5,8 +5,8 @@ import ( . "github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/utils" - "github.com/alcionai/corso/src/internal/connector" - exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock" + exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock" + "github.com/alcionai/corso/src/internal/m365/resource" "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/logger" @@ -52,14 +52,14 @@ func handleExchangeEmailFactory(cmd *cobra.Command, args []string) error { return nil } - gc, _, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User) + ctrl, _, _, err := getControllerAndVerifyResourceOwner(ctx, resource.Users, User) if err != nil { return Only(ctx, err) } deets, err := generateAndRestoreItems( ctx, - gc, + ctrl, service, category, selectors.NewExchangeRestore([]string{User}).Selector, @@ -98,14 +98,14 @@ func handleExchangeCalendarEventFactory(cmd *cobra.Command, args []string) error return nil } - gc, _, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User) + ctrl, _, _, err := getControllerAndVerifyResourceOwner(ctx, resource.Users, User) if err != nil { return Only(ctx, err) } deets, err := generateAndRestoreItems( ctx, - gc, + ctrl, service, category, selectors.NewExchangeRestore([]string{User}).Selector, @@ -143,14 +143,14 @@ func handleExchangeContactFactory(cmd *cobra.Command, args []string) error { return nil } - gc, _, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User) + ctrl, _, _, err := getControllerAndVerifyResourceOwner(ctx, resource.Users, User) if err != nil { return Only(ctx, err) } deets, err := generateAndRestoreItems( ctx, - gc, + ctrl, service, category, selectors.NewExchangeRestore([]string{User}).Selector, diff --git a/src/cmd/factory/impl/onedrive.go b/src/cmd/factory/impl/onedrive.go index 62ebcc71a..750fa64a3 100644 --- a/src/cmd/factory/impl/onedrive.go +++ b/src/cmd/factory/impl/onedrive.go @@ -7,7 +7,7 @@ import ( . 
"github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/utils" - "github.com/alcionai/corso/src/internal/connector" + "github.com/alcionai/corso/src/internal/m365/resource" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/path" @@ -36,7 +36,7 @@ func handleOneDriveFileFactory(cmd *cobra.Command, args []string) error { return nil } - gc, acct, inp, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User) + ctrl, acct, inp, err := getControllerAndVerifyResourceOwner(ctx, resource.Users, User) if err != nil { return Only(ctx, err) } @@ -45,7 +45,7 @@ func handleOneDriveFileFactory(cmd *cobra.Command, args []string) error { sel.SetDiscreteOwnerIDName(inp.ID(), inp.Name()) deets, err := generateAndRestoreDriveItems( - gc, + ctrl, inp.ID(), SecondaryUser, strings.ToLower(SecondaryUser), diff --git a/src/cmd/factory/impl/sharepoint.go b/src/cmd/factory/impl/sharepoint.go index 7f50ee97b..bdcc952f5 100644 --- a/src/cmd/factory/impl/sharepoint.go +++ b/src/cmd/factory/impl/sharepoint.go @@ -7,7 +7,7 @@ import ( . "github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/utils" - "github.com/alcionai/corso/src/internal/connector" + "github.com/alcionai/corso/src/internal/m365/resource" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/path" @@ -36,7 +36,7 @@ func handleSharePointLibraryFileFactory(cmd *cobra.Command, args []string) error return nil } - gc, acct, inp, err := getGCAndVerifyResourceOwner(ctx, connector.Sites, Site) + ctrl, acct, inp, err := getControllerAndVerifyResourceOwner(ctx, resource.Sites, Site) if err != nil { return Only(ctx, err) } @@ -45,7 +45,7 @@ func handleSharePointLibraryFileFactory(cmd *cobra.Command, args []string) error sel.SetDiscreteOwnerIDName(inp.ID(), inp.Name()) deets, err := generateAndRestoreDriveItems( - gc, + ctrl, inp.ID(), SecondaryUser, strings.ToLower(SecondaryUser), diff --git a/src/cmd/getM365/onedrive/get_item.go b/src/cmd/getM365/onedrive/get_item.go index 4729885f5..4e29098fd 100644 --- a/src/cmd/getM365/onedrive/get_item.go +++ b/src/cmd/getM365/onedrive/get_item.go @@ -21,7 +21,7 @@ import ( "github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/str" - "github.com/alcionai/corso/src/internal/connector/graph" + "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/credentials" "github.com/alcionai/corso/src/pkg/services/m365/api" diff --git a/src/cmd/sanity_test/sanity_tests.go b/src/cmd/sanity_test/sanity_tests.go index da603d651..9304460fd 100644 --- a/src/cmd/sanity_test/sanity_tests.go +++ b/src/cmd/sanity_test/sanity_tests.go @@ -17,7 +17,7 @@ import ( "github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/graph" + "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/filters" "github.com/alcionai/corso/src/pkg/logger" diff --git a/src/internal/connector/exchange/data_collections.go b/src/internal/connector/exchange/data_collections.go deleted file mode 100644 index f607488fd..000000000 --- a/src/internal/connector/exchange/data_collections.go +++ /dev/null @@ -1,317 +0,0 @@ -package exchange - -import ( - "context" - 
"encoding/json" - - "github.com/alcionai/clues" - - "github.com/alcionai/corso/src/internal/common/idname" - "github.com/alcionai/corso/src/internal/common/prefixmatcher" - "github.com/alcionai/corso/src/internal/connector/graph" - "github.com/alcionai/corso/src/internal/connector/support" - "github.com/alcionai/corso/src/internal/data" - "github.com/alcionai/corso/src/internal/observe" - "github.com/alcionai/corso/src/pkg/control" - "github.com/alcionai/corso/src/pkg/fault" - "github.com/alcionai/corso/src/pkg/logger" - "github.com/alcionai/corso/src/pkg/path" - "github.com/alcionai/corso/src/pkg/selectors" - "github.com/alcionai/corso/src/pkg/services/m365/api" -) - -// MetadataFileNames produces the category-specific set of filenames used to -// store graph metadata such as delta tokens and folderID->path references. -func MetadataFileNames(cat path.CategoryType) []string { - switch cat { - case path.EmailCategory, path.ContactsCategory: - return []string{graph.DeltaURLsFileName, graph.PreviousPathFileName} - default: - return []string{graph.PreviousPathFileName} - } -} - -type CatDeltaPaths map[path.CategoryType]DeltaPaths - -type DeltaPaths map[string]DeltaPath - -func (dps DeltaPaths) AddDelta(k, d string) { - dp, ok := dps[k] - if !ok { - dp = DeltaPath{} - } - - dp.Delta = d - dps[k] = dp -} - -func (dps DeltaPaths) AddPath(k, p string) { - dp, ok := dps[k] - if !ok { - dp = DeltaPath{} - } - - dp.Path = p - dps[k] = dp -} - -type DeltaPath struct { - Delta string - Path string -} - -// ParseMetadataCollections produces a map of structs holding delta -// and path lookup maps. -func parseMetadataCollections( - ctx context.Context, - colls []data.RestoreCollection, -) (CatDeltaPaths, bool, error) { - // cdp stores metadata - cdp := CatDeltaPaths{ - path.ContactsCategory: {}, - path.EmailCategory: {}, - path.EventsCategory: {}, - } - - // found tracks the metadata we've loaded, to make sure we don't - // fetch overlapping copies. 
- found := map[path.CategoryType]map[string]struct{}{ - path.ContactsCategory: {}, - path.EmailCategory: {}, - path.EventsCategory: {}, - } - - // errors from metadata items should not stop the backup, - // but it should prevent us from using previous backups - errs := fault.New(true) - - for _, coll := range colls { - var ( - breakLoop bool - items = coll.Items(ctx, errs) - category = coll.FullPath().Category() - ) - - for { - select { - case <-ctx.Done(): - return nil, false, clues.Wrap(ctx.Err(), "parsing collection metadata").WithClues(ctx) - - case item, ok := <-items: - if !ok || errs.Failure() != nil { - breakLoop = true - break - } - - var ( - m = map[string]string{} - cdps = cdp[category] - ) - - err := json.NewDecoder(item.ToReader()).Decode(&m) - if err != nil { - return nil, false, clues.New("decoding metadata json").WithClues(ctx) - } - - switch item.UUID() { - case graph.PreviousPathFileName: - if _, ok := found[category]["path"]; ok { - return nil, false, clues.Wrap(clues.New(category.String()), "multiple versions of path metadata").WithClues(ctx) - } - - for k, p := range m { - cdps.AddPath(k, p) - } - - found[category]["path"] = struct{}{} - - case graph.DeltaURLsFileName: - if _, ok := found[category]["delta"]; ok { - return nil, false, clues.Wrap(clues.New(category.String()), "multiple versions of delta metadata").WithClues(ctx) - } - - for k, d := range m { - cdps.AddDelta(k, d) - } - - found[category]["delta"] = struct{}{} - } - - cdp[category] = cdps - } - - if breakLoop { - break - } - } - } - - if errs.Failure() != nil { - logger.CtxErr(ctx, errs.Failure()).Info("reading metadata collection items") - - return CatDeltaPaths{ - path.ContactsCategory: {}, - path.EmailCategory: {}, - path.EventsCategory: {}, - }, false, nil - } - - // Remove any entries that contain a path or a delta, but not both. - // That metadata is considered incomplete, and needs to incur a - // complete backup on the next run. 
- for _, dps := range cdp { - for k, dp := range dps { - if len(dp.Path) == 0 { - delete(dps, k) - } - } - } - - return cdp, true, nil -} - -// DataCollections returns a DataCollection which the caller can -// use to read mailbox data out for the specified user -func DataCollections( - ctx context.Context, - ac api.Client, - selector selectors.Selector, - tenantID string, - user idname.Provider, - metadata []data.RestoreCollection, - su support.StatusUpdater, - ctrlOpts control.Options, - errs *fault.Bus, -) ([]data.BackupCollection, *prefixmatcher.StringSetMatcher, bool, error) { - eb, err := selector.ToExchangeBackup() - if err != nil { - return nil, nil, false, clues.Wrap(err, "exchange dataCollection selector").WithClues(ctx) - } - - var ( - collections = []data.BackupCollection{} - el = errs.Local() - categories = map[path.CategoryType]struct{}{} - handlers = BackupHandlers(ac) - ) - - // Turn on concurrency limiter middleware for exchange backups - // unless explicitly disabled through DisableConcurrencyLimiterFN cli flag - if !ctrlOpts.ToggleFeatures.DisableConcurrencyLimiter { - graph.InitializeConcurrencyLimiter(ctrlOpts.Parallelism.ItemFetch) - } - - cdps, canUsePreviousBackup, err := parseMetadataCollections(ctx, metadata) - if err != nil { - return nil, nil, false, err - } - - for _, scope := range eb.Scopes() { - if el.Failure() != nil { - break - } - - dcs, err := createCollections( - ctx, - handlers, - tenantID, - user, - scope, - cdps[scope.Category().PathType()], - ctrlOpts, - su, - errs) - if err != nil { - el.AddRecoverable(err) - continue - } - - categories[scope.Category().PathType()] = struct{}{} - - collections = append(collections, dcs...) - } - - if len(collections) > 0 { - baseCols, err := graph.BaseCollections( - ctx, - collections, - tenantID, - user.ID(), - path.ExchangeService, - categories, - su, - errs) - if err != nil { - return nil, nil, false, err - } - - collections = append(collections, baseCols...) - } - - return collections, nil, canUsePreviousBackup, el.Failure() -} - -// createCollections - utility function that retrieves M365 -// IDs through Microsoft Graph API. The selectors.ExchangeScope -// determines the type of collections that are retrieved. 
-func createCollections( - ctx context.Context, - handlers map[path.CategoryType]backupHandler, - tenantID string, - user idname.Provider, - scope selectors.ExchangeScope, - dps DeltaPaths, - ctrlOpts control.Options, - su support.StatusUpdater, - errs *fault.Bus, -) ([]data.BackupCollection, error) { - ctx = clues.Add(ctx, "category", scope.Category().PathType()) - - var ( - allCollections = make([]data.BackupCollection, 0) - category = scope.Category().PathType() - qp = graph.QueryParams{ - Category: category, - ResourceOwner: user, - TenantID: tenantID, - } - ) - - handler, ok := handlers[category] - if !ok { - return nil, clues.New("unsupported backup category type").WithClues(ctx) - } - - foldersComplete := observe.MessageWithCompletion( - ctx, - observe.Bulletf("%s", qp.Category)) - defer close(foldersComplete) - - rootFolder, cc := handler.NewContainerCache(user.ID()) - - if err := cc.Populate(ctx, errs, rootFolder); err != nil { - return nil, clues.Wrap(err, "populating container cache") - } - - collections, err := filterContainersAndFillCollections( - ctx, - qp, - handler, - su, - cc, - scope, - dps, - ctrlOpts, - errs) - if err != nil { - return nil, clues.Wrap(err, "filling collections") - } - - foldersComplete <- struct{}{} - - for _, coll := range collections { - allCollections = append(allCollections, coll) - } - - return allCollections, nil -} diff --git a/src/internal/connector/exchange/data_collections_test.go b/src/internal/connector/exchange/data_collections_test.go deleted file mode 100644 index 212024fb9..000000000 --- a/src/internal/connector/exchange/data_collections_test.go +++ /dev/null @@ -1,761 +0,0 @@ -package exchange - -import ( - "bytes" - "context" - "sync" - "testing" - - "github.com/alcionai/clues" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/stretchr/testify/suite" - - inMock "github.com/alcionai/corso/src/internal/common/idname/mock" - "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/graph" - "github.com/alcionai/corso/src/internal/connector/support" - "github.com/alcionai/corso/src/internal/data" - "github.com/alcionai/corso/src/internal/tester" - "github.com/alcionai/corso/src/pkg/control" - "github.com/alcionai/corso/src/pkg/fault" - "github.com/alcionai/corso/src/pkg/path" - "github.com/alcionai/corso/src/pkg/selectors" - "github.com/alcionai/corso/src/pkg/services/m365/api" -) - -// --------------------------------------------------------------------------- -// Unit tests -// --------------------------------------------------------------------------- - -type DataCollectionsUnitSuite struct { - tester.Suite -} - -func TestDataCollectionsUnitSuite(t *testing.T) { - suite.Run(t, &DataCollectionsUnitSuite{Suite: tester.NewUnitSuite(t)}) -} - -func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() { - type fileValues struct { - fileName string - value string - } - - table := []struct { - name string - data []fileValues - expect map[string]DeltaPath - canUsePreviousBackup bool - expectError assert.ErrorAssertionFunc - }{ - { - name: "delta urls only", - data: []fileValues{ - {graph.DeltaURLsFileName, "delta-link"}, - }, - expect: map[string]DeltaPath{}, - canUsePreviousBackup: true, - expectError: assert.NoError, - }, - { - name: "multiple delta urls", - data: []fileValues{ - {graph.DeltaURLsFileName, "delta-link"}, - {graph.DeltaURLsFileName, "delta-link-2"}, - }, - canUsePreviousBackup: false, - expectError: assert.Error, - }, - { - 
name: "previous path only", - data: []fileValues{ - {graph.PreviousPathFileName, "prev-path"}, - }, - expect: map[string]DeltaPath{ - "key": { - Delta: "delta-link", - Path: "prev-path", - }, - }, - canUsePreviousBackup: true, - expectError: assert.NoError, - }, - { - name: "multiple previous paths", - data: []fileValues{ - {graph.PreviousPathFileName, "prev-path"}, - {graph.PreviousPathFileName, "prev-path-2"}, - }, - canUsePreviousBackup: false, - expectError: assert.Error, - }, - { - name: "delta urls and previous paths", - data: []fileValues{ - {graph.DeltaURLsFileName, "delta-link"}, - {graph.PreviousPathFileName, "prev-path"}, - }, - expect: map[string]DeltaPath{ - "key": { - Delta: "delta-link", - Path: "prev-path", - }, - }, - canUsePreviousBackup: true, - expectError: assert.NoError, - }, - { - name: "delta urls and empty previous paths", - data: []fileValues{ - {graph.DeltaURLsFileName, "delta-link"}, - {graph.PreviousPathFileName, ""}, - }, - expect: map[string]DeltaPath{}, - canUsePreviousBackup: true, - expectError: assert.NoError, - }, - { - name: "empty delta urls and previous paths", - data: []fileValues{ - {graph.DeltaURLsFileName, ""}, - {graph.PreviousPathFileName, "prev-path"}, - }, - expect: map[string]DeltaPath{ - "key": { - Delta: "delta-link", - Path: "prev-path", - }, - }, - canUsePreviousBackup: true, - expectError: assert.NoError, - }, - { - name: "delta urls with special chars", - data: []fileValues{ - {graph.DeltaURLsFileName, "`!@#$%^&*()_[]{}/\"\\"}, - {graph.PreviousPathFileName, "prev-path"}, - }, - expect: map[string]DeltaPath{ - "key": { - Delta: "`!@#$%^&*()_[]{}/\"\\", - Path: "prev-path", - }, - }, - canUsePreviousBackup: true, - expectError: assert.NoError, - }, - { - name: "delta urls with escaped chars", - data: []fileValues{ - {graph.DeltaURLsFileName, `\n\r\t\b\f\v\0\\`}, - {graph.PreviousPathFileName, "prev-path"}, - }, - expect: map[string]DeltaPath{ - "key": { - Delta: "\\n\\r\\t\\b\\f\\v\\0\\\\", - Path: "prev-path", - }, - }, - canUsePreviousBackup: true, - expectError: assert.NoError, - }, - { - name: "delta urls with newline char runes", - data: []fileValues{ - // rune(92) = \, rune(110) = n. Ensuring it's not possible to - // error in serializing/deserializing and produce a single newline - // character from those two runes. 
- {graph.DeltaURLsFileName, string([]rune{rune(92), rune(110)})}, - {graph.PreviousPathFileName, "prev-path"}, - }, - expect: map[string]DeltaPath{ - "key": { - Delta: "\\n", - Path: "prev-path", - }, - }, - canUsePreviousBackup: true, - expectError: assert.NoError, - }, - } - for _, test := range table { - suite.Run(test.name, func() { - t := suite.T() - - ctx, flush := tester.NewContext(t) - defer flush() - - entries := []graph.MetadataCollectionEntry{} - - for _, d := range test.data { - entries = append( - entries, - graph.NewMetadataEntry(d.fileName, map[string]string{"key": d.value})) - } - - coll, err := graph.MakeMetadataCollection( - "t", "u", - path.ExchangeService, - path.EmailCategory, - entries, - func(cos *support.ConnectorOperationStatus) {}, - ) - require.NoError(t, err, clues.ToCore(err)) - - cdps, canUsePreviousBackup, err := parseMetadataCollections(ctx, []data.RestoreCollection{ - data.NoFetchRestoreCollection{Collection: coll}, - }) - test.expectError(t, err, clues.ToCore(err)) - - assert.Equal(t, test.canUsePreviousBackup, canUsePreviousBackup, "can use previous backup") - - emails := cdps[path.EmailCategory] - - assert.Len(t, emails, len(test.expect)) - - for k, v := range emails { - assert.Equal(t, v.Delta, emails[k].Delta, "delta") - assert.Equal(t, v.Path, emails[k].Path, "path") - } - }) - } -} - -type failingColl struct { - t *testing.T -} - -func (f failingColl) Items(ctx context.Context, errs *fault.Bus) <-chan data.Stream { - ic := make(chan data.Stream) - defer close(ic) - - errs.AddRecoverable(assert.AnError) - - return ic -} - -func (f failingColl) FullPath() path.Path { - tmp, err := path.Build( - "tenant", - "user", - path.ExchangeService, - path.EmailCategory, - false, - "inbox") - require.NoError(f.t, err, clues.ToCore(err)) - - return tmp -} - -func (f failingColl) FetchItemByName(context.Context, string) (data.Stream, error) { - // no fetch calls will be made - return nil, nil -} - -// This check is to ensure that we don't error out, but still return -// canUsePreviousBackup as false on read errors -func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections_ReadFailure() { - t := suite.T() - - ctx, flush := tester.NewContext(t) - defer flush() - - fc := failingColl{t} - - _, canUsePreviousBackup, err := parseMetadataCollections(ctx, []data.RestoreCollection{fc}) - require.NoError(t, err) - require.False(t, canUsePreviousBackup) -} - -// --------------------------------------------------------------------------- -// Integration tests -// --------------------------------------------------------------------------- - -func newStatusUpdater(t *testing.T, wg *sync.WaitGroup) func(status *support.ConnectorOperationStatus) { - updater := func(status *support.ConnectorOperationStatus) { - defer wg.Done() - } - - return updater -} - -type DataCollectionsIntegrationSuite struct { - tester.Suite - user string - site string - tenantID string - ac api.Client -} - -func TestDataCollectionsIntegrationSuite(t *testing.T) { - suite.Run(t, &DataCollectionsIntegrationSuite{ - Suite: tester.NewIntegrationSuite( - t, - [][]string{tester.M365AcctCredEnvs}, - ), - }) -} - -func (suite *DataCollectionsIntegrationSuite) SetupSuite() { - suite.user = tester.M365UserID(suite.T()) - suite.site = tester.M365SiteID(suite.T()) - - acct := tester.NewM365Account(suite.T()) - creds, err := acct.M365Config() - require.NoError(suite.T(), err, clues.ToCore(err)) - - suite.ac, err = api.NewClient(creds) - require.NoError(suite.T(), err, clues.ToCore(err)) - - suite.tenantID = 
creds.AzureTenantID - - tester.LogTimeOfTest(suite.T()) -} - -func (suite *DataCollectionsIntegrationSuite) TestMailFetch() { - var ( - userID = tester.M365UserID(suite.T()) - users = []string{userID} - handlers = BackupHandlers(suite.ac) - ) - - tests := []struct { - name string - scope selectors.ExchangeScope - folderNames map[string]struct{} - canMakeDeltaQueries bool - }{ - { - name: "Folder Iterative Check Mail", - scope: selectors.NewExchangeBackup(users).MailFolders( - []string{DefaultMailFolder}, - selectors.PrefixMatch(), - )[0], - folderNames: map[string]struct{}{ - DefaultMailFolder: {}, - }, - canMakeDeltaQueries: true, - }, - { - name: "Folder Iterative Check Mail Non-Delta", - scope: selectors.NewExchangeBackup(users).MailFolders( - []string{DefaultMailFolder}, - selectors.PrefixMatch(), - )[0], - folderNames: map[string]struct{}{ - DefaultMailFolder: {}, - }, - canMakeDeltaQueries: false, - }, - } - - for _, test := range tests { - suite.Run(test.name, func() { - t := suite.T() - - ctx, flush := tester.NewContext(t) - defer flush() - - ctrlOpts := control.Defaults() - ctrlOpts.ToggleFeatures.DisableDelta = !test.canMakeDeltaQueries - - collections, err := createCollections( - ctx, - handlers, - suite.tenantID, - inMock.NewProvider(userID, userID), - test.scope, - DeltaPaths{}, - ctrlOpts, - func(status *support.ConnectorOperationStatus) {}, - fault.New(true)) - require.NoError(t, err, clues.ToCore(err)) - - for _, c := range collections { - if c.FullPath().Service() == path.ExchangeMetadataService { - continue - } - - require.NotEmpty(t, c.FullPath().Folder(false)) - - // TODO(ashmrtn): Remove when LocationPath is made part of BackupCollection - // interface. - if !assert.Implements(t, (*data.LocationPather)(nil), c) { - continue - } - - loc := c.(data.LocationPather).LocationPath().String() - - require.NotEmpty(t, loc) - - delete(test.folderNames, loc) - } - - assert.Empty(t, test.folderNames) - }) - } -} - -func (suite *DataCollectionsIntegrationSuite) TestDelta() { - var ( - userID = tester.M365UserID(suite.T()) - users = []string{userID} - handlers = BackupHandlers(suite.ac) - ) - - tests := []struct { - name string - scope selectors.ExchangeScope - }{ - { - name: "Mail", - scope: selectors.NewExchangeBackup(users).MailFolders( - []string{DefaultMailFolder}, - selectors.PrefixMatch(), - )[0], - }, - { - name: "Contacts", - scope: selectors.NewExchangeBackup(users).ContactFolders( - []string{DefaultContactFolder}, - selectors.PrefixMatch(), - )[0], - }, - { - name: "Events", - scope: selectors.NewExchangeBackup(users).EventCalendars( - []string{DefaultCalendar}, - selectors.PrefixMatch(), - )[0], - }, - } - for _, test := range tests { - suite.Run(test.name, func() { - t := suite.T() - - ctx, flush := tester.NewContext(t) - defer flush() - - // get collections without providing any delta history (ie: full backup) - collections, err := createCollections( - ctx, - handlers, - suite.tenantID, - inMock.NewProvider(userID, userID), - test.scope, - DeltaPaths{}, - control.Defaults(), - func(status *support.ConnectorOperationStatus) {}, - fault.New(true)) - require.NoError(t, err, clues.ToCore(err)) - assert.Less(t, 1, len(collections), "retrieved metadata and data collections") - - var metadata data.BackupCollection - - for _, coll := range collections { - if coll.FullPath().Service() == path.ExchangeMetadataService { - metadata = coll - } - } - - require.NotNil(t, metadata, "collections contains a metadata collection") - - cdps, canUsePreviousBackup, err := 
parseMetadataCollections(ctx, []data.RestoreCollection{ - data.NoFetchRestoreCollection{Collection: metadata}, - }) - require.NoError(t, err, clues.ToCore(err)) - assert.True(t, canUsePreviousBackup, "can use previous backup") - - dps := cdps[test.scope.Category().PathType()] - - // now do another backup with the previous delta tokens, - // which should only contain the difference. - collections, err = createCollections( - ctx, - handlers, - suite.tenantID, - inMock.NewProvider(userID, userID), - test.scope, - dps, - control.Defaults(), - func(status *support.ConnectorOperationStatus) {}, - fault.New(true)) - require.NoError(t, err, clues.ToCore(err)) - - // TODO(keepers): this isn't a very useful test at the moment. It needs to - // investigate the items in the original and delta collections to at least - // assert some minimum assumptions, such as "deltas should retrieve fewer items". - // Delta usage is commented out at the moment, anyway. So this is currently - // a sanity check that the minimum behavior won't break. - for _, coll := range collections { - if coll.FullPath().Service() != path.ExchangeMetadataService { - ec, ok := coll.(*Collection) - require.True(t, ok, "collection is *Collection") - assert.NotNil(t, ec) - } - } - }) - } -} - -// TestMailSerializationRegression verifies that all mail data stored in the -// test account can be successfully downloaded into bytes and restored into -// M365 mail objects -func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression() { - t := suite.T() - - ctx, flush := tester.NewContext(t) - defer flush() - - var ( - wg sync.WaitGroup - users = []string{suite.user} - handlers = BackupHandlers(suite.ac) - ) - - sel := selectors.NewExchangeBackup(users) - sel.Include(sel.MailFolders([]string{DefaultMailFolder}, selectors.PrefixMatch())) - - collections, err := createCollections( - ctx, - handlers, - suite.tenantID, - inMock.NewProvider(suite.user, suite.user), - sel.Scopes()[0], - DeltaPaths{}, - control.Defaults(), - newStatusUpdater(t, &wg), - fault.New(true)) - require.NoError(t, err, clues.ToCore(err)) - - wg.Add(len(collections)) - - for _, edc := range collections { - suite.Run(edc.FullPath().String(), func() { - t := suite.T() - - ctx, flush := tester.NewContext(t) - defer flush() - - isMetadata := edc.FullPath().Service() == path.ExchangeMetadataService - streamChannel := edc.Items(ctx, fault.New(true)) - - // Verify that each message can be restored - for stream := range streamChannel { - buf := &bytes.Buffer{} - - read, err := buf.ReadFrom(stream.ToReader()) - assert.NoError(t, err, clues.ToCore(err)) - assert.NotZero(t, read) - - if isMetadata { - continue - } - - message, err := api.BytesToMessageable(buf.Bytes()) - assert.NotNil(t, message) - assert.NoError(t, err, clues.ToCore(err)) - } - }) - } - - wg.Wait() -} - -// TestContactSerializationRegression verifies ability to query contact items -// and to store contact within Collection. Downloaded contacts are run through -// a regression test to ensure that downloaded items can be uploaded. 
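// Editor's note: the regression pattern shared by these serialization tests,
// reduced to a hedged sketch (the helper is hypothetical; data.Stream and
// api.BytesToContactable are the real interfaces exercised below). An item
// only counts as backed up if its raw bytes also parse back into a Graph
// model, proving the payload is restorable rather than merely storable.
func exampleRestorable(stream data.Stream) error {
	buf := &bytes.Buffer{}

	if _, err := buf.ReadFrom(stream.ToReader()); err != nil {
		return err // download failed mid-item
	}

	if _, err := api.BytesToContactable(buf.Bytes()); err != nil {
		return err // bytes stored fine but cannot be re-uploaded
	}

	return nil
}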
-func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression() { - var ( - users = []string{suite.user} - handlers = BackupHandlers(suite.ac) - ) - - tests := []struct { - name string - scope selectors.ExchangeScope - }{ - { - name: "Default Contact Folder", - scope: selectors.NewExchangeBackup(users).ContactFolders( - []string{DefaultContactFolder}, - selectors.PrefixMatch())[0], - }, - } - - for _, test := range tests { - suite.Run(test.name, func() { - t := suite.T() - - ctx, flush := tester.NewContext(t) - defer flush() - - var wg sync.WaitGroup - - edcs, err := createCollections( - ctx, - handlers, - suite.tenantID, - inMock.NewProvider(suite.user, suite.user), - test.scope, - DeltaPaths{}, - control.Defaults(), - newStatusUpdater(t, &wg), - fault.New(true)) - require.NoError(t, err, clues.ToCore(err)) - - wg.Add(len(edcs)) - - require.GreaterOrEqual(t, len(edcs), 1, "expected 1 <= num collections <= 2") - require.GreaterOrEqual(t, 2, len(edcs), "expected 1 <= num collections <= 2") - - for _, edc := range edcs { - isMetadata := edc.FullPath().Service() == path.ExchangeMetadataService - count := 0 - - for stream := range edc.Items(ctx, fault.New(true)) { - buf := &bytes.Buffer{} - read, err := buf.ReadFrom(stream.ToReader()) - assert.NoError(t, err, clues.ToCore(err)) - assert.NotZero(t, read) - - if isMetadata { - continue - } - - contact, err := api.BytesToContactable(buf.Bytes()) - assert.NotNil(t, contact) - assert.NoError(t, err, "converting contact bytes: "+buf.String(), clues.ToCore(err)) - count++ - } - - if isMetadata { - continue - } - - // TODO(ashmrtn): Remove when LocationPath is made part of BackupCollection - // interface. - if !assert.Implements(t, (*data.LocationPather)(nil), edc) { - continue - } - - assert.Equal( - t, - edc.(data.LocationPather).LocationPath().String(), - DefaultContactFolder) - assert.NotZero(t, count) - } - - wg.Wait() - }) - } -} - -// TestEventsSerializationRegression ensures functionality of createCollections -// to be able to successfully query, download and restore event objects -func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression() { - t := suite.T() - - ctx, flush := tester.NewContext(t) - defer flush() - - var ( - users = []string{suite.user} - handlers = BackupHandlers(suite.ac) - calID string - bdayID string - ) - - fn := func(gcf graph.CachedContainer) error { - if ptr.Val(gcf.GetDisplayName()) == DefaultCalendar { - calID = ptr.Val(gcf.GetId()) - } - - if ptr.Val(gcf.GetDisplayName()) == "Birthdays" { - bdayID = ptr.Val(gcf.GetId()) - } - - return nil - } - - err := suite.ac.Events().EnumerateContainers(ctx, suite.user, DefaultCalendar, fn, fault.New(true)) - require.NoError(t, err, clues.ToCore(err)) - - tests := []struct { - name, expected string - scope selectors.ExchangeScope - }{ - { - name: "Default Event Calendar", - expected: calID, - scope: selectors.NewExchangeBackup(users).EventCalendars( - []string{DefaultCalendar}, - selectors.PrefixMatch(), - )[0], - }, - { - name: "Birthday Calendar", - expected: bdayID, - scope: selectors.NewExchangeBackup(users).EventCalendars( - []string{"Birthdays"}, - selectors.PrefixMatch(), - )[0], - }, - } - - for _, test := range tests { - suite.Run(test.name, func() { - t := suite.T() - - ctx, flush := tester.NewContext(t) - defer flush() - - var wg sync.WaitGroup - - collections, err := createCollections( - ctx, - handlers, - suite.tenantID, - inMock.NewProvider(suite.user, suite.user), - test.scope, - DeltaPaths{}, - control.Defaults(), - 
newStatusUpdater(t, &wg), - fault.New(true)) - require.NoError(t, err, clues.ToCore(err)) - require.Len(t, collections, 2) - - wg.Add(len(collections)) - - for _, edc := range collections { - var isMetadata bool - - if edc.FullPath().Service() != path.ExchangeMetadataService { - isMetadata = true - assert.Equal(t, test.expected, edc.FullPath().Folder(false)) - } else { - assert.Equal(t, "", edc.FullPath().Folder(false)) - } - - for item := range edc.Items(ctx, fault.New(true)) { - buf := &bytes.Buffer{} - - read, err := buf.ReadFrom(item.ToReader()) - assert.NoError(t, err, clues.ToCore(err)) - assert.NotZero(t, read) - - if isMetadata { - continue - } - - event, err := api.BytesToEventable(buf.Bytes()) - assert.NotNil(t, event) - assert.NoError(t, err, "creating event from bytes: "+buf.String(), clues.ToCore(err)) - } - } - - wg.Wait() - }) - } -} diff --git a/src/internal/connector/exchange/folder_resolver_test.go b/src/internal/connector/exchange/folder_resolver_test.go deleted file mode 100644 index 6ba78f464..000000000 --- a/src/internal/connector/exchange/folder_resolver_test.go +++ /dev/null @@ -1,129 +0,0 @@ -package exchange - -import ( - "testing" - - "github.com/alcionai/clues" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/stretchr/testify/suite" - - "github.com/alcionai/corso/src/internal/connector/graph" - "github.com/alcionai/corso/src/internal/tester" - "github.com/alcionai/corso/src/pkg/account" - "github.com/alcionai/corso/src/pkg/fault" - "github.com/alcionai/corso/src/pkg/services/m365/api" -) - -type CacheResolverSuite struct { - tester.Suite - credentials account.M365Config -} - -func TestCacheResolverIntegrationSuite(t *testing.T) { - suite.Run(t, &CacheResolverSuite{ - Suite: tester.NewIntegrationSuite( - t, - [][]string{tester.M365AcctCredEnvs}, - ), - }) -} - -func (suite *CacheResolverSuite) SetupSuite() { - t := suite.T() - - a := tester.NewM365Account(t) - m365, err := a.M365Config() - require.NoError(t, err, clues.ToCore(err)) - - suite.credentials = m365 -} - -func (suite *CacheResolverSuite) TestPopulate() { - ac, err := api.NewClient(suite.credentials) - require.NoError(suite.T(), err, clues.ToCore(err)) - - eventFunc := func(t *testing.T) graph.ContainerResolver { - return &eventCalendarCache{ - userID: tester.M365UserID(t), - enumer: ac.Events(), - getter: ac.Events(), - } - } - - contactFunc := func(t *testing.T) graph.ContainerResolver { - return &contactFolderCache{ - userID: tester.M365UserID(t), - enumer: ac.Contacts(), - getter: ac.Contacts(), - } - } - - tests := []struct { - name, folderInCache, root, basePath string - resolverFunc func(t *testing.T) graph.ContainerResolver - canFind assert.BoolAssertionFunc - }{ - { - name: "Default Event Cache", - // Fine as long as this isn't running against a migrated Exchange server. 
- folderInCache: DefaultCalendar, - root: DefaultCalendar, - basePath: DefaultCalendar, - resolverFunc: eventFunc, - canFind: assert.True, - }, - { - name: "Default Event Folder Hidden", - folderInCache: DefaultContactFolder, - root: DefaultCalendar, - canFind: assert.False, - resolverFunc: eventFunc, - }, - { - name: "Name Not in Cache", - folderInCache: "testFooBarWhoBar", - root: DefaultCalendar, - canFind: assert.False, - resolverFunc: eventFunc, - }, - { - name: "Default Contact Cache", - folderInCache: DefaultContactFolder, - root: DefaultContactFolder, - basePath: DefaultContactFolder, - canFind: assert.True, - resolverFunc: contactFunc, - }, - { - name: "Default Contact Hidden", - folderInCache: DefaultContactFolder, - root: DefaultContactFolder, - canFind: assert.False, - resolverFunc: contactFunc, - }, - { - name: "Name Not in Cache", - folderInCache: "testFooBarWhoBar", - root: DefaultContactFolder, - canFind: assert.False, - resolverFunc: contactFunc, - }, - } - for _, test := range tests { - suite.Run(test.name, func() { - t := suite.T() - - ctx, flush := tester.NewContext(t) - defer flush() - - resolver := test.resolverFunc(t) - - err := resolver.Populate(ctx, fault.New(true), test.root, test.basePath) - require.NoError(t, err, clues.ToCore(err)) - - _, isFound := resolver.LocationInCache(test.folderInCache) - test.canFind(t, isFound, "folder path", test.folderInCache) - }) - } -} diff --git a/src/internal/connector/exchange/iterators_test.go b/src/internal/connector/exchange/iterators_test.go deleted file mode 100644 index 7938dfff8..000000000 --- a/src/internal/connector/exchange/iterators_test.go +++ /dev/null @@ -1,47 +0,0 @@ -package exchange - -import ( - "testing" - - "github.com/alcionai/clues" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/stretchr/testify/suite" - - exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock" - "github.com/alcionai/corso/src/internal/connector/graph" - "github.com/alcionai/corso/src/internal/tester" - "github.com/alcionai/corso/src/pkg/services/m365/api" -) - -type ExchangeIteratorSuite struct { - tester.Suite -} - -func TestExchangeIteratorSuite(t *testing.T) { - suite.Run(t, &ExchangeIteratorSuite{Suite: tester.NewUnitSuite(t)}) -} - -func (suite *ExchangeIteratorSuite) TestDisplayable() { - t := suite.T() - bytes := exchMock.ContactBytes("Displayable") - contact, err := api.BytesToContactable(bytes) - require.NoError(t, err, clues.ToCore(err)) - - aDisplayable, ok := contact.(graph.Displayable) - assert.True(t, ok) - assert.NotNil(t, aDisplayable.GetId()) - assert.NotNil(t, aDisplayable.GetDisplayName()) -} - -func (suite *ExchangeIteratorSuite) TestDescendable() { - t := suite.T() - bytes := exchMock.MessageBytes("Descendable") - message, err := api.BytesToMessageable(bytes) - require.NoError(t, err, clues.ToCore(err)) - - aDescendable, ok := message.(graph.Descendable) - assert.True(t, ok) - assert.NotNil(t, aDescendable.GetId()) - assert.NotNil(t, aDescendable.GetParentFolderId()) -} diff --git a/src/internal/kopia/data_collection_test.go b/src/internal/kopia/data_collection_test.go index a6c2a8b97..318af2682 100644 --- a/src/internal/kopia/data_collection_test.go +++ b/src/internal/kopia/data_collection_test.go @@ -13,8 +13,8 @@ import ( "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" - exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock" "github.com/alcionai/corso/src/internal/data" + exchMock 
"github.com/alcionai/corso/src/internal/m365/exchange/mock" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/path" diff --git a/src/internal/kopia/merge_collection_test.go b/src/internal/kopia/merge_collection_test.go index bd5579e08..4ffd8d394 100644 --- a/src/internal/kopia/merge_collection_test.go +++ b/src/internal/kopia/merge_collection_test.go @@ -12,8 +12,8 @@ import ( "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" - "github.com/alcionai/corso/src/internal/connector/exchange/mock" "github.com/alcionai/corso/src/internal/data" + "github.com/alcionai/corso/src/internal/m365/exchange/mock" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/path" diff --git a/src/internal/kopia/upload.go b/src/internal/kopia/upload.go index a1cc0bed2..4a610c892 100644 --- a/src/internal/kopia/upload.go +++ b/src/internal/kopia/upload.go @@ -22,10 +22,10 @@ import ( "github.com/kopia/kopia/snapshot/snapshotfs" "github.com/alcionai/corso/src/internal/common/prefixmatcher" - "github.com/alcionai/corso/src/internal/connector/graph" - "github.com/alcionai/corso/src/internal/connector/graph/metadata" "github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/diagnostics" + "github.com/alcionai/corso/src/internal/m365/graph" + "github.com/alcionai/corso/src/internal/m365/graph/metadata" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/logger" diff --git a/src/internal/kopia/upload_test.go b/src/internal/kopia/upload_test.go index 32ed10364..fba178fdf 100644 --- a/src/internal/kopia/upload_test.go +++ b/src/internal/kopia/upload_test.go @@ -20,8 +20,8 @@ import ( "github.com/stretchr/testify/suite" pmMock "github.com/alcionai/corso/src/internal/common/prefixmatcher/mock" - exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock" "github.com/alcionai/corso/src/internal/data" + exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/fault" diff --git a/src/internal/kopia/wrapper_test.go b/src/internal/kopia/wrapper_test.go index 750a08432..19d6e1a4d 100644 --- a/src/internal/kopia/wrapper_test.go +++ b/src/internal/kopia/wrapper_test.go @@ -19,10 +19,10 @@ import ( "golang.org/x/exp/maps" pmMock "github.com/alcionai/corso/src/internal/common/prefixmatcher/mock" - exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock" - "github.com/alcionai/corso/src/internal/connector/onedrive/metadata" "github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data/mock" + exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock" + "github.com/alcionai/corso/src/internal/m365/onedrive/metadata" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/control/repository" diff --git a/src/internal/connector/data_collections.go b/src/internal/m365/backup.go similarity index 67% rename from src/internal/connector/data_collections.go rename to src/internal/m365/backup.go index 0d836820e..f3af44e62 100644 --- a/src/internal/connector/data_collections.go +++ b/src/internal/m365/backup.go @@ -1,4 +1,4 @@ -package connector +package m365 import ( "context" @@ -8,15 
+8,13 @@ import ( "github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/prefixmatcher" - "github.com/alcionai/corso/src/internal/connector/discovery" - "github.com/alcionai/corso/src/internal/connector/exchange" - "github.com/alcionai/corso/src/internal/connector/graph" - "github.com/alcionai/corso/src/internal/connector/onedrive" - "github.com/alcionai/corso/src/internal/connector/sharepoint" - "github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/diagnostics" - "github.com/alcionai/corso/src/pkg/backup/details" + "github.com/alcionai/corso/src/internal/m365/discovery" + "github.com/alcionai/corso/src/internal/m365/exchange" + "github.com/alcionai/corso/src/internal/m365/graph" + "github.com/alcionai/corso/src/internal/m365/onedrive" + "github.com/alcionai/corso/src/internal/m365/sharepoint" "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/filters" @@ -34,7 +32,7 @@ import ( // The metadata field can include things like delta tokens or the previous backup's // folder hierarchy. The absence of metadata causes the collection creation to ignore // prior history (ie, incrementals) and run a full backup. -func (gc *GraphConnector) ProduceBackupCollections( +func (ctrl *Controller) ProduceBackupCollections( ctx context.Context, owner idname.Provider, sels selectors.Selector, @@ -45,7 +43,7 @@ func (gc *GraphConnector) ProduceBackupCollections( ) ([]data.BackupCollection, prefixmatcher.StringSetReader, bool, error) { ctx, end := diagnostics.Span( ctx, - "gc:produceBackupCollections", + "m365:produceBackupCollections", diagnostics.Index("service", sels.Service.String())) defer end() @@ -55,14 +53,14 @@ func (gc *GraphConnector) ProduceBackupCollections( ctrlOpts.Parallelism.ItemFetch = graph.Parallelism(sels.PathService()). 
ItemOverride(ctx, ctrlOpts.Parallelism.ItemFetch) - err := verifyBackupInputs(sels, gc.IDNameLookup.IDs()) + err := verifyBackupInputs(sels, ctrl.IDNameLookup.IDs()) if err != nil { return nil, nil, false, clues.Stack(err).WithClues(ctx) } serviceEnabled, canMakeDeltaQueries, err := checkServiceEnabled( ctx, - gc.AC.Users(), + ctrl.AC.Users(), path.ServiceType(sels.Service), sels.DiscreteOwner) if err != nil { @@ -87,14 +85,14 @@ func (gc *GraphConnector) ProduceBackupCollections( switch sels.Service { case selectors.ServiceExchange: - colls, ssmb, canUsePreviousBackup, err = exchange.DataCollections( + colls, ssmb, canUsePreviousBackup, err = exchange.ProduceBackupCollections( ctx, - gc.AC, + ctrl.AC, sels, - gc.credentials.AzureTenantID, + ctrl.credentials.AzureTenantID, owner, metadata, - gc.UpdateStatus, + ctrl.UpdateStatus, ctrlOpts, errs) if err != nil { @@ -102,15 +100,15 @@ func (gc *GraphConnector) ProduceBackupCollections( } case selectors.ServiceOneDrive: - colls, ssmb, canUsePreviousBackup, err = onedrive.DataCollections( + colls, ssmb, canUsePreviousBackup, err = onedrive.ProduceBackupCollections( ctx, - gc.AC, + ctrl.AC, sels, owner, metadata, lastBackupVersion, - gc.credentials.AzureTenantID, - gc.UpdateStatus, + ctrl.credentials.AzureTenantID, + ctrl.UpdateStatus, ctrlOpts, errs) if err != nil { @@ -118,14 +116,14 @@ func (gc *GraphConnector) ProduceBackupCollections( } case selectors.ServiceSharePoint: - colls, ssmb, canUsePreviousBackup, err = sharepoint.DataCollections( + colls, ssmb, canUsePreviousBackup, err = sharepoint.ProduceBackupCollections( ctx, - gc.AC, + ctrl.AC, sels, owner, metadata, - gc.credentials, - gc, + ctrl.credentials, + ctrl, ctrlOpts, errs) if err != nil { @@ -144,7 +142,7 @@ func (gc *GraphConnector) ProduceBackupCollections( // break the process state, putting us into deadlock or // panics. if c.State() != data.DeletedState { - gc.incrementAwaitingMessages() + ctrl.incrementAwaitingMessages() } } @@ -154,7 +152,7 @@ func (gc *GraphConnector) ProduceBackupCollections( // IsBackupRunnable verifies that the users provided has the services enabled and // data can be backed up. The canMakeDeltaQueries provides info if the mailbox is // full and delta queries can be made on it. -func (gc *GraphConnector) IsBackupRunnable( +func (ctrl *Controller) IsBackupRunnable( ctx context.Context, service path.ServiceType, resourceOwner string, @@ -164,7 +162,7 @@ func (gc *GraphConnector) IsBackupRunnable( return true, nil } - info, err := gc.AC.Users().GetInfo(ctx, resourceOwner) + info, err := ctrl.AC.Users().GetInfo(ctx, resourceOwner) if err != nil { return false, err } @@ -225,59 +223,3 @@ func checkServiceEnabled( return true, canMakeDeltaQueries, nil } - -// ConsumeRestoreCollections restores data from the specified collections -// into M365 using the GraphAPI. 
-// SideEffect: gc.status is updated at the completion of operation -func (gc *GraphConnector) ConsumeRestoreCollections( - ctx context.Context, - backupVersion int, - sels selectors.Selector, - restoreCfg control.RestoreConfig, - opts control.Options, - dcs []data.RestoreCollection, - errs *fault.Bus, -) (*details.Details, error) { - ctx, end := diagnostics.Span(ctx, "connector:restore") - defer end() - - ctx = graph.BindRateLimiterConfig(ctx, graph.LimiterCfg{Service: sels.PathService()}) - - var ( - status *support.ConnectorOperationStatus - deets = &details.Builder{} - err error - ) - - switch sels.Service { - case selectors.ServiceExchange: - status, err = exchange.RestoreCollections(ctx, gc.AC, restoreCfg, dcs, deets, errs) - case selectors.ServiceOneDrive: - status, err = onedrive.RestoreCollections( - ctx, - onedrive.NewRestoreHandler(gc.AC), - backupVersion, - restoreCfg, - opts, - dcs, - deets, - errs) - case selectors.ServiceSharePoint: - status, err = sharepoint.RestoreCollections( - ctx, - backupVersion, - gc.AC, - restoreCfg, - opts, - dcs, - deets, - errs) - default: - err = clues.Wrap(clues.New(sels.Service.String()), "service not supported") - } - - gc.incrementAwaitingMessages() - gc.UpdateStatus(status) - - return deets.Details(), err -} diff --git a/src/internal/connector/data_collections_test.go b/src/internal/m365/backup_test.go similarity index 88% rename from src/internal/connector/data_collections_test.go rename to src/internal/m365/backup_test.go index a20b55952..9429be012 100644 --- a/src/internal/connector/data_collections_test.go +++ b/src/internal/m365/backup_test.go @@ -1,4 +1,4 @@ -package connector +package m365 import ( "bytes" @@ -11,8 +11,9 @@ import ( "github.com/stretchr/testify/suite" inMock "github.com/alcionai/corso/src/internal/common/idname/mock" - "github.com/alcionai/corso/src/internal/connector/exchange" - "github.com/alcionai/corso/src/internal/connector/sharepoint" + "github.com/alcionai/corso/src/internal/m365/exchange" + "github.com/alcionai/corso/src/internal/m365/resource" + "github.com/alcionai/corso/src/internal/m365/sharepoint" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/pkg/control" @@ -59,19 +60,13 @@ func (suite *DataCollectionIntgSuite) SetupSuite() { require.NoError(t, err, clues.ToCore(err)) } -// TestExchangeDataCollection verifies interface between operation and -// GraphConnector remains stable to receive a non-zero amount of Collections -// for the Exchange Package. 
Enabled exchange applications: -// - mail -// - contacts -// - events func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() { ctx, flush := tester.NewContext(suite.T()) defer flush() selUsers := []string{suite.user} - connector := loadConnector(ctx, suite.T(), Users) + ctrl := loadController(ctx, suite.T(), resource.Users) tests := []struct { name string getSelector func(t *testing.T) selectors.Selector @@ -127,14 +122,14 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() { ctrlOpts := control.Defaults() ctrlOpts.ToggleFeatures.DisableDelta = !canMakeDeltaQueries - collections, excludes, canUsePreviousBackup, err := exchange.DataCollections( + collections, excludes, canUsePreviousBackup, err := exchange.ProduceBackupCollections( ctx, suite.ac, sel, suite.tenantID, uidn, nil, - connector.UpdateStatus, + ctrl.UpdateStatus, ctrlOpts, fault.New(true)) require.NoError(t, err, clues.ToCore(err)) @@ -142,7 +137,7 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() { assert.True(t, excludes.Empty()) for range collections { - connector.incrementAwaitingMessages() + ctrl.incrementAwaitingMessages() } // Categories with delta endpoints will produce a collection for metadata @@ -158,7 +153,7 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() { } } - status := connector.Wait() + status := ctrl.Wait() assert.NotZero(t, status.Successes) t.Log(status.String()) }) @@ -172,8 +167,7 @@ func (suite *DataCollectionIntgSuite) TestDataCollections_invalidResourceOwner() defer flush() owners := []string{"snuffleupagus"} - - connector := loadConnector(ctx, suite.T(), Users) + ctrl := loadController(ctx, suite.T(), resource.Users) tests := []struct { name string getSelector func(t *testing.T) selectors.Selector @@ -238,7 +232,7 @@ func (suite *DataCollectionIntgSuite) TestDataCollections_invalidResourceOwner() ctx, flush := tester.NewContext(t) defer flush() - collections, excludes, canUsePreviousBackup, err := connector.ProduceBackupCollections( + collections, excludes, canUsePreviousBackup, err := ctrl.ProduceBackupCollections( ctx, test.getSelector(t), test.getSelector(t), @@ -254,16 +248,12 @@ func (suite *DataCollectionIntgSuite) TestDataCollections_invalidResourceOwner() } } -// TestSharePointDataCollection verifies interface between operation and -// GraphConnector remains stable to receive a non-zero amount of Collections -// for the SharePoint Package. 
func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() { ctx, flush := tester.NewContext(suite.T()) defer flush() selSites := []string{suite.site} - - connector := loadConnector(ctx, suite.T(), Sites) + ctrl := loadController(ctx, suite.T(), resource.Sites) tests := []struct { name string expected int @@ -297,14 +287,14 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() { sel := test.getSelector() - collections, excludes, canUsePreviousBackup, err := sharepoint.DataCollections( + collections, excludes, canUsePreviousBackup, err := sharepoint.ProduceBackupCollections( ctx, suite.ac, sel, sel, nil, - connector.credentials, - connector, + ctrl.credentials, + ctrl, control.Defaults(), fault.New(true)) require.NoError(t, err, clues.ToCore(err)) @@ -313,7 +303,7 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() { assert.True(t, excludes.Empty()) for range collections { - connector.incrementAwaitingMessages() + ctrl.incrementAwaitingMessages() } // we don't know an exact count of drives this will produce, @@ -328,7 +318,7 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() { } } - status := connector.Wait() + status := ctrl.Wait() assert.NotZero(t, status.Successes) t.Log(status.String()) }) @@ -341,7 +331,7 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() { type SPCollectionIntgSuite struct { tester.Suite - connector *GraphConnector + connector *Controller user string } @@ -358,7 +348,7 @@ func (suite *SPCollectionIntgSuite) SetupSuite() { ctx, flush := tester.NewContext(suite.T()) defer flush() - suite.connector = loadConnector(ctx, suite.T(), Sites) + suite.connector = loadController(ctx, suite.T(), resource.Sites) suite.user = tester.M365UserID(suite.T()) tester.LogTimeOfTest(suite.T()) @@ -372,11 +362,11 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() { var ( siteID = tester.M365SiteID(t) - gc = loadConnector(ctx, t, Sites) + ctrl = loadController(ctx, t, resource.Sites) siteIDs = []string{siteID} ) - id, name, err := gc.PopulateOwnerIDAndNamesFrom(ctx, siteID, nil) + id, name, err := ctrl.PopulateOwnerIDAndNamesFrom(ctx, siteID, nil) require.NoError(t, err, clues.ToCore(err)) sel := selectors.NewSharePointBackup(siteIDs) @@ -384,7 +374,7 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() { sel.SetDiscreteOwnerIDName(id, name) - cols, excludes, canUsePreviousBackup, err := gc.ProduceBackupCollections( + cols, excludes, canUsePreviousBackup, err := ctrl.ProduceBackupCollections( ctx, inMock.NewProvider(id, name), sel.Selector, @@ -419,11 +409,11 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Lists() { var ( siteID = tester.M365SiteID(t) - gc = loadConnector(ctx, t, Sites) + ctrl = loadController(ctx, t, resource.Sites) siteIDs = []string{siteID} ) - id, name, err := gc.PopulateOwnerIDAndNamesFrom(ctx, siteID, nil) + id, name, err := ctrl.PopulateOwnerIDAndNamesFrom(ctx, siteID, nil) require.NoError(t, err, clues.ToCore(err)) sel := selectors.NewSharePointBackup(siteIDs) @@ -431,7 +421,7 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Lists() { sel.SetDiscreteOwnerIDName(id, name) - cols, excludes, canUsePreviousBackup, err := gc.ProduceBackupCollections( + cols, excludes, canUsePreviousBackup, err := ctrl.ProduceBackupCollections( ctx, inMock.NewProvider(id, name), sel.Selector, diff --git a/src/internal/connector/graph_connector.go b/src/internal/m365/controller.go similarity index 62% 
rename from src/internal/connector/graph_connector.go rename to src/internal/m365/controller.go index b38518e37..910bf52a2 100644 --- a/src/internal/connector/graph_connector.go +++ b/src/internal/m365/controller.go @@ -1,6 +1,4 @@ -// Package connector uploads and retrieves data from M365 through -// the msgraph-go-sdk. -package connector +package m365 import ( "context" @@ -10,28 +8,25 @@ import ( "github.com/alcionai/clues" "github.com/alcionai/corso/src/internal/common/idname" - "github.com/alcionai/corso/src/internal/connector/graph" - "github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/data" + "github.com/alcionai/corso/src/internal/m365/graph" + "github.com/alcionai/corso/src/internal/m365/resource" + "github.com/alcionai/corso/src/internal/m365/support" "github.com/alcionai/corso/src/internal/operations/inject" "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/services/m365/api" ) -// --------------------------------------------------------------------------- -// Graph Connector -// --------------------------------------------------------------------------- - // must comply with BackupProducer and RestoreConsumer var ( - _ inject.BackupProducer = &GraphConnector{} - _ inject.RestoreConsumer = &GraphConnector{} + _ inject.BackupProducer = &Controller{} + _ inject.RestoreConsumer = &Controller{} ) -// GraphConnector is a struct used to wrap the GraphServiceClient and +// Controller is a struct used to wrap the GraphServiceClient and // GraphRequestAdapter from the msgraph-sdk-go. Additional fields are for // bookkeeping and interfacing with other component. -type GraphConnector struct { +type Controller struct { AC api.Client tenant string @@ -43,20 +38,20 @@ type GraphConnector struct { // reference for processes that choose to populate the values. 
 	IDNameLookup idname.Cacher
 
-	// wg is used to track completion of GC tasks
+	// wg is used to track completion of tasks
 	wg     *sync.WaitGroup
 	region *trace.Region
 
 	// mutex used to synchronize updates to `status`
 	mu     sync.Mutex
-	status support.ConnectorOperationStatus // contains the status of the last run status
+	status support.ControllerOperationStatus // contains the status of the last run
 }
 
-func NewGraphConnector(
+func NewController(
 	ctx context.Context,
 	acct account.Account,
-	r Resource,
-) (*GraphConnector, error) {
+	rc resource.Category,
+) (*Controller, error) {
 	creds, err := acct.M365Config()
 	if err != nil {
 		return nil, clues.Wrap(err, "retrieving m365 account configuration").WithClues(ctx)
@@ -67,106 +62,97 @@ func NewGraphConnector(
 		return nil, clues.Wrap(err, "creating api client").WithClues(ctx)
 	}
 
-	rc, err := r.resourceClient(ac)
+	rCli, err := getResourceClient(rc, ac)
 	if err != nil {
 		return nil, clues.Wrap(err, "creating resource client").WithClues(ctx)
 	}
 
-	gc := GraphConnector{
+	ctrl := Controller{
 		AC:           ac,
 		IDNameLookup: idname.NewCache(nil),
 		credentials:  creds,
-		ownerLookup:  rc,
+		ownerLookup:  rCli,
 		tenant:       acct.ID(),
 		wg:           &sync.WaitGroup{},
 	}
 
-	return &gc, nil
+	return &ctrl, nil
 }
 
 // ---------------------------------------------------------------------------
 // Processing Status
 // ---------------------------------------------------------------------------
 
-// AwaitStatus waits for all gc tasks to complete and then returns status
-func (gc *GraphConnector) Wait() *data.CollectionStats {
+// Wait waits for all tasks to complete and then returns status
+func (ctrl *Controller) Wait() *data.CollectionStats {
 	defer func() {
-		if gc.region != nil {
-			gc.region.End()
-			gc.region = nil
+		if ctrl.region != nil {
+			ctrl.region.End()
+			ctrl.region = nil
 		}
 	}()
-	gc.wg.Wait()
+	ctrl.wg.Wait()
 
 	// clean up and reset statefulness
 	dcs := data.CollectionStats{
-		Folders:   gc.status.Folders,
-		Objects:   gc.status.Metrics.Objects,
-		Successes: gc.status.Metrics.Successes,
-		Bytes:     gc.status.Metrics.Bytes,
-		Details:   gc.status.String(),
+		Folders:   ctrl.status.Folders,
+		Objects:   ctrl.status.Metrics.Objects,
+		Successes: ctrl.status.Metrics.Successes,
+		Bytes:     ctrl.status.Metrics.Bytes,
+		Details:   ctrl.status.String(),
 	}
 
-	gc.wg = &sync.WaitGroup{}
-	gc.status = support.ConnectorOperationStatus{}
+	ctrl.wg = &sync.WaitGroup{}
+	ctrl.status = support.ControllerOperationStatus{}
 
 	return &dcs
 }
 
-// UpdateStatus is used by gc initiated tasks to indicate completion
-func (gc *GraphConnector) UpdateStatus(status *support.ConnectorOperationStatus) {
-	defer gc.wg.Done()
+// UpdateStatus is used by controller-initiated tasks to indicate completion
+func (ctrl *Controller) UpdateStatus(status *support.ControllerOperationStatus) {
+	defer ctrl.wg.Done()
 
 	if status == nil {
 		return
 	}
 
-	gc.mu.Lock()
-	defer gc.mu.Unlock()
-	gc.status = support.MergeStatus(gc.status, *status)
+	ctrl.mu.Lock()
+	defer ctrl.mu.Unlock()
+	ctrl.status = support.MergeStatus(ctrl.status, *status)
 }
 
-// Status returns the current status of the graphConnector operation.
-func (gc *GraphConnector) Status() support.ConnectorOperationStatus {
-	return gc.status
+// Status returns the current status of the controller process.
+func (ctrl *Controller) Status() support.ControllerOperationStatus {
+	return ctrl.status
 }
 
-// PrintableStatus returns a string formatted version of the GC status.
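// Editor's note: a hedged sketch of the call pattern the status plumbing
// above implies (the wrapper is hypothetical). Every task registered through
// incrementAwaitingMessages must eventually call UpdateStatus, which runs
// wg.Done even for a nil status; otherwise Wait blocks forever.
func exampleStatusLifecycle(
	ctrl *Controller,
	work []*support.ControllerOperationStatus,
) *data.CollectionStats {
	for _, status := range work {
		ctrl.incrementAwaitingMessages() // wg.Add(1)
		go ctrl.UpdateStatus(status)     // merge under ctrl.mu, then wg.Done()
	}

	// Wait blocks on the WaitGroup, snapshots the merged metrics into
	// CollectionStats, then resets wg and status for the next operation.
	return ctrl.Wait()
}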
-func (gc *GraphConnector) PrintableStatus() string { - return gc.status.String() +// PrintableStatus returns a string formatted version of the status. +func (ctrl *Controller) PrintableStatus() string { + return ctrl.status.String() } -func (gc *GraphConnector) incrementAwaitingMessages() { - gc.wg.Add(1) +func (ctrl *Controller) incrementAwaitingMessages() { + ctrl.wg.Add(1) } // --------------------------------------------------------------------------- // Resource Lookup Handling // --------------------------------------------------------------------------- -type Resource int - -const ( - UnknownResource Resource = iota - AllResources // unused - Users - Sites -) - -func (r Resource) resourceClient(ac api.Client) (*resourceClient, error) { - switch r { - case Users: - return &resourceClient{enum: r, getter: ac.Users()}, nil - case Sites: - return &resourceClient{enum: r, getter: ac.Sites()}, nil +func getResourceClient(rc resource.Category, ac api.Client) (*resourceClient, error) { + switch rc { + case resource.Users: + return &resourceClient{enum: rc, getter: ac.Users()}, nil + case resource.Sites: + return &resourceClient{enum: rc, getter: ac.Sites()}, nil default: - return nil, clues.New("unrecognized owner resource enum").With("resource_enum", r) + return nil, clues.New("unrecognized owner resource enum").With("resource_enum", rc) } } type resourceClient struct { - enum Resource + enum resource.Category getter getIDAndNamer } @@ -243,18 +229,18 @@ func (r resourceClient) getOwnerIDAndNameFrom( // The id-name swapper is optional. Some processes will look up all owners in // the tenant before reaching this step. In that case, the data gets handed // down for this func to consume instead of performing further queries. The -// data gets stored inside the gc instance for later re-use. -func (gc *GraphConnector) PopulateOwnerIDAndNamesFrom( +// data gets stored inside the controller instance for later re-use. 
+func (ctrl *Controller) PopulateOwnerIDAndNamesFrom( ctx context.Context, owner string, // input value, can be either id or name ins idname.Cacher, ) (string, string, error) { - id, name, err := gc.ownerLookup.getOwnerIDAndNameFrom(ctx, gc.AC, owner, ins) + id, name, err := ctrl.ownerLookup.getOwnerIDAndNameFrom(ctx, ctrl.AC, owner, ins) if err != nil { return "", "", clues.Wrap(err, "identifying resource owner") } - gc.IDNameLookup = idname.NewCache(map[string]string{id: name}) + ctrl.IDNameLookup = idname.NewCache(map[string]string{id: name}) return id, name, nil } diff --git a/src/internal/connector/graph_connector_test.go b/src/internal/m365/controller_test.go similarity index 87% rename from src/internal/connector/graph_connector_test.go rename to src/internal/m365/controller_test.go index ccc9e25e7..e7be0ae5f 100644 --- a/src/internal/connector/graph_connector_test.go +++ b/src/internal/m365/controller_test.go @@ -1,4 +1,4 @@ -package connector +package m365 import ( "context" @@ -13,10 +13,11 @@ import ( "github.com/stretchr/testify/suite" inMock "github.com/alcionai/corso/src/internal/common/idname/mock" - exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock" - "github.com/alcionai/corso/src/internal/connector/mock" - "github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/data" + exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock" + "github.com/alcionai/corso/src/internal/m365/mock" + "github.com/alcionai/corso/src/internal/m365/resource" + "github.com/alcionai/corso/src/internal/m365/support" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/pkg/control" @@ -29,15 +30,15 @@ import ( // Unit tests // --------------------------------------------------------------------------- -type GraphConnectorUnitSuite struct { +type ControllerUnitSuite struct { tester.Suite } -func TestGraphConnectorUnitSuite(t *testing.T) { - suite.Run(t, &GraphConnectorUnitSuite{Suite: tester.NewUnitSuite(t)}) +func TestControllerUnitSuite(t *testing.T) { + suite.Run(t, &ControllerUnitSuite{Suite: tester.NewUnitSuite(t)}) } -func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() { +func (suite *ControllerUnitSuite) TestPopulateOwnerIDAndNamesFrom() { const ( id = "owner-id" name = "owner-name" @@ -47,10 +48,10 @@ func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() { itn = map[string]string{id: name} nti = map[string]string{name: id} lookup = &resourceClient{ - enum: Users, + enum: resource.Users, getter: &mock.IDNameGetter{ID: id, Name: name}, } - noLookup = &resourceClient{enum: Users, getter: &mock.IDNameGetter{}} + noLookup = &resourceClient{enum: resource.Users, getter: &mock.IDNameGetter{}} ) table := []struct { @@ -211,9 +212,9 @@ func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() { ctx, flush := tester.NewContext(t) defer flush() - gc := &GraphConnector{ownerLookup: test.rc} + ctrl := &Controller{ownerLookup: test.rc} - rID, rName, err := gc.PopulateOwnerIDAndNamesFrom(ctx, test.owner, test.ins) + rID, rName, err := ctrl.PopulateOwnerIDAndNamesFrom(ctx, test.owner, test.ins) test.expectErr(t, err, clues.ToCore(err)) assert.Equal(t, test.expectID, rID, "id") assert.Equal(t, test.expectName, rName, "name") @@ -221,14 +222,14 @@ func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() { } } -func (suite *GraphConnectorUnitSuite) TestGraphConnector_Wait() { +func (suite 
*ControllerUnitSuite) TestController_Wait() { t := suite.T() ctx, flush := tester.NewContext(t) defer flush() var ( - gc = &GraphConnector{ + ctrl = &Controller{ wg: &sync.WaitGroup{}, region: &trace.Region{}, } @@ -240,13 +241,13 @@ func (suite *GraphConnectorUnitSuite) TestGraphConnector_Wait() { status = support.CreateStatus(ctx, support.Backup, 1, metrics, "details") ) - gc.wg.Add(1) - gc.UpdateStatus(status) + ctrl.wg.Add(1) + ctrl.UpdateStatus(status) - result := gc.Wait() + result := ctrl.Wait() require.NotNil(t, result) - assert.Nil(t, gc.region, "region") - assert.Empty(t, gc.status, "status") + assert.Nil(t, ctrl.region, "region") + assert.Empty(t, ctrl.status, "status") assert.Equal(t, 1, result.Folders) assert.Equal(t, 2, result.Objects) assert.Equal(t, 3, result.Successes) @@ -257,15 +258,15 @@ func (suite *GraphConnectorUnitSuite) TestGraphConnector_Wait() { // Integration tests // --------------------------------------------------------------------------- -type GraphConnectorIntegrationSuite struct { +type ControllerIntegrationSuite struct { tester.Suite - connector *GraphConnector + ctrl *Controller user string secondaryUser string } -func TestGraphConnectorIntegrationSuite(t *testing.T) { - suite.Run(t, &GraphConnectorIntegrationSuite{ +func TestControllerIntegrationSuite(t *testing.T) { + suite.Run(t, &ControllerIntegrationSuite{ Suite: tester.NewIntegrationSuite( t, [][]string{tester.M365AcctCredEnvs}, @@ -273,20 +274,20 @@ func TestGraphConnectorIntegrationSuite(t *testing.T) { }) } -func (suite *GraphConnectorIntegrationSuite) SetupSuite() { +func (suite *ControllerIntegrationSuite) SetupSuite() { t := suite.T() ctx, flush := tester.NewContext(t) defer flush() - suite.connector = loadConnector(ctx, t, Users) + suite.ctrl = loadController(ctx, t, resource.Users) suite.user = tester.M365UserID(t) suite.secondaryUser = tester.SecondaryM365UserID(t) tester.LogTimeOfTest(t) } -func (suite *GraphConnectorIntegrationSuite) TestRestoreFailsBadService() { +func (suite *ControllerIntegrationSuite) TestRestoreFailsBadService() { t := suite.T() ctx, flush := tester.NewContext(t) @@ -299,7 +300,7 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreFailsBadService() { } ) - deets, err := suite.connector.ConsumeRestoreCollections( + deets, err := suite.ctrl.ConsumeRestoreCollections( ctx, version.Backup, sel, @@ -313,13 +314,13 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreFailsBadService() { assert.Error(t, err, clues.ToCore(err)) assert.NotNil(t, deets) - status := suite.connector.Wait() + status := suite.ctrl.Wait() assert.Equal(t, 0, status.Objects) assert.Equal(t, 0, status.Folders) assert.Equal(t, 0, status.Successes) } -func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() { +func (suite *ControllerIntegrationSuite) TestEmptyCollections() { restoreCfg := tester.DefaultTestRestoreConfig("") table := []struct { name string @@ -377,7 +378,7 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() { ctx, flush := tester.NewContext(t) defer flush() - deets, err := suite.connector.ConsumeRestoreCollections( + deets, err := suite.ctrl.ConsumeRestoreCollections( ctx, version.Backup, test.sel, @@ -391,7 +392,7 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() { require.NoError(t, err, clues.ToCore(err)) assert.NotNil(t, deets) - stats := suite.connector.Wait() + stats := suite.ctrl.Wait() assert.Zero(t, stats.Objects) assert.Zero(t, stats.Folders) assert.Zero(t, stats.Successes) @@ -418,9 +419,9 @@ func runRestore( 
start := time.Now() - restoreGC := loadConnector(ctx, t, config.Resource) + restoreCtrl := loadController(ctx, t, config.Resource) restoreSel := getSelectorWith(t, config.Service, config.ResourceOwners, true) - deets, err := restoreGC.ConsumeRestoreCollections( + deets, err := restoreCtrl.ConsumeRestoreCollections( ctx, backupVersion, restoreSel, @@ -431,7 +432,7 @@ func runRestore( require.NoError(t, err, clues.ToCore(err)) assert.NotNil(t, deets) - status := restoreGC.Wait() + status := restoreCtrl.Wait() runTime := time.Since(start) assert.Equal(t, numRestoreItems, status.Objects, "restored status.Objects") @@ -480,14 +481,14 @@ func runBackupAndCompare( nameToID[ro] = ro } - backupGC := loadConnector(ctx, t, config.Resource) - backupGC.IDNameLookup = inMock.NewCache(idToName, nameToID) + backupCtrl := loadController(ctx, t, config.Resource) + backupCtrl.IDNameLookup = inMock.NewCache(idToName, nameToID) backupSel := backupSelectorForExpected(t, config.Service, expectedDests) t.Logf("Selective backup of %s\n", backupSel) start := time.Now() - dcs, excludes, canUsePreviousBackup, err := backupGC.ProduceBackupCollections( + dcs, excludes, canUsePreviousBackup, err := backupCtrl.ProduceBackupCollections( ctx, backupSel, backupSel, @@ -512,7 +513,7 @@ func runBackupAndCompare( dcs, config) - status := backupGC.Wait() + status := backupCtrl.Wait() assert.Equalf(t, totalItems+skipped, status.Objects, "backup status.Objects; wanted %d items + %d skipped", totalItems, skipped) @@ -532,7 +533,7 @@ func runRestoreBackupTest( config := ConfigInfo{ Opts: opts, - Resource: test.resource, + Resource: test.resourceCat, Service: test.service, Tenant: tenant, ResourceOwners: resourceOwners, @@ -652,15 +653,15 @@ func runRestoreBackupTestVersions( test.collectionsLatest) } -func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() { +func (suite *ControllerIntegrationSuite) TestRestoreAndBackup() { bodyText := "This email has some text. However, all the text is on the same line." 
subjectText := "Test message for restore" table := []restoreBackupInfo{ { - name: "EmailsWithAttachments", - service: path.ExchangeService, - resource: Users, + name: "EmailsWithAttachments", + service: path.ExchangeService, + resourceCat: resource.Users, collections: []ColInfo{ { PathElements: []string{"Inbox"}, @@ -685,9 +686,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() { }, }, { - name: "MultipleEmailsMultipleFolders", - service: path.ExchangeService, - resource: Users, + name: "MultipleEmailsMultipleFolders", + service: path.ExchangeService, + resourceCat: resource.Users, collections: []ColInfo{ { PathElements: []string{"Inbox"}, @@ -761,9 +762,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() { }, }, { - name: "MultipleContactsSingleFolder", - service: path.ExchangeService, - resource: Users, + name: "MultipleContactsSingleFolder", + service: path.ExchangeService, + resourceCat: resource.Users, collections: []ColInfo{ { PathElements: []string{"Contacts"}, @@ -789,9 +790,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() { }, }, { - name: "MultipleContactsMultipleFolders", - service: path.ExchangeService, - resource: Users, + name: "MultipleContactsMultipleFolders", + service: path.ExchangeService, + resourceCat: resource.Users, collections: []ColInfo{ { PathElements: []string{"Work"}, @@ -909,7 +910,7 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() { runRestoreBackupTest( suite.T(), test, - suite.connector.tenant, + suite.ctrl.tenant, []string{suite.user}, control.Options{ RestorePermissions: true, @@ -919,12 +920,12 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() { } } -func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames() { +func (suite *ControllerIntegrationSuite) TestMultiFolderBackupDifferentNames() { table := []restoreBackupInfo{ { - name: "Contacts", - service: path.ExchangeService, - resource: Users, + name: "Contacts", + service: path.ExchangeService, + resourceCat: resource.Users, collections: []ColInfo{ { PathElements: []string{"Work"}, @@ -1005,7 +1006,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames totalItems, _, collections, expectedData, err := collectionsForInfo( test.service, - suite.connector.tenant, + suite.ctrl.tenant, suite.user, restoreCfg, []ColInfo{collection}, @@ -1026,8 +1027,8 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames restoreCfg.Location, ) - restoreGC := loadConnector(ctx, t, test.resource) - deets, err := restoreGC.ConsumeRestoreCollections( + restoreCtrl := loadController(ctx, t, test.resourceCat) + deets, err := restoreCtrl.ConsumeRestoreCollections( ctx, version.Backup, restoreSel, @@ -1041,7 +1042,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames require.NoError(t, err, clues.ToCore(err)) require.NotNil(t, deets) - status := restoreGC.Wait() + status := restoreCtrl.Wait() // Always just 1 because it's just 1 collection. assert.Equal(t, totalItems, status.Objects, "status.Objects") assert.Equal(t, totalItems, status.Successes, "status.Successes") @@ -1056,11 +1057,11 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames // Run a backup and compare its output with what we put in. 
- backupGC := loadConnector(ctx, t, test.resource) + backupCtrl := loadController(ctx, t, test.resourceCat) backupSel := backupSelectorForExpected(t, test.service, expectedDests) t.Log("Selective backup of", backupSel) - dcs, excludes, canUsePreviousBackup, err := backupGC.ProduceBackupCollections( + dcs, excludes, canUsePreviousBackup, err := backupCtrl.ProduceBackupCollections( ctx, backupSel, backupSel, @@ -1088,7 +1089,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames // deadlock. skipped := checkCollections(t, ctx, allItems, allExpectedData, dcs, ci) - status := backupGC.Wait() + status := backupCtrl.Wait() assert.Equal(t, allItems+skipped, status.Objects, "status.Objects") assert.Equal(t, allItems+skipped, status.Successes, "status.Successes") }) @@ -1097,13 +1098,13 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames // TODO: this should only be run during smoke tests, not part of the standard CI. // That's why it's set aside instead of being included in the other test set. -func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup_largeMailAttachment() { +func (suite *ControllerIntegrationSuite) TestRestoreAndBackup_largeMailAttachment() { subjectText := "Test message for restore with large attachment" test := restoreBackupInfo{ - name: "EmailsWithLargeAttachments", - service: path.ExchangeService, - resource: Users, + name: "EmailsWithLargeAttachments", + service: path.ExchangeService, + resourceCat: resource.Users, collections: []ColInfo{ { PathElements: []string{"Inbox"}, @@ -1122,7 +1123,7 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup_largeMailAttac runRestoreBackupTest( suite.T(), test, - suite.connector.tenant, + suite.ctrl.tenant, []string{suite.user}, control.Options{ RestorePermissions: true, @@ -1131,17 +1132,17 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup_largeMailAttac ) } -func (suite *GraphConnectorIntegrationSuite) TestBackup_CreatesPrefixCollections() { +func (suite *ControllerIntegrationSuite) TestBackup_CreatesPrefixCollections() { table := []struct { name string - resource Resource + resourceCat resource.Category selectorFunc func(t *testing.T) selectors.Selector service path.ServiceType categories []string }{ { - name: "Exchange", - resource: Users, + name: "Exchange", + resourceCat: resource.Users, selectorFunc: func(t *testing.T) selectors.Selector { sel := selectors.NewExchangeBackup([]string{suite.user}) sel.Include( @@ -1160,8 +1161,8 @@ func (suite *GraphConnectorIntegrationSuite) TestBackup_CreatesPrefixCollections }, }, { - name: "OneDrive", - resource: Users, + name: "OneDrive", + resourceCat: resource.Users, selectorFunc: func(t *testing.T) selectors.Selector { sel := selectors.NewOneDriveBackup([]string{suite.user}) sel.Include(sel.Folders([]string{selectors.NoneTgt})) @@ -1174,8 +1175,8 @@ func (suite *GraphConnectorIntegrationSuite) TestBackup_CreatesPrefixCollections }, }, { - name: "SharePoint", - resource: Sites, + name: "SharePoint", + resourceCat: resource.Sites, selectorFunc: func(t *testing.T) selectors.Selector { sel := selectors.NewSharePointBackup([]string{tester.M365SiteID(t)}) sel.Include( @@ -1205,18 +1206,18 @@ func (suite *GraphConnectorIntegrationSuite) TestBackup_CreatesPrefixCollections defer flush() var ( - backupGC = loadConnector(ctx, t, test.resource) - backupSel = test.selectorFunc(t) - errs = fault.New(true) - start = time.Now() + backupCtrl = loadController(ctx, t, test.resourceCat) + backupSel = 
test.selectorFunc(t) + errs = fault.New(true) + start = time.Now() ) - id, name, err := backupGC.PopulateOwnerIDAndNamesFrom(ctx, backupSel.DiscreteOwner, nil) + id, name, err := backupCtrl.PopulateOwnerIDAndNamesFrom(ctx, backupSel.DiscreteOwner, nil) require.NoError(t, err, clues.ToCore(err)) backupSel.SetDiscreteOwnerIDName(id, name) - dcs, excludes, canUsePreviousBackup, err := backupGC.ProduceBackupCollections( + dcs, excludes, canUsePreviousBackup, err := backupCtrl.ProduceBackupCollections( ctx, inMock.NewProvider(id, name), backupSel, @@ -1263,7 +1264,7 @@ func (suite *GraphConnectorIntegrationSuite) TestBackup_CreatesPrefixCollections assert.ElementsMatch(t, test.categories, foundCategories) - backupGC.Wait() + backupCtrl.Wait() assert.NoError(t, errs.Failure()) }) diff --git a/src/internal/connector/discovery/discovery.go b/src/internal/m365/discovery/discovery.go similarity index 98% rename from src/internal/connector/discovery/discovery.go rename to src/internal/m365/discovery/discovery.go index df31402b9..cba4a25a7 100644 --- a/src/internal/connector/discovery/discovery.go +++ b/src/internal/m365/discovery/discovery.go @@ -6,7 +6,7 @@ import ( "github.com/alcionai/clues" "github.com/microsoftgraph/msgraph-sdk-go/models" - "github.com/alcionai/corso/src/internal/connector/graph" + "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/services/m365/api" diff --git a/src/internal/connector/discovery/discovery_test.go b/src/internal/m365/discovery/discovery_test.go similarity index 99% rename from src/internal/connector/discovery/discovery_test.go rename to src/internal/m365/discovery/discovery_test.go index e972d7a80..f8648cd84 100644 --- a/src/internal/connector/discovery/discovery_test.go +++ b/src/internal/m365/discovery/discovery_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" - "github.com/alcionai/corso/src/internal/connector/discovery" + "github.com/alcionai/corso/src/internal/m365/discovery" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/credentials" diff --git a/src/internal/connector/exchange/attachment.go b/src/internal/m365/exchange/attachment.go similarity index 100% rename from src/internal/connector/exchange/attachment.go rename to src/internal/m365/exchange/attachment.go diff --git a/src/internal/connector/exchange/attendees.go b/src/internal/m365/exchange/attendees.go similarity index 100% rename from src/internal/connector/exchange/attendees.go rename to src/internal/m365/exchange/attendees.go diff --git a/src/internal/connector/exchange/service_iterators.go b/src/internal/m365/exchange/backup.go similarity index 54% rename from src/internal/connector/exchange/service_iterators.go rename to src/internal/m365/exchange/backup.go index 021c15cbc..a24cc1c44 100644 --- a/src/internal/connector/exchange/service_iterators.go +++ b/src/internal/m365/exchange/backup.go @@ -2,14 +2,18 @@ package exchange import ( "context" + "encoding/json" "github.com/alcionai/clues" + "github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/pii" + "github.com/alcionai/corso/src/internal/common/prefixmatcher" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/graph" - "github.com/alcionai/corso/src/internal/connector/support" 
"github.com/alcionai/corso/src/internal/data" + "github.com/alcionai/corso/src/internal/m365/graph" + "github.com/alcionai/corso/src/internal/m365/support" + "github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/logger" @@ -18,7 +22,303 @@ import ( "github.com/alcionai/corso/src/pkg/services/m365/api" ) -// filterContainersAndFillCollections is a utility function +// MetadataFileNames produces the category-specific set of filenames used to +// store graph metadata such as delta tokens and folderID->path references. +func MetadataFileNames(cat path.CategoryType) []string { + switch cat { + case path.EmailCategory, path.ContactsCategory: + return []string{graph.DeltaURLsFileName, graph.PreviousPathFileName} + default: + return []string{graph.PreviousPathFileName} + } +} + +type CatDeltaPaths map[path.CategoryType]DeltaPaths + +type DeltaPaths map[string]DeltaPath + +func (dps DeltaPaths) AddDelta(k, d string) { + dp, ok := dps[k] + if !ok { + dp = DeltaPath{} + } + + dp.Delta = d + dps[k] = dp +} + +func (dps DeltaPaths) AddPath(k, p string) { + dp, ok := dps[k] + if !ok { + dp = DeltaPath{} + } + + dp.Path = p + dps[k] = dp +} + +type DeltaPath struct { + Delta string + Path string +} + +// ParseMetadataCollections produces a map of structs holding delta +// and path lookup maps. +func parseMetadataCollections( + ctx context.Context, + colls []data.RestoreCollection, +) (CatDeltaPaths, bool, error) { + // cdp stores metadata + cdp := CatDeltaPaths{ + path.ContactsCategory: {}, + path.EmailCategory: {}, + path.EventsCategory: {}, + } + + // found tracks the metadata we've loaded, to make sure we don't + // fetch overlapping copies. + found := map[path.CategoryType]map[string]struct{}{ + path.ContactsCategory: {}, + path.EmailCategory: {}, + path.EventsCategory: {}, + } + + // errors from metadata items should not stop the backup, + // but it should prevent us from using previous backups + errs := fault.New(true) + + for _, coll := range colls { + var ( + breakLoop bool + items = coll.Items(ctx, errs) + category = coll.FullPath().Category() + ) + + for { + select { + case <-ctx.Done(): + return nil, false, clues.Wrap(ctx.Err(), "parsing collection metadata").WithClues(ctx) + + case item, ok := <-items: + if !ok || errs.Failure() != nil { + breakLoop = true + break + } + + var ( + m = map[string]string{} + cdps = cdp[category] + ) + + err := json.NewDecoder(item.ToReader()).Decode(&m) + if err != nil { + return nil, false, clues.New("decoding metadata json").WithClues(ctx) + } + + switch item.UUID() { + case graph.PreviousPathFileName: + if _, ok := found[category]["path"]; ok { + return nil, false, clues.Wrap(clues.New(category.String()), "multiple versions of path metadata").WithClues(ctx) + } + + for k, p := range m { + cdps.AddPath(k, p) + } + + found[category]["path"] = struct{}{} + + case graph.DeltaURLsFileName: + if _, ok := found[category]["delta"]; ok { + return nil, false, clues.Wrap(clues.New(category.String()), "multiple versions of delta metadata").WithClues(ctx) + } + + for k, d := range m { + cdps.AddDelta(k, d) + } + + found[category]["delta"] = struct{}{} + } + + cdp[category] = cdps + } + + if breakLoop { + break + } + } + } + + if errs.Failure() != nil { + logger.CtxErr(ctx, errs.Failure()).Info("reading metadata collection items") + + return CatDeltaPaths{ + path.ContactsCategory: {}, + path.EmailCategory: {}, + path.EventsCategory: {}, + }, false, nil + } + 
+ // Remove any entries that contain a path or a delta, but not both. + // That metadata is considered incomplete, and needs to incur a + // complete backup on the next run. + for _, dps := range cdp { + for k, dp := range dps { + if len(dp.Path) == 0 { + delete(dps, k) + } + } + } + + return cdp, true, nil +} + +// ProduceBackupCollections returns a DataCollection which the caller can +// use to read mailbox data out for the specified user +func ProduceBackupCollections( + ctx context.Context, + ac api.Client, + selector selectors.Selector, + tenantID string, + user idname.Provider, + metadata []data.RestoreCollection, + su support.StatusUpdater, + ctrlOpts control.Options, + errs *fault.Bus, +) ([]data.BackupCollection, *prefixmatcher.StringSetMatcher, bool, error) { + eb, err := selector.ToExchangeBackup() + if err != nil { + return nil, nil, false, clues.Wrap(err, "exchange dataCollection selector").WithClues(ctx) + } + + var ( + collections = []data.BackupCollection{} + el = errs.Local() + categories = map[path.CategoryType]struct{}{} + handlers = BackupHandlers(ac) + ) + + // Turn on concurrency limiter middleware for exchange backups + // unless explicitly disabled through DisableConcurrencyLimiterFN cli flag + if !ctrlOpts.ToggleFeatures.DisableConcurrencyLimiter { + graph.InitializeConcurrencyLimiter(ctrlOpts.Parallelism.ItemFetch) + } + + cdps, canUsePreviousBackup, err := parseMetadataCollections(ctx, metadata) + if err != nil { + return nil, nil, false, err + } + + for _, scope := range eb.Scopes() { + if el.Failure() != nil { + break + } + + dcs, err := createCollections( + ctx, + handlers, + tenantID, + user, + scope, + cdps[scope.Category().PathType()], + ctrlOpts, + su, + errs) + if err != nil { + el.AddRecoverable(err) + continue + } + + categories[scope.Category().PathType()] = struct{}{} + + collections = append(collections, dcs...) + } + + if len(collections) > 0 { + baseCols, err := graph.BaseCollections( + ctx, + collections, + tenantID, + user.ID(), + path.ExchangeService, + categories, + su, + errs) + if err != nil { + return nil, nil, false, err + } + + collections = append(collections, baseCols...) + } + + return collections, nil, canUsePreviousBackup, el.Failure() +} + +// createCollections - utility function that retrieves M365 +// IDs through Microsoft Graph API. The selectors.ExchangeScope +// determines the type of collections that are retrieved. 
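+// For the scope's category, it resolves the matching backup handler,
+// populates that handler's container cache, and then delegates the
+// per-container work to populateCollections below.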
+func createCollections( + ctx context.Context, + handlers map[path.CategoryType]backupHandler, + tenantID string, + user idname.Provider, + scope selectors.ExchangeScope, + dps DeltaPaths, + ctrlOpts control.Options, + su support.StatusUpdater, + errs *fault.Bus, +) ([]data.BackupCollection, error) { + ctx = clues.Add(ctx, "category", scope.Category().PathType()) + + var ( + allCollections = make([]data.BackupCollection, 0) + category = scope.Category().PathType() + qp = graph.QueryParams{ + Category: category, + ResourceOwner: user, + TenantID: tenantID, + } + ) + + handler, ok := handlers[category] + if !ok { + return nil, clues.New("unsupported backup category type").WithClues(ctx) + } + + foldersComplete := observe.MessageWithCompletion( + ctx, + observe.Bulletf("%s", qp.Category)) + defer close(foldersComplete) + + rootFolder, cc := handler.NewContainerCache(user.ID()) + + if err := cc.Populate(ctx, errs, rootFolder); err != nil { + return nil, clues.Wrap(err, "populating container cache") + } + + collections, err := populateCollections( + ctx, + qp, + handler, + su, + cc, + scope, + dps, + ctrlOpts, + errs) + if err != nil { + return nil, clues.Wrap(err, "filling collections") + } + + foldersComplete <- struct{}{} + + for _, coll := range collections { + allCollections = append(allCollections, coll) + } + + return allCollections, nil +} + +// populateCollections is a utility function // that places the M365 object ids belonging to specific directories // into a BackupCollection. Messages outside of those directories are omitted. // @param collection is filled with during this function. @@ -27,7 +327,7 @@ import ( // TODO(ashmrtn): This should really return []data.BackupCollection but // unfortunately some of our tests rely on being able to lookup returned // collections by ID and it would be non-trivial to change them. 
-func filterContainersAndFillCollections(
+func populateCollections(
 	ctx context.Context,
 	qp graph.QueryParams,
 	bh backupHandler,
diff --git a/src/internal/connector/exchange/service_iterators_test.go b/src/internal/m365/exchange/backup_test.go
similarity index 62%
rename from src/internal/connector/exchange/service_iterators_test.go
rename to src/internal/m365/exchange/backup_test.go
index 102031ffe..06d5a75c6 100644
--- a/src/internal/connector/exchange/service_iterators_test.go
+++ b/src/internal/m365/exchange/backup_test.go
@@ -1,7 +1,9 @@
 package exchange
 
 import (
+	"bytes"
 	"context"
+	"sync"
 	"testing"
 
 	"github.com/alcionai/clues"
@@ -11,9 +13,9 @@ import (
 
 	inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
 	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/internal/connector/graph"
-	"github.com/alcionai/corso/src/internal/connector/support"
 	"github.com/alcionai/corso/src/internal/data"
+	"github.com/alcionai/corso/src/internal/m365/graph"
+	"github.com/alcionai/corso/src/internal/m365/support"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/control"
@@ -107,12 +109,12 @@ func (m mockResolver) Items() []graph.CachedContainer {
 	return m.items
 }
 
-func (m mockResolver) AddToCache(ctx context.Context, gc graph.Container) error {
+func (m mockResolver) AddToCache(ctx context.Context, ctrl graph.Container) error {
 	if len(m.added) == 0 {
 		m.added = map[string]string{}
 	}
 
-	m.added[ptr.Val(gc.GetDisplayName())] = ptr.Val(gc.GetId())
+	m.added[ptr.Val(ctrl.GetDisplayName())] = ptr.Val(ctrl.GetId())
 
 	return nil
 }
@@ -125,33 +127,765 @@ func (m mockResolver) LocationInCache(string) (string, bool)
 func (m mockResolver) Populate(context.Context, *fault.Bus, string, ...string) error { return nil }
 
 // ---------------------------------------------------------------------------
-// tests
+// Unit tests
 // ---------------------------------------------------------------------------
 
-type ServiceIteratorsSuite struct {
+type DataCollectionsUnitSuite struct {
+	tester.Suite
+}
+
+func TestDataCollectionsUnitSuite(t *testing.T) {
+	suite.Run(t, &DataCollectionsUnitSuite{Suite: tester.NewUnitSuite(t)})
+}
+
+func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
+	type fileValues struct {
+		fileName string
+		value    string
+	}
+
+	table := []struct {
+		name                 string
+		data                 []fileValues
+		expect               map[string]DeltaPath
+		canUsePreviousBackup bool
+		expectError          assert.ErrorAssertionFunc
+	}{
+		{
+			name: "delta urls only",
+			data: []fileValues{
+				{graph.DeltaURLsFileName, "delta-link"},
+			},
+			expect:               map[string]DeltaPath{},
+			canUsePreviousBackup: true,
+			expectError:          assert.NoError,
+		},
+		{
+			name: "multiple delta urls",
+			data: []fileValues{
+				{graph.DeltaURLsFileName, "delta-link"},
+				{graph.DeltaURLsFileName, "delta-link-2"},
+			},
+			canUsePreviousBackup: false,
+			expectError:          assert.Error,
+		},
+		{
+			name: "previous path only",
+			data: []fileValues{
+				{graph.PreviousPathFileName, "prev-path"},
+			},
+			expect: map[string]DeltaPath{
+				"key": {
+					Path: "prev-path",
+				},
+			},
+			canUsePreviousBackup: true,
+			expectError:          assert.NoError,
+		},
+		{
+			name: "multiple previous paths",
+			data: []fileValues{
+				{graph.PreviousPathFileName, "prev-path"},
+				{graph.PreviousPathFileName, "prev-path-2"},
+			},
+			canUsePreviousBackup: false,
+			expectError:          assert.Error,
+		},
+		{
+			name: "delta urls and previous paths",
+			data: []fileValues{
+				{graph.DeltaURLsFileName, "delta-link"},
+				{graph.PreviousPathFileName, "prev-path"},
+			},
+			expect: map[string]DeltaPath{
+				"key": {
+					Delta: "delta-link",
+					Path:  "prev-path",
+				},
+			},
+			canUsePreviousBackup: true,
+			expectError:          assert.NoError,
+		},
+		{
+			name: "delta urls and empty previous paths",
+			data: []fileValues{
+				{graph.DeltaURLsFileName, "delta-link"},
+				{graph.PreviousPathFileName, ""},
+			},
+			expect:               map[string]DeltaPath{},
+			canUsePreviousBackup: true,
+			expectError:          assert.NoError,
+		},
+		{
+			name: "empty delta urls and previous paths",
+			data: []fileValues{
+				{graph.DeltaURLsFileName, ""},
+				{graph.PreviousPathFileName, "prev-path"},
+			},
+			expect: map[string]DeltaPath{
+				"key": {
+					Path: "prev-path",
+				},
+			},
+			canUsePreviousBackup: true,
+			expectError:          assert.NoError,
+		},
+		{
+			name: "delta urls with special chars",
+			data: []fileValues{
+				{graph.DeltaURLsFileName, "`!@#$%^&*()_[]{}/\"\\"},
+				{graph.PreviousPathFileName, "prev-path"},
+			},
+			expect: map[string]DeltaPath{
+				"key": {
+					Delta: "`!@#$%^&*()_[]{}/\"\\",
+					Path:  "prev-path",
+				},
+			},
+			canUsePreviousBackup: true,
+			expectError:          assert.NoError,
+		},
+		{
+			name: "delta urls with escaped chars",
+			data: []fileValues{
+				{graph.DeltaURLsFileName, `\n\r\t\b\f\v\0\\`},
+				{graph.PreviousPathFileName, "prev-path"},
+			},
+			expect: map[string]DeltaPath{
+				"key": {
+					Delta: "\\n\\r\\t\\b\\f\\v\\0\\\\",
+					Path:  "prev-path",
+				},
+			},
+			canUsePreviousBackup: true,
+			expectError:          assert.NoError,
+		},
+		{
+			name: "delta urls with newline char runes",
+			data: []fileValues{
+				// rune(92) = \, rune(110) = n. Ensuring it's not possible to
+				// error in serializing/deserializing and produce a single newline
+				// character from those two runes.
+				{graph.DeltaURLsFileName, string([]rune{rune(92), rune(110)})},
+				{graph.PreviousPathFileName, "prev-path"},
+			},
+			expect: map[string]DeltaPath{
+				"key": {
+					Delta: "\\n",
+					Path:  "prev-path",
+				},
+			},
+			canUsePreviousBackup: true,
+			expectError:          assert.NoError,
+		},
+	}
+	for _, test := range table {
+		suite.Run(test.name, func() {
+			t := suite.T()
+
+			ctx, flush := tester.NewContext(t)
+			defer flush()
+
+			entries := []graph.MetadataCollectionEntry{}
+
+			for _, d := range test.data {
+				entries = append(
+					entries,
+					graph.NewMetadataEntry(d.fileName, map[string]string{"key": d.value}))
+			}
+
+			coll, err := graph.MakeMetadataCollection(
+				"t", "u",
+				path.ExchangeService,
+				path.EmailCategory,
+				entries,
+				func(cos *support.ControllerOperationStatus) {},
+			)
+			require.NoError(t, err, clues.ToCore(err))
+
+			cdps, canUsePreviousBackup, err := parseMetadataCollections(ctx, []data.RestoreCollection{
+				data.NoFetchRestoreCollection{Collection: coll},
+			})
+			test.expectError(t, err, clues.ToCore(err))
+
+			assert.Equal(t, test.canUsePreviousBackup, canUsePreviousBackup, "can use previous backup")
+
+			emails := cdps[path.EmailCategory]
+
+			assert.Len(t, emails, len(test.expect))
+
+			for k, v := range test.expect {
+				assert.Equal(t, v.Delta, emails[k].Delta, "delta")
+				assert.Equal(t, v.Path, emails[k].Path, "path")
+			}
+		})
+	}
+}
+
+type failingColl struct {
+	t *testing.T
+}
+
+func (f failingColl) Items(ctx context.Context, errs *fault.Bus) <-chan data.Stream {
+	ic := make(chan data.Stream)
+	defer close(ic)
+
+	errs.AddRecoverable(assert.AnError)
+
+	return ic
+}
+
+func (f failingColl) FullPath() path.Path {
+	tmp, err := path.Build(
+		"tenant",
+		"user",
+		path.ExchangeService,
+		path.EmailCategory,
+		false,
+		"inbox")
+	require.NoError(f.t, err, clues.ToCore(err))
+
+	return tmp
+}
+
+func 
(f failingColl) FetchItemByName(context.Context, string) (data.Stream, error) { + // no fetch calls will be made + return nil, nil +} + +// This check is to ensure that we don't error out, but still return +// canUsePreviousBackup as false on read errors +func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections_ReadFailure() { + t := suite.T() + + ctx, flush := tester.NewContext(t) + defer flush() + + fc := failingColl{t} + + _, canUsePreviousBackup, err := parseMetadataCollections(ctx, []data.RestoreCollection{fc}) + require.NoError(t, err) + require.False(t, canUsePreviousBackup) +} + +// --------------------------------------------------------------------------- +// Integration tests +// --------------------------------------------------------------------------- + +func newStatusUpdater(t *testing.T, wg *sync.WaitGroup) func(status *support.ControllerOperationStatus) { + updater := func(status *support.ControllerOperationStatus) { + defer wg.Done() + } + + return updater +} + +type DataCollectionsIntegrationSuite struct { + tester.Suite + user string + site string + tenantID string + ac api.Client +} + +func TestDataCollectionsIntegrationSuite(t *testing.T) { + suite.Run(t, &DataCollectionsIntegrationSuite{ + Suite: tester.NewIntegrationSuite( + t, + [][]string{tester.M365AcctCredEnvs}, + ), + }) +} + +func (suite *DataCollectionsIntegrationSuite) SetupSuite() { + suite.user = tester.M365UserID(suite.T()) + suite.site = tester.M365SiteID(suite.T()) + + acct := tester.NewM365Account(suite.T()) + creds, err := acct.M365Config() + require.NoError(suite.T(), err, clues.ToCore(err)) + + suite.ac, err = api.NewClient(creds) + require.NoError(suite.T(), err, clues.ToCore(err)) + + suite.tenantID = creds.AzureTenantID + + tester.LogTimeOfTest(suite.T()) +} + +func (suite *DataCollectionsIntegrationSuite) TestMailFetch() { + var ( + userID = tester.M365UserID(suite.T()) + users = []string{userID} + handlers = BackupHandlers(suite.ac) + ) + + tests := []struct { + name string + scope selectors.ExchangeScope + folderNames map[string]struct{} + canMakeDeltaQueries bool + }{ + { + name: "Folder Iterative Check Mail", + scope: selectors.NewExchangeBackup(users).MailFolders( + []string{DefaultMailFolder}, + selectors.PrefixMatch(), + )[0], + folderNames: map[string]struct{}{ + DefaultMailFolder: {}, + }, + canMakeDeltaQueries: true, + }, + { + name: "Folder Iterative Check Mail Non-Delta", + scope: selectors.NewExchangeBackup(users).MailFolders( + []string{DefaultMailFolder}, + selectors.PrefixMatch(), + )[0], + folderNames: map[string]struct{}{ + DefaultMailFolder: {}, + }, + canMakeDeltaQueries: false, + }, + } + + for _, test := range tests { + suite.Run(test.name, func() { + t := suite.T() + + ctx, flush := tester.NewContext(t) + defer flush() + + ctrlOpts := control.Defaults() + ctrlOpts.ToggleFeatures.DisableDelta = !test.canMakeDeltaQueries + + collections, err := createCollections( + ctx, + handlers, + suite.tenantID, + inMock.NewProvider(userID, userID), + test.scope, + DeltaPaths{}, + ctrlOpts, + func(status *support.ControllerOperationStatus) {}, + fault.New(true)) + require.NoError(t, err, clues.ToCore(err)) + + for _, c := range collections { + if c.FullPath().Service() == path.ExchangeMetadataService { + continue + } + + require.NotEmpty(t, c.FullPath().Folder(false)) + + // TODO(ashmrtn): Remove when LocationPath is made part of BackupCollection + // interface. 
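+			// Collections lacking a LocationPath fail the Implements assertion
+			// and are skipped; the rest are matched against the expected
+			// folder names.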
+ if !assert.Implements(t, (*data.LocationPather)(nil), c) { + continue + } + + loc := c.(data.LocationPather).LocationPath().String() + + require.NotEmpty(t, loc) + + delete(test.folderNames, loc) + } + + assert.Empty(t, test.folderNames) + }) + } +} + +func (suite *DataCollectionsIntegrationSuite) TestDelta() { + var ( + userID = tester.M365UserID(suite.T()) + users = []string{userID} + handlers = BackupHandlers(suite.ac) + ) + + tests := []struct { + name string + scope selectors.ExchangeScope + }{ + { + name: "Mail", + scope: selectors.NewExchangeBackup(users).MailFolders( + []string{DefaultMailFolder}, + selectors.PrefixMatch(), + )[0], + }, + { + name: "Contacts", + scope: selectors.NewExchangeBackup(users).ContactFolders( + []string{DefaultContactFolder}, + selectors.PrefixMatch(), + )[0], + }, + { + name: "Events", + scope: selectors.NewExchangeBackup(users).EventCalendars( + []string{DefaultCalendar}, + selectors.PrefixMatch(), + )[0], + }, + } + for _, test := range tests { + suite.Run(test.name, func() { + t := suite.T() + + ctx, flush := tester.NewContext(t) + defer flush() + + // get collections without providing any delta history (ie: full backup) + collections, err := createCollections( + ctx, + handlers, + suite.tenantID, + inMock.NewProvider(userID, userID), + test.scope, + DeltaPaths{}, + control.Defaults(), + func(status *support.ControllerOperationStatus) {}, + fault.New(true)) + require.NoError(t, err, clues.ToCore(err)) + assert.Less(t, 1, len(collections), "retrieved metadata and data collections") + + var metadata data.BackupCollection + + for _, coll := range collections { + if coll.FullPath().Service() == path.ExchangeMetadataService { + metadata = coll + } + } + + require.NotNil(t, metadata, "collections contains a metadata collection") + + cdps, canUsePreviousBackup, err := parseMetadataCollections(ctx, []data.RestoreCollection{ + data.NoFetchRestoreCollection{Collection: metadata}, + }) + require.NoError(t, err, clues.ToCore(err)) + assert.True(t, canUsePreviousBackup, "can use previous backup") + + dps := cdps[test.scope.Category().PathType()] + + // now do another backup with the previous delta tokens, + // which should only contain the difference. + collections, err = createCollections( + ctx, + handlers, + suite.tenantID, + inMock.NewProvider(userID, userID), + test.scope, + dps, + control.Defaults(), + func(status *support.ControllerOperationStatus) {}, + fault.New(true)) + require.NoError(t, err, clues.ToCore(err)) + + // TODO(keepers): this isn't a very useful test at the moment. It needs to + // investigate the items in the original and delta collections to at least + // assert some minimum assumptions, such as "deltas should retrieve fewer items". + // Delta usage is commented out at the moment, anyway. So this is currently + // a sanity check that the minimum behavior won't break. 
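+			// Until then, simply verify that every non-metadata collection in
+			// the incremental pass is a well-formed exchange *Collection.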
+ for _, coll := range collections { + if coll.FullPath().Service() != path.ExchangeMetadataService { + ec, ok := coll.(*Collection) + require.True(t, ok, "collection is *Collection") + assert.NotNil(t, ec) + } + } + }) + } +} + +// TestMailSerializationRegression verifies that all mail data stored in the +// test account can be successfully downloaded into bytes and restored into +// M365 mail objects +func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression() { + t := suite.T() + + ctx, flush := tester.NewContext(t) + defer flush() + + var ( + wg sync.WaitGroup + users = []string{suite.user} + handlers = BackupHandlers(suite.ac) + ) + + sel := selectors.NewExchangeBackup(users) + sel.Include(sel.MailFolders([]string{DefaultMailFolder}, selectors.PrefixMatch())) + + collections, err := createCollections( + ctx, + handlers, + suite.tenantID, + inMock.NewProvider(suite.user, suite.user), + sel.Scopes()[0], + DeltaPaths{}, + control.Defaults(), + newStatusUpdater(t, &wg), + fault.New(true)) + require.NoError(t, err, clues.ToCore(err)) + + wg.Add(len(collections)) + + for _, edc := range collections { + suite.Run(edc.FullPath().String(), func() { + t := suite.T() + + ctx, flush := tester.NewContext(t) + defer flush() + + isMetadata := edc.FullPath().Service() == path.ExchangeMetadataService + streamChannel := edc.Items(ctx, fault.New(true)) + + // Verify that each message can be restored + for stream := range streamChannel { + buf := &bytes.Buffer{} + + read, err := buf.ReadFrom(stream.ToReader()) + assert.NoError(t, err, clues.ToCore(err)) + assert.NotZero(t, read) + + if isMetadata { + continue + } + + message, err := api.BytesToMessageable(buf.Bytes()) + assert.NotNil(t, message) + assert.NoError(t, err, clues.ToCore(err)) + } + }) + } + + wg.Wait() +} + +// TestContactSerializationRegression verifies ability to query contact items +// and to store contact within Collection. Downloaded contacts are run through +// a regression test to ensure that downloaded items can be uploaded. 
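+// Each downloaded item must parse back into a Contactable via
+// api.BytesToContactable before it counts toward the folder's item total.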
+func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression() { + var ( + users = []string{suite.user} + handlers = BackupHandlers(suite.ac) + ) + + tests := []struct { + name string + scope selectors.ExchangeScope + }{ + { + name: "Default Contact Folder", + scope: selectors.NewExchangeBackup(users).ContactFolders( + []string{DefaultContactFolder}, + selectors.PrefixMatch())[0], + }, + } + + for _, test := range tests { + suite.Run(test.name, func() { + t := suite.T() + + ctx, flush := tester.NewContext(t) + defer flush() + + var wg sync.WaitGroup + + edcs, err := createCollections( + ctx, + handlers, + suite.tenantID, + inMock.NewProvider(suite.user, suite.user), + test.scope, + DeltaPaths{}, + control.Defaults(), + newStatusUpdater(t, &wg), + fault.New(true)) + require.NoError(t, err, clues.ToCore(err)) + + wg.Add(len(edcs)) + + require.GreaterOrEqual(t, len(edcs), 1, "expected 1 <= num collections <= 2") + require.GreaterOrEqual(t, 2, len(edcs), "expected 1 <= num collections <= 2") + + for _, edc := range edcs { + isMetadata := edc.FullPath().Service() == path.ExchangeMetadataService + count := 0 + + for stream := range edc.Items(ctx, fault.New(true)) { + buf := &bytes.Buffer{} + read, err := buf.ReadFrom(stream.ToReader()) + assert.NoError(t, err, clues.ToCore(err)) + assert.NotZero(t, read) + + if isMetadata { + continue + } + + contact, err := api.BytesToContactable(buf.Bytes()) + assert.NotNil(t, contact) + assert.NoError(t, err, "converting contact bytes: "+buf.String(), clues.ToCore(err)) + count++ + } + + if isMetadata { + continue + } + + // TODO(ashmrtn): Remove when LocationPath is made part of BackupCollection + // interface. + if !assert.Implements(t, (*data.LocationPather)(nil), edc) { + continue + } + + assert.Equal( + t, + edc.(data.LocationPather).LocationPath().String(), + DefaultContactFolder) + assert.NotZero(t, count) + } + + wg.Wait() + }) + } +} + +// TestEventsSerializationRegression ensures functionality of createCollections +// to be able to successfully query, download and restore event objects +func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression() { + t := suite.T() + + ctx, flush := tester.NewContext(t) + defer flush() + + var ( + users = []string{suite.user} + handlers = BackupHandlers(suite.ac) + calID string + bdayID string + ) + + fn := func(gcc graph.CachedContainer) error { + if ptr.Val(gcc.GetDisplayName()) == DefaultCalendar { + calID = ptr.Val(gcc.GetId()) + } + + if ptr.Val(gcc.GetDisplayName()) == "Birthdays" { + bdayID = ptr.Val(gcc.GetId()) + } + + return nil + } + + err := suite.ac.Events().EnumerateContainers(ctx, suite.user, DefaultCalendar, fn, fault.New(true)) + require.NoError(t, err, clues.ToCore(err)) + + tests := []struct { + name, expected string + scope selectors.ExchangeScope + }{ + { + name: "Default Event Calendar", + expected: calID, + scope: selectors.NewExchangeBackup(users).EventCalendars( + []string{DefaultCalendar}, + selectors.PrefixMatch(), + )[0], + }, + { + name: "Birthday Calendar", + expected: bdayID, + scope: selectors.NewExchangeBackup(users).EventCalendars( + []string{"Birthdays"}, + selectors.PrefixMatch(), + )[0], + }, + } + + for _, test := range tests { + suite.Run(test.name, func() { + t := suite.T() + + ctx, flush := tester.NewContext(t) + defer flush() + + var wg sync.WaitGroup + + collections, err := createCollections( + ctx, + handlers, + suite.tenantID, + inMock.NewProvider(suite.user, suite.user), + test.scope, + DeltaPaths{}, + control.Defaults(), + 
newStatusUpdater(t, &wg),
+				fault.New(true))
+			require.NoError(t, err, clues.ToCore(err))
+			require.Len(t, collections, 2)
+
+			wg.Add(len(collections))
+
+			for _, edc := range collections {
+				var isMetadata bool
+
+				if edc.FullPath().Service() != path.ExchangeMetadataService {
+					assert.Equal(t, test.expected, edc.FullPath().Folder(false))
+				} else {
+					isMetadata = true
+					assert.Equal(t, "", edc.FullPath().Folder(false))
+				}
+
+				for item := range edc.Items(ctx, fault.New(true)) {
+					buf := &bytes.Buffer{}
+
+					read, err := buf.ReadFrom(item.ToReader())
+					assert.NoError(t, err, clues.ToCore(err))
+					assert.NotZero(t, read)
+
+					if isMetadata {
+						continue
+					}
+
+					event, err := api.BytesToEventable(buf.Bytes())
+					assert.NotNil(t, event)
+					assert.NoError(t, err, "creating event from bytes: "+buf.String(), clues.ToCore(err))
+				}
+			}
+
+			wg.Wait()
+		})
+	}
+}
+
+type CollectionPopulationSuite struct {
 	tester.Suite
 	creds account.M365Config
 }
 
 func TestServiceIteratorsUnitSuite(t *testing.T) {
-	suite.Run(t, &ServiceIteratorsSuite{Suite: tester.NewUnitSuite(t)})
+	suite.Run(t, &CollectionPopulationSuite{Suite: tester.NewUnitSuite(t)})
 }
 
-func (suite *ServiceIteratorsSuite) SetupSuite() {
+func (suite *CollectionPopulationSuite) SetupSuite() {
 	a := tester.NewMockM365Account(suite.T())
 	m365, err := a.M365Config()
 	require.NoError(suite.T(), err, clues.ToCore(err))
 	suite.creds = m365
 }
 
-func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
+func (suite *CollectionPopulationSuite) TestPopulateCollections() {
 	var (
 		qp = graph.QueryParams{
 			Category:      path.EmailCategory, // doesn't matter which one we use.
 			ResourceOwner: inMock.NewProvider("user_id", "user_name"),
 			TenantID:      suite.creds.AzureTenantID,
 		}
-		statusUpdater = func(*support.ConnectorOperationStatus) {}
+		statusUpdater = func(*support.ControllerOperationStatus) {}
 		allScope      = selectors.NewExchangeBackup(nil).MailFolders(selectors.Any())[0]
 		dps           = DeltaPaths{} // incrementals are tested separately
 		commonResult  = mockGetterResults{
@@ -349,7 +1083,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
 			category: qp.Category,
 		}
 
-		collections, err := filterContainersAndFillCollections(
+		collections, err := populateCollections(
 			ctx,
 			qp,
 			mbh,
@@ -435,7 +1169,7 @@ func checkMetadata(
 	assert.Equal(t, expect, catPaths[cat])
 }
 
-func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_DuplicateFolders() {
+func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_DuplicateFolders() {
 	type scopeCat struct {
 		scope selectors.ExchangeScope
 		cat   path.CategoryType
 	}
 
 	var (
 		qp = graph.QueryParams{
 			Category:      path.EmailCategory, // doesn't matter which one we use.
 			ResourceOwner: inMock.NewProvider("user_id", "user_name"),
 			TenantID:      suite.creds.AzureTenantID,
 		}
-		statusUpdater = func(*support.ConnectorOperationStatus) {}
+		statusUpdater = func(*support.ControllerOperationStatus) {}
 
 		dataTypes = []scopeCat{
 			{
@@ -687,7 +1421,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_Dupli
 			category: qp.Category,
 		}
 
-		collections, err := filterContainersAndFillCollections(
+		collections, err := populateCollections(
 			ctx,
 			qp,
 			mbh,
@@ -754,7 +1488,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_Dupli
 	}
 }
 
-func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_repeatedItems() {
+func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_repeatedItems() {
 	newDelta := api.DeltaUpdate{URL: "delta_url"}
 
 	table := []struct {
@@ -832,7 +1566,7 @@ func (suite 
*ServiceIteratorsSuite) TestFilterContainersAndFillCollections_repea ResourceOwner: inMock.NewProvider("user_id", "user_name"), TenantID: suite.creds.AzureTenantID, } - statusUpdater = func(*support.ConnectorOperationStatus) {} + statusUpdater = func(*support.ControllerOperationStatus) {} allScope = selectors.NewExchangeBackup(nil).MailFolders(selectors.Any())[0] dps = DeltaPaths{} // incrementals are tested separately container1 = mockContainer{ @@ -851,7 +1585,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_repea require.Equal(t, "user_id", qp.ResourceOwner.ID(), qp.ResourceOwner) require.Equal(t, "user_name", qp.ResourceOwner.Name(), qp.ResourceOwner) - collections, err := filterContainersAndFillCollections( + collections, err := populateCollections( ctx, qp, mbh, @@ -907,7 +1641,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_repea } } -func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_incrementals_nondelta() { +func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_incrementals_nondelta() { var ( userID = "user_id" tenantID = suite.creds.AzureTenantID @@ -917,7 +1651,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_incre ResourceOwner: inMock.NewProvider("user_id", "user_name"), TenantID: suite.creds.AzureTenantID, } - statusUpdater = func(*support.ConnectorOperationStatus) {} + statusUpdater = func(*support.ControllerOperationStatus) {} allScope = selectors.NewExchangeBackup(nil).MailFolders(selectors.Any())[0] commonResults = mockGetterResults{ added: []string{"added"}, @@ -1270,7 +2004,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_incre } } - collections, err := filterContainersAndFillCollections( + collections, err := populateCollections( ctx, qp, mbh, diff --git a/src/internal/connector/exchange/cache_container.go b/src/internal/m365/exchange/cache_container.go similarity index 93% rename from src/internal/connector/exchange/cache_container.go rename to src/internal/m365/exchange/cache_container.go index acad82a31..b4be9e801 100644 --- a/src/internal/connector/exchange/cache_container.go +++ b/src/internal/m365/exchange/cache_container.go @@ -4,7 +4,7 @@ import ( "github.com/alcionai/clues" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/graph" + "github.com/alcionai/corso/src/internal/m365/graph" ) // checkIDAndName is a helper function to ensure that diff --git a/src/internal/connector/exchange/exchange_data_collection.go b/src/internal/m365/exchange/collection.go similarity index 98% rename from src/internal/connector/exchange/exchange_data_collection.go rename to src/internal/m365/exchange/collection.go index 921952b88..bc5ff3027 100644 --- a/src/internal/connector/exchange/exchange_data_collection.go +++ b/src/internal/m365/exchange/collection.go @@ -13,9 +13,9 @@ import ( "github.com/alcionai/clues" - "github.com/alcionai/corso/src/internal/connector/graph" - "github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/data" + "github.com/alcionai/corso/src/internal/m365/graph" + "github.com/alcionai/corso/src/internal/m365/support" "github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/control" @@ -125,7 +125,7 @@ func (col *Collection) LocationPath() *path.Builder { return col.locationPath } -// TODO(ashmrtn): Fill in with 
previous path once GraphConnector compares old +// TODO(ashmrtn): Fill in with previous path once the Controller compares old // and new folder hierarchies. func (col Collection) PreviousPath() path.Path { return col.prevPath diff --git a/src/internal/connector/exchange/exchange_data_collection_test.go b/src/internal/m365/exchange/collection_test.go similarity index 85% rename from src/internal/connector/exchange/exchange_data_collection_test.go rename to src/internal/m365/exchange/collection_test.go index 212f0c96e..2c023d703 100644 --- a/src/internal/connector/exchange/exchange_data_collection_test.go +++ b/src/internal/m365/exchange/collection_test.go @@ -11,8 +11,8 @@ import ( "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" - "github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/data" + "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/control" @@ -46,20 +46,19 @@ func (mi *mockItemer) Serialize( return nil, mi.serializeErr } -type ExchangeDataCollectionSuite struct { +type CollectionSuite struct { tester.Suite } -func TestExchangeDataCollectionSuite(t *testing.T) { - suite.Run(t, &ExchangeDataCollectionSuite{Suite: tester.NewUnitSuite(t)}) +func TestCollectionSuite(t *testing.T) { + suite.Run(t, &CollectionSuite{Suite: tester.NewUnitSuite(t)}) } -func (suite *ExchangeDataCollectionSuite) TestExchangeDataReader_Valid() { +func (suite *CollectionSuite) TestReader_Valid() { m := []byte("test message") description := "aFile" ed := &Stream{id: description, message: m} - // Read the message using the `ExchangeData` reader and validate it matches what we set buf := &bytes.Buffer{} _, err := buf.ReadFrom(ed.ToReader()) assert.NoError(suite.T(), err, clues.ToCore(err)) @@ -67,7 +66,7 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataReader_Valid() { assert.Equal(suite.T(), description, ed.UUID()) } -func (suite *ExchangeDataCollectionSuite) TestExchangeDataReader_Empty() { +func (suite *CollectionSuite) TestReader_Empty() { var ( empty []byte expected int64 @@ -82,7 +81,7 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataReader_Empty() { assert.NoError(t, err, clues.ToCore(err)) } -func (suite *ExchangeDataCollectionSuite) TestExchangeData_FullPath() { +func (suite *CollectionSuite) TestColleciton_FullPath() { t := suite.T() tenant := "a-tenant" user := "a-user" @@ -105,7 +104,7 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeData_FullPath() { assert.Equal(t, fullPath, edc.FullPath()) } -func (suite *ExchangeDataCollectionSuite) TestExchangeDataCollection_NewExchangeDataCollection() { +func (suite *CollectionSuite) TestCollection_NewCollection() { t := suite.T() tenant := "a-tenant" user := "a-user" @@ -129,7 +128,7 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataCollection_NewExchange assert.Equal(t, fullPath, edc.FullPath()) } -func (suite *ExchangeDataCollectionSuite) TestNewCollection_state() { +func (suite *CollectionSuite) TestNewCollection_state() { fooP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "foo") require.NoError(suite.T(), err, clues.ToCore(err)) barP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "bar") @@ -189,7 +188,7 @@ func (suite *ExchangeDataCollectionSuite) TestNewCollection_state() { } } -func (suite *ExchangeDataCollectionSuite) TestGetItemWithRetries() { +func 
(suite *CollectionSuite) TestGetItemWithRetries() {
 	table := []struct {
 		name  string
 		items *mockItemer
diff --git a/src/internal/connector/exchange/consts.go b/src/internal/m365/exchange/consts.go
similarity index 100%
rename from src/internal/connector/exchange/consts.go
rename to src/internal/m365/exchange/consts.go
diff --git a/src/internal/connector/exchange/contact_folder_cache.go b/src/internal/m365/exchange/contact_container_cache.go
similarity index 89%
rename from src/internal/connector/exchange/contact_folder_cache.go
rename to src/internal/m365/exchange/contact_container_cache.go
index 75cc2f66d..aa9a45518 100644
--- a/src/internal/connector/exchange/contact_folder_cache.go
+++ b/src/internal/m365/exchange/contact_container_cache.go
@@ -6,13 +6,13 @@ import (
 	"github.com/alcionai/clues"
 
 	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/internal/connector/graph"
+	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
 )
 
 var (
-	_ graph.ContainerResolver = &contactFolderCache{}
+	_ graph.ContainerResolver = &contactContainerCache{}
 	_ containerRefresher      = &contactRefresher{}
 )
@@ -35,14 +35,14 @@ func (r *contactRefresher) refreshContainer(
 	return &f, nil
 }
 
-type contactFolderCache struct {
+type contactContainerCache struct {
 	*containerResolver
 	enumer containersEnumerator
 	getter containerGetter
 	userID string
 }
 
-func (cfc *contactFolderCache) populateContactRoot(
+func (cfc *contactContainerCache) populateContactRoot(
 	ctx context.Context,
 	directoryID string,
 	baseContainerPath []string,
@@ -67,7 +67,7 @@ func (cfc *contactFolderCache) populateContactRoot(
 // objects into the Contact Folder Cache
 // Function does NOT use Delta Queries as it is not supported
 // as of (Oct-07-2022)
-func (cfc *contactFolderCache) Populate(
+func (cfc *contactContainerCache) Populate(
 	ctx context.Context,
 	errs *fault.Bus,
 	baseID string,
@@ -89,7 +89,7 @@
 	return nil
 }
 
-func (cfc *contactFolderCache) init(
+func (cfc *contactContainerCache) init(
 	ctx context.Context,
 	baseNode string,
 	baseContainerPath []string,
diff --git a/src/internal/connector/exchange/contacts_backup.go b/src/internal/m365/exchange/contacts_backup.go
similarity index 85%
rename from src/internal/connector/exchange/contacts_backup.go
rename to src/internal/m365/exchange/contacts_backup.go
index 4054a17a8..fb8fde2ab 100644
--- a/src/internal/connector/exchange/contacts_backup.go
+++ b/src/internal/m365/exchange/contacts_backup.go
@@ -1,7 +1,7 @@
 package exchange
 
 import (
-	"github.com/alcionai/corso/src/internal/connector/graph"
+	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )
@@ -32,7 +32,7 @@ func (h contactBackupHandler) itemHandler() itemGetterSerializer {
 func (h contactBackupHandler) NewContainerCache(
 	userID string,
 ) (string, graph.ContainerResolver) {
-	return DefaultContactFolder, &contactFolderCache{
+	return DefaultContactFolder, &contactContainerCache{
 		userID: userID,
 		enumer: h.ac,
 		getter: h.ac,
diff --git a/src/internal/connector/exchange/contacts_restore.go b/src/internal/m365/exchange/contacts_restore.go
similarity index 95%
rename from src/internal/connector/exchange/contacts_restore.go
rename to src/internal/m365/exchange/contacts_restore.go
index 63d0e87c8..82ff1364a 100644
--- a/src/internal/connector/exchange/contacts_restore.go
+++ b/src/internal/m365/exchange/contacts_restore.go
@@ -7,7 +7,7 @@ import (
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
 
 	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/internal/connector/graph"
+	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
@@ -31,7 +31,7 @@ func newContactRestoreHandler(
 	}
 }
 
 func (h contactRestoreHandler) newContainerCache(userID string) graph.ContainerResolver {
-	return &contactFolderCache{
+	return &contactContainerCache{
 		userID: userID,
 		enumer: h.ac,
 		getter: h.ac,
diff --git a/src/internal/connector/exchange/contacts_restore_test.go b/src/internal/m365/exchange/contacts_restore_test.go
similarity index 100%
rename from src/internal/connector/exchange/contacts_restore_test.go
rename to src/internal/m365/exchange/contacts_restore_test.go
diff --git a/src/internal/connector/exchange/container_resolver.go b/src/internal/m365/exchange/container_resolver.go
similarity index 99%
rename from src/internal/connector/exchange/container_resolver.go
rename to src/internal/m365/exchange/container_resolver.go
index 0e2730449..368b2ae73 100644
--- a/src/internal/connector/exchange/container_resolver.go
+++ b/src/internal/m365/exchange/container_resolver.go
@@ -6,7 +6,7 @@ import (
 	"github.com/alcionai/clues"
 
 	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/internal/connector/graph"
+	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/logger"
 	"github.com/alcionai/corso/src/pkg/path"
diff --git a/src/internal/connector/exchange/container_resolver_test.go b/src/internal/m365/exchange/container_resolver_test.go
similarity index 84%
rename from src/internal/connector/exchange/container_resolver_test.go
rename to src/internal/m365/exchange/container_resolver_test.go
index 1cfe4690e..f3efdce70 100644
--- a/src/internal/connector/exchange/container_resolver_test.go
+++ b/src/internal/m365/exchange/container_resolver_test.go
@@ -13,10 +13,12 @@ import (
 	"github.com/stretchr/testify/suite"
 
 	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/internal/connector/graph"
+	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/internal/tester"
+	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
+	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )
 
 // ---------------------------------------------------------------------------
@@ -671,6 +673,118 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestAddToCache() {
 	assert.Equal(t, m.expectedLocation, l.String(), "location path")
 }
 
+type ContainerResolverSuite struct {
+	tester.Suite
+	credentials account.M365Config
+}
+
+func TestContainerResolverIntegrationSuite(t *testing.T) {
+	suite.Run(t, &ContainerResolverSuite{
+		Suite: tester.NewIntegrationSuite(
+			t,
+			[][]string{tester.M365AcctCredEnvs}),
+	})
+}
+
+func (suite *ContainerResolverSuite) SetupSuite() {
+	t := suite.T()
+
+	a := tester.NewM365Account(t)
+	m365, err := a.M365Config()
+	require.NoError(t, err, clues.ToCore(err))
+
+	suite.credentials = m365
+}
+
+func (suite *ContainerResolverSuite) TestPopulate() {
+	ac, err := api.NewClient(suite.credentials)
+	require.NoError(suite.T(), err, clues.ToCore(err))
+
+	eventFunc := func(t *testing.T) graph.ContainerResolver {
+		return &eventContainerCache{
+			userID: tester.M365UserID(t),
+			enumer: ac.Events(),
+			getter: ac.Events(),
+		}
+	}
+
+	contactFunc := func(t *testing.T) graph.ContainerResolver {
+		return &contactContainerCache{
+			userID: tester.M365UserID(t),
+			enumer: ac.Contacts(),
+			getter: ac.Contacts(),
+		}
+	}
+
+	tests := []struct {
+		name, folderInCache, root, basePath string
+		resolverFunc                        func(t *testing.T) graph.ContainerResolver
+		canFind                             assert.BoolAssertionFunc
+	}{
+		{
+			name: "Default Event Cache",
+			// Fine as long as this isn't running against a migrated Exchange server.
+			folderInCache: DefaultCalendar,
+			root:          DefaultCalendar,
+			basePath:      DefaultCalendar,
+			resolverFunc:  eventFunc,
+			canFind:       assert.True,
+		},
+		{
+			name:          "Default Event Folder Hidden",
+			folderInCache: DefaultContactFolder,
+			root:          DefaultCalendar,
+			canFind:       assert.False,
+			resolverFunc:  eventFunc,
+		},
+		{
+			name:          "Name Not in Cache",
+			folderInCache: "testFooBarWhoBar",
+			root:          DefaultCalendar,
+			canFind:       assert.False,
+			resolverFunc:  eventFunc,
+		},
+		{
+			name:          "Default Contact Cache",
+			folderInCache: DefaultContactFolder,
+			root:          DefaultContactFolder,
+			basePath:      DefaultContactFolder,
+			canFind:       assert.True,
+			resolverFunc:  contactFunc,
+		},
+		{
+			name:          "Default Contact Hidden",
+			folderInCache: DefaultContactFolder,
+			root:          DefaultContactFolder,
+			canFind:       assert.False,
+			resolverFunc:  contactFunc,
+		},
+		{
+			name:          "Name Not in Cache",
+			folderInCache: "testFooBarWhoBar",
+			root:          DefaultContactFolder,
+			canFind:       assert.False,
+			resolverFunc:  contactFunc,
+		},
+	}
+	for _, test := range tests {
+		suite.Run(test.name, func() {
+			t := suite.T()
+
+			ctx, flush := tester.NewContext(t)
+			defer flush()
+
+			resolver := test.resolverFunc(t)
+
+			err := resolver.Populate(ctx, fault.New(true), test.root, test.basePath)
+			require.NoError(t, err, clues.ToCore(err))
+
+			_, isFound := resolver.LocationInCache(test.folderInCache)
+			test.canFind(t, isFound, "folder path", test.folderInCache)
+		})
+	}
+}
+
 // ---------------------------------------------------------------------------
 // integration suite
 // ---------------------------------------------------------------------------
@@ -688,7 +802,7 @@ func runCreateDestinationTest(
 
 	var (
 		svc = path.ExchangeService
-		gcr = handler.newContainerCache(userID)
+		gcc = handler.newContainerCache(userID)
 	)
 
 	path1, err := path.Build(
@@ -700,17 +814,17 @@
 		containerNames1...)
 	require.NoError(t, err, clues.ToCore(err))
 
-	containerID, gcr, err := createDestination(
+	containerID, gcc, err := createDestination(
 		ctx,
 		handler,
 		handler.formatRestoreDestination(destinationName, path1),
 		userID,
-		gcr,
+		gcc,
 		true,
 		fault.New(true))
 	require.NoError(t, err, clues.ToCore(err))
 
-	_, _, err = gcr.IDToPath(ctx, containerID)
+	_, _, err = gcc.IDToPath(ctx, containerID)
 	assert.NoError(t, err, clues.ToCore(err))
 
 	path2, err := path.Build(
@@ -722,22 +836,22 @@
 		containerNames2...)
 	require.NoError(t, err, clues.ToCore(err))
 
-	containerID, gcr, err = createDestination(
+	containerID, gcc, err = createDestination(
 		ctx,
 		handler,
 		handler.formatRestoreDestination(destinationName, path2),
 		userID,
-		gcr,
+		gcc,
 		false,
 		fault.New(true))
 	require.NoError(t, err, clues.ToCore(err))
 
-	p, l, err := gcr.IDToPath(ctx, containerID)
+	p, l, err := gcc.IDToPath(ctx, containerID)
 	require.NoError(t, err, clues.ToCore(err))
 
-	_, ok := gcr.LocationInCache(l.String())
+	_, ok := gcc.LocationInCache(l.String())
 	require.True(t, ok, "looking for location in cache: %s", l)
 
-	_, ok = gcr.PathInCache(p.String())
+	_, ok = gcc.PathInCache(p.String())
 	require.True(t, ok, "looking for path in cache: %s", p)
 }
diff --git a/src/internal/connector/exchange/event_calendar_cache.go b/src/internal/m365/exchange/event_container_cache.go
similarity index 87%
rename from src/internal/connector/exchange/event_calendar_cache.go
rename to src/internal/m365/exchange/event_container_cache.go
index 5e99b4b39..d89a5caa2 100644
--- a/src/internal/connector/exchange/event_calendar_cache.go
+++ b/src/internal/m365/exchange/event_container_cache.go
@@ -6,14 +6,14 @@ import (
 	"github.com/alcionai/clues"
 
 	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/internal/connector/graph"
+	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
 )
 
-var _ graph.ContainerResolver = &eventCalendarCache{}
+var _ graph.ContainerResolver = &eventContainerCache{}
 
-type eventCalendarCache struct {
+type eventContainerCache struct {
 	*containerResolver
 	enumer containersEnumerator
 	getter containerGetter
@@ -23,7 +23,7 @@
 // init ensures that the structure's fields are initialized.
 // Fields Initialized when cache == nil:
 // [mc.cache]
-func (ecc *eventCalendarCache) init(
+func (ecc *eventContainerCache) init(
 	ctx context.Context,
 ) error {
 	if ecc.containerResolver == nil {
@@ -37,7 +37,7 @@
 // DefaultCalendar is the traditional "Calendar".
 // Action ensures that cache will stop at appropriate level.
 // @error iff the struct is not properly instantiated
-func (ecc *eventCalendarCache) populateEventRoot(ctx context.Context) error {
+func (ecc *eventContainerCache) populateEventRoot(ctx context.Context) error {
 	container := DefaultCalendar
 
 	f, err := ecc.getter.GetContainerByID(ctx, ecc.userID, container)
@@ -59,7 +59,7 @@
 // Populate utility function for populating eventCalendarCache.
 // Executes 1 additional Graph Query
 // @param baseID: ignored. Present to conform to interface
-func (ecc *eventCalendarCache) Populate(
+func (ecc *eventContainerCache) Populate(
 	ctx context.Context,
 	errs *fault.Bus,
 	baseID string,
@@ -88,7 +88,7 @@
 // AddToCache adds container to map in field 'cache'
 // @returns error iff the required values are not accessible.
-func (ecc *eventCalendarCache) AddToCache(ctx context.Context, f graph.Container) error {
+func (ecc *eventContainerCache) AddToCache(ctx context.Context, f graph.Container) error {
 	if err := checkIDAndName(f); err != nil {
 		return clues.Wrap(err, "validating container").WithClues(ctx)
 	}
diff --git a/src/internal/connector/exchange/events_backup.go b/src/internal/m365/exchange/events_backup.go
similarity index 86%
rename from src/internal/connector/exchange/events_backup.go
rename to src/internal/m365/exchange/events_backup.go
index f77a6a1a3..c1502c965 100644
--- a/src/internal/connector/exchange/events_backup.go
+++ b/src/internal/m365/exchange/events_backup.go
@@ -1,7 +1,7 @@
 package exchange
 
 import (
-	"github.com/alcionai/corso/src/internal/connector/graph"
+	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )
@@ -32,7 +32,7 @@ func (h eventBackupHandler) itemHandler() itemGetterSerializer {
 func (h eventBackupHandler) NewContainerCache(
 	userID string,
 ) (string, graph.ContainerResolver) {
-	return DefaultCalendar, &eventCalendarCache{
+	return DefaultCalendar, &eventContainerCache{
 		userID: userID,
 		enumer: h.ac,
 		getter: h.ac,
diff --git a/src/internal/connector/exchange/events_restore.go b/src/internal/m365/exchange/events_restore.go
similarity index 96%
rename from src/internal/connector/exchange/events_restore.go
rename to src/internal/m365/exchange/events_restore.go
index a45de05aa..18540ecaf 100644
--- a/src/internal/connector/exchange/events_restore.go
+++ b/src/internal/m365/exchange/events_restore.go
@@ -7,7 +7,7 @@ import (
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
 
 	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/internal/connector/graph"
+	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
@@ -33,7 +33,7 @@ func newEventRestoreHandler(
 	}
 }
 
 func (h eventRestoreHandler) newContainerCache(userID string) graph.ContainerResolver {
-	return &eventCalendarCache{
+	return &eventContainerCache{
 		userID: userID,
 		enumer: h.ac,
 		getter: h.ac,
diff --git a/src/internal/connector/exchange/events_restore_test.go b/src/internal/m365/exchange/events_restore_test.go
similarity index 100%
rename from src/internal/connector/exchange/events_restore_test.go
rename to src/internal/m365/exchange/events_restore_test.go
diff --git a/src/internal/connector/exchange/handlers.go b/src/internal/m365/exchange/handlers.go
similarity index 98%
rename from src/internal/connector/exchange/handlers.go
rename to src/internal/m365/exchange/handlers.go
index 0538ebd17..9eb7d1fe1 100644
--- a/src/internal/connector/exchange/handlers.go
+++ b/src/internal/m365/exchange/handlers.go
@@ -5,7 +5,7 @@ import (
 
 	"github.com/microsoft/kiota-abstractions-go/serialization"
 
-	"github.com/alcionai/corso/src/internal/connector/graph"
+	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
diff --git a/src/internal/connector/exchange/mail_backup.go b/src/internal/m365/exchange/mail_backup.go
similarity index 86%
rename from src/internal/connector/exchange/mail_backup.go
rename to src/internal/m365/exchange/mail_backup.go
index 1491a683a..ecd31919c 100644
--- a/src/internal/connector/exchange/mail_backup.go
+++ b/src/internal/m365/exchange/mail_backup.go
@@ -1,7 +1,7 @@
 package exchange
 
 import (
-	"github.com/alcionai/corso/src/internal/connector/graph"
+	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )
@@ -32,7 +32,7 @@ func (h mailBackupHandler) itemHandler() itemGetterSerializer {
 func (h mailBackupHandler) NewContainerCache(
 	userID string,
 ) (string, graph.ContainerResolver) {
-	return rootFolderAlias, &mailFolderCache{
+	return rootFolderAlias, &mailContainerCache{
 		userID: userID,
 		enumer: h.ac,
 		getter: h.ac,
diff --git a/src/internal/connector/exchange/mail_folder_cache.go b/src/internal/m365/exchange/mail_container_cache.go
similarity index 87%
rename from src/internal/connector/exchange/mail_folder_cache.go
rename to src/internal/m365/exchange/mail_container_cache.go
index 062f91a23..cf7b0bcdc 100644
--- a/src/internal/connector/exchange/mail_folder_cache.go
+++ b/src/internal/m365/exchange/mail_container_cache.go
@@ -5,13 +5,13 @@ import (
 
 	"github.com/alcionai/clues"
 
-	"github.com/alcionai/corso/src/internal/connector/graph"
+	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
 )
 
 var (
-	_ graph.ContainerResolver = &mailFolderCache{}
+	_ graph.ContainerResolver = &mailContainerCache{}
 	_ containerRefresher      = &mailRefresher{}
 )
@@ -34,10 +34,10 @@ func (r *mailRefresher) refreshContainer(
 	return &f, nil
 }
 
-// mailFolderCache struct used to improve lookup of directories within exchange.Mail
+// mailContainerCache struct used to improve lookup of directories within exchange.Mail
 // cache map of cachedContainers where the key = M365ID
 // nameLookup map: Key: DisplayName Value: ID
-type mailFolderCache struct {
+type mailContainerCache struct {
 	*containerResolver
 	enumer containersEnumerator
 	getter containerGetter
@@ -47,7 +47,7 @@
 // init ensures that the structure's fields are initialized.
 // Fields Initialized when cache == nil:
 // [mc.cache]
-func (mc *mailFolderCache) init(
+func (mc *mailContainerCache) init(
 	ctx context.Context,
 ) error {
 	if mc.containerResolver == nil {
@@ -64,7 +64,7 @@
 // rootFolderAlias is the top-level directory for exchange.Mail.
 // Action ensures that cache will stop at appropriate level.
 // @error iff the struct is not properly instantiated
-func (mc *mailFolderCache) populateMailRoot(ctx context.Context) error {
+func (mc *mailContainerCache) populateMailRoot(ctx context.Context) error {
 	f, err := mc.getter.GetContainerByID(ctx, mc.userID, rootFolderAlias)
 	if err != nil {
 		return clues.Wrap(err, "fetching root folder")
@@ -89,7 +89,7 @@
 // @param baseID: M365ID of the base of the exchange.Mail.Folder
 // @param baseContainerPath: the set of folder elements that make up the path
 // for the base container in the cache.
-func (mc *mailFolderCache) Populate(
+func (mc *mailContainerCache) Populate(
 	ctx context.Context,
 	errs *fault.Bus,
 	baseID string,
diff --git a/src/internal/connector/exchange/mail_folder_cache_test.go b/src/internal/m365/exchange/mail_container_cache_test.go
similarity index 98%
rename from src/internal/connector/exchange/mail_folder_cache_test.go
rename to src/internal/m365/exchange/mail_container_cache_test.go
index 59be9386e..5aa602b29 100644
--- a/src/internal/connector/exchange/mail_folder_cache_test.go
+++ b/src/internal/m365/exchange/mail_container_cache_test.go
@@ -87,7 +87,7 @@ func (suite *MailFolderCacheIntegrationSuite) TestDeltaFetch() {
 
 	acm := ac.Mail()
 
-	mfc := mailFolderCache{
+	mfc := mailContainerCache{
 		userID: userID,
 		enumer: acm,
 		getter: acm,
diff --git a/src/internal/connector/exchange/mail_restore.go b/src/internal/m365/exchange/mail_restore.go
similarity index 97%
rename from src/internal/connector/exchange/mail_restore.go
rename to src/internal/m365/exchange/mail_restore.go
index a6e8d2e0e..ce0979859 100644
--- a/src/internal/connector/exchange/mail_restore.go
+++ b/src/internal/m365/exchange/mail_restore.go
@@ -8,7 +8,7 @@ import (
 
 	"github.com/alcionai/corso/src/internal/common/dttm"
 	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/internal/connector/graph"
+	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
@@ -34,7 +34,7 @@ func newMailRestoreHandler(
 	}
 }
 
 func (h mailRestoreHandler) newContainerCache(userID string) graph.ContainerResolver {
-	return &mailFolderCache{
+	return &mailContainerCache{
 		userID: userID,
 		enumer: h.ac,
 		getter: h.ac,
diff --git a/src/internal/connector/exchange/mail_restore_test.go b/src/internal/m365/exchange/mail_restore_test.go
similarity index 100%
rename from src/internal/connector/exchange/mail_restore_test.go
rename to src/internal/m365/exchange/mail_restore_test.go
diff --git a/src/internal/connector/exchange/mock/collections.go b/src/internal/m365/exchange/mock/collections.go
similarity index 100%
rename from src/internal/connector/exchange/mock/collections.go
rename to src/internal/m365/exchange/mock/collections.go
diff --git a/src/internal/connector/exchange/mock/contact.go b/src/internal/m365/exchange/mock/contact.go
similarity index 100%
rename from src/internal/connector/exchange/mock/contact.go
rename to src/internal/m365/exchange/mock/contact.go
diff --git a/src/internal/connector/exchange/mock/event.go b/src/internal/m365/exchange/mock/event.go
similarity index 100%
rename from src/internal/connector/exchange/mock/event.go
rename to src/internal/m365/exchange/mock/event.go
diff --git a/src/internal/connector/exchange/mock/mail.go b/src/internal/m365/exchange/mock/mail.go
similarity index 100%
rename from src/internal/connector/exchange/mock/mail.go
rename to src/internal/m365/exchange/mock/mail.go
diff --git a/src/internal/connector/exchange/mock/mock_test.go b/src/internal/m365/exchange/mock/mock_test.go
similarity index 100%
rename from src/internal/connector/exchange/mock/mock_test.go
rename to src/internal/m365/exchange/mock/mock_test.go
diff --git a/src/internal/connector/exchange/service_restore.go b/src/internal/m365/exchange/restore.go
similarity index 93%
rename from src/internal/connector/exchange/service_restore.go
rename to src/internal/m365/exchange/restore.go
index b55c2c318..7703933df 100644
--- a/src/internal/connector/exchange/service_restore.go
+++ b/src/internal/m365/exchange/restore.go
@@ -9,10 +9,10 @@ import (
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
 
 	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/internal/connector/graph"
-	"github.com/alcionai/corso/src/internal/connector/support"
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/diagnostics"
+	"github.com/alcionai/corso/src/internal/m365/graph"
+	"github.com/alcionai/corso/src/internal/m365/support"
 	"github.com/alcionai/corso/src/internal/observe"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/control"
@@ -22,16 +22,16 @@ import (
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )
 
-// RestoreCollections restores M365 objects in data.RestoreCollection to MSFT
+// ConsumeRestoreCollections restores M365 objects in data.RestoreCollection to MSFT
 // store through GraphAPI.
-func RestoreCollections(
+func ConsumeRestoreCollections(
 	ctx context.Context,
 	ac api.Client,
 	restoreCfg control.RestoreConfig,
 	dcs []data.RestoreCollection,
 	deets *details.Builder,
 	errs *fault.Bus,
-) (*support.ConnectorOperationStatus, error) {
+) (*support.ControllerOperationStatus, error) {
 	if len(dcs) == 0 {
 		return support.CreateStatus(ctx, support.Restore, 0, support.CollectionMetrics{}, ""), nil
 	}
@@ -73,7 +73,7 @@
 		isNewCache = true
 	}
 
-	containerID, gcr, err := createDestination(
+	containerID, gcc, err := createDestination(
 		ictx,
 		handler,
 		handler.formatRestoreDestination(restoreCfg.Location, dc.FullPath()),
@@ -86,7 +86,7 @@
 		continue
 	}
 
-	directoryCache[category] = gcr
+	directoryCache[category] = gcc
 
 	ictx = clues.Add(ictx, "restore_destination_id", containerID)
 
@@ -131,7 +131,7 @@ func restoreCollection(
 	deets *details.Builder,
 	errs *fault.Bus,
 ) (support.CollectionMetrics, error) {
-	ctx, end := diagnostics.Span(ctx, "gc:exchange:restoreCollection", diagnostics.Label("path", dc.FullPath()))
+	ctx, end := diagnostics.Span(ctx, "m365:exchange:restoreCollection", diagnostics.Label("path", dc.FullPath()))
 	defer end()
 
 	var (
@@ -159,7 +159,7 @@ func restoreCollection(
 	}
 
 	ictx := clues.Add(ctx, "item_id", itemData.UUID())
-	trace.Log(ictx, "gc:exchange:restoreCollection:item", itemData.UUID())
+	trace.Log(ictx, "m365:exchange:restoreCollection:item", itemData.UUID())
 	metrics.Objects++
 
 	buf := &bytes.Buffer{}
diff --git a/src/internal/connector/exchange/restore_test.go b/src/internal/m365/exchange/restore_test.go
similarity index 99%
rename from src/internal/connector/exchange/restore_test.go
rename to src/internal/m365/exchange/restore_test.go
index eb308f2dc..5b4bbb825 100644
--- a/src/internal/connector/exchange/restore_test.go
+++ b/src/internal/m365/exchange/restore_test.go
@@ -10,7 +10,7 @@ import (
 	"github.com/stretchr/testify/suite"
 
 	"github.com/alcionai/corso/src/internal/common/ptr"
-	exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
+	exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/fault"
diff --git a/src/internal/connector/exchange/testdata/handlers.go b/src/internal/m365/exchange/testdata/handlers.go
similarity index 86%
rename from src/internal/connector/exchange/testdata/handlers.go
rename to src/internal/m365/exchange/testdata/handlers.go
index 396de9008..559c23b2c 100644
--- a/src/internal/connector/exchange/testdata/handlers.go
+++ b/src/internal/m365/exchange/testdata/handlers.go
@@ -7,8 +7,8 @@ import (
 	"github.com/alcionai/clues"
 	"github.com/stretchr/testify/require"
 
-	"github.com/alcionai/corso/src/internal/connector/exchange"
-	"github.com/alcionai/corso/src/internal/connector/graph"
+	"github.com/alcionai/corso/src/internal/m365/exchange"
+	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
diff --git a/src/internal/connector/exchange/transform.go b/src/internal/m365/exchange/transform.go
similarity index 100%
rename from src/internal/connector/exchange/transform.go
rename to src/internal/m365/exchange/transform.go
diff --git a/src/internal/connector/exchange/transform_test.go b/src/internal/m365/exchange/transform_test.go
similarity index 98%
rename from src/internal/connector/exchange/transform_test.go
rename to src/internal/m365/exchange/transform_test.go
index fe1bc1601..4e3ce4278 100644
--- a/src/internal/connector/exchange/transform_test.go
+++ b/src/internal/m365/exchange/transform_test.go
@@ -10,7 +10,7 @@ import (
 	"github.com/stretchr/testify/suite"
 
 	"github.com/alcionai/corso/src/internal/common/ptr"
-	exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
+	exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )
diff --git a/src/internal/connector/graph/betasdk/beta_client.go b/src/internal/m365/graph/betasdk/beta_client.go
similarity index 98%
rename from src/internal/connector/graph/betasdk/beta_client.go
rename to src/internal/m365/graph/betasdk/beta_client.go
index 0f7816ac1..62224721b 100644
--- a/src/internal/connector/graph/betasdk/beta_client.go
+++ b/src/internal/m365/graph/betasdk/beta_client.go
@@ -8,7 +8,7 @@ import (
 	ktext "github.com/microsoft/kiota-serialization-text-go"
 	msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
 
-	i1a3c1a5501c5e41b7fd169f2d4c768dce9b096ac28fb5431bf02afcc57295411 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/sites"
+	i1a3c1a5501c5e41b7fd169f2d4c768dce9b096ac28fb5431bf02afcc57295411 "github.com/alcionai/corso/src/internal/m365/graph/betasdk/sites"
 )
 
 // BetaClient the main entry point of the SDK, exposes the configuration and the fluent API.
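The hunks above all follow one pattern: the per-category caches (contactFolderCache, eventCalendarCache, mailFolderCache) become *ContainerCache types, and each still satisfies graph.ContainerResolver, populating a lookup of container IDs and display-name paths. As a rough mental model only (a toy, self-contained sketch with simplified, hypothetical signatures; the real interface lives in src/internal/m365/graph), a resolver of this shape looks like:

```go
package main

import "fmt"

// Illustrative stand-in for graph.ContainerResolver; the method names
// mirror those used in the hunks above (Populate, IDToPath,
// LocationInCache), but these signatures are simplified and hypothetical.
type containerResolver interface {
	Populate(rootID string, basePath ...string) error
	IDToPath(id string) (string, bool)
	LocationInCache(displayPath string) (string, bool)
}

// toyCache is a minimal in-memory implementation: two maps, one keyed by
// container ID and one by display path, as the mailContainerCache comment
// above describes (cache key = M365ID, nameLookup key = DisplayName).
type toyCache struct {
	byID   map[string]string
	byPath map[string]string
}

func (c *toyCache) Populate(rootID string, basePath ...string) error {
	// A real cache walks the Graph API container tree here; the toy
	// version just seeds its two lookup maps with a fake root folder.
	c.byID = map[string]string{rootID: "Inbox"}
	c.byPath = map[string]string{"Inbox": rootID}
	return nil
}

func (c *toyCache) IDToPath(id string) (string, bool) {
	p, ok := c.byID[id]
	return p, ok
}

func (c *toyCache) LocationInCache(displayPath string) (string, bool) {
	id, ok := c.byPath[displayPath]
	return id, ok
}

func main() {
	var r containerResolver = &toyCache{}
	_ = r.Populate("root-id")
	id, ok := r.LocationInCache("Inbox")
	fmt.Println(id, ok) // root-id true
}
```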
diff --git a/src/internal/connector/graph/betasdk/beta_client_test.go b/src/internal/m365/graph/betasdk/beta_client_test.go
similarity index 97%
rename from src/internal/connector/graph/betasdk/beta_client_test.go
rename to src/internal/m365/graph/betasdk/beta_client_test.go
index d0d19de11..9e7fcab00 100644
--- a/src/internal/connector/graph/betasdk/beta_client_test.go
+++ b/src/internal/m365/graph/betasdk/beta_client_test.go
@@ -8,7 +8,7 @@ import (
 	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"
 
-	"github.com/alcionai/corso/src/internal/connector/graph"
+	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/account"
 )
diff --git a/src/internal/connector/graph/betasdk/kiota-lock.json b/src/internal/m365/graph/betasdk/kiota-lock.json
similarity index 99%
rename from src/internal/connector/graph/betasdk/kiota-lock.json
rename to src/internal/m365/graph/betasdk/kiota-lock.json
index 21a111aef..c1819639e 100644
--- a/src/internal/connector/graph/betasdk/kiota-lock.json
+++ b/src/internal/m365/graph/betasdk/kiota-lock.json
@@ -2,7 +2,7 @@
   "lockFileVersion": "1.0.0",
   "kiotaVersion": "0.10.0.0",
   "clientClassName": "BetaClient",
-  "clientNamespaceName": "github.com/alcionai/corso/src/internal/connector/graph/betasdk",
+  "clientNamespaceName": "github.com/alcionai/corso/src/internal/m365/graph/betasdk",
   "language": "Go",
   "betaVersion": "0.53.0",
   "usesBackingStore": false,
diff --git a/src/internal/connector/graph/betasdk/models/base_item.go b/src/internal/m365/graph/betasdk/models/base_item.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/base_item.go
rename to src/internal/m365/graph/betasdk/models/base_item.go
diff --git a/src/internal/connector/graph/betasdk/models/canvas_layout.go b/src/internal/m365/graph/betasdk/models/canvas_layout.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/canvas_layout.go
rename to src/internal/m365/graph/betasdk/models/canvas_layout.go
diff --git a/src/internal/connector/graph/betasdk/models/canvas_layoutable.go b/src/internal/m365/graph/betasdk/models/canvas_layoutable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/canvas_layoutable.go
rename to src/internal/m365/graph/betasdk/models/canvas_layoutable.go
diff --git a/src/internal/connector/graph/betasdk/models/horizontal_section.go b/src/internal/m365/graph/betasdk/models/horizontal_section.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/horizontal_section.go
rename to src/internal/m365/graph/betasdk/models/horizontal_section.go
diff --git a/src/internal/connector/graph/betasdk/models/horizontal_section_collection_response.go b/src/internal/m365/graph/betasdk/models/horizontal_section_collection_response.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/horizontal_section_collection_response.go
rename to src/internal/m365/graph/betasdk/models/horizontal_section_collection_response.go
diff --git a/src/internal/connector/graph/betasdk/models/horizontal_section_collection_responseable.go b/src/internal/m365/graph/betasdk/models/horizontal_section_collection_responseable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/horizontal_section_collection_responseable.go
rename to src/internal/m365/graph/betasdk/models/horizontal_section_collection_responseable.go
diff --git a/src/internal/connector/graph/betasdk/models/horizontal_section_column.go b/src/internal/m365/graph/betasdk/models/horizontal_section_column.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/horizontal_section_column.go
rename to src/internal/m365/graph/betasdk/models/horizontal_section_column.go
diff --git a/src/internal/connector/graph/betasdk/models/horizontal_section_column_collection_response.go b/src/internal/m365/graph/betasdk/models/horizontal_section_column_collection_response.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/horizontal_section_column_collection_response.go
rename to src/internal/m365/graph/betasdk/models/horizontal_section_column_collection_response.go
diff --git a/src/internal/connector/graph/betasdk/models/horizontal_section_column_collection_responseable.go b/src/internal/m365/graph/betasdk/models/horizontal_section_column_collection_responseable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/horizontal_section_column_collection_responseable.go
rename to src/internal/m365/graph/betasdk/models/horizontal_section_column_collection_responseable.go
diff --git a/src/internal/connector/graph/betasdk/models/horizontal_section_columnable.go b/src/internal/m365/graph/betasdk/models/horizontal_section_columnable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/horizontal_section_columnable.go
rename to src/internal/m365/graph/betasdk/models/horizontal_section_columnable.go
diff --git a/src/internal/connector/graph/betasdk/models/horizontal_section_layout_type.go b/src/internal/m365/graph/betasdk/models/horizontal_section_layout_type.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/horizontal_section_layout_type.go
rename to src/internal/m365/graph/betasdk/models/horizontal_section_layout_type.go
diff --git a/src/internal/connector/graph/betasdk/models/horizontal_sectionable.go b/src/internal/m365/graph/betasdk/models/horizontal_sectionable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/horizontal_sectionable.go
rename to src/internal/m365/graph/betasdk/models/horizontal_sectionable.go
diff --git a/src/internal/connector/graph/betasdk/models/meta_data_key_string_pair.go b/src/internal/m365/graph/betasdk/models/meta_data_key_string_pair.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/meta_data_key_string_pair.go
rename to src/internal/m365/graph/betasdk/models/meta_data_key_string_pair.go
diff --git a/src/internal/connector/graph/betasdk/models/meta_data_key_string_pair_collection_response.go b/src/internal/m365/graph/betasdk/models/meta_data_key_string_pair_collection_response.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/meta_data_key_string_pair_collection_response.go
rename to src/internal/m365/graph/betasdk/models/meta_data_key_string_pair_collection_response.go
diff --git a/src/internal/connector/graph/betasdk/models/meta_data_key_string_pair_collection_responseable.go b/src/internal/m365/graph/betasdk/models/meta_data_key_string_pair_collection_responseable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/meta_data_key_string_pair_collection_responseable.go
rename to src/internal/m365/graph/betasdk/models/meta_data_key_string_pair_collection_responseable.go
diff --git a/src/internal/connector/graph/betasdk/models/meta_data_key_string_pairable.go b/src/internal/m365/graph/betasdk/models/meta_data_key_string_pairable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/meta_data_key_string_pairable.go
rename to src/internal/m365/graph/betasdk/models/meta_data_key_string_pairable.go
diff --git a/src/internal/connector/graph/betasdk/models/meta_data_key_value_pair.go b/src/internal/m365/graph/betasdk/models/meta_data_key_value_pair.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/meta_data_key_value_pair.go
rename to src/internal/m365/graph/betasdk/models/meta_data_key_value_pair.go
diff --git a/src/internal/connector/graph/betasdk/models/meta_data_key_value_pair_collection_response.go b/src/internal/m365/graph/betasdk/models/meta_data_key_value_pair_collection_response.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/meta_data_key_value_pair_collection_response.go
rename to src/internal/m365/graph/betasdk/models/meta_data_key_value_pair_collection_response.go
diff --git a/src/internal/connector/graph/betasdk/models/meta_data_key_value_pair_collection_responseable.go b/src/internal/m365/graph/betasdk/models/meta_data_key_value_pair_collection_responseable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/meta_data_key_value_pair_collection_responseable.go
rename to src/internal/m365/graph/betasdk/models/meta_data_key_value_pair_collection_responseable.go
diff --git a/src/internal/connector/graph/betasdk/models/meta_data_key_value_pairable.go b/src/internal/m365/graph/betasdk/models/meta_data_key_value_pairable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/meta_data_key_value_pairable.go
rename to src/internal/m365/graph/betasdk/models/meta_data_key_value_pairable.go
diff --git a/src/internal/connector/graph/betasdk/models/page_layout_type.go b/src/internal/m365/graph/betasdk/models/page_layout_type.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/page_layout_type.go
rename to src/internal/m365/graph/betasdk/models/page_layout_type.go
diff --git a/src/internal/connector/graph/betasdk/models/page_promotion_type.go b/src/internal/m365/graph/betasdk/models/page_promotion_type.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/page_promotion_type.go
rename to src/internal/m365/graph/betasdk/models/page_promotion_type.go
diff --git a/src/internal/connector/graph/betasdk/models/publication_facet.go b/src/internal/m365/graph/betasdk/models/publication_facet.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/publication_facet.go
rename to src/internal/m365/graph/betasdk/models/publication_facet.go
diff --git a/src/internal/connector/graph/betasdk/models/publication_facetable.go b/src/internal/m365/graph/betasdk/models/publication_facetable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/publication_facetable.go
rename to src/internal/m365/graph/betasdk/models/publication_facetable.go
diff --git a/src/internal/connector/graph/betasdk/models/reactions_facet.go b/src/internal/m365/graph/betasdk/models/reactions_facet.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/reactions_facet.go
rename to src/internal/m365/graph/betasdk/models/reactions_facet.go
diff --git a/src/internal/connector/graph/betasdk/models/reactions_facetable.go b/src/internal/m365/graph/betasdk/models/reactions_facetable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/reactions_facetable.go
rename to src/internal/m365/graph/betasdk/models/reactions_facetable.go
diff --git a/src/internal/connector/graph/betasdk/models/section_emphasis_type.go b/src/internal/m365/graph/betasdk/models/section_emphasis_type.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/section_emphasis_type.go
rename to src/internal/m365/graph/betasdk/models/section_emphasis_type.go
diff --git a/src/internal/connector/graph/betasdk/models/server_processed_content.go b/src/internal/m365/graph/betasdk/models/server_processed_content.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/server_processed_content.go
rename to src/internal/m365/graph/betasdk/models/server_processed_content.go
diff --git a/src/internal/connector/graph/betasdk/models/server_processed_contentable.go b/src/internal/m365/graph/betasdk/models/server_processed_contentable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/server_processed_contentable.go
rename to src/internal/m365/graph/betasdk/models/server_processed_contentable.go
diff --git a/src/internal/connector/graph/betasdk/models/site_access_type.go b/src/internal/m365/graph/betasdk/models/site_access_type.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/site_access_type.go
rename to src/internal/m365/graph/betasdk/models/site_access_type.go
diff --git a/src/internal/connector/graph/betasdk/models/site_page.go b/src/internal/m365/graph/betasdk/models/site_page.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/site_page.go
rename to src/internal/m365/graph/betasdk/models/site_page.go
diff --git a/src/internal/connector/graph/betasdk/models/site_page_collection_response.go b/src/internal/m365/graph/betasdk/models/site_page_collection_response.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/site_page_collection_response.go
rename to src/internal/m365/graph/betasdk/models/site_page_collection_response.go
diff --git a/src/internal/connector/graph/betasdk/models/site_page_collection_responseable.go b/src/internal/m365/graph/betasdk/models/site_page_collection_responseable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/site_page_collection_responseable.go
rename to src/internal/m365/graph/betasdk/models/site_page_collection_responseable.go
diff --git a/src/internal/connector/graph/betasdk/models/site_pageable.go b/src/internal/m365/graph/betasdk/models/site_pageable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/site_pageable.go
rename to src/internal/m365/graph/betasdk/models/site_pageable.go
diff --git a/src/internal/connector/graph/betasdk/models/site_security_level.go b/src/internal/m365/graph/betasdk/models/site_security_level.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/site_security_level.go
rename to src/internal/m365/graph/betasdk/models/site_security_level.go
diff --git a/src/internal/connector/graph/betasdk/models/site_settings.go b/src/internal/m365/graph/betasdk/models/site_settings.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/site_settings.go
rename to src/internal/m365/graph/betasdk/models/site_settings.go
diff --git a/src/internal/connector/graph/betasdk/models/site_settingsable.go b/src/internal/m365/graph/betasdk/models/site_settingsable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/site_settingsable.go
rename to src/internal/m365/graph/betasdk/models/site_settingsable.go
diff --git a/src/internal/connector/graph/betasdk/models/standard_web_part.go b/src/internal/m365/graph/betasdk/models/standard_web_part.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/standard_web_part.go
rename to src/internal/m365/graph/betasdk/models/standard_web_part.go
diff --git a/src/internal/connector/graph/betasdk/models/standard_web_part_collection_response.go b/src/internal/m365/graph/betasdk/models/standard_web_part_collection_response.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/standard_web_part_collection_response.go
rename to src/internal/m365/graph/betasdk/models/standard_web_part_collection_response.go
diff --git a/src/internal/connector/graph/betasdk/models/standard_web_part_collection_responseable.go b/src/internal/m365/graph/betasdk/models/standard_web_part_collection_responseable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/standard_web_part_collection_responseable.go
rename to src/internal/m365/graph/betasdk/models/standard_web_part_collection_responseable.go
diff --git a/src/internal/connector/graph/betasdk/models/standard_web_partable.go b/src/internal/m365/graph/betasdk/models/standard_web_partable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/standard_web_partable.go
rename to src/internal/m365/graph/betasdk/models/standard_web_partable.go
diff --git a/src/internal/connector/graph/betasdk/models/text_web_part.go b/src/internal/m365/graph/betasdk/models/text_web_part.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/text_web_part.go
rename to src/internal/m365/graph/betasdk/models/text_web_part.go
diff --git a/src/internal/connector/graph/betasdk/models/text_web_part_collection_response.go b/src/internal/m365/graph/betasdk/models/text_web_part_collection_response.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/text_web_part_collection_response.go
rename to src/internal/m365/graph/betasdk/models/text_web_part_collection_response.go
diff --git a/src/internal/connector/graph/betasdk/models/text_web_part_collection_responseable.go b/src/internal/m365/graph/betasdk/models/text_web_part_collection_responseable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/text_web_part_collection_responseable.go
rename to src/internal/m365/graph/betasdk/models/text_web_part_collection_responseable.go
diff --git a/src/internal/connector/graph/betasdk/models/text_web_partable.go b/src/internal/m365/graph/betasdk/models/text_web_partable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/text_web_partable.go
rename to src/internal/m365/graph/betasdk/models/text_web_partable.go
diff --git a/src/internal/connector/graph/betasdk/models/title_area.go b/src/internal/m365/graph/betasdk/models/title_area.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/title_area.go
rename to src/internal/m365/graph/betasdk/models/title_area.go
diff --git a/src/internal/connector/graph/betasdk/models/title_area_layout_type.go b/src/internal/m365/graph/betasdk/models/title_area_layout_type.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/title_area_layout_type.go
rename to src/internal/m365/graph/betasdk/models/title_area_layout_type.go
diff --git a/src/internal/connector/graph/betasdk/models/title_area_text_alignment_type.go b/src/internal/m365/graph/betasdk/models/title_area_text_alignment_type.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/title_area_text_alignment_type.go
rename to src/internal/m365/graph/betasdk/models/title_area_text_alignment_type.go
diff --git a/src/internal/connector/graph/betasdk/models/title_areaable.go b/src/internal/m365/graph/betasdk/models/title_areaable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/title_areaable.go
rename to src/internal/m365/graph/betasdk/models/title_areaable.go
diff --git a/src/internal/connector/graph/betasdk/models/vertical_section.go b/src/internal/m365/graph/betasdk/models/vertical_section.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/vertical_section.go
rename to src/internal/m365/graph/betasdk/models/vertical_section.go
diff --git a/src/internal/connector/graph/betasdk/models/vertical_sectionable.go b/src/internal/m365/graph/betasdk/models/vertical_sectionable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/vertical_sectionable.go
rename to src/internal/m365/graph/betasdk/models/vertical_sectionable.go
diff --git a/src/internal/connector/graph/betasdk/models/web_part.go b/src/internal/m365/graph/betasdk/models/web_part.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/web_part.go
rename to src/internal/m365/graph/betasdk/models/web_part.go
diff --git a/src/internal/connector/graph/betasdk/models/web_part_collection_response.go b/src/internal/m365/graph/betasdk/models/web_part_collection_response.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/web_part_collection_response.go
rename to src/internal/m365/graph/betasdk/models/web_part_collection_response.go
diff --git a/src/internal/connector/graph/betasdk/models/web_part_collection_responseable.go b/src/internal/m365/graph/betasdk/models/web_part_collection_responseable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/web_part_collection_responseable.go
rename to src/internal/m365/graph/betasdk/models/web_part_collection_responseable.go
diff --git a/src/internal/connector/graph/betasdk/models/web_part_data.go b/src/internal/m365/graph/betasdk/models/web_part_data.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/web_part_data.go
rename to src/internal/m365/graph/betasdk/models/web_part_data.go
diff --git a/src/internal/connector/graph/betasdk/models/web_part_dataable.go b/src/internal/m365/graph/betasdk/models/web_part_dataable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/web_part_dataable.go
rename to src/internal/m365/graph/betasdk/models/web_part_dataable.go
diff --git a/src/internal/connector/graph/betasdk/models/web_part_position.go b/src/internal/m365/graph/betasdk/models/web_part_position.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/web_part_position.go
rename to src/internal/m365/graph/betasdk/models/web_part_position.go
diff --git a/src/internal/connector/graph/betasdk/models/web_part_positionable.go b/src/internal/m365/graph/betasdk/models/web_part_positionable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/web_part_positionable.go
rename to src/internal/m365/graph/betasdk/models/web_part_positionable.go
diff --git a/src/internal/connector/graph/betasdk/models/web_partable.go b/src/internal/m365/graph/betasdk/models/web_partable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/models/web_partable.go
rename to src/internal/m365/graph/betasdk/models/web_partable.go
diff --git a/src/internal/connector/graph/betasdk/sites/count_request_builder.go b/src/internal/m365/graph/betasdk/sites/count_request_builder.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/sites/count_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/count_request_builder.go
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_count_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_count_request_builder.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/sites/item_pages_count_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_count_request_builder.go
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_count_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_count_request_builder.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_count_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_count_request_builder.go
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_horizontal_section_item_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_horizontal_section_item_request_builder.go
similarity index 99%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_horizontal_section_item_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_horizontal_section_item_request_builder.go
index 0845ecee4..c5ad01383 100644
--- a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_horizontal_section_item_request_builder.go
+++ b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_horizontal_section_item_request_builder.go
@@ -6,7 +6,7 @@ import (
 	i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
 	i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
 
-	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
+	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models"
 )
 
 // ItemPagesItemCanvasLayoutHorizontalSectionsHorizontalSectionItemRequestBuilder
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_count_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_count_request_builder.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_count_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_count_request_builder.go
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_horizontal_section_column_item_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_horizontal_section_column_item_request_builder.go
similarity index 99%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_horizontal_section_column_item_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_horizontal_section_column_item_request_builder.go
index c1a8315ce..761a7930e 100644
--- a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_horizontal_section_column_item_request_builder.go
+++ b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_horizontal_section_column_item_request_builder.go
@@ -6,7 +6,7 @@ import (
 	i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
 	i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
 
-	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
+	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models"
 )
 
 // ItemPagesItemCanvasLayoutHorizontalSectionsItemColumnsHorizontalSectionColumnItemRequestBuilder provides operations to manage the columns property of the microsoft.graph.horizontalSection entity.
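The long identifiers in these hunks (i2ae4187f..., i7ad325c1..., ifda19816...) are kiota-generated named import aliases: each alias stands in for the package name at every call site, so moving the module from internal/connector to internal/m365 only changes the quoted path, which is why each of these files needs exactly one changed line. In plain Go, a named import alias works like this (the alias "osalias" is arbitrary, chosen here just for illustration):

```go
package main

// A named import alias: the identifier before the path replaces the
// package name at call sites, exactly how the generated i2ae.../ifda...
// aliases in the hunks above are used.
import osalias "os"

func main() {
	osalias.Stdout.WriteString("aliased import\n")
}
```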
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_count_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_count_request_builder.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_count_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_count_request_builder.go
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_item_get_position_of_web_part_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_item_get_position_of_web_part_request_builder.go
similarity index 98%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_item_get_position_of_web_part_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_item_get_position_of_web_part_request_builder.go
index 14429d80f..8e7e069ac 100644
--- a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_item_get_position_of_web_part_request_builder.go
+++ b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_item_get_position_of_web_part_request_builder.go
@@ -6,7 +6,7 @@ import (
 	i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
 	i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
 
-	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
+	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models"
 )
 
 // ItemPagesItemCanvasLayoutHorizontalSectionsItemColumnsItemWebpartsItemGetPositionOfWebPartRequestBuilder provides operations to call the getPositionOfWebPart method.
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_request_builder.go
similarity index 99%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_request_builder.go
index 4d2a94186..e4627c8d2 100644
--- a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_request_builder.go
+++ b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_request_builder.go
@@ -6,7 +6,7 @@ import (
 	i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
 	i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
 
-	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
+	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models"
 )
 
 // ItemPagesItemCanvasLayoutHorizontalSectionsItemColumnsItemWebpartsRequestBuilder provides operations to manage the webparts property of the microsoft.graph.horizontalSectionColumn entity.
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_web_part_item_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_web_part_item_request_builder.go
similarity index 99%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_web_part_item_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_web_part_item_request_builder.go
index 0ce7becda..728610b97 100644
--- a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_web_part_item_request_builder.go
+++ b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_item_webparts_web_part_item_request_builder.go
@@ -6,7 +6,7 @@ import (
 	i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
 	i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
 
-	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
+	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models"
 )
 
 // ItemPagesItemCanvasLayoutHorizontalSectionsItemColumnsItemWebpartsWebPartItemRequestBuilder provides operations to manage the webparts property of the microsoft.graph.horizontalSectionColumn entity.
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_request_builder.go
similarity index 99%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_request_builder.go
index 368738104..df91fde72 100644
--- a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_request_builder.go
+++ b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_item_columns_request_builder.go
@@ -6,7 +6,7 @@ import (
 	i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
 	i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
 
-	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
+	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models"
 )
 
 // ItemPagesItemCanvasLayoutHorizontalSectionsItemColumnsRequestBuilder provides operations to manage the columns property of the microsoft.graph.horizontalSection entity.
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_request_builder.go
similarity index 99%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_request_builder.go
index 829b830f0..ddce74a8c 100644
--- a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_request_builder.go
+++ b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_horizontal_sections_request_builder.go
@@ -6,7 +6,7 @@ import (
 	i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
 	i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
 
-	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
+	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models"
 )
 
 // ItemPagesItemCanvasLayoutHorizontalSectionsRequestBuilder provides operations to manage the horizontalSections property of the microsoft.graph.canvasLayout entity.
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_request_builder.go
similarity index 99%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_request_builder.go
index bbca05a9b..d717ca335 100644
--- a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_request_builder.go
+++ b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_request_builder.go
@@ -6,7 +6,7 @@ import (
 	i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
 	i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
 
-	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
+	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models"
 )
 
 // ItemPagesItemCanvasLayoutRequestBuilder provides operations to manage the canvasLayout property of the microsoft.graph.sitePage entity.
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_request_builder.go
similarity index 99%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_request_builder.go
index 3fbb916ca..47eecd453 100644
--- a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_request_builder.go
+++ b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_request_builder.go
@@ -6,7 +6,7 @@ import (
 	i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
 	i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
 
-	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
+	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models"
 )
 
 // ItemPagesItemCanvasLayoutVerticalSectionRequestBuilder provides operations to manage the verticalSection property of the microsoft.graph.canvasLayout entity.
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_count_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_count_request_builder.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_count_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_count_request_builder.go
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_item_get_position_of_web_part_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_item_get_position_of_web_part_request_builder.go
similarity index 98%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_item_get_position_of_web_part_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_item_get_position_of_web_part_request_builder.go
index d63ff729a..095417526 100644
--- a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_item_get_position_of_web_part_request_builder.go
+++ b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_item_get_position_of_web_part_request_builder.go
@@ -6,7 +6,7 @@ import (
 	i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
 	i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
 
-	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
+	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models"
 )
 
 // ItemPagesItemCanvasLayoutVerticalSectionWebpartsItemGetPositionOfWebPartRequestBuilder provides operations to call the getPositionOfWebPart method.
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_request_builder.go
similarity index 99%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_request_builder.go
index bd1885be1..c6e339cc2 100644
--- a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_request_builder.go
+++ b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_request_builder.go
@@ -6,7 +6,7 @@ import (
 	i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
 	i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
 
-	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
+	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models"
 )
 
 // ItemPagesItemCanvasLayoutVerticalSectionWebpartsRequestBuilder provides operations to manage the webparts property of the microsoft.graph.verticalSection entity.
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_web_part_item_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_web_part_item_request_builder.go
similarity index 99%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_web_part_item_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_web_part_item_request_builder.go
index fdb5025c5..02fa2b0ac 100644
--- a/src/internal/connector/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_web_part_item_request_builder.go
+++ b/src/internal/m365/graph/betasdk/sites/item_pages_item_canvas_layout_vertical_section_webparts_web_part_item_request_builder.go
@@ -6,7 +6,7 @@ import (
 	i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
 	i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
 
-	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
+	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models"
 )
 
 // ItemPagesItemCanvasLayoutVerticalSectionWebpartsWebPartItemRequestBuilder provides operations to manage the webparts property of the microsoft.graph.verticalSection entity.
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_post_request_body.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_post_request_body.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_post_request_body.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_post_request_body.go
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_post_request_bodyable.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_post_request_bodyable.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_post_request_bodyable.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_post_request_bodyable.go
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_request_builder.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_request_builder.go
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_response.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_response.go
similarity index 97%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_response.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_response.go
index fa89d8855..3e2b17cd8 100644
--- a/src/internal/connector/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_response.go
+++ b/src/internal/m365/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_response.go
@@ -4,7 +4,7 @@ import (
 	i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
 	msmodel "github.com/microsoftgraph/msgraph-sdk-go/models"
 
-	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
+	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models"
 )
 
 // ItemPagesItemGetWebPartsByPositionResponse provides operations to call the getWebPartsByPosition method.
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_responseable.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_responseable.go
similarity index 90%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_responseable.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_responseable.go
index f862929ab..ef91ec75c 100644
--- a/src/internal/connector/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_responseable.go
+++ b/src/internal/m365/graph/betasdk/sites/item_pages_item_get_web_parts_by_position_responseable.go
@@ -4,7 +4,7 @@ import (
 	i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
 	msmodel "github.com/microsoftgraph/msgraph-sdk-go/models"
 
-	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
+	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models"
 )
 
 // ItemPagesItemGetWebPartsByPositionResponseable
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_publish_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_publish_request_builder.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_publish_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_publish_request_builder.go
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_web_parts_count_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_web_parts_count_request_builder.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_web_parts_count_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_web_parts_count_request_builder.go
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_web_parts_item_get_position_of_web_part_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_web_parts_item_get_position_of_web_part_request_builder.go
similarity index 98%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_web_parts_item_get_position_of_web_part_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_web_parts_item_get_position_of_web_part_request_builder.go
index 9db79ace5..af2795798 100644
--- a/src/internal/connector/graph/betasdk/sites/item_pages_item_web_parts_item_get_position_of_web_part_request_builder.go
+++ b/src/internal/m365/graph/betasdk/sites/item_pages_item_web_parts_item_get_position_of_web_part_request_builder.go
@@ -6,7 +6,7 @@ import (
 	i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
 	i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
 
-	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
+	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models"
 )
 
 // ItemPagesItemWebPartsItemGetPositionOfWebPartRequestBuilder provides operations to call the getPositionOfWebPart method.
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_web_parts_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_web_parts_request_builder.go
similarity index 99%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_web_parts_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_web_parts_request_builder.go
index e2e32c640..6d93b9b76 100644
--- a/src/internal/connector/graph/betasdk/sites/item_pages_item_web_parts_request_builder.go
+++ b/src/internal/m365/graph/betasdk/sites/item_pages_item_web_parts_request_builder.go
@@ -6,7 +6,7 @@ import (
 	i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
 	i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
 
-	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
+	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models"
 )
 
 // ItemPagesItemWebPartsRequestBuilder provides operations to manage the webParts property of the microsoft.graph.sitePage entity.
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_item_web_parts_web_part_item_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_item_web_parts_web_part_item_request_builder.go
similarity index 99%
rename from src/internal/connector/graph/betasdk/sites/item_pages_item_web_parts_web_part_item_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_item_web_parts_web_part_item_request_builder.go
index 1c16fc8df..e8be53a64 100644
--- a/src/internal/connector/graph/betasdk/sites/item_pages_item_web_parts_web_part_item_request_builder.go
+++ b/src/internal/m365/graph/betasdk/sites/item_pages_item_web_parts_web_part_item_request_builder.go
@@ -6,7 +6,7 @@ import (
 	i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
 	i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
 
-	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
+	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models"
 )
 
 // ItemPagesItemWebPartsWebPartItemRequestBuilder provides operations to manage the webParts property of the microsoft.graph.sitePage entity.
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_request_builder.go
similarity index 99%
rename from src/internal/connector/graph/betasdk/sites/item_pages_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_request_builder.go
index 6c82f58df..aa5e63865 100644
--- a/src/internal/connector/graph/betasdk/sites/item_pages_request_builder.go
+++ b/src/internal/m365/graph/betasdk/sites/item_pages_request_builder.go
@@ -6,7 +6,7 @@ import (
 	i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
 	i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
 
-	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
+	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models"
 )
 
 // ItemPagesRequestBuilder provides operations to manage the pages property of the microsoft.graph.site entity.
diff --git a/src/internal/connector/graph/betasdk/sites/item_pages_site_page_item_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_pages_site_page_item_request_builder.go
similarity index 99%
rename from src/internal/connector/graph/betasdk/sites/item_pages_site_page_item_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_pages_site_page_item_request_builder.go
index 29fda72bd..6b118713a 100644
--- a/src/internal/connector/graph/betasdk/sites/item_pages_site_page_item_request_builder.go
+++ b/src/internal/m365/graph/betasdk/sites/item_pages_site_page_item_request_builder.go
@@ -6,7 +6,7 @@ import (
 	i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
 	i7ad325c11fbf3db4d761c429267362d8b24daa1eda0081f914ebc3cdc85181a0 "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
 
-	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
+	ifda19816f54f079134d70c11e75d6b26799300cf72079e282f1d3bb9a6750354 "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models"
 )
 
 // ItemPagesSitePageItemRequestBuilder provides operations to manage the pages property of the microsoft.graph.site entity.
diff --git a/src/internal/connector/graph/betasdk/sites/item_sites_count_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_sites_count_request_builder.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/sites/item_sites_count_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_sites_count_request_builder.go
diff --git a/src/internal/connector/graph/betasdk/sites/item_sites_site_item_request_builder.go b/src/internal/m365/graph/betasdk/sites/item_sites_site_item_request_builder.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/sites/item_sites_site_item_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/item_sites_site_item_request_builder.go
diff --git a/src/internal/connector/graph/betasdk/sites/site_item_request_builder.go b/src/internal/m365/graph/betasdk/sites/site_item_request_builder.go
similarity index 100%
rename from src/internal/connector/graph/betasdk/sites/site_item_request_builder.go
rename to src/internal/m365/graph/betasdk/sites/site_item_request_builder.go
diff --git a/src/internal/connector/graph/cache_container.go b/src/internal/m365/graph/cache_container.go
similarity index 100%
rename from src/internal/connector/graph/cache_container.go
rename to src/internal/m365/graph/cache_container.go
diff --git a/src/internal/connector/graph/collections.go b/src/internal/m365/graph/collections.go
similarity index 98%
rename from src/internal/connector/graph/collections.go
rename to src/internal/m365/graph/collections.go
index ee941f81c..24a8138f3 100644
--- a/src/internal/connector/graph/collections.go
+++ b/src/internal/m365/graph/collections.go
@@ -5,8 +5,8 @@ import (
 	"github.com/alcionai/clues"
 
-	"github.com/alcionai/corso/src/internal/connector/support"
 	"github.com/alcionai/corso/src/internal/data"
+	"github.com/alcionai/corso/src/internal/m365/support"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
 )
diff --git a/src/internal/connector/graph/collections_test.go b/src/internal/m365/graph/collections_test.go
similarity index 100%
rename from src/internal/connector/graph/collections_test.go
rename to src/internal/m365/graph/collections_test.go
diff --git a/src/internal/connector/graph/concurrency_middleware.go b/src/internal/m365/graph/concurrency_middleware.go
similarity index 100%
rename from src/internal/connector/graph/concurrency_middleware.go
rename to src/internal/m365/graph/concurrency_middleware.go
diff --git a/src/internal/connector/graph/concurrency_middleware_test.go b/src/internal/m365/graph/concurrency_middleware_test.go
similarity index 100%
rename from src/internal/connector/graph/concurrency_middleware_test.go
rename to src/internal/m365/graph/concurrency_middleware_test.go
diff --git a/src/internal/connector/graph/consts.go b/src/internal/m365/graph/consts.go
similarity index 100%
rename from src/internal/connector/graph/consts.go
rename to src/internal/m365/graph/consts.go
diff --git a/src/internal/connector/graph/consts_test.go b/src/internal/m365/graph/consts_test.go
similarity index 100%
rename from src/internal/connector/graph/consts_test.go
rename to src/internal/m365/graph/consts_test.go
diff --git a/src/internal/connector/graph/errors.go b/src/internal/m365/graph/errors.go
similarity index 100%
rename from src/internal/connector/graph/errors.go
rename to src/internal/m365/graph/errors.go
diff --git a/src/internal/connector/graph/errors_test.go b/src/internal/m365/graph/errors_test.go
similarity index 100%
rename from src/internal/connector/graph/errors_test.go
rename to src/internal/m365/graph/errors_test.go
diff --git a/src/internal/connector/graph/http_wrapper.go b/src/internal/m365/graph/http_wrapper.go
similarity index 100%
rename from src/internal/connector/graph/http_wrapper.go
rename to src/internal/m365/graph/http_wrapper.go
diff --git a/src/internal/connector/graph/http_wrapper_test.go b/src/internal/m365/graph/http_wrapper_test.go
similarity index 100%
rename from src/internal/connector/graph/http_wrapper_test.go
rename to src/internal/m365/graph/http_wrapper_test.go
diff --git a/src/internal/connector/graph/metadata/metadata.go b/src/internal/m365/graph/metadata/metadata.go
similarity index 83%
rename from src/internal/connector/graph/metadata/metadata.go
rename to src/internal/m365/graph/metadata/metadata.go
index 6aa0d5fa6..9b61a3fc0 100644
--- a/src/internal/connector/graph/metadata/metadata.go
+++ b/src/internal/m365/graph/metadata/metadata.go
@@ -1,7 +1,7 @@
 package metadata
 
 import (
-	"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
+	"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
 	"github.com/alcionai/corso/src/pkg/path"
 )
diff --git a/src/internal/connector/graph/metadata/metadata_test.go b/src/internal/m365/graph/metadata/metadata_test.go
similarity index 95%
rename from src/internal/connector/graph/metadata/metadata_test.go
rename to src/internal/m365/graph/metadata/metadata_test.go
index 2abef52d3..6501c667c 100644
--- a/src/internal/connector/graph/metadata/metadata_test.go
+++ b/src/internal/m365/graph/metadata/metadata_test.go
@@ -9,8 +9,8 @@ import (
 	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"
 
-	"github.com/alcionai/corso/src/internal/connector/graph/metadata"
-	odmetadata "github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
+	"github.com/alcionai/corso/src/internal/m365/graph/metadata"
+	odmetadata "github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/path"
 )
diff --git a/src/internal/connector/graph/metadata_collection.go b/src/internal/m365/graph/metadata_collection.go
similarity index 96%
rename from src/internal/connector/graph/metadata_collection.go
rename to src/internal/m365/graph/metadata_collection.go
index 4b29aa792..1c8112d47 100644
--- a/src/internal/connector/graph/metadata_collection.go
+++ b/src/internal/m365/graph/metadata_collection.go
@@ -8,8 +8,8 @@ import (
 	"github.com/alcionai/clues"
 
-	"github.com/alcionai/corso/src/internal/connector/support"
 	"github.com/alcionai/corso/src/internal/data"
+	"github.com/alcionai/corso/src/internal/m365/support"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
 )
@@ -115,13 +115,13 @@ func (md MetadataCollection) FullPath() path.Path {
 	return md.fullPath
 }
 
-// TODO(ashmrtn): Fill in with previous path once GraphConnector compares old
+// TODO(ashmrtn): Fill in with previous path once the Controller compares old
 // and new folder hierarchies.
 func (md MetadataCollection) PreviousPath() path.Path {
 	return nil
 }
 
-// TODO(ashmrtn): Fill in once GraphConnector compares old and new folder
+// TODO(ashmrtn): Fill in once the Controller compares old and new folder
 // hierarchies.
 func (md MetadataCollection) State() data.CollectionState {
 	return data.NewState
diff --git a/src/internal/connector/graph/metadata_collection_test.go b/src/internal/m365/graph/metadata_collection_test.go
similarity index 96%
rename from src/internal/connector/graph/metadata_collection_test.go
rename to src/internal/m365/graph/metadata_collection_test.go
index ef3058c90..3748d03b2 100644
--- a/src/internal/connector/graph/metadata_collection_test.go
+++ b/src/internal/m365/graph/metadata_collection_test.go
@@ -11,7 +11,7 @@ import (
 	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"
 
-	"github.com/alcionai/corso/src/internal/connector/support"
+	"github.com/alcionai/corso/src/internal/m365/support"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
@@ -82,7 +82,7 @@ func (suite *MetadataCollectionUnitSuite) TestItems() {
 	c := NewMetadataCollection(
 		p,
 		items,
-		func(c *support.ConnectorOperationStatus) {
+		func(c *support.ControllerOperationStatus) {
 			assert.Equal(t, len(itemNames), c.Metrics.Objects)
 			assert.Equal(t, len(itemNames), c.Metrics.Successes)
 		},
@@ -167,7 +167,7 @@ func (suite *MetadataCollectionUnitSuite) TestMakeMetadataCollection() {
 			test.service,
 			test.cat,
 			[]MetadataCollectionEntry{test.metadata},
-			func(*support.ConnectorOperationStatus) {})
+			func(*support.ControllerOperationStatus) {})
 		test.errCheck(t, err, clues.ToCore(err))
 
 		if err != nil {
diff --git a/src/internal/connector/graph/middleware.go b/src/internal/m365/graph/middleware.go
similarity index 100%
rename from src/internal/connector/graph/middleware.go
rename to src/internal/m365/graph/middleware.go
diff --git a/src/internal/connector/graph/middleware_test.go b/src/internal/m365/graph/middleware_test.go
similarity index 100%
rename from src/internal/connector/graph/middleware_test.go
rename to src/internal/m365/graph/middleware_test.go
diff --git a/src/internal/connector/graph/mock/service.go b/src/internal/m365/graph/mock/service.go
similarity index 95%
rename from src/internal/connector/graph/mock/service.go
rename to src/internal/m365/graph/mock/service.go
index a44d9f1ca..813488626 100644
--- a/src/internal/connector/graph/mock/service.go
+++ b/src/internal/m365/graph/mock/service.go
@@ -5,7 +5,7 @@ import (
 	"github.com/h2non/gock"
 	msgraphsdkgo "github.com/microsoftgraph/msgraph-sdk-go"
 
-	"github.com/alcionai/corso/src/internal/connector/graph"
+	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/pkg/account"
 )
diff --git a/src/internal/connector/graph/service.go b/src/internal/m365/graph/service.go
similarity index 100%
rename from src/internal/connector/graph/service.go
rename to src/internal/m365/graph/service.go
diff --git a/src/internal/connector/graph/service_test.go b/src/internal/m365/graph/service_test.go
similarity index 100%
rename from src/internal/connector/graph/service_test.go
rename to src/internal/m365/graph/service_test.go
diff --git a/src/internal/connector/graph/uploadsession.go b/src/internal/m365/graph/uploadsession.go
similarity index 100%
rename from src/internal/connector/graph/uploadsession.go
rename to src/internal/m365/graph/uploadsession.go
diff --git a/src/internal/connector/graph/uploadsession_test.go b/src/internal/m365/graph/uploadsession_test.go
similarity index 100%
rename from src/internal/connector/graph/uploadsession_test.go
rename to src/internal/m365/graph/uploadsession_test.go
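The ConnectorOperationStatus to ControllerOperationStatus rename above is purely mechanical: every call site is a support.StatusUpdater callback of the form func(*support.ControllerOperationStatus). As a reference for the shape those callbacks take, here is a capture-style updater modeled directly on the testStatusUpdater helper further down in this patch; it's a fragment that assumes the "testing" import and the renamed support package from these hunks:

    // captureStatus stores the final status so a test can assert on its
    // metrics, mirroring the callbacks passed to NewMetadataCollection above
    func captureStatus(t *testing.T, dst *support.ControllerOperationStatus) support.StatusUpdater {
        return func(s *support.ControllerOperationStatus) {
            t.Logf("objects=%d successes=%d", s.Metrics.Objects, s.Metrics.Successes)
            *dst = *s
        }
    }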
diff --git a/src/internal/connector/graph_connector_disconnected_test.go b/src/internal/m365/graph_connector_disconnected_test.go
similarity index 88%
rename from src/internal/connector/graph_connector_disconnected_test.go
rename to src/internal/m365/graph_connector_disconnected_test.go
index 845e00693..c2dee9d58 100644
--- a/src/internal/connector/graph_connector_disconnected_test.go
+++ b/src/internal/m365/graph_connector_disconnected_test.go
@@ -1,4 +1,4 @@
-package connector
+package m365
 
 import (
 	"sync"
@@ -8,7 +8,7 @@ import (
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/suite"
 
-	"github.com/alcionai/corso/src/internal/connector/support"
+	"github.com/alcionai/corso/src/internal/m365/support"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/selectors"
 	selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
@@ -17,12 +17,12 @@ import (
 // ---------------------------------------------------------------
 // Disconnected Test Section
 // ---------------------------------------------------------------
-type DisconnectedGraphConnectorSuite struct {
+type DisconnectedSuite struct {
 	tester.Suite
 }
 
-func TestDisconnectedGraphSuite(t *testing.T) {
-	s := &DisconnectedGraphConnectorSuite{
+func TestSuite(t *testing.T) {
+	s := &DisconnectedSuite{
 		Suite: tester.NewUnitSuite(t),
 	}
 
@@ -31,7 +31,7 @@ func TestDisconnectedGraphSuite(t *testing.T) {
 
 func statusTestTask(
 	t *testing.T,
-	gc *GraphConnector,
+	ctrl *Controller,
 	objects, success, folder int,
 ) {
 	ctx, flush := tester.NewContext(t)
@@ -46,24 +46,24 @@ func statusTestTask(
 			Bytes: 0,
 		},
 		"statusTestTask")
 
-	gc.UpdateStatus(status)
+	ctrl.UpdateStatus(status)
 }
 
-func (suite *DisconnectedGraphConnectorSuite) TestGraphConnector_Status() {
+func (suite *DisconnectedSuite) TestController_Status() {
 	t := suite.T()
-	gc := GraphConnector{wg: &sync.WaitGroup{}}
+	ctrl := Controller{wg: &sync.WaitGroup{}}
 
 	// Two tasks
-	gc.incrementAwaitingMessages()
-	gc.incrementAwaitingMessages()
+	ctrl.incrementAwaitingMessages()
+	ctrl.incrementAwaitingMessages()
 
 	// Each helper task processes 4 objects, 1 success, 3 errors, 1 folders
-	go statusTestTask(t, &gc, 4, 1, 1)
-	go statusTestTask(t, &gc, 4, 1, 1)
+	go statusTestTask(t, &ctrl, 4, 1, 1)
+	go statusTestTask(t, &ctrl, 4, 1, 1)
 
-	stats := gc.Wait()
+	stats := ctrl.Wait()
 
-	assert.NotEmpty(t, gc.PrintableStatus())
+	assert.NotEmpty(t, ctrl.PrintableStatus())
 	// Expect 8 objects
 	assert.Equal(t, 8, stats.Objects)
 	// Expect 2 success
@@ -72,7 +72,7 @@ func (suite *DisconnectedGraphConnectorSuite) TestGraphConnector_Status() {
 	assert.Equal(t, 2, stats.Folders)
 }
 
-func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs_allServices() {
+func (suite *DisconnectedSuite) TestVerifyBackupInputs_allServices() {
 	sites := []string{"abc.site.foo", "bar.site.baz"}
 
 	tests := []struct {
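TestController_Status above is the clearest picture of the Controller's bookkeeping: the test registers two tasks up front with incrementAwaitingMessages, each task reports exactly once via UpdateStatus, and Wait blocks until every registered report has arrived before returning the summed stats. The same fan-in reduces to a few lines; the program below is a hypothetical standalone sketch of that pattern, not corso's implementation:

    package main

    import (
        "fmt"
        "sync"
    )

    // statusAggregator: one WaitGroup slot per registered producer, each
    // report folded in under a mutex, totals released only after every
    // producer has reported.
    type statusAggregator struct {
        wg      sync.WaitGroup
        mu      sync.Mutex
        objects int
    }

    func (a *statusAggregator) register() { a.wg.Add(1) }

    func (a *statusAggregator) report(objects int) {
        a.mu.Lock()
        a.objects += objects
        a.mu.Unlock()
        a.wg.Done()
    }

    func (a *statusAggregator) wait() int {
        a.wg.Wait()
        return a.objects
    }

    func main() {
        var agg statusAggregator

        // two tasks, four objects each, matching the test above
        agg.register()
        agg.register()

        go agg.report(4)
        go agg.report(4)

        fmt.Println(agg.wait()) // prints 8
    }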
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata" "github.com/alcionai/corso/src/internal/data" + "github.com/alcionai/corso/src/internal/m365/onedrive" + "github.com/alcionai/corso/src/internal/m365/onedrive/metadata" + "github.com/alcionai/corso/src/internal/m365/resource" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/path" @@ -102,14 +103,14 @@ type restoreBackupInfo struct { name string service path.ServiceType collections []ColInfo - resource Resource + resourceCat resource.Category } type restoreBackupInfoMultiVersion struct { service path.ServiceType collectionsLatest []ColInfo collectionsPrevious []ColInfo - resource Resource + resource resource.Category backupVersion int } @@ -940,7 +941,7 @@ func checkCollections( ) // Need to iterate through all items even if we don't expect to find a match - // because otherwise we'll deadlock waiting for GC status. Unexpected or + // because otherwise we'll deadlock waiting for the status. Unexpected or // missing collection paths will be reported by checkHasCollections. for item := range returned.Items(ctx, fault.New(true)) { // Skip metadata collections as they aren't directly related to items to @@ -984,7 +985,7 @@ func checkCollections( checkHasCollections(t, expected, collectionsWithItems) // Return how many metadata files were skipped so we can account for it in the - // check on GraphConnector status. + // check on Controller status. return skipped } @@ -1151,10 +1152,10 @@ func getSelectorWith( } } -func loadConnector(ctx context.Context, t *testing.T, r Resource) *GraphConnector { +func loadController(ctx context.Context, t *testing.T, r resource.Category) *Controller { a := tester.NewM365Account(t) - connector, err := NewGraphConnector(ctx, a, r) + connector, err := NewController(ctx, a, r) require.NoError(t, err, clues.ToCore(err)) return connector diff --git a/src/internal/connector/graph_connector_onedrive_test.go b/src/internal/m365/graph_connector_onedrive_test.go similarity index 87% rename from src/internal/connector/graph_connector_onedrive_test.go rename to src/internal/m365/graph_connector_onedrive_test.go index 0b20245ee..1a45992d3 100644 --- a/src/internal/connector/graph_connector_onedrive_test.go +++ b/src/internal/m365/graph_connector_onedrive_test.go @@ -1,4 +1,4 @@ -package connector +package m365 import ( "context" @@ -13,9 +13,10 @@ import ( "github.com/stretchr/testify/suite" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/graph" - odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts" - "github.com/alcionai/corso/src/internal/connector/onedrive/metadata" + "github.com/alcionai/corso/src/internal/m365/graph" + odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts" + "github.com/alcionai/corso/src/internal/m365/onedrive/metadata" + "github.com/alcionai/corso/src/internal/m365/resource" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/pkg/control" @@ -86,7 +87,7 @@ type suiteInfo interface { // also be a site. 
diff --git a/src/internal/connector/graph_connector_onedrive_test.go b/src/internal/m365/graph_connector_onedrive_test.go
similarity index 87%
rename from src/internal/connector/graph_connector_onedrive_test.go
rename to src/internal/m365/graph_connector_onedrive_test.go
index 0b20245ee..1a45992d3 100644
--- a/src/internal/connector/graph_connector_onedrive_test.go
+++ b/src/internal/m365/graph_connector_onedrive_test.go
@@ -1,4 +1,4 @@
-package connector
+package m365
 
 import (
 	"context"
@@ -13,9 +13,10 @@ import (
 	"github.com/stretchr/testify/suite"
 
 	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/internal/connector/graph"
-	odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
-	"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
+	"github.com/alcionai/corso/src/internal/m365/graph"
+	odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts"
+	"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
+	"github.com/alcionai/corso/src/internal/m365/resource"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/internal/version"
 	"github.com/alcionai/corso/src/pkg/control"
@@ -86,7 +87,7 @@ type suiteInfo interface {
 	// also be a site.
 	ResourceOwner() string
 	Service() path.ServiceType
-	Resource() Resource
+	Resource() resource.Category
 }
 
 type oneDriveSuite interface {
@@ -96,9 +97,9 @@ type oneDriveSuite interface {
 
 type suiteInfoImpl struct {
 	ac              api.Client
-	connector       *GraphConnector
+	controller      *Controller
 	resourceOwner   string
-	resourceType    Resource
+	resourceCat     resource.Category
 	secondaryUser   string
 	secondaryUserID string
 	service         path.ServiceType
@@ -114,18 +115,18 @@ func NewSuiteInfoImpl(
 	resourceOwner string,
 	service path.ServiceType,
 ) suiteInfoImpl {
-	resource := Users
+	rc := resource.Users
 	if service == path.SharePointService {
-		resource = Sites
+		rc = resource.Sites
 	}
 
-	gc := loadConnector(ctx, t, resource)
+	ctrl := loadController(ctx, t, rc)
 
 	return suiteInfoImpl{
-		ac:            gc.AC,
-		connector:     gc,
+		ac:            ctrl.AC,
+		controller:    ctrl,
 		resourceOwner: resourceOwner,
-		resourceType:  resource,
+		resourceCat:   rc,
 		secondaryUser: tester.SecondaryM365UserID(t),
 		service:       service,
 		tertiaryUser:  tester.TertiaryM365UserID(t),
@@ -138,7 +139,7 @@ func (si suiteInfoImpl) APIClient() api.Client {
 }
 
 func (si suiteInfoImpl) Tenant() string {
-	return si.connector.tenant
+	return si.controller.tenant
 }
 
 func (si suiteInfoImpl) PrimaryUser() (string, string) {
@@ -161,8 +162,8 @@ func (si suiteInfoImpl) Service() path.ServiceType {
 	return si.service
 }
 
-func (si suiteInfoImpl) Resource() Resource {
-	return si.resourceType
+func (si suiteInfoImpl) Resource() resource.Category {
+	return si.resourceCat
 }
 
 // ---------------------------------------------------------------------------
@@ -172,20 +173,20 @@ func (si suiteInfoImpl) Resource() Resource {
 // only test simple things here and leave the more extensive testing to
 // OneDrive.
 
-type GraphConnectorSharePointIntegrationSuite struct {
+type SharePointIntegrationSuite struct {
 	tester.Suite
 	suiteInfo
 }
 
-func TestGraphConnectorSharePointIntegrationSuite(t *testing.T) {
-	suite.Run(t, &GraphConnectorSharePointIntegrationSuite{
+func TestSharePointIntegrationSuite(t *testing.T) {
+	suite.Run(t, &SharePointIntegrationSuite{
 		Suite: tester.NewIntegrationSuite(
 			t,
 			[][]string{tester.M365AcctCredEnvs}),
 	})
 }
 
-func (suite *GraphConnectorSharePointIntegrationSuite) SetupSuite() {
+func (suite *SharePointIntegrationSuite) SetupSuite() {
 	t := suite.T()
 
 	ctx, flush := tester.NewContext(t)
@@ -194,38 +195,38 @@ func (suite *GraphConnectorSharePointIntegrationSuite) SetupSuite() {
 
 	si := NewSuiteInfoImpl(suite.T(), ctx, tester.M365SiteID(suite.T()), path.SharePointService)
 
 	// users needed for permissions
-	user, err := si.connector.AC.Users().GetByID(ctx, si.user)
+	user, err := si.controller.AC.Users().GetByID(ctx, si.user)
 	require.NoError(t, err, "fetching user", si.user, clues.ToCore(err))
 	si.userID = ptr.Val(user.GetId())
 
-	secondaryUser, err := si.connector.AC.Users().GetByID(ctx, si.secondaryUser)
+	secondaryUser, err := si.controller.AC.Users().GetByID(ctx, si.secondaryUser)
 	require.NoError(t, err, "fetching user", si.secondaryUser, clues.ToCore(err))
 	si.secondaryUserID = ptr.Val(secondaryUser.GetId())
 
-	tertiaryUser, err := si.connector.AC.Users().GetByID(ctx, si.tertiaryUser)
+	tertiaryUser, err := si.controller.AC.Users().GetByID(ctx, si.tertiaryUser)
 	require.NoError(t, err, "fetching user", si.tertiaryUser, clues.ToCore(err))
 	si.tertiaryUserID = ptr.Val(tertiaryUser.GetId())
 
 	suite.suiteInfo = si
 }
 
-func (suite *GraphConnectorSharePointIntegrationSuite) TestRestoreAndBackup_MultipleFilesAndFolders_NoPermissions() {
+func (suite *SharePointIntegrationSuite) TestRestoreAndBackup_MultipleFilesAndFolders_NoPermissions() {
 	testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(suite, version.Backup)
 }
 
-func (suite *GraphConnectorSharePointIntegrationSuite) TestPermissionsRestoreAndBackup() {
+func (suite *SharePointIntegrationSuite) TestPermissionsRestoreAndBackup() {
 	testPermissionsRestoreAndBackup(suite, version.Backup)
 }
 
-func (suite *GraphConnectorSharePointIntegrationSuite) TestPermissionsBackupAndNoRestore() {
+func (suite *SharePointIntegrationSuite) TestPermissionsBackupAndNoRestore() {
 	testPermissionsBackupAndNoRestore(suite, version.Backup)
 }
 
-func (suite *GraphConnectorSharePointIntegrationSuite) TestPermissionsInheritanceRestoreAndBackup() {
+func (suite *SharePointIntegrationSuite) TestPermissionsInheritanceRestoreAndBackup() {
 	testPermissionsInheritanceRestoreAndBackup(suite, version.Backup)
 }
 
-func (suite *GraphConnectorSharePointIntegrationSuite) TestRestoreFolderNamedFolderRegression() {
+func (suite *SharePointIntegrationSuite) TestRestoreFolderNamedFolderRegression() {
 	// No reason why it couldn't work with previous versions, but this is when it got introduced.
 	testRestoreFolderNamedFolderRegression(suite, version.All8MigrateUserPNToID)
 }
@@ -233,20 +234,20 @@ func (suite *GraphConnectorSharePointIntegrationSuite) TestRestoreFolderNamedFol
 // ---------------------------------------------------------------------------
 // OneDrive most recent backup version
 // ---------------------------------------------------------------------------
-type GraphConnectorOneDriveIntegrationSuite struct {
+type OneDriveIntegrationSuite struct {
 	tester.Suite
 	suiteInfo
 }
 
-func TestGraphConnectorOneDriveIntegrationSuite(t *testing.T) {
-	suite.Run(t, &GraphConnectorOneDriveIntegrationSuite{
+func TestOneDriveIntegrationSuite(t *testing.T) {
+	suite.Run(t, &OneDriveIntegrationSuite{
		Suite: tester.NewIntegrationSuite(
 			t,
 			[][]string{tester.M365AcctCredEnvs}),
 	})
 }
 
-func (suite *GraphConnectorOneDriveIntegrationSuite) SetupSuite() {
+func (suite *OneDriveIntegrationSuite) SetupSuite() {
 	t := suite.T()
 
 	ctx, flush := tester.NewContext(t)
@@ -254,38 +255,38 @@ func (suite *GraphConnectorOneDriveIntegrationSuite) SetupSuite() {
 
 	si := NewSuiteInfoImpl(t, ctx, tester.M365UserID(t), path.OneDriveService)
 
-	user, err := si.connector.AC.Users().GetByID(ctx, si.user)
+	user, err := si.controller.AC.Users().GetByID(ctx, si.user)
 	require.NoError(t, err, "fetching user", si.user, clues.ToCore(err))
 	si.userID = ptr.Val(user.GetId())
 
-	secondaryUser, err := si.connector.AC.Users().GetByID(ctx, si.secondaryUser)
+	secondaryUser, err := si.controller.AC.Users().GetByID(ctx, si.secondaryUser)
 	require.NoError(t, err, "fetching user", si.secondaryUser, clues.ToCore(err))
 	si.secondaryUserID = ptr.Val(secondaryUser.GetId())
 
-	tertiaryUser, err := si.connector.AC.Users().GetByID(ctx, si.tertiaryUser)
+	tertiaryUser, err := si.controller.AC.Users().GetByID(ctx, si.tertiaryUser)
 	require.NoError(t, err, "fetching user", si.tertiaryUser, clues.ToCore(err))
 	si.tertiaryUserID = ptr.Val(tertiaryUser.GetId())
 
 	suite.suiteInfo = si
 }
 
-func (suite *GraphConnectorOneDriveIntegrationSuite) TestRestoreAndBackup_MultipleFilesAndFolders_NoPermissions() {
+func (suite *OneDriveIntegrationSuite) TestRestoreAndBackup_MultipleFilesAndFolders_NoPermissions() {
 	testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(suite, version.Backup)
 }
 
-func (suite *GraphConnectorOneDriveIntegrationSuite) TestPermissionsRestoreAndBackup() {
+func (suite *OneDriveIntegrationSuite) TestPermissionsRestoreAndBackup() {
 	testPermissionsRestoreAndBackup(suite, version.Backup)
 }
 
-func (suite *GraphConnectorOneDriveIntegrationSuite) TestPermissionsBackupAndNoRestore() {
+func (suite *OneDriveIntegrationSuite) TestPermissionsBackupAndNoRestore() {
 	testPermissionsBackupAndNoRestore(suite, version.Backup)
 }
 
-func (suite *GraphConnectorOneDriveIntegrationSuite) TestPermissionsInheritanceRestoreAndBackup() {
+func (suite *OneDriveIntegrationSuite) TestPermissionsInheritanceRestoreAndBackup() {
 	testPermissionsInheritanceRestoreAndBackup(suite, version.Backup)
 }
 
-func (suite *GraphConnectorOneDriveIntegrationSuite) TestRestoreFolderNamedFolderRegression() {
+func (suite *OneDriveIntegrationSuite) TestRestoreFolderNamedFolderRegression() {
 	// No reason why it couldn't work with previous versions, but this is when it got introduced.
 	testRestoreFolderNamedFolderRegression(suite, version.All8MigrateUserPNToID)
 }
@@ -293,20 +294,20 @@ func (suite *GraphConnectorOneDriveIntegrationSuite) TestRestoreFolderNamedFolde
 // ---------------------------------------------------------------------------
 // OneDrive regression
 // ---------------------------------------------------------------------------
-type GraphConnectorOneDriveNightlySuite struct {
+type OneDriveNightlySuite struct {
 	tester.Suite
 	suiteInfo
 }
 
-func TestGraphConnectorOneDriveNightlySuite(t *testing.T) {
-	suite.Run(t, &GraphConnectorOneDriveNightlySuite{
+func TestOneDriveNightlySuite(t *testing.T) {
+	suite.Run(t, &OneDriveNightlySuite{
 		Suite: tester.NewNightlySuite(
 			t,
 			[][]string{tester.M365AcctCredEnvs}),
 	})
 }
 
-func (suite *GraphConnectorOneDriveNightlySuite) SetupSuite() {
+func (suite *OneDriveNightlySuite) SetupSuite() {
 	t := suite.T()
 
 	ctx, flush := tester.NewContext(t)
@@ -314,39 +315,39 @@ func (suite *GraphConnectorOneDriveNightlySuite) SetupSuite() {
 
 	si := NewSuiteInfoImpl(t, ctx, tester.M365UserID(t), path.OneDriveService)
 
-	user, err := si.connector.AC.Users().GetByID(ctx, si.user)
+	user, err := si.controller.AC.Users().GetByID(ctx, si.user)
 	require.NoError(t, err, "fetching user", si.user, clues.ToCore(err))
 	si.userID = ptr.Val(user.GetId())
 
-	secondaryUser, err := si.connector.AC.Users().GetByID(ctx, si.secondaryUser)
+	secondaryUser, err := si.controller.AC.Users().GetByID(ctx, si.secondaryUser)
 	require.NoError(t, err, "fetching user", si.secondaryUser, clues.ToCore(err))
 	si.secondaryUserID = ptr.Val(secondaryUser.GetId())
 
-	tertiaryUser, err := si.connector.AC.Users().GetByID(ctx, si.tertiaryUser)
+	tertiaryUser, err := si.controller.AC.Users().GetByID(ctx, si.tertiaryUser)
 	require.NoError(t, err, "fetching user", si.tertiaryUser, clues.ToCore(err))
 	si.tertiaryUserID = ptr.Val(tertiaryUser.GetId())
 
 	suite.suiteInfo = si
 }
 
-func (suite *GraphConnectorOneDriveNightlySuite) TestRestoreAndBackup_MultipleFilesAndFolders_NoPermissions() {
+func (suite *OneDriveNightlySuite) TestRestoreAndBackup_MultipleFilesAndFolders_NoPermissions() {
 	testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(suite, 0)
 }
 
-func (suite *GraphConnectorOneDriveNightlySuite) TestPermissionsRestoreAndBackup() {
+func (suite *OneDriveNightlySuite) TestPermissionsRestoreAndBackup() {
 	testPermissionsRestoreAndBackup(suite, version.OneDrive1DataAndMetaFiles)
 }
 
-func (suite *GraphConnectorOneDriveNightlySuite) TestPermissionsBackupAndNoRestore() {
+func (suite *OneDriveNightlySuite) TestPermissionsBackupAndNoRestore() {
 	testPermissionsBackupAndNoRestore(suite, version.OneDrive1DataAndMetaFiles)
 }
 
-func (suite *GraphConnectorOneDriveNightlySuite) TestPermissionsInheritanceRestoreAndBackup() {
+func (suite *OneDriveNightlySuite) TestPermissionsInheritanceRestoreAndBackup() {
 	// No reason why it couldn't work with previous versions, but this is when it got introduced.
 	testPermissionsInheritanceRestoreAndBackup(suite, version.OneDrive4DirIncludesPermissions)
 }
 
-func (suite *GraphConnectorOneDriveNightlySuite) TestRestoreFolderNamedFolderRegression() {
+func (suite *OneDriveNightlySuite) TestRestoreFolderNamedFolderRegression() {
 	// No reason why it couldn't work with previous versions, but this is when it got introduced.
 	testRestoreFolderNamedFolderRegression(suite, version.All8MigrateUserPNToID)
 }
diff --git a/src/internal/connector/graph_connector_onedrive_test_helper.go b/src/internal/m365/graph_connector_onedrive_test_helper.go
similarity index 98%
rename from src/internal/connector/graph_connector_onedrive_test_helper.go
rename to src/internal/m365/graph_connector_onedrive_test_helper.go
index b70543019..77acc1b7d 100644
--- a/src/internal/connector/graph_connector_onedrive_test_helper.go
+++ b/src/internal/m365/graph_connector_onedrive_test_helper.go
@@ -1,4 +1,4 @@
-package connector
+package m365
 
 import (
 	"encoding/json"
@@ -8,9 +8,9 @@ import (
 	"github.com/google/uuid"
 	"golang.org/x/exp/maps"
 
-	odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
-	"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
 	"github.com/alcionai/corso/src/internal/data"
+	odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts"
+	"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
 	"github.com/alcionai/corso/src/internal/version"
 	"github.com/alcionai/corso/src/pkg/path"
 )
diff --git a/src/internal/connector/graph_connector_test_helper.go b/src/internal/m365/graph_connector_test_helper.go
similarity index 95%
rename from src/internal/connector/graph_connector_test_helper.go
rename to src/internal/m365/graph_connector_test_helper.go
index 8ea552190..f95d1781f 100644
--- a/src/internal/connector/graph_connector_test_helper.go
+++ b/src/internal/m365/graph_connector_test_helper.go
@@ -1,13 +1,14 @@
-package connector
+package m365
 
 import (
 	"bytes"
 	"context"
 	"io"
 
-	exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
-	"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
 	"github.com/alcionai/corso/src/internal/data"
+	exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
+	"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
+	"github.com/alcionai/corso/src/internal/m365/resource"
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/path"
 )
@@ -39,7 +40,7 @@ type ItemInfo struct {
 
 type ConfigInfo struct {
 	Opts           control.Options
-	Resource       Resource
+	Resource       resource.Category
 	Service        path.ServiceType
 	Tenant         string
 	ResourceOwners []string
diff --git a/src/internal/connector/mock/connector.go b/src/internal/m365/mock/connector.go
similarity index 73%
rename from src/internal/connector/mock/connector.go
rename to src/internal/m365/mock/connector.go
index 2e2faec77..870e0b549 100644
--- a/src/internal/connector/mock/connector.go
+++ b/src/internal/m365/mock/connector.go
@@ -14,9 +14,9 @@ import (
 	"github.com/alcionai/corso/src/pkg/selectors"
 )
 
-var _ inject.BackupProducer = &GraphConnector{}
+var _ inject.BackupProducer = &Controller{}
 
-type GraphConnector struct {
+type Controller struct {
 	Collections []data.BackupCollection
 	Exclude     *prefixmatcher.StringSetMatcher
 
@@ -27,7 +27,7 @@ type GraphConnector struct {
 	Stats data.CollectionStats
 }
 
-func (gc GraphConnector) ProduceBackupCollections(
+func (ctrl Controller) ProduceBackupCollections(
 	_ context.Context,
 	_ idname.Provider,
 	_ selectors.Selector,
@@ -41,22 +41,22 @@ func (gc GraphConnector) ProduceBackupCollections(
 	bool,
 	error,
 ) {
-	return gc.Collections, gc.Exclude, gc.Err == nil, gc.Err
+	return ctrl.Collections, ctrl.Exclude, ctrl.Err == nil, ctrl.Err
 }
 
-func (gc GraphConnector) IsBackupRunnable(
+func (ctrl Controller) IsBackupRunnable(
 	_ context.Context,
 	_ path.ServiceType,
 	_ string,
 ) (bool, error) {
-	return true, gc.Err
+	return true, ctrl.Err
 }
 
-func (gc GraphConnector) Wait() *data.CollectionStats {
-	return &gc.Stats
+func (ctrl Controller) Wait() *data.CollectionStats {
+	return &ctrl.Stats
 }
 
-func (gc GraphConnector) ConsumeRestoreCollections(
+func (ctrl Controller) ConsumeRestoreCollections(
 	_ context.Context,
 	_ int,
 	_ selectors.Selector,
@@ -65,5 +65,5 @@ func (gc GraphConnector) ConsumeRestoreCollections(
 	_ []data.RestoreCollection,
 	_ *fault.Bus,
 ) (*details.Details, error) {
-	return gc.Deets, gc.Err
+	return ctrl.Deets, ctrl.Err
 }
diff --git a/src/internal/connector/mock/id_name_getter.go b/src/internal/m365/mock/id_name_getter.go
similarity index 100%
rename from src/internal/connector/mock/id_name_getter.go
rename to src/internal/m365/mock/id_name_getter.go
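The mock package keeps its value-receiver methods and the compile-time interface assertion, so the rename drops in without touching callers. A hedged sketch of how a test might stub the backup producer with it, a fragment assuming the imports visible in the hunk plus testify's exported assert.AnError sentinel:

    // a canned producer: no collections and a fixed error, letting callers
    // exercise failure paths without touching the Graph API at all
    producer := &mock.Controller{
        Collections: nil,
        Err:         assert.AnError,
    }

    // the same compile-time guarantee the package itself declares
    var _ inject.BackupProducer = producer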
diff --git a/src/internal/connector/onedrive/data_collections.go b/src/internal/m365/onedrive/backup.go
similarity index 94%
rename from src/internal/connector/onedrive/data_collections.go
rename to src/internal/m365/onedrive/backup.go
index b4823428d..06aab38ae 100644
--- a/src/internal/connector/onedrive/data_collections.go
+++ b/src/internal/m365/onedrive/backup.go
@@ -7,9 +7,9 @@ import (
 	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/common/prefixmatcher"
-	"github.com/alcionai/corso/src/internal/connector/graph"
-	"github.com/alcionai/corso/src/internal/connector/support"
 	"github.com/alcionai/corso/src/internal/data"
+	"github.com/alcionai/corso/src/internal/m365/graph"
+	"github.com/alcionai/corso/src/internal/m365/support"
 	"github.com/alcionai/corso/src/internal/version"
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/fault"
@@ -31,9 +31,9 @@ func (fm odFolderMatcher) Matches(dir string) bool {
 	return fm.scope.Matches(selectors.OneDriveFolder, dir)
 }
 
-// OneDriveDataCollections returns a set of DataCollection which represents the OneDrive data
+// ProduceBackupCollections returns a set of DataCollection which represents the OneDrive data
 // for the specified user
-func DataCollections(
+func ProduceBackupCollections(
 	ctx context.Context,
 	ac api.Client,
 	selector selectors.Selector,
diff --git a/src/internal/connector/onedrive/data_collections_test.go b/src/internal/m365/onedrive/backup_test.go
similarity index 91%
rename from src/internal/connector/onedrive/data_collections_test.go
rename to src/internal/m365/onedrive/backup_test.go
index 62af1fd6b..f5a4261ad 100644
--- a/src/internal/connector/onedrive/data_collections_test.go
+++ b/src/internal/m365/onedrive/backup_test.go
@@ -16,15 +16,15 @@ import (
 	"github.com/alcionai/corso/src/pkg/selectors"
 )
 
-type DataCollectionsUnitSuite struct {
+type BackupUnitSuite struct {
 	tester.Suite
 }
 
-func TestDataCollectionsUnitSuite(t *testing.T) {
-	suite.Run(t, &DataCollectionsUnitSuite{Suite: tester.NewUnitSuite(t)})
+func TestBackupUnitSuite(t *testing.T) {
+	suite.Run(t, &BackupUnitSuite{Suite: tester.NewUnitSuite(t)})
 }
 
-func (suite *DataCollectionsUnitSuite) TestMigrationCollections() {
+func (suite *BackupUnitSuite) TestMigrationCollections() {
 	u := selectors.Selector{}
 	u = u.SetDiscreteOwnerIDName("i", "n")
diff --git a/src/internal/connector/onedrive/collection.go b/src/internal/m365/onedrive/collection.go
similarity index 98%
rename from src/internal/connector/onedrive/collection.go
rename to src/internal/m365/onedrive/collection.go
index b2abbdcc9..3e9fe5157 100644
--- a/src/internal/connector/onedrive/collection.go
+++ b/src/internal/m365/onedrive/collection.go
@@ -14,10 +14,10 @@ import (
 	"github.com/spatialcurrent/go-lazy/pkg/lazy"
 
 	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/internal/connector/graph"
-	"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
-	"github.com/alcionai/corso/src/internal/connector/support"
 	"github.com/alcionai/corso/src/internal/data"
+	"github.com/alcionai/corso/src/internal/m365/graph"
+	"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
+	"github.com/alcionai/corso/src/internal/m365/support"
 	"github.com/alcionai/corso/src/internal/observe"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/control"
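Beyond the move, the one substantive rename in backup.go is DataCollections becoming ProduceBackupCollections, matching the method name on the BackupProducer interface the mock asserts above. The odFolderMatcher adapter it keeps narrows a full OneDrive scope to a single "does this directory match?" question. A hypothetical stand-in with the same shape, useful where a real selector isn't needed (a fragment assuming the "strings" import; illustrative only, not corso code):

    // prefixMatcher answers Matches the way odFolderMatcher does, but against
    // a plain string prefix instead of a selector scope
    type prefixMatcher struct{ prefix string }

    func (m prefixMatcher) Matches(dir string) bool {
        return strings.HasPrefix(dir, m.prefix)
    }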
[]data.Stream{} ) @@ -276,7 +276,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadError() { var ( t = suite.T() stubItemID = "fakeItemID" - collStatus = support.ConnectorOperationStatus{} + collStatus = support.ControllerOperationStatus{} wg = sync.WaitGroup{} name = "name" size int64 = 42 @@ -343,7 +343,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadUnauthorizedErrorRetry() var ( t = suite.T() stubItemID = "fakeItemID" - collStatus = support.ConnectorOperationStatus{} + collStatus = support.ControllerOperationStatus{} wg = sync.WaitGroup{} name = "name" size int64 = 42 @@ -412,7 +412,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionPermissionBackupLatestModTim stubItemID = "fakeItemID" stubItemName = "Fake Item" stubItemSize = int64(10) - collStatus = support.ConnectorOperationStatus{} + collStatus = support.ControllerOperationStatus{} wg = sync.WaitGroup{} ) diff --git a/src/internal/connector/onedrive/collections.go b/src/internal/m365/onedrive/collections.go similarity index 98% rename from src/internal/connector/onedrive/collections.go rename to src/internal/m365/onedrive/collections.go index c367477d8..d05b06820 100644 --- a/src/internal/connector/onedrive/collections.go +++ b/src/internal/m365/onedrive/collections.go @@ -13,11 +13,11 @@ import ( "github.com/alcionai/corso/src/internal/common/prefixmatcher" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/graph" - odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts" - "github.com/alcionai/corso/src/internal/connector/onedrive/metadata" - "github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/data" + "github.com/alcionai/corso/src/internal/m365/graph" + odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts" + "github.com/alcionai/corso/src/internal/m365/onedrive/metadata" + "github.com/alcionai/corso/src/internal/m365/support" "github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/fault" diff --git a/src/internal/connector/onedrive/collections_test.go b/src/internal/m365/onedrive/collections_test.go similarity index 99% rename from src/internal/connector/onedrive/collections_test.go rename to src/internal/m365/onedrive/collections_test.go index f5bed49a4..2888bc149 100644 --- a/src/internal/connector/onedrive/collections_test.go +++ b/src/internal/m365/onedrive/collections_test.go @@ -15,12 +15,12 @@ import ( "github.com/alcionai/corso/src/internal/common/prefixmatcher" pmMock "github.com/alcionai/corso/src/internal/common/prefixmatcher/mock" - "github.com/alcionai/corso/src/internal/connector/graph" - odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts" - "github.com/alcionai/corso/src/internal/connector/onedrive/metadata" - "github.com/alcionai/corso/src/internal/connector/onedrive/mock" - "github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/data" + "github.com/alcionai/corso/src/internal/m365/graph" + odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts" + "github.com/alcionai/corso/src/internal/m365/onedrive/metadata" + "github.com/alcionai/corso/src/internal/m365/onedrive/mock" + "github.com/alcionai/corso/src/internal/m365/support" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/fault" @@ -1135,7 +1135,7 @@ func (suite 
*OneDriveCollectionsUnitSuite) TestDeserializeMetadata() { path.OneDriveService, path.FilesCategory, c(), - func(*support.ConnectorOperationStatus) {}) + func(*support.ControllerOperationStatus) {}) require.NoError(t, err, clues.ToCore(err)) cols = append(cols, data.NoFetchRestoreCollection{Collection: mc}) @@ -2346,7 +2346,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { tenant, user, testFolderMatcher{anyFolder}, - func(*support.ConnectorOperationStatus) {}, + func(*support.ControllerOperationStatus) {}, control.Options{ToggleFeatures: control.Toggles{}}) prevDelta := "prev-delta" @@ -2366,7 +2366,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { graph.PreviousPathFileName, test.prevFolderPaths), }, - func(*support.ConnectorOperationStatus) {}, + func(*support.ControllerOperationStatus) {}, ) assert.NoError(t, err, "creating metadata collection", clues.ToCore(err)) diff --git a/src/internal/connector/onedrive/consts/consts.go b/src/internal/m365/onedrive/consts/consts.go similarity index 100% rename from src/internal/connector/onedrive/consts/consts.go rename to src/internal/m365/onedrive/consts/consts.go diff --git a/src/internal/connector/onedrive/drive.go b/src/internal/m365/onedrive/drive.go similarity index 99% rename from src/internal/connector/onedrive/drive.go rename to src/internal/m365/onedrive/drive.go index 5f7841e28..5a4fadf68 100644 --- a/src/internal/connector/onedrive/drive.go +++ b/src/internal/m365/onedrive/drive.go @@ -9,7 +9,7 @@ import ( "golang.org/x/exp/maps" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/graph" + "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/services/m365/api" diff --git a/src/internal/connector/onedrive/drive_test.go b/src/internal/m365/onedrive/drive_test.go similarity index 99% rename from src/internal/connector/onedrive/drive_test.go rename to src/internal/m365/onedrive/drive_test.go index 393da8405..348722b78 100644 --- a/src/internal/connector/onedrive/drive_test.go +++ b/src/internal/m365/onedrive/drive_test.go @@ -16,7 +16,7 @@ import ( "github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/internal/common/prefixmatcher" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/graph" + "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/control" diff --git a/src/internal/connector/onedrive/folder_cache.go b/src/internal/m365/onedrive/folder_cache.go similarity index 100% rename from src/internal/connector/onedrive/folder_cache.go rename to src/internal/m365/onedrive/folder_cache.go diff --git a/src/internal/connector/onedrive/handlers.go b/src/internal/m365/onedrive/handlers.go similarity index 100% rename from src/internal/connector/onedrive/handlers.go rename to src/internal/m365/onedrive/handlers.go diff --git a/src/internal/connector/onedrive/item.go b/src/internal/m365/onedrive/item.go similarity index 96% rename from src/internal/connector/onedrive/item.go rename to src/internal/m365/onedrive/item.go index 6c954fd40..c6215e9ae 100644 --- a/src/internal/connector/onedrive/item.go +++ b/src/internal/m365/onedrive/item.go @@ -11,8 +11,8 @@ import ( "github.com/alcionai/corso/src/internal/common/ptr" 
"github.com/alcionai/corso/src/internal/common/str" - "github.com/alcionai/corso/src/internal/connector/graph" - "github.com/alcionai/corso/src/internal/connector/onedrive/metadata" + "github.com/alcionai/corso/src/internal/m365/graph" + "github.com/alcionai/corso/src/internal/m365/onedrive/metadata" "github.com/alcionai/corso/src/pkg/services/m365/api" ) diff --git a/src/internal/connector/onedrive/item_handler.go b/src/internal/m365/onedrive/item_handler.go similarity index 98% rename from src/internal/connector/onedrive/item_handler.go rename to src/internal/m365/onedrive/item_handler.go index 001c3a019..a95791237 100644 --- a/src/internal/connector/onedrive/item_handler.go +++ b/src/internal/m365/onedrive/item_handler.go @@ -9,7 +9,7 @@ import ( "github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/alcionai/corso/src/internal/common/ptr" - odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts" + odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/services/m365/api" diff --git a/src/internal/connector/onedrive/item_handler_test.go b/src/internal/m365/onedrive/item_handler_test.go similarity index 100% rename from src/internal/connector/onedrive/item_handler_test.go rename to src/internal/m365/onedrive/item_handler_test.go diff --git a/src/internal/connector/onedrive/item_test.go b/src/internal/m365/onedrive/item_test.go similarity index 100% rename from src/internal/connector/onedrive/item_test.go rename to src/internal/m365/onedrive/item_test.go diff --git a/src/internal/connector/onedrive/metadata/consts.go b/src/internal/m365/onedrive/metadata/consts.go similarity index 100% rename from src/internal/connector/onedrive/metadata/consts.go rename to src/internal/m365/onedrive/metadata/consts.go diff --git a/src/internal/connector/onedrive/metadata/metadata.go b/src/internal/m365/onedrive/metadata/metadata.go similarity index 100% rename from src/internal/connector/onedrive/metadata/metadata.go rename to src/internal/m365/onedrive/metadata/metadata.go diff --git a/src/internal/connector/onedrive/metadata/permissions.go b/src/internal/m365/onedrive/metadata/permissions.go similarity index 100% rename from src/internal/connector/onedrive/metadata/permissions.go rename to src/internal/m365/onedrive/metadata/permissions.go diff --git a/src/internal/connector/onedrive/metadata/permissions_test.go b/src/internal/m365/onedrive/metadata/permissions_test.go similarity index 100% rename from src/internal/connector/onedrive/metadata/permissions_test.go rename to src/internal/m365/onedrive/metadata/permissions_test.go diff --git a/src/internal/connector/onedrive/metadata/testdata/permissions.go b/src/internal/m365/onedrive/metadata/testdata/permissions.go similarity index 94% rename from src/internal/connector/onedrive/metadata/testdata/permissions.go rename to src/internal/m365/onedrive/metadata/testdata/permissions.go index 130368a37..a3ccc5cb3 100644 --- a/src/internal/connector/onedrive/metadata/testdata/permissions.go +++ b/src/internal/m365/onedrive/metadata/testdata/permissions.go @@ -6,7 +6,7 @@ import ( "github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/stretchr/testify/assert" - "github.com/alcionai/corso/src/internal/connector/onedrive/metadata" + "github.com/alcionai/corso/src/internal/m365/onedrive/metadata" ) func AssertMetadataEqual(t *testing.T, expect, got metadata.Metadata) { diff --git 
a/src/internal/connector/onedrive/mock/handlers.go b/src/internal/m365/onedrive/mock/handlers.go similarity index 98% rename from src/internal/connector/onedrive/mock/handlers.go rename to src/internal/m365/onedrive/mock/handlers.go index 0c33d8158..0b48ffa6c 100644 --- a/src/internal/connector/onedrive/mock/handlers.go +++ b/src/internal/m365/onedrive/mock/handlers.go @@ -7,7 +7,7 @@ import ( "github.com/alcionai/clues" "github.com/microsoftgraph/msgraph-sdk-go/models" - odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts" + odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/services/m365/api" diff --git a/src/internal/connector/onedrive/mock/item.go b/src/internal/m365/onedrive/mock/item.go similarity index 100% rename from src/internal/connector/onedrive/mock/item.go rename to src/internal/m365/onedrive/mock/item.go diff --git a/src/internal/connector/onedrive/permission.go b/src/internal/m365/onedrive/permission.go similarity index 98% rename from src/internal/connector/onedrive/permission.go rename to src/internal/m365/onedrive/permission.go index 683ca90e7..642f8d751 100644 --- a/src/internal/connector/onedrive/permission.go +++ b/src/internal/m365/onedrive/permission.go @@ -8,8 +8,8 @@ import ( "github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/onedrive/metadata" "github.com/alcionai/corso/src/internal/data" + "github.com/alcionai/corso/src/internal/m365/onedrive/metadata" "github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/pkg/path" ) diff --git a/src/internal/connector/onedrive/permission_test.go b/src/internal/m365/onedrive/permission_test.go similarity index 96% rename from src/internal/connector/onedrive/permission_test.go rename to src/internal/m365/onedrive/permission_test.go index 672db97f8..c345d693e 100644 --- a/src/internal/connector/onedrive/permission_test.go +++ b/src/internal/m365/onedrive/permission_test.go @@ -8,8 +8,8 @@ import ( "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" - odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts" - "github.com/alcionai/corso/src/internal/connector/onedrive/metadata" + odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts" + "github.com/alcionai/corso/src/internal/m365/onedrive/metadata" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/path" ) diff --git a/src/internal/connector/onedrive/restore.go b/src/internal/m365/onedrive/restore.go similarity index 98% rename from src/internal/connector/onedrive/restore.go rename to src/internal/m365/onedrive/restore.go index 67c5f53bf..cdd6a9844 100644 --- a/src/internal/connector/onedrive/restore.go +++ b/src/internal/m365/onedrive/restore.go @@ -15,11 +15,11 @@ import ( "github.com/pkg/errors" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/graph" - "github.com/alcionai/corso/src/internal/connector/onedrive/metadata" - "github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/diagnostics" + "github.com/alcionai/corso/src/internal/m365/graph" + "github.com/alcionai/corso/src/internal/m365/onedrive/metadata" + 
"github.com/alcionai/corso/src/internal/m365/support" "github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/pkg/backup/details" @@ -69,7 +69,7 @@ func RestoreCollections( dcs []data.RestoreCollection, deets *details.Builder, errs *fault.Bus, -) (*support.ConnectorOperationStatus, error) { +) (*support.ControllerOperationStatus, error) { var ( restoreMetrics support.CollectionMetrics caches = NewRestoreCaches() @@ -102,7 +102,7 @@ func RestoreCollections( "full_path", dc.FullPath()) ) - metrics, err = RestoreCollection( + metrics, err = ProduceRestoreCollection( ictx, rh, backupVersion, @@ -133,12 +133,12 @@ func RestoreCollections( return status, el.Failure() } -// RestoreCollection handles restoration of an individual collection. +// ProduceRestoreCollection handles restoration of an individual collection. // returns: // - the collection's item and byte count metrics // - the updated metadata map that include metadata for folders in this collection // - error, if any besides recoverable -func RestoreCollection( +func ProduceRestoreCollection( ctx context.Context, rh RestoreHandler, backupVersion int, diff --git a/src/internal/connector/onedrive/restore_test.go b/src/internal/m365/onedrive/restore_test.go similarity index 100% rename from src/internal/connector/onedrive/restore_test.go rename to src/internal/m365/onedrive/restore_test.go diff --git a/src/internal/connector/onedrive/service_test.go b/src/internal/m365/onedrive/service_test.go similarity index 84% rename from src/internal/connector/onedrive/service_test.go rename to src/internal/m365/onedrive/service_test.go index 455520e75..046b72085 100644 --- a/src/internal/connector/onedrive/service_test.go +++ b/src/internal/m365/onedrive/service_test.go @@ -6,7 +6,7 @@ import ( "github.com/alcionai/clues" "github.com/stretchr/testify/require" - "github.com/alcionai/corso/src/internal/connector/support" + "github.com/alcionai/corso/src/internal/m365/support" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/services/m365/api" @@ -15,7 +15,7 @@ import ( // TODO(ashmrtn): Merge with similar structs in graph and exchange packages. 
type oneDriveService struct { credentials account.M365Config - status support.ConnectorOperationStatus + status support.ControllerOperationStatus ac api.Client } @@ -33,7 +33,7 @@ func NewOneDriveService(credentials account.M365Config) (*oneDriveService, error return &service, nil } -func (ods *oneDriveService) updateStatus(status *support.ConnectorOperationStatus) { +func (ods *oneDriveService) updateStatus(status *support.ControllerOperationStatus) { if status == nil { return } diff --git a/src/internal/connector/onedrive/testdata/item.go b/src/internal/m365/onedrive/testdata/item.go similarity index 100% rename from src/internal/connector/onedrive/testdata/item.go rename to src/internal/m365/onedrive/testdata/item.go diff --git a/src/internal/connector/onedrive/url_cache.go b/src/internal/m365/onedrive/url_cache.go similarity index 100% rename from src/internal/connector/onedrive/url_cache.go rename to src/internal/m365/onedrive/url_cache.go diff --git a/src/internal/connector/onedrive/url_cache_test.go b/src/internal/m365/onedrive/url_cache_test.go similarity index 98% rename from src/internal/connector/onedrive/url_cache_test.go rename to src/internal/m365/onedrive/url_cache_test.go index 97a7b87b1..c3674c8c7 100644 --- a/src/internal/connector/onedrive/url_cache_test.go +++ b/src/internal/m365/onedrive/url_cache_test.go @@ -13,7 +13,7 @@ import ( "github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/graph" + "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/services/m365/api" diff --git a/src/internal/m365/resource/resource.go b/src/internal/m365/resource/resource.go new file mode 100644 index 000000000..f91a853a6 --- /dev/null +++ b/src/internal/m365/resource/resource.go @@ -0,0 +1,9 @@ +package resource + +type Category string + +const ( + UnknownResource Category = "" + Users Category = "users" + Sites Category = "sites" +) diff --git a/src/internal/m365/restore.go b/src/internal/m365/restore.go new file mode 100644 index 000000000..f04d3296c --- /dev/null +++ b/src/internal/m365/restore.go @@ -0,0 +1,75 @@ +package m365 + +import ( + "context" + + "github.com/alcionai/clues" + + "github.com/alcionai/corso/src/internal/data" + "github.com/alcionai/corso/src/internal/diagnostics" + "github.com/alcionai/corso/src/internal/m365/exchange" + "github.com/alcionai/corso/src/internal/m365/graph" + "github.com/alcionai/corso/src/internal/m365/onedrive" + "github.com/alcionai/corso/src/internal/m365/sharepoint" + "github.com/alcionai/corso/src/internal/m365/support" + "github.com/alcionai/corso/src/pkg/backup/details" + "github.com/alcionai/corso/src/pkg/control" + "github.com/alcionai/corso/src/pkg/fault" + "github.com/alcionai/corso/src/pkg/selectors" +) + +// ConsumeRestoreCollections restores data from the specified collections +// into M365 using the GraphAPI. 
+// SideEffect: status is updated at the completion of the operation
+func (ctrl *Controller) ConsumeRestoreCollections(
+	ctx context.Context,
+	backupVersion int,
+	sels selectors.Selector,
+	restoreCfg control.RestoreConfig,
+	opts control.Options,
+	dcs []data.RestoreCollection,
+	errs *fault.Bus,
+) (*details.Details, error) {
+	ctx, end := diagnostics.Span(ctx, "m365:restore")
+	defer end()
+
+	ctx = graph.BindRateLimiterConfig(ctx, graph.LimiterCfg{Service: sels.PathService()})
+
+	var (
+		status *support.ControllerOperationStatus
+		deets  = &details.Builder{}
+		err    error
+	)
+
+	switch sels.Service {
+	case selectors.ServiceExchange:
+		status, err = exchange.ConsumeRestoreCollections(ctx, ctrl.AC, restoreCfg, dcs, deets, errs)
+	case selectors.ServiceOneDrive:
+		status, err = onedrive.RestoreCollections(
+			ctx,
+			onedrive.NewRestoreHandler(ctrl.AC),
+			backupVersion,
+			restoreCfg,
+			opts,
+			dcs,
+			deets,
+			errs)
+	case selectors.ServiceSharePoint:
+		status, err = sharepoint.ConsumeRestoreCollections(
+			ctx,
+			backupVersion,
+			ctrl.AC,
+			restoreCfg,
+			opts,
+			dcs,
+			deets,
+			errs)
+	default:
+		err = clues.Wrap(clues.New(sels.Service.String()), "service not supported")
+	}
+
+	ctrl.incrementAwaitingMessages()
+	ctrl.UpdateStatus(status)
+
+	return deets.Details(), err
+}
diff --git a/src/internal/connector/sharepoint/api/beta_service.go b/src/internal/m365/sharepoint/api/beta_service.go
similarity index 94%
rename from src/internal/connector/sharepoint/api/beta_service.go
rename to src/internal/m365/sharepoint/api/beta_service.go
index 13bafbaa4..b184002a1 100644
--- a/src/internal/connector/sharepoint/api/beta_service.go
+++ b/src/internal/m365/sharepoint/api/beta_service.go
@@ -5,7 +5,7 @@ import (
 	"github.com/microsoft/kiota-abstractions-go/serialization"
 	msgraphsdkgo "github.com/microsoftgraph/msgraph-sdk-go"

-	"github.com/alcionai/corso/src/internal/connector/graph/betasdk"
+	"github.com/alcionai/corso/src/internal/m365/graph/betasdk"
 )

 // Service wraps BetaClient's functionality.
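The switch above is the whole dispatch story after this rename: one Controller entry point fans out to the exchange, onedrive, and sharepoint packages based on the selector's service. A minimal sketch of what calling it looks like, assuming an initialized account.Account, a prepared control.RestoreConfig, and collections already read back from the backup store; the package name, user ID, and error handling are illustrative, not part of the patch:

    package restoresketch

    import (
    	"context"

    	"github.com/alcionai/corso/src/internal/data"
    	"github.com/alcionai/corso/src/internal/m365"
    	"github.com/alcionai/corso/src/internal/m365/resource"
    	"github.com/alcionai/corso/src/internal/version"
    	"github.com/alcionai/corso/src/pkg/account"
    	"github.com/alcionai/corso/src/pkg/backup/details"
    	"github.com/alcionai/corso/src/pkg/control"
    	"github.com/alcionai/corso/src/pkg/fault"
    	"github.com/alcionai/corso/src/pkg/selectors"
    )

    func restoreUserMail(
    	ctx context.Context,
    	acct account.Account,
    	restoreCfg control.RestoreConfig,
    	dcs []data.RestoreCollection,
    ) (*details.Details, error) {
    	// resource.Users replaces the old connector.Users category.
    	ctrl, err := m365.NewController(ctx, acct, resource.Users)
    	if err != nil {
    		return nil, err
    	}

    	// The selector's service picks the branch of the switch;
    	// "user-id" stands in for a real M365 user ID.
    	sel := selectors.NewExchangeRestore([]string{"user-id"}).Selector

    	deets, err := ctrl.ConsumeRestoreCollections(
    		ctx,
    		version.Backup,
    		sel,
    		restoreCfg,
    		control.Defaults(),
    		dcs,
    		fault.New(false))
    	if err != nil {
    		return nil, err
    	}

    	// Wait drains the controller's async status updates, mirroring
    	// the ctrl.Wait() calls in the integration tests further down.
    	ctrl.Wait()

    	return deets, nil
    }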
diff --git a/src/internal/connector/sharepoint/api/beta_service_test.go b/src/internal/m365/sharepoint/api/beta_service_test.go similarity index 88% rename from src/internal/connector/sharepoint/api/beta_service_test.go rename to src/internal/m365/sharepoint/api/beta_service_test.go index 802719f8e..4f4853274 100644 --- a/src/internal/connector/sharepoint/api/beta_service_test.go +++ b/src/internal/m365/sharepoint/api/beta_service_test.go @@ -8,8 +8,8 @@ import ( "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" - "github.com/alcionai/corso/src/internal/connector/graph" - "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models" + "github.com/alcionai/corso/src/internal/m365/graph" + "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models" "github.com/alcionai/corso/src/internal/tester" ) diff --git a/src/internal/connector/sharepoint/api/pages.go b/src/internal/m365/sharepoint/api/pages.go similarity index 97% rename from src/internal/connector/sharepoint/api/pages.go rename to src/internal/m365/sharepoint/api/pages.go index f3ee314e5..0434ff0a5 100644 --- a/src/internal/connector/sharepoint/api/pages.go +++ b/src/internal/m365/sharepoint/api/pages.go @@ -9,11 +9,11 @@ import ( "github.com/alcionai/clues" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/graph" - betamodels "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models" - betasites "github.com/alcionai/corso/src/internal/connector/graph/betasdk/sites" "github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/diagnostics" + "github.com/alcionai/corso/src/internal/m365/graph" + betamodels "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models" + betasites "github.com/alcionai/corso/src/internal/m365/graph/betasdk/sites" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/fault" ) diff --git a/src/internal/connector/sharepoint/api/pages_test.go b/src/internal/m365/sharepoint/api/pages_test.go similarity index 91% rename from src/internal/connector/sharepoint/api/pages_test.go rename to src/internal/m365/sharepoint/api/pages_test.go index ae9635ff8..0f2fa0471 100644 --- a/src/internal/connector/sharepoint/api/pages_test.go +++ b/src/internal/m365/sharepoint/api/pages_test.go @@ -10,10 +10,10 @@ import ( "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" - "github.com/alcionai/corso/src/internal/connector/graph" - "github.com/alcionai/corso/src/internal/connector/sharepoint" - "github.com/alcionai/corso/src/internal/connector/sharepoint/api" - spMock "github.com/alcionai/corso/src/internal/connector/sharepoint/mock" + "github.com/alcionai/corso/src/internal/m365/graph" + "github.com/alcionai/corso/src/internal/m365/sharepoint" + "github.com/alcionai/corso/src/internal/m365/sharepoint/api" + spMock "github.com/alcionai/corso/src/internal/m365/sharepoint/mock" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/fault" diff --git a/src/internal/connector/sharepoint/api/serialization.go b/src/internal/m365/sharepoint/api/serialization.go similarity index 98% rename from src/internal/connector/sharepoint/api/serialization.go rename to src/internal/m365/sharepoint/api/serialization.go index e1382ed94..c377bd998 100644 --- a/src/internal/connector/sharepoint/api/serialization.go +++ b/src/internal/m365/sharepoint/api/serialization.go @@ -9,7 +9,7 @@ import ( 
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/alcionai/corso/src/internal/common/ptr" - betamodels "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models" + betamodels "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models" ) // createFromBytes generates an m365 object form bytes. diff --git a/src/internal/connector/sharepoint/api/serialization_test.go b/src/internal/m365/sharepoint/api/serialization_test.go similarity index 94% rename from src/internal/connector/sharepoint/api/serialization_test.go rename to src/internal/m365/sharepoint/api/serialization_test.go index df6389f8d..099691d16 100644 --- a/src/internal/connector/sharepoint/api/serialization_test.go +++ b/src/internal/m365/sharepoint/api/serialization_test.go @@ -9,8 +9,8 @@ import ( "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" - bmodels "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models" - spMock "github.com/alcionai/corso/src/internal/connector/sharepoint/mock" + bmodels "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models" + spMock "github.com/alcionai/corso/src/internal/m365/sharepoint/mock" "github.com/alcionai/corso/src/internal/tester" ) diff --git a/src/internal/connector/sharepoint/data_collections.go b/src/internal/m365/sharepoint/backup.go similarity index 94% rename from src/internal/connector/sharepoint/data_collections.go rename to src/internal/m365/sharepoint/backup.go index 2575f12d3..ba5c45c23 100644 --- a/src/internal/connector/sharepoint/data_collections.go +++ b/src/internal/m365/sharepoint/backup.go @@ -7,11 +7,11 @@ import ( "github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/prefixmatcher" - "github.com/alcionai/corso/src/internal/connector/graph" - "github.com/alcionai/corso/src/internal/connector/onedrive" - betaAPI "github.com/alcionai/corso/src/internal/connector/sharepoint/api" - "github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/data" + "github.com/alcionai/corso/src/internal/m365/graph" + "github.com/alcionai/corso/src/internal/m365/onedrive" + betaAPI "github.com/alcionai/corso/src/internal/m365/sharepoint/api" + "github.com/alcionai/corso/src/internal/m365/support" "github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/control" @@ -23,12 +23,12 @@ import ( ) type statusUpdater interface { - UpdateStatus(status *support.ConnectorOperationStatus) + UpdateStatus(status *support.ControllerOperationStatus) } -// DataCollections returns a set of DataCollection which represents the SharePoint data +// ProduceBackupCollections returns a set of DataCollection which represents the SharePoint data // for the specified user -func DataCollections( +func ProduceBackupCollections( ctx context.Context, ac api.Client, selector selectors.Selector, diff --git a/src/internal/connector/sharepoint/data_collections_test.go b/src/internal/m365/sharepoint/backup_test.go similarity index 93% rename from src/internal/connector/sharepoint/data_collections_test.go rename to src/internal/m365/sharepoint/backup_test.go index 056d5c41d..d36e40377 100644 --- a/src/internal/connector/sharepoint/data_collections_test.go +++ b/src/internal/m365/sharepoint/backup_test.go @@ -10,8 +10,8 @@ import ( "github.com/stretchr/testify/suite" "github.com/alcionai/corso/src/internal/common/idname/mock" - "github.com/alcionai/corso/src/internal/connector/onedrive" - 
odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts" + "github.com/alcionai/corso/src/internal/m365/onedrive" + odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/fault" @@ -45,15 +45,15 @@ func (fm testFolderMatcher) Matches(p string) bool { // tests // --------------------------------------------------------------------------- -type SharePointLibrariesUnitSuite struct { +type LibrariesBackupUnitSuite struct { tester.Suite } -func TestSharePointLibrariesUnitSuite(t *testing.T) { - suite.Run(t, &SharePointLibrariesUnitSuite{Suite: tester.NewUnitSuite(t)}) +func TestLibrariesBackupUnitSuite(t *testing.T) { + suite.Run(t, &LibrariesBackupUnitSuite{Suite: tester.NewUnitSuite(t)}) } -func (suite *SharePointLibrariesUnitSuite) TestUpdateCollections() { +func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() { anyFolder := (&selectors.SharePointBackup{}).LibraryFolders(selectors.Any())[0] const ( diff --git a/src/internal/connector/sharepoint/collection.go b/src/internal/m365/sharepoint/collection.go similarity index 96% rename from src/internal/connector/sharepoint/collection.go rename to src/internal/m365/sharepoint/collection.go index 16cb016db..12db281a4 100644 --- a/src/internal/connector/sharepoint/collection.go +++ b/src/internal/m365/sharepoint/collection.go @@ -11,10 +11,10 @@ import ( kjson "github.com/microsoft/kiota-serialization-json-go" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/graph" - betaAPI "github.com/alcionai/corso/src/internal/connector/sharepoint/api" - "github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/data" + "github.com/alcionai/corso/src/internal/m365/graph" + betaAPI "github.com/alcionai/corso/src/internal/m365/sharepoint/api" + "github.com/alcionai/corso/src/internal/m365/support" "github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/control" @@ -90,7 +90,7 @@ func (sc *Collection) FullPath() path.Path { return sc.fullPath } -// TODO(ashmrtn): Fill in with previous path once GraphConnector compares old +// TODO(ashmrtn): Fill in with previous path once the Controller compares old // and new folder hierarchies. 
func (sc Collection) PreviousPath() path.Path { return nil diff --git a/src/internal/connector/sharepoint/collection_test.go b/src/internal/m365/sharepoint/collection_test.go similarity index 97% rename from src/internal/connector/sharepoint/collection_test.go rename to src/internal/m365/sharepoint/collection_test.go index 4d66a33c0..5381710c2 100644 --- a/src/internal/connector/sharepoint/collection_test.go +++ b/src/internal/m365/sharepoint/collection_test.go @@ -13,9 +13,9 @@ import ( "github.com/stretchr/testify/suite" "github.com/alcionai/corso/src/internal/common/ptr" - betaAPI "github.com/alcionai/corso/src/internal/connector/sharepoint/api" - spMock "github.com/alcionai/corso/src/internal/connector/sharepoint/mock" "github.com/alcionai/corso/src/internal/data" + betaAPI "github.com/alcionai/corso/src/internal/m365/sharepoint/api" + spMock "github.com/alcionai/corso/src/internal/m365/sharepoint/mock" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/control" diff --git a/src/internal/connector/sharepoint/datacategory_string.go b/src/internal/m365/sharepoint/datacategory_string.go similarity index 100% rename from src/internal/connector/sharepoint/datacategory_string.go rename to src/internal/m365/sharepoint/datacategory_string.go diff --git a/src/internal/connector/sharepoint/helper_test.go b/src/internal/m365/sharepoint/helper_test.go similarity index 81% rename from src/internal/connector/sharepoint/helper_test.go rename to src/internal/m365/sharepoint/helper_test.go index 7e6d592b2..006a5648c 100644 --- a/src/internal/connector/sharepoint/helper_test.go +++ b/src/internal/m365/sharepoint/helper_test.go @@ -7,8 +7,8 @@ import ( msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go" "github.com/stretchr/testify/require" - "github.com/alcionai/corso/src/internal/connector/graph" - "github.com/alcionai/corso/src/internal/connector/support" + "github.com/alcionai/corso/src/internal/m365/graph" + "github.com/alcionai/corso/src/internal/m365/support" "github.com/alcionai/corso/src/pkg/account" ) @@ -18,10 +18,10 @@ import ( type MockGraphService struct{} type MockUpdater struct { - UpdateState func(*support.ConnectorOperationStatus) + UpdateState func(*support.ControllerOperationStatus) } -func (mu *MockUpdater) UpdateStatus(input *support.ConnectorOperationStatus) { +func (mu *MockUpdater) UpdateStatus(input *support.ControllerOperationStatus) { if mu.UpdateState != nil { mu.UpdateState(input) } @@ -39,7 +39,7 @@ func (ms *MockGraphService) Adapter() *msgraphsdk.GraphRequestAdapter { return nil } -func (ms *MockGraphService) UpdateStatus(*support.ConnectorOperationStatus) { +func (ms *MockGraphService) UpdateStatus(*support.ControllerOperationStatus) { } // --------------------------------------------------------------------------- diff --git a/src/internal/connector/sharepoint/library_handler.go b/src/internal/m365/sharepoint/library_handler.go similarity index 98% rename from src/internal/connector/sharepoint/library_handler.go rename to src/internal/m365/sharepoint/library_handler.go index 4ba928f8f..4ea9e1e92 100644 --- a/src/internal/connector/sharepoint/library_handler.go +++ b/src/internal/m365/sharepoint/library_handler.go @@ -9,8 +9,8 @@ import ( "github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/onedrive" - odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts" + 
"github.com/alcionai/corso/src/internal/m365/onedrive" + odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/services/m365/api" diff --git a/src/internal/connector/sharepoint/library_handler_test.go b/src/internal/m365/sharepoint/library_handler_test.go similarity index 100% rename from src/internal/connector/sharepoint/library_handler_test.go rename to src/internal/m365/sharepoint/library_handler_test.go diff --git a/src/internal/connector/sharepoint/list.go b/src/internal/m365/sharepoint/list.go similarity index 99% rename from src/internal/connector/sharepoint/list.go rename to src/internal/m365/sharepoint/list.go index 0da99fcea..3532e029b 100644 --- a/src/internal/connector/sharepoint/list.go +++ b/src/internal/m365/sharepoint/list.go @@ -9,7 +9,7 @@ import ( "github.com/microsoftgraph/msgraph-sdk-go/sites" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/graph" + "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/pkg/fault" ) diff --git a/src/internal/connector/sharepoint/listInfo.go b/src/internal/m365/sharepoint/list_info.go similarity index 100% rename from src/internal/connector/sharepoint/listInfo.go rename to src/internal/m365/sharepoint/list_info.go diff --git a/src/internal/connector/sharepoint/listInfo_test.go b/src/internal/m365/sharepoint/list_info_test.go similarity index 100% rename from src/internal/connector/sharepoint/listInfo_test.go rename to src/internal/m365/sharepoint/list_info_test.go diff --git a/src/internal/connector/sharepoint/list_test.go b/src/internal/m365/sharepoint/list_test.go similarity index 100% rename from src/internal/connector/sharepoint/list_test.go rename to src/internal/m365/sharepoint/list_test.go diff --git a/src/internal/connector/sharepoint/mock/list.go b/src/internal/m365/sharepoint/mock/list.go similarity index 100% rename from src/internal/connector/sharepoint/mock/list.go rename to src/internal/m365/sharepoint/mock/list.go diff --git a/src/internal/connector/sharepoint/mock/mock_test.go b/src/internal/m365/sharepoint/mock/mock_test.go similarity index 96% rename from src/internal/connector/sharepoint/mock/mock_test.go rename to src/internal/m365/sharepoint/mock/mock_test.go index 01776ea36..52070c2cb 100644 --- a/src/internal/connector/sharepoint/mock/mock_test.go +++ b/src/internal/m365/sharepoint/mock/mock_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" - "github.com/alcionai/corso/src/internal/connector/sharepoint/api" + "github.com/alcionai/corso/src/internal/m365/sharepoint/api" "github.com/alcionai/corso/src/internal/tester" ) diff --git a/src/internal/connector/sharepoint/mock/page.go b/src/internal/m365/sharepoint/mock/page.go similarity index 100% rename from src/internal/connector/sharepoint/mock/page.go rename to src/internal/m365/sharepoint/mock/page.go diff --git a/src/internal/connector/sharepoint/pageInfo.go b/src/internal/m365/sharepoint/pageInfo.go similarity index 93% rename from src/internal/connector/sharepoint/pageInfo.go rename to src/internal/m365/sharepoint/pageInfo.go index 97dc618ae..8b5060bdd 100644 --- a/src/internal/connector/sharepoint/pageInfo.go +++ b/src/internal/m365/sharepoint/pageInfo.go @@ -4,7 +4,7 @@ import ( "time" "github.com/alcionai/corso/src/internal/common/ptr" - 
"github.com/alcionai/corso/src/internal/connector/graph/betasdk/models" + "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models" "github.com/alcionai/corso/src/pkg/backup/details" ) diff --git a/src/internal/connector/sharepoint/pageInfo_test.go b/src/internal/m365/sharepoint/pageInfo_test.go similarity index 94% rename from src/internal/connector/sharepoint/pageInfo_test.go rename to src/internal/m365/sharepoint/pageInfo_test.go index 924a9eed2..7490b117f 100644 --- a/src/internal/connector/sharepoint/pageInfo_test.go +++ b/src/internal/m365/sharepoint/pageInfo_test.go @@ -3,7 +3,7 @@ package sharepoint import ( "github.com/stretchr/testify/assert" - "github.com/alcionai/corso/src/internal/connector/graph/betasdk/models" + "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models" "github.com/alcionai/corso/src/pkg/backup/details" ) diff --git a/src/internal/connector/sharepoint/restore.go b/src/internal/m365/sharepoint/restore.go similarity index 88% rename from src/internal/connector/sharepoint/restore.go rename to src/internal/m365/sharepoint/restore.go index be307fb86..5bf53a4d6 100644 --- a/src/internal/connector/sharepoint/restore.go +++ b/src/internal/m365/sharepoint/restore.go @@ -11,12 +11,12 @@ import ( "github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/graph" - "github.com/alcionai/corso/src/internal/connector/onedrive" - betaAPI "github.com/alcionai/corso/src/internal/connector/sharepoint/api" - "github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/diagnostics" + "github.com/alcionai/corso/src/internal/m365/graph" + "github.com/alcionai/corso/src/internal/m365/onedrive" + betaAPI "github.com/alcionai/corso/src/internal/m365/sharepoint/api" + "github.com/alcionai/corso/src/internal/m365/support" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/fault" @@ -25,21 +25,8 @@ import ( "github.com/alcionai/corso/src/pkg/services/m365/api" ) -//---------------------------------------------------------------------------- -// SharePoint Restore WorkFlow: -// - RestoreCollections called by GC component -// -- Collections are iterated within, Control Flow Switch -// -- Switch: -// ---- Libraries restored via the same workflow as oneDrive -// ---- Lists call RestoreCollection() -// ----> for each data.Stream within RestoreCollection.Items() -// ----> restoreListItems() is called -// Restored List can be found in the Site's `Site content` page -// Restored Libraries can be found within the Site's `Pages` page -//------------------------------------------ - -// RestoreCollections will restore the specified data collections into OneDrive -func RestoreCollections( +// ConsumeRestoreCollections will restore the specified data collections into OneDrive +func ConsumeRestoreCollections( ctx context.Context, backupVersion int, ac api.Client, @@ -48,7 +35,7 @@ func RestoreCollections( dcs []data.RestoreCollection, deets *details.Builder, errs *fault.Bus, -) (*support.ConnectorOperationStatus, error) { +) (*support.ControllerOperationStatus, error) { var ( restoreMetrics support.CollectionMetrics caches = onedrive.NewRestoreCaches() @@ -78,7 +65,7 @@ func RestoreCollections( switch dc.FullPath().Category() { case path.LibrariesCategory: - metrics, err = onedrive.RestoreCollection( + metrics, err = 
 			ictx,
 			libraryRestoreHandler{ac.Drives()},
 			backupVersion,
diff --git a/src/internal/connector/support/operation_string.go b/src/internal/m365/support/operation_string.go
similarity index 100%
rename from src/internal/connector/support/operation_string.go
rename to src/internal/m365/support/operation_string.go
diff --git a/src/internal/connector/support/status.go b/src/internal/m365/support/status.go
similarity index 84%
rename from src/internal/connector/support/status.go
rename to src/internal/m365/support/status.go
index 6b3f154b5..f241909fe 100644
--- a/src/internal/connector/support/status.go
+++ b/src/internal/m365/support/status.go
@@ -7,13 +7,13 @@ import (
 	"github.com/dustin/go-humanize"
 )

-// ConnectorOperationStatus is a data type used to describe the state of
+// ControllerOperationStatus is a data type used to describe the state of
 // the sequence of operations.
 // @param ObjectCount integer representation of how many objects have been downloaded or uploaded.
 // @param Successful: Number of objects that are sent through the connector without incident.
 // @param incomplete: Bool representation of whether all intended items were downloaded or uploaded.
 // @param bytes: represents the total number of bytes that have been downloaded or uploaded.
-type ConnectorOperationStatus struct {
+type ControllerOperationStatus struct {
 	Folders int
 	Metrics CollectionMetrics
 	details string
@@ -49,8 +49,8 @@ func CreateStatus(
 	folders int,
 	cm CollectionMetrics,
 	details string,
-) *ConnectorOperationStatus {
-	status := ConnectorOperationStatus{
+) *ControllerOperationStatus {
+	status := ControllerOperationStatus{
 		Folders: folders,
 		Metrics: cm,
 		details: details,
@@ -63,10 +63,10 @@
 // Function signature for a status updater
 // Used to define a function that an async connector task can call
 // on completion with its ControllerOperationStatus
-type StatusUpdater func(*ConnectorOperationStatus)
+type StatusUpdater func(*ControllerOperationStatus)

 // MergeStatus combines ControllerOperationStatus values into a single status
-func MergeStatus(one, two ConnectorOperationStatus) ConnectorOperationStatus {
+func MergeStatus(one, two ControllerOperationStatus) ControllerOperationStatus {
 	if one.op == OpUnknown {
 		return two
 	}
@@ -75,7 +75,7 @@
 		return one
 	}

-	status := ConnectorOperationStatus{
+	status := ControllerOperationStatus{
 		Folders: one.Folders + two.Folders,
 		Metrics: CombineMetrics(one.Metrics, two.Metrics),
 		details: one.details + ", " + two.details,
@@ -85,7 +85,7 @@
 	return status
 }

-func (cos *ConnectorOperationStatus) String() string {
+func (cos *ControllerOperationStatus) String() string {
 	var operationStatement string

 	switch cos.op {
diff --git a/src/internal/connector/support/status_test.go b/src/internal/m365/support/status_test.go
similarity index 93%
rename from src/internal/connector/support/status_test.go
rename to src/internal/m365/support/status_test.go
index cad22d22f..79277242b 100644
--- a/src/internal/connector/support/status_test.go
+++ b/src/internal/m365/support/status_test.go
@@ -13,7 +13,7 @@ type StatusUnitSuite struct {
 	tester.Suite
 }

-func TestGraphConnectorStatus(t *testing.T) {
+func TestStatusUnitSuite(t *testing.T) {
 	suite.Run(t, &StatusUnitSuite{tester.NewUnitSuite(t)})
 }

@@ -71,8 +71,8 @@ func (suite *StatusUnitSuite) TestMergeStatus() {
 	table := []struct {
 		name
string - one ConnectorOperationStatus - two ConnectorOperationStatus + one ControllerOperationStatus + two ControllerOperationStatus expectOp Operation expectMetrics CollectionMetrics expectFolders int @@ -80,14 +80,14 @@ func (suite *StatusUnitSuite) TestMergeStatus() { { name: "Test: Status + unknown", one: *CreateStatus(ctx, Backup, 1, CollectionMetrics{1, 1, 0}, ""), - two: ConnectorOperationStatus{}, + two: ControllerOperationStatus{}, expectOp: Backup, expectMetrics: CollectionMetrics{1, 1, 0}, expectFolders: 1, }, { name: "Test: unknown + Status", - one: ConnectorOperationStatus{}, + one: ControllerOperationStatus{}, two: *CreateStatus(ctx, Backup, 1, CollectionMetrics{1, 1, 0}, ""), expectOp: Backup, expectMetrics: CollectionMetrics{1, 1, 0}, diff --git a/src/internal/operations/backup.go b/src/internal/operations/backup.go index f5739246f..4772f8b20 100644 --- a/src/internal/operations/backup.go +++ b/src/internal/operations/backup.go @@ -11,12 +11,12 @@ import ( "github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/prefixmatcher" - "github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/diagnostics" "github.com/alcionai/corso/src/internal/events" "github.com/alcionai/corso/src/internal/kopia" kinject "github.com/alcionai/corso/src/internal/kopia/inject" + "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/internal/model" "github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/internal/operations/inject" @@ -117,7 +117,7 @@ func (op BackupOperation) validate() error { // get populated asynchronously. 
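Before the operations wiring below, a quick illustration of the status plumbing this rename runs through: async producers invoke a support.StatusUpdater callback with each finished *support.ControllerOperationStatus, and the receiver folds the values together with MergeStatus. A minimal sketch of that pattern, assuming the exported support.Backup operation constant and the positional CollectionMetrics{objects, successes, bytes} initializer that the status tests above rely on:

    package statussketch

    import (
    	"context"
    	"fmt"
    	"sync"

    	"github.com/alcionai/corso/src/internal/m365/support"
    )

    func collectStatuses(ctx context.Context) {
    	var (
    		mu     sync.Mutex
    		merged support.ControllerOperationStatus
    	)

    	// The callback each async task receives, shaped like
    	// testStatusUpdater in the collection tests above.
    	var updater support.StatusUpdater = func(s *support.ControllerOperationStatus) {
    		mu.Lock()
    		defer mu.Unlock()

    		// MergeStatus sums folder counts and combines metrics;
    		// an OpUnknown side is simply replaced by the other.
    		merged = support.MergeStatus(merged, *s)
    	}

    	// Two tasks report one folder each: {objects, successes, bytes}.
    	updater(support.CreateStatus(ctx, support.Backup, 1, support.CollectionMetrics{2, 2, 64}, "mail"))
    	updater(support.CreateStatus(ctx, support.Backup, 1, support.CollectionMetrics{3, 2, 128}, "contacts"))

    	fmt.Println(merged.String())
    }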
type backupStats struct { k *kopia.BackupStats - gc *data.CollectionStats + ctrl *data.CollectionStats resourceCount int } @@ -370,9 +370,9 @@ func (op *BackupOperation) do( return nil, clues.Wrap(err, "merging details") } - opStats.gc = op.bp.Wait() + opStats.ctrl = op.bp.Wait() - logger.Ctx(ctx).Debug(opStats.gc) + logger.Ctx(ctx).Debug(opStats.ctrl) return deets, nil } @@ -870,16 +870,16 @@ func (op *BackupOperation) persistResults( op.Results.NonMetaItemsWritten = opStats.k.TotalNonMetaFileCount op.Results.ResourceOwners = opStats.resourceCount - if opStats.gc == nil { + if opStats.ctrl == nil { op.Status = Failed return clues.New("backup population never completed") } - if op.Status != Failed && opStats.gc.IsZero() { + if op.Status != Failed && opStats.ctrl.IsZero() { op.Status = NoData } - op.Results.ItemsRead = opStats.gc.Successes + op.Results.ItemsRead = opStats.ctrl.Successes return op.Errors.Failure() } diff --git a/src/internal/operations/backup_integration_test.go b/src/internal/operations/backup_integration_test.go index 8fbd9d2ce..df57dea03 100644 --- a/src/internal/operations/backup_integration_test.go +++ b/src/internal/operations/backup_integration_test.go @@ -21,20 +21,21 @@ import ( "github.com/alcionai/corso/src/internal/common/idname" inMock "github.com/alcionai/corso/src/internal/common/idname/mock" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector" - "github.com/alcionai/corso/src/internal/connector/exchange" - exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock" - exchTD "github.com/alcionai/corso/src/internal/connector/exchange/testdata" - "github.com/alcionai/corso/src/internal/connector/graph" - "github.com/alcionai/corso/src/internal/connector/mock" - "github.com/alcionai/corso/src/internal/connector/onedrive" - odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts" - "github.com/alcionai/corso/src/internal/connector/onedrive/metadata" - "github.com/alcionai/corso/src/internal/connector/sharepoint" "github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/events" evmock "github.com/alcionai/corso/src/internal/events/mock" "github.com/alcionai/corso/src/internal/kopia" + "github.com/alcionai/corso/src/internal/m365" + "github.com/alcionai/corso/src/internal/m365/exchange" + exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock" + exchTD "github.com/alcionai/corso/src/internal/m365/exchange/testdata" + "github.com/alcionai/corso/src/internal/m365/graph" + "github.com/alcionai/corso/src/internal/m365/mock" + "github.com/alcionai/corso/src/internal/m365/onedrive" + odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts" + "github.com/alcionai/corso/src/internal/m365/onedrive/metadata" + "github.com/alcionai/corso/src/internal/m365/resource" + "github.com/alcionai/corso/src/internal/m365/sharepoint" "github.com/alcionai/corso/src/internal/model" "github.com/alcionai/corso/src/internal/operations/inject" "github.com/alcionai/corso/src/internal/streamstore" @@ -79,7 +80,7 @@ func prepNewTestBackupOp( *kopia.Wrapper, *kopia.ModelStore, streamstore.Streamer, - *connector.GraphConnector, + *m365.Controller, selectors.Selector, func(), ) { @@ -120,17 +121,17 @@ func prepNewTestBackupOp( ms.Close(ctx) } - connectorResource := connector.Users + connectorResource := resource.Users if sel.Service == selectors.ServiceSharePoint { - connectorResource = connector.Sites + connectorResource = resource.Sites } - gc, sel := 
GCWithSelector(t, ctx, acct, connectorResource, sel, nil, closer) - bo := newTestBackupOp(t, ctx, kw, ms, gc, acct, sel, bus, featureToggles, closer) + ctrl, sel := ControllerWithSelector(t, ctx, acct, connectorResource, sel, nil, closer) + bo := newTestBackupOp(t, ctx, kw, ms, ctrl, acct, sel, bus, featureToggles, closer) ss := streamstore.NewStreamer(kw, acct.ID(), sel.PathService()) - return bo, acct, kw, ms, ss, gc, sel, closer + return bo, acct, kw, ms, ss, ctrl, sel, closer } // newTestBackupOp accepts the clients required to compose a backup operation, plus @@ -142,7 +143,7 @@ func newTestBackupOp( ctx context.Context, //revive:disable-line:context-as-argument kw *kopia.Wrapper, ms *kopia.ModelStore, - gc *connector.GraphConnector, + ctrl *m365.Controller, acct account.Account, sel selectors.Selector, bus events.Eventer, @@ -155,9 +156,9 @@ func newTestBackupOp( ) opts.ToggleFeatures = featureToggles - gc.IDNameLookup = idname.NewCache(map[string]string{sel.ID(): sel.Name()}) + ctrl.IDNameLookup = idname.NewCache(map[string]string{sel.ID(): sel.Name()}) - bo, err := NewBackupOperation(ctx, opts, kw, sw, gc, acct, sel, sel, bus) + bo, err := NewBackupOperation(ctx, opts, kw, sw, ctrl, acct, sel, sel, bus) if !assert.NoError(t, err, clues.ToCore(err)) { closer() t.FailNow() @@ -346,7 +347,7 @@ type dataBuilderFunc func(id, timeStamp, subject, body string) []byte func generateContainerOfItems( t *testing.T, ctx context.Context, //revive:disable-line:context-as-argument - gc *connector.GraphConnector, + ctrl *m365.Controller, service path.ServiceType, cat path.CategoryType, sel selectors.Selector, @@ -394,7 +395,7 @@ func generateContainerOfItems( opts := control.Defaults() opts.RestorePermissions = true - deets, err := gc.ConsumeRestoreCollections( + deets, err := ctrl.ConsumeRestoreCollections( ctx, backupVersion, sel, @@ -405,8 +406,8 @@ func generateContainerOfItems( require.NoError(t, err, clues.ToCore(err)) // have to wait here, both to ensure the process - // finishes, and also to clean up the gc status - gc.Wait() + // finishes, and also to clean up the status + ctrl.Wait() return deets } @@ -542,7 +543,7 @@ func (suite *BackupOpIntegrationSuite) TestNewBackupOperation() { var ( kw = &kopia.Wrapper{} sw = &store.Wrapper{} - gc = &mock.GraphConnector{} + ctrl = &mock.Controller{} acct = tester.NewM365Account(suite.T()) opts = control.Defaults() ) @@ -556,9 +557,9 @@ func (suite *BackupOpIntegrationSuite) TestNewBackupOperation() { targets []string errCheck assert.ErrorAssertionFunc }{ - {"good", kw, sw, gc, acct, nil, assert.NoError}, - {"missing kopia", nil, sw, gc, acct, nil, assert.Error}, - {"missing modelstore", kw, nil, gc, acct, nil, assert.Error}, + {"good", kw, sw, ctrl, acct, nil, assert.NoError}, + {"missing kopia", nil, sw, ctrl, acct, nil, assert.Error}, + {"missing modelstore", kw, nil, ctrl, acct, nil, assert.Error}, {"missing backup producer", kw, sw, nil, acct, nil, assert.Error}, } for _, test := range table { @@ -645,7 +646,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchange() { whatSet = deeTD.CategoryFromRepoRef ) - bo, acct, kw, ms, ss, gc, sel, closer := prepNewTestBackupOp(t, ctx, mb, sel, ffs, version.Backup) + bo, acct, kw, ms, ss, ctrl, sel, closer := prepNewTestBackupOp(t, ctx, mb, sel, ffs, version.Backup) defer closer() userID := sel.ID() @@ -684,7 +685,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchange() { // produces fewer results than the last backup. 
var ( incMB = evmock.NewBus() - incBO = newTestBackupOp(t, ctx, kw, ms, gc, acct, sel, incMB, ffs, closer) + incBO = newTestBackupOp(t, ctx, kw, ms, ctrl, acct, sel, incMB, ffs, closer) ) runAndCheckBackup(t, ctx, &incBO, incMB, true) @@ -766,7 +767,7 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont whatSet = deeTD.CategoryFromRepoRef ) - gc, sels := GCWithSelector(t, ctx, acct, connector.Users, sel.Selector, nil, nil) + ctrl, sels := ControllerWithSelector(t, ctx, acct, resource.Users, sel.Selector, nil, nil) sel.DiscreteOwner = sels.ID() sel.DiscreteOwnerName = sels.Name() @@ -776,10 +777,10 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont sel.MailFolders(containers, selectors.PrefixMatch()), sel.ContactFolders(containers, selectors.PrefixMatch())) - m365, err := acct.M365Config() + creds, err := acct.M365Config() require.NoError(t, err, clues.ToCore(err)) - ac, err := api.NewClient(m365) + ac, err := api.NewClient(creds) require.NoError(t, err, clues.ToCore(err)) // generate 3 new folders with two items each. @@ -855,11 +856,11 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont deets := generateContainerOfItems( t, ctx, - gc, + ctrl, service, category, selectors.NewExchangeRestore([]string{uidn.ID()}).Selector, - m365.AzureTenantID, uidn.ID(), "", destName, + creds.AzureTenantID, uidn.ID(), "", destName, 2, version.Backup, gen.dbf) @@ -883,7 +884,7 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont } } - bo, acct, kw, ms, ss, gc, sels, closer := prepNewTestBackupOp(t, ctx, mb, sel.Selector, toggles, version.Backup) + bo, acct, kw, ms, ss, ctrl, sels, closer := prepNewTestBackupOp(t, ctx, mb, sel.Selector, toggles, version.Backup) defer closer() // run the initial backup @@ -1036,11 +1037,11 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont deets := generateContainerOfItems( t, ctx, - gc, + ctrl, service, category, selectors.NewExchangeRestore([]string{uidn.ID()}).Selector, - m365.AzureTenantID, suite.user, "", container3, + creds.AzureTenantID, suite.user, "", container3, 2, version.Backup, gen.dbf) @@ -1245,8 +1246,8 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont var ( t = suite.T() incMB = evmock.NewBus() - incBO = newTestBackupOp(t, ctx, kw, ms, gc, acct, sels, incMB, toggles, closer) - atid = m365.AzureTenantID + incBO = newTestBackupOp(t, ctx, kw, ms, ctrl, acct, sels, incMB, toggles, closer) + atid = creds.AzureTenantID ) test.updateUserData(t) @@ -1347,7 +1348,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_incrementalOneDrive() { suite, suite.user, suite.user, - connector.Users, + resource.Users, path.OneDriveService, path.FilesCategory, ic, @@ -1390,7 +1391,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_incrementalSharePoint() { suite, suite.site, suite.user, - connector.Sites, + resource.Sites, path.SharePointService, path.LibrariesCategory, ic, @@ -1402,7 +1403,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_incrementalSharePoint() { func runDriveIncrementalTest( suite *BackupOpIntegrationSuite, owner, permissionsUser string, - resource connector.Resource, + rc resource.Category, service path.ServiceType, category path.CategoryType, includeContainers func([]string) selectors.Selector, @@ -1445,9 +1446,9 @@ func runDriveIncrementalTest( creds, err := acct.M365Config() require.NoError(t, err, clues.ToCore(err)) - gc, sel := GCWithSelector(t, 
ctx, acct, resource, sel, nil, nil) - ac := gc.AC.Drives() - rh := getRestoreHandler(gc.AC) + ctrl, sel := ControllerWithSelector(t, ctx, acct, rc, sel, nil, nil) + ac := ctrl.AC.Drives() + rh := getRestoreHandler(ctrl.AC) roidn := inMock.NewProvider(sel.ID(), sel.Name()) @@ -1479,7 +1480,7 @@ func runDriveIncrementalTest( deets := generateContainerOfItems( t, ctx, - gc, + ctrl, service, category, sel, @@ -1506,14 +1507,14 @@ func runDriveIncrementalTest( // onedrive package `getFolder` function. itemURL := fmt.Sprintf("https://graph.microsoft.com/v1.0/drives/%s/root:/%s", driveID, destName) resp, err := drives. - NewItemItemsDriveItemItemRequestBuilder(itemURL, gc.AC.Stable.Adapter()). + NewItemItemsDriveItemItemRequestBuilder(itemURL, ctrl.AC.Stable.Adapter()). Get(ctx, nil) require.NoError(t, err, "getting drive folder ID", "folder name", destName, clues.ToCore(err)) containerIDs[destName] = ptr.Val(resp.GetId()) } - bo, _, kw, ms, ss, gc, _, closer := prepNewTestBackupOp(t, ctx, mb, sel, ffs, version.Backup) + bo, _, kw, ms, ss, ctrl, _, closer := prepNewTestBackupOp(t, ctx, mb, sel, ffs, version.Backup) defer closer() // run the initial backup @@ -1812,7 +1813,7 @@ func runDriveIncrementalTest( generateContainerOfItems( t, ctx, - gc, + ctrl, service, category, sel, @@ -1826,7 +1827,7 @@ func runDriveIncrementalTest( "https://graph.microsoft.com/v1.0/drives/%s/root:/%s", driveID, container3) - resp, err := drives.NewItemItemsDriveItemItemRequestBuilder(itemURL, gc.AC.Stable.Adapter()). + resp, err := drives.NewItemItemsDriveItemItemRequestBuilder(itemURL, ctrl.AC.Stable.Adapter()). Get(ctx, nil) require.NoError(t, err, "getting drive folder ID", "folder name", container3, clues.ToCore(err)) @@ -1841,13 +1842,13 @@ func runDriveIncrementalTest( } for _, test := range table { suite.Run(test.name, func() { - cleanGC, err := connector.NewGraphConnector(ctx, acct, resource) + cleanCtrl, err := m365.NewController(ctx, acct, rc) require.NoError(t, err, clues.ToCore(err)) var ( t = suite.T() incMB = evmock.NewBus() - incBO = newTestBackupOp(t, ctx, kw, ms, cleanGC, acct, sel, incMB, ffs, closer) + incBO = newTestBackupOp(t, ctx, kw, ms, cleanCtrl, acct, sel, incMB, ffs, closer) ) tester.LogTimeOfTest(suite.T()) @@ -1914,13 +1915,13 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveOwnerMigration() { creds, err := acct.M365Config() require.NoError(t, err, clues.ToCore(err)) - gc, err := connector.NewGraphConnector( + ctrl, err := m365.NewController( ctx, acct, - connector.Users) + resource.Users) require.NoError(t, err, clues.ToCore(err)) - userable, err := gc.AC.Users().GetByID(ctx, suite.user) + userable, err := ctrl.AC.Users().GetByID(ctx, suite.user) require.NoError(t, err, clues.ToCore(err)) uid := ptr.Val(userable.GetId()) @@ -1929,7 +1930,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveOwnerMigration() { oldsel := selectors.NewOneDriveBackup([]string{uname}) oldsel.Include(selTD.OneDriveBackupFolderScope(oldsel)) - bo, _, kw, ms, _, gc, sel, closer := prepNewTestBackupOp(t, ctx, mb, oldsel.Selector, ffs, 0) + bo, _, kw, ms, _, ctrl, sel, closer := prepNewTestBackupOp(t, ctx, mb, oldsel.Selector, ffs, 0) defer closer() // ensure the initial owner uses name in both cases @@ -1955,7 +1956,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveOwnerMigration() { var ( incMB = evmock.NewBus() // the incremental backup op should have a proper user ID for the id. 
- incBO = newTestBackupOp(t, ctx, kw, ms, gc, acct, sel, incMB, ffs, closer) + incBO = newTestBackupOp(t, ctx, kw, ms, ctrl, acct, sel, incMB, ffs, closer) ) require.NotEqualf( diff --git a/src/internal/operations/backup_test.go b/src/internal/operations/backup_test.go index a8648d97b..248d40087 100644 --- a/src/internal/operations/backup_test.go +++ b/src/internal/operations/backup_test.go @@ -15,11 +15,11 @@ import ( "github.com/stretchr/testify/suite" "github.com/alcionai/corso/src/internal/common/prefixmatcher" - "github.com/alcionai/corso/src/internal/connector/mock" - odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts" "github.com/alcionai/corso/src/internal/data" evmock "github.com/alcionai/corso/src/internal/events/mock" "github.com/alcionai/corso/src/internal/kopia" + "github.com/alcionai/corso/src/internal/m365/mock" + odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts" "github.com/alcionai/corso/src/internal/model" ssmock "github.com/alcionai/corso/src/internal/streamstore/mock" "github.com/alcionai/corso/src/internal/tester" @@ -414,7 +414,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_PersistResults() { var ( kw = &kopia.Wrapper{} sw = &store.Wrapper{} - gc = &mock.GraphConnector{} + ctrl = &mock.Controller{} acct = account.Account{} now = time.Now() ) @@ -435,7 +435,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_PersistResults() { TotalHashedBytes: 1, TotalUploadedBytes: 1, }, - gc: &data.CollectionStats{Successes: 1}, + ctrl: &data.CollectionStats{Successes: 1}, }, }, { @@ -443,16 +443,16 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_PersistResults() { expectErr: assert.Error, fail: assert.AnError, stats: backupStats{ - k: &kopia.BackupStats{}, - gc: &data.CollectionStats{}, + k: &kopia.BackupStats{}, + ctrl: &data.CollectionStats{}, }, }, { expectStatus: NoData, expectErr: assert.NoError, stats: backupStats{ - k: &kopia.BackupStats{}, - gc: &data.CollectionStats{}, + k: &kopia.BackupStats{}, + ctrl: &data.CollectionStats{}, }, }, } @@ -471,7 +471,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_PersistResults() { control.Defaults(), kw, sw, - gc, + ctrl, acct, sel, sel, @@ -483,7 +483,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_PersistResults() { test.expectErr(t, op.persistResults(now, &test.stats)) assert.Equal(t, test.expectStatus.String(), op.Status.String(), "status") - assert.Equal(t, test.stats.gc.Successes, op.Results.ItemsRead, "items read") + assert.Equal(t, test.stats.ctrl.Successes, op.Results.ItemsRead, "items read") assert.Equal(t, test.stats.k.TotalFileCount, op.Results.ItemsWritten, "items written") assert.Equal(t, test.stats.k.TotalHashedBytes, op.Results.BytesRead, "bytes read") assert.Equal(t, test.stats.k.TotalUploadedBytes, op.Results.BytesUploaded, "bytes written") diff --git a/src/internal/operations/help_test.go b/src/internal/operations/help_test.go index f5b01dc9b..c1830bf25 100644 --- a/src/internal/operations/help_test.go +++ b/src/internal/operations/help_test.go @@ -8,24 +8,25 @@ import ( "github.com/stretchr/testify/assert" "github.com/alcionai/corso/src/internal/common/idname" - "github.com/alcionai/corso/src/internal/connector" + "github.com/alcionai/corso/src/internal/m365" + "github.com/alcionai/corso/src/internal/m365/resource" "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/selectors" ) -// A QoL builder for live GC instances that updates +// A QoL builder for live instances that updates // the selector's owner id 
and name in the process // to help avoid gotchas. -func GCWithSelector( +func ControllerWithSelector( t *testing.T, ctx context.Context, //revive:disable-line:context-as-argument acct account.Account, - cr connector.Resource, + cr resource.Category, sel selectors.Selector, ins idname.Cacher, onFail func(), -) (*connector.GraphConnector, selectors.Selector) { - gc, err := connector.NewGraphConnector(ctx, acct, cr) +) (*m365.Controller, selectors.Selector) { + ctrl, err := m365.NewController(ctx, acct, cr) if !assert.NoError(t, err, clues.ToCore(err)) { if onFail != nil { onFail() @@ -34,7 +35,7 @@ func GCWithSelector( t.FailNow() } - id, name, err := gc.PopulateOwnerIDAndNamesFrom(ctx, sel.DiscreteOwner, ins) + id, name, err := ctrl.PopulateOwnerIDAndNamesFrom(ctx, sel.DiscreteOwner, ins) if !assert.NoError(t, err, clues.ToCore(err)) { if onFail != nil { onFail() @@ -45,5 +46,5 @@ func GCWithSelector( sel = sel.SetDiscreteOwnerIDName(id, name) - return gc, sel + return ctrl, sel } diff --git a/src/internal/operations/manifests.go b/src/internal/operations/manifests.go index 2b028c55a..121481066 100644 --- a/src/internal/operations/manifests.go +++ b/src/internal/operations/manifests.go @@ -8,10 +8,10 @@ import ( "github.com/pkg/errors" "golang.org/x/exp/maps" - "github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/kopia" "github.com/alcionai/corso/src/internal/kopia/inject" + "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/internal/model" "github.com/alcionai/corso/src/pkg/backup" "github.com/alcionai/corso/src/pkg/fault" diff --git a/src/internal/operations/restore.go b/src/internal/operations/restore.go index 02f406e0a..ef7e7ea38 100644 --- a/src/internal/operations/restore.go +++ b/src/internal/operations/restore.go @@ -10,11 +10,11 @@ import ( "github.com/alcionai/corso/src/internal/common/crash" "github.com/alcionai/corso/src/internal/common/dttm" - "github.com/alcionai/corso/src/internal/connector/onedrive" "github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/diagnostics" "github.com/alcionai/corso/src/internal/events" "github.com/alcionai/corso/src/internal/kopia" + "github.com/alcionai/corso/src/internal/m365/onedrive" "github.com/alcionai/corso/src/internal/model" "github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/internal/operations/inject" @@ -94,7 +94,7 @@ func (op RestoreOperation) validate() error { // get populated asynchronously. 
type restoreStats struct { cs []data.RestoreCollection - gc *data.CollectionStats + ctrl *data.CollectionStats bytesRead *stats.ByteCounter resourceCount int @@ -265,9 +265,9 @@ func (op *RestoreOperation) do( return nil, clues.Wrap(err, "restoring collections") } - opStats.gc = op.rc.Wait() + opStats.ctrl = op.rc.Wait() - logger.Ctx(ctx).Debug(opStats.gc) + logger.Ctx(ctx).Debug(opStats.ctrl) return deets, nil } @@ -291,16 +291,16 @@ func (op *RestoreOperation) persistResults( op.Results.ItemsRead = len(opStats.cs) // TODO: file count, not collection count op.Results.ResourceOwners = opStats.resourceCount - if opStats.gc == nil { + if opStats.ctrl == nil { op.Status = Failed return clues.New("restoration never completed") } - if op.Status != Failed && opStats.gc.IsZero() { + if op.Status != Failed && opStats.ctrl.IsZero() { op.Status = NoData } - op.Results.ItemsWritten = opStats.gc.Successes + op.Results.ItemsWritten = opStats.ctrl.Successes return op.Errors.Failure() } diff --git a/src/internal/operations/restore_test.go b/src/internal/operations/restore_test.go index 63c92c208..5eb1d4fda 100644 --- a/src/internal/operations/restore_test.go +++ b/src/internal/operations/restore_test.go @@ -12,14 +12,15 @@ import ( "github.com/alcionai/corso/src/internal/common/dttm" inMock "github.com/alcionai/corso/src/internal/common/idname/mock" - "github.com/alcionai/corso/src/internal/connector" - "github.com/alcionai/corso/src/internal/connector/exchange" - exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock" - "github.com/alcionai/corso/src/internal/connector/mock" "github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/events" evmock "github.com/alcionai/corso/src/internal/events/mock" "github.com/alcionai/corso/src/internal/kopia" + "github.com/alcionai/corso/src/internal/m365" + "github.com/alcionai/corso/src/internal/m365/exchange" + exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock" + "github.com/alcionai/corso/src/internal/m365/mock" + "github.com/alcionai/corso/src/internal/m365/resource" "github.com/alcionai/corso/src/internal/model" "github.com/alcionai/corso/src/internal/operations/inject" "github.com/alcionai/corso/src/internal/stats" @@ -48,7 +49,7 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() { var ( kw = &kopia.Wrapper{} sw = &store.Wrapper{} - gc = &mock.GraphConnector{} + ctrl = &mock.Controller{} now = time.Now() restoreCfg = tester.DefaultTestRestoreConfig("") ) @@ -72,7 +73,7 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() { Collection: &exchMock.DataCollection{}, }, }, - gc: &data.CollectionStats{ + ctrl: &data.CollectionStats{ Objects: 1, Successes: 1, }, @@ -84,7 +85,7 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() { fail: assert.AnError, stats: restoreStats{ bytesRead: &stats.ByteCounter{}, - gc: &data.CollectionStats{}, + ctrl: &data.CollectionStats{}, }, }, { @@ -93,7 +94,7 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() { stats: restoreStats{ bytesRead: &stats.ByteCounter{}, cs: []data.RestoreCollection{}, - gc: &data.CollectionStats{}, + ctrl: &data.CollectionStats{}, }, }, } @@ -109,7 +110,7 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() { control.Defaults(), kw, sw, - gc, + ctrl, account.Account{}, "foo", selectors.Selector{DiscreteOwner: "test"}, @@ -124,7 +125,7 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() { assert.Equal(t, test.expectStatus.String(), 
op.Status.String(), "status") assert.Equal(t, len(test.stats.cs), op.Results.ItemsRead, "items read") - assert.Equal(t, test.stats.gc.Successes, op.Results.ItemsWritten, "items written") + assert.Equal(t, test.stats.ctrl.Successes, op.Results.ItemsWritten, "items written") assert.Equal(t, test.stats.bytesRead.NumBytes, op.Results.BytesRead, "resource owners") assert.Equal(t, test.stats.resourceCount, op.Results.ResourceOwners, "resource owners") assert.Equal(t, now, op.Results.StartedAt, "started at") @@ -141,7 +142,7 @@ type bupResults struct { selectorResourceOwners []string backupID model.StableID items int - gc *connector.GraphConnector + ctrl *m365.Controller } type RestoreOpIntegrationSuite struct { @@ -217,7 +218,7 @@ func (suite *RestoreOpIntegrationSuite) TestNewRestoreOperation() { var ( kw = &kopia.Wrapper{} sw = &store.Wrapper{} - gc = &mock.GraphConnector{} + ctrl = &mock.Controller{} restoreCfg = tester.DefaultTestRestoreConfig("") opts = control.Defaults() ) @@ -230,9 +231,9 @@ func (suite *RestoreOpIntegrationSuite) TestNewRestoreOperation() { targets []string errCheck assert.ErrorAssertionFunc }{ - {"good", kw, sw, gc, nil, assert.NoError}, - {"missing kopia", nil, sw, gc, nil, assert.Error}, - {"missing modelstore", kw, nil, gc, nil, assert.Error}, + {"good", kw, sw, ctrl, nil, assert.NoError}, + {"missing kopia", nil, sw, ctrl, nil, assert.Error}, + {"missing modelstore", kw, nil, ctrl, nil, assert.Error}, {"missing restore consumer", kw, sw, nil, nil, assert.Error}, } for _, test := range table { @@ -279,14 +280,14 @@ func setupExchangeBackup( esel.ContactFolders([]string{exchange.DefaultContactFolder}, selectors.PrefixMatch()), esel.EventCalendars([]string{exchange.DefaultCalendar}, selectors.PrefixMatch())) - gc, sel := GCWithSelector(t, ctx, acct, connector.Users, esel.Selector, nil, nil) + ctrl, sel := ControllerWithSelector(t, ctx, acct, resource.Users, esel.Selector, nil, nil) bo, err := NewBackupOperation( ctx, control.Defaults(), kw, sw, - gc, + ctrl, acct, sel, inMock.NewProvider(owner, owner), @@ -304,7 +305,7 @@ func setupExchangeBackup( // These meta files are used to aid restore, but are not themselves // restored (ie: counted as writes). items: bo.Results.ItemsWritten - 6, - gc: gc, + ctrl: ctrl, } } @@ -330,21 +331,21 @@ func setupSharePointBackup( ssel.Include(ssel.LibraryFolders([]string{"test"}, selectors.PrefixMatch())) ssel.DiscreteOwner = owner - gc, sel := GCWithSelector(t, ctx, acct, connector.Sites, ssel.Selector, nil, nil) + ctrl, sel := ControllerWithSelector(t, ctx, acct, resource.Sites, ssel.Selector, nil, nil) bo, err := NewBackupOperation( ctx, control.Defaults(), kw, sw, - gc, + ctrl, acct, sel, inMock.NewProvider(owner, owner), evmock.NewBus()) require.NoError(t, err, clues.ToCore(err)) - spPgr := gc.AC.Drives().NewSiteDrivePager(owner, []string{"id", "name"}) + spPgr := ctrl.AC.Drives().NewSiteDrivePager(owner, []string{"id", "name"}) drives, err := api.GetAllDrives(ctx, spPgr, true, 3) require.NoError(t, err, clues.ToCore(err)) @@ -362,7 +363,7 @@ func setupSharePointBackup( // These meta files are used to aid restore, but are not themselves // restored (ie: counted as writes). 
items: bo.Results.ItemsWritten - 2 - len(drives) - len(drives), - gc: gc, + ctrl: ctrl, } } @@ -419,7 +420,7 @@ func (suite *RestoreOpIntegrationSuite) TestRestore_Run() { control.Options{FailureHandling: control.FailFast}, suite.kw, suite.sw, - bup.gc, + bup.ctrl, tester.NewM365Account(t), bup.backupID, test.getSelector(t, bup.selectorResourceOwners), @@ -460,10 +461,10 @@ func (suite *RestoreOpIntegrationSuite) TestRestore_Run_errorNoBackup() { rsel := selectors.NewExchangeRestore(selectors.None()) rsel.Include(rsel.AllData()) - gc, err := connector.NewGraphConnector( + ctrl, err := m365.NewController( ctx, suite.acct, - connector.Users) + resource.Users) require.NoError(t, err, clues.ToCore(err)) ro, err := NewRestoreOperation( @@ -471,7 +472,7 @@ func (suite *RestoreOpIntegrationSuite) TestRestore_Run_errorNoBackup() { control.Defaults(), suite.kw, suite.sw, - gc, + ctrl, tester.NewM365Account(t), "backupID", rsel.Selector, diff --git a/src/internal/tester/account.go b/src/internal/tester/account.go index 94552c771..6684c0bf1 100644 --- a/src/internal/tester/account.go +++ b/src/internal/tester/account.go @@ -16,7 +16,7 @@ var M365AcctCredEnvs = []string{ } // NewM365Account returns an account.Account object initialized with environment -// variables used for integration tests that use Graph Connector. +// variables used for integration tests that use the m365 Controller. func NewM365Account(t *testing.T) account.Account { cfg, err := readTestConfig() require.NoError(t, err, "configuring m365 account from test configuration", clues.ToCore(err)) diff --git a/src/pkg/backup/details/details.go b/src/pkg/backup/details/details.go index c24f8fb42..f394d02b7 100644 --- a/src/pkg/backup/details/details.go +++ b/src/pkg/backup/details/details.go @@ -15,7 +15,7 @@ import ( "github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/internal/common/dttm" - "github.com/alcionai/corso/src/internal/connector/onedrive/metadata" + "github.com/alcionai/corso/src/internal/m365/onedrive/metadata" "github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/pkg/path" ) diff --git a/src/pkg/backup/details/details_test.go b/src/pkg/backup/details/details_test.go index f6c1097ae..4646b484a 100644 --- a/src/pkg/backup/details/details_test.go +++ b/src/pkg/backup/details/details_test.go @@ -14,8 +14,8 @@ import ( "github.com/stretchr/testify/suite" "github.com/alcionai/corso/src/internal/common/dttm" - odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts" - "github.com/alcionai/corso/src/internal/connector/onedrive/metadata" + odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts" + "github.com/alcionai/corso/src/internal/m365/onedrive/metadata" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/pkg/path" diff --git a/src/pkg/errs/errs.go b/src/pkg/errs/errs.go index f93e0e51a..ce6e9c58a 100644 --- a/src/pkg/errs/errs.go +++ b/src/pkg/errs/errs.go @@ -3,7 +3,7 @@ package errs import ( "errors" - "github.com/alcionai/corso/src/internal/connector/graph" + "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/pkg/repository" ) diff --git a/src/pkg/errs/errs_test.go b/src/pkg/errs/errs_test.go index 789c88658..50b583143 100644 --- a/src/pkg/errs/errs_test.go +++ b/src/pkg/errs/errs_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" - 
"github.com/alcionai/corso/src/internal/connector/graph" + "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/repository" ) diff --git a/src/pkg/logger/example_logger_test.go b/src/pkg/logger/example_logger_test.go index b141efecf..3c97bfd0e 100644 --- a/src/pkg/logger/example_logger_test.go +++ b/src/pkg/logger/example_logger_test.go @@ -5,8 +5,8 @@ import ( "github.com/alcionai/clues" - "github.com/alcionai/corso/src/internal/connector" - "github.com/alcionai/corso/src/internal/connector/graph" + "github.com/alcionai/corso/src/internal/m365/graph" + "github.com/alcionai/corso/src/internal/m365/resource" "github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/path" ) @@ -188,7 +188,7 @@ func Example_logger_clues_standards() { // preferred log.With( // internal type, safe to log plainly - "resource_type", connector.Users, + "resource_type", resource.Users, // string containing sensitive info, wrap with Hide() "user_name", clues.Hide("your_user_name@microsoft.example"), // string partially concealed by a managed concealer. diff --git a/src/pkg/path/drive_test.go b/src/pkg/path/drive_test.go index 5a6853caf..131c17b9c 100644 --- a/src/pkg/path/drive_test.go +++ b/src/pkg/path/drive_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" - odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts" + odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/path" ) diff --git a/src/pkg/repository/repository.go b/src/pkg/repository/repository.go index 13385cbb8..b58cf3eac 100644 --- a/src/pkg/repository/repository.go +++ b/src/pkg/repository/repository.go @@ -10,11 +10,12 @@ import ( "github.com/alcionai/corso/src/internal/common/crash" "github.com/alcionai/corso/src/internal/common/idname" - "github.com/alcionai/corso/src/internal/connector" - "github.com/alcionai/corso/src/internal/connector/onedrive/metadata" "github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/events" "github.com/alcionai/corso/src/internal/kopia" + "github.com/alcionai/corso/src/internal/m365" + "github.com/alcionai/corso/src/internal/m365/onedrive/metadata" + "github.com/alcionai/corso/src/internal/m365/resource" "github.com/alcionai/corso/src/internal/model" "github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/internal/operations" @@ -306,12 +307,12 @@ func (r repository) NewBackupWithLookup( sel selectors.Selector, ins idname.Cacher, ) (operations.BackupOperation, error) { - gc, err := connectToM365(ctx, sel, r.Account) + ctrl, err := connectToM365(ctx, sel, r.Account) if err != nil { return operations.BackupOperation{}, errors.Wrap(err, "connecting to m365") } - ownerID, ownerName, err := gc.PopulateOwnerIDAndNamesFrom(ctx, sel.DiscreteOwner, ins) + ownerID, ownerName, err := ctrl.PopulateOwnerIDAndNamesFrom(ctx, sel.DiscreteOwner, ins) if err != nil { return operations.BackupOperation{}, errors.Wrap(err, "resolving resource owner details") } @@ -324,7 +325,7 @@ func (r repository) NewBackupWithLookup( r.Opts, r.dataLayer, store.NewKopiaStore(r.modelStore), - gc, + ctrl, r.Account, sel, sel, // the selector acts as an IDNamer for its discrete resource owner. 
@@ -338,7 +339,7 @@ func (r repository) NewRestore( sel selectors.Selector, restoreCfg control.RestoreConfig, ) (operations.RestoreOperation, error) { - gc, err := connectToM365(ctx, sel, r.Account) + ctrl, err := connectToM365(ctx, sel, r.Account) if err != nil { return operations.RestoreOperation{}, errors.Wrap(err, "connecting to m365") } @@ -348,7 +349,7 @@ func (r repository) NewRestore( r.Opts, r.dataLayer, store.NewKopiaStore(r.modelStore), - gc, + ctrl, r.Account, model.StableID(backupID), sel, @@ -623,12 +624,12 @@ func newRepoID(s storage.Storage) string { // helpers // --------------------------------------------------------------------------- -// produces a graph connector. +// produces an m365 Controller. func connectToM365( ctx context.Context, sel selectors.Selector, acct account.Account, -) (*connector.GraphConnector, error) { +) (*m365.Controller, error) { complete := observe.MessageWithCompletion(ctx, "Connecting to M365") defer func() { complete <- struct{}{} }() // retrieve data from the producer - resource := connector.Users + rc := resource.Users if sel.Service == selectors.ServiceSharePoint { - resource = connector.Sites + rc = resource.Sites } - gc, err := connector.NewGraphConnector(ctx, acct, resource) + ctrl, err := m365.NewController(ctx, acct, rc) if err != nil { return nil, err } - return gc, nil + return ctrl, nil } func errWrapper(err error) error { diff --git a/src/pkg/selectors/onedrive_test.go b/src/pkg/selectors/onedrive_test.go index 1946fac28..aeb2f19cd 100644 --- a/src/pkg/selectors/onedrive_test.go +++ b/src/pkg/selectors/onedrive_test.go @@ -10,7 +10,7 @@ import ( "github.com/stretchr/testify/suite" "github.com/alcionai/corso/src/internal/common/dttm" - odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts" + odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/fault" diff --git a/src/pkg/selectors/sharepoint_test.go b/src/pkg/selectors/sharepoint_test.go index 93654f0e7..a8003951e 100644 --- a/src/pkg/selectors/sharepoint_test.go +++ b/src/pkg/selectors/sharepoint_test.go @@ -12,7 +12,7 @@ import ( "golang.org/x/exp/slices" "github.com/alcionai/corso/src/internal/common/dttm" - odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts" + odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/fault" diff --git a/src/pkg/services/m365/api/client.go b/src/pkg/services/m365/api/client.go index ee546be75..338ce9c29 100644 --- a/src/pkg/services/m365/api/client.go +++ b/src/pkg/services/m365/api/client.go @@ -6,7 +6,7 @@ import ( "github.com/alcionai/clues" - "github.com/alcionai/corso/src/internal/connector/graph" + "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/pkg/account" ) diff --git a/src/pkg/services/m365/api/client_test.go b/src/pkg/services/m365/api/client_test.go index 3fb248200..e2b0722a3 100644 --- a/src/pkg/services/m365/api/client_test.go +++ b/src/pkg/services/m365/api/client_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" - exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock" + exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/account" ) diff --git a/src/pkg/services/m365/api/contacts.go b/src/pkg/services/m365/api/contacts.go index 2410e032a..fa52d8312 100644 --- a/src/pkg/services/m365/api/contacts.go +++ b/src/pkg/services/m365/api/contacts.go @@ -11,7 +11,7 @@ import ( "github.com/microsoftgraph/msgraph-sdk-go/users" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/graph" + "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/selectors" diff --git a/src/pkg/services/m365/api/contacts_test.go b/src/pkg/services/m365/api/contacts_test.go index 13b5330cf..ddba9da87 100644 --- a/src/pkg/services/m365/api/contacts_test.go +++ b/src/pkg/services/m365/api/contacts_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" - exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock" + exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/backup/details" ) diff --git a/src/pkg/services/m365/api/drive.go b/src/pkg/services/m365/api/drive.go index f938e3263..478da708e 100644 --- a/src/pkg/services/m365/api/drive.go +++ b/src/pkg/services/m365/api/drive.go @@ -8,7 +8,7 @@ import ( "github.com/microsoftgraph/msgraph-sdk-go/drives" "github.com/microsoftgraph/msgraph-sdk-go/models" - "github.com/alcionai/corso/src/internal/connector/graph" + "github.com/alcionai/corso/src/internal/m365/graph" ) // --------------------------------------------------------------------------- diff --git a/src/pkg/services/m365/api/drive_pager.go b/src/pkg/services/m365/api/drive_pager.go index 8bb50ecab..8199dc8e8 100644 --- a/src/pkg/services/m365/api/drive_pager.go +++ b/src/pkg/services/m365/api/drive_pager.go @@ -12,8 +12,8 @@ import ( "github.com/microsoftgraph/msgraph-sdk-go/users" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/graph" - onedrive "github.com/alcionai/corso/src/internal/connector/onedrive/consts" + "github.com/alcionai/corso/src/internal/m365/graph" + onedrive "github.com/alcionai/corso/src/internal/m365/onedrive/consts" "github.com/alcionai/corso/src/pkg/logger" ) diff --git a/src/pkg/services/m365/api/events.go b/src/pkg/services/m365/api/events.go index b4af00fda..8f35481e2 100644 --- a/src/pkg/services/m365/api/events.go +++ b/src/pkg/services/m365/api/events.go @@ -15,7 +15,7 @@ import ( "github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/graph" + "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/path" diff --git a/src/pkg/services/m365/api/events_test.go b/src/pkg/services/m365/api/events_test.go index 9f185bacc..2daa66454 100644 --- a/src/pkg/services/m365/api/events_test.go +++ b/src/pkg/services/m365/api/events_test.go @@ -11,7 +11,7 @@ import ( "github.com/stretchr/testify/suite" "github.com/alcionai/corso/src/internal/common/dttm" - exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock" + exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock" 
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/backup/details" ) diff --git a/src/pkg/services/m365/api/item_pager.go b/src/pkg/services/m365/api/item_pager.go index aaa3f2248..00a93ea13 100644 --- a/src/pkg/services/m365/api/item_pager.go +++ b/src/pkg/services/m365/api/item_pager.go @@ -8,7 +8,7 @@ import ( "github.com/alcionai/clues" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/graph" + "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/pkg/logger" ) diff --git a/src/pkg/services/m365/api/item_pager_test.go b/src/pkg/services/m365/api/item_pager_test.go index b2fd90fdf..4c6dbfbeb 100644 --- a/src/pkg/services/m365/api/item_pager_test.go +++ b/src/pkg/services/m365/api/item_pager_test.go @@ -11,7 +11,7 @@ import ( "github.com/stretchr/testify/suite" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/graph" + "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/internal/tester" ) diff --git a/src/pkg/services/m365/api/mail.go b/src/pkg/services/m365/api/mail.go index 34ce4b18f..8c7ca138c 100644 --- a/src/pkg/services/m365/api/mail.go +++ b/src/pkg/services/m365/api/mail.go @@ -13,7 +13,7 @@ import ( "github.com/microsoftgraph/msgraph-sdk-go/users" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/graph" + "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/logger" diff --git a/src/pkg/services/m365/api/mail_test.go b/src/pkg/services/m365/api/mail_test.go index 581dcd2b0..236bc9b4c 100644 --- a/src/pkg/services/m365/api/mail_test.go +++ b/src/pkg/services/m365/api/mail_test.go @@ -15,7 +15,7 @@ import ( "github.com/stretchr/testify/suite" "github.com/alcionai/corso/src/internal/common/ptr" - exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock" + exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/backup/details" diff --git a/src/pkg/services/m365/api/mock/mail.go b/src/pkg/services/m365/api/mock/mail.go index b05cec1a4..8a71d1067 100644 --- a/src/pkg/services/m365/api/mock/mail.go +++ b/src/pkg/services/m365/api/mock/mail.go @@ -1,8 +1,8 @@ package mock import ( - "github.com/alcionai/corso/src/internal/connector/graph" - "github.com/alcionai/corso/src/internal/connector/graph/mock" + "github.com/alcionai/corso/src/internal/m365/graph" + "github.com/alcionai/corso/src/internal/m365/graph/mock" "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/services/m365/api" ) diff --git a/src/pkg/services/m365/api/sites.go b/src/pkg/services/m365/api/sites.go index fb7db5d20..d1f506cda 100644 --- a/src/pkg/services/m365/api/sites.go +++ b/src/pkg/services/m365/api/sites.go @@ -13,8 +13,8 @@ import ( "github.com/pkg/errors" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/graph" - "github.com/alcionai/corso/src/internal/connector/graph/betasdk/sites" + "github.com/alcionai/corso/src/internal/m365/graph" + "github.com/alcionai/corso/src/internal/m365/graph/betasdk/sites" "github.com/alcionai/corso/src/pkg/fault" ) diff --git a/src/pkg/services/m365/api/users.go 
b/src/pkg/services/m365/api/users.go index fa1d29a36..908128579 100644 --- a/src/pkg/services/m365/api/users.go +++ b/src/pkg/services/m365/api/users.go @@ -12,7 +12,7 @@ import ( "github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/graph" + "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/path" diff --git a/src/pkg/services/m365/m365.go b/src/pkg/services/m365/m365.go index 697e34005..305a10bbf 100644 --- a/src/pkg/services/m365/m365.go +++ b/src/pkg/services/m365/m365.go @@ -8,8 +8,8 @@ import ( "github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/discovery" - "github.com/alcionai/corso/src/internal/connector/graph" + "github.com/alcionai/corso/src/internal/m365/discovery" + "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/services/m365/api" @@ -251,7 +251,7 @@ type Site struct { func Sites(ctx context.Context, acct account.Account, errs *fault.Bus) ([]*Site, error) { sites, err := discovery.Sites(ctx, acct, errs) if err != nil { - return nil, clues.Wrap(err, "initializing M365 graph connection") + return nil, clues.Wrap(err, "initializing M365 api connection") } ret := make([]*Site, 0, len(sites)) From c68810ecffaebd08112bb15f09fe691558cd8c97 Mon Sep 17 00:00:00 2001 From: Keepers Date: Tue, 13 Jun 2023 13:10:17 -0600 Subject: [PATCH 17/41] bump clues to latest (#3597) #### Does this PR need a docs update or release note? 
- [x] :no_entry: No --- src/go.mod | 2 +- src/go.sum | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/go.mod b/src/go.mod index 71d8dc685..6a7955fcf 100644 --- a/src/go.mod +++ b/src/go.mod @@ -6,7 +6,7 @@ replace github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.2023050223 require ( github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0 - github.com/alcionai/clues v0.0.0-20230406223931-f48777f4773c + github.com/alcionai/clues v0.0.0-20230613181047-258ea4f19225 github.com/armon/go-metrics v0.4.1 github.com/aws/aws-sdk-go v1.44.281 github.com/aws/aws-xray-sdk-go v1.8.1 diff --git a/src/go.sum b/src/go.sum index b392709cc..29f42c054 100644 --- a/src/go.sum +++ b/src/go.sum @@ -53,8 +53,10 @@ github.com/VividCortex/ewma v1.2.0 h1:f58SaIzcDXrSy3kWaHNvuJgJ3Nmz59Zji6XoJR/q1o github.com/VividCortex/ewma v1.2.0/go.mod h1:nz4BbCtbLyFDeC9SUHbtcT5644juEuWfUAUnGx7j5l4= github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8= github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo= -github.com/alcionai/clues v0.0.0-20230406223931-f48777f4773c h1:Njdw/Nnq2DN3f8QMaHuZZHdVHTUSxFqPMMxDIInDWB4= -github.com/alcionai/clues v0.0.0-20230406223931-f48777f4773c/go.mod h1:DeaMbAwDvYM6ZfPMR/GUl3hceqI5C8jIQ1lstjB2IW8= +github.com/alcionai/clues v0.0.0-20230612202815-692e4550015b h1:Zw/dlWkeJZ3MnXNCtIYD9Nn4TDO6pqDqjkQPecVI2E8= +github.com/alcionai/clues v0.0.0-20230612202815-692e4550015b/go.mod h1:DeaMbAwDvYM6ZfPMR/GUl3hceqI5C8jIQ1lstjB2IW8= +github.com/alcionai/clues v0.0.0-20230613181047-258ea4f19225 h1:mjUjCCGvJpmnLh3fuVzpfOSFC9lp9TOIOfjj51L5Rs0= +github.com/alcionai/clues v0.0.0-20230613181047-258ea4f19225/go.mod h1:DeaMbAwDvYM6ZfPMR/GUl3hceqI5C8jIQ1lstjB2IW8= github.com/alcionai/kopia v0.12.2-0.20230502235504-2509b1d72a79 h1:Wrl99Y7jftZMnNDiOIcRJrjstZO3IEj3+Q/sip27vmI= github.com/alcionai/kopia v0.12.2-0.20230502235504-2509b1d72a79/go.mod h1:Iic7CcKhsq+A7MLR9hh6VJfgpcJhLx3Kn+BgjY+azvI= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= From ce32b99c9dd5f2a2dd37bcc4ba8fe43a521c6314 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 13 Jun 2023 19:56:17 +0000 Subject: [PATCH 18/41] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Bump=20golang.org/x/?= =?UTF-8?q?net=20from=200.10.0=20to=200.11.0=20in=20/src=20(#3610)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [golang.org/x/net](https://github.com/golang/net) from 0.10.0 to 0.11.0.
Commits
  • 6c96ca5 go.mod: update golang.org/x dependencies
  • 5541298 quic: add packet pacer
  • 88a50b6 all: update x/sys to HEAD
  • 7e6923f quic: add RTT estimator
  • 2796e09 bpf: check for little endian CPU for OS VM comparison
  • 10cf388 quic: add a data structure for tracking lists of sent packets
  • ccc217c quic: parameterize rangeset
  • f16447c quic: add go1.21 build constraint
  • f7250ea quic: add a type tracking sent values
  • 1b5a2d8 quic: packet encoding/decoding
  • Additional commits viewable in compare view

--- src/go.mod | 8 ++++---- src/go.sum | 16 ++++++++-------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/src/go.mod b/src/go.mod index 6a7955fcf..eb06644e2 100644 --- a/src/go.mod +++ b/src/go.mod @@ -115,12 +115,12 @@ require ( go.opentelemetry.io/otel/trace v1.15.1 // indirect go.uber.org/atomic v1.10.0 // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/crypto v0.9.0 // indirect + golang.org/x/crypto v0.10.0 // indirect golang.org/x/mod v0.10.0 // indirect - golang.org/x/net v0.10.0 + golang.org/x/net v0.11.0 golang.org/x/sync v0.2.0 // indirect - golang.org/x/sys v0.8.0 // indirect - golang.org/x/text v0.9.0 // indirect + golang.org/x/sys v0.9.0 // indirect + golang.org/x/text v0.10.0 // indirect google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect google.golang.org/grpc v1.55.0 // indirect google.golang.org/protobuf v1.30.0 // indirect diff --git a/src/go.sum b/src/go.sum index 29f42c054..243cb4402 100644 --- a/src/go.sum +++ b/src/go.sum @@ -454,8 +454,8 @@ golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.9.0 h1:LF6fAI+IutBocDJ2OT0Q1g8plpYljMZ4+lty+dsqw3g= -golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0= +golang.org/x/crypto v0.10.0 h1:LKqV2xt9+kDzSTfOhx4FrkEBcMrAgHSYgzywV9zcGmM= +golang.org/x/crypto v0.10.0/go.mod h1:o4eNf7Ede1fv+hwOwZsTHl9EsPFO6q6ZvYR8vYfY45I= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -531,8 +531,8 @@ golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qx golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= -golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M= -golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.11.0 h1:Gi2tvZIJyBtO9SDr1q9h5hEQCp/4L2RQ+ar0qjx2oNU= +golang.org/x/net v0.11.0/go.mod h1:2L/ixqYpgIVXmeoSA/4Lu7BzTG4KIyPIryS4IsOd1oQ= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -605,8 +605,8 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.8.0 
h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU= -golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.9.0 h1:KS/R3tvhPqvJvwcKfnBHJwwthS11LRhmM5D59eEXa0s= +golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -619,8 +619,8 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE= -golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.10.0 h1:UpjohKhiEgNc0CSauXmwYftY1+LlaC75SJwh0SgCX58= +golang.org/x/text v0.10.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= From b0bbd78e859e423ba87350490721db0dfef88887 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 13 Jun 2023 20:53:16 +0000 Subject: [PATCH 19/41] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Bump=20github.com/aw?= =?UTF-8?q?s/aws-sdk-go=20from=201.44.281=20to=201.44.282=20in=20/src=20(#?= =?UTF-8?q?3611)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [github.com/aws/aws-sdk-go](https://github.com/aws/aws-sdk-go) from 1.44.281 to 1.44.282.
Release notes

Sourced from github.com/aws/aws-sdk-go's releases.

Release v1.44.282 (2023-06-13)

Service Client Updates

  • service/cloudtrail: Updates service API and documentation
    • This feature allows users to view dashboards for CloudTrail Lake event data stores.
  • service/codeguru-security: Adds new service
  • service/drs: Updates service API, documentation, and paginators
  • service/ec2: Updates service API, documentation, and paginators
    • This release introduces a new feature, EC2 Instance Connect Endpoint, that enables you to connect to a resource over TCP, without requiring the resource to have a public IPv4 address.
  • service/imagebuilder: Updates service documentation
  • service/lightsail: Updates service API and documentation
    • This release adds pagination for the Get Certificates API operation.
  • service/s3: Updates service API and examples
    • Integrate double encryption feature to SDKs.
  • service/securityhub: Updates service API, documentation, and examples
  • service/simspaceweaver: Updates service API and documentation
  • service/verifiedpermissions: Adds new service
  • service/wafv2: Updates service API and documentation
  • service/wellarchitected: Updates service API, documentation, and paginators
--- src/go.mod | 2 +- src/go.sum | 6 ++---- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/src/go.mod b/src/go.mod index eb06644e2..a0d4eae9d 100644 --- a/src/go.mod +++ b/src/go.mod @@ -8,7 +8,7 @@ require ( github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0 github.com/alcionai/clues v0.0.0-20230613181047-258ea4f19225 github.com/armon/go-metrics v0.4.1 - github.com/aws/aws-sdk-go v1.44.281 + github.com/aws/aws-sdk-go v1.44.282 github.com/aws/aws-xray-sdk-go v1.8.1 github.com/cenkalti/backoff/v4 v4.2.1 github.com/google/uuid v1.3.0 diff --git a/src/go.sum b/src/go.sum index 243cb4402..b1e923509 100644 --- a/src/go.sum +++ b/src/go.sum @@ -53,8 +53,6 @@ github.com/VividCortex/ewma v1.2.0 h1:f58SaIzcDXrSy3kWaHNvuJgJ3Nmz59Zji6XoJR/q1o github.com/VividCortex/ewma v1.2.0/go.mod h1:nz4BbCtbLyFDeC9SUHbtcT5644juEuWfUAUnGx7j5l4= github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8= github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo= -github.com/alcionai/clues v0.0.0-20230612202815-692e4550015b h1:Zw/dlWkeJZ3MnXNCtIYD9Nn4TDO6pqDqjkQPecVI2E8= -github.com/alcionai/clues v0.0.0-20230612202815-692e4550015b/go.mod h1:DeaMbAwDvYM6ZfPMR/GUl3hceqI5C8jIQ1lstjB2IW8= github.com/alcionai/clues v0.0.0-20230613181047-258ea4f19225 h1:mjUjCCGvJpmnLh3fuVzpfOSFC9lp9TOIOfjj51L5Rs0= github.com/alcionai/clues v0.0.0-20230613181047-258ea4f19225/go.mod h1:DeaMbAwDvYM6ZfPMR/GUl3hceqI5C8jIQ1lstjB2IW8= github.com/alcionai/kopia v0.12.2-0.20230502235504-2509b1d72a79 h1:Wrl99Y7jftZMnNDiOIcRJrjstZO3IEj3+Q/sip27vmI= @@ -68,8 +66,8 @@ github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA= github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4= -github.com/aws/aws-sdk-go v1.44.281 h1:z/ptheJvINaIAsKXthxONM+toTKw2pxyk700Hfm6yUw= -github.com/aws/aws-sdk-go v1.44.281/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= +github.com/aws/aws-sdk-go v1.44.282 h1:ZPB9QhwxmMIEC8ja0DdFowOl5fODWaZ6s2cZ40fx6r8= +github.com/aws/aws-sdk-go v1.44.282/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= github.com/aws/aws-xray-sdk-go v1.8.1 h1:O4pXV+hnCskaamGsZnFpzHyAmgPGusBMN6i7nnsy0Fo= github.com/aws/aws-xray-sdk-go v1.8.1/go.mod h1:wMmVYzej3sykAttNBkXQHK/+clAPWTOrPiajEk7Cp3A= github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= From e4ec00a5d2f088f3d4389f1918018ef281508b1b Mon Sep 17 00:00:00 2001 From: Vaibhav Kamra Date: Tue, 13 Jun 2023 14:43:47 -0700 Subject: [PATCH 20/41] Update to go 1.20 (#3609) Release notes: https://tip.golang.org/doc/go1.20 Changes required: - Updated `go.mod` - this also controls what CI uses - `rand` is now automatically seeded (see the short sketch below)
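This sketch is not part of the patch; it illustrates the Go 1.20 behavior that makes the `rand.Seed` removal in the diff below safe. As of Go 1.20, the global `math/rand` source is seeded with a random value at program startup, and the top-level `Seed` function is deprecated:

```go
package main

import (
	"fmt"
	"math/rand"
)

func main() {
	// Before Go 1.20, omitting rand.Seed(time.Now().UnixNano()) left the
	// global source with a fixed seed of 1, so every run produced the
	// same sequence. From Go 1.20 on, the global source is seeded
	// automatically, and explicit seeding is unnecessary.
	fmt.Println(rand.Intn(100))
}
```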
--- #### Does this PR need a docs update or release note? - [ ] :white_check_mark: Yes, it's included - [ ] :clock1: Yes, but in a later PR - [x] :no_entry: No #### Type of change - [ ] :sunflower: Feature - [ ] :bug: Bugfix - [ ] :world_map: Documentation - [ ] :robot: Supportability/Tests - [x] :computer: CI/Deployment - [ ] :broom: Tech Debt/Cleanup #### Issue(s) * #3568 #### Test Plan - [ ] :muscle: Manual - [x] :zap: Unit test - [x] :green_heart: E2E --- src/go.mod | 2 +- src/internal/m365/exchange/mock/collections.go | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/src/go.mod b/src/go.mod index a0d4eae9d..3c154584d 100644 --- a/src/go.mod +++ b/src/go.mod @@ -1,6 +1,6 @@ module github.com/alcionai/corso/src -go 1.19 +go 1.20 replace github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20230502235504-2509b1d72a79 diff --git a/src/internal/m365/exchange/mock/collections.go b/src/internal/m365/exchange/mock/collections.go index 4d78523b1..36de3cfd1 100644 --- a/src/internal/m365/exchange/mock/collections.go +++ b/src/internal/m365/exchange/mock/collections.go @@ -89,8 +89,6 @@ func NewContactCollection(pathRepresentation path.Path, numMessagesToReturn int) Names: []string{}, } - rand.Seed(time.Now().UnixNano()) - middleNames := []string{ "Argon", "Bernard", From ce72acbcc1ef3d1bbace8332978b437a1fbc8a63 Mon Sep 17 00:00:00 2001 From: Keepers Date: Tue, 13 Jun 2023 16:18:18 -0600 Subject: [PATCH 21/41] auto-log recoverable errors with stack (#3598) Automatically log when we add a recoverable error or a skipped item to fault. The log includes a stack trace captured at the point where the recoverable error or skipped item was added. Clues does not have a method for pulling a stack trace out of an error yet; that can be added at a future date. A sketch of the new call shape follows below.
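This sketch is illustrative only and is not part of the patch: the `loadItem` helper is hypothetical, while the context argument to `fault.Bus.AddRecoverable` (and the logging side effect) is what this PR introduces:

```go
package example

import (
	"context"

	"github.com/alcionai/clues"

	"github.com/alcionai/corso/src/pkg/fault"
)

// loadItem is a stand-in for any producer call that can fail.
func loadItem(ctx context.Context, name string) error {
	return clues.New("not implemented").With("item_name", name)
}

func loadItems(ctx context.Context, names []string, errs *fault.Bus) {
	for _, name := range names {
		if err := loadItem(ctx, name); err != nil {
			// ctx is the new first parameter; the error is now also
			// logged, with a stack trace, at the moment it is recorded.
			errs.AddRecoverable(ctx, clues.Wrap(err, "loading item").WithClues(ctx))
		}
	}
}
```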
--- #### Does this PR need a docs update or release note? - [x] :no_entry: No #### Type of change - [x] :robot: Supportability/Tests #### Test Plan - [x] :zap: Unit test - [x] :green_heart: E2E --- src/internal/kopia/data_collection.go | 2 +- src/internal/kopia/upload.go | 25 ++--- src/internal/kopia/upload_test.go | 27 +++++ src/internal/kopia/wrapper.go | 7 +- src/internal/m365/exchange/backup.go | 6 +- src/internal/m365/exchange/backup_test.go | 2 +- src/internal/m365/exchange/collection.go | 4 +- .../m365/exchange/container_resolver.go | 2 +- src/internal/m365/exchange/restore.go | 14 +-- src/internal/m365/graph/collections.go | 2 +- src/internal/m365/onedrive/backup.go | 2 +- src/internal/m365/onedrive/collection.go | 10 +- src/internal/m365/onedrive/collections.go | 6 +- .../m365/onedrive/collections_test.go | 2 +- src/internal/m365/onedrive/drive.go | 2 +- src/internal/m365/onedrive/restore.go | 6 +- src/internal/m365/sharepoint/api/pages.go | 2 +- src/internal/m365/sharepoint/backup.go | 10 +- src/internal/m365/sharepoint/collection.go | 4 +- src/internal/m365/sharepoint/list.go | 10 +- src/internal/m365/sharepoint/restore.go | 10 +- src/internal/operations/helpers_test.go | 25 ++--- .../restore_path_transformer.go | 2 +- src/internal/streamstore/collectables_test.go | 29 +++--- src/pkg/fault/example_fault_test.go | 25 ++--- src/pkg/fault/fault.go | 53 +++++++--- src/pkg/fault/fault_test.go | 98 ++++++++++++------- src/pkg/logger/logger.go | 21 ++++ src/pkg/repository/repository.go | 2 +- src/pkg/selectors/scopes.go | 4 +- src/pkg/services/m365/api/contacts.go | 4 +- src/pkg/services/m365/api/events.go | 4 +- src/pkg/services/m365/api/mail.go | 4 +- src/pkg/services/m365/api/sites.go | 2 +- src/pkg/services/m365/api/users.go | 2 +- 35 files changed, 271 insertions(+), 159 deletions(-) diff --git a/src/internal/kopia/data_collection.go b/src/internal/kopia/data_collection.go index 4cc53e871..64d5f1a17 100644 --- a/src/internal/kopia/data_collection.go +++ b/src/internal/kopia/data_collection.go @@ -42,7 +42,7 @@ func (kdc *kopiaDataCollection) Items( for _, item := range kdc.items { s, err := kdc.FetchItemByName(ctx, item) if err != nil { - el.AddRecoverable(clues.Wrap(err, "fetching item"). + el.AddRecoverable(ctx, clues.Wrap(err, "fetching item"). WithClues(ctx). Label(fault.LabelForceNoBackupCreation)) diff --git a/src/internal/kopia/upload.go b/src/internal/kopia/upload.go index 4a610c892..5c8d19950 100644 --- a/src/internal/kopia/upload.go +++ b/src/internal/kopia/upload.go @@ -133,6 +133,12 @@ type itemDetails struct { } type corsoProgress struct { + // this is an unwanted hack. We can't extend the kopia interface + // funcs to pass through a context. This is the second best way to + // get an at least partially formed context into funcs that need it + // for logging and other purposes. + ctx context.Context + snapshotfs.UploadProgress pending map[string]*itemDetails deets *details.Builder @@ -183,11 +189,10 @@ func (cp *corsoProgress) FinishedFile(relativePath string, err error) { // never had to materialize their details in-memory. if d.info == nil { if d.prevPath == nil { - cp.errs.AddRecoverable(clues.New("item sourced from previous backup with no previous path"). + cp.errs.AddRecoverable(cp.ctx, clues.New("item sourced from previous backup with no previous path"). With( "service", d.repoPath.Service().String(), - "category", d.repoPath.Category().String(), - ). + "category", d.repoPath.Category().String()).
Label(fault.LabelForceNoBackupCreation)) return @@ -198,11 +203,10 @@ func (cp *corsoProgress) FinishedFile(relativePath string, err error) { err := cp.toMerge.addRepoRef(d.prevPath.ToBuilder(), d.repoPath, d.locationPath) if err != nil { - cp.errs.AddRecoverable(clues.Wrap(err, "adding item to merge list"). + cp.errs.AddRecoverable(cp.ctx, clues.Wrap(err, "adding item to merge list"). With( "service", d.repoPath.Service().String(), - "category", d.repoPath.Category().String(), - ). + "category", d.repoPath.Category().String()). Label(fault.LabelForceNoBackupCreation)) } @@ -215,11 +219,10 @@ func (cp *corsoProgress) FinishedFile(relativePath string, err error) { !d.cached, *d.info) if err != nil { - cp.errs.AddRecoverable(clues.New("adding item to details"). + cp.errs.AddRecoverable(cp.ctx, clues.New("adding item to details"). With( "service", d.repoPath.Service().String(), - "category", d.repoPath.Category().String(), - ). + "category", d.repoPath.Category().String()). Label(fault.LabelForceNoBackupCreation)) return @@ -278,7 +281,7 @@ func (cp *corsoProgress) Error(relpath string, err error, isIgnored bool) { defer cp.UploadProgress.Error(relpath, err, isIgnored) - cp.errs.AddRecoverable(clues.Wrap(err, "kopia reported error"). + cp.errs.AddRecoverable(cp.ctx, clues.Wrap(err, "kopia reported error"). With("is_ignored", isIgnored, "relative_path", relpath). Label(fault.LabelForceNoBackupCreation)) } @@ -350,7 +353,7 @@ func collectionEntries( itemPath, err := streamedEnts.FullPath().AppendItem(e.UUID()) if err != nil { err = clues.Wrap(err, "getting full item path") - progress.errs.AddRecoverable(err) + progress.errs.AddRecoverable(ctx, err) logger.CtxErr(ctx, err).Error("getting full item path") diff --git a/src/internal/kopia/upload_test.go b/src/internal/kopia/upload_test.go index fba178fdf..0ac10ec6b 100644 --- a/src/internal/kopia/upload_test.go +++ b/src/internal/kopia/upload_test.go @@ -472,8 +472,12 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFile() { suite.Run(test.name, func() { t := suite.T() + ctx, flush := tester.NewContext(t) + defer flush() + bd := &details.Builder{} cp := corsoProgress{ + ctx: ctx, UploadProgress: &snapshotfs.NullUploadProgress{}, deets: bd, pending: map[string]*itemDetails{}, @@ -526,6 +530,10 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFile() { func (suite *CorsoProgressUnitSuite) TestFinishedFileCachedNoPrevPathErrors() { t := suite.T() + + ctx, flush := tester.NewContext(t) + defer flush() + bd := &details.Builder{} cachedItems := map[string]testInfo{ suite.targetFileName: { @@ -535,6 +543,7 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFileCachedNoPrevPathErrors() { }, } cp := corsoProgress{ + ctx: ctx, UploadProgress: &snapshotfs.NullUploadProgress{}, deets: bd, pending: map[string]*itemDetails{}, @@ -565,6 +574,9 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFileBaseItemDoesntBuildHierarch t := suite.T() + ctx, flush := tester.NewContext(t) + defer flush() + prevPath := makePath( suite.T(), []string{testTenant, service, testUser, category, testInboxDir, testFileName2}, @@ -582,6 +594,7 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFileBaseItemDoesntBuildHierarch // Setup stuff. 
db := &details.Builder{} cp := corsoProgress{ + ctx: ctx, UploadProgress: &snapshotfs.NullUploadProgress{}, deets: db, pending: map[string]*itemDetails{}, @@ -617,8 +630,12 @@ func (suite *CorsoProgressUnitSuite) TestFinishedHashingFile() { suite.Run(test.name, func() { t := suite.T() + ctx, flush := tester.NewContext(t) + defer flush() + bd := &details.Builder{} cp := corsoProgress{ + ctx: ctx, UploadProgress: &snapshotfs.NullUploadProgress{}, deets: bd, pending: map[string]*itemDetails{}, @@ -682,6 +699,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree() { } progress := &corsoProgress{ + ctx: ctx, pending: map[string]*itemDetails{}, toMerge: newMergeDetails(), errs: fault.New(true), @@ -801,6 +819,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_MixedDirectory() defer flush() progress := &corsoProgress{ + ctx: ctx, pending: map[string]*itemDetails{}, toMerge: newMergeDetails(), errs: fault.New(true), @@ -908,6 +927,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_Fails() { defer flush() progress := &corsoProgress{ + ctx: ctx, toMerge: newMergeDetails(), errs: fault.New(true), } @@ -1004,6 +1024,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeErrors() { defer flush() progress := &corsoProgress{ + ctx: ctx, pending: map[string]*itemDetails{}, toMerge: newMergeDetails(), errs: fault.New(true), @@ -1298,6 +1319,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() { defer flush() progress := &corsoProgress{ + ctx: ctx, pending: map[string]*itemDetails{}, toMerge: newMergeDetails(), errs: fault.New(true), @@ -2221,6 +2243,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto defer flush() progress := &corsoProgress{ + ctx: ctx, pending: map[string]*itemDetails{}, toMerge: newMergeDetails(), errs: fault.New(true), @@ -2375,6 +2398,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSkipsDeletedSubtre ) progress := &corsoProgress{ + ctx: ctx, pending: map[string]*itemDetails{}, toMerge: newMergeDetails(), errs: fault.New(true), @@ -2477,6 +2501,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_HandleEmptyBase() ) progress := &corsoProgress{ + ctx: ctx, pending: map[string]*itemDetails{}, toMerge: newMergeDetails(), errs: fault.New(true), @@ -2733,6 +2758,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsCorrectSubt ) progress := &corsoProgress{ + ctx: ctx, pending: map[string]*itemDetails{}, toMerge: newMergeDetails(), errs: fault.New(true), @@ -2901,6 +2927,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsMigrateSubt ) progress := &corsoProgress{ + ctx: ctx, pending: map[string]*itemDetails{}, toMerge: newMergeDetails(), errs: fault.New(true), diff --git a/src/internal/kopia/wrapper.go b/src/internal/kopia/wrapper.go index e424a47b6..4ed8e3a6a 100644 --- a/src/internal/kopia/wrapper.go +++ b/src/internal/kopia/wrapper.go @@ -160,6 +160,7 @@ func (w Wrapper) ConsumeBackupCollections( } progress := &corsoProgress{ + ctx: ctx, pending: map[string]*itemDetails{}, deets: &details.Builder{}, toMerge: newMergeDetails(), @@ -415,7 +416,7 @@ func loadDirsAndItems( dir, err := getDir(ictx, dirItems.dir, snapshotRoot) if err != nil { - el.AddRecoverable(clues.Wrap(err, "loading storage directory"). + el.AddRecoverable(ctx, clues.Wrap(err, "loading storage directory"). WithClues(ictx). 
Label(fault.LabelForceNoBackupCreation)) @@ -431,7 +432,7 @@ func loadDirsAndItems( } if err := mergeCol.addCollection(dirItems.dir.String(), dc); err != nil { - el.AddRecoverable(clues.Wrap(err, "adding collection to merge collection"). + el.AddRecoverable(ctx, clues.Wrap(err, "adding collection to merge collection"). WithClues(ctx). Label(fault.LabelForceNoBackupCreation)) @@ -493,7 +494,7 @@ func (w Wrapper) ProduceRestoreCollections( parentStoragePath, err := itemPaths.StoragePath.Dir() if err != nil { - el.AddRecoverable(clues.Wrap(err, "getting storage directory path"). + el.AddRecoverable(ictx, clues.Wrap(err, "getting storage directory path"). WithClues(ictx). Label(fault.LabelForceNoBackupCreation)) diff --git a/src/internal/m365/exchange/backup.go b/src/internal/m365/exchange/backup.go index a24cc1c44..1f2732ea0 100644 --- a/src/internal/m365/exchange/backup.go +++ b/src/internal/m365/exchange/backup.go @@ -224,7 +224,7 @@ func ProduceBackupCollections( su, errs) if err != nil { - el.AddRecoverable(err) + el.AddRecoverable(ctx, err) continue } @@ -404,7 +404,7 @@ func populateCollections( !ctrlOpts.ToggleFeatures.DisableDelta) if err != nil { if !graph.IsErrDeletedInFlight(err) { - el.AddRecoverable(clues.Stack(err).Label(fault.LabelForceNoBackupCreation)) + el.AddRecoverable(ctx, clues.Stack(err).Label(fault.LabelForceNoBackupCreation)) continue } @@ -467,7 +467,7 @@ func populateCollections( ) if collections[id] != nil { - el.AddRecoverable(clues.Wrap(err, "conflict: tombstone exists for a live collection").WithClues(ictx)) + el.AddRecoverable(ctx, clues.Wrap(err, "conflict: tombstone exists for a live collection").WithClues(ictx)) continue } diff --git a/src/internal/m365/exchange/backup_test.go b/src/internal/m365/exchange/backup_test.go index 06d5a75c6..0b900945d 100644 --- a/src/internal/m365/exchange/backup_test.go +++ b/src/internal/m365/exchange/backup_test.go @@ -332,7 +332,7 @@ func (f failingColl) Items(ctx context.Context, errs *fault.Bus) <-chan data.Str ic := make(chan data.Stream) defer close(ic) - errs.AddRecoverable(assert.AnError) + errs.AddRecoverable(ctx, assert.AnError) return ic } diff --git a/src/internal/m365/exchange/collection.go b/src/internal/m365/exchange/collection.go index bc5ff3027..9bb320f18 100644 --- a/src/internal/m365/exchange/collection.go +++ b/src/internal/m365/exchange/collection.go @@ -230,7 +230,7 @@ func (col *Collection) streamItems(ctx context.Context, errs *fault.Bus) { atomic.AddInt64(&success, 1) log.With("err", err).Infow("item not found", clues.InErr(err).Slice()...) 
} else { - errs.AddRecoverable(clues.Wrap(err, "fetching item").Label(fault.LabelForceNoBackupCreation)) + errs.AddRecoverable(ctx, clues.Wrap(err, "fetching item").Label(fault.LabelForceNoBackupCreation)) } return @@ -238,7 +238,7 @@ func (col *Collection) streamItems(ctx context.Context, errs *fault.Bus) { data, err := col.items.Serialize(ctx, item, user, id) if err != nil { - errs.AddRecoverable(clues.Wrap(err, "serializing item").Label(fault.LabelForceNoBackupCreation)) + errs.AddRecoverable(ctx, clues.Wrap(err, "serializing item").Label(fault.LabelForceNoBackupCreation)) return } diff --git a/src/internal/m365/exchange/container_resolver.go b/src/internal/m365/exchange/container_resolver.go index 368b2ae73..fff528bae 100644 --- a/src/internal/m365/exchange/container_resolver.go +++ b/src/internal/m365/exchange/container_resolver.go @@ -403,7 +403,7 @@ func (cr *containerResolver) populatePaths( _, err := cr.idToPath(ctx, ptr.Val(f.GetId()), 0) if err != nil { err = clues.Wrap(err, "populating path") - el.AddRecoverable(err) + el.AddRecoverable(ctx, err) lastErr = err } } diff --git a/src/internal/m365/exchange/restore.go b/src/internal/m365/exchange/restore.go index 7703933df..f540e621d 100644 --- a/src/internal/m365/exchange/restore.go +++ b/src/internal/m365/exchange/restore.go @@ -64,7 +64,7 @@ func ConsumeRestoreCollections( handler, ok := handlers[category] if !ok { - el.AddRecoverable(clues.New("unsupported restore path category").WithClues(ictx)) + el.AddRecoverable(ctx, clues.New("unsupported restore path category").WithClues(ictx)) continue } @@ -82,7 +82,7 @@ func ConsumeRestoreCollections( isNewCache, errs) if err != nil { - el.AddRecoverable(err) + el.AddRecoverable(ctx, err) continue } @@ -107,7 +107,7 @@ func ConsumeRestoreCollections( break } - el.AddRecoverable(err) + el.AddRecoverable(ctx, err) } } @@ -166,7 +166,7 @@ func restoreCollection( _, err := buf.ReadFrom(itemData.ToReader()) if err != nil { - el.AddRecoverable(clues.Wrap(err, "reading item bytes").WithClues(ictx)) + el.AddRecoverable(ctx, clues.Wrap(err, "reading item bytes").WithClues(ictx)) continue } @@ -174,7 +174,7 @@ func restoreCollection( info, err := ir.restore(ictx, body, userID, destinationID, errs) if err != nil { - el.AddRecoverable(err) + el.AddRecoverable(ctx, err) continue } @@ -185,7 +185,7 @@ func restoreCollection( // destination folder, then the restore path no longer matches the fullPath. itemPath, err := fullPath.AppendItem(itemData.UUID()) if err != nil { - el.AddRecoverable(clues.Wrap(err, "adding item to collection path").WithClues(ctx)) + el.AddRecoverable(ctx, clues.Wrap(err, "adding item to collection path").WithClues(ctx)) continue } @@ -343,7 +343,7 @@ func uploadAttachments( continue } - el.AddRecoverable(clues.Wrap(err, "uploading mail attachment").WithClues(ctx)) + el.AddRecoverable(ctx, clues.Wrap(err, "uploading mail attachment").WithClues(ctx)) } } diff --git a/src/internal/m365/graph/collections.go b/src/internal/m365/graph/collections.go index 24a8138f3..57c206490 100644 --- a/src/internal/m365/graph/collections.go +++ b/src/internal/m365/graph/collections.go @@ -83,7 +83,7 @@ func BaseCollections( if err != nil { // Shouldn't happen. 
err = clues.Wrap(err, "making path").WithClues(ictx) - el.AddRecoverable(err) + el.AddRecoverable(ctx, err) lastErr = err continue diff --git a/src/internal/m365/onedrive/backup.go b/src/internal/m365/onedrive/backup.go index 06aab38ae..f1a47e9e1 100644 --- a/src/internal/m365/onedrive/backup.go +++ b/src/internal/m365/onedrive/backup.go @@ -77,7 +77,7 @@ func ProduceBackupCollections( odcs, canUsePreviousBackup, err = nc.Get(ctx, metadata, ssmb, errs) if err != nil { - el.AddRecoverable(clues.Stack(err).Label(fault.LabelForceNoBackupCreation)) + el.AddRecoverable(ctx, clues.Stack(err).Label(fault.LabelForceNoBackupCreation)) } categories[scope.Category().PathType()] = struct{}{} diff --git a/src/internal/m365/onedrive/collection.go b/src/internal/m365/onedrive/collection.go index 3e9fe5157..afeb0bcb0 100644 --- a/src/internal/m365/onedrive/collection.go +++ b/src/internal/m365/onedrive/collection.go @@ -271,14 +271,14 @@ func (oc *Collection) getDriveItemContent( if err != nil { if clues.HasLabel(err, graph.LabelsMalware) || (item != nil && item.GetMalware() != nil) { logger.CtxErr(ctx, err).With("skipped_reason", fault.SkipMalware).Info("item flagged as malware") - el.AddSkip(fault.FileSkip(fault.SkipMalware, driveID, itemID, itemName, graph.ItemInfo(item))) + el.AddSkip(ctx, fault.FileSkip(fault.SkipMalware, driveID, itemID, itemName, graph.ItemInfo(item))) return nil, clues.Wrap(err, "malware item").Label(graph.LabelsSkippable) } if clues.HasLabel(err, graph.LabelStatus(http.StatusNotFound)) || graph.IsErrDeletedInFlight(err) { logger.CtxErr(ctx, err).With("skipped_reason", fault.SkipNotFound).Info("item not found") - el.AddSkip(fault.FileSkip(fault.SkipNotFound, driveID, itemID, itemName, graph.ItemInfo(item))) + el.AddSkip(ctx, fault.FileSkip(fault.SkipNotFound, driveID, itemID, itemName, graph.ItemInfo(item))) return nil, clues.Wrap(err, "deleted item").Label(graph.LabelsSkippable) } @@ -293,13 +293,13 @@ func (oc *Collection) getDriveItemContent( // restore, or we have to handle it separately by somehow // deleting the entire collection. logger.CtxErr(ctx, err).With("skipped_reason", fault.SkipBigOneNote).Info("max OneNote file size exceeded") - el.AddSkip(fault.FileSkip(fault.SkipBigOneNote, driveID, itemID, itemName, graph.ItemInfo(item))) + el.AddSkip(ctx, fault.FileSkip(fault.SkipBigOneNote, driveID, itemID, itemName, graph.ItemInfo(item))) return nil, clues.Wrap(err, "max oneNote item").Label(graph.LabelsSkippable) } logger.CtxErr(ctx, err).Error("downloading item") - el.AddRecoverable(clues.Stack(err).WithClues(ctx).Label(fault.LabelForceNoBackupCreation)) + el.AddRecoverable(ctx, clues.Stack(err).WithClues(ctx).Label(fault.LabelForceNoBackupCreation)) // return err, not el.Err(), because the lazy reader needs to communicate to // the data consumer that this item is unreadable, regardless of the fault state. 
@@ -431,7 +431,7 @@ func (oc *Collection) populateItems(ctx context.Context, errs *fault.Bus) { // Fetch metadata for the file itemMeta, itemMetaSize, err = downloadItemMeta(ctx, oc.handler, oc.driveID, item) if err != nil { - el.AddRecoverable(clues.Wrap(err, "getting item metadata").Label(fault.LabelForceNoBackupCreation)) + el.AddRecoverable(ctx, clues.Wrap(err, "getting item metadata").Label(fault.LabelForceNoBackupCreation)) return } diff --git a/src/internal/m365/onedrive/collections.go b/src/internal/m365/onedrive/collections.go index d05b06820..24371b22c 100644 --- a/src/internal/m365/onedrive/collections.go +++ b/src/internal/m365/onedrive/collections.go @@ -663,7 +663,7 @@ func (c *Collections) UpdateCollections( skip = fault.ContainerSkip(fault.SkipMalware, driveID, itemID, itemName, addtl) } - errs.AddSkip(skip) + errs.AddSkip(ctx, skip) logger.Ctx(ctx).Infow("malware detected", "item_details", addtl) continue @@ -689,7 +689,7 @@ func (c *Collections) UpdateCollections( collectionPath, err := c.getCollectionPath(driveID, item) if err != nil { - el.AddRecoverable(clues.Stack(err). + el.AddRecoverable(ctx, clues.Stack(err). WithClues(ictx). Label(fault.LabelForceNoBackupCreation)) @@ -711,7 +711,7 @@ func (c *Collections) UpdateCollections( if ok { prevPath, err = path.FromDataLayerPath(prevPathStr, false) if err != nil { - el.AddRecoverable(clues.Wrap(err, "invalid previous path"). + el.AddRecoverable(ctx, clues.Wrap(err, "invalid previous path"). WithClues(ictx). With("path_string", prevPathStr)) } diff --git a/src/internal/m365/onedrive/collections_test.go b/src/internal/m365/onedrive/collections_test.go index 2888bc149..5a5805179 100644 --- a/src/internal/m365/onedrive/collections_test.go +++ b/src/internal/m365/onedrive/collections_test.go @@ -1157,7 +1157,7 @@ func (f failingColl) Items(ctx context.Context, errs *fault.Bus) <-chan data.Str ic := make(chan data.Stream) defer close(ic) - errs.AddRecoverable(assert.AnError) + errs.AddRecoverable(ctx, assert.AnError) return ic } diff --git a/src/internal/m365/onedrive/drive.go b/src/internal/m365/onedrive/drive.go index 5a4fadf68..c1df2c539 100644 --- a/src/internal/m365/onedrive/drive.go +++ b/src/internal/m365/onedrive/drive.go @@ -238,7 +238,7 @@ func GetAllFolders( "", errs) if err != nil { - el.AddRecoverable(clues.Wrap(err, "enumerating items in drive")) + el.AddRecoverable(ctx, clues.Wrap(err, "enumerating items in drive")) } } diff --git a/src/internal/m365/onedrive/restore.go b/src/internal/m365/onedrive/restore.go index cdd6a9844..c1416a0a3 100644 --- a/src/internal/m365/onedrive/restore.go +++ b/src/internal/m365/onedrive/restore.go @@ -113,7 +113,7 @@ func RestoreCollections( opts.RestorePermissions, errs) if err != nil { - el.AddRecoverable(err) + el.AddRecoverable(ctx, err) } restoreMetrics = support.CombineMetrics(restoreMetrics, metrics) @@ -273,7 +273,7 @@ func ProduceRestoreCollection( itemPath, err := dc.FullPath().AppendItem(itemData.UUID()) if err != nil { - el.AddRecoverable(clues.Wrap(err, "appending item to full path").WithClues(ictx)) + el.AddRecoverable(ctx, clues.Wrap(err, "appending item to full path").WithClues(ictx)) return } @@ -297,7 +297,7 @@ func ProduceRestoreCollection( } if err != nil { - el.AddRecoverable(clues.Wrap(err, "restoring item")) + el.AddRecoverable(ctx, clues.Wrap(err, "restoring item")) return } diff --git a/src/internal/m365/sharepoint/api/pages.go b/src/internal/m365/sharepoint/api/pages.go index 0434ff0a5..581d5de10 100644 --- a/src/internal/m365/sharepoint/api/pages.go +++ 
b/src/internal/m365/sharepoint/api/pages.go @@ -70,7 +70,7 @@ func GetSitePages( page, err = serv.Client().SitesById(siteID).PagesById(pageID).Get(ctx, opts) if err != nil { - el.AddRecoverable(graph.Wrap(ctx, err, "fetching page")) + el.AddRecoverable(ctx, graph.Wrap(ctx, err, "fetching page")) return } diff --git a/src/internal/m365/sharepoint/backup.go b/src/internal/m365/sharepoint/backup.go index ba5c45c23..79765bfdb 100644 --- a/src/internal/m365/sharepoint/backup.go +++ b/src/internal/m365/sharepoint/backup.go @@ -80,7 +80,7 @@ func ProduceBackupCollections( ctrlOpts, errs) if err != nil { - el.AddRecoverable(err) + el.AddRecoverable(ctx, err) continue } @@ -101,7 +101,7 @@ func ProduceBackupCollections( ctrlOpts, errs) if err != nil { - el.AddRecoverable(err) + el.AddRecoverable(ctx, err) continue } @@ -115,7 +115,7 @@ func ProduceBackupCollections( ctrlOpts, errs) if err != nil { - el.AddRecoverable(err) + el.AddRecoverable(ctx, err) continue } @@ -184,7 +184,7 @@ func collectLists( false, tuple.name) if err != nil { - el.AddRecoverable(clues.Wrap(err, "creating list collection path").WithClues(ctx)) + el.AddRecoverable(ctx, clues.Wrap(err, "creating list collection path").WithClues(ctx)) } collection := NewCollection( @@ -284,7 +284,7 @@ func collectPages( false, tuple.Name) if err != nil { - el.AddRecoverable(clues.Wrap(err, "creating page collection path").WithClues(ctx)) + el.AddRecoverable(ctx, clues.Wrap(err, "creating page collection path").WithClues(ctx)) } collection := NewCollection( diff --git a/src/internal/m365/sharepoint/collection.go b/src/internal/m365/sharepoint/collection.go index 12db281a4..90af58cbf 100644 --- a/src/internal/m365/sharepoint/collection.go +++ b/src/internal/m365/sharepoint/collection.go @@ -239,7 +239,7 @@ func (sc *Collection) retrieveLists( byteArray, err := serializeContent(ctx, wtr, lst) if err != nil { - el.AddRecoverable(clues.Wrap(err, "serializing list").WithClues(ctx).Label(fault.LabelForceNoBackupCreation)) + el.AddRecoverable(ctx, clues.Wrap(err, "serializing list").WithClues(ctx).Label(fault.LabelForceNoBackupCreation)) continue } @@ -308,7 +308,7 @@ func (sc *Collection) retrievePages( byteArray, err := serializeContent(ctx, wtr, pg) if err != nil { - el.AddRecoverable(clues.Wrap(err, "serializing page").WithClues(ctx).Label(fault.LabelForceNoBackupCreation)) + el.AddRecoverable(ctx, clues.Wrap(err, "serializing page").WithClues(ctx).Label(fault.LabelForceNoBackupCreation)) continue } diff --git a/src/internal/m365/sharepoint/list.go b/src/internal/m365/sharepoint/list.go index 3532e029b..3dcaaa58f 100644 --- a/src/internal/m365/sharepoint/list.go +++ b/src/internal/m365/sharepoint/list.go @@ -130,13 +130,13 @@ func loadSiteLists( entry, err = gs.Client().Sites().BySiteId(siteID).Lists().ByListId(id).Get(ctx, nil) if err != nil { - el.AddRecoverable(graph.Wrap(ctx, err, "getting site list")) + el.AddRecoverable(ctx, graph.Wrap(ctx, err, "getting site list")) return } cols, cTypes, lItems, err := fetchListContents(ctx, gs, siteID, id, errs) if err != nil { - el.AddRecoverable(clues.Wrap(err, "getting list contents")) + el.AddRecoverable(ctx, clues.Wrap(err, "getting list contents")) return } @@ -220,7 +220,7 @@ func fetchListItems( fields, err := newPrefix.Fields().Get(ctx, nil) if err != nil { - el.AddRecoverable(graph.Wrap(ctx, err, "getting list fields")) + el.AddRecoverable(ctx, graph.Wrap(ctx, err, "getting list fields")) continue } @@ -336,7 +336,7 @@ func fetchContentTypes( links, err := fetchColumnLinks(ctx, gs, siteID, 
listID, id) if err != nil { - el.AddRecoverable(err) + el.AddRecoverable(ctx, err) continue } @@ -344,7 +344,7 @@ func fetchContentTypes( cs, err := fetchColumns(ctx, gs, siteID, listID, id) if err != nil { - el.AddRecoverable(err) + el.AddRecoverable(ctx, err) continue } diff --git a/src/internal/m365/sharepoint/restore.go b/src/internal/m365/sharepoint/restore.go index 5bf53a4d6..504f9a06a 100644 --- a/src/internal/m365/sharepoint/restore.go +++ b/src/internal/m365/sharepoint/restore.go @@ -101,7 +101,7 @@ func ConsumeRestoreCollections( restoreMetrics = support.CombineMetrics(restoreMetrics, metrics) if err != nil { - el.AddRecoverable(err) + el.AddRecoverable(ctx, err) } if errors.Is(err, context.Canceled) { @@ -238,7 +238,7 @@ func RestoreListCollection( siteID, restoreContainerName) if err != nil { - el.AddRecoverable(err) + el.AddRecoverable(ctx, err) continue } @@ -246,7 +246,7 @@ func RestoreListCollection( itemPath, err := dc.FullPath().AppendItem(itemData.UUID()) if err != nil { - el.AddRecoverable(clues.Wrap(err, "appending item to full path").WithClues(ctx)) + el.AddRecoverable(ctx, clues.Wrap(err, "appending item to full path").WithClues(ctx)) continue } @@ -318,7 +318,7 @@ func RestorePageCollection( siteID, restoreContainerName) if err != nil { - el.AddRecoverable(err) + el.AddRecoverable(ctx, err) continue } @@ -326,7 +326,7 @@ func RestorePageCollection( itemPath, err := dc.FullPath().AppendItem(itemData.UUID()) if err != nil { - el.AddRecoverable(clues.Wrap(err, "appending item to full path").WithClues(ctx)) + el.AddRecoverable(ctx, clues.Wrap(err, "appending item to full path").WithClues(ctx)) continue } diff --git a/src/internal/operations/helpers_test.go b/src/internal/operations/helpers_test.go index 6afe7329b..339b1703e 100644 --- a/src/internal/operations/helpers_test.go +++ b/src/internal/operations/helpers_test.go @@ -1,6 +1,7 @@ package operations import ( + "context" "testing" "github.com/stretchr/testify/assert" @@ -22,13 +23,13 @@ func TestHelpersUnitSuite(t *testing.T) { func (suite *HelpersUnitSuite) TestFinalizeErrorHandling() { table := []struct { name string - errs func() *fault.Bus + errs func(context.Context) *fault.Bus opts control.Options expectErr assert.ErrorAssertionFunc }{ { name: "no errors", - errs: func() *fault.Bus { + errs: func(ctx context.Context) *fault.Bus { return fault.New(false) }, opts: control.Options{ @@ -38,7 +39,7 @@ func (suite *HelpersUnitSuite) TestFinalizeErrorHandling() { }, { name: "already failed", - errs: func() *fault.Bus { + errs: func(ctx context.Context) *fault.Bus { fn := fault.New(false) fn.Fail(assert.AnError) return fn @@ -50,9 +51,9 @@ func (suite *HelpersUnitSuite) TestFinalizeErrorHandling() { }, { name: "best effort", - errs: func() *fault.Bus { + errs: func(ctx context.Context) *fault.Bus { fn := fault.New(false) - fn.AddRecoverable(assert.AnError) + fn.AddRecoverable(ctx, assert.AnError) return fn }, opts: control.Options{ @@ -62,9 +63,9 @@ func (suite *HelpersUnitSuite) TestFinalizeErrorHandling() { }, { name: "recoverable errors produce hard fail", - errs: func() *fault.Bus { + errs: func(ctx context.Context) *fault.Bus { fn := fault.New(false) - fn.AddRecoverable(assert.AnError) + fn.AddRecoverable(ctx, assert.AnError) return fn }, opts: control.Options{ @@ -74,11 +75,11 @@ func (suite *HelpersUnitSuite) TestFinalizeErrorHandling() { }, { name: "multiple recoverable errors produce hard fail", - errs: func() *fault.Bus { + errs: func(ctx context.Context) *fault.Bus { fn := fault.New(false) - 
fn.AddRecoverable(assert.AnError) - fn.AddRecoverable(assert.AnError) - fn.AddRecoverable(assert.AnError) + fn.AddRecoverable(ctx, assert.AnError) + fn.AddRecoverable(ctx, assert.AnError) + fn.AddRecoverable(ctx, assert.AnError) return fn }, opts: control.Options{ @@ -94,7 +95,7 @@ func (suite *HelpersUnitSuite) TestFinalizeErrorHandling() { ctx, flush := tester.NewContext(t) defer flush() - errs := test.errs() + errs := test.errs(ctx) finalizeErrorHandling(ctx, test.opts, errs, "test") test.expectErr(t, errs.Failure()) diff --git a/src/internal/operations/pathtransformer/restore_path_transformer.go b/src/internal/operations/pathtransformer/restore_path_transformer.go index 8993328f3..0225d81ea 100644 --- a/src/internal/operations/pathtransformer/restore_path_transformer.go +++ b/src/internal/operations/pathtransformer/restore_path_transformer.go @@ -168,7 +168,7 @@ func GetPaths( restorePaths, err := makeRestorePathsForEntry(ctx, backupVersion, ent) if err != nil { - el.AddRecoverable(clues.Wrap(err, "getting restore paths")) + el.AddRecoverable(ctx, clues.Wrap(err, "getting restore paths")) continue } diff --git a/src/internal/streamstore/collectables_test.go b/src/internal/streamstore/collectables_test.go index 6b8b9d395..d9c827408 100644 --- a/src/internal/streamstore/collectables_test.go +++ b/src/internal/streamstore/collectables_test.go @@ -1,6 +1,7 @@ package streamstore import ( + "context" "testing" "github.com/alcionai/clues" @@ -73,13 +74,13 @@ func (suite *StreamStoreIntgSuite) TestStreamer() { table := []struct { name string deets func(*testing.T) *details.Details - errs func() *fault.Errors + errs func(context.Context) *fault.Errors hasSnapID assert.ValueAssertionFunc }{ { name: "none", deets: func(*testing.T) *details.Details { return nil }, - errs: func() *fault.Errors { return nil }, + errs: func(context.Context) *fault.Errors { return nil }, hasSnapID: assert.Empty, }, { @@ -98,18 +99,20 @@ func (suite *StreamStoreIntgSuite) TestStreamer() { })) return deetsBuilder.Details() }, - errs: func() *fault.Errors { return nil }, + errs: func(context.Context) *fault.Errors { return nil }, hasSnapID: assert.NotEmpty, }, { name: "errors", deets: func(*testing.T) *details.Details { return nil }, - errs: func() *fault.Errors { + errs: func(ctx context.Context) *fault.Errors { bus := fault.New(false) bus.Fail(clues.New("foo")) - bus.AddRecoverable(clues.New("bar")) - bus.AddRecoverable(fault.FileErr(clues.New("file"), "ns", "file-id", "file-name", map[string]any{"foo": "bar"})) - bus.AddSkip(fault.FileSkip(fault.SkipMalware, "ns", "file-id", "file-name", map[string]any{"foo": "bar"})) + bus.AddRecoverable(ctx, clues.New("bar")) + bus.AddRecoverable( + ctx, + fault.FileErr(clues.New("file"), "ns", "file-id", "file-name", map[string]any{"foo": "bar"})) + bus.AddSkip(ctx, fault.FileSkip(fault.SkipMalware, "ns", "file-id", "file-name", map[string]any{"foo": "bar"})) fe := bus.Errors() return fe @@ -133,12 +136,14 @@ func (suite *StreamStoreIntgSuite) TestStreamer() { return deetsBuilder.Details() }, - errs: func() *fault.Errors { + errs: func(ctx context.Context) *fault.Errors { bus := fault.New(false) bus.Fail(clues.New("foo")) - bus.AddRecoverable(clues.New("bar")) - bus.AddRecoverable(fault.FileErr(clues.New("file"), "ns", "file-id", "file-name", map[string]any{"foo": "bar"})) - bus.AddSkip(fault.FileSkip(fault.SkipMalware, "ns", "file-id", "file-name", map[string]any{"foo": "bar"})) + bus.AddRecoverable(ctx, clues.New("bar")) + bus.AddRecoverable( + ctx, + 
fault.FileErr(clues.New("file"), "ns", "file-id", "file-name", map[string]any{"foo": "bar"}))
+ bus.AddSkip(ctx, fault.FileSkip(fault.SkipMalware, "ns", "file-id", "file-name", map[string]any{"foo": "bar"}))

 fe := bus.Errors()

 return fe
@@ -164,7 +169,7 @@ func (suite *StreamStoreIntgSuite) TestStreamer() {
 require.NoError(t, err)
 }

- errs := test.errs()
+ errs := test.errs(ctx)
 if errs != nil {
 err = ss.Collect(ctx, FaultErrorsCollector(errs))
 require.NoError(t, err)
diff --git a/src/pkg/fault/example_fault_test.go b/src/pkg/fault/example_fault_test.go
index c830a9aa9..55a30295c 100644
--- a/src/pkg/fault/example_fault_test.go
+++ b/src/pkg/fault/example_fault_test.go
@@ -1,6 +1,7 @@
 package fault_test

 import (
+ "context"
 "fmt"

 "github.com/alcionai/clues"
@@ -15,6 +16,8 @@ import (
 var (
 ctrl any
 items = []string{}
+ //nolint:forbidigo
+ ctx = context.Background()
 )

 type mockController struct {
@@ -133,7 +136,7 @@ func ExampleBus_AddRecoverable() {
 // to aggregate the error using fault.
 // Side note: technically, you should use a local bus
 // here (see below) instead of errs.
- errs.AddRecoverable(err)
+ errs.AddRecoverable(ctx, err)
 }
 }

@@ -150,7 +153,7 @@
 }

 if err := getIthItem(i); err != nil {
- errs.AddRecoverable(err)
+ errs.AddRecoverable(ctx, err)
 }
 }
 }
@@ -175,13 +178,13 @@ func ExampleBus_Failure() {

 // If Failure() is nil, then you can assume the operation completed.
 // A complete operation is not necessarily an error-free operation.
- // Recoverable errors may still have been added using AddRecoverable(err).
+ // Recoverable errors may still have been added using AddRecoverable(ctx, err).
 // Make sure you check both.

 // If failFast is set to true, then the first recoverable error Added gets
 // promoted to the Err() position.
 errs = fault.New(true)
- errs.AddRecoverable(clues.New("not catastrophic, but still becomes the Failure()"))
+ errs.AddRecoverable(ctx, clues.New("not catastrophic, but still becomes the Failure()"))

 err = errs.Failure()
 fmt.Println(err)
@@ -194,8 +197,8 @@
 // recover from and continue.
 func ExampleErrors_Recovered() {
 errs := fault.New(false)
- errs.AddRecoverable(clues.New("not catastrophic"))
- errs.AddRecoverable(clues.New("something unwanted"))
+ errs.AddRecoverable(ctx, clues.New("not catastrophic"))
+ errs.AddRecoverable(ctx, clues.New("something unwanted"))

 // Recovered() gets the slice of all recoverable errors added during
 // the run, but which did not cause a failure.
@@ -247,12 +250,12 @@ func ExampleBus_Local() {
 }

 if err := getIthItem(i); err != nil {
- // instead of calling errs.AddRecoverable(err), we call the
+ // instead of calling errs.AddRecoverable(ctx, err), we call the
 // local bus's Add method. The error will still get
 // added to the errs.Recovered() set. But if this err
 // causes the run to fail, only this local bus treats
 // it as the causal failure.
- el.AddRecoverable(err)
+ el.AddRecoverable(ctx, err)
 }
 }

@@ -330,7 +333,7 @@ func Example_e2e() {
 if err := storer(d); err != nil {
 // Since we're at the top of the iteration, we need
 // to add each error to the fault.localBus struct.
- el.AddRecoverable(err)
+ el.AddRecoverable(ctx, err)
 }
 }

@@ -383,7 +386,7 @@ func ExampleErrors_Failure_return() {
 }

 if err := dependency.do(); err != nil {
- errs.AddRecoverable(clues.Wrap(err, "recoverable"))
+ errs.AddRecoverable(ctx, clues.Wrap(err, "recoverable"))
 }
 }

@@ -426,7 +429,7 @@ func ExampleBus_AddSkip() {
 // over, instead of error out.
An initial case is when Graph API identifies // a file as containing malware. We can't download the file: it'll always // error. Our only option is to skip it. - errs.AddSkip(fault.FileSkip( + errs.AddSkip(ctx, fault.FileSkip( fault.SkipMalware, "deduplication-namespace", "file-id", diff --git a/src/pkg/fault/fault.go b/src/pkg/fault/fault.go index 041e3d4e2..d69569edc 100644 --- a/src/pkg/fault/fault.go +++ b/src/pkg/fault/fault.go @@ -12,6 +12,7 @@ import ( "golang.org/x/exp/slices" "github.com/alcionai/corso/src/cli/print" + "github.com/alcionai/corso/src/pkg/logger" ) type Bus struct { @@ -118,27 +119,45 @@ func (e *Bus) setFailure(err error) *Bus { // // TODO: nil return, not Bus, since we don't want people to return // from errors.AddRecoverable(). -func (e *Bus) AddRecoverable(err error) *Bus { +func (e *Bus) AddRecoverable(ctx context.Context, err error) { if err == nil { - return e + return } e.mu.Lock() defer e.mu.Unlock() - return e.addRecoverableErr(err) + e.logAndAddRecoverable(ctx, err, 1) +} + +// logs the error and adds it to the bus. If the error is a failure, +// it gets logged at an Error level. Otherwise logs an Info. +func (e *Bus) logAndAddRecoverable(ctx context.Context, err error, skip int) { + log := logger.CtxErrStack(ctx, err, skip+1) + isFail := e.addRecoverableErr(err) + + if isFail { + log.Error("recoverable error") + } else { + log.Info("recoverable error") + } } // addErr handles adding errors to errors.errs. Sync locking -// gets handled upstream of this call. -func (e *Bus) addRecoverableErr(err error) *Bus { +// gets handled upstream of this call. Returns true if the +// error is a failure, false otherwise. +func (e *Bus) addRecoverableErr(err error) bool { + var isFail bool + if e.failure == nil && e.failFast { e.setFailure(err) + + isFail = true } e.recoverable = append(e.recoverable, err) - return e + return isFail } // AddSkip appends a record of a Skipped item to the fault bus. @@ -151,15 +170,23 @@ func (e *Bus) addRecoverableErr(err error) *Bus { // 2. Skipping avoids a permanent and consistent failure. If // the underlying reason is transient or otherwise recoverable, // the item should not be skipped. -func (e *Bus) AddSkip(s *Skipped) *Bus { +func (e *Bus) AddSkip(ctx context.Context, s *Skipped) { if s == nil { - return e + return } e.mu.Lock() defer e.mu.Unlock() - return e.addSkip(s) + e.logAndAddSkip(ctx, s, 1) +} + +// logs the error and adds a skipped item. +func (e *Bus) logAndAddSkip(ctx context.Context, s *Skipped, skip int) { + logger.CtxStack(ctx, skip+1). + With("skipped", s). + Info("recoverable error") + e.addSkip(s) } func (e *Bus) addSkip(s *Skipped) *Bus { @@ -344,7 +371,7 @@ type localBus struct { current error } -func (e *localBus) AddRecoverable(err error) { +func (e *localBus) AddRecoverable(ctx context.Context, err error) { if err == nil { return } @@ -356,7 +383,7 @@ func (e *localBus) AddRecoverable(err error) { e.current = err } - e.bus.AddRecoverable(err) + e.bus.logAndAddRecoverable(ctx, err, 1) } // AddSkip appends a record of a Skipped item to the local bus. @@ -369,7 +396,7 @@ func (e *localBus) AddRecoverable(err error) { // 2. Skipping avoids a permanent and consistent failure. If // the underlying reason is transient or otherwise recoverable, // the item should not be skipped. 
-func (e *localBus) AddSkip(s *Skipped) { +func (e *localBus) AddSkip(ctx context.Context, s *Skipped) { if s == nil { return } @@ -377,7 +404,7 @@ func (e *localBus) AddSkip(s *Skipped) { e.mu.Lock() defer e.mu.Unlock() - e.bus.AddSkip(s) + e.bus.logAndAddSkip(ctx, s, 1) } // Failure returns the failure that happened within the local bus. diff --git a/src/pkg/fault/fault_test.go b/src/pkg/fault/fault_test.go index 4d731ede1..c4166456b 100644 --- a/src/pkg/fault/fault_test.go +++ b/src/pkg/fault/fault_test.go @@ -1,6 +1,7 @@ package fault_test import ( + "context" "encoding/json" "testing" @@ -75,6 +76,9 @@ func (suite *FaultErrorsUnitSuite) TestErr() { suite.Run(test.name, func() { t := suite.T() + ctx, flush := tester.NewContext(t) + defer flush() + n := fault.New(test.failFast) require.NotNil(t, n) require.NoError(t, n.Failure(), clues.ToCore(n.Failure())) @@ -83,8 +87,7 @@ func (suite *FaultErrorsUnitSuite) TestErr() { e := n.Fail(test.fail) require.NotNil(t, e) - e = n.AddRecoverable(test.add) - require.NotNil(t, e) + n.AddRecoverable(ctx, test.add) test.expect(t, n.Failure()) }) @@ -152,14 +155,16 @@ func (suite *FaultErrorsUnitSuite) TestErrs() { suite.Run(test.name, func() { t := suite.T() + ctx, flush := tester.NewContext(t) + defer flush() + n := fault.New(test.failFast) require.NotNil(t, n) e := n.Fail(test.fail) require.NotNil(t, e) - e = n.AddRecoverable(test.add) - require.NotNil(t, e) + n.AddRecoverable(ctx, test.add) test.expect(t, n.Recovered()) }) @@ -169,14 +174,17 @@ func (suite *FaultErrorsUnitSuite) TestErrs() { func (suite *FaultErrorsUnitSuite) TestAdd() { t := suite.T() + ctx, flush := tester.NewContext(t) + defer flush() + n := fault.New(true) require.NotNil(t, n) - n.AddRecoverable(assert.AnError) + n.AddRecoverable(ctx, assert.AnError) assert.Error(t, n.Failure()) assert.Len(t, n.Recovered(), 1) - n.AddRecoverable(assert.AnError) + n.AddRecoverable(ctx, assert.AnError) assert.Error(t, n.Failure()) assert.Len(t, n.Recovered(), 2) } @@ -184,29 +192,35 @@ func (suite *FaultErrorsUnitSuite) TestAdd() { func (suite *FaultErrorsUnitSuite) TestAddSkip() { t := suite.T() + ctx, flush := tester.NewContext(t) + defer flush() + n := fault.New(true) require.NotNil(t, n) n.Fail(assert.AnError) assert.Len(t, n.Skipped(), 0) - n.AddRecoverable(assert.AnError) + n.AddRecoverable(ctx, assert.AnError) assert.Len(t, n.Skipped(), 0) - n.AddSkip(fault.OwnerSkip(fault.SkipMalware, "ns", "id", "name", nil)) + n.AddSkip(ctx, fault.OwnerSkip(fault.SkipMalware, "ns", "id", "name", nil)) assert.Len(t, n.Skipped(), 1) } func (suite *FaultErrorsUnitSuite) TestErrors() { t := suite.T() + ctx, flush := tester.NewContext(t) + defer flush() + // not fail-fast n := fault.New(false) require.NotNil(t, n) n.Fail(clues.New("fail")) - n.AddRecoverable(clues.New("1")) - n.AddRecoverable(clues.New("2")) + n.AddRecoverable(ctx, clues.New("1")) + n.AddRecoverable(ctx, clues.New("2")) d := n.Errors() assert.Equal(t, clues.ToCore(n.Failure()), d.Failure) @@ -218,8 +232,8 @@ func (suite *FaultErrorsUnitSuite) TestErrors() { require.NotNil(t, n) n.Fail(clues.New("fail")) - n.AddRecoverable(clues.New("1")) - n.AddRecoverable(clues.New("2")) + n.AddRecoverable(ctx, clues.New("1")) + n.AddRecoverable(ctx, clues.New("2")) d = n.Errors() assert.Equal(t, clues.ToCore(n.Failure()), d.Failure) @@ -234,13 +248,13 @@ func (suite *FaultErrorsUnitSuite) TestErrors_Items() { table := []struct { name string - errs func() *fault.Errors + errs func(context.Context) *fault.Errors expectItems []fault.Item expectRecoverable 
[]*clues.ErrCore }{ { name: "no errors", - errs: func() *fault.Errors { + errs: func(ctx context.Context) *fault.Errors { return fault.New(false).Errors() }, expectItems: []fault.Item{}, @@ -248,10 +262,10 @@ func (suite *FaultErrorsUnitSuite) TestErrors_Items() { }, { name: "no items", - errs: func() *fault.Errors { + errs: func(ctx context.Context) *fault.Errors { b := fault.New(false) b.Fail(ae) - b.AddRecoverable(ae) + b.AddRecoverable(ctx, ae) return b.Errors() }, @@ -260,10 +274,10 @@ func (suite *FaultErrorsUnitSuite) TestErrors_Items() { }, { name: "failure item", - errs: func() *fault.Errors { + errs: func(ctx context.Context) *fault.Errors { b := fault.New(false) b.Fail(fault.OwnerErr(ae, "ns", "id", "name", addtl)) - b.AddRecoverable(ae) + b.AddRecoverable(ctx, ae) return b.Errors() }, @@ -272,10 +286,10 @@ func (suite *FaultErrorsUnitSuite) TestErrors_Items() { }, { name: "recoverable item", - errs: func() *fault.Errors { + errs: func(ctx context.Context) *fault.Errors { b := fault.New(false) b.Fail(ae) - b.AddRecoverable(fault.OwnerErr(ae, "ns", "id", "name", addtl)) + b.AddRecoverable(ctx, fault.OwnerErr(ae, "ns", "id", "name", addtl)) return b.Errors() }, @@ -284,10 +298,10 @@ func (suite *FaultErrorsUnitSuite) TestErrors_Items() { }, { name: "two items", - errs: func() *fault.Errors { + errs: func(ctx context.Context) *fault.Errors { b := fault.New(false) b.Fail(fault.OwnerErr(ae, "ns", "oid", "name", addtl)) - b.AddRecoverable(fault.FileErr(ae, "ns", "fid", "name", addtl)) + b.AddRecoverable(ctx, fault.FileErr(ae, "ns", "fid", "name", addtl)) return b.Errors() }, @@ -299,10 +313,10 @@ func (suite *FaultErrorsUnitSuite) TestErrors_Items() { }, { name: "two items - diff namespace same id", - errs: func() *fault.Errors { + errs: func(ctx context.Context) *fault.Errors { b := fault.New(false) b.Fail(fault.OwnerErr(ae, "ns", "id", "name", addtl)) - b.AddRecoverable(fault.FileErr(ae, "ns2", "id", "name", addtl)) + b.AddRecoverable(ctx, fault.FileErr(ae, "ns2", "id", "name", addtl)) return b.Errors() }, @@ -314,10 +328,10 @@ func (suite *FaultErrorsUnitSuite) TestErrors_Items() { }, { name: "duplicate items - failure priority", - errs: func() *fault.Errors { + errs: func(ctx context.Context) *fault.Errors { b := fault.New(false) b.Fail(fault.OwnerErr(ae, "ns", "id", "name", addtl)) - b.AddRecoverable(fault.FileErr(ae, "ns", "id", "name", addtl)) + b.AddRecoverable(ctx, fault.FileErr(ae, "ns", "id", "name", addtl)) return b.Errors() }, @@ -328,11 +342,11 @@ func (suite *FaultErrorsUnitSuite) TestErrors_Items() { }, { name: "duplicate items - last recoverable priority", - errs: func() *fault.Errors { + errs: func(ctx context.Context) *fault.Errors { b := fault.New(false) b.Fail(ae) - b.AddRecoverable(fault.FileErr(ae, "ns", "fid", "name", addtl)) - b.AddRecoverable(fault.FileErr(ae, "ns", "fid", "name2", addtl)) + b.AddRecoverable(ctx, fault.FileErr(ae, "ns", "fid", "name", addtl)) + b.AddRecoverable(ctx, fault.FileErr(ae, "ns", "fid", "name2", addtl)) return b.Errors() }, @@ -343,11 +357,11 @@ func (suite *FaultErrorsUnitSuite) TestErrors_Items() { }, { name: "recoverable item and non-items", - errs: func() *fault.Errors { + errs: func(ctx context.Context) *fault.Errors { b := fault.New(false) b.Fail(ae) - b.AddRecoverable(fault.FileErr(ae, "ns", "fid", "name", addtl)) - b.AddRecoverable(ae) + b.AddRecoverable(ctx, fault.FileErr(ae, "ns", "fid", "name", addtl)) + b.AddRecoverable(ctx, ae) return b.Errors() }, @@ -360,7 +374,11 @@ func (suite *FaultErrorsUnitSuite) 
TestErrors_Items() { for _, test := range table { suite.Run(test.name, func() { t := suite.T() - fe := test.errs() + + ctx, flush := tester.NewContext(t) + defer flush() + + fe := test.errs(ctx) assert.ElementsMatch(t, test.expectItems, fe.Items) require.Equal(t, test.expectRecoverable, fe.Recovered) @@ -378,12 +396,15 @@ func (suite *FaultErrorsUnitSuite) TestErrors_Items() { func (suite *FaultErrorsUnitSuite) TestMarshalUnmarshal() { t := suite.T() + ctx, flush := tester.NewContext(t) + defer flush() + // not fail-fast n := fault.New(false) require.NotNil(t, n) - n.AddRecoverable(clues.New("1")) - n.AddRecoverable(clues.New("2")) + n.AddRecoverable(ctx, clues.New("1")) + n.AddRecoverable(ctx, clues.New("2")) bs, err := json.Marshal(n.Errors()) require.NoError(t, err, clues.ToCore(err)) @@ -419,13 +440,16 @@ func (suite *FaultErrorsUnitSuite) TestUnmarshalLegacy() { func (suite *FaultErrorsUnitSuite) TestTracker() { t := suite.T() + ctx, flush := tester.NewContext(t) + defer flush() + eb := fault.New(false) lb := eb.Local() assert.NoError(t, lb.Failure(), clues.ToCore(lb.Failure())) assert.Empty(t, eb.Recovered()) - lb.AddRecoverable(assert.AnError) + lb.AddRecoverable(ctx, assert.AnError) assert.NoError(t, lb.Failure(), clues.ToCore(lb.Failure())) assert.NoError(t, eb.Failure(), clues.ToCore(eb.Failure())) assert.NotEmpty(t, eb.Recovered()) @@ -436,7 +460,7 @@ func (suite *FaultErrorsUnitSuite) TestTracker() { assert.NoError(t, lbt.Failure(), clues.ToCore(lbt.Failure())) assert.Empty(t, ebt.Recovered()) - lbt.AddRecoverable(assert.AnError) + lbt.AddRecoverable(ctx, assert.AnError) assert.Error(t, lbt.Failure()) assert.Error(t, ebt.Failure()) assert.NotEmpty(t, ebt.Recovered()) diff --git a/src/pkg/logger/logger.go b/src/pkg/logger/logger.go index 48c23e5af..fb4d37e4b 100644 --- a/src/pkg/logger/logger.go +++ b/src/pkg/logger/logger.go @@ -437,6 +437,14 @@ func Ctx(ctx context.Context) *zap.SugaredLogger { return l.(*zap.SugaredLogger).With(clues.In(ctx).Slice()...) } +// CtxStack retrieves the logger embedded in the context, and adds the +// stacktrace to the log info. +// If skip is non-zero, it skips the stack calls starting from the +// first. Skip always adds +1 to account for this wrapper. +func CtxStack(ctx context.Context, skip int) *zap.SugaredLogger { + return Ctx(ctx).With(zap.StackSkip("trace", skip+1)) +} + // CtxErr retrieves the logger embedded in the context // and packs all of the structured data in the error inside it. func CtxErr(ctx context.Context, err error) *zap.SugaredLogger { @@ -447,6 +455,19 @@ func CtxErr(ctx context.Context, err error) *zap.SugaredLogger { With(clues.InErr(err).Slice()...) } +// CtxErrStack retrieves the logger embedded in the context +// and packs all of the structured data in the error inside it. +// If skip is non-zero, it skips the stack calls starting from the +// first. Skip always adds +1 to account for this wrapper. +func CtxErrStack(ctx context.Context, err error, skip int) *zap.SugaredLogger { + return Ctx(ctx). + With( + "error", err, + "error_labels", clues.Labels(err)). + With(zap.StackSkip("trace", skip+1)). + With(clues.InErr(err).Slice()...) +} + // Flush writes out all buffered logs. 
func Flush(ctx context.Context) { _ = Ctx(ctx).Sync() diff --git a/src/pkg/repository/repository.go b/src/pkg/repository/repository.go index b58cf3eac..c8e30829a 100644 --- a/src/pkg/repository/repository.go +++ b/src/pkg/repository/repository.go @@ -402,7 +402,7 @@ func (r repository) Backups(ctx context.Context, ids []string) ([]*backup.Backup b, err := sw.GetBackup(ictx, model.StableID(id)) if err != nil { - errs.AddRecoverable(errWrapper(err)) + errs.AddRecoverable(ctx, errWrapper(err)) } bups = append(bups, b) diff --git a/src/pkg/selectors/scopes.go b/src/pkg/selectors/scopes.go index f0c5fb4da..2a860892b 100644 --- a/src/pkg/selectors/scopes.go +++ b/src/pkg/selectors/scopes.go @@ -368,7 +368,7 @@ func reduce[T scopeT, C categoryT]( repoPath, err := path.FromDataLayerPath(ent.RepoRef, true) if err != nil { - el.AddRecoverable(clues.Wrap(err, "transforming repoRef to path").WithClues(ictx)) + el.AddRecoverable(ctx, clues.Wrap(err, "transforming repoRef to path").WithClues(ictx)) continue } @@ -391,7 +391,7 @@ func reduce[T scopeT, C categoryT]( pv, err := dc.pathValues(repoPath, *ent, s.Cfg) if err != nil { - el.AddRecoverable(clues.Wrap(err, "getting path values").WithClues(ictx)) + el.AddRecoverable(ctx, clues.Wrap(err, "getting path values").WithClues(ictx)) continue } diff --git a/src/pkg/services/m365/api/contacts.go b/src/pkg/services/m365/api/contacts.go index fa52d8312..9d2c253d5 100644 --- a/src/pkg/services/m365/api/contacts.go +++ b/src/pkg/services/m365/api/contacts.go @@ -183,7 +183,7 @@ func (c Contacts) EnumerateContainers( } if err := graph.CheckIDNameAndParentFolderID(fold); err != nil { - errs.AddRecoverable(graph.Stack(ctx, err).Label(fault.LabelForceNoBackupCreation)) + errs.AddRecoverable(ctx, graph.Stack(ctx, err).Label(fault.LabelForceNoBackupCreation)) continue } @@ -194,7 +194,7 @@ func (c Contacts) EnumerateContainers( temp := graph.NewCacheFolder(fold, nil, nil) if err := fn(&temp); err != nil { - errs.AddRecoverable(graph.Stack(fctx, err).Label(fault.LabelForceNoBackupCreation)) + errs.AddRecoverable(ctx, graph.Stack(fctx, err).Label(fault.LabelForceNoBackupCreation)) continue } } diff --git a/src/pkg/services/m365/api/events.go b/src/pkg/services/m365/api/events.go index 8f35481e2..37f40107b 100644 --- a/src/pkg/services/m365/api/events.go +++ b/src/pkg/services/m365/api/events.go @@ -236,7 +236,7 @@ func (c Events) EnumerateContainers( cd := CalendarDisplayable{Calendarable: cal} if err := graph.CheckIDAndName(cd); err != nil { - errs.AddRecoverable(graph.Stack(ctx, err).Label(fault.LabelForceNoBackupCreation)) + errs.AddRecoverable(ctx, graph.Stack(ctx, err).Label(fault.LabelForceNoBackupCreation)) continue } @@ -250,7 +250,7 @@ func (c Events) EnumerateContainers( path.Builder{}.Append(ptr.Val(cd.GetId())), // storage path path.Builder{}.Append(ptr.Val(cd.GetDisplayName()))) // display location if err := fn(&temp); err != nil { - errs.AddRecoverable(graph.Stack(fctx, err).Label(fault.LabelForceNoBackupCreation)) + errs.AddRecoverable(ctx, graph.Stack(fctx, err).Label(fault.LabelForceNoBackupCreation)) continue } } diff --git a/src/pkg/services/m365/api/mail.go b/src/pkg/services/m365/api/mail.go index 8c7ca138c..441198c2a 100644 --- a/src/pkg/services/m365/api/mail.go +++ b/src/pkg/services/m365/api/mail.go @@ -264,7 +264,7 @@ func (c Mail) EnumerateContainers( } if err := graph.CheckIDNameAndParentFolderID(fold); err != nil { - errs.AddRecoverable(graph.Stack(ctx, err).Label(fault.LabelForceNoBackupCreation)) + errs.AddRecoverable(ctx, 
graph.Stack(ctx, err).Label(fault.LabelForceNoBackupCreation))
 continue
 }
@@ -275,7 +275,7 @@
 temp := graph.NewCacheFolder(fold, nil, nil)
 if err := fn(&temp); err != nil {
- errs.AddRecoverable(graph.Stack(fctx, err).Label(fault.LabelForceNoBackupCreation))
+ errs.AddRecoverable(ctx, graph.Stack(fctx, err).Label(fault.LabelForceNoBackupCreation))
 continue
 }
 }
diff --git a/src/pkg/services/m365/api/sites.go b/src/pkg/services/m365/api/sites.go
index d1f506cda..a73703703 100644
--- a/src/pkg/services/m365/api/sites.go
+++ b/src/pkg/services/m365/api/sites.go
@@ -72,7 +72,7 @@ func (c Sites) GetAll(ctx context.Context, errs *fault.Bus) ([]models.Siteable,
 }

 if err != nil {
- el.AddRecoverable(graph.Wrap(ctx, err, "validating site"))
+ el.AddRecoverable(ctx, graph.Wrap(ctx, err, "validating site"))
 return true
 }
diff --git a/src/pkg/services/m365/api/users.go b/src/pkg/services/m365/api/users.go
index 908128579..07d2430ac 100644
--- a/src/pkg/services/m365/api/users.go
+++ b/src/pkg/services/m365/api/users.go
@@ -97,7 +97,7 @@ func (c Users) GetAll(
 err := validateUser(item)

 if err != nil {
- el.AddRecoverable(graph.Wrap(ctx, err, "validating user"))
+ el.AddRecoverable(ctx, graph.Wrap(ctx, err, "validating user"))
 } else {
 us = append(us, item)
 }

From 6e2d72509c1517c9a6e86ebef11953787e4b50d0 Mon Sep 17 00:00:00 2001
From: Abin Simon
Date: Wed, 14 Jun 2023 10:39:07 +0530
Subject: [PATCH 22/41] Skip any attachment fetches that fail with OLE
 conversion error (#3607)

From what we can understand, these files are not available and thus
can't be fetched. This change ensures that we don't fail the backup
just because of this error.

---

#### Does this PR need a docs update or release note?

- [x] :white_check_mark: Yes, it's included
- [ ] :clock1: Yes, but in a later PR
- [ ] :no_entry: No

#### Type of change

- [ ] :sunflower: Feature
- [x] :bug: Bugfix
- [ ] :world_map: Documentation
- [ ] :robot: Supportability/Tests
- [ ] :computer: CI/Deployment
- [ ] :broom: Tech Debt/Cleanup

#### Issue(s)

* #

#### Test Plan

- [ ] :muscle: Manual
- [x] :zap: Unit test
- [ ] :green_heart: E2E
---
 CHANGELOG.md | 1 +
 src/internal/m365/graph/errors.go | 8 ++++++
 src/internal/m365/graph/errors_test.go | 39 ++++++++++++++++++++++++++
 src/pkg/services/m365/api/mail.go | 14 +++++++++
 4 files changed, 62 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 99d92b4fd..d528b18c8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -18,6 +18,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Fix Exchange folder cache population error when parent folder isn't found.
 - Fix Exchange backup issue caused by incorrect json serialization
 - Fix issues with details model containing duplicate entry for api consumers
+- Handle OLE conversion errors when trying to fetch attachments

 ### Changed
 - Do not display all the items that we restored at the end if there are more than 15. You can override this with `--verbose`.
diff --git a/src/internal/m365/graph/errors.go b/src/internal/m365/graph/errors.go
index 2f2427b6c..65150868b 100644
--- a/src/internal/m365/graph/errors.go
+++ b/src/internal/m365/graph/errors.go
@@ -44,6 +44,10 @@ const (
 // the same name as another folder in the same parent. Such duplicate folder
 // names are not allowed by graph.
 folderExists errorCode = "ErrorFolderExists"
+ // cannotOpenFileAttachment happens when an attachment is
+ // inaccessible. The error message is usually "OLE conversion
+ // failed for an attachment."
+ cannotOpenFileAttachment errorCode = "ErrorCannotOpenFileAttachment"
 )

 type errorMessage string
@@ -126,6 +130,10 @@ func IsErrResourceNotFound(err error) bool {
 return hasErrorCode(err, resourceNotFound)
 }

+func IsErrCannotOpenFileAttachment(err error) bool {
+ return hasErrorCode(err, cannotOpenFileAttachment)
+}
+
 func IsErrAccessDenied(err error) bool {
 return hasErrorCode(err, errorAccessDenied) || clues.HasLabel(err, LabelStatus(http.StatusForbidden))
 }
diff --git a/src/internal/m365/graph/errors_test.go b/src/internal/m365/graph/errors_test.go
index a0095dc1e..714677179 100644
--- a/src/internal/m365/graph/errors_test.go
+++ b/src/internal/m365/graph/errors_test.go
@@ -384,3 +384,42 @@ func (suite *GraphErrorsUnitSuite) TestIsErrFolderExists() {
 })
 }
 }
+
+func (suite *GraphErrorsUnitSuite) TestIsErrCannotOpenFileAttachment() {
+ table := []struct {
+ name string
+ err error
+ expect assert.BoolAssertionFunc
+ }{
+ {
+ name: "nil",
+ err: nil,
+ expect: assert.False,
+ },
+ {
+ name: "non-matching",
+ err: assert.AnError,
+ expect: assert.False,
+ },
+ {
+ name: "as",
+ err: ErrInvalidDelta,
+ expect: assert.False,
+ },
+ {
+ name: "non-matching oDataErr",
+ err: odErr("fnords"),
+ expect: assert.False,
+ },
+ {
+ name: "cannot-open-file-attachment oDataErr",
+ err: odErr(string(cannotOpenFileAttachment)),
+ expect: assert.True,
+ },
+ }
+ for _, test := range table {
+ suite.Run(test.name, func() {
+ test.expect(suite.T(), IsErrCannotOpenFileAttachment(test.err))
+ })
+ }
+}
diff --git a/src/pkg/services/m365/api/mail.go b/src/pkg/services/m365/api/mail.go
index 441198c2a..4c608ad28 100644
--- a/src/pkg/services/m365/api/mail.go
+++ b/src/pkg/services/m365/api/mail.go
@@ -406,6 +406,20 @@ func (c Mail) GetItem(
 ByAttachmentId(ptr.Val(a.GetId())).
 Get(ctx, attachConfig)
 if err != nil {
+ if graph.IsErrCannotOpenFileAttachment(err) {
+ logger.CtxErr(ctx, err).
+ With(
+ "skipped_reason", fault.SkipNotFound,
+ "attachment_id", ptr.Val(a.GetId()),
+ "attachment_size", ptr.Val(a.GetSize()),
+ ).Info("attachment not found")
+ // TODO: This should use an `AddSkip` once we have
+ // figured out the semantics for skipping
+ // subcomponents of an item
+
+ continue
+ }
+
 return nil, nil, graph.Wrap(ctx, err, "getting mail attachment").
 With("attachment_id", ptr.Val(a.GetId()), "attachment_size", ptr.Val(a.GetSize()))
 }

From 61de33cbd30a26dae88883df236d5e2045ef3ece Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Wed, 14 Jun 2023 06:08:52 +0000
Subject: =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Bump=20sass=20from?=
 =?UTF-8?q?=201.63.3=20to=201.63.4=20in=20/website=20(#3621)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Bumps [sass](https://github.com/sass/dart-sass) from 1.63.3 to 1.63.4.
Release notes

Sourced from sass's releases.

Dart Sass 1.63.4

To install Sass 1.63.4, download one of the packages from the GitHub release page and add it to your PATH, or see the Sass website for full installation instructions.

Changes

JavaScript API

  • Re-enable support for import sass from 'sass' when loading the package from an ESM module in Node.js. However, this syntax is now deprecated; ESM users should use import * as sass from 'sass' instead.

    On the browser and other ESM-only platforms, only import * as sass from 'sass' is supported.

  • Properly export the legacy API values TRUE, FALSE, NULL, and types from the ECMAScript module API.

Embedded Sass

  • Fix a race condition where closing standard input while requests are in-flight could sometimes cause the process to hang rather than shutting down gracefully.

  • Properly include the root stylesheet's URL in the set of loaded URLs when it fails to parse.

See the full changelog for changes in earlier releases.

Changelog

Sourced from sass's changelog.

1.63.4

JavaScript API

  • Re-enable support for import sass from 'sass' when loading the package from an ESM module in Node.js. However, this syntax is now deprecated; ESM users should use import * as sass from 'sass' instead.

    On the browser and other ESM-only platforms, only import * as sass from 'sass' is supported.

  • Properly export the legacy API values TRUE, FALSE, NULL, and types from the ECMAScript module API.

Embedded Sass

  • Fix a race condition where closing standard input while requests are in-flight could sometimes cause the process to hang rather than shutting down gracefully.

  • Properly include the root stylesheet's URL in the set of loaded URLs when it fails to parse.

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=sass&package-manager=npm_and_yarn&previous-version=1.63.3&new-version=1.63.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) You can trigger a rebase of this PR by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
--- website/package-lock.json | 14 +++++++------- website/package.json | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/website/package-lock.json b/website/package-lock.json index 9cffb7e0a..69a6c0cc6 100644 --- a/website/package-lock.json +++ b/website/package-lock.json @@ -24,7 +24,7 @@ "prism-react-renderer": "^1.3.5", "react": "^17.0.2", "react-dom": "^17.0.2", - "sass": "^1.63.3", + "sass": "^1.63.4", "tiny-slider": "^2.9.4", "tw-elements": "^1.0.0-alpha13", "wow.js": "^1.2.2" @@ -12522,9 +12522,9 @@ "license": "MIT" }, "node_modules/sass": { - "version": "1.63.3", - "resolved": "https://registry.npmjs.org/sass/-/sass-1.63.3.tgz", - "integrity": "sha512-ySdXN+DVpfwq49jG1+hmtDslYqpS7SkOR5GpF6o2bmb1RL/xS+wvPmegMvMywyfsmAV6p7TgwXYGrCZIFFbAHg==", + "version": "1.63.4", + "resolved": "https://registry.npmjs.org/sass/-/sass-1.63.4.tgz", + "integrity": "sha512-Sx/+weUmK+oiIlI+9sdD0wZHsqpbgQg8wSwSnGBjwb5GwqFhYNwwnI+UWZtLjKvKyFlKkatRK235qQ3mokyPoQ==", "dependencies": { "chokidar": ">=3.0.0 <4.0.0", "immutable": "^4.0.0", @@ -23699,9 +23699,9 @@ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, "sass": { - "version": "1.63.3", - "resolved": "https://registry.npmjs.org/sass/-/sass-1.63.3.tgz", - "integrity": "sha512-ySdXN+DVpfwq49jG1+hmtDslYqpS7SkOR5GpF6o2bmb1RL/xS+wvPmegMvMywyfsmAV6p7TgwXYGrCZIFFbAHg==", + "version": "1.63.4", + "resolved": "https://registry.npmjs.org/sass/-/sass-1.63.4.tgz", + "integrity": "sha512-Sx/+weUmK+oiIlI+9sdD0wZHsqpbgQg8wSwSnGBjwb5GwqFhYNwwnI+UWZtLjKvKyFlKkatRK235qQ3mokyPoQ==", "requires": { "chokidar": ">=3.0.0 <4.0.0", "immutable": "^4.0.0", diff --git a/website/package.json b/website/package.json index 5704d651f..08ce79bd8 100644 --- a/website/package.json +++ b/website/package.json @@ -30,7 +30,7 @@ "prism-react-renderer": "^1.3.5", "react": "^17.0.2", "react-dom": "^17.0.2", - "sass": "^1.63.3", + "sass": "^1.63.4", "tiny-slider": "^2.9.4", "tw-elements": "^1.0.0-alpha13", "wow.js": "^1.2.2" From 466698f096931b4d01191af71acb7420dbefcca3 Mon Sep 17 00:00:00 2001 From: Keepers Date: Wed, 14 Jun 2023 10:09:44 -0600 Subject: [PATCH 24/41] fix restore func misnaming in onedrive (#3616) #### Does this PR need a docs update or release note? - [x] :no_entry: No #### Type of change - [x] :broom: Tech Debt/Cleanup #### Issue(s) * #1996 --- src/internal/m365/onedrive/restore.go | 10 +++++----- src/internal/m365/restore.go | 2 +- src/internal/m365/sharepoint/restore.go | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/internal/m365/onedrive/restore.go b/src/internal/m365/onedrive/restore.go index c1416a0a3..da3f83d92 100644 --- a/src/internal/m365/onedrive/restore.go +++ b/src/internal/m365/onedrive/restore.go @@ -59,8 +59,8 @@ func NewRestoreCaches() *restoreCaches { } } -// RestoreCollections will restore the specified data collections into OneDrive -func RestoreCollections( +// ConsumeRestoreCollections will restore the specified data collections into OneDrive +func ConsumeRestoreCollections( ctx context.Context, rh RestoreHandler, backupVersion int, @@ -102,7 +102,7 @@ func RestoreCollections( "full_path", dc.FullPath()) ) - metrics, err = ProduceRestoreCollection( + metrics, err = RestoreCollection( ictx, rh, backupVersion, @@ -133,12 +133,12 @@ func RestoreCollections( return status, el.Failure() } -// ProduceRestoreCollection handles restoration of an individual collection. +// RestoreCollection handles restoration of an individual collection. 
// returns: // - the collection's item and byte count metrics // - the updated metadata map that include metadata for folders in this collection // - error, if any besides recoverable -func ProduceRestoreCollection( +func RestoreCollection( ctx context.Context, rh RestoreHandler, backupVersion int, diff --git a/src/internal/m365/restore.go b/src/internal/m365/restore.go index f04d3296c..07d4cd968 100644 --- a/src/internal/m365/restore.go +++ b/src/internal/m365/restore.go @@ -45,7 +45,7 @@ func (ctrl *Controller) ConsumeRestoreCollections( case selectors.ServiceExchange: status, err = exchange.ConsumeRestoreCollections(ctx, ctrl.AC, restoreCfg, dcs, deets, errs) case selectors.ServiceOneDrive: - status, err = onedrive.RestoreCollections( + status, err = onedrive.ConsumeRestoreCollections( ctx, onedrive.NewRestoreHandler(ctrl.AC), backupVersion, diff --git a/src/internal/m365/sharepoint/restore.go b/src/internal/m365/sharepoint/restore.go index 504f9a06a..7076d4de9 100644 --- a/src/internal/m365/sharepoint/restore.go +++ b/src/internal/m365/sharepoint/restore.go @@ -65,7 +65,7 @@ func ConsumeRestoreCollections( switch dc.FullPath().Category() { case path.LibrariesCategory: - metrics, err = onedrive.ProduceRestoreCollection( + metrics, err = onedrive.RestoreCollection( ictx, libraryRestoreHandler{ac.Drives()}, backupVersion, From bd0f6f9769b6f7c0d94cfec59e8064f8b14a754c Mon Sep 17 00:00:00 2001 From: Keepers Date: Wed, 14 Jun 2023 10:50:17 -0600 Subject: [PATCH 25/41] final cleanup for renaming/movement (#3612) This last step in the cleanup has two goals: first to minimize the number of arbitrarily named and located files so that code is better condensed and co-located. This is all just file renaming and a minor amount of code copy-pasting. The second is to create a centralized package to own the ColInfo type structs that we use to both stub out test data, and also generate factory data for cmds. The current ownership is haphazard, and while this movement is a little more condensed, it's still jumping through some weird hoops to get things to work. Treat it as one good step forward, and we'll have to return to polish it another time. At least it'll be separated from the m365 folder at large this way, and more easily identified as supporting design rather than production usage. --- #### Does this PR need a docs update or release note? 
- [x] :no_entry: No #### Type of change - [x] :broom: Tech Debt/Cleanup #### Issue(s) * closes #1996 #### Test Plan - [x] :zap: Unit test - [x] :green_heart: E2E --- src/cmd/factory/impl/common.go | 64 ++-- src/internal/m365/backup_test.go | 12 +- src/internal/m365/controller_test.go | 349 +++++++++++++----- ...r_cache.go => contacts_container_cache.go} | 0 ...ner_cache.go => events_container_cache.go} | 0 .../m365/graph_connector_disconnected_test.go | 181 --------- ...onnector_helper_test.go => helper_test.go} | 26 +- src/internal/m365/mock/collection.go | 24 ++ .../onedrive/{drive.go => item_collector.go} | 0 .../{drive_test.go => item_collector_test.go} | 7 +- .../stub/stub.go} | 174 ++++----- ...ctor_onedrive_test.go => onedrive_test.go} | 179 ++++----- src/internal/m365/sharepoint/api/pages.go | 2 +- src/internal/m365/sharepoint/collection.go | 4 +- .../m365/sharepoint/collection_test.go | 4 +- src/internal/m365/sharepoint/list_info.go | 28 -- .../m365/sharepoint/list_info_test.go | 59 --- .../m365/sharepoint/{list.go => lists.go} | 21 ++ .../{list_test.go => lists_test.go} | 51 ++- .../m365/sharepoint/{pageInfo.go => pages.go} | 4 +- .../{pageInfo_test.go => pages_test.go} | 16 +- src/internal/m365/sharepoint/restore.go | 12 +- .../stub.go} | 225 +++++------ 23 files changed, 705 insertions(+), 737 deletions(-) rename src/internal/m365/exchange/{contact_container_cache.go => contacts_container_cache.go} (100%) rename src/internal/m365/exchange/{event_container_cache.go => events_container_cache.go} (100%) delete mode 100644 src/internal/m365/graph_connector_disconnected_test.go rename src/internal/m365/{graph_connector_helper_test.go => helper_test.go} (98%) create mode 100644 src/internal/m365/mock/collection.go rename src/internal/m365/onedrive/{drive.go => item_collector.go} (100%) rename src/internal/m365/onedrive/{drive_test.go => item_collector_test.go} (98%) rename src/internal/m365/{graph_connector_onedrive_test_helper.go => onedrive/stub/stub.go} (59%) rename src/internal/m365/{graph_connector_onedrive_test.go => onedrive_test.go} (88%) delete mode 100644 src/internal/m365/sharepoint/list_info.go delete mode 100644 src/internal/m365/sharepoint/list_info_test.go rename src/internal/m365/sharepoint/{list.go => lists.go} (93%) rename src/internal/m365/sharepoint/{list_test.go => lists_test.go} (53%) rename src/internal/m365/sharepoint/{pageInfo.go => pages.go} (84%) rename src/internal/m365/sharepoint/{pageInfo_test.go => pages_test.go} (76%) rename src/internal/m365/{graph_connector_test_helper.go => stub/stub.go} (71%) diff --git a/src/cmd/factory/impl/common.go b/src/cmd/factory/impl/common.go index f1b863275..d1855b108 100644 --- a/src/cmd/factory/impl/common.go +++ b/src/cmd/factory/impl/common.go @@ -18,7 +18,9 @@ import ( "github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/m365" exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock" + odStub "github.com/alcionai/corso/src/internal/m365/onedrive/stub" "github.com/alcionai/corso/src/internal/m365/resource" + m365Stub "github.com/alcionai/corso/src/internal/m365/stub" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/pkg/account" @@ -84,14 +86,14 @@ func generateAndRestoreItems( items: items, }} - dest := control.DefaultRestoreConfig(dttm.SafeForTesting) - dest.Location = destFldr - print.Infof(ctx, "Restoring to folder %s", dest.Location) + restoreCfg := 
control.DefaultRestoreConfig(dttm.SafeForTesting) + restoreCfg.Location = destFldr + print.Infof(ctx, "Restoring to folder %s", restoreCfg.Location) dataColls, err := buildCollections( service, tenantID, userID, - dest, + restoreCfg, collections) if err != nil { return nil, err @@ -99,7 +101,7 @@ func generateAndRestoreItems( print.Infof(ctx, "Generating %d %s items in %s\n", howMany, cat, Destination) - return ctrl.ConsumeRestoreCollections(ctx, version.Backup, sel, dest, opts, dataColls, errs) + return ctrl.ConsumeRestoreCollections(ctx, version.Backup, sel, restoreCfg, opts, dataColls, errs) } // ------------------------------------------------------------------------------------------ @@ -108,7 +110,7 @@ func generateAndRestoreItems( func getControllerAndVerifyResourceOwner( ctx context.Context, - rc resource.Category, + resourceCat resource.Category, resourceOwner string, ) ( *m365.Controller, @@ -133,7 +135,7 @@ func getControllerAndVerifyResourceOwner( return nil, account.Account{}, nil, clues.Wrap(err, "finding m365 account details") } - ctrl, err := m365.NewController(ctx, acct, rc) + ctrl, err := m365.NewController(ctx, acct, resourceCat) if err != nil { return nil, account.Account{}, nil, clues.Wrap(err, "connecting to graph api") } @@ -164,7 +166,7 @@ type collection struct { func buildCollections( service path.ServiceType, tenant, user string, - dest control.RestoreConfig, + restoreCfg control.RestoreConfig, colls []collection, ) ([]data.RestoreCollection, error) { collections := make([]data.RestoreCollection, 0, len(colls)) @@ -225,9 +227,9 @@ func generateAndRestoreDriveItems( ctx, flush := tester.NewContext(nil) defer flush() - dest := control.DefaultRestoreConfig(dttm.SafeForTesting) - dest.Location = destFldr - print.Infof(ctx, "Restoring to folder %s", dest.Location) + restoreCfg := control.DefaultRestoreConfig(dttm.SafeForTesting) + restoreCfg.Location = destFldr + print.Infof(ctx, "Restoring to folder %s", restoreCfg.Location) var driveID string @@ -249,7 +251,7 @@ func generateAndRestoreDriveItems( } var ( - cols []m365.OnedriveColInfo + cols []odStub.ColInfo rootPath = []string{"drives", driveID, "root:"} folderAPath = []string{"drives", driveID, "root:", folderAName} @@ -263,15 +265,15 @@ func generateAndRestoreDriveItems( ) for i := 0; i < count; i++ { - col := []m365.OnedriveColInfo{ + col := []odStub.ColInfo{ // basic folder and file creation { PathElements: rootPath, - Files: []m365.ItemData{ + Files: []odStub.ItemData{ { Name: fmt.Sprintf("file-1st-count-%d-at-%s", i, currentTime), Data: fileAData, - Perms: m365.PermData{ + Perms: odStub.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: writePerm, @@ -282,13 +284,13 @@ func generateAndRestoreDriveItems( Data: fileBData, }, }, - Folders: []m365.ItemData{ + Folders: []odStub.ItemData{ { Name: folderBName, }, { Name: folderAName, - Perms: m365.PermData{ + Perms: odStub.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: readPerm, @@ -296,7 +298,7 @@ func generateAndRestoreDriveItems( }, { Name: folderCName, - Perms: m365.PermData{ + Perms: odStub.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: readPerm, @@ -308,18 +310,18 @@ func generateAndRestoreDriveItems( // a folder that has permissions with an item in the folder with // the different permissions. 
PathElements: folderAPath, - Files: []m365.ItemData{ + Files: []odStub.ItemData{ { Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime), Data: fileEData, - Perms: m365.PermData{ + Perms: odStub.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: writePerm, }, }, }, - Perms: m365.PermData{ + Perms: odStub.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: readPerm, @@ -329,13 +331,13 @@ func generateAndRestoreDriveItems( // a folder that has permissions with an item in the folder with // no permissions. PathElements: folderCPath, - Files: []m365.ItemData{ + Files: []odStub.ItemData{ { Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime), Data: fileAData, }, }, - Perms: m365.PermData{ + Perms: odStub.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: readPerm, @@ -343,23 +345,23 @@ func generateAndRestoreDriveItems( }, { PathElements: folderBPath, - Files: []m365.ItemData{ + Files: []odStub.ItemData{ { // restoring a file in a non-root folder that doesn't inherit // permissions. Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime), Data: fileBData, - Perms: m365.PermData{ + Perms: odStub.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: writePerm, }, }, }, - Folders: []m365.ItemData{ + Folders: []odStub.ItemData{ { Name: folderAName, - Perms: m365.PermData{ + Perms: odStub.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: readPerm, @@ -372,7 +374,7 @@ func generateAndRestoreDriveItems( cols = append(cols, col...) } - input, err := m365.DataForInfo(service, cols, version.Backup) + input, err := odStub.DataForInfo(service, cols, version.Backup) if err != nil { return nil, err } @@ -389,7 +391,7 @@ func generateAndRestoreDriveItems( ToggleFeatures: control.Toggles{}, } - config := m365.ConfigInfo{ + config := m365Stub.ConfigInfo{ Opts: opts, Resource: resource.Users, Service: service, @@ -398,7 +400,7 @@ func generateAndRestoreDriveItems( RestoreCfg: tester.DefaultTestRestoreConfig(""), } - _, _, collections, _, err := m365.GetCollectionsAndExpected( + _, _, collections, _, err := m365Stub.GetCollectionsAndExpected( config, input, version.Backup) @@ -406,5 +408,5 @@ func generateAndRestoreDriveItems( return nil, err } - return ctrl.ConsumeRestoreCollections(ctx, version.Backup, sel, dest, opts, collections, errs) + return ctrl.ConsumeRestoreCollections(ctx, version.Backup, sel, restoreCfg, opts, collections, errs) } diff --git a/src/internal/m365/backup_test.go b/src/internal/m365/backup_test.go index 9429be012..03543061b 100644 --- a/src/internal/m365/backup_test.go +++ b/src/internal/m365/backup_test.go @@ -66,7 +66,7 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() { selUsers := []string{suite.user} - ctrl := loadController(ctx, suite.T(), resource.Users) + ctrl := newController(ctx, suite.T(), resource.Users) tests := []struct { name string getSelector func(t *testing.T) selectors.Selector @@ -167,7 +167,7 @@ func (suite *DataCollectionIntgSuite) TestDataCollections_invalidResourceOwner() defer flush() owners := []string{"snuffleupagus"} - ctrl := loadController(ctx, suite.T(), resource.Users) + ctrl := newController(ctx, suite.T(), resource.Users) tests := []struct { name string getSelector func(t *testing.T) selectors.Selector @@ -253,7 +253,7 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() { defer flush() selSites := []string{suite.site} - ctrl := loadController(ctx, suite.T(), resource.Sites) + ctrl := newController(ctx, suite.T(), 
resource.Sites) tests := []struct { name string expected int @@ -348,7 +348,7 @@ func (suite *SPCollectionIntgSuite) SetupSuite() { ctx, flush := tester.NewContext(suite.T()) defer flush() - suite.connector = loadController(ctx, suite.T(), resource.Sites) + suite.connector = newController(ctx, suite.T(), resource.Sites) suite.user = tester.M365UserID(suite.T()) tester.LogTimeOfTest(suite.T()) @@ -362,7 +362,7 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() { var ( siteID = tester.M365SiteID(t) - ctrl = loadController(ctx, t, resource.Sites) + ctrl = newController(ctx, t, resource.Sites) siteIDs = []string{siteID} ) @@ -409,7 +409,7 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Lists() { var ( siteID = tester.M365SiteID(t) - ctrl = loadController(ctx, t, resource.Sites) + ctrl = newController(ctx, t, resource.Sites) siteIDs = []string{siteID} ) diff --git a/src/internal/m365/controller_test.go b/src/internal/m365/controller_test.go index e7be0ae5f..2d63e4261 100644 --- a/src/internal/m365/controller_test.go +++ b/src/internal/m365/controller_test.go @@ -17,6 +17,7 @@ import ( exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock" "github.com/alcionai/corso/src/internal/m365/mock" "github.com/alcionai/corso/src/internal/m365/resource" + "github.com/alcionai/corso/src/internal/m365/stub" "github.com/alcionai/corso/src/internal/m365/support" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/version" @@ -24,6 +25,7 @@ import ( "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/selectors" + selTD "github.com/alcionai/corso/src/pkg/selectors/testdata" ) // --------------------------------------------------------------------------- @@ -280,7 +282,7 @@ func (suite *ControllerIntegrationSuite) SetupSuite() { ctx, flush := tester.NewContext(t) defer flush() - suite.ctrl = loadController(ctx, t, resource.Users) + suite.ctrl = newController(ctx, t, resource.Users) suite.user = tester.M365UserID(t) suite.secondaryUser = tester.SecondaryM365UserID(t) @@ -407,7 +409,7 @@ func (suite *ControllerIntegrationSuite) TestEmptyCollections() { func runRestore( t *testing.T, ctx context.Context, //revive:disable-line:context-as-argument - config ConfigInfo, + config stub.ConfigInfo, backupVersion int, collections []data.RestoreCollection, numRestoreItems int, @@ -419,7 +421,7 @@ func runRestore( start := time.Now() - restoreCtrl := loadController(ctx, t, config.Resource) + restoreCtrl := newController(ctx, t, config.Resource) restoreSel := getSelectorWith(t, config.Service, config.ResourceOwners, true) deets, err := restoreCtrl.ConsumeRestoreCollections( ctx, @@ -450,11 +452,11 @@ func runRestore( func runBackupAndCompare( t *testing.T, ctx context.Context, //revive:disable-line:context-as-argument - config ConfigInfo, + config stub.ConfigInfo, expectedData map[string]map[string][]byte, totalItems int, totalKopiaItems int, - inputCollections []ColInfo, + inputCollections []stub.ColInfo, ) { t.Helper() @@ -481,7 +483,7 @@ func runBackupAndCompare( nameToID[ro] = ro } - backupCtrl := loadController(ctx, t, config.Resource) + backupCtrl := newController(ctx, t, config.Resource) backupCtrl.IDNameLookup = inMock.NewCache(idToName, nameToID) backupSel := backupSelectorForExpected(t, config.Service, expectedDests) @@ -531,7 +533,7 @@ func runRestoreBackupTest( ctx, flush := tester.NewContext(t) defer flush() - config := ConfigInfo{ + config := 
stub.ConfigInfo{ Opts: opts, Resource: test.resourceCat, Service: test.service, @@ -540,7 +542,7 @@ func runRestoreBackupTest( RestoreCfg: tester.DefaultTestRestoreConfig(""), } - totalItems, totalKopiaItems, collections, expectedData, err := GetCollectionsAndExpected( + totalItems, totalKopiaItems, collections, expectedData, err := stub.GetCollectionsAndExpected( config, test.collections, version.Backup) @@ -576,16 +578,16 @@ func runRestoreTestWithVersion( ctx, flush := tester.NewContext(t) defer flush() - config := ConfigInfo{ + config := stub.ConfigInfo{ Opts: opts, - Resource: test.resource, + Resource: test.resourceCat, Service: test.service, Tenant: tenant, ResourceOwners: resourceOwners, RestoreCfg: tester.DefaultTestRestoreConfig(""), } - totalItems, _, collections, _, err := GetCollectionsAndExpected( + totalItems, _, collections, _, err := stub.GetCollectionsAndExpected( config, test.collectionsPrevious, test.backupVersion) @@ -613,16 +615,16 @@ func runRestoreBackupTestVersions( ctx, flush := tester.NewContext(t) defer flush() - config := ConfigInfo{ + config := stub.ConfigInfo{ Opts: opts, - Resource: test.resource, + Resource: test.resourceCat, Service: test.service, Tenant: tenant, ResourceOwners: resourceOwners, RestoreCfg: tester.DefaultTestRestoreConfig(""), } - totalItems, _, collections, _, err := GetCollectionsAndExpected( + totalItems, _, collections, _, err := stub.GetCollectionsAndExpected( config, test.collectionsPrevious, test.backupVersion) @@ -637,7 +639,7 @@ func runRestoreBackupTestVersions( totalItems) // Get expected output for new version. - totalItems, totalKopiaItems, _, expectedData, err := GetCollectionsAndExpected( + totalItems, totalKopiaItems, _, expectedData, err := stub.GetCollectionsAndExpected( config, test.collectionsLatest, version.Backup) @@ -662,24 +664,24 @@ func (suite *ControllerIntegrationSuite) TestRestoreAndBackup() { name: "EmailsWithAttachments", service: path.ExchangeService, resourceCat: resource.Users, - collections: []ColInfo{ + collections: []stub.ColInfo{ { PathElements: []string{"Inbox"}, Category: path.EmailCategory, - Items: []ItemInfo{ + Items: []stub.ItemInfo{ { - name: "someencodeditemID", - data: exchMock.MessageWithDirectAttachment( + Name: "someencodeditemID", + Data: exchMock.MessageWithDirectAttachment( subjectText + "-1", ), - lookupKey: subjectText + "-1", + LookupKey: subjectText + "-1", }, { - name: "someencodeditemID2", - data: exchMock.MessageWithTwoAttachments( + Name: "someencodeditemID2", + Data: exchMock.MessageWithTwoAttachments( subjectText + "-2", ), - lookupKey: subjectText + "-2", + LookupKey: subjectText + "-2", }, }, }, @@ -689,73 +691,73 @@ func (suite *ControllerIntegrationSuite) TestRestoreAndBackup() { name: "MultipleEmailsMultipleFolders", service: path.ExchangeService, resourceCat: resource.Users, - collections: []ColInfo{ + collections: []stub.ColInfo{ { PathElements: []string{"Inbox"}, Category: path.EmailCategory, - Items: []ItemInfo{ + Items: []stub.ItemInfo{ { - name: "someencodeditemID", - data: exchMock.MessageWithBodyBytes( + Name: "someencodeditemID", + Data: exchMock.MessageWithBodyBytes( subjectText+"-1", bodyText+" 1.", bodyText+" 1.", ), - lookupKey: subjectText + "-1", + LookupKey: subjectText + "-1", }, }, }, { PathElements: []string{"Work"}, Category: path.EmailCategory, - Items: []ItemInfo{ + Items: []stub.ItemInfo{ { - name: "someencodeditemID2", - data: exchMock.MessageWithBodyBytes( + Name: "someencodeditemID2", + Data: exchMock.MessageWithBodyBytes( subjectText+"-2", 
bodyText+" 2.", bodyText+" 2.", ), - lookupKey: subjectText + "-2", + LookupKey: subjectText + "-2", }, { - name: "someencodeditemID3", - data: exchMock.MessageWithBodyBytes( + Name: "someencodeditemID3", + Data: exchMock.MessageWithBodyBytes( subjectText+"-3", bodyText+" 3.", bodyText+" 3.", ), - lookupKey: subjectText + "-3", + LookupKey: subjectText + "-3", }, }, }, { PathElements: []string{"Work", "Inbox"}, Category: path.EmailCategory, - Items: []ItemInfo{ + Items: []stub.ItemInfo{ { - name: "someencodeditemID4", - data: exchMock.MessageWithBodyBytes( + Name: "someencodeditemID4", + Data: exchMock.MessageWithBodyBytes( subjectText+"-4", bodyText+" 4.", bodyText+" 4.", ), - lookupKey: subjectText + "-4", + LookupKey: subjectText + "-4", }, }, }, { PathElements: []string{"Work", "Inbox", "Work"}, Category: path.EmailCategory, - Items: []ItemInfo{ + Items: []stub.ItemInfo{ { - name: "someencodeditemID5", - data: exchMock.MessageWithBodyBytes( + Name: "someencodeditemID5", + Data: exchMock.MessageWithBodyBytes( subjectText+"-5", bodyText+" 5.", bodyText+" 5.", ), - lookupKey: subjectText + "-5", + LookupKey: subjectText + "-5", }, }, }, @@ -765,25 +767,25 @@ func (suite *ControllerIntegrationSuite) TestRestoreAndBackup() { name: "MultipleContactsSingleFolder", service: path.ExchangeService, resourceCat: resource.Users, - collections: []ColInfo{ + collections: []stub.ColInfo{ { PathElements: []string{"Contacts"}, Category: path.ContactsCategory, - Items: []ItemInfo{ + Items: []stub.ItemInfo{ { - name: "someencodeditemID", - data: exchMock.ContactBytes("Ghimley"), - lookupKey: "Ghimley", + Name: "someencodeditemID", + Data: exchMock.ContactBytes("Ghimley"), + LookupKey: "Ghimley", }, { - name: "someencodeditemID2", - data: exchMock.ContactBytes("Irgot"), - lookupKey: "Irgot", + Name: "someencodeditemID2", + Data: exchMock.ContactBytes("Irgot"), + LookupKey: "Irgot", }, { - name: "someencodeditemID3", - data: exchMock.ContactBytes("Jannes"), - lookupKey: "Jannes", + Name: "someencodeditemID3", + Data: exchMock.ContactBytes("Jannes"), + LookupKey: "Jannes", }, }, }, @@ -793,41 +795,41 @@ func (suite *ControllerIntegrationSuite) TestRestoreAndBackup() { name: "MultipleContactsMultipleFolders", service: path.ExchangeService, resourceCat: resource.Users, - collections: []ColInfo{ + collections: []stub.ColInfo{ { PathElements: []string{"Work"}, Category: path.ContactsCategory, - Items: []ItemInfo{ + Items: []stub.ItemInfo{ { - name: "someencodeditemID", - data: exchMock.ContactBytes("Ghimley"), - lookupKey: "Ghimley", + Name: "someencodeditemID", + Data: exchMock.ContactBytes("Ghimley"), + LookupKey: "Ghimley", }, { - name: "someencodeditemID2", - data: exchMock.ContactBytes("Irgot"), - lookupKey: "Irgot", + Name: "someencodeditemID2", + Data: exchMock.ContactBytes("Irgot"), + LookupKey: "Irgot", }, { - name: "someencodeditemID3", - data: exchMock.ContactBytes("Jannes"), - lookupKey: "Jannes", + Name: "someencodeditemID3", + Data: exchMock.ContactBytes("Jannes"), + LookupKey: "Jannes", }, }, }, { PathElements: []string{"Personal"}, Category: path.ContactsCategory, - Items: []ItemInfo{ + Items: []stub.ItemInfo{ { - name: "someencodeditemID4", - data: exchMock.ContactBytes("Argon"), - lookupKey: "Argon", + Name: "someencodeditemID4", + Data: exchMock.ContactBytes("Argon"), + LookupKey: "Argon", }, { - name: "someencodeditemID5", - data: exchMock.ContactBytes("Bernard"), - lookupKey: "Bernard", + Name: "someencodeditemID5", + Data: exchMock.ContactBytes("Bernard"), + LookupKey: "Bernard", }, }, }, 
@@ -926,26 +928,26 @@ func (suite *ControllerIntegrationSuite) TestMultiFolderBackupDifferentNames() { name: "Contacts", service: path.ExchangeService, resourceCat: resource.Users, - collections: []ColInfo{ + collections: []stub.ColInfo{ { PathElements: []string{"Work"}, Category: path.ContactsCategory, - Items: []ItemInfo{ + Items: []stub.ItemInfo{ { - name: "someencodeditemID", - data: exchMock.ContactBytes("Ghimley"), - lookupKey: "Ghimley", + Name: "someencodeditemID", + Data: exchMock.ContactBytes("Ghimley"), + LookupKey: "Ghimley", }, }, }, { PathElements: []string{"Personal"}, Category: path.ContactsCategory, - Items: []ItemInfo{ + Items: []stub.ItemInfo{ { - name: "someencodeditemID2", - data: exchMock.ContactBytes("Irgot"), - lookupKey: "Irgot", + Name: "someencodeditemID2", + Data: exchMock.ContactBytes("Irgot"), + LookupKey: "Irgot", }, }, }, @@ -1004,12 +1006,12 @@ func (suite *ControllerIntegrationSuite) TestMultiFolderBackupDifferentNames() { }, }) - totalItems, _, collections, expectedData, err := collectionsForInfo( + totalItems, _, collections, expectedData, err := stub.CollectionsForInfo( test.service, suite.ctrl.tenant, suite.user, restoreCfg, - []ColInfo{collection}, + []stub.ColInfo{collection}, version.Backup, ) require.NoError(t, err) @@ -1027,7 +1029,7 @@ func (suite *ControllerIntegrationSuite) TestMultiFolderBackupDifferentNames() { restoreCfg.Location, ) - restoreCtrl := loadController(ctx, t, test.resourceCat) + restoreCtrl := newController(ctx, t, test.resourceCat) deets, err := restoreCtrl.ConsumeRestoreCollections( ctx, version.Backup, @@ -1057,7 +1059,7 @@ func (suite *ControllerIntegrationSuite) TestMultiFolderBackupDifferentNames() { // Run a backup and compare its output with what we put in. - backupCtrl := loadController(ctx, t, test.resourceCat) + backupCtrl := newController(ctx, t, test.resourceCat) backupSel := backupSelectorForExpected(t, test.service, expectedDests) t.Log("Selective backup of", backupSel) @@ -1079,7 +1081,7 @@ func (suite *ControllerIntegrationSuite) TestMultiFolderBackupDifferentNames() { t.Log("Backup enumeration complete") - ci := ConfigInfo{ + ci := stub.ConfigInfo{ Opts: control.Options{RestorePermissions: true}, // Alright to be empty, needed for OneDrive. 
RestoreCfg: control.RestoreConfig{}, @@ -1105,15 +1107,15 @@ func (suite *ControllerIntegrationSuite) TestRestoreAndBackup_largeMailAttachmen name: "EmailsWithLargeAttachments", service: path.ExchangeService, resourceCat: resource.Users, - collections: []ColInfo{ + collections: []stub.ColInfo{ { PathElements: []string{"Inbox"}, Category: path.EmailCategory, - Items: []ItemInfo{ + Items: []stub.ItemInfo{ { - name: "35mbAttachment", - data: exchMock.MessageWithSizedAttachment(subjectText, 35), - lookupKey: subjectText, + Name: "35mbAttachment", + Data: exchMock.MessageWithSizedAttachment(subjectText, 35), + LookupKey: subjectText, }, }, }, @@ -1206,7 +1208,7 @@ func (suite *ControllerIntegrationSuite) TestBackup_CreatesPrefixCollections() { defer flush() var ( - backupCtrl = loadController(ctx, t, test.resourceCat) + backupCtrl = newController(ctx, t, test.resourceCat) backupSel = test.selectorFunc(t) errs = fault.New(true) start = time.Now() @@ -1270,3 +1272,166 @@ func (suite *ControllerIntegrationSuite) TestBackup_CreatesPrefixCollections() { }) } } + +type DisconnectedUnitSuite struct { + tester.Suite +} + +func TestDisconnectedUnitSuite(t *testing.T) { + s := &DisconnectedUnitSuite{ + Suite: tester.NewUnitSuite(t), + } + + suite.Run(t, s) +} + +func statusTestTask( + t *testing.T, + ctrl *Controller, + objects, success, folder int, +) { + ctx, flush := tester.NewContext(t) + defer flush() + + status := support.CreateStatus( + ctx, + support.Restore, folder, + support.CollectionMetrics{ + Objects: objects, + Successes: success, + Bytes: 0, + }, + "statusTestTask") + ctrl.UpdateStatus(status) +} + +func (suite *DisconnectedUnitSuite) TestController_Status() { + t := suite.T() + ctrl := Controller{wg: &sync.WaitGroup{}} + + // Two tasks + ctrl.incrementAwaitingMessages() + ctrl.incrementAwaitingMessages() + + // Each helper task processes 4 objects, 1 success, 3 errors, 1 folders + go statusTestTask(t, &ctrl, 4, 1, 1) + go statusTestTask(t, &ctrl, 4, 1, 1) + + stats := ctrl.Wait() + + assert.NotEmpty(t, ctrl.PrintableStatus()) + // Expect 8 objects + assert.Equal(t, 8, stats.Objects) + // Expect 2 success + assert.Equal(t, 2, stats.Successes) + // Expect 2 folders + assert.Equal(t, 2, stats.Folders) +} + +func (suite *DisconnectedUnitSuite) TestVerifyBackupInputs_allServices() { + sites := []string{"abc.site.foo", "bar.site.baz"} + + tests := []struct { + name string + excludes func(t *testing.T) selectors.Selector + filters func(t *testing.T) selectors.Selector + includes func(t *testing.T) selectors.Selector + checkError assert.ErrorAssertionFunc + }{ + { + name: "Valid User", + checkError: assert.NoError, + excludes: func(t *testing.T) selectors.Selector { + sel := selectors.NewOneDriveBackup([]string{"elliotReid@someHospital.org", "foo@SomeCompany.org"}) + sel.Exclude(selTD.OneDriveBackupFolderScope(sel)) + sel.DiscreteOwner = "elliotReid@someHospital.org" + return sel.Selector + }, + filters: func(t *testing.T) selectors.Selector { + sel := selectors.NewOneDriveBackup([]string{"elliotReid@someHospital.org", "foo@SomeCompany.org"}) + sel.Filter(selTD.OneDriveBackupFolderScope(sel)) + sel.DiscreteOwner = "elliotReid@someHospital.org" + return sel.Selector + }, + includes: func(t *testing.T) selectors.Selector { + sel := selectors.NewOneDriveBackup([]string{"elliotReid@someHospital.org", "foo@SomeCompany.org"}) + sel.Include(selTD.OneDriveBackupFolderScope(sel)) + sel.DiscreteOwner = "elliotReid@someHospital.org" + return sel.Selector + }, + }, + { + name: "Invalid User", + checkError: 
assert.NoError, + excludes: func(t *testing.T) selectors.Selector { + sel := selectors.NewOneDriveBackup([]string{"foo@SomeCompany.org"}) + sel.Exclude(selTD.OneDriveBackupFolderScope(sel)) + return sel.Selector + }, + filters: func(t *testing.T) selectors.Selector { + sel := selectors.NewOneDriveBackup([]string{"foo@SomeCompany.org"}) + sel.Filter(selTD.OneDriveBackupFolderScope(sel)) + return sel.Selector + }, + includes: func(t *testing.T) selectors.Selector { + sel := selectors.NewOneDriveBackup([]string{"foo@SomeCompany.org"}) + sel.Include(selTD.OneDriveBackupFolderScope(sel)) + return sel.Selector + }, + }, + { + name: "valid sites", + checkError: assert.NoError, + excludes: func(t *testing.T) selectors.Selector { + sel := selectors.NewSharePointBackup([]string{"abc.site.foo", "bar.site.baz"}) + sel.DiscreteOwner = "abc.site.foo" + sel.Exclude(sel.AllData()) + return sel.Selector + }, + filters: func(t *testing.T) selectors.Selector { + sel := selectors.NewSharePointBackup([]string{"abc.site.foo", "bar.site.baz"}) + sel.DiscreteOwner = "abc.site.foo" + sel.Filter(sel.AllData()) + return sel.Selector + }, + includes: func(t *testing.T) selectors.Selector { + sel := selectors.NewSharePointBackup([]string{"abc.site.foo", "bar.site.baz"}) + sel.DiscreteOwner = "abc.site.foo" + sel.Include(sel.AllData()) + return sel.Selector + }, + }, + { + name: "invalid sites", + checkError: assert.Error, + excludes: func(t *testing.T) selectors.Selector { + sel := selectors.NewSharePointBackup([]string{"fnords.smarfs.brawnhilda"}) + sel.Exclude(sel.AllData()) + return sel.Selector + }, + filters: func(t *testing.T) selectors.Selector { + sel := selectors.NewSharePointBackup([]string{"fnords.smarfs.brawnhilda"}) + sel.Filter(sel.AllData()) + return sel.Selector + }, + includes: func(t *testing.T) selectors.Selector { + sel := selectors.NewSharePointBackup([]string{"fnords.smarfs.brawnhilda"}) + sel.Include(sel.AllData()) + return sel.Selector + }, + }, + } + + for _, test := range tests { + suite.Run(test.name, func() { + t := suite.T() + + err := verifyBackupInputs(test.excludes(t), sites) + test.checkError(t, err, clues.ToCore(err)) + err = verifyBackupInputs(test.filters(t), sites) + test.checkError(t, err, clues.ToCore(err)) + err = verifyBackupInputs(test.includes(t), sites) + test.checkError(t, err, clues.ToCore(err)) + }) + } +} diff --git a/src/internal/m365/exchange/contact_container_cache.go b/src/internal/m365/exchange/contacts_container_cache.go similarity index 100% rename from src/internal/m365/exchange/contact_container_cache.go rename to src/internal/m365/exchange/contacts_container_cache.go diff --git a/src/internal/m365/exchange/event_container_cache.go b/src/internal/m365/exchange/events_container_cache.go similarity index 100% rename from src/internal/m365/exchange/event_container_cache.go rename to src/internal/m365/exchange/events_container_cache.go diff --git a/src/internal/m365/graph_connector_disconnected_test.go b/src/internal/m365/graph_connector_disconnected_test.go deleted file mode 100644 index c2dee9d58..000000000 --- a/src/internal/m365/graph_connector_disconnected_test.go +++ /dev/null @@ -1,181 +0,0 @@ -package m365 - -import ( - "sync" - "testing" - - "github.com/alcionai/clues" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/suite" - - "github.com/alcionai/corso/src/internal/m365/support" - "github.com/alcionai/corso/src/internal/tester" - "github.com/alcionai/corso/src/pkg/selectors" - selTD 
"github.com/alcionai/corso/src/pkg/selectors/testdata" -) - -// --------------------------------------------------------------- -// Disconnected Test Section -// --------------------------------------------------------------- -type DisconnectedSuite struct { - tester.Suite -} - -func TestSuite(t *testing.T) { - s := &DisconnectedSuite{ - Suite: tester.NewUnitSuite(t), - } - - suite.Run(t, s) -} - -func statusTestTask( - t *testing.T, - ctrl *Controller, - objects, success, folder int, -) { - ctx, flush := tester.NewContext(t) - defer flush() - - status := support.CreateStatus( - ctx, - support.Restore, folder, - support.CollectionMetrics{ - Objects: objects, - Successes: success, - Bytes: 0, - }, - "statusTestTask") - ctrl.UpdateStatus(status) -} - -func (suite *DisconnectedSuite) TestController_Status() { - t := suite.T() - ctrl := Controller{wg: &sync.WaitGroup{}} - - // Two tasks - ctrl.incrementAwaitingMessages() - ctrl.incrementAwaitingMessages() - - // Each helper task processes 4 objects, 1 success, 3 errors, 1 folders - go statusTestTask(t, &ctrl, 4, 1, 1) - go statusTestTask(t, &ctrl, 4, 1, 1) - - stats := ctrl.Wait() - - assert.NotEmpty(t, ctrl.PrintableStatus()) - // Expect 8 objects - assert.Equal(t, 8, stats.Objects) - // Expect 2 success - assert.Equal(t, 2, stats.Successes) - // Expect 2 folders - assert.Equal(t, 2, stats.Folders) -} - -func (suite *DisconnectedSuite) TestVerifyBackupInputs_allServices() { - sites := []string{"abc.site.foo", "bar.site.baz"} - - tests := []struct { - name string - excludes func(t *testing.T) selectors.Selector - filters func(t *testing.T) selectors.Selector - includes func(t *testing.T) selectors.Selector - checkError assert.ErrorAssertionFunc - }{ - { - name: "Valid User", - checkError: assert.NoError, - excludes: func(t *testing.T) selectors.Selector { - sel := selectors.NewOneDriveBackup([]string{"elliotReid@someHospital.org", "foo@SomeCompany.org"}) - sel.Exclude(selTD.OneDriveBackupFolderScope(sel)) - sel.DiscreteOwner = "elliotReid@someHospital.org" - return sel.Selector - }, - filters: func(t *testing.T) selectors.Selector { - sel := selectors.NewOneDriveBackup([]string{"elliotReid@someHospital.org", "foo@SomeCompany.org"}) - sel.Filter(selTD.OneDriveBackupFolderScope(sel)) - sel.DiscreteOwner = "elliotReid@someHospital.org" - return sel.Selector - }, - includes: func(t *testing.T) selectors.Selector { - sel := selectors.NewOneDriveBackup([]string{"elliotReid@someHospital.org", "foo@SomeCompany.org"}) - sel.Include(selTD.OneDriveBackupFolderScope(sel)) - sel.DiscreteOwner = "elliotReid@someHospital.org" - return sel.Selector - }, - }, - { - name: "Invalid User", - checkError: assert.NoError, - excludes: func(t *testing.T) selectors.Selector { - sel := selectors.NewOneDriveBackup([]string{"foo@SomeCompany.org"}) - sel.Exclude(selTD.OneDriveBackupFolderScope(sel)) - return sel.Selector - }, - filters: func(t *testing.T) selectors.Selector { - sel := selectors.NewOneDriveBackup([]string{"foo@SomeCompany.org"}) - sel.Filter(selTD.OneDriveBackupFolderScope(sel)) - return sel.Selector - }, - includes: func(t *testing.T) selectors.Selector { - sel := selectors.NewOneDriveBackup([]string{"foo@SomeCompany.org"}) - sel.Include(selTD.OneDriveBackupFolderScope(sel)) - return sel.Selector - }, - }, - { - name: "valid sites", - checkError: assert.NoError, - excludes: func(t *testing.T) selectors.Selector { - sel := selectors.NewSharePointBackup([]string{"abc.site.foo", "bar.site.baz"}) - sel.DiscreteOwner = "abc.site.foo" - 
sel.Exclude(sel.AllData()) - return sel.Selector - }, - filters: func(t *testing.T) selectors.Selector { - sel := selectors.NewSharePointBackup([]string{"abc.site.foo", "bar.site.baz"}) - sel.DiscreteOwner = "abc.site.foo" - sel.Filter(sel.AllData()) - return sel.Selector - }, - includes: func(t *testing.T) selectors.Selector { - sel := selectors.NewSharePointBackup([]string{"abc.site.foo", "bar.site.baz"}) - sel.DiscreteOwner = "abc.site.foo" - sel.Include(sel.AllData()) - return sel.Selector - }, - }, - { - name: "invalid sites", - checkError: assert.Error, - excludes: func(t *testing.T) selectors.Selector { - sel := selectors.NewSharePointBackup([]string{"fnords.smarfs.brawnhilda"}) - sel.Exclude(sel.AllData()) - return sel.Selector - }, - filters: func(t *testing.T) selectors.Selector { - sel := selectors.NewSharePointBackup([]string{"fnords.smarfs.brawnhilda"}) - sel.Filter(sel.AllData()) - return sel.Selector - }, - includes: func(t *testing.T) selectors.Selector { - sel := selectors.NewSharePointBackup([]string{"fnords.smarfs.brawnhilda"}) - sel.Include(sel.AllData()) - return sel.Selector - }, - }, - } - - for _, test := range tests { - suite.Run(test.name, func() { - t := suite.T() - - err := verifyBackupInputs(test.excludes(t), sites) - test.checkError(t, err, clues.ToCore(err)) - err = verifyBackupInputs(test.filters(t), sites) - test.checkError(t, err, clues.ToCore(err)) - err = verifyBackupInputs(test.includes(t), sites) - test.checkError(t, err, clues.ToCore(err)) - }) - } -} diff --git a/src/internal/m365/graph_connector_helper_test.go b/src/internal/m365/helper_test.go similarity index 98% rename from src/internal/m365/graph_connector_helper_test.go rename to src/internal/m365/helper_test.go index 1fc1573c7..fe1de81f8 100644 --- a/src/internal/m365/graph_connector_helper_test.go +++ b/src/internal/m365/helper_test.go @@ -19,7 +19,9 @@ import ( "github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/m365/onedrive" "github.com/alcionai/corso/src/internal/m365/onedrive/metadata" + odStub "github.com/alcionai/corso/src/internal/m365/onedrive/stub" "github.com/alcionai/corso/src/internal/m365/resource" + m365Stub "github.com/alcionai/corso/src/internal/m365/stub" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/path" @@ -102,15 +104,15 @@ func testElementsMatch[T any]( type restoreBackupInfo struct { name string service path.ServiceType - collections []ColInfo + collections []m365Stub.ColInfo resourceCat resource.Category } type restoreBackupInfoMultiVersion struct { service path.ServiceType - collectionsLatest []ColInfo - collectionsPrevious []ColInfo - resource resource.Category + collectionsLatest []m365Stub.ColInfo + collectionsPrevious []m365Stub.ColInfo + resourceCat resource.Category backupVersion int } @@ -686,7 +688,7 @@ func compareDriveItem( t *testing.T, expected map[string][]byte, item data.Stream, - config ConfigInfo, + config m365Stub.ConfigInfo, rootDir bool, ) bool { // Skip Drive permissions in the folder that used to be the root. 
We don't @@ -793,7 +795,7 @@ func compareDriveItem( return true } - var fileData testOneDriveData + var fileData odStub.FileData err = json.Unmarshal(buf, &fileData) if !assert.NoError(t, err, "unmarshalling file data for file", name, clues.ToCore(err)) { @@ -829,7 +831,7 @@ func compareItem( service path.ServiceType, category path.CategoryType, item data.Stream, - config ConfigInfo, + config m365Stub.ConfigInfo, rootDir bool, ) bool { if mt, ok := item.(data.StreamModTime); ok { @@ -923,7 +925,7 @@ func checkCollections( expectedItems int, expected map[string]map[string][]byte, got []data.BackupCollection, - config ConfigInfo, + config m365Stub.ConfigInfo, ) int { collectionsWithItems := []data.BackupCollection{} @@ -985,7 +987,7 @@ func checkCollections( checkHasCollections(t, expected, collectionsWithItems) // Return how many metadata files were skipped so we can account for it in the - // check on Controller status. + // check on controller status. return skipped } @@ -1152,11 +1154,11 @@ func getSelectorWith( } } -func loadController(ctx context.Context, t *testing.T, r resource.Category) *Controller { +func newController(ctx context.Context, t *testing.T, r resource.Category) *Controller { a := tester.NewM365Account(t) - connector, err := NewController(ctx, a, r) + controller, err := NewController(ctx, a, r) require.NoError(t, err, clues.ToCore(err)) - return connector + return controller } diff --git a/src/internal/m365/mock/collection.go b/src/internal/m365/mock/collection.go new file mode 100644 index 000000000..e1d61db58 --- /dev/null +++ b/src/internal/m365/mock/collection.go @@ -0,0 +1,24 @@ +package mock + +import ( + "context" + + "github.com/alcionai/corso/src/internal/data" +) + +type RestoreCollection struct { + data.Collection + AuxItems map[string]data.Stream +} + +func (rc RestoreCollection) FetchItemByName( + ctx context.Context, + name string, +) (data.Stream, error) { + res := rc.AuxItems[name] + if res == nil { + return nil, data.ErrNotFound + } + + return res, nil +} diff --git a/src/internal/m365/onedrive/drive.go b/src/internal/m365/onedrive/item_collector.go similarity index 100% rename from src/internal/m365/onedrive/drive.go rename to src/internal/m365/onedrive/item_collector.go diff --git a/src/internal/m365/onedrive/drive_test.go b/src/internal/m365/onedrive/item_collector_test.go similarity index 98% rename from src/internal/m365/onedrive/drive_test.go rename to src/internal/m365/onedrive/item_collector_test.go index 348722b78..65e9bf5fe 100644 --- a/src/internal/m365/onedrive/drive_test.go +++ b/src/internal/m365/onedrive/item_collector_test.go @@ -28,13 +28,12 @@ import ( "github.com/alcionai/corso/src/pkg/services/m365/api/mock" ) -// Unit tests -type OneDriveUnitSuite struct { +type ItemCollectorUnitSuite struct { tester.Suite } func TestOneDriveUnitSuite(t *testing.T) { - suite.Run(t, &OneDriveUnitSuite{Suite: tester.NewUnitSuite(t)}) + suite.Run(t, &ItemCollectorUnitSuite{Suite: tester.NewUnitSuite(t)}) } const ( @@ -51,7 +50,7 @@ func odErr(code string) *odataerrors.ODataError { return odErr } -func (suite *OneDriveUnitSuite) TestDrives() { +func (suite *ItemCollectorUnitSuite) TestDrives() { t := suite.T() ctx, flush := tester.NewContext(t) diff --git a/src/internal/m365/graph_connector_onedrive_test_helper.go b/src/internal/m365/onedrive/stub/stub.go similarity index 59% rename from src/internal/m365/graph_connector_onedrive_test_helper.go rename to src/internal/m365/onedrive/stub/stub.go index 77acc1b7d..44590d5a1 100644 --- 
a/src/internal/m365/graph_connector_onedrive_test_helper.go +++ b/src/internal/m365/onedrive/stub/stub.go @@ -1,4 +1,4 @@ -package m365 +package stub import ( "encoding/json" @@ -6,11 +6,10 @@ import ( "github.com/alcionai/clues" "github.com/google/uuid" - "golang.org/x/exp/maps" - "github.com/alcionai/corso/src/internal/data" odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts" "github.com/alcionai/corso/src/internal/m365/onedrive/metadata" + m365Stub "github.com/alcionai/corso/src/internal/m365/stub" "github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/pkg/path" ) @@ -61,59 +60,59 @@ type ItemData struct { Perms PermData } -type OnedriveColInfo struct { +type ColInfo struct { PathElements []string Perms PermData Files []ItemData Folders []ItemData } -type onedriveCollection struct { - service path.ServiceType +type collection struct { + Service path.ServiceType PathElements []string - items []ItemInfo - aux []ItemInfo - backupVersion int + Items []m365Stub.ItemInfo + Aux []m365Stub.ItemInfo + BackupVersion int } -func (c onedriveCollection) collection() ColInfo { +func (c collection) ColInfo() m365Stub.ColInfo { cat := path.FilesCategory - if c.service == path.SharePointService { + if c.Service == path.SharePointService { cat = path.LibrariesCategory } - return ColInfo{ + return m365Stub.ColInfo{ PathElements: c.PathElements, Category: cat, - Items: c.items, - AuxItems: c.aux, + Items: c.Items, + AuxItems: c.Aux, } } -func NewOneDriveCollection( +func NewCollection( service path.ServiceType, PathElements []string, backupVersion int, -) *onedriveCollection { - return &onedriveCollection{ - service: service, +) *collection { + return &collection{ + Service: service, PathElements: PathElements, - backupVersion: backupVersion, + BackupVersion: backupVersion, } } func DataForInfo( service path.ServiceType, - cols []OnedriveColInfo, + cols []ColInfo, backupVersion int, -) ([]ColInfo, error) { +) ([]m365Stub.ColInfo, error) { var ( - res []ColInfo + res []m365Stub.ColInfo err error ) for _, c := range cols { - onedriveCol := NewOneDriveCollection(service, c.PathElements, backupVersion) + onedriveCol := NewCollection(service, c.PathElements, backupVersion) for _, f := range c.Files { _, err = onedriveCol.withFile(f.Name, f.Data, f.Perms) @@ -134,18 +133,18 @@ func DataForInfo( return res, err } - res = append(res, onedriveCol.collection()) + res = append(res, onedriveCol.ColInfo()) } return res, nil } -func (c *onedriveCollection) withFile(name string, fileData []byte, perm PermData) (*onedriveCollection, error) { - switch c.backupVersion { +func (c *collection) withFile(name string, fileData []byte, perm PermData) (*collection, error) { + switch c.BackupVersion { case 0: // Lookups will occur using the most recent version of things so we need // the embedded file name to match that. 
- item, err := onedriveItemWithData( + item, err := FileWithData( name, name+metadata.DataFileSuffix, fileData) @@ -153,12 +152,12 @@ func (c *onedriveCollection) withFile(name string, fileData []byte, perm PermDat return c, err } - c.items = append(c.items, item) + c.Items = append(c.Items, item) // v1-5, early metadata design case version.OneDrive1DataAndMetaFiles, 2, version.OneDrive3IsMetaMarker, version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName: - items, err := onedriveItemWithData( + items, err := FileWithData( name+metadata.DataFileSuffix, name+metadata.DataFileSuffix, fileData) @@ -166,24 +165,24 @@ func (c *onedriveCollection) withFile(name string, fileData []byte, perm PermDat return c, err } - c.items = append(c.items, items) + c.Items = append(c.Items, items) - md, err := onedriveMetadata( + md, err := ItemWithMetadata( "", name+metadata.MetaFileSuffix, name+metadata.MetaFileSuffix, perm, - c.backupVersion >= versionPermissionSwitchedToID) + c.BackupVersion >= versionPermissionSwitchedToID) if err != nil { return c, err } - c.items = append(c.items, md) - c.aux = append(c.aux, md) + c.Items = append(c.Items, md) + c.Aux = append(c.Aux, md) // v6+ current metadata design case version.OneDrive6NameInMeta, version.OneDrive7LocationRef, version.All8MigrateUserPNToID: - item, err := onedriveItemWithData( + item, err := FileWithData( name+metadata.DataFileSuffix, name+metadata.DataFileSuffix, fileData) @@ -191,50 +190,50 @@ func (c *onedriveCollection) withFile(name string, fileData []byte, perm PermDat return c, err } - c.items = append(c.items, item) + c.Items = append(c.Items, item) - md, err := onedriveMetadata( + md, err := ItemWithMetadata( name, name+metadata.MetaFileSuffix, name, perm, - c.backupVersion >= versionPermissionSwitchedToID) + c.BackupVersion >= versionPermissionSwitchedToID) if err != nil { return c, err } - c.items = append(c.items, md) - c.aux = append(c.aux, md) + c.Items = append(c.Items, md) + c.Aux = append(c.Aux, md) default: - return c, clues.New(fmt.Sprintf("bad backup version. version %d", c.backupVersion)) + return c, clues.New(fmt.Sprintf("bad backup version. version %d", c.BackupVersion)) } return c, nil } -func (c *onedriveCollection) withFolder(name string, perm PermData) (*onedriveCollection, error) { - switch c.backupVersion { +func (c *collection) withFolder(name string, perm PermData) (*collection, error) { + switch c.BackupVersion { case 0, version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName, version.OneDrive6NameInMeta, version.OneDrive7LocationRef, version.All8MigrateUserPNToID: return c, nil case version.OneDrive1DataAndMetaFiles, 2, version.OneDrive3IsMetaMarker: - item, err := onedriveMetadata( + item, err := ItemWithMetadata( "", name+metadata.DirMetaFileSuffix, name+metadata.DirMetaFileSuffix, perm, - c.backupVersion >= versionPermissionSwitchedToID) + c.BackupVersion >= versionPermissionSwitchedToID) - c.items = append(c.items, item) + c.Items = append(c.Items, item) if err != nil { return c, err } default: - return c, clues.New(fmt.Sprintf("bad backup version.version %d", c.backupVersion)) + return c, clues.New(fmt.Sprintf("bad backup version.version %d", c.BackupVersion)) } return c, nil @@ -242,17 +241,17 @@ func (c *onedriveCollection) withFolder(name string, perm PermData) (*onedriveCo // withPermissions adds permissions to the folder represented by this // onedriveCollection. 
-func (c *onedriveCollection) withPermissions(perm PermData) (*onedriveCollection, error) { +func (c *collection) withPermissions(perm PermData) (*collection, error) { // These versions didn't store permissions for the folder or didn't store them // in the folder's collection. - if c.backupVersion < version.OneDrive4DirIncludesPermissions { + if c.BackupVersion < version.OneDrive4DirIncludesPermissions { return c, nil } name := c.PathElements[len(c.PathElements)-1] metaName := name - if c.backupVersion >= version.OneDrive5DirMetaNoName { + if c.BackupVersion >= version.OneDrive5DirMetaNoName { // We switched to just .dirmeta for metadata file names. metaName = "" } @@ -261,98 +260,63 @@ func (c *onedriveCollection) withPermissions(perm PermData) (*onedriveCollection return c, nil } - md, err := onedriveMetadata( + md, err := ItemWithMetadata( name, metaName+metadata.DirMetaFileSuffix, metaName+metadata.DirMetaFileSuffix, perm, - c.backupVersion >= versionPermissionSwitchedToID) + c.BackupVersion >= versionPermissionSwitchedToID) if err != nil { return c, err } - c.items = append(c.items, md) - c.aux = append(c.aux, md) + c.Items = append(c.Items, md) + c.Aux = append(c.Aux, md) return c, err } -type testOneDriveData struct { +type FileData struct { FileName string `json:"fileName,omitempty"` Data []byte `json:"data,omitempty"` } -func onedriveItemWithData( +func FileWithData( name, lookupKey string, fileData []byte, -) (ItemInfo, error) { - content := testOneDriveData{ +) (m365Stub.ItemInfo, error) { + content := FileData{ FileName: lookupKey, Data: fileData, } serialized, err := json.Marshal(content) if err != nil { - return ItemInfo{}, clues.Stack(err) + return m365Stub.ItemInfo{}, clues.Stack(err) } - return ItemInfo{ - name: name, - data: serialized, - lookupKey: lookupKey, + return m365Stub.ItemInfo{ + Name: name, + Data: serialized, + LookupKey: lookupKey, }, nil } -func onedriveMetadata( +func ItemWithMetadata( fileName, itemID, lookupKey string, perm PermData, permUseID bool, -) (ItemInfo, error) { +) (m365Stub.ItemInfo, error) { testMeta := getMetadata(fileName, perm, permUseID) testMetaJSON, err := json.Marshal(testMeta) if err != nil { - return ItemInfo{}, clues.Wrap(err, "marshalling metadata") + return m365Stub.ItemInfo{}, clues.Wrap(err, "marshalling metadata") } - return ItemInfo{ - name: itemID, - data: testMetaJSON, - lookupKey: lookupKey, + return m365Stub.ItemInfo{ + Name: itemID, + Data: testMetaJSON, + LookupKey: lookupKey, }, nil } - -func GetCollectionsAndExpected( - config ConfigInfo, - testCollections []ColInfo, - backupVersion int, -) (int, int, []data.RestoreCollection, map[string]map[string][]byte, error) { - var ( - collections []data.RestoreCollection - expectedData = map[string]map[string][]byte{} - totalItems = 0 - totalKopiaItems = 0 - ) - - for _, owner := range config.ResourceOwners { - numItems, kopiaItems, ownerCollections, userExpectedData, err := collectionsForInfo( - config.Service, - config.Tenant, - owner, - config.RestoreCfg, - testCollections, - backupVersion, - ) - if err != nil { - return totalItems, totalKopiaItems, collections, expectedData, err - } - - collections = append(collections, ownerCollections...) 
- totalItems += numItems - totalKopiaItems += kopiaItems - - maps.Copy(expectedData, userExpectedData) - } - - return totalItems, totalKopiaItems, collections, expectedData, nil -} diff --git a/src/internal/m365/graph_connector_onedrive_test.go b/src/internal/m365/onedrive_test.go similarity index 88% rename from src/internal/m365/graph_connector_onedrive_test.go rename to src/internal/m365/onedrive_test.go index 1a45992d3..ca821392f 100644 --- a/src/internal/m365/graph_connector_onedrive_test.go +++ b/src/internal/m365/onedrive_test.go @@ -16,6 +16,7 @@ import ( "github.com/alcionai/corso/src/internal/m365/graph" odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts" "github.com/alcionai/corso/src/internal/m365/onedrive/metadata" + "github.com/alcionai/corso/src/internal/m365/onedrive/stub" "github.com/alcionai/corso/src/internal/m365/resource" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/version" @@ -96,17 +97,17 @@ type oneDriveSuite interface { } type suiteInfoImpl struct { - ac api.Client - controller *Controller - resourceOwner string - resourceCat resource.Category - secondaryUser string - secondaryUserID string - service path.ServiceType - tertiaryUser string - tertiaryUserID string - user string - userID string + ac api.Client + controller *Controller + resourceOwner string + resourceCategory resource.Category + secondaryUser string + secondaryUserID string + service path.ServiceType + tertiaryUser string + tertiaryUserID string + user string + userID string } func NewSuiteInfoImpl( @@ -115,22 +116,22 @@ func NewSuiteInfoImpl( resourceOwner string, service path.ServiceType, ) suiteInfoImpl { - rc := resource.Users + rsc := resource.Users if service == path.SharePointService { - rc = resource.Sites + rsc = resource.Sites } - ctrl := loadController(ctx, t, rc) + ctrl := newController(ctx, t, rsc) return suiteInfoImpl{ - ac: ctrl.AC, - controller: ctrl, - resourceOwner: resourceOwner, - resourceCat: rc, - secondaryUser: tester.SecondaryM365UserID(t), - service: service, - tertiaryUser: tester.TertiaryM365UserID(t), - user: tester.M365UserID(t), + ac: ctrl.AC, + controller: ctrl, + resourceOwner: resourceOwner, + resourceCategory: rsc, + secondaryUser: tester.SecondaryM365UserID(t), + service: service, + tertiaryUser: tester.TertiaryM365UserID(t), + user: tester.M365UserID(t), } } @@ -163,7 +164,7 @@ func (si suiteInfoImpl) Service() path.ServiceType { } func (si suiteInfoImpl) Resource() resource.Category { - return si.resourceCat + return si.resourceCategory } // --------------------------------------------------------------------------- @@ -402,16 +403,16 @@ func testRestoreAndBackupMultipleFilesAndFoldersNoPermissions( folderBName, } - cols := []OnedriveColInfo{ + cols := []stub.ColInfo{ { PathElements: rootPath, - Files: []ItemData{ + Files: []stub.ItemData{ { Name: fileName, Data: fileAData, }, }, - Folders: []ItemData{ + Folders: []stub.ItemData{ { Name: folderAName, }, @@ -422,13 +423,13 @@ func testRestoreAndBackupMultipleFilesAndFoldersNoPermissions( }, { PathElements: folderAPath, - Files: []ItemData{ + Files: []stub.ItemData{ { Name: fileName, Data: fileBData, }, }, - Folders: []ItemData{ + Folders: []stub.ItemData{ { Name: folderBName, }, @@ -436,13 +437,13 @@ func testRestoreAndBackupMultipleFilesAndFoldersNoPermissions( }, { PathElements: subfolderBPath, - Files: []ItemData{ + Files: []stub.ItemData{ { Name: fileName, Data: fileCData, }, }, - Folders: []ItemData{ + Folders: []stub.ItemData{ { Name: folderAName, 
}, @@ -450,7 +451,7 @@ func testRestoreAndBackupMultipleFilesAndFoldersNoPermissions( }, { PathElements: subfolderAPath, - Files: []ItemData{ + Files: []stub.ItemData{ { Name: fileName, Data: fileDData, @@ -459,7 +460,7 @@ func testRestoreAndBackupMultipleFilesAndFoldersNoPermissions( }, { PathElements: folderBPath, - Files: []ItemData{ + Files: []stub.ItemData{ { Name: fileName, Data: fileEData, @@ -468,18 +469,18 @@ func testRestoreAndBackupMultipleFilesAndFoldersNoPermissions( }, } - expected, err := DataForInfo(suite.Service(), cols, version.Backup) + expected, err := stub.DataForInfo(suite.Service(), cols, version.Backup) require.NoError(suite.T(), err) for vn := startVersion; vn <= version.Backup; vn++ { suite.Run(fmt.Sprintf("Version%d", vn), func() { t := suite.T() - input, err := DataForInfo(suite.Service(), cols, vn) + input, err := stub.DataForInfo(suite.Service(), cols, vn) require.NoError(suite.T(), err) testData := restoreBackupInfoMultiVersion{ service: suite.Service(), - resource: suite.Resource(), + resourceCat: suite.Resource(), backupVersion: vn, collectionsPrevious: input, collectionsLatest: expected, @@ -549,15 +550,15 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) { folderCName, } - cols := []OnedriveColInfo{ + cols := []stub.ColInfo{ { PathElements: rootPath, - Files: []ItemData{ + Files: []stub.ItemData{ { // Test restoring a file that doesn't inherit permissions. Name: fileName, Data: fileAData, - Perms: PermData{ + Perms: stub.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: writePerm, @@ -570,13 +571,13 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) { Data: fileBData, }, }, - Folders: []ItemData{ + Folders: []stub.ItemData{ { Name: folderBName, }, { Name: folderAName, - Perms: PermData{ + Perms: stub.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: readPerm, @@ -584,7 +585,7 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) { }, { Name: folderCName, - Perms: PermData{ + Perms: stub.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: readPerm, @@ -594,23 +595,23 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) { }, { PathElements: folderBPath, - Files: []ItemData{ + Files: []stub.ItemData{ { // Test restoring a file in a non-root folder that doesn't inherit // permissions. Name: fileName, Data: fileBData, - Perms: PermData{ + Perms: stub.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: writePerm, }, }, }, - Folders: []ItemData{ + Folders: []stub.ItemData{ { Name: folderAName, - Perms: PermData{ + Perms: stub.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: readPerm, @@ -624,18 +625,18 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) { // // Tests a folder that has permissions with an item in the folder with // // the same permissions. 
// pathElements: subfolderAPath, - // files: []itemData{ + // files: []stub.ItemData{ // { // name: fileName, // data: fileDData, - // perms: permData{ + // perms: stub.PermData{ // user: secondaryUserName, // entityID: secondaryUserID, // roles: readPerm, // }, // }, // }, - // Perms: PermData{ + // Perms: stub.PermData{ // User: secondaryUserName, // EntityID: secondaryUserID, // Roles: readPerm, @@ -645,18 +646,18 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) { // Tests a folder that has permissions with an item in the folder with // the different permissions. PathElements: folderAPath, - Files: []ItemData{ + Files: []stub.ItemData{ { Name: fileName, Data: fileEData, - Perms: PermData{ + Perms: stub.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: writePerm, }, }, }, - Perms: PermData{ + Perms: stub.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: readPerm, @@ -666,13 +667,13 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) { // Tests a folder that has permissions with an item in the folder with // no permissions. PathElements: folderCPath, - Files: []ItemData{ + Files: []stub.ItemData{ { Name: fileName, Data: fileAData, }, }, - Perms: PermData{ + Perms: stub.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: readPerm, @@ -680,7 +681,7 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) { }, } - expected, err := DataForInfo(suite.Service(), cols, version.Backup) + expected, err := stub.DataForInfo(suite.Service(), cols, version.Backup) require.NoError(suite.T(), err) bss := suite.Service().String() @@ -690,12 +691,12 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) { // Ideally this can always be true or false and still // work, but limiting older versions to use emails so as // to validate that flow as well. - input, err := DataForInfo(suite.Service(), cols, vn) + input, err := stub.DataForInfo(suite.Service(), cols, vn) require.NoError(suite.T(), err) testData := restoreBackupInfoMultiVersion{ service: suite.Service(), - resource: suite.Resource(), + resourceCat: suite.Resource(), backupVersion: vn, collectionsPrevious: input, collectionsLatest: expected, @@ -730,18 +731,18 @@ func testPermissionsBackupAndNoRestore(suite oneDriveSuite, startVersion int) { suite.Service(), suite.ResourceOwner()) - inputCols := []OnedriveColInfo{ + inputCols := []stub.ColInfo{ { PathElements: []string{ odConsts.DrivesPathDir, driveID, odConsts.RootPathDir, }, - Files: []ItemData{ + Files: []stub.ItemData{ { Name: fileName, Data: fileAData, - Perms: PermData{ + Perms: stub.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: writePerm, @@ -751,14 +752,14 @@ func testPermissionsBackupAndNoRestore(suite oneDriveSuite, startVersion int) { }, } - expectedCols := []OnedriveColInfo{ + expectedCols := []stub.ColInfo{ { PathElements: []string{ odConsts.DrivesPathDir, driveID, odConsts.RootPathDir, }, - Files: []ItemData{ + Files: []stub.ItemData{ { // No permissions on the output since they weren't restored. 
Name: fileName, @@ -768,19 +769,19 @@ func testPermissionsBackupAndNoRestore(suite oneDriveSuite, startVersion int) { }, } - expected, err := DataForInfo(suite.Service(), expectedCols, version.Backup) + expected, err := stub.DataForInfo(suite.Service(), expectedCols, version.Backup) require.NoError(suite.T(), err) bss := suite.Service().String() for vn := startVersion; vn <= version.Backup; vn++ { suite.Run(fmt.Sprintf("%s-Version%d", bss, vn), func() { t := suite.T() - input, err := DataForInfo(suite.Service(), inputCols, vn) + input, err := stub.DataForInfo(suite.Service(), inputCols, vn) require.NoError(suite.T(), err) testData := restoreBackupInfoMultiVersion{ service: suite.Service(), - resource: suite.Resource(), + resourceCat: suite.Resource(), backupVersion: vn, collectionsPrevious: input, collectionsLatest: expected, @@ -855,11 +856,11 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio folderCName, } - fileSet := []ItemData{ + fileSet := []stub.ItemData{ { Name: "file-custom", Data: fileAData, - Perms: PermData{ + Perms: stub.PermData{ User: secondaryUserName, EntityID: secondaryUserID, Roles: writePerm, @@ -869,14 +870,14 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio { Name: "file-inherited", Data: fileAData, - Perms: PermData{ + Perms: stub.PermData{ SharingMode: metadata.SharingModeInherited, }, }, { Name: "file-empty", Data: fileAData, - Perms: PermData{ + Perms: stub.PermData{ SharingMode: metadata.SharingModeCustom, }, }, @@ -900,23 +901,23 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio // - inherted-permission-file // - empty-permission-file (empty/empty might have interesting behavior) - cols := []OnedriveColInfo{ + cols := []stub.ColInfo{ { PathElements: rootPath, - Files: []ItemData{}, - Folders: []ItemData{ + Files: []stub.ItemData{}, + Folders: []stub.ItemData{ {Name: folderAName}, }, }, { PathElements: folderAPath, Files: fileSet, - Folders: []ItemData{ + Folders: []stub.ItemData{ {Name: folderAName}, {Name: folderBName}, {Name: folderCName}, }, - Perms: PermData{ + Perms: stub.PermData{ User: tertiaryUserName, EntityID: tertiaryUserID, Roles: readPerm, @@ -925,7 +926,7 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio { PathElements: subfolderAAPath, Files: fileSet, - Perms: PermData{ + Perms: stub.PermData{ User: tertiaryUserName, EntityID: tertiaryUserID, Roles: writePerm, @@ -935,20 +936,20 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio { PathElements: subfolderABPath, Files: fileSet, - Perms: PermData{ + Perms: stub.PermData{ SharingMode: metadata.SharingModeInherited, }, }, { PathElements: subfolderACPath, Files: fileSet, - Perms: PermData{ + Perms: stub.PermData{ SharingMode: metadata.SharingModeCustom, }, }, } - expected, err := DataForInfo(suite.Service(), cols, version.Backup) + expected, err := stub.DataForInfo(suite.Service(), cols, version.Backup) require.NoError(suite.T(), err) bss := suite.Service().String() @@ -958,12 +959,12 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio // Ideally this can always be true or false and still // work, but limiting older versions to use emails so as // to validate that flow as well. 
- input, err := DataForInfo(suite.Service(), cols, vn) + input, err := stub.DataForInfo(suite.Service(), cols, vn) require.NoError(suite.T(), err) testData := restoreBackupInfoMultiVersion{ service: suite.Service(), - resource: suite.Resource(), + resourceCat: suite.Resource(), backupVersion: vn, collectionsPrevious: input, collectionsLatest: expected, @@ -1018,16 +1019,16 @@ func testRestoreFolderNamedFolderRegression( folderBName, } - cols := []OnedriveColInfo{ + cols := []stub.ColInfo{ { PathElements: rootPath, - Files: []ItemData{ + Files: []stub.ItemData{ { Name: fileName, Data: fileAData, }, }, - Folders: []ItemData{ + Folders: []stub.ItemData{ { Name: folderNamedFolder, }, @@ -1038,13 +1039,13 @@ func testRestoreFolderNamedFolderRegression( }, { PathElements: folderFolderPath, - Files: []ItemData{ + Files: []stub.ItemData{ { Name: fileName, Data: fileBData, }, }, - Folders: []ItemData{ + Folders: []stub.ItemData{ { Name: folderBName, }, @@ -1052,13 +1053,13 @@ func testRestoreFolderNamedFolderRegression( }, { PathElements: subfolderPath, - Files: []ItemData{ + Files: []stub.ItemData{ { Name: fileName, Data: fileCData, }, }, - Folders: []ItemData{ + Folders: []stub.ItemData{ { Name: folderNamedFolder, }, @@ -1066,19 +1067,19 @@ func testRestoreFolderNamedFolderRegression( }, } - expected, err := DataForInfo(suite.Service(), cols, version.Backup) + expected, err := stub.DataForInfo(suite.Service(), cols, version.Backup) require.NoError(suite.T(), err) bss := suite.Service().String() for vn := startVersion; vn <= version.Backup; vn++ { suite.Run(fmt.Sprintf("%s-Version%d", bss, vn), func() { t := suite.T() - input, err := DataForInfo(suite.Service(), cols, vn) + input, err := stub.DataForInfo(suite.Service(), cols, vn) require.NoError(suite.T(), err) testData := restoreBackupInfoMultiVersion{ service: suite.Service(), - resource: suite.Resource(), + resourceCat: suite.Resource(), backupVersion: vn, collectionsPrevious: input, collectionsLatest: expected, diff --git a/src/internal/m365/sharepoint/api/pages.go b/src/internal/m365/sharepoint/api/pages.go index 581d5de10..e7d24fd6f 100644 --- a/src/internal/m365/sharepoint/api/pages.go +++ b/src/internal/m365/sharepoint/api/pages.go @@ -172,7 +172,7 @@ func RestoreSitePage( itemData data.Stream, siteID, destName string, ) (details.ItemInfo, error) { - ctx, end := diagnostics.Span(ctx, "gc:sharepoint:restorePage", diagnostics.Label("item_uuid", itemData.UUID())) + ctx, end := diagnostics.Span(ctx, "m365:sharepoint:restorePage", diagnostics.Label("item_uuid", itemData.UUID())) defer end() var ( diff --git a/src/internal/m365/sharepoint/collection.go b/src/internal/m365/sharepoint/collection.go index 90af58cbf..687a2ebb8 100644 --- a/src/internal/m365/sharepoint/collection.go +++ b/src/internal/m365/sharepoint/collection.go @@ -257,7 +257,7 @@ func (sc *Collection) retrieveLists( sc.data <- &Item{ id: ptr.Val(lst.GetId()), data: io.NopCloser(bytes.NewReader(byteArray)), - info: sharePointListInfo(lst, size), + info: listToSPInfo(lst, size), modTime: t, } @@ -320,7 +320,7 @@ func (sc *Collection) retrievePages( sc.data <- &Item{ id: ptr.Val(pg.GetId()), data: io.NopCloser(bytes.NewReader(byteArray)), - info: sharePointPageInfo(pg, root, size), + info: pageToSPInfo(pg, root, size), modTime: ptr.OrNow(pg.GetLastModifiedDateTime()), } diff --git a/src/internal/m365/sharepoint/collection_test.go b/src/internal/m365/sharepoint/collection_test.go index 5381710c2..5177ecf06 100644 --- a/src/internal/m365/sharepoint/collection_test.go +++ 
b/src/internal/m365/sharepoint/collection_test.go @@ -116,7 +116,7 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() { data := &Item{ id: name, data: io.NopCloser(bytes.NewReader(byteArray)), - info: sharePointListInfo(listing, int64(len(byteArray))), + info: listToSPInfo(listing, int64(len(byteArray))), } return data @@ -205,7 +205,7 @@ func (suite *SharePointCollectionSuite) TestListCollection_Restore() { listData := &Item{ id: testName, data: io.NopCloser(bytes.NewReader(byteArray)), - info: sharePointListInfo(listing, int64(len(byteArray))), + info: listToSPInfo(listing, int64(len(byteArray))), } destName := tester.DefaultTestRestoreConfig("").Location diff --git a/src/internal/m365/sharepoint/list_info.go b/src/internal/m365/sharepoint/list_info.go deleted file mode 100644 index 62101b584..000000000 --- a/src/internal/m365/sharepoint/list_info.go +++ /dev/null @@ -1,28 +0,0 @@ -package sharepoint - -import ( - "github.com/microsoftgraph/msgraph-sdk-go/models" - - "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/pkg/backup/details" -) - -// sharePointListInfo translates models.Listable metadata into searchable content -// List Details: https://learn.microsoft.com/en-us/graph/api/resources/list?view=graph-rest-1.0 -func sharePointListInfo(lst models.Listable, size int64) *details.SharePointInfo { - var ( - name = ptr.Val(lst.GetDisplayName()) - webURL = ptr.Val(lst.GetWebUrl()) - created = ptr.Val(lst.GetCreatedDateTime()) - modified = ptr.Val(lst.GetLastModifiedDateTime()) - ) - - return &details.SharePointInfo{ - ItemType: details.SharePointList, - ItemName: name, - Created: created, - Modified: modified, - WebURL: webURL, - Size: size, - } -} diff --git a/src/internal/m365/sharepoint/list_info_test.go b/src/internal/m365/sharepoint/list_info_test.go deleted file mode 100644 index a4467450f..000000000 --- a/src/internal/m365/sharepoint/list_info_test.go +++ /dev/null @@ -1,59 +0,0 @@ -package sharepoint - -import ( - "testing" - - "github.com/microsoftgraph/msgraph-sdk-go/models" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/suite" - - "github.com/alcionai/corso/src/internal/tester" - "github.com/alcionai/corso/src/pkg/backup/details" -) - -type SharePointInfoSuite struct { - tester.Suite -} - -func TestSharePointInfoSuite(t *testing.T) { - suite.Run(t, &SharePointInfoSuite{Suite: tester.NewUnitSuite(t)}) -} - -func (suite *SharePointInfoSuite) TestSharePointInfo() { - tests := []struct { - name string - listAndDeets func() (models.Listable, *details.SharePointInfo) - }{ - { - name: "Empty List", - listAndDeets: func() (models.Listable, *details.SharePointInfo) { - i := &details.SharePointInfo{ItemType: details.SharePointList} - return models.NewList(), i - }, - }, { - name: "Only Name", - listAndDeets: func() (models.Listable, *details.SharePointInfo) { - aTitle := "Whole List" - listing := models.NewList() - listing.SetDisplayName(&aTitle) - i := &details.SharePointInfo{ - ItemType: details.SharePointList, - ItemName: aTitle, - } - - return listing, i - }, - }, - } - for _, test := range tests { - suite.Run(test.name, func() { - t := suite.T() - - list, expected := test.listAndDeets() - info := sharePointListInfo(list, 10) - assert.Equal(t, expected.ItemType, info.ItemType) - assert.Equal(t, expected.ItemName, info.ItemName) - assert.Equal(t, expected.WebURL, info.WebURL) - }) - } -} diff --git a/src/internal/m365/sharepoint/list.go b/src/internal/m365/sharepoint/lists.go similarity index 93% rename from 
src/internal/m365/sharepoint/list.go rename to src/internal/m365/sharepoint/lists.go index 3dcaaa58f..0555516af 100644 --- a/src/internal/m365/sharepoint/list.go +++ b/src/internal/m365/sharepoint/lists.go @@ -10,9 +10,30 @@ import ( "github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/m365/graph" + "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/fault" ) +// listToSPInfo translates models.Listable metadata into searchable content +// List Details: https://learn.microsoft.com/en-us/graph/api/resources/list?view=graph-rest-1.0 +func listToSPInfo(lst models.Listable, size int64) *details.SharePointInfo { + var ( + name = ptr.Val(lst.GetDisplayName()) + webURL = ptr.Val(lst.GetWebUrl()) + created = ptr.Val(lst.GetCreatedDateTime()) + modified = ptr.Val(lst.GetLastModifiedDateTime()) + ) + + return &details.SharePointInfo{ + ItemType: details.SharePointList, + ItemName: name, + Created: created, + Modified: modified, + WebURL: webURL, + Size: size, + } +} + type listTuple struct { name string id string diff --git a/src/internal/m365/sharepoint/list_test.go b/src/internal/m365/sharepoint/lists_test.go similarity index 53% rename from src/internal/m365/sharepoint/list_test.go rename to src/internal/m365/sharepoint/lists_test.go index 05990712d..32cb16918 100644 --- a/src/internal/m365/sharepoint/list_test.go +++ b/src/internal/m365/sharepoint/lists_test.go @@ -4,21 +4,23 @@ import ( "testing" "github.com/alcionai/clues" + "github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/account" + "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/fault" ) -type SharePointSuite struct { +type ListsUnitSuite struct { tester.Suite creds account.M365Config } -func (suite *SharePointSuite) SetupSuite() { +func (suite *ListsUnitSuite) SetupSuite() { t := suite.T() a := tester.NewM365Account(t) m365, err := a.M365Config() @@ -27,8 +29,8 @@ func (suite *SharePointSuite) SetupSuite() { suite.creds = m365 } -func TestSharePointSuite(t *testing.T) { - suite.Run(t, &SharePointSuite{ +func TestListsUnitSuite(t *testing.T) { + suite.Run(t, &ListsUnitSuite{ Suite: tester.NewIntegrationSuite( t, [][]string{tester.M365AcctCredEnvs}, @@ -47,7 +49,7 @@ func TestSharePointSuite(t *testing.T) { // to verify if these 2 calls are valid // - fetchContentBaseTypes // - fetchColumnPositions -func (suite *SharePointSuite) TestLoadList() { +func (suite *ListsUnitSuite) TestLoadList() { t := suite.T() ctx, flush := tester.NewContext(t) @@ -63,3 +65,42 @@ func (suite *SharePointSuite) TestLoadList() { assert.Greater(t, len(lists), 0) t.Logf("Length: %d\n", len(lists)) } + +func (suite *ListsUnitSuite) TestSharePointInfo() { + tests := []struct { + name string + listAndDeets func() (models.Listable, *details.SharePointInfo) + }{ + { + name: "Empty List", + listAndDeets: func() (models.Listable, *details.SharePointInfo) { + i := &details.SharePointInfo{ItemType: details.SharePointList} + return models.NewList(), i + }, + }, { + name: "Only Name", + listAndDeets: func() (models.Listable, *details.SharePointInfo) { + aTitle := "Whole List" + listing := models.NewList() + listing.SetDisplayName(&aTitle) + i := &details.SharePointInfo{ + ItemType: details.SharePointList, + ItemName: aTitle, + } + + return listing, i + }, + }, + } + for _, test 
:= range tests { + suite.Run(test.name, func() { + t := suite.T() + + list, expected := test.listAndDeets() + info := listToSPInfo(list, 10) + assert.Equal(t, expected.ItemType, info.ItemType) + assert.Equal(t, expected.ItemName, info.ItemName) + assert.Equal(t, expected.WebURL, info.WebURL) + }) + } +} diff --git a/src/internal/m365/sharepoint/pageInfo.go b/src/internal/m365/sharepoint/pages.go similarity index 84% rename from src/internal/m365/sharepoint/pageInfo.go rename to src/internal/m365/sharepoint/pages.go index 8b5060bdd..c5e0bb633 100644 --- a/src/internal/m365/sharepoint/pageInfo.go +++ b/src/internal/m365/sharepoint/pages.go @@ -8,10 +8,10 @@ import ( "github.com/alcionai/corso/src/pkg/backup/details" ) -// sharePointPageInfo propagates metadata from the SharePoint Page data type +// pageToSPInfo propagates metadata from the SharePoint Page data type // into searchable content. // Page Details: https://learn.microsoft.com/en-us/graph/api/resources/sitepage?view=graph-rest-beta -func sharePointPageInfo(page models.SitePageable, root string, size int64) *details.SharePointInfo { +func pageToSPInfo(page models.SitePageable, root string, size int64) *details.SharePointInfo { var ( name, prefix, webURL string created, modified time.Time diff --git a/src/internal/m365/sharepoint/pageInfo_test.go b/src/internal/m365/sharepoint/pages_test.go similarity index 76% rename from src/internal/m365/sharepoint/pageInfo_test.go rename to src/internal/m365/sharepoint/pages_test.go index 7490b117f..d89b0d921 100644 --- a/src/internal/m365/sharepoint/pageInfo_test.go +++ b/src/internal/m365/sharepoint/pages_test.go @@ -1,13 +1,25 @@ package sharepoint import ( + "testing" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" "github.com/alcionai/corso/src/internal/m365/graph/betasdk/models" + "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/backup/details" ) -func (suite *SharePointInfoSuite) TestSharePointInfo_Pages() { +type PagesUnitSuite struct { + tester.Suite +} + +func TestPagesUnitSuite(t *testing.T) { + suite.Run(t, &PagesUnitSuite{Suite: tester.NewUnitSuite(t)}) +} + +func (suite *PagesUnitSuite) TestSharePointInfo_Pages() { tests := []struct { name string pageAndDeets func() (models.SitePageable, *details.SharePointInfo) @@ -39,7 +51,7 @@ func (suite *SharePointInfoSuite) TestSharePointInfo_Pages() { t := suite.T() paged, expected := test.pageAndDeets() - info := sharePointPageInfo(paged, "", 0) + info := pageToSPInfo(paged, "", 0) assert.Equal(t, expected.ItemType, info.ItemType) assert.Equal(t, expected.ItemName, info.ItemName) assert.Equal(t, expected.WebURL, info.WebURL) diff --git a/src/internal/m365/sharepoint/restore.go b/src/internal/m365/sharepoint/restore.go index 7076d4de9..772515fd4 100644 --- a/src/internal/m365/sharepoint/restore.go +++ b/src/internal/m365/sharepoint/restore.go @@ -129,7 +129,7 @@ func restoreListItem( itemData data.Stream, siteID, destName string, ) (details.ItemInfo, error) { - ctx, end := diagnostics.Span(ctx, "gc:sharepoint:restoreList", diagnostics.Label("item_uuid", itemData.UUID())) + ctx, end := diagnostics.Span(ctx, "m365:sharepoint:restoreList", diagnostics.Label("item_uuid", itemData.UUID())) defer end() ctx = clues.Add(ctx, "list_item_id", itemData.UUID()) @@ -190,7 +190,7 @@ func restoreListItem( } } - dii.SharePoint = sharePointListInfo(restoredList, int64(len(byteArray))) + dii.SharePoint = listToSPInfo(restoredList, int64(len(byteArray))) return dii, nil } @@ -203,7 +203,7 @@ func 
RestoreListCollection( deets *details.Builder, errs *fault.Bus, ) (support.CollectionMetrics, error) { - ctx, end := diagnostics.Span(ctx, "gc:sharepoint:restoreListCollection", diagnostics.Label("path", dc.FullPath())) + ctx, end := diagnostics.Span(ctx, "m365:sharepoint:restoreListCollection", diagnostics.Label("path", dc.FullPath())) defer end() var ( @@ -214,7 +214,7 @@ func RestoreListCollection( el = errs.Local() ) - trace.Log(ctx, "gc:sharepoint:restoreListCollection", directory.String()) + trace.Log(ctx, "m365:sharepoint:restoreListCollection", directory.String()) for { if el.Failure() != nil { @@ -285,8 +285,8 @@ func RestorePageCollection( siteID = directory.ResourceOwner() ) - trace.Log(ctx, "gc:sharepoint:restorePageCollection", directory.String()) - ctx, end := diagnostics.Span(ctx, "gc:sharepoint:restorePageCollection", diagnostics.Label("path", dc.FullPath())) + trace.Log(ctx, "m365:sharepoint:restorePageCollection", directory.String()) + ctx, end := diagnostics.Span(ctx, "m365:sharepoint:restorePageCollection", diagnostics.Label("path", dc.FullPath())) defer end() diff --git a/src/internal/m365/graph_connector_test_helper.go b/src/internal/m365/stub/stub.go similarity index 71% rename from src/internal/m365/graph_connector_test_helper.go rename to src/internal/m365/stub/stub.go index f95d1781f..601e57722 100644 --- a/src/internal/m365/graph_connector_test_helper.go +++ b/src/internal/m365/stub/stub.go @@ -1,12 +1,14 @@ -package m365 +package stub import ( "bytes" - "context" "io" + "golang.org/x/exp/maps" + "github.com/alcionai/corso/src/internal/data" exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock" + "github.com/alcionai/corso/src/internal/m365/mock" "github.com/alcionai/corso/src/internal/m365/onedrive/metadata" "github.com/alcionai/corso/src/internal/m365/resource" "github.com/alcionai/corso/src/pkg/control" @@ -33,9 +35,9 @@ type ItemInfo struct { // be the same before and after restoring the item in M365 and may not be // the M365 ID. When restoring items out of place, the item is assigned a // new ID making it unsuitable for a lookup key. - lookupKey string - name string - data []byte + LookupKey string + Name string + Data []byte } type ConfigInfo struct { @@ -47,19 +49,113 @@ type ConfigInfo struct { RestoreCfg control.RestoreConfig } -func mustToDataLayerPath( - service path.ServiceType, - tenant, resourceOwner string, - category path.CategoryType, - elements []string, - isItem bool, -) (path.Path, error) { - res, err := path.Build(tenant, resourceOwner, service, category, isItem, elements...) - if err != nil { - return nil, err +func GetCollectionsAndExpected( + config ConfigInfo, + testCollections []ColInfo, + backupVersion int, +) (int, int, []data.RestoreCollection, map[string]map[string][]byte, error) { + var ( + collections []data.RestoreCollection + expectedData = map[string]map[string][]byte{} + totalItems = 0 + totalKopiaItems = 0 + ) + + for _, owner := range config.ResourceOwners { + numItems, kopiaItems, ownerCollections, userExpectedData, err := CollectionsForInfo( + config.Service, + config.Tenant, + owner, + config.RestoreCfg, + testCollections, + backupVersion, + ) + if err != nil { + return totalItems, totalKopiaItems, collections, expectedData, err + } + + collections = append(collections, ownerCollections...) 
+ totalItems += numItems + totalKopiaItems += kopiaItems + + maps.Copy(expectedData, userExpectedData) } - return res, err + return totalItems, totalKopiaItems, collections, expectedData, nil +} + +func CollectionsForInfo( + service path.ServiceType, + tenant, user string, + restoreCfg control.RestoreConfig, + allInfo []ColInfo, + backupVersion int, +) (int, int, []data.RestoreCollection, map[string]map[string][]byte, error) { + var ( + collections = make([]data.RestoreCollection, 0, len(allInfo)) + expectedData = make(map[string]map[string][]byte, len(allInfo)) + totalItems = 0 + kopiaEntries = 0 + ) + + for _, info := range allInfo { + pth, err := path.Build( + tenant, + user, + service, + info.Category, + false, + info.PathElements...) + if err != nil { + return totalItems, kopiaEntries, collections, expectedData, err + } + + mc := exchMock.NewCollection(pth, pth, len(info.Items)) + + baseDestPath, err := backupOutputPathFromRestore(restoreCfg, pth) + if err != nil { + return totalItems, kopiaEntries, collections, expectedData, err + } + + baseExpected := expectedData[baseDestPath.String()] + if baseExpected == nil { + expectedData[baseDestPath.String()] = make(map[string][]byte, len(info.Items)) + baseExpected = expectedData[baseDestPath.String()] + } + + for i := 0; i < len(info.Items); i++ { + mc.Names[i] = info.Items[i].Name + mc.Data[i] = info.Items[i].Data + + baseExpected[info.Items[i].LookupKey] = info.Items[i].Data + + // We do not count metadata files against item count + if backupVersion > 0 && + (service == path.OneDriveService || service == path.SharePointService) && + metadata.HasMetaSuffix(info.Items[i].Name) { + continue + } + + totalItems++ + } + + c := mock.RestoreCollection{ + Collection: mc, + AuxItems: map[string]data.Stream{}, + } + + for _, aux := range info.AuxItems { + c.AuxItems[aux.Name] = &exchMock.Data{ + ID: aux.Name, + Reader: io.NopCloser(bytes.NewReader(aux.Data)), + } + } + + collections = append(collections, c) + kopiaEntries += len(info.Items) + } + + return totalItems, kopiaEntries, collections, expectedData, nil } // backupOutputPathFromRestore returns a path.Path denoting the location in @@ -86,102 +182,11 @@ func backupOutputPathFromRestore( base = append(base, inputPath.Folders()...) } - return mustToDataLayerPath( - inputPath.Service(), + return path.Build( inputPath.Tenant(), inputPath.ResourceOwner(), + inputPath.Service(), inputPath.Category(), - base, false, - ) -} - -// TODO(ashmrtn): Make this an actual mock class that can be used in other -// packages. 
-type mockRestoreCollection struct { - data.Collection - auxItems map[string]data.Stream -} - -func (rc mockRestoreCollection) FetchItemByName( - ctx context.Context, - name string, -) (data.Stream, error) { - res := rc.auxItems[name] - if res == nil { - return nil, data.ErrNotFound - } - - return res, nil -} - -func collectionsForInfo( - service path.ServiceType, - tenant, user string, - restoreCfg control.RestoreConfig, - allInfo []ColInfo, - backupVersion int, -) (int, int, []data.RestoreCollection, map[string]map[string][]byte, error) { - var ( - collections = make([]data.RestoreCollection, 0, len(allInfo)) - expectedData = make(map[string]map[string][]byte, len(allInfo)) - totalItems = 0 - kopiaEntries = 0 - ) - - for _, info := range allInfo { - pth, err := mustToDataLayerPath( - service, - tenant, - user, - info.Category, - info.PathElements, - false) - if err != nil { - return totalItems, kopiaEntries, collections, expectedData, err - } - - mc := exchMock.NewCollection(pth, pth, len(info.Items)) - - baseDestPath, err := backupOutputPathFromRestore(restoreCfg, pth) - if err != nil { - return totalItems, kopiaEntries, collections, expectedData, err - } - - baseExpected := expectedData[baseDestPath.String()] - if baseExpected == nil { - expectedData[baseDestPath.String()] = make(map[string][]byte, len(info.Items)) - baseExpected = expectedData[baseDestPath.String()] - } - - for i := 0; i < len(info.Items); i++ { - mc.Names[i] = info.Items[i].name - mc.Data[i] = info.Items[i].data - - baseExpected[info.Items[i].lookupKey] = info.Items[i].data - - // We do not count metadata files against item count - if backupVersion > 0 && - (service == path.OneDriveService || service == path.SharePointService) && - metadata.HasMetaSuffix(info.Items[i].name) { - continue - } - - totalItems++ - } - - c := mockRestoreCollection{Collection: mc, auxItems: map[string]data.Stream{}} - - for _, aux := range info.AuxItems { - c.auxItems[aux.name] = &exchMock.Data{ - ID: aux.name, - Reader: io.NopCloser(bytes.NewReader(aux.data)), - } - } - - collections = append(collections, c) - kopiaEntries += len(info.Items) - } - - return totalItems, kopiaEntries, collections, expectedData, nil + base...) } From 271c0ef0a3cb7da1c18119b01cd856c296550f89 Mon Sep 17 00:00:00 2001 From: ashmrtn <3891298+ashmrtn@users.noreply.github.com> Date: Wed, 14 Jun 2023 10:31:03 -0700 Subject: [PATCH 26/41] Rename some test helpers (#3615) Had other names because they were made when other similar test code existed. --- #### Does this PR need a docs update or release note? 
- [ ] :white_check_mark: Yes, it's included - [ ] :clock1: Yes, but in a later PR - [x] :no_entry: No #### Type of change - [ ] :sunflower: Feature - [ ] :bug: Bugfix - [ ] :world_map: Documentation - [ ] :robot: Supportability/Tests - [ ] :computer: CI/Deployment - [x] :broom: Tech Debt/Cleanup #### Issue(s) * #3525 #### Test Plan - [ ] :muscle: Manual - [x] :zap: Unit test - [ ] :green_heart: E2E --- src/internal/kopia/base_finder_test.go | 72 +++++++++++--------------- 1 file changed, 29 insertions(+), 43 deletions(-) diff --git a/src/internal/kopia/base_finder_test.go b/src/internal/kopia/base_finder_test.go index ca84f5d94..bf8110c23 100644 --- a/src/internal/kopia/base_finder_test.go +++ b/src/internal/kopia/base_finder_test.go @@ -111,13 +111,6 @@ func (sm mockEmptySnapshotManager) FindManifests( return nil, nil } -func (sm mockEmptySnapshotManager) LoadSnapshots( - context.Context, - []manifest.ID, -) ([]*snapshot.Manifest, error) { - return nil, clues.New("not implemented") -} - func (sm mockEmptySnapshotManager) LoadSnapshot( context.Context, manifest.ID, @@ -145,7 +138,7 @@ type manifestInfo struct { err error } -func newManifestInfo2( +func newManifestInfo( id manifest.ID, modTime time.Time, incomplete bool, @@ -189,12 +182,12 @@ func newManifestInfo2( return res } -type mockSnapshotManager2 struct { +type mockSnapshotManager struct { data []manifestInfo findErr error } -func matchesTags2(mi manifestInfo, tags map[string]string) bool { +func matchesTags(mi manifestInfo, tags map[string]string) bool { for k := range tags { if _, ok := mi.tags[k]; !ok { return false @@ -204,7 +197,7 @@ func matchesTags2(mi manifestInfo, tags map[string]string) bool { return true } -func (msm *mockSnapshotManager2) FindManifests( +func (msm *mockSnapshotManager) FindManifests( ctx context.Context, tags map[string]string, ) ([]*manifest.EntryMetadata, error) { @@ -219,7 +212,7 @@ func (msm *mockSnapshotManager2) FindManifests( res := []*manifest.EntryMetadata{} for _, mi := range msm.data { - if matchesTags2(mi, tags) { + if matchesTags(mi, tags) { res = append(res, mi.metadata) } } @@ -227,14 +220,7 @@ func (msm *mockSnapshotManager2) FindManifests( return res, nil } -func (msm *mockSnapshotManager2) LoadSnapshots( - ctx context.Context, - ids []manifest.ID, -) ([]*snapshot.Manifest, error) { - return nil, clues.New("not implemented") -} - -func (msm *mockSnapshotManager2) LoadSnapshot( +func (msm *mockSnapshotManager) LoadSnapshot( ctx context.Context, id manifest.ID, ) (*snapshot.Manifest, error) { @@ -353,7 +339,7 @@ func (suite *BaseFinderUnitSuite) TestNoResult_ErrorListingSnapshots() { defer flush() bf := baseFinder{ - sm: &mockSnapshotManager2{findErr: assert.AnError}, + sm: &mockSnapshotManager{findErr: assert.AnError}, bg: mockEmptyModelGetter{}, } reasons := []Reason{ @@ -387,7 +373,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { name: "Return Older Base If Fail To Get Manifest", input: testUser1Mail, manifestData: []manifestInfo{ - newManifestInfo2( + newManifestInfo( testID2, testT2, testCompleteMan, @@ -396,7 +382,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { testMail, testUser1, ), - newManifestInfo2( + newManifestInfo( testID1, testT1, testCompleteMan, @@ -421,7 +407,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { name: "Return Older Base If Fail To Get Backup", input: testUser1Mail, manifestData: []manifestInfo{ - newManifestInfo2( + newManifestInfo( testID2, testT2, testCompleteMan, @@ -430,7 +416,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { 
testMail, testUser1, ), - newManifestInfo2( + newManifestInfo( testID1, testT1, testCompleteMan, @@ -455,7 +441,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { name: "Return Older Base If Missing Details", input: testUser1Mail, manifestData: []manifestInfo{ - newManifestInfo2( + newManifestInfo( testID2, testT2, testCompleteMan, @@ -464,7 +450,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { testMail, testUser1, ), - newManifestInfo2( + newManifestInfo( testID1, testT1, testCompleteMan, @@ -489,7 +475,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { name: "Old Backup Details Pointer", input: testUser1Mail, manifestData: []manifestInfo{ - newManifestInfo2( + newManifestInfo( testID1, testT1, testCompleteMan, @@ -516,7 +502,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { name: "All One Snapshot", input: testAllUsersAllCats, manifestData: []manifestInfo{ - newManifestInfo2( + newManifestInfo( testID1, testT1, testCompleteMan, @@ -543,7 +529,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { name: "Multiple Bases Some Overlapping Reasons", input: testAllUsersAllCats, manifestData: []manifestInfo{ - newManifestInfo2( + newManifestInfo( testID1, testT1, testCompleteMan, @@ -555,7 +541,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { testUser2, testUser3, ), - newManifestInfo2( + newManifestInfo( testID2, testT2, testCompleteMan, @@ -648,7 +634,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { name: "Newer Incomplete Assist Snapshot", input: testUser1Mail, manifestData: []manifestInfo{ - newManifestInfo2( + newManifestInfo( testID1, testT1, testCompleteMan, @@ -657,7 +643,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { testMail, testUser1, ), - newManifestInfo2( + newManifestInfo( testID2, testT2, testIncompleteMan, @@ -684,7 +670,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { name: "Incomplete Older Than Complete", input: testUser1Mail, manifestData: []manifestInfo{ - newManifestInfo2( + newManifestInfo( testID1, testT1, testIncompleteMan, @@ -693,7 +679,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { testMail, testUser1, ), - newManifestInfo2( + newManifestInfo( testID2, testT2, testCompleteMan, @@ -719,7 +705,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { name: "Newest Incomplete Only Incomplete", input: testUser1Mail, manifestData: []manifestInfo{ - newManifestInfo2( + newManifestInfo( testID1, testT1, testIncompleteMan, @@ -728,7 +714,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { testMail, testUser1, ), - newManifestInfo2( + newManifestInfo( testID2, testT2, testIncompleteMan, @@ -752,7 +738,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { name: "Some Bases Not Found", input: testAllUsersMail, manifestData: []manifestInfo{ - newManifestInfo2( + newManifestInfo( testID1, testT1, testCompleteMan, @@ -778,7 +764,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { // Manifests are currently returned in the order they're defined by the // mock. 
manifestData: []manifestInfo{ - newManifestInfo2( + newManifestInfo( testID2, testT2, testCompleteMan, @@ -787,7 +773,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { testMail, testUser1, ), - newManifestInfo2( + newManifestInfo( testID1, testT1, testCompleteMan, @@ -819,7 +805,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { defer flush() bf := baseFinder{ - sm: &mockSnapshotManager2{data: test.manifestData}, + sm: &mockSnapshotManager{data: test.manifestData}, bg: &mockModelGetter{data: test.backupData}, } @@ -850,7 +836,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { func (suite *BaseFinderUnitSuite) TestFindBases_CustomTags() { manifestData := []manifestInfo{ - newManifestInfo2( + newManifestInfo( testID1, testT1, testCompleteMan, @@ -914,7 +900,7 @@ func (suite *BaseFinderUnitSuite) TestFindBases_CustomTags() { defer flush() bf := baseFinder{ - sm: &mockSnapshotManager2{data: manifestData}, + sm: &mockSnapshotManager{data: manifestData}, bg: &mockModelGetter{data: backupData}, } From 8996d3edab739b40655a02378c715440459cf6c2 Mon Sep 17 00:00:00 2001 From: ashmrtn <3891298+ashmrtn@users.noreply.github.com> Date: Wed, 14 Jun 2023 11:04:03 -0700 Subject: [PATCH 27/41] Minor fixups for base finding (#3617) Fix: * return errors when told to * fixup timestamp issues --- #### Does this PR need a docs update or release note? - [ ] :white_check_mark: Yes, it's included - [ ] :clock1: Yes, but in a later PR - [x] :no_entry: No #### Type of change - [ ] :sunflower: Feature - [x] :bug: Bugfix - [ ] :world_map: Documentation - [x] :robot: Supportability/Tests - [ ] :computer: CI/Deployment - [ ] :broom: Tech Debt/Cleanup #### Issue(s) * #3525 #### Test Plan - [x] :muscle: Manual - [x] :zap: Unit test - [ ] :green_heart: E2E --- src/internal/kopia/base_finder_test.go | 36 +++++++++++++++----------- 1 file changed, 21 insertions(+), 15 deletions(-) diff --git a/src/internal/kopia/base_finder_test.go b/src/internal/kopia/base_finder_test.go index bf8110c23..ff3d5f0c1 100644 --- a/src/internal/kopia/base_finder_test.go +++ b/src/internal/kopia/base_finder_test.go @@ -230,6 +230,10 @@ func (msm *mockSnapshotManager) LoadSnapshot( for _, mi := range msm.data { if mi.man.ID == id { + if mi.err != nil { + return nil, mi.err + } + return mi.man, nil } } @@ -259,10 +263,12 @@ func newBackupModel( err: err, } - if !oldDetailsID { - res.b.StreamStoreID = "ssid" - } else { - res.b.DetailsID = "ssid" + if hasDetailsSnap { + if !oldDetailsID { + res.b.StreamStoreID = "ssid" + } else { + res.b.DetailsID = "ssid" + } } return res @@ -393,14 +399,14 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { ), }, expectedBaseReasons: map[int][]Reason{ - 0: testUser1Mail, + 1: testUser1Mail, }, expectedAssistManifestReasons: map[int][]Reason{ - 0: testUser1Mail, + 1: testUser1Mail, }, backupData: []backupInfo{ newBackupModel(testBackup2, true, true, false, nil), - newBackupModel(testBackup1, false, false, false, assert.AnError), + newBackupModel(testBackup1, true, true, false, nil), }, }, { @@ -427,14 +433,14 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { ), }, expectedBaseReasons: map[int][]Reason{ - 0: testUser1Mail, + 1: testUser1Mail, }, expectedAssistManifestReasons: map[int][]Reason{ - 0: testUser1Mail, + 1: testUser1Mail, }, backupData: []backupInfo{ - newBackupModel(testBackup2, true, true, false, nil), - newBackupModel(testBackup1, false, false, false, assert.AnError), + newBackupModel(testBackup2, false, false, false, assert.AnError), + newBackupModel(testBackup1, true, true, false, 
nil), }, }, { @@ -461,14 +467,14 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { ), }, expectedBaseReasons: map[int][]Reason{ - 0: testUser1Mail, + 1: testUser1Mail, }, expectedAssistManifestReasons: map[int][]Reason{ - 0: testUser1Mail, + 1: testUser1Mail, }, backupData: []backupInfo{ - newBackupModel(testBackup2, true, true, false, nil), - newBackupModel(testBackup1, true, false, false, nil), + newBackupModel(testBackup2, true, false, false, nil), + newBackupModel(testBackup1, true, true, false, nil), }, }, { From d415be68441715dac2c79e6b7cefb5a96f98c87c Mon Sep 17 00:00:00 2001 From: ashmrtn <3891298+ashmrtn@users.noreply.github.com> Date: Wed, 14 Jun 2023 11:57:40 -0700 Subject: [PATCH 28/41] Mark snapshots missing backup as assist bases (#3618) Loosen restrictions slightly by returning snapshots that are missing their backup or details models as assist bases that can be used by kopia-assisted incrementals. --- #### Does this PR need a docs update or release note? - [ ] :white_check_mark: Yes, it's included - [ ] :clock1: Yes, but in a later PR - [x] :no_entry: No #### Type of change - [ ] :sunflower: Feature - [x] :bug: Bugfix - [ ] :world_map: Documentation - [ ] :robot: Supportability/Tests - [ ] :computer: CI/Deployment - [ ] :broom: Tech Debt/Cleanup #### Issue(s) * #3525 #### Test Plan - [ ] :muscle: Manual - [x] :zap: Unit test - [ ] :green_heart: E2E --- src/internal/kopia/base_finder.go | 27 ++++++++++++++++++++++++++ src/internal/kopia/base_finder_test.go | 2 ++ 2 files changed, 29 insertions(+) diff --git a/src/internal/kopia/base_finder.go b/src/internal/kopia/base_finder.go index ebe8f3287..625ded643 100644 --- a/src/internal/kopia/base_finder.go +++ b/src/internal/kopia/base_finder.go @@ -192,6 +192,8 @@ func (b *baseFinder) findBasesInSet( Manifest: man, Reasons: []Reason{reason}, }) + + logger.Ctx(ictx).Info("found incomplete backup") } continue @@ -203,6 +205,18 @@ func (b *baseFinder) findBasesInSet( // Safe to continue here as we'll just end up attempting to use an older // backup as the base.
logger.CtxErr(ictx, err).Debug("searching for base backup") + + if !foundIncomplete { + foundIncomplete = true + + kopiaAssistSnaps = append(kopiaAssistSnaps, ManifestEntry{ + Manifest: man, + Reasons: []Reason{reason}, + }) + + logger.Ctx(ictx).Info("found incomplete backup") + } + continue } @@ -216,6 +230,19 @@ "empty backup stream store ID", "search_backup_id", bup.ID) + if !foundIncomplete { + foundIncomplete = true + + kopiaAssistSnaps = append(kopiaAssistSnaps, ManifestEntry{ + Manifest: man, + Reasons: []Reason{reason}, + }) + + logger.Ctx(ictx).Infow( + "found incomplete backup", + "search_backup_id", bup.ID) + } + continue } diff --git a/src/internal/kopia/base_finder_test.go b/src/internal/kopia/base_finder_test.go index ff3d5f0c1..c950b4f9d 100644 --- a/src/internal/kopia/base_finder_test.go +++ b/src/internal/kopia/base_finder_test.go @@ -436,6 +436,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { 1: testUser1Mail, }, expectedAssistManifestReasons: map[int][]Reason{ + 0: testUser1Mail, 1: testUser1Mail, }, backupData: []backupInfo{ @@ -470,6 +471,7 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { 1: testUser1Mail, }, expectedAssistManifestReasons: map[int][]Reason{ + 0: testUser1Mail, 1: testUser1Mail, }, backupData: []backupInfo{ From 0a17a72800922b0db3cf6ee0c555eb77b7a5afb3 Mon Sep 17 00:00:00 2001 From: neha_gupta Date: Thu, 15 Jun 2023 11:31:57 +0530 Subject: [PATCH 29/41] fetch repoID from kopia if not found in config (#3578) Some events have an empty repoID. This could be because multiple config files are used, and a new config file might be missing the repoID. Solution: fetch the repoID from Kopia if it isn't found locally in the config file. Note: since the Corso Start event happens before we connect to Kopia, it might be missing the repoID if the config file is used for the first time. All subsequent events will have correct values, and the Corso Start event will also report correct values once the config file has been used.
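Rough sketch of the fallback this PR adds (illustrative only: `resolveRepoID` and the `lookupFromKopia` callback are hypothetical names for this description; the real change is in `repository.Connect` below, which reads the repo model via `getRepoModel`):

```go
package main

import "fmt"

// repoIDNotFound mirrors the events.RepoIDNotFound sentinel added by this
// patch; everything else in this sketch is illustrative.
const repoIDNotFound = "not_found"

// resolveRepoID prefers the ID recorded in the config file and only falls
// back to asking kopia when the config never recorded one.
func resolveRepoID(configured string, lookupFromKopia func() (string, error)) (string, error) {
	if configured != "" && configured != repoIDNotFound {
		return configured, nil
	}

	id, err := lookupFromKopia()
	if err != nil {
		return "", fmt.Errorf("retrieving repo info: %w", err)
	}

	return id, nil
}

func main() {
	id, _ := resolveRepoID(repoIDNotFound, func() (string, error) {
		return "repo-1234", nil // stands in for the rm.ID kopia lookup
	})
	fmt.Println(id) // repo-1234
}
```

#### Does this PR need a docs update or release note?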
- [ ] :no_entry: No #### Type of change - [ ] :bug: Bugfix #### Issue(s) * https://github.com/alcionai/corso/issues/3388 #### Test Plan - [ ] :muscle: Manual --- src/cli/backup/backup.go | 4 ++-- src/cli/backup/exchange.go | 4 ++-- src/cli/backup/onedrive.go | 4 ++-- src/cli/backup/sharepoint.go | 4 ++-- src/cli/repo/repo.go | 2 +- src/cli/repo/s3.go | 3 ++- src/cli/restore/exchange.go | 2 +- src/cli/restore/onedrive.go | 2 +- src/cli/restore/sharepoint.go | 2 +- src/cli/utils/utils.go | 40 ++++++++++++++++++++++++++++---- src/internal/events/events.go | 4 +++- src/pkg/repository/repository.go | 9 +++++++ 12 files changed, 61 insertions(+), 19 deletions(-) diff --git a/src/cli/backup/backup.go b/src/cli/backup/backup.go index 25a6d22e7..c721e4c3f 100644 --- a/src/cli/backup/backup.go +++ b/src/cli/backup/backup.go @@ -279,7 +279,7 @@ func genericDeleteCommand(cmd *cobra.Command, bID, designation string, args []st ctx := clues.Add(cmd.Context(), "delete_backup_id", bID) - r, _, err := utils.GetAccountAndConnect(ctx) + r, _, _, err := utils.GetAccountAndConnect(ctx) if err != nil { return Only(ctx, err) } @@ -300,7 +300,7 @@ func genericDeleteCommand(cmd *cobra.Command, bID, designation string, args []st func genericListCommand(cmd *cobra.Command, bID string, service path.ServiceType, args []string) error { ctx := cmd.Context() - r, _, err := utils.GetAccountAndConnect(ctx) + r, _, _, err := utils.GetAccountAndConnect(ctx) if err != nil { return Only(ctx, err) } diff --git a/src/cli/backup/exchange.go b/src/cli/backup/exchange.go index ded194a05..af71c6a30 100644 --- a/src/cli/backup/exchange.go +++ b/src/cli/backup/exchange.go @@ -153,7 +153,7 @@ func createExchangeCmd(cmd *cobra.Command, args []string) error { return err } - r, acct, err := utils.GetAccountAndConnect(ctx) + r, acct, err := utils.AccountConnectAndWriteRepoConfig(ctx) if err != nil { return Only(ctx, err) } @@ -262,7 +262,7 @@ func detailsExchangeCmd(cmd *cobra.Command, args []string) error { ctx := cmd.Context() opts := utils.MakeExchangeOpts(cmd) - r, _, err := utils.GetAccountAndConnect(ctx) + r, _, _, err := utils.GetAccountAndConnect(ctx) if err != nil { return Only(ctx, err) } diff --git a/src/cli/backup/onedrive.go b/src/cli/backup/onedrive.go index dca460de0..b47acd496 100644 --- a/src/cli/backup/onedrive.go +++ b/src/cli/backup/onedrive.go @@ -134,7 +134,7 @@ func createOneDriveCmd(cmd *cobra.Command, args []string) error { return err } - r, acct, err := utils.GetAccountAndConnect(ctx) + r, acct, err := utils.AccountConnectAndWriteRepoConfig(ctx) if err != nil { return Only(ctx, err) } @@ -220,7 +220,7 @@ func detailsOneDriveCmd(cmd *cobra.Command, args []string) error { ctx := cmd.Context() opts := utils.MakeOneDriveOpts(cmd) - r, _, err := utils.GetAccountAndConnect(ctx) + r, _, _, err := utils.GetAccountAndConnect(ctx) if err != nil { return Only(ctx, err) } diff --git a/src/cli/backup/sharepoint.go b/src/cli/backup/sharepoint.go index 7f48d4c33..2197252ea 100644 --- a/src/cli/backup/sharepoint.go +++ b/src/cli/backup/sharepoint.go @@ -150,7 +150,7 @@ func createSharePointCmd(cmd *cobra.Command, args []string) error { return err } - r, acct, err := utils.GetAccountAndConnect(ctx) + r, acct, err := utils.AccountConnectAndWriteRepoConfig(ctx) if err != nil { return Only(ctx, err) } @@ -312,7 +312,7 @@ func detailsSharePointCmd(cmd *cobra.Command, args []string) error { ctx := cmd.Context() opts := utils.MakeSharePointOpts(cmd) - r, _, err := utils.GetAccountAndConnect(ctx) + r, _, _, err := 
utils.GetAccountAndConnect(ctx) if err != nil { return Only(ctx, err) } diff --git a/src/cli/repo/repo.go b/src/cli/repo/repo.go index 5f768cb8b..6d36d1608 100644 --- a/src/cli/repo/repo.go +++ b/src/cli/repo/repo.go @@ -121,7 +121,7 @@ func handleMaintenanceCmd(cmd *cobra.Command, args []string) error { return err } - r, _, err := utils.GetAccountAndConnect(ctx) + r, _, _, err := utils.GetAccountAndConnect(ctx) if err != nil { return print.Only(ctx, err) } diff --git a/src/cli/repo/s3.go b/src/cli/repo/s3.go index 2daefe733..feba087a8 100644 --- a/src/cli/repo/s3.go +++ b/src/cli/repo/s3.go @@ -13,6 +13,7 @@ import ( "github.com/alcionai/corso/src/cli/options" . "github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/utils" + "github.com/alcionai/corso/src/internal/events" "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/repository" "github.com/alcionai/corso/src/pkg/storage" @@ -193,7 +194,7 @@ func connectS3Cmd(cmd *cobra.Command, args []string) error { repoID := cfg.RepoID if len(repoID) == 0 { - repoID = "not_found" + repoID = events.RepoIDNotFound } s3Cfg, err := cfg.Storage.S3Config() diff --git a/src/cli/restore/exchange.go b/src/cli/restore/exchange.go index f4390ef4c..514e6102c 100644 --- a/src/cli/restore/exchange.go +++ b/src/cli/restore/exchange.go @@ -89,7 +89,7 @@ func restoreExchangeCmd(cmd *cobra.Command, args []string) error { return err } - r, _, err := utils.GetAccountAndConnect(ctx) + r, _, _, err := utils.GetAccountAndConnect(ctx) if err != nil { return Only(ctx, err) } diff --git a/src/cli/restore/onedrive.go b/src/cli/restore/onedrive.go index 008ac18fd..85b159370 100644 --- a/src/cli/restore/onedrive.go +++ b/src/cli/restore/onedrive.go @@ -90,7 +90,7 @@ func restoreOneDriveCmd(cmd *cobra.Command, args []string) error { return err } - r, _, err := utils.GetAccountAndConnect(ctx) + r, _, _, err := utils.GetAccountAndConnect(ctx) if err != nil { return Only(ctx, err) } diff --git a/src/cli/restore/sharepoint.go b/src/cli/restore/sharepoint.go index c9b47b6bc..a52f5bb2a 100644 --- a/src/cli/restore/sharepoint.go +++ b/src/cli/restore/sharepoint.go @@ -95,7 +95,7 @@ func restoreSharePointCmd(cmd *cobra.Command, args []string) error { return err } - r, _, err := utils.GetAccountAndConnect(ctx) + r, _, _, err := utils.GetAccountAndConnect(ctx) if err != nil { return Only(ctx, err) } diff --git a/src/cli/utils/utils.go b/src/cli/utils/utils.go index 56a13ad1a..e0b4c5276 100644 --- a/src/cli/utils/utils.go +++ b/src/cli/utils/utils.go @@ -24,23 +24,53 @@ const ( Wildcard = "*" ) -func GetAccountAndConnect(ctx context.Context) (repository.Repository, *account.Account, error) { +func GetAccountAndConnect(ctx context.Context) (repository.Repository, *storage.Storage, *account.Account, error) { cfg, err := config.GetConfigRepoDetails(ctx, true, nil) if err != nil { - return nil, nil, err + return nil, nil, nil, err } repoID := cfg.RepoID if len(repoID) == 0 { - repoID = "not_found" + repoID = events.RepoIDNotFound } r, err := repository.Connect(ctx, cfg.Account, cfg.Storage, repoID, options.Control()) if err != nil { - return nil, nil, clues.Wrap(err, "Failed to connect to the "+cfg.Storage.Provider.String()+" repository") + return nil, nil, nil, clues.Wrap(err, "connecting to the "+cfg.Storage.Provider.String()+" repository") } - return r, &cfg.Account, nil + return r, &cfg.Storage, &cfg.Account, nil +} + +func AccountConnectAndWriteRepoConfig(ctx context.Context) (repository.Repository, *account.Account, error) { + r, stg, 
acc, err := GetAccountAndConnect(ctx) + if err != nil { + logger.CtxErr(ctx, err).Info("getting and connecting account") + return nil, nil, err + } + + s3Config, err := stg.S3Config() + if err != nil { + logger.CtxErr(ctx, err).Info("getting storage configuration") + return nil, nil, err + } + + m365Config, err := acc.M365Config() + if err != nil { + logger.CtxErr(ctx, err).Info("getting m365 configuration") + return nil, nil, err + } + + // The repo config is already set during repo connect and init. This is just to confirm the values are correct, + // so we won't fail if the write fails. + err = config.WriteRepoConfig(ctx, s3Config, m365Config, r.GetID()) + if err != nil { + logger.CtxErr(ctx, err).Info("writing to repository configuration") + return nil, nil, err + } + + return r, acc, nil } // CloseRepo handles closing a repo. diff --git a/src/internal/events/events.go b/src/internal/events/events.go index 7dc5cbf6f..baa2c2117 100644 --- a/src/internal/events/events.go +++ b/src/internal/events/events.go @@ -52,7 +52,9 @@ const ( Service = "service" StartTime = "start_time" Status = "status" - RepoID = "not_found" + + // default values for keys + RepoIDNotFound = "not_found" ) const ( diff --git a/src/pkg/repository/repository.go b/src/pkg/repository/repository.go index c8e30829a..8bb99fef1 100644 --- a/src/pkg/repository/repository.go +++ b/src/pkg/repository/repository.go @@ -230,6 +230,15 @@ func Connect( return nil, clues.Wrap(err, "constructing event bus") } + if repoid == events.RepoIDNotFound { + rm, err := getRepoModel(ctx, ms) + if err != nil { + return nil, clues.New("retrieving repo info") + } + + repoid = string(rm.ID) + } + // Do not query repo ID if metrics are disabled if !opts.DisableMetrics { bus.SetRepoID(repoid) From dd19b484c8f89461354485826633a1f21b6c6b7f Mon Sep 17 00:00:00 2001 From: Abhishek Pandey Date: Thu, 15 Jun 2023 00:48:03 -0700 Subject: [PATCH 30/41] Add more tests for url cache (#3593) PR contents 1. Address a corner case where the cache may be left half-filled (e.g. a delta query failing after a few pages) by emptying the cache on any delta failure. 2. Add unit tests ---
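To make that corner case concrete, here is a minimal, self-contained sketch of the refresh behavior — illustrative names only, not the patch's real types — showing a partially-filled map being discarded when the delta enumeration fails midway:

```go
package main

import (
	"errors"
	"fmt"
)

// itemProps mirrors the shape of the cached values; names are illustrative.
type itemProps struct {
	downloadURL string
	isDeleted   bool
}

// refresh rebuilds the id->props map from a delta enumeration. If the
// enumeration fails after some pages were applied, the partial results are
// dropped so a later lookup triggers a clean refresh instead of serving a
// half-filled cache.
func refresh(enumerate func(add func(id string, p itemProps)) error) (map[string]itemProps, error) {
	cache := map[string]itemProps{}

	if err := enumerate(func(id string, p itemProps) {
		cache[id] = p
	}); err != nil {
		// empty the cache on any delta failure
		return map[string]itemProps{}, err
	}

	return cache, nil
}

func main() {
	// Simulate a delta query that applies one page, then fails.
	cache, err := refresh(func(add func(string, itemProps)) error {
		add("1", itemProps{downloadURL: "https://example.com/1"})
		return errors.New("delta query error")
	})

	fmt.Println(len(cache), err) // 0 delta query error
}
```

Reassigning a fresh map on failure, as the patch does, keeps the failure path simple: the next lookup sees an empty cache and performs a full refresh.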
#### Does this PR need a docs update or release note? - [ ] :white_check_mark: Yes, it's included - [ ] :clock1: Yes, but in a later PR - [x] :no_entry: No #### Type of change - [ ] :sunflower: Feature - [ ] :bug: Bugfix - [ ] :world_map: Documentation - [x] :robot: Supportability/Tests - [ ] :computer: CI/Deployment - [ ] :broom: Tech Debt/Cleanup #### Issue(s) * # #### Test Plan - [ ] :muscle: Manual - [x] :zap: Unit test - [ ] :green_heart: E2E --- .../m365/onedrive/collections_test.go | 16 + src/internal/m365/onedrive/url_cache.go | 25 +- src/internal/m365/onedrive/url_cache_test.go | 374 ++++++++++++++++++ 3 files changed, 405 insertions(+), 10 deletions(-) diff --git a/src/internal/m365/onedrive/collections_test.go b/src/internal/m365/onedrive/collections_test.go index 5a5805179..d18ad0f4f 100644 --- a/src/internal/m365/onedrive/collections_test.go +++ b/src/internal/m365/onedrive/collections_test.go @@ -2494,6 +2494,22 @@ func driveItem( return coreItem(id, name, parentPath, parentID, isFile, isFolder, isPackage) } +func fileItem( + id, name, parentPath, parentID, url string, + deleted bool, +) models.DriveItemable { + di := driveItem(id, name, parentPath, parentID, true, false, false) + di.SetAdditionalData(map[string]interface{}{ + "@microsoft.graph.downloadUrl": url, + }) + + if deleted { + di.SetDeleted(models.NewDeleted()) + } + + return di +} + func malwareItem( id string, name string, diff --git a/src/internal/m365/onedrive/url_cache.go b/src/internal/m365/onedrive/url_cache.go index 4370136db..bb5e61b94 100644 --- a/src/internal/m365/onedrive/url_cache.go +++ b/src/internal/m365/onedrive/url_cache.go @@ -9,6 +9,7 @@ import ( "github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/alcionai/corso/src/internal/common/ptr" + "github.com/alcionai/corso/src/internal/common/str" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/services/m365/api" @@ -33,7 +34,7 @@ type urlCache struct { itemPager api.DriveItemEnumerator - errors *fault.Bus + errs *fault.Bus } // newURLCache creates a new URL cache for the specified drive ID @@ -41,7 +42,7 @@ func newURLCache( driveID string, refreshInterval time.Duration, itemPager api.DriveItemEnumerator, - errors *fault.Bus, + errs *fault.Bus, ) (*urlCache, error) { err := validateCacheParams( driveID, @@ -57,7 +58,7 @@ func newURLCache( driveID: driveID, refreshInterval: refreshInterval, itemPager: itemPager, - errors: errors, + errs: errs, }, nil } @@ -72,7 +73,7 @@ func validateCacheParams( return clues.New("drive id is empty") } - if refreshInterval <= 1*time.Second { + if refreshInterval < 1*time.Second { return clues.New("invalid refresh interval") } @@ -94,7 +95,6 @@ func (uc *urlCache) getItemProperties( ctx = clues.Add(ctx, "drive_id", uc.driveID) - // Lazy refresh if uc.needsRefresh() { err := uc.refreshCache(ctx) if err != nil { @@ -146,6 +146,9 @@ err := uc.deltaQuery(ctx) if err != nil { + // clear cache + uc.idToProps = make(map[string]itemProps) + return err } @@ -171,7 +174,7 @@ func (uc *urlCache) deltaQuery( uc.updateCache, map[string]string{}, "", - uc.errors) + uc.errs) if err != nil { return clues.Wrap(err, "delta query") } @@ -224,12 +227,14 @@ func (uc *urlCache) updateCache( continue } - var url string + var ( + url string + ad = item.GetAdditionalData() + ) for _, key := range downloadURLKeys { - tmp, ok := item.GetAdditionalData()[key].(*string) - if ok { - url = ptr.Val(tmp) + if v, err := str.AnyValueToString(key, ad); err == nil { + url = v break } } diff --git
a/src/internal/m365/onedrive/url_cache_test.go b/src/internal/m365/onedrive/url_cache_test.go index c3674c8c7..6a6c696b8 100644 --- a/src/internal/m365/onedrive/url_cache_test.go +++ b/src/internal/m365/onedrive/url_cache_test.go @@ -1,6 +1,8 @@ package onedrive import ( + "errors" + "math/rand" "net/http" "sync" "testing" @@ -152,3 +154,375 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() { // Validate that <= 1 delta queries were made require.LessOrEqual(t, cache.deltaQueryCount, 1) } + +type URLCacheUnitSuite struct { + tester.Suite +} + +func TestURLCacheUnitSuite(t *testing.T) { + suite.Run(t, &URLCacheUnitSuite{Suite: tester.NewUnitSuite(t)}) +} + +func (suite *URLCacheUnitSuite) TestGetItemProperties() { + deltaString := "delta" + next := "next" + driveID := "drive1" + + table := []struct { + name string + pagerResult map[string][]deltaPagerResult + expectedItemProps map[string]itemProps + expectedErr require.ErrorAssertionFunc + cacheAssert func(*urlCache, time.Time) + }{ + { + name: "single item in cache", + pagerResult: map[string][]deltaPagerResult{ + driveID: { + { + items: []models.DriveItemable{ + fileItem("1", "file1", "root", "root", "https://dummy1.com", false), + }, + deltaLink: &deltaString, + }, + }, + }, + expectedItemProps: map[string]itemProps{ + "1": { + downloadURL: "https://dummy1.com", + isDeleted: false, + }, + }, + expectedErr: require.NoError, + cacheAssert: func(uc *urlCache, startTime time.Time) { + require.Greater(suite.T(), uc.lastRefreshTime, startTime) + require.Equal(suite.T(), 1, uc.deltaQueryCount) + require.Equal(suite.T(), 1, len(uc.idToProps)) + }, + }, + { + name: "multiple items in cache", + pagerResult: map[string][]deltaPagerResult{ + driveID: { + { + items: []models.DriveItemable{ + fileItem("1", "file1", "root", "root", "https://dummy1.com", false), + fileItem("2", "file2", "root", "root", "https://dummy2.com", false), + fileItem("3", "file3", "root", "root", "https://dummy3.com", false), + fileItem("4", "file4", "root", "root", "https://dummy4.com", false), + fileItem("5", "file5", "root", "root", "https://dummy5.com", false), + }, + deltaLink: &deltaString, + }, + }, + }, + expectedItemProps: map[string]itemProps{ + "1": { + downloadURL: "https://dummy1.com", + isDeleted: false, + }, + "2": { + downloadURL: "https://dummy2.com", + isDeleted: false, + }, + "3": { + downloadURL: "https://dummy3.com", + isDeleted: false, + }, + "4": { + downloadURL: "https://dummy4.com", + isDeleted: false, + }, + "5": { + downloadURL: "https://dummy5.com", + isDeleted: false, + }, + }, + expectedErr: require.NoError, + cacheAssert: func(uc *urlCache, startTime time.Time) { + require.Greater(suite.T(), uc.lastRefreshTime, startTime) + require.Equal(suite.T(), 1, uc.deltaQueryCount) + require.Equal(suite.T(), 5, len(uc.idToProps)) + }, + }, + { + name: "duplicate items with potentially new urls", + pagerResult: map[string][]deltaPagerResult{ + driveID: { + { + items: []models.DriveItemable{ + fileItem("1", "file1", "root", "root", "https://dummy1.com", false), + fileItem("2", "file2", "root", "root", "https://dummy2.com", false), + fileItem("3", "file3", "root", "root", "https://dummy3.com", false), + fileItem("1", "file1", "root", "root", "https://test1.com", false), + fileItem("2", "file2", "root", "root", "https://test2.com", false), + }, + deltaLink: &deltaString, + }, + }, + }, + expectedItemProps: map[string]itemProps{ + "1": { + downloadURL: "https://test1.com", + isDeleted: false, + }, + "2": { + downloadURL: "https://test2.com", + 
isDeleted: false, + }, + "3": { + downloadURL: "https://dummy3.com", + isDeleted: false, + }, + }, + expectedErr: require.NoError, + cacheAssert: func(uc *urlCache, startTime time.Time) { + require.Greater(suite.T(), uc.lastRefreshTime, startTime) + require.Equal(suite.T(), 1, uc.deltaQueryCount) + require.Equal(suite.T(), 3, len(uc.idToProps)) + }, + }, + { + name: "deleted items", + pagerResult: map[string][]deltaPagerResult{ + driveID: { + { + items: []models.DriveItemable{ + fileItem("1", "file1", "root", "root", "https://dummy1.com", false), + fileItem("2", "file2", "root", "root", "https://dummy2.com", false), + fileItem("1", "file1", "root", "root", "https://dummy1.com", true), + }, + deltaLink: &deltaString, + }, + }, + }, + expectedItemProps: map[string]itemProps{ + "1": { + downloadURL: "", + isDeleted: true, + }, + "2": { + downloadURL: "https://dummy2.com", + isDeleted: false, + }, + }, + expectedErr: require.NoError, + cacheAssert: func(uc *urlCache, startTime time.Time) { + require.Greater(suite.T(), uc.lastRefreshTime, startTime) + require.Equal(suite.T(), 1, uc.deltaQueryCount) + require.Equal(suite.T(), 2, len(uc.idToProps)) + }, + }, + { + name: "item not found in cache", + pagerResult: map[string][]deltaPagerResult{ + driveID: { + { + items: []models.DriveItemable{ + fileItem("1", "file1", "root", "root", "https://dummy1.com", false), + }, + deltaLink: &deltaString, + }, + }, + }, + expectedItemProps: map[string]itemProps{ + "2": {}, + }, + expectedErr: require.Error, + cacheAssert: func(uc *urlCache, startTime time.Time) { + require.Greater(suite.T(), uc.lastRefreshTime, startTime) + require.Equal(suite.T(), 1, uc.deltaQueryCount) + require.Equal(suite.T(), 1, len(uc.idToProps)) + }, + }, + { + name: "multi-page delta query error", + pagerResult: map[string][]deltaPagerResult{ + driveID: { + { + items: []models.DriveItemable{ + fileItem("1", "file1", "root", "root", "https://dummy1.com", false), + }, + nextLink: &next, + }, + { + items: []models.DriveItemable{ + fileItem("2", "file2", "root", "root", "https://dummy2.com", false), + }, + deltaLink: &deltaString, + err: errors.New("delta query error"), + }, + }, + }, + expectedItemProps: map[string]itemProps{ + "1": {}, + "2": {}, + }, + expectedErr: require.Error, + cacheAssert: func(uc *urlCache, _ time.Time) { + require.Equal(suite.T(), time.Time{}, uc.lastRefreshTime) + require.Equal(suite.T(), 0, uc.deltaQueryCount) + require.Equal(suite.T(), 0, len(uc.idToProps)) + }, + }, + + { + name: "folder item", + pagerResult: map[string][]deltaPagerResult{ + driveID: { + { + items: []models.DriveItemable{ + fileItem("1", "file1", "root", "root", "https://dummy1.com", false), + driveItem("2", "folder2", "root", "root", false, true, false), + }, + deltaLink: &deltaString, + }, + }, + }, + expectedItemProps: map[string]itemProps{ + "2": {}, + }, + expectedErr: require.Error, + cacheAssert: func(uc *urlCache, startTime time.Time) { + require.Greater(suite.T(), uc.lastRefreshTime, startTime) + require.Equal(suite.T(), 1, uc.deltaQueryCount) + require.Equal(suite.T(), 1, len(uc.idToProps)) + }, + }, + } + + for _, test := range table { + suite.Run(test.name, func() { + t := suite.T() + ctx, flush := tester.NewContext(t) + defer flush() + + itemPager := &mockItemPager{ + toReturn: test.pagerResult[driveID], + } + + cache, err := newURLCache( + driveID, + 1*time.Hour, + itemPager, + fault.New(true)) + + require.NoError(suite.T(), err, clues.ToCore(err)) + + numConcurrentReq := 100 + var wg sync.WaitGroup + wg.Add(numConcurrentReq) + 
+ startTime := time.Now() + + for i := 0; i < numConcurrentReq; i++ { + go func() { + defer wg.Done() + + for id, expected := range test.expectedItemProps { + time.Sleep(time.Duration(rand.Intn(100)) * time.Millisecond) + + props, err := cache.getItemProperties(ctx, id) + + test.expectedErr(suite.T(), err, clues.ToCore(err)) + require.Equal(suite.T(), expected, props) + } + }() + } + + wg.Wait() + + test.cacheAssert(cache, startTime) + }) + } +} + +// Test needsRefresh +func (suite *URLCacheUnitSuite) TestNeedsRefresh() { + driveID := "drive1" + t := suite.T() + refreshInterval := 1 * time.Second + + cache, err := newURLCache( + driveID, + refreshInterval, + &mockItemPager{}, + fault.New(true)) + + require.NoError(t, err, clues.ToCore(err)) + + // cache is empty + require.True(t, cache.needsRefresh()) + + // cache is not empty, but refresh interval has passed + cache.idToProps["1"] = itemProps{ + downloadURL: "https://dummy1.com", + isDeleted: false, + } + + time.Sleep(refreshInterval) + require.True(t, cache.needsRefresh()) + + // none of the above + cache.lastRefreshTime = time.Now() + require.False(t, cache.needsRefresh()) +} + +// Test newURLCache +func (suite *URLCacheUnitSuite) TestNewURLCache() { + // table driven tests + table := []struct { + name string + driveID string + refreshInt time.Duration + itemPager api.DriveItemEnumerator + errors *fault.Bus + expectedErr require.ErrorAssertionFunc + }{ + { + name: "invalid driveID", + driveID: "", + refreshInt: 1 * time.Hour, + itemPager: &mockItemPager{}, + errors: fault.New(true), + expectedErr: require.Error, + }, + { + name: "invalid refresh interval", + driveID: "drive1", + refreshInt: 100 * time.Millisecond, + itemPager: &mockItemPager{}, + errors: fault.New(true), + expectedErr: require.Error, + }, + { + name: "invalid itemPager", + driveID: "drive1", + refreshInt: 1 * time.Hour, + itemPager: nil, + errors: fault.New(true), + expectedErr: require.Error, + }, + { + name: "valid", + driveID: "drive1", + refreshInt: 1 * time.Hour, + itemPager: &mockItemPager{}, + errors: fault.New(true), + expectedErr: require.NoError, + }, + } + + for _, test := range table { + suite.Run(test.name, func() { + t := suite.T() + _, err := newURLCache( + test.driveID, + test.refreshInt, + test.itemPager, + test.errors) + + test.expectedErr(t, err, clues.ToCore(err)) + }) + } +} From 46eedc0ad1347bcdb32cb0c2a72233078f410806 Mon Sep 17 00:00:00 2001 From: Keepers Date: Thu, 15 Jun 2023 10:04:42 -0600 Subject: [PATCH 31/41] add default restore config normalizer (#3613) Adds a func that normalizes the restore config in case inputs don't match accepted or expected values. Also moves DefaultTestRestoreConfig out of /tester and into /control/testdata to avoid circular dependency issues. --- #### Does this PR need a docs update or release note? 
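Roughly, the normalizer's contract — sketched here with simplified stand-ins rather than the real control package types — is that inputs outside the accepted set degrade to safe defaults instead of failing the restore:

```go
package main

import (
	"fmt"
	"strings"
)

type CollisionPolicy string

const (
	Skip    CollisionPolicy = "skip"
	Copy    CollisionPolicy = "copy"
	Replace CollisionPolicy = "replace"
)

type RestoreConfig struct {
	OnCollision CollisionPolicy
	Location    string
}

// ensureDefaults mirrors the normalizer's behavior in this patch: accepted
// values pass through untouched, anything else is coerced to a default.
func ensureDefaults(rc RestoreConfig) RestoreConfig {
	switch rc.OnCollision {
	case Skip, Copy, Replace:
		// accepted as-is
	default:
		// unrecognized policies degrade to the safe default
		rc.OnCollision = Skip
	}

	// a bare root location means "no subfolder", so store it as empty
	if strings.TrimSpace(rc.Location) == "/" {
		rc.Location = ""
	}

	return rc
}

func main() {
	fmt.Printf("%+v\n", ensureDefaults(RestoreConfig{OnCollision: "batman", Location: "/"}))
	// {OnCollision:skip Location:}
}
```

Normalizing rather than erroring keeps bad CLI input from aborting a restore; the options_test.go cases added below exercise exactly these coercions.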
- [x] :no_entry: No #### Type of change - [x] :sunflower: Feature #### Issue(s) * #3562 #### Test Plan - [x] :zap: Unit test --- src/cmd/factory/impl/common.go | 3 +- src/internal/m365/controller_test.go | 13 +-- .../m365/exchange/contacts_restore_test.go | 3 +- .../m365/exchange/events_restore_test.go | 3 +- .../m365/exchange/mail_restore_test.go | 3 +- src/internal/m365/exchange/restore_test.go | 33 ++++---- src/internal/m365/onedrive/item_test.go | 3 +- src/internal/m365/onedrive/url_cache_test.go | 3 +- .../m365/sharepoint/api/pages_test.go | 3 +- .../m365/sharepoint/collection_test.go | 3 +- src/internal/operations/restore.go | 2 +- src/internal/operations/restore_test.go | 9 +- src/pkg/control/options.go | 31 +++++++ src/pkg/control/options_test.go | 83 +++++++++++++++++++ .../control/testdata/testdata.go} | 4 +- .../loadtest/repository_load_test.go | 3 +- src/pkg/repository/repository_test.go | 7 +- 17 files changed, 168 insertions(+), 41 deletions(-) create mode 100644 src/pkg/control/options_test.go rename src/{internal/tester/restore_destination.go => pkg/control/testdata/testdata.go} (85%) diff --git a/src/cmd/factory/impl/common.go b/src/cmd/factory/impl/common.go index d1855b108..f530e1d92 100644 --- a/src/cmd/factory/impl/common.go +++ b/src/cmd/factory/impl/common.go @@ -26,6 +26,7 @@ import ( "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/control" + "github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/credentials" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/path" @@ -397,7 +398,7 @@ func generateAndRestoreDriveItems( Service: service, Tenant: tenantID, ResourceOwners: []string{resourceOwner}, - RestoreCfg: tester.DefaultTestRestoreConfig(""), + RestoreCfg: testdata.DefaultRestoreConfig(""), } _, _, collections, _, err := m365Stub.GetCollectionsAndExpected( diff --git a/src/internal/m365/controller_test.go b/src/internal/m365/controller_test.go index 2d63e4261..6d181eb66 100644 --- a/src/internal/m365/controller_test.go +++ b/src/internal/m365/controller_test.go @@ -22,6 +22,7 @@ import ( "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/pkg/control" + "github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/selectors" @@ -296,7 +297,7 @@ func (suite *ControllerIntegrationSuite) TestRestoreFailsBadService() { defer flush() var ( - restoreCfg = tester.DefaultTestRestoreConfig("") + restoreCfg = testdata.DefaultRestoreConfig("") sel = selectors.Selector{ Service: selectors.ServiceUnknown, } @@ -323,7 +324,7 @@ func (suite *ControllerIntegrationSuite) TestRestoreFailsBadService() { } func (suite *ControllerIntegrationSuite) TestEmptyCollections() { - restoreCfg := tester.DefaultTestRestoreConfig("") + restoreCfg := testdata.DefaultRestoreConfig("") table := []struct { name string col []data.RestoreCollection @@ -539,7 +540,7 @@ func runRestoreBackupTest( Service: test.service, Tenant: tenant, ResourceOwners: resourceOwners, - RestoreCfg: tester.DefaultTestRestoreConfig(""), + RestoreCfg: testdata.DefaultRestoreConfig(""), } totalItems, totalKopiaItems, collections, expectedData, err := stub.GetCollectionsAndExpected( @@ -584,7 +585,7 @@ func runRestoreTestWithVersion( Service: test.service, Tenant: tenant, ResourceOwners: resourceOwners, - 
RestoreCfg: tester.DefaultTestRestoreConfig(""), + RestoreCfg: testdata.DefaultRestoreConfig(""), } totalItems, _, collections, _, err := stub.GetCollectionsAndExpected( @@ -621,7 +622,7 @@ func runRestoreBackupTestVersions( Service: test.service, Tenant: tenant, ResourceOwners: resourceOwners, - RestoreCfg: tester.DefaultTestRestoreConfig(""), + RestoreCfg: testdata.DefaultRestoreConfig(""), } totalItems, _, collections, _, err := stub.GetCollectionsAndExpected( @@ -997,7 +998,7 @@ func (suite *ControllerIntegrationSuite) TestMultiFolderBackupDifferentNames() { for i, collection := range test.collections { // Get a restoreCfg per collection so they're independent. - restoreCfg := tester.DefaultTestRestoreConfig("") + restoreCfg := testdata.DefaultRestoreConfig("") expectedDests = append(expectedDests, destAndCats{ resourceOwner: suite.user, dest: restoreCfg.Location, diff --git a/src/internal/m365/exchange/contacts_restore_test.go b/src/internal/m365/exchange/contacts_restore_test.go index 2eff0d6a6..de53f59e2 100644 --- a/src/internal/m365/exchange/contacts_restore_test.go +++ b/src/internal/m365/exchange/contacts_restore_test.go @@ -9,6 +9,7 @@ import ( "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/account" + "github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/services/m365/api" ) @@ -51,7 +52,7 @@ func (suite *ContactsRestoreIntgSuite) TestCreateContainerDestination() { path.EmailCategory, suite.creds.AzureTenantID, suite.userID, - tester.DefaultTestRestoreConfig("").Location, + testdata.DefaultRestoreConfig("").Location, []string{"Hufflepuff"}, []string{"Ravenclaw"}) } diff --git a/src/internal/m365/exchange/events_restore_test.go b/src/internal/m365/exchange/events_restore_test.go index dcce8c5f3..156d191d1 100644 --- a/src/internal/m365/exchange/events_restore_test.go +++ b/src/internal/m365/exchange/events_restore_test.go @@ -9,6 +9,7 @@ import ( "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/account" + "github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/services/m365/api" ) @@ -51,7 +52,7 @@ func (suite *EventsRestoreIntgSuite) TestCreateContainerDestination() { path.EmailCategory, suite.creds.AzureTenantID, suite.userID, - tester.DefaultTestRestoreConfig("").Location, + testdata.DefaultRestoreConfig("").Location, []string{"Durmstrang"}, []string{"Beauxbatons"}) } diff --git a/src/internal/m365/exchange/mail_restore_test.go b/src/internal/m365/exchange/mail_restore_test.go index 74ac9e87a..9d71de800 100644 --- a/src/internal/m365/exchange/mail_restore_test.go +++ b/src/internal/m365/exchange/mail_restore_test.go @@ -9,6 +9,7 @@ import ( "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/account" + "github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/services/m365/api" ) @@ -51,7 +52,7 @@ func (suite *MailRestoreIntgSuite) TestCreateContainerDestination() { path.EmailCategory, suite.creds.AzureTenantID, suite.userID, - tester.DefaultTestRestoreConfig("").Location, + testdata.DefaultRestoreConfig("").Location, []string{"Griffindor", "Croix"}, []string{"Griffindor", "Felicius"}) } diff --git a/src/internal/m365/exchange/restore_test.go b/src/internal/m365/exchange/restore_test.go index 5b4bbb825..a4e985620 100644 --- 
a/src/internal/m365/exchange/restore_test.go +++ b/src/internal/m365/exchange/restore_test.go @@ -13,6 +13,7 @@ import ( exchMock "github.com/alcionai/corso/src/internal/m365/exchange/mock" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/account" + "github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/services/m365/api" @@ -54,7 +55,7 @@ func (suite *RestoreIntgSuite) TestRestoreContact() { var ( userID = tester.M365UserID(t) - folderName = tester.DefaultTestRestoreConfig("contact").Location + folderName = testdata.DefaultRestoreConfig("contact").Location handler = newContactRestoreHandler(suite.ac) ) @@ -88,7 +89,7 @@ func (suite *RestoreIntgSuite) TestRestoreEvent() { var ( userID = tester.M365UserID(t) - subject = tester.DefaultTestRestoreConfig("event").Location + subject = testdata.DefaultRestoreConfig("event").Location handler = newEventRestoreHandler(suite.ac) ) @@ -154,7 +155,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { bytes: exchMock.MessageBytes("Restore Exchange Object"), category: path.EmailCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreConfig("mailobj").Location + folderName := testdata.DefaultRestoreConfig("mailobj").Location folder, err := handlers[path.EmailCategory]. CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -167,7 +168,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { bytes: exchMock.MessageWithDirectAttachment("Restore 1 Attachment"), category: path.EmailCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreConfig("mailwattch").Location + folderName := testdata.DefaultRestoreConfig("mailwattch").Location folder, err := handlers[path.EmailCategory]. CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -180,7 +181,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { bytes: exchMock.MessageWithItemAttachmentEvent("Event Item Attachment"), category: path.EmailCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreConfig("eventwattch").Location + folderName := testdata.DefaultRestoreConfig("eventwattch").Location folder, err := handlers[path.EmailCategory]. CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -193,7 +194,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { bytes: exchMock.MessageWithItemAttachmentMail("Mail Item Attachment"), category: path.EmailCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreConfig("mailitemattch").Location + folderName := testdata.DefaultRestoreConfig("mailitemattch").Location folder, err := handlers[path.EmailCategory]. CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -209,7 +210,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { ), category: path.EmailCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreConfig("mailbasicattch").Location + folderName := testdata.DefaultRestoreConfig("mailbasicattch").Location folder, err := handlers[path.EmailCategory]. 
CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -225,7 +226,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { ), category: path.EmailCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreConfig("mailnestattch").Location + folderName := testdata.DefaultRestoreConfig("mailnestattch").Location folder, err := handlers[path.EmailCategory]. CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -241,7 +242,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { ), category: path.EmailCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreConfig("mailcontactattch").Location + folderName := testdata.DefaultRestoreConfig("mailcontactattch").Location folder, err := handlers[path.EmailCategory]. CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -254,7 +255,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { bytes: exchMock.MessageWithNestedItemAttachmentEvent("Nested Item Attachment"), category: path.EmailCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreConfig("nestedattch").Location + folderName := testdata.DefaultRestoreConfig("nestedattch").Location folder, err := handlers[path.EmailCategory]. CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -267,7 +268,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { bytes: exchMock.MessageWithLargeAttachment("Restore Large Attachment"), category: path.EmailCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreConfig("maillargeattch").Location + folderName := testdata.DefaultRestoreConfig("maillargeattch").Location folder, err := handlers[path.EmailCategory]. CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -280,7 +281,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { bytes: exchMock.MessageWithTwoAttachments("Restore 2 Attachments"), category: path.EmailCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreConfig("mailtwoattch").Location + folderName := testdata.DefaultRestoreConfig("mailtwoattch").Location folder, err := handlers[path.EmailCategory]. CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -293,7 +294,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { bytes: exchMock.MessageWithOneDriveAttachment("Restore Reference(OneDrive) Attachment"), category: path.EmailCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreConfig("mailrefattch").Location + folderName := testdata.DefaultRestoreConfig("mailrefattch").Location folder, err := handlers[path.EmailCategory]. CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -306,7 +307,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { bytes: exchMock.ContactBytes("Test_Omega"), category: path.ContactsCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreConfig("contact").Location + folderName := testdata.DefaultRestoreConfig("contact").Location folder, err := handlers[path.ContactsCategory]. 
CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -319,7 +320,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { bytes: exchMock.EventBytes("Restored Event Object"), category: path.EventsCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreConfig("event").Location + folderName := testdata.DefaultRestoreConfig("event").Location calendar, err := handlers[path.EventsCategory]. CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) @@ -332,7 +333,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() { bytes: exchMock.EventWithAttachment("Restored Event Attachment"), category: path.EventsCategory, destination: func(t *testing.T, ctx context.Context) string { - folderName := tester.DefaultTestRestoreConfig("eventobj").Location + folderName := testdata.DefaultRestoreConfig("eventobj").Location calendar, err := handlers[path.EventsCategory]. CreateContainer(ctx, userID, folderName, "") require.NoError(t, err, clues.ToCore(err)) diff --git a/src/internal/m365/onedrive/item_test.go b/src/internal/m365/onedrive/item_test.go index fd3497fc4..5fd36d345 100644 --- a/src/internal/m365/onedrive/item_test.go +++ b/src/internal/m365/onedrive/item_test.go @@ -15,6 +15,7 @@ import ( "github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/tester" + "github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/services/m365/api" ) @@ -155,7 +156,7 @@ func (suite *ItemIntegrationSuite) TestItemWriter() { root, err := suite.service.ac.Drives().GetRootFolder(ctx, test.driveID) require.NoError(t, err, clues.ToCore(err)) - newFolderName := tester.DefaultTestRestoreConfig("folder").Location + newFolderName := testdata.DefaultRestoreConfig("folder").Location t.Logf("creating folder %s", newFolderName) newFolder, err := rh.PostItemInContainer( diff --git a/src/internal/m365/onedrive/url_cache_test.go b/src/internal/m365/onedrive/url_cache_test.go index 6a6c696b8..a7bafb68e 100644 --- a/src/internal/m365/onedrive/url_cache_test.go +++ b/src/internal/m365/onedrive/url_cache_test.go @@ -17,6 +17,7 @@ import ( "github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/internal/tester" + "github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/services/m365/api" ) @@ -65,7 +66,7 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() { t = suite.T() ac = suite.ac.Drives() driveID = suite.driveID - newFolderName = tester.DefaultTestRestoreConfig("folder").Location + newFolderName = testdata.DefaultRestoreConfig("folder").Location driveItemPager = suite.ac.Drives().NewItemPager(driveID, "", api.DriveItemSelectDefault()) ) diff --git a/src/internal/m365/sharepoint/api/pages_test.go b/src/internal/m365/sharepoint/api/pages_test.go index 0f2fa0471..58406557e 100644 --- a/src/internal/m365/sharepoint/api/pages_test.go +++ b/src/internal/m365/sharepoint/api/pages_test.go @@ -16,6 +16,7 @@ import ( spMock "github.com/alcionai/corso/src/internal/m365/sharepoint/mock" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/account" + "github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/fault" ) @@ -92,7 +93,7 
@@ func (suite *SharePointPageSuite) TestRestoreSinglePage() { ctx, flush := tester.NewContext(t) defer flush() - destName := tester.DefaultTestRestoreConfig("").Location + destName := testdata.DefaultRestoreConfig("").Location testName := "MockPage" // Create Test Page diff --git a/src/internal/m365/sharepoint/collection_test.go b/src/internal/m365/sharepoint/collection_test.go index 5177ecf06..76d26d3ed 100644 --- a/src/internal/m365/sharepoint/collection_test.go +++ b/src/internal/m365/sharepoint/collection_test.go @@ -19,6 +19,7 @@ import ( "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/control" + "github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/services/m365/api" @@ -208,7 +209,7 @@ func (suite *SharePointCollectionSuite) TestListCollection_Restore() { info: listToSPInfo(listing, int64(len(byteArray))), } - destName := tester.DefaultTestRestoreConfig("").Location + destName := testdata.DefaultRestoreConfig("").Location deets, err := restoreListItem(ctx, service, listData, suite.siteID, destName) assert.NoError(t, err, clues.ToCore(err)) diff --git a/src/internal/operations/restore.go b/src/internal/operations/restore.go index ef7e7ea38..c301daf5b 100644 --- a/src/internal/operations/restore.go +++ b/src/internal/operations/restore.go @@ -68,7 +68,7 @@ func NewRestoreOperation( operation: newOperation(opts, bus, kw, sw), acct: acct, BackupID: backupID, - RestoreCfg: restoreCfg, + RestoreCfg: control.EnsureRestoreConfigDefaults(ctx, restoreCfg), Selectors: sel, Version: "v0", rc: rc, diff --git a/src/internal/operations/restore_test.go b/src/internal/operations/restore_test.go index 5eb1d4fda..7faa5051d 100644 --- a/src/internal/operations/restore_test.go +++ b/src/internal/operations/restore_test.go @@ -28,6 +28,7 @@ import ( "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control/repository" + "github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/store" @@ -51,7 +52,7 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() { sw = &store.Wrapper{} ctrl = &mock.Controller{} now = time.Now() - restoreCfg = tester.DefaultTestRestoreConfig("") + restoreCfg = testdata.DefaultRestoreConfig("") ) table := []struct { @@ -219,7 +220,7 @@ func (suite *RestoreOpIntegrationSuite) TestNewRestoreOperation() { kw = &kopia.Wrapper{} sw = &store.Wrapper{} ctrl = &mock.Controller{} - restoreCfg = tester.DefaultTestRestoreConfig("") + restoreCfg = testdata.DefaultRestoreConfig("") opts = control.Defaults() ) @@ -378,7 +379,7 @@ func (suite *RestoreOpIntegrationSuite) TestRestore_Run() { { name: "Exchange_Restore", owner: tester.M365UserID(suite.T()), - restoreCfg: tester.DefaultTestRestoreConfig(""), + restoreCfg: testdata.DefaultRestoreConfig(""), getSelector: func(t *testing.T, owners []string) selectors.Selector { rsel := selectors.NewExchangeRestore(owners) rsel.Include(rsel.AllData()) @@ -454,7 +455,7 @@ func (suite *RestoreOpIntegrationSuite) TestRestore_Run_errorNoBackup() { defer flush() var ( - restoreCfg = tester.DefaultTestRestoreConfig("") + restoreCfg = testdata.DefaultRestoreConfig("") mb = evmock.NewBus() ) diff --git a/src/pkg/control/options.go 
b/src/pkg/control/options.go index 4f592787e..d82907397 100644 --- a/src/pkg/control/options.go +++ b/src/pkg/control/options.go @@ -1,8 +1,14 @@ package control import ( + "context" + "strings" + + "golang.org/x/exp/slices" + "github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/pkg/control/repository" + "github.com/alcionai/corso/src/pkg/logger" ) // Options holds the optional configurations for a process @@ -64,6 +70,8 @@ const ( Replace CollisionPolicy = "replace" ) +const RootLocation = "/" + // RestoreConfig contains type RestoreConfig struct { // Defines the per-item collision handling policy. @@ -94,6 +102,29 @@ func DefaultRestoreConfig(timeFormat dttm.TimeFormat) RestoreConfig { } } +// EnsureRestoreConfigDefaults sets all non-supported values in the config +// struct to the default value. +func EnsureRestoreConfigDefaults( + ctx context.Context, + rc RestoreConfig, +) RestoreConfig { + if !slices.Contains([]CollisionPolicy{Skip, Copy, Replace}, rc.OnCollision) { + logger.Ctx(ctx). + With( + "bad_collision_policy", rc.OnCollision, + "default_collision_policy", Skip). + Info("setting collision policy to default") + + rc.OnCollision = Skip + } + + if strings.TrimSpace(rc.Location) == RootLocation { + rc.Location = "" + } + + return rc +} + // --------------------------------------------------------------------------- // Feature Flags and Toggles // --------------------------------------------------------------------------- diff --git a/src/pkg/control/options_test.go b/src/pkg/control/options_test.go new file mode 100644 index 000000000..eac04ddfa --- /dev/null +++ b/src/pkg/control/options_test.go @@ -0,0 +1,83 @@ +package control + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" + + "github.com/alcionai/corso/src/internal/tester" +) + +type OptionsUnitSuite struct { + tester.Suite +} + +func TestOptionsUnitSuite(t *testing.T) { + suite.Run(t, &OptionsUnitSuite{Suite: tester.NewUnitSuite(t)}) +} + +func (suite *OptionsUnitSuite) TestEnsureRestoreConfigDefaults() { + table := []struct { + name string + input RestoreConfig + expect RestoreConfig + }{ + { + name: "populated", + input: RestoreConfig{ + OnCollision: Copy, + ProtectedResource: "batman", + Location: "badman", + Drive: "hatman", + }, + expect: RestoreConfig{ + OnCollision: Copy, + ProtectedResource: "batman", + Location: "badman", + Drive: "hatman", + }, + }, + { + name: "unpopulated", + input: RestoreConfig{ + OnCollision: Unknown, + ProtectedResource: "", + Location: "", + Drive: "", + }, + expect: RestoreConfig{ + OnCollision: Skip, + ProtectedResource: "", + Location: "", + Drive: "", + }, + }, + { + name: "populated, but modified", + input: RestoreConfig{ + OnCollision: CollisionPolicy("batman"), + ProtectedResource: "", + Location: "/", + Drive: "", + }, + expect: RestoreConfig{ + OnCollision: Skip, + ProtectedResource: "", + Location: "", + Drive: "", + }, + }, + } + for _, test := range table { + suite.Run(test.name, func() { + t := suite.T() + + ctx, flush := tester.NewContext(t) + defer flush() + + result := EnsureRestoreConfigDefaults(ctx, test.input) + assert.Equal(t, test.expect, result) + }) + } +} diff --git a/src/internal/tester/restore_destination.go b/src/pkg/control/testdata/testdata.go similarity index 85% rename from src/internal/tester/restore_destination.go rename to src/pkg/control/testdata/testdata.go index 5b1c667c3..5c4f5df20 100644 --- a/src/internal/tester/restore_destination.go +++ 
b/src/pkg/control/testdata/testdata.go @@ -1,4 +1,4 @@ -package tester +package testdata import ( "strings" @@ -9,7 +9,7 @@ import ( const RestoreFolderPrefix = "Corso_Test" -func DefaultTestRestoreConfig(namespace string) control.RestoreConfig { +func DefaultRestoreConfig(namespace string) control.RestoreConfig { var ( restoreCfg = control.DefaultRestoreConfig(dttm.SafeForTesting) sft = dttm.FormatNow(dttm.SafeForTesting) diff --git a/src/pkg/repository/loadtest/repository_load_test.go b/src/pkg/repository/loadtest/repository_load_test.go index 9f586f648..86711a84f 100644 --- a/src/pkg/repository/loadtest/repository_load_test.go +++ b/src/pkg/repository/loadtest/repository_load_test.go @@ -20,6 +20,7 @@ import ( "github.com/alcionai/corso/src/pkg/backup" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/control" + ctrlTD "github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/repository" @@ -151,7 +152,7 @@ func runRestoreLoadTest( t.Skip("restore load test is toggled off") } - restoreCfg := tester.DefaultTestRestoreConfig("") + restoreCfg := ctrlTD.DefaultRestoreConfig("") rst, err := r.NewRestore(ctx, backupID, restSel, restoreCfg) require.NoError(t, err, clues.ToCore(err)) diff --git a/src/pkg/repository/repository_test.go b/src/pkg/repository/repository_test.go index f3a06cc9e..84b876d9d 100644 --- a/src/pkg/repository/repository_test.go +++ b/src/pkg/repository/repository_test.go @@ -13,7 +13,8 @@ import ( "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/control" - rep "github.com/alcionai/corso/src/pkg/control/repository" + ctrlRepo "github.com/alcionai/corso/src/pkg/control/repository" + "github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/repository" "github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/storage" @@ -242,7 +243,7 @@ func (suite *RepositoryIntegrationSuite) TestNewRestore() { defer flush() acct := tester.NewM365Account(t) - restoreCfg := tester.DefaultTestRestoreConfig("") + restoreCfg := testdata.DefaultRestoreConfig("") // need to initialize the repository before we can test connecting to it. 
st := tester.NewPrefixedS3Storage(t) @@ -269,7 +270,7 @@ func (suite *RepositoryIntegrationSuite) TestNewMaintenance() { r, err := repository.Initialize(ctx, acct, st, control.Defaults()) require.NoError(t, err, clues.ToCore(err)) - mo, err := r.NewMaintenance(ctx, rep.Maintenance{}) + mo, err := r.NewMaintenance(ctx, ctrlRepo.Maintenance{}) require.NoError(t, err, clues.ToCore(err)) require.NotNil(t, mo) } From 49903a11ae1f67fe0771e92053c96f1199b886d7 Mon Sep 17 00:00:00 2001 From: Keepers Date: Thu, 15 Jun 2023 10:38:55 -0600 Subject: [PATCH 32/41] quick logging fixes (#3624) Co-authored-by: aviator-app[bot] <48659329+aviator-app[bot]@users.noreply.github.com> --- src/internal/m365/exchange/mock/mail.go | 6 +++--- src/internal/m365/exchange/restore.go | 2 +- src/pkg/services/m365/api/serialization.go | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/internal/m365/exchange/mock/mail.go b/src/internal/m365/exchange/mock/mail.go index cb6e296d1..05b259154 100644 --- a/src/internal/m365/exchange/mock/mail.go +++ b/src/internal/m365/exchange/mock/mail.go @@ -744,10 +744,10 @@ func serialize(t *testing.T, item serialization.Parsable) []byte { return byteArray } -func hydrateMessage(byteArray []byte) (models.Messageable, error) { - parseNode, err := kjson.NewJsonParseNodeFactory().GetRootParseNode("application/json", byteArray) +func hydrateMessage(body []byte) (models.Messageable, error) { + parseNode, err := kjson.NewJsonParseNodeFactory().GetRootParseNode("application/json", body) if err != nil { - return nil, clues.Wrap(err, "deserializing bytes into base m365 object") + return nil, clues.Wrap(err, "deserializing bytes into base m365 object").With("body_size", len(body)) } anObject, err := parseNode.GetObjectValue(models.CreateMessageFromDiscriminatorValue) diff --git a/src/internal/m365/exchange/restore.go b/src/internal/m365/exchange/restore.go index f540e621d..77ffbe9f8 100644 --- a/src/internal/m365/exchange/restore.go +++ b/src/internal/m365/exchange/restore.go @@ -174,7 +174,7 @@ func restoreCollection( info, err := ir.restore(ictx, body, userID, destinationID, errs) if err != nil { - el.AddRecoverable(ctx, err) + el.AddRecoverable(ictx, err) continue } diff --git a/src/pkg/services/m365/api/serialization.go b/src/pkg/services/m365/api/serialization.go index 2410ca090..86dad6eb4 100644 --- a/src/pkg/services/m365/api/serialization.go +++ b/src/pkg/services/m365/api/serialization.go @@ -13,7 +13,7 @@ func createFromBytes( ) (serialization.Parsable, error) { parseNode, err := kjson.NewJsonParseNodeFactory().GetRootParseNode("application/json", bytes) if err != nil { - return nil, clues.Wrap(err, "deserializing bytes into base m365 object") + return nil, clues.Wrap(err, "deserializing bytes into base m365 object").With("bytes_len", len(bytes)) } v, err := parseNode.GetObjectValue(createFunc) From cfbed454ea0d53a0b88b8ed27c4551611b0822cd Mon Sep 17 00:00:00 2001 From: Abin Simon Date: Thu, 15 Jun 2023 22:40:29 +0530 Subject: [PATCH 33/41] Add note about skipping attachment fetch errors (#3626) https://github.com/alcionai/corso/pull/3607#discussion_r1229799665 --- #### Does this PR need a docs update or release note? 
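The behavior the note documents, as a hedged standalone sketch (the error matcher and logging here are stand-ins for the real graph and logger helpers): a known-permanent error class gets logged and skipped so the rest of the message is still backed up:

```go
package main

import (
	"errors"
	"fmt"
	"log"
)

// errCannotOpenFileAttachment stands in for the Graph error class the patch
// treats as permanent (the backing item is gone from M365).
var errCannotOpenFileAttachment = errors.New("CannotOpenFileAttachment")

func fetchAttachment(id string) ([]byte, error) {
	// Pretend M365 deleted the item underneath us.
	return nil, errCannotOpenFileAttachment
}

func collectAttachments(ids []string) ([][]byte, error) {
	var out [][]byte

	for _, id := range ids {
		data, err := fetchAttachment(id)
		if err != nil {
			// Not transient: retrying won't help, and the rest of the
			// message is still worth backing up, so skip this one.
			if errors.Is(err, errCannotOpenFileAttachment) {
				log.Printf("skipping attachment %s: %v", id, err)
				continue
			}

			return nil, err
		}

		out = append(out, data)
	}

	return out, nil
}

func main() {
	got, err := collectAttachments([]string{"a1", "a2"})
	fmt.Println(len(got), err) // 0 <nil>: both skipped, backup continues
}
```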
- [ ] :white_check_mark: Yes, it's included - [ ] :clock1: Yes, but in a later PR - [ ] :no_entry: No #### Type of change - [ ] :sunflower: Feature - [ ] :bug: Bugfix - [ ] :world_map: Documentation - [ ] :robot: Supportability/Tests - [ ] :computer: CI/Deployment - [ ] :broom: Tech Debt/Cleanup #### Issue(s) * # #### Test Plan - [ ] :muscle: Manual - [ ] :zap: Unit test - [ ] :green_heart: E2E --- src/pkg/services/m365/api/mail.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/pkg/services/m365/api/mail.go b/src/pkg/services/m365/api/mail.go index 4c608ad28..6645bb27e 100644 --- a/src/pkg/services/m365/api/mail.go +++ b/src/pkg/services/m365/api/mail.go @@ -406,6 +406,9 @@ func (c Mail) GetItem( ByAttachmentId(ptr.Val(a.GetId())). Get(ctx, attachConfig) if err != nil { + // CannotOpenFileAttachment errors are not transient and + // can happen when the original item has somehow been + // deleted from M365, so we can skip these if graph.IsErrCannotOpenFileAttachment(err) { logger.CtxErr(ctx, err). With( From c0f428ddc8329fd857347464188024bcc8080e53 Mon Sep 17 00:00:00 2001 From: ashmrtn <3891298+ashmrtn@users.noreply.github.com> Date: Thu, 15 Jun 2023 10:43:18 -0700 Subject: [PATCH 34/41] Refactor backup code to use BackupBases functions (#3596) Now that BackupBases defines functions, leverage them in other code to reduce the number of times we fetch Backup models, and take advantage of the stronger invariants the new FindBases function provides. --- #### Does this PR need a docs update or release note? - [ ] :white_check_mark: Yes, it's included - [ ] :clock1: Yes, but in a later PR - [x] :no_entry: No #### Type of change - [ ] :sunflower: Feature - [ ] :bug: Bugfix - [ ] :world_map: Documentation - [ ] :robot: Supportability/Tests - [ ] :computer: CI/Deployment - [x] :broom: Tech Debt/Cleanup #### Issue(s) * #3525 #### Test Plan - [ ] :muscle: Manual - [x] :zap: Unit test - [ ] :green_heart: E2E --- src/internal/kopia/base_finder.go | 25 +- src/internal/kopia/base_finder_test.go | 14 +- src/internal/kopia/inject/inject.go | 2 +- src/internal/operations/backup.go | 135 +- .../operations/backup_integration_test.go | 6 +- src/internal/operations/backup_test.go | 356 +--- src/internal/operations/common.go | 33 +- src/internal/operations/manifests.go | 268 +-- src/internal/operations/manifests_test.go | 1659 +++++++---------- 9 files changed, 868 insertions(+), 1630 deletions(-) diff --git a/src/internal/kopia/base_finder.go b/src/internal/kopia/base_finder.go index 625ded643..9ac651512 100644 --- a/src/internal/kopia/base_finder.go +++ b/src/internal/kopia/base_finder.go @@ -249,6 +249,8 @@ func (b *baseFinder) findBasesInSet( // If we've made it to this point then we're considering the backup // complete as it has both an item data snapshot and a backup details // snapshot. + logger.Ctx(ictx).Infow("found complete backup", "base_backup_id", bup.ID) + me := ManifestEntry{ Manifest: man, Reasons: []Reason{reason}, @@ -293,11 +295,11 @@ func (b *baseFinder) getBase( return b.findBasesInSet(ctx, reason, metas) } -func (b *baseFinder) findBases( +func (b *baseFinder) FindBases( ctx context.Context, reasons []Reason, tags map[string]string, -) (backupBases, error) { +) BackupBases { var ( // All maps go from ID -> entry. We need to track by ID so we can coalesce // the reason for selecting something.
Kopia assisted snapshots also use @@ -361,24 +363,13 @@ func (b *baseFinder) findBases( } } - return backupBases{ + res := &backupBases{ backups: maps.Values(baseBups), mergeBases: maps.Values(baseSnaps), assistBases: maps.Values(kopiaAssistSnaps), - }, nil -} - -func (b *baseFinder) FindBases( - ctx context.Context, - reasons []Reason, - tags map[string]string, -) ([]ManifestEntry, error) { - bb, err := b.findBases(ctx, reasons, tags) - if err != nil { - return nil, clues.Stack(err) } - // assistBases contains all snapshots so we can return it while maintaining - // almost all compatibility. - return bb.assistBases, nil + res.fixupAndVerify(ctx) + + return res } diff --git a/src/internal/kopia/base_finder_test.go b/src/internal/kopia/base_finder_test.go index c950b4f9d..f76b3c81a 100644 --- a/src/internal/kopia/base_finder_test.go +++ b/src/internal/kopia/base_finder_test.go @@ -5,11 +5,9 @@ import ( "testing" "time" - "github.com/alcionai/clues" "github.com/kopia/kopia/repo/manifest" "github.com/kopia/kopia/snapshot" "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" "github.com/alcionai/corso/src/internal/data" @@ -332,8 +330,7 @@ func (suite *BaseFinderUnitSuite) TestNoResult_NoBackupsOrSnapshots() { }, } - bb, err := bf.findBases(ctx, reasons, nil) - assert.NoError(t, err, "getting bases: %v", clues.ToCore(err)) + bb := bf.FindBases(ctx, reasons, nil) assert.Empty(t, bb.MergeBases()) assert.Empty(t, bb.AssistBases()) } @@ -356,8 +353,7 @@ func (suite *BaseFinderUnitSuite) TestNoResult_ErrorListingSnapshots() { }, } - bb, err := bf.findBases(ctx, reasons, nil) - assert.NoError(t, err, "getting bases: %v", clues.ToCore(err)) + bb := bf.FindBases(ctx, reasons, nil) assert.Empty(t, bb.MergeBases()) assert.Empty(t, bb.AssistBases()) } @@ -817,11 +813,10 @@ func (suite *BaseFinderUnitSuite) TestGetBases() { bg: &mockModelGetter{data: test.backupData}, } - bb, err := bf.findBases( + bb := bf.FindBases( ctx, test.input, nil) - require.NoError(t, err, "getting bases: %v", clues.ToCore(err)) checkBackupEntriesMatch( t, @@ -912,11 +907,10 @@ func (suite *BaseFinderUnitSuite) TestFindBases_CustomTags() { bg: &mockModelGetter{data: backupData}, } - bb, err := bf.findBases( + bb := bf.FindBases( ctx, testAllUsersAllCats, test.tags) - require.NoError(t, err, "getting bases: %v", clues.ToCore(err)) checkManifestEntriesMatch( t, diff --git a/src/internal/kopia/inject/inject.go b/src/internal/kopia/inject/inject.go index d97e06d31..6921c353d 100644 --- a/src/internal/kopia/inject/inject.go +++ b/src/internal/kopia/inject/inject.go @@ -39,6 +39,6 @@ type ( ctx context.Context, reasons []kopia.Reason, tags map[string]string, - ) ([]kopia.ManifestEntry, error) + ) kopia.BackupBases } ) diff --git a/src/internal/operations/backup.go b/src/internal/operations/backup.go index 4772f8b20..f3a3cbd55 100644 --- a/src/internal/operations/backup.go +++ b/src/internal/operations/backup.go @@ -6,6 +6,7 @@ import ( "github.com/alcionai/clues" "github.com/google/uuid" + "github.com/kopia/kopia/repo/manifest" "github.com/alcionai/corso/src/internal/common/crash" "github.com/alcionai/corso/src/internal/common/dttm" @@ -296,20 +297,10 @@ func (op *BackupOperation) do( return nil, clues.Stack(err) } - type baseFinder struct { - kinject.BaseFinder - kinject.RestoreProducer - } - - bf := baseFinder{ - BaseFinder: kbf, - RestoreProducer: op.kopia, - } - mans, mdColls, canUseMetaData, err := produceManifestsAndMetadata( ctx, - bf, - op.store, + kbf, + op.kopia, 
reasons, fallbackReasons, op.account.ID(), op.incremental) if err != nil { return nil, clues.Stack(err) } if canUseMetaData { - _, lastBackupVersion, err = lastCompleteBackups(ctx, op.store, mans) - if err != nil { - return nil, clues.Wrap(err, "retrieving prior backups") - } + lastBackupVersion = mans.MinBackupVersion() } cs, ssmb, canUsePreviousBackup, err := produceBackupDataCollections( @@ -358,9 +346,8 @@ func (op *BackupOperation) do( err = mergeDetails( ctx, - op.store, detailsStore, - mans, + mans.Backups(), toMerge, deets, writeStats, @@ -482,7 +469,7 @@ func consumeBackupCollections( bc kinject.BackupConsumer, tenantID string, reasons []kopia.Reason, - mans []kopia.ManifestEntry, + bbs kopia.BackupBases, cs []data.BackupCollection, pmr prefixmatcher.StringSetReader, backupID model.StableID, @@ -506,9 +493,24 @@ } } - bases := make([]kopia.IncrementalBase, 0, len(mans)) + // AssistBases should be the upper bound for how many snapshots we pass in. + bases := make([]kopia.IncrementalBase, 0, len(bbs.AssistBases())) + // Track IDs we've seen already so we don't accidentally duplicate some + // manifests. This can be removed when we move the code below into the kopia + // package. + ids := map[manifest.ID]struct{}{} - for _, m := range mans { + var mb []kopia.ManifestEntry + + if bbs != nil { + mb = bbs.MergeBases() + } + + // TODO(ashmrtn): Make a wrapper for Reason that allows adding a tenant and + // make a function that will spit out a prefix that includes the tenant. With + // that done, this code can be moved to kopia wrapper since it's really more + // specific to that. + for _, m := range mb { paths := make([]*path.Builder, 0, len(m.Reasons)) services := map[string]struct{}{} categories := map[string]struct{}{} @@ -524,6 +526,8 @@ categories[reason.Category.String()] = struct{}{} } + ids[m.ID] = struct{}{} + bases = append(bases, kopia.IncrementalBase{ Manifest: m.Manifest, SubtreePaths: paths, @@ -552,6 +556,18 @@ "base_backup_id", mbID) } + // At the moment kopia assisted snapshots are in the same set as merge bases. + // When we fixup generating subtree paths we can remove this. + if bbs != nil { + for _, ab := range bbs.AssistBases() { + if _, ok := ids[ab.ID]; ok { + continue + } + + bases = append(bases, kopia.IncrementalBase{Manifest: ab.Manifest}) + } + } + kopiaStats, deets, itemsSourcedFromBase, err := bc.ConsumeBackupCollections( ctx, bases, @@ -663,61 +679,10 @@ func getNewPathRefs( return newPath, newLoc, updated, nil } -func lastCompleteBackups( - ctx context.Context, - ms *store.Wrapper, - mans []kopia.ManifestEntry, -) (map[string]*backup.Backup, int, error) { - var ( - oldestVersion = version.NoBackup - result = map[string]*backup.Backup{} - ) - - if len(mans) == 0 { - return result, -1, nil - } - - for _, man := range mans { - // For now skip snapshots that aren't complete. We will need to revisit this - // when we tackle restartability.
- if len(man.IncompleteReason) > 0 { - continue - } - - var ( - mctx = clues.Add(ctx, "base_manifest_id", man.ID) - reasons = man.Reasons - ) - - bID, ok := man.GetTag(kopia.TagBackupID) - if !ok { - return result, oldestVersion, clues.New("no backup ID in snapshot manifest").WithClues(mctx) - } - - mctx = clues.Add(mctx, "base_manifest_backup_id", bID) - - bup, err := getBackupFromID(mctx, model.StableID(bID), ms) - if err != nil { - return result, oldestVersion, err - } - - for _, r := range reasons { - result[r.Key()] = bup - } - - if oldestVersion == -1 || bup.Version < oldestVersion { - oldestVersion = bup.Version - } - } - - return result, oldestVersion, nil -} - func mergeDetails( ctx context.Context, - ms *store.Wrapper, detailsStore streamstore.Streamer, - mans []kopia.ManifestEntry, + backups []kopia.BackupEntry, dataFromBackup kopia.DetailsMergeInfoer, deets *details.Builder, writeStats *kopia.BackupStats, @@ -738,29 +703,15 @@ func mergeDetails( var addedEntries int - for _, man := range mans { + for _, baseBackup := range backups { var ( - mctx = clues.Add(ctx, "base_manifest_id", man.ID) + mctx = clues.Add(ctx, "base_backup_id", baseBackup.ID) manifestAddedEntries int ) - // For now skip snapshots that aren't complete. We will need to revisit this - // when we tackle restartability. - if len(man.IncompleteReason) > 0 { - continue - } - - bID, ok := man.GetTag(kopia.TagBackupID) - if !ok { - return clues.New("no backup ID in snapshot manifest").WithClues(mctx) - } - - mctx = clues.Add(mctx, "base_manifest_backup_id", bID) - - baseBackup, baseDeets, err := getBackupAndDetailsFromID( + baseDeets, err := getDetailsFromBackup( mctx, - model.StableID(bID), - ms, + baseBackup.Backup, detailsStore, errs) if err != nil { @@ -781,7 +732,7 @@ func mergeDetails( // // TODO(ashmrtn): This logic will need expanded to cover entries from // checkpoints if we start doing kopia-assisted incrementals for those. - if !matchesReason(man.Reasons, rr) { + if !matchesReason(baseBackup.Reasons, rr) { continue } diff --git a/src/internal/operations/backup_integration_test.go b/src/internal/operations/backup_integration_test.go index df57dea03..771c77122 100644 --- a/src/internal/operations/backup_integration_test.go +++ b/src/internal/operations/backup_integration_test.go @@ -232,10 +232,8 @@ func checkBackupIsInManifests( bf, err := kw.NewBaseFinder(bo.store) require.NoError(t, err, clues.ToCore(err)) - mans, err := bf.FindBases(ctx, reasons, tags) - require.NoError(t, err, clues.ToCore(err)) - - for _, man := range mans { + mans := bf.FindBases(ctx, reasons, tags) + for _, man := range mans.MergeBases() { bID, ok := man.GetTag(kopia.TagBackupID) if !assert.Truef(t, ok, "snapshot manifest %s missing backup ID tag", man.ID) { continue diff --git a/src/internal/operations/backup_test.go b/src/internal/operations/backup_test.go index 248d40087..c8ff42f9d 100644 --- a/src/internal/operations/backup_test.go +++ b/src/internal/operations/backup_test.go @@ -2,13 +2,11 @@ package operations import ( "context" - "fmt" stdpath "path" "testing" "time" "github.com/alcionai/clues" - "github.com/kopia/kopia/repo/manifest" "github.com/kopia/kopia/snapshot" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -128,77 +126,6 @@ func (mbu mockBackupConsumer) ConsumeBackupCollections( // ----- model store for backups -type mockBackupStorer struct { - // Only using this to store backup models right now. 
- entries map[model.StableID]backup.Backup -} - -func (mbs mockBackupStorer) Get( - ctx context.Context, - s model.Schema, - id model.StableID, - toPopulate model.Model, -) error { - ctx = clues.Add( - ctx, - "model_schema", s, - "model_id", id, - "model_type", fmt.Sprintf("%T", toPopulate)) - - if s != model.BackupSchema { - return clues.New("unexpected schema").WithClues(ctx) - } - - r, ok := mbs.entries[id] - if !ok { - return clues.New("model not found").WithClues(ctx) - } - - bu, ok := toPopulate.(*backup.Backup) - if !ok { - return clues.New("bad population type").WithClues(ctx) - } - - *bu = r - - return nil -} - -func (mbs mockBackupStorer) Delete(context.Context, model.Schema, model.StableID) error { - return clues.New("not implemented") -} - -func (mbs mockBackupStorer) DeleteWithModelStoreID(context.Context, manifest.ID) error { - return clues.New("not implemented") -} - -func (mbs mockBackupStorer) GetIDsForType( - context.Context, - model.Schema, - map[string]string, -) ([]*model.BaseModel, error) { - return nil, clues.New("not implemented") -} - -func (mbs mockBackupStorer) GetWithModelStoreID( - context.Context, - model.Schema, - manifest.ID, - model.Model, -) error { - return clues.New("not implemented") -} - -func (mbs mockBackupStorer) Put(context.Context, model.Schema, model.Model) error { - return clues.New("not implemented") -} - -func (mbs mockBackupStorer) Update(context.Context, model.Schema, model.Model) error { - return clues.New("not implemented") -} - -// ----- model store for backups - type mockDetailsMergeInfoer struct { repoRefs map[string]path.Path locs map[string]*path.Builder @@ -260,27 +187,6 @@ func makeMetadataBasePath( return p } -func makeMetadataPath( - t *testing.T, - tenant string, - service path.ServiceType, - resourceOwner string, - category path.CategoryType, - fileName string, -) path.Path { - t.Helper() - - p, err := path.Builder{}.Append(fileName).ToServiceCategoryMetadataPath( - tenant, - resourceOwner, - service, - category, - true) - require.NoError(t, err, clues.ToCore(err)) - - return p -} - func makeFolderEntry( t *testing.T, pb, loc *path.Builder, @@ -379,25 +285,6 @@ func makeDetailsEntry( return res } -// TODO(ashmrtn): This should belong to some code that lives in the kopia -// package that is only compiled when running tests. -func makeKopiaTagKey(k string) string { - return "tag:" + k -} - -func makeManifest(t *testing.T, backupID model.StableID, incompleteReason string) *snapshot.Manifest { - t.Helper() - - tagKey := makeKopiaTagKey(kopia.TagBackupID) - - return &snapshot.Manifest{ - Tags: map[string]string{ - tagKey: string(backupID), - }, - IncompleteReason: incompleteReason, - } -} - // --------------------------------------------------------------------------- // unit tests // --------------------------------------------------------------------------- @@ -532,20 +419,20 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_ConsumeBackupDataCollections ) table := []struct { - name string - inputMan []kopia.ManifestEntry + name string + // Backup model is untouched in this test so there's no need to populate it. 
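+		// (Editor's note: an illustrative aside, not part of the original
+		// change.) Inputs below are built with the kopia mock helpers, e.g.
+		//
+		//   kopia.NewMockBackupBases().
+		//   	WithMergeBases(kopia.ManifestEntry{
+		//   		Manifest: manifest1,
+		//   		Reasons:  []kopia.Reason{emailReason},
+		//   	}).
+		//   	ClearMockAssistBases()
+		//
+		// WithMergeBases appears to register its entries as assist bases too
+		// (the two sets currently overlap), which is why cases that want
+		// merge-only input finish with ClearMockAssistBases.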
+ input kopia.BackupBases expected []kopia.IncrementalBase }{ { name: "SingleManifestSingleReason", - inputMan: []kopia.ManifestEntry{ - { + input: kopia.NewMockBackupBases().WithMergeBases( + kopia.ManifestEntry{ Manifest: manifest1, Reasons: []kopia.Reason{ emailReason, }, - }, - }, + }).ClearMockAssistBases(), expected: []kopia.IncrementalBase{ { Manifest: manifest1, @@ -557,15 +444,14 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_ConsumeBackupDataCollections }, { name: "SingleManifestMultipleReasons", - inputMan: []kopia.ManifestEntry{ - { + input: kopia.NewMockBackupBases().WithMergeBases( + kopia.ManifestEntry{ Manifest: manifest1, Reasons: []kopia.Reason{ emailReason, contactsReason, }, - }, - }, + }).ClearMockAssistBases(), expected: []kopia.IncrementalBase{ { Manifest: manifest1, @@ -578,22 +464,21 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_ConsumeBackupDataCollections }, { name: "MultipleManifestsMultipleReasons", - inputMan: []kopia.ManifestEntry{ - { + input: kopia.NewMockBackupBases().WithMergeBases( + kopia.ManifestEntry{ Manifest: manifest1, Reasons: []kopia.Reason{ emailReason, contactsReason, }, }, - { + kopia.ManifestEntry{ Manifest: manifest2, Reasons: []kopia.Reason{ emailReason, contactsReason, }, - }, - }, + }).ClearMockAssistBases(), expected: []kopia.IncrementalBase{ { Manifest: manifest1, @@ -611,6 +496,33 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_ConsumeBackupDataCollections }, }, }, + { + name: "Single Manifest Single Reason With Assist Base", + input: kopia.NewMockBackupBases().WithMergeBases( + kopia.ManifestEntry{ + Manifest: manifest1, + Reasons: []kopia.Reason{ + emailReason, + }, + }).WithAssistBases( + kopia.ManifestEntry{ + Manifest: manifest2, + Reasons: []kopia.Reason{ + contactsReason, + }, + }), + expected: []kopia.IncrementalBase{ + { + Manifest: manifest1, + SubtreePaths: []*path.Builder{ + emailBuilder, + }, + }, + { + Manifest: manifest2, + }, + }, + }, } for _, test := range table { @@ -637,7 +549,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_ConsumeBackupDataCollections mbu, tenant, nil, - test.inputMan, + test.input, nil, nil, model.StableID(""), @@ -731,9 +643,8 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems table := []struct { name string - populatedModels map[model.StableID]backup.Backup populatedDetails map[string]*details.Details - inputMans []kopia.ManifestEntry + inputBackups []kopia.BackupEntry mdm *mockDetailsMergeInfoer errCheck assert.ErrorAssertionFunc @@ -752,24 +663,6 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems // Use empty slice so we don't error out on nil != empty. 
expectedEntries: []*details.Entry{}, }, - { - name: "BackupIDNotFound", - mdm: func() *mockDetailsMergeInfoer { - res := newMockDetailsMergeInfoer() - res.add(itemPath1, itemPath1, locationPath1) - - return res - }(), - inputMans: []kopia.ManifestEntry{ - { - Manifest: makeManifest(suite.T(), "foo", ""), - Reasons: []kopia.Reason{ - pathReason1, - }, - }, - }, - errCheck: assert.Error, - }, { name: "DetailsIDNotFound", mdm: func() *mockDetailsMergeInfoer { @@ -778,22 +671,19 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems return res }(), - inputMans: []kopia.ManifestEntry{ + inputBackups: []kopia.BackupEntry{ { - Manifest: makeManifest(suite.T(), backup1.ID, ""), + Backup: &backup.Backup{ + BaseModel: model.BaseModel{ + ID: backup1.ID, + }, + DetailsID: "foo", + }, Reasons: []kopia.Reason{ pathReason1, }, }, }, - populatedModels: map[model.StableID]backup.Backup{ - backup1.ID: { - BaseModel: model.BaseModel{ - ID: backup1.ID, - }, - DetailsID: "foo", - }, - }, errCheck: assert.Error, }, { @@ -805,17 +695,14 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems return res }(), - inputMans: []kopia.ManifestEntry{ + inputBackups: []kopia.BackupEntry{ { - Manifest: makeManifest(suite.T(), backup1.ID, ""), + Backup: &backup1, Reasons: []kopia.Reason{ pathReason1, }, }, }, - populatedModels: map[model.StableID]backup.Backup{ - backup1.ID: backup1, - }, populatedDetails: map[string]*details.Details{ backup1.DetailsID: { DetailsModel: details.DetailsModel{ @@ -835,23 +722,20 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems return res }(), - inputMans: []kopia.ManifestEntry{ + inputBackups: []kopia.BackupEntry{ { - Manifest: makeManifest(suite.T(), backup1.ID, ""), + Backup: &backup1, Reasons: []kopia.Reason{ pathReason1, }, }, { - Manifest: makeManifest(suite.T(), backup1.ID, ""), + Backup: &backup1, Reasons: []kopia.Reason{ pathReason1, }, }, }, - populatedModels: map[model.StableID]backup.Backup{ - backup1.ID: backup1, - }, populatedDetails: map[string]*details.Details{ backup1.DetailsID: { DetailsModel: details.DetailsModel{ @@ -871,17 +755,14 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems return res }(), - inputMans: []kopia.ManifestEntry{ + inputBackups: []kopia.BackupEntry{ { - Manifest: makeManifest(suite.T(), backup1.ID, ""), + Backup: &backup1, Reasons: []kopia.Reason{ pathReason1, }, }, }, - populatedModels: map[model.StableID]backup.Backup{ - backup1.ID: backup1, - }, populatedDetails: map[string]*details.Details{ backup1.DetailsID: { DetailsModel: details.DetailsModel{ @@ -933,17 +814,14 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems return res }(), - inputMans: []kopia.ManifestEntry{ + inputBackups: []kopia.BackupEntry{ { - Manifest: makeManifest(suite.T(), backup1.ID, ""), + Backup: &backup1, Reasons: []kopia.Reason{ pathReason1, }, }, }, - populatedModels: map[model.StableID]backup.Backup{ - backup1.ID: backup1, - }, populatedDetails: map[string]*details.Details{ backup1.DetailsID: { DetailsModel: details.DetailsModel{ @@ -963,17 +841,14 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems return res }(), - inputMans: []kopia.ManifestEntry{ + inputBackups: []kopia.BackupEntry{ { - Manifest: makeManifest(suite.T(), backup1.ID, ""), + Backup: &backup1, Reasons: []kopia.Reason{ pathReason1, }, }, }, - populatedModels: map[model.StableID]backup.Backup{ - backup1.ID: backup1, - }, 
populatedDetails: map[string]*details.Details{ backup1.DetailsID: { DetailsModel: details.DetailsModel{ @@ -996,17 +871,14 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems return res }(), - inputMans: []kopia.ManifestEntry{ + inputBackups: []kopia.BackupEntry{ { - Manifest: makeManifest(suite.T(), backup1.ID, ""), + Backup: &backup1, Reasons: []kopia.Reason{ pathReason1, }, }, }, - populatedModels: map[model.StableID]backup.Backup{ - backup1.ID: backup1, - }, populatedDetails: map[string]*details.Details{ backup1.DetailsID: { DetailsModel: details.DetailsModel{ @@ -1029,17 +901,14 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems return res }(), - inputMans: []kopia.ManifestEntry{ + inputBackups: []kopia.BackupEntry{ { - Manifest: makeManifest(suite.T(), backup1.ID, ""), + Backup: &backup1, Reasons: []kopia.Reason{ pathReason1, }, }, }, - populatedModels: map[model.StableID]backup.Backup{ - backup1.ID: backup1, - }, populatedDetails: map[string]*details.Details{ backup1.DetailsID: { DetailsModel: details.DetailsModel{ @@ -1063,17 +932,14 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems return res }(), - inputMans: []kopia.ManifestEntry{ + inputBackups: []kopia.BackupEntry{ { - Manifest: makeManifest(suite.T(), backup1.ID, ""), + Backup: &backup1, Reasons: []kopia.Reason{ pathReason1, }, }, }, - populatedModels: map[model.StableID]backup.Backup{ - backup1.ID: backup1, - }, populatedDetails: map[string]*details.Details{ backup1.DetailsID: { DetailsModel: details.DetailsModel{ @@ -1097,24 +963,20 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems return res }(), - inputMans: []kopia.ManifestEntry{ + inputBackups: []kopia.BackupEntry{ { - Manifest: makeManifest(suite.T(), backup1.ID, ""), + Backup: &backup1, Reasons: []kopia.Reason{ pathReason1, }, }, { - Manifest: makeManifest(suite.T(), backup2.ID, ""), + Backup: &backup2, Reasons: []kopia.Reason{ pathReason3, }, }, }, - populatedModels: map[model.StableID]backup.Backup{ - backup1.ID: backup1, - backup2.ID: backup2, - }, populatedDetails: map[string]*details.Details{ backup1.DetailsID: { DetailsModel: details.DetailsModel{ @@ -1140,54 +1002,6 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems makeDetailsEntry(suite.T(), itemPath3, locationPath3, 37, false), }, }, - { - name: "SomeBasesIncomplete", - mdm: func() *mockDetailsMergeInfoer { - res := newMockDetailsMergeInfoer() - res.add(itemPath1, itemPath1, locationPath1) - - return res - }(), - inputMans: []kopia.ManifestEntry{ - { - Manifest: makeManifest(suite.T(), backup1.ID, ""), - Reasons: []kopia.Reason{ - pathReason1, - }, - }, - { - Manifest: makeManifest(suite.T(), backup2.ID, "checkpoint"), - Reasons: []kopia.Reason{ - pathReason1, - }, - }, - }, - populatedModels: map[model.StableID]backup.Backup{ - backup1.ID: backup1, - backup2.ID: backup2, - }, - populatedDetails: map[string]*details.Details{ - backup1.DetailsID: { - DetailsModel: details.DetailsModel{ - Entries: []details.Entry{ - *makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false), - }, - }, - }, - backup2.DetailsID: { - DetailsModel: details.DetailsModel{ - Entries: []details.Entry{ - // This entry should not be picked due to being incomplete. 
- *makeDetailsEntry(suite.T(), itemPath1, locationPath1, 84, false), - }, - }, - }, - }, - errCheck: assert.NoError, - expectedEntries: []*details.Entry{ - makeDetailsEntry(suite.T(), itemPath1, locationPath1, 42, false), - }, - }, } for _, test := range table { @@ -1198,15 +1012,13 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems defer flush() mds := ssmock.Streamer{Deets: test.populatedDetails} - w := &store.Wrapper{Storer: mockBackupStorer{entries: test.populatedModels}} deets := details.Builder{} writeStats := kopia.BackupStats{} err := mergeDetails( ctx, - w, mds, - test.inputMans, + test.inputBackups, test.mdm, &deets, &writeStats, @@ -1247,30 +1059,22 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsFolde locPath1 = path.Builder{}.Append(itemPath1.Folders()...) - backup1 = backup.Backup{ - BaseModel: model.BaseModel{ - ID: "bid1", - }, - DetailsID: "did1", - } - pathReason1 = kopia.Reason{ ResourceOwner: itemPath1.ResourceOwner(), Service: itemPath1.Service(), Category: itemPath1.Category(), } - inputMans = []kopia.ManifestEntry{ - { - Manifest: makeManifest(t, backup1.ID, ""), - Reasons: []kopia.Reason{ - pathReason1, + backup1 = kopia.BackupEntry{ + Backup: &backup.Backup{ + BaseModel: model.BaseModel{ + ID: "bid1", }, + DetailsID: "did1", + }, + Reasons: []kopia.Reason{ + pathReason1, }, - } - - populatedModels = map[model.StableID]backup.Backup{ - backup1.ID: backup1, } itemSize = 42 @@ -1313,16 +1117,14 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsFolde var ( mds = ssmock.Streamer{Deets: populatedDetails} - w = &store.Wrapper{Storer: mockBackupStorer{entries: populatedModels}} deets = details.Builder{} writeStats = kopia.BackupStats{} ) err := mergeDetails( ctx, - w, mds, - inputMans, + []kopia.BackupEntry{backup1}, mdm, &deets, &writeStats, diff --git a/src/internal/operations/common.go b/src/internal/operations/common.go index 70c53d2cb..57a40d2de 100644 --- a/src/internal/operations/common.go +++ b/src/internal/operations/common.go @@ -13,19 +13,6 @@ import ( "github.com/alcionai/corso/src/pkg/store" ) -func getBackupFromID( - ctx context.Context, - backupID model.StableID, - ms *store.Wrapper, -) (*backup.Backup, error) { - bup, err := ms.GetBackup(ctx, backupID) - if err != nil { - return nil, clues.Wrap(err, "getting backup") - } - - return bup, nil -} - func getBackupAndDetailsFromID( ctx context.Context, backupID model.StableID, @@ -38,6 +25,20 @@ func getBackupAndDetailsFromID( return nil, nil, clues.Wrap(err, "getting backup") } + deets, err := getDetailsFromBackup(ctx, bup, detailsStore, errs) + if err != nil { + return nil, nil, clues.Stack(err) + } + + return bup, deets, nil +} + +func getDetailsFromBackup( + ctx context.Context, + bup *backup.Backup, + detailsStore streamstore.Reader, + errs *fault.Bus, +) (*details.Details, error) { var ( deets details.Details umt = streamstore.DetailsReader(details.UnmarshalTo(&deets)) @@ -49,12 +50,12 @@ func getBackupAndDetailsFromID( } if len(ssid) == 0 { - return bup, nil, clues.New("no details or errors in backup").WithClues(ctx) + return nil, clues.New("no details or errors in backup").WithClues(ctx) } if err := detailsStore.Read(ctx, ssid, umt, errs); err != nil { - return nil, nil, clues.Wrap(err, "reading backup data from streamstore") + return nil, clues.Wrap(err, "reading backup data from streamstore") } - return bup, &deets, nil + return &deets, nil } diff --git a/src/internal/operations/manifests.go 
b/src/internal/operations/manifests.go
index 121481066..5e1c79e4f 100644
--- a/src/internal/operations/manifests.go
+++ b/src/internal/operations/manifests.go
@@ -4,74 +4,39 @@ import (
 	"context"
 
 	"github.com/alcionai/clues"
-	"github.com/kopia/kopia/repo/manifest"
 	"github.com/pkg/errors"
-	"golang.org/x/exp/maps"
 
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/kopia"
 	"github.com/alcionai/corso/src/internal/kopia/inject"
 	"github.com/alcionai/corso/src/internal/m365/graph"
-	"github.com/alcionai/corso/src/internal/model"
-	"github.com/alcionai/corso/src/pkg/backup"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/logger"
 	"github.com/alcionai/corso/src/pkg/path"
 )
 
-type manifestRestorer interface {
-	inject.BaseFinder
-	inject.RestoreProducer
-}
-
-type getBackuper interface {
-	GetBackup(
-		ctx context.Context,
-		backupID model.StableID,
-	) (*backup.Backup, error)
-}
-
 // calls kopia to retrieve prior backup manifests and metadata collections to supply backup heuristics.
+// TODO(ashmrtn): Make this a helper function that always returns as much as
+// possible and call it from another function that drops metadata and/or
+// kopia-assisted incremental bases based on flag values.
 func produceManifestsAndMetadata(
 	ctx context.Context,
-	mr manifestRestorer,
-	gb getBackuper,
+	bf inject.BaseFinder,
+	rp inject.RestoreProducer,
 	reasons, fallbackReasons []kopia.Reason,
 	tenantID string,
 	getMetadata bool,
-) ([]kopia.ManifestEntry, []data.RestoreCollection, bool, error) {
+) (kopia.BackupBases, []data.RestoreCollection, bool, error) {
 	var (
 		tags          = map[string]string{kopia.TagBackupCategory: ""}
 		metadataFiles = graph.AllMetadataFileNames()
 		collections   []data.RestoreCollection
 	)
 
-	ms, err := mr.FindBases(ctx, reasons, tags)
-	if err != nil {
-		return nil, nil, false, clues.Wrap(err, "looking up prior snapshots")
-	}
-
-	// We only need to check that we have 1:1 reason:base if we're doing an
-	// incremental with associated metadata. This ensures that we're only sourcing
-	// data from a single Point-In-Time (base) for each incremental backup.
-	//
-	// TODO(ashmrtn): This may need updating if we start sourcing item backup
-	// details from previous snapshots when using kopia-assisted incrementals.
-	if err := verifyDistinctBases(ctx, ms); err != nil {
-		logger.CtxErr(ctx, err).Info("base snapshot collision, falling back to full backup")
-		return ms, nil, false, nil
-	}
-
-	fbms, err := mr.FindBases(ctx, fallbackReasons, tags)
-	if err != nil {
-		return nil, nil, false, clues.Wrap(err, "looking up prior snapshots under alternate id")
-	}
-
-	// Also check distinct bases for the fallback set.
-	if err := verifyDistinctBases(ctx, fbms); err != nil {
-		logger.CtxErr(ctx, err).Info("fallback snapshot collision, falling back to full backup")
-		return ms, nil, false, nil
-	}
+	bb := bf.FindBases(ctx, reasons, tags)
+	// TODO(ashmrtn): Only fetch these if we haven't already covered all the
+	// reasons for this backup.
+	fbb := bf.FindBases(ctx, fallbackReasons, tags)
 
 	// one of three cases can occur when retrieving backups across reason migrations:
 	// 1. the current reasons don't match any manifests, and we use the fallback to
 	//    find manifests made under the prior owner identifier.
 	// 2. the current reasons only contain an incomplete manifest, and the fallback
 	//    can find a complete manifest.
 	// 3. the current reasons contain all the necessary manifests.
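+	// (Editor's note: an illustrative sketch, not part of the original
+	// change.) MergeBackupBases keys bases on service+category only,
+	// deliberately ignoring ResourceOwner, because fallback reasons exist
+	// precisely for cases where the owner identifier migrated. Assuming that
+	// semantic, the merge behaves roughly like:
+	//
+	//   covered := map[string]struct{}{}
+	//   for _, m := range bb.MergeBases() {
+	//   	for _, r := range m.Reasons {
+	//   		covered[r.Service.String()+r.Category.String()] = struct{}{}
+	//   	}
+	//   }
+	//   // fallback merge bases (and their backups) are kept only for reason
+	//   // keys that aren't already covered by the current bases.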
-	ms = unionManifests(reasons, ms, fbms)
+	bb = bb.MergeBackupBases(
+		ctx,
+		fbb,
+		func(r kopia.Reason) string {
+			return r.Service.String() + r.Category.String()
+		})
 
 	if !getMetadata {
-		return ms, nil, false, nil
+		logger.Ctx(ctx).Debug("full backup requested, dropping merge bases")
+
+		// TODO(ashmrtn): If this function is moved to be a helper function then
+		// move this modification of the bases to the caller of this function.
+		bb.ClearMergeBases()
+
+		return bb, nil, false, nil
 	}
 
-	for _, man := range ms {
-		if len(man.IncompleteReason) > 0 {
-			continue
-		}
-
+	for _, man := range bb.MergeBases() {
 		mctx := clues.Add(ctx, "manifest_id", man.ID)
 
-		bID, ok := man.GetTag(kopia.TagBackupID)
-		if !ok {
-			err = clues.New("snapshot manifest missing backup ID").WithClues(ctx)
-			return nil, nil, false, err
-		}
-
-		mctx = clues.Add(mctx, "manifest_backup_id", bID)
-
-		bup, err := gb.GetBackup(mctx, model.StableID(bID))
-		// if no backup exists for any of the complete manifests, we want
-		// to fall back to a complete backup.
-		if errors.Is(err, data.ErrNotFound) {
-			logger.Ctx(mctx).Infow("backup missing, falling back to full backup", clues.In(mctx).Slice()...)
-			return ms, nil, false, nil
-		}
-
-		if err != nil {
-			return nil, nil, false, clues.Wrap(err, "retrieving prior backup data")
-		}
-
-		ssid := bup.StreamStoreID
-		if len(ssid) == 0 {
-			ssid = bup.DetailsID
-		}
-
-		mctx = clues.Add(mctx, "manifest_streamstore_id", ssid)
-
-		// if no detailsID exists for any of the complete manifests, we want
-		// to fall back to a complete backup. This is a temporary prevention
-		// mechanism to keep backups from falling into a perpetually bad state.
-		// This makes an assumption that the ID points to a populated set of
-		// details; we aren't doing the work to look them up.
-		if len(ssid) == 0 {
-			logger.Ctx(ctx).Infow("backup missing streamstore ID, falling back to full backup", clues.In(mctx).Slice()...)
-			return ms, nil, false, nil
-		}
-
 		// a local fault.Bus instance is used to collect metadata files here.
 		// we avoid the global fault.Bus because all failures here are ignorable,
 		// and cascading errors up to the operation can cause a conflict that forces
@@ -137,9 +72,19 @@
 		// spread around. Need to find more idiomatic handling.
 		fb := fault.New(true)
 
-		colls, err := collectMetadata(mctx, mr, man, metadataFiles, tenantID, fb)
+		colls, err := collectMetadata(mctx, rp, man, metadataFiles, tenantID, fb)
 		LogFaultErrors(ctx, fb.Errors(), "collecting metadata")
 
+		// TODO(ashmrtn): It should be alright to relax this condition a little. We
+		// should be able to just remove the offending manifest and backup from the
+		// set of bases. Since we're looking at manifests in this loop, it should be
+		// possible to find the backup by either checking the reasons or extracting
+		// the backup ID from the manifest's tags.
+		//
+		// Assuming that only the corso metadata is corrupted for the manifest, it
+		// should be safe to leave this manifest in the AssistBases set, though we
+		// could remove it there too if we want to be conservative. That can be done
+		// by finding the manifest ID.
 		if err != nil && !errors.Is(err, data.ErrNotFound) {
 			// prior metadata isn't guaranteed to exist.
 			// if it doesn't, we'll just have to do a
@@ -150,148 +95,7 @@
 		collections = append(collections, colls...)
 	}
 
-	if err != nil {
-		return nil, nil, false, err
-	}
-
-	return ms, collections, true, nil
-}
-
-// unionManifests reduces the two manifest slices into a single slice.
-// Assumes fallback represents a prior manifest version (across some migration -// that disrupts manifest lookup), and that mans contains the current version. -// Also assumes the mans slice will have, at most, one complete and one incomplete -// manifest per service+category tuple. -// -// Selection priority, for each reason, follows these rules: -// 1. If the mans manifest is complete, ignore fallback manifests for that reason. -// 2. If the mans manifest is only incomplete, look for a matching complete manifest in fallbacks. -// 3. If mans has no entry for a reason, look for both complete and incomplete fallbacks. -func unionManifests( - reasons []kopia.Reason, - mans []kopia.ManifestEntry, - fallback []kopia.ManifestEntry, -) []kopia.ManifestEntry { - if len(fallback) == 0 { - return mans - } - - if len(mans) == 0 { - return fallback - } - - type manTup struct { - complete *kopia.ManifestEntry - incomplete *kopia.ManifestEntry - } - - tups := map[string]manTup{} - - for _, r := range reasons { - // no resource owner in the key. Assume it's the same owner across all - // manifests, but that the identifier is different due to migration. - k := r.Service.String() + r.Category.String() - tups[k] = manTup{} - } - - // track the manifests that were collected with the current lookup - for i := range mans { - m := &mans[i] - - for _, r := range m.Reasons { - k := r.Service.String() + r.Category.String() - t := tups[k] - // assume mans will have, at most, one complete and one incomplete per key - if len(m.IncompleteReason) > 0 { - t.incomplete = m - } else { - t.complete = m - } - - tups[k] = t - } - } - - // backfill from the fallback where necessary - for i := range fallback { - m := &fallback[i] - useReasons := []kopia.Reason{} - - for _, r := range m.Reasons { - k := r.Service.String() + r.Category.String() - t := tups[k] - - if t.complete != nil { - // assume fallbacks contains prior manifest versions. - // we don't want to stack a prior version incomplete onto - // a current version's complete snapshot. - continue - } - - useReasons = append(useReasons, r) - - if len(m.IncompleteReason) > 0 && t.incomplete == nil { - t.incomplete = m - } else if len(m.IncompleteReason) == 0 { - t.complete = m - } - - tups[k] = t - } - - if len(m.IncompleteReason) == 0 && len(useReasons) > 0 { - m.Reasons = useReasons - } - } - - // collect the results into a single slice of manifests - ms := map[string]kopia.ManifestEntry{} - - for _, m := range tups { - if m.complete != nil { - ms[string(m.complete.ID)] = *m.complete - } - - if m.incomplete != nil { - ms[string(m.incomplete.ID)] = *m.incomplete - } - } - - return maps.Values(ms) -} - -// verifyDistinctBases is a validation checker that ensures, for a given slice -// of manifests, that each manifest's Reason (owner, service, category) is only -// included once. If a reason is duplicated by any two manifests, an error is -// returned. -func verifyDistinctBases(ctx context.Context, mans []kopia.ManifestEntry) error { - reasons := map[string]manifest.ID{} - - for _, man := range mans { - // Incomplete snapshots are used only for kopia-assisted incrementals. The - // fact that we need this check here makes it seem like this should live in - // the kopia code. However, keeping it here allows for better debugging as - // the kopia code only has access to a path builder which means it cannot - // remove the resource owner from the error/log output. That is also below - // the point where we decide if we should do a full backup or an incremental. 
- if len(man.IncompleteReason) > 0 { - continue - } - - for _, reason := range man.Reasons { - reasonKey := reason.ResourceOwner + reason.Service.String() + reason.Category.String() - - if b, ok := reasons[reasonKey]; ok { - return clues.New("manifests have overlapping reasons"). - WithClues(ctx). - With("other_manifest_id", b) - } - - reasons[reasonKey] = man.ID - } - } - - return nil + return bb, collections, true, nil } // collectMetadata retrieves all metadata files associated with the manifest. diff --git a/src/internal/operations/manifests_test.go b/src/internal/operations/manifests_test.go index dd477ee50..e4ae9b6d3 100644 --- a/src/internal/operations/manifests_test.go +++ b/src/internal/operations/manifests_test.go @@ -8,9 +8,7 @@ import ( "github.com/kopia/kopia/repo/manifest" "github.com/kopia/kopia/snapshot" "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" - "golang.org/x/exp/maps" "github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/kopia" @@ -25,49 +23,6 @@ import ( // interfaces // --------------------------------------------------------------------------- -type mockManifestRestorer struct { - mockRestoreProducer - mans []kopia.ManifestEntry - mrErr error // err varname already claimed by mockRestoreProducer -} - -func (mmr mockManifestRestorer) FindBases( - ctx context.Context, - reasons []kopia.Reason, - tags map[string]string, -) ([]kopia.ManifestEntry, error) { - mans := map[string]kopia.ManifestEntry{} - - for _, r := range reasons { - for _, m := range mmr.mans { - for _, mr := range m.Reasons { - if mr.ResourceOwner == r.ResourceOwner { - mans[string(m.ID)] = m - break - } - } - } - } - - return maps.Values(mans), mmr.mrErr -} - -type mockGetBackuper struct { - detailsID string - streamstoreID string - err error -} - -func (mg mockGetBackuper) GetBackup( - ctx context.Context, - backupID model.StableID, -) (*backup.Backup, error) { - return &backup.Backup{ - DetailsID: mg.detailsID, - StreamStoreID: mg.streamstoreID, - }, mg.err -} - type mockColl struct { id string // for comparisons p path.Path @@ -81,6 +36,36 @@ func (mc mockColl) FullPath() path.Path { return mc.p } +type mockBackupFinder struct { + // ResourceOwner -> returned set of data for call to FindBases. We can just + // switch on the ResourceOwner as the passed in Reasons should be the same + // beyond that and results are returned for the union of the reasons anyway. + // This does assume that the return data is properly constructed to return a + // union of the reasons etc. 
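+	// (Editor's note: an illustrative aside, not part of the original
+	// change.) The tests below wire this mock up roughly as:
+	//
+	//   bf := &mockBackupFinder{
+	//   	data: map[string]kopia.BackupBases{
+	//   		ro: kopia.NewMockBackupBases().WithMergeBases(man1),
+	//   	},
+	//   }
+	//   bb := bf.FindBases(ctx, reasons, nil) // keyed on reasons[0].ResourceOwner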
+ data map[string]kopia.BackupBases +} + +func (bf *mockBackupFinder) FindBases( + _ context.Context, + reasons []kopia.Reason, + _ map[string]string, +) kopia.BackupBases { + if len(reasons) == 0 { + return kopia.NewMockBackupBases() + } + + if bf == nil { + return kopia.NewMockBackupBases() + } + + b := bf.data[reasons[0].ResourceOwner] + if b == nil { + return kopia.NewMockBackupBases() + } + + return b +} + // --------------------------------------------------------------------------- // tests // --------------------------------------------------------------------------- @@ -254,169 +239,24 @@ func (suite *OperationsManifestsUnitSuite) TestCollectMetadata() { } } -func (suite *OperationsManifestsUnitSuite) TestVerifyDistinctBases() { - ro := "resource_owner" +func buildReasons( + ro string, + service path.ServiceType, + cats ...path.CategoryType, +) []kopia.Reason { + var reasons []kopia.Reason - table := []struct { - name string - mans []kopia.ManifestEntry - expect assert.ErrorAssertionFunc - }{ - { - name: "one manifest, one reason", - mans: []kopia.ManifestEntry{ - { - Manifest: &snapshot.Manifest{}, - Reasons: []kopia.Reason{ - { - ResourceOwner: ro, - Service: path.ExchangeService, - Category: path.EmailCategory, - }, - }, - }, - }, - expect: assert.NoError, - }, - { - name: "one incomplete manifest", - mans: []kopia.ManifestEntry{ - { - Manifest: &snapshot.Manifest{IncompleteReason: "ir"}, - }, - }, - expect: assert.NoError, - }, - { - name: "one manifest, multiple reasons", - mans: []kopia.ManifestEntry{ - { - Manifest: &snapshot.Manifest{}, - Reasons: []kopia.Reason{ - { - ResourceOwner: ro, - Service: path.ExchangeService, - Category: path.EmailCategory, - }, - { - ResourceOwner: ro, - Service: path.ExchangeService, - Category: path.ContactsCategory, - }, - }, - }, - }, - expect: assert.NoError, - }, - { - name: "one manifest, duplicate reasons", - mans: []kopia.ManifestEntry{ - { - Manifest: &snapshot.Manifest{}, - Reasons: []kopia.Reason{ - { - ResourceOwner: ro, - Service: path.ExchangeService, - Category: path.EmailCategory, - }, - { - ResourceOwner: ro, - Service: path.ExchangeService, - Category: path.EmailCategory, - }, - }, - }, - }, - expect: assert.Error, - }, - { - name: "two manifests, non-overlapping reasons", - mans: []kopia.ManifestEntry{ - { - Manifest: &snapshot.Manifest{}, - Reasons: []kopia.Reason{ - { - ResourceOwner: ro, - Service: path.ExchangeService, - Category: path.EmailCategory, - }, - }, - }, - { - Manifest: &snapshot.Manifest{}, - Reasons: []kopia.Reason{ - { - ResourceOwner: ro, - Service: path.ExchangeService, - Category: path.ContactsCategory, - }, - }, - }, - }, - expect: assert.NoError, - }, - { - name: "two manifests, overlapping reasons", - mans: []kopia.ManifestEntry{ - { - Manifest: &snapshot.Manifest{}, - Reasons: []kopia.Reason{ - { - ResourceOwner: ro, - Service: path.ExchangeService, - Category: path.EmailCategory, - }, - }, - }, - { - Manifest: &snapshot.Manifest{}, - Reasons: []kopia.Reason{ - { - ResourceOwner: ro, - Service: path.ExchangeService, - Category: path.EmailCategory, - }, - }, - }, - }, - expect: assert.Error, - }, - { - name: "two manifests, overlapping reasons, one snapshot incomplete", - mans: []kopia.ManifestEntry{ - { - Manifest: &snapshot.Manifest{}, - Reasons: []kopia.Reason{ - { - ResourceOwner: ro, - Service: path.ExchangeService, - Category: path.EmailCategory, - }, - }, - }, - { - Manifest: &snapshot.Manifest{IncompleteReason: "ir"}, - Reasons: []kopia.Reason{ - { - ResourceOwner: ro, - Service: 
path.ExchangeService, - Category: path.EmailCategory, - }, - }, - }, - }, - expect: assert.NoError, - }, + for _, cat := range cats { + reasons = append( + reasons, + kopia.Reason{ + ResourceOwner: ro, + Service: service, + Category: cat, + }) } - for _, test := range table { - suite.Run(test.name, func() { - ctx, flush := tester.NewContext(suite.T()) - defer flush() - err := verifyDistinctBases(ctx, test.mans) - test.expect(suite.T(), err, clues.ToCore(err)) - }) - } + return reasons } func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() { @@ -426,228 +266,235 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() { did = "detailsid" ) - makeMan := func(pct path.CategoryType, id, incmpl, bid string) kopia.ManifestEntry { - tags := map[string]string{} - if len(bid) > 0 { - tags = map[string]string{"tag:" + kopia.TagBackupID: bid} - } - + makeMan := func(id, incmpl string, cats ...path.CategoryType) kopia.ManifestEntry { return kopia.ManifestEntry{ Manifest: &snapshot.Manifest{ ID: manifest.ID(id), IncompleteReason: incmpl, - Tags: tags, - }, - Reasons: []kopia.Reason{ - { - ResourceOwner: ro, - Service: path.ExchangeService, - Category: pct, - }, }, + Reasons: buildReasons(ro, path.ExchangeService, cats...), } } table := []struct { - name string - mr mockManifestRestorer - gb mockGetBackuper - getMeta bool - assertErr assert.ErrorAssertionFunc - assertB assert.BoolAssertionFunc - expectDCS []mockColl - expectNilMans bool + name string + bf *mockBackupFinder + rp mockRestoreProducer + reasons []kopia.Reason + getMeta bool + assertErr assert.ErrorAssertionFunc + assertB assert.BoolAssertionFunc + expectDCS []mockColl + expectPaths func(t *testing.T, gotPaths []path.Path) + expectMans kopia.BackupBases }{ { - name: "don't get metadata, no mans", - mr: mockManifestRestorer{ - mockRestoreProducer: mockRestoreProducer{}, - mans: []kopia.ManifestEntry{}, - }, - gb: mockGetBackuper{detailsID: did}, - getMeta: false, - assertErr: assert.NoError, - assertB: assert.False, - expectDCS: nil, + name: "don't get metadata, no mans", + rp: mockRestoreProducer{}, + reasons: []kopia.Reason{}, + getMeta: false, + assertErr: assert.NoError, + assertB: assert.False, + expectDCS: nil, + expectMans: kopia.NewMockBackupBases(), }, { name: "don't get metadata", - mr: mockManifestRestorer{ - mockRestoreProducer: mockRestoreProducer{}, - mans: []kopia.ManifestEntry{makeMan(path.EmailCategory, "id1", "", "")}, + bf: &mockBackupFinder{ + data: map[string]kopia.BackupBases{ + ro: kopia.NewMockBackupBases().WithMergeBases( + makeMan("id1", "", path.EmailCategory), + ), + }, + }, + rp: mockRestoreProducer{}, + reasons: []kopia.Reason{ + { + ResourceOwner: ro, + Service: path.ExchangeService, + Category: path.EmailCategory, + }, }, - gb: mockGetBackuper{detailsID: did}, getMeta: false, assertErr: assert.NoError, assertB: assert.False, expectDCS: nil, + expectMans: kopia.NewMockBackupBases().WithAssistBases( + makeMan("id1", "", path.EmailCategory), + ), }, { name: "don't get metadata, incomplete manifest", - mr: mockManifestRestorer{ - mockRestoreProducer: mockRestoreProducer{}, - mans: []kopia.ManifestEntry{makeMan(path.EmailCategory, "id1", "ir", "")}, - }, - gb: mockGetBackuper{detailsID: did}, - getMeta: false, - assertErr: assert.NoError, - assertB: assert.False, - expectDCS: nil, - }, - { - name: "fetch manifests errors", - mr: mockManifestRestorer{ - mockRestoreProducer: mockRestoreProducer{}, - mrErr: assert.AnError, - }, - gb: mockGetBackuper{detailsID: did}, - 
getMeta: true, - assertErr: assert.Error, - assertB: assert.False, - expectDCS: nil, - }, - { - name: "verify distinct bases fails", - mr: mockManifestRestorer{ - mockRestoreProducer: mockRestoreProducer{}, - mans: []kopia.ManifestEntry{ - makeMan(path.EmailCategory, "id1", "", ""), - makeMan(path.EmailCategory, "id2", "", ""), + bf: &mockBackupFinder{ + data: map[string]kopia.BackupBases{ + ro: kopia.NewMockBackupBases().WithAssistBases( + makeMan("id1", "checkpoint", path.EmailCategory), + ), + }, + }, + rp: mockRestoreProducer{}, + reasons: []kopia.Reason{ + { + ResourceOwner: ro, + Service: path.ExchangeService, + Category: path.EmailCategory, }, }, - gb: mockGetBackuper{detailsID: did}, getMeta: true, - assertErr: assert.NoError, // No error, even though verify failed. - assertB: assert.False, + assertErr: assert.NoError, + // Doesn't matter if it's true or false as merge/assist bases are + // distinct. A future PR can go and remove the requirement to pass the + // flag to kopia and just pass it the bases instead. + assertB: assert.True, expectDCS: nil, + expectMans: kopia.NewMockBackupBases().WithAssistBases( + makeMan("id1", "checkpoint", path.EmailCategory), + ), }, { - name: "no manifests", - mr: mockManifestRestorer{ - mockRestoreProducer: mockRestoreProducer{}, - mans: []kopia.ManifestEntry{}, + name: "one valid man, multiple reasons", + bf: &mockBackupFinder{ + data: map[string]kopia.BackupBases{ + ro: kopia.NewMockBackupBases().WithMergeBases( + makeMan("id1", "", path.EmailCategory, path.ContactsCategory), + ), + }, + }, + rp: mockRestoreProducer{ + collsByID: map[string][]data.RestoreCollection{ + "id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}}, + }, + }, + reasons: []kopia.Reason{ + { + ResourceOwner: ro, + Service: path.ExchangeService, + Category: path.EmailCategory, + }, + { + ResourceOwner: ro, + Service: path.ExchangeService, + Category: path.ContactsCategory, + }, }, - gb: mockGetBackuper{detailsID: did}, getMeta: true, assertErr: assert.NoError, assertB: assert.True, - expectDCS: nil, + expectDCS: []mockColl{{id: "id1"}}, + expectPaths: func(t *testing.T, gotPaths []path.Path) { + for _, p := range gotPaths { + assert.Equal( + t, + path.ExchangeMetadataService, + p.Service(), + "read data service") + + assert.Contains( + t, + []path.CategoryType{ + path.EmailCategory, + path.ContactsCategory, + }, + p.Category(), + "read data category doesn't match a given reason", + ) + } + }, + expectMans: kopia.NewMockBackupBases().WithMergeBases( + makeMan("id1", "", path.EmailCategory, path.ContactsCategory), + ), }, { - name: "only incomplete manifests", - mr: mockManifestRestorer{ - mockRestoreProducer: mockRestoreProducer{}, - mans: []kopia.ManifestEntry{ - makeMan(path.EmailCategory, "id1", "ir", ""), - makeMan(path.ContactsCategory, "id2", "ir", ""), + name: "one valid man, extra incomplete man", + bf: &mockBackupFinder{ + data: map[string]kopia.BackupBases{ + ro: kopia.NewMockBackupBases().WithMergeBases( + makeMan("id1", "", path.EmailCategory), + ).WithAssistBases( + makeMan("id2", "checkpoint", path.EmailCategory), + ), + }, + }, + rp: mockRestoreProducer{ + collsByID: map[string][]data.RestoreCollection{ + "id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}}, + "id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id2"}}}, + }, + }, + reasons: []kopia.Reason{ + { + ResourceOwner: ro, + Service: path.ExchangeService, + Category: path.EmailCategory, }, }, - gb: mockGetBackuper{detailsID: did}, getMeta: true, assertErr: 
assert.NoError, assertB: assert.True, - expectDCS: nil, - }, - { - name: "man missing backup id", - mr: mockManifestRestorer{ - mockRestoreProducer: mockRestoreProducer{ - collsByID: map[string][]data.RestoreCollection{ - "id": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id_coll"}}}, - }, - }, - mans: []kopia.ManifestEntry{makeMan(path.EmailCategory, "id", "", "")}, - }, - gb: mockGetBackuper{detailsID: did}, - getMeta: true, - assertErr: assert.Error, - assertB: assert.False, - expectNilMans: true, - }, - { - name: "backup missing details id", - mr: mockManifestRestorer{ - mockRestoreProducer: mockRestoreProducer{}, - mans: []kopia.ManifestEntry{makeMan(path.EmailCategory, "id1", "", "bid")}, - }, - gb: mockGetBackuper{}, - getMeta: true, - assertErr: assert.NoError, - assertB: assert.False, - }, - { - name: "one complete, one incomplete", - mr: mockManifestRestorer{ - mockRestoreProducer: mockRestoreProducer{ - collsByID: map[string][]data.RestoreCollection{ - "id": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id_coll"}}}, - "incmpl_id": {data.NoFetchRestoreCollection{Collection: mockColl{id: "incmpl_id_coll"}}}, - }, - }, - mans: []kopia.ManifestEntry{ - makeMan(path.EmailCategory, "id", "", "bid"), - makeMan(path.EmailCategory, "incmpl_id", "ir", ""), - }, - }, - gb: mockGetBackuper{detailsID: did}, - getMeta: true, - assertErr: assert.NoError, - assertB: assert.True, - expectDCS: []mockColl{{id: "id_coll"}}, - }, - { - name: "single valid man", - mr: mockManifestRestorer{ - mockRestoreProducer: mockRestoreProducer{ - collsByID: map[string][]data.RestoreCollection{ - "id": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id_coll"}}}, - }, - }, - mans: []kopia.ManifestEntry{makeMan(path.EmailCategory, "id", "", "bid")}, - }, - gb: mockGetBackuper{detailsID: did}, - getMeta: true, - assertErr: assert.NoError, - assertB: assert.True, - expectDCS: []mockColl{{id: "id_coll"}}, + expectDCS: []mockColl{{id: "id1"}}, + expectMans: kopia.NewMockBackupBases().WithMergeBases( + makeMan("id1", "", path.EmailCategory), + ).WithAssistBases( + makeMan("id2", "checkpoint", path.EmailCategory), + ), }, { name: "multiple valid mans", - mr: mockManifestRestorer{ - mockRestoreProducer: mockRestoreProducer{ - collsByID: map[string][]data.RestoreCollection{ - "mail": {data.NoFetchRestoreCollection{Collection: mockColl{id: "mail_coll"}}}, - "contact": {data.NoFetchRestoreCollection{Collection: mockColl{id: "contact_coll"}}}, - }, - }, - mans: []kopia.ManifestEntry{ - makeMan(path.EmailCategory, "mail", "", "bid"), - makeMan(path.ContactsCategory, "contact", "", "bid"), + bf: &mockBackupFinder{ + data: map[string]kopia.BackupBases{ + ro: kopia.NewMockBackupBases().WithMergeBases( + makeMan("id1", "", path.EmailCategory), + makeMan("id2", "", path.EmailCategory), + ), + }, + }, + rp: mockRestoreProducer{ + collsByID: map[string][]data.RestoreCollection{ + "id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}}, + "id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id2"}}}, + }, + }, + reasons: []kopia.Reason{ + { + ResourceOwner: ro, + Service: path.ExchangeService, + Category: path.EmailCategory, }, }, - gb: mockGetBackuper{detailsID: did}, getMeta: true, assertErr: assert.NoError, assertB: assert.True, - expectDCS: []mockColl{ - {id: "mail_coll"}, - {id: "contact_coll"}, - }, + expectDCS: []mockColl{{id: "id1"}, {id: "id2"}}, + expectMans: kopia.NewMockBackupBases().WithMergeBases( + makeMan("id1", "", path.EmailCategory), + makeMan("id2", "", 
path.EmailCategory), + ), }, { name: "error collecting metadata", - mr: mockManifestRestorer{ - mockRestoreProducer: mockRestoreProducer{err: assert.AnError}, - mans: []kopia.ManifestEntry{makeMan(path.EmailCategory, "id1", "", "bid")}, + bf: &mockBackupFinder{ + data: map[string]kopia.BackupBases{ + ro: kopia.NewMockBackupBases().WithMergeBases( + makeMan("id1", "", path.EmailCategory), + ), + }, }, - gb: mockGetBackuper{detailsID: did}, - getMeta: true, - assertErr: assert.Error, - assertB: assert.False, - expectDCS: nil, - expectNilMans: true, + rp: mockRestoreProducer{err: assert.AnError}, + reasons: []kopia.Reason{ + { + ResourceOwner: ro, + Service: path.ExchangeService, + Category: path.EmailCategory, + }, + }, + getMeta: true, + assertErr: assert.Error, + assertB: assert.False, + expectDCS: nil, + expectMans: nil, }, } + for _, test := range table { suite.Run(test.name, func() { t := suite.T() @@ -657,20 +504,470 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() { mans, dcs, b, err := produceManifestsAndMetadata( ctx, - &test.mr, - &test.gb, - []kopia.Reason{{ResourceOwner: ro}}, nil, + test.bf, + &test.rp, + test.reasons, nil, tid, test.getMeta) test.assertErr(t, err, clues.ToCore(err)) test.assertB(t, b) - expectMans := test.mr.mans - if test.expectNilMans { - expectMans = nil + kopia.AssertBackupBasesEqual(t, test.expectMans, mans) + + expect, got := []string{}, []string{} + + for _, dc := range test.expectDCS { + expect = append(expect, dc.id) } - assert.ElementsMatch(t, expectMans, mans) + for _, dc := range dcs { + if !assert.IsTypef( + t, + data.NoFetchRestoreCollection{}, + dc, + "unexpected type returned [%T]", + dc, + ) { + continue + } + + tmp := dc.(data.NoFetchRestoreCollection) + + if !assert.IsTypef( + t, + mockColl{}, + tmp.Collection, + "unexpected type returned [%T]", + tmp.Collection, + ) { + continue + } + + mc := tmp.Collection.(mockColl) + got = append(got, mc.id) + } + + assert.ElementsMatch(t, expect, got, "expected collections are present") + + if test.expectPaths != nil { + test.expectPaths(t, test.rp.gotPaths) + } + }) + } +} + +func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_FallbackReasons() { + const ( + ro = "resourceowner" + fbro = "fb_resourceowner" + tid = "tenantid" + did = "detailsid" + ) + + makeMan := func(ro, id, incmpl string, cats ...path.CategoryType) kopia.ManifestEntry { + return kopia.ManifestEntry{ + Manifest: &snapshot.Manifest{ + ID: manifest.ID(id), + IncompleteReason: incmpl, + Tags: map[string]string{"tag:" + kopia.TagBackupID: id + "bup"}, + }, + Reasons: buildReasons(ro, path.ExchangeService, cats...), + } + } + + makeBackup := func(ro, snapID string, cats ...path.CategoryType) kopia.BackupEntry { + return kopia.BackupEntry{ + Backup: &backup.Backup{ + BaseModel: model.BaseModel{ + ID: model.StableID(snapID + "bup"), + }, + SnapshotID: snapID, + StreamStoreID: snapID + "store", + }, + Reasons: buildReasons(ro, path.ExchangeService, cats...), + } + } + + emailReason := kopia.Reason{ + ResourceOwner: ro, + Service: path.ExchangeService, + Category: path.EmailCategory, + } + + fbEmailReason := kopia.Reason{ + ResourceOwner: fbro, + Service: path.ExchangeService, + Category: path.EmailCategory, + } + + table := []struct { + name string + bf *mockBackupFinder + rp mockRestoreProducer + reasons []kopia.Reason + fallbackReasons []kopia.Reason + getMeta bool + assertErr assert.ErrorAssertionFunc + assertB assert.BoolAssertionFunc + expectDCS []mockColl + expectMans 
kopia.BackupBases + }{ + { + name: "don't get metadata, only fallbacks", + bf: &mockBackupFinder{ + data: map[string]kopia.BackupBases{ + fbro: kopia.NewMockBackupBases().WithMergeBases( + makeMan(fbro, "fb_id1", "", path.EmailCategory), + ).WithBackups( + makeBackup(fbro, "fb_id1", path.EmailCategory), + ), + }, + }, + rp: mockRestoreProducer{}, + fallbackReasons: []kopia.Reason{fbEmailReason}, + getMeta: false, + assertErr: assert.NoError, + assertB: assert.False, + expectDCS: nil, + expectMans: kopia.NewMockBackupBases().WithAssistBases( + makeMan(fbro, "fb_id1", "", path.EmailCategory), + ), + }, + { + name: "only fallbacks", + bf: &mockBackupFinder{ + data: map[string]kopia.BackupBases{ + fbro: kopia.NewMockBackupBases().WithMergeBases( + makeMan(fbro, "fb_id1", "", path.EmailCategory), + ).WithBackups( + makeBackup(fbro, "fb_id1", path.EmailCategory), + ), + }, + }, + rp: mockRestoreProducer{ + collsByID: map[string][]data.RestoreCollection{ + "fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}}, + }, + }, + fallbackReasons: []kopia.Reason{fbEmailReason}, + getMeta: true, + assertErr: assert.NoError, + assertB: assert.True, + expectDCS: []mockColl{{id: "fb_id1"}}, + expectMans: kopia.NewMockBackupBases().WithMergeBases( + makeMan(fbro, "fb_id1", "", path.EmailCategory), + ).WithBackups( + makeBackup(fbro, "fb_id1", path.EmailCategory), + ), + }, + { + name: "complete mans and fallbacks", + bf: &mockBackupFinder{ + data: map[string]kopia.BackupBases{ + ro: kopia.NewMockBackupBases().WithMergeBases( + makeMan(ro, "id1", "", path.EmailCategory), + ), + fbro: kopia.NewMockBackupBases().WithMergeBases( + makeMan(fbro, "fb_id1", "", path.EmailCategory), + ).WithBackups( + makeBackup(fbro, "fb_id1", path.EmailCategory), + ), + }, + }, + rp: mockRestoreProducer{ + collsByID: map[string][]data.RestoreCollection{ + "id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}}, + "fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}}, + }, + }, + reasons: []kopia.Reason{emailReason}, + fallbackReasons: []kopia.Reason{fbEmailReason}, + getMeta: true, + assertErr: assert.NoError, + assertB: assert.True, + expectDCS: []mockColl{{id: "id1"}}, + expectMans: kopia.NewMockBackupBases().WithMergeBases( + makeMan(ro, "id1", "", path.EmailCategory), + ), + }, + { + name: "incomplete mans and fallbacks", + bf: &mockBackupFinder{ + data: map[string]kopia.BackupBases{ + ro: kopia.NewMockBackupBases().WithAssistBases( + makeMan(ro, "id2", "checkpoint", path.EmailCategory), + ), + fbro: kopia.NewMockBackupBases().WithAssistBases( + makeMan(fbro, "fb_id2", "checkpoint", path.EmailCategory), + ), + }, + }, + rp: mockRestoreProducer{ + collsByID: map[string][]data.RestoreCollection{ + "id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id2"}}}, + "fb_id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id2"}}}, + }, + }, + reasons: []kopia.Reason{emailReason}, + fallbackReasons: []kopia.Reason{fbEmailReason}, + getMeta: true, + assertErr: assert.NoError, + assertB: assert.True, + expectDCS: nil, + expectMans: kopia.NewMockBackupBases().WithAssistBases( + makeMan(ro, "id2", "checkpoint", path.EmailCategory), + ), + }, + { + name: "complete and incomplete mans and fallbacks", + bf: &mockBackupFinder{ + data: map[string]kopia.BackupBases{ + ro: kopia.NewMockBackupBases().WithMergeBases( + makeMan(ro, "id1", "", path.EmailCategory), + ).WithAssistBases( + makeMan(ro, "id2", "checkpoint", path.EmailCategory), + ), + fbro: 
kopia.NewMockBackupBases().WithMergeBases( + makeMan(fbro, "fb_id1", "", path.EmailCategory), + ).WithBackups( + makeBackup(fbro, "fb_id1", path.EmailCategory), + ).WithAssistBases( + makeMan(fbro, "fb_id2", "checkpoint", path.EmailCategory), + ), + }, + }, + rp: mockRestoreProducer{ + collsByID: map[string][]data.RestoreCollection{ + "id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}}, + "id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id2"}}}, + "fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}}, + "fb_id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id2"}}}, + }, + }, + reasons: []kopia.Reason{emailReason}, + fallbackReasons: []kopia.Reason{fbEmailReason}, + getMeta: true, + assertErr: assert.NoError, + assertB: assert.True, + expectDCS: []mockColl{{id: "id1"}}, + expectMans: kopia.NewMockBackupBases().WithMergeBases( + makeMan(ro, "id1", "", path.EmailCategory), + ).WithAssistBases( + makeMan(ro, "id2", "checkpoint", path.EmailCategory), + ), + }, + { + name: "incomplete mans and complete fallbacks", + bf: &mockBackupFinder{ + data: map[string]kopia.BackupBases{ + ro: kopia.NewMockBackupBases().WithAssistBases( + makeMan(ro, "id2", "checkpoint", path.EmailCategory), + ), + fbro: kopia.NewMockBackupBases().WithMergeBases( + makeMan(fbro, "fb_id1", "", path.EmailCategory), + ).WithBackups( + makeBackup(fbro, "fb_id1", path.EmailCategory), + ), + }, + }, + rp: mockRestoreProducer{ + collsByID: map[string][]data.RestoreCollection{ + "id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id2"}}}, + "fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}}, + }, + }, + reasons: []kopia.Reason{emailReason}, + fallbackReasons: []kopia.Reason{fbEmailReason}, + getMeta: true, + assertErr: assert.NoError, + assertB: assert.True, + expectDCS: []mockColl{{id: "fb_id1"}}, + expectMans: kopia.NewMockBackupBases().WithMergeBases( + makeMan(fbro, "fb_id1", "", path.EmailCategory), + ).WithBackups( + makeBackup(fbro, "fb_id1", path.EmailCategory), + ).WithAssistBases( + makeMan(ro, "id2", "checkpoint", path.EmailCategory), + ), + }, + { + name: "complete mans and incomplete fallbacks", + bf: &mockBackupFinder{ + data: map[string]kopia.BackupBases{ + ro: kopia.NewMockBackupBases().WithMergeBases( + makeMan(ro, "id1", "", path.EmailCategory), + ), + fbro: kopia.NewMockBackupBases().WithAssistBases( + makeMan(fbro, "fb_id2", "checkpoint", path.EmailCategory), + ), + }, + }, + rp: mockRestoreProducer{ + collsByID: map[string][]data.RestoreCollection{ + "id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}}, + "fb_id2": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id2"}}}, + }, + }, + reasons: []kopia.Reason{emailReason}, + fallbackReasons: []kopia.Reason{fbEmailReason}, + getMeta: true, + assertErr: assert.NoError, + assertB: assert.True, + expectDCS: []mockColl{{id: "id1"}}, + expectMans: kopia.NewMockBackupBases().WithMergeBases( + makeMan(ro, "id1", "", path.EmailCategory), + ), + }, + { + name: "complete mans and complete fallbacks, multiple reasons", + bf: &mockBackupFinder{ + data: map[string]kopia.BackupBases{ + ro: kopia.NewMockBackupBases().WithMergeBases( + makeMan(ro, "id1", "", path.EmailCategory, path.ContactsCategory), + ), + fbro: kopia.NewMockBackupBases().WithMergeBases( + makeMan(fbro, "fb_id1", "", path.EmailCategory, path.ContactsCategory), + ).WithBackups( + makeBackup(fbro, "fb_id1", path.EmailCategory, path.ContactsCategory), + ), + }, + }, + rp: 
mockRestoreProducer{ + collsByID: map[string][]data.RestoreCollection{ + "id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}}, + "fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}}, + }, + }, + reasons: []kopia.Reason{ + emailReason, + { + ResourceOwner: ro, + Service: path.ExchangeService, + Category: path.ContactsCategory, + }, + }, + fallbackReasons: []kopia.Reason{ + fbEmailReason, + { + ResourceOwner: fbro, + Service: path.ExchangeService, + Category: path.ContactsCategory, + }, + }, + getMeta: true, + assertErr: assert.NoError, + assertB: assert.True, + expectDCS: []mockColl{{id: "id1"}}, + expectMans: kopia.NewMockBackupBases().WithMergeBases( + makeMan(ro, "id1", "", path.EmailCategory, path.ContactsCategory), + ), + }, + { + name: "complete mans and complete fallbacks, distinct reasons", + bf: &mockBackupFinder{ + data: map[string]kopia.BackupBases{ + ro: kopia.NewMockBackupBases().WithMergeBases( + makeMan(ro, "id1", "", path.EmailCategory), + ), + fbro: kopia.NewMockBackupBases().WithMergeBases( + makeMan(fbro, "fb_id1", "", path.ContactsCategory), + ).WithBackups( + makeBackup(fbro, "fb_id1", path.ContactsCategory), + ), + }, + }, + rp: mockRestoreProducer{ + collsByID: map[string][]data.RestoreCollection{ + "id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}}, + "fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}}, + }, + }, + reasons: []kopia.Reason{emailReason}, + fallbackReasons: []kopia.Reason{ + { + ResourceOwner: fbro, + Service: path.ExchangeService, + Category: path.ContactsCategory, + }, + }, + getMeta: true, + assertErr: assert.NoError, + assertB: assert.True, + expectDCS: []mockColl{{id: "id1"}, {id: "fb_id1"}}, + expectMans: kopia.NewMockBackupBases().WithMergeBases( + makeMan(ro, "id1", "", path.EmailCategory), + makeMan(fbro, "fb_id1", "", path.ContactsCategory), + ).WithBackups( + makeBackup(fbro, "fb_id1", path.ContactsCategory), + ), + }, + { + name: "complete mans and complete fallbacks, fallback has superset of reasons", + bf: &mockBackupFinder{ + data: map[string]kopia.BackupBases{ + ro: kopia.NewMockBackupBases().WithMergeBases( + makeMan(ro, "id1", "", path.EmailCategory), + ), + fbro: kopia.NewMockBackupBases().WithMergeBases( + makeMan(fbro, "fb_id1", "", path.EmailCategory, path.ContactsCategory), + ).WithBackups( + makeBackup(fbro, "fb_id1", path.EmailCategory, path.ContactsCategory), + ), + }, + }, + rp: mockRestoreProducer{ + collsByID: map[string][]data.RestoreCollection{ + "id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id1"}}}, + "fb_id1": {data.NoFetchRestoreCollection{Collection: mockColl{id: "fb_id1"}}}, + }, + }, + reasons: []kopia.Reason{ + emailReason, + { + ResourceOwner: ro, + Service: path.ExchangeService, + Category: path.ContactsCategory, + }, + }, + fallbackReasons: []kopia.Reason{ + fbEmailReason, + { + ResourceOwner: fbro, + Service: path.ExchangeService, + Category: path.ContactsCategory, + }, + }, + getMeta: true, + assertErr: assert.NoError, + assertB: assert.True, + expectDCS: []mockColl{{id: "id1"}, {id: "fb_id1"}}, + expectMans: kopia.NewMockBackupBases().WithMergeBases( + makeMan(ro, "id1", "", path.EmailCategory), + makeMan(fbro, "fb_id1", "", path.ContactsCategory), + ).WithBackups( + makeBackup(fbro, "fb_id1", path.ContactsCategory), + ), + }, + } + + for _, test := range table { + suite.Run(test.name, func() { + t := suite.T() + + ctx, flush := tester.NewContext(t) + defer flush() + + mans, dcs, b, err := 
produceManifestsAndMetadata( + ctx, + test.bf, + &test.rp, + test.reasons, test.fallbackReasons, + tid, + test.getMeta) + test.assertErr(t, err, clues.ToCore(err)) + test.assertB(t, b) + + kopia.AssertBackupBasesEqual(t, test.expectMans, mans) expect, got := []string{}, []string{} @@ -709,603 +1006,3 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() { }) } } - -func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_fallbackReasons() { - const ( - ro = "resourceowner" - manComplete = "complete" - manIncomplete = "incmpl" - - fbro = "fb_resourceowner" - fbComplete = "fb_complete" - fbIncomplete = "fb_incmpl" - ) - - makeMan := func(id, incmpl string, reasons []kopia.Reason) kopia.ManifestEntry { - return kopia.ManifestEntry{ - Manifest: &snapshot.Manifest{ - ID: manifest.ID(id), - IncompleteReason: incmpl, - Tags: map[string]string{}, - }, - Reasons: reasons, - } - } - - type testInput struct { - id string - incomplete bool - } - - table := []struct { - name string - man []testInput - fallback []testInput - reasons []kopia.Reason - fallbackReasons []kopia.Reason - manCategories []path.CategoryType - fbCategories []path.CategoryType - assertErr assert.ErrorAssertionFunc - expectManIDs []string - expectNilMans bool - expectReasons map[string][]path.CategoryType - }{ - { - name: "only mans, no fallbacks", - man: []testInput{ - { - id: manComplete, - }, - { - id: manIncomplete, - incomplete: true, - }, - }, - manCategories: []path.CategoryType{path.EmailCategory}, - fbCategories: []path.CategoryType{path.EmailCategory}, - expectManIDs: []string{manComplete, manIncomplete}, - expectReasons: map[string][]path.CategoryType{ - manComplete: {path.EmailCategory}, - manIncomplete: {path.EmailCategory}, - }, - }, - { - name: "no mans, only fallbacks", - fallback: []testInput{ - { - id: fbComplete, - }, - { - id: fbIncomplete, - incomplete: true, - }, - }, - manCategories: []path.CategoryType{path.EmailCategory}, - fbCategories: []path.CategoryType{path.EmailCategory}, - expectManIDs: []string{fbComplete, fbIncomplete}, - expectReasons: map[string][]path.CategoryType{ - fbComplete: {path.EmailCategory}, - fbIncomplete: {path.EmailCategory}, - }, - }, - { - name: "complete mans and fallbacks", - man: []testInput{ - { - id: manComplete, - }, - }, - fallback: []testInput{ - { - id: fbComplete, - }, - }, - manCategories: []path.CategoryType{path.EmailCategory}, - fbCategories: []path.CategoryType{path.EmailCategory}, - expectManIDs: []string{manComplete}, - expectReasons: map[string][]path.CategoryType{ - manComplete: {path.EmailCategory}, - }, - }, - { - name: "incomplete mans and fallbacks", - man: []testInput{ - { - id: manIncomplete, - incomplete: true, - }, - }, - fallback: []testInput{ - { - id: fbIncomplete, - incomplete: true, - }, - }, - manCategories: []path.CategoryType{path.EmailCategory}, - fbCategories: []path.CategoryType{path.EmailCategory}, - expectManIDs: []string{manIncomplete}, - expectReasons: map[string][]path.CategoryType{ - manIncomplete: {path.EmailCategory}, - }, - }, - { - name: "complete and incomplete mans and fallbacks", - man: []testInput{ - { - id: manComplete, - }, - { - id: manIncomplete, - incomplete: true, - }, - }, - fallback: []testInput{ - { - id: fbComplete, - }, - { - id: fbIncomplete, - incomplete: true, - }, - }, - manCategories: []path.CategoryType{path.EmailCategory}, - fbCategories: []path.CategoryType{path.EmailCategory}, - expectManIDs: []string{manComplete, manIncomplete}, - expectReasons: 
map[string][]path.CategoryType{ - manComplete: {path.EmailCategory}, - manIncomplete: {path.EmailCategory}, - }, - }, - { - name: "incomplete mans, complete fallbacks", - man: []testInput{ - { - id: manIncomplete, - incomplete: true, - }, - }, - fallback: []testInput{ - { - id: fbComplete, - }, - }, - manCategories: []path.CategoryType{path.EmailCategory}, - fbCategories: []path.CategoryType{path.EmailCategory}, - expectManIDs: []string{fbComplete, manIncomplete}, - expectReasons: map[string][]path.CategoryType{ - fbComplete: {path.EmailCategory}, - manIncomplete: {path.EmailCategory}, - }, - }, - { - name: "complete mans, incomplete fallbacks", - man: []testInput{ - { - id: manComplete, - }, - }, - fallback: []testInput{ - { - id: fbIncomplete, - incomplete: true, - }, - }, - manCategories: []path.CategoryType{path.EmailCategory}, - fbCategories: []path.CategoryType{path.EmailCategory}, - expectManIDs: []string{manComplete}, - expectReasons: map[string][]path.CategoryType{ - manComplete: {path.EmailCategory}, - }, - }, - { - name: "complete mans, complete fallbacks, multiple reasons", - man: []testInput{ - { - id: manComplete, - }, - }, - fallback: []testInput{ - { - id: fbComplete, - }, - }, - manCategories: []path.CategoryType{path.EmailCategory, path.ContactsCategory}, - fbCategories: []path.CategoryType{path.EmailCategory, path.ContactsCategory}, - expectManIDs: []string{manComplete}, - expectReasons: map[string][]path.CategoryType{ - manComplete: {path.EmailCategory, path.ContactsCategory}, - }, - }, - { - name: "complete mans, complete fallbacks, distinct reasons", - man: []testInput{ - { - id: manComplete, - }, - }, - fallback: []testInput{ - { - id: fbComplete, - }, - }, - manCategories: []path.CategoryType{path.ContactsCategory}, - fbCategories: []path.CategoryType{path.EmailCategory}, - expectManIDs: []string{manComplete, fbComplete}, - expectReasons: map[string][]path.CategoryType{ - manComplete: {path.ContactsCategory}, - fbComplete: {path.EmailCategory}, - }, - }, - { - name: "fb has superset of mans reasons", - man: []testInput{ - { - id: manComplete, - }, - }, - fallback: []testInput{ - { - id: fbComplete, - }, - }, - manCategories: []path.CategoryType{path.ContactsCategory}, - fbCategories: []path.CategoryType{path.EmailCategory, path.ContactsCategory, path.EventsCategory}, - expectManIDs: []string{manComplete, fbComplete}, - expectReasons: map[string][]path.CategoryType{ - manComplete: {path.ContactsCategory}, - fbComplete: {path.EmailCategory, path.EventsCategory}, - }, - }, - } - for _, test := range table { - suite.Run(test.name, func() { - t := suite.T() - - ctx, flush := tester.NewContext(t) - defer flush() - - mainReasons := []kopia.Reason{} - fbReasons := []kopia.Reason{} - - for _, cat := range test.manCategories { - mainReasons = append( - mainReasons, - kopia.Reason{ - ResourceOwner: ro, - Service: path.ExchangeService, - Category: cat, - }) - } - - for _, cat := range test.fbCategories { - fbReasons = append( - fbReasons, - kopia.Reason{ - ResourceOwner: fbro, - Service: path.ExchangeService, - Category: cat, - }) - } - - mans := []kopia.ManifestEntry{} - - for _, m := range test.man { - incomplete := "" - if m.incomplete { - incomplete = "ir" - } - - mans = append(mans, makeMan(m.id, incomplete, mainReasons)) - } - - for _, m := range test.fallback { - incomplete := "" - if m.incomplete { - incomplete = "ir" - } - - mans = append(mans, makeMan(m.id, incomplete, fbReasons)) - } - - mr := mockManifestRestorer{mans: mans} - - gotMans, _, b, err := 
produceManifestsAndMetadata( - ctx, - &mr, - nil, - mainReasons, - fbReasons, - "tid", - false) - require.NoError(t, err, clues.ToCore(err)) - assert.False(t, b, "no-metadata is forced for this test") - - manIDs := []string{} - - for _, m := range gotMans { - manIDs = append(manIDs, string(m.ID)) - - reasons := test.expectReasons[string(m.ID)] - - mrs := []path.CategoryType{} - for _, r := range m.Reasons { - mrs = append(mrs, r.Category) - } - - assert.ElementsMatch(t, reasons, mrs) - } - - assert.ElementsMatch(t, test.expectManIDs, manIDs) - }) - } -} - -// --------------------------------------------------------------------------- -// older tests -// --------------------------------------------------------------------------- - -type BackupManifestUnitSuite struct { - tester.Suite -} - -func TestBackupManifestUnitSuite(t *testing.T) { - suite.Run(t, &BackupManifestUnitSuite{Suite: tester.NewUnitSuite(t)}) -} - -func (suite *BackupManifestUnitSuite) TestBackupOperation_VerifyDistinctBases() { - const user = "a-user" - - table := []struct { - name string - input []kopia.ManifestEntry - errCheck assert.ErrorAssertionFunc - }{ - { - name: "SingleManifestMultipleReasons", - input: []kopia.ManifestEntry{ - { - Manifest: &snapshot.Manifest{ - ID: "id1", - }, - Reasons: []kopia.Reason{ - { - ResourceOwner: user, - Service: path.ExchangeService, - Category: path.EmailCategory, - }, - { - ResourceOwner: user, - Service: path.ExchangeService, - Category: path.EventsCategory, - }, - }, - }, - }, - errCheck: assert.NoError, - }, - { - name: "MultipleManifestsDistinctReason", - input: []kopia.ManifestEntry{ - { - Manifest: &snapshot.Manifest{ - ID: "id1", - }, - Reasons: []kopia.Reason{ - { - ResourceOwner: user, - Service: path.ExchangeService, - Category: path.EmailCategory, - }, - }, - }, - { - Manifest: &snapshot.Manifest{ - ID: "id2", - }, - Reasons: []kopia.Reason{ - { - ResourceOwner: user, - Service: path.ExchangeService, - Category: path.EventsCategory, - }, - }, - }, - }, - errCheck: assert.NoError, - }, - { - name: "MultipleManifestsSameReason", - input: []kopia.ManifestEntry{ - { - Manifest: &snapshot.Manifest{ - ID: "id1", - }, - Reasons: []kopia.Reason{ - { - ResourceOwner: user, - Service: path.ExchangeService, - Category: path.EmailCategory, - }, - }, - }, - { - Manifest: &snapshot.Manifest{ - ID: "id2", - }, - Reasons: []kopia.Reason{ - { - ResourceOwner: user, - Service: path.ExchangeService, - Category: path.EmailCategory, - }, - }, - }, - }, - errCheck: assert.Error, - }, - { - name: "MultipleManifestsSameReasonOneIncomplete", - input: []kopia.ManifestEntry{ - { - Manifest: &snapshot.Manifest{ - ID: "id1", - }, - Reasons: []kopia.Reason{ - { - ResourceOwner: user, - Service: path.ExchangeService, - Category: path.EmailCategory, - }, - }, - }, - { - Manifest: &snapshot.Manifest{ - ID: "id2", - IncompleteReason: "checkpoint", - }, - Reasons: []kopia.Reason{ - { - ResourceOwner: user, - Service: path.ExchangeService, - Category: path.EmailCategory, - }, - }, - }, - }, - errCheck: assert.NoError, - }, - } - - for _, test := range table { - suite.Run(test.name, func() { - ctx, flush := tester.NewContext(suite.T()) - defer flush() - - err := verifyDistinctBases(ctx, test.input) - test.errCheck(suite.T(), err, clues.ToCore(err)) - }) - } -} - -func (suite *BackupManifestUnitSuite) TestBackupOperation_CollectMetadata() { - var ( - tenant = "a-tenant" - resourceOwner = "a-user" - fileNames = []string{ - "delta", - "paths", - } - - emailDeltaPath = makeMetadataPath( - suite.T(), - tenant, - 
path.ExchangeService, - resourceOwner, - path.EmailCategory, - fileNames[0], - ) - emailPathsPath = makeMetadataPath( - suite.T(), - tenant, - path.ExchangeService, - resourceOwner, - path.EmailCategory, - fileNames[1], - ) - contactsDeltaPath = makeMetadataPath( - suite.T(), - tenant, - path.ExchangeService, - resourceOwner, - path.ContactsCategory, - fileNames[0], - ) - contactsPathsPath = makeMetadataPath( - suite.T(), - tenant, - path.ExchangeService, - resourceOwner, - path.ContactsCategory, - fileNames[1], - ) - ) - - table := []struct { - name string - inputMan kopia.ManifestEntry - inputFiles []string - expected []path.Path - }{ - { - name: "SingleReasonSingleFile", - inputMan: kopia.ManifestEntry{ - Manifest: &snapshot.Manifest{}, - Reasons: []kopia.Reason{ - { - ResourceOwner: resourceOwner, - Service: path.ExchangeService, - Category: path.EmailCategory, - }, - }, - }, - inputFiles: []string{fileNames[0]}, - expected: []path.Path{emailDeltaPath}, - }, - { - name: "SingleReasonMultipleFiles", - inputMan: kopia.ManifestEntry{ - Manifest: &snapshot.Manifest{}, - Reasons: []kopia.Reason{ - { - ResourceOwner: resourceOwner, - Service: path.ExchangeService, - Category: path.EmailCategory, - }, - }, - }, - inputFiles: fileNames, - expected: []path.Path{emailDeltaPath, emailPathsPath}, - }, - { - name: "MultipleReasonsMultipleFiles", - inputMan: kopia.ManifestEntry{ - Manifest: &snapshot.Manifest{}, - Reasons: []kopia.Reason{ - { - ResourceOwner: resourceOwner, - Service: path.ExchangeService, - Category: path.EmailCategory, - }, - { - ResourceOwner: resourceOwner, - Service: path.ExchangeService, - Category: path.ContactsCategory, - }, - }, - }, - inputFiles: fileNames, - expected: []path.Path{ - emailDeltaPath, - emailPathsPath, - contactsDeltaPath, - contactsPathsPath, - }, - }, - } - - for _, test := range table { - suite.Run(test.name, func() { - t := suite.T() - - ctx, flush := tester.NewContext(t) - defer flush() - - mr := &mockRestoreProducer{} - - _, err := collectMetadata(ctx, mr, test.inputMan, test.inputFiles, tenant, fault.New(true)) - assert.NoError(t, err, clues.ToCore(err)) - - checkPaths(t, test.expected, mr.gotPaths) - }) - } -} From 5184920b52cf40dd90c88dbbc74e2605b99793ac Mon Sep 17 00:00:00 2001 From: Keepers Date: Thu, 15 Jun 2023 12:28:31 -0600 Subject: [PATCH 35/41] add foldermatcher to handler (#3565) Missed adding the foldermatcher to the handlers for onedrive and sharepoint during the api refactor. --- #### Does this PR need a docs update or release note? 
- [x] :no_entry: No #### Type of change - [x] :broom: Tech Debt/Cleanup #### Issue(s) * #1996 #### Test Plan - [x] :zap: Unit test - [x] :green_heart: E2E --- src/internal/m365/onedrive/backup.go | 17 +----------- src/internal/m365/onedrive/collections.go | 26 +++++++++---------- .../m365/onedrive/collections_test.go | 17 +++++------- src/internal/m365/onedrive/handlers.go | 4 +++ .../m365/onedrive/item_collector_test.go | 20 ++++---------- src/internal/m365/onedrive/item_handler.go | 12 ++++++++- src/internal/m365/onedrive/item_test.go | 6 ++++- src/internal/m365/onedrive/mock/handlers.go | 8 ++++++ src/internal/m365/sharepoint/backup.go | 15 +---------- src/internal/m365/sharepoint/backup_test.go | 15 +---------- .../m365/sharepoint/library_handler.go | 12 ++++++++- 11 files changed, 66 insertions(+), 86 deletions(-) diff --git a/src/internal/m365/onedrive/backup.go b/src/internal/m365/onedrive/backup.go index f1a47e9e1..eaedf4284 100644 --- a/src/internal/m365/onedrive/backup.go +++ b/src/internal/m365/onedrive/backup.go @@ -19,20 +19,6 @@ import ( "github.com/alcionai/corso/src/pkg/services/m365/api" ) -type odFolderMatcher struct { - scope selectors.OneDriveScope -} - -func (fm odFolderMatcher) IsAny() bool { - return fm.scope.IsAny(selectors.OneDriveFolder) -} - -func (fm odFolderMatcher) Matches(dir string) bool { - return fm.scope.Matches(selectors.OneDriveFolder, dir) -} - -// ProduceBackupCollections returns a set of DataCollection which represents the OneDrive data -// for the specified user func ProduceBackupCollections( ctx context.Context, ac api.Client, @@ -68,10 +54,9 @@ func ProduceBackupCollections( logger.Ctx(ctx).Debug("creating OneDrive collections") nc := NewCollections( - &itemBackupHandler{ac.Drives()}, + &itemBackupHandler{ac.Drives(), scope}, tenant, user.ID(), - odFolderMatcher{scope}, su, ctrlOpts) diff --git a/src/internal/m365/onedrive/collections.go b/src/internal/m365/onedrive/collections.go index 24371b22c..7122a2361 100644 --- a/src/internal/m365/onedrive/collections.go +++ b/src/internal/m365/onedrive/collections.go @@ -42,11 +42,6 @@ const ( const restrictedDirectory = "Site Pages" -type folderMatcher interface { - IsAny() bool - Matches(string) bool -} - // Collections is used to retrieve drive data for a // resource owner, which can be either a user or a sharepoint site. type Collections struct { @@ -54,7 +49,7 @@ type Collections struct { tenantID string resourceOwner string - matcher folderMatcher + statusUpdater support.StatusUpdater ctrl control.Options @@ -74,7 +69,6 @@ func NewCollections( bh BackupHandler, tenantID string, resourceOwner string, - matcher folderMatcher, statusUpdater support.StatusUpdater, ctrlOpts control.Options, ) *Collections { @@ -82,7 +76,6 @@ func NewCollections( handler: bh, tenantID: tenantID, resourceOwner: resourceOwner, - matcher: matcher, CollectionMap: map[string]map[string]*Collection{}, statusUpdater: statusUpdater, ctrl: ctrlOpts, @@ -697,7 +690,7 @@ func (c *Collections) UpdateCollections( } // Skip items that don't match the folder selectors we were given. 
- if shouldSkipDrive(ctx, collectionPath, c.matcher, driveName) { + if shouldSkip(ctx, collectionPath, c.handler, driveName) { logger.Ctx(ictx).Debugw("path not selected", "skipped_path", collectionPath.String()) continue } @@ -827,12 +820,17 @@ func (c *Collections) UpdateCollections( return el.Failure() } -func shouldSkipDrive(ctx context.Context, drivePath path.Path, m folderMatcher, driveName string) bool { - return !includePath(ctx, m, drivePath) || +type dirScopeChecker interface { + IsAllPass() bool + IncludesDir(dir string) bool +} + +func shouldSkip(ctx context.Context, drivePath path.Path, dsc dirScopeChecker, driveName string) bool { + return !includePath(ctx, dsc, drivePath) || (drivePath.Category() == path.LibrariesCategory && restrictedDirectory == driveName) } -func includePath(ctx context.Context, m folderMatcher, folderPath path.Path) bool { +func includePath(ctx context.Context, dsc dirScopeChecker, folderPath path.Path) bool { // Check if the folder is allowed by the scope. pb, err := path.GetDriveFolderPath(folderPath) if err != nil { @@ -842,11 +840,11 @@ func includePath(ctx context.Context, m folderMatcher, folderPath path.Path) boo // Hack for the edge case where we're looking at the root folder and can // select any folder. Right now the root folder has an empty folder path. - if len(pb.Elements()) == 0 && m.IsAny() { + if len(pb.Elements()) == 0 && dsc.IsAllPass() { return true } - return m.Matches(pb.String()) + return dsc.IncludesDir(pb.String()) } func updatePath(paths map[string]string, id, newPath string) { diff --git a/src/internal/m365/onedrive/collections_test.go b/src/internal/m365/onedrive/collections_test.go index d18ad0f4f..bc64875f4 100644 --- a/src/internal/m365/onedrive/collections_test.go +++ b/src/internal/m365/onedrive/collections_test.go @@ -743,10 +743,9 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() { maps.Copy(outputFolderMap, tt.inputFolderMap) c := NewCollections( - &itemBackupHandler{api.Drives{}}, + &itemBackupHandler{api.Drives{}, tt.scope}, tenant, user, - testFolderMatcher{tt.scope}, nil, control.Options{ToggleFeatures: control.Toggles{}}) @@ -1238,13 +1237,12 @@ func (p *mockItemPager) ValuesIn(api.DeltaPageLinker) ([]models.DriveItemable, e func (suite *OneDriveCollectionsUnitSuite) TestGet() { var ( - anyFolder = (&selectors.OneDriveBackup{}).Folders(selectors.Any())[0] - tenant = "a-tenant" - user = "a-user" - empty = "" - next = "next" - delta = "delta1" - delta2 = "delta2" + tenant = "a-tenant" + user = "a-user" + empty = "" + next = "next" + delta = "delta1" + delta2 = "delta2" ) metadataPath, err := path.Builder{}.ToServiceCategoryMetadataPath( @@ -2345,7 +2343,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() { mbh, tenant, user, - testFolderMatcher{anyFolder}, func(*support.ControllerOperationStatus) {}, control.Options{ToggleFeatures: control.Toggles{}}) diff --git a/src/internal/m365/onedrive/handlers.go b/src/internal/m365/onedrive/handlers.go index 78ea162ff..079bcd727 100644 --- a/src/internal/m365/onedrive/handlers.go +++ b/src/internal/m365/onedrive/handlers.go @@ -54,6 +54,10 @@ type BackupHandler interface { // provided path. 
FormatDisplayPath(driveName string, parentPath *path.Builder) string NewLocationIDer(driveID string, elems ...string) details.LocationIDer + + // scope wrapper funcs + IsAllPass() bool + IncludesDir(dir string) bool } type GetItemPermissioner interface { diff --git a/src/internal/m365/onedrive/item_collector_test.go b/src/internal/m365/onedrive/item_collector_test.go index 65e9bf5fe..a935cc802 100644 --- a/src/internal/m365/onedrive/item_collector_test.go +++ b/src/internal/m365/onedrive/item_collector_test.go @@ -390,7 +390,10 @@ func (suite *OneDriveIntgSuite) TestCreateGetDeleteFolder() { for _, test := range table { suite.Run(test.name, func() { t := suite.T() - bh := itemBackupHandler{suite.ac.Drives()} + bh := itemBackupHandler{ + suite.ac.Drives(), + (&selectors.OneDriveBackup{}).Folders(selectors.Any())[0], + } pager := suite.ac.Drives().NewUserDrivePager(suite.userID, nil) ctx, flush := tester.NewContext(t) @@ -415,18 +418,6 @@ func (suite *OneDriveIntgSuite) TestCreateGetDeleteFolder() { } } -type testFolderMatcher struct { - scope selectors.OneDriveScope -} - -func (fm testFolderMatcher) IsAny() bool { - return fm.scope.IsAny(selectors.OneDriveFolder) -} - -func (fm testFolderMatcher) Matches(p string) bool { - return fm.scope.Matches(selectors.OneDriveFolder, p) -} - func (suite *OneDriveIntgSuite) TestOneDriveNewCollections() { creds, err := tester.NewM365Account(suite.T()).M365Config() require.NoError(suite.T(), err, clues.ToCore(err)) @@ -459,10 +450,9 @@ func (suite *OneDriveIntgSuite) TestOneDriveNewCollections() { ) colls := NewCollections( - &itemBackupHandler{suite.ac.Drives()}, + &itemBackupHandler{suite.ac.Drives(), scope}, creds.AzureTenantID, test.user, - testFolderMatcher{scope}, service.updateStatus, control.Options{ ToggleFeatures: control.Toggles{}, diff --git a/src/internal/m365/onedrive/item_handler.go b/src/internal/m365/onedrive/item_handler.go index a95791237..904a20bd6 100644 --- a/src/internal/m365/onedrive/item_handler.go +++ b/src/internal/m365/onedrive/item_handler.go @@ -12,6 +12,7 @@ import ( odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/path" + "github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/services/m365/api" ) @@ -22,7 +23,8 @@ import ( var _ BackupHandler = &itemBackupHandler{} type itemBackupHandler struct { - ac api.Drives + ac api.Drives + scope selectors.OneDriveScope } func (h itemBackupHandler) Get( @@ -108,6 +110,14 @@ func (h itemBackupHandler) GetItem( return h.ac.GetItem(ctx, driveID, itemID) } +func (h itemBackupHandler) IsAllPass() bool { + return h.scope.IsAny(selectors.OneDriveFolder) +} + +func (h itemBackupHandler) IncludesDir(dir string) bool { + return h.scope.Matches(selectors.OneDriveFolder, dir) +} + // --------------------------------------------------------------------------- // Restore // --------------------------------------------------------------------------- diff --git a/src/internal/m365/onedrive/item_test.go b/src/internal/m365/onedrive/item_test.go index 5fd36d345..d862e6edf 100644 --- a/src/internal/m365/onedrive/item_test.go +++ b/src/internal/m365/onedrive/item_test.go @@ -17,6 +17,7 @@ import ( "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/fault" + "github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/services/m365/api" ) @@ -118,7 +119,10 @@ func (suite 
*ItemIntegrationSuite) TestItemReader_oneDrive() { suite.user, suite.userDriveID) - bh := itemBackupHandler{suite.service.ac.Drives()} + bh := itemBackupHandler{ + suite.service.ac.Drives(), + (&selectors.OneDriveBackup{}).Folders(selectors.Any())[0], + } // Read data for the file itemData, err := downloadItem(ctx, bh, driveItem) diff --git a/src/internal/m365/onedrive/mock/handlers.go b/src/internal/m365/onedrive/mock/handlers.go index 0b48ffa6c..bafda5022 100644 --- a/src/internal/m365/onedrive/mock/handlers.go +++ b/src/internal/m365/onedrive/mock/handlers.go @@ -184,6 +184,14 @@ var defaultSharePointLocationIDer = func(driveID string, elems ...string) detail return details.NewSharePointLocationIDer(driveID, elems...) } +func (h BackupHandler) IsAllPass() bool { + return true +} + +func (h BackupHandler) IncludesDir(string) bool { + return true +} + // --------------------------------------------------------------------------- // Get Itemer // --------------------------------------------------------------------------- diff --git a/src/internal/m365/sharepoint/backup.go b/src/internal/m365/sharepoint/backup.go index 79765bfdb..0596707d3 100644 --- a/src/internal/m365/sharepoint/backup.go +++ b/src/internal/m365/sharepoint/backup.go @@ -220,10 +220,9 @@ func collectLibraries( var ( collections = []data.BackupCollection{} colls = onedrive.NewCollections( - &libraryBackupHandler{ad}, + &libraryBackupHandler{ad, scope}, tenantID, site.ID(), - folderMatcher{scope}, updater.UpdateStatus, ctrlOpts) ) @@ -301,15 +300,3 @@ func collectPages( return spcs, el.Failure() } - -type folderMatcher struct { - scope selectors.SharePointScope -} - -func (fm folderMatcher) IsAny() bool { - return fm.scope.IsAny(selectors.SharePointLibraryFolder) -} - -func (fm folderMatcher) Matches(dir string) bool { - return fm.scope.Matches(selectors.SharePointLibraryFolder, dir) -} diff --git a/src/internal/m365/sharepoint/backup_test.go b/src/internal/m365/sharepoint/backup_test.go index d36e40377..ecffd5ecb 100644 --- a/src/internal/m365/sharepoint/backup_test.go +++ b/src/internal/m365/sharepoint/backup_test.go @@ -29,18 +29,6 @@ var testBaseDrivePath = path.Builder{}.Append( "driveID1", odConsts.RootPathDir) -type testFolderMatcher struct { - scope selectors.SharePointScope -} - -func (fm testFolderMatcher) IsAny() bool { - return fm.scope.IsAny(selectors.SharePointLibraryFolder) -} - -func (fm testFolderMatcher) Matches(p string) bool { - return fm.scope.Matches(selectors.SharePointLibraryFolder, p) -} - // --------------------------------------------------------------------------- // tests // --------------------------------------------------------------------------- @@ -113,10 +101,9 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() { ) c := onedrive.NewCollections( - &libraryBackupHandler{api.Drives{}}, + &libraryBackupHandler{api.Drives{}, test.scope}, tenantID, site, - testFolderMatcher{test.scope}, nil, control.Defaults()) diff --git a/src/internal/m365/sharepoint/library_handler.go b/src/internal/m365/sharepoint/library_handler.go index 4ea9e1e92..eff8a9bff 100644 --- a/src/internal/m365/sharepoint/library_handler.go +++ b/src/internal/m365/sharepoint/library_handler.go @@ -13,13 +13,15 @@ import ( odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/path" + "github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/services/m365/api" ) var _ onedrive.BackupHandler = 
&libraryBackupHandler{} type libraryBackupHandler struct { - ac api.Drives + ac api.Drives + scope selectors.SharePointScope } func (h libraryBackupHandler) Get( @@ -139,6 +141,14 @@ func (h libraryBackupHandler) GetItem( return h.ac.GetItem(ctx, driveID, itemID) } +func (h libraryBackupHandler) IsAllPass() bool { + return h.scope.IsAny(selectors.SharePointLibraryFolder) +} + +func (h libraryBackupHandler) IncludesDir(dir string) bool { + return h.scope.Matches(selectors.SharePointLibraryFolder, dir) +} + // --------------------------------------------------------------------------- // Restore // --------------------------------------------------------------------------- From 416383a99c77583dbba5e10691fc598b69f2921f Mon Sep 17 00:00:00 2001 From: Keepers Date: Thu, 15 Jun 2023 18:07:09 -0600 Subject: [PATCH 36/41] cascade restoreCfg collision policy into onedrive (#3623) Adds collision policy handling to onedrive item posts. This allows us to override the default "replace" behavior that currently returns a 409 for the creation endpoint, in case we want to use skip (ie: fail) or copy handling. --- #### Does this PR need a docs update or release note? - [x] :no_entry: No #### Type of change - [x] :sunflower: Feature #### Issue(s) * #3562 #### Test Plan - [x] :muscle: Manual - [x] :zap: Unit test - [x] :green_heart: E2E --- .../m365/exchange/mock/collections.go | 1 + src/internal/m365/graph/errors.go | 28 ++- src/internal/m365/onedrive/handlers.go | 2 + src/internal/m365/onedrive/item_handler.go | 4 +- src/internal/m365/onedrive/item_test.go | 7 +- src/internal/m365/onedrive/mock/handlers.go | 67 ++++++ src/internal/m365/onedrive/mock/item.go | 85 +++++++ src/internal/m365/onedrive/restore.go | 62 +++++- src/internal/m365/onedrive/restore_test.go | 76 +++++++ src/internal/m365/onedrive/url_cache_test.go | 7 +- src/internal/m365/restore.go | 1 + .../m365/sharepoint/library_handler.go | 4 +- src/internal/m365/sharepoint/restore.go | 2 +- .../operations/backup_integration_test.go | 3 +- src/pkg/services/m365/api/drive.go | 28 ++- src/pkg/services/m365/api/drive_test.go | 207 +++++++++++++++++- 16 files changed, 549 insertions(+), 35 deletions(-) diff --git a/src/internal/m365/exchange/mock/collections.go b/src/internal/m365/exchange/mock/collections.go index 36de3cfd1..0e601da3d 100644 --- a/src/internal/m365/exchange/mock/collections.go +++ b/src/internal/m365/exchange/mock/collections.go @@ -138,6 +138,7 @@ func (medc *DataCollection) Items( return res } +// TODO: move to data/mock for service-agnostic mocking // Data represents a single item retrieved from exchange type Data struct { ID string diff --git a/src/internal/m365/graph/errors.go b/src/internal/m365/graph/errors.go index 65150868b..cbc82080c 100644 --- a/src/internal/m365/graph/errors.go +++ b/src/internal/m365/graph/errors.go @@ -33,13 +33,16 @@ const ( itemNotFoundShort errorCode = "itemNotFound" mailboxNotEnabledForRESTAPI errorCode = "MailboxNotEnabledForRESTAPI" malwareDetected errorCode = "malwareDetected" - requestResourceNotFound errorCode = "Request_ResourceNotFound" - quotaExceeded errorCode = "ErrorQuotaExceeded" - resourceNotFound errorCode = "ResourceNotFound" - resyncRequired errorCode = "ResyncRequired" // alt: resyncRequired - syncFolderNotFound errorCode = "ErrorSyncFolderNotFound" - syncStateInvalid errorCode = "SyncStateInvalid" - syncStateNotFound errorCode = "SyncStateNotFound" + // nameAlreadyExists occurs when a request with + // @microsoft.graph.conflictBehavior=fail finds a conflicting file. 
+ nameAlreadyExists errorCode = "nameAlreadyExists" + quotaExceeded errorCode = "ErrorQuotaExceeded" + requestResourceNotFound errorCode = "Request_ResourceNotFound" + resourceNotFound errorCode = "ResourceNotFound" + resyncRequired errorCode = "ResyncRequired" // alt: resyncRequired + syncFolderNotFound errorCode = "ErrorSyncFolderNotFound" + syncStateInvalid errorCode = "SyncStateInvalid" + syncStateNotFound errorCode = "SyncStateNotFound" // This error occurs when an attempt is made to create a folder that has // the same name as another folder in the same parent. Such duplicate folder // names are not allowed by graph. @@ -79,6 +82,12 @@ var ( // https://learn.microsoft.com/en-us/graph/errors#code-property ErrInvalidDelta = clues.New("invalid delta token") + // ErrItemAlreadyExistsConflict denotes that a post or put attempted to create + // an item which already exists by some unique identifier. The identifier is + // not always the id. For example, in onedrive, this error can be produced + // when filenames collide in a @microsoft.graph.conflictBehavior=fail request. + ErrItemAlreadyExistsConflict = clues.New("item already exists") + // ErrServiceNotEnabled identifies that a resource owner does not have // access to a given service. ErrServiceNotEnabled = clues.New("service is not enabled for that resource owner") @@ -162,6 +171,11 @@ func IsErrUnauthorized(err error) bool { return clues.HasLabel(err, LabelStatus(http.StatusUnauthorized)) } +func IsErrItemAlreadyExistsConflict(err error) bool { + return hasErrorCode(err, nameAlreadyExists) || + errors.Is(err, ErrItemAlreadyExistsConflict) +} + // LabelStatus transforms the provided statusCode into // a standard label that can be attached to a clues error // and later reviewed when checking error statuses. 
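
The error plumbing above is one half of the change; the other half (below, in the drive API) maps each Corso collision policy onto Graph's `@microsoft.graph.conflictBehavior` query parameter. Graph offers no native "skip" behavior, so `control.Skip` is emulated by requesting `fail` and letting the caller translate the resulting `nameAlreadyExists` conflict into a no-op. A minimal, self-contained sketch of that translation (simplified names, not the patch code itself):

```go
package main

import (
	"errors"
	"fmt"
)

// CollisionPolicy stands in for the control package's collision options.
type CollisionPolicy int

const (
	Skip    CollisionPolicy = iota // emulated: request "fail", then no-op on conflict
	Copy                           // graph: "rename"
	Replace                        // graph: "replace"
)

// errItemAlreadyExists is the sentinel callers can errors.Is against,
// however graph happened to report the collision.
var errItemAlreadyExists = errors.New("item already exists")

// conflictBehavior translates a collision policy into the value of the
// @microsoft.graph.conflictBehavior query parameter.
func conflictBehavior(cp CollisionPolicy) string {
	switch cp {
	case Replace:
		return "replace"
	case Copy:
		return "rename"
	default:
		return "fail"
	}
}

// childrenURL builds the raw POST url for creating an item in a folder,
// mirroring the role of itemChildrenRawURLFmt in the patch.
func childrenURL(driveID, parentFolderID string, cp CollisionPolicy) string {
	return fmt.Sprintf(
		"https://graph.microsoft.com/v1.0/drives/%s/items/%s/children?@microsoft.graph.conflictBehavior=%s",
		driveID, parentFolderID, conflictBehavior(cp))
}

func main() {
	// prints .../drives/d1/items/f1/children?@microsoft.graph.conflictBehavior=fail
	fmt.Println(childrenURL("d1", "f1", Skip))

	// a caller honoring Skip turns the conflict into a non-error:
	err := fmt.Errorf("posting item: %w", errItemAlreadyExists)
	if errors.Is(err, errItemAlreadyExists) {
		fmt.Println("collision: skipping item")
	}
}
```

Mapping `Skip` to `fail` keeps the decision with the caller: `restoreItem` can treat the conflict as a skipped item rather than a failure, while folder creation deliberately passes `Replace` so that posting an already-existing folder no-ops and returns it, making the call idempotent.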
diff --git a/src/internal/m365/onedrive/handlers.go b/src/internal/m365/onedrive/handlers.go index 079bcd727..3090703d9 100644 --- a/src/internal/m365/onedrive/handlers.go +++ b/src/internal/m365/onedrive/handlers.go @@ -7,6 +7,7 @@ import ( "github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/alcionai/corso/src/pkg/backup/details" + "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/services/m365/api" ) @@ -117,6 +118,7 @@ type PostItemInContainerer interface { ctx context.Context, driveID, parentFolderID string, newItem models.DriveItemable, + onCollision control.CollisionPolicy, ) (models.DriveItemable, error) } diff --git a/src/internal/m365/onedrive/item_handler.go b/src/internal/m365/onedrive/item_handler.go index 904a20bd6..a23e07c61 100644 --- a/src/internal/m365/onedrive/item_handler.go +++ b/src/internal/m365/onedrive/item_handler.go @@ -11,6 +11,7 @@ import ( "github.com/alcionai/corso/src/internal/common/ptr" odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts" "github.com/alcionai/corso/src/pkg/backup/details" + "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/services/m365/api" @@ -172,8 +173,9 @@ func (h itemRestoreHandler) PostItemInContainer( ctx context.Context, driveID, parentFolderID string, newItem models.DriveItemable, + onCollision control.CollisionPolicy, ) (models.DriveItemable, error) { - return h.ac.PostItemInContainer(ctx, driveID, parentFolderID, newItem) + return h.ac.PostItemInContainer(ctx, driveID, parentFolderID, newItem, onCollision) } func (h itemRestoreHandler) GetFolderByName( diff --git a/src/internal/m365/onedrive/item_test.go b/src/internal/m365/onedrive/item_test.go index d862e6edf..44b3005db 100644 --- a/src/internal/m365/onedrive/item_test.go +++ b/src/internal/m365/onedrive/item_test.go @@ -15,6 +15,7 @@ import ( "github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/tester" + "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/selectors" @@ -167,7 +168,8 @@ func (suite *ItemIntegrationSuite) TestItemWriter() { ctx, test.driveID, ptr.Val(root.GetId()), - newItem(newFolderName, true)) + newItem(newFolderName, true), + control.Copy) require.NoError(t, err, clues.ToCore(err)) require.NotNil(t, newFolder.GetId()) @@ -178,7 +180,8 @@ func (suite *ItemIntegrationSuite) TestItemWriter() { ctx, test.driveID, ptr.Val(newFolder.GetId()), - newItem(newItemName, false)) + newItem(newItemName, false), + control.Copy) require.NoError(t, err, clues.ToCore(err)) require.NotNil(t, newItem.GetId()) diff --git a/src/internal/m365/onedrive/mock/handlers.go b/src/internal/m365/onedrive/mock/handlers.go index bafda5022..23ef8a4d5 100644 --- a/src/internal/m365/onedrive/mock/handlers.go +++ b/src/internal/m365/onedrive/mock/handlers.go @@ -5,10 +5,12 @@ import ( "net/http" "github.com/alcionai/clues" + "github.com/microsoftgraph/msgraph-sdk-go/drives" "github.com/microsoftgraph/msgraph-sdk-go/models" odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts" "github.com/alcionai/corso/src/pkg/backup/details" + "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/path" 
"github.com/alcionai/corso/src/pkg/services/m365/api" ) @@ -223,3 +225,68 @@ func (m GetsItemPermission) GetItemPermission( ) (models.PermissionCollectionResponseable, error) { return m.Perm, m.Err } + +// --------------------------------------------------------------------------- +// Restore Handler +// --------------------------------------------------------------------------- + +type RestoreHandler struct { + ItemInfo details.ItemInfo + + PostItemResp models.DriveItemable + PostItemErr error +} + +func (h RestoreHandler) AugmentItemInfo( + details.ItemInfo, + models.DriveItemable, + int64, + *path.Builder, +) details.ItemInfo { + return h.ItemInfo +} + +func (h RestoreHandler) NewItemContentUpload( + context.Context, + string, string, +) (models.UploadSessionable, error) { + return nil, clues.New("not implemented") +} + +func (h RestoreHandler) DeleteItemPermission( + context.Context, + string, string, string, +) error { + return clues.New("not implemented") +} + +func (h RestoreHandler) PostItemPermissionUpdate( + context.Context, + string, string, + *drives.ItemItemsItemInvitePostRequestBody, +) (drives.ItemItemsItemInviteResponseable, error) { + return nil, clues.New("not implemented") +} + +func (h RestoreHandler) PostItemInContainer( + context.Context, + string, string, + models.DriveItemable, + control.CollisionPolicy, +) (models.DriveItemable, error) { + return h.PostItemResp, h.PostItemErr +} + +func (h RestoreHandler) GetFolderByName( + context.Context, + string, string, string, +) (models.DriveItemable, error) { + return nil, clues.New("not implemented") +} + +func (h RestoreHandler) GetRootFolder( + context.Context, + string, +) (models.DriveItemable, error) { + return nil, clues.New("not implemented") +} diff --git a/src/internal/m365/onedrive/mock/item.go b/src/internal/m365/onedrive/mock/item.go index dcd86e11c..4e54f179c 100644 --- a/src/internal/m365/onedrive/mock/item.go +++ b/src/internal/m365/onedrive/mock/item.go @@ -1,5 +1,90 @@ package mock +import ( + "bytes" + "context" + "io" + "time" + + "github.com/alcionai/corso/src/internal/data" + "github.com/alcionai/corso/src/pkg/backup/details" +) + +// --------------------------------------------------------------------------- +// data.Stream +// --------------------------------------------------------------------------- + +var _ data.Stream = &Data{} + +// TODO: move to data/mock for service-agnostic mocking +// Data represents a single item retrieved from, or restored to, onedrive +type Data struct { + ID string + Reader io.ReadCloser + ReadErr error + size int64 + modifiedTime time.Time + deleted bool +} + +func (d *Data) UUID() string { return d.ID } +func (d *Data) Deleted() bool { return d.deleted } +func (d *Data) Size() int64 { return d.size } +func (d *Data) ModTime() time.Time { return d.modifiedTime } + +func (d *Data) ToReader() io.ReadCloser { + if d.ReadErr != nil { + return io.NopCloser(errReader{d.ReadErr}) + } + + return d.Reader +} + +func (d *Data) Info() details.ItemInfo { + return details.ItemInfo{ + OneDrive: &details.OneDriveInfo{ + ItemType: details.OneDriveItem, + ItemName: "test.txt", + Size: 1, + }, + } +} + +type errReader struct { + readErr error +} + +func (er errReader) Read([]byte) (int, error) { + return 0, er.readErr +} + +// --------------------------------------------------------------------------- +// FetchItemByNamer +// --------------------------------------------------------------------------- + +var _ data.FetchItemByNamer = &FetchItemByName{} + +type FetchItemByName struct { + 
Item data.Stream + Err error +} + +func (f FetchItemByName) FetchItemByName(context.Context, string) (data.Stream, error) { + return f.Item, f.Err +} + +// --------------------------------------------------------------------------- +// stub payload +// --------------------------------------------------------------------------- + +func FileRespReadCloser(pl string) io.ReadCloser { + return io.NopCloser(bytes.NewReader([]byte(pl))) +} + +const DriveFileMetaData = `{ + "fileName": "fnords.txt" +}` + //nolint:lll const DriveFilePayloadData = `{ "@odata.context": "https://graph.microsoft.com/v1.0/$metadata#drives('b%22-8wC6Jt04EWvKr1fQUDOyw5Gk8jIUJdEjzqonlSRf48i67LJdwopT4-6kiycJ5AV')/items/$entity", diff --git a/src/internal/m365/onedrive/restore.go b/src/internal/m365/onedrive/restore.go index da3f83d92..0edbaf62a 100644 --- a/src/internal/m365/onedrive/restore.go +++ b/src/internal/m365/onedrive/restore.go @@ -76,10 +76,7 @@ func ConsumeRestoreCollections( el = errs.Local() ) - ctx = clues.Add( - ctx, - "backup_version", backupVersion, - "restore_location", restoreCfg.Location) + ctx = clues.Add(ctx, "backup_version", backupVersion) // Reorder collections so that the parents directories are created // before the child directories; a requirement for permissions. @@ -97,7 +94,6 @@ func ConsumeRestoreCollections( ictx = clues.Add( ctx, "category", dc.FullPath().Category(), - "destination", clues.Hide(restoreCfg.Location), "resource_owner", clues.Hide(dc.FullPath().ResourceOwner()), "full_path", dc.FullPath()) ) @@ -105,10 +101,10 @@ func ConsumeRestoreCollections( metrics, err = RestoreCollection( ictx, rh, + restoreCfg, backupVersion, dc, caches, - restoreCfg.Location, deets, opts.RestorePermissions, errs) @@ -141,12 +137,12 @@ func ConsumeRestoreCollections( func RestoreCollection( ctx context.Context, rh RestoreHandler, + restoreCfg control.RestoreConfig, backupVersion int, dc data.RestoreCollection, caches *restoreCaches, - restoreContainerName string, deets *details.Builder, - restorePerms bool, + restorePerms bool, // TODD: move into restoreConfig errs *fault.Bus, ) (support.CollectionMetrics, error) { var ( @@ -181,7 +177,13 @@ func RestoreCollection( // from the backup under this the restore folder instead of root) // i.e. Restore into `/` // the drive into which this folder gets restored is tracked separately in drivePath. - restoreDir := path.Builder{}.Append(restoreContainerName).Append(drivePath.Folders...) + restoreDir := &path.Builder{} + + if len(restoreCfg.Location) > 0 { + restoreDir = restoreDir.Append(restoreCfg.Location) + } + + restoreDir = restoreDir.Append(drivePath.Folders...) 
ctx = clues.Add( ctx, @@ -280,6 +282,7 @@ func RestoreCollection( itemInfo, skipped, err := restoreItem( ictx, rh, + restoreCfg, dc, backupVersion, drivePath, @@ -328,6 +331,7 @@ func RestoreCollection( func restoreItem( ctx context.Context, rh RestoreHandler, + restoreCfg control.RestoreConfig, fibn data.FetchItemByNamer, backupVersion int, drivePath *path.DrivePath, @@ -345,12 +349,17 @@ func restoreItem( itemInfo, err := restoreV0File( ctx, rh, + restoreCfg, drivePath, fibn, restoreFolderID, copyBuffer, itemData) if err != nil { + if errors.Is(err, graph.ErrItemAlreadyExistsConflict) && restoreCfg.OnCollision == control.Skip { + return details.ItemInfo{}, true, nil + } + return details.ItemInfo{}, false, clues.Wrap(err, "v0 restore") } @@ -394,6 +403,7 @@ func restoreItem( itemInfo, err := restoreV1File( ctx, rh, + restoreCfg, drivePath, fibn, restoreFolderID, @@ -403,6 +413,10 @@ func restoreItem( itemPath, itemData) if err != nil { + if errors.Is(err, graph.ErrItemAlreadyExistsConflict) && restoreCfg.OnCollision == control.Skip { + return details.ItemInfo{}, true, nil + } + return details.ItemInfo{}, false, clues.Wrap(err, "v1 restore") } @@ -414,6 +428,7 @@ func restoreItem( itemInfo, err := restoreV6File( ctx, rh, + restoreCfg, drivePath, fibn, restoreFolderID, @@ -423,6 +438,10 @@ func restoreItem( itemPath, itemData) if err != nil { + if errors.Is(err, graph.ErrItemAlreadyExistsConflict) && restoreCfg.OnCollision == control.Skip { + return details.ItemInfo{}, true, nil + } + return details.ItemInfo{}, false, clues.Wrap(err, "v6 restore") } @@ -432,6 +451,7 @@ func restoreItem( func restoreV0File( ctx context.Context, rh RestoreHandler, + restoreCfg control.RestoreConfig, drivePath *path.DrivePath, fibn data.FetchItemByNamer, restoreFolderID string, @@ -440,6 +460,7 @@ func restoreV0File( ) (details.ItemInfo, error) { _, itemInfo, err := restoreData( ctx, + restoreCfg, rh, fibn, itemData.UUID(), @@ -457,6 +478,7 @@ func restoreV0File( func restoreV1File( ctx context.Context, rh RestoreHandler, + restoreCfg control.RestoreConfig, drivePath *path.DrivePath, fibn data.FetchItemByNamer, restoreFolderID string, @@ -470,6 +492,7 @@ func restoreV1File( itemID, itemInfo, err := restoreData( ctx, + restoreCfg, rh, fibn, trimmedName, @@ -513,6 +536,7 @@ func restoreV1File( func restoreV6File( ctx context.Context, rh RestoreHandler, + restoreCfg control.RestoreConfig, drivePath *path.DrivePath, fibn data.FetchItemByNamer, restoreFolderID string, @@ -550,6 +574,7 @@ func restoreV6File( itemID, itemInfo, err := restoreData( ctx, + restoreCfg, rh, fibn, meta.FileName, @@ -683,7 +708,16 @@ func createRestoreFolders( } // create the folder if not found - folderItem, err = fr.PostItemInContainer(ictx, driveID, parentFolderID, newItem(folder, true)) + // the Replace collision policy is used since collisions on that + // policy will no-op and return the existing folder. This has two + // benefits: first, we get to treat the post as idempotent; and + // second, we don't have to worry about race conditions. 
+ folderItem, err = fr.PostItemInContainer( + ictx, + driveID, + parentFolderID, + newItem(folder, true), + control.Replace) if err != nil { return "", clues.Wrap(err, "creating folder") } @@ -706,6 +740,7 @@ type itemRestorer interface { // restoreData will create a new item in the specified `parentFolderID` and upload the data.Stream func restoreData( ctx context.Context, + restoreCfg control.RestoreConfig, ir itemRestorer, fibn data.FetchItemByNamer, name string, @@ -725,7 +760,12 @@ func restoreData( } // Create Item - newItem, err := ir.PostItemInContainer(ctx, driveID, parentFolderID, newItem(name, false)) + newItem, err := ir.PostItemInContainer( + ctx, + driveID, + parentFolderID, + newItem(name, false), + restoreCfg.OnCollision) if err != nil { return "", details.ItemInfo{}, err } diff --git a/src/internal/m365/onedrive/restore_test.go b/src/internal/m365/onedrive/restore_test.go index 0af13eccb..e88216721 100644 --- a/src/internal/m365/onedrive/restore_test.go +++ b/src/internal/m365/onedrive/restore_test.go @@ -4,12 +4,17 @@ import ( "testing" "github.com/alcionai/clues" + "github.com/google/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" + "github.com/alcionai/corso/src/internal/m365/graph" + odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts" + "github.com/alcionai/corso/src/internal/m365/onedrive/mock" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/version" + "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/path" ) @@ -315,3 +320,74 @@ func (suite *RestoreUnitSuite) TestAugmentRestorePaths_DifferentRestorePath() { }) } } + +func (suite *RestoreUnitSuite) TestRestoreItem_errItemAlreadyExists() { + table := []struct { + name string + onCollision control.CollisionPolicy + expectErr func(*testing.T, error) + expectSkipped assert.BoolAssertionFunc + }{ + { + name: "skip", + onCollision: control.Skip, + expectErr: func(t *testing.T, err error) { + require.NoError(t, err, clues.ToCore(err)) + }, + expectSkipped: assert.True, + }, + { + name: "replace", + onCollision: control.Replace, + expectErr: func(t *testing.T, err error) { + require.ErrorIs(t, err, graph.ErrItemAlreadyExistsConflict, clues.ToCore(err)) + }, + expectSkipped: assert.False, + }, + } + for _, test := range table { + suite.Run(test.name, func() { + t := suite.T() + + ctx, flush := tester.NewContext(t) + defer flush() + + var ( + rh = mock.RestoreHandler{ + PostItemErr: graph.ErrItemAlreadyExistsConflict, + } + restoreCfg = control.RestoreConfig{ + OnCollision: test.onCollision, + } + dpb = odConsts.DriveFolderPrefixBuilder("driveID1") + ) + + dpp, err := dpb.ToDataLayerOneDrivePath("t", "u", false) + require.NoError(t, err) + + dp, err := path.ToDrivePath(dpp) + require.NoError(t, err) + + _, skip, err := restoreItem( + ctx, + rh, + restoreCfg, + mock.FetchItemByName{ + Item: &mock.Data{ + Reader: mock.FileRespReadCloser(mock.DriveFileMetaData), + }, + }, + version.Backup, + dp, + "", + []byte{}, + NewRestoreCaches(), + false, + &mock.Data{ID: uuid.NewString()}, + nil) + + test.expectErr(t, err) + test.expectSkipped(t, skip) + }) + } +} diff --git a/src/internal/m365/onedrive/url_cache_test.go b/src/internal/m365/onedrive/url_cache_test.go index a7bafb68e..6e5da998c 100644 --- a/src/internal/m365/onedrive/url_cache_test.go +++ b/src/internal/m365/onedrive/url_cache_test.go @@ -17,6 +17,7 @@ import ( "github.com/alcionai/corso/src/internal/common/ptr" 
"github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/internal/tester" + "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/services/m365/api" @@ -81,7 +82,8 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() { ctx, driveID, ptr.Val(root.GetId()), - newItem(newFolderName, true)) + newItem(newFolderName, true), + control.Copy) require.NoError(t, err, clues.ToCore(err)) require.NotNil(t, newFolder.GetId()) @@ -97,7 +99,8 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() { ctx, driveID, nfid, - newItem(newItemName, false)) + newItem(newItemName, false), + control.Copy) if err != nil { // Something bad happened, skip this item continue diff --git a/src/internal/m365/restore.go b/src/internal/m365/restore.go index 07d4cd968..18c35060c 100644 --- a/src/internal/m365/restore.go +++ b/src/internal/m365/restore.go @@ -34,6 +34,7 @@ func (ctrl *Controller) ConsumeRestoreCollections( defer end() ctx = graph.BindRateLimiterConfig(ctx, graph.LimiterCfg{Service: sels.PathService()}) + ctx = clues.Add(ctx, "restore_config", restoreCfg) // TODO(rkeepers): needs PII control var ( status *support.ControllerOperationStatus diff --git a/src/internal/m365/sharepoint/library_handler.go b/src/internal/m365/sharepoint/library_handler.go index eff8a9bff..a51621f7b 100644 --- a/src/internal/m365/sharepoint/library_handler.go +++ b/src/internal/m365/sharepoint/library_handler.go @@ -12,6 +12,7 @@ import ( "github.com/alcionai/corso/src/internal/m365/onedrive" odConsts "github.com/alcionai/corso/src/internal/m365/onedrive/consts" "github.com/alcionai/corso/src/pkg/backup/details" + "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/services/m365/api" @@ -198,8 +199,9 @@ func (h libraryRestoreHandler) PostItemInContainer( ctx context.Context, driveID, parentFolderID string, newItem models.DriveItemable, + onCollision control.CollisionPolicy, ) (models.DriveItemable, error) { - return h.ac.PostItemInContainer(ctx, driveID, parentFolderID, newItem) + return h.ac.PostItemInContainer(ctx, driveID, parentFolderID, newItem, onCollision) } func (h libraryRestoreHandler) GetFolderByName( diff --git a/src/internal/m365/sharepoint/restore.go b/src/internal/m365/sharepoint/restore.go index 772515fd4..191ac5f96 100644 --- a/src/internal/m365/sharepoint/restore.go +++ b/src/internal/m365/sharepoint/restore.go @@ -68,10 +68,10 @@ func ConsumeRestoreCollections( metrics, err = onedrive.RestoreCollection( ictx, libraryRestoreHandler{ac.Drives()}, + restoreCfg, backupVersion, dc, caches, - restoreCfg.Location, deets, opts.RestorePermissions, errs) diff --git a/src/internal/operations/backup_integration_test.go b/src/internal/operations/backup_integration_test.go index 771c77122..36ec0cfa3 100644 --- a/src/internal/operations/backup_integration_test.go +++ b/src/internal/operations/backup_integration_test.go @@ -1564,7 +1564,8 @@ func runDriveIncrementalTest( ctx, driveID, targetContainer, - driveItem) + driveItem, + control.Copy) require.NoErrorf(t, err, "creating new file %v", clues.ToCore(err)) newFileID = ptr.Val(newFile.GetId()) diff --git a/src/pkg/services/m365/api/drive.go b/src/pkg/services/m365/api/drive.go index 478da708e..d1dd93fc3 100644 --- a/src/pkg/services/m365/api/drive.go +++ b/src/pkg/services/m365/api/drive.go @@ -9,6 +9,7 @@ 
import ( "github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/alcionai/corso/src/internal/m365/graph" + "github.com/alcionai/corso/src/pkg/control" ) // --------------------------------------------------------------------------- @@ -123,21 +124,44 @@ func (c Drives) NewItemContentUpload( return r, nil } -const itemChildrenRawURLFmt = "https://graph.microsoft.com/v1.0/drives/%s/items/%s/children" +//nolint:lll +const itemChildrenRawURLFmt = "https://graph.microsoft.com/v1.0/drives/%s/items/%s/children?@microsoft.graph.conflictBehavior=%s" + +const ( + conflictBehaviorFail = "fail" + conflictBehaviorRename = "rename" + conflictBehaviorReplace = "replace" +) // PostItemInContainer creates a new item in the specified folder func (c Drives) PostItemInContainer( ctx context.Context, driveID, parentFolderID string, newItem models.DriveItemable, + onCollision control.CollisionPolicy, ) (models.DriveItemable, error) { + // graph api has no policy for Skip; instead we wrap the same-name failure + // as a graph.ErrItemAlreadyExistsConflict. + conflictBehavior := conflictBehaviorFail + + switch onCollision { + case control.Replace: + conflictBehavior = conflictBehaviorReplace + case control.Copy: + conflictBehavior = conflictBehaviorRename + } + // Graph SDK doesn't yet provide a POST method for `/children` so we set the `rawUrl` ourselves as recommended // here: https://github.com/microsoftgraph/msgraph-sdk-go/issues/155#issuecomment-1136254310 - rawURL := fmt.Sprintf(itemChildrenRawURLFmt, driveID, parentFolderID) + rawURL := fmt.Sprintf(itemChildrenRawURLFmt, driveID, parentFolderID, conflictBehavior) builder := drives.NewItemItemsRequestBuilder(rawURL, c.Stable.Adapter()) newItem, err := builder.Post(ctx, newItem, nil) if err != nil { + if graph.IsErrItemAlreadyExistsConflict(err) { + return nil, clues.Stack(graph.ErrItemAlreadyExistsConflict, err) + } + return nil, graph.Wrap(ctx, err, "creating item in folder") } diff --git a/src/pkg/services/m365/api/drive_test.go b/src/pkg/services/m365/api/drive_test.go index 22d6d71e6..12329427c 100644 --- a/src/pkg/services/m365/api/drive_test.go +++ b/src/pkg/services/m365/api/drive_test.go @@ -4,23 +4,35 @@ import ( "testing" "github.com/alcionai/clues" + "github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" + "github.com/alcionai/corso/src/internal/common/ptr" + "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/pkg/account" + "github.com/alcionai/corso/src/pkg/control" + "github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/services/m365/api" ) -type OneDriveAPISuite struct { +type DriveAPISuite struct { tester.Suite - creds account.M365Config - ac api.Client + creds account.M365Config + ac api.Client + driveID string + rootFolderID string } -func (suite *OneDriveAPISuite) SetupSuite() { +func (suite *DriveAPISuite) SetupSuite() { t := suite.T() + + ctx, flush := tester.NewContext(t) + defer flush() + + userID := tester.M365UserID(t) a := tester.NewM365Account(t) creds, err := a.M365Config() require.NoError(t, err, clues.ToCore(err)) @@ -28,17 +40,27 @@ func (suite *OneDriveAPISuite) SetupSuite() { suite.creds = creds suite.ac, err = api.NewClient(creds) require.NoError(t, err, clues.ToCore(err)) + + drive, err := suite.ac.Users().GetDefaultDrive(ctx, userID) + require.NoError(t, err, clues.ToCore(err)) + + suite.driveID = 
ptr.Val(drive.GetId()) + + rootFolder, err := suite.ac.Drives().GetRootFolder(ctx, suite.driveID) + require.NoError(t, err, clues.ToCore(err)) + + suite.rootFolderID = ptr.Val(rootFolder.GetId()) } -func TestOneDriveAPIs(t *testing.T) { - suite.Run(t, &OneDriveAPISuite{ +func TestDriveAPIs(t *testing.T) { + suite.Run(t, &DriveAPISuite{ Suite: tester.NewIntegrationSuite( t, [][]string{tester.M365AcctCredEnvs}), }) } -func (suite *OneDriveAPISuite) TestCreatePagerAndGetPage() { +func (suite *DriveAPISuite) TestDrives_CreatePagerAndGetPage() { t := suite.T() ctx, flush := tester.NewContext(t) @@ -51,3 +73,174 @@ func (suite *OneDriveAPISuite) TestCreatePagerAndGetPage() { assert.NoError(t, err, clues.ToCore(err)) assert.NotNil(t, a) } + +// newItem initializes a `models.DriveItemable` that can be used as input to `createItem` +func newItem(name string, folder bool) *models.DriveItem { + itemToCreate := models.NewDriveItem() + itemToCreate.SetName(&name) + + if folder { + itemToCreate.SetFolder(models.NewFolder()) + } else { + itemToCreate.SetFile(models.NewFile()) + } + + return itemToCreate +} + +func (suite *DriveAPISuite) TestDrives_PostItemInContainer() { + t := suite.T() + + ctx, flush := tester.NewContext(t) + defer flush() + + rc := testdata.DefaultRestoreConfig("drive_api_post_item") + + // generate a parent for the test data + parent, err := suite.ac.Drives().PostItemInContainer( + ctx, + suite.driveID, + suite.rootFolderID, + newItem(rc.Location, true), + control.Replace) + require.NoError(t, err, clues.ToCore(err)) + + // generate a folder to use for collision testing + folder := newItem("collision", true) + origFolder, err := suite.ac.Drives().PostItemInContainer( + ctx, + suite.driveID, + ptr.Val(parent.GetId()), + folder, + control.Copy) + require.NoError(t, err, clues.ToCore(err)) + + // generate an item to use for collision testing + file := newItem("collision.txt", false) + origFile, err := suite.ac.Drives().PostItemInContainer( + ctx, + suite.driveID, + ptr.Val(parent.GetId()), + file, + control.Copy) + require.NoError(t, err, clues.ToCore(err)) + + table := []struct { + name string + onCollision control.CollisionPolicy + postItem models.DriveItemable + expectErr func(t *testing.T, err error) + expectItem func(t *testing.T, i models.DriveItemable) + }{ + { + name: "fail folder", + onCollision: control.Skip, + postItem: folder, + expectErr: func(t *testing.T, err error) { + assert.ErrorIs(t, err, graph.ErrItemAlreadyExistsConflict, clues.ToCore(err)) + }, + expectItem: func(t *testing.T, i models.DriveItemable) { + assert.Nil(t, i) + }, + }, + { + name: "rename folder", + onCollision: control.Copy, + postItem: folder, + expectErr: func(t *testing.T, err error) { + assert.NoError(t, err, clues.ToCore(err)) + }, + expectItem: func(t *testing.T, i models.DriveItemable) { + assert.NotEqual( + t, + ptr.Val(origFolder.GetId()), + ptr.Val(i.GetId()), + "renamed item should have a different id") + assert.NotEqual( + t, + ptr.Val(origFolder.GetName()), + ptr.Val(i.GetName()), + "renamed item should have a different name") + }, + }, + { + name: "replace folder", + onCollision: control.Replace, + postItem: folder, + expectErr: func(t *testing.T, err error) { + assert.NoError(t, err, clues.ToCore(err)) + }, + expectItem: func(t *testing.T, i models.DriveItemable) { + assert.Equal( + t, + ptr.Val(origFolder.GetId()), + ptr.Val(i.GetId()), + "replaced item should have the same id") + assert.Equal( + t, + ptr.Val(origFolder.GetName()), + ptr.Val(i.GetName()), + "replaced item should have 
the same name") + }, + }, + { + name: "fail file", + onCollision: control.Skip, + postItem: file, + expectErr: func(t *testing.T, err error) { + assert.ErrorIs(t, err, graph.ErrItemAlreadyExistsConflict, clues.ToCore(err)) + }, + expectItem: func(t *testing.T, i models.DriveItemable) { + assert.Nil(t, i) + }, + }, + { + name: "rename file", + onCollision: control.Copy, + postItem: file, + expectErr: func(t *testing.T, err error) { + assert.NoError(t, err, clues.ToCore(err)) + }, + expectItem: func(t *testing.T, i models.DriveItemable) { + assert.NotEqual( + t, + ptr.Val(origFile.GetId()), + ptr.Val(i.GetId()), + "renamed item should have a different id") + assert.NotEqual( + t, + ptr.Val(origFolder.GetName()), + ptr.Val(i.GetName()), + "renamed item should have a different name") + }, + }, + // FIXME: this *should* behave the same as folder collision, but there's either a + // bug or a deviation in graph api behavior. + // See open ticket: https://github.com/OneDrive/onedrive-api-docs/issues/1702 + { + name: "replace file", + onCollision: control.Replace, + postItem: file, + expectErr: func(t *testing.T, err error) { + assert.ErrorIs(t, err, graph.ErrItemAlreadyExistsConflict, clues.ToCore(err)) + }, + expectItem: func(t *testing.T, i models.DriveItemable) { + assert.Nil(t, i) + }, + }, + } + for _, test := range table { + suite.Run(test.name, func() { + t := suite.T() + i, err := suite.ac.Drives().PostItemInContainer( + ctx, + suite.driveID, + ptr.Val(parent.GetId()), + test.postItem, + test.onCollision) + + test.expectErr(t, err) + test.expectItem(t, i) + }) + } +} From c70207b1f85e0cbc7a9364b778c6985b1b1f88d4 Mon Sep 17 00:00:00 2001 From: Keepers Date: Thu, 15 Jun 2023 18:38:16 -0600 Subject: [PATCH 37/41] separate pager and plain apis for exchange (#3627) Separates the pager and enumerattion functionality from the rest of the exchange api funcs for each category. Purely code movement, no logic changes. --- #### Does this PR need a docs update or release note? 
- [x] :no_entry: No #### Type of change - [x] :broom: Tech Debt/Cleanup #### Issue(s) * #3562 #### Test Plan - [x] :zap: Unit test - [x] :green_heart: E2E --- src/pkg/services/m365/api/contacts.go | 213 --------------- src/pkg/services/m365/api/contacts_pager.go | 226 ++++++++++++++++ src/pkg/services/m365/api/events.go | 229 ---------------- src/pkg/services/m365/api/events_pager.go | 243 +++++++++++++++++ src/pkg/services/m365/api/mail.go | 259 ------------------- src/pkg/services/m365/api/mail_pager.go | 273 ++++++++++++++++++++ 6 files changed, 742 insertions(+), 701 deletions(-) create mode 100644 src/pkg/services/m365/api/contacts_pager.go create mode 100644 src/pkg/services/m365/api/events_pager.go create mode 100644 src/pkg/services/m365/api/mail_pager.go diff --git a/src/pkg/services/m365/api/contacts.go b/src/pkg/services/m365/api/contacts.go index 9d2c253d5..c253212cd 100644 --- a/src/pkg/services/m365/api/contacts.go +++ b/src/pkg/services/m365/api/contacts.go @@ -14,7 +14,6 @@ import ( "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/fault" - "github.com/alcionai/corso/src/pkg/selectors" ) // --------------------------------------------------------------------------- @@ -137,79 +136,6 @@ func (c Contacts) PatchFolder( return nil } -// --------------------------------------------------------------------------- -// container pager -// --------------------------------------------------------------------------- - -// EnumerateContainers iterates through all of the users current -// contacts folders, converting each to a graph.CacheFolder, and calling -// fn(cf) on each one. -// Folder hierarchy is represented in its current state, and does -// not contain historical data. -func (c Contacts) EnumerateContainers( - ctx context.Context, - userID, baseContainerID string, - fn func(graph.CachedContainer) error, - errs *fault.Bus, -) error { - config := &users.ItemContactFoldersItemChildFoldersRequestBuilderGetRequestConfiguration{ - QueryParameters: &users.ItemContactFoldersItemChildFoldersRequestBuilderGetQueryParameters{ - Select: idAnd(displayName, parentFolderID), - }, - } - - el := errs.Local() - builder := c.Stable. - Client(). - Users(). - ByUserId(userID). - ContactFolders(). - ByContactFolderId(baseContainerID). 
- ChildFolders() - - for { - if el.Failure() != nil { - break - } - - resp, err := builder.Get(ctx, config) - if err != nil { - return graph.Stack(ctx, err) - } - - for _, fold := range resp.GetValue() { - if el.Failure() != nil { - return el.Failure() - } - - if err := graph.CheckIDNameAndParentFolderID(fold); err != nil { - errs.AddRecoverable(ctx, graph.Stack(ctx, err).Label(fault.LabelForceNoBackupCreation)) - continue - } - - fctx := clues.Add( - ctx, - "container_id", ptr.Val(fold.GetId()), - "container_display_name", ptr.Val(fold.GetDisplayName())) - - temp := graph.NewCacheFolder(fold, nil, nil) - if err := fn(&temp); err != nil { - errs.AddRecoverable(ctx, graph.Stack(fctx, err).Label(fault.LabelForceNoBackupCreation)) - continue - } - } - - link, ok := ptr.ValOK(resp.GetOdataNextLink()) - if !ok { - break - } - - builder = users.NewItemContactFoldersItemChildFoldersRequestBuilder(link, c.Stable.Adapter()) - } - - return el.Failure() -} - // --------------------------------------------------------------------------- // items // --------------------------------------------------------------------------- @@ -284,145 +210,6 @@ func (c Contacts) DeleteItem( return nil } -// --------------------------------------------------------------------------- -// item pager -// --------------------------------------------------------------------------- - -var _ itemPager = &contactPager{} - -type contactPager struct { - gs graph.Servicer - builder *users.ItemContactFoldersItemContactsRequestBuilder - options *users.ItemContactFoldersItemContactsRequestBuilderGetRequestConfiguration -} - -func (c Contacts) NewContactPager( - ctx context.Context, - userID, containerID string, - immutableIDs bool, -) itemPager { - config := &users.ItemContactFoldersItemContactsRequestBuilderGetRequestConfiguration{ - QueryParameters: &users.ItemContactFoldersItemContactsRequestBuilderGetQueryParameters{ - Select: idAnd(parentFolderID), - }, - Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)), - } - - builder := c.Stable. - Client(). - Users(). - ByUserId(userID). - ContactFolders(). - ByContactFolderId(containerID). 
- Contacts() - - return &contactPager{c.Stable, builder, config} -} - -func (p *contactPager) getPage(ctx context.Context) (DeltaPageLinker, error) { - resp, err := p.builder.Get(ctx, p.options) - if err != nil { - return nil, graph.Stack(ctx, err) - } - - return EmptyDeltaLinker[models.Contactable]{PageLinkValuer: resp}, nil -} - -func (p *contactPager) setNext(nextLink string) { - p.builder = users.NewItemContactFoldersItemContactsRequestBuilder(nextLink, p.gs.Adapter()) -} - -// non delta pagers don't need reset -func (p *contactPager) reset(context.Context) {} - -func (p *contactPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) { - return toValues[models.Contactable](pl) -} - -// --------------------------------------------------------------------------- -// delta item pager -// --------------------------------------------------------------------------- - -var _ itemPager = &contactDeltaPager{} - -type contactDeltaPager struct { - gs graph.Servicer - userID string - containerID string - builder *users.ItemContactFoldersItemContactsDeltaRequestBuilder - options *users.ItemContactFoldersItemContactsDeltaRequestBuilderGetRequestConfiguration -} - -func getContactDeltaBuilder( - ctx context.Context, - gs graph.Servicer, - userID, containerID string, - options *users.ItemContactFoldersItemContactsDeltaRequestBuilderGetRequestConfiguration, -) *users.ItemContactFoldersItemContactsDeltaRequestBuilder { - builder := gs.Client().Users().ByUserId(userID).ContactFolders().ByContactFolderId(containerID).Contacts().Delta() - return builder -} - -func (c Contacts) NewContactDeltaPager( - ctx context.Context, - userID, containerID, oldDelta string, - immutableIDs bool, -) itemPager { - options := &users.ItemContactFoldersItemContactsDeltaRequestBuilderGetRequestConfiguration{ - QueryParameters: &users.ItemContactFoldersItemContactsDeltaRequestBuilderGetQueryParameters{ - Select: idAnd(parentFolderID), - }, - Headers: newPreferHeaders(preferPageSize(maxDeltaPageSize), preferImmutableIDs(immutableIDs)), - } - - var builder *users.ItemContactFoldersItemContactsDeltaRequestBuilder - if oldDelta != "" { - builder = users.NewItemContactFoldersItemContactsDeltaRequestBuilder(oldDelta, c.Stable.Adapter()) - } else { - builder = getContactDeltaBuilder(ctx, c.Stable, userID, containerID, options) - } - - return &contactDeltaPager{c.Stable, userID, containerID, builder, options} -} - -func (p *contactDeltaPager) getPage(ctx context.Context) (DeltaPageLinker, error) { - resp, err := p.builder.Get(ctx, p.options) - if err != nil { - return nil, graph.Stack(ctx, err) - } - - return resp, nil -} - -func (p *contactDeltaPager) setNext(nextLink string) { - p.builder = users.NewItemContactFoldersItemContactsDeltaRequestBuilder(nextLink, p.gs.Adapter()) -} - -func (p *contactDeltaPager) reset(ctx context.Context) { - p.builder = getContactDeltaBuilder(ctx, p.gs, p.userID, p.containerID, p.options) -} - -func (p *contactDeltaPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) { - return toValues[models.Contactable](pl) -} - -func (c Contacts) GetAddedAndRemovedItemIDs( - ctx context.Context, - userID, containerID, oldDelta string, - immutableIDs bool, - canMakeDeltaQueries bool, -) ([]string, []string, DeltaUpdate, error) { - ctx = clues.Add( - ctx, - "category", selectors.ExchangeContact, - "container_id", containerID) - - pager := c.NewContactPager(ctx, userID, containerID, immutableIDs) - deltaPager := c.NewContactDeltaPager(ctx, userID, containerID, oldDelta, immutableIDs) - - return 
getAddedAndRemovedItemIDs(ctx, c.Stable, pager, deltaPager, oldDelta, canMakeDeltaQueries) -} - // --------------------------------------------------------------------------- // Serialization // --------------------------------------------------------------------------- diff --git a/src/pkg/services/m365/api/contacts_pager.go b/src/pkg/services/m365/api/contacts_pager.go new file mode 100644 index 000000000..da79b3ce9 --- /dev/null +++ b/src/pkg/services/m365/api/contacts_pager.go @@ -0,0 +1,226 @@ +package api + +import ( + "context" + + "github.com/alcionai/clues" + "github.com/microsoftgraph/msgraph-sdk-go/models" + "github.com/microsoftgraph/msgraph-sdk-go/users" + + "github.com/alcionai/corso/src/internal/common/ptr" + "github.com/alcionai/corso/src/internal/m365/graph" + "github.com/alcionai/corso/src/pkg/fault" + "github.com/alcionai/corso/src/pkg/selectors" +) + +// --------------------------------------------------------------------------- +// container pager +// --------------------------------------------------------------------------- + +// EnumerateContainers iterates through all of the users current +// contacts folders, converting each to a graph.CacheFolder, and calling +// fn(cf) on each one. +// Folder hierarchy is represented in its current state, and does +// not contain historical data. +func (c Contacts) EnumerateContainers( + ctx context.Context, + userID, baseContainerID string, + fn func(graph.CachedContainer) error, + errs *fault.Bus, +) error { + config := &users.ItemContactFoldersItemChildFoldersRequestBuilderGetRequestConfiguration{ + QueryParameters: &users.ItemContactFoldersItemChildFoldersRequestBuilderGetQueryParameters{ + Select: idAnd(displayName, parentFolderID), + }, + } + + el := errs.Local() + builder := c.Stable. + Client(). + Users(). + ByUserId(userID). + ContactFolders(). + ByContactFolderId(baseContainerID). 
+ ChildFolders() + + for { + if el.Failure() != nil { + break + } + + resp, err := builder.Get(ctx, config) + if err != nil { + return graph.Stack(ctx, err) + } + + for _, fold := range resp.GetValue() { + if el.Failure() != nil { + return el.Failure() + } + + if err := graph.CheckIDNameAndParentFolderID(fold); err != nil { + errs.AddRecoverable(ctx, graph.Stack(ctx, err).Label(fault.LabelForceNoBackupCreation)) + continue + } + + fctx := clues.Add( + ctx, + "container_id", ptr.Val(fold.GetId()), + "container_display_name", ptr.Val(fold.GetDisplayName())) + + temp := graph.NewCacheFolder(fold, nil, nil) + if err := fn(&temp); err != nil { + errs.AddRecoverable(ctx, graph.Stack(fctx, err).Label(fault.LabelForceNoBackupCreation)) + continue + } + } + + link, ok := ptr.ValOK(resp.GetOdataNextLink()) + if !ok { + break + } + + builder = users.NewItemContactFoldersItemChildFoldersRequestBuilder(link, c.Stable.Adapter()) + } + + return el.Failure() +} + +// --------------------------------------------------------------------------- +// item pager +// --------------------------------------------------------------------------- + +var _ itemPager = &contactPager{} + +type contactPager struct { + gs graph.Servicer + builder *users.ItemContactFoldersItemContactsRequestBuilder + options *users.ItemContactFoldersItemContactsRequestBuilderGetRequestConfiguration +} + +func (c Contacts) NewContactPager( + ctx context.Context, + userID, containerID string, + immutableIDs bool, +) itemPager { + config := &users.ItemContactFoldersItemContactsRequestBuilderGetRequestConfiguration{ + QueryParameters: &users.ItemContactFoldersItemContactsRequestBuilderGetQueryParameters{ + Select: idAnd(parentFolderID), + }, + Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)), + } + + builder := c.Stable. + Client(). + Users(). + ByUserId(userID). + ContactFolders(). + ByContactFolderId(containerID). 
+ Contacts() + + return &contactPager{c.Stable, builder, config} +} + +func (p *contactPager) getPage(ctx context.Context) (DeltaPageLinker, error) { + resp, err := p.builder.Get(ctx, p.options) + if err != nil { + return nil, graph.Stack(ctx, err) + } + + return EmptyDeltaLinker[models.Contactable]{PageLinkValuer: resp}, nil +} + +func (p *contactPager) setNext(nextLink string) { + p.builder = users.NewItemContactFoldersItemContactsRequestBuilder(nextLink, p.gs.Adapter()) +} + +// non delta pagers don't need reset +func (p *contactPager) reset(context.Context) {} + +func (p *contactPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) { + return toValues[models.Contactable](pl) +} + +// --------------------------------------------------------------------------- +// delta item pager +// --------------------------------------------------------------------------- + +var _ itemPager = &contactDeltaPager{} + +type contactDeltaPager struct { + gs graph.Servicer + userID string + containerID string + builder *users.ItemContactFoldersItemContactsDeltaRequestBuilder + options *users.ItemContactFoldersItemContactsDeltaRequestBuilderGetRequestConfiguration +} + +func getContactDeltaBuilder( + ctx context.Context, + gs graph.Servicer, + userID, containerID string, + options *users.ItemContactFoldersItemContactsDeltaRequestBuilderGetRequestConfiguration, +) *users.ItemContactFoldersItemContactsDeltaRequestBuilder { + builder := gs.Client().Users().ByUserId(userID).ContactFolders().ByContactFolderId(containerID).Contacts().Delta() + return builder +} + +func (c Contacts) NewContactDeltaPager( + ctx context.Context, + userID, containerID, oldDelta string, + immutableIDs bool, +) itemPager { + options := &users.ItemContactFoldersItemContactsDeltaRequestBuilderGetRequestConfiguration{ + QueryParameters: &users.ItemContactFoldersItemContactsDeltaRequestBuilderGetQueryParameters{ + Select: idAnd(parentFolderID), + }, + Headers: newPreferHeaders(preferPageSize(maxDeltaPageSize), preferImmutableIDs(immutableIDs)), + } + + var builder *users.ItemContactFoldersItemContactsDeltaRequestBuilder + if oldDelta != "" { + builder = users.NewItemContactFoldersItemContactsDeltaRequestBuilder(oldDelta, c.Stable.Adapter()) + } else { + builder = getContactDeltaBuilder(ctx, c.Stable, userID, containerID, options) + } + + return &contactDeltaPager{c.Stable, userID, containerID, builder, options} +} + +func (p *contactDeltaPager) getPage(ctx context.Context) (DeltaPageLinker, error) { + resp, err := p.builder.Get(ctx, p.options) + if err != nil { + return nil, graph.Stack(ctx, err) + } + + return resp, nil +} + +func (p *contactDeltaPager) setNext(nextLink string) { + p.builder = users.NewItemContactFoldersItemContactsDeltaRequestBuilder(nextLink, p.gs.Adapter()) +} + +func (p *contactDeltaPager) reset(ctx context.Context) { + p.builder = getContactDeltaBuilder(ctx, p.gs, p.userID, p.containerID, p.options) +} + +func (p *contactDeltaPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) { + return toValues[models.Contactable](pl) +} + +func (c Contacts) GetAddedAndRemovedItemIDs( + ctx context.Context, + userID, containerID, oldDelta string, + immutableIDs bool, + canMakeDeltaQueries bool, +) ([]string, []string, DeltaUpdate, error) { + ctx = clues.Add( + ctx, + "category", selectors.ExchangeContact, + "container_id", containerID) + + pager := c.NewContactPager(ctx, userID, containerID, immutableIDs) + deltaPager := c.NewContactDeltaPager(ctx, userID, containerID, oldDelta, immutableIDs) + + return 
getAddedAndRemovedItemIDs(ctx, c.Stable, pager, deltaPager, oldDelta, canMakeDeltaQueries) +} diff --git a/src/pkg/services/m365/api/events.go b/src/pkg/services/m365/api/events.go index 37f40107b..574e2de21 100644 --- a/src/pkg/services/m365/api/events.go +++ b/src/pkg/services/m365/api/events.go @@ -18,7 +18,6 @@ import ( "github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/fault" - "github.com/alcionai/corso/src/pkg/path" ) // --------------------------------------------------------------------------- @@ -190,86 +189,6 @@ func (c Events) PatchCalendar( return nil } -// --------------------------------------------------------------------------- -// container pager -// --------------------------------------------------------------------------- - -// EnumerateContainers iterates through all of the users current -// calendars, converting each to a graph.CacheFolder, and -// calling fn(cf) on each one. -// Folder hierarchy is represented in its current state, and does -// not contain historical data. -func (c Events) EnumerateContainers( - ctx context.Context, - userID, baseContainerID string, - fn func(graph.CachedContainer) error, - errs *fault.Bus, -) error { - var ( - el = errs.Local() - config = &users.ItemCalendarsRequestBuilderGetRequestConfiguration{ - QueryParameters: &users.ItemCalendarsRequestBuilderGetQueryParameters{ - Select: idAnd("name"), - }, - } - builder = c.Stable. - Client(). - Users(). - ByUserId(userID). - Calendars() - ) - - for { - if el.Failure() != nil { - break - } - - resp, err := builder.Get(ctx, config) - if err != nil { - return graph.Stack(ctx, err) - } - - for _, cal := range resp.GetValue() { - if el.Failure() != nil { - break - } - - cd := CalendarDisplayable{Calendarable: cal} - if err := graph.CheckIDAndName(cd); err != nil { - errs.AddRecoverable(ctx, graph.Stack(ctx, err).Label(fault.LabelForceNoBackupCreation)) - continue - } - - fctx := clues.Add( - ctx, - "container_id", ptr.Val(cal.GetId()), - "container_name", ptr.Val(cal.GetName())) - - temp := graph.NewCacheFolder( - cd, - path.Builder{}.Append(ptr.Val(cd.GetId())), // storage path - path.Builder{}.Append(ptr.Val(cd.GetDisplayName()))) // display location - if err := fn(&temp); err != nil { - errs.AddRecoverable(ctx, graph.Stack(fctx, err).Label(fault.LabelForceNoBackupCreation)) - continue - } - } - - link, ok := ptr.ValOK(resp.GetOdataNextLink()) - if !ok { - break - } - - builder = users.NewItemCalendarsRequestBuilder(link, c.Stable.Adapter()) - } - - return el.Failure() -} - -const ( - eventBetaDeltaURLTemplate = "https://graph.microsoft.com/beta/users/%s/calendars/%s/events/delta" -) - // --------------------------------------------------------------------------- // items // --------------------------------------------------------------------------- @@ -434,154 +353,6 @@ func (c Events) PostLargeAttachment( return us, nil } -// --------------------------------------------------------------------------- -// item pager -// --------------------------------------------------------------------------- - -var _ itemPager = &eventPager{} - -type eventPager struct { - gs graph.Servicer - builder *users.ItemCalendarsItemEventsRequestBuilder - options *users.ItemCalendarsItemEventsRequestBuilderGetRequestConfiguration -} - -func (c Events) NewEventPager( - ctx context.Context, - userID, containerID string, - immutableIDs bool, -) (itemPager, error) { - options := 
&users.ItemCalendarsItemEventsRequestBuilderGetRequestConfiguration{ - Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)), - } - - builder := c.Stable. - Client(). - Users(). - ByUserId(userID). - Calendars(). - ByCalendarId(containerID). - Events() - - return &eventPager{c.Stable, builder, options}, nil -} - -func (p *eventPager) getPage(ctx context.Context) (DeltaPageLinker, error) { - resp, err := p.builder.Get(ctx, p.options) - if err != nil { - return nil, graph.Stack(ctx, err) - } - - return EmptyDeltaLinker[models.Eventable]{PageLinkValuer: resp}, nil -} - -func (p *eventPager) setNext(nextLink string) { - p.builder = users.NewItemCalendarsItemEventsRequestBuilder(nextLink, p.gs.Adapter()) -} - -// non delta pagers don't need reset -func (p *eventPager) reset(context.Context) {} - -func (p *eventPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) { - return toValues[models.Eventable](pl) -} - -// --------------------------------------------------------------------------- -// delta item pager -// --------------------------------------------------------------------------- - -var _ itemPager = &eventDeltaPager{} - -type eventDeltaPager struct { - gs graph.Servicer - userID string - containerID string - builder *users.ItemCalendarsItemEventsDeltaRequestBuilder - options *users.ItemCalendarsItemEventsDeltaRequestBuilderGetRequestConfiguration -} - -func (c Events) NewEventDeltaPager( - ctx context.Context, - userID, containerID, oldDelta string, - immutableIDs bool, -) (itemPager, error) { - options := &users.ItemCalendarsItemEventsDeltaRequestBuilderGetRequestConfiguration{ - Headers: newPreferHeaders(preferPageSize(maxDeltaPageSize), preferImmutableIDs(immutableIDs)), - } - - var builder *users.ItemCalendarsItemEventsDeltaRequestBuilder - - if oldDelta == "" { - builder = getEventDeltaBuilder(ctx, c.Stable, userID, containerID, options) - } else { - builder = users.NewItemCalendarsItemEventsDeltaRequestBuilder(oldDelta, c.Stable.Adapter()) - } - - return &eventDeltaPager{c.Stable, userID, containerID, builder, options}, nil -} - -func getEventDeltaBuilder( - ctx context.Context, - gs graph.Servicer, - userID, containerID string, - options *users.ItemCalendarsItemEventsDeltaRequestBuilderGetRequestConfiguration, -) *users.ItemCalendarsItemEventsDeltaRequestBuilder { - // Graph SDK only supports delta queries against events on the beta version, so we're - // manufacturing use of the beta version url to make the call instead. - // See: https://learn.microsoft.com/ko-kr/graph/api/event-delta?view=graph-rest-beta&tabs=http - // Note that the delta item body is skeletal compared to the actual event struct. Lucky - // for us, we only need the item ID. As a result, even though we hacked the version, the - // response body parses properly into the v1.0 structs and complies with our wanted interfaces. - // Likewise, the NextLink and DeltaLink odata tags carry our hack forward, so the rest of the code - // works as intended (until, at least, we want to _not_ call the beta anymore). 
- rawURL := fmt.Sprintf(eventBetaDeltaURLTemplate, userID, containerID) - builder := users.NewItemCalendarsItemEventsDeltaRequestBuilder(rawURL, gs.Adapter()) - - return builder -} - -func (p *eventDeltaPager) getPage(ctx context.Context) (DeltaPageLinker, error) { - resp, err := p.builder.Get(ctx, p.options) - if err != nil { - return nil, graph.Stack(ctx, err) - } - - return resp, nil -} - -func (p *eventDeltaPager) setNext(nextLink string) { - p.builder = users.NewItemCalendarsItemEventsDeltaRequestBuilder(nextLink, p.gs.Adapter()) -} - -func (p *eventDeltaPager) reset(ctx context.Context) { - p.builder = getEventDeltaBuilder(ctx, p.gs, p.userID, p.containerID, p.options) -} - -func (p *eventDeltaPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) { - return toValues[models.Eventable](pl) -} - -func (c Events) GetAddedAndRemovedItemIDs( - ctx context.Context, - userID, containerID, oldDelta string, - immutableIDs bool, - canMakeDeltaQueries bool, -) ([]string, []string, DeltaUpdate, error) { - ctx = clues.Add(ctx, "container_id", containerID) - - pager, err := c.NewEventPager(ctx, userID, containerID, immutableIDs) - if err != nil { - return nil, nil, DeltaUpdate{}, graph.Wrap(ctx, err, "creating non-delta pager") - } - - deltaPager, err := c.NewEventDeltaPager(ctx, userID, containerID, oldDelta, immutableIDs) - if err != nil { - return nil, nil, DeltaUpdate{}, graph.Wrap(ctx, err, "creating delta pager") - } - - return getAddedAndRemovedItemIDs(ctx, c.Stable, pager, deltaPager, oldDelta, canMakeDeltaQueries) -} - // --------------------------------------------------------------------------- // Serialization // --------------------------------------------------------------------------- diff --git a/src/pkg/services/m365/api/events_pager.go b/src/pkg/services/m365/api/events_pager.go new file mode 100644 index 000000000..bb390a288 --- /dev/null +++ b/src/pkg/services/m365/api/events_pager.go @@ -0,0 +1,243 @@ +package api + +import ( + "context" + "fmt" + + "github.com/alcionai/clues" + "github.com/microsoftgraph/msgraph-sdk-go/models" + "github.com/microsoftgraph/msgraph-sdk-go/users" + + "github.com/alcionai/corso/src/internal/common/ptr" + "github.com/alcionai/corso/src/internal/m365/graph" + "github.com/alcionai/corso/src/pkg/fault" + "github.com/alcionai/corso/src/pkg/path" +) + +const ( + eventBetaDeltaURLTemplate = "https://graph.microsoft.com/beta/users/%s/calendars/%s/events/delta" +) + +// --------------------------------------------------------------------------- +// container pager +// --------------------------------------------------------------------------- + +// EnumerateContainers iterates through all of the users current +// calendars, converting each to a graph.CacheFolder, and +// calling fn(cf) on each one. +// Folder hierarchy is represented in its current state, and does +// not contain historical data. +func (c Events) EnumerateContainers( + ctx context.Context, + userID, baseContainerID string, + fn func(graph.CachedContainer) error, + errs *fault.Bus, +) error { + var ( + el = errs.Local() + config = &users.ItemCalendarsRequestBuilderGetRequestConfiguration{ + QueryParameters: &users.ItemCalendarsRequestBuilderGetQueryParameters{ + Select: idAnd("name"), + }, + } + builder = c.Stable. + Client(). + Users(). + ByUserId(userID). 
+ Calendars() + ) + + for { + if el.Failure() != nil { + break + } + + resp, err := builder.Get(ctx, config) + if err != nil { + return graph.Stack(ctx, err) + } + + for _, cal := range resp.GetValue() { + if el.Failure() != nil { + break + } + + cd := CalendarDisplayable{Calendarable: cal} + if err := graph.CheckIDAndName(cd); err != nil { + errs.AddRecoverable(ctx, graph.Stack(ctx, err).Label(fault.LabelForceNoBackupCreation)) + continue + } + + fctx := clues.Add( + ctx, + "container_id", ptr.Val(cal.GetId()), + "container_name", ptr.Val(cal.GetName())) + + temp := graph.NewCacheFolder( + cd, + path.Builder{}.Append(ptr.Val(cd.GetId())), // storage path + path.Builder{}.Append(ptr.Val(cd.GetDisplayName()))) // display location + if err := fn(&temp); err != nil { + errs.AddRecoverable(ctx, graph.Stack(fctx, err).Label(fault.LabelForceNoBackupCreation)) + continue + } + } + + link, ok := ptr.ValOK(resp.GetOdataNextLink()) + if !ok { + break + } + + builder = users.NewItemCalendarsRequestBuilder(link, c.Stable.Adapter()) + } + + return el.Failure() +} + +// --------------------------------------------------------------------------- +// item pager +// --------------------------------------------------------------------------- + +var _ itemPager = &eventPager{} + +type eventPager struct { + gs graph.Servicer + builder *users.ItemCalendarsItemEventsRequestBuilder + options *users.ItemCalendarsItemEventsRequestBuilderGetRequestConfiguration +} + +func (c Events) NewEventPager( + ctx context.Context, + userID, containerID string, + immutableIDs bool, +) (itemPager, error) { + options := &users.ItemCalendarsItemEventsRequestBuilderGetRequestConfiguration{ + Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)), + } + + builder := c.Stable. + Client(). + Users(). + ByUserId(userID). + Calendars(). + ByCalendarId(containerID). 
+ Events() + + return &eventPager{c.Stable, builder, options}, nil +} + +func (p *eventPager) getPage(ctx context.Context) (DeltaPageLinker, error) { + resp, err := p.builder.Get(ctx, p.options) + if err != nil { + return nil, graph.Stack(ctx, err) + } + + return EmptyDeltaLinker[models.Eventable]{PageLinkValuer: resp}, nil +} + +func (p *eventPager) setNext(nextLink string) { + p.builder = users.NewItemCalendarsItemEventsRequestBuilder(nextLink, p.gs.Adapter()) +} + +// non delta pagers don't need reset +func (p *eventPager) reset(context.Context) {} + +func (p *eventPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) { + return toValues[models.Eventable](pl) +} + +// --------------------------------------------------------------------------- +// delta item pager +// --------------------------------------------------------------------------- + +var _ itemPager = &eventDeltaPager{} + +type eventDeltaPager struct { + gs graph.Servicer + userID string + containerID string + builder *users.ItemCalendarsItemEventsDeltaRequestBuilder + options *users.ItemCalendarsItemEventsDeltaRequestBuilderGetRequestConfiguration +} + +func (c Events) NewEventDeltaPager( + ctx context.Context, + userID, containerID, oldDelta string, + immutableIDs bool, +) (itemPager, error) { + options := &users.ItemCalendarsItemEventsDeltaRequestBuilderGetRequestConfiguration{ + Headers: newPreferHeaders(preferPageSize(maxDeltaPageSize), preferImmutableIDs(immutableIDs)), + } + + var builder *users.ItemCalendarsItemEventsDeltaRequestBuilder + + if oldDelta == "" { + builder = getEventDeltaBuilder(ctx, c.Stable, userID, containerID, options) + } else { + builder = users.NewItemCalendarsItemEventsDeltaRequestBuilder(oldDelta, c.Stable.Adapter()) + } + + return &eventDeltaPager{c.Stable, userID, containerID, builder, options}, nil +} + +func getEventDeltaBuilder( + ctx context.Context, + gs graph.Servicer, + userID, containerID string, + options *users.ItemCalendarsItemEventsDeltaRequestBuilderGetRequestConfiguration, +) *users.ItemCalendarsItemEventsDeltaRequestBuilder { + // Graph SDK only supports delta queries against events on the beta version, so we're + // manufacturing use of the beta version url to make the call instead. + // See: https://learn.microsoft.com/ko-kr/graph/api/event-delta?view=graph-rest-beta&tabs=http + // Note that the delta item body is skeletal compared to the actual event struct. Lucky + // for us, we only need the item ID. As a result, even though we hacked the version, the + // response body parses properly into the v1.0 structs and complies with our wanted interfaces. + // Likewise, the NextLink and DeltaLink odata tags carry our hack forward, so the rest of the code + // works as intended (until, at least, we want to _not_ call the beta anymore). 
+ rawURL := fmt.Sprintf(eventBetaDeltaURLTemplate, userID, containerID) + builder := users.NewItemCalendarsItemEventsDeltaRequestBuilder(rawURL, gs.Adapter()) + + return builder +} + +func (p *eventDeltaPager) getPage(ctx context.Context) (DeltaPageLinker, error) { + resp, err := p.builder.Get(ctx, p.options) + if err != nil { + return nil, graph.Stack(ctx, err) + } + + return resp, nil +} + +func (p *eventDeltaPager) setNext(nextLink string) { + p.builder = users.NewItemCalendarsItemEventsDeltaRequestBuilder(nextLink, p.gs.Adapter()) +} + +func (p *eventDeltaPager) reset(ctx context.Context) { + p.builder = getEventDeltaBuilder(ctx, p.gs, p.userID, p.containerID, p.options) +} + +func (p *eventDeltaPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) { + return toValues[models.Eventable](pl) +} + +func (c Events) GetAddedAndRemovedItemIDs( + ctx context.Context, + userID, containerID, oldDelta string, + immutableIDs bool, + canMakeDeltaQueries bool, +) ([]string, []string, DeltaUpdate, error) { + ctx = clues.Add(ctx, "container_id", containerID) + + pager, err := c.NewEventPager(ctx, userID, containerID, immutableIDs) + if err != nil { + return nil, nil, DeltaUpdate{}, graph.Wrap(ctx, err, "creating non-delta pager") + } + + deltaPager, err := c.NewEventDeltaPager(ctx, userID, containerID, oldDelta, immutableIDs) + if err != nil { + return nil, nil, DeltaUpdate{}, graph.Wrap(ctx, err, "creating delta pager") + } + + return getAddedAndRemovedItemIDs(ctx, c.Stable, pager, deltaPager, oldDelta, canMakeDeltaQueries) +} diff --git a/src/pkg/services/m365/api/mail.go b/src/pkg/services/m365/api/mail.go index 6645bb27e..f08cbb7c5 100644 --- a/src/pkg/services/m365/api/mail.go +++ b/src/pkg/services/m365/api/mail.go @@ -17,7 +17,6 @@ import ( "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/logger" - "github.com/alcionai/corso/src/pkg/selectors" ) const ( @@ -188,109 +187,6 @@ func (c Mail) PatchFolder( return nil } -// --------------------------------------------------------------------------- -// container pager -// --------------------------------------------------------------------------- - -type mailFolderPager struct { - service graph.Servicer - builder *users.ItemMailFoldersRequestBuilder -} - -func (c Mail) NewMailFolderPager(userID string) mailFolderPager { - // v1.0 non delta /mailFolders endpoint does not return any of the nested folders - rawURL := fmt.Sprintf(mailFoldersBetaURLTemplate, userID) - builder := users.NewItemMailFoldersRequestBuilder(rawURL, c.Stable.Adapter()) - - return mailFolderPager{c.Stable, builder} -} - -func (p *mailFolderPager) getPage(ctx context.Context) (PageLinker, error) { - page, err := p.builder.Get(ctx, nil) - if err != nil { - return nil, graph.Stack(ctx, err) - } - - return page, nil -} - -func (p *mailFolderPager) setNext(nextLink string) { - p.builder = users.NewItemMailFoldersRequestBuilder(nextLink, p.service.Adapter()) -} - -func (p *mailFolderPager) valuesIn(pl PageLinker) ([]models.MailFolderable, error) { - // Ideally this should be `users.ItemMailFoldersResponseable`, but - // that is not a thing as stable returns different result - page, ok := pl.(models.MailFolderCollectionResponseable) - if !ok { - return nil, clues.New("converting to ItemMailFoldersResponseable") - } - - return page.GetValue(), nil -} - -// EnumerateContainers iterates through all of the users current -// mail folders, converting each to a graph.CacheFolder, and calling -// fn(cf) on each 
one. -// Folder hierarchy is represented in its current state, and does -// not contain historical data. -func (c Mail) EnumerateContainers( - ctx context.Context, - userID, baseContainerID string, - fn func(graph.CachedContainer) error, - errs *fault.Bus, -) error { - el := errs.Local() - pgr := c.NewMailFolderPager(userID) - - for { - if el.Failure() != nil { - break - } - - page, err := pgr.getPage(ctx) - if err != nil { - return graph.Stack(ctx, err) - } - - resp, err := pgr.valuesIn(page) - if err != nil { - return graph.Stack(ctx, err) - } - - for _, fold := range resp { - if el.Failure() != nil { - break - } - - if err := graph.CheckIDNameAndParentFolderID(fold); err != nil { - errs.AddRecoverable(ctx, graph.Stack(ctx, err).Label(fault.LabelForceNoBackupCreation)) - continue - } - - fctx := clues.Add( - ctx, - "container_id", ptr.Val(fold.GetId()), - "container_name", ptr.Val(fold.GetDisplayName())) - - temp := graph.NewCacheFolder(fold, nil, nil) - if err := fn(&temp); err != nil { - errs.AddRecoverable(ctx, graph.Stack(fctx, err).Label(fault.LabelForceNoBackupCreation)) - continue - } - } - - link, ok := ptr.ValOK(page.GetOdataNextLink()) - if !ok { - break - } - - pgr.setNext(link) - } - - return el.Failure() -} - // --------------------------------------------------------------------------- // items // --------------------------------------------------------------------------- @@ -549,161 +445,6 @@ func (c Mail) PostLargeAttachment( return us, nil } -// --------------------------------------------------------------------------- -// item pager -// --------------------------------------------------------------------------- - -var _ itemPager = &mailPager{} - -type mailPager struct { - gs graph.Servicer - builder *users.ItemMailFoldersItemMessagesRequestBuilder - options *users.ItemMailFoldersItemMessagesRequestBuilderGetRequestConfiguration -} - -func (c Mail) NewMailPager( - ctx context.Context, - userID, containerID string, - immutableIDs bool, -) itemPager { - config := &users.ItemMailFoldersItemMessagesRequestBuilderGetRequestConfiguration{ - QueryParameters: &users.ItemMailFoldersItemMessagesRequestBuilderGetQueryParameters{ - Select: idAnd("isRead"), - }, - Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)), - } - - builder := c.Stable. - Client(). - Users(). - ByUserId(userID). - MailFolders(). - ByMailFolderId(containerID). 
- Messages() - - return &mailPager{c.Stable, builder, config} -} - -func (p *mailPager) getPage(ctx context.Context) (DeltaPageLinker, error) { - page, err := p.builder.Get(ctx, p.options) - if err != nil { - return nil, graph.Stack(ctx, err) - } - - return EmptyDeltaLinker[models.Messageable]{PageLinkValuer: page}, nil -} - -func (p *mailPager) setNext(nextLink string) { - p.builder = users.NewItemMailFoldersItemMessagesRequestBuilder(nextLink, p.gs.Adapter()) -} - -// non delta pagers don't have reset -func (p *mailPager) reset(context.Context) {} - -func (p *mailPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) { - return toValues[models.Messageable](pl) -} - -// --------------------------------------------------------------------------- -// delta item pager -// --------------------------------------------------------------------------- - -var _ itemPager = &mailDeltaPager{} - -type mailDeltaPager struct { - gs graph.Servicer - userID string - containerID string - builder *users.ItemMailFoldersItemMessagesDeltaRequestBuilder - options *users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetRequestConfiguration -} - -func getMailDeltaBuilder( - ctx context.Context, - gs graph.Servicer, - user, containerID string, - options *users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetRequestConfiguration, -) *users.ItemMailFoldersItemMessagesDeltaRequestBuilder { - builder := gs. - Client(). - Users(). - ByUserId(user). - MailFolders(). - ByMailFolderId(containerID). - Messages(). - Delta() - - return builder -} - -func (c Mail) NewMailDeltaPager( - ctx context.Context, - userID, containerID, oldDelta string, - immutableIDs bool, -) itemPager { - config := &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetRequestConfiguration{ - QueryParameters: &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetQueryParameters{ - Select: idAnd("isRead"), - }, - Headers: newPreferHeaders(preferPageSize(maxDeltaPageSize), preferImmutableIDs(immutableIDs)), - } - - var builder *users.ItemMailFoldersItemMessagesDeltaRequestBuilder - - if len(oldDelta) > 0 { - builder = users.NewItemMailFoldersItemMessagesDeltaRequestBuilder(oldDelta, c.Stable.Adapter()) - } else { - builder = getMailDeltaBuilder(ctx, c.Stable, userID, containerID, config) - } - - return &mailDeltaPager{c.Stable, userID, containerID, builder, config} -} - -func (p *mailDeltaPager) getPage(ctx context.Context) (DeltaPageLinker, error) { - page, err := p.builder.Get(ctx, p.options) - if err != nil { - return nil, graph.Stack(ctx, err) - } - - return page, nil -} - -func (p *mailDeltaPager) setNext(nextLink string) { - p.builder = users.NewItemMailFoldersItemMessagesDeltaRequestBuilder(nextLink, p.gs.Adapter()) -} - -func (p *mailDeltaPager) reset(ctx context.Context) { - p.builder = p.gs. - Client(). - Users(). - ByUserId(p.userID). - MailFolders(). - ByMailFolderId(p.containerID). - Messages(). 
- Delta() -} - -func (p *mailDeltaPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) { - return toValues[models.Messageable](pl) -} - -func (c Mail) GetAddedAndRemovedItemIDs( - ctx context.Context, - userID, containerID, oldDelta string, - immutableIDs bool, - canMakeDeltaQueries bool, -) ([]string, []string, DeltaUpdate, error) { - ctx = clues.Add( - ctx, - "category", selectors.ExchangeMail, - "container_id", containerID) - - pager := c.NewMailPager(ctx, userID, containerID, immutableIDs) - deltaPager := c.NewMailDeltaPager(ctx, userID, containerID, oldDelta, immutableIDs) - - return getAddedAndRemovedItemIDs(ctx, c.Stable, pager, deltaPager, oldDelta, canMakeDeltaQueries) -} - // --------------------------------------------------------------------------- // Serialization // --------------------------------------------------------------------------- diff --git a/src/pkg/services/m365/api/mail_pager.go b/src/pkg/services/m365/api/mail_pager.go new file mode 100644 index 000000000..71ce09663 --- /dev/null +++ b/src/pkg/services/m365/api/mail_pager.go @@ -0,0 +1,273 @@ +package api + +import ( + "context" + "fmt" + + "github.com/alcionai/clues" + "github.com/microsoftgraph/msgraph-sdk-go/models" + "github.com/microsoftgraph/msgraph-sdk-go/users" + + "github.com/alcionai/corso/src/internal/common/ptr" + "github.com/alcionai/corso/src/internal/m365/graph" + "github.com/alcionai/corso/src/pkg/fault" + "github.com/alcionai/corso/src/pkg/selectors" +) + +// --------------------------------------------------------------------------- +// container pager +// --------------------------------------------------------------------------- + +type mailFolderPager struct { + service graph.Servicer + builder *users.ItemMailFoldersRequestBuilder +} + +func (c Mail) NewMailFolderPager(userID string) mailFolderPager { + // v1.0 non delta /mailFolders endpoint does not return any of the nested folders + rawURL := fmt.Sprintf(mailFoldersBetaURLTemplate, userID) + builder := users.NewItemMailFoldersRequestBuilder(rawURL, c.Stable.Adapter()) + + return mailFolderPager{c.Stable, builder} +} + +func (p *mailFolderPager) getPage(ctx context.Context) (PageLinker, error) { + page, err := p.builder.Get(ctx, nil) + if err != nil { + return nil, graph.Stack(ctx, err) + } + + return page, nil +} + +func (p *mailFolderPager) setNext(nextLink string) { + p.builder = users.NewItemMailFoldersRequestBuilder(nextLink, p.service.Adapter()) +} + +func (p *mailFolderPager) valuesIn(pl PageLinker) ([]models.MailFolderable, error) { + // Ideally this should be `users.ItemMailFoldersResponseable`, but + // that is not a thing as stable returns different result + page, ok := pl.(models.MailFolderCollectionResponseable) + if !ok { + return nil, clues.New("converting to ItemMailFoldersResponseable") + } + + return page.GetValue(), nil +} + +// EnumerateContainers iterates through all of the users current +// mail folders, converting each to a graph.CacheFolder, and calling +// fn(cf) on each one. +// Folder hierarchy is represented in its current state, and does +// not contain historical data. 
+func (c Mail) EnumerateContainers( + ctx context.Context, + userID, baseContainerID string, + fn func(graph.CachedContainer) error, + errs *fault.Bus, +) error { + el := errs.Local() + pgr := c.NewMailFolderPager(userID) + + for { + if el.Failure() != nil { + break + } + + page, err := pgr.getPage(ctx) + if err != nil { + return graph.Stack(ctx, err) + } + + resp, err := pgr.valuesIn(page) + if err != nil { + return graph.Stack(ctx, err) + } + + for _, fold := range resp { + if el.Failure() != nil { + break + } + + if err := graph.CheckIDNameAndParentFolderID(fold); err != nil { + errs.AddRecoverable(ctx, graph.Stack(ctx, err).Label(fault.LabelForceNoBackupCreation)) + continue + } + + fctx := clues.Add( + ctx, + "container_id", ptr.Val(fold.GetId()), + "container_name", ptr.Val(fold.GetDisplayName())) + + temp := graph.NewCacheFolder(fold, nil, nil) + if err := fn(&temp); err != nil { + errs.AddRecoverable(ctx, graph.Stack(fctx, err).Label(fault.LabelForceNoBackupCreation)) + continue + } + } + + link, ok := ptr.ValOK(page.GetOdataNextLink()) + if !ok { + break + } + + pgr.setNext(link) + } + + return el.Failure() +} + +// --------------------------------------------------------------------------- +// item pager +// --------------------------------------------------------------------------- + +var _ itemPager = &mailPager{} + +type mailPager struct { + gs graph.Servicer + builder *users.ItemMailFoldersItemMessagesRequestBuilder + options *users.ItemMailFoldersItemMessagesRequestBuilderGetRequestConfiguration +} + +func (c Mail) NewMailPager( + ctx context.Context, + userID, containerID string, + immutableIDs bool, +) itemPager { + config := &users.ItemMailFoldersItemMessagesRequestBuilderGetRequestConfiguration{ + QueryParameters: &users.ItemMailFoldersItemMessagesRequestBuilderGetQueryParameters{ + Select: idAnd("isRead"), + }, + Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)), + } + + builder := c.Stable. + Client(). + Users(). + ByUserId(userID). + MailFolders(). + ByMailFolderId(containerID). + Messages() + + return &mailPager{c.Stable, builder, config} +} + +func (p *mailPager) getPage(ctx context.Context) (DeltaPageLinker, error) { + page, err := p.builder.Get(ctx, p.options) + if err != nil { + return nil, graph.Stack(ctx, err) + } + + return EmptyDeltaLinker[models.Messageable]{PageLinkValuer: page}, nil +} + +func (p *mailPager) setNext(nextLink string) { + p.builder = users.NewItemMailFoldersItemMessagesRequestBuilder(nextLink, p.gs.Adapter()) +} + +// non delta pagers don't have reset +func (p *mailPager) reset(context.Context) {} + +func (p *mailPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) { + return toValues[models.Messageable](pl) +} + +// --------------------------------------------------------------------------- +// delta item pager +// --------------------------------------------------------------------------- + +var _ itemPager = &mailDeltaPager{} + +type mailDeltaPager struct { + gs graph.Servicer + userID string + containerID string + builder *users.ItemMailFoldersItemMessagesDeltaRequestBuilder + options *users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetRequestConfiguration +} + +func getMailDeltaBuilder( + ctx context.Context, + gs graph.Servicer, + user, containerID string, + options *users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetRequestConfiguration, +) *users.ItemMailFoldersItemMessagesDeltaRequestBuilder { + builder := gs. + Client(). + Users(). + ByUserId(user). + MailFolders(). 
+ ByMailFolderId(containerID). + Messages(). + Delta() + + return builder +} + +func (c Mail) NewMailDeltaPager( + ctx context.Context, + userID, containerID, oldDelta string, + immutableIDs bool, +) itemPager { + config := &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetRequestConfiguration{ + QueryParameters: &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetQueryParameters{ + Select: idAnd("isRead"), + }, + Headers: newPreferHeaders(preferPageSize(maxDeltaPageSize), preferImmutableIDs(immutableIDs)), + } + + var builder *users.ItemMailFoldersItemMessagesDeltaRequestBuilder + + if len(oldDelta) > 0 { + builder = users.NewItemMailFoldersItemMessagesDeltaRequestBuilder(oldDelta, c.Stable.Adapter()) + } else { + builder = getMailDeltaBuilder(ctx, c.Stable, userID, containerID, config) + } + + return &mailDeltaPager{c.Stable, userID, containerID, builder, config} +} + +func (p *mailDeltaPager) getPage(ctx context.Context) (DeltaPageLinker, error) { + page, err := p.builder.Get(ctx, p.options) + if err != nil { + return nil, graph.Stack(ctx, err) + } + + return page, nil +} + +func (p *mailDeltaPager) setNext(nextLink string) { + p.builder = users.NewItemMailFoldersItemMessagesDeltaRequestBuilder(nextLink, p.gs.Adapter()) +} + +func (p *mailDeltaPager) reset(ctx context.Context) { + p.builder = p.gs. + Client(). + Users(). + ByUserId(p.userID). + MailFolders(). + ByMailFolderId(p.containerID). + Messages(). + Delta() +} + +func (p *mailDeltaPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) { + return toValues[models.Messageable](pl) +} + +func (c Mail) GetAddedAndRemovedItemIDs( + ctx context.Context, + userID, containerID, oldDelta string, + immutableIDs bool, + canMakeDeltaQueries bool, +) ([]string, []string, DeltaUpdate, error) { + ctx = clues.Add( + ctx, + "category", selectors.ExchangeMail, + "container_id", containerID) + + pager := c.NewMailPager(ctx, userID, containerID, immutableIDs) + deltaPager := c.NewMailDeltaPager(ctx, userID, containerID, oldDelta, immutableIDs) + + return getAddedAndRemovedItemIDs(ctx, c.Stable, pager, deltaPager, oldDelta, canMakeDeltaQueries) +} From 00695aa099df7b67e61479e0446cbc9e70f6bc3e Mon Sep 17 00:00:00 2001 From: Vaibhav Kamra Date: Thu, 15 Jun 2023 20:34:49 -0700 Subject: [PATCH 38/41] Increase S3 TLS Handshake Timeout (#3631) This should help in CI and other environments where the TLS handshake appears to take longer under load --- #### Does this PR need a docs update or release note? 
- [ ] :white_check_mark: Yes, it's included - [ ] :clock1: Yes, but in a later PR - [x] :no_entry: No #### Type of change - [ ] :sunflower: Feature - [x] :bug: Bugfix - [ ] :world_map: Documentation - [ ] :robot: Supportability/Tests - [ ] :computer: CI/Deployment - [ ] :broom: Tech Debt/Cleanup #### Test Plan - [ ] :muscle: Manual - [ ] :zap: Unit test - [x] :green_heart: E2E --- src/go.mod | 2 +- src/go.sum | 4 ++-- src/internal/kopia/s3.go | 19 ++++++++++--------- 3 files changed, 13 insertions(+), 12 deletions(-) diff --git a/src/go.mod b/src/go.mod index 3c154584d..201359a2b 100644 --- a/src/go.mod +++ b/src/go.mod @@ -2,7 +2,7 @@ module github.com/alcionai/corso/src go 1.20 -replace github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20230502235504-2509b1d72a79 +replace github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20230616023302-6c5412bbf417 require ( github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0 diff --git a/src/go.sum b/src/go.sum index b1e923509..0ffce79dc 100644 --- a/src/go.sum +++ b/src/go.sum @@ -55,8 +55,8 @@ github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpH github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo= github.com/alcionai/clues v0.0.0-20230613181047-258ea4f19225 h1:mjUjCCGvJpmnLh3fuVzpfOSFC9lp9TOIOfjj51L5Rs0= github.com/alcionai/clues v0.0.0-20230613181047-258ea4f19225/go.mod h1:DeaMbAwDvYM6ZfPMR/GUl3hceqI5C8jIQ1lstjB2IW8= -github.com/alcionai/kopia v0.12.2-0.20230502235504-2509b1d72a79 h1:Wrl99Y7jftZMnNDiOIcRJrjstZO3IEj3+Q/sip27vmI= -github.com/alcionai/kopia v0.12.2-0.20230502235504-2509b1d72a79/go.mod h1:Iic7CcKhsq+A7MLR9hh6VJfgpcJhLx3Kn+BgjY+azvI= +github.com/alcionai/kopia v0.12.2-0.20230616023302-6c5412bbf417 h1:s0B7Be1qqZW+XDRStDYvyXZ7ovQAMkg0N1F/ji4TJyc= +github.com/alcionai/kopia v0.12.2-0.20230616023302-6c5412bbf417/go.mod h1:Iic7CcKhsq+A7MLR9hh6VJfgpcJhLx3Kn+BgjY+azvI= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= diff --git a/src/internal/kopia/s3.go b/src/internal/kopia/s3.go index 6b5c081d7..3e0baead9 100644 --- a/src/internal/kopia/s3.go +++ b/src/internal/kopia/s3.go @@ -26,15 +26,16 @@ func s3BlobStorage(ctx context.Context, s storage.Storage) (blob.Storage, error) } opts := s3.Options{ - BucketName: cfg.Bucket, - Endpoint: endpoint, - Prefix: cfg.Prefix, - DoNotUseTLS: cfg.DoNotUseTLS, - DoNotVerifyTLS: cfg.DoNotVerifyTLS, - Tags: s.SessionTags, - SessionName: s.SessionName, - RoleARN: s.Role, - RoleDuration: s.SessionDuration, + BucketName: cfg.Bucket, + Endpoint: endpoint, + Prefix: cfg.Prefix, + DoNotUseTLS: cfg.DoNotUseTLS, + DoNotVerifyTLS: cfg.DoNotVerifyTLS, + Tags: s.SessionTags, + SessionName: s.SessionName, + RoleARN: s.Role, + RoleDuration: s.SessionDuration, + TLSHandshakeTimeout: 60, } store, err := s3.New(ctx, &opts, false) From 79338fbe03f835186f8408e89242b6970de4a4b5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 16 Jun 2023 11:58:01 +0000 Subject: [PATCH 39/41] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Bump=20github.com/mi?= =?UTF-8?q?crosoft/kiota-serialization-json-go=20from=201.0.1=20to=201.0.2?= =?UTF-8?q?=20in=20/src=20(#3625)?= 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [github.com/microsoft/kiota-serialization-json-go](https://github.com/microsoft/kiota-serialization-json-go) from 1.0.1 to 1.0.2.
Release notes

Sourced from github.com/microsoft/kiota-serialization-json-go's releases.

v1.0.2

  • Safely serialize null values in collections of Objects, Enums or primitives.
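For context, here's a minimal sketch of the failure mode this release guards against — serializing a collection that contains a nil entry. The nil-item scenario and the expected output are illustrative assumptions, not taken from the changelog; the writer and interface types come from the kiota Go packages:

```go
package main

import (
	"fmt"

	absser "github.com/microsoft/kiota-abstractions-go/serialization"
	jsonserialization "github.com/microsoft/kiota-serialization-json-go"
)

func main() {
	writer := jsonserialization.NewJsonSerializationWriter()
	defer writer.Close()

	// A collection holding a nil Parsable. Per the release notes above,
	// v1.0.2 serializes null entries like this safely instead of
	// mishandling them.
	items := []absser.Parsable{nil}

	if err := writer.WriteCollectionOfObjectValues("items", items); err != nil {
		fmt.Println("write failed:", err)
		return
	}

	content, err := writer.GetSerializedContent()
	if err != nil {
		fmt.Println("serialize failed:", err)
		return
	}

	// Expected to include a null entry, e.g. "items":[null].
	fmt.Println(string(content))
}
```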
Changelog

Sourced from github.com/microsoft/kiota-serialization-json-go's changelog.

[1.0.2] - 2023-06-14

Changed

  • Safely serialize null values in collections of Objects, Enums or primitives.

Commits
  • 7f47527 Fix/null serialization collection (#92)
  • e5fc62c Merge pull request #90 from microsoft/dependabot/go_modules/github.com/stretc...
  • 0fc7f86 Bump github.com/stretchr/testify from 1.8.3 to 1.8.4
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=github.com/microsoft/kiota-serialization-json-go&package-manager=go_modules&previous-version=1.0.1&new-version=1.0.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)

You can trigger a rebase of this PR by commenting `@dependabot rebase`.

[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)

---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
--- src/go.mod | 2 +- src/go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/go.mod b/src/go.mod index 201359a2b..4beb54399 100644 --- a/src/go.mod +++ b/src/go.mod @@ -18,7 +18,7 @@ require ( github.com/microsoft/kiota-authentication-azure-go v1.0.0 github.com/microsoft/kiota-http-go v1.0.0 github.com/microsoft/kiota-serialization-form-go v1.0.0 - github.com/microsoft/kiota-serialization-json-go v1.0.1 + github.com/microsoft/kiota-serialization-json-go v1.0.2 github.com/microsoftgraph/msgraph-sdk-go v1.4.0 github.com/microsoftgraph/msgraph-sdk-go-core v1.0.0 github.com/pkg/errors v0.9.1 diff --git a/src/go.sum b/src/go.sum index 0ffce79dc..88905c624 100644 --- a/src/go.sum +++ b/src/go.sum @@ -281,8 +281,8 @@ github.com/microsoft/kiota-http-go v1.0.0 h1:F1hd6gMlLeEgH2CkRB7z13ow7LxMKMWEmms github.com/microsoft/kiota-http-go v1.0.0/go.mod h1:eujxJliqodotsYepIc6ihhK+vXMMt5Q8YiSNL7+7M7U= github.com/microsoft/kiota-serialization-form-go v1.0.0 h1:UNdrkMnLFqUCccQZerKjblsyVgifS11b3WCx+eFEsAI= github.com/microsoft/kiota-serialization-form-go v1.0.0/go.mod h1:h4mQOO6KVTNciMF6azi1J9QB19ujSw3ULKcSNyXXOMA= -github.com/microsoft/kiota-serialization-json-go v1.0.1 h1:nI3pLpqep7L6BLJPT7teCqkYFRmgyuA2G0zx6ZrwgFE= -github.com/microsoft/kiota-serialization-json-go v1.0.1/go.mod h1:KS+eFtwtJGsosXRQr/Qilep7ZD1MRF+VtO7LnL7Oyuw= +github.com/microsoft/kiota-serialization-json-go v1.0.2 h1:RXan8v7yWBD88XxVZ2W38BBcqu2UqWtgS54nCbOS5ow= +github.com/microsoft/kiota-serialization-json-go v1.0.2/go.mod h1:AUItT9exyxmjZQE8IeFD9ygP77q9GKVb+AQE2V5Ikho= github.com/microsoft/kiota-serialization-text-go v1.0.0 h1:XOaRhAXy+g8ZVpcq7x7a0jlETWnWrEum0RhmbYrTFnA= github.com/microsoft/kiota-serialization-text-go v1.0.0/go.mod h1:sM1/C6ecnQ7IquQOGUrUldaO5wj+9+v7G2W3sQ3fy6M= github.com/microsoftgraph/msgraph-sdk-go v1.4.0 h1:ibNwMDEZ6HikA9BVXu+TljCzCiE+yFsD6wLpJbTc1tc= From e000bf75050dc1b8b205ee93f1a66c786723d92e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 16 Jun 2023 16:12:32 +0000 Subject: [PATCH 40/41] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Bump=20golang.org/x/?= =?UTF-8?q?tools=20from=200.9.3=20to=200.10.0=20in=20/src=20(#3633)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [golang.org/x/tools](https://github.com/golang/tools) from 0.9.3 to 0.10.0.
Release notes

Sourced from golang.org/x/tools's releases.

gopls/v0.10.0

This release contains initial support for standalone packages and package renaming. Please see below for more details.

We are also changing our release policy to better align with semver.

Support changes

This version of gopls contains changes to our release policy, deprecates support for some older Go versions, and deprecates support for several experimental features.

New release policy

As described in golang/go#55267, we are tightening our release policy to better follow semver, increase stability, and reduce release-related toil. Significant new features will only be introduced in *.*.0 patch versions, and subsequent patch releases will consist only of bugfixes. For example, this version (v0.10.0) introduces several new features, described below. Subsequent v0.10.* releases will contain only bugfixes.

Final support for Go 1.13-1.15

Consistent with the above release policy and our stated support window, the v0.10.* minor version will be the final set of releases to support being used with Go 1.13-1.15. See golang/go#52982 for details.

Gopls will pop up a warning if it resolves a version of the go command that is older than 1.16. Starting with gopls@v0.11.0, gopls will cease to function when used with a go command with a version older than 1.16.

Deprecated experimental features

The following experimental features are deprecated, and will be removed in gopls@v0.11.0:

  • `experimentalWorkspaceModule` (golang/go#52897): superseded by go.work files (a minimal go.work sketch follows this list). See our documentation for information on how to use go.work files to work on multiple modules.
  • `experimentalWatchedFileDelay` (golang/go#55268): gopls will no longer delay its processing of workspace/didChangeWatchedFiles notifications.
  • `experimentalUseInvalidMetadata` (golang/go#54180)
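For reference, here is a minimal go.work sketch of the multi-module setup that replaces `experimentalWorkspaceModule`; the two module directory names below are placeholders, not paths from this repository:

```
go 1.18

use (
	// Each use directive points at a directory containing a go.mod file;
	// gopls loads all of the listed modules into a single workspace.
	./moduleA
	./moduleB
)
```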

New Features

Support for "standalone packages"

Gopls now recognizes certain files as "standalone main packages", meaning they should be interpreted as main packages consisting of a single file. To do this, gopls looks for packages named `main` containing a single build constraint that matches one of the tags configured by the new `standaloneTags` setting.

This enables cross references and other features when working in a file such as the example below that contains a `//go:build ignore` build constraint.

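As a hand-written sketch (not a file from this repository), and assuming the default `standaloneTags` value, which includes the `ignore` tag, such a file might look like this:

```go
//go:build ignore

// The ignore build tag keeps this file out of the surrounding package's
// regular build; with standalone-package support, gopls treats it as a
// self-contained main package, so cross references and diagnostics work
// inside it.
package main

import "fmt"

func main() {
	fmt.Println("running as a standalone main package")
}
```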

(preview) Support for package renaming

Gopls now includes preview support for renaming packages (golang/go#41567); please report any issues you encounter at golang/go#56184.

To rename a package, initiate a rename request on the package clause of a file in the package.

When this renaming is applied, gopls will adjust other package files accordingly, rename the package directory, and update import paths referencing the current package or any nested package in the renamed directory.

Method information in hover

Hovering over a type now shows a summary of its methods.

... (truncated)

Commits
  • 7261b32 gopls/internal/regtest: fix goimports on windows when using vendoring
  • 41e4e56 gopls/internal/lsp/source/completion: ensuring completion completeness
  • ac29460 go/ssa: fix bug in writeSignature on external functions
  • 3b62e7e go/ssa: use core type within (*builder).receiver
  • f394d45 gopls/internal/lsp/cache: compute xrefs and methodsets asynchronously
  • 27dbf85 go.mod: update golang.org/x dependencies
  • c6c9830 go/types/objectpath: memoize scope lookup in objectpath.Encoder
  • 0245e1d gopls/internal/regtest/codelens: set GOWORK=off for go mod vendor
  • 85be888 go/analysis/passes/defers: add analyser for defer mistake
  • c43232f cmd/digraph: improve examples using go list, mod
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=golang.org/x/tools&package-manager=go_modules&previous-version=0.9.3&new-version=0.10.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)

You can trigger a rebase of this PR by commenting `@dependabot rebase`.

[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)

---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:

- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
---
 src/go.mod | 6 +++---
 src/go.sum | 12 ++++++------
 2 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/src/go.mod b/src/go.mod
index 4beb54399..e96b742e4 100644
--- a/src/go.mod
+++ b/src/go.mod
@@ -34,7 +34,7 @@ require (
 	go.uber.org/zap v1.24.0
 	golang.org/x/exp v0.0.0-20230213192124-5e25df0256eb
 	golang.org/x/time v0.3.0
-	golang.org/x/tools v0.9.3
+	golang.org/x/tools v0.10.0
 )

 require (
@@ -116,9 +116,9 @@
 	go.uber.org/atomic v1.10.0 // indirect
 	go.uber.org/multierr v1.11.0 // indirect
 	golang.org/x/crypto v0.10.0 // indirect
-	golang.org/x/mod v0.10.0 // indirect
+	golang.org/x/mod v0.11.0 // indirect
 	golang.org/x/net v0.11.0
-	golang.org/x/sync v0.2.0 // indirect
+	golang.org/x/sync v0.3.0 // indirect
 	golang.org/x/sys v0.9.0 // indirect
 	golang.org/x/text v0.10.0 // indirect
 	google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect
diff --git a/src/go.sum b/src/go.sum
index 88905c624..d3ccd8f65 100644
--- a/src/go.sum
+++ b/src/go.sum
@@ -490,8 +490,8 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
-golang.org/x/mod v0.10.0 h1:lFO9qtOdlre5W1jxS3r/4szv2/6iXxScdzjoBMXNhYk=
-golang.org/x/mod v0.10.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
+golang.org/x/mod v0.11.0 h1:bUO06HqtnRcc/7l71XBe4WcqTZ+3AH1J59zWDDwLKgU=
+golang.org/x/mod v0.11.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
 golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -551,8 +551,8 @@ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJ
 golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.2.0 h1:PUR+T4wwASmuSTYdKjYHI5TD22Wy5ogLU5qZCOLxBrI=
-golang.org/x/sync v0.2.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E=
+golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
 golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -672,8 +672,8 @@ golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4f
 golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
 golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
 golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
-golang.org/x/tools v0.9.3 h1:Gn1I8+64MsuTb/HpH+LmQtNas23LhUVr3rYZ0eKuaMM=
-golang.org/x/tools v0.9.3/go.mod h1:owI94Op576fPu3cIGQeHs3joujW/2Oc6MtlxbF5dfNc=
+golang.org/x/tools v0.10.0 h1:tvDr/iQoUqNdohiYm0LmmKcBk+q86lb9EprIUFhHHGg=
+golang.org/x/tools v0.10.0/go.mod h1:UJwyiVBsOA2uwvK/e5OY3GTpDUJriEd+/YlqAwLPmyM=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=

From 9f7a6422a0b25602277361274e14b864df3c709d Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 16 Jun 2023 16:37:10 +0000
Subject: [PATCH 41/41] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Bump=20github.com/aw?=
 =?UTF-8?q?s/aws-sdk-go=20from=201.44.282=20to=201.44.283=20in=20/src=20(#?=
 =?UTF-8?q?3632)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Bumps [github.com/aws/aws-sdk-go](https://github.com/aws/aws-sdk-go) from 1.44.282 to 1.44.283.
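If you ever need to reproduce a bump like this one by hand instead of waiting on Dependabot, the usual Go toolchain flow is a versioned `go get` run from the module directory (src/ in this repository) followed by `go mod tidy`; the version below simply mirrors this PR:

```
cd src
go get github.com/aws/aws-sdk-go@v1.44.283
go mod tidy
```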
Release notes

Sourced from github.com/aws/aws-sdk-go's releases.

Release v1.44.283 (2023-06-15)

Service Client Updates

  • service/auditmanager: Updates service API and documentation
  • service/elasticfilesystem: Updates service documentation
    • Documentation updates for EFS.
  • service/guardduty: Updates service documentation
    • Updated descriptions for some APIs.
  • service/location: Updates service API and documentation
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=github.com/aws/aws-sdk-go&package-manager=go_modules&previous-version=1.44.282&new-version=1.44.283)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)

You can trigger a rebase of this PR by commenting `@dependabot rebase`.

[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)

---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:

- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
---
 src/go.mod | 2 +-
 src/go.sum | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/go.mod b/src/go.mod
index e96b742e4..5c17db194 100644
--- a/src/go.mod
+++ b/src/go.mod
@@ -8,7 +8,7 @@ require (
 	github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0
 	github.com/alcionai/clues v0.0.0-20230613181047-258ea4f19225
 	github.com/armon/go-metrics v0.4.1
-	github.com/aws/aws-sdk-go v1.44.282
+	github.com/aws/aws-sdk-go v1.44.283
 	github.com/aws/aws-xray-sdk-go v1.8.1
 	github.com/cenkalti/backoff/v4 v4.2.1
 	github.com/google/uuid v1.3.0
diff --git a/src/go.sum b/src/go.sum
index d3ccd8f65..b6186f763 100644
--- a/src/go.sum
+++ b/src/go.sum
@@ -66,8 +66,8 @@ github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY
 github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
 github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA=
 github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4=
-github.com/aws/aws-sdk-go v1.44.282 h1:ZPB9QhwxmMIEC8ja0DdFowOl5fODWaZ6s2cZ40fx6r8=
-github.com/aws/aws-sdk-go v1.44.282/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
+github.com/aws/aws-sdk-go v1.44.283 h1:ObMaIvdhHJM2sIrbcljd7muHBaFb+Kp/QsX6iflGDg4=
+github.com/aws/aws-sdk-go v1.44.283/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
 github.com/aws/aws-xray-sdk-go v1.8.1 h1:O4pXV+hnCskaamGsZnFpzHyAmgPGusBMN6i7nnsy0Fo=
 github.com/aws/aws-xray-sdk-go v1.8.1/go.mod h1:wMmVYzej3sykAttNBkXQHK/+clAPWTOrPiajEk7Cp3A=
 github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8=