Merge branch 'main' into refactor_get_item

This commit is contained in:
Danny 2022-11-30 17:19:05 -05:00 committed by GitHub
commit a090901627
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
120 changed files with 10130 additions and 1431 deletions

View File

@ -1,11 +1,16 @@
name: Build/Release Corso name: Build/Release Corso
on: on:
workflow_dispatch: workflow_dispatch:
pull_request: pull_request:
push: push:
branches: [main] branches: [main]
tags: ["v*.*.*"] tags: ["v*.*.*"]
repository_dispatch:
types: [ok-to-test-command]
permissions: permissions:
# required to retrieve AWS credentials # required to retrieve AWS credentials
id-token: write id-token: write
@ -131,16 +136,15 @@ jobs:
name: docs name: docs
path: docs/build path: docs/build
# ---------------------------------------------------------------------------------------------------- # ----------------------------------------------------------------------------------------------------
# --- Integration and Unit Testing ------------------------------------------------------------------- # --- Integration and Unit Testing -------------------------------------------------------------------
# ---------------------------------------------------------------------------------------------------- # ----------------------------------------------------------------------------------------------------
Test-Suite: Test-Suite-Trusted:
needs: [Precheck, Checkout] needs: [Precheck, Checkout]
environment: Testing environment: Testing
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.srcfileschanged == 'true' if: (startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main') || (needs.precheck.outputs.srcfileschanged == 'true' && github.event.pull_request.head.repo.full_name == github.repository)
defaults: defaults:
run: run:
working-directory: src working-directory: src
@ -179,6 +183,7 @@ jobs:
run: | run: |
set -euo pipefail set -euo pipefail
go test \ go test \
-tags testing \
-json \ -json \
-v \ -v \
-failfast \ -failfast \
@ -194,6 +199,108 @@ jobs:
if-no-files-found: error if-no-files-found: error
retention-days: 14 retention-days: 14
Test-Suite-Fork:
needs: [Precheck]
environment: Testing
if: (!startsWith(github.ref , 'refs/tags/') && github.ref != 'refs/heads/main') && (needs.precheck.outputs.srcfileschanged == 'true' && github.event.pull_request.head.repo.full_name != github.repository)
runs-on: ubuntu-latest
defaults:
run:
working-directory: src
steps:
- name: Fail check
if: github.event_name != 'repository_dispatch'
run: |
echo "Workflow requires approval from a maintainer to run. It will be automatically rerun on approval."
exit 1
# add comment to PR with link to workflow run
- uses: marocchino/sticky-pull-request-comment@v2
with:
message: |
https://github.com/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID
# Check out merge commit
- name: Fork based /ok-to-test checkout
uses: actions/checkout@v2
with:
ref: "refs/pull/${{ github.event.client_payload.pull_request.number }}/merge"
- name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v3
with:
go-version-file: src/go.mod
- run: mkdir testlog
# Install gotestfmt
- name: Set up gotestfmt
run: go install github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
# AWS creds
- name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v1
with:
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
role-session-name: integration-testing
aws-region: us-east-1
# run the tests
- name: Integration Tests
env:
AZURE_CLIENT_ID: ${{ secrets.CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.CLIENT_SECRET }}
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
CORSO_CI_TESTS: true
CORSO_M365_TEST_USER_ID: ${{ secrets.CORSO_M365_TEST_USER_ID }}
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
run: |
set -euo pipefail
go test \
-json \
-v \
./... 2>&1 | tee ./testlog/gotest.log | gotestfmt -hide successful-tests
# Upload the original go test log as an artifact for later review.
- name: Upload test log
if: failure()
uses: actions/upload-artifact@v3
with:
name: test-log
path: src/testlog/gotest.log
if-no-files-found: error
retention-days: 14
# Update check run called "Test-Suite-Fork"
- uses: actions/github-script@v5
id: update-check-run
if: ${{ always() }}
env:
number: ${{ github.event.client_payload.pull_request.number }}
job: ${{ github.job }}
# Conveniently, job.status maps to https://developer.github.com/v3/checks/runs/#update-a-check-run
conclusion: ${{ job.status }}
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const { data: pull } = await github.rest.pulls.get({
...context.repo,
pull_number: process.env.number
});
const ref = pull.head.sha;
const { data: checks } = await github.rest.checks.listForRef({
...context.repo,
ref
});
const check = checks.check_runs.filter(c => c.name === process.env.job);
const { data: result } = await github.rest.checks.update({
...context.repo,
check_run_id: check[0].id,
status: 'completed',
conclusion: process.env.conclusion
});
return result;
# ---------------------------------------------------------------------------------------------------- # ----------------------------------------------------------------------------------------------------
# --- Source Code Linting ---------------------------------------------------------------------------- # --- Source Code Linting ----------------------------------------------------------------------------
# ---------------------------------------------------------------------------------------------------- # ----------------------------------------------------------------------------------------------------
@ -217,9 +324,12 @@ jobs:
- name: Go Lint - name: Go Lint
uses: golangci/golangci-lint-action@v3 uses: golangci/golangci-lint-action@v3
with: with:
version: v1.45.2 # Keep pinned to a verson as sometimes updates will add new lint
# failures in unchanged code.
version: v1.50.1
working-directory: src working-directory: src
skip-cache: true skip-pkg-cache: true
skip-build-cache: true
# check licenses # check licenses
- name: Get go-licenses - name: Get go-licenses
@ -233,7 +343,7 @@ jobs:
# ---------------------------------------------------------------------------------------------------- # ----------------------------------------------------------------------------------------------------
Publish-Binary: Publish-Binary:
needs: [Test-Suite, Linting, Docs-Linting, SetEnv] needs: [Test-Suite-Trusted, Linting, Docs-Linting, SetEnv]
environment: ${{ needs.SetEnv.outputs.environment }} environment: ${{ needs.SetEnv.outputs.environment }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'
@ -270,7 +380,7 @@ jobs:
path: src/dist/* path: src/dist/*
Publish-Docs: Publish-Docs:
needs: [Test-Suite, Linting, Docs-Linting, SetEnv] needs: [Test-Suite-Trusted, Linting, Docs-Linting, SetEnv]
environment: ${{ needs.SetEnv.outputs.environment }} environment: ${{ needs.SetEnv.outputs.environment }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'
@ -308,7 +418,7 @@ jobs:
aws cloudfront create-invalidation --distribution-id ${{ secrets.DOCS_CF_DISTRIBUTION }} --paths "/*" aws cloudfront create-invalidation --distribution-id ${{ secrets.DOCS_CF_DISTRIBUTION }} --paths "/*"
Publish-Image: Publish-Image:
needs: [Test-Suite, Linting, Docs-Linting, SetEnv] needs: [Test-Suite-Trusted, Linting, Docs-Linting, SetEnv]
environment: ${{ needs.SetEnv.outputs.environment }} environment: ${{ needs.SetEnv.outputs.environment }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'

31
.github/workflows/ok-to-test.yml vendored Normal file
View File

@ -0,0 +1,31 @@
# If someone with write access comments "/ok-to-test" on a pull request, emit a repository_dispatch event
name: Ok To Test
on:
issue_comment:
types: [created]
jobs:
ok-to-test:
runs-on: ubuntu-latest
# Only run for PRs, not issue comments
if: ${{ github.event.issue.pull_request }}
steps:
- name: Generate token
id: generate_token
uses: tibdex/github-app-token@v1
with:
app_id: ${{ secrets.APP_ID }}
private_key: ${{ secrets.PRIVATE_KEY }}
- name: Slash Command Dispatch
uses: peter-evans/slash-command-dispatch@v1
env:
TOKEN: ${{ steps.generate_token.outputs.token }}
with:
token: ${{ env.TOKEN }} # GitHub App installation access token
reaction-token: ${{ secrets.GITHUB_TOKEN }}
issue-type: pull-request
commands: ok-to-test
named-args: true
permission: write

View File

@ -19,12 +19,12 @@ services, possibly beyond M365, will expand based on the interest and needs of t
# Getting Started # Getting Started
See the [Corso Documentation](https://docs.corsobackup.io) for more information. See the [Corso Documentation](https://corsobackup.io/docs/intro) for more information.
# Building Corso # Building Corso
To learn more about working with the project source core and building Corso, see the To learn more about working with the project source core and building Corso, see the
[Developer section](https://docs.corsobackup.io/developers/build) of the Corso Documentation. [Developer section](https://corsobackup.io/docs/developers/build) of the Corso Documentation.
# Roadmap # Roadmap

View File

@ -4,7 +4,7 @@ LABEL MAINTAINER="Niraj Tolia"
ARG DEBIAN_FRONTEND=noninteractive ARG DEBIAN_FRONTEND=noninteractive
# NOTE for lines 13,15: update in CI when updating # NOTE for lines 13,15: update in CI when updating
RUN apt-get -y update && apt-get -y install gpg emacs curl git make \ RUN apt-get -y update && apt-get -y install gpg emacs curl zip git make \
&& curl -fsSL https://deb.nodesource.com/setup_current.x | bash - \ && curl -fsSL https://deb.nodesource.com/setup_current.x | bash - \
&& apt-get -y install nodejs \ && apt-get -y install nodejs \
&& apt-get autoclean \ && apt-get autoclean \
@ -12,7 +12,9 @@ RUN apt-get -y update && apt-get -y install gpg emacs curl git make \
&& npm --version \ && npm --version \
&& cd /tmp && curl -O -L https://github.com/errata-ai/vale/releases/download/v2.20.1/vale_2.20.1_Linux_64-bit.tar.gz \ && cd /tmp && curl -O -L https://github.com/errata-ai/vale/releases/download/v2.20.1/vale_2.20.1_Linux_64-bit.tar.gz \
&& tar -xvzf vale_2.20.1_Linux_64-bit.tar.gz -C /usr/bin vale \ && tar -xvzf vale_2.20.1_Linux_64-bit.tar.gz -C /usr/bin vale \
&& npm install -g markdownlint-cli@0.32.2 && npm install -g markdownlint-cli@0.32.2 \
&& curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" \
&& unzip awscliv2.zip && /bin/bash aws/install && rm -rf awscliv2.zip aws
WORKDIR /usr/src WORKDIR /usr/src
COPY package.json package-lock.json* ./ COPY package.json package-lock.json* ./

View File

@ -1,4 +1,4 @@
.PHONY: buildimage build dev shell check genclidocs _validatemdgen .PHONY: buildimage build serve dev shell check genclidocs _validatemdgen publish sync
CORSO_BUILD_DIR := /tmp/.corsobuild CORSO_BUILD_DIR := /tmp/.corsobuild
CORSO_BUILD_CACHE := ${CORSO_BUILD_DIR}/cache CORSO_BUILD_CACHE := ${CORSO_BUILD_DIR}/cache
@ -40,6 +40,9 @@ dockershell:
build: genclidocs build: genclidocs
$(DOCSC) npm run build $(DOCSC) npm run build
serve:
$(DOCSC) npm run serve
genclidocs: _validatemdgen ${MDGEN_BINARY} genclidocs: _validatemdgen ${MDGEN_BINARY}
@echo 'Auto-generating Corso CLI docs...' @echo 'Auto-generating Corso CLI docs...'
$(DOCSC) rm -rf docs/cli $(DOCSC) rm -rf docs/cli
@ -57,3 +60,13 @@ ${MDGEN_BINARY}: $(shell find ${CORSO_LOCAL_PATH}/src -type f -name *.go) $(shel
clean: clean:
$(DOCSC) rm -rf docs/cli build node_modules $(DOCSC) rm -rf docs/cli build node_modules
$(GOC) rm -rf ${CORSO_BUILD_DIR}/* $(GOC) rm -rf ${CORSO_BUILD_DIR}/*
publish: clean build
docker run -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY \
-e AWS_SESSION_TOKEN -e AWS_REGION \
--rm -v ${PWD}:/usr/src/docs corso/docs:latest \
make sync
sync:
aws s3 sync /usr/src/docs/build/ s3://corsobackup.io/ --exclude ".git/*" --delete
aws cloudfront create-invalidation --distribution-id E1W9NGI9YTVZ1A --paths "/*"

View File

@ -41,6 +41,14 @@ make build
This command generates static content into the `build` directory for integration with any static contents hosting service. This command generates static content into the `build` directory for integration with any static contents hosting service.
## Serving static documentation
```bash
make serve
```
This command will serve the static content generated with `make build` at [http://localhost:3000](http://localhost:3000).
## Style and linting ## Style and linting
```bash ```bash

View File

@ -5,6 +5,8 @@ authors: nica
tags: [corso, microsoft 365] tags: [corso, microsoft 365]
--- ---
![Office desk](../static/img/blog/office_desk.jpg)
Have you had it with Google sheets? So have I. Excel is my home. Its where I write all my best formulae. And what Have you had it with Google sheets? So have I. Excel is my home. Its where I write all my best formulae. And what
about PowerPoint? The way it just finds stock photos for you? The automatic alternative designs for this slide about PowerPoint? The way it just finds stock photos for you? The automatic alternative designs for this slide
button? Its too good. I cant give up Microsoft 365. button? Its too good. I cant give up Microsoft 365.
@ -60,7 +62,7 @@ support is coming soon).
Corsos secure backup protects against accidental data loss, service provider downtime and malicious threats, including Corsos secure backup protects against accidental data loss, service provider downtime and malicious threats, including
ransomware attacks. Plus, a robust user community provides a venue for admins to share and learn about data protection ransomware attacks. Plus, a robust user community provides a venue for admins to share and learn about data protection
and find best practices for how to security configure their M365 environments. As a member of the community, youll and find best practices for how to securely configure their M365 environments. As a member of the community, youll
have access to blogs, forums, and discussion, as well as updates on public and feedback-driven development. have access to blogs, forums, and discussion, as well as updates on public and feedback-driven development.
[Join the Corso community on Discord](https://discord.gg/63DTTSnuhT). [Join the Corso community on Discord](https://discord.gg/63DTTSnuhT).
@ -73,8 +75,8 @@ costs, as well.
## Interested in Trying Corso? ## Interested in Trying Corso?
Corso is currently a tool in alpha to give you a CLI for backups of your M365 data. Corso, currently in alpha, provides a CLI-based tool for backups of your M365 data.
[Follow the quickstart guide](https://docs.corsobackup.io/) to start protecting your business-critical M365 data in [Follow the quickstart guide](../../docs/quickstart) to start protecting your business-critical M365 data in
just a few minutes. Because Corso is currently in alpha, it should NOT be used in production. just a few minutes. Because Corso is currently in alpha, it should NOT be used in production.
Corso supports Microsoft 365 Exchange and OneDrive, with SharePoint and Teams support in active development. Coverage Corso supports Microsoft 365 Exchange and OneDrive, with SharePoint and Teams support in active development. Coverage

View File

@ -1,5 +0,0 @@
# Architecture
<!-- vale proselint.Annotations = NO -->
TODO
<!-- vale proselint.Annotations = YES -->

View File

@ -30,7 +30,7 @@
- Set M365 Credentials environment variables - Set M365 Credentials environment variables
> You can find more information on how to get these values in our [M365 docs](../setup/m365_access). > You can find more information on how to get these values in our [M365 docs](../../setup/m365_access/).
```bash ```bash
export AZURE_CLIENT_ID=<id> export AZURE_CLIENT_ID=<id>

View File

@ -12,6 +12,6 @@ beyond M365, will expand based on the interest and needs of the community.
## Getting started ## Getting started
You can follow the [Quick Start](quickstart) guide for an end-to-end Corso walk through. Alternatively, follow You can follow the [Quick Start](../quickstart) guide for an end-to-end Corso walk through. Alternatively, follow
the instructions in the [Corso Setup](setup/concepts) section to dive into the details on how to configure and the instructions in the [Corso Setup](../setup/concepts) section to dive into the details on how to configure and
run Corso. run Corso.

View File

@ -21,13 +21,13 @@ pull the image.
## Connect to Microsoft 365 ## Connect to Microsoft 365
Obtaining credentials from Microsoft 365 to allow Corso to run is a one-time operation. Follow the instructions Obtaining credentials from Microsoft 365 to allow Corso to run is a one-time operation. Follow the instructions
[here](setup/m365_access) to obtain the necessary credentials and then make them available to Corso. [here](../setup/m365_access) to obtain the necessary credentials and then make them available to Corso.
<Tabs groupId="os"> <Tabs groupId="os">
<TabItem value="win" label="Powershell"> <TabItem value="win" label="Powershell">
```powershell ```powershell
$Env:AZURE_CLIENT_ID = "<Application (client) ID for configured app>" $Env:AZURE_CLIENT_ID = "<Application (../client) ID for configured app>"
$Env:AZURE_TENANT_ID = "<Directory (tenant) ID for configured app>" $Env:AZURE_TENANT_ID = "<Directory (tenant) ID for configured app>"
$Env:AZURE_CLIENT_SECRET = "<Client secret value>" $Env:AZURE_CLIENT_SECRET = "<Client secret value>"
``` ```
@ -36,7 +36,7 @@ Obtaining credentials from Microsoft 365 to allow Corso to run is a one-time ope
<TabItem value="unix" label="Linux/macOS"> <TabItem value="unix" label="Linux/macOS">
```bash ```bash
export AZURE_CLIENT_ID=<Application (client) ID for configured app> export AZURE_CLIENT_ID=<Application (../client) ID for configured app>
export AZURE_TENANT_ID=<Directory (tenant) ID for configured app> export AZURE_TENANT_ID=<Directory (tenant) ID for configured app>
export AZURE_CLIENT_SECRET=<Client secret value> export AZURE_CLIENT_SECRET=<Client secret value>
``` ```
@ -45,7 +45,7 @@ Obtaining credentials from Microsoft 365 to allow Corso to run is a one-time ope
<TabItem value="docker" label="Docker"> <TabItem value="docker" label="Docker">
```bash ```bash
export AZURE_CLIENT_ID=<Application (client) ID for configured app> export AZURE_CLIENT_ID=<Application (../client) ID for configured app>
export AZURE_TENANT_ID=<Directory (tenant) ID for configured app> export AZURE_TENANT_ID=<Directory (tenant) ID for configured app>
export AZURE_CLIENT_SECRET=<Client secret value> export AZURE_CLIENT_SECRET=<Client secret value>
``` ```
@ -55,9 +55,9 @@ Obtaining credentials from Microsoft 365 to allow Corso to run is a one-time ope
## Create a Corso repository ## Create a Corso repository
To create a secure backup location for Corso, you will first need to [download Corso](setup/download). To create a secure backup location for Corso, you will first need to [download Corso](../setup/download).
Use the container or native executable to initialize the Corso repository using an Use the container or native executable to initialize the Corso repository using an
[encryption passphrase](setup/configuration#environment-variables) and a pre-created S3 bucket (Corso doesn't create [encryption passphrase](../setup/configuration#environment-variables) and a pre-created S3 bucket (Corso doesn't create
the bucket if it doesn't exist). The steps below use `corso-test` as the bucket name but, if you are using AWS, you the bucket if it doesn't exist). The steps below use `corso-test` as the bucket name but, if you are using AWS, you
will need a different unique name for the bucket. will need a different unique name for the bucket.
@ -118,7 +118,7 @@ docker run --env-file $HOME/.corso/corso.env \\
## Create your first backup ## Create your first backup
Corso can do much more, but you can start by creating a backup of your Exchange mailbox. If it has been a while since Corso can do much more, but you can start by creating a backup of your Exchange mailbox. If it has been a while since
you initialized the Corso repository, you might need to [connect to it again](setup/repos#connect-to-a-repository). you initialized the Corso repository, you might need to [connect to it again](../setup/repos#connect-to-a-repository).
<Tabs groupId="os"> <Tabs groupId="os">
<TabItem value="win" label="Powershell"> <TabItem value="win" label="Powershell">
@ -202,7 +202,7 @@ docker run --env-file $HOME/.corso/corso.env \\
``` ```
Next, select one of the available backups and list all backed up emails. See Next, select one of the available backups and list all backed up emails. See
[here](cli/corso_backup_details_exchange) for more advanced filtering options. [here](../cli/corso_backup_details_exchange) for more advanced filtering options.
<Tabs groupId="os"> <Tabs groupId="os">
<TabItem value="win" label="Powershell"> <TabItem value="win" label="Powershell">
@ -285,5 +285,5 @@ A confirmation of the recovered email will be shown and the email will appear in
The above tutorial only scratches the surface for Corso's capabilities. We encourage you to dig deeper by: The above tutorial only scratches the surface for Corso's capabilities. We encourage you to dig deeper by:
* Learning about [Corso concepts and setup](setup/concepts) * Learning about [Corso concepts and setup](../setup/concepts)
* Explore Corso backup and restore options for Exchange and Onedrive in the [Command Line Reference](cli/corso) * Explore Corso backup and restore options for Exchange and Onedrive in the [Command Line Reference](../cli/corso)

View File

@ -21,7 +21,7 @@ application to connect to your *M365 tenant* and transfer data during backup and
## Corso concepts {#corso-concepts} ## Corso concepts {#corso-concepts}
* **Repository** refers to the storage location where Corso securely and efficiently stores encrypted *backups* of your * **Repository** refers to the storage location where Corso securely and efficiently stores encrypted *backups* of your
*M365 Services* data. See [Repositories](repos) for more information. *M365 Services* data. See [Repositories](../repos) for more information.
* **Backup** is a copy of your *M365 Services* data to be used for restores in case of deletion, loss, or corruption of the * **Backup** is a copy of your *M365 Services* data to be used for restores in case of deletion, loss, or corruption of the
original data. Corso performs backups incrementally, and each backup only captures data that has changed between backup iterations. original data. Corso performs backups incrementally, and each backup only captures data that has changed between backup iterations.

View File

@ -14,7 +14,7 @@ Two things are needed to configure Corso:
Three distinct pieces of configuration are required by Corso: Three distinct pieces of configuration are required by Corso:
* S3 object storage configuration to store backups. See [AWS Credentials Setup](repos#s3-creds-setup) for * S3 object storage configuration to store backups. See [AWS Credentials Setup](../repos#s3-creds-setup) for
alternate ways to pass AWS credentials. alternate ways to pass AWS credentials.
* `AWS_ACCESS_KEY_ID`: Access key for an IAM user or role for accessing an S3 bucket * `AWS_ACCESS_KEY_ID`: Access key for an IAM user or role for accessing an S3 bucket
* `AWS_SECRET_ACCESS_KEY`: Secret key associated with the access key * `AWS_SECRET_ACCESS_KEY`: Secret key associated with the access key

View File

@ -7,8 +7,8 @@ description: "Connect to a Microsft 365 tenant"
import Tabs from '@theme/Tabs'; import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem'; import TabItem from '@theme/TabItem';
To perform backup and restore operations, Corso requires access to your [M365 tenant](concepts#m365-concepts) To perform backup and restore operations, Corso requires access to your [M365 tenant](../concepts#m365-concepts)
by creating an [Azure AD application](concepts#m365-concepts) with appropriate permissions. by creating an [Azure AD application](../concepts#m365-concepts) with appropriate permissions.
The following steps outline a simplified procedure for creating an Azure Ad application suitable for use with Corso. The following steps outline a simplified procedure for creating an Azure Ad application suitable for use with Corso.
For more details, please refer to the For more details, please refer to the
@ -25,7 +25,7 @@ AD application.
From the list of [Azure services](https://portal.azure.com/#allservices), select From the list of [Azure services](https://portal.azure.com/#allservices), select
**Azure Active Directory &#8594; App Registrations &#8594; New Registration** **Azure Active Directory &#8594; App Registrations &#8594; New Registration**
![Registering a new application](/img/m365app_create_new.png) ![Registering a new application](../../static/img/m365app_create_new.png)
### Configure basic settings ### Configure basic settings
@ -36,14 +36,14 @@ Next, configure the following:
* Skip the **Redirect URI** option * Skip the **Redirect URI** option
* Click **Register** at the bottom of the screen * Click **Register** at the bottom of the screen
![Configuring the application](/img/m365app_configure.png) ![Configuring the application](../../static/img/m365app_configure.png)
### Configure required permissions ### Configure required permissions
Within the new application (`CorsoApp` in the below diagram), select **API Permissions &#8594; Add a permission** from Within the new application (`CorsoApp` in the below diagram), select **API Permissions &#8594; Add a permission** from
the management panel. the management panel.
![Adding application permissions](/img/m365app_permissions.png) ![Adding application permissions](../../static/img/m365app_permissions.png)
Select the following permissions from **Microsoft API &#8594; Microsoft Graph &#8594; Application Permissions** and Select the following permissions from **Microsoft API &#8594; Microsoft Graph &#8594; Application Permissions** and
then click **Add permissions**. then click **Add permissions**.
@ -63,7 +63,7 @@ then click **Add permissions**.
Finally, grant admin consent to this application. This step is required even if the user that created the application Finally, grant admin consent to this application. This step is required even if the user that created the application
is an Microsoft 365 admin. is an Microsoft 365 admin.
![Granting administrator consent](/img/m365app_consent.png) ![Granting administrator consent](../../static/img/m365app_consent.png)
## Export application credentials ## Export application credentials
@ -74,7 +74,7 @@ as environment variables.
To view the tenant and client ID, select Overview from the app management panel. To view the tenant and client ID, select Overview from the app management panel.
![Obtaining Tenant and Client IDs](/img/m365app_ids.png) ![Obtaining Tenant and Client IDs](../../static/img/m365app_ids.png)
Copy the client and tenant IDs and export them into the following environment variables. Copy the client and tenant IDs and export them into the following environment variables.
@ -112,7 +112,7 @@ management panel.
Click **New Client Secret** under **Client secrets** and follow the instructions to create a secret. Click **New Client Secret** under **Client secrets** and follow the instructions to create a secret.
![Obtaining the Azure client secrete](/img/m365app_secret.png) ![Obtaining the Azure client secrete](../../static/img/m365app_secret.png)
After creating the secret, immediately copy the secret **Value** because it won't be available later. Export it as an After creating the secret, immediately copy the secret **Value** because it won't be available later. Export it as an
environment variable. environment variable.

View File

@ -10,7 +10,7 @@ import TabItem from '@theme/TabItem';
import TOCInline from '@theme/TOCInline'; import TOCInline from '@theme/TOCInline';
import {Version} from '@site/src/corsoEnv'; import {Version} from '@site/src/corsoEnv';
A Corso [repository](concepts#corso-concepts) stores encrypted copies of your backup data. Repositories are A Corso [repository](../concepts#corso-concepts) stores encrypted copies of your backup data. Repositories are
supported on the following object storage systems: supported on the following object storage systems:
<TOCInline toc={toc} maxHeadingLevel={2}/><br/> <TOCInline toc={toc} maxHeadingLevel={2}/><br/>
@ -67,7 +67,7 @@ The two most commonly-used options are:
### Initialize repository ### Initialize repository
Before first use, you need to initialize a Corso repository with `corso repo init s3`. See the command details Before first use, you need to initialize a Corso repository with `corso repo init s3`. See the command details
[here](../cli/corso_repo_init_s3). [here](../../cli/corso_repo_init_s3).
<Tabs groupId="os"> <Tabs groupId="os">
<TabItem value="win" label="Powershell"> <TabItem value="win" label="Powershell">
@ -104,7 +104,7 @@ docker run --env-file $HOME/.corso/corso.env \\
### Connect to a repository ### Connect to a repository
If a repository already exists, you can connect to it with `corso repo connect s3`. See the command details If a repository already exists, you can connect to it with `corso repo connect s3`. See the command details
[here](../cli/corso_repo_connect_s3). [here](../../cli/corso_repo_connect_s3).
<Tabs groupId="os"> <Tabs groupId="os">
<TabItem value="win" label="Powershell"> <TabItem value="win" label="Powershell">
@ -144,3 +144,9 @@ need to use the following flag with the initial Corso `repo init` command:
```bash ```bash
--endpoint <domain.example.com> --endpoint <domain.example.com>
``` ```
### Testing with insecure TLS configurations
Corso also supports the use of object storage systems with no TLS certificate or with self-signed
TLS certificates with the `--disable-tls` or `--disable-tls-verification` flags.
[These flags](../../cli/corso_repo_init_s3) should only be used for testing.

View File

@ -16,7 +16,7 @@ metadata for basic information about installed versions and usage in a privacy-p
generic description of most-commonly used backup operations and statistics on the duration and size of backups. No user generic description of most-commonly used backup operations and statistics on the duration and size of backups. No user
data is stored or transmitted during this process. data is stored or transmitted during this process.
Telemetry reporting can be turned off by using the `--no-stats` flag. See the [Command Line Reference](../cli/corso) Telemetry reporting can be turned off by using the `--no-stats` flag. See the [Command Line Reference](../../cli/corso)
section for more information. section for more information.
</details> </details>

View File

@ -6,13 +6,14 @@ const darkCodeTheme = require('prism-react-renderer/themes/dracula');
/** @type {import('@docusaurus/types').Config} */ /** @type {import('@docusaurus/types').Config} */
const config = { const config = {
title: 'Corso Documentation', title: 'Corso',
tagline: 'Free, Secure, and Open-Source Backup for Microsoft 365', tagline: 'Free, Secure, and Open-Source Backup for Microsoft 365',
url: 'https://docs.corsobackup.io', url: 'https://corsobackup.io',
baseUrl: process.env.CORSO_DOCS_BASEURL || '/', baseUrl: process.env.CORSO_DOCS_BASEURL || '/',
onBrokenLinks: 'throw', onBrokenLinks: 'throw',
onBrokenMarkdownLinks: 'throw', onBrokenMarkdownLinks: 'throw',
favicon: 'img/corso_logo.svg', favicon: 'img/corso_logo.svg',
trailingSlash: true,
// GitHub pages deployment config. // GitHub pages deployment config.
// If you aren't using GitHub pages, you don't need these. // If you aren't using GitHub pages, you don't need these.
@ -84,7 +85,11 @@ const config = {
position: 'left', position: 'left',
label: 'Docs', label: 'Docs',
}, },
{to: '/blog', label: 'Blog', position: 'left'}, {
to: '/blog',
label: 'Blog',
position: 'left'
},
{ {
href: 'https://github.com/alcionai/corso', href: 'https://github.com/alcionai/corso',
label: 'GitHub', label: 'GitHub',
@ -133,11 +138,6 @@ const config = {
label: 'GitHub', label: 'GitHub',
href: 'https://github.com/alcionai/corso', href: 'https://github.com/alcionai/corso',
}, },
{
label: 'Corso Website',
href: 'https://corsobackup.io/',
},
], ],
}, },
], ],
@ -145,8 +145,8 @@ const config = {
}, },
colorMode: { colorMode: {
defaultMode: 'dark', defaultMode: 'dark',
disableSwitch: false, disableSwitch: true,
respectPrefersColorScheme: true, respectPrefersColorScheme: false,
}, },
zoom: { zoom: {
@ -168,8 +168,8 @@ const config = {
metadata : [ metadata : [
{name: 'twitter:card', content: 'summary_large_image'}, {name: 'twitter:card', content: 'summary_large_image'},
{name: 'twitter:site', content: '@corsobackup'}, {name: 'twitter:site', content: '@corsobackup'},
{name: 'twitter:title', content: 'Corso Documentation: Free, Secure, and Open-Source Backup for Microsoft 365'}, {name: 'twitter:title', content: 'Corso: Free, Secure, and Open-Source Backup for Microsoft 365'},
{name: 'twitter:description', content: 'Documentation for Corso, an open-source tool, that protects Microsoft 365 data by securely and efficiently backing up all business-critical data to object storage.'}, {name: 'twitter:description', content: 'Corso is an open-source tool that protects Microsoft 365 data by securely and efficiently backing up all business-critical data to object storage.'},
], ],
prism: { prism: {

5885
docs/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -17,19 +17,29 @@
"@docusaurus/core": "2.2.0", "@docusaurus/core": "2.2.0",
"@docusaurus/plugin-google-gtag": "^2.2.0", "@docusaurus/plugin-google-gtag": "^2.2.0",
"@docusaurus/preset-classic": "2.2.0", "@docusaurus/preset-classic": "2.2.0",
"@loadable/component": "^5.15.2",
"@mdx-js/react": "^1.6.22", "@mdx-js/react": "^1.6.22",
"animate.css": "^4.1.1",
"clsx": "^1.2.1", "clsx": "^1.2.1",
"docusaurus-plugin-image-zoom": "^0.1.1", "docusaurus-plugin-image-zoom": "^0.1.1",
"docusaurus-plugin-sass": "^0.2.2", "docusaurus-plugin-sass": "^0.2.2",
"feather-icons": "^4.29.0",
"jarallax": "^2.0.4",
"mdx-mermaid": "^1.3.2", "mdx-mermaid": "^1.3.2",
"mermaid": "^9.2.2", "mermaid": "^9.2.2",
"prism-react-renderer": "^1.3.5", "prism-react-renderer": "^1.3.5",
"react": "^17.0.2", "react": "^17.0.2",
"react-dom": "^17.0.2", "react-dom": "^17.0.2",
"sass": "^1.56.1" "sass": "^1.56.1",
"tw-elements": "^1.0.0-alpha12",
"wowjs": "^1.1.3"
}, },
"devDependencies": { "devDependencies": {
"@docusaurus/module-type-aliases": "2.2.0" "@docusaurus/module-type-aliases": "2.2.0",
"@iconify/react": "^4.0.0",
"autoprefixer": "^10.4.13",
"postcss": "^8.4.19",
"tailwindcss": "^3.2.4"
}, },
"browserslist": { "browserslist": {
"production": [ "production": [

6
docs/postcss.config.js Normal file
View File

@ -0,0 +1,6 @@
// PostCSS pipeline for the docs/landing-page build: run Tailwind CSS first
// (generates the utility classes used by the homepage components), then
// Autoprefixer (adds vendor prefixes per the browserslist config).
module.exports = {
  plugins: {
    tailwindcss: {},
    autoprefixer: {},
  },
}

View File

@ -1,61 +0,0 @@
import React from 'react';
import clsx from 'clsx';
import styles from './styles.module.css';
// Content for the three homepage feature cards. Each entry supplies the card
// title, an inline SVG icon component (resolved at require-time), and a JSX
// description blurb; all three are rendered by <Feature> below.
const FeatureList = [
  {
    title: 'Secure',
    Svg: require('@site/static/img/security.svg').default,
    description: (
      <>
        Corso provides secure data backup that protects customers against accidental data loss, service provider downtime, and malicious threats including ransomware attacks.
      </>
    ),
  },
  {
    title: 'Robust',
    Svg: require('@site/static/img/data.svg').default,
    description: (
      <>
        Corso, purpose-built for M365 protection, provides easy-to-use comprehensive backup and restore workflows that reduce backup time, improve time-to-recovery, reduce admin overhead, and replace unreliable scripts or workarounds.
      </>
    ),
  },
  {
    title: 'Low Cost',
    Svg: require('@site/static/img/savings.svg').default,
    description: (
      <>
        Corso, a 100% open-source tool, provides a free alternative for cost-conscious teams. It further reduces storage costs by supporting flexible retention policies and efficiently compressing and deduplicating data before storing it in low-cost cloud object storage.
      </>
    ),
  },
];
function Feature({Svg, title, description}) {
return (
<div className={clsx('col col--4')}>
<div className="text--center">
<Svg className={styles.featureSvg} role="img" />
</div>
<div className="text--center padding-horiz--md">
<h3>{title}</h3>
<p>{description}</p>
</div>
</div>
);
}
export default function HomepageFeatures() {
return (
<section className={styles.features}>
<div className="container">
<div className="row">
{FeatureList.map((props, idx) => (
<Feature key={idx} {...props} />
))}
</div>
</div>
</section>
);
}

View File

@ -1,11 +0,0 @@
/* Row container for the homepage feature cards. */
.features {
  display: flex;
  align-items: center;
  padding: 2rem 0;
  width: 100%;
}

/* Fixed-size box for each card's SVG icon. */
.featureSvg {
  height: 200px;
  width: 200px;
}

View File

@ -0,0 +1,38 @@
import { Icon } from "@iconify/react";
import React, { useEffect } from "react";
export default function BackToTop() {
function scroll() {
window.scrollTo({ top: 0, left: 0, behavior: "smooth" });
}
function scrollFunction() {
var mybutton = document.getElementById("back-to-top");
if (mybutton != null) {
if (
document.body.scrollTop > 500 ||
document.documentElement.scrollTop > 500
) {
mybutton.classList.add("flex");
mybutton.classList.remove("hidden");
} else {
mybutton.classList.add("hidden");
mybutton.classList.remove("flex");
}
}
}
useEffect(() => {
window.onscroll = function () {
scrollFunction();
};
}, []);
return (
<a
href="#"
onClick={() => scroll()}
id="back-to-top"
className="back-to-top flex-col justify-center items-center fixed hidden text-lg rounded-full z-10 bottom-5 right-5 h-9 w-9 text-center bg-indigo-600 text-white leading-9"
>
<Icon icon="mdi:arrow-up" color="#fff" />
</a>
);
}

View File

@ -0,0 +1,40 @@
import React, { useEffect } from "react";
import { Icon } from "@iconify/react";
export default function Cookies() {
function acceptCookies() {
document.cookie = "cookies=accepted; expires=Fri, 31 Dec 9999 23:59:59 GMT";
document.getElementById("cookies").style.display = "none";
}
return (
<div
id="cookies"
className="cookie-popup !tracking-wide fixed max-w-lg bottom-3 right-3 left-3 sm:left-0 sm:right-0 mx-auto bg-white dark:bg-slate-900 shadow dark:shadow-gray-800 rounded-md pt-6 pb-2 px-6 z-50"
>
<p className="text-slate-400">
This website uses cookies to provide you with a great user experience.
By using it, you accept our{" "}
<a
href="cookies.html"
target="_blank"
className="text-emerald-600 dark:text-emerald-500 font-semibold"
>
use of cookies
</a>
.
</p>
<div className="cookie-popup-actions text-right">
<button
onClick={() => acceptCookies()}
className="absolute border-none !bg-transparent p-0 cursor-pointer font-semibold top-2 right-2"
>
<Icon
className="text-dark dark:text-slate-200 text-2xl"
icon="humbleicons:times"
/>
</button>
</div>
</div>
);
}

View File

@ -0,0 +1,32 @@
import React from "react";
import "animate.css";
export default function Demo() {
return (
<section className="relative flex !tracking-wide flex-col items-center overflow-hidden">
<div className="!container relative">
<div className="flex flex-col content-center items-center justify-start relative md:mt-24 mt-16 text-center">
<div className="wow w-[95%] sm:w-[80%] animate__animated relative animate__fadeIn">
<div className="flex flex-row items-center bg-gray-200 rounded-t-lg h-6">
<div className="align-middle flex flex-col items-center justify-center">
<img className="h-4 px-2" src="assets/images/powershell.svg" />
</div>
</div>
<div
className="!p-2 relative rounded-b-lg overflow-clip"
style={{
borderLeft: "2px solid #e5e7eb",
borderRight: "2px solid #e5e7eb",
borderBottom: "2px solid #e5e7eb",
}}
>
<video className="w-full" poster="assets/images/corso_demo_thumbnail.png" muted loop autoPlay playsInline>
<source src="assets/images/corso_demo.mp4" type="video/mp4" />
</video>
</div>
</div>
</div>
</div>
</section>
);
}

View File

@ -0,0 +1,69 @@
import React from "react";
import "animate.css";
import { Icon } from "@iconify/react";
// Call-to-action section: an indigo gradient band with a laptop illustration
// and "Start Protecting Your Microsoft 365 Data!" copy linking to the
// quickstart guide. Scroll-reveal animation classes (wow/animate__*) are
// activated elsewhere by wow.js.
export default function FourthSection() {
  return (
    <section className="relative !tracking-wide md:py-16 py-12 md:pt-0 pt-0">
      {/* Gradient backdrop covering the lower portion of the section. */}
      <div className="absolute bottom-0 left-0 !z-0 right-0 sm:h-2/3 h-4/5 bg-gradient-to-b from-indigo-500 to-indigo-600"></div>
      <div className="container !z-50">
        <div
          className="grid grid-cols-1 justify-center wow animate__animated animate__fadeInUp"
          data-wow-delay=".1s"
        >
          <div className="relative flex flex-col items-center justify-center z-1">
            <div className="grid grid-cols-1 md:text-left text-center justify-center">
              <div className="relative">
                <img
                  src="assets/images/laptop-macbook.png"
                  className="mx-auto"
                  alt="Laptop image showing Microsoft 365 icons"
                />
              </div>
            </div>
            <div className="content md:mt-0">
              <div className="grid lg:grid-cols-12 grid-cols-1 md:text-left text-center justify-center">
                <div className="lg:col-start-2 lg:col-span-10">
                  {/* Two-column copy: headline on the left, blurb + CTA link
                      on the right (stacked on small screens). */}
                  <div className="grid md:grid-cols-2 grid-cols-1 items-center">
                    <div className="mt-8">
                      <div className="section-title text-md-start">
                        <h3 className="md:text-3xl text-2xl md:leading-normal leading-normal font-semibold text-white mt-2">
                          Start Protecting Your
                          <br /> Microsoft 365 Data!
                        </h3>
                        <h6 className="text-white/50 text-lg font-semibold">
                          Corso is Free and Open Source
                        </h6>
                      </div>
                    </div>
                    <div className="mt-8">
                      <div className="section-title text-md-start">
                        <p className="text-white/50 max-w-xl mx-auto mb-2">
                          Follow our quick-start guide to start protecting your
                          business-critical Microsoft 365 data in just a few
                          minutes.
                        </p>
                        {/* NOTE(review): relative href resolves against the
                            current page path — confirm it reaches
                            /docs/quickstart from every page that renders
                            this section. */}
                        <a
                          href="docs/quickstart"
                          className="!text-white !no-underline flex flex-row items-center !hover:text-white"
                        >
                          Get Started{" "}
                          <Icon
                            icon="uim:angle-right-b"
                            className="align-middle"
                          />
                        </a>
                      </div>
                    </div>
                  </div>
                </div>
              </div>
            </div>
          </div>
        </div>
      </div>
    </section>
  );
}

View File

@ -0,0 +1,74 @@
import React from "react";
import "animate.css";
// Landing-page hero: rotated dark backdrop, headline, Quickstart/Download
// buttons, the cloud-backup illustration, and four decorative indigo bars.
export default function Hero() {
  return (
    <section className="relative !tracking-wide flex flex-col home-wrapper items-center overflow-hidden">
      {/* Oversized, rotated dark panel that forms the slanted hero
          background; deliberately larger than the viewport and clipped by
          the section's overflow-hidden. */}
      <div
        className="bg-[#151C3D] absolute"
        style={{
          left: "-20rem",
          right: 0,
          zIndex: 1,
          top: "-30%",
          height: "62rem",
          width: "140rem",
          transform: "rotate(-12deg)",
        }}
      ></div>
      <div
        style={{
          // NOTE(review): React inline styles don't support "!important";
          // this likely renders as no z-index at all — the !z-10 utility
          // class below is what actually takes effect. Confirm and drop.
          zIndex: "1 !important",
        }}
        className="!container relative !z-10"
      >
        <div className="grid !z-10 grid-cols-1 mt-28 text-center">
          <div className="wow !z-10 animate__animated animate__fadeIn">
            <h4 className="font-bold !text-white !z-10 !leading-normal text-4xl lg:text-5xl mb-5">
              Free, Secure, and Open-Source
              <br /> Backup for Microsoft 365
            </h4>
            <p className="text-slate-300 !z-10 text-xl max-w-xl mx-auto">
              The #1 open-source backup tool for Microsoft 365
            </p>
          </div>
          {/* Primary CTAs; hrefs are relative to the page path. */}
          <div className="mt-12 !z-10 mb-6 space-x-4">
            <a
              href="../docs/quickstart"
              className="text-2xl !z-10 !no-underline hover:text-white py-2 px-6 font-bold btn bg-indigo-800 hover:bg-indigo-900 border-indigo-800 hover:border-indigo-900 text-white rounded-md"
            >
              Quickstart
            </a>
            <a
              href="../docs/setup/download"
              className="text-2xl !z-10 !no-underline hover:text-white py-2 px-6 font-bold btn bg-indigo-800 hover:bg-indigo-900 border-indigo-800 hover:border-indigo-900 text-white rounded-md"
            >
              Download
            </a>
          </div>
          <div
            className="home-dashboard mt-8 !z-10 wow animate__ animate__fadeIn animated"
            style={{
              visibility: "visible",
              animationName: "fadeIn",
            }}
          >
            <img
              src="img/cloudbackup.svg"
              className="w-[70%] inline-block object-contain"
              alt="Cloud backup and storage"
            />
          </div>
        </div>
        {/* Decorative indigo accent bars at the hero's lower corners. */}
        <div className="bg-indigo-600 w-8 h-16 !z-10 absolute left-8 lg:bottom-28 md:bottom-36 sm:bottom-40 bottom-16"></div>
        <div className="bg-indigo-600/20 w-8 h-16 !z-10 absolute left-20 lg:bottom-32 md:bottom-40 sm:bottom-44 bottom-20"></div>
        <div className="bg-indigo-600/20 !z-10 w-8 h-16 absolute right-20 xl:bottom-[420px] lg:bottom-[315px] md:bottom-[285px] sm:bottom-80 bottom-32"></div>
        <div className="bg-indigo-600 w-8 h-16 !z-10 absolute right-8 xl:bottom-[440px] lg:bottom-[335px] md:bottom-[305px] sm:bottom-[340px] bottom-36"></div>
      </div>
    </section>
  );
}

View File

@ -0,0 +1,673 @@
import React, { useEffect } from "react";
import feather from "feather-icons";
import { WOW } from "wowjs";
import { jarallax } from "jarallax";
import { Icon } from "@iconify/react";
import "animate.css";
import "tw-elements";
export default function KeyLoveFAQ() {
useEffect(() => {
new WOW().init();
feather.replace();
jarallax(document.querySelectorAll(".jarallax"), {
speed: 0.2,
});
});
return (
<section className="relative md:py-24 !tracking-wide py-16 overflow-hidden">
<div className="container">
<div
className="grid grid-cols-1 pb-8 text-center wow animate__animated animate__fadeInUp"
data-wow-delay=".1s"
>
<h3 className="mb-6 mt-8 md:text-4xl text-white text-3xl md:leading-normal leading-normal font-bold">
Key Features
</h3>
<p className="text-slate-400 max-w-xl mx-auto">
See why Corso is a perfect fit for your Microsoft 365 backup and
recovery needs.
</p>
</div>
<div className="grid lg:grid-cols-3 md:grid-cols-2 grid-cols-1 grid-flow-row-dense gap-[30px] mt-8">
<div
className="col-start-1 wow animate__animated animate__fadeInUp"
data-wow-delay=".1s"
>
<div className="flex transition-all duration-500 scale-hover shadow dark:shadow-gray-800 hover:shadow-md dark:hover:shadow-gray-700 ease-in-out items-center p-3 rounded-md bg-white dark:bg-slate-900">
<div className="flex items-center justify-center h-[45px] min-w-[45px] -rotate-45 bg-gradient-to-r from-transparent to-indigo-600/10 text-indigo-600 text-center rounded-full mr-3">
<i data-feather="share-2" className="h-5 w-5 rotate-45"></i>
</div>
<div className="flex-1">
<h4 className="mb-0 text-lg font-bold">
Comprehensive Workflows
</h4>
</div>
</div>
</div>
<div
className="col-start-1 wow animate__animated animate__fadeInUp"
data-wow-delay=".1s"
>
<div className="flex transition-all duration-500 scale-hover shadow dark:shadow-gray-800 hover:shadow-md dark:hover:shadow-gray-700 ease-in-out items-center p-3 rounded-md bg-white dark:bg-slate-900">
<div className="flex items-center justify-center h-[45px] min-w-[45px] -rotate-45 bg-gradient-to-r from-transparent to-indigo-600/10 text-indigo-600 text-center rounded-full mr-3">
<i data-feather="zap" className="h-5 w-5 rotate-45"></i>
</div>
<div className="flex-1">
<h4 className="mb-0 text-lg font-bold">High Throughput</h4>
</div>
</div>
</div>
<div
className="col-start-1 wow animate__animated animate__fadeInUp"
data-wow-delay=".1s"
>
<div className="flex transition-all duration-500 scale-hover shadow dark:shadow-gray-800 hover:shadow-md dark:hover:shadow-gray-700 ease-in-out items-center p-3 rounded-md bg-white dark:bg-slate-900">
<div className="flex items-center justify-center h-[45px] min-w-[45px] -rotate-45 bg-gradient-to-r from-transparent to-indigo-600/10 text-indigo-600 text-center rounded-full mr-3">
<i data-feather="activity" className="h-5 w-5 rotate-45"></i>
</div>
<div className="flex-1">
<h4 className="mb-0 text-lg font-bold">Fault Tolerance</h4>
</div>
</div>
</div>
<div
className="lg:col-start-2 md:col-start-1 wow animate__animated animate__fadeInUp"
data-wow-delay=".3s"
>
<div className="flex transition-all duration-500 scale-hover shadow dark:shadow-gray-800 hover:shadow-md dark:hover:shadow-gray-700 ease-in-out items-center p-3 rounded-md bg-white dark:bg-slate-900">
<div className="flex items-center justify-center h-[45px] min-w-[45px] -rotate-45 bg-gradient-to-r from-transparent to-indigo-600/10 text-indigo-600 text-center rounded-full mr-3">
<i data-feather="lock" className="h-5 w-5 rotate-45"></i>
</div>
<div className="flex-1">
<h4 className="mb-0 text-lg font-bold">
End-to-End Encryption
</h4>
</div>
</div>
</div>
<div
className="lg:col-start-2 md:col-start-2 md:order-last wow animate__animated animate__fadeInUp"
data-wow-delay=".3s"
>
<div className="flex transition-all duration-500 scale-hover shadow dark:shadow-gray-800 hover:shadow-md dark:hover:shadow-gray-700 ease-in-out items-center p-3 rounded-md bg-white dark:bg-slate-900">
<div className="flex items-center justify-center h-[45px] min-w-[45px] -rotate-45 bg-gradient-to-r from-transparent to-indigo-600/10 text-indigo-600 text-center rounded-full mr-3">
<i data-feather="copy" className="h-5 w-5 rotate-45"></i>
</div>
<div className="flex-1">
<h4 className="mb-0 text-lg font-bold">Deduplication</h4>
</div>
</div>
</div>
<div
className="lg:col-start-2 md:col-start-2 md:order-last wow animate__animated animate__fadeInUp"
data-wow-delay=".3s"
>
<div className="flex transition-all duration-500 scale-hover shadow dark:shadow-gray-800 hover:shadow-md dark:hover:shadow-gray-700 ease-in-out items-center p-3 rounded-md bg-white dark:bg-slate-900">
<div className="flex items-center justify-center h-[45px] min-w-[45px] -rotate-45 bg-gradient-to-r from-transparent to-indigo-600/10 text-indigo-600 text-center rounded-full mr-3">
<i data-feather="minimize-2" className="h-5 w-5 rotate-45"></i>
</div>
<div className="flex-1">
<h4 className="mb-0 text-lg font-bold">Compression</h4>
</div>
</div>
</div>
<div
className="lg:col-start-3 md:col-start-2 wow animate__animated animate__fadeInUp"
data-wow-delay=".5s"
>
<div className="flex transition-all duration-500 scale-hover shadow dark:shadow-gray-800 hover:shadow-md dark:hover:shadow-gray-700 ease-in-out items-center p-3 rounded-md bg-white dark:bg-slate-900">
<div className="flex items-center justify-center h-[45px] min-w-[45px] -rotate-45 bg-gradient-to-r from-transparent to-indigo-600/10 text-indigo-600 text-center rounded-full mr-3">
<i data-feather="code" className="h-5 w-5 rotate-45"></i>
</div>
<div className="flex-1">
<h4 className="mb-0 text-lg font-bold">Open Source</h4>
</div>
</div>
</div>
<div
className="lg:col-start-3 md:col-start-2 wow animate__animated animate__fadeInUp"
data-wow-delay=".5s"
>
<div className="flex transition-all duration-500 scale-hover shadow dark:shadow-gray-800 hover:shadow-md dark:hover:shadow-gray-700 ease-in-out items-center p-3 rounded-md bg-white dark:bg-slate-900">
<div className="flex items-center justify-center h-[45px] min-w-[45px] -rotate-45 bg-gradient-to-r from-transparent to-indigo-600/10 text-indigo-600 text-center rounded-full mr-3">
<i
data-feather="upload-cloud"
className="h-5 w-5 rotate-45"
></i>
</div>
<div className="flex-1">
<h4 className="mb-0 text-lg font-bold">
Choice of Object Storage
</h4>
</div>
</div>
</div>
<div
className="lg:col-start-3 md:col-start-2 wow animate__animated animate__fadeInUp"
data-wow-delay=".5s"
>
<div className="flex transition-all duration-500 scale-hover shadow dark:shadow-gray-800 hover:shadow-md dark:hover:shadow-gray-700 ease-in-out items-center p-3 rounded-md bg-white dark:bg-slate-900">
<div className="flex items-center justify-center h-[45px] min-w-[45px] -rotate-45 bg-gradient-to-r from-transparent to-indigo-600/10 text-indigo-600 text-center rounded-full mr-3">
<i
data-feather="check-circle"
className="h-5 w-5 rotate-45"
></i>
</div>
<div className="flex-1">
<h4 className="mb-0 text-lg font-bold">Retention Policies</h4>
</div>
</div>
</div>
</div>
</div>
<div className="container md:mt-24 mt-16">
<div className="container lg mx-auto">
<div className="grid grid-cols-1 pb-2 text-center wow animate__animated animate__fadeInUp">
<h3 className="mb-6 mt-8 md:text-4xl text-3xl md:leading-normal leading-normal font-bold">
Why Everyone{" "}
<span className="after:absolute after:right-0 after:left-0 after:bottom-1 after:lg:h-3 after:h-2 after:w-auto after:rounded-md after:bg-indigo-600/30 relative text-indigo-600">
Loves
<div className="absolute right-0 left-0 bottom-1 lg:h-3 h-2 w-auto rounded-md bg-indigo-600/30"></div>
</span>{" "}
Corso
</h3>
</div>
</div>
<div className="grid md:grid-cols-2 grid-cols-1 items-center gap-[30px]">
<div
className="relative wow animate__animated animate__fadeInLeft"
data-wow-delay=".3s"
>
<img
src="/img/why/chat.svg"
className="rounded-lg"
alt="Group discussion"
/>
<div className="overflow-hidden absolute lg:h-[400px] h-[320px] lg:w-[400px] w-[320px] bg-indigo-600/5 bottom-0 left-0 rotate-45 -z-1 rounded-3xl"></div>
</div>
<div
className="lg:ml-8 wow animate__animated animate__fadeInRight"
data-wow-delay=".3s"
>
<h3 className="mb-4 text-3xl leading-normal font-bold">
Community
</h3>
<p className="text-slate-400">
The Corso community provides a venue for M365 admins to share and
learn about the importance of data protection as well as best
practices around M365 secure configuration and compliance
management.
</p>
<ul className="list-none text-slate-400 mt-4">
<li className="mb-1 flex">
<Icon
className="text-indigo-600 text-xl mr-2"
icon="material-symbols:check-circle-outline"
/>{" "}
Community-led blogs, forums, and discussions
</li>
<li className="mb-1 flex">
<Icon
className="text-indigo-600 text-xl mr-2"
icon="material-symbols:check-circle-outline"
/>{" "}
Public and feedback-driven development roadmap{" "}
</li>
<li className="mb-1 flex">
<Icon
className="text-indigo-600 text-xl mr-2"
icon="material-symbols:check-circle-outline"
/>{" "}
All community contributions welcome
</li>
</ul>
<div className="mt-4">
<a
href="https://discord.gg/63DTTSnuhT"
target="_blank"
className="btn btn-link !no-underline link-underline link-underline-black text-indigo-600 hover:text-indigo-600 after:bg-indigo-600 duration-500 ease-in-out"
>
Join Us On Discord{" "}
<Icon icon="uim:angle-right-b" className="align-middle" />
</a>
</div>
</div>
</div>
</div>
<div className="container md:mt-24 mt-16">
<div className="grid md:grid-cols-2 grid-cols-1 items-center gap-[30px]">
<div
className="relative order-1 md:order-2 wow animate__animated animate__fadeInRight"
data-wow-delay=".5s"
>
<img
src="/img/why/security.svg"
className="rounded-lg"
alt="Approval of fingerprint security"
/>
<div className="overflow-hidden absolute lg:h-[400px] h-[320px] lg:w-[400px] w-[320px] bg-indigo-600/5 bottom-0 right-0 rotate-45 -z-1 rounded-3xl"></div>
</div>
<div
className="lg:mr-8 order-2 md:order-1 wow animate__animated animate__fadeInLeft"
data-wow-delay=".5s"
>
<h3 className="mb-4 text-3xl leading-normal font-bold">
Data Security
</h3>
<p className="text-slate-400">
Corso provides secure data backup that protects customers against
accidental data loss, service provider downtime, and malicious
threats including ransomware attacks.
</p>
<ul className="list-none text-slate-400 mt-4">
<li className="mb-1 flex">
<Icon
className="text-indigo-600 text-xl mr-2"
icon="material-symbols:check-circle-outline"
/>{" "}
End-to-end zero-trust AES-256 and TLS encryption
</li>
<li className="mb-1 flex">
<Icon
className="text-indigo-600 text-xl mr-2"
icon="material-symbols:check-circle-outline"
/>{" "}
Support for air-gapped backup storage
</li>
<li className="mb-1 flex">
<Icon
className="text-indigo-600 text-xl mr-2"
icon="material-symbols:check-circle-outline"
/>{" "}
Choice of backup storage provider and geo location
</li>
</ul>
</div>
</div>
</div>
<div className="container md:mt-24 mt-16">
<div className="grid md:grid-cols-2 grid-cols-1 items-center mt-8 gap-[30px]">
<div
className="relative wow animate__animated animate__fadeInLeft"
data-wow-delay=".5s"
>
<img
src="/img/why/data.svg"
className="rounded-lg"
alt="Data extraction dashboard"
/>
<div className="overflow-hidden absolute lg:h-[400px] h-[320px] lg:w-[400px] w-[320px] bg-indigo-600/5 bottom-0 left-0 rotate-45 -z-1 rounded-3xl"></div>
</div>
<div
className="lg:ml-8 wow animate__animated animate__fadeInRight"
data-wow-delay=".5s"
>
<h3 className="mb-4 text-3xl leading-normal font-bold">
Robust Backups
</h3>
<p className="text-slate-400">
Corso, purpose-built for M365 protection, provides easy-to-use
comprehensive backup and restore workflows that reduces backup
time, improve time-to-recovery, reduce admin overhead, and replace
unreliable scripts or workarounds.
</p>
<ul className="list-none text-slate-400 mt-4">
<li className="mb-1 flex">
<Icon
className="text-indigo-600 text-xl mr-2"
icon="material-symbols:check-circle-outline"
/>{" "}
Constantly updated M365 Graph Data engine
</li>
<li className="mb-1 flex">
<Icon
className="text-indigo-600 text-xl mr-2"
icon="material-symbols:check-circle-outline"
/>{" "}
Purpose-built, flexible, fine-grained data protection workflows
</li>
<li className="mb-1 flex">
<Icon
className="text-indigo-600 text-xl mr-2"
icon="material-symbols:check-circle-outline"
/>{" "}
High-performance backup and recovery data movers
</li>
</ul>
<div className="mt-4">
<a
href="docs/quickstart"
className="btn btn-link !no-underline link-underline link-underline-black text-indigo-600 hover:text-indigo-600 after:bg-indigo-600 duration-500 ease-in-out"
>
Use The Quick Start For Your First Backup{" "}
<Icon icon="uim:angle-right-b" className="align-middle" />
</a>
</div>
</div>
</div>
</div>
<div className="container md:mt-24 mt-16">
<div className="grid md:grid-cols-2 grid-cols-1 items-center gap-[30px]">
<div
className="relative order-1 md:order-2 wow animate__animated animate__fadeInRight"
data-wow-delay=".5s"
>
<img
src="/img/why/savings.svg"
className="rounded-lg"
alt="Adding money to a savings jar"
/>
<div className="overflow-hidden absolute lg:h-[400px] h-[320px] lg:w-[400px] w-[320px] bg-indigo-600/5 bottom-0 right-0 rotate-45 -z-1 rounded-3xl"></div>
</div>
<div
className="lg:mr-8 order-2 md:order-1 wow animate__animated animate__fadeInLeft"
data-wow-delay=".5s"
>
<h3 className="mb-4 text-3xl leading-normal font-bold">
Cost Savings
</h3>
<p className="text-slate-400">
Corso, a 100% open-source tool, provides a free alternative for
cost-conscious teams. It further reduces storage costs by
supporting flexible retention policies and efficiently compressing
and deduplicating data before storing it in low-cost cloud object
storage.
</p>
<ul className="list-none text-slate-400 mt-4">
<li className="mb-1 flex">
<Icon
className="text-indigo-600 text-xl mr-2"
icon="material-symbols:check-circle-outline"
/>{" "}
Free forever OSS with no licensing costs
</li>
<li className="mb-1 flex">
<Icon
className="text-indigo-600 text-xl mr-2"
icon="material-symbols:check-circle-outline"
/>{" "}
Client-side compression and deduplication
</li>
<li className="mb-1 flex">
<Icon
className="text-indigo-600 text-xl mr-2"
icon="material-symbols:check-circle-outline"
/>{" "}
Support for S3-compliant storage including AWS Glacier IA
</li>
</ul>
<div className="mt-4">
<a
href="docs/setup/repos"
className="btn btn-link !no-underline link-underline link-underline-black text-indigo-600 hover:text-indigo-600 after:bg-indigo-600 duration-500 ease-in-out"
>
Read about our Object Storage support{" "}
<Icon icon="uim:angle-right-b" className="align-middle" />
</a>
</div>
</div>
</div>
</div>
{/* Accordions */}
<div className="container md:mb-8 mb-4 md:mt-24 mt-16 wow animate__animated animate__fadeInUp">
<div className="grid grid-cols-1 pb-8 text-center">
<h3 className="mb-6 mt-8 text-white md:text-4xl text-3xl md:leading-normal leading-normal font-bold">
Frequently Asked Questions
</h3>
</div>
<div className="relative grid md:grid-cols-12 grid-cols-1 items-center gap-[30px]">
<div className="md:col-span-6">
<div className="relative">
<div className="relative rounded-xl overflow-hidden shadow-md dark:shadow-gray-800">
<div
className="w-full jarallax py-72 bg-slate-400 custom-bg_ bg-no-repeat bg-top"
data-jarallax='{"speed": 0.1}'
></div>
</div>
</div>
</div>
<div className="md:col-span-6">
<div className="accordion space-y-3" id="accordionExample">
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
<h2
className="accordion-header mb-0 !cursor-pointer font-semibold"
id="headingOne"
>
<button
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
type="button"
data-bs-toggle="collapse"
data-bs-target="#collapseOne"
aria-expanded="false"
aria-controls="collapseOne"
>
<span>What platforms does Corso run on?</span>
</button>
</h2>
<div
id="collapseOne"
className="accordion-collapse collapse"
aria-labelledby="headingOne"
data-bs-parent="#accordionExample"
>
<div className="accordion-body p-5">
<p className="text-slate-400 !visible dark:text-gray-400">
Corso has both native binaries and container images for
Windows, Linux, and macOS.
</p>
</div>
</div>
</div>
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
<h2
className="accordion-header mb-0 !cursor-pointer font-semibold"
id="heading2"
>
<button
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
type="button"
data-bs-toggle="collapse"
data-bs-target="#collapse2"
aria-expanded="false"
aria-controls="collapse2"
>
<span>
What Microsoft 365 services can I backup using Corso?
</span>
</button>
</h2>
<div
id="collapse2"
className="accordion-collapse collapse"
aria-labelledby="heading2"
data-bs-parent="#accordionExample"
>
<div className="accordion-body p-5">
<p className="text-slate-400 !visible dark:text-gray-400">
Corso currently supports OneDrive and Exchange. Support
for Teams and SharePoint is in active development and is
therefore not recommended for production use.
</p>
</div>
</div>
</div>
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
<h2
className="accordion-header mb-0 !cursor-pointer font-semibold"
id="heading3"
>
<button
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
type="button"
data-bs-toggle="collapse"
data-bs-target="#collapse3"
aria-expanded="false"
aria-controls="collapse3"
>
<span>What object storage does Corso support?</span>
</button>
</h2>
<div
id="collapse3"
className="accordion-collapse collapse"
aria-labelledby="heading3"
data-bs-parent="#accordionExample"
>
<div className="accordion-body p-5">
<p className="text-slate-400 dark:text-gray-400 !visible">
Corso supports any S3-compliant object storage system
including AWS S3 (including Glacier Instant Access),
Google Cloud Storage, and Backblaze. Azure Blob support is
coming soon.
</p>
</div>
</div>
</div>
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
<h2
className="accordion-header mb-0 font-semibold"
id="heading4"
>
<button
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
type="button"
data-bs-toggle="collapse"
data-bs-target="#collapse4"
aria-expanded="false"
aria-controls="collapse4"
>
<span>How can I get help for Corso?</span>
</button>
</h2>
<div
id="collapse4"
className="accordion-collapse collapse"
aria-labelledby="heading4"
data-bs-parent="#accordionExample"
>
<div className="accordion-body p-5">
<p className="text-slate-400 dark:text-gray-400 !visible">
If you are unable to find an answer in our documentation,
please file{" "}
<a
href="https://github.com/alcionai/corso/issues"
className="text-indigo-600"
target="_blank"
>
GitHub issues
</a>{" "}
for bugs or join the{" "}
<a
href="https://discord.gg/63DTTSnuhT"
className="text-indigo-600"
target="_blank"
>
Discord community
</a>
.
</p>
</div>
</div>
</div>
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
<h2
className="accordion-header mb-0 !cursor-pointer font-semibold"
id="heading5"
>
<button
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
type="button"
data-bs-toggle="collapse"
data-bs-target="#collapse5"
aria-expanded="false"
aria-controls="collapse5"
>
<span>What is Corso's open-source license?</span>
</button>
</h2>
<div
id="collapse5"
className="accordion-collapse collapse"
aria-labelledby="heading5"
data-bs-parent="#accordionExample"
>
<div className="accordion-body p-5">
<p className="text-slate-400 dark:text-gray-400 !visible">
Corso's source code is licensed under the OSI-approved
Apache v2 open-source license.
</p>
</div>
</div>
</div>
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
<h2
className="accordion-header mb-0 !cursor-pointer font-semibold"
id="heading6"
>
<button
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
type="button"
data-bs-toggle="collapse"
data-bs-target="#collapse6"
aria-expanded="false"
aria-controls="collapse6"
>
<span>How do I request a new feature?</span>
</button>
</h2>
<div
id="collapse6"
className="accordion-collapse collapse"
aria-labelledby="heading6"
data-bs-parent="#accordionExample"
>
<div className="accordion-body p-5">
<p className="text-slate-400 dark:text-gray-400 !visible">
You can request new features by creating a{" "}
<a
href="https://github.com/alcionai/corso/issues"
className="text-indigo-600"
target="_blank"
>
new GitHub issue
</a>{" "}
and labeling it as an enhancement.
</p>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</section>
);
}

View File

@ -0,0 +1,23 @@
import React from "react";
import "animate.css";
import loadable from "@loadable/component";
import Hero from "./Hero";
import Demo from "./Demo";
import FourthSection from "./FourthSection";
import Cookies from "./Cookies";
const KeyLoveFAQComp = loadable(() => import("./KeyLoveFAQ"));
const BackToTopComp = loadable(() => import("./BackToTop"));
// MainComp lays out the landing page sections in render order.
// KeyLoveFAQComp and BackToTopComp are wrapped with @loadable/component
// above, so they are code-split into separate chunks and loaded on demand
// instead of shipping with the initial page bundle.
export function MainComp() {
  return (
    <>
      <Hero />
      <Demo />
      <KeyLoveFAQComp />
      <FourthSection />
      <BackToTopComp />
      <Cookies />
    </>
  );
}

View File

@ -1,9 +1,71 @@
/** /**
* Any CSS included here will be global. The classic template CUSTOM TO THE NEW HOME PAGE
* bundles Infima by default. Infima is a CSS framework designed to
* work well for content-centric websites.
*/ */
@import url('https://fonts.googleapis.com/css2?family=Nunito&display=swap');
*{
font-family: 'Nunito', sans-serif !important;
}
html{
scroll-behavior: smooth !important;
}
.accordion-button-custom::after {
-ms-flex-shrink: 0;
flex-shrink: 0;
width: 1.25rem;
height: 1.25rem;
margin-left: auto;
content: "";
background-image: url("../../static/assets/download.svg");
background-repeat: no-repeat;
background-size: 1.25rem;
transition: transform 0.2s ease-in-out;
}
.accordion-button-custom:not(.collapsed)::after {
background-image: url("../../static/assets/download_blue.svg");
transform: rotate(-180deg);
}
.accordion-button-custom:not(.collapsed) {
color: #2563eb;
background-color: #fff;
box-shadow: inset 0 -1px 0 #e5e7eb;
}
.link-underline {
border-bottom-width: 0;
background-image: linear-gradient(transparent, transparent), linear-gradient(#4f46E5, #4f46E5);
background-size: 0 2px;
background-position: 0 100%;
background-repeat: no-repeat;
transition: background-size .5s ease-in-out;
}
.link-underline-black {
background-image: linear-gradient(transparent, transparent), linear-gradient(#4f46E5, #4f46E5);
}
.link-underline:hover {
background-size: 100% 2px;
background-position: 0 100%
}
@tailwind utilities;
@tailwind components;
@layer utilities {
.scale-hover:hover {
transform: scale(1.05);
}
}
.custom-bg_{
background: url("../../static/assets/images/cta.jpg")
}
/* You can override the default Infima variables here. */ /* You can override the default Infima variables here. */
:root { :root {
--ifm-color-primary: #1e204e; --ifm-color-primary: #1e204e;

View File

@ -1,40 +1,14 @@
import React from 'react'; import React from "react";
import clsx from 'clsx'; import Layout from "@theme/Layout";
import Layout from '@theme/Layout'; import { MainComp } from "@site/src/components/parts/MainComp";
import Link from '@docusaurus/Link';
import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
import styles from './index.module.css';
import HomepageFeatures from '@site/src/components/HomepageFeatures';
function HomepageHeader() {
const {siteConfig} = useDocusaurusContext();
return (
<header className={clsx('hero hero--primary', styles.heroBanner)}>
<div className="container">
<h1 className="hero__title">{siteConfig.title}</h1>
<p className="hero__subtitle">{siteConfig.tagline}</p>
<div className={styles.buttons}>
<Link
className="button button--secondary button--lg"
to="/docs/quickstart">
Corso Quickstart
</Link>
</div>
</div>
</header>
);
}
export default function Home() { export default function Home() {
const {siteConfig} = useDocusaurusContext();
return ( return (
<Layout <Layout
title={`${siteConfig.title}`} title="Free, Secure, and Open-Source Backup for Microsoft 365"
description="Documentation for Corso, a free, secure, and open-source backup tool for Microsoft 365"> description="Intro, docs, and blog for Corso, an open-source tool, that protects Microsoft 365 data by securely and efficiently backing up all business-critical data to object storage."
<HomepageHeader /> >
<main> <MainComp />
<HomepageFeatures />
</main>
</Layout> </Layout>
); );
} }

View File

@ -1,7 +0,0 @@
---
title: Markdown page example
---
# Markdown page example
You don't need React to write simple standalone pages.

1
docs/static/assets/download.svg vendored Normal file
View File

@ -0,0 +1 @@
<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='#fff'><path fill-rule='evenodd' d='M1.646 4.646a.5.5 0 0 1 .708 0L8 10.293l5.646-5.647a.5.5 0 0 1 .708.708l-6 6a.5.5 0 0 1-.708 0l-6-6a.5.5 0 0 1 0-.708z'/></svg>

After

Width:  |  Height:  |  Size: 228 B

1
docs/static/assets/download_blue.svg vendored Normal file
View File

@ -0,0 +1 @@
<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='#2563eb'><path fill-rule='evenodd' d='M1.646 4.646a.5.5 0 0 1 .708 0L8 10.293l5.646-5.647a.5.5 0 0 1 .708.708l-6 6a.5.5 0 0 1-.708 0l-6-6a.5.5 0 0 1 0-.708z'/></svg>

After

Width:  |  Height:  |  Size: 231 B

BIN
docs/static/assets/images/corso_demo.mp4 vendored Normal file

Binary file not shown.

Binary file not shown.

After

Width:  |  Height:  |  Size: 335 KiB

BIN
docs/static/assets/images/cta.jpg vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 80 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 419 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 415 KiB

View File

@ -0,0 +1,29 @@
<?xml version="1.0" encoding="iso-8859-1"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg version="1.1" id="PowerShell" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
width="204.691px" height="154.521px" viewBox="0 0 204.691 154.521" style="enable-background:new 0 0 204.691 154.521;"
xml:space="preserve">
<g>
<path style="display:none;fill-rule:evenodd;clip-rule:evenodd;fill:#2671BE;" d="M-47.547,226.872
c0-97.129,0.094-194.259-0.195-291.387c-0.021-6.982,1.404-8.411,8.388-8.389c94.397,0.292,188.798,0.292,283.195,0
c6.984-0.022,8.41,1.407,8.389,8.389c-0.289,97.128-0.195,194.258-0.195,291.387c-3.238,2.008-6.837,1.129-10.268,1.131
c-93.015,0.049-186.031,0.049-279.047,0C-40.711,228.001-44.31,228.88-47.547,226.872z"/>
<path style="fill-rule:evenodd;clip-rule:evenodd;fill:#E0EAF5;" d="M120.14,0.032c23.011-0.008,46.023-0.078,69.034,0.019
c13.68,0.056,17.537,4.627,14.588,18.137c-8.636,39.566-17.466,79.092-26.415,118.589c-2.83,12.484-9.332,17.598-22.465,17.637
c-46.023,0.137-92.046,0.152-138.068-0.006c-15.043-0.053-19-5.148-15.759-19.404C9.849,96.287,18.69,57.582,27.602,18.892
C30.997,4.148,36.099,0.1,51.104,0.057C74.116-0.008,97.128,0.04,120.14,0.032z"/>
<path style="fill-rule:evenodd;clip-rule:evenodd;fill:#2671BE;" d="M85.365,149.813c-23.014-0.008-46.029,0.098-69.042-0.053
c-11.67-0.076-13.792-2.83-11.165-14.244c8.906-38.71,18.099-77.355,26.807-116.109C34.3,9.013,39.337,4.419,50.473,4.522
c46.024,0.427,92.056,0.137,138.083,0.184c11.543,0.011,13.481,2.48,10.89,14.187c-8.413,38.007-16.879,76.003-25.494,113.965
c-3.224,14.207-6.938,16.918-21.885,16.951C129.833,149.856,107.598,149.821,85.365,149.813z"/>
<path style="fill-rule:evenodd;clip-rule:evenodd;fill:#FDFDFE;" d="M104.948,73.951c-1.543-1.81-3.237-3.894-5.031-5.886
c-10.173-11.3-20.256-22.684-30.61-33.815c-4.738-5.094-6.248-10.041-0.558-15.069c5.623-4.97,11.148-4.53,16.306,1.188
c14.365,15.919,28.713,31.856,43.316,47.556c5.452,5.864,4.182,9.851-1.823,14.196c-23.049,16.683-45.968,33.547-68.862,50.443
c-5.146,3.799-10.052,4.75-14.209-0.861c-4.586-6.189-0.343-9.871,4.414-13.335c17.013-12.392,33.993-24.83,50.9-37.366
C101.146,79.256,104.527,78.238,104.948,73.951z"/>
<path style="fill-rule:evenodd;clip-rule:evenodd;fill:#FCFDFD;" d="M112.235,133.819c-6.196,0-12.401,0.213-18.583-0.068
c-4.932-0.223-7.9-2.979-7.838-8.174c0.06-4.912,2.536-8.605,7.463-8.738c13.542-0.363,27.104-0.285,40.651-0.02
c4.305,0.084,7.483,2.889,7.457,7.375c-0.031,5.146-2.739,9.133-8.25,9.465c-6.944,0.42-13.931,0.104-20.899,0.104
C112.235,133.78,112.235,133.8,112.235,133.819z"/>
</g>
</svg>

After

Width:  |  Height:  |  Size: 2.6 KiB

BIN
docs/static/img/blog/office_desk.jpg vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 252 KiB

1
docs/static/img/cloudbackup.svg vendored Normal file

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 138 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 3.5 KiB

1
docs/static/img/why/chat.svg vendored Normal file

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 24 KiB

1
docs/static/img/why/data.svg vendored Normal file

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 38 KiB

1
docs/static/img/why/savings.svg vendored Normal file

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 115 KiB

1
docs/static/img/why/security.svg vendored Normal file

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 24 KiB

2
docs/static/robots.txt vendored Normal file
View File

@ -0,0 +1,2 @@
User-agent: *
Disallow:

65
docs/tailwind.config.js Normal file
View File

@ -0,0 +1,65 @@
/** @type {import('tailwindcss').Config} */
// Tailwind configuration for the docs site.
module.exports = {
  // only scan the React sources for class names
  content: ["./src/**/*.{js,jsx,ts,tsx}"],
  // honor either a `dark` class or Docusaurus' [data-theme="dark"] attribute
  darkMode: ['class', '[data-theme="dark"]'],
  theme: {
    // custom breakpoints: adds an `xs` tier below the standard sm/md/lg set
    screens: {
      xs: "540px",
      sm: "640px",
      md: "768px",
      lg: "1024px",
      xl: "1280px",
      "2xl": "1536px",
    },
    fontFamily: {
      nunito: ['"Nunito", sans-serif'],
    },
    // centered containers with per-breakpoint horizontal padding
    container: {
      center: true,
      padding: {
        DEFAULT: "12px",
        sm: "1rem",
        lg: "45px",
        xl: "5rem",
        "2xl": "13rem",
      },
    },
    extend: {
      colors: {
        dark: "#3c4858",
        black: "#161c2d",
        "dark-footer": "#192132",
      },
      // replaces the default shadow scale with softer, site-specific shadows
      boxShadow: {
        sm: "0 2px 4px 0 rgb(60 72 88 / 0.15)",
        DEFAULT: "0 0 3px rgb(60 72 88 / 0.15)",
        md: "0 5px 13px rgb(60 72 88 / 0.20)",
        lg: "0 10px 25px -3px rgb(60 72 88 / 0.15)",
        xl: "0 20px 25px -5px rgb(60 72 88 / 0.1), 0 8px 10px -6px rgb(60 72 88 / 0.1)",
        "2xl": "0 25px 50px -12px rgb(60 72 88 / 0.25)",
        inner: "inset 0 2px 4px 0 rgb(60 72 88 / 0.05)",
        testi: "2px 2px 2px -1px rgb(60 72 88 / 0.15)",
      },
      spacing: {
        0.75: "0.1875rem",
        3.25: "0.8125rem",
      },
      // NOTE(review): the { theme, breakpoints } parameters are unused here;
      // a plain object literal would do — confirm before simplifying.
      maxWidth: ({ theme, breakpoints }) => ({
        1200: "71.25rem",
        992: "60rem",
        768: "45rem",
      }),
      zIndex: {
        1: "1",
        2: "2",
        3: "3",
        999: "999",
      },
    },
  },
  plugins: [require("tw-elements/dist/plugin")],
};

View File

@ -1,5 +1,5 @@
run: run:
timeout: 15m timeout: 20m
linters: linters:
enable: enable:
@ -13,6 +13,11 @@ linters:
- revive - revive
- wsl - wsl
disable:
# Consumes a large amount of memory when running with Graph SDK in the
# project causing OOM failures in Github actions.
- staticcheck
linters-settings: linters-settings:
gci: gci:
sections: sections:

View File

@ -1,5 +1,5 @@
# This must match the version defined in .github/workflows/lint.yaml. # This must match the version defined in .github/workflows/lint.yaml.
WANTED_LINT_VERSION := 1.45.2 WANTED_LINT_VERSION := 1.50.1
LINT_VERSION := $(shell golangci-lint version | cut -d' ' -f4) LINT_VERSION := $(shell golangci-lint version | cut -d' ' -f4)
HAS_LINT := $(shell which golangci-lint) HAS_LINT := $(shell which golangci-lint)

View File

@ -14,6 +14,7 @@ var subCommandFuncs = []func() *cobra.Command{
var serviceCommands = []func(parent *cobra.Command) *cobra.Command{ var serviceCommands = []func(parent *cobra.Command) *cobra.Command{
addExchangeCommands, addExchangeCommands,
addOneDriveCommands, addOneDriveCommands,
addSharePointCommands,
} }
// AddCommands attaches all `corso backup * *` commands to the parent. // AddCommands attaches all `corso backup * *` commands to the parent.

View File

@ -3,6 +3,7 @@ package backup
import ( import (
"context" "context"
"github.com/hashicorp/go-multierror"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/pflag" "github.com/spf13/pflag"
@ -19,6 +20,7 @@ import (
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/repository" "github.com/alcionai/corso/src/pkg/repository"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365"
"github.com/alcionai/corso/src/pkg/store" "github.com/alcionai/corso/src/pkg/store"
) )
@ -270,27 +272,62 @@ func createExchangeCmd(cmd *cobra.Command, args []string) error {
sel := exchangeBackupCreateSelectors(user, exchangeData) sel := exchangeBackupCreateSelectors(user, exchangeData)
bo, err := r.NewBackup(ctx, sel) users, err := m365.UserIDs(ctx, acct)
if err != nil { if err != nil {
return Only(ctx, errors.Wrap(err, "Failed to initialize Exchange backup")) return Only(ctx, errors.Wrap(err, "Failed to retrieve M365 users"))
}
var (
errs *multierror.Error
bIDs []model.StableID
)
for _, scope := range sel.DiscreteScopes(users) {
for _, selUser := range scope.Get(selectors.ExchangeUser) {
opSel := selectors.NewExchangeBackup()
opSel.Include([]selectors.ExchangeScope{scope.DiscreteCopy(selUser)})
bo, err := r.NewBackup(ctx, opSel.Selector)
if err != nil {
errs = multierror.Append(errs, errors.Wrapf(
err,
"Failed to initialize Exchange backup for user %s",
scope.Get(selectors.ExchangeUser),
))
continue
} }
err = bo.Run(ctx) err = bo.Run(ctx)
if err != nil { if err != nil {
return Only(ctx, errors.Wrap(err, "Failed to run Exchange backup")) errs = multierror.Append(errs, errors.Wrapf(
err,
"Failed to run Exchange backup for user %s",
scope.Get(selectors.ExchangeUser),
))
continue
} }
bu, err := r.Backup(ctx, bo.Results.BackupID) bIDs = append(bIDs, bo.Results.BackupID)
}
}
bups, err := r.Backups(ctx, bIDs)
if err != nil { if err != nil {
return Only(ctx, errors.Wrap(err, "Unable to retrieve backup results from storage")) return Only(ctx, errors.Wrap(err, "Unable to retrieve backup results from storage"))
} }
bu.Print(ctx) backup.PrintAll(ctx, bups)
if e := errs.ErrorOrNil(); e != nil {
return Only(ctx, e)
}
return nil return nil
} }
func exchangeBackupCreateSelectors(userIDs, data []string) selectors.Selector { func exchangeBackupCreateSelectors(userIDs, data []string) *selectors.ExchangeBackup {
sel := selectors.NewExchangeBackup() sel := selectors.NewExchangeBackup()
if len(data) == 0 { if len(data) == 0 {
@ -310,7 +347,7 @@ func exchangeBackupCreateSelectors(userIDs, data []string) selectors.Selector {
} }
} }
return sel.Selector return sel
} }
func validateExchangeBackupCreateFlags(userIDs, data []string) error { func validateExchangeBackupCreateFlags(userIDs, data []string) error {
@ -373,7 +410,7 @@ func listExchangeCmd(cmd *cobra.Command, args []string) error {
return nil return nil
} }
bs, err := r.Backups(ctx, store.Service(path.ExchangeService)) bs, err := r.BackupsByTag(ctx, store.Service(path.ExchangeService))
if err != nil { if err != nil {
return Only(ctx, errors.Wrap(err, "Failed to list backups in the repository")) return Only(ctx, errors.Wrap(err, "Failed to list backups in the repository"))
} }
@ -453,8 +490,7 @@ func detailsExchangeCmd(cmd *cobra.Command, args []string) error {
return nil return nil
} }
// runDetailsExchangeCmd actually performs the lookup in backup details. Assumes // runDetailsExchangeCmd actually performs the lookup in backup details.
// len(backupID) > 0.
func runDetailsExchangeCmd( func runDetailsExchangeCmd(
ctx context.Context, ctx context.Context,
r repository.BackupGetter, r repository.BackupGetter,

View File

@ -3,6 +3,7 @@ package backup
import ( import (
"context" "context"
"github.com/hashicorp/go-multierror"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/pflag" "github.com/spf13/pflag"
@ -18,6 +19,7 @@ import (
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/repository" "github.com/alcionai/corso/src/pkg/repository"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365"
"github.com/alcionai/corso/src/pkg/store" "github.com/alcionai/corso/src/pkg/store"
) )
@ -90,7 +92,7 @@ func addOneDriveCommands(parent *cobra.Command) *cobra.Command {
c, fs = utils.AddCommand(parent, oneDriveListCmd()) c, fs = utils.AddCommand(parent, oneDriveListCmd())
fs.StringVar(&backupID, fs.StringVar(&backupID,
"backup", "", utils.BackupFN, "",
"ID of the backup to retrieve.") "ID of the backup to retrieve.")
case detailsCommand: case detailsCommand:
@ -192,22 +194,57 @@ func createOneDriveCmd(cmd *cobra.Command, args []string) error {
sel := oneDriveBackupCreateSelectors(user) sel := oneDriveBackupCreateSelectors(user)
bo, err := r.NewBackup(ctx, sel) users, err := m365.UserIDs(ctx, acct)
if err != nil { if err != nil {
return Only(ctx, errors.Wrap(err, "Failed to initialize OneDrive backup")) return Only(ctx, errors.Wrap(err, "Failed to retrieve M365 users"))
}
var (
errs *multierror.Error
bIDs []model.StableID
)
for _, scope := range sel.DiscreteScopes(users) {
for _, selUser := range scope.Get(selectors.OneDriveUser) {
opSel := selectors.NewOneDriveBackup()
opSel.Include([]selectors.OneDriveScope{scope.DiscreteCopy(selUser)})
bo, err := r.NewBackup(ctx, opSel.Selector)
if err != nil {
errs = multierror.Append(errs, errors.Wrapf(
err,
"Failed to initialize OneDrive backup for user %s",
scope.Get(selectors.OneDriveUser),
))
continue
} }
err = bo.Run(ctx) err = bo.Run(ctx)
if err != nil { if err != nil {
return Only(ctx, errors.Wrap(err, "Failed to run OneDrive backup")) errs = multierror.Append(errs, errors.Wrapf(
err,
"Failed to run OneDrive backup for user %s",
scope.Get(selectors.OneDriveUser),
))
continue
} }
bu, err := r.Backup(ctx, bo.Results.BackupID) bIDs = append(bIDs, bo.Results.BackupID)
}
}
bups, err := r.Backups(ctx, bIDs)
if err != nil { if err != nil {
return errors.Wrap(err, "Unable to retrieve backup results from storage") return Only(ctx, errors.Wrap(err, "Unable to retrieve backup results from storage"))
} }
bu.Print(ctx) backup.PrintAll(ctx, bups)
if e := errs.ErrorOrNil(); e != nil {
return Only(ctx, e)
}
return nil return nil
} }
@ -220,11 +257,11 @@ func validateOneDriveBackupCreateFlags(users []string) error {
return nil return nil
} }
func oneDriveBackupCreateSelectors(users []string) selectors.Selector { func oneDriveBackupCreateSelectors(users []string) *selectors.OneDriveBackup {
sel := selectors.NewOneDriveBackup() sel := selectors.NewOneDriveBackup()
sel.Include(sel.Users(users)) sel.Include(sel.Users(users))
return sel.Selector return sel
} }
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
@ -272,7 +309,7 @@ func listOneDriveCmd(cmd *cobra.Command, args []string) error {
return nil return nil
} }
bs, err := r.Backups(ctx, store.Service(path.OneDriveService)) bs, err := r.BackupsByTag(ctx, store.Service(path.OneDriveService))
if err != nil { if err != nil {
return Only(ctx, errors.Wrap(err, "Failed to list backups in the repository")) return Only(ctx, errors.Wrap(err, "Failed to list backups in the repository"))
} }
@ -344,8 +381,7 @@ func detailsOneDriveCmd(cmd *cobra.Command, args []string) error {
return nil return nil
} }
// runDetailsOneDriveCmd actually performs the lookup in backup details. Assumes // runDetailsOneDriveCmd actually performs the lookup in backup details.
// len(backupID) > 0.
func runDetailsOneDriveCmd( func runDetailsOneDriveCmd(
ctx context.Context, ctx context.Context,
r repository.BackupGetter, r repository.BackupGetter,
@ -388,7 +424,7 @@ func oneDriveDeleteCmd() *cobra.Command {
} }
} }
// deletes an exchange service backup. // deletes a oneDrive service backup.
func deleteOneDriveCmd(cmd *cobra.Command, args []string) error { func deleteOneDriveCmd(cmd *cobra.Command, args []string) error {
ctx := cmd.Context() ctx := cmd.Context()

View File

@ -0,0 +1,443 @@
package backup
import (
"context"
"github.com/hashicorp/go-multierror"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/options"
. "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/repository"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365"
"github.com/alcionai/corso/src/pkg/store"
)
// ------------------------------------------------------------------------------------------------
// setup and globals
// ------------------------------------------------------------------------------------------------
var (
	// cli flag value sinks, populated by cobra during flag parsing.
	site           []string // utils.SiteFN: site ids to back up ('*' selects all)
	libraryPaths   []string // utils.LibraryFN: library names for details filtering
	libraryItems   []string // utils.LibraryItemFN: library item names/ids for details filtering
	sharepointData []string // utils.DataFN: data categories (flag registration marked TODO: implement)
)

const (
	// valid value for the --data flag
	dataLibraries = "libraries"
)

const (
	sharePointServiceCommand                 = "sharepoint"
	sharePointServiceCommandCreateUseSuffix  = "--site <siteId> | '" + utils.Wildcard + "'"
	sharePointServiceCommandDeleteUseSuffix  = "--backup <backupId>"
	sharePointServiceCommandDetailsUseSuffix = "--backup <backupId>"
)
const (
	// sharePointServiceCommandCreateExamples is shown as help text for
	// `corso backup create sharepoint`.
	sharePointServiceCommandCreateExamples = `# Backup SharePoint data for <site>
corso backup create sharepoint --site <site_id>

# Backup SharePoint for Alice and Bob
corso backup create sharepoint --site <site_id_1>,<site_id_2>

# TODO: Site IDs may contain commas. We'll need to warn the user about escaping them.

# Backup all SharePoint data for all sites
corso backup create sharepoint --site '*'`

	// sharePointServiceCommandDeleteExamples is shown as help text for
	// `corso backup delete sharepoint`.
	sharePointServiceCommandDeleteExamples = `# Delete SharePoint backup with ID 1234abcd-12ab-cd34-56de-1234abcd
corso backup delete sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd`

	// sharePointServiceCommandDetailsExamples is shown as help text for
	// `corso backup details sharepoint`.
	sharePointServiceCommandDetailsExamples = `# Explore <site>'s files from backup 1234abcd-12ab-cd34-56de-1234abcd
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd --site <site_id>`
)
// addSharePointCommands is called by backup.go to map parent subcommands to
// provider-specific handling. Depending on which parent (create, list,
// details, delete) is passed in, it attaches the matching sharepoint child
// command plus that command's flags, then returns the command that was added
// (nil when parent.Use matches none of the known subcommands).
func addSharePointCommands(parent *cobra.Command) *cobra.Command {
	var (
		c  *cobra.Command
		fs *pflag.FlagSet
	)

	switch parent.Use {
	case createCommand:
		// `corso backup create sharepoint`
		c, fs = utils.AddCommand(parent, sharePointCreateCmd(), utils.HideCommand())

		c.Use = c.Use + " " + sharePointServiceCommandCreateUseSuffix
		c.Example = sharePointServiceCommandCreateExamples

		fs.StringArrayVar(&site,
			utils.SiteFN, nil,
			"Backup SharePoint data by site ID; accepts '"+utils.Wildcard+"' to select all sites. (required)")

		// TODO: implement
		fs.StringSliceVar(
			&sharepointData,
			utils.DataFN, nil,
			"Select one or more types of data to backup: "+dataLibraries+".")
		options.AddOperationFlags(c)

	case listCommand:
		// `corso backup list sharepoint`
		c, fs = utils.AddCommand(parent, sharePointListCmd(), utils.HideCommand())

		fs.StringVar(&backupID,
			utils.BackupFN, "",
			"ID of the backup to retrieve.")

	case detailsCommand:
		// `corso backup details sharepoint`
		c, fs = utils.AddCommand(parent, sharePointDetailsCmd())

		c.Use = c.Use + " " + sharePointServiceCommandDetailsUseSuffix
		c.Example = sharePointServiceCommandDetailsExamples

		fs.StringVar(&backupID,
			utils.BackupFN, "",
			"ID of the backup to retrieve.")
		cobra.CheckErr(c.MarkFlagRequired(utils.BackupFN))

		// sharepoint hierarchy flags
		fs.StringSliceVar(
			&libraryPaths,
			utils.LibraryFN, nil,
			"Select backup details by Library name.")

		fs.StringSliceVar(
			&libraryItems,
			utils.LibraryItemFN, nil,
			"Select backup details by library item name or ID.")

		// info flags (item-metadata filters; not wired up yet)
		// fs.StringVar(
		// 	&fileCreatedAfter,
		// 	utils.FileCreatedAfterFN, "",
		// 	"Select backup details for items created after this datetime.")

	case deleteCommand:
		// `corso backup delete sharepoint`
		c, fs = utils.AddCommand(parent, sharePointDeleteCmd(), utils.HideCommand())

		c.Use = c.Use + " " + sharePointServiceCommandDeleteUseSuffix
		c.Example = sharePointServiceCommandDeleteExamples

		fs.StringVar(&backupID,
			utils.BackupFN, "",
			"ID of the backup to delete. (required)")
		cobra.CheckErr(c.MarkFlagRequired(utils.BackupFN))
	}

	return c
}
// ------------------------------------------------------------------------------------------------
// backup create
// ------------------------------------------------------------------------------------------------
// sharePointCreateCmd produces the `corso backup create sharepoint [<flag>...]`
// subcommand.
func sharePointCreateCmd() *cobra.Command {
	cmd := cobra.Command{
		Use:     sharePointServiceCommand,
		Args:    cobra.NoArgs,
		Short:   "Backup M365 SharePoint service data",
		Example: sharePointServiceCommandCreateExamples,
		RunE:    createSharePointCmd,
	}

	return &cmd
}
// createSharePointCmd processes a sharepoint service backup. One discrete
// backup operation is run per selected site, so a failure against one site
// is accumulated and does not abort the remaining sites.
func createSharePointCmd(cmd *cobra.Command, args []string) error {
	ctx := cmd.Context()

	if utils.HasNoFlagsAndShownHelp(cmd) {
		return nil
	}

	if err := validateSharePointBackupCreateFlags(site); err != nil {
		return err
	}

	s, acct, err := config.GetStorageAndAccount(ctx, true, nil)
	if err != nil {
		return Only(ctx, err)
	}

	r, err := repository.Connect(ctx, acct, s, options.Control())
	if err != nil {
		return Only(ctx, errors.Wrapf(err, "Failed to connect to the %s repository", s.Provider))
	}

	defer utils.CloseRepo(ctx, r)

	sel := sharePointBackupCreateSelectors(site)

	// fetch the tenant's site list so wildcard scopes can be made discrete.
	sites, err := m365.Sites(ctx, acct)
	if err != nil {
		return Only(ctx, errors.Wrap(err, "Failed to retrieve SharePoint sites"))
	}

	var (
		errs *multierror.Error
		bIDs []model.StableID
	)

	for _, scope := range sel.DiscreteScopes(sites) {
		for _, selSite := range scope.Get(selectors.SharePointSite) {
			opSel := selectors.NewSharePointBackup()
			opSel.Include([]selectors.SharePointScope{scope.DiscreteCopy(selSite)})

			bo, err := r.NewBackup(ctx, opSel.Selector)
			if err != nil {
				// report the single site being handled, not the whole scope slice.
				errs = multierror.Append(errs, errors.Wrapf(
					err,
					"Failed to initialize SharePoint backup for site %s",
					selSite,
				))

				continue
			}

			err = bo.Run(ctx)
			if err != nil {
				errs = multierror.Append(errs, errors.Wrapf(
					err,
					"Failed to run SharePoint backup for site %s",
					selSite,
				))

				continue
			}

			bIDs = append(bIDs, bo.Results.BackupID)
		}
	}

	bups, err := r.Backups(ctx, bIDs)
	if err != nil {
		return Only(ctx, errors.Wrap(err, "Unable to retrieve backup results from storage"))
	}

	backup.PrintAll(ctx, bups)

	// ErrorOrNil returns nil for a nil *multierror.Error, so the happy path
	// falls through to the nil return below.
	if e := errs.ErrorOrNil(); e != nil {
		return Only(ctx, e)
	}

	return nil
}
// validateSharePointBackupCreateFlags ensures at least one --site id (or the
// wildcard) was supplied before attempting a backup create.
func validateSharePointBackupCreateFlags(sites []string) error {
	if len(sites) > 0 {
		return nil
	}

	return errors.New("requires one or more --site ids or the wildcard --site *")
}
// sharePointBackupCreateSelectors builds a SharePoint backup selector that
// includes every site id listed in sites.
func sharePointBackupCreateSelectors(sites []string) *selectors.SharePointBackup {
	backupSel := selectors.NewSharePointBackup()
	backupSel.Include(backupSel.Sites(sites))

	return backupSel
}
// ------------------------------------------------------------------------------------------------
// backup list
// ------------------------------------------------------------------------------------------------
// sharePointListCmd produces the `corso backup list sharepoint [<flag>...]`
// subcommand.
func sharePointListCmd() *cobra.Command {
	cmd := cobra.Command{
		Use:   sharePointServiceCommand,
		Args:  cobra.NoArgs,
		Short: "List the history of M365 SharePoint service backups",
		RunE:  listSharePointCmd,
	}

	return &cmd
}
// listSharePointCmd lists the history of backup operations for the
// sharepoint service. When the --backup flag holds an id, only that single
// backup is printed; otherwise every sharepoint-service backup in the
// repository is listed.
func listSharePointCmd(cmd *cobra.Command, args []string) error {
	ctx := cmd.Context()

	s, acct, err := config.GetStorageAndAccount(ctx, true, nil)
	if err != nil {
		return Only(ctx, err)
	}

	r, err := repository.Connect(ctx, acct, s, options.Control())
	if err != nil {
		return Only(ctx, errors.Wrapf(err, "Failed to connect to the %s repository", s.Provider))
	}

	defer utils.CloseRepo(ctx, r)

	// single-backup lookup path.
	if len(backupID) > 0 {
		b, err := r.Backup(ctx, model.StableID(backupID))
		if err != nil {
			// distinguish a missing backup from other storage failures.
			if errors.Is(err, kopia.ErrNotFound) {
				return Only(ctx, errors.Errorf("No backup exists with the id %s", backupID))
			}

			return Only(ctx, errors.Wrap(err, "Failed to find backup "+backupID))
		}

		b.Print(ctx)

		return nil
	}

	// no id given: list everything tagged with the sharepoint service.
	bs, err := r.BackupsByTag(ctx, store.Service(path.SharePointService))
	if err != nil {
		return Only(ctx, errors.Wrap(err, "Failed to list backups in the repository"))
	}

	backup.PrintAll(ctx, bs)

	return nil
}
// ------------------------------------------------------------------------------------------------
// backup delete
// ------------------------------------------------------------------------------------------------
// sharePointDeleteCmd produces the `corso backup delete sharepoint [<flag>...]`
// subcommand.
func sharePointDeleteCmd() *cobra.Command {
	cmd := cobra.Command{
		Use:     sharePointServiceCommand,
		Args:    cobra.NoArgs,
		Short:   "Delete backed-up M365 SharePoint service data",
		Example: sharePointServiceCommandDeleteExamples,
		RunE:    deleteSharePointCmd,
	}

	return &cmd
}
// deleteSharePointCmd deletes a sharePoint service backup, identified by the
// --backup flag, from the repository.
func deleteSharePointCmd(cmd *cobra.Command, args []string) error {
	ctx := cmd.Context()

	if utils.HasNoFlagsAndShownHelp(cmd) {
		return nil
	}

	storageCfg, m365Acct, err := config.GetStorageAndAccount(ctx, true, nil)
	if err != nil {
		return Only(ctx, err)
	}

	repo, err := repository.Connect(ctx, m365Acct, storageCfg, options.Control())
	if err != nil {
		return Only(ctx, errors.Wrapf(err, "Failed to connect to the %s repository", storageCfg.Provider))
	}

	defer utils.CloseRepo(ctx, repo)

	if err := repo.DeleteBackup(ctx, model.StableID(backupID)); err != nil {
		return Only(ctx, errors.Wrapf(err, "Deleting backup %s", backupID))
	}

	Info(ctx, "Deleted SharePoint backup ", backupID)

	return nil
}
// ------------------------------------------------------------------------------------------------
// backup details
// ------------------------------------------------------------------------------------------------
// sharePointDetailsCmd produces the `corso backup details sharepoint [<flag>...]`
// subcommand. (The original comment said "onedrive" — copy/paste slip.)
func sharePointDetailsCmd() *cobra.Command {
	cmd := cobra.Command{
		Use:     sharePointServiceCommand,
		Args:    cobra.NoArgs,
		Short:   "Shows the details of a M365 SharePoint service backup",
		Example: sharePointServiceCommandDetailsExamples,
		RunE:    detailsSharePointCmd,
	}

	return &cmd
}
// detailsSharePointCmd prints the item-level entries of a single sharepoint
// backup, narrowed by whichever site/library selector flags were supplied.
func detailsSharePointCmd(cmd *cobra.Command, args []string) error {
	ctx := cmd.Context()

	if utils.HasNoFlagsAndShownHelp(cmd) {
		return nil
	}

	s, acct, err := config.GetStorageAndAccount(ctx, true, nil)
	if err != nil {
		return Only(ctx, err)
	}

	r, err := repository.Connect(ctx, acct, s, options.Control())
	if err != nil {
		return Only(ctx, errors.Wrapf(err, "Failed to connect to the %s repository", s.Provider))
	}

	defer utils.CloseRepo(ctx, r)

	// bundle the flag values; Populated records which flags were explicitly
	// set on the command line.
	opts := utils.SharePointOpts{
		Sites:        site,
		LibraryPaths: libraryPaths,
		LibraryItems: libraryItems,

		Populated: utils.GetPopulatedFlags(cmd),
	}

	ds, err := runDetailsSharePointCmd(ctx, r, backupID, opts)
	if err != nil {
		return Only(ctx, err)
	}

	// nothing survived the selector reduction.
	if len(ds.Entries) == 0 {
		Info(ctx, selectors.ErrorNoMatchingItems)
		return nil
	}

	ds.PrintEntries(ctx)

	return nil
}
// runDetailsSharePointCmd actually performs the lookup in backup details for
// the given backup id, reduced by the selector flags carried in opts. It is
// split out from the cobra handler so it can run against any BackupGetter.
func runDetailsSharePointCmd(
	ctx context.Context,
	r repository.BackupGetter,
	backupID string,
	opts utils.SharePointOpts,
) (*details.Details, error) {
	if err := utils.ValidateSharePointRestoreFlags(backupID, opts); err != nil {
		return nil, err
	}

	deets, _, err := r.BackupDetails(ctx, backupID)

	switch {
	case errors.Is(err, kopia.ErrNotFound):
		return nil, errors.Errorf("no backup exists with the id %s", backupID)
	case err != nil:
		return nil, errors.Wrap(err, "Failed to get backup details in the repository")
	}

	restoreSel := selectors.NewSharePointRestore()
	utils.IncludeSharePointRestoreDataSelectors(restoreSel, opts)
	utils.FilterSharePointRestoreInfoSelectors(restoreSel, opts)

	// if no selector flags were specified, get all data in the service.
	if len(restoreSel.Scopes()) == 0 {
		restoreSel.Include(restoreSel.Sites(selectors.Any()))
	}

	return restoreSel.Reduce(ctx, deets), nil
}

View File

@ -0,0 +1,236 @@
package backup_test
import (
"fmt"
"strings"
"testing"
"github.com/google/uuid"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/repository"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/storage"
)
// ---------------------------------------------------------------------------
// tests with no prior backup
// ---------------------------------------------------------------------------

// NoBackupSharePointIntegrationSuite exercises sharepoint backup CLI commands
// against a freshly initialized repo that contains no backups.
type NoBackupSharePointIntegrationSuite struct {
	suite.Suite
	acct       account.Account       // M365 account credentials under test
	st         storage.Storage       // prefixed S3 storage backing the repo
	vpr        *viper.Viper          // cloned test config, injected via ctx
	cfgFP      string                // path to the temp config file
	repo       repository.Repository // repo initialized in SetupSuite
	m365SiteID string                // sharepoint site ID from the test env
	recorder   strings.Builder       // captures command stderr output
}
// TestNoBackupSharePointIntegrationSuite runs the suite only when one of the
// CI, CLI, or CLI-backup integration test groups is enabled.
func TestNoBackupSharePointIntegrationSuite(t *testing.T) {
	err := tester.RunOnAny(
		tester.CorsoCITests,
		tester.CorsoCLITests,
		tester.CorsoCLIBackupTests)
	if err != nil {
		t.Skip(err)
	}

	suite.Run(t, new(NoBackupSharePointIntegrationSuite))
}
// SetupSuite verifies the required AWS and M365 env credentials, clones the
// test config into a temp file, and initializes a fresh prefixed-S3 repo so
// every test in the suite starts with zero backups.
func (suite *NoBackupSharePointIntegrationSuite) SetupSuite() {
	t := suite.T()

	ctx, flush := tester.NewContext()
	defer flush()

	// fail fast if storage or account credentials are missing from the env.
	_, err := tester.GetRequiredEnvSls(
		tester.AWSStorageCredEnvs,
		tester.M365AcctCredEnvs)
	require.NoError(t, err)

	// prepare common details
	suite.acct = tester.NewM365Account(t)
	suite.st = tester.NewPrefixedS3Storage(t)

	cfg, err := suite.st.S3Config()
	require.NoError(t, err)

	// force the cloned config to match this suite's storage prefix.
	force := map[string]string{
		tester.TestCfgAccountProvider: "M365",
		tester.TestCfgStorageProvider: "S3",
		tester.TestCfgPrefix:          cfg.Prefix,
	}
	suite.vpr, suite.cfgFP, err = tester.MakeTempTestConfigClone(t, force)
	require.NoError(t, err)

	ctx = config.SetViper(ctx, suite.vpr)

	suite.m365SiteID = tester.M365SiteID(t)

	// init the repo first
	suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
	require.NoError(t, err)
}
// TestSharePointBackupListCmd_empty lists sharepoint backups in the empty
// repo and expects the "No backups available" message, not a table or error.
func (suite *NoBackupSharePointIntegrationSuite) TestSharePointBackupListCmd_empty() {
	t := suite.T()
	ctx, flush := tester.NewContext()
	ctx = config.SetViper(ctx, suite.vpr)

	defer flush()

	suite.recorder.Reset()

	cmd := tester.StubRootCmd(
		"backup", "list", "sharepoint",
		"--config-file", suite.cfgFP)
	cli.BuildCommandTree(cmd)
	// capture stderr, where the list output is written.
	cmd.SetErr(&suite.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	require.NoError(t, cmd.ExecuteContext(ctx))

	result := suite.recorder.String()
	// with no backups in the repo, the command prints exactly this message.
	assert.Equal(t, "No backups available\n", result)
}
// ---------------------------------------------------------------------------
// tests for deleting backups
// ---------------------------------------------------------------------------

// BackupDeleteSharePointIntegrationSuite exercises sharepoint backup deletion;
// SetupSuite runs one real backup so the tests have a backup ID to delete.
type BackupDeleteSharePointIntegrationSuite struct {
	suite.Suite
	acct     account.Account            // M365 account credentials under test
	st       storage.Storage            // prefixed S3 storage backing the repo
	vpr      *viper.Viper               // cloned test config, injected via ctx
	cfgFP    string                     // path to the temp config file
	repo     repository.Repository      // repo initialized in SetupSuite
	backupOp operations.BackupOperation // backup produced during setup
	recorder strings.Builder            // captures command stderr output
}
// TestBackupDeleteSharePointIntegrationSuite gates the delete-backup suite
// behind the CI, CLI, and CLI-backup integration test groups.
func TestBackupDeleteSharePointIntegrationSuite(t *testing.T) {
	err := tester.RunOnAny(
		tester.CorsoCITests,
		tester.CorsoCLITests,
		tester.CorsoCLIBackupTests)
	if err != nil {
		t.Skip(err)
	}

	suite.Run(t, new(BackupDeleteSharePointIntegrationSuite))
}
// SetupSuite verifies env credentials, initializes a prefixed-S3 repo, and
// runs a single sharepoint library backup so the delete tests have a real
// backup ID to operate on.
func (suite *BackupDeleteSharePointIntegrationSuite) SetupSuite() {
	t := suite.T()

	// fail fast if storage or account credentials are missing from the env.
	_, err := tester.GetRequiredEnvSls(
		tester.AWSStorageCredEnvs,
		tester.M365AcctCredEnvs)
	require.NoError(t, err)

	// prepare common details
	suite.acct = tester.NewM365Account(t)
	suite.st = tester.NewPrefixedS3Storage(t)

	cfg, err := suite.st.S3Config()
	require.NoError(t, err)

	force := map[string]string{
		tester.TestCfgAccountProvider: "M365",
		tester.TestCfgStorageProvider: "S3",
		tester.TestCfgPrefix:          cfg.Prefix,
	}
	suite.vpr, suite.cfgFP, err = tester.MakeTempTestConfigClone(t, force)
	require.NoError(t, err)

	ctx, flush := tester.NewContext()
	ctx = config.SetViper(ctx, suite.vpr)

	defer flush()

	// init the repo first
	suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
	require.NoError(t, err)

	m365SiteID := tester.M365SiteID(t)

	// some tests require an existing backup
	sel := selectors.NewSharePointBackup()
	sel.Include(sel.Libraries([]string{m365SiteID}, selectors.Any()))

	suite.backupOp, err = suite.repo.NewBackup(ctx, sel.Selector)
	// check the constructor error BEFORE running the operation; previously
	// Run was invoked first, so a failed NewBackup would execute a zero-value
	// BackupOperation and produce a misleading failure.
	require.NoError(t, err)
	require.NoError(t, suite.backupOp.Run(ctx))
}
// TestSharePointBackupDeleteCmd deletes the backup created in SetupSuite and
// verifies the success message names the deleted backup's ID.
func (suite *BackupDeleteSharePointIntegrationSuite) TestSharePointBackupDeleteCmd() {
	t := suite.T()

	ctx, flush := tester.NewContext()
	ctx = config.SetViper(ctx, suite.vpr)

	defer flush()

	suite.recorder.Reset()

	cmd := tester.StubRootCmd(
		"backup", "delete", "sharepoint",
		"--config-file", suite.cfgFP,
		"--"+utils.BackupFN, string(suite.backupOp.Results.BackupID))
	cli.BuildCommandTree(cmd)
	// capture stderr, where the deletion message is written.
	cmd.SetErr(&suite.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	require.NoError(t, cmd.ExecuteContext(ctx))

	result := suite.recorder.String()
	assert.Equal(t, fmt.Sprintf("Deleted SharePoint backup %s\n", string(suite.backupOp.Results.BackupID)), result)
}
// moved out of the func above to make the linter happy
// // a follow-up details call should fail, due to the backup ID being deleted
// cmd = tester.StubRootCmd(
// "backup", "details", "sharepoint",
// "--config-file", suite.cfgFP,
// "--backup", string(suite.backupOp.Results.BackupID))
// cli.BuildCommandTree(cmd)
// require.Error(t, cmd.ExecuteContext(ctx))
// TestSharePointBackupDeleteCmd_unknownID verifies that deleting a random,
// nonexistent backup ID surfaces an error rather than succeeding silently.
func (suite *BackupDeleteSharePointIntegrationSuite) TestSharePointBackupDeleteCmd_unknownID() {
	t := suite.T()

	ctx, flush := tester.NewContext()
	defer flush()

	ctx = config.SetViper(ctx, suite.vpr)

	deleteCmd := tester.StubRootCmd(
		"backup", "delete", "sharepoint",
		"--config-file", suite.cfgFP,
		"--"+utils.BackupFN, uuid.NewString())
	cli.BuildCommandTree(deleteCmd)

	// unknown backupIDs should error since the modelStore can't find the backup
	require.Error(t, deleteCmd.ExecuteContext(ctx))
}

View File

@ -0,0 +1,127 @@
package backup
import (
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/tester"
)
// SharePointSuite holds the unit tests for the sharepoint backup CLI
// command wiring and flag validation.
type SharePointSuite struct {
	suite.Suite
}

// TestSharePointSuite is the go-test entry point for SharePointSuite.
func TestSharePointSuite(t *testing.T) {
	suite.Run(t, new(SharePointSuite))
}
// TestAddSharePointCommands confirms each sharepoint subcommand (create,
// list, details, delete) registers under its parent command with the expected
// use string, short description, and run function.
func (suite *SharePointSuite) TestAddSharePointCommands() {
	expectUse := sharePointServiceCommand

	table := []struct {
		name        string // subtest name
		use         string // parent command's use string
		expectUse   string // expected use on the registered child
		expectShort string // expected short description on the child
		expectRunE  func(*cobra.Command, []string) error
	}{
		{
			"create sharepoint", createCommand, expectUse + " " + sharePointServiceCommandCreateUseSuffix,
			sharePointCreateCmd().Short, createSharePointCmd,
		},
		{
			"list sharepoint", listCommand, expectUse,
			sharePointListCmd().Short, listSharePointCmd,
		},
		{
			"details sharepoint", detailsCommand, expectUse + " " + sharePointServiceCommandDetailsUseSuffix,
			sharePointDetailsCmd().Short, detailsSharePointCmd,
		},
		{
			"delete sharepoint", deleteCommand, expectUse + " " + sharePointServiceCommandDeleteUseSuffix,
			sharePointDeleteCmd().Short, deleteSharePointCmd,
		},
	}
	for _, test := range table {
		suite.T().Run(test.name, func(t *testing.T) {
			cmd := &cobra.Command{Use: test.use}

			// exactly one sharepoint child should be registered on cmd.
			c := addSharePointCommands(cmd)
			require.NotNil(t, c)

			cmds := cmd.Commands()
			require.Len(t, cmds, 1)

			child := cmds[0]
			assert.Equal(t, test.expectUse, child.Use)
			assert.Equal(t, test.expectShort, child.Short)
			tester.AreSameFunc(t, test.expectRunE, child.RunE)
		})
	}
}
// TestValidateSharePointBackupCreateFlags checks that backup-create flag
// validation errors when no site is provided and passes otherwise.
func (suite *SharePointSuite) TestValidateSharePointBackupCreateFlags() {
	cases := []struct {
		name   string
		site   []string
		expect assert.ErrorAssertionFunc
	}{
		{name: "no sites", expect: assert.Error},
		{name: "sites", site: []string{"fnord"}, expect: assert.NoError},
	}

	for _, tc := range cases {
		suite.T().Run(tc.name, func(t *testing.T) {
			tc.expect(t, validateSharePointBackupCreateFlags(tc.site))
		})
	}
}
// TestSharePointBackupDetailsSelectors runs runDetailsSharePointCmd against
// each canned lookup in testdata, asserting the reduced entries match the
// expected set for that flag combination.
func (suite *SharePointSuite) TestSharePointBackupDetailsSelectors() {
	ctx, flush := tester.NewContext()
	defer flush()

	for _, test := range testdata.SharePointOptionDetailLookups {
		suite.T().Run(test.Name, func(t *testing.T) {
			output, err := runDetailsSharePointCmd(
				ctx,
				test.BackupGetter,
				"backup-ID",
				test.Opts,
			)
			assert.NoError(t, err)
			// order-independent comparison of the selected entries.
			assert.ElementsMatch(t, test.Expected, output.Entries)
		})
	}
}
// TestSharePointBackupDetailsSelectorsBadFormats ensures malformed flag
// inputs make runDetailsSharePointCmd error and return no details.
func (suite *SharePointSuite) TestSharePointBackupDetailsSelectorsBadFormats() {
	ctx, flush := tester.NewContext()
	defer flush()

	for _, tc := range testdata.BadSharePointOptionsFormats {
		suite.T().Run(tc.Name, func(t *testing.T) {
			output, err := runDetailsSharePointCmd(ctx, tc.BackupGetter, "backup-ID", tc.Opts)
			assert.Error(t, err)
			assert.Empty(t, output)
		})
	}
}

View File

@ -40,7 +40,7 @@ var corsoCmd = &cobra.Command{
func handleCorsoCmd(cmd *cobra.Command, args []string) error { func handleCorsoCmd(cmd *cobra.Command, args []string) error {
v, _ := cmd.Flags().GetBool("version") v, _ := cmd.Flags().GetBool("version")
if v { if v {
print.Infof(cmd.Context(), "Corso\nversion: "+version) print.Outf(cmd.Context(), "Corso\nversion: "+version)
return nil return nil
} }

View File

@ -89,6 +89,38 @@ func err(w io.Writer, s ...any) {
fmt.Fprint(w, msg...) fmt.Fprint(w, msg...)
} }
// Out prints the params to cobra's output writer (stdOut by default)
// if s is nil, prints nothing.
func Out(ctx context.Context, s ...any) {
out(getRootCmd(ctx).OutOrStdout(), s...)
}
// out is the testable core of Out()
func out(w io.Writer, s ...any) {
if len(s) == 0 {
return
}
fmt.Fprint(w, s...)
fmt.Fprintf(w, "\n")
}
// Out prints the formatted strings to cobra's output writer (stdOut by default)
// if t is empty, prints nothing.
func Outf(ctx context.Context, t string, s ...any) {
outf(getRootCmd(ctx).OutOrStdout(), t, s...)
}
// outf is the testable core of Outf()
func outf(w io.Writer, t string, s ...any) {
if len(t) == 0 {
return
}
fmt.Fprintf(w, t, s...)
fmt.Fprintf(w, "\n")
}
// Info prints the params to cobra's error writer (stdErr by default) // Info prints the params to cobra's error writer (stdErr by default)
// if s is nil, prints nothing. // if s is nil, prints nothing.
func Info(ctx context.Context, s ...any) { func Info(ctx context.Context, s ...any) {
@ -138,14 +170,12 @@ type Printable interface {
// Item prints the printable, according to the caller's requested format. // Item prints the printable, according to the caller's requested format.
func Item(ctx context.Context, p Printable) { func Item(ctx context.Context, p Printable) {
print(getRootCmd(ctx).OutOrStdout(), p) printItem(getRootCmd(ctx).OutOrStdout(), p)
} }
// print prints the printable items, // print prints the printable items,
// according to the caller's requested format. // according to the caller's requested format.
// func printItem(w io.Writer, p Printable) {
//revive:disable:redefines-builtin-id
func print(w io.Writer, p Printable) {
if outputAsJSON || outputAsJSONDebug { if outputAsJSON || outputAsJSONDebug {
outputJSON(w, p, outputAsJSONDebug) outputJSON(w, p, outputAsJSONDebug)
return return

View File

@ -4,12 +4,21 @@ import (
"testing" "testing"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
func (suite *ExchangeUtilsSuite) TestIncludeOneDriveRestoreDataSelectors() { type OneDriveUtilsSuite struct {
suite.Suite
}
func TestOneDriveUtilsSuite(t *testing.T) {
suite.Run(t, new(OneDriveUtilsSuite))
}
func (suite *OneDriveUtilsSuite) TestIncludeOneDriveRestoreDataSelectors() {
var ( var (
empty = []string{} empty = []string{}
single = []string{"single"} single = []string{"single"}

View File

@ -0,0 +1,97 @@
package utils
import (
"errors"
"github.com/alcionai/corso/src/pkg/selectors"
)
const (
	// LibraryItemFN is the CLI flag name for selecting individual library items.
	LibraryItemFN = "library-item"
	// LibraryFN is the CLI flag name for selecting a library.
	LibraryFN = "library"
)
// SharePointOpts collects the sharepoint-specific CLI flag values used to
// build backup-details and restore selectors.
type SharePointOpts struct {
	Sites        []string // site IDs to scope the selection to
	LibraryItems []string // individual library item names or ShortRefs
	LibraryPaths []string // library folder paths (contains or /prefix match)
	// Populated tracks which flags were explicitly set on the command line.
	Populated PopulatedFlags
}
// ValidateSharePointRestoreFlags checks common flags for correctness and
// interdependencies. A non-empty backup ID is the only current requirement.
func ValidateSharePointRestoreFlags(backupID string, opts SharePointOpts) error {
	if backupID == "" {
		return errors.New("a backup ID is required")
	}

	// if _, ok := opts.Populated[FileCreatedAfterFN]; ok && !IsValidTimeFormat(opts.FileCreatedAfter) {
	// 	return errors.New("invalid time format for created-after")
	// }

	return nil
}
// AddSharePointFilter adds the scope of the provided value to the selector's
// filter set. Empty values are ignored so unset flags add no filter.
func AddSharePointFilter(
	sel *selectors.SharePointRestore,
	v string,
	f func(string) []selectors.SharePointScope,
) {
	if v == "" {
		return
	}

	sel.Filter(f(v))
}
// IncludeSharePointRestoreDataSelectors builds the common data-selector
// inclusions for SharePoint commands. Library paths are split into
// contains-style and prefix-style matchers, each producing its own inclusion.
func IncludeSharePointRestoreDataSelectors(
	sel *selectors.SharePointRestore,
	opts SharePointOpts,
) {
	lp, li := len(opts.LibraryPaths), len(opts.LibraryItems)

	// only use the inclusion if either a path or item name is specified.
	if lp+li == 0 {
		return
	}

	// note: a second `if lp+li == 0` branch (adding a Sites inclusion) was
	// removed here; it was unreachable after the early return above.

	if len(opts.Sites) == 0 {
		opts.Sites = selectors.Any()
	}

	opts.LibraryPaths = trimFolderSlash(opts.LibraryPaths)

	// paths without items select every item in the matched folders.
	if li == 0 {
		opts.LibraryItems = selectors.Any()
	}

	containsFolders, prefixFolders := splitFoldersIntoContainsAndPrefix(opts.LibraryPaths)

	if len(containsFolders) > 0 {
		sel.Include(sel.LibraryItems(opts.Sites, containsFolders, opts.LibraryItems))
	}

	if len(prefixFolders) > 0 {
		sel.Include(sel.LibraryItems(opts.Sites, prefixFolders, opts.LibraryItems, selectors.PrefixMatch()))
	}
}
// FilterSharePointRestoreInfoSelectors builds the common info-selector filters.
// Currently a no-op: the only planned filter (created-after) remains commented
// out pending info-filter support for SharePoint.
func FilterSharePointRestoreInfoSelectors(
	sel *selectors.SharePointRestore,
	opts SharePointOpts,
) {
	// AddSharePointFilter(sel, opts.FileCreatedAfter, sel.CreatedAfter)
}

View File

@ -0,0 +1,99 @@
package utils_test
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/selectors"
)
// SharePointUtilsSuite holds unit tests for the sharepoint CLI selector
// utilities.
type SharePointUtilsSuite struct {
	suite.Suite
}

// TestSharePointUtilsSuite is the go-test entry point for SharePointUtilsSuite.
func TestSharePointUtilsSuite(t *testing.T) {
	suite.Run(t, new(SharePointUtilsSuite))
}
func (suite *ExchangeUtilsSuite) TestIncludeSharePointRestoreDataSelectors() {
var (
empty = []string{}
single = []string{"single"}
multi = []string{"more", "than", "one"}
containsOnly = []string{"contains"}
prefixOnly = []string{"/prefix"}
containsAndPrefix = []string{"contains", "/prefix"}
)
table := []struct {
name string
opts utils.SharePointOpts
expectIncludeLen int
}{
{
name: "no inputs",
opts: utils.SharePointOpts{
Sites: empty,
LibraryPaths: empty,
LibraryItems: empty,
},
expectIncludeLen: 0,
},
{
name: "single inputs",
opts: utils.SharePointOpts{
Sites: single,
LibraryPaths: single,
LibraryItems: single,
},
expectIncludeLen: 1,
},
{
name: "multi inputs",
opts: utils.SharePointOpts{
Sites: multi,
LibraryPaths: multi,
LibraryItems: multi,
},
expectIncludeLen: 1,
},
{
name: "library contains",
opts: utils.SharePointOpts{
Sites: empty,
LibraryPaths: containsOnly,
LibraryItems: empty,
},
expectIncludeLen: 1,
},
{
name: "library prefixes",
opts: utils.SharePointOpts{
Sites: empty,
LibraryPaths: prefixOnly,
LibraryItems: empty,
},
expectIncludeLen: 1,
},
{
name: "library prefixes and contains",
opts: utils.SharePointOpts{
Sites: empty,
LibraryPaths: containsAndPrefix,
LibraryItems: empty,
},
expectIncludeLen: 2,
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
sel := selectors.NewSharePointRestore()
// no return, mutates sel as a side effect
utils.IncludeSharePointRestoreDataSelectors(sel, test.opts)
assert.Len(t, sel.Includes, test.expectIncludeLen)
})
}
}

View File

@ -395,6 +395,94 @@ var (
} }
) )
// SharePointOptionsTest pairs a set of sharepoint CLI flag options with the
// details entries a lookup using those options is expected to return.
type SharePointOptionsTest struct {
	Name         string                // subtest name
	Opts         utils.SharePointOpts  // flag inputs under test
	BackupGetter *MockBackupGetter     // nil instance returns the full testdata set
	Expected     []details.DetailsEntry // entries the reduction should select
}
var (
	// BadSharePointOptionsFormats contains SharePointOpts with flags that should
	// cause errors about the format of the input flag. Mocks are configured to
	// allow the system to run if it doesn't throw an error on formatting.
	// Currently empty: the time-based cases below are commented out pending
	// created-before/-after filter support for SharePoint.
	BadSharePointOptionsFormats = []SharePointOptionsTest{
		// {
		// 	Name: "BadFileCreatedBefore",
		// 	Opts: utils.OneDriveOpts{
		// 		FileCreatedBefore: "foo",
		// 		Populated: utils.PopulatedFlags{
		// 			utils.FileCreatedBeforeFN: struct{}{},
		// 		},
		// 	},
		// },
		// {
		// 	Name: "EmptyFileCreatedBefore",
		// 	Opts: utils.OneDriveOpts{
		// 		FileCreatedBefore: "",
		// 		Populated: utils.PopulatedFlags{
		// 			utils.FileCreatedBeforeFN: struct{}{},
		// 		},
		// 	},
		// },
	}

	// SharePointOptionDetailLookups contains flag inputs and expected results for
	// some choice input patterns. This set is not exhaustive. All inputs and
	// outputs are according to the data laid out in selectors/testdata. Mocks are
	// configured to return the full dataset listed in selectors/testdata.
	SharePointOptionDetailLookups = []SharePointOptionsTest{
		{
			Name:     "AllLibraryItems",
			Expected: testdata.SharePointLibraryItems,
			Opts: utils.SharePointOpts{
				LibraryPaths: selectors.Any(),
			},
		},
		{
			Name:     "FolderPrefixMatch",
			Expected: testdata.SharePointLibraryItems,
			Opts: utils.SharePointOpts{
				LibraryPaths: []string{testdata.SharePointLibraryFolder},
			},
		},
		// fix: a byte-for-byte duplicate of the following case was removed;
		// it added no coverage beyond this entry.
		{
			Name:     "FolderPrefixMatchTrailingSlash",
			Expected: testdata.SharePointLibraryItems,
			Opts: utils.SharePointOpts{
				LibraryPaths: []string{testdata.SharePointLibraryFolder + "/"},
			},
		},
		{
			Name: "ShortRef",
			Expected: []details.DetailsEntry{
				testdata.SharePointLibraryItems[0],
				testdata.SharePointLibraryItems[1],
			},
			Opts: utils.SharePointOpts{
				LibraryItems: []string{
					testdata.SharePointLibraryItems[0].ShortRef,
					testdata.SharePointLibraryItems[1].ShortRef,
				},
			},
		},
		// {
		// 	Name:     "CreatedBefore",
		// 	Expected: []details.DetailsEntry{testdata.SharePointLibraryItems[1]},
		// 	Opts: utils.SharePointOpts{
		// 		FileCreatedBefore: common.FormatTime(testdata.Time1.Add(time.Second)),
		// 	},
		// },
	}
)
// MockBackupGetter implements the repo.BackupGetter interface and returns // MockBackupGetter implements the repo.BackupGetter interface and returns
// (selectors/testdata.GetDetailsSet(), nil, nil) when BackupDetails is called // (selectors/testdata.GetDetailsSet(), nil, nil) when BackupDetails is called
// on the nil instance. If an instance is given or Backups is called returns an // on the nil instance. If an instance is given or Backups is called returns an
@ -408,7 +496,14 @@ func (MockBackupGetter) Backup(
return nil, errors.New("unexpected call to mock") return nil, errors.New("unexpected call to mock")
} }
func (MockBackupGetter) Backups(context.Context, ...store.FilterOption) ([]*backup.Backup, error) { func (MockBackupGetter) Backups(context.Context, []model.StableID) ([]*backup.Backup, error) {
return nil, errors.New("unexpected call to mock")
}
func (MockBackupGetter) BackupsByTag(
context.Context,
...store.FilterOption,
) ([]*backup.Backup, error) {
return nil, errors.New("unexpected call to mock") return nil, errors.New("unexpected call to mock")
} }

View File

@ -17,6 +17,7 @@ import (
const ( const (
BackupFN = "backup" BackupFN = "backup"
DataFN = "data" DataFN = "data"
SiteFN = "site"
UserFN = "user" UserFN = "user"
) )
@ -57,10 +58,32 @@ func HasNoFlagsAndShownHelp(cmd *cobra.Command) bool {
return false return false
} }
type cmdCfg struct {
hidden bool
}
type cmdOpt func(*cmdCfg)
func (cc *cmdCfg) populate(opts ...cmdOpt) {
for _, opt := range opts {
opt(cc)
}
}
func HideCommand() cmdOpt {
return func(cc *cmdCfg) {
cc.hidden = true
}
}
// AddCommand adds a clone of the subCommand to the parent, // AddCommand adds a clone of the subCommand to the parent,
// and returns both the clone and its pflags. // and returns both the clone and its pflags.
func AddCommand(parent, c *cobra.Command) (*cobra.Command, *pflag.FlagSet) { func AddCommand(parent, c *cobra.Command, opts ...cmdOpt) (*cobra.Command, *pflag.FlagSet) {
cc := &cmdCfg{}
cc.populate(opts...)
parent.AddCommand(c) parent.AddCommand(c)
c.Hidden = cc.hidden
c.Flags().SortFlags = false c.Flags().SortFlags = false

View File

@ -4,7 +4,7 @@ go 1.18
require ( require (
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.1.0 github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.1.0
github.com/aws/aws-sdk-go v1.44.132 github.com/aws/aws-sdk-go v1.44.145
github.com/aws/aws-xray-sdk-go v1.7.1 github.com/aws/aws-xray-sdk-go v1.7.1
github.com/google/uuid v1.3.0 github.com/google/uuid v1.3.0
github.com/hashicorp/go-multierror v1.1.1 github.com/hashicorp/go-multierror v1.1.1

View File

@ -58,8 +58,8 @@ github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRF
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY= github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY=
github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/aws/aws-sdk-go v1.44.132 h1:+IjL9VoR0OXScQ5gyme9xjcolwUkd3uaH144f4Ao+4s= github.com/aws/aws-sdk-go v1.44.145 h1:KMVRrIyjBsNz3xGPuHIRnhIuKlb5h3Ii5e5jbi3cgnc=
github.com/aws/aws-sdk-go v1.44.132/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= github.com/aws/aws-sdk-go v1.44.145/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
github.com/aws/aws-xray-sdk-go v1.7.1 h1:mji68Db4oWipJ6SiQQuFiWBYWI8sUvPfcv86mLFVKHQ= github.com/aws/aws-xray-sdk-go v1.7.1 h1:mji68Db4oWipJ6SiQQuFiWBYWI8sUvPfcv86mLFVKHQ=
github.com/aws/aws-xray-sdk-go v1.7.1/go.mod h1:aNQo1pqFaaeKaf18CSWCkoaXUI+PQZ7yfNE28YyE2CI= github.com/aws/aws-xray-sdk-go v1.7.1/go.mod h1:aNQo1pqFaaeKaf18CSWCkoaXUI+PQZ7yfNE28YyE2CI=
github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8=

View File

@ -89,8 +89,6 @@ func (suite *CommonTimeUnitSuite) TestExtractTime() {
parseT("2006-01-02T03:00:04-01:00"), parseT("2006-01-02T03:00:04-01:00"),
} }
type timeFormatter func(time.Time) string
formats := []common.TimeFormat{ formats := []common.TimeFormat{
common.ClippedSimple, common.ClippedSimple,
common.ClippedSimpleOneDrive, common.ClippedSimpleOneDrive,

View File

@ -29,7 +29,7 @@ func (gc *GraphConnector) DataCollections(ctx context.Context, sels selectors.Se
ctx, end := D.Span(ctx, "gc:dataCollections", D.Index("service", sels.Service.String())) ctx, end := D.Span(ctx, "gc:dataCollections", D.Index("service", sels.Service.String()))
defer end() defer end()
err := verifyBackupInputs(sels, gc.Users) err := verifyBackupInputs(sels, gc.GetUsers(), gc.GetSiteIds())
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -40,52 +40,59 @@ func (gc *GraphConnector) DataCollections(ctx context.Context, sels selectors.Se
case selectors.ServiceOneDrive: case selectors.ServiceOneDrive:
return gc.OneDriveDataCollections(ctx, sels) return gc.OneDriveDataCollections(ctx, sels)
case selectors.ServiceSharePoint: case selectors.ServiceSharePoint:
return gc.SharePointDataCollections(ctx, sels) colls, err := sharepoint.DataCollections(ctx, sels, gc.GetSiteIds(), gc.credentials.AzureTenantID, gc)
if err != nil {
return nil, err
}
for range colls {
gc.incrementAwaitingMessages()
}
return colls, nil
default: default:
return nil, errors.Errorf("service %s not supported", sels) return nil, errors.Errorf("service %s not supported", sels.Service.String())
} }
} }
func verifyBackupInputs(sel selectors.Selector, mapOfUsers map[string]string) error { func verifyBackupInputs(sels selectors.Selector, userPNs, siteIDs []string) error {
var personnel []string var ids []string
// retrieve users from selectors resourceOwners, err := sels.ResourceOwners()
switch sel.Service {
case selectors.ServiceExchange:
backup, err := sel.ToExchangeBackup()
if err != nil { if err != nil {
return err return errors.Wrap(err, "invalid backup inputs")
} }
for _, scope := range backup.Scopes() { switch sels.Service {
temp := scope.Get(selectors.ExchangeUser) case selectors.ServiceExchange, selectors.ServiceOneDrive:
personnel = append(personnel, temp...) ids = userPNs
}
case selectors.ServiceOneDrive: case selectors.ServiceSharePoint:
backup, err := sel.ToOneDriveBackup() ids = siteIDs
if err != nil {
return err
} }
for _, user := range backup.Scopes() { // verify resourceOwners
temp := user.Get(selectors.OneDriveUser) normROs := map[string]struct{}{}
personnel = append(personnel, temp...)
for _, id := range ids {
normROs[strings.ToLower(id)] = struct{}{}
} }
default: for _, ro := range resourceOwners.Includes {
return errors.New("service %s not supported") if _, ok := normROs[strings.ToLower(ro)]; !ok {
return fmt.Errorf("included resource owner %s not found within tenant", ro)
}
} }
// verify personnel for _, ro := range resourceOwners.Excludes {
normUsers := map[string]struct{}{} if _, ok := normROs[strings.ToLower(ro)]; !ok {
return fmt.Errorf("excluded resource owner %s not found within tenant", ro)
for k := range mapOfUsers { }
normUsers[strings.ToLower(k)] = struct{}{}
} }
for _, user := range personnel { for _, ro := range resourceOwners.Filters {
if _, ok := normUsers[strings.ToLower(user)]; !ok { if _, ok := normROs[strings.ToLower(ro)]; !ok {
return fmt.Errorf("%s user not found within tenant", user) return fmt.Errorf("filtered resource owner %s not found within tenant", ro)
} }
} }
@ -193,6 +200,18 @@ func (gc *GraphConnector) ExchangeDataCollection(
// OneDrive // OneDrive
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
type odFolderMatcher struct {
scope selectors.OneDriveScope
}
func (fm odFolderMatcher) IsAny() bool {
return fm.scope.IsAny(selectors.OneDriveFolder)
}
func (fm odFolderMatcher) Matches(dir string) bool {
return fm.scope.Matches(selectors.OneDriveFolder, dir)
}
// OneDriveDataCollections returns a set of DataCollection which represents the OneDrive data // OneDriveDataCollections returns a set of DataCollection which represents the OneDrive data
// for the specified user // for the specified user
func (gc *GraphConnector) OneDriveDataCollections( func (gc *GraphConnector) OneDriveDataCollections(
@ -218,7 +237,8 @@ func (gc *GraphConnector) OneDriveDataCollections(
odcs, err := onedrive.NewCollections( odcs, err := onedrive.NewCollections(
gc.credentials.AzureTenantID, gc.credentials.AzureTenantID,
user, user,
scope, onedrive.OneDriveSource,
odFolderMatcher{scope},
&gc.graphService, &gc.graphService,
gc.UpdateStatus, gc.UpdateStatus,
).Get(ctx) ).Get(ctx)
@ -236,104 +256,3 @@ func (gc *GraphConnector) OneDriveDataCollections(
return collections, errs return collections, errs
} }
// ---------------------------------------------------------------------------
// SharePoint
// ---------------------------------------------------------------------------
// createSharePointCollections - utility function that retrieves M365
// IDs through Microsoft Graph API. The selectors.SharePointScope
// determines the type of collections that are retrieved.
func (gc *GraphConnector) createSharePointCollections(
ctx context.Context,
scope selectors.SharePointScope,
) ([]*sharepoint.Collection, error) {
var (
errs *multierror.Error
sites = scope.Get(selectors.SharePointSite)
colls = make([]*sharepoint.Collection, 0)
)
// Create collection of ExchangeDataCollection
for _, site := range sites {
collections := make(map[string]*sharepoint.Collection)
qp := graph.QueryParams{
Category: scope.Category().PathType(),
ResourceOwner: site,
FailFast: gc.failFast,
Credentials: gc.credentials,
}
foldersComplete, closer := observe.MessageWithCompletion(fmt.Sprintf("∙ %s - %s:", qp.Category, site))
defer closer()
defer close(foldersComplete)
// resolver, err := exchange.PopulateExchangeContainerResolver(
// ctx,
// qp,
// qp.Scope.Category().PathType(),
// )
// if err != nil {
// return nil, errors.Wrap(err, "getting folder cache")
// }
// err = sharepoint.FilterContainersAndFillCollections(
// ctx,
// qp,
// collections,
// gc.UpdateStatus,
// resolver)
// if err != nil {
// return nil, errors.Wrap(err, "filling collections")
// }
foldersComplete <- struct{}{}
for _, collection := range collections {
gc.incrementAwaitingMessages()
colls = append(colls, collection)
}
}
return colls, errs.ErrorOrNil()
}
// SharePointDataCollections returns a set of DataCollection which represents the SharePoint data
// for the specified user
func (gc *GraphConnector) SharePointDataCollections(
ctx context.Context,
selector selectors.Selector,
) ([]data.Collection, error) {
b, err := selector.ToSharePointBackup()
if err != nil {
return nil, errors.Wrap(err, "sharePointDataCollection: parsing selector")
}
var (
scopes = b.DiscreteScopes(gc.GetSites())
collections = []data.Collection{}
errs error
)
// for each scope that includes oneDrive items, get all
for _, scope := range scopes {
// Creates a map of collections based on scope
dcs, err := gc.createSharePointCollections(ctx, scope)
if err != nil {
return nil, support.WrapAndAppend(scope.Get(selectors.SharePointSite)[0], err, errs)
}
for _, collection := range dcs {
collections = append(collections, collection)
}
}
for range collections {
gc.incrementAwaitingMessages()
}
return collections, errs
}

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/exchange" "github.com/alcionai/corso/src/internal/connector/exchange"
"github.com/alcionai/corso/src/internal/connector/sharepoint"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
@ -162,16 +163,16 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestSharePointDataCollecti
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
connector := loadConnector(ctx, suite.T(), Users) connector := loadConnector(ctx, suite.T(), Sites)
tests := []struct { tests := []struct {
name string name string
getSelector func(t *testing.T) selectors.Selector getSelector func(t *testing.T) selectors.Selector
}{ }{
{ {
name: "Items - TODO: actual sharepoint categories", name: "Libraries",
getSelector: func(t *testing.T) selectors.Selector { getSelector: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup() sel := selectors.NewSharePointBackup()
sel.Include(sel.Folders([]string{suite.site}, selectors.Any())) sel.Include(sel.Libraries([]string{suite.site}, selectors.Any()))
return sel.Selector return sel.Selector
}, },
@ -180,24 +181,31 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestSharePointDataCollecti
for _, test := range tests { for _, test := range tests {
suite.T().Run(test.name, func(t *testing.T) { suite.T().Run(test.name, func(t *testing.T) {
_, err := connector.SharePointDataCollections(ctx, test.getSelector(t)) collection, err := sharepoint.DataCollections(
ctx,
test.getSelector(t),
[]string{suite.site},
connector.credentials.AzureTenantID,
connector)
require.NoError(t, err) require.NoError(t, err)
// TODO: Implementation // we don't know an exact count of drives this will produce,
// assert.Equal(t, len(collection), 1) // but it should be more than one.
assert.Less(t, 1, len(collection))
// channel := collection[0].Items() // the test only reads the first collection
connector.incrementAwaitingMessages()
// for object := range channel { for object := range collection[0].Items() {
// buf := &bytes.Buffer{} buf := &bytes.Buffer{}
// _, err := buf.ReadFrom(object.ToReader()) _, err := buf.ReadFrom(object.ToReader())
// assert.NoError(t, err, "received a buf.Read error") assert.NoError(t, err, "received a buf.Read error")
// } }
// status := connector.AwaitStatus() status := connector.AwaitStatus()
// assert.NotZero(t, status.Successful) assert.NotZero(t, status.Successful)
// t.Log(status.String()) t.Log(status.String())
}) })
} }
} }
@ -274,10 +282,8 @@ func (suite *ConnectorCreateExchangeCollectionIntegrationSuite) TestMailFetch()
require.NotEmpty(t, c.FullPath().Folder()) require.NotEmpty(t, c.FullPath().Folder())
folder := c.FullPath().Folder() folder := c.FullPath().Folder()
if _, ok := test.folderNames[folder]; ok {
delete(test.folderNames, folder) delete(test.folderNames, folder)
} }
}
assert.Empty(t, test.folderNames) assert.Empty(t, test.folderNames)
}) })
@ -361,7 +367,7 @@ func (suite *ConnectorCreateExchangeCollectionIntegrationSuite) TestContactSeria
assert.NotZero(t, read) assert.NotZero(t, read)
contact, err := support.CreateContactFromBytes(buf.Bytes()) contact, err := support.CreateContactFromBytes(buf.Bytes())
assert.NotNil(t, contact) assert.NotNil(t, contact)
assert.NoError(t, err, "error on converting contact bytes: "+string(buf.Bytes())) assert.NoError(t, err, "error on converting contact bytes: "+buf.String())
count++ count++
} }
assert.NotZero(t, count) assert.NotZero(t, count)
@ -426,7 +432,7 @@ func (suite *ConnectorCreateExchangeCollectionIntegrationSuite) TestEventsSerial
assert.NotZero(t, read) assert.NotZero(t, read)
event, err := support.CreateEventFromBytes(buf.Bytes()) event, err := support.CreateEventFromBytes(buf.Bytes())
assert.NotNil(t, event) assert.NotNil(t, event)
assert.NoError(t, err, "experienced error parsing event bytes: "+string(buf.Bytes())) assert.NoError(t, err, "experienced error parsing event bytes: "+buf.String())
} }
status := connector.AwaitStatus() status := connector.AwaitStatus()
@ -499,16 +505,17 @@ func (suite *ConnectorCreateSharePointCollectionIntegrationSuite) TestCreateShar
var ( var (
t = suite.T() t = suite.T()
userID = tester.M365UserID(t) siteID = tester.M365SiteID(t)
gc = loadConnector(ctx, t, Sites)
sel = selectors.NewSharePointBackup()
) )
gc := loadConnector(ctx, t, Sites) sel.Include(sel.Libraries(
scope := selectors.NewSharePointBackup().Folders( []string{siteID},
[]string{userID},
[]string{"foo"}, []string{"foo"},
selectors.PrefixMatch(), selectors.PrefixMatch(),
)[0] ))
_, err := gc.createSharePointCollections(ctx, scope) _, err := gc.DataCollections(ctx, sel.Selector)
require.NoError(t, err) require.NoError(t, err)
} }

View File

@ -3,8 +3,7 @@ package exchange
import ( import (
"context" "context"
msgraphgocore "github.com/microsoftgraph/msgraph-sdk-go-core" cf "github.com/microsoftgraph/msgraph-sdk-go/users/item/contactfolders/item/childfolders"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
@ -70,43 +69,39 @@ func (cfc *contactFolderCache) Populate(
} }
var ( var (
containers = make(map[string]graph.Container)
errs error errs error
errUpdater = func(s string, e error) { options, err = optionsForContactChildFolders([]string{"displayName", "parentFolderId"})
errs = support.WrapAndAppend(s, e, errs)
}
) )
query, err := cfc. if err != nil {
return errors.Wrap(err, "contact cache resolver option")
}
builder := cfc.
gs.Client(). gs.Client().
UsersById(cfc.userID). UsersById(cfc.userID).
ContactFoldersById(baseID). ContactFoldersById(baseID).
ChildFolders(). ChildFolders()
Get(ctx, nil)
for {
resp, err := builder.Get(ctx, options)
if err != nil { if err != nil {
return errors.Wrap(err, support.ConnectorStackErrorTrace(err)) return errors.Wrap(err, support.ConnectorStackErrorTrace(err))
} }
iter, err := msgraphgocore.NewPageIterator(query, cfc.gs.Adapter(), for _, fold := range resp.GetValue() {
models.CreateContactFolderCollectionResponseFromDiscriminatorValue) if err := checkIDAndName(fold); err != nil {
if err != nil { errs = support.WrapAndAppend(
return errors.Wrap(err, support.ConnectorStackErrorTrace(err)) "adding folder to contact resolver",
err,
errs,
)
continue
} }
cb := IterativeCollectContactContainers(containers,
"",
errUpdater)
if err := iter.Iterate(ctx, cb); err != nil {
return errors.Wrap(err, support.ConnectorStackErrorTrace(err))
}
if errs != nil {
return errs
}
for _, entry := range containers {
temp := cacheFolder{ temp := cacheFolder{
Container: entry, Container: fold,
} }
err = cfc.addFolder(temp) err = cfc.addFolder(temp)
@ -118,6 +113,13 @@ func (cfc *contactFolderCache) Populate(
} }
} }
if resp.GetOdataNextLink() == nil {
break
}
builder = cf.NewChildFoldersRequestBuilder(*resp.GetOdataNextLink(), cfc.gs.Adapter())
}
if err := cfc.populatePaths(ctx); err != nil { if err := cfc.populatePaths(ctx); err != nil {
errs = support.WrapAndAppend( errs = support.WrapAndAppend(
"contacts resolver", "contacts resolver",

View File

@ -0,0 +1,124 @@
package exchange
import (
"context"
abs "github.com/microsoft/kiota-abstractions-go"
"github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
mscontactdelta "github.com/microsoftgraph/msgraph-sdk-go/users/item/contactfolders/item/contacts/delta"
msmaildelta "github.com/microsoftgraph/msgraph-sdk-go/users/item/mailfolders/item/messages/delta"
)
//nolint:lll
const (
mailURLTemplate = "{+baseurl}/users/{user%2Did}/mailFolders/{mailFolder%2Did}/messages/microsoft.graph.delta(){?%24top,%24skip,%24search,%24filter,%24count,%24select,%24orderby}"
contactsURLTemplate = "{+baseurl}/users/{user%2Did}/contactFolders/{contactFolder%2Did}/contacts/microsoft.graph.delta(){?%24top,%24skip,%24search,%24filter,%24count,%24select,%24orderby}"
)
// The following functions are based off the code in v0.41.0 of msgraph-sdk-go
// for sending delta requests with query parameters.
//nolint:unused
func createGetRequestInformationWithRequestConfiguration(
baseRequestInfoFunc func() (*abs.RequestInformation, error),
requestConfig *DeltaRequestBuilderGetRequestConfiguration,
template string,
) (*abs.RequestInformation, error) {
requestInfo, err := baseRequestInfoFunc()
if err != nil {
return nil, err
}
requestInfo.UrlTemplate = template
if requestConfig != nil {
if requestConfig.QueryParameters != nil {
requestInfo.AddQueryParameters(*(requestConfig.QueryParameters))
}
requestInfo.AddRequestHeaders(requestConfig.Headers)
requestInfo.AddRequestOptions(requestConfig.Options)
}
return requestInfo, nil
}
//nolint:unused
func sendMessagesDeltaGet(
ctx context.Context,
m *msmaildelta.DeltaRequestBuilder,
requestConfiguration *DeltaRequestBuilderGetRequestConfiguration,
adapter abs.RequestAdapter,
) (msmaildelta.DeltaResponseable, error) {
requestInfo, err := createGetRequestInformationWithRequestConfiguration(
func() (*abs.RequestInformation, error) {
return m.CreateGetRequestInformationWithRequestConfiguration(nil)
},
requestConfiguration,
mailURLTemplate,
)
if err != nil {
return nil, err
}
errorMapping := abs.ErrorMappings{
"4XX": odataerrors.CreateODataErrorFromDiscriminatorValue,
"5XX": odataerrors.CreateODataErrorFromDiscriminatorValue,
}
res, err := adapter.SendAsync(
ctx,
requestInfo,
msmaildelta.CreateDeltaResponseFromDiscriminatorValue,
errorMapping,
)
if err != nil {
return nil, err
}
if res == nil {
return nil, nil
}
return res.(msmaildelta.DeltaResponseable), nil
}
//nolint:unused
func sendContactsDeltaGet(
ctx context.Context,
m *mscontactdelta.DeltaRequestBuilder,
requestConfiguration *DeltaRequestBuilderGetRequestConfiguration,
adapter abs.RequestAdapter,
) (mscontactdelta.DeltaResponseable, error) {
requestInfo, err := createGetRequestInformationWithRequestConfiguration(
func() (*abs.RequestInformation, error) {
return m.CreateGetRequestInformationWithRequestConfiguration(nil)
},
requestConfiguration,
contactsURLTemplate,
)
if err != nil {
return nil, err
}
errorMapping := abs.ErrorMappings{
"4XX": odataerrors.CreateODataErrorFromDiscriminatorValue,
"5XX": odataerrors.CreateODataErrorFromDiscriminatorValue,
}
res, err := adapter.SendAsync(
ctx,
requestInfo,
mscontactdelta.CreateDeltaResponseFromDiscriminatorValue,
errorMapping,
)
if err != nil {
return nil, err
}
if res == nil {
return nil, nil
}
return res.(mscontactdelta.DeltaResponseable), nil
}

View File

@ -26,7 +26,9 @@ func TestEventSuite(t *testing.T) {
// TestEventInfo verifies that searchable event metadata // TestEventInfo verifies that searchable event metadata
// can be properly retrieved from a models.Eventable object // can be properly retrieved from a models.Eventable object
func (suite *EventSuite) TestEventInfo() { func (suite *EventSuite) TestEventInfo() {
initial := time.Now() // Exchange stores start/end times in UTC and the below compares hours
// directly so we need to "normalize" the timezone here.
initial := time.Now().UTC()
now := common.FormatTimeWith(initial, common.M365DateTimeTimeZone) now := common.FormatTimeWith(initial, common.M365DateTimeTimeZone)
suite.T().Logf("Initial: %v\nFormatted: %v\n", initial, now) suite.T().Logf("Initial: %v\nFormatted: %v\n", initial, now)

View File

@ -3,6 +3,7 @@ package exchange
import ( import (
"fmt" "fmt"
abs "github.com/microsoft/kiota-abstractions-go"
msuser "github.com/microsoftgraph/msgraph-sdk-go/users" msuser "github.com/microsoftgraph/msgraph-sdk-go/users"
mscalendars "github.com/microsoftgraph/msgraph-sdk-go/users/item/calendars" mscalendars "github.com/microsoftgraph/msgraph-sdk-go/users/item/calendars"
mscontactfolder "github.com/microsoftgraph/msgraph-sdk-go/users/item/contactfolders" mscontactfolder "github.com/microsoftgraph/msgraph-sdk-go/users/item/contactfolders"
@ -125,6 +126,24 @@ func CategoryToOptionIdentifier(category path.CategoryType) optionIdentifier {
// which reduces the overall latency of complex calls // which reduces the overall latency of complex calls
// ----------------------------------------------------------------------- // -----------------------------------------------------------------------
// Delta requests for mail and contacts have the same parameters and config
// structs.
type DeltaRequestBuilderGetQueryParameters struct {
Count *bool `uriparametername:"%24count"`
Filter *string `uriparametername:"%24filter"`
Orderby []string `uriparametername:"%24orderby"`
Search *string `uriparametername:"%24search"`
Select []string `uriparametername:"%24select"`
Skip *int32 `uriparametername:"%24skip"`
Top *int32 `uriparametername:"%24top"`
}
type DeltaRequestBuilderGetRequestConfiguration struct {
Headers map[string]string
Options []abs.RequestOption
QueryParameters *DeltaRequestBuilderGetQueryParameters
}
func optionsForFolderMessages(moreOps []string) (*msmfmessage.MessagesRequestBuilderGetRequestConfiguration, error) { func optionsForFolderMessages(moreOps []string) (*msmfmessage.MessagesRequestBuilderGetRequestConfiguration, error) {
selecting, err := buildOptions(moreOps, messages) selecting, err := buildOptions(moreOps, messages)
if err != nil { if err != nil {

View File

@ -81,6 +81,8 @@ func StringToPathCategory(input string) path.CategoryType {
return path.EventsCategory return path.EventsCategory
case "files": case "files":
return path.FilesCategory return path.FilesCategory
case "libraries":
return path.LibrariesCategory
default: default:
return path.UnknownCategory return path.UnknownCategory
} }

View File

@ -104,6 +104,10 @@ func NewGraphConnector(ctx context.Context, acct account.Account, r resource) (*
gc.graphService = *aService gc.graphService = *aService
// TODO(ashmrtn): When selectors only encapsulate a single resource owner that
// is not a wildcard don't populate users or sites when making the connector.
// For now this keeps things functioning if callers do pass in a selector like
// "*" instead of.
if r == AllResources || r == Users { if r == AllResources || r == Users {
if err = gc.setTenantUsers(ctx); err != nil { if err = gc.setTenantUsers(ctx); err != nil {
return nil, errors.Wrap(err, "retrieving tenant user list") return nil, errors.Wrap(err, "retrieving tenant user list")
@ -220,6 +224,8 @@ func (gc *GraphConnector) setTenantSites(ctx context.Context) error {
var errKnownSkippableCase = errors.New("case is known and skippable") var errKnownSkippableCase = errors.New("case is known and skippable")
const personalSitePath = "sharepoint.com/personal/"
// Transforms an interface{} into a key,value pair representing // Transforms an interface{} into a key,value pair representing
// siteName:siteID. // siteName:siteID.
func identifySite(item any) (string, string, error) { func identifySite(item any) (string, string, error) {
@ -237,6 +243,12 @@ func identifySite(item any) (string, string, error) {
return "", "", errors.Errorf("no name for Site: %s", *m.GetId()) return "", "", errors.Errorf("no name for Site: %s", *m.GetId())
} }
// personal (ie: oneDrive) sites have to be filtered out server-side.
url := m.GetWebUrl()
if url != nil && strings.Contains(*url, personalSitePath) {
return "", "", errKnownSkippableCase
}
return *m.GetName(), *m.GetId(), nil return *m.GetName(), *m.GetId(), nil
} }

View File

@ -18,7 +18,7 @@ import (
// --------------------------------------------------------------- // ---------------------------------------------------------------
// Disconnected Test Section // Disconnected Test Section
// ------------------------- // ---------------------------------------------------------------
type DisconnectedGraphConnectorSuite struct { type DisconnectedGraphConnectorSuite struct {
suite.Suite suite.Suite
} }
@ -206,12 +206,13 @@ func (suite *DisconnectedGraphConnectorSuite) TestRestoreFailsBadService() {
} }
func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs() { func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs() {
users := make(map[string]string) users := []string{
users["elliotReid@someHospital.org"] = "" "elliotReid@someHospital.org",
users["chrisTurk@someHospital.org"] = "" "chrisTurk@someHospital.org",
users["carlaEspinosa@someHospital.org"] = "" "carlaEspinosa@someHospital.org",
users["bobKelso@someHospital.org"] = "" "bobKelso@someHospital.org",
users["johnDorian@someHospital.org"] = "" "johnDorian@someHospital.org",
}
tests := []struct { tests := []struct {
name string name string
@ -219,12 +220,10 @@ func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs() {
checkError assert.ErrorAssertionFunc checkError assert.ErrorAssertionFunc
}{ }{
{ {
name: "Invalid User", name: "No scopes",
checkError: assert.Error, checkError: assert.NoError,
getSelector: func(t *testing.T) selectors.Selector { getSelector: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup() return selectors.NewExchangeBackup().Selector
sel.Include(sel.Folders([]string{"foo@SomeCompany.org"}, selectors.Any()))
return sel.Selector
}, },
}, },
{ {
@ -260,7 +259,108 @@ func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs() {
for _, test := range tests { for _, test := range tests {
suite.T().Run(test.name, func(t *testing.T) { suite.T().Run(test.name, func(t *testing.T) {
err := verifyBackupInputs(test.getSelector(t), users) err := verifyBackupInputs(test.getSelector(t), users, nil)
test.checkError(t, err)
})
}
}
func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs_allServices() {
users := []string{"elliotReid@someHospital.org"}
sites := []string{"abc.site.foo", "bar.site.baz"}
tests := []struct {
name string
excludes func(t *testing.T) selectors.Selector
filters func(t *testing.T) selectors.Selector
includes func(t *testing.T) selectors.Selector
checkError assert.ErrorAssertionFunc
}{
{
name: "Valid User",
checkError: assert.NoError,
excludes: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup()
sel.Exclude(sel.Folders([]string{"elliotReid@someHospital.org"}, selectors.Any()))
return sel.Selector
},
filters: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup()
sel.Filter(sel.Folders([]string{"elliotReid@someHospital.org"}, selectors.Any()))
return sel.Selector
},
includes: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup()
sel.Include(sel.Folders([]string{"elliotReid@someHospital.org"}, selectors.Any()))
return sel.Selector
},
},
{
name: "Invalid User",
checkError: assert.Error,
excludes: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup()
sel.Exclude(sel.Folders([]string{"foo@SomeCompany.org"}, selectors.Any()))
return sel.Selector
},
filters: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup()
sel.Filter(sel.Folders([]string{"foo@SomeCompany.org"}, selectors.Any()))
return sel.Selector
},
includes: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup()
sel.Include(sel.Folders([]string{"foo@SomeCompany.org"}, selectors.Any()))
return sel.Selector
},
},
{
name: "valid sites",
checkError: assert.NoError,
excludes: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup()
sel.Exclude(sel.Sites([]string{"abc.site.foo", "bar.site.baz"}))
return sel.Selector
},
filters: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup()
sel.Filter(sel.Sites([]string{"abc.site.foo", "bar.site.baz"}))
return sel.Selector
},
includes: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup()
sel.Include(sel.Sites([]string{"abc.site.foo", "bar.site.baz"}))
return sel.Selector
},
},
{
name: "invalid sites",
checkError: assert.Error,
excludes: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup()
sel.Exclude(sel.Sites([]string{"fnords.smarfs.brawnhilda"}))
return sel.Selector
},
filters: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup()
sel.Filter(sel.Sites([]string{"fnords.smarfs.brawnhilda"}))
return sel.Selector
},
includes: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup()
sel.Include(sel.Sites([]string{"fnords.smarfs.brawnhilda"}))
return sel.Selector
},
},
}
for _, test := range tests {
suite.T().Run(test.name, func(t *testing.T) {
err := verifyBackupInputs(test.excludes(t), users, sites)
test.checkError(t, err)
err = verifyBackupInputs(test.filters(t), users, sites)
test.checkError(t, err)
err = verifyBackupInputs(test.includes(t), users, sites)
test.checkError(t, err) test.checkError(t, err)
}) })
} }

View File

@ -23,7 +23,7 @@ import (
func mustToDataLayerPath( func mustToDataLayerPath(
t *testing.T, t *testing.T,
service path.ServiceType, service path.ServiceType,
tenant, user string, tenant, resourceOwner string,
category path.CategoryType, category path.CategoryType,
elements []string, elements []string,
isItem bool, isItem bool,
@ -37,11 +37,13 @@ func mustToDataLayerPath(
switch service { switch service {
case path.ExchangeService: case path.ExchangeService:
res, err = pb.ToDataLayerExchangePathForCategory(tenant, user, category, isItem) res, err = pb.ToDataLayerExchangePathForCategory(tenant, resourceOwner, category, isItem)
case path.OneDriveService: case path.OneDriveService:
require.Equal(t, path.FilesCategory, category) require.Equal(t, path.FilesCategory, category)
res, err = pb.ToDataLayerOneDrivePath(tenant, user, isItem) res, err = pb.ToDataLayerOneDrivePath(tenant, resourceOwner, isItem)
case path.SharePointService:
res, err = pb.ToDataLayerSharePointPath(tenant, resourceOwner, category, isItem)
default: default:
err = errors.Errorf("bad service type %s", service.String()) err = errors.Errorf("bad service type %s", service.String())
@ -603,6 +605,27 @@ func compareExchangeEvent(
checkEvent(t, expectedEvent, itemEvent) checkEvent(t, expectedEvent, itemEvent)
} }
func compareOneDriveItem(
t *testing.T,
expected map[string][]byte,
item data.Stream,
) {
expectedData := expected[item.UUID()]
if !assert.NotNil(t, expectedData, "unexpected file with name %s", item.UUID) {
return
}
// OneDrive items are just byte buffers of the data. Nothing special to
// interpret. May need to do chunked comparisons in the future if we test
// large item equality.
buf, err := io.ReadAll(item.ToReader())
if !assert.NoError(t, err) {
return
}
assert.Equal(t, expectedData, buf)
}
func compareItem( func compareItem(
t *testing.T, t *testing.T,
expected map[string][]byte, expected map[string][]byte,
@ -622,6 +645,10 @@ func compareItem(
default: default:
assert.FailNowf(t, "unexpected Exchange category: %s", category.String()) assert.FailNowf(t, "unexpected Exchange category: %s", category.String())
} }
case path.OneDriveService:
compareOneDriveItem(t, expected, item)
default: default:
assert.FailNowf(t, "unexpected service: %s", service.String()) assert.FailNowf(t, "unexpected service: %s", service.String())
} }

View File

@ -1,14 +1,19 @@
package connector package connector
import ( import (
"context"
"strings"
"testing" "testing"
"time" "time"
"github.com/pkg/errors"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/mockconnector" "github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -61,7 +66,7 @@ func (suite *GraphConnectorIntegrationSuite) TestSetTenantUsers() {
newConnector.graphService = *service newConnector.graphService = *service
suite.Equal(len(newConnector.Users), 0) suite.Empty(len(newConnector.Users))
err = newConnector.setTenantUsers(ctx) err = newConnector.setTenantUsers(ctx)
suite.NoError(err) suite.NoError(err)
suite.Less(0, len(newConnector.Users)) suite.Less(0, len(newConnector.Users))
@ -88,6 +93,10 @@ func (suite *GraphConnectorIntegrationSuite) TestSetTenantSites() {
err = newConnector.setTenantSites(ctx) err = newConnector.setTenantSites(ctx)
suite.NoError(err) suite.NoError(err)
suite.Less(0, len(newConnector.Sites)) suite.Less(0, len(newConnector.Sites))
for _, site := range newConnector.Sites {
suite.NotContains("sharepoint.com/personal/", site)
}
} }
func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() { func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
@ -149,6 +158,31 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
// Exchange Functions // Exchange Functions
//------------------------------------------------------------- //-------------------------------------------------------------
//revive:disable:context-as-argument
func mustGetDefaultDriveID(
t *testing.T,
ctx context.Context,
service graph.Service,
userID string,
) string {
//revive:enable:context-as-argument
d, err := service.Client().UsersById(userID).Drive().Get(ctx, nil)
if err != nil {
err = errors.Wrapf(
err,
"failed to retrieve default user drive. user: %s, details: %s",
userID,
support.ConnectorStackErrorTrace(err),
)
}
require.NoError(t, err)
require.NotNil(t, d.GetId())
require.NotEmpty(t, *d.GetId())
return *d.GetId()
}
func runRestoreBackupTest( func runRestoreBackupTest(
t *testing.T, t *testing.T,
test restoreBackupInfo, test restoreBackupInfo,
@ -199,7 +233,7 @@ func runRestoreBackupTest(
assert.NotNil(t, deets) assert.NotNil(t, deets)
status := restoreGC.AwaitStatus() status := restoreGC.AwaitStatus()
runTime := time.Now().Sub(start) runTime := time.Since(start)
assert.Equal(t, totalItems, status.ObjectCount, "status.ObjectCount") assert.Equal(t, totalItems, status.ObjectCount, "status.ObjectCount")
assert.Equal(t, totalItems, status.Successful, "status.Successful") assert.Equal(t, totalItems, status.Successful, "status.Successful")
@ -234,7 +268,7 @@ func runRestoreBackupTest(
dcs, err := backupGC.DataCollections(ctx, backupSel) dcs, err := backupGC.DataCollections(ctx, backupSel)
require.NoError(t, err) require.NoError(t, err)
t.Logf("Backup enumeration complete in %v\n", time.Now().Sub(start)) t.Logf("Backup enumeration complete in %v\n", time.Since(start))
// Pull the data prior to waiting for the status as otherwise it will // Pull the data prior to waiting for the status as otherwise it will
// deadlock. // deadlock.
@ -249,6 +283,17 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
bodyText := "This email has some text. However, all the text is on the same line." bodyText := "This email has some text. However, all the text is on the same line."
subjectText := "Test message for restore" subjectText := "Test message for restore"
ctx, flush := tester.NewContext()
defer flush()
// Get the default drive ID for the test user.
driveID := mustGetDefaultDriveID(
suite.T(),
ctx,
suite.connector.Service(),
suite.user,
)
table := []restoreBackupInfo{ table := []restoreBackupInfo{
{ {
name: "EmailsWithAttachments", name: "EmailsWithAttachments",
@ -465,6 +510,95 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
// }, // },
// }, // },
// }, // },
{
name: "OneDriveMultipleFoldersAndFiles",
service: path.OneDriveService,
resource: Users,
collections: []colInfo{
{
pathElements: []string{
"drives",
driveID,
"root:",
},
category: path.FilesCategory,
items: []itemInfo{
{
name: "test-file.txt",
data: []byte(strings.Repeat("a", 33)),
lookupKey: "test-file.txt",
},
},
},
{
pathElements: []string{
"drives",
driveID,
"root:",
"folder-a",
},
category: path.FilesCategory,
items: []itemInfo{
{
name: "test-file.txt",
data: []byte(strings.Repeat("b", 65)),
lookupKey: "test-file.txt",
},
},
},
{
pathElements: []string{
"drives",
driveID,
"root:",
"folder-a",
"b",
},
category: path.FilesCategory,
items: []itemInfo{
{
name: "test-file.txt",
data: []byte(strings.Repeat("c", 129)),
lookupKey: "test-file.txt",
},
},
},
{
pathElements: []string{
"drives",
driveID,
"root:",
"folder-a",
"b",
"folder-a",
},
category: path.FilesCategory,
items: []itemInfo{
{
name: "test-file.txt",
data: []byte(strings.Repeat("d", 257)),
lookupKey: "test-file.txt",
},
},
},
{
pathElements: []string{
"drives",
driveID,
"root:",
"b",
},
category: path.FilesCategory,
items: []itemInfo{
{
name: "test-file.txt",
data: []byte(strings.Repeat("e", 257)),
lookupKey: "test-file.txt",
},
},
},
},
},
} }
for _, test := range table { for _, test := range table {

View File

@ -19,6 +19,7 @@ import (
// 6. subject // 6. subject
// 7. hasAttachments // 7. hasAttachments
// 8. attachments // 8. attachments
//
//nolint:lll //nolint:lll
const ( const (
eventTmpl = `{ eventTmpl = `{

View File

@ -123,7 +123,7 @@ func (oc *Collection) populateItems(ctx context.Context) {
folderProgress, colCloser := observe.ProgressWithCount( folderProgress, colCloser := observe.ProgressWithCount(
observe.ItemQueueMsg, observe.ItemQueueMsg,
"Folder: /"+parentPathString, "/"+parentPathString,
int64(len(oc.driveItemIDs)), int64(len(oc.driveItemIDs)),
) )
defer colCloser() defer colCloser()

View File

@ -60,7 +60,7 @@ func (suite *OneDriveCollectionSuite) TestOneDriveCollection() {
wg := sync.WaitGroup{} wg := sync.WaitGroup{}
collStatus := support.ConnectorOperationStatus{} collStatus := support.ConnectorOperationStatus{}
folderPath, err := getCanonicalPath("drive/driveID1/root:/dir1/dir2/dir3", "a-tenant", "a-user") folderPath, err := GetCanonicalPath("drive/driveID1/root:/dir1/dir2/dir3", "a-tenant", "a-user", OneDriveSource)
require.NoError(t, err) require.NoError(t, err)
driveFolderPath, err := getDriveFolderPath(folderPath) driveFolderPath, err := getDriveFolderPath(folderPath)
require.NoError(t, err) require.NoError(t, err)
@ -117,7 +117,7 @@ func (suite *OneDriveCollectionSuite) TestOneDriveCollectionReadError() {
wg := sync.WaitGroup{} wg := sync.WaitGroup{}
wg.Add(1) wg.Add(1)
folderPath, err := getCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user") folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", OneDriveSource)
require.NoError(t, err) require.NoError(t, err)
coll := NewCollection(folderPath, "fakeDriveID", suite, suite.testStatusUpdater(&wg, &collStatus)) coll := NewCollection(folderPath, "fakeDriveID", suite, suite.testStatusUpdater(&wg, &collStatus))

View File

@ -14,79 +14,172 @@ import (
"github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
) )
// Collections is used to retrieve OneDrive data for a type driveSource int
// specified user
const (
unknownDriveSource driveSource = iota
OneDriveSource
SharePointSource
)
type folderMatcher interface {
IsAny() bool
Matches(string) bool
}
// Collections is used to retrieve drive data for a
// resource owner, which can be either a user or a sharepoint site.
type Collections struct { type Collections struct {
tenant string tenant string
user string resourceOwner string
scope selectors.OneDriveScope source driveSource
// collectionMap allows lookup of the data.Collection matcher folderMatcher
// for a OneDrive folder
collectionMap map[string]data.Collection
service graph.Service service graph.Service
statusUpdater support.StatusUpdater statusUpdater support.StatusUpdater
// collectionMap allows lookup of the data.Collection
// for a OneDrive folder
CollectionMap map[string]data.Collection
// Track stats from drive enumeration. Represents the items backed up. // Track stats from drive enumeration. Represents the items backed up.
numItems int NumItems int
numFiles int NumFiles int
numContainers int NumContainers int
} }
func NewCollections( func NewCollections(
tenant string, tenant string,
user string, resourceOwner string,
scope selectors.OneDriveScope, source driveSource,
matcher folderMatcher,
service graph.Service, service graph.Service,
statusUpdater support.StatusUpdater, statusUpdater support.StatusUpdater,
) *Collections { ) *Collections {
return &Collections{ return &Collections{
tenant: tenant, tenant: tenant,
user: user, resourceOwner: resourceOwner,
scope: scope, source: source,
collectionMap: map[string]data.Collection{}, matcher: matcher,
CollectionMap: map[string]data.Collection{},
service: service, service: service,
statusUpdater: statusUpdater, statusUpdater: statusUpdater,
} }
} }
// Retrieves OneDrive data as set of `data.Collections` // Retrieves drive data as set of `data.Collections`
func (c *Collections) Get(ctx context.Context) ([]data.Collection, error) { func (c *Collections) Get(ctx context.Context) ([]data.Collection, error) {
// Enumerate drives for the specified user // Enumerate drives for the specified resourceOwner
drives, err := drives(ctx, c.service, c.user) drives, err := drives(ctx, c.service, c.resourceOwner, c.source)
if err != nil { if err != nil {
return nil, err return nil, err
} }
// Update the collection map with items from each drive // Update the collection map with items from each drive
for _, d := range drives { for _, d := range drives {
err = collectItems(ctx, c.service, *d.GetId(), c.updateCollections) err = collectItems(ctx, c.service, *d.GetId(), c.UpdateCollections)
if err != nil { if err != nil {
return nil, err return nil, err
} }
} }
observe.Message(fmt.Sprintf("Discovered %d items to backup", c.numItems)) observe.Message(fmt.Sprintf("Discovered %d items to backup", c.NumItems))
collections := make([]data.Collection, 0, len(c.collectionMap)) collections := make([]data.Collection, 0, len(c.CollectionMap))
for _, coll := range c.collectionMap { for _, coll := range c.CollectionMap {
collections = append(collections, coll) collections = append(collections, coll)
} }
return collections, nil return collections, nil
} }
func getCanonicalPath(p, tenant, user string) (path.Path, error) { // UpdateCollections initializes and adds the provided drive items to Collections
pathBuilder := path.Builder{}.Append(strings.Split(p, "/")...) // A new collection is created for every drive folder (or package)
// UpdateCollections initializes and adds the provided drive items to Collections.
// A new collection is created for every drive folder (or package) that contains
// at least one file; NumItems/NumFiles/NumContainers are updated as a side effect.
func (c *Collections) UpdateCollections(ctx context.Context, driveID string, items []models.DriveItemable) error {
	for _, di := range items {
		// The drive root itself carries no backup data; skip it.
		if di.GetRoot() != nil {
			continue
		}

		parent := di.GetParentReference()
		if parent == nil || parent.GetPath() == nil {
			return errors.Errorf("item does not have a parent reference. item name : %s", *di.GetName())
		}

		// Resolve the canonical collection path for this item's parent folder.
		collectionPath, err := GetCanonicalPath(*parent.GetPath(), c.tenant, c.resourceOwner, c.source)
		if err != nil {
			return err
		}

		// Drop anything outside the folder selectors we were given.
		if !includePath(ctx, c.matcher, collectionPath) {
			logger.Ctx(ctx).Infof("Skipping path %s", collectionPath.String())
			continue
		}

		switch {
		case di.GetFolder() != nil, di.GetPackage() != nil:
			// Leave this here so we don't fall into the default case.
			// TODO: This is where we might create a "special file" to represent these in the backup repository
			// e.g. a ".folderMetadataFile"

		case di.GetFile() != nil:
			key := collectionPath.String()

			col, ok := c.CollectionMap[key]
			if !ok {
				// First file seen under this folder: lazily create its collection.
				col = NewCollection(collectionPath, driveID, c.service, c.statusUpdater)
				c.CollectionMap[key] = col
				c.NumContainers++
				c.NumItems++
			}

			col.(*Collection).Add(*di.GetId())
			c.NumFiles++
			c.NumItems++

		default:
			return errors.Errorf("item type not supported. item name : %s", *di.GetName())
		}
	}

	return nil
}
// GetCanonicalPath constructs the standard path for the given source.
func GetCanonicalPath(p, tenant, resourceOwner string, source driveSource) (path.Path, error) {
var (
pathBuilder = path.Builder{}.Append(strings.Split(p, "/")...)
result path.Path
err error
)
switch source {
case OneDriveSource:
result, err = pathBuilder.ToDataLayerOneDrivePath(tenant, resourceOwner, false)
case SharePointSource:
result, err = pathBuilder.ToDataLayerSharePointPath(tenant, resourceOwner, path.LibrariesCategory, false)
default:
return nil, errors.Errorf("unrecognized drive data source")
}
res, err := pathBuilder.ToDataLayerOneDrivePath(tenant, user, false)
if err != nil { if err != nil {
return nil, errors.Wrap(err, "converting to canonical path") return nil, errors.Wrap(err, "converting to canonical path")
} }
return res, nil return result, nil
} }
// Returns the path to the folder within the drive (i.e. under `root:`) // Returns the path to the folder within the drive (i.e. under `root:`)
@ -99,70 +192,7 @@ func getDriveFolderPath(p path.Path) (string, error) {
return path.Builder{}.Append(drivePath.folders...).String(), nil return path.Builder{}.Append(drivePath.folders...).String(), nil
} }
// updateCollections initializes and adds the provided OneDrive items to Collections func includePath(ctx context.Context, m folderMatcher, folderPath path.Path) bool {
// A new collection is created for every OneDrive folder (or package)
func (c *Collections) updateCollections(ctx context.Context, driveID string, items []models.DriveItemable) error {
for _, item := range items {
if item.GetRoot() != nil {
// Skip the root item
continue
}
if item.GetParentReference() == nil || item.GetParentReference().GetPath() == nil {
return errors.Errorf("item does not have a parent reference. item name : %s", *item.GetName())
}
// Create a collection for the parent of this item
collectionPath, err := getCanonicalPath(
*item.GetParentReference().GetPath(),
c.tenant,
c.user,
)
if err != nil {
return err
}
// Skip items that don't match the folder selectors we were given.
if !includePath(ctx, c.scope, collectionPath) {
logger.Ctx(ctx).Infof("Skipping path %s", collectionPath.String())
continue
}
switch {
case item.GetFolder() != nil, item.GetPackage() != nil:
// Leave this here so we don't fall into the default case.
// TODO: This is where we might create a "special file" to represent these in the backup repository
// e.g. a ".folderMetadataFile"
case item.GetFile() != nil:
col, found := c.collectionMap[collectionPath.String()]
if !found {
col = NewCollection(
collectionPath,
driveID,
c.service,
c.statusUpdater,
)
c.collectionMap[collectionPath.String()] = col
c.numContainers++
c.numItems++
}
collection := col.(*Collection)
collection.Add(*item.GetId())
c.numFiles++
c.numItems++
default:
return errors.Errorf("item type not supported. item name : %s", *item.GetName())
}
}
return nil
}
func includePath(ctx context.Context, scope selectors.OneDriveScope, folderPath path.Path) bool {
// Check if the folder is allowed by the scope. // Check if the folder is allowed by the scope.
folderPathString, err := getDriveFolderPath(folderPath) folderPathString, err := getDriveFolderPath(folderPath)
if err != nil { if err != nil {
@ -172,9 +202,9 @@ func includePath(ctx context.Context, scope selectors.OneDriveScope, folderPath
// Hack for the edge case where we're looking at the root folder and can // Hack for the edge case where we're looking at the root folder and can
// select any folder. Right now the root folder has an empty folder path. // select any folder. Right now the root folder has an empty folder path.
if len(folderPathString) == 0 && scope.IsAny(selectors.OneDriveFolder) { if len(folderPathString) == 0 && m.IsAny() {
return true return true
} }
return scope.Matches(selectors.OneDriveFolder, folderPathString) return m.Matches(folderPathString)
} }

View File

@ -1,6 +1,7 @@
package onedrive package onedrive
import ( import (
"strings"
"testing" "testing"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
@ -20,7 +21,7 @@ func expectedPathAsSlice(t *testing.T, tenant, user string, rest ...string) []st
res := make([]string, 0, len(rest)) res := make([]string, 0, len(rest))
for _, r := range rest { for _, r := range rest {
p, err := getCanonicalPath(r, tenant, user) p, err := GetCanonicalPath(r, tenant, user, OneDriveSource)
require.NoError(t, err) require.NoError(t, err)
res = append(res, p.String()) res = append(res, p.String())
@ -37,6 +38,49 @@ func TestOneDriveCollectionsSuite(t *testing.T) {
suite.Run(t, new(OneDriveCollectionsSuite)) suite.Run(t, new(OneDriveCollectionsSuite))
} }
// TestGetCanonicalPath checks that GetCanonicalPath produces the expected
// data-layer path for each drive source, and errors on unknown sources.
func (suite *OneDriveCollectionsSuite) TestGetCanonicalPath() {
	const (
		tenant        = "tenant"
		resourceOwner = "resourceOwner"
	)

	cases := []struct {
		name      string
		source    driveSource
		dir       []string
		expect    string
		expectErr assert.ErrorAssertionFunc
	}{
		{
			name:      "onedrive",
			source:    OneDriveSource,
			dir:       []string{"onedrive"},
			expect:    "tenant/onedrive/resourceOwner/files/onedrive",
			expectErr: assert.NoError,
		},
		{
			name:      "sharepoint",
			source:    SharePointSource,
			dir:       []string{"sharepoint"},
			expect:    "tenant/sharepoint/resourceOwner/libraries/sharepoint",
			expectErr: assert.NoError,
		},
		{
			name:      "unknown",
			source:    unknownDriveSource,
			dir:       []string{"unknown"},
			expectErr: assert.Error,
		},
	}

	for _, tc := range cases {
		suite.T().Run(tc.name, func(t *testing.T) {
			result, err := GetCanonicalPath(strings.Join(tc.dir, "/"), tenant, resourceOwner, tc.source)
			tc.expectErr(t, err)

			if result != nil {
				assert.Equal(t, tc.expect, result.String())
			}
		})
	}
}
func (suite *OneDriveCollectionsSuite) TestUpdateCollections() { func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
anyFolder := (&selectors.OneDriveBackup{}).Folders(selectors.Any(), selectors.Any())[0] anyFolder := (&selectors.OneDriveBackup{}).Folders(selectors.Any(), selectors.Any())[0]
@ -211,15 +255,16 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
c := NewCollections(tenant, user, tt.scope, &MockGraphService{}, nil) c := NewCollections(tenant, user, OneDriveSource, testFolderMatcher{tt.scope}, &MockGraphService{}, nil)
err := c.updateCollections(ctx, "driveID", tt.items)
err := c.UpdateCollections(ctx, "driveID", tt.items)
tt.expect(t, err) tt.expect(t, err)
assert.Equal(t, len(tt.expectedCollectionPaths), len(c.collectionMap), "collection paths") assert.Equal(t, len(tt.expectedCollectionPaths), len(c.CollectionMap), "collection paths")
assert.Equal(t, tt.expectedItemCount, c.numItems, "item count") assert.Equal(t, tt.expectedItemCount, c.NumItems, "item count")
assert.Equal(t, tt.expectedFileCount, c.numFiles, "file count") assert.Equal(t, tt.expectedFileCount, c.NumFiles, "file count")
assert.Equal(t, tt.expectedContainerCount, c.numContainers, "container count") assert.Equal(t, tt.expectedContainerCount, c.NumContainers, "container count")
for _, collPath := range tt.expectedCollectionPaths { for _, collPath := range tt.expectedCollectionPaths {
assert.Contains(t, c.collectionMap, collPath) assert.Contains(t, c.CollectionMap, collPath)
} }
}) })
} }

View File

@ -11,6 +11,7 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/drives/item/root/delta" "github.com/microsoftgraph/msgraph-sdk-go/drives/item/root/delta"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors" "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
mssitedrives "github.com/microsoftgraph/msgraph-sdk-go/sites/item/drives"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
@ -67,7 +68,39 @@ const (
) )
// Enumerates the drives for the specified user // Enumerates the drives for the specified user
func drives(ctx context.Context, service graph.Service, user string) ([]models.Driveable, error) { func drives(
ctx context.Context,
service graph.Service,
resourceOwner string,
source driveSource,
) ([]models.Driveable, error) {
switch source {
case OneDriveSource:
return userDrives(ctx, service, resourceOwner)
case SharePointSource:
return siteDrives(ctx, service, resourceOwner)
default:
return nil, errors.Errorf("unrecognized drive data source")
}
}
// siteDrives enumerates the drives (document libraries) attached to the given
// SharePoint site, selecting only the fields the caller needs.
func siteDrives(ctx context.Context, service graph.Service, site string) ([]models.Driveable, error) {
	opts := &mssitedrives.DrivesRequestBuilderGetRequestConfiguration{
		QueryParameters: &mssitedrives.DrivesRequestBuilderGetQueryParameters{
			Select: []string{"id", "name", "weburl", "system"},
		},
	}

	resp, err := service.Client().SitesById(site).Drives().Get(ctx, opts)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to retrieve site drives. site: %s, details: %s",
			site, support.ConnectorStackErrorTrace(err))
	}

	return resp.GetValue(), nil
}
func userDrives(ctx context.Context, service graph.Service, user string) ([]models.Driveable, error) {
var hasDrive bool var hasDrive bool
hasDrive, err := hasDriveLicense(ctx, service, user) hasDrive, err := hasDriveLicense(ctx, service, user)
@ -237,7 +270,7 @@ func GetAllFolders(
userID string, userID string,
prefix string, prefix string,
) ([]*Displayable, error) { ) ([]*Displayable, error) {
drives, err := drives(ctx, gs, userID) drives, err := drives(ctx, gs, userID, OneDriveSource)
if err != nil { if err != nil {
return nil, errors.Wrap(err, "getting OneDrive folders") return nil, errors.Wrap(err, "getting OneDrive folders")
} }
@ -321,7 +354,7 @@ func hasDriveLicense(
cb := func(pageItem any) bool { cb := func(pageItem any) bool {
entry, ok := pageItem.(models.LicenseDetailsable) entry, ok := pageItem.(models.LicenseDetailsable)
if !ok { if !ok {
err = errors.New("casting item to models.MailFolderable") err = errors.New("casting item to models.LicenseDetailsable")
return false return false
} }

View File

@ -43,7 +43,7 @@ func (suite *OneDriveSuite) TestCreateGetDeleteFolder() {
folderElements := []string{folderName1} folderElements := []string{folderName1}
gs := loadTestService(t) gs := loadTestService(t)
drives, err := drives(ctx, gs, suite.userID) drives, err := drives(ctx, gs, suite.userID, OneDriveSource)
require.NoError(t, err) require.NoError(t, err)
require.NotEmpty(t, drives) require.NotEmpty(t, drives)
@ -100,6 +100,18 @@ func (suite *OneDriveSuite) TestCreateGetDeleteFolder() {
} }
} }
// testFolderMatcher adapts a OneDrive selector scope to the folder-matching
// interface consumed by onedrive.NewCollections.
type testFolderMatcher struct {
	scope selectors.OneDriveScope
}

// IsAny reports whether the scope matches any OneDrive folder.
func (fm testFolderMatcher) IsAny() bool {
	return fm.scope.IsAny(selectors.OneDriveFolder)
}

// Matches reports whether the scope includes the given folder path.
func (fm testFolderMatcher) Matches(path string) bool {
	return fm.scope.Matches(selectors.OneDriveFolder, path)
}
func (suite *OneDriveSuite) TestOneDriveNewCollections() { func (suite *OneDriveSuite) TestOneDriveNewCollections() {
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
@ -129,7 +141,8 @@ func (suite *OneDriveSuite) TestOneDriveNewCollections() {
odcs, err := NewCollections( odcs, err := NewCollections(
creds.AzureTenantID, creds.AzureTenantID,
test.user, test.user,
scope, OneDriveSource,
testFolderMatcher{scope},
service, service,
service.updateStatus, service.updateStatus,
).Get(ctx) ).Get(ctx)

View File

@ -3,9 +3,10 @@ package onedrive
import ( import (
"context" "context"
"io" "io"
"net/http"
"time" "time"
msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
msgraphgocore "github.com/microsoftgraph/msgraph-sdk-go-core"
msup "github.com/microsoftgraph/msgraph-sdk-go/drives/item/items/item/createuploadsession" msup "github.com/microsoftgraph/msgraph-sdk-go/drives/item/items/item/createuploadsession"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/pkg/errors" "github.com/pkg/errors"
@ -51,9 +52,13 @@ func driveItemReader(
downloadURL := item.GetAdditionalData()[downloadURLKey].(*string) downloadURL := item.GetAdditionalData()[downloadURLKey].(*string)
// TODO: We should use the `msgraphgocore` http client which has the right clientOptions := msgraphsdk.GetDefaultClientOptions()
// middleware/options configured middlewares := msgraphgocore.GetDefaultMiddlewaresWithOptions(&clientOptions)
resp, err := http.Get(*downloadURL)
httpClient := msgraphgocore.GetDefaultClient(&clientOptions, middlewares...)
httpClient.Timeout = 0 // need infinite timeout for pulling large files
resp, err := httpClient.Get(*downloadURL)
if err != nil { if err != nil {
return nil, nil, errors.Wrapf(err, "failed to download file from %s", *downloadURL) return nil, nil, errors.Wrapf(err, "failed to download file from %s", *downloadURL)
} }

View File

@ -67,7 +67,7 @@ func (suite *ItemIntegrationSuite) SetupSuite() {
suite.user = tester.SecondaryM365UserID(suite.T()) suite.user = tester.SecondaryM365UserID(suite.T())
drives, err := drives(ctx, suite, suite.user) drives, err := drives(ctx, suite, suite.user, OneDriveSource)
require.NoError(suite.T(), err) require.NoError(suite.T(), err)
// Test Requirement 1: Need a drive // Test Requirement 1: Need a drive
require.Greaterf(suite.T(), len(drives), 0, "user %s does not have a drive", suite.user) require.Greaterf(suite.T(), len(drives), 0, "user %s does not have a drive", suite.user)

View File

@ -0,0 +1,124 @@
package sharepoint
import (
"context"
"fmt"
"github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
)
// statusUpdater receives connector operation status updates as collections
// are produced.
type statusUpdater interface {
	UpdateStatus(status *support.ConnectorOperationStatus)
}

// connector couples status reporting with access to the graph service used
// to query SharePoint.
type connector interface {
	statusUpdater
	Service() graph.Service
}
// DataCollections returns a set of DataCollection which represents the SharePoint data
// for the specified sites.
//
// selector must resolve to a SharePoint backup selector; siteIDs narrows the
// discrete scopes enumerated; tenantID identifies the owning tenant; con
// supplies the graph service and receives status updates.
//
// Fix: the original registered `defer closer()` and `defer close(foldersComplete)`
// inside the nested site loop, so every progress-bar closer and completion
// channel stayed open until DataCollections returned. Per-site work is now
// isolated in a helper so those defers fire at the end of each site.
func DataCollections(
	ctx context.Context,
	selector selectors.Selector,
	siteIDs []string,
	tenantID string,
	con connector,
) ([]data.Collection, error) {
	b, err := selector.ToSharePointBackup()
	if err != nil {
		return nil, errors.Wrap(err, "sharePointDataCollection: parsing selector")
	}

	var (
		scopes      = b.DiscreteScopes(siteIDs)
		collections = []data.Collection{}
		serv        = con.Service()
		errs        error
	)

	for _, scope := range scopes {
		// due to DiscreteScopes(siteIDs), each range should only contain one site.
		for _, site := range scope.Get(selectors.SharePointSite) {
			spcs, err := siteCollections(ctx, serv, tenantID, site, scope, con)
			if err != nil {
				return nil, support.WrapAndAppend(site, err, errs)
			}

			collections = append(collections, spcs...)
		}
	}

	return collections, errs
}

// siteCollections gathers the collections for a single site within a scope.
// Scoping one site per call lets the observe closer and completion channel
// (registered via defer) be released as soon as this site finishes.
func siteCollections(
	ctx context.Context,
	serv graph.Service,
	tenantID, site string,
	scope selectors.SharePointScope,
	updater statusUpdater,
) ([]data.Collection, error) {
	foldersComplete, closer := observe.MessageWithCompletion(fmt.Sprintf(
		"∙ %s - %s:",
		scope.Category().PathType(), site))
	defer closer()
	defer close(foldersComplete)

	var collections []data.Collection

	switch scope.Category().PathType() {
	case path.LibrariesCategory:
		spcs, err := collectLibraries(ctx, serv, tenantID, site, scope, updater)
		if err != nil {
			return nil, err
		}

		collections = append(collections, spcs...)
	}

	foldersComplete <- struct{}{}

	return collections, nil
}
// collectLibraries constructs a onedrive Collections struct and Get()s
// all the drives associated with the site.
func collectLibraries(
	ctx context.Context,
	serv graph.Service,
	tenantID, siteID string,
	scope selectors.SharePointScope,
	updater statusUpdater,
) ([]data.Collection, error) {
	logger.Ctx(ctx).With("site", siteID).Debug("Creating SharePoint Library collections")

	// Library backups reuse the onedrive collection machinery, pointed at the
	// SharePoint source with a library-scoped matcher.
	odcs, err := onedrive.NewCollections(
		tenantID,
		siteID,
		onedrive.SharePointSource,
		folderMatcher{scope},
		serv,
		updater.UpdateStatus,
	).Get(ctx)
	if err != nil {
		// no prior errors exist at this point; nil is the accumulator.
		return nil, support.WrapAndAppend(siteID, err, nil)
	}

	return append([]data.Collection{}, odcs...), nil
}
// folderMatcher adapts a SharePoint selector scope to the folder-matching
// interface consumed by onedrive.NewCollections.
type folderMatcher struct {
	scope selectors.SharePointScope
}

// IsAny reports whether the scope matches any SharePoint library.
func (fm folderMatcher) IsAny() bool {
	return fm.scope.IsAny(selectors.SharePointLibrary)
}

// Matches reports whether the scope includes the given library directory.
func (fm folderMatcher) Matches(dir string) bool {
	return fm.scope.Matches(selectors.SharePointLibrary, dir)
}

View File

@ -0,0 +1,156 @@
package sharepoint_test
import (
"testing"
msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/onedrive"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/selectors"
)
// ---------------------------------------------------------------------------
// consts, mocks
// ---------------------------------------------------------------------------
const (
testBaseDrivePath = "drive/driveID1/root:"
)
// testFolderMatcher adapts a SharePoint selector scope to the folder-matching
// interface consumed by onedrive.NewCollections.
type testFolderMatcher struct {
	scope selectors.SharePointScope
}

// IsAny reports whether the scope matches any SharePoint library.
func (fm testFolderMatcher) IsAny() bool {
	return fm.scope.IsAny(selectors.SharePointLibrary)
}

// Matches reports whether the scope includes the given library path.
func (fm testFolderMatcher) Matches(path string) bool {
	return fm.scope.Matches(selectors.SharePointLibrary, path)
}
// MockGraphService is a graph service stub whose accessors all return zero
// values; it supports tests that exercise collection bookkeeping without
// issuing any network requests.
type MockGraphService struct{}

// Client returns a nil client; tests using this mock must not perform requests.
func (ms *MockGraphService) Client() *msgraphsdk.GraphServiceClient {
	return nil
}

// Adapter returns a nil adapter; tests using this mock must not perform requests.
func (ms *MockGraphService) Adapter() *msgraphsdk.GraphRequestAdapter {
	return nil
}

// ErrPolicy always reports false (no error policy applied).
func (ms *MockGraphService) ErrPolicy() bool {
	return false
}
// ---------------------------------------------------------------------------
// tests
// ---------------------------------------------------------------------------
// SharePointLibrariesSuite groups unit tests covering SharePoint library
// collection behavior.
type SharePointLibrariesSuite struct {
	suite.Suite
}

// TestSharePointLibrariesSuite runs the suite under the standard test runner.
func TestSharePointLibrariesSuite(t *testing.T) {
	suite.Run(t, new(SharePointLibrariesSuite))
}
// TestUpdateCollections verifies that drive items are routed into the
// expected collections, with correct item/file/container counts.
func (suite *SharePointLibrariesSuite) TestUpdateCollections() {
	anyFolder := (&selectors.SharePointBackup{}).Libraries(selectors.Any(), selectors.Any())[0]

	const (
		tenant = "tenant"
		site   = "site"
	)

	table := []struct {
		testCase                string
		items                   []models.DriveItemable
		scope                   selectors.SharePointScope
		expect                  assert.ErrorAssertionFunc
		expectedCollectionPaths []string
		expectedItemCount       int
		expectedContainerCount  int
		expectedFileCount       int
	}{
		{
			testCase: "Single File",
			items: []models.DriveItemable{
				driveItem("file", testBaseDrivePath, true),
			},
			scope:  anyFolder,
			expect: assert.NoError,
			expectedCollectionPaths: expectedPathAsSlice(
				suite.T(),
				tenant,
				site,
				testBaseDrivePath,
			),
			// one file plus the lazily-created containing folder
			expectedItemCount:      2,
			expectedFileCount:      1,
			expectedContainerCount: 1,
		},
	}

	for _, tt := range table {
		suite.T().Run(tt.testCase, func(t *testing.T) {
			ctx, flush := tester.NewContext()
			defer flush()

			c := onedrive.NewCollections(
				tenant,
				site,
				onedrive.SharePointSource,
				testFolderMatcher{tt.scope},
				&MockGraphService{},
				nil)

			err := c.UpdateCollections(ctx, "driveID", tt.items)
			tt.expect(t, err)
			assert.Equal(t, len(tt.expectedCollectionPaths), len(c.CollectionMap), "collection paths")
			assert.Equal(t, tt.expectedItemCount, c.NumItems, "item count")
			assert.Equal(t, tt.expectedFileCount, c.NumFiles, "file count")
			assert.Equal(t, tt.expectedContainerCount, c.NumContainers, "container count")

			for _, collPath := range tt.expectedCollectionPaths {
				assert.Contains(t, c.CollectionMap, collPath)
			}
		})
	}
}
// driveItem fabricates a models.DriveItemable named (and ID'd) by name,
// parented at path; when isFile is set the item is marked as a file.
func driveItem(name string, path string, isFile bool) models.DriveItemable {
	parent := models.NewItemReference()
	parent.SetPath(&path)

	item := models.NewDriveItem()
	item.SetName(&name)
	item.SetId(&name)
	item.SetParentReference(parent)

	if isFile {
		item.SetFile(models.NewFile())
	}

	return item
}
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
// expectedPathAsSlice converts each relative path in rest to its canonical
// SharePoint data-layer form for the given tenant and user, failing the test
// on any conversion error.
func expectedPathAsSlice(t *testing.T, tenant, user string, rest ...string) []string {
	paths := make([]string, 0, len(rest))

	for _, r := range rest {
		canonical, err := onedrive.GetCanonicalPath(r, tenant, user, onedrive.SharePointSource)
		require.NoError(t, err)

		paths = append(paths, canonical.String())
	}

	return paths
}

View File

@ -10,6 +10,8 @@ import (
) )
// GetAllSitesForTenant makes a GraphQuery request retrieving all sites in the tenant. // GetAllSitesForTenant makes a GraphQuery request retrieving all sites in the tenant.
// Due to restrictions in filter capabilities for site queries, the returned iterable
// will contain all personal sites for all users in the org.
func GetAllSitesForTenant(ctx context.Context, gs graph.Service) (absser.Parsable, error) { func GetAllSitesForTenant(ctx context.Context, gs graph.Service) (absser.Parsable, error) {
options := &mssite.SitesRequestBuilderGetRequestConfiguration{ options := &mssite.SitesRequestBuilderGetRequestConfiguration{
QueryParameters: &mssite.SitesRequestBuilderGetQueryParameters{ QueryParameters: &mssite.SitesRequestBuilderGetQueryParameters{

View File

@ -17,30 +17,29 @@ import (
// keys for ease of use // keys for ease of use
const ( const (
corsoVersion = "corso-version" corsoVersion = "corso_version"
repoID = "repo-id" repoID = "repo_id"
payload = "payload"
// Event Keys // Event Keys
RepoInit = "repo-init" RepoInit = "repo_init"
BackupStart = "backup-start" BackupStart = "backup_start"
BackupEnd = "backup-end" BackupEnd = "backup_end"
RestoreStart = "restore-start" RestoreStart = "restore_start"
RestoreEnd = "restore-end" RestoreEnd = "restore_end"
// Event Data Keys // Event Data Keys
BackupCreateTime = "backup-creation-time" BackupCreateTime = "backup_creation_time"
BackupID = "backup-id" BackupID = "backup_id"
DataRetrieved = "data-retrieved" DataRetrieved = "data_retrieved"
DataStored = "data-stored" DataStored = "data_stored"
Duration = "duration" Duration = "duration"
EndTime = "end-time" EndTime = "end_time"
ItemsRead = "items-read" ItemsRead = "items_read"
ItemsWritten = "items-written" ItemsWritten = "items_written"
Resources = "resources" Resources = "resources"
RestoreID = "restore-id" RestoreID = "restore_id"
Service = "service" Service = "service"
StartTime = "start-time" StartTime = "start_time"
Status = "status" Status = "status"
) )
@ -120,8 +119,20 @@ func (b Bus) Event(ctx context.Context, key string, data map[string]any) {
Set(repoID, b.repoID). Set(repoID, b.repoID).
Set(corsoVersion, b.version) Set(corsoVersion, b.version)
if len(data) > 0 { for k, v := range data {
props.Set(payload, data) props.Set(k, v)
}
// need to setup identity when initializing a new repo
if key == RepoInit {
err := b.client.Enqueue(analytics.Identify{
UserId: b.repoID,
Traits: analytics.NewTraits().
SetName(b.repoID),
})
if err != nil {
logger.Ctx(ctx).Debugw("analytics event failure", "err", err)
}
} }
err := b.client.Enqueue(analytics.Track{ err := b.client.Enqueue(analytics.Track{

View File

@ -281,6 +281,8 @@ func getStreamItemFunc(
ctx, end := D.Span(ctx, "kopia:getStreamItemFunc") ctx, end := D.Span(ctx, "kopia:getStreamItemFunc")
defer end() defer end()
log := logger.Ctx(ctx)
// Collect all errors and return them at the end so that iteration for this // Collect all errors and return them at the end so that iteration for this
// directory doesn't end early. // directory doesn't end early.
var errs *multierror.Error var errs *multierror.Error
@ -314,11 +316,12 @@ func getStreamItemFunc(
err = errors.Wrap(err, "getting full item path") err = errors.Wrap(err, "getting full item path")
errs = multierror.Append(errs, err) errs = multierror.Append(errs, err)
logger.Ctx(ctx).Error(err) log.Error(err)
continue continue
} }
log.Debugw("reading item", "path", itemPath.String())
trace.Log(ctx, "kopia:getStreamItemFunc:item", itemPath.String()) trace.Log(ctx, "kopia:getStreamItemFunc:item", itemPath.String())
ei, ok := e.(data.StreamInfo) ei, ok := e.(data.StreamInfo)
@ -326,8 +329,7 @@ func getStreamItemFunc(
errs = multierror.Append( errs = multierror.Append(
errs, errors.Errorf("item %q does not implement DataStreamInfo", itemPath)) errs, errors.Errorf("item %q does not implement DataStreamInfo", itemPath))
logger.Ctx(ctx).Errorw( log.Errorw("item does not implement DataStreamInfo; skipping", "path", itemPath)
"item does not implement DataStreamInfo; skipping", "path", itemPath)
continue continue
} }

View File

@ -14,7 +14,8 @@ import (
) )
const ( const (
noProgressBarsFN = "no-progress-bars" hideProgressBarsFN = "hide-progress"
retainProgressBarsFN = "retain-progress"
progressBarWidth = 32 progressBarWidth = 32
) )
@ -22,7 +23,7 @@ var (
wg sync.WaitGroup wg sync.WaitGroup
// TODO: Revisit this being a global nd make it a parameter to the progress methods // TODO: Revisit this being a global nd make it a parameter to the progress methods
// so that each bar can be initialized with different contexts if needed. // so that each bar can be initialized with different contexts if needed.
con context.Context contxt context.Context
writer io.Writer writer io.Writer
progress *mpb.Progress progress *mpb.Progress
cfg *config cfg *config
@ -34,37 +35,49 @@ func init() {
makeSpinFrames(progressBarWidth) makeSpinFrames(progressBarWidth)
} }
// adds the persistent boolean flag --no-progress-bars to the provided command. // adds the persistent boolean flag --hide-progress to the provided command.
// This is a hack for help displays. Due to seeding the context, we also // This is a hack for help displays. Due to seeding the context, we also
// need to parse the configuration before we execute the command. // need to parse the configuration before we execute the command.
func AddProgressBarFlags(parent *cobra.Command) { func AddProgressBarFlags(parent *cobra.Command) {
fs := parent.PersistentFlags() fs := parent.PersistentFlags()
fs.Bool(noProgressBarsFN, false, "turn off the progress bar displays") fs.Bool(hideProgressBarsFN, false, "turn off the progress bar displays")
fs.Bool(retainProgressBarsFN, false, "retain the progress bar displays after completion")
} }
// Due to races between the lazy evaluation of flags in cobra and the need to init observer // Due to races between the lazy evaluation of flags in cobra and the need to init observer
// behavior in a ctx, these options get pre-processed manually here using pflags. The canonical // behavior in a ctx, these options get pre-processed manually here using pflags. The canonical
// AddProgressBarFlag() ensures the flags are displayed as part of the help/usage output. // AddProgressBarFlag() ensures the flags are displayed as part of the help/usage output.
func PreloadFlags() bool { func PreloadFlags() *config {
fs := pflag.NewFlagSet("seed-observer", pflag.ContinueOnError) fs := pflag.NewFlagSet("seed-observer", pflag.ContinueOnError)
fs.ParseErrorsWhitelist.UnknownFlags = true fs.ParseErrorsWhitelist.UnknownFlags = true
fs.Bool(noProgressBarsFN, false, "turn off the progress bar displays") fs.Bool(hideProgressBarsFN, false, "turn off the progress bar displays")
fs.Bool(retainProgressBarsFN, false, "retain the progress bar displays after completion")
// prevents overriding the corso/cobra help processor // prevents overriding the corso/cobra help processor
fs.BoolP("help", "h", false, "") fs.BoolP("help", "h", false, "")
// parse the os args list to find the log level flag // parse the os args list to find the observer display flags
if err := fs.Parse(os.Args[1:]); err != nil { if err := fs.Parse(os.Args[1:]); err != nil {
return false return nil
} }
// retrieve the user's preferred display // retrieve the user's preferred display
// automatically defaults to "info" // automatically defaults to "info"
shouldHide, err := fs.GetBool(noProgressBarsFN) shouldHide, err := fs.GetBool(hideProgressBarsFN)
if err != nil { if err != nil {
return false return nil
} }
return shouldHide // retrieve the user's preferred display
// automatically defaults to "info"
shouldAlwaysShow, err := fs.GetBool(retainProgressBarsFN)
if err != nil {
return nil
}
return &config{
doNotDisplay: shouldHide,
keepBarsAfterComplete: shouldAlwaysShow,
}
} }
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -74,24 +87,29 @@ func PreloadFlags() bool {
// config handles observer configuration // config handles observer configuration
type config struct { type config struct {
doNotDisplay bool doNotDisplay bool
keepBarsAfterComplete bool
}
func (c config) hidden() bool {
return c.doNotDisplay || writer == nil
} }
// SeedWriter adds default writer to the observe package. // SeedWriter adds default writer to the observe package.
// Uses a noop writer until seeded. // Uses a noop writer until seeded.
func SeedWriter(ctx context.Context, w io.Writer, hide bool) { func SeedWriter(ctx context.Context, w io.Writer, c *config) {
writer = w writer = w
con = ctx contxt = ctx
if con == nil { if contxt == nil {
con = context.Background() contxt = context.Background()
} }
cfg = &config{ if c != nil {
doNotDisplay: hide, cfg = c
} }
progress = mpb.NewWithContext( progress = mpb.NewWithContext(
con, contxt,
mpb.WithWidth(progressBarWidth), mpb.WithWidth(progressBarWidth),
mpb.WithWaitGroup(&wg), mpb.WithWaitGroup(&wg),
mpb.WithOutput(writer), mpb.WithOutput(writer),
@ -105,7 +123,7 @@ func Complete() {
progress.Wait() progress.Wait()
} }
SeedWriter(con, writer, cfg.doNotDisplay) SeedWriter(contxt, writer, cfg)
} }
const ( const (
@ -118,7 +136,7 @@ const (
// Message is used to display a progress message // Message is used to display a progress message
func Message(message string) { func Message(message string) {
if writer == nil { if cfg.hidden() {
return return
} }
@ -143,7 +161,7 @@ func Message(message string) {
func MessageWithCompletion(message string) (chan<- struct{}, func()) { func MessageWithCompletion(message string) (chan<- struct{}, func()) {
completionCh := make(chan struct{}, 1) completionCh := make(chan struct{}, 1)
if writer == nil { if cfg.hidden() {
return completionCh, func() {} return completionCh, func() {}
} }
@ -156,6 +174,7 @@ func MessageWithCompletion(message string) (chan<- struct{}, func()) {
mpb.SpinnerStyle(frames...).PositionLeft(), mpb.SpinnerStyle(frames...).PositionLeft(),
mpb.PrependDecorators( mpb.PrependDecorators(
decor.Name(message), decor.Name(message),
decor.Elapsed(decor.ET_STYLE_GO, decor.WC{W: 8}),
), ),
mpb.BarFillerOnComplete("done"), mpb.BarFillerOnComplete("done"),
) )
@ -163,7 +182,7 @@ func MessageWithCompletion(message string) (chan<- struct{}, func()) {
go func(ci <-chan struct{}) { go func(ci <-chan struct{}) {
for { for {
select { select {
case <-con.Done(): case <-contxt.Done():
bar.SetTotal(-1, true) bar.SetTotal(-1, true)
case <-ci: case <-ci:
// We don't care whether the channel was signalled or closed // We don't care whether the channel was signalled or closed
@ -184,23 +203,26 @@ func MessageWithCompletion(message string) (chan<- struct{}, func()) {
// read through the provided readcloser, up until the byte count matches // read through the provided readcloser, up until the byte count matches
// the totalBytes. // the totalBytes.
func ItemProgress(rc io.ReadCloser, header, iname string, totalBytes int64) (io.ReadCloser, func()) { func ItemProgress(rc io.ReadCloser, header, iname string, totalBytes int64) (io.ReadCloser, func()) {
if cfg.doNotDisplay || writer == nil || rc == nil || totalBytes == 0 { if cfg.hidden() || rc == nil || totalBytes == 0 {
return rc, func() {} return rc, func() {}
} }
wg.Add(1) wg.Add(1)
bar := progress.New( barOpts := []mpb.BarOption{
totalBytes,
mpb.NopStyle(),
mpb.BarRemoveOnComplete(),
mpb.PrependDecorators( mpb.PrependDecorators(
decor.Name(header, decor.WCSyncSpaceR), decor.Name(header, decor.WCSyncSpaceR),
decor.Name(iname, decor.WCSyncSpaceR), decor.Name(iname, decor.WCSyncSpaceR),
decor.CountersKibiByte(" %.1f/%.1f ", decor.WC{W: 8}), decor.CountersKibiByte(" %.1f/%.1f ", decor.WC{W: 8}),
decor.NewPercentage("%d ", decor.WC{W: 4}), decor.NewPercentage("%d ", decor.WC{W: 4}),
), ),
) }
if !cfg.keepBarsAfterComplete {
barOpts = append(barOpts, mpb.BarRemoveOnComplete())
}
bar := progress.New(totalBytes, mpb.NopStyle(), barOpts...)
return bar.ProxyReader(rc), waitAndCloseBar(bar) return bar.ProxyReader(rc), waitAndCloseBar(bar)
} }
@ -212,7 +234,7 @@ func ItemProgress(rc io.ReadCloser, header, iname string, totalBytes int64) (io.
func ProgressWithCount(header, message string, count int64) (chan<- struct{}, func()) { func ProgressWithCount(header, message string, count int64) (chan<- struct{}, func()) {
progressCh := make(chan struct{}) progressCh := make(chan struct{})
if cfg.doNotDisplay || writer == nil { if cfg.hidden() {
go func(ci <-chan struct{}) { go func(ci <-chan struct{}) {
for { for {
_, ok := <-ci _, ok := <-ci
@ -227,23 +249,25 @@ func ProgressWithCount(header, message string, count int64) (chan<- struct{}, fu
wg.Add(1) wg.Add(1)
bar := progress.New( barOpts := []mpb.BarOption{
count,
mpb.NopStyle(),
mpb.BarRemoveOnComplete(),
mpb.PrependDecorators( mpb.PrependDecorators(
decor.Name(header, decor.WCSyncSpaceR), decor.Name(header, decor.WCSyncSpaceR),
decor.Counters(0, " %d/%d "),
decor.Name(message), decor.Name(message),
decor.Counters(0, " %d/%d "),
), ),
) }
if !cfg.keepBarsAfterComplete {
barOpts = append(barOpts, mpb.BarRemoveOnComplete())
}
bar := progress.New(count, mpb.NopStyle(), barOpts...)
ch := make(chan struct{}) ch := make(chan struct{})
go func(ci <-chan struct{}) { go func(ci <-chan struct{}) {
for { for {
select { select {
case <-con.Done(): case <-contxt.Done():
bar.Abort(true) bar.Abort(true)
return return
@ -297,7 +321,7 @@ func makeSpinFrames(barWidth int) {
// incrementing the count of items handled. Each write to the provided channel // incrementing the count of items handled. Each write to the provided channel
// counts as a single increment. The caller is expected to close the channel. // counts as a single increment. The caller is expected to close the channel.
func CollectionProgress(user, category, dirName string) (chan<- struct{}, func()) { func CollectionProgress(user, category, dirName string) (chan<- struct{}, func()) {
if cfg.doNotDisplay || writer == nil || len(user) == 0 || len(dirName) == 0 { if cfg.hidden() || len(user) == 0 || len(dirName) == 0 {
ch := make(chan struct{}) ch := make(chan struct{})
go func(ci <-chan struct{}) { go func(ci <-chan struct{}) {
@ -314,25 +338,29 @@ func CollectionProgress(user, category, dirName string) (chan<- struct{}, func()
wg.Add(1) wg.Add(1)
bar := progress.New( barOpts := []mpb.BarOption{
-1, // -1 to indicate an unbounded count mpb.PrependDecorators(decor.Name(category)),
mpb.SpinnerStyle(spinFrames...),
mpb.BarRemoveOnComplete(),
mpb.PrependDecorators(
decor.Name(category),
),
mpb.AppendDecorators( mpb.AppendDecorators(
decor.CurrentNoUnit("%d - ", decor.WCSyncSpace), decor.CurrentNoUnit("%d - ", decor.WCSyncSpace),
decor.Name(fmt.Sprintf("%s - %s", user, dirName)), decor.Name(fmt.Sprintf("%s - %s", user, dirName)),
), ),
}
if !cfg.keepBarsAfterComplete {
barOpts = append(barOpts, mpb.BarRemoveOnComplete())
}
bar := progress.New(
-1, // -1 to indicate an unbounded count
mpb.SpinnerStyle(spinFrames...),
barOpts...,
) )
ch := make(chan struct{}) ch := make(chan struct{})
go func(ci <-chan struct{}) { go func(ci <-chan struct{}) {
for { for {
select { select {
case <-con.Done(): case <-contxt.Done():
bar.SetTotal(-1, true) bar.SetTotal(-1, true)
return return

View File

@ -33,13 +33,13 @@ func (suite *ObserveProgressUnitSuite) TestItemProgress() {
t := suite.T() t := suite.T()
recorder := strings.Builder{} recorder := strings.Builder{}
observe.SeedWriter(ctx, &recorder, false) observe.SeedWriter(ctx, &recorder, nil)
defer func() { defer func() {
// don't cross-contaminate other tests. // don't cross-contaminate other tests.
observe.Complete() observe.Complete()
//nolint:forbidigo //nolint:forbidigo
observe.SeedWriter(context.Background(), nil, false) observe.SeedWriter(context.Background(), nil, nil)
}() }()
from := make([]byte, 100) from := make([]byte, 100)
@ -87,13 +87,13 @@ func (suite *ObserveProgressUnitSuite) TestCollectionProgress_unblockOnCtxCancel
t := suite.T() t := suite.T()
recorder := strings.Builder{} recorder := strings.Builder{}
observe.SeedWriter(ctx, &recorder, false) observe.SeedWriter(ctx, &recorder, nil)
defer func() { defer func() {
// don't cross-contaminate other tests. // don't cross-contaminate other tests.
observe.Complete() observe.Complete()
//nolint:forbidigo //nolint:forbidigo
observe.SeedWriter(context.Background(), nil, false) observe.SeedWriter(context.Background(), nil, nil)
}() }()
progCh, closer := observe.CollectionProgress("test", "testcat", "testertons") progCh, closer := observe.CollectionProgress("test", "testcat", "testertons")
@ -122,13 +122,13 @@ func (suite *ObserveProgressUnitSuite) TestCollectionProgress_unblockOnChannelCl
t := suite.T() t := suite.T()
recorder := strings.Builder{} recorder := strings.Builder{}
observe.SeedWriter(ctx, &recorder, false) observe.SeedWriter(ctx, &recorder, nil)
defer func() { defer func() {
// don't cross-contaminate other tests. // don't cross-contaminate other tests.
observe.Complete() observe.Complete()
//nolint:forbidigo //nolint:forbidigo
observe.SeedWriter(context.Background(), nil, false) observe.SeedWriter(context.Background(), nil, nil)
}() }()
progCh, closer := observe.CollectionProgress("test", "testcat", "testertons") progCh, closer := observe.CollectionProgress("test", "testcat", "testertons")
@ -153,12 +153,12 @@ func (suite *ObserveProgressUnitSuite) TestObserveProgress() {
defer flush() defer flush()
recorder := strings.Builder{} recorder := strings.Builder{}
observe.SeedWriter(ctx, &recorder, false) observe.SeedWriter(ctx, &recorder, nil)
defer func() { defer func() {
// don't cross-contaminate other tests. // don't cross-contaminate other tests.
//nolint:forbidigo //nolint:forbidigo
observe.SeedWriter(context.Background(), nil, false) observe.SeedWriter(context.Background(), nil, nil)
}() }()
message := "Test Message" message := "Test Message"
@ -174,12 +174,12 @@ func (suite *ObserveProgressUnitSuite) TestObserveProgressWithCompletion() {
defer flush() defer flush()
recorder := strings.Builder{} recorder := strings.Builder{}
observe.SeedWriter(ctx, &recorder, false) observe.SeedWriter(ctx, &recorder, nil)
defer func() { defer func() {
// don't cross-contaminate other tests. // don't cross-contaminate other tests.
//nolint:forbidigo //nolint:forbidigo
observe.SeedWriter(context.Background(), nil, false) observe.SeedWriter(context.Background(), nil, nil)
}() }()
message := "Test Message" message := "Test Message"
@ -204,12 +204,12 @@ func (suite *ObserveProgressUnitSuite) TestObserveProgressWithChannelClosed() {
defer flush() defer flush()
recorder := strings.Builder{} recorder := strings.Builder{}
observe.SeedWriter(ctx, &recorder, false) observe.SeedWriter(ctx, &recorder, nil)
defer func() { defer func() {
// don't cross-contaminate other tests. // don't cross-contaminate other tests.
//nolint:forbidigo //nolint:forbidigo
observe.SeedWriter(context.Background(), nil, false) observe.SeedWriter(context.Background(), nil, nil)
}() }()
message := "Test Message" message := "Test Message"
@ -236,12 +236,12 @@ func (suite *ObserveProgressUnitSuite) TestObserveProgressWithContextCancelled()
ctx, cancel := context.WithCancel(ctx) ctx, cancel := context.WithCancel(ctx)
recorder := strings.Builder{} recorder := strings.Builder{}
observe.SeedWriter(ctx, &recorder, false) observe.SeedWriter(ctx, &recorder, nil)
defer func() { defer func() {
// don't cross-contaminate other tests. // don't cross-contaminate other tests.
//nolint:forbidigo //nolint:forbidigo
observe.SeedWriter(context.Background(), nil, false) observe.SeedWriter(context.Background(), nil, nil)
}() }()
message := "Test Message" message := "Test Message"
@ -265,12 +265,12 @@ func (suite *ObserveProgressUnitSuite) TestObserveProgressWithCount() {
defer flush() defer flush()
recorder := strings.Builder{} recorder := strings.Builder{}
observe.SeedWriter(ctx, &recorder, false) observe.SeedWriter(ctx, &recorder, nil)
defer func() { defer func() {
// don't cross-contaminate other tests. // don't cross-contaminate other tests.
//nolint:forbidigo //nolint:forbidigo
observe.SeedWriter(context.Background(), nil, false) observe.SeedWriter(context.Background(), nil, nil)
}() }()
header := "Header" header := "Header"
@ -298,12 +298,12 @@ func (suite *ObserveProgressUnitSuite) TestObserveProgressWithCountChannelClosed
defer flush() defer flush()
recorder := strings.Builder{} recorder := strings.Builder{}
observe.SeedWriter(ctx, &recorder, false) observe.SeedWriter(ctx, &recorder, nil)
defer func() { defer func() {
// don't cross-contaminate other tests. // don't cross-contaminate other tests.
//nolint:forbidigo //nolint:forbidigo
observe.SeedWriter(context.Background(), nil, false) observe.SeedWriter(context.Background(), nil, nil)
}() }()
header := "Header" header := "Header"

View File

@ -1,6 +1,7 @@
package operations package operations
import ( import (
"context"
"testing" "testing"
"time" "time"
@ -114,6 +115,67 @@ func (suite *BackupOpSuite) TestBackupOperation_PersistResults() {
// integration // integration
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
//revive:disable:context-as-argument
func prepNewBackupOp(
t *testing.T,
ctx context.Context,
bus events.Eventer,
sel selectors.Selector,
) (BackupOperation, func()) {
//revive:enable:context-as-argument
acct := tester.NewM365Account(t)
// need to initialize the repository before we can test connecting to it.
st := tester.NewPrefixedS3Storage(t)
k := kopia.NewConn(st)
require.NoError(t, k.Initialize(ctx))
// kopiaRef comes with a count of 1 and Wrapper bumps it again so safe
// to close here.
closer := func() { k.Close(ctx) }
kw, err := kopia.NewWrapper(k)
if !assert.NoError(t, err) {
closer()
t.FailNow()
}
closer = func() {
k.Close(ctx)
kw.Close(ctx)
}
ms, err := kopia.NewModelStore(k)
if !assert.NoError(t, err) {
closer()
t.FailNow()
}
closer = func() {
k.Close(ctx)
kw.Close(ctx)
ms.Close(ctx)
}
sw := store.NewKopiaStore(ms)
bo, err := NewBackupOperation(
ctx,
control.Options{},
kw,
sw,
acct,
sel,
bus)
if !assert.NoError(t, err) {
closer()
t.FailNow()
}
return bo, closer
}
type BackupOpIntegrationSuite struct { type BackupOpIntegrationSuite struct {
suite.Suite suite.Suite
} }
@ -122,6 +184,7 @@ func TestBackupOpIntegrationSuite(t *testing.T) {
if err := tester.RunOnAny( if err := tester.RunOnAny(
tester.CorsoCITests, tester.CorsoCITests,
tester.CorsoOperationTests, tester.CorsoOperationTests,
"flomp",
); err != nil { ); err != nil {
t.Skip(err) t.Skip(err)
} }
@ -174,12 +237,11 @@ func (suite *BackupOpIntegrationSuite) TestNewBackupOperation() {
// TestBackup_Run ensures that Integration Testing works // TestBackup_Run ensures that Integration Testing works
// for the following scopes: Contacts, Events, and Mail // for the following scopes: Contacts, Events, and Mail
func (suite *BackupOpIntegrationSuite) TestBackup_Run() { func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchange() {
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
m365UserID := tester.M365UserID(suite.T()) m365UserID := tester.M365UserID(suite.T())
acct := tester.NewM365Account(suite.T())
tests := []struct { tests := []struct {
name string name string
@ -215,36 +277,9 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run() {
} }
for _, test := range tests { for _, test := range tests {
suite.T().Run(test.name, func(t *testing.T) { suite.T().Run(test.name, func(t *testing.T) {
// need to initialize the repository before we can test connecting to it.
st := tester.NewPrefixedS3Storage(t)
k := kopia.NewConn(st)
require.NoError(t, k.Initialize(ctx))
// kopiaRef comes with a count of 1 and Wrapper bumps it again so safe
// to close here.
defer k.Close(ctx)
kw, err := kopia.NewWrapper(k)
require.NoError(t, err)
defer kw.Close(ctx)
ms, err := kopia.NewModelStore(k)
require.NoError(t, err)
defer ms.Close(ctx)
mb := evmock.NewBus() mb := evmock.NewBus()
bo, closer := prepNewBackupOp(t, ctx, mb, *test.selectFunc())
sw := store.NewKopiaStore(ms) defer closer()
selected := test.selectFunc()
bo, err := NewBackupOperation(
ctx,
control.Options{},
kw,
sw,
acct,
*selected,
mb)
require.NoError(t, err)
require.NoError(t, bo.Run(ctx)) require.NoError(t, bo.Run(ctx))
require.NotEmpty(t, bo.Results) require.NotEmpty(t, bo.Results)
@ -266,51 +301,54 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run() {
} }
} }
func (suite *BackupOpIntegrationSuite) TestBackupOneDrive_Run() { func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDrive() {
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
t := suite.T() var (
t = suite.T()
mb = evmock.NewBus()
m365UserID = tester.SecondaryM365UserID(t)
sel = selectors.NewOneDriveBackup()
)
m365UserID := tester.SecondaryM365UserID(t)
acct := tester.NewM365Account(t)
// need to initialize the repository before we can test connecting to it.
st := tester.NewPrefixedS3Storage(t)
k := kopia.NewConn(st)
require.NoError(t, k.Initialize(ctx))
// kopiaRef comes with a count of 1 and Wrapper bumps it again so safe
// to close here.
defer k.Close(ctx)
kw, err := kopia.NewWrapper(k)
require.NoError(t, err)
defer kw.Close(ctx)
ms, err := kopia.NewModelStore(k)
require.NoError(t, err)
defer ms.Close(ctx)
sw := store.NewKopiaStore(ms)
mb := evmock.NewBus()
sel := selectors.NewOneDriveBackup()
sel.Include(sel.Users([]string{m365UserID})) sel.Include(sel.Users([]string{m365UserID}))
bo, err := NewBackupOperation( bo, closer := prepNewBackupOp(t, ctx, mb, sel.Selector)
ctx, defer closer()
control.Options{},
kw, require.NoError(t, bo.Run(ctx))
sw, require.NotEmpty(t, bo.Results)
acct, require.NotEmpty(t, bo.Results.BackupID)
sel.Selector, assert.Equalf(t, Completed, bo.Status, "backup status %s is not Completed", bo.Status)
mb) assert.Equal(t, bo.Results.ItemsRead, bo.Results.ItemsWritten)
require.NoError(t, err) assert.Less(t, int64(0), bo.Results.BytesRead, "bytes read")
assert.Less(t, int64(0), bo.Results.BytesUploaded, "bytes uploaded")
assert.Equal(t, 1, bo.Results.ResourceOwners)
assert.NoError(t, bo.Results.ReadErrors)
assert.NoError(t, bo.Results.WriteErrors)
assert.Equal(t, 1, mb.TimesCalled[events.BackupStart], "backup-start events")
assert.Equal(t, 1, mb.TimesCalled[events.BackupEnd], "backup-end events")
assert.Equal(t,
mb.CalledWith[events.BackupStart][0][events.BackupID],
bo.Results.BackupID, "backupID pre-declaration")
}
func (suite *BackupOpIntegrationSuite) TestBackup_Run_sharePoint() {
ctx, flush := tester.NewContext()
defer flush()
var (
t = suite.T()
mb = evmock.NewBus()
siteID = tester.M365SiteID(t)
sel = selectors.NewSharePointBackup()
)
sel.Include(sel.Sites([]string{siteID}))
bo, closer := prepNewBackupOp(t, ctx, mb, sel.Selector)
defer closer()
require.NoError(t, bo.Run(ctx)) require.NoError(t, bo.Run(ctx))
require.NotEmpty(t, bo.Results) require.NotEmpty(t, bo.Results)

View File

@ -15,11 +15,14 @@ import (
// InProgress - the standard value for any process that has not // InProgress - the standard value for any process that has not
// arrived at an end state. The end states are Failed, Completed, // arrived at an end state. The end states are Failed, Completed,
// or NoData. // or NoData.
//
// Failed - the operation was unable to begin processing data at all. // Failed - the operation was unable to begin processing data at all.
// No items have been written by the consumer. // No items have been written by the consumer.
//
// Completed - the operation was able to process one or more of the // Completed - the operation was able to process one or more of the
// items in the request. Both partial success (0 < N < len(items) // items in the request. Both partial success (0 < N < len(items)
// errored) and total success (0 errors) are set as Completed. // errored) and total success (0 errors) are set as Completed.
//
// NoData - only occurs when no data was involved in an operation. // NoData - only occurs when no data was involved in an operation.
// For example, if a backup is requested for a specific user's // For example, if a backup is requested for a specific user's
// mail, but that account contains zero mail messages, the backup // mail, but that account contains zero mail messages, the backup

View File

@ -23,6 +23,7 @@ const (
CorsoGraphConnectorTests = "CORSO_GRAPH_CONNECTOR_TESTS" CorsoGraphConnectorTests = "CORSO_GRAPH_CONNECTOR_TESTS"
CorsoGraphConnectorExchangeTests = "CORSO_GRAPH_CONNECTOR_EXCHANGE_TESTS" CorsoGraphConnectorExchangeTests = "CORSO_GRAPH_CONNECTOR_EXCHANGE_TESTS"
CorsoGraphConnectorOneDriveTests = "CORSO_GRAPH_CONNECTOR_ONE_DRIVE_TESTS" CorsoGraphConnectorOneDriveTests = "CORSO_GRAPH_CONNECTOR_ONE_DRIVE_TESTS"
CorsoGraphConnectorSharePointTests = "CORSO_GRAPH_CONNECTOR_SHAREPOINT_TESTS"
CorsoKopiaWrapperTests = "CORSO_KOPIA_WRAPPER_TESTS" CorsoKopiaWrapperTests = "CORSO_KOPIA_WRAPPER_TESTS"
CorsoModelStoreTests = "CORSO_MODEL_STORE_TESTS" CorsoModelStoreTests = "CORSO_MODEL_STORE_TESTS"
CorsoOneDriveTests = "CORSO_ONE_DRIVE_TESTS" CorsoOneDriveTests = "CORSO_ONE_DRIVE_TESTS"

View File

@ -30,6 +30,19 @@ func SecondaryM365UserID(t *testing.T) string {
return cfg[TestCfgSecondaryUserID] return cfg[TestCfgSecondaryUserID]
} }
// LoadTestM365SiteID returns a siteID string representing the m365SiteID
// described by either the env var CORSO_M365_LOAD_TEST_SITE_ID, the
// corso_test.toml config file or the default value (in that order of priority).
// The default is a last-attempt fallback that will only work on alcion's
// testing org.
func LoadTestM365SiteID(t *testing.T) string {
cfg, err := readTestConfig()
require.NoError(t, err, "retrieving load test m365 site id from test configuration")
// TODO: load test site id, not standard test site id
return cfg[TestCfgSiteID]
}
// LoadTestM365UserID returns an userID string representing the m365UserID // LoadTestM365UserID returns an userID string representing the m365UserID
// described by either the env var CORSO_M365_LOAD_TEST_USER_ID, the // described by either the env var CORSO_M365_LOAD_TEST_USER_ID, the
// corso_test.toml config file or the default value (in that order of priority). // corso_test.toml config file or the default value (in that order of priority).
@ -42,8 +55,29 @@ func LoadTestM365UserID(t *testing.T) string {
return cfg[TestCfgLoadTestUserID] return cfg[TestCfgLoadTestUserID]
} }
// expects cfg value to be a string representing an array like: // expects cfg value to be a string representing an array such as:
// "['foo@example.com','bar@example.com']" // ["site1\,uuid","site2\,uuid"]
// the delimeter must be a |.
func LoadTestM365OrgSites(t *testing.T) []string {
cfg, err := readTestConfig()
require.NoError(t, err, "retrieving load test m365 org sites from test configuration")
// TODO: proper handling of site slice input.
// sites := cfg[TestCfgLoadTestOrgSites]
// sites = strings.TrimPrefix(sites, "[")
// sites = strings.TrimSuffix(sites, "]")
// sites = strings.ReplaceAll(sites, `"`, "")
// sites = strings.ReplaceAll(sites, `'`, "")
// sites = strings.ReplaceAll(sites, "|", ",")
// return strings.Split(sites, ",")
return []string{cfg[TestCfgSiteID]}
}
// expects cfg value to be a string representing an array such as:
// ["foo@example.com","bar@example.com"]
// the delimeter may be either a , or |.
func LoadTestM365OrgUsers(t *testing.T) []string { func LoadTestM365OrgUsers(t *testing.T) []string {
cfg, err := readTestConfig() cfg, err := readTestConfig()
require.NoError(t, err, "retrieving load test m365 org users from test configuration") require.NoError(t, err, "retrieving load test m365 org users from test configuration")

View File

@ -352,12 +352,14 @@ func (i ExchangeInfo) Values() []string {
// SharePointInfo describes a sharepoint item // SharePointInfo describes a sharepoint item
type SharePointInfo struct { type SharePointInfo struct {
ItemType ItemType `json:"itemType,omitempty"`
ItemName string `json:"itemName,omitempty"`
Created time.Time `json:"created,omitempty"` Created time.Time `json:"created,omitempty"`
ItemName string `json:"itemName,omitempty"`
ItemType ItemType `json:"itemType,omitempty"`
Modified time.Time `josn:"modified,omitempty"` Modified time.Time `josn:"modified,omitempty"`
WebURL string `json:"webUrl,omitempty"` Owner string `json:"owner,omitempty"`
ParentPath string `json:"parentPath"`
Size int64 `json:"size,omitempty"` Size int64 `json:"size,omitempty"`
WebURL string `json:"webUrl,omitempty"`
} }
// Headers returns the human-readable names of properties in a SharePointInfo // Headers returns the human-readable names of properties in a SharePointInfo
@ -374,13 +376,13 @@ func (i SharePointInfo) Values() []string {
// OneDriveInfo describes a oneDrive item // OneDriveInfo describes a oneDrive item
type OneDriveInfo struct { type OneDriveInfo struct {
ItemType ItemType `json:"itemType,omitempty"`
ParentPath string `json:"parentPath"`
ItemName string `json:"itemName"`
Size int64 `json:"size,omitempty"`
Owner string `json:"owner,omitempty"`
Created time.Time `json:"created,omitempty"` Created time.Time `json:"created,omitempty"`
ItemName string `json:"itemName"`
ItemType ItemType `json:"itemType,omitempty"`
Modified time.Time `json:"modified,omitempty"` Modified time.Time `json:"modified,omitempty"`
Owner string `json:"owner,omitempty"`
ParentPath string `json:"parentPath"`
Size int64 `json:"size,omitempty"`
} }
// Headers returns the human-readable names of properties in a OneDriveInfo // Headers returns the human-readable names of properties in a OneDriveInfo

View File

@ -25,6 +25,7 @@ const (
Info Info
Warn Warn
Production Production
Disabled
) )
const logLevelFN = "log-level" const logLevelFN = "log-level"
@ -80,6 +81,8 @@ func genLogger(level logLevel) (*zapcore.Core, *zap.SugaredLogger) {
return lvl >= zapcore.WarnLevel return lvl >= zapcore.WarnLevel
case Production: case Production:
return lvl >= zapcore.ErrorLevel return lvl >= zapcore.ErrorLevel
case Disabled:
return false
default: default:
return true return true
} }
@ -104,6 +107,8 @@ func genLogger(level logLevel) (*zapcore.Core, *zap.SugaredLogger) {
cfg.Level = zap.NewAtomicLevelAt(zapcore.InfoLevel) cfg.Level = zap.NewAtomicLevelAt(zapcore.InfoLevel)
case Warn: case Warn:
cfg.Level = zap.NewAtomicLevelAt(zapcore.WarnLevel) cfg.Level = zap.NewAtomicLevelAt(zapcore.WarnLevel)
case Disabled:
cfg.Level = zap.NewAtomicLevelAt(zapcore.FatalLevel)
} }
lgr, err = cfg.Build() lgr, err = cfg.Build()
@ -144,7 +149,7 @@ type loggingKey string
const ctxKey loggingKey = "corsoLogger" const ctxKey loggingKey = "corsoLogger"
// Seed embeds a logger into the context for later retrieval. // Seed generates a logger within the context for later retrieval.
// It also parses the command line for flag values prior to executing // It also parses the command line for flag values prior to executing
// cobra. This early parsing is necessary since logging depends on // cobra. This early parsing is necessary since logging depends on
// a seeded context prior to cobra evaluating flags. // a seeded context prior to cobra evaluating flags.
@ -154,24 +159,30 @@ func Seed(ctx context.Context, lvl string) (context.Context, *zap.SugaredLogger)
} }
zsl := singleton(levelOf(lvl)) zsl := singleton(levelOf(lvl))
ctxOut := context.WithValue(ctx, ctxKey, zsl)
return ctxOut, zsl return Set(ctx, zsl), zsl
} }
// SeedLevel embeds a logger into the context with the given log-level. // SeedLevel generates a logger within the context with the given log-level.
func SeedLevel(ctx context.Context, level logLevel) (context.Context, *zap.SugaredLogger) { func SeedLevel(ctx context.Context, level logLevel) (context.Context, *zap.SugaredLogger) {
l := ctx.Value(ctxKey) l := ctx.Value(ctxKey)
if l == nil { if l == nil {
zsl := singleton(level) zsl := singleton(level)
ctxWV := context.WithValue(ctx, ctxKey, zsl) return Set(ctx, zsl), zsl
return ctxWV, zsl
} }
return ctx, l.(*zap.SugaredLogger) return ctx, l.(*zap.SugaredLogger)
} }
// Set allows users to embed their own zap.SugaredLogger within the context.
func Set(ctx context.Context, logger *zap.SugaredLogger) context.Context {
if logger == nil {
return ctx
}
return context.WithValue(ctx, ctxKey, logger)
}
// Ctx retrieves the logger embedded in the context. // Ctx retrieves the logger embedded in the context.
func Ctx(ctx context.Context) *zap.SugaredLogger { func Ctx(ctx context.Context) *zap.SugaredLogger {
l := ctx.Value(ctxKey) l := ctx.Value(ctxKey)
@ -191,6 +202,8 @@ func levelOf(lvl string) logLevel {
return Warn return Warn
case "error": case "error":
return Production return Production
case "disabled":
return Disabled
} }
return Info return Info

Some files were not shown because too many files have changed in this diff Show More