Merge branch 'main' into refactor_get_item
126
.github/workflows/ci.yml
vendored
@ -1,11 +1,16 @@
|
||||
name: Build/Release Corso
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
pull_request:
|
||||
|
||||
push:
|
||||
branches: [main]
|
||||
tags: ["v*.*.*"]
|
||||
|
||||
repository_dispatch:
|
||||
types: [ok-to-test-command]
|
||||
|
||||
permissions:
|
||||
# required to retrieve AWS credentials
|
||||
id-token: write
|
||||
@ -131,16 +136,15 @@ jobs:
|
||||
name: docs
|
||||
path: docs/build
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
# --- Integration and Unit Testing -------------------------------------------------------------------
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
|
||||
Test-Suite:
|
||||
Test-Suite-Trusted:
|
||||
needs: [Precheck, Checkout]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.srcfileschanged == 'true'
|
||||
if: (startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main') || (needs.precheck.outputs.srcfileschanged == 'true' && github.event.pull_request.head.repo.full_name == github.repository)
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src
|
||||
@ -179,6 +183,7 @@ jobs:
|
||||
run: |
|
||||
set -euo pipefail
|
||||
go test \
|
||||
-tags testing \
|
||||
-json \
|
||||
-v \
|
||||
-failfast \
|
||||
@ -194,6 +199,108 @@ jobs:
|
||||
if-no-files-found: error
|
||||
retention-days: 14
|
||||
|
||||
Test-Suite-Fork:
|
||||
needs: [Precheck]
|
||||
environment: Testing
|
||||
if: (!startsWith(github.ref , 'refs/tags/') && github.ref != 'refs/heads/main') && (needs.precheck.outputs.srcfileschanged == 'true' && github.event.pull_request.head.repo.full_name != github.repository)
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src
|
||||
steps:
|
||||
- name: Fail check
|
||||
if: github.event_name != 'repository_dispatch'
|
||||
run: |
|
||||
echo "Workflow requires approval from a maintainer to run. It will be automatically rerun on approval."
|
||||
exit 1
|
||||
|
||||
# add comment to PR with link to workflow run
|
||||
- uses: marocchino/sticky-pull-request-comment@v2
|
||||
with:
|
||||
message: |
|
||||
https://github.com/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID
|
||||
|
||||
# Check out merge commit
|
||||
- name: Fork based /ok-to-test checkout
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
ref: "refs/pull/${{ github.event.client_payload.pull_request.number }}/merge"
|
||||
|
||||
- name: Setup Golang with cache
|
||||
uses: magnetikonline/action-golang-cache@v3
|
||||
with:
|
||||
go-version-file: src/go.mod
|
||||
|
||||
- run: mkdir testlog
|
||||
|
||||
# Install gotestfmt
|
||||
- name: Set up gotestfmt
|
||||
run: go install github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
|
||||
|
||||
# AWS creds
|
||||
- name: Configure AWS credentials from Test account
|
||||
uses: aws-actions/configure-aws-credentials@v1
|
||||
with:
|
||||
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
|
||||
role-session-name: integration-testing
|
||||
aws-region: us-east-1
|
||||
|
||||
# run the tests
|
||||
- name: Integration Tests
|
||||
env:
|
||||
AZURE_CLIENT_ID: ${{ secrets.CLIENT_ID }}
|
||||
AZURE_CLIENT_SECRET: ${{ secrets.CLIENT_SECRET }}
|
||||
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
|
||||
CORSO_CI_TESTS: true
|
||||
CORSO_M365_TEST_USER_ID: ${{ secrets.CORSO_M365_TEST_USER_ID }}
|
||||
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
go test \
|
||||
-json \
|
||||
-v \
|
||||
./... 2>&1 | tee ./testlog/gotest.log | gotestfmt -hide successful-tests
|
||||
|
||||
# Upload the original go test log as an artifact for later review.
|
||||
- name: Upload test log
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: test-log
|
||||
path: src/testlog/gotest.log
|
||||
if-no-files-found: error
|
||||
retention-days: 14
|
||||
|
||||
# Update check run called "Test-Suite-Fork"
|
||||
- uses: actions/github-script@v5
|
||||
id: update-check-run
|
||||
if: ${{ always() }}
|
||||
env:
|
||||
number: ${{ github.event.client_payload.pull_request.number }}
|
||||
job: ${{ github.job }}
|
||||
# Conveniently, job.status maps to https://developer.github.com/v3/checks/runs/#update-a-check-run
|
||||
conclusion: ${{ job.status }}
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
const { data: pull } = await github.rest.pulls.get({
|
||||
...context.repo,
|
||||
pull_number: process.env.number
|
||||
});
|
||||
const ref = pull.head.sha;
|
||||
const { data: checks } = await github.rest.checks.listForRef({
|
||||
...context.repo,
|
||||
ref
|
||||
});
|
||||
const check = checks.check_runs.filter(c => c.name === process.env.job);
|
||||
const { data: result } = await github.rest.checks.update({
|
||||
...context.repo,
|
||||
check_run_id: check[0].id,
|
||||
status: 'completed',
|
||||
conclusion: process.env.conclusion
|
||||
});
|
||||
return result;
|
||||
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
# --- Source Code Linting ----------------------------------------------------------------------------
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
@ -217,9 +324,12 @@ jobs:
|
||||
- name: Go Lint
|
||||
uses: golangci/golangci-lint-action@v3
|
||||
with:
|
||||
version: v1.45.2
|
||||
# Keep pinned to a verson as sometimes updates will add new lint
|
||||
# failures in unchanged code.
|
||||
version: v1.50.1
|
||||
working-directory: src
|
||||
skip-cache: true
|
||||
skip-pkg-cache: true
|
||||
skip-build-cache: true
|
||||
|
||||
# check licenses
|
||||
- name: Get go-licenses
|
||||
@ -233,7 +343,7 @@ jobs:
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
|
||||
Publish-Binary:
|
||||
needs: [Test-Suite, Linting, Docs-Linting, SetEnv]
|
||||
needs: [Test-Suite-Trusted, Linting, Docs-Linting, SetEnv]
|
||||
environment: ${{ needs.SetEnv.outputs.environment }}
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'
|
||||
@ -270,7 +380,7 @@ jobs:
|
||||
path: src/dist/*
|
||||
|
||||
Publish-Docs:
|
||||
needs: [Test-Suite, Linting, Docs-Linting, SetEnv]
|
||||
needs: [Test-Suite-Trusted, Linting, Docs-Linting, SetEnv]
|
||||
environment: ${{ needs.SetEnv.outputs.environment }}
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'
|
||||
@ -308,7 +418,7 @@ jobs:
|
||||
aws cloudfront create-invalidation --distribution-id ${{ secrets.DOCS_CF_DISTRIBUTION }} --paths "/*"
|
||||
|
||||
Publish-Image:
|
||||
needs: [Test-Suite, Linting, Docs-Linting, SetEnv]
|
||||
needs: [Test-Suite-Trusted, Linting, Docs-Linting, SetEnv]
|
||||
environment: ${{ needs.SetEnv.outputs.environment }}
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'
|
||||
|
||||
31
.github/workflows/ok-to-test.yml
vendored
Normal file
@ -0,0 +1,31 @@
|
||||
# If someone with write access comments "/ok-to-test" on a pull request, emit a repository_dispatch event
|
||||
name: Ok To Test
|
||||
|
||||
on:
|
||||
issue_comment:
|
||||
types: [created]
|
||||
|
||||
jobs:
|
||||
ok-to-test:
|
||||
runs-on: ubuntu-latest
|
||||
# Only run for PRs, not issue comments
|
||||
if: ${{ github.event.issue.pull_request }}
|
||||
steps:
|
||||
- name: Generate token
|
||||
id: generate_token
|
||||
uses: tibdex/github-app-token@v1
|
||||
with:
|
||||
app_id: ${{ secrets.APP_ID }}
|
||||
private_key: ${{ secrets.PRIVATE_KEY }}
|
||||
|
||||
- name: Slash Command Dispatch
|
||||
uses: peter-evans/slash-command-dispatch@v1
|
||||
env:
|
||||
TOKEN: ${{ steps.generate_token.outputs.token }}
|
||||
with:
|
||||
token: ${{ env.TOKEN }} # GitHub App installation access token
|
||||
reaction-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-type: pull-request
|
||||
commands: ok-to-test
|
||||
named-args: true
|
||||
permission: write
|
||||
@ -19,12 +19,12 @@ services, possibly beyond M365, will expand based on the interest and needs of t
|
||||
|
||||
# Getting Started
|
||||
|
||||
See the [Corso Documentation](https://docs.corsobackup.io) for more information.
|
||||
See the [Corso Documentation](https://corsobackup.io/docs/intro) for more information.
|
||||
|
||||
# Building Corso
|
||||
|
||||
To learn more about working with the project source core and building Corso, see the
|
||||
[Developer section](https://docs.corsobackup.io/developers/build) of the Corso Documentation.
|
||||
[Developer section](https://corsobackup.io/docs/developers/build) of the Corso Documentation.
|
||||
|
||||
# Roadmap
|
||||
|
||||
|
||||
@ -4,7 +4,7 @@ LABEL MAINTAINER="Niraj Tolia"
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# NOTE for lines 13,15: update in CI when updating
|
||||
RUN apt-get -y update && apt-get -y install gpg emacs curl git make \
|
||||
RUN apt-get -y update && apt-get -y install gpg emacs curl zip git make \
|
||||
&& curl -fsSL https://deb.nodesource.com/setup_current.x | bash - \
|
||||
&& apt-get -y install nodejs \
|
||||
&& apt-get autoclean \
|
||||
@ -12,7 +12,9 @@ RUN apt-get -y update && apt-get -y install gpg emacs curl git make \
|
||||
&& npm --version \
|
||||
&& cd /tmp && curl -O -L https://github.com/errata-ai/vale/releases/download/v2.20.1/vale_2.20.1_Linux_64-bit.tar.gz \
|
||||
&& tar -xvzf vale_2.20.1_Linux_64-bit.tar.gz -C /usr/bin vale \
|
||||
&& npm install -g markdownlint-cli@0.32.2
|
||||
&& npm install -g markdownlint-cli@0.32.2 \
|
||||
&& curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" \
|
||||
&& unzip awscliv2.zip && /bin/bash aws/install && rm -rf awscliv2.zip aws
|
||||
|
||||
WORKDIR /usr/src
|
||||
COPY package.json package-lock.json* ./
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
.PHONY: buildimage build dev shell check genclidocs _validatemdgen
|
||||
.PHONY: buildimage build serve dev shell check genclidocs _validatemdgen publish sync
|
||||
|
||||
CORSO_BUILD_DIR := /tmp/.corsobuild
|
||||
CORSO_BUILD_CACHE := ${CORSO_BUILD_DIR}/cache
|
||||
@ -40,6 +40,9 @@ dockershell:
|
||||
build: genclidocs
|
||||
$(DOCSC) npm run build
|
||||
|
||||
serve:
|
||||
$(DOCSC) npm run serve
|
||||
|
||||
genclidocs: _validatemdgen ${MDGEN_BINARY}
|
||||
@echo 'Auto-generating Corso CLI docs...'
|
||||
$(DOCSC) rm -rf docs/cli
|
||||
@ -57,3 +60,13 @@ ${MDGEN_BINARY}: $(shell find ${CORSO_LOCAL_PATH}/src -type f -name *.go) $(shel
|
||||
clean:
|
||||
$(DOCSC) rm -rf docs/cli build node_modules
|
||||
$(GOC) rm -rf ${CORSO_BUILD_DIR}/*
|
||||
|
||||
publish: clean build
|
||||
docker run -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY \
|
||||
-e AWS_SESSION_TOKEN -e AWS_REGION \
|
||||
--rm -v ${PWD}:/usr/src/docs corso/docs:latest \
|
||||
make sync
|
||||
|
||||
sync:
|
||||
aws s3 sync /usr/src/docs/build/ s3://corsobackup.io/ --exclude ".git/*" --delete
|
||||
aws cloudfront create-invalidation --distribution-id E1W9NGI9YTVZ1A --paths "/*"
|
||||
|
||||
@ -41,6 +41,14 @@ make build
|
||||
|
||||
This command generates static content into the `build` directory for integration with any static contents hosting service.
|
||||
|
||||
## Serving static documentation
|
||||
|
||||
```bash
|
||||
make serve
|
||||
```
|
||||
|
||||
This command will serve the static content generated with `make build` at [http://localhost:3000](http://localhost:3000).
|
||||
|
||||
## Style and linting
|
||||
|
||||
```bash
|
||||
|
||||
@ -5,6 +5,8 @@ authors: nica
|
||||
tags: [corso, microsoft 365]
|
||||
---
|
||||
|
||||

|
||||
|
||||
Have you had it with Google sheets? So have I. Excel is my home. It’s where I write all my best formulae. And what
|
||||
about PowerPoint? The way it just finds stock photos for you? The automatic ‘alternative designs for this slide’
|
||||
button? It’s too good. I can’t give up Microsoft 365.
|
||||
@ -60,7 +62,7 @@ support is coming soon).
|
||||
|
||||
Corso’s secure backup protects against accidental data loss, service provider downtime and malicious threats, including
|
||||
ransomware attacks. Plus, a robust user community provides a venue for admins to share and learn about data protection
|
||||
and find best practices for how to security configure their M365 environments. As a member of the community, you’ll
|
||||
and find best practices for how to securely configure their M365 environments. As a member of the community, you’ll
|
||||
have access to blogs, forums, and discussion, as well as updates on public and feedback-driven development.
|
||||
[Join the Corso community on Discord](https://discord.gg/63DTTSnuhT).
|
||||
|
||||
@ -73,8 +75,8 @@ costs, as well.
|
||||
|
||||
## Interested in Trying Corso?
|
||||
|
||||
Corso is currently a tool in alpha to give you a CLI for backups of your M365 data.
|
||||
[Follow the quickstart guide](https://docs.corsobackup.io/) to start protecting your business-critical M365 data in
|
||||
Corso, currently in alpha, provides a CLI-based tool for backups of your M365 data.
|
||||
[Follow the quickstart guide](../../docs/quickstart) to start protecting your business-critical M365 data in
|
||||
just a few minutes. Because Corso is currently in alpha, it should NOT be used in production.
|
||||
|
||||
Corso supports Microsoft 365 Exchange and OneDrive, with SharePoint and Teams support in active development. Coverage
|
||||
|
||||
@ -1,5 +0,0 @@
|
||||
# Architecture
|
||||
|
||||
<!-- vale proselint.Annotations = NO -->
|
||||
TODO
|
||||
<!-- vale proselint.Annotations = YES -->
|
||||
@ -30,7 +30,7 @@
|
||||
|
||||
- Set M365 Credentials environment variables
|
||||
|
||||
> You can find more information on how to get these values in our [M365 docs](../setup/m365_access).
|
||||
> You can find more information on how to get these values in our [M365 docs](../../setup/m365_access/).
|
||||
|
||||
```bash
|
||||
export AZURE_CLIENT_ID=<id>
|
||||
|
||||
@ -12,6 +12,6 @@ beyond M365, will expand based on the interest and needs of the community.
|
||||
|
||||
## Getting started
|
||||
|
||||
You can follow the [Quick Start](quickstart) guide for an end-to-end Corso walk through. Alternatively, follow
|
||||
the instructions in the [Corso Setup](setup/concepts) section to dive into the details on how to configure and
|
||||
You can follow the [Quick Start](../quickstart) guide for an end-to-end Corso walk through. Alternatively, follow
|
||||
the instructions in the [Corso Setup](../setup/concepts) section to dive into the details on how to configure and
|
||||
run Corso.
|
||||
|
||||
@ -21,13 +21,13 @@ pull the image.
|
||||
## Connect to Microsoft 365
|
||||
|
||||
Obtaining credentials from Microsoft 365 to allow Corso to run is a one-time operation. Follow the instructions
|
||||
[here](setup/m365_access) to obtain the necessary credentials and then make them available to Corso.
|
||||
[here](../setup/m365_access) to obtain the necessary credentials and then make them available to Corso.
|
||||
|
||||
<Tabs groupId="os">
|
||||
<TabItem value="win" label="Powershell">
|
||||
|
||||
```powershell
|
||||
$Env:AZURE_CLIENT_ID = "<Application (client) ID for configured app>"
|
||||
$Env:AZURE_CLIENT_ID = "<Application (../client) ID for configured app>"
|
||||
$Env:AZURE_TENANT_ID = "<Directory (tenant) ID for configured app>"
|
||||
$Env:AZURE_CLIENT_SECRET = "<Client secret value>"
|
||||
```
|
||||
@ -36,7 +36,7 @@ Obtaining credentials from Microsoft 365 to allow Corso to run is a one-time ope
|
||||
<TabItem value="unix" label="Linux/macOS">
|
||||
|
||||
```bash
|
||||
export AZURE_CLIENT_ID=<Application (client) ID for configured app>
|
||||
export AZURE_CLIENT_ID=<Application (../client) ID for configured app>
|
||||
export AZURE_TENANT_ID=<Directory (tenant) ID for configured app>
|
||||
export AZURE_CLIENT_SECRET=<Client secret value>
|
||||
```
|
||||
@ -45,7 +45,7 @@ Obtaining credentials from Microsoft 365 to allow Corso to run is a one-time ope
|
||||
<TabItem value="docker" label="Docker">
|
||||
|
||||
```bash
|
||||
export AZURE_CLIENT_ID=<Application (client) ID for configured app>
|
||||
export AZURE_CLIENT_ID=<Application (../client) ID for configured app>
|
||||
export AZURE_TENANT_ID=<Directory (tenant) ID for configured app>
|
||||
export AZURE_CLIENT_SECRET=<Client secret value>
|
||||
```
|
||||
@ -55,9 +55,9 @@ Obtaining credentials from Microsoft 365 to allow Corso to run is a one-time ope
|
||||
|
||||
## Create a Corso repository
|
||||
|
||||
To create a secure backup location for Corso, you will first need to [download Corso](setup/download).
|
||||
To create a secure backup location for Corso, you will first need to [download Corso](../setup/download).
|
||||
Use the container or native executable to initialize the Corso repository using an
|
||||
[encryption passphrase](setup/configuration#environment-variables) and a pre-created S3 bucket (Corso doesn't create
|
||||
[encryption passphrase](../setup/configuration#environment-variables) and a pre-created S3 bucket (Corso doesn't create
|
||||
the bucket if it doesn't exist). The steps below use `corso-test` as the bucket name but, if you are using AWS, you
|
||||
will need a different unique name for the bucket.
|
||||
|
||||
@ -118,7 +118,7 @@ docker run --env-file $HOME/.corso/corso.env \\
|
||||
## Create your first backup
|
||||
|
||||
Corso can do much more, but you can start by creating a backup of your Exchange mailbox. If it has been a while since
|
||||
you initialized the Corso repository, you might need to [connect to it again](setup/repos#connect-to-a-repository).
|
||||
you initialized the Corso repository, you might need to [connect to it again](../setup/repos#connect-to-a-repository).
|
||||
|
||||
<Tabs groupId="os">
|
||||
<TabItem value="win" label="Powershell">
|
||||
@ -202,7 +202,7 @@ docker run --env-file $HOME/.corso/corso.env \\
|
||||
```
|
||||
|
||||
Next, select one of the available backups and list all backed up emails. See
|
||||
[here](cli/corso_backup_details_exchange) for more advanced filtering options.
|
||||
[here](../cli/corso_backup_details_exchange) for more advanced filtering options.
|
||||
|
||||
<Tabs groupId="os">
|
||||
<TabItem value="win" label="Powershell">
|
||||
@ -285,5 +285,5 @@ A confirmation of the recovered email will be shown and the email will appear in
|
||||
|
||||
The above tutorial only scratches the surface for Corso's capabilities. We encourage you to dig deeper by:
|
||||
|
||||
* Learning about [Corso concepts and setup](setup/concepts)
|
||||
* Explore Corso backup and restore options for Exchange and Onedrive in the [Command Line Reference](cli/corso)
|
||||
* Learning about [Corso concepts and setup](../setup/concepts)
|
||||
* Explore Corso backup and restore options for Exchange and Onedrive in the [Command Line Reference](../cli/corso)
|
||||
|
||||
@ -21,7 +21,7 @@ application to connect to your *M365 tenant* and transfer data during backup and
|
||||
## Corso concepts {#corso-concepts}
|
||||
|
||||
* **Repository** refers to the storage location where Corso securely and efficiently stores encrypted *backups* of your
|
||||
*M365 Services* data. See [Repositories](repos) for more information.
|
||||
*M365 Services* data. See [Repositories](../repos) for more information.
|
||||
|
||||
* **Backup** is a copy of your *M365 Services* data to be used for restores in case of deletion, loss, or corruption of the
|
||||
original data. Corso performs backups incrementally, and each backup only captures data that has changed between backup iterations.
|
||||
|
||||
@ -14,7 +14,7 @@ Two things are needed to configure Corso:
|
||||
|
||||
Three distinct pieces of configuration are required by Corso:
|
||||
|
||||
* S3 object storage configuration to store backups. See [AWS Credentials Setup](repos#s3-creds-setup) for
|
||||
* S3 object storage configuration to store backups. See [AWS Credentials Setup](../repos#s3-creds-setup) for
|
||||
alternate ways to pass AWS credentials.
|
||||
* `AWS_ACCESS_KEY_ID`: Access key for an IAM user or role for accessing an S3 bucket
|
||||
* `AWS_SECRET_ACCESS_KEY`: Secret key associated with the access key
|
||||
|
||||
@ -7,8 +7,8 @@ description: "Connect to a Microsft 365 tenant"
|
||||
import Tabs from '@theme/Tabs';
|
||||
import TabItem from '@theme/TabItem';
|
||||
|
||||
To perform backup and restore operations, Corso requires access to your [M365 tenant](concepts#m365-concepts)
|
||||
by creating an [Azure AD application](concepts#m365-concepts) with appropriate permissions.
|
||||
To perform backup and restore operations, Corso requires access to your [M365 tenant](../concepts#m365-concepts)
|
||||
by creating an [Azure AD application](../concepts#m365-concepts) with appropriate permissions.
|
||||
|
||||
The following steps outline a simplified procedure for creating an Azure Ad application suitable for use with Corso.
|
||||
For more details, please refer to the
|
||||
@ -25,7 +25,7 @@ AD application.
|
||||
From the list of [Azure services](https://portal.azure.com/#allservices), select
|
||||
**Azure Active Directory → App Registrations → New Registration**
|
||||
|
||||

|
||||

|
||||
|
||||
### Configure basic settings
|
||||
|
||||
@ -36,14 +36,14 @@ Next, configure the following:
|
||||
* Skip the **Redirect URI** option
|
||||
* Click **Register** at the bottom of the screen
|
||||
|
||||

|
||||

|
||||
|
||||
### Configure required permissions
|
||||
|
||||
Within the new application (`CorsoApp` in the below diagram), select **API Permissions → Add a permission** from
|
||||
the management panel.
|
||||
|
||||

|
||||

|
||||
|
||||
Select the following permissions from **Microsoft API → Microsoft Graph → Application Permissions** and
|
||||
then click **Add permissions**.
|
||||
@ -63,7 +63,7 @@ then click **Add permissions**.
|
||||
Finally, grant admin consent to this application. This step is required even if the user that created the application
|
||||
is an Microsoft 365 admin.
|
||||
|
||||

|
||||

|
||||
|
||||
## Export application credentials
|
||||
|
||||
@ -74,7 +74,7 @@ as environment variables.
|
||||
|
||||
To view the tenant and client ID, select Overview from the app management panel.
|
||||
|
||||

|
||||

|
||||
|
||||
Copy the client and tenant IDs and export them into the following environment variables.
|
||||
|
||||
@ -112,7 +112,7 @@ management panel.
|
||||
|
||||
Click **New Client Secret** under **Client secrets** and follow the instructions to create a secret.
|
||||
|
||||

|
||||

|
||||
|
||||
After creating the secret, immediately copy the secret **Value** because it won't be available later. Export it as an
|
||||
environment variable.
|
||||
|
||||
@ -10,7 +10,7 @@ import TabItem from '@theme/TabItem';
|
||||
import TOCInline from '@theme/TOCInline';
|
||||
import {Version} from '@site/src/corsoEnv';
|
||||
|
||||
A Corso [repository](concepts#corso-concepts) stores encrypted copies of your backup data. Repositories are
|
||||
A Corso [repository](../concepts#corso-concepts) stores encrypted copies of your backup data. Repositories are
|
||||
supported on the following object storage systems:
|
||||
|
||||
<TOCInline toc={toc} maxHeadingLevel={2}/><br/>
|
||||
@ -67,7 +67,7 @@ The two most commonly-used options are:
|
||||
### Initialize repository
|
||||
|
||||
Before first use, you need to initialize a Corso repository with `corso repo init s3`. See the command details
|
||||
[here](../cli/corso_repo_init_s3).
|
||||
[here](../../cli/corso_repo_init_s3).
|
||||
|
||||
<Tabs groupId="os">
|
||||
<TabItem value="win" label="Powershell">
|
||||
@ -104,7 +104,7 @@ docker run --env-file $HOME/.corso/corso.env \\
|
||||
### Connect to a repository
|
||||
|
||||
If a repository already exists, you can connect to it with `corso repo connect s3`. See the command details
|
||||
[here](../cli/corso_repo_connect_s3).
|
||||
[here](../../cli/corso_repo_connect_s3).
|
||||
|
||||
<Tabs groupId="os">
|
||||
<TabItem value="win" label="Powershell">
|
||||
@ -144,3 +144,9 @@ need to use the following flag with the initial Corso `repo init` command:
|
||||
```bash
|
||||
--endpoint <domain.example.com>
|
||||
```
|
||||
|
||||
### Testing with insecure TLS configurations
|
||||
|
||||
Corso also supports the use of object storage systems with no TLS certificate or with self-signed
|
||||
TLS certificates with the `--disable-tls` or `--disable-tls-verification` flags.
|
||||
[These flags](../../cli/corso_repo_init_s3) should only be used for testing.
|
||||
|
||||
@ -16,7 +16,7 @@ metadata for basic information about installed versions and usage in a privacy-p
|
||||
generic description of most-commonly used backup operations and statistics on the duration and size of backups. No user
|
||||
data is stored or transmitted during this process.
|
||||
|
||||
Telemetry reporting can be turned off by using the `--no-stats` flag. See the [Command Line Reference](../cli/corso)
|
||||
Telemetry reporting can be turned off by using the `--no-stats` flag. See the [Command Line Reference](../../cli/corso)
|
||||
section for more information.
|
||||
|
||||
</details>
|
||||
|
||||
@ -6,13 +6,14 @@ const darkCodeTheme = require('prism-react-renderer/themes/dracula');
|
||||
|
||||
/** @type {import('@docusaurus/types').Config} */
|
||||
const config = {
|
||||
title: 'Corso Documentation',
|
||||
title: 'Corso',
|
||||
tagline: 'Free, Secure, and Open-Source Backup for Microsoft 365',
|
||||
url: 'https://docs.corsobackup.io',
|
||||
url: 'https://corsobackup.io',
|
||||
baseUrl: process.env.CORSO_DOCS_BASEURL || '/',
|
||||
onBrokenLinks: 'throw',
|
||||
onBrokenMarkdownLinks: 'throw',
|
||||
favicon: 'img/corso_logo.svg',
|
||||
trailingSlash: true,
|
||||
|
||||
// GitHub pages deployment config.
|
||||
// If you aren't using GitHub pages, you don't need these.
|
||||
@ -84,7 +85,11 @@ const config = {
|
||||
position: 'left',
|
||||
label: 'Docs',
|
||||
},
|
||||
{to: '/blog', label: 'Blog', position: 'left'},
|
||||
{
|
||||
to: '/blog',
|
||||
label: 'Blog',
|
||||
position: 'left'
|
||||
},
|
||||
{
|
||||
href: 'https://github.com/alcionai/corso',
|
||||
label: 'GitHub',
|
||||
@ -133,11 +138,6 @@ const config = {
|
||||
label: 'GitHub',
|
||||
href: 'https://github.com/alcionai/corso',
|
||||
},
|
||||
{
|
||||
label: 'Corso Website',
|
||||
href: 'https://corsobackup.io/',
|
||||
},
|
||||
|
||||
],
|
||||
},
|
||||
],
|
||||
@ -145,8 +145,8 @@ const config = {
|
||||
},
|
||||
colorMode: {
|
||||
defaultMode: 'dark',
|
||||
disableSwitch: false,
|
||||
respectPrefersColorScheme: true,
|
||||
disableSwitch: true,
|
||||
respectPrefersColorScheme: false,
|
||||
},
|
||||
|
||||
zoom: {
|
||||
@ -168,8 +168,8 @@ const config = {
|
||||
metadata : [
|
||||
{name: 'twitter:card', content: 'summary_large_image'},
|
||||
{name: 'twitter:site', content: '@corsobackup'},
|
||||
{name: 'twitter:title', content: 'Corso Documentation: Free, Secure, and Open-Source Backup for Microsoft 365'},
|
||||
{name: 'twitter:description', content: 'Documentation for Corso, an open-source tool, that protects Microsoft 365 data by securely and efficiently backing up all business-critical data to object storage.'},
|
||||
{name: 'twitter:title', content: 'Corso: Free, Secure, and Open-Source Backup for Microsoft 365'},
|
||||
{name: 'twitter:description', content: 'Corso is an open-source tool that protects Microsoft 365 data by securely and efficiently backing up all business-critical data to object storage.'},
|
||||
],
|
||||
|
||||
prism: {
|
||||
|
||||
5885
docs/package-lock.json
generated
@ -17,19 +17,29 @@
|
||||
"@docusaurus/core": "2.2.0",
|
||||
"@docusaurus/plugin-google-gtag": "^2.2.0",
|
||||
"@docusaurus/preset-classic": "2.2.0",
|
||||
"@loadable/component": "^5.15.2",
|
||||
"@mdx-js/react": "^1.6.22",
|
||||
"animate.css": "^4.1.1",
|
||||
"clsx": "^1.2.1",
|
||||
"docusaurus-plugin-image-zoom": "^0.1.1",
|
||||
"docusaurus-plugin-sass": "^0.2.2",
|
||||
"feather-icons": "^4.29.0",
|
||||
"jarallax": "^2.0.4",
|
||||
"mdx-mermaid": "^1.3.2",
|
||||
"mermaid": "^9.2.2",
|
||||
"prism-react-renderer": "^1.3.5",
|
||||
"react": "^17.0.2",
|
||||
"react-dom": "^17.0.2",
|
||||
"sass": "^1.56.1"
|
||||
"sass": "^1.56.1",
|
||||
"tw-elements": "^1.0.0-alpha12",
|
||||
"wowjs": "^1.1.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@docusaurus/module-type-aliases": "2.2.0"
|
||||
"@docusaurus/module-type-aliases": "2.2.0",
|
||||
"@iconify/react": "^4.0.0",
|
||||
"autoprefixer": "^10.4.13",
|
||||
"postcss": "^8.4.19",
|
||||
"tailwindcss": "^3.2.4"
|
||||
},
|
||||
"browserslist": {
|
||||
"production": [
|
||||
|
||||
6
docs/postcss.config.js
Normal file
@ -0,0 +1,6 @@
|
||||
module.exports = {
|
||||
plugins: {
|
||||
tailwindcss: {},
|
||||
autoprefixer: {},
|
||||
},
|
||||
}
|
||||
@ -1,61 +0,0 @@
|
||||
import React from 'react';
|
||||
import clsx from 'clsx';
|
||||
import styles from './styles.module.css';
|
||||
|
||||
const FeatureList = [
|
||||
{
|
||||
title: 'Secure',
|
||||
Svg: require('@site/static/img/security.svg').default,
|
||||
description: (
|
||||
<>
|
||||
Corso provides secure data backup that protects customers against accidental data loss, service provider downtime, and malicious threats including ransomware attacks.
|
||||
</>
|
||||
),
|
||||
},
|
||||
{
|
||||
title: 'Robust',
|
||||
Svg: require('@site/static/img/data.svg').default,
|
||||
description: (
|
||||
<>
|
||||
Corso, purpose-built for M365 protection, provides easy-to-use comprehensive backup and restore workflows that reduce backup time, improve time-to-recovery, reduce admin overhead, and replace unreliable scripts or workarounds.
|
||||
</>
|
||||
),
|
||||
},
|
||||
{
|
||||
title: 'Low Cost',
|
||||
Svg: require('@site/static/img/savings.svg').default,
|
||||
description: (
|
||||
<>
|
||||
Corso, a 100% open-source tool, provides a free alternative for cost-conscious teams. It further reduces storage costs by supporting flexible retention policies and efficiently compressing and deduplicating data before storing it in low-cost cloud object storage.
|
||||
</>
|
||||
),
|
||||
},
|
||||
];
|
||||
|
||||
function Feature({Svg, title, description}) {
|
||||
return (
|
||||
<div className={clsx('col col--4')}>
|
||||
<div className="text--center">
|
||||
<Svg className={styles.featureSvg} role="img" />
|
||||
</div>
|
||||
<div className="text--center padding-horiz--md">
|
||||
<h3>{title}</h3>
|
||||
<p>{description}</p>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default function HomepageFeatures() {
|
||||
return (
|
||||
<section className={styles.features}>
|
||||
<div className="container">
|
||||
<div className="row">
|
||||
{FeatureList.map((props, idx) => (
|
||||
<Feature key={idx} {...props} />
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
);
|
||||
}
|
||||
@ -1,11 +0,0 @@
|
||||
.features {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
padding: 2rem 0;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.featureSvg {
|
||||
height: 200px;
|
||||
width: 200px;
|
||||
}
|
||||
38
docs/src/components/parts/BackToTop.js
Normal file
@ -0,0 +1,38 @@
|
||||
import { Icon } from "@iconify/react";
|
||||
import React, { useEffect } from "react";
|
||||
|
||||
export default function BackToTop() {
|
||||
function scroll() {
|
||||
window.scrollTo({ top: 0, left: 0, behavior: "smooth" });
|
||||
}
|
||||
function scrollFunction() {
|
||||
var mybutton = document.getElementById("back-to-top");
|
||||
if (mybutton != null) {
|
||||
if (
|
||||
document.body.scrollTop > 500 ||
|
||||
document.documentElement.scrollTop > 500
|
||||
) {
|
||||
mybutton.classList.add("flex");
|
||||
mybutton.classList.remove("hidden");
|
||||
} else {
|
||||
mybutton.classList.add("hidden");
|
||||
mybutton.classList.remove("flex");
|
||||
}
|
||||
}
|
||||
}
|
||||
useEffect(() => {
|
||||
window.onscroll = function () {
|
||||
scrollFunction();
|
||||
};
|
||||
}, []);
|
||||
return (
|
||||
<a
|
||||
href="#"
|
||||
onClick={() => scroll()}
|
||||
id="back-to-top"
|
||||
className="back-to-top flex-col justify-center items-center fixed hidden text-lg rounded-full z-10 bottom-5 right-5 h-9 w-9 text-center bg-indigo-600 text-white leading-9"
|
||||
>
|
||||
<Icon icon="mdi:arrow-up" color="#fff" />
|
||||
</a>
|
||||
);
|
||||
}
|
||||
40
docs/src/components/parts/Cookies.js
Normal file
@ -0,0 +1,40 @@
|
||||
import React, { useEffect } from "react";
|
||||
import { Icon } from "@iconify/react";
|
||||
|
||||
export default function Cookies() {
|
||||
function acceptCookies() {
|
||||
document.cookie = "cookies=accepted; expires=Fri, 31 Dec 9999 23:59:59 GMT";
|
||||
document.getElementById("cookies").style.display = "none";
|
||||
}
|
||||
|
||||
return (
|
||||
<div
|
||||
id="cookies"
|
||||
className="cookie-popup !tracking-wide fixed max-w-lg bottom-3 right-3 left-3 sm:left-0 sm:right-0 mx-auto bg-white dark:bg-slate-900 shadow dark:shadow-gray-800 rounded-md pt-6 pb-2 px-6 z-50"
|
||||
>
|
||||
<p className="text-slate-400">
|
||||
This website uses cookies to provide you with a great user experience.
|
||||
By using it, you accept our{" "}
|
||||
<a
|
||||
href="cookies.html"
|
||||
target="_blank"
|
||||
className="text-emerald-600 dark:text-emerald-500 font-semibold"
|
||||
>
|
||||
use of cookies
|
||||
</a>
|
||||
.
|
||||
</p>
|
||||
<div className="cookie-popup-actions text-right">
|
||||
<button
|
||||
onClick={() => acceptCookies()}
|
||||
className="absolute border-none !bg-transparent p-0 cursor-pointer font-semibold top-2 right-2"
|
||||
>
|
||||
<Icon
|
||||
className="text-dark dark:text-slate-200 text-2xl"
|
||||
icon="humbleicons:times"
|
||||
/>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
32
docs/src/components/parts/Demo.js
Normal file
@ -0,0 +1,32 @@
|
||||
import React from "react";
|
||||
import "animate.css";
|
||||
|
||||
export default function Demo() {
|
||||
return (
|
||||
<section className="relative flex !tracking-wide flex-col items-center overflow-hidden">
|
||||
<div className="!container relative">
|
||||
<div className="flex flex-col content-center items-center justify-start relative md:mt-24 mt-16 text-center">
|
||||
<div className="wow w-[95%] sm:w-[80%] animate__animated relative animate__fadeIn">
|
||||
<div className="flex flex-row items-center bg-gray-200 rounded-t-lg h-6">
|
||||
<div className="align-middle flex flex-col items-center justify-center">
|
||||
<img className="h-4 px-2" src="assets/images/powershell.svg" />
|
||||
</div>
|
||||
</div>
|
||||
<div
|
||||
className="!p-2 relative rounded-b-lg overflow-clip"
|
||||
style={{
|
||||
borderLeft: "2px solid #e5e7eb",
|
||||
borderRight: "2px solid #e5e7eb",
|
||||
borderBottom: "2px solid #e5e7eb",
|
||||
}}
|
||||
>
|
||||
<video className="w-full" poster="assets/images/corso_demo_thumbnail.png" muted loop autoPlay playsInline>
|
||||
<source src="assets/images/corso_demo.mp4" type="video/mp4" />
|
||||
</video>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
);
|
||||
}
|
||||
69
docs/src/components/parts/FourthSection.js
Normal file
@ -0,0 +1,69 @@
|
||||
import React from "react";
|
||||
import "animate.css";
|
||||
import { Icon } from "@iconify/react";
|
||||
|
||||
export default function FourthSection() {
|
||||
return (
|
||||
<section className="relative !tracking-wide md:py-16 py-12 md:pt-0 pt-0">
|
||||
<div className="absolute bottom-0 left-0 !z-0 right-0 sm:h-2/3 h-4/5 bg-gradient-to-b from-indigo-500 to-indigo-600"></div>
|
||||
|
||||
<div className="container !z-50">
|
||||
<div
|
||||
className="grid grid-cols-1 justify-center wow animate__animated animate__fadeInUp"
|
||||
data-wow-delay=".1s"
|
||||
>
|
||||
<div className="relative flex flex-col items-center justify-center z-1">
|
||||
<div className="grid grid-cols-1 md:text-left text-center justify-center">
|
||||
<div className="relative">
|
||||
<img
|
||||
src="assets/images/laptop-macbook.png"
|
||||
className="mx-auto"
|
||||
alt="Laptop image showing Microsoft 365 icons"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<div className="content md:mt-0">
|
||||
<div className="grid lg:grid-cols-12 grid-cols-1 md:text-left text-center justify-center">
|
||||
<div className="lg:col-start-2 lg:col-span-10">
|
||||
<div className="grid md:grid-cols-2 grid-cols-1 items-center">
|
||||
<div className="mt-8">
|
||||
<div className="section-title text-md-start">
|
||||
<h3 className="md:text-3xl text-2xl md:leading-normal leading-normal font-semibold text-white mt-2">
|
||||
Start Protecting Your
|
||||
<br /> Microsoft 365 Data!
|
||||
</h3>
|
||||
<h6 className="text-white/50 text-lg font-semibold">
|
||||
Corso is Free and Open Source
|
||||
</h6>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="mt-8">
|
||||
<div className="section-title text-md-start">
|
||||
<p className="text-white/50 max-w-xl mx-auto mb-2">
|
||||
Follow our quick-start guide to start protecting your
|
||||
business-critical Microsoft 365 data in just a few
|
||||
minutes.
|
||||
</p>
|
||||
<a
|
||||
href="docs/quickstart"
|
||||
className="!text-white !no-underline flex flex-row items-center !hover:text-white"
|
||||
>
|
||||
Get Started{" "}
|
||||
<Icon
|
||||
icon="uim:angle-right-b"
|
||||
className="align-middle"
|
||||
/>
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
);
|
||||
}
|
||||
74
docs/src/components/parts/Hero.js
Normal file
@ -0,0 +1,74 @@
|
||||
import React from "react";
|
||||
import "animate.css";
|
||||
|
||||
export default function Hero() {
|
||||
return (
|
||||
<section className="relative !tracking-wide flex flex-col home-wrapper items-center overflow-hidden">
|
||||
<div
|
||||
className="bg-[#151C3D] absolute"
|
||||
style={{
|
||||
left: "-20rem",
|
||||
right: 0,
|
||||
zIndex: 1,
|
||||
top: "-30%",
|
||||
height: "62rem",
|
||||
width: "140rem",
|
||||
transform: "rotate(-12deg)",
|
||||
}}
|
||||
></div>
|
||||
<div
|
||||
style={{
|
||||
zIndex: "1 !important",
|
||||
}}
|
||||
className="!container relative !z-10"
|
||||
>
|
||||
<div className="grid !z-10 grid-cols-1 mt-28 text-center">
|
||||
<div className="wow !z-10 animate__animated animate__fadeIn">
|
||||
<h4 className="font-bold !text-white !z-10 !leading-normal text-4xl lg:text-5xl mb-5">
|
||||
Free, Secure, and Open-Source
|
||||
<br /> Backup for Microsoft 365
|
||||
</h4>
|
||||
<p className="text-slate-300 !z-10 text-xl max-w-xl mx-auto">
|
||||
The #1 open-source backup tool for Microsoft 365
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className="mt-12 !z-10 mb-6 space-x-4">
|
||||
<a
|
||||
href="../docs/quickstart"
|
||||
className="text-2xl !z-10 !no-underline hover:text-white py-2 px-6 font-bold btn bg-indigo-800 hover:bg-indigo-900 border-indigo-800 hover:border-indigo-900 text-white rounded-md"
|
||||
>
|
||||
Quickstart
|
||||
</a>
|
||||
<a
|
||||
href="../docs/setup/download"
|
||||
className="text-2xl !z-10 !no-underline hover:text-white py-2 px-6 font-bold btn bg-indigo-800 hover:bg-indigo-900 border-indigo-800 hover:border-indigo-900 text-white rounded-md"
|
||||
>
|
||||
Download
|
||||
</a>
|
||||
</div>
|
||||
|
||||
<div
|
||||
className="home-dashboard mt-8 !z-10 wow animate__ animate__fadeIn animated"
|
||||
style={{
|
||||
visibility: "visible",
|
||||
animationName: "fadeIn",
|
||||
}}
|
||||
>
|
||||
<img
|
||||
src="img/cloudbackup.svg"
|
||||
className="w-[70%] inline-block object-contain"
|
||||
alt="Cloud backup and storage"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="bg-indigo-600 w-8 h-16 !z-10 absolute left-8 lg:bottom-28 md:bottom-36 sm:bottom-40 bottom-16"></div>
|
||||
<div className="bg-indigo-600/20 w-8 h-16 !z-10 absolute left-20 lg:bottom-32 md:bottom-40 sm:bottom-44 bottom-20"></div>
|
||||
|
||||
<div className="bg-indigo-600/20 !z-10 w-8 h-16 absolute right-20 xl:bottom-[420px] lg:bottom-[315px] md:bottom-[285px] sm:bottom-80 bottom-32"></div>
|
||||
<div className="bg-indigo-600 w-8 h-16 !z-10 absolute right-8 xl:bottom-[440px] lg:bottom-[335px] md:bottom-[305px] sm:bottom-[340px] bottom-36"></div>
|
||||
</div>
|
||||
</section>
|
||||
);
|
||||
}
|
||||
673
docs/src/components/parts/KeyLoveFAQ.js
Normal file
@ -0,0 +1,673 @@
|
||||
import React, { useEffect } from "react";
|
||||
import feather from "feather-icons";
|
||||
import { WOW } from "wowjs";
|
||||
import { jarallax } from "jarallax";
|
||||
import { Icon } from "@iconify/react";
|
||||
import "animate.css";
|
||||
import "tw-elements";
|
||||
|
||||
export default function KeyLoveFAQ() {
|
||||
useEffect(() => {
|
||||
new WOW().init();
|
||||
feather.replace();
|
||||
jarallax(document.querySelectorAll(".jarallax"), {
|
||||
speed: 0.2,
|
||||
});
|
||||
});
|
||||
|
||||
return (
|
||||
<section className="relative md:py-24 !tracking-wide py-16 overflow-hidden">
|
||||
<div className="container">
|
||||
<div
|
||||
className="grid grid-cols-1 pb-8 text-center wow animate__animated animate__fadeInUp"
|
||||
data-wow-delay=".1s"
|
||||
>
|
||||
<h3 className="mb-6 mt-8 md:text-4xl text-white text-3xl md:leading-normal leading-normal font-bold">
|
||||
Key Features
|
||||
</h3>
|
||||
|
||||
<p className="text-slate-400 max-w-xl mx-auto">
|
||||
See why Corso is a perfect fit for your Microsoft 365 backup and
|
||||
recovery needs.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className="grid lg:grid-cols-3 md:grid-cols-2 grid-cols-1 grid-flow-row-dense gap-[30px] mt-8">
|
||||
<div
|
||||
className="col-start-1 wow animate__animated animate__fadeInUp"
|
||||
data-wow-delay=".1s"
|
||||
>
|
||||
<div className="flex transition-all duration-500 scale-hover shadow dark:shadow-gray-800 hover:shadow-md dark:hover:shadow-gray-700 ease-in-out items-center p-3 rounded-md bg-white dark:bg-slate-900">
|
||||
<div className="flex items-center justify-center h-[45px] min-w-[45px] -rotate-45 bg-gradient-to-r from-transparent to-indigo-600/10 text-indigo-600 text-center rounded-full mr-3">
|
||||
<i data-feather="share-2" className="h-5 w-5 rotate-45"></i>
|
||||
</div>
|
||||
<div className="flex-1">
|
||||
<h4 className="mb-0 text-lg font-bold">
|
||||
Comprehensive Workflows
|
||||
</h4>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
className="col-start-1 wow animate__animated animate__fadeInUp"
|
||||
data-wow-delay=".1s"
|
||||
>
|
||||
<div className="flex transition-all duration-500 scale-hover shadow dark:shadow-gray-800 hover:shadow-md dark:hover:shadow-gray-700 ease-in-out items-center p-3 rounded-md bg-white dark:bg-slate-900">
|
||||
<div className="flex items-center justify-center h-[45px] min-w-[45px] -rotate-45 bg-gradient-to-r from-transparent to-indigo-600/10 text-indigo-600 text-center rounded-full mr-3">
|
||||
<i data-feather="zap" className="h-5 w-5 rotate-45"></i>
|
||||
</div>
|
||||
<div className="flex-1">
|
||||
<h4 className="mb-0 text-lg font-bold">High Throughput</h4>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
className="col-start-1 wow animate__animated animate__fadeInUp"
|
||||
data-wow-delay=".1s"
|
||||
>
|
||||
<div className="flex transition-all duration-500 scale-hover shadow dark:shadow-gray-800 hover:shadow-md dark:hover:shadow-gray-700 ease-in-out items-center p-3 rounded-md bg-white dark:bg-slate-900">
|
||||
<div className="flex items-center justify-center h-[45px] min-w-[45px] -rotate-45 bg-gradient-to-r from-transparent to-indigo-600/10 text-indigo-600 text-center rounded-full mr-3">
|
||||
<i data-feather="activity" className="h-5 w-5 rotate-45"></i>
|
||||
</div>
|
||||
<div className="flex-1">
|
||||
<h4 className="mb-0 text-lg font-bold">Fault Tolerance</h4>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
className="lg:col-start-2 md:col-start-1 wow animate__animated animate__fadeInUp"
|
||||
data-wow-delay=".3s"
|
||||
>
|
||||
<div className="flex transition-all duration-500 scale-hover shadow dark:shadow-gray-800 hover:shadow-md dark:hover:shadow-gray-700 ease-in-out items-center p-3 rounded-md bg-white dark:bg-slate-900">
|
||||
<div className="flex items-center justify-center h-[45px] min-w-[45px] -rotate-45 bg-gradient-to-r from-transparent to-indigo-600/10 text-indigo-600 text-center rounded-full mr-3">
|
||||
<i data-feather="lock" className="h-5 w-5 rotate-45"></i>
|
||||
</div>
|
||||
<div className="flex-1">
|
||||
<h4 className="mb-0 text-lg font-bold">
|
||||
End-to-End Encryption
|
||||
</h4>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
className="lg:col-start-2 md:col-start-2 md:order-last wow animate__animated animate__fadeInUp"
|
||||
data-wow-delay=".3s"
|
||||
>
|
||||
<div className="flex transition-all duration-500 scale-hover shadow dark:shadow-gray-800 hover:shadow-md dark:hover:shadow-gray-700 ease-in-out items-center p-3 rounded-md bg-white dark:bg-slate-900">
|
||||
<div className="flex items-center justify-center h-[45px] min-w-[45px] -rotate-45 bg-gradient-to-r from-transparent to-indigo-600/10 text-indigo-600 text-center rounded-full mr-3">
|
||||
<i data-feather="copy" className="h-5 w-5 rotate-45"></i>
|
||||
</div>
|
||||
<div className="flex-1">
|
||||
<h4 className="mb-0 text-lg font-bold">Deduplication</h4>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
className="lg:col-start-2 md:col-start-2 md:order-last wow animate__animated animate__fadeInUp"
|
||||
data-wow-delay=".3s"
|
||||
>
|
||||
<div className="flex transition-all duration-500 scale-hover shadow dark:shadow-gray-800 hover:shadow-md dark:hover:shadow-gray-700 ease-in-out items-center p-3 rounded-md bg-white dark:bg-slate-900">
|
||||
<div className="flex items-center justify-center h-[45px] min-w-[45px] -rotate-45 bg-gradient-to-r from-transparent to-indigo-600/10 text-indigo-600 text-center rounded-full mr-3">
|
||||
<i data-feather="minimize-2" className="h-5 w-5 rotate-45"></i>
|
||||
</div>
|
||||
<div className="flex-1">
|
||||
<h4 className="mb-0 text-lg font-bold">Compression</h4>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
className="lg:col-start-3 md:col-start-2 wow animate__animated animate__fadeInUp"
|
||||
data-wow-delay=".5s"
|
||||
>
|
||||
<div className="flex transition-all duration-500 scale-hover shadow dark:shadow-gray-800 hover:shadow-md dark:hover:shadow-gray-700 ease-in-out items-center p-3 rounded-md bg-white dark:bg-slate-900">
|
||||
<div className="flex items-center justify-center h-[45px] min-w-[45px] -rotate-45 bg-gradient-to-r from-transparent to-indigo-600/10 text-indigo-600 text-center rounded-full mr-3">
|
||||
<i data-feather="code" className="h-5 w-5 rotate-45"></i>
|
||||
</div>
|
||||
<div className="flex-1">
|
||||
<h4 className="mb-0 text-lg font-bold">Open Source</h4>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
className="lg:col-start-3 md:col-start-2 wow animate__animated animate__fadeInUp"
|
||||
data-wow-delay=".5s"
|
||||
>
|
||||
<div className="flex transition-all duration-500 scale-hover shadow dark:shadow-gray-800 hover:shadow-md dark:hover:shadow-gray-700 ease-in-out items-center p-3 rounded-md bg-white dark:bg-slate-900">
|
||||
<div className="flex items-center justify-center h-[45px] min-w-[45px] -rotate-45 bg-gradient-to-r from-transparent to-indigo-600/10 text-indigo-600 text-center rounded-full mr-3">
|
||||
<i
|
||||
data-feather="upload-cloud"
|
||||
className="h-5 w-5 rotate-45"
|
||||
></i>
|
||||
</div>
|
||||
<div className="flex-1">
|
||||
<h4 className="mb-0 text-lg font-bold">
|
||||
Choice of Object Storage
|
||||
</h4>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
className="lg:col-start-3 md:col-start-2 wow animate__animated animate__fadeInUp"
|
||||
data-wow-delay=".5s"
|
||||
>
|
||||
<div className="flex transition-all duration-500 scale-hover shadow dark:shadow-gray-800 hover:shadow-md dark:hover:shadow-gray-700 ease-in-out items-center p-3 rounded-md bg-white dark:bg-slate-900">
|
||||
<div className="flex items-center justify-center h-[45px] min-w-[45px] -rotate-45 bg-gradient-to-r from-transparent to-indigo-600/10 text-indigo-600 text-center rounded-full mr-3">
|
||||
<i
|
||||
data-feather="check-circle"
|
||||
className="h-5 w-5 rotate-45"
|
||||
></i>
|
||||
</div>
|
||||
<div className="flex-1">
|
||||
<h4 className="mb-0 text-lg font-bold">Retention Policies</h4>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="container md:mt-24 mt-16">
|
||||
<div className="container lg mx-auto">
|
||||
<div className="grid grid-cols-1 pb-2 text-center wow animate__animated animate__fadeInUp">
|
||||
<h3 className="mb-6 mt-8 md:text-4xl text-3xl md:leading-normal leading-normal font-bold">
|
||||
Why Everyone{" "}
|
||||
<span className="after:absolute after:right-0 after:left-0 after:bottom-1 after:lg:h-3 after:h-2 after:w-auto after:rounded-md after:bg-indigo-600/30 relative text-indigo-600">
|
||||
Loves
|
||||
<div className="absolute right-0 left-0 bottom-1 lg:h-3 h-2 w-auto rounded-md bg-indigo-600/30"></div>
|
||||
</span>{" "}
|
||||
Corso
|
||||
</h3>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="grid md:grid-cols-2 grid-cols-1 items-center gap-[30px]">
|
||||
<div
|
||||
className="relative wow animate__animated animate__fadeInLeft"
|
||||
data-wow-delay=".3s"
|
||||
>
|
||||
<img
|
||||
src="/img/why/chat.svg"
|
||||
className="rounded-lg"
|
||||
alt="Group discussion"
|
||||
/>
|
||||
<div className="overflow-hidden absolute lg:h-[400px] h-[320px] lg:w-[400px] w-[320px] bg-indigo-600/5 bottom-0 left-0 rotate-45 -z-1 rounded-3xl"></div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
className="lg:ml-8 wow animate__animated animate__fadeInRight"
|
||||
data-wow-delay=".3s"
|
||||
>
|
||||
<h3 className="mb-4 text-3xl leading-normal font-bold">
|
||||
Community
|
||||
</h3>
|
||||
<p className="text-slate-400">
|
||||
The Corso community provides a venue for M365 admins to share and
|
||||
learn about the importance of data protection as well as best
|
||||
practices around M365 secure configuration and compliance
|
||||
management.
|
||||
</p>
|
||||
<ul className="list-none text-slate-400 mt-4">
|
||||
<li className="mb-1 flex">
|
||||
<Icon
|
||||
className="text-indigo-600 text-xl mr-2"
|
||||
icon="material-symbols:check-circle-outline"
|
||||
/>{" "}
|
||||
Community-led blogs, forums, and discussions
|
||||
</li>
|
||||
<li className="mb-1 flex">
|
||||
<Icon
|
||||
className="text-indigo-600 text-xl mr-2"
|
||||
icon="material-symbols:check-circle-outline"
|
||||
/>{" "}
|
||||
Public and feedback-driven development roadmap{" "}
|
||||
</li>
|
||||
<li className="mb-1 flex">
|
||||
<Icon
|
||||
className="text-indigo-600 text-xl mr-2"
|
||||
icon="material-symbols:check-circle-outline"
|
||||
/>{" "}
|
||||
All community contributions welcome
|
||||
</li>
|
||||
</ul>
|
||||
|
||||
<div className="mt-4">
|
||||
<a
|
||||
href="https://discord.gg/63DTTSnuhT"
|
||||
target="_blank"
|
||||
className="btn btn-link !no-underline link-underline link-underline-black text-indigo-600 hover:text-indigo-600 after:bg-indigo-600 duration-500 ease-in-out"
|
||||
>
|
||||
Join Us On Discord{" "}
|
||||
<Icon icon="uim:angle-right-b" className="align-middle" />
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="container md:mt-24 mt-16">
|
||||
<div className="grid md:grid-cols-2 grid-cols-1 items-center gap-[30px]">
|
||||
<div
|
||||
className="relative order-1 md:order-2 wow animate__animated animate__fadeInRight"
|
||||
data-wow-delay=".5s"
|
||||
>
|
||||
<img
|
||||
src="/img/why/security.svg"
|
||||
className="rounded-lg"
|
||||
alt="Approval of fingerprint security"
|
||||
/>
|
||||
<div className="overflow-hidden absolute lg:h-[400px] h-[320px] lg:w-[400px] w-[320px] bg-indigo-600/5 bottom-0 right-0 rotate-45 -z-1 rounded-3xl"></div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
className="lg:mr-8 order-2 md:order-1 wow animate__animated animate__fadeInLeft"
|
||||
data-wow-delay=".5s"
|
||||
>
|
||||
<h3 className="mb-4 text-3xl leading-normal font-bold">
|
||||
Data Security
|
||||
</h3>
|
||||
<p className="text-slate-400">
|
||||
Corso provides secure data backup that protects customers against
|
||||
accidental data loss, service provider downtime, and malicious
|
||||
threats including ransomware attacks.
|
||||
</p>
|
||||
<ul className="list-none text-slate-400 mt-4">
|
||||
<li className="mb-1 flex">
|
||||
<Icon
|
||||
className="text-indigo-600 text-xl mr-2"
|
||||
icon="material-symbols:check-circle-outline"
|
||||
/>{" "}
|
||||
End-to-end zero-trust AES-256 and TLS encryption
|
||||
</li>
|
||||
<li className="mb-1 flex">
|
||||
<Icon
|
||||
className="text-indigo-600 text-xl mr-2"
|
||||
icon="material-symbols:check-circle-outline"
|
||||
/>{" "}
|
||||
Support for air-gapped backup storage
|
||||
</li>
|
||||
<li className="mb-1 flex">
|
||||
<Icon
|
||||
className="text-indigo-600 text-xl mr-2"
|
||||
icon="material-symbols:check-circle-outline"
|
||||
/>{" "}
|
||||
Choice of backup storage provider and geo location
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="container md:mt-24 mt-16">
|
||||
<div className="grid md:grid-cols-2 grid-cols-1 items-center mt-8 gap-[30px]">
|
||||
<div
|
||||
className="relative wow animate__animated animate__fadeInLeft"
|
||||
data-wow-delay=".5s"
|
||||
>
|
||||
<img
|
||||
src="/img/why/data.svg"
|
||||
className="rounded-lg"
|
||||
alt="Data extraction dashboard"
|
||||
/>
|
||||
<div className="overflow-hidden absolute lg:h-[400px] h-[320px] lg:w-[400px] w-[320px] bg-indigo-600/5 bottom-0 left-0 rotate-45 -z-1 rounded-3xl"></div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
className="lg:ml-8 wow animate__animated animate__fadeInRight"
|
||||
data-wow-delay=".5s"
|
||||
>
|
||||
<h3 className="mb-4 text-3xl leading-normal font-bold">
|
||||
Robust Backups
|
||||
</h3>
|
||||
<p className="text-slate-400">
|
||||
Corso, purpose-built for M365 protection, provides easy-to-use
|
||||
comprehensive backup and restore workflows that reduces backup
|
||||
time, improve time-to-recovery, reduce admin overhead, and replace
|
||||
unreliable scripts or workarounds.
|
||||
</p>
|
||||
<ul className="list-none text-slate-400 mt-4">
|
||||
<li className="mb-1 flex">
|
||||
<Icon
|
||||
className="text-indigo-600 text-xl mr-2"
|
||||
icon="material-symbols:check-circle-outline"
|
||||
/>{" "}
|
||||
Constantly updated M365 Graph Data engine
|
||||
</li>
|
||||
<li className="mb-1 flex">
|
||||
<Icon
|
||||
className="text-indigo-600 text-xl mr-2"
|
||||
icon="material-symbols:check-circle-outline"
|
||||
/>{" "}
|
||||
Purpose-built, flexible, fine-grained data protection workflows
|
||||
</li>
|
||||
<li className="mb-1 flex">
|
||||
<Icon
|
||||
className="text-indigo-600 text-xl mr-2"
|
||||
icon="material-symbols:check-circle-outline"
|
||||
/>{" "}
|
||||
High-performance backup and recovery data movers
|
||||
</li>
|
||||
</ul>
|
||||
|
||||
<div className="mt-4">
|
||||
<a
|
||||
href="docs/quickstart"
|
||||
className="btn btn-link !no-underline link-underline link-underline-black text-indigo-600 hover:text-indigo-600 after:bg-indigo-600 duration-500 ease-in-out"
|
||||
>
|
||||
Use The Quick Start For Your First Backup{" "}
|
||||
<Icon icon="uim:angle-right-b" className="align-middle" />
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="container md:mt-24 mt-16">
|
||||
<div className="grid md:grid-cols-2 grid-cols-1 items-center gap-[30px]">
|
||||
<div
|
||||
className="relative order-1 md:order-2 wow animate__animated animate__fadeInRight"
|
||||
data-wow-delay=".5s"
|
||||
>
|
||||
<img
|
||||
src="/img/why/savings.svg"
|
||||
className="rounded-lg"
|
||||
alt="Adding money to a savings jar"
|
||||
/>
|
||||
<div className="overflow-hidden absolute lg:h-[400px] h-[320px] lg:w-[400px] w-[320px] bg-indigo-600/5 bottom-0 right-0 rotate-45 -z-1 rounded-3xl"></div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
className="lg:mr-8 order-2 md:order-1 wow animate__animated animate__fadeInLeft"
|
||||
data-wow-delay=".5s"
|
||||
>
|
||||
<h3 className="mb-4 text-3xl leading-normal font-bold">
|
||||
Cost Savings
|
||||
</h3>
|
||||
<p className="text-slate-400">
|
||||
Corso, a 100% open-source tool, provides a free alternative for
|
||||
cost-conscious teams. It further reduces storage costs by
|
||||
supporting flexible retention policies and efficiently compressing
|
||||
and deduplicating data before storing it in low-cost cloud object
|
||||
storage.
|
||||
</p>
|
||||
<ul className="list-none text-slate-400 mt-4">
|
||||
<li className="mb-1 flex">
|
||||
<Icon
|
||||
className="text-indigo-600 text-xl mr-2"
|
||||
icon="material-symbols:check-circle-outline"
|
||||
/>{" "}
|
||||
Free forever OSS with no licensing costs
|
||||
</li>
|
||||
<li className="mb-1 flex">
|
||||
<Icon
|
||||
className="text-indigo-600 text-xl mr-2"
|
||||
icon="material-symbols:check-circle-outline"
|
||||
/>{" "}
|
||||
Client-side compression and deduplication
|
||||
</li>
|
||||
<li className="mb-1 flex">
|
||||
<Icon
|
||||
className="text-indigo-600 text-xl mr-2"
|
||||
icon="material-symbols:check-circle-outline"
|
||||
/>{" "}
|
||||
Support for S3-compliant storage including AWS Glacier IA
|
||||
</li>
|
||||
</ul>
|
||||
|
||||
<div className="mt-4">
|
||||
<a
|
||||
href="docs/setup/repos"
|
||||
className="btn btn-link !no-underline link-underline link-underline-black text-indigo-600 hover:text-indigo-600 after:bg-indigo-600 duration-500 ease-in-out"
|
||||
>
|
||||
Read about our Object Storage support{" "}
|
||||
<Icon icon="uim:angle-right-b" className="align-middle" />
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Accordions */}
|
||||
|
||||
<div className="container md:mb-8 mb-4 md:mt-24 mt-16 wow animate__animated animate__fadeInUp">
|
||||
<div className="grid grid-cols-1 pb-8 text-center">
|
||||
<h3 className="mb-6 mt-8 text-white md:text-4xl text-3xl md:leading-normal leading-normal font-bold">
|
||||
Frequently Asked Questions
|
||||
</h3>
|
||||
</div>
|
||||
|
||||
<div className="relative grid md:grid-cols-12 grid-cols-1 items-center gap-[30px]">
|
||||
<div className="md:col-span-6">
|
||||
<div className="relative">
|
||||
<div className="relative rounded-xl overflow-hidden shadow-md dark:shadow-gray-800">
|
||||
<div
|
||||
className="w-full jarallax py-72 bg-slate-400 custom-bg_ bg-no-repeat bg-top"
|
||||
data-jarallax='{"speed": 0.1}'
|
||||
></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="md:col-span-6">
|
||||
<div className="accordion space-y-3" id="accordionExample">
|
||||
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
|
||||
<h2
|
||||
className="accordion-header mb-0 !cursor-pointer font-semibold"
|
||||
id="headingOne"
|
||||
>
|
||||
<button
|
||||
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
|
||||
type="button"
|
||||
data-bs-toggle="collapse"
|
||||
data-bs-target="#collapseOne"
|
||||
aria-expanded="false"
|
||||
aria-controls="collapseOne"
|
||||
>
|
||||
<span>What platforms does Corso run on?</span>
|
||||
</button>
|
||||
</h2>
|
||||
<div
|
||||
id="collapseOne"
|
||||
className="accordion-collapse collapse"
|
||||
aria-labelledby="headingOne"
|
||||
data-bs-parent="#accordionExample"
|
||||
>
|
||||
<div className="accordion-body p-5">
|
||||
<p className="text-slate-400 !visible dark:text-gray-400">
|
||||
Corso has both native binaries and container images for
|
||||
Windows, Linux, and macOS.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
|
||||
<h2
|
||||
className="accordion-header mb-0 !cursor-pointer font-semibold"
|
||||
id="heading2"
|
||||
>
|
||||
<button
|
||||
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
|
||||
type="button"
|
||||
data-bs-toggle="collapse"
|
||||
data-bs-target="#collapse2"
|
||||
aria-expanded="false"
|
||||
aria-controls="collapse2"
|
||||
>
|
||||
<span>
|
||||
What Microsoft 365 services can I backup using Corso?
|
||||
</span>
|
||||
</button>
|
||||
</h2>
|
||||
<div
|
||||
id="collapse2"
|
||||
className="accordion-collapse collapse"
|
||||
aria-labelledby="heading2"
|
||||
data-bs-parent="#accordionExample"
|
||||
>
|
||||
<div className="accordion-body p-5">
|
||||
<p className="text-slate-400 !visible dark:text-gray-400">
|
||||
Corso currently supports OneDrive and Exchange. Support
|
||||
for Teams and SharePoint is in active development and is
|
||||
therefore not recommended for production use.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
|
||||
<h2
|
||||
className="accordion-header mb-0 !cursor-pointer font-semibold"
|
||||
id="heading3"
|
||||
>
|
||||
<button
|
||||
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
|
||||
type="button"
|
||||
data-bs-toggle="collapse"
|
||||
data-bs-target="#collapse3"
|
||||
aria-expanded="false"
|
||||
aria-controls="collapse3"
|
||||
>
|
||||
<span>What object storage does Corso support?</span>
|
||||
</button>
|
||||
</h2>
|
||||
<div
|
||||
id="collapse3"
|
||||
className="accordion-collapse collapse"
|
||||
aria-labelledby="heading3"
|
||||
data-bs-parent="#accordionExample"
|
||||
>
|
||||
<div className="accordion-body p-5">
|
||||
<p className="text-slate-400 dark:text-gray-400 !visible">
|
||||
Corso supports any S3-compliant object storage system
|
||||
including AWS S3 (including Glacier Instant Access),
|
||||
Google Cloud Storage, and Backblaze. Azure Blob support is
|
||||
coming soon.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
|
||||
<h2
|
||||
className="accordion-header mb-0 font-semibold"
|
||||
id="heading4"
|
||||
>
|
||||
<button
|
||||
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
|
||||
type="button"
|
||||
data-bs-toggle="collapse"
|
||||
data-bs-target="#collapse4"
|
||||
aria-expanded="false"
|
||||
aria-controls="collapse4"
|
||||
>
|
||||
<span>How can I get help for Corso?</span>
|
||||
</button>
|
||||
</h2>
|
||||
<div
|
||||
id="collapse4"
|
||||
className="accordion-collapse collapse"
|
||||
aria-labelledby="heading4"
|
||||
data-bs-parent="#accordionExample"
|
||||
>
|
||||
<div className="accordion-body p-5">
|
||||
<p className="text-slate-400 dark:text-gray-400 !visible">
|
||||
If you are unable to find an answer in our documentation,
|
||||
please file{" "}
|
||||
<a
|
||||
href="https://github.com/alcionai/corso/issues"
|
||||
className="text-indigo-600"
|
||||
target="_blank"
|
||||
>
|
||||
GitHub issues
|
||||
</a>{" "}
|
||||
for bugs or join the{" "}
|
||||
<a
|
||||
href="https://discord.gg/63DTTSnuhT"
|
||||
className="text-indigo-600"
|
||||
target="_blank"
|
||||
>
|
||||
Discord community
|
||||
</a>
|
||||
.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
|
||||
<h2
|
||||
className="accordion-header mb-0 !cursor-pointer font-semibold"
|
||||
id="heading5"
|
||||
>
|
||||
<button
|
||||
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
|
||||
type="button"
|
||||
data-bs-toggle="collapse"
|
||||
data-bs-target="#collapse5"
|
||||
aria-expanded="false"
|
||||
aria-controls="collapse5"
|
||||
>
|
||||
<span>What is Corso's open-source license?</span>
|
||||
</button>
|
||||
</h2>
|
||||
<div
|
||||
id="collapse5"
|
||||
className="accordion-collapse collapse"
|
||||
aria-labelledby="heading5"
|
||||
data-bs-parent="#accordionExample"
|
||||
>
|
||||
<div className="accordion-body p-5">
|
||||
<p className="text-slate-400 dark:text-gray-400 !visible">
|
||||
Corso's source code is licensed under the OSI-approved
|
||||
Apache v2 open-source license.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
|
||||
<h2
|
||||
className="accordion-header mb-0 !cursor-pointer font-semibold"
|
||||
id="heading6"
|
||||
>
|
||||
<button
|
||||
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
|
||||
type="button"
|
||||
data-bs-toggle="collapse"
|
||||
data-bs-target="#collapse6"
|
||||
aria-expanded="false"
|
||||
aria-controls="collapse6"
|
||||
>
|
||||
<span>How do I request a new feature?</span>
|
||||
</button>
|
||||
</h2>
|
||||
<div
|
||||
id="collapse6"
|
||||
className="accordion-collapse collapse"
|
||||
aria-labelledby="heading6"
|
||||
data-bs-parent="#accordionExample"
|
||||
>
|
||||
<div className="accordion-body p-5">
|
||||
<p className="text-slate-400 dark:text-gray-400 !visible">
|
||||
You can request new features by creating a{" "}
|
||||
<a
|
||||
href="https://github.com/alcionai/corso/issues"
|
||||
className="text-indigo-600"
|
||||
target="_blank"
|
||||
>
|
||||
new GitHub issue
|
||||
</a>{" "}
|
||||
and labeling it as an enhancement.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
);
|
||||
}
|
||||
23
docs/src/components/parts/MainComp.js
Normal file
@ -0,0 +1,23 @@
|
||||
import React from "react";
|
||||
import "animate.css";
|
||||
import loadable from "@loadable/component";
|
||||
import Hero from "./Hero";
|
||||
import Demo from "./Demo";
|
||||
import FourthSection from "./FourthSection";
|
||||
import Cookies from "./Cookies";
|
||||
|
||||
const KeyLoveFAQComp = loadable(() => import("./KeyLoveFAQ"));
|
||||
const BackToTopComp = loadable(() => import("./BackToTop"));
|
||||
|
||||
export function MainComp() {
|
||||
return (
|
||||
<>
|
||||
<Hero />
|
||||
<Demo />
|
||||
<KeyLoveFAQComp />
|
||||
<FourthSection />
|
||||
<BackToTopComp />
|
||||
<Cookies />
|
||||
</>
|
||||
);
|
||||
}
|
||||
@ -1,9 +1,71 @@
|
||||
/**
|
||||
* Any CSS included here will be global. The classic template
|
||||
* bundles Infima by default. Infima is a CSS framework designed to
|
||||
* work well for content-centric websites.
|
||||
CUSTOM TO THE NEW HOME PAGE
|
||||
*/
|
||||
|
||||
@import url('https://fonts.googleapis.com/css2?family=Nunito&display=swap');
|
||||
|
||||
*{
|
||||
font-family: 'Nunito', sans-serif !important;
|
||||
}
|
||||
|
||||
html{
|
||||
scroll-behavior: smooth !important;
|
||||
}
|
||||
.accordion-button-custom::after {
|
||||
-ms-flex-shrink: 0;
|
||||
flex-shrink: 0;
|
||||
width: 1.25rem;
|
||||
height: 1.25rem;
|
||||
margin-left: auto;
|
||||
content: "";
|
||||
background-image: url("../../static/assets/download.svg");
|
||||
background-repeat: no-repeat;
|
||||
background-size: 1.25rem;
|
||||
transition: transform 0.2s ease-in-out;
|
||||
}
|
||||
|
||||
.accordion-button-custom:not(.collapsed)::after {
|
||||
background-image: url("../../static/assets/download_blue.svg");
|
||||
transform: rotate(-180deg);
|
||||
}
|
||||
.accordion-button-custom:not(.collapsed) {
|
||||
color: #2563eb;
|
||||
background-color: #fff;
|
||||
box-shadow: inset 0 -1px 0 #e5e7eb;
|
||||
}
|
||||
|
||||
.link-underline {
|
||||
border-bottom-width: 0;
|
||||
background-image: linear-gradient(transparent, transparent), linear-gradient(#4f46E5, #4f46E5);
|
||||
background-size: 0 2px;
|
||||
background-position: 0 100%;
|
||||
background-repeat: no-repeat;
|
||||
transition: background-size .5s ease-in-out;
|
||||
}
|
||||
|
||||
.link-underline-black {
|
||||
background-image: linear-gradient(transparent, transparent), linear-gradient(#4f46E5, #4f46E5);
|
||||
}
|
||||
|
||||
.link-underline:hover {
|
||||
background-size: 100% 2px;
|
||||
background-position: 0 100%
|
||||
}
|
||||
|
||||
|
||||
@tailwind utilities;
|
||||
@tailwind components;
|
||||
|
||||
@layer utilities {
|
||||
.scale-hover:hover {
|
||||
transform: scale(1.05);
|
||||
}
|
||||
}
|
||||
|
||||
.custom-bg_{
|
||||
background: url("../../static/assets/images/cta.jpg")
|
||||
}
|
||||
|
||||
/* You can override the default Infima variables here. */
|
||||
:root {
|
||||
--ifm-color-primary: #1e204e;
|
||||
|
||||
@ -1,40 +1,14 @@
|
||||
import React from 'react';
|
||||
import clsx from 'clsx';
|
||||
import Layout from '@theme/Layout';
|
||||
import Link from '@docusaurus/Link';
|
||||
import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
|
||||
import styles from './index.module.css';
|
||||
import HomepageFeatures from '@site/src/components/HomepageFeatures';
|
||||
|
||||
function HomepageHeader() {
|
||||
const {siteConfig} = useDocusaurusContext();
|
||||
return (
|
||||
<header className={clsx('hero hero--primary', styles.heroBanner)}>
|
||||
<div className="container">
|
||||
<h1 className="hero__title">{siteConfig.title}</h1>
|
||||
<p className="hero__subtitle">{siteConfig.tagline}</p>
|
||||
<div className={styles.buttons}>
|
||||
<Link
|
||||
className="button button--secondary button--lg"
|
||||
to="/docs/quickstart">
|
||||
Corso Quickstart
|
||||
</Link>
|
||||
</div>
|
||||
</div>
|
||||
</header>
|
||||
);
|
||||
}
|
||||
import React from "react";
|
||||
import Layout from "@theme/Layout";
|
||||
import { MainComp } from "@site/src/components/parts/MainComp";
|
||||
|
||||
export default function Home() {
|
||||
const {siteConfig} = useDocusaurusContext();
|
||||
return (
|
||||
<Layout
|
||||
title={`${siteConfig.title}`}
|
||||
description="Documentation for Corso, a free, secure, and open-source backup tool for Microsoft 365">
|
||||
<HomepageHeader />
|
||||
<main>
|
||||
<HomepageFeatures />
|
||||
</main>
|
||||
title="Free, Secure, and Open-Source Backup for Microsoft 365"
|
||||
description="Intro, docs, and blog for Corso, an open-source tool, that protects Microsoft 365 data by securely and efficiently backing up all business-critical data to object storage."
|
||||
>
|
||||
<MainComp />
|
||||
</Layout>
|
||||
);
|
||||
}
|
||||
|
||||
@ -1,7 +0,0 @@
|
||||
---
|
||||
title: Markdown page example
|
||||
---
|
||||
|
||||
# Markdown page example
|
||||
|
||||
You don't need React to write simple standalone pages.
|
||||
1
docs/static/assets/download.svg
vendored
Normal file
@ -0,0 +1 @@
|
||||
<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='#fff'><path fill-rule='evenodd' d='M1.646 4.646a.5.5 0 0 1 .708 0L8 10.293l5.646-5.647a.5.5 0 0 1 .708.708l-6 6a.5.5 0 0 1-.708 0l-6-6a.5.5 0 0 1 0-.708z'/></svg>
|
||||
|
After Width: | Height: | Size: 228 B |
1
docs/static/assets/download_blue.svg
vendored
Normal file
@ -0,0 +1 @@
|
||||
<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='#2563eb'><path fill-rule='evenodd' d='M1.646 4.646a.5.5 0 0 1 .708 0L8 10.293l5.646-5.647a.5.5 0 0 1 .708.708l-6 6a.5.5 0 0 1-.708 0l-6-6a.5.5 0 0 1 0-.708z'/></svg>
|
||||
|
After Width: | Height: | Size: 231 B |
BIN
docs/static/assets/images/corso_demo.mp4
vendored
Normal file
BIN
docs/static/assets/images/corso_demo_thumbnail.png
vendored
Normal file
|
After Width: | Height: | Size: 335 KiB |
BIN
docs/static/assets/images/cta.jpg
vendored
Normal file
|
After Width: | Height: | Size: 80 KiB |
BIN
docs/static/assets/images/laptop-macbook.png
vendored
Normal file
|
After Width: | Height: | Size: 419 KiB |
BIN
docs/static/assets/images/laptop-surface.png
vendored
Normal file
|
After Width: | Height: | Size: 415 KiB |
29
docs/static/assets/images/powershell.svg
vendored
Normal file
@ -0,0 +1,29 @@
|
||||
<?xml version="1.0" encoding="iso-8859-1"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" id="PowerShell" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
width="204.691px" height="154.521px" viewBox="0 0 204.691 154.521" style="enable-background:new 0 0 204.691 154.521;"
|
||||
xml:space="preserve">
|
||||
<g>
|
||||
<path style="display:none;fill-rule:evenodd;clip-rule:evenodd;fill:#2671BE;" d="M-47.547,226.872
|
||||
c0-97.129,0.094-194.259-0.195-291.387c-0.021-6.982,1.404-8.411,8.388-8.389c94.397,0.292,188.798,0.292,283.195,0
|
||||
c6.984-0.022,8.41,1.407,8.389,8.389c-0.289,97.128-0.195,194.258-0.195,291.387c-3.238,2.008-6.837,1.129-10.268,1.131
|
||||
c-93.015,0.049-186.031,0.049-279.047,0C-40.711,228.001-44.31,228.88-47.547,226.872z"/>
|
||||
<path style="fill-rule:evenodd;clip-rule:evenodd;fill:#E0EAF5;" d="M120.14,0.032c23.011-0.008,46.023-0.078,69.034,0.019
|
||||
c13.68,0.056,17.537,4.627,14.588,18.137c-8.636,39.566-17.466,79.092-26.415,118.589c-2.83,12.484-9.332,17.598-22.465,17.637
|
||||
c-46.023,0.137-92.046,0.152-138.068-0.006c-15.043-0.053-19-5.148-15.759-19.404C9.849,96.287,18.69,57.582,27.602,18.892
|
||||
C30.997,4.148,36.099,0.1,51.104,0.057C74.116-0.008,97.128,0.04,120.14,0.032z"/>
|
||||
<path style="fill-rule:evenodd;clip-rule:evenodd;fill:#2671BE;" d="M85.365,149.813c-23.014-0.008-46.029,0.098-69.042-0.053
|
||||
c-11.67-0.076-13.792-2.83-11.165-14.244c8.906-38.71,18.099-77.355,26.807-116.109C34.3,9.013,39.337,4.419,50.473,4.522
|
||||
c46.024,0.427,92.056,0.137,138.083,0.184c11.543,0.011,13.481,2.48,10.89,14.187c-8.413,38.007-16.879,76.003-25.494,113.965
|
||||
c-3.224,14.207-6.938,16.918-21.885,16.951C129.833,149.856,107.598,149.821,85.365,149.813z"/>
|
||||
<path style="fill-rule:evenodd;clip-rule:evenodd;fill:#FDFDFE;" d="M104.948,73.951c-1.543-1.81-3.237-3.894-5.031-5.886
|
||||
c-10.173-11.3-20.256-22.684-30.61-33.815c-4.738-5.094-6.248-10.041-0.558-15.069c5.623-4.97,11.148-4.53,16.306,1.188
|
||||
c14.365,15.919,28.713,31.856,43.316,47.556c5.452,5.864,4.182,9.851-1.823,14.196c-23.049,16.683-45.968,33.547-68.862,50.443
|
||||
c-5.146,3.799-10.052,4.75-14.209-0.861c-4.586-6.189-0.343-9.871,4.414-13.335c17.013-12.392,33.993-24.83,50.9-37.366
|
||||
C101.146,79.256,104.527,78.238,104.948,73.951z"/>
|
||||
<path style="fill-rule:evenodd;clip-rule:evenodd;fill:#FCFDFD;" d="M112.235,133.819c-6.196,0-12.401,0.213-18.583-0.068
|
||||
c-4.932-0.223-7.9-2.979-7.838-8.174c0.06-4.912,2.536-8.605,7.463-8.738c13.542-0.363,27.104-0.285,40.651-0.02
|
||||
c4.305,0.084,7.483,2.889,7.457,7.375c-0.031,5.146-2.739,9.133-8.25,9.465c-6.944,0.42-13.931,0.104-20.899,0.104
|
||||
C112.235,133.78,112.235,133.8,112.235,133.819z"/>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 2.6 KiB |
BIN
docs/static/img/blog/office_desk.jpg
vendored
Normal file
|
After Width: | Height: | Size: 252 KiB |
1
docs/static/img/cloudbackup.svg
vendored
Normal file
|
After Width: | Height: | Size: 138 KiB |
BIN
docs/static/img/favicon.ico
vendored
|
Before Width: | Height: | Size: 3.5 KiB |
1
docs/static/img/why/chat.svg
vendored
Normal file
|
After Width: | Height: | Size: 24 KiB |
1
docs/static/img/why/data.svg
vendored
Normal file
|
After Width: | Height: | Size: 38 KiB |
1
docs/static/img/why/savings.svg
vendored
Normal file
|
After Width: | Height: | Size: 115 KiB |
1
docs/static/img/why/security.svg
vendored
Normal file
|
After Width: | Height: | Size: 24 KiB |
2
docs/static/robots.txt
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
User-agent: *
|
||||
Disallow:
|
||||
65
docs/tailwind.config.js
Normal file
@ -0,0 +1,65 @@
|
||||
/** @type {import('tailwindcss').Config} */
|
||||
module.exports = {
|
||||
content: ["./src/**/*.{js,jsx,ts,tsx}"],
|
||||
darkMode: ['class', '[data-theme="dark"]'],
|
||||
theme: {
|
||||
screens: {
|
||||
xs: "540px",
|
||||
sm: "640px",
|
||||
md: "768px",
|
||||
lg: "1024px",
|
||||
xl: "1280px",
|
||||
"2xl": "1536px",
|
||||
},
|
||||
fontFamily: {
|
||||
nunito: ['"Nunito", sans-serif'],
|
||||
},
|
||||
container: {
|
||||
center: true,
|
||||
padding: {
|
||||
DEFAULT: "12px",
|
||||
sm: "1rem",
|
||||
lg: "45px",
|
||||
xl: "5rem",
|
||||
"2xl": "13rem",
|
||||
},
|
||||
},
|
||||
extend: {
|
||||
colors: {
|
||||
dark: "#3c4858",
|
||||
black: "#161c2d",
|
||||
"dark-footer": "#192132",
|
||||
},
|
||||
|
||||
boxShadow: {
|
||||
sm: "0 2px 4px 0 rgb(60 72 88 / 0.15)",
|
||||
DEFAULT: "0 0 3px rgb(60 72 88 / 0.15)",
|
||||
md: "0 5px 13px rgb(60 72 88 / 0.20)",
|
||||
lg: "0 10px 25px -3px rgb(60 72 88 / 0.15)",
|
||||
xl: "0 20px 25px -5px rgb(60 72 88 / 0.1), 0 8px 10px -6px rgb(60 72 88 / 0.1)",
|
||||
"2xl": "0 25px 50px -12px rgb(60 72 88 / 0.25)",
|
||||
inner: "inset 0 2px 4px 0 rgb(60 72 88 / 0.05)",
|
||||
testi: "2px 2px 2px -1px rgb(60 72 88 / 0.15)",
|
||||
},
|
||||
|
||||
spacing: {
|
||||
0.75: "0.1875rem",
|
||||
3.25: "0.8125rem",
|
||||
},
|
||||
|
||||
maxWidth: ({ theme, breakpoints }) => ({
|
||||
1200: "71.25rem",
|
||||
992: "60rem",
|
||||
768: "45rem",
|
||||
}),
|
||||
|
||||
zIndex: {
|
||||
1: "1",
|
||||
2: "2",
|
||||
3: "3",
|
||||
999: "999",
|
||||
},
|
||||
},
|
||||
},
|
||||
plugins: [require("tw-elements/dist/plugin")],
|
||||
};
|
||||
@ -1,5 +1,5 @@
|
||||
run:
|
||||
timeout: 15m
|
||||
timeout: 20m
|
||||
|
||||
linters:
|
||||
enable:
|
||||
@ -13,6 +13,11 @@ linters:
|
||||
- revive
|
||||
- wsl
|
||||
|
||||
disable:
|
||||
# Consumes a large amount of memory when running with Graph SDK in the
|
||||
# project causing OOM failures in Github actions.
|
||||
- staticcheck
|
||||
|
||||
linters-settings:
|
||||
gci:
|
||||
sections:
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
# This must match the version defined in .github/workflows/lint.yaml.
|
||||
WANTED_LINT_VERSION := 1.45.2
|
||||
WANTED_LINT_VERSION := 1.50.1
|
||||
LINT_VERSION := $(shell golangci-lint version | cut -d' ' -f4)
|
||||
HAS_LINT := $(shell which golangci-lint)
|
||||
|
||||
|
||||
@ -14,6 +14,7 @@ var subCommandFuncs = []func() *cobra.Command{
|
||||
var serviceCommands = []func(parent *cobra.Command) *cobra.Command{
|
||||
addExchangeCommands,
|
||||
addOneDriveCommands,
|
||||
addSharePointCommands,
|
||||
}
|
||||
|
||||
// AddCommands attaches all `corso backup * *` commands to the parent.
|
||||
|
||||
@ -3,6 +3,7 @@ package backup
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/hashicorp/go-multierror"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/pflag"
|
||||
@ -19,6 +20,7 @@ import (
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/repository"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365"
|
||||
"github.com/alcionai/corso/src/pkg/store"
|
||||
)
|
||||
|
||||
@ -270,27 +272,62 @@ func createExchangeCmd(cmd *cobra.Command, args []string) error {
|
||||
|
||||
sel := exchangeBackupCreateSelectors(user, exchangeData)
|
||||
|
||||
bo, err := r.NewBackup(ctx, sel)
|
||||
users, err := m365.UserIDs(ctx, acct)
|
||||
if err != nil {
|
||||
return Only(ctx, errors.Wrap(err, "Failed to initialize Exchange backup"))
|
||||
return Only(ctx, errors.Wrap(err, "Failed to retrieve M365 users"))
|
||||
}
|
||||
|
||||
var (
|
||||
errs *multierror.Error
|
||||
bIDs []model.StableID
|
||||
)
|
||||
|
||||
for _, scope := range sel.DiscreteScopes(users) {
|
||||
for _, selUser := range scope.Get(selectors.ExchangeUser) {
|
||||
opSel := selectors.NewExchangeBackup()
|
||||
opSel.Include([]selectors.ExchangeScope{scope.DiscreteCopy(selUser)})
|
||||
|
||||
bo, err := r.NewBackup(ctx, opSel.Selector)
|
||||
if err != nil {
|
||||
errs = multierror.Append(errs, errors.Wrapf(
|
||||
err,
|
||||
"Failed to initialize Exchange backup for user %s",
|
||||
scope.Get(selectors.ExchangeUser),
|
||||
))
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
err = bo.Run(ctx)
|
||||
if err != nil {
|
||||
return Only(ctx, errors.Wrap(err, "Failed to run Exchange backup"))
|
||||
errs = multierror.Append(errs, errors.Wrapf(
|
||||
err,
|
||||
"Failed to run Exchange backup for user %s",
|
||||
scope.Get(selectors.ExchangeUser),
|
||||
))
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
bu, err := r.Backup(ctx, bo.Results.BackupID)
|
||||
bIDs = append(bIDs, bo.Results.BackupID)
|
||||
}
|
||||
}
|
||||
|
||||
bups, err := r.Backups(ctx, bIDs)
|
||||
if err != nil {
|
||||
return Only(ctx, errors.Wrap(err, "Unable to retrieve backup results from storage"))
|
||||
}
|
||||
|
||||
bu.Print(ctx)
|
||||
backup.PrintAll(ctx, bups)
|
||||
|
||||
if e := errs.ErrorOrNil(); e != nil {
|
||||
return Only(ctx, e)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func exchangeBackupCreateSelectors(userIDs, data []string) selectors.Selector {
|
||||
func exchangeBackupCreateSelectors(userIDs, data []string) *selectors.ExchangeBackup {
|
||||
sel := selectors.NewExchangeBackup()
|
||||
|
||||
if len(data) == 0 {
|
||||
@ -310,7 +347,7 @@ func exchangeBackupCreateSelectors(userIDs, data []string) selectors.Selector {
|
||||
}
|
||||
}
|
||||
|
||||
return sel.Selector
|
||||
return sel
|
||||
}
|
||||
|
||||
func validateExchangeBackupCreateFlags(userIDs, data []string) error {
|
||||
@ -373,7 +410,7 @@ func listExchangeCmd(cmd *cobra.Command, args []string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
bs, err := r.Backups(ctx, store.Service(path.ExchangeService))
|
||||
bs, err := r.BackupsByTag(ctx, store.Service(path.ExchangeService))
|
||||
if err != nil {
|
||||
return Only(ctx, errors.Wrap(err, "Failed to list backups in the repository"))
|
||||
}
|
||||
@ -453,8 +490,7 @@ func detailsExchangeCmd(cmd *cobra.Command, args []string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// runDetailsExchangeCmd actually performs the lookup in backup details. Assumes
|
||||
// len(backupID) > 0.
|
||||
// runDetailsExchangeCmd actually performs the lookup in backup details.
|
||||
func runDetailsExchangeCmd(
|
||||
ctx context.Context,
|
||||
r repository.BackupGetter,
|
||||
|
||||
@ -3,6 +3,7 @@ package backup
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/hashicorp/go-multierror"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/pflag"
|
||||
@ -18,6 +19,7 @@ import (
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/repository"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365"
|
||||
"github.com/alcionai/corso/src/pkg/store"
|
||||
)
|
||||
|
||||
@ -90,7 +92,7 @@ func addOneDriveCommands(parent *cobra.Command) *cobra.Command {
|
||||
c, fs = utils.AddCommand(parent, oneDriveListCmd())
|
||||
|
||||
fs.StringVar(&backupID,
|
||||
"backup", "",
|
||||
utils.BackupFN, "",
|
||||
"ID of the backup to retrieve.")
|
||||
|
||||
case detailsCommand:
|
||||
@ -192,22 +194,57 @@ func createOneDriveCmd(cmd *cobra.Command, args []string) error {
|
||||
|
||||
sel := oneDriveBackupCreateSelectors(user)
|
||||
|
||||
bo, err := r.NewBackup(ctx, sel)
|
||||
users, err := m365.UserIDs(ctx, acct)
|
||||
if err != nil {
|
||||
return Only(ctx, errors.Wrap(err, "Failed to initialize OneDrive backup"))
|
||||
return Only(ctx, errors.Wrap(err, "Failed to retrieve M365 users"))
|
||||
}
|
||||
|
||||
var (
|
||||
errs *multierror.Error
|
||||
bIDs []model.StableID
|
||||
)
|
||||
|
||||
for _, scope := range sel.DiscreteScopes(users) {
|
||||
for _, selUser := range scope.Get(selectors.OneDriveUser) {
|
||||
opSel := selectors.NewOneDriveBackup()
|
||||
opSel.Include([]selectors.OneDriveScope{scope.DiscreteCopy(selUser)})
|
||||
|
||||
bo, err := r.NewBackup(ctx, opSel.Selector)
|
||||
if err != nil {
|
||||
errs = multierror.Append(errs, errors.Wrapf(
|
||||
err,
|
||||
"Failed to initialize OneDrive backup for user %s",
|
||||
scope.Get(selectors.OneDriveUser),
|
||||
))
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
err = bo.Run(ctx)
|
||||
if err != nil {
|
||||
return Only(ctx, errors.Wrap(err, "Failed to run OneDrive backup"))
|
||||
errs = multierror.Append(errs, errors.Wrapf(
|
||||
err,
|
||||
"Failed to run OneDrive backup for user %s",
|
||||
scope.Get(selectors.OneDriveUser),
|
||||
))
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
bu, err := r.Backup(ctx, bo.Results.BackupID)
|
||||
bIDs = append(bIDs, bo.Results.BackupID)
|
||||
}
|
||||
}
|
||||
|
||||
bups, err := r.Backups(ctx, bIDs)
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "Unable to retrieve backup results from storage")
|
||||
return Only(ctx, errors.Wrap(err, "Unable to retrieve backup results from storage"))
|
||||
}
|
||||
|
||||
bu.Print(ctx)
|
||||
backup.PrintAll(ctx, bups)
|
||||
|
||||
if e := errs.ErrorOrNil(); e != nil {
|
||||
return Only(ctx, e)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@ -220,11 +257,11 @@ func validateOneDriveBackupCreateFlags(users []string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func oneDriveBackupCreateSelectors(users []string) selectors.Selector {
|
||||
func oneDriveBackupCreateSelectors(users []string) *selectors.OneDriveBackup {
|
||||
sel := selectors.NewOneDriveBackup()
|
||||
sel.Include(sel.Users(users))
|
||||
|
||||
return sel.Selector
|
||||
return sel
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
@ -272,7 +309,7 @@ func listOneDriveCmd(cmd *cobra.Command, args []string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
bs, err := r.Backups(ctx, store.Service(path.OneDriveService))
|
||||
bs, err := r.BackupsByTag(ctx, store.Service(path.OneDriveService))
|
||||
if err != nil {
|
||||
return Only(ctx, errors.Wrap(err, "Failed to list backups in the repository"))
|
||||
}
|
||||
@ -344,8 +381,7 @@ func detailsOneDriveCmd(cmd *cobra.Command, args []string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// runDetailsOneDriveCmd actually performs the lookup in backup details. Assumes
|
||||
// len(backupID) > 0.
|
||||
// runDetailsOneDriveCmd actually performs the lookup in backup details.
|
||||
func runDetailsOneDriveCmd(
|
||||
ctx context.Context,
|
||||
r repository.BackupGetter,
|
||||
@ -388,7 +424,7 @@ func oneDriveDeleteCmd() *cobra.Command {
|
||||
}
|
||||
}
|
||||
|
||||
// deletes an exchange service backup.
|
||||
// deletes a oneDrive service backup.
|
||||
func deleteOneDriveCmd(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
|
||||
443
src/cli/backup/sharepoint.go
Normal file
@ -0,0 +1,443 @@
|
||||
package backup
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/hashicorp/go-multierror"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/pflag"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/config"
|
||||
"github.com/alcionai/corso/src/cli/options"
|
||||
. "github.com/alcionai/corso/src/cli/print"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/internal/kopia"
|
||||
"github.com/alcionai/corso/src/internal/model"
|
||||
"github.com/alcionai/corso/src/pkg/backup"
|
||||
"github.com/alcionai/corso/src/pkg/backup/details"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/repository"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365"
|
||||
"github.com/alcionai/corso/src/pkg/store"
|
||||
)
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// setup and globals
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
var (
|
||||
site []string
|
||||
libraryPaths []string
|
||||
libraryItems []string
|
||||
|
||||
sharepointData []string
|
||||
)
|
||||
|
||||
const (
|
||||
dataLibraries = "libraries"
|
||||
)
|
||||
|
||||
const (
|
||||
sharePointServiceCommand = "sharepoint"
|
||||
sharePointServiceCommandCreateUseSuffix = "--site <siteId> | '" + utils.Wildcard + "'"
|
||||
sharePointServiceCommandDeleteUseSuffix = "--backup <backupId>"
|
||||
sharePointServiceCommandDetailsUseSuffix = "--backup <backupId>"
|
||||
)
|
||||
|
||||
const (
|
||||
sharePointServiceCommandCreateExamples = `# Backup SharePoint data for <site>
|
||||
corso backup create sharepoint --site <site_id>
|
||||
|
||||
# Backup SharePoint for Alice and Bob
|
||||
corso backup create sharepoint --site <site_id_1>,<site_id_2>
|
||||
|
||||
# TODO: Site IDs may contain commas. We'll need to warn the site about escaping them.
|
||||
|
||||
# Backup all SharePoint data for all sites
|
||||
corso backup create sharepoint --site '*'`
|
||||
|
||||
sharePointServiceCommandDeleteExamples = `# Delete SharePoint backup with ID 1234abcd-12ab-cd34-56de-1234abcd
|
||||
corso backup delete sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd`
|
||||
|
||||
sharePointServiceCommandDetailsExamples = `# Explore <site>'s files from backup 1234abcd-12ab-cd34-56de-1234abcd
|
||||
|
||||
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd --site <site_id>`
|
||||
)
|
||||
|
||||
// called by backup.go to map parent subcommands to provider-specific handling.
|
||||
func addSharePointCommands(parent *cobra.Command) *cobra.Command {
|
||||
var (
|
||||
c *cobra.Command
|
||||
fs *pflag.FlagSet
|
||||
)
|
||||
|
||||
switch parent.Use {
|
||||
case createCommand:
|
||||
c, fs = utils.AddCommand(parent, sharePointCreateCmd(), utils.HideCommand())
|
||||
|
||||
c.Use = c.Use + " " + sharePointServiceCommandCreateUseSuffix
|
||||
c.Example = sharePointServiceCommandCreateExamples
|
||||
|
||||
fs.StringArrayVar(&site,
|
||||
utils.SiteFN, nil,
|
||||
"Backup SharePoint data by site ID; accepts '"+utils.Wildcard+"' to select all sites. (required)")
|
||||
// TODO: implement
|
||||
fs.StringSliceVar(
|
||||
&sharepointData,
|
||||
utils.DataFN, nil,
|
||||
"Select one or more types of data to backup: "+dataLibraries+".")
|
||||
options.AddOperationFlags(c)
|
||||
|
||||
case listCommand:
|
||||
c, fs = utils.AddCommand(parent, sharePointListCmd(), utils.HideCommand())
|
||||
|
||||
fs.StringVar(&backupID,
|
||||
utils.BackupFN, "",
|
||||
"ID of the backup to retrieve.")
|
||||
|
||||
case detailsCommand:
|
||||
c, fs = utils.AddCommand(parent, sharePointDetailsCmd())
|
||||
|
||||
c.Use = c.Use + " " + sharePointServiceCommandDetailsUseSuffix
|
||||
c.Example = sharePointServiceCommandDetailsExamples
|
||||
|
||||
fs.StringVar(&backupID,
|
||||
utils.BackupFN, "",
|
||||
"ID of the backup to retrieve.")
|
||||
cobra.CheckErr(c.MarkFlagRequired(utils.BackupFN))
|
||||
|
||||
// sharepoint hierarchy flags
|
||||
|
||||
fs.StringSliceVar(
|
||||
&libraryPaths,
|
||||
utils.LibraryFN, nil,
|
||||
"Select backup details by Library name.")
|
||||
|
||||
fs.StringSliceVar(
|
||||
&libraryItems,
|
||||
utils.LibraryItemFN, nil,
|
||||
"Select backup details by library item name or ID.")
|
||||
|
||||
// info flags
|
||||
|
||||
// fs.StringVar(
|
||||
// &fileCreatedAfter,
|
||||
// utils.FileCreatedAfterFN, "",
|
||||
// "Select backup details for items created after this datetime.")
|
||||
|
||||
case deleteCommand:
|
||||
c, fs = utils.AddCommand(parent, sharePointDeleteCmd(), utils.HideCommand())
|
||||
|
||||
c.Use = c.Use + " " + sharePointServiceCommandDeleteUseSuffix
|
||||
c.Example = sharePointServiceCommandDeleteExamples
|
||||
|
||||
fs.StringVar(&backupID,
|
||||
utils.BackupFN, "",
|
||||
"ID of the backup to delete. (required)")
|
||||
cobra.CheckErr(c.MarkFlagRequired(utils.BackupFN))
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// backup create
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
// `corso backup create sharepoint [<flag>...]`
|
||||
func sharePointCreateCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: sharePointServiceCommand,
|
||||
Short: "Backup M365 SharePoint service data",
|
||||
RunE: createSharePointCmd,
|
||||
Args: cobra.NoArgs,
|
||||
Example: sharePointServiceCommandCreateExamples,
|
||||
}
|
||||
}
|
||||
|
||||
// processes an sharepoint service backup.
|
||||
func createSharePointCmd(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := validateSharePointBackupCreateFlags(site); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
s, acct, err := config.GetStorageAndAccount(ctx, true, nil)
|
||||
if err != nil {
|
||||
return Only(ctx, err)
|
||||
}
|
||||
|
||||
r, err := repository.Connect(ctx, acct, s, options.Control())
|
||||
if err != nil {
|
||||
return Only(ctx, errors.Wrapf(err, "Failed to connect to the %s repository", s.Provider))
|
||||
}
|
||||
|
||||
defer utils.CloseRepo(ctx, r)
|
||||
|
||||
sel := sharePointBackupCreateSelectors(site)
|
||||
|
||||
sites, err := m365.Sites(ctx, acct)
|
||||
if err != nil {
|
||||
return Only(ctx, errors.Wrap(err, "Failed to retrieve SharePoint sites"))
|
||||
}
|
||||
|
||||
var (
|
||||
errs *multierror.Error
|
||||
bIDs []model.StableID
|
||||
)
|
||||
|
||||
for _, scope := range sel.DiscreteScopes(sites) {
|
||||
for _, selSite := range scope.Get(selectors.SharePointSite) {
|
||||
opSel := selectors.NewSharePointBackup()
|
||||
opSel.Include([]selectors.SharePointScope{scope.DiscreteCopy(selSite)})
|
||||
|
||||
bo, err := r.NewBackup(ctx, opSel.Selector)
|
||||
if err != nil {
|
||||
errs = multierror.Append(errs, errors.Wrapf(
|
||||
err,
|
||||
"Failed to initialize SharePoint backup for site %s",
|
||||
scope.Get(selectors.SharePointSite),
|
||||
))
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
err = bo.Run(ctx)
|
||||
if err != nil {
|
||||
errs = multierror.Append(errs, errors.Wrapf(
|
||||
err,
|
||||
"Failed to run SharePoint backup for site %s",
|
||||
scope.Get(selectors.SharePointSite),
|
||||
))
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
bIDs = append(bIDs, bo.Results.BackupID)
|
||||
}
|
||||
}
|
||||
|
||||
bups, err := r.Backups(ctx, bIDs)
|
||||
if err != nil {
|
||||
return Only(ctx, errors.Wrap(err, "Unable to retrieve backup results from storage"))
|
||||
}
|
||||
|
||||
backup.PrintAll(ctx, bups)
|
||||
|
||||
if e := errs.ErrorOrNil(); e != nil {
|
||||
return Only(ctx, e)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func validateSharePointBackupCreateFlags(sites []string) error {
|
||||
if len(sites) == 0 {
|
||||
return errors.New("requires one or more --site ids or the wildcard --site *")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func sharePointBackupCreateSelectors(sites []string) *selectors.SharePointBackup {
|
||||
sel := selectors.NewSharePointBackup()
|
||||
sel.Include(sel.Sites(sites))
|
||||
|
||||
return sel
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// backup list
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
// `corso backup list sharepoint [<flag>...]`
|
||||
func sharePointListCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: sharePointServiceCommand,
|
||||
Short: "List the history of M365 SharePoint service backups",
|
||||
RunE: listSharePointCmd,
|
||||
Args: cobra.NoArgs,
|
||||
}
|
||||
}
|
||||
|
||||
// lists the history of backup operations
|
||||
func listSharePointCmd(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
s, acct, err := config.GetStorageAndAccount(ctx, true, nil)
|
||||
if err != nil {
|
||||
return Only(ctx, err)
|
||||
}
|
||||
|
||||
r, err := repository.Connect(ctx, acct, s, options.Control())
|
||||
if err != nil {
|
||||
return Only(ctx, errors.Wrapf(err, "Failed to connect to the %s repository", s.Provider))
|
||||
}
|
||||
|
||||
defer utils.CloseRepo(ctx, r)
|
||||
|
||||
if len(backupID) > 0 {
|
||||
b, err := r.Backup(ctx, model.StableID(backupID))
|
||||
if err != nil {
|
||||
if errors.Is(err, kopia.ErrNotFound) {
|
||||
return Only(ctx, errors.Errorf("No backup exists with the id %s", backupID))
|
||||
}
|
||||
|
||||
return Only(ctx, errors.Wrap(err, "Failed to find backup "+backupID))
|
||||
}
|
||||
|
||||
b.Print(ctx)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
bs, err := r.BackupsByTag(ctx, store.Service(path.SharePointService))
|
||||
if err != nil {
|
||||
return Only(ctx, errors.Wrap(err, "Failed to list backups in the repository"))
|
||||
}
|
||||
|
||||
backup.PrintAll(ctx, bs)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// backup delete
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
// `corso backup delete sharepoint [<flag>...]`
|
||||
func sharePointDeleteCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: sharePointServiceCommand,
|
||||
Short: "Delete backed-up M365 SharePoint service data",
|
||||
RunE: deleteSharePointCmd,
|
||||
Args: cobra.NoArgs,
|
||||
Example: sharePointServiceCommandDeleteExamples,
|
||||
}
|
||||
}
|
||||
|
||||
// deletes a sharePoint service backup.
|
||||
func deleteSharePointCmd(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
s, acct, err := config.GetStorageAndAccount(ctx, true, nil)
|
||||
if err != nil {
|
||||
return Only(ctx, err)
|
||||
}
|
||||
|
||||
r, err := repository.Connect(ctx, acct, s, options.Control())
|
||||
if err != nil {
|
||||
return Only(ctx, errors.Wrapf(err, "Failed to connect to the %s repository", s.Provider))
|
||||
}
|
||||
|
||||
defer utils.CloseRepo(ctx, r)
|
||||
|
||||
if err := r.DeleteBackup(ctx, model.StableID(backupID)); err != nil {
|
||||
return Only(ctx, errors.Wrapf(err, "Deleting backup %s", backupID))
|
||||
}
|
||||
|
||||
Info(ctx, "Deleted SharePoint backup ", backupID)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// backup details
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
// `corso backup details sharepoint [<flag>...]`
// (comment previously said "onedrive"; this builds the sharepoint command.)
func sharePointDetailsCmd() *cobra.Command {
	return &cobra.Command{
		Use:     sharePointServiceCommand,
		Short:   "Shows the details of a M365 SharePoint service backup",
		RunE:    detailsSharePointCmd,
		Args:    cobra.NoArgs,
		Example: sharePointServiceCommandDetailsExamples,
	}
}
|
||||
|
||||
// lists the history of backup operations
|
||||
func detailsSharePointCmd(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
s, acct, err := config.GetStorageAndAccount(ctx, true, nil)
|
||||
if err != nil {
|
||||
return Only(ctx, err)
|
||||
}
|
||||
|
||||
r, err := repository.Connect(ctx, acct, s, options.Control())
|
||||
if err != nil {
|
||||
return Only(ctx, errors.Wrapf(err, "Failed to connect to the %s repository", s.Provider))
|
||||
}
|
||||
|
||||
defer utils.CloseRepo(ctx, r)
|
||||
|
||||
opts := utils.SharePointOpts{
|
||||
Sites: site,
|
||||
LibraryPaths: libraryPaths,
|
||||
LibraryItems: libraryItems,
|
||||
|
||||
Populated: utils.GetPopulatedFlags(cmd),
|
||||
}
|
||||
|
||||
ds, err := runDetailsSharePointCmd(ctx, r, backupID, opts)
|
||||
if err != nil {
|
||||
return Only(ctx, err)
|
||||
}
|
||||
|
||||
if len(ds.Entries) == 0 {
|
||||
Info(ctx, selectors.ErrorNoMatchingItems)
|
||||
return nil
|
||||
}
|
||||
|
||||
ds.PrintEntries(ctx)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// runDetailsSharePointCmd actually performs the lookup in backup details.
|
||||
func runDetailsSharePointCmd(
|
||||
ctx context.Context,
|
||||
r repository.BackupGetter,
|
||||
backupID string,
|
||||
opts utils.SharePointOpts,
|
||||
) (*details.Details, error) {
|
||||
if err := utils.ValidateSharePointRestoreFlags(backupID, opts); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
d, _, err := r.BackupDetails(ctx, backupID)
|
||||
if err != nil {
|
||||
if errors.Is(err, kopia.ErrNotFound) {
|
||||
return nil, errors.Errorf("no backup exists with the id %s", backupID)
|
||||
}
|
||||
|
||||
return nil, errors.Wrap(err, "Failed to get backup details in the repository")
|
||||
}
|
||||
|
||||
sel := selectors.NewSharePointRestore()
|
||||
utils.IncludeSharePointRestoreDataSelectors(sel, opts)
|
||||
utils.FilterSharePointRestoreInfoSelectors(sel, opts)
|
||||
|
||||
// if no selector flags were specified, get all data in the service.
|
||||
if len(sel.Scopes()) == 0 {
|
||||
sel.Include(sel.Sites(selectors.Any()))
|
||||
}
|
||||
|
||||
return sel.Reduce(ctx, d), nil
|
||||
}
|
||||
236
src/cli/backup/sharepoint_integration_test.go
Normal file
@ -0,0 +1,236 @@
|
||||
package backup_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/spf13/viper"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli"
|
||||
"github.com/alcionai/corso/src/cli/config"
|
||||
"github.com/alcionai/corso/src/cli/print"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/internal/operations"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/pkg/account"
|
||||
"github.com/alcionai/corso/src/pkg/control"
|
||||
"github.com/alcionai/corso/src/pkg/repository"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
"github.com/alcionai/corso/src/pkg/storage"
|
||||
)
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// tests with no prior backup
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// NoBackupSharePointIntegrationSuite exercises cli behavior against a
// freshly initialized repository that contains no sharepoint backups.
type NoBackupSharePointIntegrationSuite struct {
	suite.Suite
	acct       account.Account       // M365 account under test
	st         storage.Storage       // prefixed S3 storage backing the repo
	vpr        *viper.Viper          // cloned test configuration
	cfgFP      string                // file path of the cloned config
	repo       repository.Repository // repo initialized in SetupSuite
	m365SiteID string                // site id read from the test env
	recorder   strings.Builder       // captures command writer output
}
|
||||
|
||||
func TestNoBackupSharePointIntegrationSuite(t *testing.T) {
|
||||
if err := tester.RunOnAny(
|
||||
tester.CorsoCITests,
|
||||
tester.CorsoCLITests,
|
||||
tester.CorsoCLIBackupTests,
|
||||
); err != nil {
|
||||
t.Skip(err)
|
||||
}
|
||||
|
||||
suite.Run(t, new(NoBackupSharePointIntegrationSuite))
|
||||
}
|
||||
|
||||
// SetupSuite verifies the required env credentials, prepares account and
// storage fixtures plus a cloned config file, and initializes an empty
// repository for the listing tests.
func (suite *NoBackupSharePointIntegrationSuite) SetupSuite() {
	t := suite.T()
	ctx, flush := tester.NewContext()

	defer flush()

	_, err := tester.GetRequiredEnvSls(
		tester.AWSStorageCredEnvs,
		tester.M365AcctCredEnvs)
	require.NoError(t, err)

	// prepare common details
	suite.acct = tester.NewM365Account(t)
	suite.st = tester.NewPrefixedS3Storage(t)

	cfg, err := suite.st.S3Config()
	require.NoError(t, err)

	// pin the providers and storage prefix in the cloned test config.
	force := map[string]string{
		tester.TestCfgAccountProvider: "M365",
		tester.TestCfgStorageProvider: "S3",
		tester.TestCfgPrefix:          cfg.Prefix,
	}

	suite.vpr, suite.cfgFP, err = tester.MakeTempTestConfigClone(t, force)
	require.NoError(t, err)

	// the viper instance must be attached to ctx before repo init reads it.
	ctx = config.SetViper(ctx, suite.vpr)
	suite.m365SiteID = tester.M365SiteID(t)

	// init the repo first
	suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
	require.NoError(t, err)
}
|
||||
|
||||
// TestSharePointBackupListCmd_empty lists sharepoint backups in an
// empty repository and expects only the no-backups notice.
func (suite *NoBackupSharePointIntegrationSuite) TestSharePointBackupListCmd_empty() {
	t := suite.T()
	ctx, flush := tester.NewContext()
	ctx = config.SetViper(ctx, suite.vpr)

	defer flush()

	suite.recorder.Reset()

	cmd := tester.StubRootCmd(
		"backup", "list", "sharepoint",
		"--config-file", suite.cfgFP)
	cli.BuildCommandTree(cmd)

	// capture the command's error-writer output for the assertion below.
	cmd.SetErr(&suite.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	require.NoError(t, cmd.ExecuteContext(ctx))

	result := suite.recorder.String()

	// an empty repo should produce exactly the no-backups message.
	// (previous comment claiming the result contains the site id was stale.)
	assert.Equal(t, "No backups available\n", result)
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// tests for deleting backups
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// BackupDeleteSharePointIntegrationSuite exercises backup deletion
// against a repository seeded with one sharepoint backup in SetupSuite.
type BackupDeleteSharePointIntegrationSuite struct {
	suite.Suite
	acct     account.Account            // M365 account under test
	st       storage.Storage            // prefixed S3 storage backing the repo
	vpr      *viper.Viper               // cloned test configuration
	cfgFP    string                     // file path of the cloned config
	repo     repository.Repository      // repo initialized in SetupSuite
	backupOp operations.BackupOperation // backup created for deletion tests
	recorder strings.Builder            // captures command writer output
}
|
||||
|
||||
func TestBackupDeleteSharePointIntegrationSuite(t *testing.T) {
|
||||
if err := tester.RunOnAny(
|
||||
tester.CorsoCITests,
|
||||
tester.CorsoCLITests,
|
||||
tester.CorsoCLIBackupTests,
|
||||
); err != nil {
|
||||
t.Skip(err)
|
||||
}
|
||||
|
||||
suite.Run(t, new(BackupDeleteSharePointIntegrationSuite))
|
||||
}
|
||||
|
||||
func (suite *BackupDeleteSharePointIntegrationSuite) SetupSuite() {
|
||||
t := suite.T()
|
||||
_, err := tester.GetRequiredEnvSls(
|
||||
tester.AWSStorageCredEnvs,
|
||||
tester.M365AcctCredEnvs)
|
||||
require.NoError(t, err)
|
||||
|
||||
// prepare common details
|
||||
suite.acct = tester.NewM365Account(t)
|
||||
suite.st = tester.NewPrefixedS3Storage(t)
|
||||
|
||||
cfg, err := suite.st.S3Config()
|
||||
require.NoError(t, err)
|
||||
|
||||
force := map[string]string{
|
||||
tester.TestCfgAccountProvider: "M365",
|
||||
tester.TestCfgStorageProvider: "S3",
|
||||
tester.TestCfgPrefix: cfg.Prefix,
|
||||
}
|
||||
suite.vpr, suite.cfgFP, err = tester.MakeTempTestConfigClone(t, force)
|
||||
require.NoError(t, err)
|
||||
|
||||
ctx, flush := tester.NewContext()
|
||||
ctx = config.SetViper(ctx, suite.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
// init the repo first
|
||||
suite.repo, err = repository.Initialize(ctx, suite.acct, suite.st, control.Options{})
|
||||
require.NoError(t, err)
|
||||
|
||||
m365SiteID := tester.M365SiteID(t)
|
||||
|
||||
// some tests require an existing backup
|
||||
sel := selectors.NewSharePointBackup()
|
||||
sel.Include(sel.Libraries([]string{m365SiteID}, selectors.Any()))
|
||||
|
||||
suite.backupOp, err = suite.repo.NewBackup(ctx, sel.Selector)
|
||||
require.NoError(t, suite.backupOp.Run(ctx))
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
// TestSharePointBackupDeleteCmd deletes the backup created in SetupSuite
// and verifies the printed confirmation message.
func (suite *BackupDeleteSharePointIntegrationSuite) TestSharePointBackupDeleteCmd() {
	t := suite.T()
	ctx, flush := tester.NewContext()
	ctx = config.SetViper(ctx, suite.vpr)

	defer flush()

	suite.recorder.Reset()

	cmd := tester.StubRootCmd(
		"backup", "delete", "sharepoint",
		"--config-file", suite.cfgFP,
		"--"+utils.BackupFN, string(suite.backupOp.Results.BackupID))
	cli.BuildCommandTree(cmd)
	// capture the command's error-writer output for the assertion below.
	cmd.SetErr(&suite.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	require.NoError(t, cmd.ExecuteContext(ctx))

	result := suite.recorder.String()

	assert.Equal(t, fmt.Sprintf("Deleted SharePoint backup %s\n", string(suite.backupOp.Results.BackupID)), result)
}
|
||||
|
||||
// moved out of the func above to make the linter happy
|
||||
// // a follow-up details call should fail, due to the backup ID being deleted
|
||||
// cmd = tester.StubRootCmd(
|
||||
// "backup", "details", "sharepoint",
|
||||
// "--config-file", suite.cfgFP,
|
||||
// "--backup", string(suite.backupOp.Results.BackupID))
|
||||
// cli.BuildCommandTree(cmd)
|
||||
|
||||
// require.Error(t, cmd.ExecuteContext(ctx))
|
||||
|
||||
func (suite *BackupDeleteSharePointIntegrationSuite) TestSharePointBackupDeleteCmd_unknownID() {
|
||||
t := suite.T()
|
||||
ctx, flush := tester.NewContext()
|
||||
ctx = config.SetViper(ctx, suite.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := tester.StubRootCmd(
|
||||
"backup", "delete", "sharepoint",
|
||||
"--config-file", suite.cfgFP,
|
||||
"--"+utils.BackupFN, uuid.NewString())
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
// unknown backupIDs should error since the modelStore can't find the backup
|
||||
require.Error(t, cmd.ExecuteContext(ctx))
|
||||
}
|
||||
127
src/cli/backup/sharepoint_test.go
Normal file
@ -0,0 +1,127 @@
|
||||
package backup
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/utils/testdata"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
)
|
||||
|
||||
// SharePointSuite holds the unit tests for the sharepoint backup
// cli command wiring.
type SharePointSuite struct {
	suite.Suite
}
|
||||
|
||||
func TestSharePointSuite(t *testing.T) {
|
||||
suite.Run(t, new(SharePointSuite))
|
||||
}
|
||||
|
||||
// TestAddSharePointCommands verifies that each backup verb (create,
// list, details, delete) attaches exactly one sharepoint child command
// with the expected use string, short description, and run func.
func (suite *SharePointSuite) TestAddSharePointCommands() {
	expectUse := sharePointServiceCommand

	table := []struct {
		name        string
		use         string // the parent verb command
		expectUse   string // expected Use of the attached child
		expectShort string // expected Short of the attached child
		expectRunE  func(*cobra.Command, []string) error
	}{
		{
			"create sharepoint", createCommand, expectUse + " " + sharePointServiceCommandCreateUseSuffix,
			sharePointCreateCmd().Short, createSharePointCmd,
		},
		{
			"list sharepoint", listCommand, expectUse,
			sharePointListCmd().Short, listSharePointCmd,
		},
		{
			"details sharepoint", detailsCommand, expectUse + " " + sharePointServiceCommandDetailsUseSuffix,
			sharePointDetailsCmd().Short, detailsSharePointCmd,
		},
		{
			"delete sharepoint", deleteCommand, expectUse + " " + sharePointServiceCommandDeleteUseSuffix,
			sharePointDeleteCmd().Short, deleteSharePointCmd,
		},
	}
	for _, test := range table {
		suite.T().Run(test.name, func(t *testing.T) {
			cmd := &cobra.Command{Use: test.use}

			c := addSharePointCommands(cmd)
			require.NotNil(t, c)

			// exactly one child should have been attached to the verb.
			cmds := cmd.Commands()
			require.Len(t, cmds, 1)

			child := cmds[0]
			assert.Equal(t, test.expectUse, child.Use)
			assert.Equal(t, test.expectShort, child.Short)
			tester.AreSameFunc(t, test.expectRunE, child.RunE)
		})
	}
}
|
||||
|
||||
func (suite *SharePointSuite) TestValidateSharePointBackupCreateFlags() {
|
||||
table := []struct {
|
||||
name string
|
||||
site []string
|
||||
expect assert.ErrorAssertionFunc
|
||||
}{
|
||||
{
|
||||
name: "no sites",
|
||||
expect: assert.Error,
|
||||
},
|
||||
{
|
||||
name: "sites",
|
||||
site: []string{"fnord"},
|
||||
expect: assert.NoError,
|
||||
},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.T().Run(test.name, func(t *testing.T) {
|
||||
test.expect(t, validateSharePointBackupCreateFlags(test.site))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// TestSharePointBackupDetailsSelectors runs runDetailsSharePointCmd
// against the shared mock-backed lookup table and verifies each flag
// combination reduces the details to the expected entries.
func (suite *SharePointSuite) TestSharePointBackupDetailsSelectors() {
	ctx, flush := tester.NewContext()
	defer flush()

	for _, test := range testdata.SharePointOptionDetailLookups {
		suite.T().Run(test.Name, func(t *testing.T) {
			output, err := runDetailsSharePointCmd(
				ctx,
				test.BackupGetter,
				"backup-ID",
				test.Opts,
			)
			assert.NoError(t, err)

			assert.ElementsMatch(t, test.Expected, output.Entries)
		})
	}
}
|
||||
|
||||
// TestSharePointBackupDetailsSelectorsBadFormats ensures malformed flag
// values cause runDetailsSharePointCmd to error with empty output.
// The shared testdata table currently has every case commented out, so
// this loop is a no-op until format-validated flags are added.
func (suite *SharePointSuite) TestSharePointBackupDetailsSelectorsBadFormats() {
	ctx, flush := tester.NewContext()
	defer flush()

	for _, test := range testdata.BadSharePointOptionsFormats {
		suite.T().Run(test.Name, func(t *testing.T) {
			output, err := runDetailsSharePointCmd(
				ctx,
				test.BackupGetter,
				"backup-ID",
				test.Opts,
			)

			assert.Error(t, err)
			assert.Empty(t, output)
		})
	}
}
|
||||
@ -40,7 +40,7 @@ var corsoCmd = &cobra.Command{
|
||||
func handleCorsoCmd(cmd *cobra.Command, args []string) error {
|
||||
v, _ := cmd.Flags().GetBool("version")
|
||||
if v {
|
||||
print.Infof(cmd.Context(), "Corso\nversion: "+version)
|
||||
print.Outf(cmd.Context(), "Corso\nversion: "+version)
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
@ -89,6 +89,38 @@ func err(w io.Writer, s ...any) {
|
||||
fmt.Fprint(w, msg...)
|
||||
}
|
||||
|
||||
// Out prints the params to cobra's output writer (stdOut by default).
// If s is empty, prints nothing.
func Out(ctx context.Context, s ...any) {
	out(getRootCmd(ctx).OutOrStdout(), s...)
}
|
||||
|
||||
// out is the testable core of Out()
|
||||
func out(w io.Writer, s ...any) {
|
||||
if len(s) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
fmt.Fprint(w, s...)
|
||||
fmt.Fprintf(w, "\n")
|
||||
}
|
||||
|
||||
// Outf prints the formatted string to cobra's output writer (stdOut by
// default). If t is empty, prints nothing.
// (comment previously named "Out"; godoc comments must start with Outf.)
func Outf(ctx context.Context, t string, s ...any) {
	outf(getRootCmd(ctx).OutOrStdout(), t, s...)
}
|
||||
|
||||
// outf is the testable core of Outf()
|
||||
func outf(w io.Writer, t string, s ...any) {
|
||||
if len(t) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
fmt.Fprintf(w, t, s...)
|
||||
fmt.Fprintf(w, "\n")
|
||||
}
|
||||
|
||||
// Info prints the params to cobra's error writer (stdErr by default)
|
||||
// if s is nil, prints nothing.
|
||||
func Info(ctx context.Context, s ...any) {
|
||||
@ -138,14 +170,12 @@ type Printable interface {
|
||||
|
||||
// Item prints the printable, according to the caller's requested format.
|
||||
func Item(ctx context.Context, p Printable) {
|
||||
print(getRootCmd(ctx).OutOrStdout(), p)
|
||||
printItem(getRootCmd(ctx).OutOrStdout(), p)
|
||||
}
|
||||
|
||||
// print prints the printable items,
|
||||
// according to the caller's requested format.
|
||||
//
|
||||
//revive:disable:redefines-builtin-id
|
||||
func print(w io.Writer, p Printable) {
|
||||
func printItem(w io.Writer, p Printable) {
|
||||
if outputAsJSON || outputAsJSONDebug {
|
||||
outputJSON(w, p, outputAsJSONDebug)
|
||||
return
|
||||
|
||||
@ -4,12 +4,21 @@ import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
)
|
||||
|
||||
func (suite *ExchangeUtilsSuite) TestIncludeOneDriveRestoreDataSelectors() {
|
||||
// OneDriveUtilsSuite holds unit tests for the onedrive cli utils
// helpers.
type OneDriveUtilsSuite struct {
	suite.Suite
}
|
||||
|
||||
func TestOneDriveUtilsSuite(t *testing.T) {
|
||||
suite.Run(t, new(OneDriveUtilsSuite))
|
||||
}
|
||||
|
||||
func (suite *OneDriveUtilsSuite) TestIncludeOneDriveRestoreDataSelectors() {
|
||||
var (
|
||||
empty = []string{}
|
||||
single = []string{"single"}
|
||||
|
||||
97
src/cli/utils/sharepoint.go
Normal file
@ -0,0 +1,97 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
)
|
||||
|
||||
// cli flag names for the sharepoint library selectors.
const (
	LibraryItemFN = "library-item"
	LibraryFN     = "library"
)
|
||||
|
||||
// SharePointOpts aggregates the sharepoint-specific command line flag
// values used by the backup and restore commands.
type SharePointOpts struct {
	Sites        []string // values from the --site flag
	LibraryItems []string // values from the --library-item flag
	LibraryPaths []string // values from the --library flag

	// Populated tracks which flags were explicitly set by the user,
	// for flags whose zero value is otherwise ambiguous.
	Populated PopulatedFlags
}
|
||||
|
||||
// ValidateSharePointRestoreFlags checks common flags for correctness and interdependencies
|
||||
func ValidateSharePointRestoreFlags(backupID string, opts SharePointOpts) error {
|
||||
if len(backupID) == 0 {
|
||||
return errors.New("a backup ID is required")
|
||||
}
|
||||
|
||||
// if _, ok := opts.Populated[FileCreatedAfterFN]; ok && !IsValidTimeFormat(opts.FileCreatedAfter) {
|
||||
// return errors.New("invalid time format for created-after")
|
||||
// }
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// AddSharePointFilter adds the scope of the provided value to the
// selector's filter set. Empty values are skipped.
func AddSharePointFilter(
	sel *selectors.SharePointRestore,
	v string,
	f func(string) []selectors.SharePointScope,
) {
	if len(v) == 0 {
		return
	}

	sel.Filter(f(v))
}
|
||||
|
||||
// IncludeSharePointRestoreDataSelectors builds the common data-selector
|
||||
// inclusions for SharePoint commands.
|
||||
func IncludeSharePointRestoreDataSelectors(
|
||||
sel *selectors.SharePointRestore,
|
||||
opts SharePointOpts,
|
||||
) {
|
||||
lp, ln := len(opts.LibraryPaths), len(opts.LibraryItems)
|
||||
|
||||
// only use the inclusion if either a path or item name
|
||||
// is specified
|
||||
if lp+ln == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
if len(opts.Sites) == 0 {
|
||||
opts.Sites = selectors.Any()
|
||||
}
|
||||
|
||||
// either scope the request to a set of sites
|
||||
if lp+ln == 0 {
|
||||
sel.Include(sel.Sites(opts.Sites))
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
opts.LibraryPaths = trimFolderSlash(opts.LibraryPaths)
|
||||
|
||||
if ln == 0 {
|
||||
opts.LibraryItems = selectors.Any()
|
||||
}
|
||||
|
||||
containsFolders, prefixFolders := splitFoldersIntoContainsAndPrefix(opts.LibraryPaths)
|
||||
|
||||
if len(containsFolders) > 0 {
|
||||
sel.Include(sel.LibraryItems(opts.Sites, containsFolders, opts.LibraryItems))
|
||||
}
|
||||
|
||||
if len(prefixFolders) > 0 {
|
||||
sel.Include(sel.LibraryItems(opts.Sites, prefixFolders, opts.LibraryItems, selectors.PrefixMatch()))
|
||||
}
|
||||
}
|
||||
|
||||
// FilterSharePointRestoreInfoSelectors builds the common info-selector
// filters. No sharepoint info filters are wired up yet; the commented
// call below shows the intended pattern.
func FilterSharePointRestoreInfoSelectors(
	sel *selectors.SharePointRestore,
	opts SharePointOpts,
) {
	// AddSharePointFilter(sel, opts.FileCreatedAfter, sel.CreatedAfter)
}
|
||||
99
src/cli/utils/sharepoint_test.go
Normal file
@ -0,0 +1,99 @@
|
||||
package utils_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
)
|
||||
|
||||
// SharePointUtilsSuite holds unit tests for the sharepoint cli utils
// helpers.
type SharePointUtilsSuite struct {
	suite.Suite
}
|
||||
|
||||
func TestSharePointUtilsSuite(t *testing.T) {
|
||||
suite.Run(t, new(SharePointUtilsSuite))
|
||||
}
|
||||
|
||||
func (suite *ExchangeUtilsSuite) TestIncludeSharePointRestoreDataSelectors() {
|
||||
var (
|
||||
empty = []string{}
|
||||
single = []string{"single"}
|
||||
multi = []string{"more", "than", "one"}
|
||||
containsOnly = []string{"contains"}
|
||||
prefixOnly = []string{"/prefix"}
|
||||
containsAndPrefix = []string{"contains", "/prefix"}
|
||||
)
|
||||
|
||||
table := []struct {
|
||||
name string
|
||||
opts utils.SharePointOpts
|
||||
expectIncludeLen int
|
||||
}{
|
||||
{
|
||||
name: "no inputs",
|
||||
opts: utils.SharePointOpts{
|
||||
Sites: empty,
|
||||
LibraryPaths: empty,
|
||||
LibraryItems: empty,
|
||||
},
|
||||
expectIncludeLen: 0,
|
||||
},
|
||||
{
|
||||
name: "single inputs",
|
||||
opts: utils.SharePointOpts{
|
||||
Sites: single,
|
||||
LibraryPaths: single,
|
||||
LibraryItems: single,
|
||||
},
|
||||
expectIncludeLen: 1,
|
||||
},
|
||||
{
|
||||
name: "multi inputs",
|
||||
opts: utils.SharePointOpts{
|
||||
Sites: multi,
|
||||
LibraryPaths: multi,
|
||||
LibraryItems: multi,
|
||||
},
|
||||
expectIncludeLen: 1,
|
||||
},
|
||||
{
|
||||
name: "library contains",
|
||||
opts: utils.SharePointOpts{
|
||||
Sites: empty,
|
||||
LibraryPaths: containsOnly,
|
||||
LibraryItems: empty,
|
||||
},
|
||||
expectIncludeLen: 1,
|
||||
},
|
||||
{
|
||||
name: "library prefixes",
|
||||
opts: utils.SharePointOpts{
|
||||
Sites: empty,
|
||||
LibraryPaths: prefixOnly,
|
||||
LibraryItems: empty,
|
||||
},
|
||||
expectIncludeLen: 1,
|
||||
},
|
||||
{
|
||||
name: "library prefixes and contains",
|
||||
opts: utils.SharePointOpts{
|
||||
Sites: empty,
|
||||
LibraryPaths: containsAndPrefix,
|
||||
LibraryItems: empty,
|
||||
},
|
||||
expectIncludeLen: 2,
|
||||
},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.T().Run(test.name, func(t *testing.T) {
|
||||
sel := selectors.NewSharePointRestore()
|
||||
// no return, mutates sel as a side effect
|
||||
utils.IncludeSharePointRestoreDataSelectors(sel, test.opts)
|
||||
assert.Len(t, sel.Includes, test.expectIncludeLen)
|
||||
})
|
||||
}
|
||||
}
|
||||
97
src/cli/utils/testdata/opts.go
vendored
@ -395,6 +395,94 @@ var (
|
||||
}
|
||||
)
|
||||
|
||||
// SharePointOptionsTest describes one detail-lookup test case: the flag
// values to feed runDetailsSharePointCmd and the entries it should
// return against the mock backup getter.
type SharePointOptionsTest struct {
	Name         string
	Opts         utils.SharePointOpts
	BackupGetter *MockBackupGetter
	Expected     []details.DetailsEntry
}
|
||||
|
||||
var (
|
||||
// BadSharePointOptionsFormats contains SharePointOpts with flags that should
|
||||
// cause errors about the format of the input flag. Mocks are configured to
|
||||
// allow the system to run if it doesn't throw an error on formatting.
|
||||
BadSharePointOptionsFormats = []SharePointOptionsTest{
|
||||
// {
|
||||
// Name: "BadFileCreatedBefore",
|
||||
// Opts: utils.OneDriveOpts{
|
||||
// FileCreatedBefore: "foo",
|
||||
// Populated: utils.PopulatedFlags{
|
||||
// utils.FileCreatedBeforeFN: struct{}{},
|
||||
// },
|
||||
// },
|
||||
// },
|
||||
// {
|
||||
// Name: "EmptyFileCreatedBefore",
|
||||
// Opts: utils.OneDriveOpts{
|
||||
// FileCreatedBefore: "",
|
||||
// Populated: utils.PopulatedFlags{
|
||||
// utils.FileCreatedBeforeFN: struct{}{},
|
||||
// },
|
||||
// },
|
||||
// },
|
||||
}
|
||||
|
||||
// SharePointOptionDetailLookups contains flag inputs and expected results for
|
||||
// some choice input patterns. This set is not exhaustive. All inputs and
|
||||
// outputs are according to the data laid out in selectors/testdata. Mocks are
|
||||
// configured to return the full dataset listed in selectors/testdata.
|
||||
SharePointOptionDetailLookups = []SharePointOptionsTest{
|
||||
{
|
||||
Name: "AllLibraryItems",
|
||||
Expected: testdata.SharePointLibraryItems,
|
||||
Opts: utils.SharePointOpts{
|
||||
LibraryPaths: selectors.Any(),
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "FolderPrefixMatch",
|
||||
Expected: testdata.SharePointLibraryItems,
|
||||
Opts: utils.SharePointOpts{
|
||||
LibraryPaths: []string{testdata.SharePointLibraryFolder},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "FolderPrefixMatchTrailingSlash",
|
||||
Expected: testdata.SharePointLibraryItems,
|
||||
Opts: utils.SharePointOpts{
|
||||
LibraryPaths: []string{testdata.SharePointLibraryFolder + "/"},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "FolderPrefixMatchTrailingSlash",
|
||||
Expected: testdata.SharePointLibraryItems,
|
||||
Opts: utils.SharePointOpts{
|
||||
LibraryPaths: []string{testdata.SharePointLibraryFolder + "/"},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "ShortRef",
|
||||
Expected: []details.DetailsEntry{
|
||||
testdata.SharePointLibraryItems[0],
|
||||
testdata.SharePointLibraryItems[1],
|
||||
},
|
||||
Opts: utils.SharePointOpts{
|
||||
LibraryItems: []string{
|
||||
testdata.SharePointLibraryItems[0].ShortRef,
|
||||
testdata.SharePointLibraryItems[1].ShortRef,
|
||||
},
|
||||
},
|
||||
},
|
||||
// {
|
||||
// Name: "CreatedBefore",
|
||||
// Expected: []details.DetailsEntry{testdata.SharePointLibraryItems[1]},
|
||||
// Opts: utils.SharePointOpts{
|
||||
// FileCreatedBefore: common.FormatTime(testdata.Time1.Add(time.Second)),
|
||||
// },
|
||||
// },
|
||||
}
|
||||
)
|
||||
|
||||
// MockBackupGetter implements the repo.BackupGetter interface and returns
|
||||
// (selectors/testdata.GetDetailsSet(), nil, nil) when BackupDetails is called
|
||||
// on the nil instance. If an instance is given or Backups is called returns an
|
||||
@ -408,7 +496,14 @@ func (MockBackupGetter) Backup(
|
||||
return nil, errors.New("unexpected call to mock")
|
||||
}
|
||||
|
||||
func (MockBackupGetter) Backups(context.Context, ...store.FilterOption) ([]*backup.Backup, error) {
|
||||
func (MockBackupGetter) Backups(context.Context, []model.StableID) ([]*backup.Backup, error) {
|
||||
return nil, errors.New("unexpected call to mock")
|
||||
}
|
||||
|
||||
// BackupsByTag satisfies the BackupGetter interface; the mock never
// expects this call and always returns an error.
func (MockBackupGetter) BackupsByTag(
	context.Context,
	...store.FilterOption,
) ([]*backup.Backup, error) {
	return nil, errors.New("unexpected call to mock")
}
|
||||
|
||||
|
||||
@ -17,6 +17,7 @@ import (
|
||||
const (
|
||||
BackupFN = "backup"
|
||||
DataFN = "data"
|
||||
SiteFN = "site"
|
||||
UserFN = "user"
|
||||
)
|
||||
|
||||
@ -57,10 +58,32 @@ func HasNoFlagsAndShownHelp(cmd *cobra.Command) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
// cmdCfg holds the optional configuration applied when adding a command
// to the tree via AddCommand.
type cmdCfg struct {
	hidden bool // when true, the command's Hidden field is set
}

// cmdOpt mutates a cmdCfg.
type cmdOpt func(*cmdCfg)
|
||||
// populate applies each option to the config, in order.
func (cc *cmdCfg) populate(opts ...cmdOpt) {
	for _, opt := range opts {
		opt(cc)
	}
}
|
||||
|
||||
// HideCommand produces a cmdOpt that marks the command as hidden
// (AddCommand copies this flag onto the command's Hidden field).
func HideCommand() cmdOpt {
	return func(cc *cmdCfg) {
		cc.hidden = true
	}
}
|
||||
|
||||
// AddCommand adds a clone of the subCommand to the parent,
|
||||
// and returns both the clone and its pflags.
|
||||
func AddCommand(parent, c *cobra.Command) (*cobra.Command, *pflag.FlagSet) {
|
||||
func AddCommand(parent, c *cobra.Command, opts ...cmdOpt) (*cobra.Command, *pflag.FlagSet) {
|
||||
cc := &cmdCfg{}
|
||||
cc.populate(opts...)
|
||||
|
||||
parent.AddCommand(c)
|
||||
c.Hidden = cc.hidden
|
||||
|
||||
c.Flags().SortFlags = false
|
||||
|
||||
|
||||
@ -4,7 +4,7 @@ go 1.18
|
||||
|
||||
require (
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.1.0
|
||||
github.com/aws/aws-sdk-go v1.44.132
|
||||
github.com/aws/aws-sdk-go v1.44.145
|
||||
github.com/aws/aws-xray-sdk-go v1.7.1
|
||||
github.com/google/uuid v1.3.0
|
||||
github.com/hashicorp/go-multierror v1.1.1
|
||||
|
||||
@ -58,8 +58,8 @@ github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRF
|
||||
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
|
||||
github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY=
|
||||
github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
|
||||
github.com/aws/aws-sdk-go v1.44.132 h1:+IjL9VoR0OXScQ5gyme9xjcolwUkd3uaH144f4Ao+4s=
|
||||
github.com/aws/aws-sdk-go v1.44.132/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
|
||||
github.com/aws/aws-sdk-go v1.44.145 h1:KMVRrIyjBsNz3xGPuHIRnhIuKlb5h3Ii5e5jbi3cgnc=
|
||||
github.com/aws/aws-sdk-go v1.44.145/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
|
||||
github.com/aws/aws-xray-sdk-go v1.7.1 h1:mji68Db4oWipJ6SiQQuFiWBYWI8sUvPfcv86mLFVKHQ=
|
||||
github.com/aws/aws-xray-sdk-go v1.7.1/go.mod h1:aNQo1pqFaaeKaf18CSWCkoaXUI+PQZ7yfNE28YyE2CI=
|
||||
github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8=
|
||||
|
||||
@ -89,8 +89,6 @@ func (suite *CommonTimeUnitSuite) TestExtractTime() {
|
||||
parseT("2006-01-02T03:00:04-01:00"),
|
||||
}
|
||||
|
||||
type timeFormatter func(time.Time) string
|
||||
|
||||
formats := []common.TimeFormat{
|
||||
common.ClippedSimple,
|
||||
common.ClippedSimpleOneDrive,
|
||||
|
||||
@ -29,7 +29,7 @@ func (gc *GraphConnector) DataCollections(ctx context.Context, sels selectors.Se
|
||||
ctx, end := D.Span(ctx, "gc:dataCollections", D.Index("service", sels.Service.String()))
|
||||
defer end()
|
||||
|
||||
err := verifyBackupInputs(sels, gc.Users)
|
||||
err := verifyBackupInputs(sels, gc.GetUsers(), gc.GetSiteIds())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -40,52 +40,59 @@ func (gc *GraphConnector) DataCollections(ctx context.Context, sels selectors.Se
|
||||
case selectors.ServiceOneDrive:
|
||||
return gc.OneDriveDataCollections(ctx, sels)
|
||||
case selectors.ServiceSharePoint:
|
||||
return gc.SharePointDataCollections(ctx, sels)
|
||||
colls, err := sharepoint.DataCollections(ctx, sels, gc.GetSiteIds(), gc.credentials.AzureTenantID, gc)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for range colls {
|
||||
gc.incrementAwaitingMessages()
|
||||
}
|
||||
|
||||
return colls, nil
|
||||
default:
|
||||
return nil, errors.Errorf("service %s not supported", sels)
|
||||
return nil, errors.Errorf("service %s not supported", sels.Service.String())
|
||||
}
|
||||
}
|
||||
|
||||
func verifyBackupInputs(sel selectors.Selector, mapOfUsers map[string]string) error {
|
||||
var personnel []string
|
||||
func verifyBackupInputs(sels selectors.Selector, userPNs, siteIDs []string) error {
|
||||
var ids []string
|
||||
|
||||
// retrieve users from selectors
|
||||
switch sel.Service {
|
||||
case selectors.ServiceExchange:
|
||||
backup, err := sel.ToExchangeBackup()
|
||||
resourceOwners, err := sels.ResourceOwners()
|
||||
if err != nil {
|
||||
return err
|
||||
return errors.Wrap(err, "invalid backup inputs")
|
||||
}
|
||||
|
||||
for _, scope := range backup.Scopes() {
|
||||
temp := scope.Get(selectors.ExchangeUser)
|
||||
personnel = append(personnel, temp...)
|
||||
}
|
||||
case selectors.ServiceOneDrive:
|
||||
backup, err := sel.ToOneDriveBackup()
|
||||
if err != nil {
|
||||
return err
|
||||
switch sels.Service {
|
||||
case selectors.ServiceExchange, selectors.ServiceOneDrive:
|
||||
ids = userPNs
|
||||
|
||||
case selectors.ServiceSharePoint:
|
||||
ids = siteIDs
|
||||
}
|
||||
|
||||
for _, user := range backup.Scopes() {
|
||||
temp := user.Get(selectors.OneDriveUser)
|
||||
personnel = append(personnel, temp...)
|
||||
// verify resourceOwners
|
||||
normROs := map[string]struct{}{}
|
||||
|
||||
for _, id := range ids {
|
||||
normROs[strings.ToLower(id)] = struct{}{}
|
||||
}
|
||||
|
||||
default:
|
||||
return errors.New("service %s not supported")
|
||||
for _, ro := range resourceOwners.Includes {
|
||||
if _, ok := normROs[strings.ToLower(ro)]; !ok {
|
||||
return fmt.Errorf("included resource owner %s not found within tenant", ro)
|
||||
}
|
||||
}
|
||||
|
||||
// verify personnel
|
||||
normUsers := map[string]struct{}{}
|
||||
|
||||
for k := range mapOfUsers {
|
||||
normUsers[strings.ToLower(k)] = struct{}{}
|
||||
for _, ro := range resourceOwners.Excludes {
|
||||
if _, ok := normROs[strings.ToLower(ro)]; !ok {
|
||||
return fmt.Errorf("excluded resource owner %s not found within tenant", ro)
|
||||
}
|
||||
}
|
||||
|
||||
for _, user := range personnel {
|
||||
if _, ok := normUsers[strings.ToLower(user)]; !ok {
|
||||
return fmt.Errorf("%s user not found within tenant", user)
|
||||
for _, ro := range resourceOwners.Filters {
|
||||
if _, ok := normROs[strings.ToLower(ro)]; !ok {
|
||||
return fmt.Errorf("filtered resource owner %s not found within tenant", ro)
|
||||
}
|
||||
}
|
||||
|
||||
@ -193,6 +200,18 @@ func (gc *GraphConnector) ExchangeDataCollection(
|
||||
// OneDrive
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
type odFolderMatcher struct {
|
||||
scope selectors.OneDriveScope
|
||||
}
|
||||
|
||||
func (fm odFolderMatcher) IsAny() bool {
|
||||
return fm.scope.IsAny(selectors.OneDriveFolder)
|
||||
}
|
||||
|
||||
func (fm odFolderMatcher) Matches(dir string) bool {
|
||||
return fm.scope.Matches(selectors.OneDriveFolder, dir)
|
||||
}
|
||||
|
||||
// OneDriveDataCollections returns a set of DataCollection which represents the OneDrive data
|
||||
// for the specified user
|
||||
func (gc *GraphConnector) OneDriveDataCollections(
|
||||
@ -218,7 +237,8 @@ func (gc *GraphConnector) OneDriveDataCollections(
|
||||
odcs, err := onedrive.NewCollections(
|
||||
gc.credentials.AzureTenantID,
|
||||
user,
|
||||
scope,
|
||||
onedrive.OneDriveSource,
|
||||
odFolderMatcher{scope},
|
||||
&gc.graphService,
|
||||
gc.UpdateStatus,
|
||||
).Get(ctx)
|
||||
@ -236,104 +256,3 @@ func (gc *GraphConnector) OneDriveDataCollections(
|
||||
|
||||
return collections, errs
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// SharePoint
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// createSharePointCollections - utility function that retrieves M365
|
||||
// IDs through Microsoft Graph API. The selectors.SharePointScope
|
||||
// determines the type of collections that are retrieved.
|
||||
func (gc *GraphConnector) createSharePointCollections(
|
||||
ctx context.Context,
|
||||
scope selectors.SharePointScope,
|
||||
) ([]*sharepoint.Collection, error) {
|
||||
var (
|
||||
errs *multierror.Error
|
||||
sites = scope.Get(selectors.SharePointSite)
|
||||
colls = make([]*sharepoint.Collection, 0)
|
||||
)
|
||||
|
||||
// Create collection of ExchangeDataCollection
|
||||
for _, site := range sites {
|
||||
collections := make(map[string]*sharepoint.Collection)
|
||||
|
||||
qp := graph.QueryParams{
|
||||
Category: scope.Category().PathType(),
|
||||
ResourceOwner: site,
|
||||
FailFast: gc.failFast,
|
||||
Credentials: gc.credentials,
|
||||
}
|
||||
|
||||
foldersComplete, closer := observe.MessageWithCompletion(fmt.Sprintf("∙ %s - %s:", qp.Category, site))
|
||||
defer closer()
|
||||
defer close(foldersComplete)
|
||||
|
||||
// resolver, err := exchange.PopulateExchangeContainerResolver(
|
||||
// ctx,
|
||||
// qp,
|
||||
// qp.Scope.Category().PathType(),
|
||||
// )
|
||||
// if err != nil {
|
||||
// return nil, errors.Wrap(err, "getting folder cache")
|
||||
// }
|
||||
|
||||
// err = sharepoint.FilterContainersAndFillCollections(
|
||||
// ctx,
|
||||
// qp,
|
||||
// collections,
|
||||
// gc.UpdateStatus,
|
||||
// resolver)
|
||||
|
||||
// if err != nil {
|
||||
// return nil, errors.Wrap(err, "filling collections")
|
||||
// }
|
||||
|
||||
foldersComplete <- struct{}{}
|
||||
|
||||
for _, collection := range collections {
|
||||
gc.incrementAwaitingMessages()
|
||||
|
||||
colls = append(colls, collection)
|
||||
}
|
||||
}
|
||||
|
||||
return colls, errs.ErrorOrNil()
|
||||
}
|
||||
|
||||
// SharePointDataCollections returns a set of DataCollection which represents the SharePoint data
|
||||
// for the specified user
|
||||
func (gc *GraphConnector) SharePointDataCollections(
|
||||
ctx context.Context,
|
||||
selector selectors.Selector,
|
||||
) ([]data.Collection, error) {
|
||||
b, err := selector.ToSharePointBackup()
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "sharePointDataCollection: parsing selector")
|
||||
}
|
||||
|
||||
var (
|
||||
scopes = b.DiscreteScopes(gc.GetSites())
|
||||
collections = []data.Collection{}
|
||||
errs error
|
||||
)
|
||||
|
||||
// for each scope that includes oneDrive items, get all
|
||||
for _, scope := range scopes {
|
||||
// Creates a map of collections based on scope
|
||||
dcs, err := gc.createSharePointCollections(ctx, scope)
|
||||
if err != nil {
|
||||
return nil, support.WrapAndAppend(scope.Get(selectors.SharePointSite)[0], err, errs)
|
||||
}
|
||||
|
||||
for _, collection := range dcs {
|
||||
collections = append(collections, collection)
|
||||
}
|
||||
}
|
||||
|
||||
for range collections {
|
||||
gc.incrementAwaitingMessages()
|
||||
}
|
||||
|
||||
return collections, errs
|
||||
}
|
||||
|
||||
@ -9,6 +9,7 @@ import (
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/connector/exchange"
|
||||
"github.com/alcionai/corso/src/internal/connector/sharepoint"
|
||||
"github.com/alcionai/corso/src/internal/connector/support"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
@ -162,16 +163,16 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestSharePointDataCollecti
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
connector := loadConnector(ctx, suite.T(), Users)
|
||||
connector := loadConnector(ctx, suite.T(), Sites)
|
||||
tests := []struct {
|
||||
name string
|
||||
getSelector func(t *testing.T) selectors.Selector
|
||||
}{
|
||||
{
|
||||
name: "Items - TODO: actual sharepoint categories",
|
||||
name: "Libraries",
|
||||
getSelector: func(t *testing.T) selectors.Selector {
|
||||
sel := selectors.NewSharePointBackup()
|
||||
sel.Include(sel.Folders([]string{suite.site}, selectors.Any()))
|
||||
sel.Include(sel.Libraries([]string{suite.site}, selectors.Any()))
|
||||
|
||||
return sel.Selector
|
||||
},
|
||||
@ -180,24 +181,31 @@ func (suite *ConnectorDataCollectionIntegrationSuite) TestSharePointDataCollecti
|
||||
|
||||
for _, test := range tests {
|
||||
suite.T().Run(test.name, func(t *testing.T) {
|
||||
_, err := connector.SharePointDataCollections(ctx, test.getSelector(t))
|
||||
collection, err := sharepoint.DataCollections(
|
||||
ctx,
|
||||
test.getSelector(t),
|
||||
[]string{suite.site},
|
||||
connector.credentials.AzureTenantID,
|
||||
connector)
|
||||
require.NoError(t, err)
|
||||
|
||||
// TODO: Implementation
|
||||
// assert.Equal(t, len(collection), 1)
|
||||
// we don't know an exact count of drives this will produce,
|
||||
// but it should be more than one.
|
||||
assert.Less(t, 1, len(collection))
|
||||
|
||||
// channel := collection[0].Items()
|
||||
// the test only reads the firstt collection
|
||||
connector.incrementAwaitingMessages()
|
||||
|
||||
// for object := range channel {
|
||||
// buf := &bytes.Buffer{}
|
||||
// _, err := buf.ReadFrom(object.ToReader())
|
||||
// assert.NoError(t, err, "received a buf.Read error")
|
||||
// }
|
||||
for object := range collection[0].Items() {
|
||||
buf := &bytes.Buffer{}
|
||||
_, err := buf.ReadFrom(object.ToReader())
|
||||
assert.NoError(t, err, "received a buf.Read error")
|
||||
}
|
||||
|
||||
// status := connector.AwaitStatus()
|
||||
// assert.NotZero(t, status.Successful)
|
||||
status := connector.AwaitStatus()
|
||||
assert.NotZero(t, status.Successful)
|
||||
|
||||
// t.Log(status.String())
|
||||
t.Log(status.String())
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -274,10 +282,8 @@ func (suite *ConnectorCreateExchangeCollectionIntegrationSuite) TestMailFetch()
|
||||
require.NotEmpty(t, c.FullPath().Folder())
|
||||
folder := c.FullPath().Folder()
|
||||
|
||||
if _, ok := test.folderNames[folder]; ok {
|
||||
delete(test.folderNames, folder)
|
||||
}
|
||||
}
|
||||
|
||||
assert.Empty(t, test.folderNames)
|
||||
})
|
||||
@ -361,7 +367,7 @@ func (suite *ConnectorCreateExchangeCollectionIntegrationSuite) TestContactSeria
|
||||
assert.NotZero(t, read)
|
||||
contact, err := support.CreateContactFromBytes(buf.Bytes())
|
||||
assert.NotNil(t, contact)
|
||||
assert.NoError(t, err, "error on converting contact bytes: "+string(buf.Bytes()))
|
||||
assert.NoError(t, err, "error on converting contact bytes: "+buf.String())
|
||||
count++
|
||||
}
|
||||
assert.NotZero(t, count)
|
||||
@ -426,7 +432,7 @@ func (suite *ConnectorCreateExchangeCollectionIntegrationSuite) TestEventsSerial
|
||||
assert.NotZero(t, read)
|
||||
event, err := support.CreateEventFromBytes(buf.Bytes())
|
||||
assert.NotNil(t, event)
|
||||
assert.NoError(t, err, "experienced error parsing event bytes: "+string(buf.Bytes()))
|
||||
assert.NoError(t, err, "experienced error parsing event bytes: "+buf.String())
|
||||
}
|
||||
|
||||
status := connector.AwaitStatus()
|
||||
@ -499,16 +505,17 @@ func (suite *ConnectorCreateSharePointCollectionIntegrationSuite) TestCreateShar
|
||||
|
||||
var (
|
||||
t = suite.T()
|
||||
userID = tester.M365UserID(t)
|
||||
siteID = tester.M365SiteID(t)
|
||||
gc = loadConnector(ctx, t, Sites)
|
||||
sel = selectors.NewSharePointBackup()
|
||||
)
|
||||
|
||||
gc := loadConnector(ctx, t, Sites)
|
||||
scope := selectors.NewSharePointBackup().Folders(
|
||||
[]string{userID},
|
||||
sel.Include(sel.Libraries(
|
||||
[]string{siteID},
|
||||
[]string{"foo"},
|
||||
selectors.PrefixMatch(),
|
||||
)[0]
|
||||
))
|
||||
|
||||
_, err := gc.createSharePointCollections(ctx, scope)
|
||||
_, err := gc.DataCollections(ctx, sel.Selector)
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
@ -3,8 +3,7 @@ package exchange
|
||||
import (
|
||||
"context"
|
||||
|
||||
msgraphgocore "github.com/microsoftgraph/msgraph-sdk-go-core"
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/models"
|
||||
cf "github.com/microsoftgraph/msgraph-sdk-go/users/item/contactfolders/item/childfolders"
|
||||
"github.com/pkg/errors"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/connector/graph"
|
||||
@ -70,43 +69,39 @@ func (cfc *contactFolderCache) Populate(
|
||||
}
|
||||
|
||||
var (
|
||||
containers = make(map[string]graph.Container)
|
||||
errs error
|
||||
errUpdater = func(s string, e error) {
|
||||
errs = support.WrapAndAppend(s, e, errs)
|
||||
}
|
||||
options, err = optionsForContactChildFolders([]string{"displayName", "parentFolderId"})
|
||||
)
|
||||
|
||||
query, err := cfc.
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "contact cache resolver option")
|
||||
}
|
||||
|
||||
builder := cfc.
|
||||
gs.Client().
|
||||
UsersById(cfc.userID).
|
||||
ContactFoldersById(baseID).
|
||||
ChildFolders().
|
||||
Get(ctx, nil)
|
||||
ChildFolders()
|
||||
|
||||
for {
|
||||
resp, err := builder.Get(ctx, options)
|
||||
if err != nil {
|
||||
return errors.Wrap(err, support.ConnectorStackErrorTrace(err))
|
||||
}
|
||||
|
||||
iter, err := msgraphgocore.NewPageIterator(query, cfc.gs.Adapter(),
|
||||
models.CreateContactFolderCollectionResponseFromDiscriminatorValue)
|
||||
if err != nil {
|
||||
return errors.Wrap(err, support.ConnectorStackErrorTrace(err))
|
||||
for _, fold := range resp.GetValue() {
|
||||
if err := checkIDAndName(fold); err != nil {
|
||||
errs = support.WrapAndAppend(
|
||||
"adding folder to contact resolver",
|
||||
err,
|
||||
errs,
|
||||
)
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
cb := IterativeCollectContactContainers(containers,
|
||||
"",
|
||||
errUpdater)
|
||||
if err := iter.Iterate(ctx, cb); err != nil {
|
||||
return errors.Wrap(err, support.ConnectorStackErrorTrace(err))
|
||||
}
|
||||
|
||||
if errs != nil {
|
||||
return errs
|
||||
}
|
||||
|
||||
for _, entry := range containers {
|
||||
temp := cacheFolder{
|
||||
Container: entry,
|
||||
Container: fold,
|
||||
}
|
||||
|
||||
err = cfc.addFolder(temp)
|
||||
@ -118,6 +113,13 @@ func (cfc *contactFolderCache) Populate(
|
||||
}
|
||||
}
|
||||
|
||||
if resp.GetOdataNextLink() == nil {
|
||||
break
|
||||
}
|
||||
|
||||
builder = cf.NewChildFoldersRequestBuilder(*resp.GetOdataNextLink(), cfc.gs.Adapter())
|
||||
}
|
||||
|
||||
if err := cfc.populatePaths(ctx); err != nil {
|
||||
errs = support.WrapAndAppend(
|
||||
"contacts resolver",
|
||||
|
||||
124
src/internal/connector/exchange/delta_get.go
Normal file
@ -0,0 +1,124 @@
|
||||
package exchange
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
abs "github.com/microsoft/kiota-abstractions-go"
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
|
||||
mscontactdelta "github.com/microsoftgraph/msgraph-sdk-go/users/item/contactfolders/item/contacts/delta"
|
||||
msmaildelta "github.com/microsoftgraph/msgraph-sdk-go/users/item/mailfolders/item/messages/delta"
|
||||
)
|
||||
|
||||
//nolint:lll
|
||||
const (
|
||||
mailURLTemplate = "{+baseurl}/users/{user%2Did}/mailFolders/{mailFolder%2Did}/messages/microsoft.graph.delta(){?%24top,%24skip,%24search,%24filter,%24count,%24select,%24orderby}"
|
||||
contactsURLTemplate = "{+baseurl}/users/{user%2Did}/contactFolders/{contactFolder%2Did}/contacts/microsoft.graph.delta(){?%24top,%24skip,%24search,%24filter,%24count,%24select,%24orderby}"
|
||||
)
|
||||
|
||||
// The following functions are based off the code in v0.41.0 of msgraph-sdk-go
|
||||
// for sending delta requests with query parameters.
|
||||
|
||||
//nolint:unused
|
||||
func createGetRequestInformationWithRequestConfiguration(
|
||||
baseRequestInfoFunc func() (*abs.RequestInformation, error),
|
||||
requestConfig *DeltaRequestBuilderGetRequestConfiguration,
|
||||
template string,
|
||||
) (*abs.RequestInformation, error) {
|
||||
requestInfo, err := baseRequestInfoFunc()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
requestInfo.UrlTemplate = template
|
||||
|
||||
if requestConfig != nil {
|
||||
if requestConfig.QueryParameters != nil {
|
||||
requestInfo.AddQueryParameters(*(requestConfig.QueryParameters))
|
||||
}
|
||||
|
||||
requestInfo.AddRequestHeaders(requestConfig.Headers)
|
||||
requestInfo.AddRequestOptions(requestConfig.Options)
|
||||
}
|
||||
|
||||
return requestInfo, nil
|
||||
}
|
||||
|
||||
//nolint:unused
|
||||
func sendMessagesDeltaGet(
|
||||
ctx context.Context,
|
||||
m *msmaildelta.DeltaRequestBuilder,
|
||||
requestConfiguration *DeltaRequestBuilderGetRequestConfiguration,
|
||||
adapter abs.RequestAdapter,
|
||||
) (msmaildelta.DeltaResponseable, error) {
|
||||
requestInfo, err := createGetRequestInformationWithRequestConfiguration(
|
||||
func() (*abs.RequestInformation, error) {
|
||||
return m.CreateGetRequestInformationWithRequestConfiguration(nil)
|
||||
},
|
||||
requestConfiguration,
|
||||
mailURLTemplate,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
errorMapping := abs.ErrorMappings{
|
||||
"4XX": odataerrors.CreateODataErrorFromDiscriminatorValue,
|
||||
"5XX": odataerrors.CreateODataErrorFromDiscriminatorValue,
|
||||
}
|
||||
|
||||
res, err := adapter.SendAsync(
|
||||
ctx,
|
||||
requestInfo,
|
||||
msmaildelta.CreateDeltaResponseFromDiscriminatorValue,
|
||||
errorMapping,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if res == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
return res.(msmaildelta.DeltaResponseable), nil
|
||||
}
|
||||
|
||||
//nolint:unused
|
||||
func sendContactsDeltaGet(
|
||||
ctx context.Context,
|
||||
m *mscontactdelta.DeltaRequestBuilder,
|
||||
requestConfiguration *DeltaRequestBuilderGetRequestConfiguration,
|
||||
adapter abs.RequestAdapter,
|
||||
) (mscontactdelta.DeltaResponseable, error) {
|
||||
requestInfo, err := createGetRequestInformationWithRequestConfiguration(
|
||||
func() (*abs.RequestInformation, error) {
|
||||
return m.CreateGetRequestInformationWithRequestConfiguration(nil)
|
||||
},
|
||||
requestConfiguration,
|
||||
contactsURLTemplate,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
errorMapping := abs.ErrorMappings{
|
||||
"4XX": odataerrors.CreateODataErrorFromDiscriminatorValue,
|
||||
"5XX": odataerrors.CreateODataErrorFromDiscriminatorValue,
|
||||
}
|
||||
|
||||
res, err := adapter.SendAsync(
|
||||
ctx,
|
||||
requestInfo,
|
||||
mscontactdelta.CreateDeltaResponseFromDiscriminatorValue,
|
||||
errorMapping,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if res == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
return res.(mscontactdelta.DeltaResponseable), nil
|
||||
}
|
||||
@ -26,7 +26,9 @@ func TestEventSuite(t *testing.T) {
|
||||
// TestEventInfo verifies that searchable event metadata
|
||||
// can be properly retrieved from a models.Eventable object
|
||||
func (suite *EventSuite) TestEventInfo() {
|
||||
initial := time.Now()
|
||||
// Exchange stores start/end times in UTC and the below compares hours
|
||||
// directly so we need to "normalize" the timezone here.
|
||||
initial := time.Now().UTC()
|
||||
now := common.FormatTimeWith(initial, common.M365DateTimeTimeZone)
|
||||
|
||||
suite.T().Logf("Initial: %v\nFormatted: %v\n", initial, now)
|
||||
|
||||
@ -3,6 +3,7 @@ package exchange
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
abs "github.com/microsoft/kiota-abstractions-go"
|
||||
msuser "github.com/microsoftgraph/msgraph-sdk-go/users"
|
||||
mscalendars "github.com/microsoftgraph/msgraph-sdk-go/users/item/calendars"
|
||||
mscontactfolder "github.com/microsoftgraph/msgraph-sdk-go/users/item/contactfolders"
|
||||
@ -125,6 +126,24 @@ func CategoryToOptionIdentifier(category path.CategoryType) optionIdentifier {
|
||||
// which reduces the overall latency of complex calls
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
// Delta requests for mail and contacts have the same parameters and config
|
||||
// structs.
|
||||
type DeltaRequestBuilderGetQueryParameters struct {
|
||||
Count *bool `uriparametername:"%24count"`
|
||||
Filter *string `uriparametername:"%24filter"`
|
||||
Orderby []string `uriparametername:"%24orderby"`
|
||||
Search *string `uriparametername:"%24search"`
|
||||
Select []string `uriparametername:"%24select"`
|
||||
Skip *int32 `uriparametername:"%24skip"`
|
||||
Top *int32 `uriparametername:"%24top"`
|
||||
}
|
||||
|
||||
type DeltaRequestBuilderGetRequestConfiguration struct {
|
||||
Headers map[string]string
|
||||
Options []abs.RequestOption
|
||||
QueryParameters *DeltaRequestBuilderGetQueryParameters
|
||||
}
|
||||
|
||||
func optionsForFolderMessages(moreOps []string) (*msmfmessage.MessagesRequestBuilderGetRequestConfiguration, error) {
|
||||
selecting, err := buildOptions(moreOps, messages)
|
||||
if err != nil {
|
||||
|
||||
@ -81,6 +81,8 @@ func StringToPathCategory(input string) path.CategoryType {
|
||||
return path.EventsCategory
|
||||
case "files":
|
||||
return path.FilesCategory
|
||||
case "libraries":
|
||||
return path.LibrariesCategory
|
||||
default:
|
||||
return path.UnknownCategory
|
||||
}
|
||||
|
||||
@ -104,6 +104,10 @@ func NewGraphConnector(ctx context.Context, acct account.Account, r resource) (*
|
||||
|
||||
gc.graphService = *aService
|
||||
|
||||
// TODO(ashmrtn): When selectors only encapsulate a single resource owner that
|
||||
// is not a wildcard don't populate users or sites when making the connector.
|
||||
// For now this keeps things functioning if callers do pass in a selector like
|
||||
// "*" instead of.
|
||||
if r == AllResources || r == Users {
|
||||
if err = gc.setTenantUsers(ctx); err != nil {
|
||||
return nil, errors.Wrap(err, "retrieving tenant user list")
|
||||
@ -220,6 +224,8 @@ func (gc *GraphConnector) setTenantSites(ctx context.Context) error {
|
||||
|
||||
var errKnownSkippableCase = errors.New("case is known and skippable")
|
||||
|
||||
const personalSitePath = "sharepoint.com/personal/"
|
||||
|
||||
// Transforms an interface{} into a key,value pair representing
|
||||
// siteName:siteID.
|
||||
func identifySite(item any) (string, string, error) {
|
||||
@ -237,6 +243,12 @@ func identifySite(item any) (string, string, error) {
|
||||
return "", "", errors.Errorf("no name for Site: %s", *m.GetId())
|
||||
}
|
||||
|
||||
// personal (ie: oneDrive) sites have to be filtered out server-side.
|
||||
url := m.GetWebUrl()
|
||||
if url != nil && strings.Contains(*url, personalSitePath) {
|
||||
return "", "", errKnownSkippableCase
|
||||
}
|
||||
|
||||
return *m.GetName(), *m.GetId(), nil
|
||||
}
|
||||
|
||||
|
||||
@ -18,7 +18,7 @@ import (
|
||||
|
||||
// ---------------------------------------------------------------
|
||||
// Disconnected Test Section
|
||||
// -------------------------
|
||||
// ---------------------------------------------------------------
|
||||
type DisconnectedGraphConnectorSuite struct {
|
||||
suite.Suite
|
||||
}
|
||||
@ -206,12 +206,13 @@ func (suite *DisconnectedGraphConnectorSuite) TestRestoreFailsBadService() {
|
||||
}
|
||||
|
||||
func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs() {
|
||||
users := make(map[string]string)
|
||||
users["elliotReid@someHospital.org"] = ""
|
||||
users["chrisTurk@someHospital.org"] = ""
|
||||
users["carlaEspinosa@someHospital.org"] = ""
|
||||
users["bobKelso@someHospital.org"] = ""
|
||||
users["johnDorian@someHospital.org"] = ""
|
||||
users := []string{
|
||||
"elliotReid@someHospital.org",
|
||||
"chrisTurk@someHospital.org",
|
||||
"carlaEspinosa@someHospital.org",
|
||||
"bobKelso@someHospital.org",
|
||||
"johnDorian@someHospital.org",
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
@ -219,12 +220,10 @@ func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs() {
|
||||
checkError assert.ErrorAssertionFunc
|
||||
}{
|
||||
{
|
||||
name: "Invalid User",
|
||||
checkError: assert.Error,
|
||||
name: "No scopes",
|
||||
checkError: assert.NoError,
|
||||
getSelector: func(t *testing.T) selectors.Selector {
|
||||
sel := selectors.NewOneDriveBackup()
|
||||
sel.Include(sel.Folders([]string{"foo@SomeCompany.org"}, selectors.Any()))
|
||||
return sel.Selector
|
||||
return selectors.NewExchangeBackup().Selector
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -260,7 +259,108 @@ func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs() {
|
||||
|
||||
for _, test := range tests {
|
||||
suite.T().Run(test.name, func(t *testing.T) {
|
||||
err := verifyBackupInputs(test.getSelector(t), users)
|
||||
err := verifyBackupInputs(test.getSelector(t), users, nil)
|
||||
test.checkError(t, err)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *DisconnectedGraphConnectorSuite) TestVerifyBackupInputs_allServices() {
|
||||
users := []string{"elliotReid@someHospital.org"}
|
||||
sites := []string{"abc.site.foo", "bar.site.baz"}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
excludes func(t *testing.T) selectors.Selector
|
||||
filters func(t *testing.T) selectors.Selector
|
||||
includes func(t *testing.T) selectors.Selector
|
||||
checkError assert.ErrorAssertionFunc
|
||||
}{
|
||||
{
|
||||
name: "Valid User",
|
||||
checkError: assert.NoError,
|
||||
excludes: func(t *testing.T) selectors.Selector {
|
||||
sel := selectors.NewOneDriveBackup()
|
||||
sel.Exclude(sel.Folders([]string{"elliotReid@someHospital.org"}, selectors.Any()))
|
||||
return sel.Selector
|
||||
},
|
||||
filters: func(t *testing.T) selectors.Selector {
|
||||
sel := selectors.NewOneDriveBackup()
|
||||
sel.Filter(sel.Folders([]string{"elliotReid@someHospital.org"}, selectors.Any()))
|
||||
return sel.Selector
|
||||
},
|
||||
includes: func(t *testing.T) selectors.Selector {
|
||||
sel := selectors.NewOneDriveBackup()
|
||||
sel.Include(sel.Folders([]string{"elliotReid@someHospital.org"}, selectors.Any()))
|
||||
return sel.Selector
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Invalid User",
|
||||
checkError: assert.Error,
|
||||
excludes: func(t *testing.T) selectors.Selector {
|
||||
sel := selectors.NewOneDriveBackup()
|
||||
sel.Exclude(sel.Folders([]string{"foo@SomeCompany.org"}, selectors.Any()))
|
||||
return sel.Selector
|
||||
},
|
||||
filters: func(t *testing.T) selectors.Selector {
|
||||
sel := selectors.NewOneDriveBackup()
|
||||
sel.Filter(sel.Folders([]string{"foo@SomeCompany.org"}, selectors.Any()))
|
||||
return sel.Selector
|
||||
},
|
||||
includes: func(t *testing.T) selectors.Selector {
|
||||
sel := selectors.NewOneDriveBackup()
|
||||
sel.Include(sel.Folders([]string{"foo@SomeCompany.org"}, selectors.Any()))
|
||||
return sel.Selector
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "valid sites",
|
||||
checkError: assert.NoError,
|
||||
excludes: func(t *testing.T) selectors.Selector {
|
||||
sel := selectors.NewSharePointBackup()
|
||||
sel.Exclude(sel.Sites([]string{"abc.site.foo", "bar.site.baz"}))
|
||||
return sel.Selector
|
||||
},
|
||||
filters: func(t *testing.T) selectors.Selector {
|
||||
sel := selectors.NewSharePointBackup()
|
||||
sel.Filter(sel.Sites([]string{"abc.site.foo", "bar.site.baz"}))
|
||||
return sel.Selector
|
||||
},
|
||||
includes: func(t *testing.T) selectors.Selector {
|
||||
sel := selectors.NewSharePointBackup()
|
||||
sel.Include(sel.Sites([]string{"abc.site.foo", "bar.site.baz"}))
|
||||
return sel.Selector
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "invalid sites",
|
||||
checkError: assert.Error,
|
||||
excludes: func(t *testing.T) selectors.Selector {
|
||||
sel := selectors.NewSharePointBackup()
|
||||
sel.Exclude(sel.Sites([]string{"fnords.smarfs.brawnhilda"}))
|
||||
return sel.Selector
|
||||
},
|
||||
filters: func(t *testing.T) selectors.Selector {
|
||||
sel := selectors.NewSharePointBackup()
|
||||
sel.Filter(sel.Sites([]string{"fnords.smarfs.brawnhilda"}))
|
||||
return sel.Selector
|
||||
},
|
||||
includes: func(t *testing.T) selectors.Selector {
|
||||
sel := selectors.NewSharePointBackup()
|
||||
sel.Include(sel.Sites([]string{"fnords.smarfs.brawnhilda"}))
|
||||
return sel.Selector
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
suite.T().Run(test.name, func(t *testing.T) {
|
||||
err := verifyBackupInputs(test.excludes(t), users, sites)
|
||||
test.checkError(t, err)
|
||||
err = verifyBackupInputs(test.filters(t), users, sites)
|
||||
test.checkError(t, err)
|
||||
err = verifyBackupInputs(test.includes(t), users, sites)
|
||||
test.checkError(t, err)
|
||||
})
|
||||
}
|
||||
|
||||
@ -23,7 +23,7 @@ import (
|
||||
func mustToDataLayerPath(
|
||||
t *testing.T,
|
||||
service path.ServiceType,
|
||||
tenant, user string,
|
||||
tenant, resourceOwner string,
|
||||
category path.CategoryType,
|
||||
elements []string,
|
||||
isItem bool,
|
||||
@ -37,11 +37,13 @@ func mustToDataLayerPath(
|
||||
|
||||
switch service {
|
||||
case path.ExchangeService:
|
||||
res, err = pb.ToDataLayerExchangePathForCategory(tenant, user, category, isItem)
|
||||
res, err = pb.ToDataLayerExchangePathForCategory(tenant, resourceOwner, category, isItem)
|
||||
case path.OneDriveService:
|
||||
require.Equal(t, path.FilesCategory, category)
|
||||
|
||||
res, err = pb.ToDataLayerOneDrivePath(tenant, user, isItem)
|
||||
res, err = pb.ToDataLayerOneDrivePath(tenant, resourceOwner, isItem)
|
||||
case path.SharePointService:
|
||||
res, err = pb.ToDataLayerSharePointPath(tenant, resourceOwner, category, isItem)
|
||||
|
||||
default:
|
||||
err = errors.Errorf("bad service type %s", service.String())
|
||||
@ -603,6 +605,27 @@ func compareExchangeEvent(
|
||||
checkEvent(t, expectedEvent, itemEvent)
|
||||
}
|
||||
|
||||
func compareOneDriveItem(
|
||||
t *testing.T,
|
||||
expected map[string][]byte,
|
||||
item data.Stream,
|
||||
) {
|
||||
expectedData := expected[item.UUID()]
|
||||
if !assert.NotNil(t, expectedData, "unexpected file with name %s", item.UUID) {
|
||||
return
|
||||
}
|
||||
|
||||
// OneDrive items are just byte buffers of the data. Nothing special to
|
||||
// interpret. May need to do chunked comparisons in the future if we test
|
||||
// large item equality.
|
||||
buf, err := io.ReadAll(item.ToReader())
|
||||
if !assert.NoError(t, err) {
|
||||
return
|
||||
}
|
||||
|
||||
assert.Equal(t, expectedData, buf)
|
||||
}
|
||||
|
||||
func compareItem(
|
||||
t *testing.T,
|
||||
expected map[string][]byte,
|
||||
@ -622,6 +645,10 @@ func compareItem(
|
||||
default:
|
||||
assert.FailNowf(t, "unexpected Exchange category: %s", category.String())
|
||||
}
|
||||
|
||||
case path.OneDriveService:
|
||||
compareOneDriveItem(t, expected, item)
|
||||
|
||||
default:
|
||||
assert.FailNowf(t, "unexpected service: %s", service.String())
|
||||
}
|
||||
|
||||
@ -1,14 +1,19 @@
|
||||
package connector
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/connector/graph"
|
||||
"github.com/alcionai/corso/src/internal/connector/mockconnector"
|
||||
"github.com/alcionai/corso/src/internal/connector/support"
|
||||
"github.com/alcionai/corso/src/internal/data"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
@ -61,7 +66,7 @@ func (suite *GraphConnectorIntegrationSuite) TestSetTenantUsers() {
|
||||
|
||||
newConnector.graphService = *service
|
||||
|
||||
suite.Equal(len(newConnector.Users), 0)
|
||||
suite.Empty(len(newConnector.Users))
|
||||
err = newConnector.setTenantUsers(ctx)
|
||||
suite.NoError(err)
|
||||
suite.Less(0, len(newConnector.Users))
|
||||
@ -88,6 +93,10 @@ func (suite *GraphConnectorIntegrationSuite) TestSetTenantSites() {
|
||||
err = newConnector.setTenantSites(ctx)
|
||||
suite.NoError(err)
|
||||
suite.Less(0, len(newConnector.Sites))
|
||||
|
||||
for _, site := range newConnector.Sites {
|
||||
suite.NotContains("sharepoint.com/personal/", site)
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
|
||||
@ -149,6 +158,31 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
|
||||
// Exchange Functions
|
||||
//-------------------------------------------------------------
|
||||
|
||||
//revive:disable:context-as-argument
|
||||
func mustGetDefaultDriveID(
|
||||
t *testing.T,
|
||||
ctx context.Context,
|
||||
service graph.Service,
|
||||
userID string,
|
||||
) string {
|
||||
//revive:enable:context-as-argument
|
||||
d, err := service.Client().UsersById(userID).Drive().Get(ctx, nil)
|
||||
if err != nil {
|
||||
err = errors.Wrapf(
|
||||
err,
|
||||
"failed to retrieve default user drive. user: %s, details: %s",
|
||||
userID,
|
||||
support.ConnectorStackErrorTrace(err),
|
||||
)
|
||||
}
|
||||
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, d.GetId())
|
||||
require.NotEmpty(t, *d.GetId())
|
||||
|
||||
return *d.GetId()
|
||||
}
|
||||
|
||||
func runRestoreBackupTest(
|
||||
t *testing.T,
|
||||
test restoreBackupInfo,
|
||||
@ -199,7 +233,7 @@ func runRestoreBackupTest(
|
||||
assert.NotNil(t, deets)
|
||||
|
||||
status := restoreGC.AwaitStatus()
|
||||
runTime := time.Now().Sub(start)
|
||||
runTime := time.Since(start)
|
||||
|
||||
assert.Equal(t, totalItems, status.ObjectCount, "status.ObjectCount")
|
||||
assert.Equal(t, totalItems, status.Successful, "status.Successful")
|
||||
@ -234,7 +268,7 @@ func runRestoreBackupTest(
|
||||
dcs, err := backupGC.DataCollections(ctx, backupSel)
|
||||
require.NoError(t, err)
|
||||
|
||||
t.Logf("Backup enumeration complete in %v\n", time.Now().Sub(start))
|
||||
t.Logf("Backup enumeration complete in %v\n", time.Since(start))
|
||||
|
||||
// Pull the data prior to waiting for the status as otherwise it will
|
||||
// deadlock.
|
||||
@ -249,6 +283,17 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
|
||||
bodyText := "This email has some text. However, all the text is on the same line."
|
||||
subjectText := "Test message for restore"
|
||||
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
// Get the default drive ID for the test user.
|
||||
driveID := mustGetDefaultDriveID(
|
||||
suite.T(),
|
||||
ctx,
|
||||
suite.connector.Service(),
|
||||
suite.user,
|
||||
)
|
||||
|
||||
table := []restoreBackupInfo{
|
||||
{
|
||||
name: "EmailsWithAttachments",
|
||||
@ -465,6 +510,95 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
|
||||
// },
|
||||
// },
|
||||
// },
|
||||
{
|
||||
name: "OneDriveMultipleFoldersAndFiles",
|
||||
service: path.OneDriveService,
|
||||
resource: Users,
|
||||
collections: []colInfo{
|
||||
{
|
||||
pathElements: []string{
|
||||
"drives",
|
||||
driveID,
|
||||
"root:",
|
||||
},
|
||||
category: path.FilesCategory,
|
||||
items: []itemInfo{
|
||||
{
|
||||
name: "test-file.txt",
|
||||
data: []byte(strings.Repeat("a", 33)),
|
||||
lookupKey: "test-file.txt",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
pathElements: []string{
|
||||
"drives",
|
||||
driveID,
|
||||
"root:",
|
||||
"folder-a",
|
||||
},
|
||||
category: path.FilesCategory,
|
||||
items: []itemInfo{
|
||||
{
|
||||
name: "test-file.txt",
|
||||
data: []byte(strings.Repeat("b", 65)),
|
||||
lookupKey: "test-file.txt",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
pathElements: []string{
|
||||
"drives",
|
||||
driveID,
|
||||
"root:",
|
||||
"folder-a",
|
||||
"b",
|
||||
},
|
||||
category: path.FilesCategory,
|
||||
items: []itemInfo{
|
||||
{
|
||||
name: "test-file.txt",
|
||||
data: []byte(strings.Repeat("c", 129)),
|
||||
lookupKey: "test-file.txt",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
pathElements: []string{
|
||||
"drives",
|
||||
driveID,
|
||||
"root:",
|
||||
"folder-a",
|
||||
"b",
|
||||
"folder-a",
|
||||
},
|
||||
category: path.FilesCategory,
|
||||
items: []itemInfo{
|
||||
{
|
||||
name: "test-file.txt",
|
||||
data: []byte(strings.Repeat("d", 257)),
|
||||
lookupKey: "test-file.txt",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
pathElements: []string{
|
||||
"drives",
|
||||
driveID,
|
||||
"root:",
|
||||
"b",
|
||||
},
|
||||
category: path.FilesCategory,
|
||||
items: []itemInfo{
|
||||
{
|
||||
name: "test-file.txt",
|
||||
data: []byte(strings.Repeat("e", 257)),
|
||||
lookupKey: "test-file.txt",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range table {
|
||||
|
||||
@ -19,6 +19,7 @@ import (
|
||||
// 6. subject
|
||||
// 7. hasAttachments
|
||||
// 8. attachments
|
||||
//
|
||||
//nolint:lll
|
||||
const (
|
||||
eventTmpl = `{
|
||||
|
||||
@ -123,7 +123,7 @@ func (oc *Collection) populateItems(ctx context.Context) {
|
||||
|
||||
folderProgress, colCloser := observe.ProgressWithCount(
|
||||
observe.ItemQueueMsg,
|
||||
"Folder: /"+parentPathString,
|
||||
"/"+parentPathString,
|
||||
int64(len(oc.driveItemIDs)),
|
||||
)
|
||||
defer colCloser()
|
||||
|
||||
@ -60,7 +60,7 @@ func (suite *OneDriveCollectionSuite) TestOneDriveCollection() {
|
||||
wg := sync.WaitGroup{}
|
||||
collStatus := support.ConnectorOperationStatus{}
|
||||
|
||||
folderPath, err := getCanonicalPath("drive/driveID1/root:/dir1/dir2/dir3", "a-tenant", "a-user")
|
||||
folderPath, err := GetCanonicalPath("drive/driveID1/root:/dir1/dir2/dir3", "a-tenant", "a-user", OneDriveSource)
|
||||
require.NoError(t, err)
|
||||
driveFolderPath, err := getDriveFolderPath(folderPath)
|
||||
require.NoError(t, err)
|
||||
@ -117,7 +117,7 @@ func (suite *OneDriveCollectionSuite) TestOneDriveCollectionReadError() {
|
||||
wg := sync.WaitGroup{}
|
||||
wg.Add(1)
|
||||
|
||||
folderPath, err := getCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user")
|
||||
folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", OneDriveSource)
|
||||
require.NoError(t, err)
|
||||
|
||||
coll := NewCollection(folderPath, "fakeDriveID", suite, suite.testStatusUpdater(&wg, &collStatus))
|
||||
|
||||
@ -14,79 +14,172 @@ import (
|
||||
"github.com/alcionai/corso/src/internal/observe"
|
||||
"github.com/alcionai/corso/src/pkg/logger"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
)
|
||||
|
||||
// Collections is used to retrieve OneDrive data for a
|
||||
// specified user
|
||||
type driveSource int
|
||||
|
||||
const (
|
||||
unknownDriveSource driveSource = iota
|
||||
OneDriveSource
|
||||
SharePointSource
|
||||
)
|
||||
|
||||
type folderMatcher interface {
|
||||
IsAny() bool
|
||||
Matches(string) bool
|
||||
}
|
||||
|
||||
// Collections is used to retrieve drive data for a
|
||||
// resource owner, which can be either a user or a sharepoint site.
|
||||
type Collections struct {
|
||||
tenant string
|
||||
user string
|
||||
scope selectors.OneDriveScope
|
||||
// collectionMap allows lookup of the data.Collection
|
||||
// for a OneDrive folder
|
||||
collectionMap map[string]data.Collection
|
||||
resourceOwner string
|
||||
source driveSource
|
||||
matcher folderMatcher
|
||||
service graph.Service
|
||||
statusUpdater support.StatusUpdater
|
||||
|
||||
// collectionMap allows lookup of the data.Collection
|
||||
// for a OneDrive folder
|
||||
CollectionMap map[string]data.Collection
|
||||
|
||||
// Track stats from drive enumeration. Represents the items backed up.
|
||||
numItems int
|
||||
numFiles int
|
||||
numContainers int
|
||||
NumItems int
|
||||
NumFiles int
|
||||
NumContainers int
|
||||
}
|
||||
|
||||
func NewCollections(
|
||||
tenant string,
|
||||
user string,
|
||||
scope selectors.OneDriveScope,
|
||||
resourceOwner string,
|
||||
source driveSource,
|
||||
matcher folderMatcher,
|
||||
service graph.Service,
|
||||
statusUpdater support.StatusUpdater,
|
||||
) *Collections {
|
||||
return &Collections{
|
||||
tenant: tenant,
|
||||
user: user,
|
||||
scope: scope,
|
||||
collectionMap: map[string]data.Collection{},
|
||||
resourceOwner: resourceOwner,
|
||||
source: source,
|
||||
matcher: matcher,
|
||||
CollectionMap: map[string]data.Collection{},
|
||||
service: service,
|
||||
statusUpdater: statusUpdater,
|
||||
}
|
||||
}
|
||||
|
||||
// Retrieves OneDrive data as set of `data.Collections`
|
||||
// Retrieves drive data as set of `data.Collections`
|
||||
func (c *Collections) Get(ctx context.Context) ([]data.Collection, error) {
|
||||
// Enumerate drives for the specified user
|
||||
drives, err := drives(ctx, c.service, c.user)
|
||||
// Enumerate drives for the specified resourceOwner
|
||||
drives, err := drives(ctx, c.service, c.resourceOwner, c.source)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Update the collection map with items from each drive
|
||||
for _, d := range drives {
|
||||
err = collectItems(ctx, c.service, *d.GetId(), c.updateCollections)
|
||||
err = collectItems(ctx, c.service, *d.GetId(), c.UpdateCollections)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
observe.Message(fmt.Sprintf("Discovered %d items to backup", c.numItems))
|
||||
observe.Message(fmt.Sprintf("Discovered %d items to backup", c.NumItems))
|
||||
|
||||
collections := make([]data.Collection, 0, len(c.collectionMap))
|
||||
for _, coll := range c.collectionMap {
|
||||
collections := make([]data.Collection, 0, len(c.CollectionMap))
|
||||
for _, coll := range c.CollectionMap {
|
||||
collections = append(collections, coll)
|
||||
}
|
||||
|
||||
return collections, nil
|
||||
}
|
||||
|
||||
func getCanonicalPath(p, tenant, user string) (path.Path, error) {
|
||||
pathBuilder := path.Builder{}.Append(strings.Split(p, "/")...)
|
||||
// UpdateCollections initializes and adds the provided drive items to Collections
|
||||
// A new collection is created for every drive folder (or package)
|
||||
func (c *Collections) UpdateCollections(ctx context.Context, driveID string, items []models.DriveItemable) error {
|
||||
for _, item := range items {
|
||||
if item.GetRoot() != nil {
|
||||
// Skip the root item
|
||||
continue
|
||||
}
|
||||
|
||||
if item.GetParentReference() == nil || item.GetParentReference().GetPath() == nil {
|
||||
return errors.Errorf("item does not have a parent reference. item name : %s", *item.GetName())
|
||||
}
|
||||
|
||||
// Create a collection for the parent of this item
|
||||
collectionPath, err := GetCanonicalPath(
|
||||
*item.GetParentReference().GetPath(),
|
||||
c.tenant,
|
||||
c.resourceOwner,
|
||||
c.source,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Skip items that don't match the folder selectors we were given.
|
||||
if !includePath(ctx, c.matcher, collectionPath) {
|
||||
logger.Ctx(ctx).Infof("Skipping path %s", collectionPath.String())
|
||||
continue
|
||||
}
|
||||
|
||||
switch {
|
||||
case item.GetFolder() != nil, item.GetPackage() != nil:
|
||||
// Leave this here so we don't fall into the default case.
|
||||
// TODO: This is where we might create a "special file" to represent these in the backup repository
|
||||
// e.g. a ".folderMetadataFile"
|
||||
|
||||
case item.GetFile() != nil:
|
||||
col, found := c.CollectionMap[collectionPath.String()]
|
||||
if !found {
|
||||
col = NewCollection(
|
||||
collectionPath,
|
||||
driveID,
|
||||
c.service,
|
||||
c.statusUpdater,
|
||||
)
|
||||
|
||||
c.CollectionMap[collectionPath.String()] = col
|
||||
c.NumContainers++
|
||||
c.NumItems++
|
||||
}
|
||||
|
||||
collection := col.(*Collection)
|
||||
collection.Add(*item.GetId())
|
||||
c.NumFiles++
|
||||
c.NumItems++
|
||||
|
||||
default:
|
||||
return errors.Errorf("item type not supported. item name : %s", *item.GetName())
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetCanonicalPath constructs the standard path for the given source.
|
||||
func GetCanonicalPath(p, tenant, resourceOwner string, source driveSource) (path.Path, error) {
|
||||
var (
|
||||
pathBuilder = path.Builder{}.Append(strings.Split(p, "/")...)
|
||||
result path.Path
|
||||
err error
|
||||
)
|
||||
|
||||
switch source {
|
||||
case OneDriveSource:
|
||||
result, err = pathBuilder.ToDataLayerOneDrivePath(tenant, resourceOwner, false)
|
||||
case SharePointSource:
|
||||
result, err = pathBuilder.ToDataLayerSharePointPath(tenant, resourceOwner, path.LibrariesCategory, false)
|
||||
default:
|
||||
return nil, errors.Errorf("unrecognized drive data source")
|
||||
}
|
||||
|
||||
res, err := pathBuilder.ToDataLayerOneDrivePath(tenant, user, false)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "converting to canonical path")
|
||||
}
|
||||
|
||||
return res, nil
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// Returns the path to the folder within the drive (i.e. under `root:`)
|
||||
@ -99,70 +192,7 @@ func getDriveFolderPath(p path.Path) (string, error) {
|
||||
return path.Builder{}.Append(drivePath.folders...).String(), nil
|
||||
}
|
||||
|
||||
// updateCollections initializes and adds the provided OneDrive items to Collections
|
||||
// A new collection is created for every OneDrive folder (or package)
|
||||
func (c *Collections) updateCollections(ctx context.Context, driveID string, items []models.DriveItemable) error {
|
||||
for _, item := range items {
|
||||
if item.GetRoot() != nil {
|
||||
// Skip the root item
|
||||
continue
|
||||
}
|
||||
|
||||
if item.GetParentReference() == nil || item.GetParentReference().GetPath() == nil {
|
||||
return errors.Errorf("item does not have a parent reference. item name : %s", *item.GetName())
|
||||
}
|
||||
|
||||
// Create a collection for the parent of this item
|
||||
collectionPath, err := getCanonicalPath(
|
||||
*item.GetParentReference().GetPath(),
|
||||
c.tenant,
|
||||
c.user,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Skip items that don't match the folder selectors we were given.
|
||||
if !includePath(ctx, c.scope, collectionPath) {
|
||||
logger.Ctx(ctx).Infof("Skipping path %s", collectionPath.String())
|
||||
continue
|
||||
}
|
||||
|
||||
switch {
|
||||
case item.GetFolder() != nil, item.GetPackage() != nil:
|
||||
// Leave this here so we don't fall into the default case.
|
||||
// TODO: This is where we might create a "special file" to represent these in the backup repository
|
||||
// e.g. a ".folderMetadataFile"
|
||||
|
||||
case item.GetFile() != nil:
|
||||
col, found := c.collectionMap[collectionPath.String()]
|
||||
if !found {
|
||||
col = NewCollection(
|
||||
collectionPath,
|
||||
driveID,
|
||||
c.service,
|
||||
c.statusUpdater,
|
||||
)
|
||||
|
||||
c.collectionMap[collectionPath.String()] = col
|
||||
c.numContainers++
|
||||
c.numItems++
|
||||
}
|
||||
|
||||
collection := col.(*Collection)
|
||||
collection.Add(*item.GetId())
|
||||
c.numFiles++
|
||||
c.numItems++
|
||||
|
||||
default:
|
||||
return errors.Errorf("item type not supported. item name : %s", *item.GetName())
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func includePath(ctx context.Context, scope selectors.OneDriveScope, folderPath path.Path) bool {
|
||||
func includePath(ctx context.Context, m folderMatcher, folderPath path.Path) bool {
|
||||
// Check if the folder is allowed by the scope.
|
||||
folderPathString, err := getDriveFolderPath(folderPath)
|
||||
if err != nil {
|
||||
@ -172,9 +202,9 @@ func includePath(ctx context.Context, scope selectors.OneDriveScope, folderPath
|
||||
|
||||
// Hack for the edge case where we're looking at the root folder and can
|
||||
// select any folder. Right now the root folder has an empty folder path.
|
||||
if len(folderPathString) == 0 && scope.IsAny(selectors.OneDriveFolder) {
|
||||
if len(folderPathString) == 0 && m.IsAny() {
|
||||
return true
|
||||
}
|
||||
|
||||
return scope.Matches(selectors.OneDriveFolder, folderPathString)
|
||||
return m.Matches(folderPathString)
|
||||
}
|
||||
|
||||
@ -1,6 +1,7 @@
|
||||
package onedrive
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/models"
|
||||
@ -20,7 +21,7 @@ func expectedPathAsSlice(t *testing.T, tenant, user string, rest ...string) []st
|
||||
res := make([]string, 0, len(rest))
|
||||
|
||||
for _, r := range rest {
|
||||
p, err := getCanonicalPath(r, tenant, user)
|
||||
p, err := GetCanonicalPath(r, tenant, user, OneDriveSource)
|
||||
require.NoError(t, err)
|
||||
|
||||
res = append(res, p.String())
|
||||
@ -37,6 +38,49 @@ func TestOneDriveCollectionsSuite(t *testing.T) {
|
||||
suite.Run(t, new(OneDriveCollectionsSuite))
|
||||
}
|
||||
|
||||
func (suite *OneDriveCollectionsSuite) TestGetCanonicalPath() {
|
||||
tenant, resourceOwner := "tenant", "resourceOwner"
|
||||
|
||||
table := []struct {
|
||||
name string
|
||||
source driveSource
|
||||
dir []string
|
||||
expect string
|
||||
expectErr assert.ErrorAssertionFunc
|
||||
}{
|
||||
{
|
||||
name: "onedrive",
|
||||
source: OneDriveSource,
|
||||
dir: []string{"onedrive"},
|
||||
expect: "tenant/onedrive/resourceOwner/files/onedrive",
|
||||
expectErr: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "sharepoint",
|
||||
source: SharePointSource,
|
||||
dir: []string{"sharepoint"},
|
||||
expect: "tenant/sharepoint/resourceOwner/libraries/sharepoint",
|
||||
expectErr: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "unknown",
|
||||
source: unknownDriveSource,
|
||||
dir: []string{"unknown"},
|
||||
expectErr: assert.Error,
|
||||
},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.T().Run(test.name, func(t *testing.T) {
|
||||
p := strings.Join(test.dir, "/")
|
||||
result, err := GetCanonicalPath(p, tenant, resourceOwner, test.source)
|
||||
test.expectErr(t, err)
|
||||
if result != nil {
|
||||
assert.Equal(t, test.expect, result.String())
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
|
||||
anyFolder := (&selectors.OneDriveBackup{}).Folders(selectors.Any(), selectors.Any())[0]
|
||||
|
||||
@ -211,15 +255,16 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
c := NewCollections(tenant, user, tt.scope, &MockGraphService{}, nil)
|
||||
err := c.updateCollections(ctx, "driveID", tt.items)
|
||||
c := NewCollections(tenant, user, OneDriveSource, testFolderMatcher{tt.scope}, &MockGraphService{}, nil)
|
||||
|
||||
err := c.UpdateCollections(ctx, "driveID", tt.items)
|
||||
tt.expect(t, err)
|
||||
assert.Equal(t, len(tt.expectedCollectionPaths), len(c.collectionMap), "collection paths")
|
||||
assert.Equal(t, tt.expectedItemCount, c.numItems, "item count")
|
||||
assert.Equal(t, tt.expectedFileCount, c.numFiles, "file count")
|
||||
assert.Equal(t, tt.expectedContainerCount, c.numContainers, "container count")
|
||||
assert.Equal(t, len(tt.expectedCollectionPaths), len(c.CollectionMap), "collection paths")
|
||||
assert.Equal(t, tt.expectedItemCount, c.NumItems, "item count")
|
||||
assert.Equal(t, tt.expectedFileCount, c.NumFiles, "file count")
|
||||
assert.Equal(t, tt.expectedContainerCount, c.NumContainers, "container count")
|
||||
for _, collPath := range tt.expectedCollectionPaths {
|
||||
assert.Contains(t, c.collectionMap, collPath)
|
||||
assert.Contains(t, c.CollectionMap, collPath)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@ -11,6 +11,7 @@ import (
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/drives/item/root/delta"
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/models"
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
|
||||
mssitedrives "github.com/microsoftgraph/msgraph-sdk-go/sites/item/drives"
|
||||
"github.com/pkg/errors"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/connector/graph"
|
||||
@ -67,7 +68,39 @@ const (
|
||||
)
|
||||
|
||||
// Enumerates the drives for the specified user
|
||||
func drives(ctx context.Context, service graph.Service, user string) ([]models.Driveable, error) {
|
||||
func drives(
|
||||
ctx context.Context,
|
||||
service graph.Service,
|
||||
resourceOwner string,
|
||||
source driveSource,
|
||||
) ([]models.Driveable, error) {
|
||||
switch source {
|
||||
case OneDriveSource:
|
||||
return userDrives(ctx, service, resourceOwner)
|
||||
case SharePointSource:
|
||||
return siteDrives(ctx, service, resourceOwner)
|
||||
default:
|
||||
return nil, errors.Errorf("unrecognized drive data source")
|
||||
}
|
||||
}
|
||||
|
||||
func siteDrives(ctx context.Context, service graph.Service, site string) ([]models.Driveable, error) {
|
||||
options := &mssitedrives.DrivesRequestBuilderGetRequestConfiguration{
|
||||
QueryParameters: &mssitedrives.DrivesRequestBuilderGetQueryParameters{
|
||||
Select: []string{"id", "name", "weburl", "system"},
|
||||
},
|
||||
}
|
||||
|
||||
r, err := service.Client().SitesById(site).Drives().Get(ctx, options)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to retrieve site drives. site: %s, details: %s",
|
||||
site, support.ConnectorStackErrorTrace(err))
|
||||
}
|
||||
|
||||
return r.GetValue(), nil
|
||||
}
|
||||
|
||||
func userDrives(ctx context.Context, service graph.Service, user string) ([]models.Driveable, error) {
|
||||
var hasDrive bool
|
||||
|
||||
hasDrive, err := hasDriveLicense(ctx, service, user)
|
||||
@ -237,7 +270,7 @@ func GetAllFolders(
|
||||
userID string,
|
||||
prefix string,
|
||||
) ([]*Displayable, error) {
|
||||
drives, err := drives(ctx, gs, userID)
|
||||
drives, err := drives(ctx, gs, userID, OneDriveSource)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "getting OneDrive folders")
|
||||
}
|
||||
@ -321,7 +354,7 @@ func hasDriveLicense(
|
||||
cb := func(pageItem any) bool {
|
||||
entry, ok := pageItem.(models.LicenseDetailsable)
|
||||
if !ok {
|
||||
err = errors.New("casting item to models.MailFolderable")
|
||||
err = errors.New("casting item to models.LicenseDetailsable")
|
||||
return false
|
||||
}
|
||||
|
||||
|
||||
@ -43,7 +43,7 @@ func (suite *OneDriveSuite) TestCreateGetDeleteFolder() {
|
||||
folderElements := []string{folderName1}
|
||||
gs := loadTestService(t)
|
||||
|
||||
drives, err := drives(ctx, gs, suite.userID)
|
||||
drives, err := drives(ctx, gs, suite.userID, OneDriveSource)
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, drives)
|
||||
|
||||
@ -100,6 +100,18 @@ func (suite *OneDriveSuite) TestCreateGetDeleteFolder() {
|
||||
}
|
||||
}
|
||||
|
||||
type testFolderMatcher struct {
|
||||
scope selectors.OneDriveScope
|
||||
}
|
||||
|
||||
func (fm testFolderMatcher) IsAny() bool {
|
||||
return fm.scope.IsAny(selectors.OneDriveFolder)
|
||||
}
|
||||
|
||||
func (fm testFolderMatcher) Matches(path string) bool {
|
||||
return fm.scope.Matches(selectors.OneDriveFolder, path)
|
||||
}
|
||||
|
||||
func (suite *OneDriveSuite) TestOneDriveNewCollections() {
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
@ -129,7 +141,8 @@ func (suite *OneDriveSuite) TestOneDriveNewCollections() {
|
||||
odcs, err := NewCollections(
|
||||
creds.AzureTenantID,
|
||||
test.user,
|
||||
scope,
|
||||
OneDriveSource,
|
||||
testFolderMatcher{scope},
|
||||
service,
|
||||
service.updateStatus,
|
||||
).Get(ctx)
|
||||
|
||||
@ -3,9 +3,10 @@ package onedrive
|
||||
import (
|
||||
"context"
|
||||
"io"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
|
||||
msgraphgocore "github.com/microsoftgraph/msgraph-sdk-go-core"
|
||||
msup "github.com/microsoftgraph/msgraph-sdk-go/drives/item/items/item/createuploadsession"
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/models"
|
||||
"github.com/pkg/errors"
|
||||
@ -51,9 +52,13 @@ func driveItemReader(
|
||||
|
||||
downloadURL := item.GetAdditionalData()[downloadURLKey].(*string)
|
||||
|
||||
// TODO: We should use the `msgraphgocore` http client which has the right
|
||||
// middleware/options configured
|
||||
resp, err := http.Get(*downloadURL)
|
||||
clientOptions := msgraphsdk.GetDefaultClientOptions()
|
||||
middlewares := msgraphgocore.GetDefaultMiddlewaresWithOptions(&clientOptions)
|
||||
|
||||
httpClient := msgraphgocore.GetDefaultClient(&clientOptions, middlewares...)
|
||||
httpClient.Timeout = 0 // need infinite timeout for pulling large files
|
||||
|
||||
resp, err := httpClient.Get(*downloadURL)
|
||||
if err != nil {
|
||||
return nil, nil, errors.Wrapf(err, "failed to download file from %s", *downloadURL)
|
||||
}
|
||||
|
||||
@ -67,7 +67,7 @@ func (suite *ItemIntegrationSuite) SetupSuite() {
|
||||
|
||||
suite.user = tester.SecondaryM365UserID(suite.T())
|
||||
|
||||
drives, err := drives(ctx, suite, suite.user)
|
||||
drives, err := drives(ctx, suite, suite.user, OneDriveSource)
|
||||
require.NoError(suite.T(), err)
|
||||
// Test Requirement 1: Need a drive
|
||||
require.Greaterf(suite.T(), len(drives), 0, "user %s does not have a drive", suite.user)
|
||||
|
||||
124
src/internal/connector/sharepoint/data_collections.go
Normal file
@ -0,0 +1,124 @@
|
||||
package sharepoint
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/connector/graph"
|
||||
"github.com/alcionai/corso/src/internal/connector/onedrive"
|
||||
"github.com/alcionai/corso/src/internal/connector/support"
|
||||
"github.com/alcionai/corso/src/internal/data"
|
||||
"github.com/alcionai/corso/src/internal/observe"
|
||||
"github.com/alcionai/corso/src/pkg/logger"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
)
|
||||
|
||||
type statusUpdater interface {
|
||||
UpdateStatus(status *support.ConnectorOperationStatus)
|
||||
}
|
||||
|
||||
type connector interface {
|
||||
statusUpdater
|
||||
|
||||
Service() graph.Service
|
||||
}
|
||||
|
||||
// DataCollections returns a set of DataCollection which represents the SharePoint data
|
||||
// for the specified user
|
||||
func DataCollections(
|
||||
ctx context.Context,
|
||||
selector selectors.Selector,
|
||||
siteIDs []string,
|
||||
tenantID string,
|
||||
con connector,
|
||||
) ([]data.Collection, error) {
|
||||
b, err := selector.ToSharePointBackup()
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "sharePointDataCollection: parsing selector")
|
||||
}
|
||||
|
||||
var (
|
||||
scopes = b.DiscreteScopes(siteIDs)
|
||||
collections = []data.Collection{}
|
||||
serv = con.Service()
|
||||
errs error
|
||||
)
|
||||
|
||||
for _, scope := range scopes {
|
||||
// due to DiscreteScopes(siteIDs), each range should only contain one site.
|
||||
for _, site := range scope.Get(selectors.SharePointSite) {
|
||||
foldersComplete, closer := observe.MessageWithCompletion(fmt.Sprintf(
|
||||
"∙ %s - %s:",
|
||||
scope.Category().PathType(), site))
|
||||
defer closer()
|
||||
defer close(foldersComplete)
|
||||
|
||||
switch scope.Category().PathType() {
|
||||
case path.LibrariesCategory:
|
||||
spcs, err := collectLibraries(
|
||||
ctx,
|
||||
serv,
|
||||
tenantID,
|
||||
site,
|
||||
scope,
|
||||
con)
|
||||
if err != nil {
|
||||
return nil, support.WrapAndAppend(site, err, errs)
|
||||
}
|
||||
|
||||
collections = append(collections, spcs...)
|
||||
}
|
||||
|
||||
foldersComplete <- struct{}{}
|
||||
}
|
||||
}
|
||||
|
||||
return collections, errs
|
||||
}
|
||||
|
||||
// collectLibraries constructs a onedrive Collections struct and Get()s
|
||||
// all the drives associated with the site.
|
||||
func collectLibraries(
|
||||
ctx context.Context,
|
||||
serv graph.Service,
|
||||
tenantID, siteID string,
|
||||
scope selectors.SharePointScope,
|
||||
updater statusUpdater,
|
||||
) ([]data.Collection, error) {
|
||||
var (
|
||||
collections = []data.Collection{}
|
||||
errs error
|
||||
)
|
||||
|
||||
logger.Ctx(ctx).With("site", siteID).Debug("Creating SharePoint Library collections")
|
||||
|
||||
colls := onedrive.NewCollections(
|
||||
tenantID,
|
||||
siteID,
|
||||
onedrive.SharePointSource,
|
||||
folderMatcher{scope},
|
||||
serv,
|
||||
updater.UpdateStatus)
|
||||
|
||||
odcs, err := colls.Get(ctx)
|
||||
if err != nil {
|
||||
return nil, support.WrapAndAppend(siteID, err, errs)
|
||||
}
|
||||
|
||||
return append(collections, odcs...), errs
|
||||
}
|
||||
|
||||
type folderMatcher struct {
|
||||
scope selectors.SharePointScope
|
||||
}
|
||||
|
||||
func (fm folderMatcher) IsAny() bool {
|
||||
return fm.scope.IsAny(selectors.SharePointLibrary)
|
||||
}
|
||||
|
||||
func (fm folderMatcher) Matches(dir string) bool {
|
||||
return fm.scope.Matches(selectors.SharePointLibrary, dir)
|
||||
}
|
||||
156
src/internal/connector/sharepoint/data_collections_test.go
Normal file
@ -0,0 +1,156 @@
|
||||
package sharepoint_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/models"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/connector/onedrive"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
)
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// consts, mocks
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const (
|
||||
testBaseDrivePath = "drive/driveID1/root:"
|
||||
)
|
||||
|
||||
type testFolderMatcher struct {
|
||||
scope selectors.SharePointScope
|
||||
}
|
||||
|
||||
func (fm testFolderMatcher) IsAny() bool {
|
||||
return fm.scope.IsAny(selectors.SharePointLibrary)
|
||||
}
|
||||
|
||||
func (fm testFolderMatcher) Matches(path string) bool {
|
||||
return fm.scope.Matches(selectors.SharePointLibrary, path)
|
||||
}
|
||||
|
||||
type MockGraphService struct{}
|
||||
|
||||
func (ms *MockGraphService) Client() *msgraphsdk.GraphServiceClient {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (ms *MockGraphService) Adapter() *msgraphsdk.GraphRequestAdapter {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (ms *MockGraphService) ErrPolicy() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
type SharePointLibrariesSuite struct {
|
||||
suite.Suite
|
||||
}
|
||||
|
||||
func TestSharePointLibrariesSuite(t *testing.T) {
|
||||
suite.Run(t, new(SharePointLibrariesSuite))
|
||||
}
|
||||
|
||||
func (suite *SharePointLibrariesSuite) TestUpdateCollections() {
|
||||
anyFolder := (&selectors.SharePointBackup{}).Libraries(selectors.Any(), selectors.Any())[0]
|
||||
|
||||
const (
|
||||
tenant = "tenant"
|
||||
site = "site"
|
||||
)
|
||||
|
||||
tests := []struct {
|
||||
testCase string
|
||||
items []models.DriveItemable
|
||||
scope selectors.SharePointScope
|
||||
expect assert.ErrorAssertionFunc
|
||||
expectedCollectionPaths []string
|
||||
expectedItemCount int
|
||||
expectedContainerCount int
|
||||
expectedFileCount int
|
||||
}{
|
||||
{
|
||||
testCase: "Single File",
|
||||
items: []models.DriveItemable{
|
||||
driveItem("file", testBaseDrivePath, true),
|
||||
},
|
||||
scope: anyFolder,
|
||||
expect: assert.NoError,
|
||||
expectedCollectionPaths: expectedPathAsSlice(
|
||||
suite.T(),
|
||||
tenant,
|
||||
site,
|
||||
testBaseDrivePath,
|
||||
),
|
||||
expectedItemCount: 2,
|
||||
expectedFileCount: 1,
|
||||
expectedContainerCount: 1,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
suite.T().Run(test.testCase, func(t *testing.T) {
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
c := onedrive.NewCollections(
|
||||
tenant,
|
||||
site,
|
||||
onedrive.SharePointSource,
|
||||
testFolderMatcher{test.scope},
|
||||
&MockGraphService{},
|
||||
nil)
|
||||
err := c.UpdateCollections(ctx, "driveID", test.items)
|
||||
test.expect(t, err)
|
||||
assert.Equal(t, len(test.expectedCollectionPaths), len(c.CollectionMap), "collection paths")
|
||||
assert.Equal(t, test.expectedItemCount, c.NumItems, "item count")
|
||||
assert.Equal(t, test.expectedFileCount, c.NumFiles, "file count")
|
||||
assert.Equal(t, test.expectedContainerCount, c.NumContainers, "container count")
|
||||
for _, collPath := range test.expectedCollectionPaths {
|
||||
assert.Contains(t, c.CollectionMap, collPath)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func driveItem(name string, path string, isFile bool) models.DriveItemable {
|
||||
item := models.NewDriveItem()
|
||||
item.SetName(&name)
|
||||
item.SetId(&name)
|
||||
|
||||
parentReference := models.NewItemReference()
|
||||
parentReference.SetPath(&path)
|
||||
item.SetParentReference(parentReference)
|
||||
|
||||
if isFile {
|
||||
item.SetFile(models.NewFile())
|
||||
}
|
||||
|
||||
return item
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
func expectedPathAsSlice(t *testing.T, tenant, user string, rest ...string) []string {
|
||||
res := make([]string, 0, len(rest))
|
||||
|
||||
for _, r := range rest {
|
||||
p, err := onedrive.GetCanonicalPath(r, tenant, user, onedrive.SharePointSource)
|
||||
require.NoError(t, err)
|
||||
|
||||
res = append(res, p.String())
|
||||
}
|
||||
|
||||
return res
|
||||
}
|
||||
@ -10,6 +10,8 @@ import (
|
||||
)
|
||||
|
||||
// GetAllSitesForTenant makes a GraphQuery request retrieving all sites in the tenant.
|
||||
// Due to restrictions in filter capabilities for site queries, the returned iterable
|
||||
// will contain all personal sites for all users in the org.
|
||||
func GetAllSitesForTenant(ctx context.Context, gs graph.Service) (absser.Parsable, error) {
|
||||
options := &mssite.SitesRequestBuilderGetRequestConfiguration{
|
||||
QueryParameters: &mssite.SitesRequestBuilderGetQueryParameters{
|
||||
|
||||
@ -17,30 +17,29 @@ import (
|
||||
|
||||
// keys for ease of use
|
||||
const (
|
||||
corsoVersion = "corso-version"
|
||||
repoID = "repo-id"
|
||||
payload = "payload"
|
||||
corsoVersion = "corso_version"
|
||||
repoID = "repo_id"
|
||||
|
||||
// Event Keys
|
||||
RepoInit = "repo-init"
|
||||
BackupStart = "backup-start"
|
||||
BackupEnd = "backup-end"
|
||||
RestoreStart = "restore-start"
|
||||
RestoreEnd = "restore-end"
|
||||
RepoInit = "repo_init"
|
||||
BackupStart = "backup_start"
|
||||
BackupEnd = "backup_end"
|
||||
RestoreStart = "restore_start"
|
||||
RestoreEnd = "restore_end"
|
||||
|
||||
// Event Data Keys
|
||||
BackupCreateTime = "backup-creation-time"
|
||||
BackupID = "backup-id"
|
||||
DataRetrieved = "data-retrieved"
|
||||
DataStored = "data-stored"
|
||||
BackupCreateTime = "backup_creation_time"
|
||||
BackupID = "backup_id"
|
||||
DataRetrieved = "data_retrieved"
|
||||
DataStored = "data_stored"
|
||||
Duration = "duration"
|
||||
EndTime = "end-time"
|
||||
ItemsRead = "items-read"
|
||||
ItemsWritten = "items-written"
|
||||
EndTime = "end_time"
|
||||
ItemsRead = "items_read"
|
||||
ItemsWritten = "items_written"
|
||||
Resources = "resources"
|
||||
RestoreID = "restore-id"
|
||||
RestoreID = "restore_id"
|
||||
Service = "service"
|
||||
StartTime = "start-time"
|
||||
StartTime = "start_time"
|
||||
Status = "status"
|
||||
)
|
||||
|
||||
@ -120,8 +119,20 @@ func (b Bus) Event(ctx context.Context, key string, data map[string]any) {
|
||||
Set(repoID, b.repoID).
|
||||
Set(corsoVersion, b.version)
|
||||
|
||||
if len(data) > 0 {
|
||||
props.Set(payload, data)
|
||||
for k, v := range data {
|
||||
props.Set(k, v)
|
||||
}
|
||||
|
||||
// need to setup identity when initializing a new repo
|
||||
if key == RepoInit {
|
||||
err := b.client.Enqueue(analytics.Identify{
|
||||
UserId: b.repoID,
|
||||
Traits: analytics.NewTraits().
|
||||
SetName(b.repoID),
|
||||
})
|
||||
if err != nil {
|
||||
logger.Ctx(ctx).Debugw("analytics event failure", "err", err)
|
||||
}
|
||||
}
|
||||
|
||||
err := b.client.Enqueue(analytics.Track{
|
||||
|
||||
@ -281,6 +281,8 @@ func getStreamItemFunc(
|
||||
ctx, end := D.Span(ctx, "kopia:getStreamItemFunc")
|
||||
defer end()
|
||||
|
||||
log := logger.Ctx(ctx)
|
||||
|
||||
// Collect all errors and return them at the end so that iteration for this
|
||||
// directory doesn't end early.
|
||||
var errs *multierror.Error
|
||||
@ -314,11 +316,12 @@ func getStreamItemFunc(
|
||||
err = errors.Wrap(err, "getting full item path")
|
||||
errs = multierror.Append(errs, err)
|
||||
|
||||
logger.Ctx(ctx).Error(err)
|
||||
log.Error(err)
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
log.Debugw("reading item", "path", itemPath.String())
|
||||
trace.Log(ctx, "kopia:getStreamItemFunc:item", itemPath.String())
|
||||
|
||||
ei, ok := e.(data.StreamInfo)
|
||||
@ -326,8 +329,7 @@ func getStreamItemFunc(
|
||||
errs = multierror.Append(
|
||||
errs, errors.Errorf("item %q does not implement DataStreamInfo", itemPath))
|
||||
|
||||
logger.Ctx(ctx).Errorw(
|
||||
"item does not implement DataStreamInfo; skipping", "path", itemPath)
|
||||
log.Errorw("item does not implement DataStreamInfo; skipping", "path", itemPath)
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
@ -14,7 +14,8 @@ import (
|
||||
)
|
||||
|
||||
const (
|
||||
noProgressBarsFN = "no-progress-bars"
|
||||
hideProgressBarsFN = "hide-progress"
|
||||
retainProgressBarsFN = "retain-progress"
|
||||
progressBarWidth = 32
|
||||
)
|
||||
|
||||
@ -22,7 +23,7 @@ var (
|
||||
wg sync.WaitGroup
|
||||
// TODO: Revisit this being a global nd make it a parameter to the progress methods
|
||||
// so that each bar can be initialized with different contexts if needed.
|
||||
con context.Context
|
||||
contxt context.Context
|
||||
writer io.Writer
|
||||
progress *mpb.Progress
|
||||
cfg *config
|
||||
@ -34,37 +35,49 @@ func init() {
|
||||
makeSpinFrames(progressBarWidth)
|
||||
}
|
||||
|
||||
// adds the persistent boolean flag --no-progress-bars to the provided command.
|
||||
// adds the persistent boolean flag --hide-progress to the provided command.
|
||||
// This is a hack for help displays. Due to seeding the context, we also
|
||||
// need to parse the configuration before we execute the command.
|
||||
func AddProgressBarFlags(parent *cobra.Command) {
|
||||
fs := parent.PersistentFlags()
|
||||
fs.Bool(noProgressBarsFN, false, "turn off the progress bar displays")
|
||||
fs.Bool(hideProgressBarsFN, false, "turn off the progress bar displays")
|
||||
fs.Bool(retainProgressBarsFN, false, "retain the progress bar displays after completion")
|
||||
}
|
||||
|
||||
// Due to races between the lazy evaluation of flags in cobra and the need to init observer
|
||||
// behavior in a ctx, these options get pre-processed manually here using pflags. The canonical
|
||||
// AddProgressBarFlag() ensures the flags are displayed as part of the help/usage output.
|
||||
func PreloadFlags() bool {
|
||||
func PreloadFlags() *config {
|
||||
fs := pflag.NewFlagSet("seed-observer", pflag.ContinueOnError)
|
||||
fs.ParseErrorsWhitelist.UnknownFlags = true
|
||||
fs.Bool(noProgressBarsFN, false, "turn off the progress bar displays")
|
||||
fs.Bool(hideProgressBarsFN, false, "turn off the progress bar displays")
|
||||
fs.Bool(retainProgressBarsFN, false, "retain the progress bar displays after completion")
|
||||
// prevents overriding the corso/cobra help processor
|
||||
fs.BoolP("help", "h", false, "")
|
||||
|
||||
// parse the os args list to find the log level flag
|
||||
// parse the os args list to find the observer display flags
|
||||
if err := fs.Parse(os.Args[1:]); err != nil {
|
||||
return false
|
||||
return nil
|
||||
}
|
||||
|
||||
// retrieve the user's preferred display
|
||||
// automatically defaults to "info"
|
||||
shouldHide, err := fs.GetBool(noProgressBarsFN)
|
||||
shouldHide, err := fs.GetBool(hideProgressBarsFN)
|
||||
if err != nil {
|
||||
return false
|
||||
return nil
|
||||
}
|
||||
|
||||
return shouldHide
|
||||
// retrieve the user's preferred display
|
||||
// automatically defaults to "info"
|
||||
shouldAlwaysShow, err := fs.GetBool(retainProgressBarsFN)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
return &config{
|
||||
doNotDisplay: shouldHide,
|
||||
keepBarsAfterComplete: shouldAlwaysShow,
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
@ -74,24 +87,29 @@ func PreloadFlags() bool {
|
||||
// config handles observer configuration
|
||||
type config struct {
|
||||
doNotDisplay bool
|
||||
keepBarsAfterComplete bool
|
||||
}
|
||||
|
||||
func (c config) hidden() bool {
|
||||
return c.doNotDisplay || writer == nil
|
||||
}
|
||||
|
||||
// SeedWriter adds default writer to the observe package.
|
||||
// Uses a noop writer until seeded.
|
||||
func SeedWriter(ctx context.Context, w io.Writer, hide bool) {
|
||||
func SeedWriter(ctx context.Context, w io.Writer, c *config) {
|
||||
writer = w
|
||||
con = ctx
|
||||
contxt = ctx
|
||||
|
||||
if con == nil {
|
||||
con = context.Background()
|
||||
if contxt == nil {
|
||||
contxt = context.Background()
|
||||
}
|
||||
|
||||
cfg = &config{
|
||||
doNotDisplay: hide,
|
||||
if c != nil {
|
||||
cfg = c
|
||||
}
|
||||
|
||||
progress = mpb.NewWithContext(
|
||||
con,
|
||||
contxt,
|
||||
mpb.WithWidth(progressBarWidth),
|
||||
mpb.WithWaitGroup(&wg),
|
||||
mpb.WithOutput(writer),
|
||||
@ -105,7 +123,7 @@ func Complete() {
|
||||
progress.Wait()
|
||||
}
|
||||
|
||||
SeedWriter(con, writer, cfg.doNotDisplay)
|
||||
SeedWriter(contxt, writer, cfg)
|
||||
}
|
||||
|
||||
const (
|
||||
@ -118,7 +136,7 @@ const (
|
||||
|
||||
// Message is used to display a progress message
|
||||
func Message(message string) {
|
||||
if writer == nil {
|
||||
if cfg.hidden() {
|
||||
return
|
||||
}
|
||||
|
||||
@ -143,7 +161,7 @@ func Message(message string) {
|
||||
func MessageWithCompletion(message string) (chan<- struct{}, func()) {
|
||||
completionCh := make(chan struct{}, 1)
|
||||
|
||||
if writer == nil {
|
||||
if cfg.hidden() {
|
||||
return completionCh, func() {}
|
||||
}
|
||||
|
||||
@ -156,6 +174,7 @@ func MessageWithCompletion(message string) (chan<- struct{}, func()) {
|
||||
mpb.SpinnerStyle(frames...).PositionLeft(),
|
||||
mpb.PrependDecorators(
|
||||
decor.Name(message),
|
||||
decor.Elapsed(decor.ET_STYLE_GO, decor.WC{W: 8}),
|
||||
),
|
||||
mpb.BarFillerOnComplete("done"),
|
||||
)
|
||||
@ -163,7 +182,7 @@ func MessageWithCompletion(message string) (chan<- struct{}, func()) {
|
||||
go func(ci <-chan struct{}) {
|
||||
for {
|
||||
select {
|
||||
case <-con.Done():
|
||||
case <-contxt.Done():
|
||||
bar.SetTotal(-1, true)
|
||||
case <-ci:
|
||||
// We don't care whether the channel was signalled or closed
|
||||
@ -184,23 +203,26 @@ func MessageWithCompletion(message string) (chan<- struct{}, func()) {
|
||||
// read through the provided readcloser, up until the byte count matches
|
||||
// the totalBytes.
|
||||
func ItemProgress(rc io.ReadCloser, header, iname string, totalBytes int64) (io.ReadCloser, func()) {
|
||||
if cfg.doNotDisplay || writer == nil || rc == nil || totalBytes == 0 {
|
||||
if cfg.hidden() || rc == nil || totalBytes == 0 {
|
||||
return rc, func() {}
|
||||
}
|
||||
|
||||
wg.Add(1)
|
||||
|
||||
bar := progress.New(
|
||||
totalBytes,
|
||||
mpb.NopStyle(),
|
||||
mpb.BarRemoveOnComplete(),
|
||||
barOpts := []mpb.BarOption{
|
||||
mpb.PrependDecorators(
|
||||
decor.Name(header, decor.WCSyncSpaceR),
|
||||
decor.Name(iname, decor.WCSyncSpaceR),
|
||||
decor.CountersKibiByte(" %.1f/%.1f ", decor.WC{W: 8}),
|
||||
decor.NewPercentage("%d ", decor.WC{W: 4}),
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
if !cfg.keepBarsAfterComplete {
|
||||
barOpts = append(barOpts, mpb.BarRemoveOnComplete())
|
||||
}
|
||||
|
||||
bar := progress.New(totalBytes, mpb.NopStyle(), barOpts...)
|
||||
|
||||
return bar.ProxyReader(rc), waitAndCloseBar(bar)
|
||||
}
|
||||
@ -212,7 +234,7 @@ func ItemProgress(rc io.ReadCloser, header, iname string, totalBytes int64) (io.
|
||||
func ProgressWithCount(header, message string, count int64) (chan<- struct{}, func()) {
|
||||
progressCh := make(chan struct{})
|
||||
|
||||
if cfg.doNotDisplay || writer == nil {
|
||||
if cfg.hidden() {
|
||||
go func(ci <-chan struct{}) {
|
||||
for {
|
||||
_, ok := <-ci
|
||||
@ -227,23 +249,25 @@ func ProgressWithCount(header, message string, count int64) (chan<- struct{}, fu
|
||||
|
||||
wg.Add(1)
|
||||
|
||||
bar := progress.New(
|
||||
count,
|
||||
mpb.NopStyle(),
|
||||
mpb.BarRemoveOnComplete(),
|
||||
barOpts := []mpb.BarOption{
|
||||
mpb.PrependDecorators(
|
||||
decor.Name(header, decor.WCSyncSpaceR),
|
||||
decor.Counters(0, " %d/%d "),
|
||||
decor.Name(message),
|
||||
decor.Counters(0, " %d/%d "),
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
if !cfg.keepBarsAfterComplete {
|
||||
barOpts = append(barOpts, mpb.BarRemoveOnComplete())
|
||||
}
|
||||
|
||||
bar := progress.New(count, mpb.NopStyle(), barOpts...)
|
||||
|
||||
ch := make(chan struct{})
|
||||
|
||||
go func(ci <-chan struct{}) {
|
||||
for {
|
||||
select {
|
||||
case <-con.Done():
|
||||
case <-contxt.Done():
|
||||
bar.Abort(true)
|
||||
return
|
||||
|
||||
@ -297,7 +321,7 @@ func makeSpinFrames(barWidth int) {
|
||||
// incrementing the count of items handled. Each write to the provided channel
|
||||
// counts as a single increment. The caller is expected to close the channel.
|
||||
func CollectionProgress(user, category, dirName string) (chan<- struct{}, func()) {
|
||||
if cfg.doNotDisplay || writer == nil || len(user) == 0 || len(dirName) == 0 {
|
||||
if cfg.hidden() || len(user) == 0 || len(dirName) == 0 {
|
||||
ch := make(chan struct{})
|
||||
|
||||
go func(ci <-chan struct{}) {
|
||||
@ -314,25 +338,29 @@ func CollectionProgress(user, category, dirName string) (chan<- struct{}, func()
|
||||
|
||||
wg.Add(1)
|
||||
|
||||
bar := progress.New(
|
||||
-1, // -1 to indicate an unbounded count
|
||||
mpb.SpinnerStyle(spinFrames...),
|
||||
mpb.BarRemoveOnComplete(),
|
||||
mpb.PrependDecorators(
|
||||
decor.Name(category),
|
||||
),
|
||||
barOpts := []mpb.BarOption{
|
||||
mpb.PrependDecorators(decor.Name(category)),
|
||||
mpb.AppendDecorators(
|
||||
decor.CurrentNoUnit("%d - ", decor.WCSyncSpace),
|
||||
decor.Name(fmt.Sprintf("%s - %s", user, dirName)),
|
||||
),
|
||||
}
|
||||
|
||||
if !cfg.keepBarsAfterComplete {
|
||||
barOpts = append(barOpts, mpb.BarRemoveOnComplete())
|
||||
}
|
||||
|
||||
bar := progress.New(
|
||||
-1, // -1 to indicate an unbounded count
|
||||
mpb.SpinnerStyle(spinFrames...),
|
||||
barOpts...,
|
||||
)
|
||||
|
||||
ch := make(chan struct{})
|
||||
|
||||
go func(ci <-chan struct{}) {
|
||||
for {
|
||||
select {
|
||||
case <-con.Done():
|
||||
case <-contxt.Done():
|
||||
bar.SetTotal(-1, true)
|
||||
return
|
||||
|
||||
|
||||
@ -33,13 +33,13 @@ func (suite *ObserveProgressUnitSuite) TestItemProgress() {
|
||||
t := suite.T()
|
||||
|
||||
recorder := strings.Builder{}
|
||||
observe.SeedWriter(ctx, &recorder, false)
|
||||
observe.SeedWriter(ctx, &recorder, nil)
|
||||
|
||||
defer func() {
|
||||
// don't cross-contaminate other tests.
|
||||
observe.Complete()
|
||||
//nolint:forbidigo
|
||||
observe.SeedWriter(context.Background(), nil, false)
|
||||
observe.SeedWriter(context.Background(), nil, nil)
|
||||
}()
|
||||
|
||||
from := make([]byte, 100)
|
||||
@ -87,13 +87,13 @@ func (suite *ObserveProgressUnitSuite) TestCollectionProgress_unblockOnCtxCancel
|
||||
t := suite.T()
|
||||
|
||||
recorder := strings.Builder{}
|
||||
observe.SeedWriter(ctx, &recorder, false)
|
||||
observe.SeedWriter(ctx, &recorder, nil)
|
||||
|
||||
defer func() {
|
||||
// don't cross-contaminate other tests.
|
||||
observe.Complete()
|
||||
//nolint:forbidigo
|
||||
observe.SeedWriter(context.Background(), nil, false)
|
||||
observe.SeedWriter(context.Background(), nil, nil)
|
||||
}()
|
||||
|
||||
progCh, closer := observe.CollectionProgress("test", "testcat", "testertons")
|
||||
@ -122,13 +122,13 @@ func (suite *ObserveProgressUnitSuite) TestCollectionProgress_unblockOnChannelCl
|
||||
t := suite.T()
|
||||
|
||||
recorder := strings.Builder{}
|
||||
observe.SeedWriter(ctx, &recorder, false)
|
||||
observe.SeedWriter(ctx, &recorder, nil)
|
||||
|
||||
defer func() {
|
||||
// don't cross-contaminate other tests.
|
||||
observe.Complete()
|
||||
//nolint:forbidigo
|
||||
observe.SeedWriter(context.Background(), nil, false)
|
||||
observe.SeedWriter(context.Background(), nil, nil)
|
||||
}()
|
||||
|
||||
progCh, closer := observe.CollectionProgress("test", "testcat", "testertons")
|
||||
@ -153,12 +153,12 @@ func (suite *ObserveProgressUnitSuite) TestObserveProgress() {
|
||||
defer flush()
|
||||
|
||||
recorder := strings.Builder{}
|
||||
observe.SeedWriter(ctx, &recorder, false)
|
||||
observe.SeedWriter(ctx, &recorder, nil)
|
||||
|
||||
defer func() {
|
||||
// don't cross-contaminate other tests.
|
||||
//nolint:forbidigo
|
||||
observe.SeedWriter(context.Background(), nil, false)
|
||||
observe.SeedWriter(context.Background(), nil, nil)
|
||||
}()
|
||||
|
||||
message := "Test Message"
|
||||
@ -174,12 +174,12 @@ func (suite *ObserveProgressUnitSuite) TestObserveProgressWithCompletion() {
|
||||
defer flush()
|
||||
|
||||
recorder := strings.Builder{}
|
||||
observe.SeedWriter(ctx, &recorder, false)
|
||||
observe.SeedWriter(ctx, &recorder, nil)
|
||||
|
||||
defer func() {
|
||||
// don't cross-contaminate other tests.
|
||||
//nolint:forbidigo
|
||||
observe.SeedWriter(context.Background(), nil, false)
|
||||
observe.SeedWriter(context.Background(), nil, nil)
|
||||
}()
|
||||
|
||||
message := "Test Message"
|
||||
@ -204,12 +204,12 @@ func (suite *ObserveProgressUnitSuite) TestObserveProgressWithChannelClosed() {
|
||||
defer flush()
|
||||
|
||||
recorder := strings.Builder{}
|
||||
observe.SeedWriter(ctx, &recorder, false)
|
||||
observe.SeedWriter(ctx, &recorder, nil)
|
||||
|
||||
defer func() {
|
||||
// don't cross-contaminate other tests.
|
||||
//nolint:forbidigo
|
||||
observe.SeedWriter(context.Background(), nil, false)
|
||||
observe.SeedWriter(context.Background(), nil, nil)
|
||||
}()
|
||||
|
||||
message := "Test Message"
|
||||
@ -236,12 +236,12 @@ func (suite *ObserveProgressUnitSuite) TestObserveProgressWithContextCancelled()
|
||||
ctx, cancel := context.WithCancel(ctx)
|
||||
|
||||
recorder := strings.Builder{}
|
||||
observe.SeedWriter(ctx, &recorder, false)
|
||||
observe.SeedWriter(ctx, &recorder, nil)
|
||||
|
||||
defer func() {
|
||||
// don't cross-contaminate other tests.
|
||||
//nolint:forbidigo
|
||||
observe.SeedWriter(context.Background(), nil, false)
|
||||
observe.SeedWriter(context.Background(), nil, nil)
|
||||
}()
|
||||
|
||||
message := "Test Message"
|
||||
@ -265,12 +265,12 @@ func (suite *ObserveProgressUnitSuite) TestObserveProgressWithCount() {
|
||||
defer flush()
|
||||
|
||||
recorder := strings.Builder{}
|
||||
observe.SeedWriter(ctx, &recorder, false)
|
||||
observe.SeedWriter(ctx, &recorder, nil)
|
||||
|
||||
defer func() {
|
||||
// don't cross-contaminate other tests.
|
||||
//nolint:forbidigo
|
||||
observe.SeedWriter(context.Background(), nil, false)
|
||||
observe.SeedWriter(context.Background(), nil, nil)
|
||||
}()
|
||||
|
||||
header := "Header"
|
||||
@ -298,12 +298,12 @@ func (suite *ObserveProgressUnitSuite) TestObserveProgressWithCountChannelClosed
|
||||
defer flush()
|
||||
|
||||
recorder := strings.Builder{}
|
||||
observe.SeedWriter(ctx, &recorder, false)
|
||||
observe.SeedWriter(ctx, &recorder, nil)
|
||||
|
||||
defer func() {
|
||||
// don't cross-contaminate other tests.
|
||||
//nolint:forbidigo
|
||||
observe.SeedWriter(context.Background(), nil, false)
|
||||
observe.SeedWriter(context.Background(), nil, nil)
|
||||
}()
|
||||
|
||||
header := "Header"
|
||||
|
||||
@ -1,6 +1,7 @@
|
||||
package operations
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
@ -114,6 +115,67 @@ func (suite *BackupOpSuite) TestBackupOperation_PersistResults() {
|
||||
// integration
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
//revive:disable:context-as-argument
|
||||
func prepNewBackupOp(
|
||||
t *testing.T,
|
||||
ctx context.Context,
|
||||
bus events.Eventer,
|
||||
sel selectors.Selector,
|
||||
) (BackupOperation, func()) {
|
||||
//revive:enable:context-as-argument
|
||||
acct := tester.NewM365Account(t)
|
||||
|
||||
// need to initialize the repository before we can test connecting to it.
|
||||
st := tester.NewPrefixedS3Storage(t)
|
||||
|
||||
k := kopia.NewConn(st)
|
||||
require.NoError(t, k.Initialize(ctx))
|
||||
|
||||
// kopiaRef comes with a count of 1 and Wrapper bumps it again so safe
|
||||
// to close here.
|
||||
closer := func() { k.Close(ctx) }
|
||||
|
||||
kw, err := kopia.NewWrapper(k)
|
||||
if !assert.NoError(t, err) {
|
||||
closer()
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
closer = func() {
|
||||
k.Close(ctx)
|
||||
kw.Close(ctx)
|
||||
}
|
||||
|
||||
ms, err := kopia.NewModelStore(k)
|
||||
if !assert.NoError(t, err) {
|
||||
closer()
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
closer = func() {
|
||||
k.Close(ctx)
|
||||
kw.Close(ctx)
|
||||
ms.Close(ctx)
|
||||
}
|
||||
|
||||
sw := store.NewKopiaStore(ms)
|
||||
|
||||
bo, err := NewBackupOperation(
|
||||
ctx,
|
||||
control.Options{},
|
||||
kw,
|
||||
sw,
|
||||
acct,
|
||||
sel,
|
||||
bus)
|
||||
if !assert.NoError(t, err) {
|
||||
closer()
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
return bo, closer
|
||||
}
|
||||
|
||||
type BackupOpIntegrationSuite struct {
|
||||
suite.Suite
|
||||
}
|
||||
@ -122,6 +184,7 @@ func TestBackupOpIntegrationSuite(t *testing.T) {
|
||||
if err := tester.RunOnAny(
|
||||
tester.CorsoCITests,
|
||||
tester.CorsoOperationTests,
|
||||
"flomp",
|
||||
); err != nil {
|
||||
t.Skip(err)
|
||||
}
|
||||
@ -174,12 +237,11 @@ func (suite *BackupOpIntegrationSuite) TestNewBackupOperation() {
|
||||
|
||||
// TestBackup_Run ensures that Integration Testing works
|
||||
// for the following scopes: Contacts, Events, and Mail
|
||||
func (suite *BackupOpIntegrationSuite) TestBackup_Run() {
|
||||
func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchange() {
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
m365UserID := tester.M365UserID(suite.T())
|
||||
acct := tester.NewM365Account(suite.T())
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
@ -215,36 +277,9 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run() {
|
||||
}
|
||||
for _, test := range tests {
|
||||
suite.T().Run(test.name, func(t *testing.T) {
|
||||
// need to initialize the repository before we can test connecting to it.
|
||||
st := tester.NewPrefixedS3Storage(t)
|
||||
k := kopia.NewConn(st)
|
||||
require.NoError(t, k.Initialize(ctx))
|
||||
|
||||
// kopiaRef comes with a count of 1 and Wrapper bumps it again so safe
|
||||
// to close here.
|
||||
defer k.Close(ctx)
|
||||
|
||||
kw, err := kopia.NewWrapper(k)
|
||||
require.NoError(t, err)
|
||||
defer kw.Close(ctx)
|
||||
|
||||
ms, err := kopia.NewModelStore(k)
|
||||
require.NoError(t, err)
|
||||
defer ms.Close(ctx)
|
||||
|
||||
mb := evmock.NewBus()
|
||||
|
||||
sw := store.NewKopiaStore(ms)
|
||||
selected := test.selectFunc()
|
||||
bo, err := NewBackupOperation(
|
||||
ctx,
|
||||
control.Options{},
|
||||
kw,
|
||||
sw,
|
||||
acct,
|
||||
*selected,
|
||||
mb)
|
||||
require.NoError(t, err)
|
||||
bo, closer := prepNewBackupOp(t, ctx, mb, *test.selectFunc())
|
||||
defer closer()
|
||||
|
||||
require.NoError(t, bo.Run(ctx))
|
||||
require.NotEmpty(t, bo.Results)
|
||||
@ -266,51 +301,54 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run() {
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *BackupOpIntegrationSuite) TestBackupOneDrive_Run() {
|
||||
func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDrive() {
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
t := suite.T()
|
||||
var (
|
||||
t = suite.T()
|
||||
mb = evmock.NewBus()
|
||||
m365UserID = tester.SecondaryM365UserID(t)
|
||||
sel = selectors.NewOneDriveBackup()
|
||||
)
|
||||
|
||||
m365UserID := tester.SecondaryM365UserID(t)
|
||||
acct := tester.NewM365Account(t)
|
||||
|
||||
// need to initialize the repository before we can test connecting to it.
|
||||
st := tester.NewPrefixedS3Storage(t)
|
||||
|
||||
k := kopia.NewConn(st)
|
||||
require.NoError(t, k.Initialize(ctx))
|
||||
|
||||
// kopiaRef comes with a count of 1 and Wrapper bumps it again so safe
|
||||
// to close here.
|
||||
defer k.Close(ctx)
|
||||
|
||||
kw, err := kopia.NewWrapper(k)
|
||||
require.NoError(t, err)
|
||||
|
||||
defer kw.Close(ctx)
|
||||
|
||||
ms, err := kopia.NewModelStore(k)
|
||||
require.NoError(t, err)
|
||||
|
||||
defer ms.Close(ctx)
|
||||
|
||||
sw := store.NewKopiaStore(ms)
|
||||
|
||||
mb := evmock.NewBus()
|
||||
|
||||
sel := selectors.NewOneDriveBackup()
|
||||
sel.Include(sel.Users([]string{m365UserID}))
|
||||
|
||||
bo, err := NewBackupOperation(
|
||||
ctx,
|
||||
control.Options{},
|
||||
kw,
|
||||
sw,
|
||||
acct,
|
||||
sel.Selector,
|
||||
mb)
|
||||
require.NoError(t, err)
|
||||
bo, closer := prepNewBackupOp(t, ctx, mb, sel.Selector)
|
||||
defer closer()
|
||||
|
||||
require.NoError(t, bo.Run(ctx))
|
||||
require.NotEmpty(t, bo.Results)
|
||||
require.NotEmpty(t, bo.Results.BackupID)
|
||||
assert.Equalf(t, Completed, bo.Status, "backup status %s is not Completed", bo.Status)
|
||||
assert.Equal(t, bo.Results.ItemsRead, bo.Results.ItemsWritten)
|
||||
assert.Less(t, int64(0), bo.Results.BytesRead, "bytes read")
|
||||
assert.Less(t, int64(0), bo.Results.BytesUploaded, "bytes uploaded")
|
||||
assert.Equal(t, 1, bo.Results.ResourceOwners)
|
||||
assert.NoError(t, bo.Results.ReadErrors)
|
||||
assert.NoError(t, bo.Results.WriteErrors)
|
||||
assert.Equal(t, 1, mb.TimesCalled[events.BackupStart], "backup-start events")
|
||||
assert.Equal(t, 1, mb.TimesCalled[events.BackupEnd], "backup-end events")
|
||||
assert.Equal(t,
|
||||
mb.CalledWith[events.BackupStart][0][events.BackupID],
|
||||
bo.Results.BackupID, "backupID pre-declaration")
|
||||
}
|
||||
|
||||
func (suite *BackupOpIntegrationSuite) TestBackup_Run_sharePoint() {
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
var (
|
||||
t = suite.T()
|
||||
mb = evmock.NewBus()
|
||||
siteID = tester.M365SiteID(t)
|
||||
sel = selectors.NewSharePointBackup()
|
||||
)
|
||||
|
||||
sel.Include(sel.Sites([]string{siteID}))
|
||||
|
||||
bo, closer := prepNewBackupOp(t, ctx, mb, sel.Selector)
|
||||
defer closer()
|
||||
|
||||
require.NoError(t, bo.Run(ctx))
|
||||
require.NotEmpty(t, bo.Results)
|
||||
|
||||
@ -15,11 +15,14 @@ import (
|
||||
// InProgress - the standard value for any process that has not
|
||||
// arrived at an end state. The end states are Failed, Completed,
|
||||
// or NoData.
|
||||
//
|
||||
// Failed - the operation was unable to begin processing data at all.
|
||||
// No items have been written by the consumer.
|
||||
//
|
||||
// Completed - the operation was able to process one or more of the
|
||||
// items in the request. Both partial success (0 < N < len(items)
|
||||
// errored) and total success (0 errors) are set as Completed.
|
||||
//
|
||||
// NoData - only occurs when no data was involved in an operation.
|
||||
// For example, if a backup is requested for a specific user's
|
||||
// mail, but that account contains zero mail messages, the backup
|
||||
|
||||
@ -23,6 +23,7 @@ const (
|
||||
CorsoGraphConnectorTests = "CORSO_GRAPH_CONNECTOR_TESTS"
|
||||
CorsoGraphConnectorExchangeTests = "CORSO_GRAPH_CONNECTOR_EXCHANGE_TESTS"
|
||||
CorsoGraphConnectorOneDriveTests = "CORSO_GRAPH_CONNECTOR_ONE_DRIVE_TESTS"
|
||||
CorsoGraphConnectorSharePointTests = "CORSO_GRAPH_CONNECTOR_SHAREPOINT_TESTS"
|
||||
CorsoKopiaWrapperTests = "CORSO_KOPIA_WRAPPER_TESTS"
|
||||
CorsoModelStoreTests = "CORSO_MODEL_STORE_TESTS"
|
||||
CorsoOneDriveTests = "CORSO_ONE_DRIVE_TESTS"
|
||||
|
||||
@ -30,6 +30,19 @@ func SecondaryM365UserID(t *testing.T) string {
|
||||
return cfg[TestCfgSecondaryUserID]
|
||||
}
|
||||
|
||||
// LoadTestM365SiteID returns a siteID string representing the m365SiteID
|
||||
// described by either the env var CORSO_M365_LOAD_TEST_SITE_ID, the
|
||||
// corso_test.toml config file or the default value (in that order of priority).
|
||||
// The default is a last-attempt fallback that will only work on alcion's
|
||||
// testing org.
|
||||
func LoadTestM365SiteID(t *testing.T) string {
|
||||
cfg, err := readTestConfig()
|
||||
require.NoError(t, err, "retrieving load test m365 site id from test configuration")
|
||||
|
||||
// TODO: load test site id, not standard test site id
|
||||
return cfg[TestCfgSiteID]
|
||||
}
|
||||
|
||||
// LoadTestM365UserID returns an userID string representing the m365UserID
|
||||
// described by either the env var CORSO_M365_LOAD_TEST_USER_ID, the
|
||||
// corso_test.toml config file or the default value (in that order of priority).
|
||||
@ -42,8 +55,29 @@ func LoadTestM365UserID(t *testing.T) string {
|
||||
return cfg[TestCfgLoadTestUserID]
|
||||
}
|
||||
|
||||
// expects cfg value to be a string representing an array like:
|
||||
// "['foo@example.com','bar@example.com']"
|
||||
// expects cfg value to be a string representing an array such as:
|
||||
// ["site1\,uuid","site2\,uuid"]
|
||||
// the delimeter must be a |.
|
||||
func LoadTestM365OrgSites(t *testing.T) []string {
|
||||
cfg, err := readTestConfig()
|
||||
require.NoError(t, err, "retrieving load test m365 org sites from test configuration")
|
||||
|
||||
// TODO: proper handling of site slice input.
|
||||
// sites := cfg[TestCfgLoadTestOrgSites]
|
||||
// sites = strings.TrimPrefix(sites, "[")
|
||||
// sites = strings.TrimSuffix(sites, "]")
|
||||
// sites = strings.ReplaceAll(sites, `"`, "")
|
||||
// sites = strings.ReplaceAll(sites, `'`, "")
|
||||
// sites = strings.ReplaceAll(sites, "|", ",")
|
||||
|
||||
// return strings.Split(sites, ",")
|
||||
|
||||
return []string{cfg[TestCfgSiteID]}
|
||||
}
|
||||
|
||||
// expects cfg value to be a string representing an array such as:
|
||||
// ["foo@example.com","bar@example.com"]
|
||||
// the delimeter may be either a , or |.
|
||||
func LoadTestM365OrgUsers(t *testing.T) []string {
|
||||
cfg, err := readTestConfig()
|
||||
require.NoError(t, err, "retrieving load test m365 org users from test configuration")
|
||||
|
||||
@ -352,12 +352,14 @@ func (i ExchangeInfo) Values() []string {
|
||||
|
||||
// SharePointInfo describes a sharepoint item
|
||||
type SharePointInfo struct {
|
||||
ItemType ItemType `json:"itemType,omitempty"`
|
||||
ItemName string `json:"itemName,omitempty"`
|
||||
Created time.Time `json:"created,omitempty"`
|
||||
ItemName string `json:"itemName,omitempty"`
|
||||
ItemType ItemType `json:"itemType,omitempty"`
|
||||
Modified time.Time `josn:"modified,omitempty"`
|
||||
WebURL string `json:"webUrl,omitempty"`
|
||||
Owner string `json:"owner,omitempty"`
|
||||
ParentPath string `json:"parentPath"`
|
||||
Size int64 `json:"size,omitempty"`
|
||||
WebURL string `json:"webUrl,omitempty"`
|
||||
}
|
||||
|
||||
// Headers returns the human-readable names of properties in a SharePointInfo
|
||||
@ -374,13 +376,13 @@ func (i SharePointInfo) Values() []string {
|
||||
|
||||
// OneDriveInfo describes a oneDrive item
|
||||
type OneDriveInfo struct {
|
||||
ItemType ItemType `json:"itemType,omitempty"`
|
||||
ParentPath string `json:"parentPath"`
|
||||
ItemName string `json:"itemName"`
|
||||
Size int64 `json:"size,omitempty"`
|
||||
Owner string `json:"owner,omitempty"`
|
||||
Created time.Time `json:"created,omitempty"`
|
||||
ItemName string `json:"itemName"`
|
||||
ItemType ItemType `json:"itemType,omitempty"`
|
||||
Modified time.Time `json:"modified,omitempty"`
|
||||
Owner string `json:"owner,omitempty"`
|
||||
ParentPath string `json:"parentPath"`
|
||||
Size int64 `json:"size,omitempty"`
|
||||
}
|
||||
|
||||
// Headers returns the human-readable names of properties in a OneDriveInfo
|
||||
|
||||
@ -25,6 +25,7 @@ const (
|
||||
Info
|
||||
Warn
|
||||
Production
|
||||
Disabled
|
||||
)
|
||||
|
||||
const logLevelFN = "log-level"
|
||||
@ -80,6 +81,8 @@ func genLogger(level logLevel) (*zapcore.Core, *zap.SugaredLogger) {
|
||||
return lvl >= zapcore.WarnLevel
|
||||
case Production:
|
||||
return lvl >= zapcore.ErrorLevel
|
||||
case Disabled:
|
||||
return false
|
||||
default:
|
||||
return true
|
||||
}
|
||||
@ -104,6 +107,8 @@ func genLogger(level logLevel) (*zapcore.Core, *zap.SugaredLogger) {
|
||||
cfg.Level = zap.NewAtomicLevelAt(zapcore.InfoLevel)
|
||||
case Warn:
|
||||
cfg.Level = zap.NewAtomicLevelAt(zapcore.WarnLevel)
|
||||
case Disabled:
|
||||
cfg.Level = zap.NewAtomicLevelAt(zapcore.FatalLevel)
|
||||
}
|
||||
|
||||
lgr, err = cfg.Build()
|
||||
@ -144,7 +149,7 @@ type loggingKey string
|
||||
|
||||
const ctxKey loggingKey = "corsoLogger"
|
||||
|
||||
// Seed embeds a logger into the context for later retrieval.
|
||||
// Seed generates a logger within the context for later retrieval.
|
||||
// It also parses the command line for flag values prior to executing
|
||||
// cobra. This early parsing is necessary since logging depends on
|
||||
// a seeded context prior to cobra evaluating flags.
|
||||
@ -154,24 +159,30 @@ func Seed(ctx context.Context, lvl string) (context.Context, *zap.SugaredLogger)
|
||||
}
|
||||
|
||||
zsl := singleton(levelOf(lvl))
|
||||
ctxOut := context.WithValue(ctx, ctxKey, zsl)
|
||||
|
||||
return ctxOut, zsl
|
||||
return Set(ctx, zsl), zsl
|
||||
}
|
||||
|
||||
// SeedLevel embeds a logger into the context with the given log-level.
|
||||
// SeedLevel generates a logger within the context with the given log-level.
|
||||
func SeedLevel(ctx context.Context, level logLevel) (context.Context, *zap.SugaredLogger) {
|
||||
l := ctx.Value(ctxKey)
|
||||
if l == nil {
|
||||
zsl := singleton(level)
|
||||
ctxWV := context.WithValue(ctx, ctxKey, zsl)
|
||||
|
||||
return ctxWV, zsl
|
||||
return Set(ctx, zsl), zsl
|
||||
}
|
||||
|
||||
return ctx, l.(*zap.SugaredLogger)
|
||||
}
|
||||
|
||||
// Set allows users to embed their own zap.SugaredLogger within the context.
|
||||
func Set(ctx context.Context, logger *zap.SugaredLogger) context.Context {
|
||||
if logger == nil {
|
||||
return ctx
|
||||
}
|
||||
|
||||
return context.WithValue(ctx, ctxKey, logger)
|
||||
}
|
||||
|
||||
// Ctx retrieves the logger embedded in the context.
|
||||
func Ctx(ctx context.Context) *zap.SugaredLogger {
|
||||
l := ctx.Value(ctxKey)
|
||||
@ -191,6 +202,8 @@ func levelOf(lvl string) logLevel {
|
||||
return Warn
|
||||
case "error":
|
||||
return Production
|
||||
case "disabled":
|
||||
return Disabled
|
||||
}
|
||||
|
||||
return Info
|
||||
|
||||