Compare commits


1 Commit

Vaibhav Kamra · 8840116380 · Test case for reference attachment · 2022-10-19 11:20:59 -07:00
1268 changed files with 62553 additions and 236818 deletions

View File

@ -1,40 +0,0 @@
name: Bug Report
description: File a Corso bug report
title: "[Bug]: "
labels: ["bug", "triage"]
body:
- type: markdown
attributes:
value: |
Thanks for taking the time to fill out this bug report!
- type: textarea
id: what-happened
attributes:
label: What happened?
description: Also tell us, what did you expect to happen?
placeholder: Tell us what you see!
validations:
required: true
- type: textarea
id: version
attributes:
label: Corso Version?
description: What version of Corso (`corso --version`) are you running?
value: "Corso vX.X.X"
validations:
required: true
- type: textarea
id: os
attributes:
label: Where are you running Corso?
description: Include OS version (e.g., macOS 13.0.1, Windows 11 Pro) and Object Storage system being used.
placeholder: Also include additional system details if relevant (e.g., shell, antivirus, firewall/proxy used on the network, etc.)
validations:
required: true
- type: textarea
id: logs
attributes:
label: Relevant log output
description: Please run Corso with `--log-level debug --mask-sensitive-data` and attach the log file.
placeholder: This will be automatically formatted, so no need for backticks.
render: shell

View File

@ -1,5 +0,0 @@
blank_issues_enabled: true
contact_links:
- name: Corso Documentation
url: https://corsobackup.io/docs/intro/
about: Did you search the documentation?

View File

@ -1,268 +0,0 @@
name: Backup Restore Test
description: Run various backup/restore/export tests for a service.
inputs:
service:
description: Service to test
required: true
kind:
description: Kind of test
required: true
backup-id:
description: Backup to retrieve data out of
required: false
backup-args:
description: Arguments to pass for backup
required: false
default: ""
restore-args:
description: Arguments to pass for restore; restore is skipped when missing.
required: false
default: ""
export-args:
description: Arguments to pass for export.
required: false
default: ""
restore-container:
description: Folder to use for testing
required: true
log-dir:
description: Folder to store test log files
required: true
on-collision:
description: Value for the --collisions flag
required: false
default: "replace"
with-export:
description: Runs export tests when true
required: false
default: false
category:
description: Category of data for the given service
required: false
outputs:
backup-id:
value: ${{ steps.backup.outputs.result }}
runs:
using: composite
steps:
- name: Backup ${{ inputs.service }} ${{ inputs.kind }}
id: backup
shell: bash
working-directory: src
run: |
echo "---------------------------"
echo Backup ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
set -euo pipefail
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-backup-${{inputs.kind }}.log
./corso backup create '${{ inputs.service }}' \
--no-stats --hide-progress --json \
${{ inputs.backup-args }} |
tee /dev/stderr | # for printing logs
jq -r '.[0] | .id' |
sed 's/^/result=/' |
tee $GITHUB_OUTPUT
- name: Restore ${{ inputs.service }} ${{ inputs.kind }}
if: inputs.restore-args
id: restore
shell: bash
working-directory: src
run: |
echo "---------------------------"
echo Restore ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
set -euo pipefail
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-restore-${{inputs.kind }}.log
./corso restore '${{ inputs.service }}' \
--no-stats \
--hide-progress \
--collisions ${{ inputs.on-collision }} \
${{ inputs.restore-args }} \
--backup '${{ steps.backup.outputs.result }}' \
2>&1 |
tee /tmp/corsologs |
grep -i -e 'Restoring to folder ' |
sed "s/Restoring to folder /result=/" |
tee $GITHUB_OUTPUT
cat /tmp/corsologs
- name: Check restore ${{ inputs.service }} ${{ inputs.kind }}
if: inputs.restore-args
shell: bash
working-directory: src
env:
SANITY_TEST_RESTORE_CONTAINER: ${{ steps.restore.outputs.result }}
SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }}
SANITY_BACKUP_ID: ${{ inputs.backup-id }}
# Lists are not restored to a different folder; they get created adjacent to their originals.
# Hence SANITY_TEST_RESTORE_CONTAINER_PREFIX is necessary to differentiate restored items from originals.
SANITY_TEST_RESTORE_CONTAINER_PREFIX: ${{ steps.restore.outputs.result }}
SANITY_TEST_CATEGORY: ${{ inputs.category }}
run: |
echo "---------------------------"
echo Sanity Test Restore ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-validate-${{inputs.kind }}.log
./sanity-test restore ${{ inputs.service }}
- name: Export ${{ inputs.service }} ${{ inputs.kind }}
if: ${{ inputs.with-export == 'true' }}
id: export
shell: bash
working-directory: src
run: |
echo "---------------------------"
echo Export ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
set -euo pipefail
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-restore-${{inputs.kind }}.log
./corso export '${{ inputs.service }}' \
/tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }} \
--no-stats \
--hide-progress \
${{ inputs.export-args }} \
--backup '${{ steps.backup.outputs.result }}'
cat /tmp/corsologs
- name: Check export ${{ inputs.service }} ${{ inputs.kind }}
if: ${{ inputs.with-export == 'true' }}
shell: bash
working-directory: src
env:
SANITY_TEST_RESTORE_CONTAINER: /tmp/export-${{ inputs.service }}${{ inputs.category && '-' }}${{ inputs.category }}-${{ inputs.kind }}
SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }}
SANITY_BACKUP_ID: ${{ inputs.backup-id }}
# applies only for SharePoint lists
SANITY_TEST_RESTORE_CONTAINER_PREFIX: ${{ steps.restore.outputs.result }}
SANITY_TEST_CATEGORY: ${{ inputs.category }}
run: |
echo "---------------------------"
echo Sanity-Test Export ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-validate-${{inputs.kind }}.log
./sanity-test export ${{ inputs.service }}
- name: Export archive ${{ inputs.service }} ${{ inputs.kind }}
if: ${{ inputs.with-export == 'true' }}
id: export-archive
shell: bash
working-directory: src
run: |
echo "---------------------------"
echo Export Archive ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
set -euo pipefail
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-restore-${{inputs.kind }}.log
./corso export '${{ inputs.service }}' \
/tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }}-archive \
--no-stats \
--hide-progress \
--archive \
${{ inputs.export-args }} \
--backup '${{ steps.backup.outputs.result }}'
unzip /tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }}-archive/*.zip \
-d /tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }}-unzipped
cat /tmp/corsologs
- name: Check archive export ${{ inputs.service }} ${{ inputs.kind }}
if: ${{ inputs.with-export == 'true' }}
shell: bash
working-directory: src
env:
SANITY_TEST_RESTORE_CONTAINER: /tmp/export-${{ inputs.service }}${{ inputs.category && '-' }}${{ inputs.category }}-${{inputs.kind }}-unzipped
SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }}
SANITY_BACKUP_ID: ${{ inputs.backup-id }}
# applies only for SharePoint lists
SANITY_TEST_RESTORE_CONTAINER_PREFIX: ${{ steps.restore.outputs.result }}
SANITY_TEST_CATEGORY: ${{ inputs.category }}
run: |
echo "---------------------------"
echo Sanity-Test Export Archive ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-validate-${{inputs.kind }}.log
./sanity-test export ${{ inputs.service }}
- name: List ${{ inputs.service }} ${{ inputs.kind }}
shell: bash
working-directory: src
run: |
echo "---------------------------"
echo Backup list ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
set -euo pipefail
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-${{ inputs.service }}${CATEGORY_SUFFIX}-list-${{inputs.kind }}.log
./corso backup list ${{ inputs.service }} \
--no-stats \
--hide-progress \
2>&1 |
tee /tmp/corso-backup-list.log
if ! grep -q ${{ steps.backup.outputs.result }} /tmp/corso-backup-list.log
then
echo "Unable to find backup from previous run in backup list"
exit 1
fi
- name: List item ${{ inputs.service }} ${{ inputs.kind }}
shell: bash
working-directory: src
run: |
echo "---------------------------"
echo Backup List w/ Backup ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
set -euo pipefail
# Include category in the log file name if present
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-list-${{ inputs.service }}${CATEGORY_SUFFIX}-single-${{inputs.kind }}.log
./corso backup list ${{ inputs.service }} \
--no-stats \
--hide-progress \
--backup "${{ steps.backup.outputs.result }}" \
2>&1 |
tee /tmp/corso-backup-list-item.log
if ! grep -q ${{ steps.backup.outputs.result }} /tmp/corso-backup-list-item.log
then
echo "Unable to list previous backup"
exit 1
fi
- if: always()
shell: bash
run: |
echo "---------------------------"
echo Logging Results
echo "---------------------------"
- name: Upload test log
if: always()
uses: actions/upload-artifact@v3
with:
name: "${{ inputs.service }}-${{ inputs.kind }}-logs"
path: ${{ inputs.log-dir }}/*
if-no-files-found: error
retention-days: 14
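
The backup step above turns Corso's JSON output into a step output named result via the jq pipeline. A minimal sketch of that extraction, assuming (as the pipeline implies) that corso backup create --json prints an array of backup objects carrying an id field; the sample payload below is hypothetical:

    # Hypothetical stand-in for the real `corso backup create --json` output.
    echo '[{"id":"abcd-1234","status":"completed"}]' |
      jq -r '.[0] | .id' |     # take the id of the first (only) backup
      sed 's/^/result=/' |     # rewrite into key=value form
      tee "$GITHUB_OUTPUT"     # register it as the step output "result"

Downstream steps then reference it as steps.backup.outputs.result, which is how the restore and export steps receive their --backup argument.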

View File

@ -1,5 +1,4 @@
name: Setup and Cache Golang
description: Build golang binaries for later use in CI.
# clone of: https://github.com/magnetikonline/action-golang-cache/blob/main/action.yaml
#
@ -38,8 +37,8 @@ runs:
- name: Determine Golang cache paths
id: golang-path
run: |
echo "build=$(go env GOCACHE)" | tee -a $GITHUB_OUTPUT
echo "module=$(go env GOMODCACHE)" | tee -a $GITHUB_OUTPUT
echo "::set-output name=build::$(go env GOCACHE)"
echo "::set-output name=module::$(go env GOMODCACHE)"
shell: bash
- name: Setup Golang cache
@ -57,6 +56,6 @@ runs:
# cover all the bases, just to make sure we loaded everything we'll use
run: |
go mod download -x
go build ./...
go test -run=nope ./...
go build -o compile
go test -v -c -o compile_test
shell: bash
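
The two output styles visible in this hunk are the old and new GitHub Actions mechanisms for declaring step outputs: the deprecated ::set-output workflow command versus appending key=value lines to the file named by $GITHUB_OUTPUT. A minimal side-by-side sketch:

    # Deprecated workflow-command form:
    echo "::set-output name=build::$(go env GOCACHE)"
    # Current form: append key=value to the $GITHUB_OUTPUT file:
    echo "build=$(go env GOCACHE)" >> "$GITHUB_OUTPUT"

Either way the value is consumed as steps.golang-path.outputs.build.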

View File

@ -1,76 +0,0 @@
name: Publish Binary
description: Publish binary artifacts.
inputs:
version:
description: Corso version to use for publishing
required: true
github_token:
description: GitHub token for publishing
required: true
rudderstack_write_key:
description: Write key for RudderStack
required: true
rudderstack_data_plane_url:
description: Data plane URL for RudderStack
required: true
runs:
using: composite
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0 # needed to pull changelog
- name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v4
with:
go-version-file: src/go.mod
- name: Mark snapshot release
shell: bash
if: ${{ !startsWith(github.ref , 'refs/tags/') }}
run: |
echo "grflags=--snapshot" >> $GITHUB_ENV
- name: Run GoReleaser
uses: goreleaser/goreleaser-action@v4
with:
version: latest
args: release --clean --timeout 500m --parallelism 1 ${{ env.grflags }}
workdir: src
env:
GITHUB_TOKEN: ${{ inputs.github_token }}
RUDDERSTACK_CORSO_WRITE_KEY: ${{ inputs.rudderstack_write_key }}
RUDDERSTACK_CORSO_DATA_PLANE_URL: ${{ inputs.rudderstack_data_plane_url }}
CORSO_VERSION: ${{ inputs.version }}
- name: Upload darwin arm64
uses: actions/upload-artifact@v3
with:
name: corso_Darwin_arm64
path: src/dist/corso_darwin_arm64/corso
- name: Upload linux arm64
uses: actions/upload-artifact@v3
with:
name: corso_Linux_arm64
path: src/dist/corso_linux_arm64/corso
- name: Upload darwin amd64
uses: actions/upload-artifact@v3
with:
name: corso_Darwin_amd64
path: src/dist/corso_darwin_amd64_v1/corso
- name: Upload linux amd64
uses: actions/upload-artifact@v3
with:
name: corso_Linux_amd64
path: src/dist/corso_linux_amd64_v1/corso
- name: Upload windows amd64
uses: actions/upload-artifact@v3
with:
name: corso_Windows_amd64
path: src/dist/corso_windows_amd64_v1/corso.exe

View File

@ -1,47 +0,0 @@
name: Publish Website
description: Publish website artifacts.
inputs:
aws-iam-role:
description: IAM role for connecting to AWS
bucket:
description: Bucket to push the website files
cfid:
description: Cloudfront ID for invalidation
runs:
using: composite
steps:
- uses: actions/checkout@v3
- uses: actions/download-artifact@master
name: Download website from build step
with:
name: website
path: website/build
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v2
with:
role-to-assume: ${{ inputs.aws-iam-role }}
role-session-name: integration-testing
aws-region: us-east-1
- name: Add robots.txt
if: github.ref == 'refs/heads/asset-validation-explore'
working-directory: website
shell: bash
run: |
printf 'User-agent: *\nDisallow: /' > build/robots.txt
- name: Push website
working-directory: website
shell: bash
run: |
aws s3 sync build "s3://${{ inputs.bucket }}" --delete
- name: Invalidate cloudfront
working-directory: website
shell: bash
run: |
aws cloudfront create-invalidation --distribution-id ${{ inputs.cfid }} --paths "/*"
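
The printf in the robots.txt step expands to a two-line file that disallows all crawlers, which is why it's gated to the asset-validation-explore ref rather than production pushes. A sketch of what gets written:

    printf 'User-agent: *\nDisallow: /' > build/robots.txt
    # build/robots.txt now contains:
    #   User-agent: *
    #   Disallow: /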

View File

@ -1,121 +0,0 @@
name: Purge M365 User Data
description: Deletes M365 data generated during CI tests.
# Hard deletion of an M365 user's data. Our CI processes create a lot
# of data churn (creation and immediate deletion) of files, the likes
# of which wouldn't otherwise be seen by users of the system. Standard
# APIs don't have the tooling to gut out all the cruft which we accrue
# in Microsoft's hidden nooks and secret crannies. A manual, SOAPy
# exorcism is the only way.
#
# The script focuses on cleaning up the following:
# * All folders, descending from the exchange root, of a given prefix.
# * All folders in PersonMetadata
# * All already soft-deleted items
# * All folders under recoverable items
inputs:
user:
description: User whose data is to be purged.
site:
description: SharePoint site where data is to be purged.
libraries:
description: List of library names within the site where data is to be purged.
library-prefix:
description: List of library name prefixes within the site; matching libraries will be deleted entirely.
folder-prefix:
description: Name of the folder to be purged. If empty, purges the set of static, well-known folders instead.
older-than:
description: Minimum age of folders to be deleted.
azure-client-id:
description: Secret value for AZURE_CLIENT_ID
azure-client-secret:
description: Secret value for AZURE_CLIENT_SECRET
azure-pnp-client-id:
description: Secret value of AZURE_PNP_CLIENT_ID
azure-pnp-client-cert:
description: Base64 encoded private certificate for the azure-pnp-client-id (Secret value of AZURE_PNP_CLIENT_CERT)
azure-tenant-id:
description: Secret value of AZURE_TENANT_ID
m365-admin-user:
description: Secret value for M365_TENANT_ADMIN_USER
m365-admin-password:
description: Secret value for M365_TENANT_ADMIN_PASSWORD
tenant-domain:
description: The domain of the tenant (e.g., 10rqc2.onmicrosoft.com)
required: true
runs:
using: composite
steps:
################################################################################################################
# Exchange
#
- name: Run the Exchange purge scripts for user
if: ${{ inputs.user != '' }}
shell: pwsh
working-directory: ./src/cmd/purge/scripts
env:
AZURE_CLIENT_ID: ${{ inputs.azure-client-id }}
AZURE_CLIENT_SECRET: ${{ inputs.azure-client-secret }}
AZURE_TENANT_ID: ${{ inputs.azure-tenant-id }}
run: |
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
{
if (./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
break
}
}
# TODO(ashmrtn): Re-enable when we figure out errors we're seeing with Get-Mailbox call.
#- name: Reset retention for all mailboxes to 0
# if: ${{ inputs.user == '' }}
# shell: pwsh
# working-directory: ./src/cmd/purge/scripts
# env:
# M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
# M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
# run: ./exchangeRetention.ps1
################################################################################################################
# OneDrive
#
- name: Run the OneDrive purge scripts for user
if: ${{ inputs.user != '' }}
shell: pwsh
working-directory: ./src/cmd/purge/scripts
env:
AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
TENANT_DOMAIN: ${{ inputs.tenant-domain }}
run: |
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
{
if (./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
break
}
}
################################################################################################################
# Sharepoint
#
- name: Run SharePoint purge script
if: ${{ inputs.site != '' }}
shell: pwsh
working-directory: ./src/cmd/purge/scripts
env:
AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
TENANT_DOMAIN: ${{ inputs.tenant-domain }}
run: |
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
{
if (./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
break
}
}
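
Each purge step wraps its PowerShell script in the same three-attempt retry loop, breaking on the first success. A rough bash rendering of that retry shape, for illustration only (the real scripts are the .ps1 files above; purge-script.sh here is a hypothetical stand-in):

    # Retry up to 3 times, stopping on the first success.
    for attempt in 1 2 3; do
      if ./purge-script.sh; then
        break                       # success: stop retrying
      fi
      echo "attempt $attempt failed; retrying"
    done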

View File

@ -1,92 +0,0 @@
name: Send a message to Teams
description: Send messages to communication apps.
inputs:
msg:
description: The Teams message text
teams_url:
description: Passthrough for secrets.TEAMS_CORSO_CI_WEBHOOK_URL
runs:
using: composite
steps:
- uses: actions/checkout@v3
- name: set github ref
shell: bash
run: |
echo "github_reference=${{ github.ref }}" >> $GITHUB_ENV
- name: trim github ref
shell: bash
run: |
echo "trimmed_ref=${github_reference#refs/}" >> $GITHUB_ENV
- name: build urls
shell: bash
run: |
echo "logurl=$(printf 'https://github.com/alcionai/corso/actions/runs/%s' ${{ github.run_id }})" >> $GITHUB_ENV
echo "commiturl=$(printf 'https://github.com/alcionai/corso/commit/%s' ${{ github.sha }})" >> $GITHUB_ENV
echo "refurl=$(printf 'https://github.com/alcionai/corso/%s' ${{ env.trimmed_ref }})" >> $GITHUB_ENV
- name: use url or blank val
shell: bash
run: |
echo "STEP=${{ env.trimmed_ref || '' }}" >> $GITHUB_ENV
echo "JOB=${{ github.job || '' }}" >> $GITHUB_ENV
echo "LOGS=${{ github.run_id && env.logurl || '-' }}" >> $GITHUB_ENV
echo "COMMIT=${{ github.sha && env.commiturl || '-' }}" >> $GITHUB_ENV
echo "REF=${{ env.trimmed_ref && env.refurl || '-' }}" >> $GITHUB_ENV
- name: Send JSON payload to Teams Webhook
shell: bash
run: |
curl -X POST \
-H "Content-Type: application/json" \
-d '{
"type":"message",
"attachments":[
{
"contentType":"application/vnd.microsoft.card.adaptive",
"contentUrl":null,
"content":{
"$schema":"http://adaptivecards.io/schemas/adaptive-card.json",
"type":"AdaptiveCard",
"body": [
{
"type": "TextBlock",
"size": "Medium",
"weight": "Bolder",
"text": "${{ inputs.msg }}",
"color": "Attention"
},
{
"type": "TextBlock",
"text": "${{ env.JOB }} :: ${{ env.STEP }}",
"wrap": true
}
],
"actions": [
{
"type": "Action.OpenUrl",
"title": "Action",
"url": "${{ env.LOGS }}"
},
{
"type": "Action.OpenUrl",
"title": "Commit",
"url": "${{ env.COMMIT }}"
},
{
"type": "Action.OpenUrl",
"title": "Ref",
"url": "${{ env.REF }}"
}
],
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
"version": "1.5"
}
}
]
}' \
${{ inputs.teams_url }}
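
Because the Adaptive Card payload is inlined into the curl command, a stray quote in inputs.msg can silently produce malformed JSON. One way to catch that locally, assuming the payload is first written to a file (payload.json and TEAMS_URL are hypothetical names):

    # jq empty exits non-zero on malformed JSON, so the POST only
    # runs when the payload parses cleanly.
    jq empty payload.json &&
      curl -X POST -H "Content-Type: application/json" \
        -d @payload.json "$TEAMS_URL"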

View File

@ -1,58 +0,0 @@
name: Lint Website
description: Lint website content.
inputs:
version:
description: Corso version to use for publishing
runs:
using: composite
steps:
- uses: actions/checkout@v3
- name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v3
with:
go-version-file: src/go.mod
- name: Generate CLI Docs
working-directory: ./src
shell: bash
run: |
go run ./cmd/mdgen/mdgen.go generate
# migrate generated md files into /website/docs/cli
- name: Move CLI .md to Docs
shell: bash
run: |
mkdir -p ./website/docs/cli
mv ./src/cmd/mdgen/cli_markdown/* ./website/docs/cli/
rm -R ./src/cmd/mdgen/cli_markdown/
- name: Install dependencies for website lint
shell: bash
run: |
wget --quiet https://github.com/errata-ai/vale/releases/download/v2.20.2/vale_2.20.2_Linux_64-bit.tar.gz # NOTE: update in Dockerfile when updating
mkdir bin && tar -xvzf vale_2.20.2_Linux_64-bit.tar.gz -C bin
echo "$PWD/bin" >> $GITHUB_PATH
npm i -g markdownlint-cli@0.32.2 # NOTE: update in Dockerfile when updating
- name: Run website lint
shell: bash
run: |
cd website && make -o genclidocs localcheck
- name: Build website
shell: bash
env:
CORSO_VERSION: ${{ inputs.version }}
run: |
cd website &&
npm ci &&
npm run build
- uses: actions/upload-artifact@master
name: Upload website as artifacts
with:
name: website
path: website/build

View File

@ -1,4 +0,0 @@
# auto-merge config file (see https://github.com/marketplace/actions/dependabot-auto-merge#configuration-file-syntax)
- match:
dependency_type: all
update_type: "semver:minor" # includes patch updates!

View File

@ -1,35 +0,0 @@
# See: https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
version: 2
updates:
# Maintain dependencies for GitHub Actions
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
reviewers:
- "alcionai/corso-maintainers"
open-pull-requests-limit: 50
rebase-strategy: "disabled"
# Maintain dependencies for npm - website
- package-ecosystem: "npm"
directory: "website/"
schedule:
interval: "daily"
reviewers:
- "alcionai/corso-maintainers"
- "ntolia"
- "gmatev"
open-pull-requests-limit: 50
rebase-strategy: "disabled"
# Maintain dependencies for go - src
- package-ecosystem: "gomod"
directory: "src/"
schedule:
interval: "daily"
reviewers:
- "alcionai/corso-maintainers"
open-pull-requests-limit: 50
rebase-strategy: "disabled"

View File

@ -1,29 +1,23 @@
<!-- PR description-->
## Description
---
<!-- Insert PR description-->
#### Does this PR need a docs update or release note?
- [ ] :white_check_mark: Yes, it's included
- [ ] :clock1: Yes, but in a later PR
- [ ] :no_entry: No
#### Type of change
## Type of change
<!--- Please check the type of change your PR introduces: --->
- [ ] :sunflower: Feature
- [ ] :bug: Bugfix
- [ ] :world_map: Documentation
- [ ] :robot: Supportability/Tests
- [ ] :robot: Test
- [ ] :computer: CI/Deployment
- [ ] :broom: Tech Debt/Cleanup
- [ ] :hamster: Trivial/Minor
#### Issue(s)
## Issue(s)
<!-- Can reference multiple issues. Use one of the "magic words" ("closes", "fixes") to auto-close the GitHub issue. -->
* #<issue>
#### Test Plan
## Test Plan
<!-- How will this be tested prior to merging.-->
- [ ] :muscle: Manual

View File

@ -6,12 +6,9 @@ on:
srcfileschanged:
description: "'true' if src/** or .github/workflows/** files have changed in the branch"
value: ${{ jobs.file-change-check.outputs.srcfileschanged }}
websitefileschanged:
description: "'true' if websites/** or .github/workflows/** files have changed in the branch"
value: ${{ jobs.file-change-check.outputs.websitefileschanged }}
actionsfileschanged:
description: "'true' if .github/actions/** or .github/workflows/** files have changed in the branch"
value: ${{ jobs.file-change-check.outputs.actionsfileschanged }}
docfileschanged:
description: "'true' if docs/** or src/** or .github/workflows/** files have changed in the branch"
value: ${{ jobs.file-change-check.outputs.docfileschanged }}
jobs:
file-change-check:
@ -21,22 +18,21 @@ jobs:
pull-requests: read
outputs:
srcfileschanged: ${{ steps.srcchecker.outputs.srcfileschanged }}
websitefileschanged: ${{ steps.websitechecker.outputs.websitefileschanged }}
actionsfileschanged: ${{ steps.actionschecker.outputs.actionsfileschanged }}
docfileschanged: ${{ steps.docchecker.outputs.docfileschanged }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
# only run CI tests if the src folder or workflow actions have changed
- name: Check for file changes in src/ or .github/workflows/
uses: dorny/paths-filter@v3
uses: dorny/paths-filter@v2
id: dornycheck
with:
list-files: json
filters: |
src:
- 'src/**'
website:
- 'website/**'
docs:
- 'docs/**'
actions:
- '.github/workflows/**'
- '.github/actions/**'
@ -46,18 +42,11 @@ jobs:
if: steps.dornycheck.outputs.src == 'true' || steps.dornycheck.outputs.actions == 'true'
run: |
echo "src or workflow file changes occurred"
echo srcfileschanged=true >> $GITHUB_OUTPUT
echo ::set-output name=srcfileschanged::true
- name: Check dorny for changes in website related filepaths
id: websitechecker
if: steps.dornycheck.outputs.src == 'true' || steps.dornycheck.outputs.website == 'true' || steps.dornycheck.outputs.actions == 'true'
- name: Check dorny for changes in docs related filepaths
id: docchecker
if: steps.dornycheck.outputs.src == 'true' || steps.dornycheck.outputs.docs == 'true' || steps.dornycheck.outputs.actions == 'true'
run: |
echo "website or workflow file changes occurred"
echo websitefileschanged=true >> $GITHUB_OUTPUT
- name: Check dorny for changes in actions filepaths
id: actionschecker
if: steps.dornycheck.outputs.actions == 'true'
run: |
echo "actions file changes occurred"
echo actionsfileschanged=true >> $GITHUB_OUTPUT
echo "docs, src or workflow file changes occurred"
echo ::set-output name=docfileschanged::true

View File

@ -1,46 +0,0 @@
name: SetM365AppAcc
on:
workflow_call:
outputs:
client_app_slot:
value: ${{ jobs.GetM365App.outputs.client_app_slot }}
client_id_env:
value: ${{ jobs.GetM365App.outputs.client_id_env }}
client_secret_env:
value: ${{ jobs.GetM365App.outputs.client_secret_env }}
jobs:
GetM365App:
environment: Testing
runs-on: ubuntu-latest
outputs:
client_app_slot: ${{ steps.roundrobin.outputs.CLIENT_APP_SLOT }}
client_id_env: ${{ steps.roundrobin.outputs.CLIENT_ID_ENV }}
client_secret_env: ${{ steps.roundrobin.outputs.CLIENT_SECRET_ENV }}
steps:
- name: Figure out which client id to use
id: roundrobin
run: |
slot=$((GITHUB_RUN_NUMBER % 4))
echo "CLIENT_APP_SLOT=$slot" >> $GITHUB_OUTPUT
case $slot in
0)
echo "CLIENT_ID_ENV=CLIENT_ID" >> $GITHUB_OUTPUT
echo "CLIENT_SECRET_ENV=CLIENT_SECRET" >> $GITHUB_OUTPUT
;;
1)
echo "CLIENT_ID_ENV=CLIENT_ID_2" >> $GITHUB_OUTPUT
echo "CLIENT_SECRET_ENV=CLIENT_SECRET_2" >> $GITHUB_OUTPUT
;;
2)
echo "CLIENT_ID_ENV=CLIENT_ID_3" >> $GITHUB_OUTPUT
echo "CLIENT_SECRET_ENV=CLIENT_SECRET_3" >> $GITHUB_OUTPUT
;;
3)
echo "CLIENT_ID_ENV=CLIENT_ID_4" >> $GITHUB_OUTPUT
echo "CLIENT_SECRET_ENV=CLIENT_SECRET_4" >> $GITHUB_OUTPUT
;;
esac
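
The roundrobin step spreads CI runs across four registered M365 client apps by taking the run number modulo 4. A quick illustration of how consecutive runs rotate through the slots:

    for n in 100 101 102 103 104; do
      echo "run $n -> slot $((n % 4))"
    done
    # run 100 -> slot 0
    # run 101 -> slot 1
    # run 102 -> slot 2
    # run 103 -> slot 3
    # run 104 -> slot 0

Each slot maps onto one of the CLIENT_ID/CLIENT_SECRET, _2, _3, or _4 secret pairs chosen by the case statement.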

View File

@ -1,29 +0,0 @@
# See https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#common-dependabot-automations
name: auto-merge
on:
pull_request:
paths-ignore:
- "src/**" # prevent auto-merge for go dependencies
permissions:
pull-requests: write
jobs:
auto-approve-label:
runs-on: ubuntu-latest
if: ${{ github.actor == 'dependabot[bot]' }}
steps:
- name: Dependabot metadata
id: metadata
uses: dependabot/fetch-metadata@v1
with:
github-token: "${{ secrets.GITHUB_TOKEN }}"
- name: Enable auto-merge for Dependabot PRs
if: ${{steps.metadata.outputs.update-type == 'version-update:semver-minor'}}
run: |
gh pr edit "$PR_URL" --add-label "mergequeue"
gh pr review --approve "$PR_URL"
env:
PR_URL: ${{github.event.pull_request.html_url}}
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}

View File

@ -1,44 +0,0 @@
name: Publish binary
on:
workflow_dispatch:
jobs:
SetEnv:
environment: Testing
runs-on: ubuntu-latest
outputs:
version: ${{ steps.version.outputs.version }}
steps:
- uses: actions/checkout@v4
- name: Get version string
id: version
run: |
if ${{ startsWith(github.ref, 'refs/tags/') }}; then
echo "version=$(git describe --exact-match --tags $(git rev-parse HEAD))" | tee -a $GITHUB_OUTPUT
else
echo "version=$(echo unreleased-$(git rev-parse --short HEAD))" | tee -a $GITHUB_OUTPUT
fi
Publish-Binary:
needs: [SetEnv]
environment: Testing
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Publish Binary
uses: ./.github/actions/publish-binary
with:
version: ${{ needs.SetEnv.outputs.version }}
github_token: ${{ secrets.GITHUB_TOKEN }}
rudderstack_write_key: ${{ secrets.RUDDERSTACK_CORSO_WRITE_KEY }}
rudderstack_data_plane_url: ${{ secrets.RUDDERSTACK_CORSO_DATA_PLANE_URL }}
- name: Notify failure in teams
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] Publishing Binary"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
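
The version step picks between an exact tag name and an unreleased fallback. git describe --exact-match --tags prints the tag only when HEAD is exactly tagged and fails otherwise, which is why the workflow branches on the ref first. A sketch of the two outcomes:

    # On a tagged commit: prints the tag itself, e.g. v0.12.0
    git describe --exact-match --tags "$(git rev-parse HEAD)"
    # On an untagged commit: the describe above exits non-zero,
    # so the fallback version string is used instead:
    echo "unreleased-$(git rev-parse --short HEAD)"   # e.g. unreleased-1a2b3c4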

View File

@ -1,17 +1,11 @@
name: Build/Release Corso
env:
IMAGE_NAME: ghcr.io/alcionai/corso
on:
workflow_dispatch:
pull_request:
branches: [ main ]
push:
branches: [main]
tags: ["v*.*.*"]
repository_dispatch:
types: [ok-to-test-command]
branches: [ main ]
tags: [ 'v*.*.*' ]
permissions:
# required to retrieve AWS credentials
@ -26,6 +20,7 @@ concurrency:
cancel-in-progress: true
jobs:
# ----------------------------------------------------------------------------------------------------
# --- Prechecks and Checkouts ------------------------------------------------------------------------
# ----------------------------------------------------------------------------------------------------
@ -40,332 +35,86 @@ jobs:
run:
working-directory: src
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
# single setup and sum cache handling here.
# the results will cascade onto both testing and linting.
- name: Setup Golang with cache
uses: ./.github/actions/go-setup-cache
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.websitefileschanged == 'true'
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.docfileschanged == 'true'
with:
go-version-file: src/go.mod
# SetM365App will decide which M365 app to use for this CI run
SetM365App:
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
SetEnv:
environment: Testing
runs-on: ubuntu-latest
outputs:
environment: ${{ steps.environment.outputs.environment }}
version: ${{ steps.version.outputs.version }}
website-bucket: ${{ steps.website-bucket.outputs.website-bucket }}
website-cfid: ${{ steps.website-cfid.outputs.website-cfid }}
steps:
- uses: actions/checkout@v4
- name: Figure out environment
id: environment
run: |
if ${{ startsWith(github.ref, 'refs/tags/') }}; then
echo "environment=Production" | tee -a $GITHUB_OUTPUT
else
echo "environment=Testing" | tee -a $GITHUB_OUTPUT
fi
- name: Get version string
id: version
run: |
if ${{ startsWith(github.ref, 'refs/tags/') }}; then
echo "version=$(git describe --exact-match --tags $(git rev-parse HEAD))" | tee -a $GITHUB_OUTPUT
else
echo "version=$(echo unreleased-$(git rev-parse --short HEAD))" | tee -a $GITHUB_OUTPUT
fi
- name: Get bucket name for website
id: website-bucket
run: |
if ${{ startsWith(github.ref, 'refs/tags/') }}; then
echo "website-bucket=corsobackup.io" | tee -a $GITHUB_OUTPUT
else
echo "website-bucket=test-corso-docs" | tee -a $GITHUB_OUTPUT
fi
- name: Get cfid for website
id: website-cfid
run: |
if ${{ startsWith(github.ref, 'refs/tags/') }}; then
echo "website-cfid=E1W9NGI9YTVZ1A" | tee -a $GITHUB_OUTPUT
else
echo "website-cfid=ESFTEIYTIP7Y3" | tee -a $GITHUB_OUTPUT
fi
# ----------------------------------------------------------------------------------------------------
# --- Website Linting -----------------------------------------------------------------------------------
# --- Docs Linting -----------------------------------------------------------------------------------
# ----------------------------------------------------------------------------------------------------
Website-Linting:
needs: [Precheck, Checkout, SetEnv]
Docs-Linting:
needs: [Precheck, Checkout]
environment: Testing
runs-on: ubuntu-latest
timeout-minutes: 30
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.websitefileschanged == 'true' # websitefileschanged also includes srcfileschanged
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.docfileschanged == 'true' # docsfileschanged also includes srcfileschanged
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Lint Website
uses: ./.github/actions/website-linting
- name: Setup Golang with cache
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.srcfileschanged == 'true'
uses: magnetikonline/action-golang-cache@v3
with:
version: ${{ needs.SetEnv.outputs.version }}
go-version-file: src/go.mod
- name: Generate CLI Docs
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.srcfileschanged == 'true'
working-directory: ./src
run: |
go run ./cmd/mdgen/mdgen.go generate
# migrate generated md files into /docs/docs/cli
- name: Move CLI .md to Docs
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.srcfileschanged == 'true'
run: |
mkdir -p ./docs/docs/cli
mv ./src/cmd/mdgen/cli_markdown/* ./docs/docs/cli/
rm -R ./src/cmd/mdgen/cli_markdown/
- uses: actions/upload-artifact@master
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.srcfileschanged == 'true'
name: Upload cli docs as artifacts
with:
name: cli-docs
path: docs/docs/cli
- name: Install dependencies for docs lint
run: |
wget https://github.com/errata-ai/vale/releases/download/v2.20.2/vale_2.20.2_Linux_64-bit.tar.gz # NOTE: update in Dockerfile when updating
mkdir bin && tar -xvzf vale_2.20.2_Linux_64-bit.tar.gz -C bin
echo "$PWD/bin" >> $GITHUB_PATH
npm i -g markdownlint-cli@0.32.2 # NOTE: update in Dockerfile when updating
- name: Run docs lint
env:
CORSO_USE_DOCKER: -1 # prevent using docker inside makefile
run: |
cd docs && make -o genclidocs check
# ----------------------------------------------------------------------------------------------------
# --- Integration and Unit Testing -------------------------------------------------------------------
# ----------------------------------------------------------------------------------------------------
Test-Suite-Trusted:
needs: [Precheck, Checkout, SetM365App]
environment: Testing
runs-on: ubuntu-latest
timeout-minutes: 120
if: (startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main') || (needs.precheck.outputs.srcfileschanged == 'true' && github.event.pull_request.head.repo.full_name == github.repository)
defaults:
run:
working-directory: src
env:
# Resolve the name of the secret that contains the Azure client ID/secret
AZURE_CLIENT_ID_NAME: ${{ needs.SetM365App.outputs.client_id_env }}
AZURE_CLIENT_SECRET_NAME: ${{ needs.SetM365App.outputs.client_secret_env }}
CLIENT_APP_SLOT: ${{ needs.SetM365App.outputs.client_app_slot }}
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-ci.log
LOG_GRAPH_REQUESTS: true
steps:
- uses: actions/checkout@v4
- name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v4
with:
go-version-file: src/go.mod
- run: mkdir testlog
# Install gotestfmt
- name: Set up gotestfmt
run: go install github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
# AWS creds
- name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v4
with:
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
role-session-name: integration-testing
aws-region: us-east-1
# run the tests
- name: Integration Tests
env:
AZURE_CLIENT_ID: ${{ secrets[env.AZURE_CLIENT_ID_NAME] }}
AZURE_CLIENT_SECRET: ${{ secrets[env.AZURE_CLIENT_SECRET_NAME] }}
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
CORSO_CI_TESTS: true
CORSO_M365_TEST_USER_ID: ${{ vars.CORSO_M365_TEST_USER_ID }}
CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
S3_BUCKET: ${{ secrets.CI_TESTS_S3_BUCKET }}
run: |
set -euo pipefail
go test \
-tags testing \
-json \
-v \
-failfast \
-p 1 \
-timeout 20m \
./... \
2>&1 | tee ./testlog/gotest-ci.log | gotestfmt -hide successful-tests
# Upload the original go test output as an artifact for later review.
- name: Upload test log
if: failure()
uses: actions/upload-artifact@v4
with:
name: ci-test-log
path: src/testlog/*
if-no-files-found: error
retention-days: 14
Retention-Test-Suite-Trusted:
needs: [Precheck, Checkout, SetM365App]
environment: Testing
runs-on: ubuntu-latest
timeout-minutes: 30
if: (startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main') || (needs.precheck.outputs.srcfileschanged == 'true' && github.event.pull_request.head.repo.full_name == github.repository)
defaults:
run:
working-directory: src
env:
# Resolve the name of the secret that contains the Azure client ID/secret
AZURE_CLIENT_ID_NAME: ${{ needs.SetM365App.outputs.client_id_env }}
AZURE_CLIENT_SECRET_NAME: ${{ needs.SetM365App.outputs.client_secret_env }}
CLIENT_APP_SLOT: ${{ needs.SetM365App.outputs.client_app_slot }}
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-ci-retention.log
LOG_GRAPH_REQUESTS: true
steps:
- uses: actions/checkout@v4
- name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v4
with:
go-version-file: src/go.mod
- run: mkdir testlog
# Install gotestfmt
- name: Set up gotestfmt
run: go install github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
# AWS creds
- name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v4
with:
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
role-session-name: integration-testing
aws-region: us-east-1
# run the tests
- name: Retention Tests
env:
AZURE_CLIENT_ID: ${{ secrets[env.AZURE_CLIENT_ID_NAME] }}
AZURE_CLIENT_SECRET: ${{ secrets[env.AZURE_CLIENT_SECRET_NAME] }}
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
CORSO_RETENTION_TESTS: true
CORSO_M365_TEST_USER_ID: ${{ vars.CORSO_M365_TEST_USER_ID }}
CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
S3_BUCKET: ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }}
run: |
set -euo pipefail
go test \
-tags testing \
-json \
-v \
-failfast \
-p 1 \
-timeout 10m \
./... \
2>&1 | tee ./testlog/gotest-ci.log | gotestfmt -hide successful-tests
# Upload the original go test output as an artifact for later review.
- name: Upload test log
if: failure()
uses: actions/upload-artifact@v4
with:
name: ci-retention-test-log
path: src/testlog/*
if-no-files-found: error
retention-days: 14
Unit-Test-Suite:
Test-Suite:
needs: [Precheck, Checkout]
environment: Testing
runs-on: ubuntu-latest
timeout-minutes: 30
if: needs.precheck.outputs.srcfileschanged == 'true'
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.srcfileschanged == 'true'
defaults:
run:
working-directory: src
env:
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-unit.log
LOG_GRAPH_REQUESTS: true
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v4
with:
go-version-file: src/go.mod
- run: mkdir testlog
# Install gotestfmt
- name: Set up gotestfmt
run: go install github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
# run the tests
- name: Unit Tests
env:
# Set these to a bad value so we don't accidentally fall back to
# something elsewhere.
CORSO_M365_TEST_USER_ID: 'foo'
CORSO_SECONDARY_M365_TEST_USER_ID: 'foo'
run: |
set -euo pipefail
go test \
-tags testing \
-json \
-v \
-failfast \
-p 1 \
-timeout 20m \
./... \
2>&1 | tee ./testlog/gotest-unit.log | gotestfmt -hide successful-tests
# Upload the original go test output as an artifact for later review.
- name: Upload test log
if: failure()
uses: actions/upload-artifact@v4
with:
name: unit-test-log
path: src/testlog/*
if-no-files-found: error
retention-days: 14
Test-Suite-Fork:
needs: [Precheck]
environment: Testing
if: (!startsWith(github.ref , 'refs/tags/') && github.ref != 'refs/heads/main') && (needs.precheck.outputs.srcfileschanged == 'true' && github.event.pull_request.head.repo.full_name != github.repository)
runs-on: ubuntu-latest
defaults:
run:
working-directory: src
env:
CORSO_LOG_FILE: ${{ github.workspace }}/testlog/run-fork.log
LOG_GRAPH_REQUESTS: true
steps:
- name: Fail check if not repository_dispatch
if: github.event_name != 'repository_dispatch'
run: |
echo "Workflow requires approval from a maintainer to run. It will be automatically rerun on approval."
exit 1
- uses: marocchino/sticky-pull-request-comment@v2
if: github.event.client_payload.slash_command.args.named.sha != '' && contains(github.event.client_payload.pull_request.head.sha, github.event.client_payload.slash_command.args.named.sha)
with:
message: |
Workflow run sha specified via `ok-to-test` is not the latest commit on PR. Run canceled.
- name: Fail check if not head of PR
if: github.event.client_payload.slash_command.args.named.sha != '' && contains(github.event.client_payload.pull_request.head.sha, github.event.client_payload.slash_command.args.named.sha)
run: |
echo "Workflow run sha specified is not the latest commit on PR. Exiting."
exit 1
# add comment to PR with link to workflow run
- uses: marocchino/sticky-pull-request-comment@v2
with:
message: |
Test suite run will be available at https://github.com/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID
# Check out merge commit
- name: Fork based /ok-to-test checkout
uses: actions/checkout@v4
with:
ref: "refs/pull/${{ github.event.client_payload.pull_request.number }}/merge"
- name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v4
uses: magnetikonline/action-golang-cache@v3
with:
go-version-file: src/go.mod
@ -377,7 +126,7 @@ jobs:
# AWS creds
- name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v4
uses: aws-actions/configure-aws-credentials@v1
with:
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
role-session-name: integration-testing
@ -389,88 +138,52 @@ jobs:
AZURE_CLIENT_ID: ${{ secrets.CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.CLIENT_SECRET }}
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
CORSO_CI_TESTS: true
CORSO_M365_TEST_USER_ID: ${{ vars.CORSO_M365_TEST_USER_ID }}
CORSO_CI_TESTS: true
CORSO_M356_TEST_USER_ID: ${{ secrets.CORSO_M356_TEST_USER_ID }}
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
run: |
set -euo pipefail
go test \
-json \
-v \
-timeout 15m \
./... \
2>&1 | tee ./testlog/gotest-fork.log | gotestfmt -hide successful-tests
./... 2>&1 | tee ./testlog/gotest.log | gotestfmt -hide successful-tests
# Upload the original go test log as an artifact for later review.
- name: Upload test log
if: failure()
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: fork-test-log
path: src/testlog/*
name: test-log
path: src/testlog/gotest.log
if-no-files-found: error
retention-days: 14
# Update check run called "Test-Suite-Fork"
- uses: actions/github-script@v7
id: update-check-run
if: failure()
env:
number: ${{ github.event.client_payload.pull_request.number }}
job: ${{ github.job }}
# Conveniently, job.status maps to https://developer.github.com/v3/checks/runs/#update-a-check-run
conclusion: ${{ job.status }}
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const { data: pull } = await github.rest.pulls.get({
...context.repo,
pull_number: process.env.number
});
const ref = pull.head.sha;
const { data: checks } = await github.rest.checks.listForRef({
...context.repo,
ref
});
const check = checks.check_runs.filter(c => c.name === process.env.job);
const { data: result } = await github.rest.checks.update({
...context.repo,
check_run_id: check[0].id,
status: 'completed',
conclusion: process.env.conclusion
});
return result;
# ----------------------------------------------------------------------------------------------------
# --- Source Code Linting ----------------------------------------------------------------------------
# ----------------------------------------------------------------------------------------------------
Source-Code-Linting:
Linting:
needs: [Precheck, Checkout]
environment: Testing
runs-on: ubuntu-latest
timeout-minutes: 30
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.srcfileschanged == 'true'
defaults:
run:
working-directory: src
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v4
uses: magnetikonline/action-golang-cache@v3
with:
go-version-file: src/go.mod
- name: Go Lint
uses: golangci/golangci-lint-action@v4
uses: golangci/golangci-lint-action@v3
with:
# Keep pinned to a version as sometimes updates will add new lint
# failures in unchanged code.
version: v1.54.2
version: v1.45.2
working-directory: src
skip-pkg-cache: true
skip-build-cache: true
skip-cache: true
# check licenses
- name: Get go-licenses
@ -479,127 +192,131 @@ jobs:
- name: Run go-licenses
run: go-licenses check github.com/alcionai/corso/src --ignore github.com/alcionai/corso/src
- name: Run staticcheck
uses: dominikh/staticcheck-action@v1.3.0
with:
install-go: false
working-directory: src
- name: Run allowtags
run: |
go install github.com/ashmrtn/allowtags@latest
allowtags --allow-key json --allow-key uriparametername ./...
# I could not find a way to install tree-grepper without nix
# https://github.com/BrianHicks/tree-grepper/issues/293
- uses: cachix/install-nix-action@v25
- uses: cachix/cachix-action@v14
with:
name: tree-grepper
- run: nix-env -if https://github.com/BrianHicks/tree-grepper/archive/refs/heads/main.tar.gz
- name: Run trailing comma lint rule
run: |
# Using `grep .` as the exit codes are always true for correct grammar
if tree-grepper -q go '(argument_list "," @no-trailing-comma .)' | grep .; then
echo "No trailing commas for function calls"
exit 1
fi
- name: Check for empty string comparison
run: |
# Using `grep .` as the exit codes are always true for correct grammar
if tree-grepper -q go '((binary_expression (identifier) ["==" "!="] (interpreted_string_literal) @_ri) @exp (#eq? @_ri "\"\""))' | grep .; then
echo "Use len check instead of empty string comparison"
exit 1
fi
- name: Check for cases where errors are not propagated
run: |
# Using `grep .` as the exit codes are always true for correct grammar
if tree-grepper -q go '((if_statement (binary_expression) @_if (block (return_statement (expression_list (call_expression (selector_expression) @_fun ) @ret .)))) (#match? @_if "err != nil") (#match? @_fun "clues.NewWC"))' | grep .; then
echo "Make sure to propagate errors with clues"
exit 1
fi
- name: Check if clues without context are used when context is passed in
run: |
# Using `grep .` as the exit codes are always true for correct grammar
if tree-grepper -q go '((function_declaration (parameter_list . (parameter_declaration (identifier) @_octx)) body: (block (short_var_declaration left: (expression_list (identifier) @_err . ) right: (expression_list (call_expression (argument_list . (identifier) @_ctx)))) . (if_statement (binary_expression) @_exp consequence: (block (return_statement (expression_list (call_expression (selector_expression (call_expression (selector_expression) @clue))) . )))))) (#eq? @_err "err") (#eq? @_octx "ctx") (#eq? @_ctx "ctx") (#eq? @_exp "err != nil") (#match? @clue "^clues\.") (#match? @clue "WC$"))' | grep .; then
echo "Do not use clues.*WC when context is passed in"
exit 1
fi
- name: Check clues with context is used when context is not passed in
run: |
# Using `grep .` as the exit codes are always true for correct grammar
if tree-grepper -q go '((function_declaration (parameter_list . (parameter_declaration (identifier) @_octx)) body: (block (short_var_declaration left: (expression_list (identifier) @_err . ) right: (expression_list (call_expression (argument_list . (identifier) @_ctx)))) . (if_statement (binary_expression) @_exp consequence: (block (return_statement (expression_list (call_expression (selector_expression (call_expression (selector_expression) @clue))) . )))))) (#eq? @_err "err") (#eq? @_octx "ctx") (#not-eq? @_ctx "ctx") (#eq? @_exp "err != nil") (#match? @clue "^clues\.") (#not-match? @clue "WC$"))' | grep .; then
echo "Use clues.*WC when context is not passed in"
exit 1
fi
# ----------------------------------------------------------------------------------------------------
# --- GitHub Actions Linting -------------------------------------------------------------------------
# ----------------------------------------------------------------------------------------------------
Actions-Lint:
needs: [Precheck]
environment: Testing
runs-on: ubuntu-latest
if: needs.precheck.outputs.actionsfileschanged == 'true'
steps:
- uses: actions/checkout@v4
- name: actionlint
uses: raven-actions/actionlint@v1
with:
fail-on-error: true
cache: true
# Ignore
# * combining commands into a subshell and using single output
# redirect
# * various variable quoting patterns
# * possible ineffective echo commands
flags: "-ignore SC2129 -ignore SC2086 -ignore SC2046 -ignore 2116"
# ----------------------------------------------------------------------------------------------------
# --- Publish steps ----------------------------------------------------------------------------------
# ----------------------------------------------------------------------------------------------------
SetEnv:
environment: Testing
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'
runs-on: ubuntu-latest
outputs:
environment: ${{ steps.set-env.outputs.environment }}
steps:
- name: Figure out environment
id: set-env
run: |
if ${{ startsWith(github.ref, 'refs/tags/') }}; then
echo "set-output name=environment::Production"
echo "::set-output name=environment::Production"
else
echo "set-output name=environment::Testing"
echo "::set-output name=environment::Testing"
fi
Publish-Binary:
needs: [Test-Suite-Trusted, Source-Code-Linting, Website-Linting, SetEnv]
needs: [Test-Suite, Linting, Docs-Linting, SetEnv]
environment: ${{ needs.SetEnv.outputs.environment }}
runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'
defaults:
run:
working-directory: src
steps:
- uses: actions/checkout@v4
- name: Publish Binary
uses: ./.github/actions/publish-binary
- uses: actions/checkout@v3
with:
version: ${{ needs.SetEnv.outputs.version }}
github_token: ${{ secrets.GITHUB_TOKEN }}
rudderstack_write_key: ${{ secrets.RUDDERSTACK_CORSO_WRITE_KEY }}
rudderstack_data_plane_url: ${{ secrets.RUDDERSTACK_CORSO_DATA_PLANE_URL }}
fetch-depth: 0 # needed to pull changelog
Publish-Image:
needs: [Test-Suite-Trusted, Source-Code-Linting, Website-Linting, SetEnv]
- name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v3
with:
go-version-file: src/go.mod
- id: version
run: echo "::set-output name=version::$(git describe --exact-match --tags $(git rev-parse HEAD) 2>/dev/null || echo unreleased)-$(git rev-parse --short HEAD)"
- name: Run GoReleaser
uses: goreleaser/goreleaser-action@v3
with:
version: latest
args: release --rm-dist --timeout 500m
workdir: src
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
RUDDERSTACK_CORSO_WRITE_KEY: ${{ secrets.RUDDERSTACK_CORSO_WRITE_KEY }}
RUDDERSTACK_CORSO_DATA_PLANE_URL: ${{ secrets.RUDDERSTACK_CORSO_DATA_PLANE_URL }}
CORSO_VERSION: ${{ steps.version.outputs.version }}
- name: Upload assets
uses: actions/upload-artifact@v3
with:
name: corso
path: src/dist/*
Publish-Docs:
needs: [Test-Suite, Linting, Docs-Linting, SetEnv]
environment: ${{ needs.SetEnv.outputs.environment }}
runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/tags/')
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'
defaults:
run:
working-directory: docs
steps:
- uses: actions/checkout@v3
- uses: actions/download-artifact@master
name: Download cli docs from build step
with:
name: cli-docs
path: docs/docs/cli
- name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v1
with:
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
role-session-name: integration-testing
aws-region: us-east-1
- name: Build docs
run: |
npm ci
CORSO_DOCS_BASEURL="/preview/" npm run build # TODO: update base url once finalized
- name: Push docs
run: |
echo "$DOCS_BUCKET" | base64
aws s3 sync build "s3://${{ secrets.DOCS_S3_BUCKET }}/preview"
- name: Invalidate cloudfront
run: |
aws cloudfront create-invalidation --distribution-id ${{ secrets.DOCS_CF_DISTRIBUTION }} --paths "/*"
Publish-Image:
needs: [Test-Suite, Linting, Docs-Linting, SetEnv]
environment: ${{ needs.SetEnv.outputs.environment }}
runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'
defaults:
run:
working-directory: build
env:
imageName: ghcr.io/alcionai/corso
PLATFORMS: linux/amd64,linux/arm64
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
# Setup buildx
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@v2
# retrieve credentials for ghcr.io
- name: Login to Github Packages
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
@ -607,17 +324,16 @@ jobs:
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
uses: docker/metadata-action@v4
with:
images: ${{ env.IMAGE_NAME }}
images: ${{ env.imageName }}
tags: |
type=ref,event=tag
type=sha,format=short,prefix=
type=raw,value=nightly
# deploy the image
- name: Build image and push to GitHub Container Registry
uses: docker/build-push-action@v5
uses: docker/build-push-action@v3
with:
context: .
file: ./build/Dockerfile
@ -626,140 +342,7 @@ jobs:
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
CORSO_BUILD_LDFLAGS=-X 'github.com/alcionai/corso/src/internal/events.RudderStackWriteKey=${{ secrets.RUDDERSTACK_CORSO_WRITE_KEY }}' -X 'github.com/alcionai/corso/src/internal/events.RudderStackDataPlaneURL=${{ secrets.RUDDERSTACK_CORSO_DATA_PLANE_URL }}' -X 'github.com/alcionai/corso/src/internal/version.Version=${{ needs.SetEnv.outputs.version }}'
CORSO_BUILD_LDFLAGS=-X 'github.com/alcionai/corso/src/internal/events.RudderStackWriteKey=${{ secrets.RUDDERSTACK_CORSO_WRITE_KEY }}' -X 'github.com/alcionai/corso/src/internal/events.RudderStackDataPlaneURL=${{ secrets.RUDDERSTACK_CORSO_DATA_PLANE_URL }}' -X 'github.com/alcionai/corso/src/cli.version=$(git describe --exact-match --tags $(git rev-parse HEAD) 2>/dev/null || echo unreleased)-$(git rev-parse --short HEAD)'
# use the github cache
cache-from: type=gha
cache-to: type=gha,mode=max
Validate-Linux-Artifacts:
needs: [Publish-Binary, Publish-Image, SetEnv]
environment: Testing
runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/tags/')
env:
CORSO_VERSION: ${{ needs.SetEnv.outputs.version }}
steps:
- name: Validate x86_64 binary artifacts
run: |
set -ex
curl -L https://github.com/alcionai/corso/releases/download/${{ env.CORSO_VERSION }}/corso_${{ env.CORSO_VERSION }}_Linux_x86_64.tar.gz > corso.tar.gz
tar -xf corso.tar.gz
./corso --help
./corso --version 2>&1 | grep -E "version: ${{ env.CORSO_VERSION }}$"
- name: Validate arm64 binary artifacts
uses: uraimo/run-on-arch-action@v2
with:
arch: armv7
distro: ubuntu18.04
install: |
apt-get -y update && apt-get -y install curl
run: |
set -ex
sudo apt-get update && sudo apt-get install curl
curl -L https://github.com/alcionai/corso/releases/download/${{ env.CORSO_VERSION }}/corso_${{ env.CORSO_VERSION }}_Linux_arm64.tar.gz > corso.tar.gz
tar -xf corso.tar.gz
./corso --help
./corso --version 2>&1 | grep -E "version: ${{ env.CORSO_VERSION }}$"
Validate-Docker-Artifacts:
needs: [Publish-Binary, Publish-Image, SetEnv]
environment: Testing
runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/tags/')
env:
CORSO_VERSION: ${{ needs.SetEnv.outputs.version }}
steps:
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Validate amd64 container images
run: |
docker run --platform linux/amd64 ${{ env.IMAGE_NAME }}:${{ env.CORSO_VERSION }} --help
docker run --platform linux/amd64 ${{ env.IMAGE_NAME }}:${{ env.CORSO_VERSION }} --version | grep -E "version: ${{ env.CORSO_VERSION }}$"
- name: Validate arm64 container images
run: |
docker run --platform linux/arm64 ${{ env.IMAGE_NAME }}:${{ env.CORSO_VERSION }} --help
docker run --platform linux/arm64 ${{ env.IMAGE_NAME }}:${{ env.CORSO_VERSION }} --version | grep -E "version: ${{ env.CORSO_VERSION }}$"
Validate-MacOS-Artifacts:
needs: [Publish-Binary, Publish-Image, SetEnv]
environment: Testing
runs-on: macos-latest
if: startsWith(github.ref, 'refs/tags/')
env:
CORSO_VERSION: ${{ needs.SetEnv.outputs.version }}
steps:
- name: Validate x86_64 binary artifacts
run: |
set -ex
curl -L https://github.com/alcionai/corso/releases/download/${{ env.CORSO_VERSION }}/corso_${{ env.CORSO_VERSION }}_Darwin_x86_64.tar.gz > corso.tar.gz
tar -xf corso.tar.gz
./corso --help
./corso --version 2>&1 | grep -E "version: ${{ env.CORSO_VERSION }}$"
- name: Validate arm64 binary artifacts
run: |
set -ex
curl -L https://github.com/alcionai/corso/releases/download/${{ env.CORSO_VERSION }}/corso_${{ env.CORSO_VERSION }}_Darwin_arm64.tar.gz > corso.tar.gz
tar -xf corso.tar.gz
# TODO: test/validate arm64 executable once we have arm64 CI
Validate-Windows-Artifacts:
needs: [Publish-Binary, Publish-Image, SetEnv]
environment: Testing
runs-on: windows-latest
if: startsWith(github.ref, 'refs/tags/')
env:
CORSO_VERSION: ${{ needs.SetEnv.outputs.version }}
steps:
- name: Validate x86_64 binary artifacts
run: |
curl -L https://github.com/alcionai/corso/releases/download/${{ env.CORSO_VERSION }}/corso_${{ env.CORSO_VERSION }}_Windows_x86_64.zip -o corso.zip
7z x corso.zip
./corso.exe --help
./corso.exe --version 2>&1 | grep -E "version: ${{ env.CORSO_VERSION }}$"
Publish-Website-Test:
needs: [Test-Suite-Trusted, Source-Code-Linting, Website-Linting, SetEnv]
environment: ${{ needs.SetEnv.outputs.environment }}
runs-on: ubuntu-latest
if: github.ref == 'refs/heads/main'
steps:
- uses: actions/checkout@v4 # need to checkout to make the action available
- name: Publish website
uses: ./.github/actions/publish-website
with:
aws-iam-role: ${{ secrets.AWS_IAM_ROLE }}
bucket: ${{ needs.SetEnv.outputs.website-bucket }}
cfid: ${{ needs.SetEnv.outputs.website-cfid }}
Publish-Website-Prod:
needs: [SetEnv, Validate-Linux-Artifacts, Validate-MacOS-Artifacts, Validate-Docker-Artifacts, Validate-Windows-Artifacts]
environment: ${{ needs.SetEnv.outputs.environment }}
runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/tags/')
steps:
- uses: actions/checkout@v4 # need to checkout to make the action available
- name: Publish website
uses: ./.github/actions/publish-website
with:
aws-iam-role: ${{ secrets.AWS_IAM_ROLE }}
bucket: ${{ needs.SetEnv.outputs.website-bucket }}
cfid: ${{ needs.SetEnv.outputs.website-cfid }}
Validate-Website-Artifacts:
needs: [Publish-Website-Prod, SetEnv]
environment: Testing
runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/tags/')
env:
CORSO_VERSION: ${{ needs.SetEnv.outputs.version }}
steps:
- name: Validate docs
run: |
curl https://corsobackup.io/docs/quickstart/ | grep https://github.com/alcionai/corso/releases/download/${{ env.CORSO_VERSION }}/corso_${{ env.CORSO_VERSION }}_Linux_x86_64.tar.gz
cache-to: type=gha,mode=max
@ -1,6 +1,5 @@
name: CI Test Cleanup
on:
workflow_dispatch:
schedule:
# every half hour
- cron: "*/30 * * * *"
@ -9,77 +8,50 @@ jobs:
Test-User-Data-Cleanup:
environment: Testing
runs-on: ubuntu-latest
continue-on-error: true
strategy:
matrix:
user: [CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, ""]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
ref: ${{ github.head_ref }}
# sets the maximum time to now-30m.
# CI tests have a 20 minute timeout.
- name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v3
with:
go-version-file: src/go.mod
# sets the maximum time to now-30m.
# CI tests have a 10 minute timeout.
# At 20 minutes ago, we should be safe from conflicts.
# The additional 10 minutes is just to be good citizens.
- name: Set purge boundary
run: echo "HALF_HOUR_AGO=$(date -d '30 minutes ago' -u +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV
run: |
echo "HALF_HOUR_AGO=$(date -d '30 minutes ago' -u +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV
- name: Purge CI-Produced Folders for Users
uses: ./.github/actions/purge-m365-data
with:
user: ${{ vars[matrix.user] }}
folder-prefix: ${{ vars.CORSO_M365_TEST_PREFIXES }}
older-than: ${{ env.HALF_HOUR_AGO }}
azure-client-id: ${{ secrets.CLIENT_ID }}
azure-client-secret: ${{ secrets.CLIENT_SECRET }}
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
# run the folder purge
- name: Purge primary user folders
working-directory: ./src
env:
AZURE_CLIENT_ID: ${{ secrets.CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.CLIENT_SECRET }}
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
CORSO_M356_TEST_USER_ID: ${{ secrets.CORSO_M356_TEST_USER_ID }}
DELETE_FOLDER_PREFIX: "Corso_Restore_"
run: >
go run ./cmd/purge/purge.go
--user ${{ secrets.CORSO_M356_TEST_USER_ID }}
--prefix ${{ env.DELETE_FOLDER_PREFIX }}
--before ${{ env.HALF_HOUR_AGO }}
- name: Notify failure in teams
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] ${{ vars[matrix.user] }} CI Cleanup"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
Test-Site-Data-Cleanup:
environment: Testing
runs-on: ubuntu-latest
continue-on-error: true
strategy:
matrix:
site: [CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL]
steps:
- uses: actions/checkout@v4
# sets the maximum time to now-30m.
# CI test have a 20 minute timeout.
- name: Set purge boundary
run: echo "HALF_HOUR_AGO=$(date -d '30 minutes ago' -u +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV
- name: Purge CI-Produced Folders for Sites
uses: ./.github/actions/purge-m365-data
with:
site: ${{ vars[matrix.site] }}
folder-prefix: ${{ vars.CORSO_M365_TEST_PREFIXES }}
libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
library-prefix: ${{ vars.CORSO_M365_TEST_PREFIXES }}
older-than: ${{ env.HALF_HOUR_AGO }}
azure-client-id: ${{ secrets.CLIENT_ID }}
azure-client-secret: ${{ secrets.CLIENT_SECRET }}
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
- name: Notify failure in teams
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] ${{ vars[matrix.site] }} CI Cleanup"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
- name: Purge secondary user folders
working-directory: ./src
env:
AZURE_CLIENT_ID: ${{ secrets.CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.CLIENT_SECRET }}
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
CORSO_M356_TEST_USER_ID: ${{ secrets.CORSO_SECONDARY_M356_TEST_USER_ID }}
DELETE_FOLDER_PREFIX: "Corso_Restore_"
run: >
go run ./cmd/purge/purge.go
--user ${{ secrets.CORSO_SECONDARY_M356_TEST_USER_ID }}
--prefix ${{ env.DELETE_FOLDER_PREFIX }}
--before ${{ env.HALF_HOUR_AGO }}
@ -1,8 +1,10 @@
name: Nightly Load Testing
on:
schedule:
# every day at 03:59 GMT (roughly 8pm PST)
- cron: "59 3 * * *"
# every day at 01:59 (01:59am) UTC
# - cron: "59 1 * * *"
# temp, for testing: every 4 hours
- cron: "0 */4 * * *"
permissions:
# required to retrieve AWS credentials
@ -16,51 +18,41 @@ concurrency:
jobs:
Load-Tests:
environment: Load Testing
environment: Testing
runs-on: ubuntu-latest
# Skipping load testing for now. These tests need some love to get up and
# running properly, and it's better not to fight for resources with tests
# that are guaranteed to fail.
if: false
defaults:
run:
working-directory: src
steps:
- uses: actions/checkout@v4
- name: Build the otel-daemon
run: make build-otel-daemon
# TODO: write logs to a file in src/testlog for archival
- name: Run the otel-daemon
run: make otel-daemon
- uses: actions/checkout@v3
- name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v4
uses: magnetikonline/action-golang-cache@v3
with:
go-version-file: src/go.mod
- run: mkdir ${{ github.workspace }}/testlog
- run: mkdir test_results
# Install gotestfmt
- name: Set up gotestfmt
run: go install github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
run: go install github.com/haveyoudebuggedit/gotestfmt/v2/cmd/gotestfmt@latest
# AWS creds
- name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v1
with:
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
role-session-name: integration-testing
aws-region: us-east-1
# run the tests
- name: Integration Tests
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY_SECRET }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
AZURE_CLIENT_ID: ${{ secrets.CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.CLIENT_SECRET }}
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
CORSO_LOAD_TESTS: true
CORSO_LOG_FILE: ${{ github.workspace }}/testlog/run-load.log
CORSO_M365_LOAD_TEST_USER_ID: ${{ secrets.CORSO_M365_LOAD_TEST_USER_ID }}
CORSO_M365_LOAD_TEST_ORG_USERS: ${{ secrets.CORSO_M365_LOAD_TEST_ORG_USERS }}
CORSO_PASSPHRASE: ${{ secrets.CORSO_PASSPHRASE }}
IGNORE_LOAD_TEST_USER_ID: ${{ vars.EXT_SDK_TEST_USER_ID }}
LOG_GRAPH_REQUESTS: true
run: |
set -euo pipefail
go test \
@ -73,88 +65,31 @@ jobs:
-memprofile=mem.prof \
-mutexprofile=mutex.prof \
-trace=trace.out \
-outputdir=testlog \
-outputdir=test_results \
./pkg/repository/repository_load_test.go \
2>&1 | tee ${{ github.workspace }}/testlog/gotest-load.log | gotestfmt -hide successful-tests
# generate new entries to roll into the next load test
# only runs if the test was successful
- name: New Data Creation
working-directory: ./src/cmd/factory
env:
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
CORSO_M365_LOAD_TEST_USER_ID: ${{ secrets.CORSO_M365_LOAD_TEST_USER_ID }}
run: |
go run . exchange emails \
--mailbox ${{ env.CORSO_M365_LOAD_TEST_USER_ID }} \
--destination lt_${{ env.NOW }} \
--count 10
go run . exchange contacts \
--mailbox ${{ env.CORSO_M365_LOAD_TEST_USER_ID }} \
--destination lt_${{ env.NOW }} \
--count 10
go run . exchange events \
--mailbox ${{ env.CORSO_M365_LOAD_TEST_USER_ID }} \
--destination lt_${{ env.NOW }} \
--count 10
- name: Put Down the Daemons Arisen
if: always()
run: docker kill otel-daemon
2>&1 | tee ./test_results/goloadtest.log | gotestfmt -hide successful-tests
# package all artifacts for later review
- name: Upload Log, Profilers, Traces
if: always()
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: load-test-profiling
path: ${{ github.workspace }}/testlog/*
path: src/test_results/*
if-no-files-found: error
retention-days: 14
setup:
environment: Load Testing
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.build.outputs.matrix }}
steps:
- uses: actions/checkout@v4
- id: build
run: |
u=$(echo "${{ secrets.CORSO_M365_LOAD_TEST_ORG_USERS }}" | sed 's/\[/["/g' | sed 's/\]/"]/g' | sed 's/|/","/g')
marr=$(cat <<-end
{ "user": $u }
end
)
m=$(echo $marr | jq -c .)
echo "matrix=$m" >> $GITHUB_OUTPUT
purge-load-test-user-data:
needs: [setup, Load-Tests]
if: always()
environment: Load Testing
runs-on: ubuntu-latest
strategy:
matrix:
user: [ CORSO_M365_LOAD_TEST_USER_ID, '' ]
steps:
- uses: actions/checkout@v4
- name: Set folder boundary datetime
run: |
echo "NOW=$(date -u +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV
- name: Purge Load-Test-Produced Folders
uses: ./.github/actions/purge-m365-data
with:
user: ${{ secrets[matrix.user] }}
folder-prefix: ${{ vars.CORSO_M365_TEST_PREFIXES }}
older-than: ${{ env.HALF_HOUR_AGO }}
azure-client-id: ${{ secrets.CLIENT_ID }}
azure-client-secret: ${{ secrets.CLIENT_SECRET }}
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
# cleanup folders produced by load test
- name: Restored Folder Purge
if: always()
working-directory: ./src
env:
AZURE_CLIENT_ID: ${{ secrets.CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.CLIENT_SECRET }}
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
DELETE_FOLDER_PREFIX: "Corso_Restore_"
run: >
go run ./cmd/purge/purge.go
--user '*'
--prefix ${{ env.DELETE_FOLDER_PREFIX }}
@ -1,396 +0,0 @@
name: Longevity Testing
on:
schedule:
# Run every day at 04:00 GMT (roughly 8pm PST)
- cron: "0 4 * * *"
workflow_dispatch:
inputs:
user:
description: "User to run longevity test on"
permissions:
# required to retrieve AWS credentials
id-token: write
contents: write
# cancel currently running jobs if a new version of the branch is pushed
concurrency:
group: longevity_testing-${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
SetM365App:
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
Longevity-Tests:
needs: [SetM365App]
environment: Testing
runs-on: ubuntu-latest
env:
# Need these in the local env so that corso can read them
AZURE_CLIENT_ID: ${{ secrets[needs.SetM365App.outputs.client_id_env] }}
AZURE_CLIENT_SECRET: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }}
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
# re-used values
CORSO_LOG_DIR: ${{ github.workspace }}/src/testlog
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-longevity.log
RESTORE_DEST_PFX: Corso_Test_Longevity_
TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || vars.CORSO_M365_TEST_USER_ID }}
PREFIX: "longevity"
# Options for retention.
RETENTION_MODE: GOVERNANCE
# Time to retain blobs for in hours.
RETENTION_DURATION: 216
defaults:
run:
working-directory: src
############################################################################
# setup
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0 # needed to get latest tag
- name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v4
with:
go-version-file: src/go.mod
- run: |
go build -o longevity-test ./cmd/longevity_test
go build -o s3checker ./cmd/s3checker
- name: Get version string
id: version
run: |
echo version=$(git describe --tags --abbrev=0) | tee -a $GITHUB_OUTPUT
# Checkout the .github directory at the original branch's ref so we have a
# stable view of the actions.
- name: Code Checkout
working-directory: ${{ github.workspace }}
run: |
git checkout ${{ steps.version.outputs.version }}
git checkout ${{ github.ref }} -- .github
- run: go build -o corso
timeout-minutes: 10
- run: mkdir ${CORSO_LOG_DIR}
# Use shorter-lived credentials obtained from assume-role since these
# runs haven't been taking long.
- name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v4
timeout-minutes: 10
with:
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
role-session-name: integration-testing
aws-region: us-east-1
##########################################################################
# Repository commands
- name: Version Test
timeout-minutes: 10
run: |
./corso --version | grep -c 'Corso version:'
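# grep -c prints the number of matching lines but still exits non-zero when
# there are no matches, so this step fails if the version banner is absent.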
- name: Repo init test
id: repo-init
timeout-minutes: 10
run: |
set -euo pipefail
echo -e "\nRepo init test\n" >> ${{ env.CORSO_LOG_FILE }}
./corso repo init s3 \
--no-stats \
--hide-progress \
--retention-mode $(echo "${{ env.RETENTION_MODE }}" | tr '[:upper:]' '[:lower:]') \
--retention-duration "${{ env.RETENTION_DURATION }}h" \
--extend-retention \
--prefix ${{ env.PREFIX }} \
--bucket ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
if grep -q 'Failed to' ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
then
echo "Repo could not be initialized"
exit 1
fi
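# A sketch of what the flags above request: RETENTION_DURATION=216h is nine
# days of retention, presumably backed by S3 Object Lock in GOVERNANCE mode;
# the tr call only lowercases the mode name for the CLI flag.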
- name: Repo connect test
timeout-minutes: 10
run: |
set -euo pipefail
echo -e "\nRepo connect test\n" >> ${{ env.CORSO_LOG_FILE }}
./corso repo connect s3 \
--no-stats \
--hide-progress \
--prefix ${{ env.PREFIX }} \
--bucket ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-connect.log
if ! grep -q 'Connected to S3 bucket' ${{ env.CORSO_LOG_DIR }}/gotest-repo-connect.log
then
echo "Repo could not be connected"
exit 1
fi
##########################################################################
# Exchange
- name: Backup exchange test
id: exchange-test
timeout-minutes: 30
run: |
echo -e "\nBackup Exchange test\n" >> ${CORSO_LOG_FILE}
./corso backup create exchange \
--no-stats \
--mailbox "${TEST_USER}" \
--hide-progress \
--json \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/backup_exchange.txt
resultjson=$(sed -e '1,/Completed Backups/d' ${{ env.CORSO_LOG_DIR }}/backup_exchange.txt )
if [[ $( echo $resultjson | jq -r '.[0] | .stats.errorCount') -ne 0 ]]; then
echo "backup was not successful"
exit 1
fi
data=$( echo $resultjson | jq -r '.[0] | .id' )
echo result=$data >> $GITHUB_OUTPUT
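# The sed expression deletes everything through the "Completed Backups"
# banner, leaving just the JSON array emitted by --json, e.g. the
# illustrative output [{"id":"abcd1234","stats":{"errorCount":0}}]; jq then
# reads the error count and the backup id from its first element.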
##########################################################################
# Onedrive
- name: Backup onedrive test
id: onedrive-test
timeout-minutes: 30
run: |
set -euo pipefail
echo -e "\nBackup OneDrive test\n" >> ${CORSO_LOG_FILE}
./corso backup create onedrive \
--no-stats \
--hide-progress \
--user "${TEST_USER}" \
--json \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/backup_onedrive.txt
resultjson=$(sed -e '1,/Completed Backups/d' ${{ env.CORSO_LOG_DIR }}/backup_onedrive.txt )
if [[ $( echo $resultjson | jq -r '.[0] | .stats.errorCount') -ne 0 ]]; then
echo "backup was not successful"
exit 1
fi
data=$( echo $resultjson | jq -r '.[0] | .id' )
echo result=$data >> $GITHUB_OUTPUT
##########################################################################
# Sharepoint test
- name: Backup sharepoint test
id: sharepoint-test
timeout-minutes: 30
run: |
set -euo pipefail
echo -e "\nBackup SharePoint test\n" >> ${CORSO_LOG_FILE}
./corso backup create sharepoint \
--no-stats \
--hide-progress \
--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" \
--json \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/backup_sharepoint.txt
resultjson=$(sed -e '1,/Completed Backups/d' ${{ env.CORSO_LOG_DIR }}/backup_sharepoint.txt )
if [[ $( echo $resultjson | jq -r '.[0] | .stats.errorCount') -ne 0 ]]; then
echo "backup was not successful"
exit 1
fi
data=$( echo $resultjson | jq -r '.[0] | .id' )
echo result=$data >> $GITHUB_OUTPUT
##########################################################################
# Backup Exchange Deletion test
- name: Backup Delete exchange test
id: delete-exchange-test
timeout-minutes: 30
env:
SERVICE: "exchange"
DELETION_DAYS: 10
run: |
set -euo pipefail
echo -e "\nDelete Backup exchange \n" >> ${CORSO_LOG_FILE}
./longevity-test
##########################################################################
# Backup Onedrive Deletion test
- name: Backup Delete onedrive test
id: delete-onedrive-test
timeout-minutes: 30
env:
SERVICE: "onedrive"
DELETION_DAYS: 10
run: |
set -euo pipefail
echo -e "\nDelete Backup onedrive \n" >> ${CORSO_LOG_FILE}
./longevity-test
##########################################################################
# Backup Sharepoint Deletion test
- name: Backup Delete Sharepoint test
id: delete-sharepoint-test
timeout-minutes: 30
env:
SERVICE: "sharepoint"
DELETION_DAYS: 5
run: |
set -euo pipefail
echo -e "\nDelete Backup sharepoint \n" >> ${CORSO_LOG_FILE}
./longevity-test
##########################################################################
# Export OneDrive Test
- name: OneDrive Export test
timeout-minutes: 30
run: |
set -euo pipefail
echo -e "\Export OneDrive test\n" >> ${CORSO_LOG_FILE}
echo -e "\Export OneDrive test - first entry\n" >> ${CORSO_LOG_FILE}
./corso backup list onedrive 2>/dev/null | tail -n+2 | head -n1 | awk '{print $1}' |
while read -r line; do
./corso export onedrive \
"/tmp/corso-export--$line" \
--no-stats \
--backup "$line" \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_onedrive_first.txt
done
echo -e "\Export OneDrive test - last entry\n" >> ${CORSO_LOG_FILE}
./corso backup list onedrive 2>/dev/null | tail -n1 | awk '{print $1}' |
while read -r line; do
./corso export onedrive \
"/tmp/corso-export--$line" \
--no-stats \
--backup "$line" \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_onedrive_last.txt
done
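# Assumed "backup list" layout: a header row followed by one row per backup,
# so "tail -n+2 | head -n1" picks the first listed backup, "tail -n1" the
# last, and awk '{print $1}' extracts the backup id column.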
##########################################################################
# Export SharePoint Test
- name: SharePoint Export test
timeout-minutes: 30
run: |
set -euo pipefail
echo -e "\Export SharePoint test\n" >> ${CORSO_LOG_FILE}
echo -e "\Export SharePoint test - first entry\n" >> ${CORSO_LOG_FILE}
./corso backup list sharepoint 2>/dev/null | tail -n+2 | head -n1 | awk '{print $1}' |
while read -r line; do
./corso export sharepoint \
"/tmp/corso-export--$line" \
--no-stats \
--backup "$line" \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_sharepoint_first.txt
done
echo -e "\Export SharePoint test - last entry\n" >> ${CORSO_LOG_FILE}
./corso backup list sharepoint 2>/dev/null | tail -n1 | awk '{print $1}' |
while read -r line; do
./corso export sharepoint \
"/tmp/corso-export--$line" \
--no-stats \
--backup "$line" \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_sharepoint_last.txt
done
##########################################################################
# Maintenance test
- name: Maintenance test Daily
id: maintenance-test-daily
timeout-minutes: 30
run: |
set -euo pipefail
echo -e "\n Maintenance test Daily\n" >> ${CORSO_LOG_FILE}
# Run with the force flag so it doesn't fail if the github runner
# hostname isn't what's expected. This is only safe because we can
# guarantee only one runner will be executing maintenance at a time.
./corso repo maintenance --mode metadata \
--no-stats \
--hide-progress \
--force \
--json \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/maintenance_metadata.txt
- name: Maintenance test Weekly
id: maintenance-test-weekly
timeout-minutes: 30
run: |
if [[ $(date +%A) == "Saturday" ]]; then
set -euo pipefail
echo -e "\n Maintenance test Weekly\n" >> ${CORSO_LOG_FILE}
./corso repo maintenance --mode complete \
--no-stats \
--hide-progress \
--force \
--json \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/maintenance_complete.txt
# TODO(ashmrtn): We can also check that non-current versions of
# blobs don't have their retention extended if we want.
#
# Assuming no failures during full maintenance, current versions of
# objects with the below versions should have retention times that
# are roughly (now + RETENTION_DURATION). We can explicitly check
# for this, but leave a little breathing room since maintenance may
# take some time to run.
#
# If we pick a live-retention-duration that is too small then we'll
# start seeing failures. The check for live objects is a lower bound
# check.
#
# Blob prefixes are as follows:
# - kopia.blobcfg - repo-wide config
# - kopia.repository - repo-wide config
# - p - data pack blobs (i.e. file data)
# - q - metadata pack blobs (i.e. manifests, directory listings, etc)
# - x - index blobs
./s3checker \
--bucket ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} \
--prefix ${{ env.PREFIX }} \
--retention-mode ${{ env.RETENTION_MODE }} \
--live-retention-duration "$((${{ env.RETENTION_DURATION }}-1))h" \
--object-prefix "kopia.blobcfg" \
--object-prefix "kopia.repository" \
--object-prefix "p" \
--object-prefix "q" \
--object-prefix "x"
fi
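# $(date +%A) is locale-dependent; the runner is assumed to use an English
# locale, so the complete-maintenance branch only fires on Saturdays.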
##########################################################################
# Logging & Notifications
# Upload the original go test output as an artifact for later review.
- name: Upload test log
if: always()
uses: actions/upload-artifact@v4
with:
name: longevity-test-log
path: src/testlog/*
if-no-files-found: error
retention-days: 14
- name: Notify failure in teams
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] Longevity Test"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
@ -1,122 +0,0 @@
name: Nightly Test Corso
on:
workflow_dispatch:
schedule:
# Run every day at 04:00 GMT (roughly 8pm PST)
- cron: "0 4 * * *"
permissions:
# required to retrieve AWS credentials
id-token: write
contents: write
packages: write
pull-requests: read
# cancel currently running jobs if a new version of the branch is pushed
concurrency:
group: nightly-${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
# ----------------------------------------------------------------------------------------------------
# --- Checkouts ------------------------------------------------------------------------
# ----------------------------------------------------------------------------------------------------
Checkout:
environment: Testing
runs-on: ubuntu-latest
defaults:
run:
working-directory: src
steps:
- uses: actions/checkout@v4
# single setup and sum cache handling here.
# the results will cascade onto both testing and linting.
- name: Setup Golang with cache
uses: ./.github/actions/go-setup-cache
with:
go-version-file: src/go.mod
# SetM365App will decide which M365 app to use for this CI run
SetM365App:
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
# ----------------------------------------------------------------------------------------------------
# --- Nightly Testing -------------------------------------------------------------------
# ----------------------------------------------------------------------------------------------------
Test-Suite-Trusted:
needs: [Checkout, SetM365App]
environment: Testing
runs-on: ubuntu-latest
defaults:
run:
working-directory: src
env:
# Resolve the name of the secret that contains the Azure client ID/secret
AZURE_CLIENT_ID_NAME: ${{ needs.SetM365App.outputs.client_id_env }}
AZURE_CLIENT_SECRET_NAME: ${{ needs.SetM365App.outputs.client_secret_env }}
CLIENT_APP_SLOT: ${{ needs.SetM365App.outputs.client_app_slot }}
steps:
- uses: actions/checkout@v4
- name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v4
with:
go-version-file: src/go.mod
- run: mkdir testlog
# Install gotestfmt
- name: Set up gotestfmt
run: go install github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
# run the tests
- name: Integration Tests
env:
# Use long-lived AWS credentials.
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY_SECRET }}
AZURE_CLIENT_ID: ${{ secrets[env.AZURE_CLIENT_ID_NAME] }}
AZURE_CLIENT_SECRET: ${{ secrets[env.AZURE_CLIENT_SECRET_NAME] }}
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
CORSO_NIGHTLY_TESTS: true
CORSO_E2E_TESTS: true
CORSO_M365_TEST_USER_ID: ${{ vars.CORSO_M365_TEST_USER_ID }}
CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-nightly.log
LOG_GRAPH_REQUESTS: true
S3_BUCKET: ${{ secrets.CI_TESTS_S3_BUCKET }}
run: |
set -euo pipefail
go test \
-tags testing \
-json \
-v \
-p 1 \
-timeout 2h \
./... 2>&1 | tee ./testlog/gotest-nightly.log | gotestfmt -hide successful-tests
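# -p 1 runs test packages one at a time (assumed to avoid contention on the
# shared M365 tenant), and -timeout 2h bounds the whole nightly suite.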
##########################################################################################################################################
# Logging & Notifications
# Upload the original go test output as an artifact for later review.
- name: Upload test log
if: always()
uses: actions/upload-artifact@v4
with:
name: nightly-test-log
path: src/testlog/*
if-no-files-found: error
retention-days: 14
- name: Notify failure in teams
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[COROS FAILED] Nightly Checks"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
@ -1,30 +0,0 @@
# If someone with write access comments "/ok-to-test" on a pull request, emit a repository_dispatch event
name: Ok To Test
on:
issue_comment:
types: [created]
jobs:
ok-to-test:
runs-on: ubuntu-latest
# Only run for PRs, not issue comments
if: ${{ github.event.issue.pull_request }}
steps:
- name: Generate token
id: generate_token
uses: tibdex/github-app-token@v2
with:
app_id: ${{ secrets.APP_ID }}
private_key: ${{ secrets.PRIVATE_KEY }}
- name: Slash Command Dispatch
uses: peter-evans/slash-command-dispatch@v4
env:
TOKEN: ${{ steps.generate_token.outputs.token }}
with:
token: ${{ env.TOKEN }} # GitHub App installation access token
reaction-token: ${{ secrets.GITHUB_TOKEN }}
issue-type: pull-request
commands: ok-to-test
permission: write
@ -1,540 +0,0 @@
name: Sanity Testing
on:
push:
branches:
- main
workflow_dispatch:
inputs:
user:
description: "User to run sanity test on"
permissions:
# required to retrieve AWS credentials
id-token: write
contents: write
# cancel currently running jobs if a new version of the branch is pushed
concurrency:
group: sanity_testing-${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
SetM365App:
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
Sanity-Tests:
needs: [SetM365App]
environment: Testing
runs-on: ubuntu-latest
env:
# Need these in the local env so that corso can read them
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY_SECRET }}
AZURE_CLIENT_ID: ${{ secrets[needs.SetM365App.outputs.client_id_env] }}
AZURE_CLIENT_SECRET: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }}
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
# re-used values
CORSO_LOG_DIR: ${{ github.workspace }}/src/testlog
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-sanity.log
RESTORE_DEST_PFX: Corso_Test_Sanity_
TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || vars.CORSO_M365_TEST_USER_ID }}
defaults:
run:
working-directory: src
##########################################################################################################################################
# setup
steps:
- uses: actions/checkout@v4
- name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v4
with:
go-version-file: src/go.mod
- run: go build -o corso
timeout-minutes: 10
- run: go build -o sanity-test ./cmd/sanity_test
timeout-minutes: 10
- run: mkdir ${CORSO_LOG_DIR}
##########################################################################################################################################
# Pre-Run cleanup
# Unlike CI tests, sanity tests are not expected to run concurrently.
# However, this workflow's concurrency is set to a maximum of one run,
# preferring the latest. If we waited to clean up the produced data until
# after the tests, the run could complete all of the testing yet be
# cancelled before cleanup occurs. Running cleanup before the tests ensures
# we always begin with a clean slate and cannot compound data production.
- name: Set purge boundary
if: always()
run: |
echo "NOW=$(date +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV
- name: Purge CI-Produced Folders for Users
timeout-minutes: 30
uses: ./.github/actions/purge-m365-data
with:
user: ${{ env.TEST_USER }}
folder-prefix: ${{ env.RESTORE_DEST_PFX }}
older-than: ${{ env.NOW }}
azure-client-id: ${{ secrets[needs.SetM365App.outputs.client_id_env] }}
azure-client-secret: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }}
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
- name: Purge CI-Produced Folders for Sites
timeout-minutes: 30
if: always()
uses: ./.github/actions/purge-m365-data
with:
site: ${{ vars.CORSO_M365_TEST_SITE_URL }}
folder-prefix: ${{ env.RESTORE_DEST_PFX }}
libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
older-than: ${{ env.NOW }}
azure-client-id: ${{ secrets[needs.SetM365App.outputs.client_id_env] }}
azure-client-secret: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }}
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
##########################################################################################################################################
# Repository commands
- name: Version Test
timeout-minutes: 10
run: |
./corso --version | grep -c 'Corso version:'
- name: Repo init test
timeout-minutes: 10
id: repo-init
run: |
set -euo pipefail
prefix=$(date +"%Y-%m-%d-%T")
echo -e "\nRepo init test\n" >> ${{ env.CORSO_LOG_FILE }}
./corso repo init s3 \
--no-stats \
--hide-progress \
--prefix $prefix \
--bucket ${{ secrets.CI_TESTS_S3_BUCKET }} \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
if ! grep -q 'Initialized a S3 repository within bucket' ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
then
echo "Repo could not be initialized"
exit 1
fi
echo result="$prefix" >> $GITHUB_OUTPUT
- name: Repo connect test
timeout-minutes: 10
run: |
set -euo pipefail
echo -e "\nRepo connect test\n" >> ${{ env.CORSO_LOG_FILE }}
./corso repo connect s3 \
--no-stats \
--hide-progress \
--prefix ${{ steps.repo-init.outputs.result }} \
--bucket ${{ secrets.CI_TESTS_S3_BUCKET }} \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-connect.log
if ! grep -q 'Connected to S3 bucket' ${{ env.CORSO_LOG_DIR }}/gotest-repo-connect.log
then
echo "Repo could not be connected"
exit 1
fi
# Run maintenance on an empty repo just to make sure the command still
# works.
- name: Repo maintenance test
timeout-minutes: 30
run: |
set -euo pipefail
echo -e "\nRepo maintenance test\n" >> ${{ env.CORSO_LOG_FILE }}
./corso repo maintenance \
--no-stats \
--hide-progress \
--mode complete \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-maintenance.log
##########################################################################################################################################
# Exchange
# generate new entries to roll into the next load test
# only runs if the test was successful
- name: Exchange - Create new data
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
go run . exchange emails \
--user ${{ env.TEST_USER }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }} \
--count 4
- name: Exchange - Backup
timeout-minutes: 30
id: exchange-backup
uses: ./.github/actions/backup-restore-test
with:
service: exchange
kind: first-backup
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
- name: Exchange - Incremental backup
timeout-minutes: 30
id: exchange-backup-incremental
uses: ./.github/actions/backup-restore-test
with:
service: exchange
kind: incremental
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
- name: Exchange - Non delta backup
timeout-minutes: 30
id: exchange-backup-non-delta
uses: ./.github/actions/backup-restore-test
with:
service: exchange
kind: non-delta
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email" --disable-delta'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
- name: Exchange - Incremental backup after non-delta
timeout-minutes: 30
id: exchange-backup-incremental-after-non-delta
uses: ./.github/actions/backup-restore-test
with:
service: exchange
kind: non-delta-incremental
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
##########################################################################################################################################
# Onedrive
# generate new entries for test
- name: OneDrive - Create new data
id: new-data-creation-onedrive
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
go run . onedrive files \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}$suffix \
--count 4
echo result="${suffix}" >> $GITHUB_OUTPUT
- name: OneDrive - Backup
id: onedrive-backup
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: onedrive
kind: first-backup
backup-args: '--user "${{ env.TEST_USER }}"'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
# generate some more entries for incremental check
- name: OneDrive - Create new data (for incremental)
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
go run . onedrive files \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }} \
--count 4
- name: OneDrive - Incremental backup
id: onedrive-incremental
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: onedrive
kind: incremental
backup-args: '--user "${{ env.TEST_USER }}"'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
##########################################################################################################################################
# Sharepoint Library
# generate new entries for test
- name: SharePoint - Create new data
id: new-data-creation-sharepoint
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
go run . sharepoint files \
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}$suffix \
--count 4
echo result="${suffix}" >> $GITHUB_OUTPUT
- name: SharePoint - Backup
id: sharepoint-backup
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: sharepoint
kind: first-backup
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: libraries
# generate some more entries for incremental check
- name: SharePoint - Create new data (for incremental)
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
go run . sharepoint files \
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }} \
--count 4
- name: SharePoint - Incremental backup
id: sharepoint-incremental
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: sharepoint
kind: incremental
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: libraries
##########################################################################################################################################
# Sharepoint Lists
# generate new entries for test
# The `awk | tr | sed` command chain is used to get a comma-separated list of SharePoint list names.
- name: SharePoint Lists - Create new data
id: new-data-creation-sharepoint-lists
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
go run . sharepoint lists \
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}$suffix \
--count 4 |
awk 'NR > 1 {print $2}' | tr '\n' ',' | sed -e 's/,$//' -e 's/^/result=/' |
tee $GITHUB_OUTPUT
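# Illustrative run, assuming the factory prints a header row and then one
# row per created list with the name in column two:
#   ID  Corso_Test_Sanity_2024-01-01_00-00-00_list0
#   ID  Corso_Test_Sanity_2024-01-01_00-00-00_list1
# awk/tr/sed reduce that to a single comma-separated output line:
#   result=Corso_Test_Sanity_2024-01-01_00-00-00_list0,Corso_Test_Sanity_2024-01-01_00-00-00_list1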
# Extracts the common prefix from the SharePoint list names.
- name: SharePoint Lists - Store restore container
id: sharepoint-lists-store-restore-container
run: |
echo ${{ steps.new-data-creation-sharepoint-lists.outputs.result }} |
cut -d',' -f1 |
cut -d'_' -f1,2,3,4,5 |
sed -e 's/^/result=/' |
tee $GITHUB_OUTPUT
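# Continuing the illustrative names above, cut keeps the first list name and
# then its first five "_"-separated fields, recovering the shared prefix:
#   result=Corso_Test_Sanity_2024-01-01_00-00-00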
- name: SharePoint Lists - Backup
id: sharepoint-lists-backup
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: sharepoint
kind: first-backup-lists
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data lists'
restore-args: "--list ${{ steps.new-data-creation-sharepoint-lists.outputs.result }} --destination Corso_Test_Sanity_Restore_$(date +'%Y%m%d_%H%M%S')"
export-args: "--list ${{ steps.new-data-creation-sharepoint-lists.outputs.result }}"
restore-container: "${{ steps.sharepoint-lists-store-restore-container.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: lists
on-collision: copy
# generate some more entries for incremental check
- name: SharePoint Lists - Create new data (for incremental)
id: inc-data-creation-sharepoint-lists
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
go run . sharepoint lists \
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}$suffix \
--count 4 |
awk 'NR > 1 {print $2}' | tr '\n' ',' | sed -e 's/,$//' -e 's/^/result=/' |
tee $GITHUB_OUTPUT
- name: SharePoint Lists - Store restore container (for incremental)
id: sharepoint-lists-store-restore-container-inc
run: |
echo ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }} |
cut -d',' -f1 |
cut -d'_' -f1,2,3,4,5 |
sed -e 's/^/result=/' |
tee $GITHUB_OUTPUT
- name: SharePoint Lists - Incremental backup
id: sharepoint-lists-incremental
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: sharepoint
kind: incremental-lists
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data lists'
restore-args: "--list ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }},${{ steps.new-data-creation-sharepoint-lists.outputs.result }} --destination Corso_Test_Sanity_Restore_$(date +'%Y%m%d_%H%M%S')"
export-args: "--list ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }},${{ steps.new-data-creation-sharepoint-lists.outputs.result }}"
restore-container: "${{ steps.sharepoint-lists-store-restore-container-inc.outputs.result }},${{ steps.sharepoint-lists-store-restore-container.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: lists
on-collision: copy
##########################################################################################################################################
# Groups and Teams
# generate new entries for test
- name: Groups - Create new data
id: new-data-creation-groups
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
go run . sharepoint files \
--site ${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }} \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}$suffix \
--count 4
echo result="${suffix}" >> $GITHUB_OUTPUT
- name: Groups - Backup
id: groups-backup
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: groups
kind: first-backup
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
# generate some more entries for incremental check
- name: Groups - Create new data (for incremental)
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
go run . sharepoint files \
--site ${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }} \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }} \
--count 4
- name: Groups - Incremental backup
id: groups-incremental
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: groups
kind: incremental
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
restore-args: '--site "${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }}" --folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
##########################################################################################################################################
# Logging & Notifications
# Upload the original go test output as an artifact for later review.
- name: Upload test log
if: always()
uses: actions/upload-artifact@v4
with:
name: sanity-test-log
path: ${{ env.CORSO_LOG_DIR }}/*
if-no-files-found: error
retention-days: 14
- name: Notify failure in teams
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] Sanity Tests"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
@ -1,23 +0,0 @@
name: Manually Test Teams Action
on:
workflow_dispatch:
inputs:
msg:
description: 'Message to send:'
required: true
default: 'This is a test message'
jobs:
notify:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Send notification
uses: ./.github/actions/teams-message
with:
msg: ${{ github.event.inputs.msg }}
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
@ -1,66 +0,0 @@
name: Publish website
on:
workflow_dispatch:
permissions:
# required to retrieve AWS credentials
id-token: write
contents: write
packages: write
pull-requests: read
# cancel currently running jobs if a new version of the branch is pushed
concurrency:
group: push-website-${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
SetEnv:
environment: Testing
runs-on: ubuntu-latest
outputs:
version: ${{ steps.version.outputs.version }}
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0 # needed to get latest tag
- name: Get version string
id: version
run: |
echo version=$(git describe --tags --abbrev=0) | tee -a $GITHUB_OUTPUT
# ----------------------------------------------------------------------------------------------------
# --- Website Linting -----------------------------------------------------------------------------------
# ----------------------------------------------------------------------------------------------------
Website-Linting:
needs: [SetEnv]
environment: Testing
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Lint Website
uses: ./.github/actions/website-linting
with:
version: ${{ needs.SetEnv.outputs.version }}
Publish-Website:
needs: [Website-Linting]
environment: Production
runs-on: ubuntu-latest
defaults:
run:
working-directory: website
steps:
- uses: actions/checkout@v4 # need to checkout to make the action available
- name: Publish website
uses: ./.github/actions/publish-website
with:
aws-iam-role: ${{ secrets.AWS_IAM_ROLE }}
bucket: "corsobackup.io"
cfid: "E1W9NGI9YTVZ1A"
.github/workflows/weekly_cleanup.yml
@ -0,0 +1,26 @@
name: Weekly S3 Test Bucket Cleanup
on:
schedule:
# every saturday at 23:59 (11:59pm)
- cron: "59 23 * * 6"
permissions:
# required to retrieve AWS credentials
id-token: write
jobs:
S3-Test-Cleanup:
runs-on: ubuntu-latest
environment: Testing
steps:
- name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v1
with:
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
role-session-name: integration-testing
aws-region: us-east-1
- name: Delete all files in the test bucket
run: |
aws s3 rm s3://${{ secrets.CI_TESTS_S3_BUCKET }} --recursive
.gitignore
@ -4,12 +4,9 @@
*.dll
*.so
*.dylib
.DS_Store
# Test binary, built with `go test -c`
*.test
test_results/
testlog/
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
@ -22,10 +19,9 @@ testlog/
.corso_test.toml
.corso.toml
# Logging
*.log
# Build directories
/bin
/docker/bin
/website/dist
*/test_results/**
@ -1,529 +1,10 @@
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased] (beta)
### Fixed
- Handle the case where an email or event cannot be retrieved from Exchange due to an `ErrorCorruptData` error. Corso will skip over the item but report it in the backup summary.
- Emails attached within other emails are now correctly exported
- Gracefully handle email and post attachments without a name when exporting to eml
- Use correct timezone for event start and end times in Exchange exports (helps fix issues in relative recurrence patterns)
- Fixed an issue causing exports dealing with calendar data to have high memory usage
## [Unreleased]
## [v0.19.0] (beta) - 2024-02-06
[Unreleased]: https://github.com/alcionai/corso/compare/...HEAD
### Added
- Events can now be exported from Exchange backups as .ics files.
- Update repo init configuration to reduce the total number of GET requests sent to the object store when using corso. This most affects repos that have many backups created per day.
- Feature Preview: Corso now supports backup, export & restore of SharePoint lists. Lists backup can be initiated using `corso backup create sharepoint --site <site-url> --data lists`.
- Group mailbox (aka conversations) backup and export support is now officially available. Group mailbox posts can be exported as `.eml` files.
### Fixed
- Retry transient 400 "invalidRequest" errors during onedrive & sharepoint backup.
- Backup attachments associated with group mailbox items.
- Groups and Teams backups no longer fail when a resource has no display name.
- Contacts in-place restore failed if the restore destination was empty.
- Link shares with external users are now backed up and restored as expected
- Ensure persistent repo config is populated on repo init if repo init failed partway through during the previous init attempt.
### Changed
- Running `backup details` on an empty backup now returns a more helpful error message.
- Backup List additionally shows the data category for each backup.
- Remove hidden `--succeed-if-exists` flag for repo init. Repo init will now succeed without error if run on an existing repo with the same passphrase.
### Known issues
- Backing up a group mailbox item may fail if it has a very large number of attachments (500+).
- Event description for exchange exports might look slightly different for certain events.
- Exchange in-place restore may restore items in well-known folders to different folders if the user's well-known folder names are locale-dependent and the locale was updated after the backup was created.
- In-place Exchange contacts restore will merge items in folders named "Contacts" or "contacts" into the default folder.
- External users with access through shared links will not receive these links as they are not sent via email during restore.
- Graph API has limited support for certain column types such as `location`, `hyperlink/picture`, and `metadata`. Restoring SharePoint list items containing these columns will result in differences compared to the original items.
- SharePoint list item attachments are not available due to graph API limitations.
- Group mailbox restore is not supported due to limited Graph API support for creating mailbox items.
- Due to Graph API limitations, any group mailbox items present in subfolders other than Inbox aren't backed up.
## [v0.18.0] (beta) - 2024-01-02
### Fixed
- Handle the case where an email cannot be retrieved from Exchange due to an `ErrorInvalidRecipients` error. In
this case, Corso will skip over the item but report this in the backup summary.
- Fix `ErrorItemNotFound` errors when restoring emails with multiple attachments.
- Avoid Graph SDK `Requests must contain extension changes exclusively.` errors by removing server-populated field from restored event items.
- Improve Group mailbox (conversations) backup performance by only downloading new items or items with modified content.
- Handle cases where Exchange backup stored invalid JSON blobs if there were special characters in the user content. These would result in errors during restore.
### Known issues
- Restoring OneDrive, SharePoint, or Teams & Groups items shared with external users while the tenant or site is configured to not allow sharing with external users will not restore permissions.
### Added
- Contacts can now be exported from Exchange backups as .vcf files
## [v0.17.0] (beta) - 2023-12-11
### Changed
- Memory optimizations for large scale OneDrive and Sharepoint backups.
### Fixed
- Resolved a possible deadlock when backing up Teams Channel Messages.
- Fixed an attachment download failure(ErrorTooManyObjectsOpened) during exchange backup.
## [v0.16.0] (beta) - 2023-11-28
### Added
- Export support for emails in exchange backups as `.eml` files.
- More colorful and informational cli display.
### Changed
- The file extension in Teams messages exports has switched to json to match the content type.
- SDK consumption of the /services/m365 package has shifted from independent functions to a client-based api.
- SDK consumers can now configure the /services/m365 graph api client configuration when constructing a new m365 client.
- Dynamic api rate limiting allows small-scale Exchange backups to complete more quickly.
- Kopia's local config files now use unique filenames that match Corso configurations. This can protect concurrent Corso operations from mistakenly clobbering storage configs during runtime.
### Fixed
- Handle OneDrive folders being deleted and recreated midway through a backup.
- Automatically re-run a full delta query on incremental if the prior backup is found to have malformed prior-state information.
- Retry drive item permission downloads during long-running backups after the jwt token expires and refreshes.
- Retry item downloads during connection timeouts.
## [v0.15.0] (beta) - 2023-10-31
### Added
- Added `corso repo update-passphrase` command to update the passphrase of an existing Corso repository (see the sketch below).
- Added Subject and Message preview to channel messages detail entries
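A minimal sketch of the new command; Corso prompting for the current and new passphrases interactively is an assumption:

```bash
# Rotate the passphrase on an existing repository.
corso repo update-passphrase
```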
### Fixed
- SharePoint backup would fail if any site had an empty display name
- Fix a bug with exports hanging post completion
- Handle 503 errors in nested OneDrive packages
### Changed
- Item Details formatting in Groups and Teams backups
## [v0.14.2] (beta) - 2023-10-17
### Added
- Skips graph calls for expired item download URLs.
- Export operation now shows the stats at the end of the run
### Fixed
- Catch and report cases where a protected resource is locked out of access. SDK consumers have a new errs sentinel that allows them to check for this case.
- Fix a case where missing item LastModifiedTimes could cause incremental backups to fail.
- Email size metadata was incorrectly set to the size of the last attachment. Emails will now correctly report the size of the mail content plus the size of all attachments.
- Improved the filtering capabilities for Groups restore and backup.
- Improved the check to skip OneNote files that cannot be downloaded.
- Fixed Groups backup for non-Team groups.
### Changed
- Groups restore now expects you to specify the site whose backup should be restored.
## [v0.14.0] (beta) - 2023-10-09
### Added
- Enables local or network-attached storage for Corso repositories.
- Reduce backup runtime for OneDrive and SharePoint incremental backups that have no file changes.
- Increase Exchange backup performance by lazily fetching data only for items whose content changed.
- Added `--backups` flag to delete multiple backups in the `corso backup delete` command (see the sketch below).
- Backup now includes all sites that belong to a team, not just the root site.
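A sketch of the multi-delete flag; the comma-separated ID list is an assumption:

```bash
# Delete several Exchange backups in one command (IDs are placeholders).
corso backup delete exchange --backups <backup-id-1>,<backup-id-2>
```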
### Fixed
- Teams Channels that cannot support delta tokens (those without messages) fall back to non-delta enumeration and no longer fail a backup.
### Known issues
- Restoring the data into a different Group from the one it was backed up from is not currently supported
### Other
- Groups and Teams service support is still in feature preview
## [v0.13.0] (beta) - 2023-09-18
### Added
- Groups and Teams service support available as a feature preview! Channel messages and Files are now available for backup and restore in the CLI: `corso backup create groups --group '*'`
- The cli commands for "groups" and "teams" can be used interchangeably, and will operate on the same backup data.
- New permissions are required to backup Channel messages. See the [Corso Documentation](https://corsobackup.io/docs/setup/m365-access/#configure-required-permissions) for complete details.
Even though Channel message restoration is not available, message write permissions are included to cover future integration.
- This is a feature preview, and may be subject to breaking changes based on feedback and testing.
### Changed
- Switched to Go 1.21
- Exported SharePoint libraries now carry a `Libraries` prefix.
### Fixed
- Contacts backups no longer slice root-folder data when Outlook is set to a language other than English.
- Fixed backups failing, under some conditions, when the --disable-incrementals flag was passed and a valid merge base existed.
## [v0.12.0] (beta) - 2023-08-29
### Added
- Added `export` command to export data from OneDrive and SharePoint backups as individual files or as a single zip file.
- Restore commands now accept an optional resource override with the `--to-resource` flag. This allows restores to recreate backup data within different mailboxes, sites, and users.
- Improve `--mask-sensitive-data` logging mode.
- Reliability: Handle connection cancellation and resets observed when backing up or restoring large data sets.
- Reliability: Recover from Graph SDK panics when the Graph API returns incomplete responses.
- Performance: Improve backup delete performance by batching multiple storage operations into a single operation.
### Fixed
- SharePoint document libraries deleted after the last backup can now be restored.
- Restore requires the protected resource to have access to the service being restored.
- SharePoint data from multiple document libraries is no longer merged in exports.
- `corso backup delete` was not removing the backup details data associated with the deleted snapshot.
- Fixed OneDrive restores that could fail with a concurrent map write error.
- Fixed `backup list` display of backups that completed with errors.
- Fixed OneDrive backups that could fail if an item was deleted during the backup.
- Exchange backups no longer fail by attempting to use delta tokens when the user is over quota.
## [v0.11.1] (beta) - 2023-07-20
### Fixed
- Allow repo connect to succeed when a `corso.toml` file was not provided but configuration is specified using environment variables and flags.
## [v0.11.0] (beta) - 2023-07-18
### Added
- Drive items now back up and restore link shares.
- Restore commands now accept an optional top-level restore destination with the `--destination` flag. Setting the destination to '/' will restore items back into their original location.
- Restore commands can specify item collision behavior. Options are Skip (default), Replace, and Copy (see the sketch below).
- Introduced repository maintenance commands to help optimize the repository as well as remove unreferenced data.
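A sketch combining the new flags; lowercase option values are an assumption:

```bash
# Restore into the original location, replacing items that collide.
corso restore onedrive --backup <backup-id> --destination / --collisions replace
```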
### Fixed
- Return a ServiceNotEnabled error when a tenant has no active SharePoint license.
- Added retries for http/2 stream connection failures when downloading large item content.
- SharePoint document libraries that were deleted after the last backup can now be restored.
### Known issues
- If a link share is created for an item with inheritance disabled (via the Graph API), the link shares restored in that item will not be inheritable by children.
- Link shares with password protection can't be restored
## [v0.10.0] (beta) - 2023-06-26
### Added
- Exceptions and cancellations for recurring events are now backed up and restored
- Introduced a URL cache for OneDrive that helps reduce Graph API calls for long running (>1hr) backups
- Improve incremental backup behavior by leveraging information from incomplete backups
- Improve restore performance and memory use for Exchange and OneDrive
### Fixed
- Handle OLE conversion errors when trying to fetch attachments
- Fix uploading large attachments for emails and calendar
- Fixed high memory use in OneDrive backup related to logging
- Return a ServiceNotEnabled error when a tenant has no active SharePoint license.
### Changed
- Switched to Go 1.20
## [v0.9.0] (beta) - 2023-06-05
### Added
- Added ProtectedResourceName to the backup list json output. ProtectedResourceName holds either a UPN or a WebURL, depending on the resource type.
- Rework base selection logic for incremental backups so it's more likely to find a valid base.
- Improve OneDrive restore performance by parallelizing item restores
### Fixed
- Fix Exchange folder cache population error when parent folder isn't found.
- Fix Exchange backup issue caused by incorrect JSON serialization
- Fix issues with the details model containing duplicate entries for API consumers
### Changed
- Restored items are no longer individually listed at the end of a restore when there are more than 15. You can override this with `--verbose`.
## [v0.8.0] (beta) - 2023-05-15
### Added
- Released the --mask-sensitive-data flag, which will automatically obscure private data in logs.
- Added `--disable-delta` flag to disable delta-based backups for Exchange (see the sketch below)
- Permission support for SharePoint libraries.
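A sketch of a non-delta Exchange backup; the `--mailbox` selector shown here is an assumption:

```bash
# Force full (non-delta) enumeration, e.g., for over-quota mailboxes.
corso backup create exchange --mailbox user@example.com --disable-delta
```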
### Fixed
- Graph requests now automatically retry in case of a Bad Gateway or Gateway Timeout.
- POST Retries following certain status codes (500, 502, 504) will re-use the post body instead of retrying with a no-content request.
- Fix nil pointer exception when running an incremental backup on SharePoint where the base backup used an older index data format.
- --user and --mailbox flags have been removed from CLI examples for details and restore commands (they were already not supported, this only updates the docs).
- Improve restore time on large restores by optimizing how items are loaded from the remote repository.
- Remove Exchange item filtering based on M365 item ID via the CLI.
- OneDrive backups no longer include a user's non-default drives.
- OneDrive and SharePoint file downloads will properly redirect from 3xx responses.
- Refined OneDrive rate limiter controls to reduce throttling errors.
- Fix handling of duplicate folders at the same hierarchy level in Exchange. Duplicate folders will be merged during restore operations.
- Fix backup for mailboxes that have used up all their storage quota
- Restored folders no longer appear in the Restore results. Only restored items will be displayed.
### Known Issues
- Restore operations will merge duplicate Exchange folders at the same hierarchy level into a single folder.
- SharePoint SiteGroup permissions are not restored.
- SharePoint document library data can't be restored after the library has been deleted.
## [v0.7.0] (beta) - 2023-05-02
### Added
- Permissions backup for OneDrive is now out of experimental (By default, only newly backed up items will have their permissions backed up. You will have to run a full backup to ensure all items have their permissions backed up.)
- LocationRef is now populated for all services and data types. It should be used in place of RepoRef if a location for an item is required.
- User selection for Exchange and OneDrive can accept either a user PrincipalName or the user's canonical ID.
- Add path information to items that were skipped during backup because they were flagged as malware.
### Fixed
- Fixed permissions restore in latest backup version.
- Incremental OneDrive backups could panic if the delta token expired and a folder was seen and deleted in the course of item enumeration for the backup.
- Incorrectly moving subfolder hierarchy from a deleted folder to a new folder at the same path during OneDrive incremental backup.
- Handle calendar events with no body.
- Items not being deleted if they were created and deleted during item enumeration of a OneDrive backup.
- Enable compression for all data uploaded by kopia.
- SharePoint --folder selectors correctly return items.
- Fixed Exchange CLI args for filtering items
- Skip OneNote items bigger than 2GB (the Graph API prevents downloading them)
- ParentPath in the JSON output for Exchange calendars now shows names instead of IDs.
- Fixed failures when downloading very large numbers of attachments
- Graph API requests that return an ECONNRESET error are now retried.
- Fixed edge case in incremental backups where moving a subfolder, deleting and recreating the subfolder's original parent folder, and moving the subfolder back to where it started would skip backing up unchanged items in the subfolder.
- SharePoint now correctly displays site urls on `backup list`, instead of the site id.
- Drives with a directory containing a folder named 'folder' will now restore without error.
- The CORSO_LOG_FILE env is appropriately utilized if no --log-file flag is provided.
- Fixed Exchange events progress output to show calendar names instead of IDs.
- Fixed reporting no items match if restoring or listing details on an older Exchange backup and filtering by folder.
- Fix backup for mailboxes that have used up all their storage quota
### Known Issues
- Restoring a OneDrive or SharePoint file whose name matches another file's M365 ID may restore both items.
- Exchange event restores will display calendar IDs instead of names in the progress output.
## [v0.6.1] (beta) - 2023-03-21
### Added
- SharePoint library (document files) support: backup, list, details, and restore.
- OneDrive item downloads that return 404 during backup (normally due to external deletion while Corso processes) are now skipped instead of quietly dropped. These items will appear in the skipped list alongside other skipped cases such as malware detection.
- Listing a single backup by ID will also list the skipped and failed items that occurred during the backup. These can be filtered out with the flags `--failed-items hide`, `--skipped-items hide`, and `--recovered-errors hide` (see the sketch below).
- Enable incremental backups for OneDrive if permissions aren't being backed up.
- Show a progress bar while a user's files are enumerated
- Hidden flag to control parallelism for fetching Exchange items (`--fetch-parallelism`). May help reduce `ApplicationThrottled` errors but will slow down backup.
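A sketch of the single-backup listing described above, hiding the extra detail:

```bash
# Show one backup record; hide skipped items and recovered errors.
corso backup list exchange --backup <backup-id> \
  --skipped-items hide --recovered-errors hide
```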
### Fixed
- Fix repo connect not working without a config file
- Fix item re-download on expired links silently being skipped
- Improved permissions backup and restore for OneDrive
### Known Issues
- Owner (Full control) or empty (Restricted View) roles cannot be restored for OneDrive
- OneDrive will not do an incremental backup if permissions are being backed up.
- SharePoint --folder selection in details and restore always return "no items match the specified selectors".
- Event instance exceptions (ie: changes to a single event within a recurring series) are not backed up.
## [v0.5.0] (beta) - 2023-03-13
### Added
- Show owner information when doing backup list in JSON format
- Permissions for groups can now be backed up and restored
- OneDrive files that are flagged as malware get skipped during backup. Skipped files are listed in the backup results as part of the status, including a reference to their categorization, eg: "Completed (0 errors, 1 skipped: 1 malware)".
### Fixed
- Corso-generated .meta files and permissions no longer appear in the backup details.
- Fixed a panic (with recovery) when a user didn't exist in the tenant.
### Known Issues
- Folders and Calendars containing zero items or subfolders are not included in the backup.
- OneDrive files ending in `.meta` or `.dirmeta` are omitted from details and restores.
- Backups generated prior to this version will show `0 errors` when listed, even if error count was originally non-zero.
## [v0.4.0] (beta) - 2023-02-20
### Fixed
- Support for item.Attachment:Mail restore
- Errors from duplicate names in Exchange Calendars
- Resolved an issue where progress bar displays could fail to exit, causing unbounded CPU consumption.
- Fix Corso panic within Docker images
- Debugging with the CORSO_URL_LOGGING env variable no longer causes accidental request failures.
- Don't discover all users when backing up each user in a multi-user backup
### Changed
- When using Restore and Details on Exchange Calendars, the `--event-calendar` flag can now identify calendars by either a Display Name or a Microsoft 365 ID.
- Exchange Calendars storage entries now construct their paths using container IDs instead of display names. This fixes cases where duplicate display names caused system failures.
### Known Issues
- Nested attachments are currently not restored due to an [issue](https://github.com/microsoft/kiota-serialization-json-go/issues/61) discovered in the Graph APIs
- Breaking changes to Exchange Calendar backups.
- The debugging env variable CORSO_URL_LOGGING causes exchange get requests to fail.
- OneDrive files that are flagged as malware consistently fail during backup.
## [v0.3.0] (alpha) - 2023-02-07
### Added
- Document Corso's fault-tolerance and restartability features
- Add retries on timeouts and status code 500 for Exchange
- Increase page size preference for delta requests for Exchange to reduce number of roundtrips
- OneDrive file/folder permissions can now be backed up and restored
- Add `--restore-permissions` flag to toggle restoration of OneDrive permissions
- Add versions to backups so that we can understand/handle older backup formats
### Fixed
- Added additional backoff-retry to all OneDrive queries.
- Users with `null` userType values are no longer excluded from user queries.
- Fix bug when backing up a calendar that has the same name as the default calendar
### Known Issues
- When the same user has permissions to a file and its containing folder, only the folder-level permissions are restored for that user; no separate file-only permission is restored.
- Link shares are not restored
## [v0.2.0] (alpha) - 2023-01-29
### Fixed
- Check if the user specified for an exchange backup operation has a mailbox.
### Changed
- Item.Attachments are disabled from being restored pending the fix for ([#2353](https://github.com/alcionai/corso/issues/2353))
- BetaClient introduced. Enables Corso to interact with SharePoint Page objects. Package is located at `/internal/connector/graph/betasdk`
- Handle case where a user's drive has not been initialized
- Inline attachments (e.g., copy/paste) are discovered and backed up correctly ([#2163](https://github.com/alcionai/corso/issues/2163))
- Guest and External users (for cloud accounts) and non-on-premise users (for systems that use on-prem AD syncs) are now excluded from backup and restore operations.
- Remove the M365 license guid check in OneDrive backup which wasn't reliable.
- Reduced extra socket consumption while downloading multiple drive files.
- Extended timeout boundaries for exchange attachment downloads, reducing risk of cancellation on large files.
- Identify all drives associated with a user or SharePoint site instead of just the results on the first page returned by Graph API.
## [v0.1.0] (alpha) - 2023-01-13
### Added
- Folder entries in backup details now indicate whether an item in the hierarchy was updated
- Incremental backup support for Exchange is now enabled by default.
### Changed
- The selectors Reduce() process will only include details that match the DiscreteOwner, if one is specified.
- New selector constructors will automatically set the DiscreteOwner if given a single-item slice.
- Write logs to disk by default ([#2082](https://github.com/alcionai/corso/pull/2082))
### Fixed
- Issue where repository connect progress bar was clobbering backup/restore operation output.
- Issue where a `backup create exchange` produced one backup record per data type.
- Specifying multiple users in a OneDrive backup (ex: `--user a,b,c`) now properly delimits the input along the commas.
- Updated the list of M365 SKUs used to check if a user has a OneDrive license.
### Known Issues
- `backup list` will not display a resource owner for backups created prior to this release.
## [v0.0.4] (alpha) - 2022-12-23
### Added
- Incremental backup support for Exchange ([#1777](https://github.com/alcionai/corso/issues/1777)). This is currently enabled by specifying the `--enable-incrementals` flag
with the `backup create` command. This functionality will be enabled by default in an upcoming release.
- Folder entries in backup details now include size and modified time for the hierarchy ([#1896](https://github.com/alcionai/corso/issues/1896))
### Changed
- **Breaking Change**:
Changed how backup details are stored in the repository to
improve memory usage ([#1735](https://github.com/alcionai/corso/issues/1735))
- Improve OneDrive backup speed ([#1842](https://github.com/alcionai/corso/issues/1842))
- Upgrade MS Graph SDK libraries ([#1856](https://github.com/alcionai/corso/issues/1856))
- Docs: Add Algolia docsearch to Corso docs ([#1844](https://github.com/alcionai/corso/pull/1844))
- Add an `updated` flag to backup details ([#1813](https://github.com/alcionai/corso/pull/1813))
- Docs: Speed up Windows Powershell download ([#1798](https://github.com/alcionai/corso/pull/1798))
- Switch to Go 1.19 ([#1632](https://github.com/alcionai/corso/pull/1632))
### Fixed
- Fixed retry logic in the Graph SDK that would result in an `400 Empty Payload` error when the request was retried ([1778](https://github.com/alcionai/corso/issues/1778))([msgraph-sdk-go #341](https://github.com/microsoftgraph/msgraph-sdk-go/issues/341))
- Don't error out if a folder was deleted during an exchange backup operation ([#1849](https://github.com/alcionai/corso/pull/1849))
- Docs: Fix CLI auto-generated docs headers ([#1845](https://github.com/alcionai/corso/pull/1845))
## [v0.0.3] (alpha) - 2022-12-05
### Added
- Display backup size in backup list command (#1648) from [meain](https://github.com/meain)
- Improve OneDrive backup performance (#1607) from [meain](https://github.com/meain)
- Improve Exchange backup performance (#1608) from [meain](https://github.com/meain)
- Add flag to retain all progress bars (#1582) from [ryanfkeepers](https://github.com/ryanfkeepers)
- Fix resource owner display on backup list (#1580) from [ryanfkeepers](https://github.com/ryanfkeepers)
### Changed
- Improve logging (#1642) from [ryanfkeepers](https://github.com/ryanfkeepers)
- Generate separate backup for each resource owner (#1609) from [ashmrtn](https://github.com/ashmrtn)
- Print version info to stdout instead of stderr (#1503) from [meain](https://github.com/meain)
## [v0.0.2] (alpha) - 2022-11-14
### Added
- Added AWS X-Ray support for better observability (#1111) from [ryanfkeepers](https://github.com/ryanfkeepers)
- Allow disabling TLS and TLS verification (#1415) from [vkamra](https://github.com/vkamra)
- Add filtering based on path prefix/contains (#1224) from [ryanfkeepers](https://github.com/ryanfkeepers)
- Add info about doc owner for OneDrive files (#1366) from [meain](https://github.com/meain)
- Add end time for Exchange events from (#1366) [meain](https://github.com/meain)
### Changed
- Export `RepoAlreadyExists` error for sdk users (#1136)from [ryanfkeepers](https://github.com/ryanfkeepers)
- RudderStack logger now respects corso logger settings (#1324) from [ryanfkeepers](https://github.com/ryanfkeepers)
## [v0.0.1] (alpha) - 2022-10-24
### New features
- Supported M365 Services
- Exchange - email, events, contacts ([RM-8](https://github.com/alcionai/corso-roadmap/issues/28))
- OneDrive - files ([RM-12](https://github.com/alcionai/corso-roadmap/issues/28))
- Backup workflows
- Create a full backup ([RM-19](https://github.com/alcionai/corso-roadmap/issues/19))
- Create a backup for a specific service and all or some data types ([RM-19](https://github.com/alcionai/corso-roadmap/issues/19))
- Create a backup for all or a specific user ([RM-20](https://github.com/alcionai/corso-roadmap/issues/20))
- Delete a backup manually ([RM-24](https://github.com/alcionai/corso-roadmap/issues/24))
- Restore workflows
- List, filter, and view backup content details ([RM-23](https://github.com/alcionai/corso-roadmap/issues/23))
- Restore one or more items or folders from backup ([RM-28](https://github.com/alcionai/corso-roadmap/issues/28), [RM-29](https://github.com/alcionai/corso-roadmap/issues/29))
- Non-destructive restore to a new folder/calendar in the same account ([RM-30](https://github.com/alcionai/corso-roadmap/issues/30))
- Backup storage
- Zero knowledge encrypted backups with user-controlled passphrase ([RM-6](https://github.com/alcionai/corso-roadmap/issues/6))
- Initialize and connect to an S3-compliant backup repository ([RM-5](https://github.com/alcionai/corso-roadmap/issues/5))
- Miscellaneous
- Optional usage statistics reporting ([RM-35](https://github.com/alcionai/corso-roadmap/issues/35))
[Unreleased]: https://github.com/alcionai/corso/compare/v0.19.0...HEAD
[v0.19.0]: https://github.com/alcionai/corso/compare/v0.18.0...v0.19.0
[v0.18.0]: https://github.com/alcionai/corso/compare/v0.17.0...v0.18.0
[v0.17.0]: https://github.com/alcionai/corso/compare/v0.16.0...v0.17.0
[v0.16.0]: https://github.com/alcionai/corso/compare/v0.15.0...v0.16.0
[v0.15.0]: https://github.com/alcionai/corso/compare/v0.14.2...v0.15.0
[v0.14.2]: https://github.com/alcionai/corso/compare/v0.14.0...v0.14.2
[v0.14.0]: https://github.com/alcionai/corso/compare/v0.13.0...v0.14.0
[v0.13.0]: https://github.com/alcionai/corso/compare/v0.12.0...v0.13.0
[v0.12.0]: https://github.com/alcionai/corso/compare/v0.11.1...v0.12.0
[v0.11.1]: https://github.com/alcionai/corso/compare/v0.11.0...v0.11.1
[v0.11.0]: https://github.com/alcionai/corso/compare/v0.10.0...v0.11.0
[v0.10.0]: https://github.com/alcionai/corso/compare/v0.9.0...v0.10.0
[v0.9.0]: https://github.com/alcionai/corso/compare/v0.8.1...v0.9.0
[v0.8.0]: https://github.com/alcionai/corso/compare/v0.7.1...v0.8.0
[v0.7.0]: https://github.com/alcionai/corso/compare/v0.6.1...v0.7.0
[v0.6.1]: https://github.com/alcionai/corso/compare/v0.5.0...v0.6.1
[v0.5.0]: https://github.com/alcionai/corso/compare/v0.4.0...v0.5.0
[v0.4.0]: https://github.com/alcionai/corso/compare/v0.3.0...v0.4.0
[v0.3.0]: https://github.com/alcionai/corso/compare/v0.2.0...v0.3.0
[v0.2.0]: https://github.com/alcionai/corso/compare/v0.1.0...v0.2.0
[v0.1.0]: https://github.com/alcionai/corso/compare/v0.0.4...v0.1.0
[v0.0.4]: https://github.com/alcionai/corso/compare/v0.0.3...v0.0.4
[v0.0.3]: https://github.com/alcionai/corso/compare/v0.0.2...v0.0.3
[v0.0.2]: https://github.com/alcionai/corso/compare/v0.0.1...v0.0.2
[v0.0.1]: https://github.com/alcionai/corso/tag/v0.0.1


@ -1,12 +1,7 @@
> [!NOTE]
> **The Corso project is no longer actively maintained and has been archived**.
<img src="https://github.com/alcionai/corso/blob/main/docs/static/img/corso_logo.svg?raw=true" alt="Corso Logo" width="100" />
<p align="center">
<img src="https://github.com/alcionai/corso/blob/main/website/static/img/corso_logo.svg?raw=true" alt="Corso Logo" width="100" />
</p>
<h1 align="center">Corso</h1>
# Corso
[![Go Report Card](https://goreportcard.com/badge/github.com/alcionai/corso/src)](https://goreportcard.com/report/github.com/alcionai/corso/src)
[![Discord](https://img.shields.io/badge/discuss-discord-blue)](https://discord.gg/63DTTSnuhT)
[![License](https://img.shields.io/badge/License-Apache_2.0-green.svg)](https://opensource.org/licenses/Apache-2.0)
[![Contributor Covenant](https://img.shields.io/badge/Contributor%20Covenant-2.1-4baaaa.svg)](CODE_OF_CONDUCT.md)
@ -16,26 +11,29 @@ Microsoft 365 data. It provides a reliable, secure, and efficient data protectio
the backup data and have the flexibility to perform backups of their desired service through an intuitive interface.
As Corso evolves, it can become a great building block for more complex data protection workflows.
**Corso is currently in Beta.**
**Corso is currently in ALPHA and should NOT be used in production.**
Corso supports M365 Exchange and OneDrive, with SharePoint and Teams support in active development. Coverage for more
services, possibly beyond M365, will expand based on the interest and needs of the community.
# Getting Started
See the [Corso Quickstart](https://corsobackup.io/docs/quickstart/) on our docs page.
See the [Corso Documentation](https://docs.corsobackup.io) for more information.
# Corso container images
Corso container images are conveniently hosted on [ghcr.io](https://github.com/alcionai/corso/pkgs/container/corso).
For a specific release, use the following command:
```sh
docker pull ghcr.io/alcionai/corso:<release tag>
```
# Building Corso
To learn more about working with the project source core and building Corso, see the
[Developer section](https://corsobackup.io/docs/developers/build) of the Corso Documentation.
# Roadmap
You can learn more about the Corso roadmap and how to interpret it [here](https://github.com/alcionai/corso-roadmap).
If you have feature requests, please file a [GitHub issue](https://github.com/alcionai/corso/issues/)
and attach the `enhancement` label to the issue.
[Developer section](https://docs.corsobackup.io/developers/build) of the Corso Documentation.
# Contribution Guidelines


@ -1,7 +0,0 @@
# Security Policy
## Reporting a Vulnerability
If you find a security vulnerability, please do not file a public GitHub
issue. Please report this to security@corsobackup.io. In most cases, we
will respond within 48 hours or less.


@ -1,12 +1,12 @@
FROM golang:1.21-alpine as builder
FROM golang:1.18-alpine as builder
WORKDIR /go/src/app
COPY src .
ARG CORSO_BUILD_LDFLAGS=""
ARG CORSO_BUILD_LDFLAGS="" # ldflags
RUN go build -o corso -ldflags "$CORSO_BUILD_LDFLAGS"
FROM alpine:3
FROM alpine:3.16
LABEL org.opencontainers.image.title="Corso"
LABEL org.opencontainers.image.description="Free, Secure, and Open-Source Backup for Microsoft 365"
@ -16,12 +16,14 @@ LABEL org.opencontainers.image.vendor="Alcion, Inc."
COPY --from=builder /go/src/app/corso /corso
RUN apk add --no-cache ca-certificates
# Pull tls certs directly from latest upstream image
COPY --from=alpine:latest /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/
ENV CORSO_HOME=/app/corso
ENV CORSO_CONFIG_DIR=$CORSO_HOME \
KOPIA_CONFIG_PATH=$CORSO_HOME/kopia/config/repository.config \
KOPIA_LOG_DIR=$CORSO_HOME/kopia/logs \
KOPIA_CACHE_DIRECTORY=$CORSO_HOME/kopia/cache \
RCLONE_CONFIG=$CORSO_HOME/kopia/rclone/rclone.conf \
KOPIA_PERSIST_CREDENTIALS_ON_CONNECT=false \
KOPIA_CHECK_FOR_UPDATES=false


@ -2,44 +2,40 @@
set -e
usage() {
echo "Usage: $(basename $0) binary | image [--platforms ...] [--tag ...]"
echo ""
echo "OPTIONS"
echo " -p | --platforms Platforms to build for (default: $PLATFORMS)"
echo " Specify multiple platforms using ',' (eg: linux/amd64,darwin/arm)"
echo " -t | --tag Tag for container image (default: $TAG)"
}
ROOT=$(dirname $(dirname $(readlink -f $0)))
GOVER=1.21 # go version
GOVER=1.18 # go version
CORSO_BUILD_CACHE="/tmp/.corsobuild" # shared persistent cache
# Figure out os and architecture
case "$(uname -m)" in
x86_64) GOARCH="amd64" ;;
aarch64) GOARCH="arm64" ;;
arm64) GOARCH="arm64" ;;
arm) GOARCH="arm" ;;
i386) GOARCH="386" ;;
*) echo "Unknown architecture" && exit 0 ;;
esac
case "$(uname)" in
Linux) GOOS="linux" ;;
Darwin) GOOS="darwin" ;;
Darwin) GOOS="darwin" ;; # TODO: verify this
*) echo "Unknown OS" && exit 0 ;;
esac
PLATFORMS="$GOOS/$GOARCH" # default platform
TAG="alcionai/corso" # default image tag
usage() {
echo "Usage: $(basename $0) <binary|image> [--platforms ...] [--tag ...]"
echo ""
echo "OPTIONS"
echo " -p|--platforms Platforms to build for (default: $PLATFORMS)"
echo " Specify multiple platforms using ',' (eg: linux/amd64,darwin/arm)"
echo " -t|--tag Tag for container image (default: $TAG)"
}
MODE="binary"
case "$1" in
binary) MODE="binary" && shift ;;
image)
MODE="image"
shift
GOOS="linux" # darwin container images are not a thing
;;
image) MODE="image" && shift ;;
-h | --help) usage && exit 0 ;;
*) usage && exit 1 ;;
esac
@ -62,31 +58,24 @@ if [ "$MODE" == "binary" ]; then
printf "Building for %s...\r" "$platform"
docker run --rm \
--mount type=bind,src="${ROOT}",dst="/app" \
--mount type=bind,src="${CORSO_BUILD_CACHE}",dst="${CORSO_BUILD_CACHE}" \
--env GOMODCACHE="${CORSO_BUILD_CACHE}/mod" --env GOCACHE="${CORSO_BUILD_CACHE}/cache" \
--mount type=bind,src=${ROOT},dst="/app" \
--mount type=bind,src=${CORSO_BUILD_CACHE},dst=${CORSO_BUILD_CACHE} \
--env GOMODCACHE=${CORSO_BUILD_CACHE}/mod --env GOCACHE=${CORSO_BUILD_CACHE}/cache \
--env GOOS=${GOOS} --env GOARCH=${GOARCH} \
--workdir "/app/src" \
golang:${GOVER} \
go build -o corso -ldflags "${CORSO_BUILD_LDFLAGS}"
OUTFILE="corso"
[ "$GOOS" == "windows" ] && OUTFILE="corso.exe"
mkdir -p "${ROOT}/bin/${GOOS}-${GOARCH}"
mv "${ROOT}/src/corso" "${ROOT}/bin/${GOOS}-${GOARCH}/${OUTFILE}"
echo Corso $platform binary available in "${ROOT}/bin/${GOOS}-${GOARCH}/${OUTFILE}"
mkdir -p ${ROOT}/bin/${GOOS}-${GOARCH}
mv ${ROOT}/src/corso ${ROOT}/bin/${GOOS}-${GOARCH}/corso
echo Corso $platform binary available in ${ROOT}/bin/${GOOS}-${GOARCH}/corso
done
else
for platform in ${PLATFORMS/,/ }; do
echo "$platform" | grep -Eq "^darwin" &&
echo Cannot create darwin images "($platform)" && exit 1
done
echo Building "$TAG" image for "$PLATFORMS"
docker buildx build --tag ${TAG} \
--platform ${PLATFORMS} \
--file "${ROOT}/build/Dockerfile" \
--file ${ROOT}/build/Dockerfile \
--build-arg CORSO_BUILD_LDFLAGS="$CORSO_BUILD_LDFLAGS" \
--load "${ROOT}"
--load ${ROOT}
echo Built container image "$TAG"
fi

design/cli.md (new file)

@ -0,0 +1,214 @@
# CLI Commands
## Status
Revision: v0.0.1
-----
This is a proposal for Corso cli commands extrapolated from the Functional Requirements product documentation. Open questions are listed in the `Details & Discussion` section. The command set includes some p1/p2 actions for completeness. This proposal only intends to describe the available commands themselves and does not evaluate functionality or feature design beyond that goal.
# CLI Goals
- Ease (and enjoyment) of Use, more than minimal functionality.
- Intended for use by Humans, not Computers.
- Outputs should be either interactive/progressive (for ongoing work) or easily greppable/parseable.
## Todo/Undefined:
- Interactivity and sub-selection/helpful action completion within command operation.
- Quality-of-life and niceties such as interactive/output display, formatting and presentation, or maximum minimization of user effort to run Corso.
-----
## Commands
Standard format:
`corso {command} [{subcommand}] [{service|repository}] [{flag}...]`
| Cmd | | | Flags | Notes |
| --- | --- | --- | --- | --- |
| version | | | | Same as `corso --version` |
| | | | --version | Outputs Corso version details. |
| help | | | | Same as `corso --help` |
| * | * | help | | Same as `{command} --help` |
| * | * | | --help | Same as `{command} help` |
| Cmd | | | Flags | Notes |
| --- | --- | --- | --- | --- |
| repo | * | | | Same as `repo [*] --help`. |
| repo | init | {repository} | | Initialize a Corso repository. |
| repo | init | {repository} | --tenant {azure_tenant_id} | Provides the account's tenant ID. |
| repo | init | {repository} | --client {azure_client_id} | Provides the account's client ID. |
| repo | connect | {repository} | | Connects to the specified repo. |
| repo | configure | {repository} | | Sets mutable config properties to the provided values. |
| repo | * | * | --config {cfg_file_path} | Specify a repo configuration file. Values may also be provided via individual flags and env vars. |
| repo | * | * | --{config-prop} | Blanket commitment to support config via flags. |
| repo | * | * | --credentials {creds_file_path} | Specify a file containing credentials or secrets. Values may also be provided via env vars. |
| Cmd | | | Flags | Notes |
| --- | --- | --- | --- | --- |
| backup | * | | | Same as `backup [*] --help` |
| backup | list | {service} | | List all backups in the repository for the specified service. |
| backup | create | {service} | | Backup the specified service. |
| backup | * | {service} | --token {token} | Provides a security key for permission to perform backup. |
| backup | * | {service} | --{entity} {entity_id}... | Only involve the target entity(s). Entities are things like users, groups, sites, etc. Entity flag support is service-specific. |
| Cmd | | | Flags | Notes |
| --- | --- | --- | --- | --- |
| restore | | | | Same as `restore --help` |
| restore | {service} | | | Complete service restoration using the latest versioned backup. |
| restore | {service} | | --backup {backup_id} | Restore data from only the targeted backup(s). |
| restore | {service} | | --{entity} {entity_id}... | Only involve the target entity(s). Entities are things like users, groups, sites, etc. Entity flag support is service-specific. |
---
## Examples
### Basic Usage
**First Run**
```bash
$ export AZURE_CLIENT_SECRET=my_azure_secret
$ export AWS_SECRET_ACCESS_KEY=my_s3_secret
$ corso repo init s3 --bucket my_s3_bucket --access-key my_s3_key \
--tenant my_azure_tenant_id --clientid my_azure_client_id
$ corso backup express
```
**Follow-up Actions**
```bash
$ corso repo connect s3 --bucket my_s3_bucket --access-key my_s3_key
$ corso backup express
$ corso backup list express
```
-----
# Details & Discussion
## UC0 - CLI User Interface
Base command: `corso`
Standard format: `corso {command} [{subcommand}] [{service}] [{flag}...]`
Examples:
- `corso help`
- `corso repo init --repository s3 --tenant t_1`
- `corso backup create teams`
- `corso restore teams --backup b_1`
## UC1 - Initialization and Connection
**Account Handling**
M365 accounts are paired with repo initialization, resulting in a single-tenancy storage. Any `repo` action applies the same behavior to the account as well. That is, `init` will handle all initialization steps for both the repository and the account, and both must succeed for the command to complete successfully, including all necessary validation checks. Likewise, `connect` will validate and establish a connection (or, at least, the ability to communicate) with both the account and the repository.
**Init**
`corso repo init {repository} --config {cfg} --credentials {creds}`
Initializes a repository, bootstrapping resources as necessary and storing configuration details within Corso. `{repository}` is the name of the repository provider, eg: s3. `{cfg}` and `{creds}`, in this example, point to JSON (or alternatively YAML?) files containing the details required to establish the connection. Configuration options, when known, will get support for flag-based declaration. Similarly, env vars will be supported as needed.
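A sketch of what that invocation could look like under this proposal; the file names and flags are illustrative, not final:

```bash
# Initialize an S3-backed repository from a config file and a credentials file.
corso repo init s3 --config ./s3-repo.json --credentials ./s3-creds.json
```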
**Connection**
`corso repo connect {repository} --credentials {creds}`
[https://docs.flexera.com/flexera/EN/SaaSManager/M365CCIntegration.htm#integrations_3059193938_1840275](https://docs.flexera.com/flexera/EN/SaaSManager/M365CCIntegration.htm#integrations_3059193938_1840275)
Connects to an existing (ie, initialized) repository.
Corso is expected to gracefully handle transient disconnections during backup/restore runtimes (and otherwise, as needed).
**Deletion**
`corso repo delete {repository}`
(Included here for discussion, but not being added to the CLI command set at this time.)
Removes a repository from Corso. More exploration is needed into the cascading effects (or lack thereof) of the command. At minimum, expect additional user involvement to confirm that the deletion is intended and not erroneous.
## UC1.1 - Version
`corso --version` outputs the current version details such as: commit ID and datetime, maybe semver (complete release version details to be decided).
Further versioning controls are not currently covered in this proposal.
## UC2 - Configuration
`corso repo configure --repository {repo} --config {cfg}`
Updates the configuration details for an existing repository.
Configuration is divided between mutable and immutable properties. Generally, initialization-specific configurations (those that identify the storage repository, its connection, and its fundamental behavior), among other properties, are considered immutable and cannot be reconfigured. As a result, `repo configure` will not be able to rectify a misconfigured init; some other user flow will be needed to resolve that issue.
Configure allows mutation of config properties that can be safely and transiently applied. For example: backup retention and expiration policies. A complete list of how each property is classified is forthcoming as we build that list of properties.
## UC3 - On-Demand Backup
`corso backup` is reserved as a non-actionable command, rather than having it kick off a backup action. This is to ensure users don't accidentally kick off a backup in the process of exploring the API. `corso backup` produces the same output as `corso backup --help`.
**Full Service Backup**
- `corso backup create {service}`
**Selective Backup**
- `corso backup create {service} --{entity} {entity_id}...`
Entities are service-applicable objects that match up to m365 objects. Users, groups, sites, mailboxes, etc. Entity flags are available on a per-service basis. For example, `--site` is available for the sharepoint service, and `--mailbox` for express, but not the reverse. A full list of system-entity mappings is coming in the future.
**Examples**
- `corso backup` → displays the help output.
- `corso backup create teams` → generates a full backup of the teams service.
- `corso backup create express --group g_1` → backs up the g_1 group within express.
## UC3.2 - Security Token
(This section is incomplete: further design details are needed about security expression.) Some commands, such as Backup/Restore require a security key declaration to verify that the caller has permission to perform the command.
`corso * * --token {token}`
## UC5 - Backup Ops
`corso backup list {service}`
Produces a list of the backups which currently exist in the repository.
`corso backup list {service} --{entity} {entity_id}...`
The list can be filtered to contain backups relevant to the specified entities. A possible user flow for restoration is for the user to use this to discover which backups match their needs, and then apply those backups in a restore operation.
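A sketch of that discovery-then-restore flow using this proposal's entity flags (service and IDs are illustrative):

```bash
# Find backups that cover mailbox m_1, then restore from a chosen one.
corso backup list express --mailbox m_1
corso restore express --backup b_1 --mailbox m_1
```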
**Expiration Control**
Will appear in a future revision.
## UC6 - Restore
Similar to backup, `corso restore` is reserved as a non-actionable command to serve up the same output as `corso restore --help`.
### UC6.1
**Full Service Restore**
- `corso restore {service} [--backup {backup_id}...]`
If no backups are specified, this defaults to the most recent backup of the specified service.
**Selective Restore**
- `corso restore {service} [--backup {backup_id}...] [--{entity} {entity_id}...]`
Entities are service-applicable objects that match up to m365 objects. Users, groups, sites, mailboxes, etc. Entity flags are available on a per-service basis. For example, `--site` is available for the sharepoint service, and `--mailbox` for express, but not the reverse. A full list of system-entity mappings is coming in the future.
**Examples**
- `corso restore` → displays the help output.
- `corso restore teams` → restores all data in the teams service.
- `corso restore sharepoint --backup b_1` → restores the sharepoint data in the b_1 backup.
- `corso restore express --group g_1` → restores the g_1 group within express.
## UC6.2 - Disaster Recovery
Multi-service backup/restoration is still under review.


@ -4,7 +4,7 @@
# It is not used for deployments.
## Build
FROM golang:1.19 AS base
FROM golang:1.18 AS base
WORKDIR /src
@ -20,7 +20,7 @@ ARG TARGETARCH
RUN GOOS=${TARGETOS} GOARCH=${TARGETARCH} go build -o /corso .
## Deploy
FROM ubuntu:22.10
FROM ubuntu:latest
COPY --from=build /corso /


@ -7,13 +7,7 @@
"TOCInline",
"br",
"img",
"div",
"Tabs",
"TabItem",
"CodeBlock",
"DownloadBinaries",
"details",
"summary"
"div"
]
},
}
}

docs/Dockerfile (new file)

@ -0,0 +1,26 @@
FROM ubuntu:22.04
LABEL MAINTAINER="Niraj Tolia"
ARG DEBIAN_FRONTEND=noninteractive
# NOTE for lines 13,15: update in CI when updating
RUN apt-get -y update && apt-get -y install gpg emacs curl git make \
&& curl -fsSL https://deb.nodesource.com/setup_current.x | bash - \
&& apt-get -y install nodejs \
&& apt-get autoclean \
&& node --version \
&& npm --version \
&& cd /tmp && curl -O -L https://github.com/errata-ai/vale/releases/download/v2.20.1/vale_2.20.1_Linux_64-bit.tar.gz \
&& tar -xvzf vale_2.20.1_Linux_64-bit.tar.gz -C /usr/bin vale \
&& npm install -g markdownlint-cli@0.32.2
WORKDIR /usr/src
COPY package.json package-lock.json* ./
RUN npm ci \
&& npm cache clean --force \
&& rm -f package.json package-lock.json*
ENV PATH /usr/src/node_modules/.bin:$PATH
WORKDIR /usr/src/docs
CMD ["npm", "start", "--", "--host", "0.0.0.0"]

docs/Makefile (new file)

@ -0,0 +1,60 @@
.PHONY: buildimage build dev shell check genclidocs _validatemdgen
# Specify `CORSO_USE_DOCKER=-1` to skip using Docker for builds
CORSO_BUILD_DIR := /tmp/.corsobuild
CORSO_BUILD_CACHE := ${CORSO_BUILD_DIR}/cache
CORSO_BUILD_MOD := ${CORSO_BUILD_DIR}/mod
CORSO_BUILD_BIN := ${CORSO_BUILD_DIR}/bin
CORSO_REPO := /go/src/github.com/alcionai/corso
CORSO_LOCAL_PATH := $(shell git rev-parse --show-toplevel)
DOCSC := docker run --rm -it -p 3000:3000 -v ${PWD}:/usr/src/docs alcion/docs
DOCSE := ${DOCSC} # for enforcing docker container as environment
GOC := docker run --rm -it \
-v ${CORSO_LOCAL_PATH}:${CORSO_REPO} -v ${CORSO_BUILD_DIR}:${CORSO_BUILD_DIR} \
--env GOCACHE=${CORSO_BUILD_CACHE} --env GOMODCACHE=${CORSO_BUILD_MOD} --env GOTMPDIR=${CORSO_BUILD_DIR} \
--workdir ${CORSO_REPO}/src \
golang:1.18
ifeq (${CORSO_USE_DOCKER},-1)
DOCSC :=
GOC := cd ${CORSO_LOCAL_PATH}/src &&
CORSO_REPO := ${CORSO_LOCAL_PATH}
endif
MDGEN_SRC := ${CORSO_REPO}/src/cmd/mdgen/mdgen.go
MDGEN_BINARY := ${CORSO_BUILD_BIN}/mdgen
CLI_DOCS := ${CORSO_REPO}/docs/docs/cli
buildimage:
docker build -t "alcion/docs:latest" .
dev: genclidocs
$(DOCSC) npm start -- --host 0.0.0.0
VALE_TARGET ?= docs README.md
check: genclidocs
$(DOCSC) vale $(VALE_TARGET)
$(DOCSC) markdownlint '**/*.md' --ignore styles/ --ignore src/ --ignore node_modules/
dockershell:
$(DOCSE) /bin/bash
build: genclidocs
$(DOCSC) npm run build
genclidocs: ${MDGEN_BINARY}
@echo 'Auto-generating Corso CLI docs...'
$(DOCSC) rm -rf docs/cli
$(GOC) ${MDGEN_BINARY} --cli-folder ${CLI_DOCS}
_validatemdgen: # in case we have a different architecture
${MDGEN_BINARY} --help >/dev/null || rm -rf ${MDGEN_BINARY}
${MDGEN_BINARY}: $(shell find ${CORSO_LOCAL_PATH}/src -type f -name *.go) $(shell find ${CORSO_LOCAL_PATH}/src -type d ) _validatemdgen
@echo 'Re-building Corso CLI docs auto-gen tooling...'
$(GOC) go mod download
$(GOC) go build -o ${MDGEN_BINARY} ${MDGEN_SRC}
clean:
$(DOCSC) rm -rf docs/cli
$(DOCSC) rm -rf ${CORSO_BUILD_DIR}

docs/README.md (new file)

@ -0,0 +1,69 @@
# Corso documentation
Corso documentation uses [Docusaurus 2](https://docusaurus.io/), a modern static website generator.
[Mermaid](https://mermaid-js.github.io/mermaid/) provides support for native diagrams in Markdown.
## Requirements
Developing documentation for Corso requires the following tools on your machine:
- `make`
- Docker
## Installation
```bash
make buildimage
```
## Live documentation development
```bash
make dev
```
This command starts a local development server within the Docker container and will expose docs at [http://localhost:3000](http://localhost:3000).
## Generating Corso CLI docs
```bash
make genclidocs
```
Corso's CLI documents are auto-generated. This command explicitly triggers generating these docs. This step happens
automatically for the other commands where it's relevant.
## Building static documentation
```bash
make build
```
This command generates static content into the `build` directory for integration with any static contents hosting service.
## Style and linting
```bash
# Lint all docs
make dockercheck
# Lint specific files and/or folders
make dockercheck VALE_TARGET="README.md docs/concepts"
```
This command will lint all Markdown files and check them for style issues using the Docker container.
```bash
make check
```
Same as `make dockercheck` but runs locally. Requires `vale` to be installed.
## Documentation platform development
```bash
make shell
```
Use this command to interactively (and temporarily!) change the contents or
configuration of the live documentation container image (for example, when
experimenting with new plugins).


@ -21,8 +21,7 @@ application to connect to your *M365 tenant* and transfer data during backup and
## Corso concepts {#corso-concepts}
* **Repository** refers to the storage location where Corso securely and efficiently stores encrypted *backups* of your
*M365 Service*'s data. See [Repositories](../repos) for more information.
*M365 Services* data. See [Repositories](/configuration/repos) for more information.
* **Backup** is a copy of a resource of your *M365 Service*'s data to be used for restores in case of deletion, loss,
or corruption of the original data. Corso performs backups incrementally, and each backup only captures data that has
changed between backup iterations.
* **Backup** is a copy of your *M365 Services* data to be used for restores in case of deletion, loss, or corruption of the
original data. Corso performs backups incrementally, and each backup only captures data that has changed between backup iterations.


@ -0,0 +1,81 @@
---
description: "Connect to a Microsft 365 tenant"
---
# Microsoft 365 access
To perform backup and restore operations, Corso requires access to your [M365 tenant](concepts#m365-concepts)
through an [Azure AD application](concepts#m365-concepts) with appropriate permissions.
## Create an Azure AD application
For the official documentation for adding an Azure AD Application and Service Principal using the Azure Portal see
[here](https://docs.microsoft.com/en-us/azure/active-directory/develop/howto-create-service-principal-portal).
The following steps outline a simplified procedure for creating an Azure AD application suitable for use with Corso.
1. **Create a new application**
Select **Azure Active Directory &#8594; App Registrations &#8594; New Registration**
<img src="/img/m365app_create_new.png" className="guideImages"/>
1. **Configure basic settings**
* Give the application a name
* Select **Accounts in this organizational directory only**
* Skip the **Redirect URI** option
<br/><img src="/img/m365app_configure.png" className="guideImages"/>
1. **Configure required permissions**
Select **API Permissions** from the app management panel.
<img src="/img/m365app_permissions.png" className="guideImages"/>
Select the following permissions from **Microsoft API &#8594; Microsoft Graph &#8594; Application Permissions**:
<!-- vale Microsoft.Spacing = NO -->
| API / Permissions Name | Type | Description
|:--|:--|:--|
| Calendars.ReadWrite | Application | Read and write calendars in all mailboxes |
| Contacts.ReadWrite | Application | Read and write contacts in all mailboxes |
| Files.ReadWrite.All | Application | Read and write files in all site collections |
| Mail.ReadWrite | Application | Read and write mail in all mailboxes |
| User.Read.All | Application | Read all users' full profiles |
<!-- vale Microsoft.Spacing = YES -->
1. **Grant admin consent**
<img src="/img/m365app_consent.png" className="guideImages"/>
## Export application credentials
After configuring the Corso Azure AD application, store the information needed by Corso to connect to the application
as environment variables.
### Tenant ID and client ID
To extract the tenant and client ID, select Overview from the app management panel and export the corresponding
environment variables.
```bash
export AZURE_TENANT_ID=<Directory (tenant) ID for configured app>
export AZURE_CLIENT_ID=<Application (client) ID for configured app>
```
<img src="/img/m365app_ids.png" className="guideImages"/>
### Azure client secret
Lastly, you need to configure a client secret associated with the app using **Certificates & Secrets** from the app
management panel.
Click **New Client Secret** and follow the instructions to create a secret. After creating the secret, copy the secret
value right away because it won't be available later and export it as an environment variable.
```bash
export AZURE_CLIENT_SECRET=<client secret value>
```
<img src="/img/m365app_secret.png" className="guideImages"/>


@ -0,0 +1,75 @@
---
description: "Configure backup repository"
---
# Repositories
A Corso [repository](concepts#corso-concepts) stores encrypted copies of your backup data. Repositories are
supported on the following object storage systems:
import TOCInline from '@theme/TOCInline';
<TOCInline toc={toc} maxHeadingLevel={2}/><br/>
:::note
Depending on community interest, Corso may support other object storage backends in the future.
:::
## Amazon S3
### Prerequisites
Before setting up your Corso S3 repository, the following prerequisites must be met:
* The S3 bucket for the repository already exists. Corso won't create it for you.
* You have access to credentials for a user or an IAM role that represents the following permissions
<!-- vale proselint.Annotations = NO -->
**TODO: Verify if these permissions are correct? What about multi-part upload permissions?**
<!-- vale proselint.Annotations = YES -->
```json
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": [
"s3:PutObject",
"s3:GetObject",
"s3:ListBucket",
"s3:DeleteObject",
"s3:GetBucketLocation",
"s3:AbortMultipartUpload",
"s3:ListMultipartUploadParts",
"s3:ListBucketMultipartUploads"
],
"Resource": [
"arn:aws:s3:::<YOUR_BUCKET_NAME>",
"arn:aws:s3:::<YOUR_BUCKET_NAME>/*"
]
}
]
}
```
### Credential setup {#s3-creds-setup}
Corso supports the credential options offered by the AWS SDK for Go. For full details, see the *Specifying Credentials*
section of the [official documentation](https://docs.aws.amazon.com/sdk-for-go/v1/developer-guide/configuring-sdk.html).
* **Environment variables** - set and export `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`. If using temporary
credentials derived by assuming an IAM Role, you will also need `AWS_SESSION_TOKEN`.
* **Credentials file** - ensure that the credentials file is available to Corso (for example, may need to map it if
using Corso as a container). You may also want to set and export `AWS_PROFILE`, if not using the default profile, and
`AWS_SHARED_CREDENTIALS_FILE`, if not using the default file location. You can learn more about the AWS CLI
environment variables [here](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-envvars.html).
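For example, with static credentials (a minimal sketch; values are placeholders):

```bash
# Static credentials via environment variables.
export AWS_ACCESS_KEY_ID=<access key ID>
export AWS_SECRET_ACCESS_KEY=<secret access key>
# Only needed for temporary credentials from an assumed IAM role.
export AWS_SESSION_TOKEN=<session token>
```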
### Initialize repository
Before first use, you need to initialize a Corso repository with `corso repo init s3`. See command details
[here](/cli/corso_repo_init_s3).
If a repository already exists, you can connect to it with `corso repo connect s3`. See command details
[here](/cli/corso_repo_connect_s3).
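As a minimal sketch, assuming the bucket already exists and credentials are exported as above (see the linked command docs for the full flag set):

```bash
# First use: initialize the repository in an existing bucket.
corso repo init s3 --bucket <YOUR_BUCKET_NAME>

# Later runs: connect to the already-initialized repository.
corso repo connect s3 --bucket <YOUR_BUCKET_NAME>
```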


@ -0,0 +1,5 @@
# Architecture
<!-- vale proselint.Annotations = NO -->
TODO
<!-- vale proselint.Annotations = YES -->


@ -18,15 +18,6 @@ If you don't have Go available, you can find installation instructions [here](ht
This will generate a binary named `corso` in the directory where you run the build.
:::note
Prebuilt binary artifacts of the latest commit are available on GitHub.
You can access them by navigating to the "Summary" page of
the [`Build/Release Corso` CI job](https://github.com/alcionai/corso/actions/workflows/ci.yml?query=branch%3Amain)
that was run for the latest commit on the `main` branch.
The downloads will be available in the "Artifacts" section towards the
bottom of the page.
:::
### Building via Docker
For convenience, the Corso build tooling is containerized. To take advantage, you need
@ -70,10 +61,10 @@ In addition, you can optionally pass the tag that you would like to
apply for the image using `--tag` option.
For example, you can use the following command to create a `arm64`
image with the tag `ghcr.io/alcionai/corso:latest`, you can run:
image with the tag `gcr.io/alcionai/corso:latest`, you can run:
```bash
./build/build.sh image --platforms linux/arm64 --tag ghcr.io/alcionai/corso:latest
./build/build.sh image --platforms linux/arm64 --tag gcr.io/alcionai/corso:latest
```
:::info


@ -13,7 +13,7 @@ available on the golangci-lint
version that you install should match the version the GitHub workflow uses to
avoid failures even after running locally. The current version in use can be
[found](https://github.com/alcionai/corso/blob/main/.github/workflows/lint.yml#L55)
in `.github/workflows/lint.yaml`.
in `.github/worflows/lint.yaml`.
## Running the linter
@ -21,16 +21,6 @@ You can run the linter manually or with the `Makefile` in the repository. Runnin
the `Makefile` will also ensure you have the proper version of golangci-lint
installed.
### Running the actions linter
Installation:
```sh
go install github.com/rhysd/actionlint/cmd/actionlint@latest
```
[Instructions for running locally.](https://github.com/rhysd/actionlint/blob/main/docs/usage.md)
### Running with the `Makefile`
There's a `Makefile` in `corso/src` that will automatically check if the proper
@ -199,9 +189,7 @@ cryptic messages how you can fix the problems the linters flag.
Each subsection also includes the version of golangci-lint it applies to and the
linter in question.
```sh
gci Expected 's', Found 'a' at file.go
```
### `gci` `Expected 's', Found 'a' at file.go`
This applies to golangci-lint v1.45.2 for the `gci` linter and is due to an import
ordering issue. It occurs because imports in the file aren't grouped according


@ -30,7 +30,7 @@
- Set M365 Credentials environment variables
> You can find more information on how to get these values in our [M365 docs](../../setup/m365-access/).
> You can find more information on how to get these values in [configuration docs](/configuration/m365_access).
```bash
export AZURE_CLIENT_ID=<id>
@ -51,8 +51,8 @@ The complete list of environment constants is available at
## Advanced options
- To override the M365 user for tests, use `CORSO_M365_TEST_USER_ID`
- To override the M365 user for tests, use `CORSO_M356_TEST_USER_ID`
```bash
export CORSO_M365_TEST_USER_ID="..."
export CORSO_M356_TEST_USER_ID="..."
```

docs/docs/install.md (new file)

@ -0,0 +1,83 @@
# Installation
Corso releases are available using the following options:
import TOCInline from '@theme/TOCInline';
<TOCInline toc={toc} maxHeadingLevel={2}/><br/>
:::note
To maximize portability across platforms, Corso is available as a container image. In the future,
releases may also be available as operating system specific pre-built binaries.
In the meantime, if you want to run Corso as a binary, refer to the
[instructions on how to build from source](developers/build).
:::
## Docker image
To use Corso as a Docker image, you need to have [Docker installed](https://docs.docker.com/engine/install/)
on your machine.
### Docker command
To run the Corso container, it's recommended that you:
* Export [Corso key configuration environment variables](cli/corso_env) and add their names to an
[environment variables file](https://docs.docker.com/engine/reference/commandline/run/#set-environment-variables--e---env---env-file)
* Map a local directory to `/app/corso`. Corso will look for or create the `corso.toml` config file there. This will preserve
configuration across container runs. Corso will use the directory for logs, if enabled.
To create the environment variables file, you can run the following:
```bash
# create an env vars file
$ cat <<EOF > ~/.corso/corso.env
CORSO_PASSPHRASE
AZURE_TENANT_ID
AZURE_CLIENT_ID
AZURE_CLIENT_SECRET
AWS_ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY
AWS_SESSION_TOKEN
EOF
```
:::note
Depending on your preferred method for passing AWS credentials, you may need to adapt the `AWS_` variables in the file.
See [AWS Credentials Setup](/configuration/repos#s3-creds-setup) for more details.
:::
The following command will list the Corso Exchange backups. You can adapt the folder mappings, container tag, and the command
as needed.
```bash
$ docker run --env-file ~/.corso/corso.env \
-v ~/.corso/corso:/app/corso \
corso/corso backup list exchange
```
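For example, if you use named AWS profiles rather than static keys, one possible adaptation is to mount your AWS configuration into the container instead; the profile name and the in-container path below are assumptions that depend on where the image expects the default AWS credential files.

```bash
# use a named AWS profile instead of static keys (profile and paths are illustrative)
$ docker run --env-file ~/.corso/corso.env \
    -e AWS_PROFILE=corso \
    -v ~/.aws:/root/.aws:ro \
    -v ~/.corso/corso:/app/corso \
    corso/corso backup list exchange
```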
### Available variants
The Corso image is available on Docker Hub for the following architectures:
* Linux and Windows x86-64 - `amd64`
* ARM 64-bit - `arm64`
:::tip
For Windows, you can run the `amd64` container in
[Linux Mode](https://docs.microsoft.com/en-us/virtualization/windowscontainers/quick-start/quick-start-windows-10-linux)
:::
The following tags are available (a pull example follows the list):
* `:x.y.z` - A specific release build
* `:pre-release` - The most recent pre-release if newer than the latest stable release
* `:nightly` - The most recent unstable developer build
* `:SHA` - A specific build
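For example, to fetch a specific variant ahead of time (the release number is a placeholder):

```bash
# pull a pinned release
docker pull corso/corso:<x.y.z>

# or track the most recent unstable developer build
docker pull corso/corso:nightly
```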


@@ -12,6 +12,5 @@ beyond M365, will expand based on the interest and needs of the community.
## Getting started
You can follow the [Quick Start](../quickstart) guide for an end-to-end Corso walk through. Alternatively, follow
the instructions in the [Corso Setup](../setup/concepts) section to dive into the details on how to configure and
run Corso.
Follow the instructions in the [Installation](/install) and [Initial Configuration](/configuration/concepts) sections to
set up Corso and get it ready for first use. You can then go through the [Tutorial](/tutorial) section to see it in action.

docs/docs/tutorial.md Normal file

@@ -0,0 +1,78 @@
# Tutorial
In this tutorial you will perform your first backup followed by a restore.
## Prerequisites
* Install Docker
* Pull the Corso container (see [Installation](/install))
* Configure connection to your M365 Tenant (see [M365 Access](/configuration/m365_access))
* Initialize a Corso backup repository (see [Repositories](/configuration/repos)); a minimal example follows this list
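If you still need to complete the last prerequisite, a minimal S3 repository initialization looks roughly like the following; the bucket name and release tag are placeholders, and the [Repositories](/configuration/repos) page covers the full set of options.

```bash
$ docker run -e CORSO_PASSPHRASE \
    --env-file ~/.corso/corso.env \
    -v ~/.corso:/app/corso corso/corso:<release tag> \
    repo init s3 --bucket <bucket name>
```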
## Your first backup
Corso can do much more, but you can start by creating a backup of your Exchange mailbox.
To do this, you can run the following command:
```bash
$ docker run -e CORSO_PASSPHRASE \
--env-file ~/.corso/corso.env \
-v ~/.corso:/app/corso corso/corso:<release tag> \
backup create exchange --user <your exchange email address>
Started At ID Status Selectors
2022-10-10T19:46:43Z 41e93db7-650d-44ce-b721-ae2e8071c728 Completed (0 errors) alice@example.com
```
:::note
Your first backup may take some time if your mailbox is large.
:::
## Restore an email
Now let's explore how you can restore data from one of your backups.
You can see all Exchange backups available with the following command:
```bash
$ docker run -e CORSO_PASSPHRASE \
--env-file ~/.corso/corso.env \
-v ~/.corso:/app/corso corso/corso:<release tag> \
backup list exchange
Started At ID Status Selectors
2022-09-09T14:27:16Z 72d12ef6-420a-15bd-c862-fd7c9023a014 Completed (0 errors) alice@example.com
2022-10-10T19:46:43Z 41e93db7-650d-44ce-b721-ae2e8071c728 Completed (0 errors) alice@example.com
```
Select one of the available backups and search through its contents.
```bash
$ docker run -e CORSO_PASSPHRASE \
--env-file ~/.corso/corso.env \
-v ~/.corso:/app/corso corso/corso:<release tag> \
backup details exchange \
--backup <id of your selected backup> \
--user <your exchange email address> \
--email-subject <portion of subject of email you want to recover>
```
The output from the command above should display a list of any matching emails. Note the ID
of the one to use for testing restore.
When you are ready to restore, use the following command:
```bash
$ docker run -e CORSO_PASSPHRASE \
--env-file ~/.corso/corso.env \
-v ~/.corso:/app/corso corso/corso:<release tag> \
restore exchange \
--backup <id of your selected backup> \
--user <your exchange email address> \
--email <id of your selected email>
```
You can now find the recovered email in a mailbox folder named `Corso_Restore_DD-MMM-YYYY_HH:MM:SS`.
You are now ready to explore the [Command Line Reference](cli/corso) and try everything that Corso can do.


@@ -1,19 +1,18 @@
// @ts-check
// Note: type annotations allow type checking and IDEs autocompletion
const lightCodeTheme = require('prism-react-renderer').themes.github;
const darkCodeTheme = require('prism-react-renderer').themes.dracula;
const lightCodeTheme = require('prism-react-renderer/themes/github');
const darkCodeTheme = require('prism-react-renderer/themes/dracula');
/** @type {import('@docusaurus/types').Config} */
const config = {
title: 'Corso',
title: 'Corso Documentation',
tagline: 'Free, Secure, and Open-Source Backup for Microsoft 365',
url: 'https://corsobackup.io',
baseUrl: process.env.CORSO_DOCS_BASEURL || '/',
onBrokenLinks: 'throw',
onBrokenMarkdownLinks: 'throw',
favicon: 'img/corso_logo.svg',
trailingSlash: true,
// GitHub pages deployment config.
// If you aren't using GitHub pages, you don't need these.
@@ -34,32 +33,21 @@ const config = {
require.resolve('docusaurus-plugin-image-zoom')
],
customFields: {
corsoVersion: `${process.env.CORSO_VERSION}`,
},
presets: [
[
'classic',
/** @type {import('@docusaurus/preset-classic').Options} */
({
docs: {
routeBasePath: '/',
sidebarPath: require.resolve('./sidebars.js'),
remarkPlugins: [require('mdx-mermaid')],
editUrl:
'https://github.com/alcionai/corso/tree/main/website',
'https://github.com/alcionai/corso/tree/main/docs',
},
blog: {
showReadingTime: true,
blogTitle: 'Corso Blog',
blogDescription: 'Blog about Microsoft 365 protection, backup, and security',
},
sitemap: {
ignorePatterns: ['/tags/**'],
filename: 'sitemap.xml',
},
gtag: {
trackingID: 'GTM-KM3XWPV',
blog: false,
googleAnalytics: {
trackingID: 'G-4EXT1PY6KC',
},
theme: {
customCss: require.resolve('./src/css/custom.scss'),
@@ -68,6 +56,16 @@ const config = {
],
],
themes: [
[
require.resolve('@easyops-cn/docusaurus-search-local'),
{
hashed: true,
docsRouteBasePath: '/',
},
],
],
themeConfig:
/** @type {import('@docusaurus/preset-classic').ThemeConfig} */
({
@@ -79,28 +77,12 @@ const config = {
srcDark: 'img/corso_horizontal_logo_white.svg',
},
items: [
{
type: 'doc',
docId: 'quickstart',
position: 'left',
label: 'Quick Start',
},
{
type: 'doc',
docId: 'intro',
position: 'left',
label: 'Docs',
},
{
href: 'https://discord.gg/63DTTSnuhT',
label: 'Community',
position: 'left',
},
{
to: '/blog',
label: 'Blog',
position: 'left'
},
{
href: 'https://github.com/alcionai/corso',
label: 'GitHub',
@@ -117,44 +99,47 @@ const config = {
},
links: [
{
title: 'Open Source',
title: 'Resources',
items: [
{
label: 'Docs',
to: '/docs/intro',
to: '/intro',
},
],
},
{
title: 'Community',
items: [
{
label: 'Discussions',
href: 'https://github.com/alcionai/corso/discussions',
},
{
label: 'Blog',
to: '/blog',
label: 'Discord',
href: 'https://discord.gg/63DTTSnuhT',
},
{
label: 'Twitter',
href: 'https://twitter.com/CorsoBackup',
},
],
},
{
title: 'More',
items: [
{
label: 'GitHub',
href: 'https://github.com/alcionai/corso',
},
{
label: 'Corso Discord',
href: 'https://discord.gg/63DTTSnuhT',
},
],
},
{
title: ' ',
},
{
title: 'Alcion, Powered by Corso',
items: [
{
label: 'Backup as a Service',
href: 'https://www.alcion.ai',
},
{
label: 'Alcion Discord',
href: 'https://www.alcion.ai/discord',
label: 'Corso Website',
href: 'https://corsobackup.io/',
},
],
},
],
copyright: `Copyright © ${new Date().getFullYear()} | Version ${process.env.CORSO_VERSION}`,
copyright: `Copyright © ${new Date().getFullYear()}`,
},
colorMode: {
defaultMode: 'dark',
@@ -176,24 +161,9 @@ const config = {
},
},
algolia: {
appId: 'EPJZU1WKE7',
apiKey: 'd432a94741013719fdd0d78275c7aa9c',
indexName: 'corsobackup',
contextualSearch: true,
},
image: 'img/cloudbackup.png',
metadata : [
{name: 'twitter:card', content: 'summary_large_image'},
{name: 'twitter:site', content: '@corsobackup'},
],
prism: {
theme: lightCodeTheme,
darkTheme: darkCodeTheme,
additionalLanguages: ['powershell'],
},
}),
};

docs/package-lock.json generated Normal file

File diff suppressed because it is too large.

docs/package.json Normal file

@@ -0,0 +1,46 @@
{
"name": "docs",
"version": "0.1.0",
"private": true,
"scripts": {
"docusaurus": "docusaurus",
"start": "docusaurus start",
"build": "docusaurus build",
"swizzle": "docusaurus swizzle",
"deploy": "docusaurus deploy",
"clear": "docusaurus clear",
"serve": "docusaurus serve",
"write-translations": "docusaurus write-translations",
"write-heading-ids": "docusaurus write-heading-ids"
},
"dependencies": {
"@docusaurus/core": "2.1.0",
"@docusaurus/preset-classic": "2.1.0",
"@easyops-cn/docusaurus-search-local": "^0.32.1",
"@mdx-js/react": "^1.6.22",
"clsx": "^1.2.1",
"docusaurus-plugin-image-zoom": "^0.1.1",
"docusaurus-plugin-sass": "^0.2.2",
"mdx-mermaid": "^1.3.2",
"mermaid": "^9.1.6",
"prism-react-renderer": "^1.3.5",
"react": "^17.0.2",
"react-dom": "^17.0.2",
"sass": "^1.54.8"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "2.0.1"
},
"browserslist": {
"production": [
">0.5%",
"not dead",
"not op_mini all"
],
"development": [
"last 1 chrome version",
"last 1 firefox version",
"last 1 safari version"
]
}
}

docs/sidebars.js Normal file

@@ -0,0 +1,54 @@
/**
* Creating a sidebar enables you to:
- create an ordered group of docs
- render a sidebar for each doc of that group
- provide next/previous navigation
The sidebars can be generated from the filesystem, or explicitly defined here.
Create as many sidebars as you want.
*/
// @ts-check
/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */
const sidebars = {
// By default, Docusaurus generates a sidebar from the docs folder structure
docsSidebar: [
'intro',
'install',
'tutorial',
{
type: 'category',
label: 'Initial Configuration',
items: ['configuration/concepts', 'configuration/m365_access', 'configuration/repos'],
},
{
type: 'category',
label: 'Command Line Reference',
link: {
slug: 'cli/corso',
description: 'Explore the commonly used Corso CLI commands',
type: 'generated-index',
},
items: [
'cli/corso_repo_init_s3', 'cli/corso_repo_connect_s3',
'cli/corso_backup_create_exchange', 'cli/corso_backup_list_exchange', 'cli/corso_backup_details_exchange',
'cli/corso_restore_exchange',
'cli/corso_backup_create_onedrive', 'cli/corso_backup_list_onedrive', 'cli/corso_backup_details_onedrive',
'cli/corso_restore_onedrive',
'cli/corso_env'
]
},
{
type: 'category',
label: 'Developer Guide',
items: [
'developers/architecture', 'developers/build', 'developers/testing', 'developers/linters'
],
},
],
};
module.exports = sidebars;


@@ -0,0 +1,61 @@
import React from 'react';
import clsx from 'clsx';
import styles from './styles.module.css';
const FeatureList = [
{
title: 'Secure',
Svg: require('@site/static/img/security.svg').default,
description: (
<>
Corso provides secure data backup that protects customers against accidental data loss, service provider downtime, and malicious threats including ransomware attacks.
</>
),
},
{
title: 'Robust',
Svg: require('@site/static/img/data.svg').default,
description: (
<>
Corso, purpose-built for M365 protection, provides easy-to-use comprehensive backup and restore workflows that reduce backup time, improve time-to-recovery, reduce admin overhead, and replace unreliable scripts or workarounds.
</>
),
},
{
title: 'Low Cost',
Svg: require('@site/static/img/savings.svg').default,
description: (
<>
Corso, a 100% open-source tool, provides a free alternative for cost-conscious teams. It further reduces storage costs by supporting flexible retention policies and efficiently compressing and deduplicating data before storing it in low-cost cloud object storage.
</>
),
},
];
function Feature({Svg, title, description}) {
return (
<div className={clsx('col col--4')}>
<div className="text--center">
<Svg className={styles.featureSvg} role="img" />
</div>
<div className="text--center padding-horiz--md">
<h3>{title}</h3>
<p>{description}</p>
</div>
</div>
);
}
export default function HomepageFeatures() {
return (
<section className={styles.features}>
<div className="container">
<div className="row">
{FeatureList.map((props, idx) => (
<Feature key={idx} {...props} />
))}
</div>
</div>
</section>
);
}


@@ -0,0 +1,11 @@
.features {
display: flex;
align-items: center;
padding: 2rem 0;
width: 100%;
}
.featureSvg {
height: 200px;
width: 200px;
}

docs/src/css/custom.scss Normal file

@@ -0,0 +1,267 @@
/**
* Any CSS included here will be global. The classic template
* bundles Infima by default. Infima is a CSS framework designed to
* work well for content-centric websites.
*/
/* You can override the default Infima variables here. */
:root {
--ifm-color-primary: #1e204e;
--ifm-color-primary-dark: #1b1d46;
--ifm-color-primary-darker: #1a1b42;
--ifm-color-primary-darkest: #151637;
--ifm-color-primary-light: #212356;
--ifm-color-primary-lighter: #23255a;
--ifm-color-primary-lightest: #272a65;
--ifm-code-font-size: 95%;
--docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.1);
}
/* For readability concerns, you should choose a lighter palette in dark mode. */
[data-theme='dark'] {
--ifm-color-primary: #cdd4f9;
--ifm-color-primary-dark: #a4b1f4;
--ifm-color-primary-darker: #90a0f2;
--ifm-color-primary-darkest: #536bea;
--ifm-color-primary-light: #f6f7fe;
--ifm-color-primary-lighter: #ffffff;
--ifm-color-primary-lightest: #ffffff;
--docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.3);
}
html[data-theme='dark'] {
--ifm-background-color: #111629;
}
.navbar {
background-color: var(--ifm-background-color);
}
.footer {
--ifm-footer-background-color: #1b2131;
}
.guideImages {
max-width: 650px;
width: 100%;
border: 1px solid #80808029
}
.markdown {
code {
border-color: transparent;
vertical-align: initial;
font-size: var(--ifm-code-font-size) !important;
}
blockquote {
background-color: var(--rn-note-background);
border-left: 8px solid var(--ifm-color-warning);
padding: 15px 30px 15px 15px;
code {
background-color: rgba(0, 0, 0, 0.07);
}
a code {
background-color: transparent;
white-space: pre;
}
}
strong {
font-weight: 600;
}
table {
border-collapse: collapse;
display: table;
margin: 20px 0;
width: 100%;
thead tr {
background-color: var(--divider);
border: 0;
> .cliFlagsCol {
width: 20%;
}
> .cliShortCol {
width: 5%;
}
> .cliDefaultCol {
width: 20%;
}
> .cliHelpCol {
width: 55%;
}
}
tr th {
text-transform: uppercase;
padding: 6px 10px;
font-size: 0.7rem;
text-align: left;
}
tr td {
font-size: 90%;
line-height: 1.3em;
padding: 10px;
text-align: left;
code {
display: inline-block;
line-height: 1.2em;
vertical-align: top;
color: #e74c3c;
background-color: unset;
}
ul {
margin: 8px 0 0 0;
padding: 4px 0 4px 20px;
}
.required {
margin-left: 8px;
margin-right: 0;
position: relative;
padding: 0 6px;
border-width: 1px;
border-radius: 3px;
border-color: #fa5035;
border-style: solid;
color: #fa5035;
display: inline-block;
letter-spacing: .02rem;
font-size: 0.8rem;
}
}
hr {
margin: 12px 0;
}
.wideColumn {
width: 128px;
}
.widerColumn {
width: 180px;
}
.table-heading {
font-weight: var(--ifm-table-head-font-weight);
border-bottom: 2px solid var(--ifm-table-border-color);
}
div[class*="codeBlockContainer"] {
&,
& pre {
margin-bottom: 0;
}
pre code {
padding: 12px 16px;
white-space: break-spaces;
}
button {
display: none;
}
}
}
figure {
text-align: center;
padding: 8px;
figcaption {
padding: 8px;
font-size: 0.9rem;
color: var(--subtle);
code {
color: var(--subtle);
}
}
}
.docusaurus-highlight-code-line {
background-color: var(--light);
}
div[class*="codeBlockContainer"] {
box-shadow: none;
border-radius: 8px;
pre {
border-radius: 8px;
}
button {
border-color: var(--light);
background: var(--deepdark);
}
}
div[class*="codeBlockContent"] {
display: grid;
min-width: 100%;
}
div[class*="codeBlockLines"] {
font-size: 80%;
}
div[class*="codeBlockTitle"] {
color: var(--subtle);
background-color: var(--ifm-color-emphasis-300);
}
.tabs {
border-bottom: 1px solid var(--ifm-table-border-color);
color: var(--ifm-font-color-base);
.tabs__item {
font-weight: normal;
font-size: 15px;
color: var(--ifm-font-color-base);
user-select: none;
&:hover {
border-radius: var(--ifm-global-radius) var(--ifm-global-radius) 0 0;
}
&.tabs__item--active {
font-weight: 700;
}
}
}
}
html[class*='docs-doc-id-cli'] .markdown table thead tr {
:nth-of-type(1) {
width: 20%;
}
:nth-of-type(2) {
width: 5%;
}
:nth-of-type(3) {
width: 20%;
}
:nth-of-type(4) {
width: 50%;
}
}

docs/src/pages/index.js Normal file

@@ -0,0 +1,40 @@
import React from 'react';
import clsx from 'clsx';
import Layout from '@theme/Layout';
import Link from '@docusaurus/Link';
import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
import styles from './index.module.css';
import HomepageFeatures from '@site/src/components/HomepageFeatures';
function HomepageHeader() {
const {siteConfig} = useDocusaurusContext();
return (
<header className={clsx('hero hero--primary', styles.heroBanner)}>
<div className="container">
<h1 className="hero__title">{siteConfig.title}</h1>
<p className="hero__subtitle">{siteConfig.tagline}</p>
<div className={styles.buttons}>
<Link
className="button button--secondary button--lg"
to="tutorial">
Corso Tutorial - 5min
</Link>
</div>
</div>
</header>
);
}
export default function Home() {
const {siteConfig} = useDocusaurusContext();
return (
<Layout
title={`${siteConfig.title}`}
description="Documentation for Corso, a free, secure, and open-source backup tool for Microsoft 365">
<HomepageHeader />
<main>
<HomepageFeatures />
</main>
</Layout>
);
}


@@ -0,0 +1,7 @@
---
title: Markdown page example
---
# Markdown page example
You don't need React to write simple standalone pages.

docs/static/img/corso_horizontal_logo.svg vendored Executable file

@@ -0,0 +1 @@
<?xml version="1.0" encoding="UTF-8"?><svg id="Layer_1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 1920 523"><defs><style>.cls-1{fill:#205eab;}</style></defs><g><path class="cls-1" d="M134.51,129.94c28.63,0,54.6,7.95,75.81,22.79,11.67,7.95,14.31,23.33,6.36,36.58-7.42,12.19-25.98,12.73-37.64,5.83-12.73-7.42-28.63-12.19-44.53-12.19-41.35,0-77.93,30.22-77.93,76.34s36.58,75.81,77.93,75.81c15.91,0,31.81-4.77,44.53-12.19,11.66-6.89,30.22-6.36,37.64,5.83,7.95,13.25,5.3,28.63-6.36,36.58-21.21,14.84-47.18,22.8-75.81,22.8C63.47,388.12,2.5,337.76,2.5,259.29S63.47,129.94,134.51,129.94Z"/><path class="cls-1" d="M261.22,258.23c0-78.46,58.85-128.3,128.83-128.3s129.88,49.83,129.88,128.3-59.37,129.89-129.88,129.89-128.83-51.43-128.83-129.89Zm204.64,0c0-45.59-34.46-75.28-75.81-75.28s-74.75,29.69-74.75,75.28,33.93,76.87,74.75,76.87,75.81-30.22,75.81-76.87Z"/><path class="cls-1" d="M633.91,293.75v64.15c0,14.84-12.19,27.57-28.1,27.57-14.84,0-26.51-12.72-26.51-27.57V160.15c0-14.84,11.67-27.57,26.51-27.57,15.91,0,28.1,12.72,28.1,27.57v31.81c12.73-44,37.11-62.03,67.86-62.03,7.95,0,15.91,.53,23.33,2.12,13.79,3.18,22.8,16.97,19.62,31.28-4.77,23.86-28.63,18.03-44.53,18.03-46.65,0-66.27,46.65-66.27,112.39Z"/><path class="cls-1" d="M788.19,302.24c13.25-5.3,23.33,1.59,27.57,10.6,10.08,19.09,29.16,29.69,53.55,29.69s42.94-11.13,42.94-29.69c0-15.9-15.38-22.79-33.4-27.03l-33.4-7.95c-52.48-14.32-71.57-42.94-68.39-82.7,3.18-36.58,42.94-65.21,88.53-65.21,32.87,0,63.09,10.6,79.53,36.58,7.42,12.72,3.71,25.44-4.77,31.81-9.01,7.42-20.15,6.89-31.81-3.18-13.78-12.19-29.69-16.97-42.41-16.97-13.79,0-29.16,4.77-34.46,13.25-4.24,6.89-4.77,13.78-2.12,21.21,3.18,9.54,18.02,14.31,31.28,18.02l38.17,9.54c53.54,13.25,64.68,48.24,64.68,73.16,0,47.71-41.88,74.75-98.61,74.75-38.17,0-76.87-20.15-90.13-56.2-4.24-13.25,1.59-25.44,13.25-29.68Z"/><path class="cls-1" d="M1006.61,258.23c0-78.46,58.85-128.3,128.83-128.3s129.88,49.83,129.88,128.3-59.37,129.89-129.88,129.89-128.83-51.43-128.83-129.89Zm204.64,0c0-45.59-34.46-75.28-75.81-75.28s-74.75,29.69-74.75,75.28,33.93,76.87,74.75,76.87,75.81-30.22,75.81-76.87Z"/></g><path class="cls-1" d="M1658.37,520.7c-33.98,0-65.93-13.23-89.96-37.26l-221.94-221.93,221.94-221.93c24.03-24.03,55.98-37.27,89.96-37.27s65.93,13.23,89.96,37.27l131.98,131.97c49.6,49.61,49.6,130.31,0,179.92l-131.98,131.98c-24.03,24.03-55.98,37.26-89.96,37.26Zm-264.47-259.2l198.22,198.22c36.53,36.53,95.97,36.52,132.5,0l131.98-131.98c36.53-36.53,36.53-95.97,0-132.5l-131.98-131.97c-17.7-17.7-41.22-27.44-66.25-27.44s-48.55,9.75-66.25,27.44l-198.22,198.22Z"/><g><path class="cls-1" d="M1813.99,260.82l-6.63-41.44c-1.01-7.17-6.37-11.15-14.81-11.15l-49.44-.14c-7.95,0-13.8-3.97-14.81-11.15l-.17-1.18c-1.61-11.39-9.61-20.86-20.58-24.34l-66.42-24.32c-1.66-.53-3.19-1.38-4.51-2.5-.48-.41-.8-.96-.97-1.57l-12.42-42.67c-.13-.43-.36-.83-.7-1.12-3.65-3-9.25-1.95-11.5,2.32l-24.5,54.88c-.89,1.68-1.4,3.54-1.5,5.45l3.01,25.01c-.14,2.64-1.08,5.2-2.7,7.3l-48.86,48.59c-5.2,5.2-16.08,16.08-16.08,16.08l136.84,136.85,12.2-50.1c1.05-5.87,5.91-10.29,11.84-10.77l25.89-2.09c80.88-3.46,81.72-26.8,104.9-63.08,1.35-2.12,2.07-4.58,2.07-7.08,0-.6-.04-1.21-.13-1.81Zm-106.45-40.35c-5.96,1.5-22.58,.54-24.08-5.43-1.5-5.95,12.71-14.66,18.66-16.15,5.96-1.5,12,2.12,13.5,8.08,1.49,5.95-2.13,12-8.08,13.49Z"/><path class="cls-1" d="M1680.04,153.81l-13.04-48.97c-.14-.53-.47-1.01-.92-1.33-2.89-2.07-7.06-1.18-8.79,2.09l-16.37,33.9,39.12,14.32Z"/><path class="cls-1" 
d="M1655.16,404.17l-.85,3.47c-1.93,7.9-11.75,10.65-17.49,4.9l-123.3-123.3-11.74-11.74,13.35-13.35,11.74,11.74,128.28,128.28Z"/></g></svg>

docs/static/img/corso_horizontal_logo_white.svg vendored Executable file

@@ -0,0 +1 @@
<?xml version="1.0" encoding="UTF-8"?><svg id="Layer_1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 1920 523"><defs><style>.cls-1{fill:#fff;}</style></defs><g><path class="cls-1" d="M134.51,129.94c28.63,0,54.6,7.95,75.81,22.79,11.67,7.95,14.31,23.33,6.36,36.58-7.42,12.19-25.98,12.73-37.64,5.83-12.73-7.42-28.63-12.19-44.53-12.19-41.35,0-77.93,30.22-77.93,76.34s36.58,75.81,77.93,75.81c15.91,0,31.81-4.77,44.53-12.19,11.66-6.89,30.22-6.36,37.64,5.83,7.95,13.25,5.3,28.63-6.36,36.58-21.21,14.84-47.18,22.8-75.81,22.8C63.47,388.12,2.5,337.76,2.5,259.29S63.47,129.94,134.51,129.94Z"/><path class="cls-1" d="M261.22,258.23c0-78.46,58.85-128.3,128.83-128.3s129.88,49.83,129.88,128.3-59.37,129.89-129.88,129.89-128.83-51.43-128.83-129.89Zm204.64,0c0-45.59-34.46-75.28-75.81-75.28s-74.75,29.69-74.75,75.28,33.93,76.87,74.75,76.87,75.81-30.22,75.81-76.87Z"/><path class="cls-1" d="M633.91,293.75v64.15c0,14.84-12.19,27.57-28.1,27.57-14.84,0-26.51-12.72-26.51-27.57V160.15c0-14.84,11.67-27.57,26.51-27.57,15.91,0,28.1,12.72,28.1,27.57v31.81c12.73-44,37.11-62.03,67.86-62.03,7.95,0,15.91,.53,23.33,2.12,13.79,3.18,22.8,16.97,19.62,31.28-4.77,23.86-28.63,18.03-44.53,18.03-46.65,0-66.27,46.65-66.27,112.39Z"/><path class="cls-1" d="M788.19,302.24c13.25-5.3,23.33,1.59,27.57,10.6,10.08,19.09,29.16,29.69,53.55,29.69s42.94-11.13,42.94-29.69c0-15.9-15.38-22.79-33.4-27.03l-33.4-7.95c-52.48-14.32-71.57-42.94-68.39-82.7,3.18-36.58,42.94-65.21,88.53-65.21,32.87,0,63.09,10.6,79.53,36.58,7.42,12.72,3.71,25.44-4.77,31.81-9.01,7.42-20.15,6.89-31.81-3.18-13.78-12.19-29.69-16.97-42.41-16.97-13.79,0-29.16,4.77-34.46,13.25-4.24,6.89-4.77,13.78-2.12,21.21,3.18,9.54,18.02,14.31,31.28,18.02l38.17,9.54c53.54,13.25,64.68,48.24,64.68,73.16,0,47.71-41.88,74.75-98.61,74.75-38.17,0-76.87-20.15-90.13-56.2-4.24-13.25,1.59-25.44,13.25-29.68Z"/><path class="cls-1" d="M1006.61,258.23c0-78.46,58.85-128.3,128.83-128.3s129.88,49.83,129.88,128.3-59.37,129.89-129.88,129.89-128.83-51.43-128.83-129.89Zm204.64,0c0-45.59-34.46-75.28-75.81-75.28s-74.75,29.69-74.75,75.28,33.93,76.87,74.75,76.87,75.81-30.22,75.81-76.87Z"/></g><path class="cls-1" d="M1658.37,520.7c-33.98,0-65.93-13.23-89.96-37.26l-221.94-221.93,221.94-221.93c24.03-24.03,55.98-37.27,89.96-37.27s65.93,13.23,89.96,37.27l131.98,131.97c49.6,49.61,49.6,130.31,0,179.92l-131.98,131.98c-24.03,24.03-55.98,37.26-89.96,37.26Zm-264.47-259.2l198.22,198.22c36.53,36.53,95.97,36.52,132.5,0l131.98-131.98c36.53-36.53,36.53-95.97,0-132.5l-131.98-131.97c-17.7-17.7-41.22-27.44-66.25-27.44s-48.55,9.75-66.25,27.44l-198.22,198.22Z"/><g><path class="cls-1" d="M1813.99,260.82l-6.63-41.44c-1.01-7.17-6.37-11.15-14.81-11.15l-49.44-.14c-7.95,0-13.8-3.97-14.81-11.15l-.17-1.18c-1.61-11.39-9.61-20.86-20.58-24.34l-66.42-24.32c-1.66-.53-3.19-1.38-4.51-2.5-.48-.41-.8-.96-.97-1.57l-12.42-42.67c-.13-.43-.36-.83-.7-1.12-3.65-3-9.25-1.95-11.5,2.32l-24.5,54.88c-.89,1.68-1.4,3.54-1.5,5.45l3.01,25.01c-.14,2.64-1.08,5.2-2.7,7.3l-48.86,48.59c-5.2,5.2-16.08,16.08-16.08,16.08l136.84,136.85,12.2-50.1c1.05-5.87,5.91-10.29,11.84-10.77l25.89-2.09c80.88-3.46,81.72-26.8,104.9-63.08,1.35-2.12,2.07-4.58,2.07-7.08,0-.6-.04-1.21-.13-1.81Zm-106.45-40.35c-5.96,1.5-22.58,.54-24.08-5.43-1.5-5.95,12.71-14.66,18.66-16.15,5.96-1.5,12,2.12,13.5,8.08,1.49,5.95-2.13,12-8.08,13.49Z"/><path class="cls-1" d="M1680.04,153.81l-13.04-48.97c-.14-.53-.47-1.01-.92-1.33-2.89-2.07-7.06-1.18-8.79,2.09l-16.37,33.9,39.12,14.32Z"/><path class="cls-1" 
d="M1655.16,404.17l-.85,3.47c-1.93,7.9-11.75,10.65-17.49,4.9l-123.3-123.3-11.74-11.74,13.35-13.35,11.74,11.74,128.28,128.28Z"/></g></svg>

BIN docs/static/img/favicon.ico vendored Normal file
Binary file not shown.

BIN docs/static/img/m365app_configure.png vendored Normal file
Binary file not shown.

BIN docs/static/img/m365app_create_new.png vendored Normal file
Binary file not shown.

@@ -66,4 +66,3 @@ exceptions:
- YAML
- ZIP
- HOME
- COVID

Some files were not shown because too many files have changed in this diff.