Compare commits
1 Commits
main
...
memory_pro
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f5360b0074 |
40
.github/ISSUE_TEMPLATE/BUG-REPORT.yaml
vendored
@ -1,40 +0,0 @@
|
||||
name: Bug Report
|
||||
description: File a Corso bug report
|
||||
title: "[Bug]: "
|
||||
labels: ["bug", "triage"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thanks for taking the time to fill out this bug report!
|
||||
- type: textarea
|
||||
id: what-happened
|
||||
attributes:
|
||||
label: What happened?
|
||||
description: Also tell us, what did you expect to happen?
|
||||
placeholder: Tell us what you see!
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: version
|
||||
attributes:
|
||||
label: Corso Version?
|
||||
description: What version of Corso (`corso --version`) are you running?
|
||||
value: "Corso vX.X.X"
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: os
|
||||
attributes:
|
||||
label: Where are you running Corso?
|
||||
description: Include OS version (e.g., macOS 13.0.1, Windows 11 Pro) and Object Storage system being used.
|
||||
placeholder: Also include additional system details if relevant (e.g., shell, antivirus, firewall/proxy used on the network, etc.)
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: Relevant log output
|
||||
description: Please run Corso with `--log-level debug --mask-sensitive-data` and attach the log file.
|
||||
placeholder: This will be automatically formatted, so no need for backticks.
|
||||
render: shell
|
||||
5
.github/ISSUE_TEMPLATE/config.yml
vendored
@ -1,5 +0,0 @@
|
||||
blank_issues_enabled: true
|
||||
contact_links:
|
||||
- name: Corso Documentation
|
||||
url: https://corsobackup.io/docs/intro/
|
||||
about: Did you search the documentation?
|
||||
268
.github/actions/backup-restore-test/action.yml
vendored
@ -1,268 +0,0 @@
|
||||
name: Backup Restore Test
|
||||
description: Run various backup/restore/export tests for a service.
|
||||
|
||||
inputs:
|
||||
service:
|
||||
description: Service to test
|
||||
required: true
|
||||
kind:
|
||||
description: Kind of test
|
||||
required: true
|
||||
backup-id:
|
||||
description: Backup to retrieve data out of
|
||||
required: false
|
||||
backup-args:
|
||||
description: Arguments to pass for backup
|
||||
required: false
|
||||
default: ""
|
||||
restore-args:
|
||||
description: Arguments to pass for restore; restore is skipped when missing.
|
||||
required: false
|
||||
default: ""
|
||||
export-args:
|
||||
description: Arguments to pass for export.
|
||||
required: false
|
||||
default: ""
|
||||
restore-container:
|
||||
description: Folder to use for testing
|
||||
required: true
|
||||
log-dir:
|
||||
description: Folder to store test log files
|
||||
required: true
|
||||
on-collision:
|
||||
description: Value for the --collisions flag
|
||||
required: false
|
||||
default: "replace"
|
||||
with-export:
|
||||
description: Runs export tests when true
|
||||
required: false
|
||||
default: false
|
||||
category:
|
||||
description: category of data for given service
|
||||
required: false
|
||||
|
||||
outputs:
|
||||
backup-id:
|
||||
value: ${{ steps.backup.outputs.result }}
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Backup ${{ inputs.service }} ${{ inputs.kind }}
|
||||
id: backup
|
||||
shell: bash
|
||||
working-directory: src
|
||||
run: |
|
||||
echo "---------------------------"
|
||||
echo Backup ${{ inputs.service }} ${{ inputs.kind }}
|
||||
echo "---------------------------"
|
||||
set -euo pipefail
|
||||
CATEGORY_SUFFIX=""
|
||||
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
|
||||
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-backup-${{inputs.kind }}.log
|
||||
./corso backup create '${{ inputs.service }}' \
|
||||
--no-stats --hide-progress --json \
|
||||
${{ inputs.backup-args }} |
|
||||
tee /dev/stderr | # for printing logs
|
||||
jq -r '.[0] | .id' |
|
||||
sed 's/^/result=/' |
|
||||
tee $GITHUB_OUTPUT
|
||||
|
||||
- name: Restore ${{ inputs.service }} ${{ inputs.kind }}
|
||||
if: inputs.restore-args
|
||||
id: restore
|
||||
shell: bash
|
||||
working-directory: src
|
||||
run: |
|
||||
echo "---------------------------"
|
||||
echo Restore ${{ inputs.service }} ${{ inputs.kind }}
|
||||
echo "---------------------------"
|
||||
set -euo pipefail
|
||||
CATEGORY_SUFFIX=""
|
||||
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
|
||||
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-restore-${{inputs.kind }}.log
|
||||
./corso restore '${{ inputs.service }}' \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
--collisions ${{ inputs.on-collision }} \
|
||||
${{ inputs.restore-args }} \
|
||||
--backup '${{ steps.backup.outputs.result }}' \
|
||||
2>&1 |
|
||||
tee /tmp/corsologs |
|
||||
grep -i -e 'Restoring to folder ' |
|
||||
sed "s/Restoring to folder /result=/" |
|
||||
tee $GITHUB_OUTPUT
|
||||
|
||||
cat /tmp/corsologs
|
||||
|
||||
- name: Check restore ${{ inputs.service }} ${{ inputs.kind }}
|
||||
if: inputs.restore-args
|
||||
shell: bash
|
||||
working-directory: src
|
||||
env:
|
||||
SANITY_TEST_RESTORE_CONTAINER: ${{ steps.restore.outputs.result }}
|
||||
SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }}
|
||||
SANITY_BACKUP_ID: ${{ inputs.backup-id }}
|
||||
# lists are not restored to a different folder. they get created adjacent to their originals
|
||||
# hence SANITY_TEST_RESTORE_CONTAINER_PREFIX is necessary to differentiate restored from original
|
||||
SANITY_TEST_RESTORE_CONTAINER_PREFIX: ${{ steps.restore.outputs.result }}
|
||||
SANITY_TEST_CATEGORY: ${{ inputs.category }}
|
||||
run: |
|
||||
echo "---------------------------"
|
||||
echo Sanity Test Restore ${{ inputs.service }} ${{ inputs.kind }}
|
||||
echo "---------------------------"
|
||||
CATEGORY_SUFFIX=""
|
||||
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
|
||||
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-validate-${{inputs.kind }}.log
|
||||
./sanity-test restore ${{ inputs.service }}
|
||||
|
||||
- name: Export ${{ inputs.service }} ${{ inputs.kind }}
|
||||
if: ${{ inputs.with-export == 'true' }}
|
||||
id: export
|
||||
shell: bash
|
||||
working-directory: src
|
||||
run: |
|
||||
echo "---------------------------"
|
||||
echo Export ${{ inputs.service }} ${{ inputs.kind }}
|
||||
echo "---------------------------"
|
||||
set -euo pipefail
|
||||
CATEGORY_SUFFIX=""
|
||||
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
|
||||
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-restore-${{inputs.kind }}.log
|
||||
./corso export '${{ inputs.service }}' \
|
||||
/tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }} \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
${{ inputs.export-args }} \
|
||||
--backup '${{ steps.backup.outputs.result }}'
|
||||
|
||||
cat /tmp/corsologs
|
||||
|
||||
- name: Check export ${{ inputs.service }} ${{ inputs.kind }}
|
||||
if: ${{ inputs.with-export == 'true' }}
|
||||
shell: bash
|
||||
working-directory: src
|
||||
env:
|
||||
SANITY_TEST_RESTORE_CONTAINER: /tmp/export-${{ inputs.service }}${{ inputs.category && '-' }}${{ inputs.category }}-${{ inputs.kind }}
|
||||
SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }}
|
||||
SANITY_BACKUP_ID: ${{ inputs.backup-id }}
|
||||
# applies only for sharepoint lists
|
||||
SANITY_TEST_RESTORE_CONTAINER_PREFIX: ${{ steps.restore.outputs.result }}
|
||||
SANITY_TEST_CATEGORY: ${{ inputs.category }}
|
||||
run: |
|
||||
echo "---------------------------"
|
||||
echo Sanity-Test Export ${{ inputs.service }} ${{ inputs.kind }}
|
||||
echo "---------------------------"
|
||||
CATEGORY_SUFFIX=""
|
||||
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
|
||||
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-validate-${{inputs.kind }}.log
|
||||
./sanity-test export ${{ inputs.service }}
|
||||
|
||||
- name: Export archive ${{ inputs.service }} ${{ inputs.kind }}
|
||||
if: ${{ inputs.with-export == 'true' }}
|
||||
id: export-archive
|
||||
shell: bash
|
||||
working-directory: src
|
||||
run: |
|
||||
echo "---------------------------"
|
||||
echo Export Archive ${{ inputs.service }} ${{ inputs.kind }}
|
||||
echo "---------------------------"
|
||||
set -euo pipefail
|
||||
CATEGORY_SUFFIX=""
|
||||
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
|
||||
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-restore-${{inputs.kind }}.log
|
||||
./corso export '${{ inputs.service }}' \
|
||||
/tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }}-archive \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
--archive \
|
||||
${{ inputs.export-args }} \
|
||||
--backup '${{ steps.backup.outputs.result }}'
|
||||
|
||||
unzip /tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }}-archive/*.zip \
|
||||
-d /tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }}-unzipped
|
||||
cat /tmp/corsologs
|
||||
|
||||
- name: Check archive export ${{ inputs.service }} ${{ inputs.kind }}
|
||||
if: ${{ inputs.with-export == 'true' }}
|
||||
shell: bash
|
||||
working-directory: src
|
||||
env:
|
||||
SANITY_TEST_RESTORE_CONTAINER: /tmp/export-${{ inputs.service }}${{ inputs.category && '-' }}${{ inputs.category }}-${{inputs.kind }}-unzipped
|
||||
SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }}
|
||||
SANITY_BACKUP_ID: ${{ inputs.backup-id }}
|
||||
# applies only for sharepoint lists
|
||||
SANITY_TEST_RESTORE_CONTAINER_PREFIX: ${{ steps.restore.outputs.result }}
|
||||
SANITY_TEST_CATEGORY: ${{ inputs.category }}
|
||||
run: |
|
||||
echo "---------------------------"
|
||||
echo Sanity-Test Export Archive ${{ inputs.service }} ${{ inputs.kind }}
|
||||
echo "---------------------------"
|
||||
CATEGORY_SUFFIX=""
|
||||
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
|
||||
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-validate-${{inputs.kind }}.log
|
||||
./sanity-test export ${{ inputs.service }}
|
||||
|
||||
- name: List ${{ inputs.service }} ${{ inputs.kind }}
|
||||
shell: bash
|
||||
working-directory: src
|
||||
run: |
|
||||
echo "---------------------------"
|
||||
echo Backup list ${{ inputs.service }} ${{ inputs.kind }}
|
||||
echo "---------------------------"
|
||||
set -euo pipefail
|
||||
CATEGORY_SUFFIX=""
|
||||
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
|
||||
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-${{ inputs.service }}${CATEGORY_SUFFIX}-list-${{inputs.kind }}.log
|
||||
./corso backup list ${{ inputs.service }} \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
2>&1 |
|
||||
tee /tmp/corso-backup-list.log
|
||||
|
||||
if ! grep -q ${{ steps.backup.outputs.result }} /tmp/corso-backup-list.log
|
||||
then
|
||||
echo "Unable to find backup from previous run in backup list"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: List item ${{ inputs.service }} ${{ inputs.kind }}
|
||||
shell: bash
|
||||
working-directory: src
|
||||
run: |
|
||||
echo "---------------------------"
|
||||
echo Backup List w/ Backup ${{ inputs.service }} ${{ inputs.kind }}
|
||||
echo "---------------------------"
|
||||
set -euo pipefail
|
||||
# Include category in the log file name if present
|
||||
CATEGORY_SUFFIX=""
|
||||
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
|
||||
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-list-${{ inputs.service }}${CATEGORY_SUFFIX}-single-${{inputs.kind }}.log
|
||||
./corso backup list ${{ inputs.service }} \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
--backup "${{ steps.backup.outputs.result }}" \
|
||||
2>&1 |
|
||||
tee /tmp/corso-backup-list-item.log
|
||||
|
||||
if ! grep -q ${{ steps.backup.outputs.result }} /tmp/corso-backup-list-item.log
|
||||
then
|
||||
echo "Unable to list previous backup"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
echo "---------------------------"
|
||||
echo Logging Results
|
||||
echo "---------------------------"
|
||||
|
||||
- name: Upload test log
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: "${{ inputs.service }}-${{ inputs.kind }}-logs"
|
||||
path: ${{ inputs.log-dir }}/*
|
||||
if-no-files-found: error
|
||||
retention-days: 14
|
||||
5
.github/actions/go-setup-cache/action.yml
vendored
@ -1,5 +1,4 @@
|
||||
name: Setup and Cache Golang
|
||||
description: Build golang binaries for later use in CI.
|
||||
|
||||
# clone of: https://github.com/magnetikonline/action-golang-cache/blob/main/action.yaml
|
||||
#
|
||||
@ -38,8 +37,8 @@ runs:
|
||||
- name: Determine Golang cache paths
|
||||
id: golang-path
|
||||
run: |
|
||||
echo "build=$(go env GOCACHE)" | tee -a $GITHUB_OUTPUT
|
||||
echo "module=$(go env GOMODCACHE)" | tee -a $GITHUB_OUTPUT
|
||||
echo "::set-output name=build::$(go env GOCACHE)"
|
||||
echo "::set-output name=module::$(go env GOMODCACHE)"
|
||||
shell: bash
|
||||
|
||||
- name: Setup Golang cache
|
||||
|
||||
76
.github/actions/publish-binary/action.yml
vendored
@ -1,76 +0,0 @@
|
||||
name: Publish Binary
|
||||
description: Publish binary artifacts.
|
||||
|
||||
inputs:
|
||||
version:
|
||||
description: Corso version to use for publishing
|
||||
required: true
|
||||
github_token:
|
||||
description: GitHub token for publishing
|
||||
required: true
|
||||
rudderstack_write_key:
|
||||
description: Write key for RudderStack
|
||||
required: true
|
||||
rudderstack_data_plane_url:
|
||||
description: Data plane URL for RudderStack
|
||||
required: true
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0 # needed to pull changelog
|
||||
|
||||
- name: Setup Golang with cache
|
||||
uses: magnetikonline/action-golang-cache@v4
|
||||
with:
|
||||
go-version-file: src/go.mod
|
||||
|
||||
- name: Mark snapshot release
|
||||
shell: bash
|
||||
if: ${{ !startsWith(github.ref , 'refs/tags/') }}
|
||||
run: |
|
||||
echo "grflags=--snapshot" >> $GITHUB_ENV
|
||||
|
||||
- name: Run GoReleaser
|
||||
uses: goreleaser/goreleaser-action@v4
|
||||
with:
|
||||
version: latest
|
||||
args: release --clean --timeout 500m --parallelism 1 ${{ env.grflags }}
|
||||
workdir: src
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ inputs.github_token }}
|
||||
RUDDERSTACK_CORSO_WRITE_KEY: ${{ inputs.rudderstack_write_key }}
|
||||
RUDDERSTACK_CORSO_DATA_PLANE_URL: ${{ inputs.rudderstack_data_plane_url }}
|
||||
CORSO_VERSION: ${{ inputs.version }}
|
||||
|
||||
- name: Upload darwin arm64
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: corso_Darwin_arm64
|
||||
path: src/dist/corso_darwin_arm64/corso
|
||||
|
||||
- name: Upload linux arm64
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: corso_Linux_arm64
|
||||
path: src/dist/corso_linux_arm64/corso
|
||||
|
||||
- name: Upload darwin amd64
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: corso_Darwin_amd64
|
||||
path: src/dist/corso_darwin_amd64_v1/corso
|
||||
|
||||
- name: Upload linux amd64
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: corso_Linux_amd64
|
||||
path: src/dist/corso_linux_amd64_v1/corso
|
||||
|
||||
- name: Upload windows amd64
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: corso_Windows_amd64
|
||||
path: src/dist/corso_windows_amd64_v1/corso.exe
|
||||
47
.github/actions/publish-website/action.yml
vendored
@ -1,47 +0,0 @@
|
||||
name: Publish Website
|
||||
description: Publish website artifacts.
|
||||
|
||||
inputs:
|
||||
aws-iam-role:
|
||||
description: IAM role for connecting to AWS
|
||||
bucket:
|
||||
description: Bucket to push the website files
|
||||
cfid:
|
||||
description: Cloudfront ID for invalidation
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- uses: actions/download-artifact@master
|
||||
name: Download website from build step
|
||||
with:
|
||||
name: website
|
||||
path: website/build
|
||||
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@v2
|
||||
with:
|
||||
role-to-assume: ${{ inputs.aws-iam-role }}
|
||||
role-session-name: integration-testing
|
||||
aws-region: us-east-1
|
||||
|
||||
- name: Add robots.txt
|
||||
if: github.ref == 'refs/heads/asset-validation-explore'
|
||||
working-directory: website
|
||||
shell: bash
|
||||
run: |
|
||||
printf 'User-agent: *\nDisallow: /' > build/robots.txt
|
||||
|
||||
- name: Push website
|
||||
working-directory: website
|
||||
shell: bash
|
||||
run: |
|
||||
aws s3 sync build "s3://${{ inputs.bucket }}" --delete
|
||||
|
||||
- name: Invalidate cloudfront
|
||||
working-directory: website
|
||||
shell: bash
|
||||
run: |
|
||||
aws cloudfront create-invalidation --distribution-id ${{ inputs.cfid }} --paths "/*"
|
||||
121
.github/actions/purge-m365-data/action.yml
vendored
@ -1,121 +0,0 @@
|
||||
name: Purge M365 User Data
|
||||
description: Deletes M365 data generated during CI tests.
|
||||
|
||||
# Hard deletion of an m365 user's data. Our CI processes create a lot
|
||||
# of data churn (creation and immediate deletion) of files, the likes
|
||||
# of which wouldn't otherwise be seen by users of the system. Standard
|
||||
# APIs don't have the tooling to gut out all the cruft which we accrue
|
||||
# in microsoft's hidden nooks and secret crannies. A manual, SOAPy
|
||||
# exorcism is the only way.
|
||||
#
|
||||
# The script focuses on cleaning up the following:
|
||||
# * All folders, descending from the exchange root, of a given prefix.
|
||||
# * All folders in PersonMetadata
|
||||
# * All already soft-deleted items
|
||||
# * All folders under recoverable items
|
||||
|
||||
inputs:
|
||||
user:
|
||||
description: User whose data is to be purged.
|
||||
site:
|
||||
description: Sharepoint site where data is to be purged.
|
||||
libraries:
|
||||
description: List of library names within the site where data is to be purged.
|
||||
library-prefix:
|
||||
description: List of library names within the site where the library will get deleted entirely.
|
||||
folder-prefix:
|
||||
description: Name of the folder to be purged. If falsy, will purge the set of static, well known folders instead.
|
||||
older-than:
|
||||
description: Minimum-age of folders to be deleted.
|
||||
azure-client-id:
|
||||
description: Secret value for AZURE_CLIENT_ID
|
||||
azure-client-secret:
|
||||
description: Secret value for AZURE_CLIENT_SECRET
|
||||
azure-pnp-client-id:
|
||||
description: Secret value of AZURE_PNP_CLIENT_ID
|
||||
azure-pnp-client-cert:
|
||||
description: Base64 encoded private certificate for the azure-pnp-client-id (Secret value of AZURE_PNP_CLIENT_CERT)
|
||||
azure-tenant-id:
|
||||
description: Secret value of AZURE_TENANT_ID
|
||||
m365-admin-user:
|
||||
description: Secret value for M365_TENANT_ADMIN_USER
|
||||
m365-admin-password:
|
||||
description: Secret value for M365_TENANT_ADMIN_PASSWORD
|
||||
tenant-domain:
|
||||
description: The domain of the tenant (ex. 10rqc2.onmicrosoft.com)
|
||||
required: true
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
|
||||
################################################################################################################
|
||||
# Exchange
|
||||
#
|
||||
|
||||
- name: Run the Exchange purge scripts for user
|
||||
if: ${{ inputs.user != '' }}
|
||||
shell: pwsh
|
||||
working-directory: ./src/cmd/purge/scripts
|
||||
env:
|
||||
AZURE_CLIENT_ID: ${{ inputs.azure-client-id }}
|
||||
AZURE_CLIENT_SECRET: ${{ inputs.azure-client-secret }}
|
||||
AZURE_TENANT_ID: ${{ inputs.azure-tenant-id }}
|
||||
run: |
|
||||
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
|
||||
{
|
||||
if (./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
# TODO(ashmrtn): Re-enable when we figure out errors we're seeing with Get-Mailbox call.
|
||||
#- name: Reset retention for all mailboxes to 0
|
||||
# if: ${{ inputs.user == '' }}
|
||||
# shell: pwsh
|
||||
# working-directory: ./src/cmd/purge/scripts
|
||||
# env:
|
||||
# M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
|
||||
# M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
|
||||
# run: ./exchangeRetention.ps1
|
||||
|
||||
################################################################################################################
|
||||
# OneDrive
|
||||
#
|
||||
|
||||
- name: Run the OneDrive purge scripts for user
|
||||
if: ${{ inputs.user != '' }}
|
||||
shell: pwsh
|
||||
working-directory: ./src/cmd/purge/scripts
|
||||
env:
|
||||
AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
|
||||
AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
|
||||
TENANT_DOMAIN: ${{ inputs.tenant-domain }}
|
||||
run: |
|
||||
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
|
||||
{
|
||||
if (./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
################################################################################################################
|
||||
# Sharepoint
|
||||
#
|
||||
|
||||
- name: Run SharePoint purge script
|
||||
if: ${{ inputs.site != '' }}
|
||||
shell: pwsh
|
||||
working-directory: ./src/cmd/purge/scripts
|
||||
env:
|
||||
AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
|
||||
AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
|
||||
TENANT_DOMAIN: ${{ inputs.tenant-domain }}
|
||||
run: |
|
||||
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
|
||||
{
|
||||
if (./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
63
.github/actions/purge-m365-user-data/action.yml
vendored
Normal file
@ -0,0 +1,63 @@
|
||||
name: Purge M365 User Data
|
||||
|
||||
# Hard deletion of an m365 user's data. Our CI processes create a lot
|
||||
# of data churn (creation and immediate deletion) of files, the likes
|
||||
# of which wouldn't otherwise be seen by users of the system. Standard
|
||||
# APIs don't have the tooling to gut out all the cruft which we accrue
|
||||
# in microsoft's hidden nooks and secret crannies. A manual, SOAPy
|
||||
# exorcism is the only way.
|
||||
#
|
||||
# The script focuses on cleaning up the following:
|
||||
# * All folders, descending from the exchange root, of a given prefix.
|
||||
# * All folders in PersonMetadata
|
||||
# * All already soft-deleted items
|
||||
# * All recoverable items in Audits
|
||||
# * All recoverable items in Purges
|
||||
|
||||
inputs:
|
||||
user:
|
||||
description: User whose data is to be purged.
|
||||
folder-prefix:
|
||||
description: Name of the folder to be purged. If falsy, will purge the set of static, well known folders instead.
|
||||
older-than:
|
||||
description: Minimum-age of folders to be deleted.
|
||||
azure-client-id:
|
||||
description: Secret value for AZURE_CLIENT_ID
|
||||
azure-client-secret:
|
||||
description: Secret value for AZURE_CLIENT_SECRET
|
||||
azure-tenant-id:
|
||||
description: Secret value for AZURE_TENANT_ID
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
|
||||
- name: Run the folder-matrix purge script set
|
||||
if: ${{ inputs.folder-prefix != '' }}
|
||||
shell: pwsh
|
||||
working-directory: ./src/cmd/purge/scripts
|
||||
env:
|
||||
AZURE_CLIENT_ID: ${{ inputs.azure-client-id }}
|
||||
AZURE_CLIENT_SECRET: ${{ inputs.azure-client-secret }}
|
||||
AZURE_TENANT_ID: ${{ inputs.azure-tenant-id }}
|
||||
run: |
|
||||
./foldersAndItems.ps1 -WellKnownRoot root -User ${{ inputs.user }} -FolderPrefixPurge ${{ inputs.folder-prefix }} -FolderBeforePurge ${{ inputs.older-than }}
|
||||
|
||||
- name: Run the static purge script set
|
||||
if: ${{ inputs.folder-prefix == '' }}
|
||||
shell: pwsh
|
||||
working-directory: ./src/cmd/purge/scripts
|
||||
env:
|
||||
AZURE_CLIENT_ID: ${{ inputs.azure-client-id }}
|
||||
AZURE_CLIENT_SECRET: ${{ inputs.azure-client-secret }}
|
||||
AZURE_TENANT_ID: ${{ inputs.azure-tenant-id }}
|
||||
# powershell doesn't like multiline commands, each of these must be on a single line
|
||||
run: |
|
||||
./foldersAndItems.ps1 -WellKnownRoot root -User ${{ inputs.user }} -FolderNamePurge PersonMetadata
|
||||
./foldersAndItems.ps1 -WellKnownRoot deleteditems -User ${{ inputs.user }}
|
||||
./foldersAndItems.ps1 -WellKnownRoot recoverableitemsroot -User ${{ inputs.user }} -FolderNamePurge Audits
|
||||
./foldersAndItems.ps1 -WellKnownRoot recoverableitemsroot -User ${{ inputs.user }} -FolderNamePurge Purges
|
||||
./foldersAndItems.ps1 -WellKnownRoot recoverableitemsroot -User ${{ inputs.user }} -FolderNamePurge Deletions
|
||||
|
||||
# possible future extensions
|
||||
# ./foldersAndItems.ps1 -WellKnownRoot recoverableitemsroot -User ${{ inputs.user }} -FolderNamePurge "Calendar Logging"
|
||||
92
.github/actions/teams-message/action.yml
vendored
@ -1,92 +0,0 @@
|
||||
name: Send a message to Teams
|
||||
description: Send messages to communication apps.
|
||||
|
||||
inputs:
|
||||
msg:
|
||||
description: The teams message text
|
||||
teams_url:
|
||||
description: passthrough for secrets.TEAMS_CORSO_CI_WEBHOOK_URL
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: set github ref
|
||||
shell: bash
|
||||
run: |
|
||||
echo "github_reference=${{ github.ref }}" >> $GITHUB_ENV
|
||||
|
||||
- name: trim github ref
|
||||
shell: bash
|
||||
run: |
|
||||
echo "trimmed_ref=${github_reference#refs/}" >> $GITHUB_ENV
|
||||
|
||||
- name: build urls
|
||||
shell: bash
|
||||
run: |
|
||||
echo "logurl=$(printf 'https://github.com/alcionai/corso/actions/runs/%s' ${{ github.run_id }})" >> $GITHUB_ENV
|
||||
echo "commiturl=$(printf 'https://github.com/alcionai/corso/commit/%s' ${{ github.sha }})" >> $GITHUB_ENV
|
||||
echo "refurl=$(printf 'https://github.com/alcionai/corso/%s' ${{ env.trimmed_ref }})" >> $GITHUB_ENV
|
||||
|
||||
- name: use url or blank val
|
||||
shell: bash
|
||||
run: |
|
||||
echo "STEP=${{ env.trimmed_ref || '' }}" >> $GITHUB_ENV
|
||||
echo "JOB=${{ github.job || '' }}" >> $GITHUB_ENV
|
||||
echo "LOGS=${{ github.run_id && env.logurl || '-' }}" >> $GITHUB_ENV
|
||||
echo "COMMIT=${{ github.sha && env.commiturl || '-' }}" >> $GITHUB_ENV
|
||||
echo "REF=${{ env.trimmed_ref && env.refurl || '-' }}" >> $GITHUB_ENV
|
||||
|
||||
- name: Send JSON payload to Teams Webhook
|
||||
shell: bash
|
||||
run: |
|
||||
curl -X POST \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{
|
||||
"type":"message",
|
||||
"attachments":[
|
||||
{
|
||||
"contentType":"application/vnd.microsoft.card.adaptive",
|
||||
"contentUrl":null,
|
||||
"content":{
|
||||
"$schema":"http://adaptivecards.io/schemas/adaptive-card.json",
|
||||
"type":"AdaptiveCard",
|
||||
"body": [
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"size": "Medium",
|
||||
"weight": "Bolder",
|
||||
"text": "${{ inputs.msg }}",
|
||||
"color": "Attention"
|
||||
},
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"text": "${{ env.JOB }} :: ${{ env.STEP }}",
|
||||
"wrap": true
|
||||
}
|
||||
],
|
||||
"actions": [
|
||||
{
|
||||
"type": "Action.OpenUrl",
|
||||
"title": "Action",
|
||||
"url": "${{ env.LOGS }}"
|
||||
},
|
||||
{
|
||||
"type": "Action.OpenUrl",
|
||||
"title": "Commit",
|
||||
"url": "${{ env.COMMIT }}"
|
||||
},
|
||||
{
|
||||
"type": "Action.OpenUrl",
|
||||
"title": "Ref",
|
||||
"url": "${{ env.REF }}"
|
||||
}
|
||||
],
|
||||
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
|
||||
"version": "1.5"
|
||||
}
|
||||
}
|
||||
]
|
||||
}' \
|
||||
${{ inputs.teams_url }}
|
||||
58
.github/actions/website-linting/action.yml
vendored
@ -1,58 +0,0 @@
|
||||
name: Lint Website
|
||||
description: Lint website content.
|
||||
|
||||
inputs:
|
||||
version:
|
||||
description: Corso version to use for publishing
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Setup Golang with cache
|
||||
uses: magnetikonline/action-golang-cache@v3
|
||||
with:
|
||||
go-version-file: src/go.mod
|
||||
|
||||
- name: Generate CLI Docs
|
||||
working-directory: ./src
|
||||
shell: bash
|
||||
run: |
|
||||
go run ./cmd/mdgen/mdgen.go generate
|
||||
|
||||
# migrate generated md files into /website/docs/cli
|
||||
- name: Move CLI .md to Docs
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p ./website/docs/cli
|
||||
mv ./src/cmd/mdgen/cli_markdown/* ./website/docs/cli/
|
||||
rm -R ./src/cmd/mdgen/cli_markdown/
|
||||
|
||||
- name: Install dependencies for website lint
|
||||
shell: bash
|
||||
run: |
|
||||
wget --quiet https://github.com/errata-ai/vale/releases/download/v2.20.2/vale_2.20.2_Linux_64-bit.tar.gz # NOTE: update in Dockerfile when updating
|
||||
mkdir bin && tar -xvzf vale_2.20.2_Linux_64-bit.tar.gz -C bin
|
||||
echo "$PWD/bin" >> $GITHUB_PATH
|
||||
npm i -g markdownlint-cli@0.32.2 # NOTE: update in Dockerfile when updating
|
||||
|
||||
- name: Run website lint
|
||||
shell: bash
|
||||
run: |
|
||||
cd website && make -o genclidocs localcheck
|
||||
|
||||
- name: Build website
|
||||
shell: bash
|
||||
env:
|
||||
CORSO_VERSION: ${{ inputs.version }}
|
||||
run: |
|
||||
cd website &&
|
||||
npm ci &&
|
||||
npm run build
|
||||
|
||||
- uses: actions/upload-artifact@master
|
||||
name: Upload website as artifacts
|
||||
with:
|
||||
name: website
|
||||
path: website/build
|
||||
12
.github/dependabot.yml
vendored
@ -24,6 +24,18 @@ updates:
|
||||
open-pull-requests-limit: 50
|
||||
rebase-strategy: "disabled"
|
||||
|
||||
# Maintain dependencies for npm - docs
|
||||
- package-ecosystem: "npm"
|
||||
directory: "docs/"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
reviewers:
|
||||
- "alcionai/corso-maintainers"
|
||||
- "ntolia"
|
||||
- "gmatev"
|
||||
open-pull-requests-limit: 50
|
||||
rebase-strategy: "disabled"
|
||||
|
||||
# Maintain dependencies for go - src
|
||||
- package-ecosystem: "gomod"
|
||||
directory: "src/"
|
||||
|
||||
20
.github/pull_request_template.md
vendored
@ -1,29 +1,23 @@
|
||||
<!-- PR description-->
|
||||
## Description
|
||||
|
||||
---
|
||||
<!-- Insert PR description-->
|
||||
|
||||
#### Does this PR need a docs update or release note?
|
||||
|
||||
- [ ] :white_check_mark: Yes, it's included
|
||||
- [ ] :clock1: Yes, but in a later PR
|
||||
- [ ] :no_entry: No
|
||||
|
||||
#### Type of change
|
||||
## Type of change
|
||||
|
||||
<!--- Please check the type of change your PR introduces: --->
|
||||
- [ ] :sunflower: Feature
|
||||
- [ ] :bug: Bugfix
|
||||
- [ ] :world_map: Documentation
|
||||
- [ ] :robot: Supportability/Tests
|
||||
- [ ] :robot: Test
|
||||
- [ ] :computer: CI/Deployment
|
||||
- [ ] :broom: Tech Debt/Cleanup
|
||||
- [ ] :hamster: Trivial/Minor
|
||||
|
||||
#### Issue(s)
|
||||
## Issue(s)
|
||||
|
||||
<!-- Can reference multiple issues. Use one of the following "magic words" - "closes, fixes" to auto-close the Github issue. -->
|
||||
* #<issue>
|
||||
|
||||
#### Test Plan
|
||||
## Test Plan
|
||||
|
||||
<!-- How will this be tested prior to merging.-->
|
||||
- [ ] :muscle: Manual
|
||||
|
||||
34
.github/workflows/_filechange_checker.yml
vendored
@ -6,12 +6,12 @@ on:
|
||||
srcfileschanged:
|
||||
description: "'true' if src/** or .github/workflows/** files have changed in the branch"
|
||||
value: ${{ jobs.file-change-check.outputs.srcfileschanged }}
|
||||
docfileschanged:
|
||||
description: "'true' if docs/** or src/** or .github/workflows/** files have changed in the branch"
|
||||
value: ${{ jobs.file-change-check.outputs.docfileschanged }}
|
||||
websitefileschanged:
|
||||
description: "'true' if websites/** or .github/workflows/** files have changed in the branch"
|
||||
value: ${{ jobs.file-change-check.outputs.websitefileschanged }}
|
||||
actionsfileschanged:
|
||||
description: "'true' if .github/actions/** or .github/workflows/** files have changed in the branch"
|
||||
value: ${{ jobs.file-change-check.outputs.actionsfileschanged }}
|
||||
|
||||
jobs:
|
||||
file-change-check:
|
||||
@ -21,20 +21,22 @@ jobs:
|
||||
pull-requests: read
|
||||
outputs:
|
||||
srcfileschanged: ${{ steps.srcchecker.outputs.srcfileschanged }}
|
||||
docfileschanged: ${{ steps.docchecker.outputs.docfileschanged }}
|
||||
websitefileschanged: ${{ steps.websitechecker.outputs.websitefileschanged }}
|
||||
actionsfileschanged: ${{ steps.actionschecker.outputs.actionsfileschanged }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
# only run CI tests if the src folder or workflow actions have changed
|
||||
- name: Check for file changes in src/ or .github/workflows/
|
||||
uses: dorny/paths-filter@v3
|
||||
uses: dorny/paths-filter@v2
|
||||
id: dornycheck
|
||||
with:
|
||||
list-files: json
|
||||
filters: |
|
||||
src:
|
||||
- 'src/**'
|
||||
docs:
|
||||
- 'docs/**'
|
||||
website:
|
||||
- 'website/**'
|
||||
actions:
|
||||
@ -46,18 +48,18 @@ jobs:
|
||||
if: steps.dornycheck.outputs.src == 'true' || steps.dornycheck.outputs.actions == 'true'
|
||||
run: |
|
||||
echo "src or workflow file changes occurred"
|
||||
echo srcfileschanged=true >> $GITHUB_OUTPUT
|
||||
echo ::set-output name=srcfileschanged::true
|
||||
|
||||
- name: Check dorny for changes in docs related filepaths
|
||||
id: docchecker
|
||||
if: steps.dornycheck.outputs.src == 'true' || steps.dornycheck.outputs.docs == 'true' || steps.dornycheck.outputs.actions == 'true'
|
||||
run: |
|
||||
echo "docs, src or workflow file changes occurred"
|
||||
echo ::set-output name=docfileschanged::true
|
||||
|
||||
- name: Check dorny for changes in website related filepaths
|
||||
id: websitechecker
|
||||
if: steps.dornycheck.outputs.src == 'true' || steps.dornycheck.outputs.website == 'true' || steps.dornycheck.outputs.actions == 'true'
|
||||
if: steps.dornycheck.outputs.website == 'true' || steps.dornycheck.outputs.actions == 'true'
|
||||
run: |
|
||||
echo "website or workflow file changes occurred"
|
||||
echo websitefileschanged=true >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Check dorny for changes in actions filepaths
|
||||
id: actionschecker
|
||||
if: steps.dornycheck.outputs.actions == 'true'
|
||||
run: |
|
||||
echo "actions file changes occurred"
|
||||
echo actionsfileschanged=true >> $GITHUB_OUTPUT
|
||||
echo ::set-output name=websitefileschanged::true
|
||||
46
.github/workflows/accSelector.yaml
vendored
@ -1,46 +0,0 @@
|
||||
name: SetM365AppAcc
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
outputs:
|
||||
client_app_slot:
|
||||
value: ${{ jobs.GetM365App.outputs.client_app_slot }}
|
||||
client_id_env:
|
||||
value: ${{ jobs.GetM365App.outputs.client_id_env }}
|
||||
client_secret_env:
|
||||
value: ${{ jobs.GetM365App.outputs.client_secret_env }}
|
||||
|
||||
jobs:
|
||||
GetM365App:
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
client_app_slot: ${{ steps.roundrobin.outputs.CLIENT_APP_SLOT }}
|
||||
client_id_env: ${{ steps.roundrobin.outputs.CLIENT_ID_ENV }}
|
||||
client_secret_env: ${{ steps.roundrobin.outputs.CLIENT_SECRET_ENV }}
|
||||
steps:
|
||||
- name: Figure out which client id to use
|
||||
id: roundrobin
|
||||
run: |
|
||||
slot=$((GITHUB_RUN_NUMBER % 4))
|
||||
echo "CLIENT_APP_SLOT=$slot" >> $GITHUB_OUTPUT
|
||||
|
||||
case $slot in
|
||||
|
||||
0)
|
||||
echo "CLIENT_ID_ENV=CLIENT_ID" >> $GITHUB_OUTPUT
|
||||
echo "CLIENT_SECRET_ENV=CLIENT_SECRET" >> $GITHUB_OUTPUT
|
||||
;;
|
||||
1)
|
||||
echo "CLIENT_ID_ENV=CLIENT_ID_2" >> $GITHUB_OUTPUT
|
||||
echo "CLIENT_SECRET_ENV=CLIENT_SECRET_2" >> $GITHUB_OUTPUT
|
||||
;;
|
||||
2)
|
||||
echo "CLIENT_ID_ENV=CLIENT_ID_3" >> $GITHUB_OUTPUT
|
||||
echo "CLIENT_SECRET_ENV=CLIENT_SECRET_3" >> $GITHUB_OUTPUT
|
||||
;;
|
||||
3)
|
||||
echo "CLIENT_ID_ENV=CLIENT_ID_4" >> $GITHUB_OUTPUT
|
||||
echo "CLIENT_SECRET_ENV=CLIENT_SECRET_4" >> $GITHUB_OUTPUT
|
||||
;;
|
||||
esac
|
||||
22
.github/workflows/auto-merge.yml
vendored
@ -1,4 +1,3 @@
|
||||
# See https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#common-dependabot-automations
|
||||
name: auto-merge
|
||||
|
||||
on:
|
||||
@ -6,24 +5,11 @@ on:
|
||||
paths-ignore:
|
||||
- "src/**" # prevent auto-merge for go dependencies
|
||||
|
||||
permissions:
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
auto-approve-label:
|
||||
auto-merge:
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||
steps:
|
||||
- name: Dependabot metadata
|
||||
id: metadata
|
||||
uses: dependabot/fetch-metadata@v1
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ahmadnassri/action-dependabot-auto-merge@v2 # https://github.com/marketplace/actions/dependabot-auto-merge
|
||||
with:
|
||||
github-token: "${{ secrets.GITHUB_TOKEN }}"
|
||||
- name: Enable auto-merge for Dependabot PRs
|
||||
if: ${{steps.metadata.outputs.update-type == 'version-update:semver-minor'}}
|
||||
run: |
|
||||
gh pr edit "$PR_URL" --add-label "mergequeue"
|
||||
gh pr review --approve "$PR_URL"
|
||||
env:
|
||||
PR_URL: ${{github.event.pull_request.html_url}}
|
||||
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
|
||||
github-token: ${{ secrets.DEPENDABOT_TOKEN }}
|
||||
|
||||
44
.github/workflows/binary-publish.yml
vendored
@ -1,44 +0,0 @@
|
||||
name: Publish binary
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
SetEnv:
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
version: ${{ steps.version.outputs.version }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Get version string
|
||||
id: version
|
||||
run: |
|
||||
if ${{ startsWith(github.ref, 'refs/tags/') }}; then
|
||||
echo "version=$(git describe --exact-match --tags $(git rev-parse HEAD))" | tee -a $GITHUB_OUTPUT
|
||||
else
|
||||
echo "version=$(echo unreleased-$(git rev-parse --short HEAD))" | tee -a $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
Publish-Binary:
|
||||
needs: [SetEnv]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Publish Binary
|
||||
uses: ./.github/actions/publish-binary
|
||||
with:
|
||||
version: ${{ needs.SetEnv.outputs.version }}
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
rudderstack_write_key: ${{ secrets.RUDDERSTACK_CORSO_WRITE_KEY }}
|
||||
rudderstack_data_plane_url: ${{ secrets.RUDDERSTACK_CORSO_DATA_PLANE_URL }}
|
||||
|
||||
- name: Notify failure in teams
|
||||
if: failure()
|
||||
uses: ./.github/actions/teams-message
|
||||
with:
|
||||
msg: "[CORSO FAILED] Publishing Binary"
|
||||
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
|
||||
756
.github/workflows/ci.yml
vendored
@ -1,6 +1,4 @@
|
||||
name: Build/Release Corso
|
||||
env:
|
||||
IMAGE_NAME: ghcr.io/alcionai/corso
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
@ -40,332 +38,124 @@ jobs:
|
||||
run:
|
||||
working-directory: src
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
# single setup and sum cache handling here.
|
||||
# the results will cascade onto both testing and linting.
|
||||
- name: Setup Golang with cache
|
||||
uses: ./.github/actions/go-setup-cache
|
||||
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.websitefileschanged == 'true'
|
||||
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.docfileschanged == 'true'
|
||||
with:
|
||||
go-version-file: src/go.mod
|
||||
|
||||
# SetM365App will decide which M365 app to use for this CI run
|
||||
SetM365App:
|
||||
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
|
||||
|
||||
SetEnv:
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
environment: ${{ steps.environment.outputs.environment }}
|
||||
version: ${{ steps.version.outputs.version }}
|
||||
website-bucket: ${{ steps.website-bucket.outputs.website-bucket }}
|
||||
website-cfid: ${{ steps.website-cfid.outputs.website-cfid }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Figure out environment
|
||||
id: environment
|
||||
run: |
|
||||
if ${{ startsWith(github.ref, 'refs/tags/') }}; then
|
||||
echo "environment=Production" | tee -a $GITHUB_OUTPUT
|
||||
echo "set-output name=environment::Production"
|
||||
echo "::set-output name=environment::Production"
|
||||
else
|
||||
echo "environment=Testing" | tee -a $GITHUB_OUTPUT
|
||||
echo "set-output name=environment::Testing"
|
||||
echo "::set-output name=environment::Testing"
|
||||
fi
|
||||
|
||||
- name: Get version string
|
||||
id: version
|
||||
run: |
|
||||
if ${{ startsWith(github.ref, 'refs/tags/') }}; then
|
||||
echo "version=$(git describe --exact-match --tags $(git rev-parse HEAD))" | tee -a $GITHUB_OUTPUT
|
||||
echo "set-output name=version::$(git describe --exact-match --tags $(git rev-parse HEAD))"
|
||||
echo "::set-output name=version::$(git describe --exact-match --tags $(git rev-parse HEAD))"
|
||||
else
|
||||
echo "version=$(echo unreleased-$(git rev-parse --short HEAD))" | tee -a $GITHUB_OUTPUT
|
||||
echo "set-output name=version::$(echo unreleased-$(git rev-parse --short HEAD))"
|
||||
echo "::set-output name=version::$(echo unreleased-$(git rev-parse --short HEAD))"
|
||||
fi
|
||||
|
||||
- name: Get bucket name for website
|
||||
id: website-bucket
|
||||
run: |
|
||||
if ${{ startsWith(github.ref, 'refs/tags/') }}; then
|
||||
echo "website-bucket=corsobackup.io" | tee -a $GITHUB_OUTPUT
|
||||
else
|
||||
echo "website-bucket=test-corso-docs" | tee -a $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Get cfid for website
|
||||
id: website-cfid
|
||||
run: |
|
||||
if ${{ startsWith(github.ref, 'refs/tags/') }}; then
|
||||
echo "website-cfid=E1W9NGI9YTVZ1A" | tee -a $GITHUB_OUTPUT
|
||||
else
|
||||
echo "website-cfid=ESFTEIYTIP7Y3" | tee -a $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
# --- Website Linting -----------------------------------------------------------------------------------
|
||||
# --- Docs Linting -----------------------------------------------------------------------------------
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
|
||||
Website-Linting:
|
||||
Docs-Linting:
|
||||
needs: [Precheck, Checkout, SetEnv]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.websitefileschanged == 'true' # websitefileschanged also includes srcfileschanged
|
||||
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.docfileschanged == 'true' # docsfileschanged also includes srcfileschanged
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Lint Website
|
||||
uses: ./.github/actions/website-linting
|
||||
- name: Setup Golang with cache
|
||||
uses: magnetikonline/action-golang-cache@v3
|
||||
with:
|
||||
version: ${{ needs.SetEnv.outputs.version }}
|
||||
go-version-file: src/go.mod
|
||||
|
||||
- name: Generate CLI Docs
|
||||
working-directory: ./src
|
||||
run: |
|
||||
go run ./cmd/mdgen/mdgen.go generate
|
||||
|
||||
# migrate generated md files into /docs/docs/cli
|
||||
- name: Move CLI .md to Docs
|
||||
run: |
|
||||
mkdir -p ./docs/docs/cli
|
||||
mv ./src/cmd/mdgen/cli_markdown/* ./docs/docs/cli/
|
||||
rm -R ./src/cmd/mdgen/cli_markdown/
|
||||
|
||||
- name: Install dependencies for docs lint
|
||||
run: |
|
||||
wget https://github.com/errata-ai/vale/releases/download/v2.20.2/vale_2.20.2_Linux_64-bit.tar.gz # NOTE: update in Dockerfile when updating
|
||||
mkdir bin && tar -xvzf vale_2.20.2_Linux_64-bit.tar.gz -C bin
|
||||
echo "$PWD/bin" >> $GITHUB_PATH
|
||||
npm i -g markdownlint-cli@0.32.2 # NOTE: update in Dockerfile when updating
|
||||
|
||||
- name: Run docs lint
|
||||
env:
|
||||
CORSO_USE_DOCKER: -1 # prevent using docker inside makefile
|
||||
run: |
|
||||
cd docs && make -o genclidocs localcheck
|
||||
|
||||
- name: Build docs
|
||||
env:
|
||||
CORSO_VERSION: ${{ needs.SetEnv.outputs.version }}
|
||||
run: |
|
||||
cd docs &&
|
||||
npm ci &&
|
||||
npm run build
|
||||
|
||||
- uses: actions/upload-artifact@master
|
||||
name: Upload docs as artifacts
|
||||
with:
|
||||
name: docs
|
||||
path: docs/build
|
||||
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
# --- Integration and Unit Testing -------------------------------------------------------------------
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
|
||||
Test-Suite-Trusted:
|
||||
needs: [Precheck, Checkout, SetM365App]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 120
|
||||
if: (startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main') || (needs.precheck.outputs.srcfileschanged == 'true' && github.event.pull_request.head.repo.full_name == github.repository)
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src
|
||||
env:
|
||||
# Resolve the name of the secret that contains the Azure client ID/secret
|
||||
AZURE_CLIENT_ID_NAME: ${{ needs.SetM365App.outputs.client_id_env }}
|
||||
AZURE_CLIENT_SECRET_NAME: ${{ needs.SetM365App.outputs.client_secret_env }}
|
||||
CLIENT_APP_SLOT: ${{ needs.SetM365App.outputs.client_app_slot }}
|
||||
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-ci.log
|
||||
LOG_GRAPH_REQUESTS: true
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Golang with cache
|
||||
uses: magnetikonline/action-golang-cache@v4
|
||||
with:
|
||||
go-version-file: src/go.mod
|
||||
|
||||
- run: mkdir testlog
|
||||
|
||||
# Install gotestfmt
|
||||
- name: Set up gotestfmt
|
||||
run: go install github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
|
||||
|
||||
# AWS creds
|
||||
- name: Configure AWS credentials from Test account
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
|
||||
role-session-name: integration-testing
|
||||
aws-region: us-east-1
|
||||
|
||||
# run the tests
|
||||
- name: Integration Tests
|
||||
env:
|
||||
AZURE_CLIENT_ID: ${{ secrets[env.AZURE_CLIENT_ID_NAME] }}
|
||||
AZURE_CLIENT_SECRET: ${{ secrets[env.AZURE_CLIENT_SECRET_NAME] }}
|
||||
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
|
||||
CORSO_CI_TESTS: true
|
||||
CORSO_M365_TEST_USER_ID: ${{ vars.CORSO_M365_TEST_USER_ID }}
|
||||
CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
|
||||
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
|
||||
S3_BUCKET: ${{ secrets.CI_TESTS_S3_BUCKET }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
go test \
|
||||
-tags testing \
|
||||
-json \
|
||||
-v \
|
||||
-failfast \
|
||||
-p 1 \
|
||||
-timeout 20m \
|
||||
./... \
|
||||
2>&1 | tee ./testlog/gotest-ci.log | gotestfmt -hide successful-tests
|
||||
|
||||
# Upload the original go test output as an artifact for later review.
|
||||
- name: Upload test log
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ci-test-log
|
||||
path: src/testlog/*
|
||||
if-no-files-found: error
|
||||
retention-days: 14
|
||||
|
||||
Retention-Test-Suite-Trusted:
|
||||
needs: [Precheck, Checkout, SetM365App]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
if: (startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main') || (needs.precheck.outputs.srcfileschanged == 'true' && github.event.pull_request.head.repo.full_name == github.repository)
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src
|
||||
env:
|
||||
# Resolve the name of the secret that contains the Azure client ID/secret
|
||||
AZURE_CLIENT_ID_NAME: ${{ needs.SetM365App.outputs.client_id_env }}
|
||||
AZURE_CLIENT_SECRET_NAME: ${{ needs.SetM365App.outputs.client_secret_env }}
|
||||
CLIENT_APP_SLOT: ${{ needs.SetM365App.outputs.client_app_slot }}
|
||||
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-ci-retention.log
|
||||
LOG_GRAPH_REQUESTS: true
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Golang with cache
|
||||
uses: magnetikonline/action-golang-cache@v4
|
||||
with:
|
||||
go-version-file: src/go.mod
|
||||
|
||||
- run: mkdir testlog
|
||||
|
||||
# Install gotestfmt
|
||||
- name: Set up gotestfmt
|
||||
run: go install github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
|
||||
|
||||
# AWS creds
|
||||
- name: Configure AWS credentials from Test account
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
|
||||
role-session-name: integration-testing
|
||||
aws-region: us-east-1
|
||||
|
||||
# run the tests
|
||||
- name: Retention Tests
|
||||
env:
|
||||
AZURE_CLIENT_ID: ${{ secrets[env.AZURE_CLIENT_ID_NAME] }}
|
||||
AZURE_CLIENT_SECRET: ${{ secrets[env.AZURE_CLIENT_SECRET_NAME] }}
|
||||
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
|
||||
CORSO_RETENTION_TESTS: true
|
||||
CORSO_M365_TEST_USER_ID: ${{ vars.CORSO_M365_TEST_USER_ID }}
|
||||
CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
|
||||
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
|
||||
S3_BUCKET: ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
go test \
|
||||
-tags testing \
|
||||
-json \
|
||||
-v \
|
||||
-failfast \
|
||||
-p 1 \
|
||||
-timeout 10m \
|
||||
./... \
|
||||
2>&1 | tee ./testlog/gotest-ci.log | gotestfmt -hide successful-tests
|
||||
|
||||
# Upload the original go test output as an artifact for later review.
|
||||
- name: Upload test log
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ci-retention-test-log
|
||||
path: src/testlog/*
|
||||
if-no-files-found: error
|
||||
retention-days: 14
|
||||
|
||||
Unit-Test-Suite:
|
||||
needs: [Precheck, Checkout]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
if: needs.precheck.outputs.srcfileschanged == 'true'
|
||||
if: (startsWith(github.ref, 'refs/tags/') ||
|
||||
github.ref == 'refs/heads/main' ||
|
||||
needs.precheck.outputs.srcfileschanged == 'true') &&
|
||||
github.event.pull_request.head.repo.full_name == github.repository
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src
|
||||
env:
|
||||
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-unit.log
|
||||
LOG_GRAPH_REQUESTS: true
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Setup Golang with cache
|
||||
uses: magnetikonline/action-golang-cache@v4
|
||||
with:
|
||||
go-version-file: src/go.mod
|
||||
|
||||
- run: mkdir testlog
|
||||
|
||||
# Install gotestfmt
|
||||
- name: Set up gotestfmt
|
||||
run: go install github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
|
||||
|
||||
# run the tests
|
||||
- name: Unit Tests
|
||||
env:
|
||||
# Set these to a bad value so we don't accidentally fall back to
|
||||
# something elsewhere.
|
||||
CORSO_M365_TEST_USER_ID: 'foo'
|
||||
CORSO_SECONDARY_M365_TEST_USER_ID: 'foo'
|
||||
run: |
|
||||
set -euo pipefail
|
||||
go test \
|
||||
-tags testing \
|
||||
-json \
|
||||
-v \
|
||||
-failfast \
|
||||
-p 1 \
|
||||
-timeout 20m \
|
||||
./... \
|
||||
2>&1 | tee ./testlog/gotest-unit.log | gotestfmt -hide successful-tests
|
||||
|
||||
# Upload the original go test output as an artifact for later review.
|
||||
- name: Upload test log
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: unit-test-log
|
||||
path: src/testlog/*
|
||||
if-no-files-found: error
|
||||
retention-days: 14
|
||||
|
||||
Test-Suite-Fork:
|
||||
needs: [Precheck]
|
||||
environment: Testing
|
||||
if: (!startsWith(github.ref , 'refs/tags/') && github.ref != 'refs/heads/main') && (needs.precheck.outputs.srcfileschanged == 'true' && github.event.pull_request.head.repo.full_name != github.repository)
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src
|
||||
env:
|
||||
CORSO_LOG_FILE: ${{ github.workspace }}/testlog/run-fork.log
|
||||
LOG_GRAPH_REQUESTS: true
|
||||
steps:
|
||||
- name: Fail check if not repository_dispatch
|
||||
if: github.event_name != 'repository_dispatch'
|
||||
run: |
|
||||
echo "Workflow requires approval from a maintainer to run. It will be automatically rerun on approval."
|
||||
exit 1
|
||||
|
||||
- uses: marocchino/sticky-pull-request-comment@v2
|
||||
if: github.event.client_payload.slash_command.args.named.sha != '' && contains(github.event.client_payload.pull_request.head.sha, github.event.client_payload.slash_command.args.named.sha)
|
||||
with:
|
||||
message: |
|
||||
Workflow run sha specified via `ok-to-test` is not the latest commit on PR. Run canceled.
|
||||
|
||||
- name: Fail check if not head of PR
|
||||
if: github.event.client_payload.slash_command.args.named.sha != '' && contains(github.event.client_payload.pull_request.head.sha, github.event.client_payload.slash_command.args.named.sha)
|
||||
run: |
|
||||
echo "Workflow run sha specified is not the latest commit on PR. Exiting."
|
||||
exit 1
|
||||
|
||||
# add comment to PR with link to workflow run
|
||||
- uses: marocchino/sticky-pull-request-comment@v2
|
||||
with:
|
||||
message: |
|
||||
Test suite run will be available at https://github.com/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID
|
||||
|
||||
# Check out merge commit
|
||||
- name: Fork based /ok-to-test checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: "refs/pull/${{ github.event.client_payload.pull_request.number }}/merge"
|
||||
|
||||
- name: Setup Golang with cache
|
||||
uses: magnetikonline/action-golang-cache@v4
|
||||
uses: magnetikonline/action-golang-cache@v3
|
||||
with:
|
||||
go-version-file: src/go.mod
|
||||
|
||||
@ -377,7 +167,7 @@ jobs:
|
||||
|
||||
# AWS creds
|
||||
- name: Configure AWS credentials from Test account
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
uses: aws-actions/configure-aws-credentials@v1
|
||||
with:
|
||||
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
|
||||
role-session-name: integration-testing
|
||||
@ -390,31 +180,105 @@ jobs:
|
||||
AZURE_CLIENT_SECRET: ${{ secrets.CLIENT_SECRET }}
|
||||
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
|
||||
CORSO_CI_TESTS: true
|
||||
CORSO_M365_TEST_USER_ID: ${{ vars.CORSO_M365_TEST_USER_ID }}
|
||||
CORSO_M365_TEST_USER_ID: ${{ secrets.CORSO_M365_TEST_USER_ID }}
|
||||
CORSO_SECONDARY_M365_TEST_USER_ID: ${{ secrets.CORSO_SECONDARY_M365_TEST_USER_ID }}
|
||||
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
go test \
|
||||
-tags testing \
|
||||
-json \
|
||||
-v \
|
||||
-failfast \
|
||||
./... 2>&1 | tee ./testlog/gotest.log | gotestfmt -hide successful-tests
|
||||
|
||||
# Upload the original go test log as an artifact for later review.
|
||||
- name: Upload test log
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: test-log
|
||||
path: src/testlog/gotest.log
|
||||
if-no-files-found: error
|
||||
retention-days: 14
|
||||
|
||||
Test-Suite-Fork:
|
||||
needs: [Precheck]
|
||||
environment: Testing
|
||||
if: (needs.precheck.outputs.srcfileschanged == 'true' &&
|
||||
github.event.pull_request.head.repo.full_name != github.repository) # only run when repo is forked
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src
|
||||
steps:
|
||||
- name: Fail check
|
||||
if: github.event_name != 'repository_dispatch'
|
||||
run: |
|
||||
echo "Workflow requires approval from a maintainer to run. It will be automatically rerun on approval."
|
||||
exit 1
|
||||
|
||||
# add comment to PR with link to workflow run
|
||||
- uses: marocchino/sticky-pull-request-comment@v2
|
||||
with:
|
||||
message: |
|
||||
https://github.com/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID
|
||||
|
||||
# Check out merge commit
|
||||
- name: Fork based /ok-to-test checkout
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
ref: "refs/pull/${{ github.event.client_payload.pull_request.number }}/merge"
|
||||
|
||||
- name: Setup Golang with cache
|
||||
uses: magnetikonline/action-golang-cache@v3
|
||||
with:
|
||||
go-version-file: src/go.mod
|
||||
|
||||
- run: mkdir testlog
|
||||
|
||||
# Install gotestfmt
|
||||
- name: Set up gotestfmt
|
||||
run: go install github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
|
||||
|
||||
# AWS creds
|
||||
- name: Configure AWS credentials from Test account
|
||||
uses: aws-actions/configure-aws-credentials@v1
|
||||
with:
|
||||
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
|
||||
role-session-name: integration-testing
|
||||
aws-region: us-east-1
|
||||
|
||||
# run the tests
|
||||
- name: Integration Tests
|
||||
env:
|
||||
AZURE_CLIENT_ID: ${{ secrets.CLIENT_ID }}
|
||||
AZURE_CLIENT_SECRET: ${{ secrets.CLIENT_SECRET }}
|
||||
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
|
||||
CORSO_CI_TESTS: true
|
||||
CORSO_M365_TEST_USER_ID: ${{ secrets.CORSO_M365_TEST_USER_ID }}
|
||||
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
go test \
|
||||
-json \
|
||||
-v \
|
||||
-timeout 15m \
|
||||
./... \
|
||||
2>&1 | tee ./testlog/gotest-fork.log | gotestfmt -hide successful-tests
|
||||
./... 2>&1 | tee ./testlog/gotest.log | gotestfmt -hide successful-tests
|
||||
|
||||
# Upload the original go test log as an artifact for later review.
|
||||
- name: Upload test log
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: fork-test-log
|
||||
path: src/testlog/*
|
||||
name: test-log
|
||||
path: src/testlog/gotest.log
|
||||
if-no-files-found: error
|
||||
retention-days: 14
|
||||
|
||||
# Update check run called "Test-Suite-Fork"
|
||||
- uses: actions/github-script@v7
|
||||
- uses: actions/github-script@v5
|
||||
id: update-check-run
|
||||
if: failure()
|
||||
if: ${{ always() }}
|
||||
env:
|
||||
number: ${{ github.event.client_payload.pull_request.number }}
|
||||
job: ${{ github.job }}
|
||||
@ -445,32 +309,28 @@ jobs:
|
||||
# --- Source Code Linting ----------------------------------------------------------------------------
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
|
||||
Source-Code-Linting:
|
||||
Linting:
|
||||
needs: [Precheck, Checkout]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.srcfileschanged == 'true'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Setup Golang with cache
|
||||
uses: magnetikonline/action-golang-cache@v4
|
||||
uses: magnetikonline/action-golang-cache@v3
|
||||
with:
|
||||
go-version-file: src/go.mod
|
||||
|
||||
- name: Go Lint
|
||||
uses: golangci/golangci-lint-action@v4
|
||||
uses: golangci/golangci-lint-action@v3
|
||||
with:
|
||||
# Keep pinned to a verson as sometimes updates will add new lint
|
||||
# failures in unchanged code.
|
||||
version: v1.54.2
|
||||
version: v1.45.2
|
||||
working-directory: src
|
||||
skip-pkg-cache: true
|
||||
skip-build-cache: true
|
||||
skip-cache: true
|
||||
|
||||
# check licenses
|
||||
- name: Get go-licenses
|
||||
@ -479,127 +339,108 @@ jobs:
|
||||
- name: Run go-licenses
|
||||
run: go-licenses check github.com/alcionai/corso/src --ignore github.com/alcionai/corso/src
|
||||
|
||||
- name: Run staticcheck
|
||||
uses: dominikh/staticcheck-action@v1.3.0
|
||||
with:
|
||||
install-go: false
|
||||
working-directory: src
|
||||
|
||||
- name: Run allowtags
|
||||
run: |
|
||||
go install github.com/ashmrtn/allowtags@latest
|
||||
allowtags --allow-key json --allow-key uriparametername ./...
|
||||
|
||||
# I could not find a way to install tree-grepper without nix
|
||||
# https://github.com/BrianHicks/tree-grepper/issues/293
|
||||
- uses: cachix/install-nix-action@v25
|
||||
- uses: cachix/cachix-action@v14
|
||||
with:
|
||||
name: tree-grepper
|
||||
- run: nix-env -if https://github.com/BrianHicks/tree-grepper/archive/refs/heads/main.tar.gz
|
||||
- name: Run trailing comma lint rule
|
||||
run: |
|
||||
# Using `grep .` as the exit codes are always true for correct grammar
|
||||
if tree-grepper -q go '(argument_list "," @no-trailing-comma .)' | grep .; then
|
||||
echo "No trailing commas for function calls"
|
||||
exit 1
|
||||
fi
|
||||
- name: Check for empty string comparison
|
||||
run: |
|
||||
# Using `grep .` as the exit codes are always true for correct grammar
|
||||
if tree-grepper -q go '((binary_expression (identifier) ["==" "!="] (interpreted_string_literal) @_ri) @exp (#eq? @_ri "\"\""))' | grep .; then
|
||||
echo "Use len check instead of empty string comparison"
|
||||
exit 1
|
||||
fi
|
||||
- name: Check for cases where errors are not propagated
|
||||
run: |
|
||||
# Using `grep .` as the exit codes are always true for correct grammar
|
||||
if tree-grepper -q go '((if_statement (binary_expression) @_if (block (return_statement (expression_list (call_expression (selector_expression) @_fun ) @ret .)))) (#match? @_if "err != nil") (#match? @_fun "clues.NewWC"))' | grep .; then
|
||||
echo "Make sure to propagate errors with clues"
|
||||
exit 1
|
||||
fi
|
||||
- name: Check if clues without context are used when context is passed in
|
||||
run: |
|
||||
# Using `grep .` as the exit codes are always true for correct grammar
|
||||
if tree-grepper -q go '((function_declaration (parameter_list . (parameter_declaration (identifier) @_octx)) body: (block (short_var_declaration left: (expression_list (identifier) @_err . ) right: (expression_list (call_expression (argument_list . (identifier) @_ctx)))) . (if_statement (binary_expression) @_exp consequence: (block (return_statement (expression_list (call_expression (selector_expression (call_expression (selector_expression) @clue))) . )))))) (#eq? @_err "err") (#eq? @_octx "ctx") (#eq? @_ctx "ctx") (#eq? @_exp "err != nil") (#match? @clue "^clues\.") (#match? @clue "WC$"))' | grep .; then
|
||||
echo "Do not use clues.*WC when context is passed in"
|
||||
exit 1
|
||||
fi
|
||||
- name: Check clues with context is used when context is not passed in
|
||||
run: |
|
||||
# Using `grep .` as the exit codes are always true for correct grammar
|
||||
if tree-grepper -q go '((function_declaration (parameter_list . (parameter_declaration (identifier) @_octx)) body: (block (short_var_declaration left: (expression_list (identifier) @_err . ) right: (expression_list (call_expression (argument_list . (identifier) @_ctx)))) . (if_statement (binary_expression) @_exp consequence: (block (return_statement (expression_list (call_expression (selector_expression (call_expression (selector_expression) @clue))) . )))))) (#eq? @_err "err") (#eq? @_octx "ctx") (#not-eq? @_ctx "ctx") (#eq? @_exp "err != nil") (#match? @clue "^clues\.") (#not-match? @clue "WC$"))' | grep .; then
|
||||
echo "Use clues.*WC when context is not passed in"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
# --- GitHub Actions Linting -------------------------------------------------------------------------
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
|
||||
Actions-Lint:
|
||||
needs: [Precheck]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
if: needs.precheck.outputs.actionsfileschanged == 'true'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: actionlint
|
||||
uses: raven-actions/actionlint@v1
|
||||
with:
|
||||
fail-on-error: true
|
||||
cache: true
|
||||
# Ignore
|
||||
# * combining commands into a subshell and using single output
|
||||
# redirect
|
||||
# * various variable quoting patterns
|
||||
# * possible ineffective echo commands
|
||||
flags: "-ignore SC2129 -ignore SC2086 -ignore SC2046 -ignore 2116"
|
||||
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
# --- Publish steps ----------------------------------------------------------------------------------
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
|
||||
Publish-Binary:
|
||||
needs: [Test-Suite-Trusted, Source-Code-Linting, Website-Linting, SetEnv]
|
||||
needs: [Test-Suite-Trusted, Linting, Docs-Linting, SetEnv]
|
||||
environment: ${{ needs.SetEnv.outputs.environment }}
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Publish Binary
|
||||
uses: ./.github/actions/publish-binary
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
version: ${{ needs.SetEnv.outputs.version }}
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
rudderstack_write_key: ${{ secrets.RUDDERSTACK_CORSO_WRITE_KEY }}
|
||||
rudderstack_data_plane_url: ${{ secrets.RUDDERSTACK_CORSO_DATA_PLANE_URL }}
|
||||
fetch-depth: 0 # needed to pull changelog
|
||||
|
||||
Publish-Image:
|
||||
needs: [Test-Suite-Trusted, Source-Code-Linting, Website-Linting, SetEnv]
|
||||
- name: Setup Golang with cache
|
||||
uses: magnetikonline/action-golang-cache@v3
|
||||
with:
|
||||
go-version-file: src/go.mod
|
||||
|
||||
- name: Run GoReleaser
|
||||
uses: goreleaser/goreleaser-action@v3
|
||||
with:
|
||||
version: latest
|
||||
args: release --rm-dist --timeout 500m
|
||||
workdir: src
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
RUDDERSTACK_CORSO_WRITE_KEY: ${{ secrets.RUDDERSTACK_CORSO_WRITE_KEY }}
|
||||
RUDDERSTACK_CORSO_DATA_PLANE_URL: ${{ secrets.RUDDERSTACK_CORSO_DATA_PLANE_URL }}
|
||||
CORSO_VERSION: ${{ needs.SetEnv.outputs.version }}
|
||||
|
||||
- name: Upload assets
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: corso
|
||||
path: src/dist/*
|
||||
|
||||
Publish-Docs:
|
||||
needs: [Test-Suite-Trusted, Linting, Docs-Linting, SetEnv]
|
||||
environment: ${{ needs.SetEnv.outputs.environment }}
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: docs
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- uses: actions/download-artifact@master
|
||||
name: Download docs from build step
|
||||
with:
|
||||
name: docs
|
||||
path: docs/build
|
||||
|
||||
- name: Configure AWS credentials from Test account
|
||||
uses: aws-actions/configure-aws-credentials@v1
|
||||
with:
|
||||
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
|
||||
role-session-name: integration-testing
|
||||
aws-region: us-east-1
|
||||
|
||||
- name: Add rotbots.txt
|
||||
if: github.ref == 'refs/heads/main'
|
||||
run: |
|
||||
printf 'User-agent: *\nDisallow: /' > build/robots.txt
|
||||
|
||||
- name: Push docs
|
||||
run: |
|
||||
aws s3 sync build "s3://${{ secrets.DOCS_S3_BUCKET }}"
|
||||
|
||||
- name: Invalidate cloudfront
|
||||
run: |
|
||||
aws cloudfront create-invalidation --distribution-id ${{ secrets.DOCS_CF_DISTRIBUTION }} --paths "/*"
|
||||
|
||||
Publish-Image:
|
||||
needs: [Test-Suite-Trusted, Linting, Docs-Linting, SetEnv]
|
||||
environment: ${{ needs.SetEnv.outputs.environment }}
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: build
|
||||
env:
|
||||
imageName: ghcr.io/alcionai/corso
|
||||
PLATFORMS: linux/amd64,linux/arm64
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
# Setup buildx
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
uses: docker/setup-qemu-action@v2
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
||||
# retrieve credentials for ghcr.io
|
||||
- name: Login to Github Packages
|
||||
uses: docker/login-action@v3
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
@ -607,9 +448,9 @@ jobs:
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
images: ${{ env.IMAGE_NAME }}
|
||||
images: ${{ env.imageName }}
|
||||
tags: |
|
||||
type=ref,event=tag
|
||||
type=sha,format=short,prefix=
|
||||
@ -617,7 +458,7 @@ jobs:
|
||||
|
||||
# deploy the image
|
||||
- name: Build image and push to GitHub Container Registry
|
||||
uses: docker/build-push-action@v5
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: .
|
||||
file: ./build/Dockerfile
|
||||
@ -626,140 +467,7 @@ jobs:
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
build-args: |
|
||||
CORSO_BUILD_LDFLAGS=-X 'github.com/alcionai/corso/src/internal/events.RudderStackWriteKey=${{ secrets.RUDDERSTACK_CORSO_WRITE_KEY }}' -X 'github.com/alcionai/corso/src/internal/events.RudderStackDataPlaneURL=${{ secrets.RUDDERSTACK_CORSO_DATA_PLANE_URL }}' -X 'github.com/alcionai/corso/src/internal/version.Version=${{ needs.SetEnv.outputs.version }}'
|
||||
CORSO_BUILD_LDFLAGS=-X 'github.com/alcionai/corso/src/internal/events.RudderStackWriteKey=${{ secrets.RUDDERSTACK_CORSO_WRITE_KEY }}' -X 'github.com/alcionai/corso/src/internal/events.RudderStackDataPlaneURL=${{ secrets.RUDDERSTACK_CORSO_DATA_PLANE_URL }}' -X 'github.com/alcionai/corso/src/cli.version=${{ needs.SetEnv.outputs.version }}'
|
||||
# use the github cache
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
Validate-Linux-Artifacts:
|
||||
needs: [Publish-Binary, Publish-Image, SetEnv]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
env:
|
||||
CORSO_VERSION: ${{ needs.SetEnv.outputs.version }}
|
||||
steps:
|
||||
- name: Validate x86_64 binary artifacts
|
||||
run: |
|
||||
set -ex
|
||||
curl -L https://github.com/alcionai/corso/releases/download/${{ env.CORSO_VERSION }}/corso_${{ env.CORSO_VERSION }}_Linux_x86_64.tar.gz > corso.tar.gz
|
||||
tar -xf corso.tar.gz
|
||||
./corso --help
|
||||
./corso --version 2>&1 | grep -E "version: ${{ env.CORSO_VERSION }}$"
|
||||
- name: Validate arm64 binary artifacts
|
||||
uses: uraimo/run-on-arch-action@v2
|
||||
with:
|
||||
arch: armv7
|
||||
distro: ubuntu18.04
|
||||
install: |
|
||||
apt-get -y update && apt-get -y install curl
|
||||
run: |
|
||||
set -ex
|
||||
sudo apt-get update && sudo apt-get install curl
|
||||
curl -L https://github.com/alcionai/corso/releases/download/${{ env.CORSO_VERSION }}/corso_${{ env.CORSO_VERSION }}_Linux_arm64.tar.gz > corso.tar.gz
|
||||
tar -xf corso.tar.gz
|
||||
./corso --help
|
||||
./corso --version 2>&1 | grep -E "version: ${{ env.CORSO_VERSION }}$"
|
||||
|
||||
Validate-Docker-Artifacts:
|
||||
needs: [Publish-Binary, Publish-Image, SetEnv]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
env:
|
||||
CORSO_VERSION: ${{ needs.SetEnv.outputs.version }}
|
||||
steps:
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Validate amd64 container images
|
||||
run: |
|
||||
docker run --platform linux/amd64 ${{ env.IMAGE_NAME }}:${{ env.CORSO_VERSION }} --help
|
||||
docker run --platform linux/amd64 ${{ env.IMAGE_NAME }}:${{ env.CORSO_VERSION }} --version | grep -E "version: ${{ env.CORSO_VERSION }}$"
|
||||
- name: Validate arm64 container images
|
||||
run: |
|
||||
docker run --platform linux/arm64 ${{ env.IMAGE_NAME }}:${{ env.CORSO_VERSION }} --help
|
||||
docker run --platform linux/amd64 ${{ env.IMAGE_NAME }}:${{ env.CORSO_VERSION }} --version | grep -E "version: ${{ env.CORSO_VERSION }}$"
|
||||
|
||||
Validate-MacOS-Artifacts:
|
||||
needs: [Publish-Binary, Publish-Image, SetEnv]
|
||||
environment: Testing
|
||||
runs-on: macos-latest
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
env:
|
||||
CORSO_VERSION: ${{ needs.SetEnv.outputs.version }}
|
||||
steps:
|
||||
- name: Validate x86_64 binary artifacts
|
||||
run: |
|
||||
set -ex
|
||||
curl -L https://github.com/alcionai/corso/releases/download/${{ env.CORSO_VERSION }}/corso_${{ env.CORSO_VERSION }}_Darwin_x86_64.tar.gz > corso.tar.gz
|
||||
tar -xf corso.tar.gz
|
||||
./corso --help
|
||||
./corso --version 2>&1 | grep -E "version: ${{ env.CORSO_VERSION }}$"
|
||||
- name: Validate arm64 binary artifacts
|
||||
run: |
|
||||
set -ex
|
||||
curl -L https://github.com/alcionai/corso/releases/download/${{ env.CORSO_VERSION }}/corso_${{ env.CORSO_VERSION }}_Darwin_arm64.tar.gz > corso.tar.gz
|
||||
tar -xf corso.tar.gz
|
||||
# TODO: test/validate arm64 executable once we have arm64 CI
|
||||
|
||||
Validate-Windows-Artifacts:
|
||||
needs: [Publish-Binary, Publish-Image, SetEnv]
|
||||
environment: Testing
|
||||
runs-on: windows-latest
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
env:
|
||||
CORSO_VERSION: ${{ needs.SetEnv.outputs.version }}
|
||||
steps:
|
||||
- name: Validate x86_64 binary artifacts
|
||||
run: |
|
||||
curl -L https://github.com/alcionai/corso/releases/download/${{ env.CORSO_VERSION }}/corso_${{ env.CORSO_VERSION }}_Windows_x86_64.zip -o corso.zip
|
||||
7z x corso.zip
|
||||
./corso.exe --help
|
||||
./corso.exe --version 2>&1 | grep -E "version: ${{ env.CORSO_VERSION }}$"
|
||||
|
||||
Publish-Website-Test:
|
||||
needs: [Test-Suite-Trusted, Source-Code-Linting, Website-Linting, SetEnv]
|
||||
environment: ${{ needs.SetEnv.outputs.environment }}
|
||||
runs-on: ubuntu-latest
|
||||
if: github.ref == 'refs/heads/main'
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4 # need to checkout to make the action available
|
||||
|
||||
- name: Publish website
|
||||
uses: ./.github/actions/publish-website
|
||||
with:
|
||||
aws-iam-role: ${{ secrets.AWS_IAM_ROLE }}
|
||||
bucket: ${{ needs.SetEnv.outputs.website-bucket }}
|
||||
cfid: ${{ needs.SetEnv.outputs.website-cfid }}
|
||||
|
||||
Publish-Website-Prod:
|
||||
needs: [SetEnv, Validate-Linux-Artifacts, Validate-MacOS-Artifacts, Validate-Docker-Artifacts, Validate-Windows-Artifacts]
|
||||
environment: ${{ needs.SetEnv.outputs.environment }}
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4 # need to checkout to make the action available
|
||||
|
||||
- name: Publish website
|
||||
uses: ./.github/actions/publish-website
|
||||
with:
|
||||
aws-iam-role: ${{ secrets.AWS_IAM_ROLE }}
|
||||
bucket: ${{ needs.SetEnv.outputs.website-bucket }}
|
||||
cfid: ${{ needs.SetEnv.outputs.website-cfid }}
|
||||
|
||||
Validate-Website-Artifacts:
|
||||
needs: [Publish-Website-Prod, SetEnv]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
env:
|
||||
CORSO_VERSION: ${{ needs.SetEnv.outputs.version }}
|
||||
steps:
|
||||
- name: Validate docs
|
||||
run: |
|
||||
curl https://corsobackup.io/docs/quickstart/ | grep https://github.com/alcionai/corso/releases/download/${{ env.CORSO_VERSION }}/corso_${{ env.CORSO_VERSION }}_Linux_x86_64.tar.gz
|
||||
|
||||
75
.github/workflows/ci_test_cleanup.yml
vendored
@ -1,6 +1,5 @@
|
||||
name: CI Test Cleanup
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
# every half hour
|
||||
- cron: "*/30 * * * *"
|
||||
@ -12,74 +11,26 @@ jobs:
|
||||
continue-on-error: true
|
||||
strategy:
|
||||
matrix:
|
||||
user: [CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, ""]
|
||||
folder: [Corso_Restore_, TestRestore, '']
|
||||
user: [CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
# sets the maximum time to now-30m.
|
||||
# CI test have a 20 minute timeout.
|
||||
# sets the maximimum time to now-30m.
|
||||
# CI test have a 10 minute timeout.
|
||||
# At 20 minutes ago, we should be safe from conflicts.
|
||||
# The additional 10 minutes is just to be good citizens.
|
||||
- name: Set purge boundary
|
||||
run: echo "HALF_HOUR_AGO=$(date -d '30 minutes ago' -u +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV
|
||||
run: |
|
||||
echo "HALF_HOUR_AGO=$(date -d '30 minutes ago' -u +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV
|
||||
|
||||
- name: Purge CI-Produced Folders for Users
|
||||
uses: ./.github/actions/purge-m365-data
|
||||
- name: Purge CI-Produced Folders
|
||||
uses: ./.github/actions/purge-m365-user-data
|
||||
with:
|
||||
user: ${{ vars[matrix.user] }}
|
||||
folder-prefix: ${{ vars.CORSO_M365_TEST_PREFIXES }}
|
||||
user: ${{ secrets[matrix.user] }}
|
||||
folder-prefix: ${{ matrix.folder }}
|
||||
older-than: ${{ env.HALF_HOUR_AGO }}
|
||||
azure-client-id: ${{ secrets.CLIENT_ID }}
|
||||
azure-client-secret: ${{ secrets.CLIENT_SECRET }}
|
||||
azure-tenant-id: ${{ secrets.TENANT_ID }}
|
||||
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
|
||||
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
|
||||
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
|
||||
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
|
||||
tenant-domain: ${{ vars.TENANT_DOMAIN }}
|
||||
|
||||
- name: Notify failure in teams
|
||||
if: failure()
|
||||
uses: ./.github/actions/teams-message
|
||||
with:
|
||||
msg: "[CORSO FAILED] ${{ vars[matrix.user] }} CI Cleanup"
|
||||
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
|
||||
|
||||
Test-Site-Data-Cleanup:
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
continue-on-error: true
|
||||
strategy:
|
||||
matrix:
|
||||
site: [CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
# sets the maximum time to now-30m.
|
||||
# CI test have a 20 minute timeout.
|
||||
- name: Set purge boundary
|
||||
run: echo "HALF_HOUR_AGO=$(date -d '30 minutes ago' -u +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV
|
||||
|
||||
- name: Purge CI-Produced Folders for Sites
|
||||
uses: ./.github/actions/purge-m365-data
|
||||
with:
|
||||
site: ${{ vars[matrix.site] }}
|
||||
folder-prefix: ${{ vars.CORSO_M365_TEST_PREFIXES }}
|
||||
libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
|
||||
library-prefix: ${{ vars.CORSO_M365_TEST_PREFIXES }}
|
||||
older-than: ${{ env.HALF_HOUR_AGO }}
|
||||
azure-client-id: ${{ secrets.CLIENT_ID }}
|
||||
azure-client-secret: ${{ secrets.CLIENT_SECRET }}
|
||||
azure-tenant-id: ${{ secrets.TENANT_ID }}
|
||||
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
|
||||
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
|
||||
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
|
||||
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
|
||||
tenant-domain: ${{ vars.TENANT_DOMAIN }}
|
||||
|
||||
- name: Notify failure in teams
|
||||
if: failure()
|
||||
uses: ./.github/actions/teams-message
|
||||
with:
|
||||
msg: "[CORSO FAILED] ${{ vars[matrix.site] }} CI Cleanup"
|
||||
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
|
||||
|
||||
75
.github/workflows/load_test.yml
vendored
@ -1,8 +1,10 @@
|
||||
name: Nightly Load Testing
|
||||
on:
|
||||
schedule:
|
||||
# every day at 03:59 GMT (roughly 8pm PST)
|
||||
- cron: "59 3 * * *"
|
||||
# every day at 01:59 (01:59am) UTC
|
||||
# - cron: "59 1 * * *"
|
||||
# temp, for testing: every 4 hours
|
||||
- cron: "0 */4 * * *"
|
||||
|
||||
permissions:
|
||||
# required to retrieve AWS credentials
|
||||
@ -18,29 +20,33 @@ jobs:
|
||||
Load-Tests:
|
||||
environment: Load Testing
|
||||
runs-on: ubuntu-latest
|
||||
# Skipping load testing for now. They need some love to get up and
|
||||
# running properly, and it's better to not fight for resources with
|
||||
# tests that are guaranteed to fail.
|
||||
if: false
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
# AWS creds
|
||||
- name: Configure AWS credentials from Test account
|
||||
uses: aws-actions/configure-aws-credentials@v1
|
||||
with:
|
||||
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
|
||||
role-session-name: integration-testing
|
||||
aws-region: us-east-1
|
||||
|
||||
- name: Build the otel-daemon
|
||||
run: make build-otel-daemon
|
||||
|
||||
# TODO: write logs to a file in src/testlog for archival
|
||||
# TODO: write logs to a file in src/test_results for archival
|
||||
- name: Run the otel-daemon
|
||||
run: make otel-daemon
|
||||
|
||||
- name: Setup Golang with cache
|
||||
uses: magnetikonline/action-golang-cache@v4
|
||||
uses: magnetikonline/action-golang-cache@v3
|
||||
with:
|
||||
go-version-file: src/go.mod
|
||||
|
||||
- run: mkdir ${{ github.workspace }}/testlog
|
||||
- run: mkdir test_results
|
||||
|
||||
# Install gotestfmt
|
||||
- name: Set up gotestfmt
|
||||
@ -49,18 +55,13 @@ jobs:
|
||||
# run the tests
|
||||
- name: Integration Tests
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY_SECRET }}
|
||||
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
|
||||
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
|
||||
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
|
||||
CORSO_LOAD_TESTS: true
|
||||
CORSO_LOG_FILE: ${{ github.workspace }}/testlog/run-load.log
|
||||
CORSO_PASSPHRASE: ${{ secrets.CORSO_PASSPHRASE }}
|
||||
CORSO_M365_LOAD_TEST_USER_ID: ${{ secrets.CORSO_M365_LOAD_TEST_USER_ID }}
|
||||
CORSO_M365_LOAD_TEST_ORG_USERS: ${{ secrets.CORSO_M365_LOAD_TEST_ORG_USERS }}
|
||||
CORSO_PASSPHRASE: ${{ secrets.CORSO_PASSPHRASE }}
|
||||
IGNORE_LOAD_TEST_USER_ID: ${{ vars.EXT_SDK_TEST_USER_ID }}
|
||||
LOG_GRAPH_REQUESTS: true
|
||||
CORSO_LOAD_TESTS: true
|
||||
run: |
|
||||
set -euo pipefail
|
||||
go test \
|
||||
@ -73,9 +74,9 @@ jobs:
|
||||
-memprofile=mem.prof \
|
||||
-mutexprofile=mutex.prof \
|
||||
-trace=trace.out \
|
||||
-outputdir=testlog \
|
||||
-outputdir=test_results \
|
||||
./pkg/repository/repository_load_test.go \
|
||||
2>&1 | tee ${{ github.workspace }}/testlog/gotest-load.log | gotestfmt -hide successful-tests
|
||||
2>&1 | tee ./test_results/goloadtest.log | gotestfmt -hide successful-tests
|
||||
|
||||
# generate new entries to roll into the next load test
|
||||
# only runs if the test was successful
|
||||
@ -88,15 +89,15 @@ jobs:
|
||||
CORSO_M365_LOAD_TEST_USER_ID: ${{ secrets.CORSO_M365_LOAD_TEST_USER_ID }}
|
||||
run: |
|
||||
go run . exchange emails \
|
||||
--mailbox ${{ env.CORSO_M365_LOAD_TEST_USER_ID }} \
|
||||
--user ${{ env.CORSO_M365_LOAD_TEST_USER_ID }} \
|
||||
--destination lt_${{ env.NOW }} \
|
||||
--count 10
|
||||
go run . exchange contacts \
|
||||
--mailbox ${{ env.CORSO_M365_LOAD_TEST_USER_ID }} \
|
||||
--user ${{ env.CORSO_M365_LOAD_TEST_USER_ID }} \
|
||||
--destination lt_${{ env.NOW }} \
|
||||
--count 10
|
||||
go run . exchange events \
|
||||
--mailbox ${{ env.CORSO_M365_LOAD_TEST_USER_ID }} \
|
||||
--user ${{ env.CORSO_M365_LOAD_TEST_USER_ID }} \
|
||||
--destination lt_${{ env.NOW }} \
|
||||
--count 10
|
||||
|
||||
@ -107,10 +108,10 @@ jobs:
|
||||
# package all artifacts for later review
|
||||
- name: Upload Log, Profilers, Traces
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: load-test-profiling
|
||||
path: ${{ github.workspace }}/testlog/*
|
||||
path: src/test_results/*
|
||||
if-no-files-found: error
|
||||
retention-days: 14
|
||||
|
||||
@ -120,7 +121,7 @@ jobs:
|
||||
outputs:
|
||||
matrix: ${{ steps.build.outputs.matrix }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
- id: build
|
||||
run: |
|
||||
u=$(echo "${{ secrets.CORSO_M365_LOAD_TEST_ORG_USERS }}" | sed 's/\[/["/g' | sed 's/\]/"]/g' | sed 's/|/","/g')
|
||||
@ -138,23 +139,19 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
user: [ CORSO_M365_LOAD_TEST_USER_ID, '' ]
|
||||
user: ${{ fromJson(needs.setup.outputs.matrix).user }}
|
||||
folder: [Corso_Restore_,'']
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set folder boundary datetime
|
||||
run: |
|
||||
echo "NOW=$(date -u +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV
|
||||
- name: Purge Load-Test-Produced Folders
|
||||
uses: ./.github/actions/purge-m365-data
|
||||
uses: ./.github/actions/purge-m365-user-data
|
||||
with:
|
||||
user: ${{ secrets[matrix.user] }}
|
||||
folder-prefix: ${{ vars.CORSO_M365_TEST_PREFIXES }}
|
||||
older-than: ${{ env.HALF_HOUR_AGO }}
|
||||
azure-client-id: ${{ secrets.CLIENT_ID }}
|
||||
azure-client-secret: ${{ secrets.CLIENT_SECRET }}
|
||||
azure-tenant-id: ${{ secrets.TENANT_ID }}
|
||||
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
|
||||
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
|
||||
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
|
||||
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
|
||||
tenant-domain: ${{ vars.TENANT_DOMAIN }}
|
||||
older-than: ${{ env.NOW }}
|
||||
folder-prefix: ${{ matrix.folder }}
|
||||
azure-client-id: ${{ secrets.AZURE_CLIENT_ID }}
|
||||
azure-client-secret: ${{ secrets.AZURE_CLIENT_SECRET }}
|
||||
azure-tenant-id: ${{ secrets.AZURE_TENANT_ID }}
|
||||
user: ${{ matrix.user }}
|
||||
|
||||
396
.github/workflows/longevity_test.yml
vendored
@ -1,396 +0,0 @@
|
||||
name: Longevity Testing
|
||||
on:
|
||||
schedule:
|
||||
# Run every day at 04:00 GMT (roughly 8pm PST)
|
||||
- cron: "0 4 * * *"
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
user:
|
||||
description: "User to run longevity test on"
|
||||
|
||||
permissions:
|
||||
# required to retrieve AWS credentials
|
||||
id-token: write
|
||||
contents: write
|
||||
|
||||
# cancel currently running jobs if a new version of the branch is pushed
|
||||
concurrency:
|
||||
group: longevity_testing-${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
SetM365App:
|
||||
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
|
||||
|
||||
Longevity-Tests:
|
||||
needs: [SetM365App]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
# Need these in the local env so that corso can read them
|
||||
AZURE_CLIENT_ID: ${{ secrets[needs.SetM365App.outputs.client_id_env] }}
|
||||
AZURE_CLIENT_SECRET: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }}
|
||||
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
|
||||
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
|
||||
# re-used values
|
||||
CORSO_LOG_DIR: ${{ github.workspace }}/src/testlog
|
||||
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-longevity.log
|
||||
RESTORE_DEST_PFX: Corso_Test_Longevity_
|
||||
TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || vars.CORSO_M365_TEST_USER_ID }}
|
||||
PREFIX: "longevity"
|
||||
|
||||
# Options for retention.
|
||||
RETENTION_MODE: GOVERNANCE
|
||||
# Time to retain blobs for in hours.
|
||||
RETENTION_DURATION: 216
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src
|
||||
|
||||
############################################################################
|
||||
# setup
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0 # needed to get latest tag
|
||||
|
||||
- name: Setup Golang with cache
|
||||
uses: magnetikonline/action-golang-cache@v4
|
||||
with:
|
||||
go-version-file: src/go.mod
|
||||
|
||||
- run: |
|
||||
go build -o longevity-test ./cmd/longevity_test
|
||||
go build -o s3checker ./cmd/s3checker
|
||||
|
||||
- name: Get version string
|
||||
id: version
|
||||
run: |
|
||||
echo version=$(git describe --tags --abbrev=0) | tee -a $GITHUB_OUTPUT
|
||||
|
||||
# Checkout the .github directory at the original branch's ref so we have a
|
||||
# stable view of the actions.
|
||||
- name: Code Checkout
|
||||
working-directory: ${{ github.workspace }}
|
||||
run: |
|
||||
git checkout ${{ steps.version.outputs.version }}
|
||||
git checkout ${{ github.ref }} -- .github
|
||||
|
||||
- run: go build -o corso
|
||||
timeout-minutes: 10
|
||||
|
||||
- run: mkdir ${CORSO_LOG_DIR}
|
||||
|
||||
# Use shorter-lived credentials obtained from assume-role since these
|
||||
# runs haven't been taking long.
|
||||
- name: Configure AWS credentials from Test account
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
timeout-minutes: 10
|
||||
with:
|
||||
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
|
||||
role-session-name: integration-testing
|
||||
aws-region: us-east-1
|
||||
|
||||
##########################################################################
|
||||
# Repository commands
|
||||
|
||||
- name: Version Test
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
./corso --version | grep -c 'Corso version:'
|
||||
|
||||
- name: Repo init test
|
||||
id: repo-init
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo -e "\nRepo init test\n" >> ${{ env.CORSO_LOG_FILE }}
|
||||
./corso repo init s3 \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
--retention-mode $(echo "${{ env.RETENTION_MODE }}" | tr '[:upper:]' '[:lower:]') \
|
||||
--retention-duration "${{ env.RETENTION_DURATION }}h" \
|
||||
--extend-retention \
|
||||
--prefix ${{ env.PREFIX }} \
|
||||
--bucket ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
|
||||
|
||||
if grep -q 'Failed to' ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
|
||||
then
|
||||
echo "Repo could not be initialized"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Repo connect test
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo -e "\nRepo connect test\n" >> ${{ env.CORSO_LOG_FILE }}
|
||||
./corso repo connect s3 \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
--prefix ${{ env.PREFIX }} \
|
||||
--bucket ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-connect.log
|
||||
|
||||
if ! grep -q 'Connected to S3 bucket' ${{ env.CORSO_LOG_DIR }}/gotest-repo-connect.log
|
||||
then
|
||||
echo "Repo could not be connected"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
##########################################################################
|
||||
# Exchange
|
||||
|
||||
- name: Backup exchange test
|
||||
id: exchange-test
|
||||
timeout-minutes: 30
|
||||
run: |
|
||||
echo -e "\nBackup Exchange test\n" >> ${CORSO_LOG_FILE}
|
||||
./corso backup create exchange \
|
||||
--no-stats \
|
||||
--mailbox "${TEST_USER}" \
|
||||
--hide-progress \
|
||||
--json \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/backup_exchange.txt
|
||||
|
||||
resultjson=$(sed -e '1,/Completed Backups/d' ${{ env.CORSO_LOG_DIR }}/backup_exchange.txt )
|
||||
|
||||
if [[ $( echo $resultjson | jq -r '.[0] | .stats.errorCount') -ne 0 ]]; then
|
||||
echo "backup was not successful"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
data=$( echo $resultjson | jq -r '.[0] | .id' )
|
||||
echo result=$data >> $GITHUB_OUTPUT
|
||||
|
||||
##########################################################################
|
||||
# Onedrive
|
||||
|
||||
- name: Backup onedrive test
|
||||
id: onedrive-test
|
||||
timeout-minutes: 30
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo -e "\nBackup OneDrive test\n" >> ${CORSO_LOG_FILE}
|
||||
./corso backup create onedrive \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
--user "${TEST_USER}" \
|
||||
--json \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/backup_onedrive.txt
|
||||
|
||||
resultjson=$(sed -e '1,/Completed Backups/d' ${{ env.CORSO_LOG_DIR }}/backup_onedrive.txt )
|
||||
|
||||
if [[ $( echo $resultjson | jq -r '.[0] | .stats.errorCount') -ne 0 ]]; then
|
||||
echo "backup was not successful"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
data=$( echo $resultjson | jq -r '.[0] | .id' )
|
||||
echo result=$data >> $GITHUB_OUTPUT
|
||||
|
||||
##########################################################################
|
||||
# Sharepoint test
|
||||
- name: Backup sharepoint test
|
||||
id: sharepoint-test
|
||||
timeout-minutes: 30
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo -e "\nBackup SharePoint test\n" >> ${CORSO_LOG_FILE}
|
||||
|
||||
./corso backup create sharepoint \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" \
|
||||
--json \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/backup_sharepoint.txt
|
||||
|
||||
resultjson=$(sed -e '1,/Completed Backups/d' ${{ env.CORSO_LOG_DIR }}/backup_sharepoint.txt )
|
||||
|
||||
if [[ $( echo $resultjson | jq -r '.[0] | .stats.errorCount') -ne 0 ]]; then
|
||||
echo "backup was not successful"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
data=$( echo $resultjson | jq -r '.[0] | .id' )
|
||||
echo result=$data >> $GITHUB_OUTPUT
|
||||
|
||||
##########################################################################
|
||||
# Backup Exchange Deletion test
|
||||
- name: Backup Delete exchange test
|
||||
id: delete-exchange-test
|
||||
timeout-minutes: 30
|
||||
env:
|
||||
SERVICE: "exchange"
|
||||
DELETION_DAYS: 10
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo -e "\nDelete Backup exchange \n" >> ${CORSO_LOG_FILE}
|
||||
./longevity-test
|
||||
|
||||
##########################################################################
|
||||
# Backup Onedrive Deletion test
|
||||
- name: Backup Delete onedrive test
|
||||
id: delete-onedrive-test
|
||||
timeout-minutes: 30
|
||||
env:
|
||||
SERVICE: "onedrive"
|
||||
DELETION_DAYS: 10
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo -e "\nDelete Backup onedrive \n" >> ${CORSO_LOG_FILE}
|
||||
./longevity-test
|
||||
|
||||
##########################################################################
|
||||
# Backup Sharepoint Deletion test
|
||||
- name: Backup Delete Sharepoint test
|
||||
id: delete-sharepoint-test
|
||||
timeout-minutes: 30
|
||||
env:
|
||||
SERVICE: "sharepoint"
|
||||
DELETION_DAYS: 5
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo -e "\nDelete Backup sharepoint \n" >> ${CORSO_LOG_FILE}
|
||||
./longevity-test
|
||||
|
||||
##########################################################################
|
||||
# Export OneDrive Test
|
||||
- name: OneDrive Export test
|
||||
timeout-minutes: 30
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo -e "\Export OneDrive test\n" >> ${CORSO_LOG_FILE}
|
||||
|
||||
echo -e "\Export OneDrive test - first entry\n" >> ${CORSO_LOG_FILE}
|
||||
./corso backup list onedrive 2>/dev/null | tail -n+2 | head -n1 | awk '{print $1}' |
|
||||
while read -r line; do
|
||||
./corso export onedrive \
|
||||
"/tmp/corso-export--$line" \
|
||||
--no-stats \
|
||||
--backup "$line" \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_onedrive_first.txt
|
||||
done
|
||||
|
||||
echo -e "\Export OneDrive test - last entry\n" >> ${CORSO_LOG_FILE}
|
||||
./corso backup list onedrive 2>/dev/null | tail -n1 | awk '{print $1}' |
|
||||
while read -r line; do
|
||||
./corso export onedrive \
|
||||
"/tmp/corso-export--$line" \
|
||||
--no-stats \
|
||||
--backup "$line" \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_onedrive_last.txt
|
||||
done
|
||||
|
||||
##########################################################################
|
||||
# Export SharePoint Test
|
||||
- name: SharePoint Export test
|
||||
timeout-minutes: 30
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo -e "\Export SharePoint test\n" >> ${CORSO_LOG_FILE}
|
||||
|
||||
echo -e "\Export SharePoint test - first entry\n" >> ${CORSO_LOG_FILE}
|
||||
./corso backup list sharepoint 2>/dev/null | tail -n+2 | head -n1 | awk '{print $1}' |
|
||||
while read -r line; do
|
||||
./corso export sharepoint \
|
||||
"/tmp/corso-export--$line" \
|
||||
--no-stats \
|
||||
--backup "$line" \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_sharepoint_first.txt
|
||||
done
|
||||
|
||||
echo -e "\Export SharePoint test - last entry\n" >> ${CORSO_LOG_FILE}
|
||||
./corso backup list sharepoint 2>/dev/null | tail -n1 | awk '{print $1}' |
|
||||
while read -r line; do
|
||||
./corso export sharepoint \
|
||||
"/tmp/corso-export--$line" \
|
||||
--no-stats \
|
||||
--backup "$line" \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_sharepoint_last.txt
|
||||
done
|
||||
|
||||
##########################################################################
|
||||
# Maintenance test
|
||||
- name: Maintenance test Daily
|
||||
id: maintenance-test-daily
|
||||
timeout-minutes: 30
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo -e "\n Maintenance test Daily\n" >> ${CORSO_LOG_FILE}
|
||||
|
||||
# Run with the force flag so it doesn't fail if the github runner
|
||||
# hostname isn't what's expected. This is only safe because we can
|
||||
# guarantee only one runner will be executing maintenance at a time.
|
||||
./corso repo maintenance --mode metadata \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
--force \
|
||||
--json \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/maintenance_metadata.txt
|
||||
|
||||
- name: Maintenance test Weekly
|
||||
id: maintenance-test-weekly
|
||||
timeout-minutes: 30
|
||||
run: |
|
||||
if [[ $(date +%A) == "Saturday" ]]; then
|
||||
set -euo pipefail
|
||||
echo -e "\n Maintenance test Weekly\n" >> ${CORSO_LOG_FILE}
|
||||
|
||||
./corso repo maintenance --mode complete \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
--force \
|
||||
--json \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/maintenance_complete.txt
|
||||
|
||||
# TODO(ashmrtn): We can also check that non-current versions of
|
||||
# blobs don't have their retention extended if we want.
|
||||
#
|
||||
# Assuming no failures during full maintenance, current versions of
|
||||
# objects with the below versions should have retention times that
|
||||
# are roughly (now + RETENTION_DURATION). We can explicitly check
|
||||
# for this, but leave a little breathing room since maintenance may
|
||||
# take some time to run.
|
||||
#
|
||||
# If we pick a live-retention-duration that is too small then we'll
|
||||
# start seeing failures. The check for live objects is a lower bound
|
||||
# check.
|
||||
#
|
||||
# Blob prefixes are as follows:
|
||||
# - kopia.blobcfg - repo-wide config
|
||||
# - kopia.repository - repo-wide config
|
||||
# - p - data pack blobs (i.e. file data)
|
||||
# - q - metadata pack blobs (i.e. manifests, directory listings, etc)
|
||||
# - x - index blobs
|
||||
./s3checker \
|
||||
--bucket ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} \
|
||||
--prefix ${{ env.PREFIX }} \
|
||||
--retention-mode ${{ env.RETENTION_MODE }} \
|
||||
--live-retention-duration "$((${{ env.RETENTION_DURATION }}-1))h" \
|
||||
--object-prefix "kopia.blobcfg" \
|
||||
--object-prefix "kopia.repository" \
|
||||
--object-prefix "p" \
|
||||
--object-prefix "q" \
|
||||
--object-prefix "x"
|
||||
fi
|
||||
|
||||
##########################################################################
|
||||
# Logging & Notifications
|
||||
# Upload the original go test output as an artifact for later review.
|
||||
- name: Upload test log
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: longevity-test-log
|
||||
path: src/testlog/*
|
||||
if-no-files-found: error
|
||||
retention-days: 14
|
||||
|
||||
- name: Notify failure in teams
|
||||
if: failure()
|
||||
uses: ./.github/actions/teams-message
|
||||
with:
|
||||
msg: "[CORSO FAILED] Longevity Test"
|
||||
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
|
||||
122
.github/workflows/nightly_test.yml
vendored
@ -1,122 +0,0 @@
|
||||
name: Nightly Test Corso
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
schedule:
|
||||
# Run every day at 04:00 GMT (roughly 8pm PST)
|
||||
- cron: "0 4 * * *"
|
||||
|
||||
permissions:
|
||||
# required to retrieve AWS credentials
|
||||
id-token: write
|
||||
contents: write
|
||||
packages: write
|
||||
pull-requests: read
|
||||
|
||||
# cancel currently running jobs if a new version of the branch is pushed
|
||||
concurrency:
|
||||
group: nightly-${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
# --- Checkouts ------------------------------------------------------------------------
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
|
||||
Checkout:
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
# single setup and sum cache handling here.
|
||||
# the results will cascade onto both testing and linting.
|
||||
- name: Setup Golang with cache
|
||||
uses: ./.github/actions/go-setup-cache
|
||||
with:
|
||||
go-version-file: src/go.mod
|
||||
|
||||
# SetM365App will decide which M365 app to use for this CI run
|
||||
SetM365App:
|
||||
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
|
||||
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
# --- Nightly Testing -------------------------------------------------------------------
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
|
||||
Test-Suite-Trusted:
|
||||
needs: [Checkout, SetM365App]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src
|
||||
env:
|
||||
# Resolve the name of the secret that contains the Azure client ID/secret
|
||||
AZURE_CLIENT_ID_NAME: ${{ needs.SetM365App.outputs.client_id_env }}
|
||||
AZURE_CLIENT_SECRET_NAME: ${{ needs.SetM365App.outputs.client_secret_env }}
|
||||
CLIENT_APP_SLOT: ${{ needs.SetM365App.outputs.client_app_slot }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Golang with cache
|
||||
uses: magnetikonline/action-golang-cache@v4
|
||||
with:
|
||||
go-version-file: src/go.mod
|
||||
|
||||
- run: mkdir testlog
|
||||
|
||||
# Install gotestfmt
|
||||
- name: Set up gotestfmt
|
||||
run: go install github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
|
||||
|
||||
# run the tests
|
||||
- name: Integration Tests
|
||||
env:
|
||||
# Use long-lived AWS credentials.
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY_SECRET }}
|
||||
AZURE_CLIENT_ID: ${{ secrets[env.AZURE_CLIENT_ID_NAME] }}
|
||||
AZURE_CLIENT_SECRET: ${{ secrets[env.AZURE_CLIENT_SECRET_NAME] }}
|
||||
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
|
||||
CORSO_NIGHTLY_TESTS: true
|
||||
CORSO_E2E_TESTS: true
|
||||
CORSO_M365_TEST_USER_ID: ${{ vars.CORSO_M365_TEST_USER_ID }}
|
||||
CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
|
||||
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
|
||||
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-nightly.log
|
||||
LOG_GRAPH_REQUESTS: true
|
||||
S3_BUCKET: ${{ secrets.CI_TESTS_S3_BUCKET }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
go test \
|
||||
-tags testing \
|
||||
-json \
|
||||
-v \
|
||||
-p 1 \
|
||||
-timeout 2h \
|
||||
./... 2>&1 | tee ./testlog/gotest-nightly.log | gotestfmt -hide successful-tests
|
||||
|
||||
##########################################################################################################################################
|
||||
|
||||
# Logging & Notifications
|
||||
|
||||
# Upload the original go test output as an artifact for later review.
|
||||
- name: Upload test log
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: nightly-test-log
|
||||
path: src/testlog/*
|
||||
if-no-files-found: error
|
||||
retention-days: 14
|
||||
|
||||
- name: Notify failure in teams
|
||||
if: failure()
|
||||
uses: ./.github/actions/teams-message
|
||||
with:
|
||||
msg: "[COROS FAILED] Nightly Checks"
|
||||
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
|
||||
5
.github/workflows/ok-to-test.yml
vendored
@ -13,13 +13,13 @@ jobs:
|
||||
steps:
|
||||
- name: Generate token
|
||||
id: generate_token
|
||||
uses: tibdex/github-app-token@v2
|
||||
uses: tibdex/github-app-token@v1
|
||||
with:
|
||||
app_id: ${{ secrets.APP_ID }}
|
||||
private_key: ${{ secrets.PRIVATE_KEY }}
|
||||
|
||||
- name: Slash Command Dispatch
|
||||
uses: peter-evans/slash-command-dispatch@v4
|
||||
uses: peter-evans/slash-command-dispatch@v1
|
||||
env:
|
||||
TOKEN: ${{ steps.generate_token.outputs.token }}
|
||||
with:
|
||||
@ -27,4 +27,5 @@ jobs:
|
||||
reaction-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-type: pull-request
|
||||
commands: ok-to-test
|
||||
named-args: true
|
||||
permission: write
|
||||
|
||||
540
.github/workflows/sanity-test.yaml
vendored
@ -1,540 +0,0 @@
|
||||
name: Sanity Testing
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
user:
|
||||
description: "User to run sanity test on"
|
||||
|
||||
permissions:
|
||||
# required to retrieve AWS credentials
|
||||
id-token: write
|
||||
contents: write
|
||||
|
||||
# cancel currently running jobs if a new version of the branch is pushed
|
||||
concurrency:
|
||||
group: sanity_testing-${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
SetM365App:
|
||||
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
|
||||
|
||||
Sanity-Tests:
|
||||
needs: [SetM365App]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
# Need these in the local env so that corso can read them
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY_SECRET }}
|
||||
AZURE_CLIENT_ID: ${{ secrets[needs.SetM365App.outputs.client_id_env] }}
|
||||
AZURE_CLIENT_SECRET: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }}
|
||||
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
|
||||
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
|
||||
# re-used values
|
||||
CORSO_LOG_DIR: ${{ github.workspace }}/src/testlog
|
||||
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-sanity.log
|
||||
RESTORE_DEST_PFX: Corso_Test_Sanity_
|
||||
TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || vars.CORSO_M365_TEST_USER_ID }}
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src
|
||||
|
||||
##########################################################################################################################################
|
||||
|
||||
# setup
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Golang with cache
|
||||
uses: magnetikonline/action-golang-cache@v4
|
||||
with:
|
||||
go-version-file: src/go.mod
|
||||
|
||||
- run: go build -o corso
|
||||
timeout-minutes: 10
|
||||
|
||||
- run: go build -o sanity-test ./cmd/sanity_test
|
||||
timeout-minutes: 10
|
||||
|
||||
- run: mkdir ${CORSO_LOG_DIR}
|
||||
|
||||
##########################################################################################################################################
|
||||
|
||||
# Pre-Run cleanup
|
||||
|
||||
# unlike CI tests, sanity tests are not expected to run concurrently.
|
||||
# however, the sanity yaml concurrency is set to a maximum of 1 run, preferring
|
||||
# the latest release. If we wait to clean up the production til after the tests
|
||||
# It would be possible to complete all the testing but cancel the run before
|
||||
# cleanup occurs. Setting the cleanup before the tests ensures we always begin
|
||||
# with a clean slate, and cannot compound data production.
|
||||
- name: Set purge boundary
|
||||
if: always()
|
||||
run: |
|
||||
echo "NOW=$(date +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV
|
||||
|
||||
- name: Purge CI-Produced Folders for Users
|
||||
timeout-minutes: 30
|
||||
uses: ./.github/actions/purge-m365-data
|
||||
with:
|
||||
user: ${{ env.TEST_USER }}
|
||||
folder-prefix: ${{ env.RESTORE_DEST_PFX }}
|
||||
older-than: ${{ env.NOW }}
|
||||
azure-client-id: ${{ secrets[needs.SetM365App.outputs.client_id_env] }}
|
||||
azure-client-secret: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }}
|
||||
azure-tenant-id: ${{ secrets.TENANT_ID }}
|
||||
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
|
||||
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
|
||||
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
|
||||
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
|
||||
tenant-domain: ${{ vars.TENANT_DOMAIN }}
|
||||
|
||||
- name: Purge CI-Produced Folders for Sites
|
||||
timeout-minutes: 30
|
||||
if: always()
|
||||
uses: ./.github/actions/purge-m365-data
|
||||
with:
|
||||
site: ${{ vars.CORSO_M365_TEST_SITE_URL }}
|
||||
folder-prefix: ${{ env.RESTORE_DEST_PFX }}
|
||||
libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
|
||||
older-than: ${{ env.NOW }}
|
||||
azure-client-id: ${{ secrets[needs.SetM365App.outputs.client_id_env] }}
|
||||
azure-client-secret: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }}
|
||||
azure-tenant-id: ${{ secrets.TENANT_ID }}
|
||||
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
|
||||
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
|
||||
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
|
||||
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
|
||||
tenant-domain: ${{ vars.TENANT_DOMAIN }}
|
||||
|
||||
##########################################################################################################################################
|
||||
|
||||
# Repository commands
|
||||
|
||||
- name: Version Test
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
./corso --version | grep -c 'Corso version:'
|
||||
|
||||
- name: Repo init test
|
||||
timeout-minutes: 10
|
||||
id: repo-init
|
||||
run: |
|
||||
set -euo pipefail
|
||||
prefix=$(date +"%Y-%m-%d-%T")
|
||||
echo -e "\nRepo init test\n" >> ${{ env.CORSO_LOG_FILE }}
|
||||
./corso repo init s3 \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
--prefix $prefix \
|
||||
--bucket ${{ secrets.CI_TESTS_S3_BUCKET }} \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
|
||||
|
||||
if ! grep -q 'Initialized a S3 repository within bucket' ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
|
||||
then
|
||||
echo "Repo could not be initialized"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo result="$prefix" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Repo connect test
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo -e "\nRepo connect test\n" >> ${{ env.CORSO_LOG_FILE }}
|
||||
./corso repo connect s3 \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
--prefix ${{ steps.repo-init.outputs.result }} \
|
||||
--bucket ${{ secrets.CI_TESTS_S3_BUCKET }} \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-connect.log
|
||||
|
||||
if ! grep -q 'Connected to S3 bucket' ${{ env.CORSO_LOG_DIR }}/gotest-repo-connect.log
|
||||
then
|
||||
echo "Repo could not be connected"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Run maintenance on an empty repo just to make sure the command still
|
||||
# works.
|
||||
- name: Repo maintenance test
|
||||
timeout-minutes: 30
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo -e "\nRepo maintenance test\n" >> ${{ env.CORSO_LOG_FILE }}
|
||||
./corso repo maintenance \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
--mode complete \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-maintenance.log
|
||||
|
||||
##########################################################################################################################################
|
||||
|
||||
# Exchange
|
||||
|
||||
# generate new entries to roll into the next load test
|
||||
# only runs if the test was successful
|
||||
- name: Exchange - Create new data
|
||||
timeout-minutes: 30
|
||||
working-directory: ./src/cmd/factory
|
||||
run: |
|
||||
go run . exchange emails \
|
||||
--user ${{ env.TEST_USER }} \
|
||||
--tenant ${{ secrets.TENANT_ID }} \
|
||||
--destination ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }} \
|
||||
--count 4
|
||||
|
||||
- name: Exchange - Backup
|
||||
timeout-minutes: 30
|
||||
id: exchange-backup
|
||||
uses: ./.github/actions/backup-restore-test
|
||||
with:
|
||||
service: exchange
|
||||
kind: first-backup
|
||||
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
|
||||
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
|
||||
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
|
||||
log-dir: ${{ env.CORSO_LOG_DIR }}
|
||||
with-export: true
|
||||
|
||||
- name: Exchange - Incremental backup
|
||||
timeout-minutes: 30
|
||||
id: exchange-backup-incremental
|
||||
uses: ./.github/actions/backup-restore-test
|
||||
with:
|
||||
service: exchange
|
||||
kind: incremental
|
||||
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
|
||||
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
|
||||
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
|
||||
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
|
||||
log-dir: ${{ env.CORSO_LOG_DIR }}
|
||||
with-export: true
|
||||
|
||||
- name: Exchange - Non delta backup
|
||||
timeout-minutes: 30
|
||||
id: exchange-backup-non-delta
|
||||
uses: ./.github/actions/backup-restore-test
|
||||
with:
|
||||
service: exchange
|
||||
kind: non-delta
|
||||
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email" --disable-delta'
|
||||
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
|
||||
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
|
||||
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
|
||||
log-dir: ${{ env.CORSO_LOG_DIR }}
|
||||
with-export: true
|
||||
|
||||
- name: Exchange - Incremental backup after non-delta
|
||||
timeout-minutes: 30
|
||||
id: exchange-backup-incremental-after-non-delta
|
||||
uses: ./.github/actions/backup-restore-test
|
||||
with:
|
||||
service: exchange
|
||||
kind: non-delta-incremental
|
||||
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
|
||||
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
|
||||
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
|
||||
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
|
||||
log-dir: ${{ env.CORSO_LOG_DIR }}
|
||||
with-export: true
|
||||
|
||||
##########################################################################################################################################
|
||||
|
||||
# Onedrive
|
||||
|
||||
# generate new entries for test
|
||||
- name: OneDrive - Create new data
|
||||
id: new-data-creation-onedrive
|
||||
timeout-minutes: 30
|
||||
working-directory: ./src/cmd/factory
|
||||
run: |
|
||||
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
|
||||
|
||||
go run . onedrive files \
|
||||
--user ${{ env.TEST_USER }} \
|
||||
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
|
||||
--tenant ${{ secrets.TENANT_ID }} \
|
||||
--destination ${{ env.RESTORE_DEST_PFX }}$suffix \
|
||||
--count 4
|
||||
|
||||
echo result="${suffix}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: OneDrive - Backup
|
||||
id: onedrive-backup
|
||||
timeout-minutes: 30
|
||||
uses: ./.github/actions/backup-restore-test
|
||||
with:
|
||||
service: onedrive
|
||||
kind: first-backup
|
||||
backup-args: '--user "${{ env.TEST_USER }}"'
|
||||
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
|
||||
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
|
||||
log-dir: ${{ env.CORSO_LOG_DIR }}
|
||||
with-export: true
|
||||
|
||||
# generate some more enteries for incremental check
|
||||
- name: OneDrive - Create new data (for incremental)
|
||||
timeout-minutes: 30
|
||||
working-directory: ./src/cmd/factory
|
||||
run: |
|
||||
go run . onedrive files \
|
||||
--user ${{ env.TEST_USER }} \
|
||||
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
|
||||
--tenant ${{ secrets.TENANT_ID }} \
|
||||
--destination ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }} \
|
||||
--count 4
|
||||
|
||||
- name: OneDrive - Incremental backup
|
||||
id: onedrive-incremental
|
||||
timeout-minutes: 30
|
||||
uses: ./.github/actions/backup-restore-test
|
||||
with:
|
||||
service: onedrive
|
||||
kind: incremental
|
||||
backup-args: '--user "${{ env.TEST_USER }}"'
|
||||
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
|
||||
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
|
||||
log-dir: ${{ env.CORSO_LOG_DIR }}
|
||||
with-export: true
|
||||
|
||||
##########################################################################################################################################
|
||||
|
||||
# Sharepoint Library
|
||||
|
||||
# generate new entries for test
|
||||
- name: SharePoint - Create new data
|
||||
id: new-data-creation-sharepoint
|
||||
timeout-minutes: 30
|
||||
working-directory: ./src/cmd/factory
|
||||
run: |
|
||||
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
|
||||
|
||||
go run . sharepoint files \
|
||||
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
|
||||
--user ${{ env.TEST_USER }} \
|
||||
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
|
||||
--tenant ${{ secrets.TENANT_ID }} \
|
||||
--destination ${{ env.RESTORE_DEST_PFX }}$suffix \
|
||||
--count 4
|
||||
|
||||
echo result="${suffix}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: SharePoint - Backup
|
||||
id: sharepoint-backup
|
||||
timeout-minutes: 30
|
||||
uses: ./.github/actions/backup-restore-test
|
||||
with:
|
||||
service: sharepoint
|
||||
kind: first-backup
|
||||
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
|
||||
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
|
||||
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
|
||||
log-dir: ${{ env.CORSO_LOG_DIR }}
|
||||
with-export: true
|
||||
category: libraries
|
||||
|
||||
# generate some more enteries for incremental check
|
||||
- name: SharePoint - Create new data (for incremental)
|
||||
timeout-minutes: 30
|
||||
working-directory: ./src/cmd/factory
|
||||
run: |
|
||||
go run . sharepoint files \
|
||||
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
|
||||
--user ${{ env.TEST_USER }} \
|
||||
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
|
||||
--tenant ${{ secrets.TENANT_ID }} \
|
||||
--destination ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }} \
|
||||
--count 4
|
||||
|
||||
- name: SharePoint - Incremental backup
|
||||
id: sharepoint-incremental
|
||||
timeout-minutes: 30
|
||||
uses: ./.github/actions/backup-restore-test
|
||||
with:
|
||||
service: sharepoint
|
||||
kind: incremental
|
||||
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
|
||||
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
|
||||
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
|
||||
log-dir: ${{ env.CORSO_LOG_DIR }}
|
||||
with-export: true
|
||||
category: libraries
|
||||
|
||||
##########################################################################################################################################
|
||||
|
||||
# Sharepoint Lists
|
||||
|
||||
# generate new entries for test
|
||||
# The `awk | tr | sed` command chain is used to get a comma separated list of SharePoint list names.
|
||||
- name: SharePoint Lists - Create new data
|
||||
id: new-data-creation-sharepoint-lists
|
||||
timeout-minutes: 30
|
||||
working-directory: ./src/cmd/factory
|
||||
run: |
|
||||
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
|
||||
|
||||
go run . sharepoint lists \
|
||||
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
|
||||
--user ${{ env.TEST_USER }} \
|
||||
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
|
||||
--tenant ${{ secrets.TENANT_ID }} \
|
||||
--destination ${{ env.RESTORE_DEST_PFX }}$suffix \
|
||||
--count 4 |
|
||||
awk 'NR > 1 {print $2}' | tr '\n' ',' | sed -e 's/,$//' -e 's/^/result=/' |
|
||||
tee $GITHUB_OUTPUT
|
||||
# Extracts the common prefix for the Sharepoint list names.
|
||||
- name: SharePoint Lists - Store restore container
|
||||
id: sharepoint-lists-store-restore-container
|
||||
run: |
|
||||
echo ${{ steps.new-data-creation-sharepoint-lists.outputs.result }} |
|
||||
cut -d',' -f1 |
|
||||
cut -d'_' -f1,2,3,4,5 |
|
||||
sed -e 's/^/result=/' |
|
||||
tee $GITHUB_OUTPUT
|
||||
|
||||
- name: SharePoint Lists - Backup
|
||||
id: sharepoint-lists-backup
|
||||
timeout-minutes: 30
|
||||
uses: ./.github/actions/backup-restore-test
|
||||
with:
|
||||
service: sharepoint
|
||||
kind: first-backup-lists
|
||||
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data lists'
|
||||
restore-args: "--list ${{ steps.new-data-creation-sharepoint-lists.outputs.result }} --destination Corso_Test_Sanity_Restore_$(date +'%Y%m%d_%H%M%S')"
|
||||
export-args: "--list ${{ steps.new-data-creation-sharepoint-lists.outputs.result }}"
|
||||
restore-container: "${{ steps.sharepoint-lists-store-restore-container.outputs.result }}"
|
||||
log-dir: ${{ env.CORSO_LOG_DIR }}
|
||||
with-export: true
|
||||
category: lists
|
||||
on-collision: copy
|
||||
|
||||
# generate some more enteries for incremental check
|
||||
- name: SharePoint Lists - Create new data (for incremental)
|
||||
id: inc-data-creation-sharepoint-lists
|
||||
timeout-minutes: 30
|
||||
working-directory: ./src/cmd/factory
|
||||
run: |
|
||||
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
|
||||
|
||||
go run . sharepoint lists \
|
||||
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
|
||||
--user ${{ env.TEST_USER }} \
|
||||
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
|
||||
--tenant ${{ secrets.TENANT_ID }} \
|
||||
--destination ${{ env.RESTORE_DEST_PFX }}$suffix \
|
||||
--count 4 |
|
||||
awk 'NR > 1 {print $2}' | tr '\n' ',' | sed -e 's/,$//' -e 's/^/result=/' |
|
||||
tee $GITHUB_OUTPUT
|
||||
|
||||
- name: SharePoint Lists - Store restore container (for incremental)
|
||||
id: sharepoint-lists-store-restore-container-inc
|
||||
run: |
|
||||
echo ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }} |
|
||||
cut -d',' -f1 |
|
||||
cut -d'_' -f1,2,3,4,5 |
|
||||
sed -e 's/^/result=/' |
|
||||
tee $GITHUB_OUTPUT
|
||||
|
||||
- name: SharePoint Lists - Incremental backup
|
||||
id: sharepoint-lists-incremental
|
||||
timeout-minutes: 30
|
||||
uses: ./.github/actions/backup-restore-test
|
||||
with:
|
||||
service: sharepoint
|
||||
kind: incremental-lists
|
||||
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data lists'
|
||||
restore-args: "--list ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }},${{ steps.new-data-creation-sharepoint-lists.outputs.result }} --destination Corso_Test_Sanity_Restore_$(date +'%Y%m%d_%H%M%S')"
|
||||
export-args: "--list ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }},${{ steps.new-data-creation-sharepoint-lists.outputs.result }}"
|
||||
restore-container: "${{ steps.sharepoint-lists-store-restore-container-inc.outputs.result }},${{ steps.sharepoint-lists-store-restore-container.outputs.result }}"
|
||||
log-dir: ${{ env.CORSO_LOG_DIR }}
|
||||
with-export: true
|
||||
category: lists
|
||||
on-collision: copy
|
||||
|
||||
##########################################################################################################################################
|
||||
|
||||
# Groups and Teams
|
||||
|
||||
# generate new entries for test
|
||||
- name: Groups - Create new data
|
||||
id: new-data-creation-groups
|
||||
timeout-minutes: 30
|
||||
working-directory: ./src/cmd/factory
|
||||
run: |
|
||||
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
|
||||
|
||||
go run . sharepoint files \
|
||||
--site ${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }} \
|
||||
--user ${{ env.TEST_USER }} \
|
||||
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
|
||||
--tenant ${{ secrets.TENANT_ID }} \
|
||||
--destination ${{ env.RESTORE_DEST_PFX }}$suffix \
|
||||
--count 4
|
||||
|
||||
echo result="${suffix}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Groups - Backup
|
||||
id: groups-backup
|
||||
timeout-minutes: 30
|
||||
uses: ./.github/actions/backup-restore-test
|
||||
with:
|
||||
service: groups
|
||||
kind: first-backup
|
||||
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
|
||||
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
|
||||
log-dir: ${{ env.CORSO_LOG_DIR }}
|
||||
with-export: true
|
||||
|
||||
# generate some more entries for incremental check
|
||||
- name: Groups - Create new data (for incremental)
|
||||
timeout-minutes: 30
|
||||
working-directory: ./src/cmd/factory
|
||||
run: |
|
||||
go run . sharepoint files \
|
||||
--site ${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }} \
|
||||
--user ${{ env.TEST_USER }} \
|
||||
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
|
||||
--tenant ${{ secrets.TENANT_ID }} \
|
||||
--destination ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }} \
|
||||
--count 4
|
||||
|
||||
- name: Groups - Incremental backup
|
||||
id: groups-incremental
|
||||
timeout-minutes: 30
|
||||
uses: ./.github/actions/backup-restore-test
|
||||
with:
|
||||
service: groups
|
||||
kind: incremental
|
||||
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
|
||||
restore-args: '--site "${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }}" --folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
|
||||
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
|
||||
log-dir: ${{ env.CORSO_LOG_DIR }}
|
||||
with-export: true
|
||||
|
||||
##########################################################################################################################################
|
||||
|
||||
# Logging & Notifications
|
||||
|
||||
# Upload the original go test output as an artifact for later review.
|
||||
- name: Upload test log
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: sanity-test-log
|
||||
path: ${{ env.CORSO_LOG_DIR }}/*
|
||||
if-no-files-found: error
|
||||
retention-days: 14
|
||||
|
||||
- name: Notify failure in teams
|
||||
if: failure()
|
||||
uses: ./.github/actions/teams-message
|
||||
with:
|
||||
msg: "[CORSO FAILED] Sanity Tests"
|
||||
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
|
||||
23
.github/workflows/testnotification.yml
vendored
@ -1,23 +0,0 @@
|
||||
name: Manually Test Teams Action
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
msg:
|
||||
description: 'Message to send:'
|
||||
required: true
|
||||
default: 'This is a test message'
|
||||
|
||||
jobs:
|
||||
notify:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Send notification
|
||||
uses: ./.github/actions/teams-message
|
||||
with:
|
||||
msg: ${{ github.event.inputs.msg }}
|
||||
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
|
||||
34
.github/workflows/website-ci.yml
vendored
Normal file
@ -0,0 +1,34 @@
|
||||
name: CI for Website
|
||||
on:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
push:
|
||||
branches: [main]
|
||||
|
||||
# cancel currently running jobs if a new version of the branch is pushed
|
||||
concurrency:
|
||||
group: website-ci-${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
Precheck:
|
||||
uses: alcionai/corso/.github/workflows/_filechange_checker.yml@main
|
||||
|
||||
Website-Build:
|
||||
needs: [Precheck]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
if: github.ref == 'refs/heads/main' || needs.precheck.outputs.websitefileschanged == 'true'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: website
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Build website image
|
||||
run: |
|
||||
make buildimage
|
||||
|
||||
- name: Build website
|
||||
run: |
|
||||
make build
|
||||
59
.github/workflows/website-publish.yml
vendored
@ -9,58 +9,27 @@ permissions:
|
||||
packages: write
|
||||
pull-requests: read
|
||||
|
||||
# cancel currently running jobs if a new version of the branch is pushed
|
||||
concurrency:
|
||||
group: push-website-${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
SetEnv:
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
version: ${{ steps.version.outputs.version }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0 # needed to get latest tag
|
||||
|
||||
- name: Get version string
|
||||
id: version
|
||||
run: |
|
||||
echo version=$(git describe --tags --abbrev=0) | tee -a $GITHUB_OUTPUT
|
||||
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
# --- Website Linting -----------------------------------------------------------------------------------
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
|
||||
Website-Linting:
|
||||
needs: [SetEnv]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Lint Website
|
||||
uses: ./.github/actions/website-linting
|
||||
with:
|
||||
version: ${{ needs.SetEnv.outputs.version }}
|
||||
|
||||
Publish-Website:
|
||||
needs: [Website-Linting]
|
||||
environment: Production
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: website
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4 # need to checkout to make the action available
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Publish website
|
||||
uses: ./.github/actions/publish-website
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@v1
|
||||
with:
|
||||
aws-iam-role: ${{ secrets.AWS_IAM_ROLE }}
|
||||
bucket: "corsobackup.io"
|
||||
cfid: "E1W9NGI9YTVZ1A"
|
||||
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
|
||||
role-session-name: integration-testing
|
||||
aws-region: us-east-1
|
||||
|
||||
- name: Build docs image
|
||||
run: |
|
||||
make buildimage
|
||||
|
||||
- name: Build & Publish docs
|
||||
run: |
|
||||
make publish
|
||||
26
.github/workflows/weekly_cleanup.yml
vendored
Normal file
@ -0,0 +1,26 @@
|
||||
name: Weekly S3 Test Bucket Cleanup
|
||||
on:
|
||||
schedule:
|
||||
# every saturday at 23:59 (11:59pm)
|
||||
- cron: "59 23 * * 6"
|
||||
|
||||
permissions:
|
||||
# required to retrieve AWS credentials
|
||||
id-token: write
|
||||
|
||||
jobs:
|
||||
S3-Test-Cleanup:
|
||||
runs-on: ubuntu-latest
|
||||
environment: Testing
|
||||
|
||||
steps:
|
||||
- name: Configure AWS credentials from Test account
|
||||
uses: aws-actions/configure-aws-credentials@v1
|
||||
with:
|
||||
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
|
||||
role-session-name: integration-testing
|
||||
aws-region: us-east-1
|
||||
|
||||
- name: Delete all files in the test bucket
|
||||
run: |
|
||||
aws s3 rm s3://${{ secrets.CI_TESTS_S3_BUCKET }} --recursive
|
||||
9
.gitignore
vendored
@ -8,8 +8,6 @@
|
||||
|
||||
# Test binary, built with `go test -c`
|
||||
*.test
|
||||
test_results/
|
||||
testlog/
|
||||
|
||||
# Output of the go coverage tool, specifically when used with LiteIDE
|
||||
*.out
|
||||
@ -22,10 +20,9 @@ testlog/
|
||||
.corso_test.toml
|
||||
.corso.toml
|
||||
|
||||
# Logging
|
||||
*.log
|
||||
|
||||
# Build directories
|
||||
/bin
|
||||
/docker/bin
|
||||
/website/dist
|
||||
/website/dist
|
||||
|
||||
*/test_results/**
|
||||
475
CHANGELOG.md
@ -5,457 +5,8 @@ All notable changes to this project will be documented in this file.
|
||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## [Unreleased] (beta)
|
||||
### Fixed
|
||||
- Handle the case where an email or event cannot be retrieved from Exchange due to an `ErrorCorruptData` error. Corso will skip over the item but report it in the backup summary.
|
||||
- Emails attached within other emails are now correctly exported
|
||||
- Gracefully handle email and post attachments without name when exporting to eml
|
||||
- Use correct timezone for event start and end times in Exchange exports (helps fix issues in relative recurrence patterns)
|
||||
- Fixed an issue causing exports dealing with calendar data to have high memory usage
|
||||
## [Unreleased]
|
||||
|
||||
## [v0.19.0] (beta) - 2024-02-06
|
||||
|
||||
### Added
|
||||
- Events can now be exported from Exchange backups as .ics files.
|
||||
- Update repo init configuration to reduce the total number of GET requests sent
|
||||
to the object store when using corso. This affects repos that have many
|
||||
backups created in them per day the most.
|
||||
- Feature Preview: Corso now supports backup, export & restore of SharePoint lists. Lists backup can be initiated using `corso backup create sharepoint --site <site-url> --data lists`.
|
||||
- Group mailbox(aka conversations) backup and export support is now officially available. Group mailbox posts can be exported as `.eml` files.
|
||||
|
||||
### Fixed
|
||||
- Retry transient 400 "invalidRequest" errors during onedrive & sharepoint backup.
|
||||
- Backup attachments associated with group mailbox items.
|
||||
- Groups and Teams backups no longer fail when a resource has no display name.
|
||||
- Contacts in-place restore failed if the restore destination was empty.
|
||||
- Link shares with external users are now backed up and restored as expected
|
||||
- Ensure persistent repo config is populated on repo init if repo init failed partway through during the previous init attempt.
|
||||
|
||||
### Changed
|
||||
- When running `backup details` on an empty backup returns a more helpful error message.
|
||||
- Backup List additionally shows the data category for each backup.
|
||||
- Remove hidden `--succeed-if-exists` flag for repo init. Repo init will now succeed without error if run on an existing repo with the same passphrase.
|
||||
|
||||
### Known issues
|
||||
- Backing up a group mailbox item may fail if it has a very large number of attachments (500+).
|
||||
- Event description for exchange exports might look slightly different for certain events.
|
||||
- Exchange in-place restore may restore items in well-known folders to different folders if the user has well-known folder names change based on locale and has updated the locale since the backup was created.
|
||||
- In-place Exchange contacts restore will merge items in folders named "Contacts" or "contacts" into the default folder.
|
||||
- External users with access through shared links will not receive these links as they are not sent via email during restore.
|
||||
- Graph API has limited support for certain column types such as `location`, `hyperlink/picture`, and `metadata`. Restoring SharePoint list items containing these columns will result in differences compared to the original items.
|
||||
- SharePoint list item attachments are not available due to graph API limitations.
|
||||
- Group mailbox restore is not supported due to limited Graph API support for creating mailbox items.
|
||||
- Due to Graph API limitations, any group mailbox items present in subfolders other than Inbox aren't backed up.
|
||||
|
||||
## [v0.18.0] (beta) - 2024-01-02
|
||||
|
||||
### Fixed
|
||||
- Handle the case where an email cannot be retrieved from Exchange due to an `ErrorInvalidRecipients` error. In
|
||||
this case, Corso will skip over the item but report this in the backup summary.
|
||||
- Fix `ErrorItemNotFound` errors when restoring emails with multiple attachments.
|
||||
- Avoid Graph SDK `Requests must contain extension changes exclusively.` errors by removing server-populated field from restored event items.
|
||||
- Improve Group mailbox(conversations) backup performance by only downloading new items or items with modified content.
|
||||
- Handle cases where Exchange backup stored invalid JSON blobs if there were special characters in the user content. These would result in errors during restore.
|
||||
|
||||
### Known issues
|
||||
- Restoring OneDrive, SharePoint, or Teams & Groups items shared with external users while the tenant or site is configured to not allow sharing with external users will not restore permissions.
|
||||
|
||||
### Added
|
||||
- Contacts can now be exported from Exchange backups as .vcf files
|
||||
|
||||
## [v0.17.0] (beta) - 2023-12-11
|
||||
|
||||
### Changed
|
||||
- Memory optimizations for large scale OneDrive and Sharepoint backups.
|
||||
|
||||
### Fixed
|
||||
- Resolved a possible deadlock when backing up Teams Channel Messages.
|
||||
- Fixed an attachment download failure(ErrorTooManyObjectsOpened) during exchange backup.
|
||||
|
||||
## [v0.16.0] (beta) - 2023-11-28
|
||||
|
||||
### Added
|
||||
- Export support for emails in exchange backups as `.eml` files.
|
||||
- More colorful and informational cli display.
|
||||
|
||||
### Changed
|
||||
- The file extension in Teams messages exports has switched to json to match the content type.
|
||||
- SDK consumption of the /services/m365 package has shifted from independent functions to a client-based api.
|
||||
- SDK consumers can now configure the /services/m365 graph api client configuration when constructing a new m365 client.
|
||||
- Dynamic api rate limiting allows small-scale Exchange backups to complete more quickly.
|
||||
- Kopia's local config files now uses unique filenames that match Corso configurations. This can protect concurrent Corso operations from mistakenly clobbering storage configs during runtime.
|
||||
|
||||
### Fixed
|
||||
- Handle OneDrive folders being deleted and recreated midway through a backup.
|
||||
- Automatically re-run a full delta query on incremental if the prior backup is found to have malformed prior-state information.
|
||||
- Retry drive item permission downloads during long-running backups after the jwt token expires and refreshes.
|
||||
- Retry item downloads during connection timeouts.
|
||||
|
||||
## [v0.15.0] (beta) - 2023-10-31
|
||||
|
||||
### Added
|
||||
- Added `corso repo update-passphrase` command to update the passphrase of an existing Corso repository
|
||||
- Added Subject and Message preview to channel messages detail entries
|
||||
|
||||
### Fixed
|
||||
- SharePoint backup would fail if any site had an empty display name
|
||||
- Fix a bug with exports hanging post completion
|
||||
- Handle 503 errors in nested OneDrive packages
|
||||
|
||||
### Changed
|
||||
- Item Details formatting in Groups and Teams backups
|
||||
|
||||
## [v0.14.2] (beta) - 2023-10-17
|
||||
|
||||
### Added
|
||||
- Skips graph calls for expired item download URLs.
|
||||
- Export operation now shows the stats at the end of the run
|
||||
|
||||
### Fixed
|
||||
- Catch and report cases where a protected resource is locked out of access. SDK consumers have a new errs sentinel that allows them to check for this case.
|
||||
- Fix a case where missing item LastModifiedTimes could cause incremental backups to fail.
|
||||
- Email size metadata was incorrectly set to the size of the last attachment. Emails will now correctly report the size of the mail content plus the size of all attachments.
|
||||
- Improves the filtering capabilities for Groups restore and backup
|
||||
- Improve check to skip OneNote files that cannot be downloaded.
|
||||
- Fix Groups backup for non Team groups
|
||||
|
||||
### Changed
|
||||
- Groups restore now expects the site whose backup we should restore
|
||||
|
||||
## [v0.14.0] (beta) - 2023-10-09
|
||||
|
||||
### Added
|
||||
- Enables local or network-attached storage for Corso repositories.
|
||||
- Reduce backup runtime for OneDrive and SharePoint incremental backups that have no file changes.
|
||||
- Increase Exchange backup performance by lazily fetching data only for items whose content changed.
|
||||
- Added `--backups` flag to delete multiple backups in `corso backup delete` command.
|
||||
- Backup now includes all sites that belongs to a team, not just the root site.
|
||||
|
||||
### Fixed
|
||||
- Teams Channels that cannot support delta tokens (those without messages) fall back to non-delta enumeration and no longer fail a backup.
|
||||
|
||||
### Known issues
|
||||
- Restoring the data into a different Group from the one it was backed up from is not currently supported
|
||||
|
||||
### Other
|
||||
- Groups and Teams service support is still in feature preview
|
||||
|
||||
## [v0.13.0] (beta) - 2023-09-18
|
||||
|
||||
### Added
|
||||
- Groups and Teams service support available as a feature preview! Channel messages and Files are now available for backup and restore in the CLI: `corso backup create groups --group '*'`
|
||||
- The cli commands for "groups" and "teams" can be used interchangeably, and will operate on the same backup data.
|
||||
- New permissions are required to backup Channel messages. See the [Corso Documentation](https://corsobackup.io/docs/setup/m365-access/#configure-required-permissions) for complete details.
|
||||
Even though Channel message restoration is not available, message write permissions are included to cover future integration.
|
||||
- This is a feature preview, and may be subject to breaking changes based on feedback and testing.
|
||||
|
||||
### Changed
|
||||
- Switched to Go 1.21
|
||||
- SharePoint exported libraries are now exported with a `Libraries` prefix.
|
||||
|
||||
### Fixed
|
||||
- Contacts backups no longer slices root-folder data if outlook is set to languages other than english.
|
||||
- Failed backups if the --disable-incrementals flag was passed when there was a valid merge base under some conditions.
|
||||
|
||||
## [v0.12.0] (beta) - 2023-08-29
|
||||
|
||||
### Added
|
||||
- Added `export` command to export data from OneDrive and SharePoint backups as individual files or as a single zip file.
|
||||
- Restore commands now accept an optional resource override with the `--to-resource` flag. This allows restores to recreate backup data within different mailboxes, sites, and users.
|
||||
- Improve `--mask-sensitive-data` logging mode.
|
||||
- Reliability: Handle connection cancellation and resets observed when backing up or restoring large data sets.
|
||||
- Reliability: Recover from Graph SDK panics when the Graph API returns incomplete responses.
|
||||
- Performance: Improve backup delete performance by batching multiple storage operations into a single operation.
|
||||
|
||||
### Fixed
|
||||
- SharePoint document libraries deleted after the last backup can now be restored.
|
||||
- Restore requires the protected resource to have access to the service being restored.
|
||||
- SharePoint data from multiple document libraries are not merged in exports
|
||||
- `corso backup delete` was not removing the backup details data associated with that snapshot
|
||||
- Fix OneDrive restores could fail with a concurrent map write error
|
||||
- Fix backup list displaying backups that had errors
|
||||
- Fix OneDrive backup could fail if item was deleted during backup
|
||||
- Exchange backups would fail attempting to use delta tokens even if the user was over quota
|
||||
|
||||
|
||||
## [v0.11.1] (beta) - 2023-07-20
|
||||
|
||||
### Fixed
|
||||
- Allow repo connect to succeed when a `corso.toml` file was not provided but configuration is specified using environment variables and flags.
|
||||
|
||||
## [v0.11.0] (beta) - 2023-07-18
|
||||
|
||||
### Added
|
||||
- Drive items backup and restore link shares
|
||||
- Restore commands now accept an optional top-level restore destination with the `--destination` flag. Setting the destination to '/' will restore items back into their original location.
|
||||
- Restore commands can specify item collision behavior. Options are Skip (default), Replace, and Copy.
|
||||
- Introduced repository maintenance commands to help optimize the repository as well as unreferenced data.
|
||||
|
||||
### Fixed
|
||||
- Return a ServiceNotEnabled error when a tenant has no active SharePoint license.
|
||||
- Added retries for http/2 stream connection failures when downloading large item content.
|
||||
- SharePoint document libraries that were deleted after the last backup can now be restored.
|
||||
|
||||
### Known issues
|
||||
- If a link share is created for an item with inheritance disabled
|
||||
(via the Graph API), the link shares restored in that item will
|
||||
not be inheritable by children
|
||||
- Link shares with password protection can't be restored
|
||||
|
||||
## [v0.10.0] (beta) - 2023-06-26
|
||||
|
||||
### Added
|
||||
- Exceptions and cancellations for recurring events are now backed up and restored
|
||||
- Introduced a URL cache for OneDrive that helps reduce Graph API calls for long running (>1hr) backups
|
||||
- Improve incremental backup behavior by leveraging information from incomplete backups
|
||||
- Improve restore performance and memory use for Exchange and OneDrive
|
||||
|
||||
### Fixed
|
||||
- Handle OLE conversion errors when trying to fetch attachments
|
||||
- Fix uploading large attachments for emails and calendar
|
||||
- Fixed high memory use in OneDrive backup related to logging
|
||||
- Return a ServiceNotEnabled error when a tenant has no active SharePoint license.
|
||||
|
||||
### Changed
|
||||
- Switched to Go 1.20
|
||||
|
||||
## [v0.9.0] (beta) - 2023-06-05
|
||||
|
||||
### Added
|
||||
- Added ProtectedResourceName to the backup list json output. ProtectedResourceName holds either a UPN or a WebURL, depending on the resource type.
|
||||
- Rework base selection logic for incremental backups so it's more likely to find a valid base.
|
||||
- Improve OneDrive restore performance by paralleling item restores
|
||||
|
||||
### Fixed
|
||||
- Fix Exchange folder cache population error when parent folder isn't found.
|
||||
- Fix Exchange backup issue caused by incorrect json serialization
|
||||
- Fix issues with details model containing duplicate entry for api consumers
|
||||
|
||||
### Changed
|
||||
- Do not display all the items that we restored at the end if there are more than 15. You can override this with `--verbose`.
|
||||
|
||||
## [v0.8.0] (beta) - 2023-05-15
|
||||
|
||||
### Added
|
||||
- Released the --mask-sensitive-data flag, which will automatically obscure private data in logs.
|
||||
- Added `--disable-delta` flag to disable delta based backups for Exchange
|
||||
- Permission support for SharePoint libraries.
|
||||
|
||||
### Fixed
|
||||
- Graph requests now automatically retry in case of a Bad Gateway or Gateway Timeout.
|
||||
- POST Retries following certain status codes (500, 502, 504) will re-use the post body instead of retrying with a no-content request.
|
||||
- Fix nil pointer exception when running an incremental backup on SharePoint where the base backup used an older index data format.
|
||||
- --user and --mailbox flags have been removed from CLI examples for details and restore commands (they were already not supported, this only updates the docs).
|
||||
- Improve restore time on large restores by optimizing how items are loaded from the remote repository.
|
||||
- Remove exchange item filtering based on m365 item ID via the CLI.
|
||||
- OneDrive backups no longer include a user's non-default drives.
|
||||
- OneDrive and SharePoint file downloads will properly redirect from 3xx responses.
|
||||
- Refined oneDrive rate limiter controls to reduce throttling errors.
|
||||
- Fix handling of duplicate folders at the same hierarchy level in Exchange. Duplicate folders will be merged during restore operations.
|
||||
- Fix backup for mailboxes that has used up all their storage quota
|
||||
- Restored folders no longer appear in the Restore results. Only restored items will be displayed.
|
||||
|
||||
### Known Issues
|
||||
- Restore operations will merge duplicate Exchange folders at the same hierarchy level into a single folder.
|
||||
- Sharepoint SiteGroup permissions are not restored.
|
||||
- SharePoint document library data can't be restored after the library has been deleted.
|
||||
|
||||
## [v0.7.0] (beta) - 2023-05-02
|
||||
|
||||
### Added
|
||||
- Permissions backup for OneDrive is now out of experimental (By default, only newly backed up items will have their permissions backed up. You will have to run a full backup to ensure all items have their permissions backed up.)
|
||||
- LocationRef is now populated for all services and data types. It should be used in place of RepoRef if a location for an item is required.
|
||||
- User selection for Exchange and OneDrive can accept either a user PrincipalName or the user's canonical ID.
|
||||
- Add path information to items that were skipped during backup because they were flagged as malware.
|
||||
|
||||
### Fixed
|
||||
- Fixed permissions restore in latest backup version.
|
||||
- Incremental OneDrive backups could panic if the delta token expired and a folder was seen and deleted in the course of item enumeration for the backup.
|
||||
- Incorrectly moving subfolder hierarchy from a deleted folder to a new folder at the same path during OneDrive incremental backup.
|
||||
- Handle calendar events with no body.
|
||||
- Items not being deleted if they were created and deleted during item enumeration of a OneDrive backup.
|
||||
- Enable compression for all data uploaded by kopia.
|
||||
- SharePoint --folder selectors correctly return items.
|
||||
- Fix Exchange cli args for filtering items
|
||||
- Skip OneNote items bigger than 2GB (Graph API prevents us from downloading them)
|
||||
- ParentPath of json output for Exchange calendar now shows names instead of IDs.
|
||||
- Fixed failure when downloading huge amount of attachments
|
||||
- Graph API requests that return an ECONNRESET error are now retried.
|
||||
- Fixed edge case in incremental backups where moving a subfolder, deleting and recreating the subfolder's original parent folder, and moving the subfolder back to where it started would skip backing up unchanged items in the subfolder.
|
||||
- SharePoint now correctly displays site urls on `backup list`, instead of the site id.
|
||||
- Drives with a directory containing a folder named 'folder' will now restore without error.
|
||||
- The CORSO_LOG_FILE env is appropriately utilized if no --log-file flag is provided.
|
||||
- Fixed Exchange events progress output to show calendar names instead of IDs.
|
||||
- Fixed reporting no items match if restoring or listing details on an older Exchange backup and filtering by folder.
|
||||
- Fix backup for mailboxes that have used up all their storage quota
|
||||
|
||||
### Known Issues
|
||||
- Restoring a OneDrive or SharePoint file with the same name as a file with that name as its M365 ID may restore both items.
|
||||
- Exchange event restores will display calendar IDs instead of names in the progress output.
|
||||
|
||||
## [v0.6.1] (beta) - 2023-03-21
|
||||
|
||||
### Added
|
||||
- Sharepoint library (document files) support: backup, list, details, and restore.
|
||||
- OneDrive item downloads that return 404 during backup (normally due to external deletion while Corso processes) are now skipped instead of quietly dropped. These items will appear in the skipped list alongside other skipped cases such as malware detection.
|
||||
- Listing a single backup by id will also list the skipped and failed items that occurred during the backup. These can be filtered out with the flags `--failed-items hide`, `--skipped-items hide`, and `--recovered-errors hide`.
|
||||
- Enable incremental backups for OneDrive if permissions aren't being backed up.
|
||||
- Show progressbar while files for user are enumerated
|
||||
- Hidden flag to control parallelism for fetching Exchange items (`--fetch-parallelism`). May help reduce `ApplicationThrottled` errors but will slow down backup.
|
||||
|
||||
### Fixed
|
||||
- Fix repo connect not working without a config file
|
||||
- Fix item re-download on expired links silently being skipped
|
||||
- Improved permissions backup and restore for OneDrive
|
||||
|
||||
### Known Issues
|
||||
- Owner (Full control) or empty (Restricted View) roles cannot be restored for OneDrive
|
||||
- OneDrive will not do an incremental backup if permissions are being backed up.
|
||||
- SharePoint --folder selection in details and restore always return "no items match the specified selectors".
|
||||
- Event instance exceptions (ie: changes to a single event within a recurring series) are not backed up.
|
||||
|
||||
## [v0.5.0] (beta) - 2023-03-13
|
||||
|
||||
### Added
|
||||
- Show owner information when doing backup list in json format
|
||||
- Permissions for groups can now be backed up and restored
|
||||
- Onedrive files that are flagged as malware get skipped during backup. Skipped files are listed in the backup results as part of the status, including a reference to their categorization, eg: "Completed (0 errors, 1 skipped: 1 malware)".
|
||||
|
||||
### Fixed
|
||||
- Corso-generated .meta files and permissions no longer appear in the backup details.
|
||||
- Panic and recovery if a user didn't exist in the tenant.
|
||||
|
||||
### Known Issues
|
||||
- Folders and Calendars containing zero items or subfolders are not included in the backup.
|
||||
- OneDrive files ending in `.meta` or `.dirmeta` are omitted from details and restores.
|
||||
- Backups generated prior to this version will show `0 errors` when listed, even if error count was originally non-zero.
|
||||
|
||||
## [v0.4.0] (beta) - 2023-02-20
|
||||
|
||||
### Fixed
|
||||
- Support for item.Attachment:Mail restore
|
||||
- Errors from duplicate names in Exchange Calendars
|
||||
- Resolved an issue where progress bar displays could fail to exit, causing unbounded CPU consumption.
|
||||
- Fix Corso panic within Docker images
|
||||
- Debugging with the CORSO_URL_LOGGING env variable no longer causes accidental request failures.
|
||||
- Don't discover all users when backing up each user in a multi-user backup
|
||||
|
||||
### Changed
|
||||
- When using Restore and Details on Exchange Calendars, the `--event-calendar` flag can now identify calendars by either a Display Name or a Microsoft 365 ID.
|
||||
- Exchange Calendars storage entries now construct their paths using container IDs instead of display names. This fixes cases where duplicate display names caused system failures.
|
||||
|
||||
### Known Issues
|
||||
- Nested attachments are currently not restored due to an [issue](https://github.com/microsoft/kiota-serialization-json-go/issues/61) discovered in the Graph APIs
|
||||
- Breaking changes to Exchange Calendar backups.
|
||||
- The debugging env variable CORSO_URL_LOGGING causes exchange get requests to fail.
|
||||
- Onedrive files that are flagged as Malware consistently fail during backup.
|
||||
|
||||
## [v0.3.0] (alpha) - 2023-02-07
|
||||
|
||||
### Added
|
||||
|
||||
- Document Corso's fault-tolerance and restartability features
|
||||
- Add retries on timeouts and status code 500 for Exchange
|
||||
- Increase page size preference for delta requests for Exchange to reduce number of roundtrips
|
||||
- OneDrive file/folder permissions can now be backed up and restored
|
||||
- Add `--restore-permissions` flag to toggle restoration of OneDrive permissions
|
||||
- Add versions to backups so that we can understand/handle older backup formats
|
||||
|
||||
### Fixed
|
||||
|
||||
- Added additional backoff-retry to all OneDrive queries.
|
||||
- Users with `null` userType values are no longer excluded from user queries.
|
||||
- Fix bug when backing up a calendar that has the same name as the default calendar
|
||||
|
||||
### Known Issues
|
||||
|
||||
- When the same user has permissions to a file and the containing
|
||||
folder, we only restore folder level permissions for the user and no
|
||||
separate file only permission is restored.
|
||||
- Link shares are not restored
|
||||
|
||||
## [v0.2.0] (alpha) - 2023-01-29
|
||||
|
||||
### Fixed
|
||||
|
||||
- Check if the user specified for an exchange backup operation has a mailbox.
|
||||
|
||||
### Changed
|
||||
- Item.Attachments are disabled from being restored for the patching of ([#2353](https://github.com/alcionai/corso/issues/2353))
|
||||
- BetaClient introduced. Enables Corso to be able to interact with SharePoint Page objects. Package located `/internal/connector/graph/betasdk`
|
||||
- Handle case where user's drive has not been initialized
|
||||
- Inline attachments (e.g. copy/paste ) are discovered and backed up correctly ([#2163](https://github.com/alcionai/corso/issues/2163))
|
||||
- Guest and External users (for cloud accounts) and non-on-premise users (for systems that use on-prem AD syncs) are now excluded from backup and restore operations.
|
||||
- Remove the M365 license guid check in OneDrive backup which wasn't reliable.
|
||||
- Reduced extra socket consumption while downloading multiple drive files.
|
||||
- Extended timeout boundaries for exchange attachment downloads, reducing risk of cancellation on large files.
|
||||
- Identify all drives associated with a user or SharePoint site instead of just the results on the first page returned by Graph API.
|
||||
|
||||
## [v0.1.0] (alpha) - 2023-01-13
|
||||
|
||||
### Added
|
||||
|
||||
- Folder entries in backup details now indicate whether an item in the hierarchy was updated
|
||||
- Incremental backup support for exchange is now enabled by default.
|
||||
|
||||
### Changed
|
||||
|
||||
- The selectors Reduce() process will only include details that match the DiscreteOwner, if one is specified.
|
||||
- New selector constructors will automatically set the DiscreteOwner if given a single-item slice.
|
||||
- Write logs to disk by default ([#2082](https://github.com/alcionai/corso/pull/2082))
|
||||
|
||||
### Fixed
|
||||
|
||||
- Issue where repository connect progress bar was clobbering backup/restore operation output.
|
||||
- Issue where a `backup create exchange` produced one backup record per data type.
|
||||
- Specifying multiple users in a onedrive backup (ex: `--user a,b,c`) now properly delimits the input along the commas.
|
||||
- Updated the list of M365 SKUs used to check if a user has a OneDrive license.
|
||||
|
||||
### Known Issues
|
||||
|
||||
- `backup list` will not display a resource owner for backups created prior to this release.
|
||||
|
||||
## [v0.0.4] (alpha) - 2022-12-23
|
||||
|
||||
### Added
|
||||
|
||||
- Incremental backup support for Exchange ([#1777](https://github.com/alcionai/corso/issues/1777)). This is currently enabled by specifying the `--enable-incrementals`
|
||||
with the `backup create` command. This functionality will be enabled by default in an upcoming release.
|
||||
- Folder entries in backup details now include size and modified time for the hierarchy ([#1896](https://github.com/alcionai/corso/issues/1896))
|
||||
|
||||
### Changed
|
||||
|
||||
- **Breaking Change**:
|
||||
Changed how backup details are stored in the repository to
|
||||
improve memory usage ([#1735](https://github.com/alcionai/corso/issues/1735))
|
||||
- Improve OneDrive backup speed ([#1842](https://github.com/alcionai/corso/issues/1842))
|
||||
- Upgrade MS Graph SDK libraries ([#1856](https://github.com/alcionai/corso/issues/1856))
|
||||
- Docs: Add Algolia docsearch to Corso docs ([#1844](https://github.com/alcionai/corso/pull/1844))
|
||||
- Add an `updated` flag to backup details ([#1813](https://github.com/alcionai/corso/pull/1813))
|
||||
- Docs: Speed up Windows Powershell download ([#1798](https://github.com/alcionai/corso/pull/1798))
|
||||
- Switch to Go 1.19 ([#1632](https://github.com/alcionai/corso/pull/1632))
|
||||
|
||||
### Fixed
|
||||
|
||||
- Fixed retry logic in the Graph SDK that would result in an `400 Empty Payload` error when the request was retried ([1778](https://github.com/alcionai/corso/issues/1778))([msgraph-sdk-go #341](https://github.com/microsoftgraph/msgraph-sdk-go/issues/341))
|
||||
- Don't error out if a folder was deleted during an exchange backup operation ([#1849](https://github.com/alcionai/corso/pull/1849))
|
||||
- Docs: Fix CLI auto-generated docs headers ([#1845](https://github.com/alcionai/corso/pull/1845))
|
||||
|
||||
## [v0.0.3] (alpha) - 2022-12-05
|
||||
|
||||
### Added
|
||||
|
||||
- Display backup size in backup list command (#1648) from [meain](https://github.com/meain)
|
||||
- Improve OneDrive backup performance (#1607) from [meain](https://github.com/meain)
|
||||
- Improve Exchange backup performance (#1608) from [meain](https://github.com/meain)
|
||||
- Add flag to retain all progress bars (#1582) from [ryanfkeepers](https://github.com/ryanfkeepers)
|
||||
- Fix resource owner display on backup list (#1580) from [ryanfkeepers](https://github.com/ryanfkeepers)
|
||||
|
||||
### Changed
|
||||
|
||||
- Improve logging (#1642) from [ryanfkeepers](https://github.com/ryanfkeepers)
|
||||
- Generate separate backup for each resource owner (#1609) from [ashmrtn](https://github.com/ashmrtn)
|
||||
- Print version info to stdout instead of stderr (#1503) from [meain](https://github.com/meain)
|
||||
|
||||
## [v0.0.2] (alpha) - 2022-11-14
|
||||
|
||||
@ -502,28 +53,6 @@ this case, Corso will skip over the item but report this in the backup summary.
|
||||
- Miscellaneous
|
||||
- Optional usage statistics reporting ([RM-35](https://github.com/alcionai/corso-roadmap/issues/35))
|
||||
|
||||
[Unreleased]: https://github.com/alcionai/corso/compare/v0.19.0...HEAD
|
||||
[v0.19.0]: https://github.com/alcionai/corso/compare/v0.18.0...v0.19.0
|
||||
[v0.18.0]: https://github.com/alcionai/corso/compare/v0.17.0...v0.18.0
|
||||
[v0.17.0]: https://github.com/alcionai/corso/compare/v0.16.0...v0.17.0
|
||||
[v0.16.0]: https://github.com/alcionai/corso/compare/v0.15.0...v0.16.0
|
||||
[v0.15.0]: https://github.com/alcionai/corso/compare/v0.14.0...v0.15.0
|
||||
[v0.14.0]: https://github.com/alcionai/corso/compare/v0.13.0...v0.14.0
|
||||
[v0.13.0]: https://github.com/alcionai/corso/compare/v0.12.0...v0.13.0
|
||||
[v0.12.0]: https://github.com/alcionai/corso/compare/v0.11.1...v0.12.0
|
||||
[v0.11.1]: https://github.com/alcionai/corso/compare/v0.11.0...v0.11.1
|
||||
[v0.11.0]: https://github.com/alcionai/corso/compare/v0.10.0...v0.11.0
|
||||
[v0.10.0]: https://github.com/alcionai/corso/compare/v0.9.0...v0.10.0
|
||||
[v0.9.0]: https://github.com/alcionai/corso/compare/v0.8.1...v0.9.0
|
||||
[v0.8.0]: https://github.com/alcionai/corso/compare/v0.7.1...v0.8.0
|
||||
[v0.7.0]: https://github.com/alcionai/corso/compare/v0.6.1...v0.7.0
|
||||
[v0.6.1]: https://github.com/alcionai/corso/compare/v0.5.0...v0.6.1
|
||||
[v0.5.0]: https://github.com/alcionai/corso/compare/v0.4.0...v0.5.0
|
||||
[v0.4.0]: https://github.com/alcionai/corso/compare/v0.3.0...v0.4.0
|
||||
[v0.3.0]: https://github.com/alcionai/corso/compare/v0.2.0...v0.3.0
|
||||
[v0.2.0]: https://github.com/alcionai/corso/compare/v0.1.0...v0.2.0
|
||||
[v0.1.0]: https://github.com/alcionai/corso/compare/v0.0.4...v0.1.0
|
||||
[v0.0.4]: https://github.com/alcionai/corso/compare/v0.0.3...v0.0.4
|
||||
[v0.0.3]: https://github.com/alcionai/corso/compare/v0.0.2...v0.0.3
|
||||
[Unreleased]: https://github.com/alcionai/corso/compare/v0.0.2...HEAD
|
||||
[v0.0.2]: https://github.com/alcionai/corso/compare/v0.0.1...v0.0.2
|
||||
[v0.0.1]: https://github.com/alcionai/corso/tag/v0.0.1
|
||||
|
||||
12
README.md
@ -1,10 +1,6 @@
|
||||
> [!NOTE]
|
||||
> **The Corso project is no longer actively maintained and has been archived**.
|
||||
<img src="https://github.com/alcionai/corso/blob/main/docs/static/img/corso_logo.svg?raw=true" alt="Corso Logo" width="100" />
|
||||
|
||||
<p align="center">
|
||||
<img src="https://github.com/alcionai/corso/blob/main/website/static/img/corso_logo.svg?raw=true" alt="Corso Logo" width="100" />
|
||||
</p>
|
||||
<h1 align="center">Corso</h1>
|
||||
# Corso
|
||||
|
||||
[](https://goreportcard.com/report/github.com/alcionai/corso/src)
|
||||
[](https://discord.gg/63DTTSnuhT)
|
||||
@ -16,14 +12,14 @@ Microsoft 365 data. It provides a reliable, secure, and efficient data protectio
|
||||
the backup data and have the flexibility to perform backups of their desired service through an intuitive interface.
|
||||
As Corso evolves, it can become a great building block for more complex data protection workflows.
|
||||
|
||||
**Corso is currently in Beta.**
|
||||
**Corso is currently in ALPHA and should NOT be used in production.**
|
||||
|
||||
Corso supports M365 Exchange and OneDrive with SharePoint and Teams support in active development. Coverage for more
|
||||
services, possibly beyond M365, will expand based on the interest and needs of the community.
|
||||
|
||||
# Getting Started
|
||||
|
||||
See the [Corso Quickstart](https://corsobackup.io/docs/quickstart/) on our docs page.
|
||||
See the [Corso Documentation](https://corsobackup.io/docs/intro) for more information.
|
||||
|
||||
# Building Corso
|
||||
|
||||
|
||||
@ -1,7 +0,0 @@
|
||||
# Security Policy
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
If you find a security vulnerability, please do not file a public GitHub
|
||||
issue. Please report this to security@corsobackup.io. In most cases, we
|
||||
will respond within 48 hours or less.
|
||||
@ -1,4 +1,4 @@
|
||||
FROM golang:1.21-alpine as builder
|
||||
FROM golang:1.18-alpine as builder
|
||||
|
||||
WORKDIR /go/src/app
|
||||
COPY src .
|
||||
@ -6,7 +6,7 @@ COPY src .
|
||||
ARG CORSO_BUILD_LDFLAGS=""
|
||||
RUN go build -o corso -ldflags "$CORSO_BUILD_LDFLAGS"
|
||||
|
||||
FROM alpine:3
|
||||
FROM alpine:3.16
|
||||
|
||||
LABEL org.opencontainers.image.title="Corso"
|
||||
LABEL org.opencontainers.image.description="Free, Secure, and Open-Source Backup for Microsoft 365"
|
||||
@ -16,14 +16,18 @@ LABEL org.opencontainers.image.vendor="Alcion, Inc."
|
||||
|
||||
COPY --from=builder /go/src/app/corso /corso
|
||||
|
||||
RUN apk add --no-cache ca-certificates
|
||||
# Pull tls certs directly from latest upstream image
|
||||
COPY --from=alpine:latest /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/
|
||||
|
||||
RUN apk add --no-cache --update binutils util-linux logrotate
|
||||
|
||||
ENV CORSO_HOME=/app/corso
|
||||
ENV CORSO_CONFIG_DIR=$CORSO_HOME \
|
||||
KOPIA_CONFIG_PATH=$CORSO_HOME/kopia/config/repository.config \
|
||||
KOPIA_LOG_DIR=$CORSO_HOME/kopia/logs \
|
||||
KOPIA_CACHE_DIRECTORY=$CORSO_HOME/kopia/cache \
|
||||
RCLONE_CONFIG=$CORSO_HOME/kopia/rclone/rclone.conf \
|
||||
KOPIA_PERSIST_CREDENTIALS_ON_CONNECT=false \
|
||||
KOPIA_CHECK_FOR_UPDATES=false
|
||||
|
||||
ENTRYPOINT ["/corso"]
|
||||
ENTRYPOINT ["tail", "-f", "/dev/null"]
|
||||
|
||||
@ -12,7 +12,7 @@ usage() {
|
||||
}
|
||||
|
||||
ROOT=$(dirname $(dirname $(readlink -f $0)))
|
||||
GOVER=1.21 # go version
|
||||
GOVER=1.18 # go version
|
||||
CORSO_BUILD_CACHE="/tmp/.corsobuild" # shared persistent cache
|
||||
|
||||
# Figure out os and architecture
|
||||
|
||||
214
design/cli.md
Normal file
@ -0,0 +1,214 @@
|
||||
# CLI Commands
|
||||
## Status
|
||||
|
||||
Revision: v0.0.1
|
||||
|
||||
-----
|
||||
|
||||
|
||||
This is a proposal for Corso cli commands extrapolated from the Functional Requirements product documentation. Open questions are listed in the `Details & Discussion` section. The command set includes some p1/p2 actions for completeness. This proposal only intends to describe the available commands themselves and does not evaluate functionality or feature design beyond that goal.
|
||||
|
||||
# CLI Goals
|
||||
|
||||
- Ease (and enjoyment) of Use, more than minimal functionality.
|
||||
- Intended for use by Humans, not Computers.
|
||||
- Outputs should be either interactive/progressive (for ongoing work) or easily greppable/parseable.
|
||||
|
||||
## Todo/Undefined:
|
||||
|
||||
- Interactivity and sub-selection/helpful action completion within command operation.
|
||||
- Quality-of-life and niceties such as interactive/output display, formatting and presentation, or maximum minimization of user effort to run Corso.
|
||||
|
||||
-----
|
||||
## Commands
|
||||
|
||||
Standard format:
|
||||
`corso {command} [{subcommand}] [{service|repository}] [{flag}...]`
|
||||
|
||||
| Cmd | | | Flags | Notes |
|
||||
| --- | --- | --- | --- | --- |
|
||||
| version | | | | Same as `corso --version` |
|
||||
| | | | —version | Outputs Corso version details. |
|
||||
| help | | | | Same as `corso —-help` |
|
||||
| * | * | help | | Same as `{command} -—help` |
|
||||
| * | * | | —help | Same as `{command} help` |
|
||||
|
||||
| Cmd | | | Flags | Notes |
|
||||
| --- | --- | --- | --- | --- |
|
||||
| repo | * | | | Same as `repo [*] --help`. |
|
||||
| repo | init | {repository} | | Initialize a Corso repository. |
|
||||
| repo | init | {repository} | —tenant {azure_tenant_id} | Provides the account’s tenant ID. |
|
||||
| repo | init | {repository} | —client {azure_client_id} | Provides the account’s client ID. |
|
||||
| repo | connect | {repository} | | Connects to the specified repo. |
|
||||
| repo | configure | {repository} | | Sets mutable config properties to the provided values. |
|
||||
| repo | * | * | —config {cfg_file_path} | Specify a repo configuration file. Values may also be provided via individual flags and env vars. |
|
||||
| repo | * | * | —{config-prop} | Blanket commitment to support config via flags. |
|
||||
| repo | * | * | —credentials {creds_file_path} | Specify a file containing credentials or secrets. Values may also be provided via env vars. |
|
||||
|
||||
| Cmd | | | Flags | Notes |
|
||||
| --- | --- | --- | --- | --- |
|
||||
| backup | * | | | Same as backup [*] -—help |
|
||||
| backup | list | {service} | | List all backups in the repository for the specified service. |
|
||||
| backup | create | {service} | | Backup the specified service. |
|
||||
| backup | * | {service} | —token {token} | Provides a security key for permission to perform backup. |
|
||||
| backup | * | {service} | —{entity} {entity_id}... | Only involve the target entity(s). Entities are things like users, groups, sites, etc. Entity flag support is service-specific. |
|
||||
|
||||
| Cmd | | | Flags | Notes |
|
||||
| --- | --- | --- | --- | --- |
|
||||
| restore | | | | Same as `restore -—help` |
|
||||
| restore | {service} | | | Complete service restoration using the latest versioned backup. |
|
||||
| restore | {service} | | —backup {backup_id} | Restore data from only the targeted backup(s). |
|
||||
| restore | {service} | | —{entity} {entity_id}... | Only involve the target entity(s). Entities are things like users, groups, sites, etc. Entity flag support is service-specific. |
|
||||
---
|
||||
|
||||
|
||||
## Examples
|
||||
### Basic Usage
|
||||
|
||||
**First Run**
|
||||
|
||||
```bash
|
||||
$ export AZURE_CLIENT_SECRET=my_azure_secret
|
||||
$ export AWS_SECRET_ACCESS_KEY=my_s3_secret
|
||||
$ corso repo init s3 --bucket my_s3_bucket --access-key my_s3_key \
|
||||
--tenant my_azure_tenant_id --clientid my_azure_client_id
|
||||
$ corso backup express
|
||||
```
|
||||
|
||||
**Follow-up Actions**
|
||||
|
||||
```bash
|
||||
$ corso repo connect s3 --bucket my_s3_bucket --access-key my_s3_key
|
||||
$ corso backup express
|
||||
$ corso backup list express
|
||||
```
|
||||
-----
|
||||
|
||||
# Details & Discussion
|
||||
|
||||
## UC0 - CLI User Interface
|
||||
|
||||
Base command: `corso`
|
||||
|
||||
Standard format: `corso {command} [{subcommand}] [{service}] [{flag}...]`
|
||||
|
||||
Examples:
|
||||
|
||||
- `corso help`
|
||||
- `corso repo init --repository s3 --tenant t_1`
|
||||
- `corso backup create teams`
|
||||
- `corso restore teams --backup b_1`
|
||||
|
||||
## UC1 - Initialization and Connection
|
||||
|
||||
**Account Handling**
|
||||
|
||||
M365 accounts are paired with repo initialization, resulting in a single-tenancy storage. Any `repo` action applies the same behavior to the account as well. That is, `init` will handle all initialization steps for both the repository and the account, and both must succeed for the command to complete successfully, including all necessary validation checks. Likewise, `connect` will validate and establish a connection (or, at least, the ability to communicate) with both the account and the repository.
|
||||
|
||||
**Init**
|
||||
|
||||
`corso repo init {repository} --config {cfg} --credentials {creds}`
|
||||
|
||||
Initializes a repository, bootstrapping resources as necessary and storing configuration details within Corso. Repo is the name of the repository provider, eg: ‘s3’. Cfg and creds, in this example, point to json (or alternatively yaml?) files containing the details required to establish the connection. Configuration options, when known, will get support for flag-based declaration. Similarly, env vars will be supported as needed.
|
||||
|
||||
**Connection**
|
||||
|
||||
`corso repo connect {repository} --credentials {creds}`
|
||||
|
||||
[https://docs.flexera.com/flexera/EN/SaaSManager/M365CCIntegration.htm#integrations_3059193938_1840275](https://docs.flexera.com/flexera/EN/SaaSManager/M365CCIntegration.htm#integrations_3059193938_1840275)
|
||||
|
||||
Connects to an existing (ie, initialized) repository.
|
||||
|
||||
Corso is expected to gracefully handle transient disconnections during backup/restore runtimes (and otherwise, as needed).
|
||||
|
||||
**Deletion**
|
||||
|
||||
`corso repo delete {repository}`
|
||||
|
||||
(Included here for discussion, but not being added to the CLI command set at this time.)
|
||||
|
||||
Removes a repository from Corso. More exploration is needed here to explore cascading effects (or lack thereof) from the command. At minimum, expect additional user involvement to confirm that the deletion is wanted, and not erroneous.
|
||||
|
||||
## UC1.1 - Version
|
||||
|
||||
`corso --version` outputs the current version details such as: commit id and datetime, maybe semver (complete release version details to be decided).
|
||||
Further versioning controls are not currently covered in this proposal.
|
||||
|
||||
## UC2 - Configuration
|
||||
|
||||
`corso repo configure --repository {repo} --config {cfg}`
|
||||
|
||||
Updates the configuration details for an existing repository.
|
||||
|
||||
Configuration is divided between mutable and immutable properties. Generally, initialization-specific configurations (those that identify the storage repository, its connection, and its fundamental behavior), among other properties, are considered immutable and cannot be reconfigured. As a result, `repo configure` will not be able to rectify a misconfigured init; some other user flow will be needed to resolve that issue.
|
||||
|
||||
Configure allows mutation of config properties that can be safely and transiently applied. For example: backup retention and expiration policies. A complete list of how each property is classified is forthcoming as we build that list of properties.
|
||||
|
||||
## UC3 - On-Demand Backup
|
||||
|
||||
`corso backup` is reserved as a non-actionable command, rather than have it kick off a backup action. This is to ensure users don’t accidentally kick off a migration in the process of exploring the api. `corso backup` produces the same output as `corso backup --help`.
|
||||
|
||||
**Full Service Backup**
|
||||
|
||||
- `corso backup create {service}`
|
||||
|
||||
**Selective Backup**
|
||||
|
||||
- `corso backup create {service} --{entity} {entity_id}...`
|
||||
|
||||
Entities are service-applicable objects that match up to m365 objects. Users, groups, sites, mailboxes, etc. Entity flags are available on a per-service basis. For example, —site is available for the sharepoint service, and —mailbox for express, but not the reverse. A full list of system-entity mappings is coming in the future.
|
||||
|
||||
**Examples**
|
||||
|
||||
- `corso backup` → displays the help output.
|
||||
- `corso backup create teams` → generates a full backup of the teams service.
|
||||
- `corso backup create express --group g_1` → backs up the g_1 group within express.
|
||||
|
||||
## UC3.2 - Security Token
|
||||
|
||||
(This section is incomplete: further design details are needed about security expression.) Some commands, such as Backup/Restore require a security key declaration to verify that the caller has permission to perform the command.
|
||||
|
||||
`corso * * --token {token}`
|
||||
|
||||
## UC5 - Backup Ops
|
||||
|
||||
`corso backup list {service}`
|
||||
|
||||
Produces a list of the backups which currently exist in the repository.
|
||||
|
||||
`corso backup list {service} --{entity} {entity_id}...`
|
||||
|
||||
The list can be filtered to contain backups relevant to the specified entities. A possible user flow for restoration is for the user to use this to discover which backups match their needs, and then apply those backups in a restore operation.
|
||||
|
||||
**Expiration Control**
|
||||
|
||||
Will appear in a future revision.
|
||||
|
||||
## UC6 - Restore
|
||||
|
||||
Similar to backup, `corso restore` is reserved as a non-actionable command to serve up the same output as `corso restore —help`.
|
||||
|
||||
### UC6.1
|
||||
|
||||
**Full Service Restore**
|
||||
|
||||
- `corso restore {service} [--backup {backup_id}...]`
|
||||
|
||||
If no backups are specified, this defaults to the most recent backup of the specified service.
|
||||
|
||||
**Selective Restore**
|
||||
|
||||
- `corso restore {service} [--backup {backup_id}...] [--{entity} {entity_id}...]`
|
||||
|
||||
Entities are service-applicable objects that match up to m365 objects. Users, groups, sites, mailboxes, etc. Entity flags are available on a per-service basis. For example, —site is available for the sharepoint service, and —mailbox for express, but not the reverse. A full list of system-entity mappings is coming in the future.
|
||||
|
||||
**Examples**
|
||||
|
||||
- `corso restore` → displays the help output.
|
||||
- `corso restore teams` → restores all data in the teams service.
|
||||
- `corso restore sharepoint --backup b_1` → restores the sharepoint data in the b_1 backup.
|
||||
- `corso restore express --group g_1` → restores the g_1 group within express.
|
||||
|
||||
## UC6.2 - disaster recovery
|
||||
|
||||
Multi-service backup/restoration is still under review.
|
||||
@ -4,7 +4,7 @@
|
||||
# It is not used for deployments.
|
||||
|
||||
## Build
|
||||
FROM golang:1.19 AS base
|
||||
FROM golang:1.18 AS base
|
||||
|
||||
WORKDIR /src
|
||||
|
||||
@ -20,7 +20,7 @@ ARG TARGETARCH
|
||||
RUN GOOS=${TARGETOS} GOARCH=${TARGETARCH} go build -o /corso .
|
||||
|
||||
## Deploy
|
||||
FROM ubuntu:22.10
|
||||
FROM ubuntu:latest
|
||||
|
||||
COPY --from=build /corso /
|
||||
|
||||
|
||||
0
website/.gitignore → docs/.gitignore
vendored
28
docs/Dockerfile
Normal file
@ -0,0 +1,28 @@
|
||||
FROM ubuntu:22.04
|
||||
LABEL MAINTAINER="Niraj Tolia"
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# NOTE for lines 13,15: update in CI when updating
|
||||
RUN apt-get -y update && apt-get -y install gpg emacs curl zip git make \
|
||||
&& curl -fsSL https://deb.nodesource.com/setup_current.x | bash - \
|
||||
&& apt-get -y install nodejs \
|
||||
&& apt-get autoclean \
|
||||
&& node --version \
|
||||
&& npm --version \
|
||||
&& cd /tmp && curl -O -L https://github.com/errata-ai/vale/releases/download/v2.20.1/vale_2.20.1_Linux_64-bit.tar.gz \
|
||||
&& tar -xvzf vale_2.20.1_Linux_64-bit.tar.gz -C /usr/bin vale \
|
||||
&& npm install -g markdownlint-cli@0.32.2 \
|
||||
&& curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" \
|
||||
&& unzip awscliv2.zip && /bin/bash aws/install && rm -rf awscliv2.zip aws
|
||||
|
||||
WORKDIR /usr/src
|
||||
COPY package.json package-lock.json* ./
|
||||
RUN npm ci \
|
||||
&& npm cache clean --force \
|
||||
&& rm -f package.json package-lock.json*
|
||||
ENV PATH /usr/src/node_modules/.bin:$PATH
|
||||
|
||||
WORKDIR /usr/src/docs
|
||||
|
||||
CMD ["npm", "start", "--", "--host", "0.0.0.0"]
|
||||
72
docs/Makefile
Normal file
@ -0,0 +1,72 @@
|
||||
.PHONY: buildimage build serve dev shell check genclidocs _validatemdgen publish sync
|
||||
|
||||
CORSO_BUILD_DIR := /tmp/.corsobuild
|
||||
CORSO_BUILD_CACHE := ${CORSO_BUILD_DIR}/cache
|
||||
CORSO_BUILD_MOD := ${CORSO_BUILD_DIR}/mod
|
||||
CORSO_BUILD_BIN := ${CORSO_BUILD_DIR}/bin
|
||||
CORSO_REPO := /go/src/github.com/alcionai/corso
|
||||
CORSO_LOCAL_PATH := $(shell git rev-parse --show-toplevel)
|
||||
GIT_SHA := $(shell git rev-parse --short HEAD)
|
||||
DOCSC := docker run --rm -it -p 3000:3000 -v ${PWD}:/usr/src/docs --env CORSO_VERSION=unreleased-${GIT_SHA} corso/docs
|
||||
CBASE := docker run --rm -it \
|
||||
-v ${CORSO_LOCAL_PATH}:${CORSO_REPO} -v ${CORSO_BUILD_DIR}:${CORSO_BUILD_DIR} \
|
||||
--env GOCACHE=${CORSO_BUILD_CACHE} --env GOMODCACHE=${CORSO_BUILD_MOD} --env GOTMPDIR=${CORSO_BUILD_DIR} \
|
||||
--workdir ${CORSO_REPO}/src
|
||||
GOC := ${CBASE} golang:1.18
|
||||
GOBASHC := ${CBASE} --entrypoint bash golang:1.18
|
||||
MDGEN_SRC := ${CORSO_REPO}/src/cmd/mdgen/mdgen.go
|
||||
MDGEN_BINARY := ${CORSO_BUILD_BIN}/mdgen
|
||||
CLI_DOCS := ${CORSO_REPO}/docs/docs/cli
|
||||
|
||||
buildimage:
|
||||
docker build -t "corso/docs:latest" .
|
||||
|
||||
dev: genclidocs
|
||||
$(DOCSC) npm start -- --host 0.0.0.0
|
||||
|
||||
VALE_TARGET ?= docs README.md
|
||||
|
||||
check: genclidocs
|
||||
$(DOCSC) vale $(VALE_TARGET)
|
||||
$(DOCSC) markdownlint '**/*.md' --ignore styles/ --ignore src/ --ignore node_modules/
|
||||
|
||||
localcheck: genclidocs
|
||||
vale $(VALE_TARGET)
|
||||
markdownlint '**/*.md' --ignore styles/ --ignore src/ --ignore node_modules/
|
||||
|
||||
dockershell:
|
||||
$(DOCSC) bash
|
||||
|
||||
build: genclidocs
|
||||
$(DOCSC) npm run build
|
||||
|
||||
serve:
|
||||
$(DOCSC) npm run serve
|
||||
|
||||
genclidocs: _validatemdgen ${MDGEN_BINARY}
|
||||
@echo 'Auto-generating Corso CLI docs...'
|
||||
$(DOCSC) rm -rf docs/cli
|
||||
$(GOC) ${MDGEN_BINARY} --cli-folder ${CLI_DOCS}
|
||||
|
||||
_validatemdgen: # in case we have a different architecture
|
||||
@echo 'Verifying dependencies...'
|
||||
$(GOBASHC) -c "${MDGEN_BINARY} --help >/dev/null || rm -rf ${MDGEN_BINARY}"
|
||||
|
||||
${MDGEN_BINARY}: $(shell find ${CORSO_LOCAL_PATH}/src -type f -name *.go) $(shell find ${CORSO_LOCAL_PATH}/src -type d )
|
||||
@echo 'Re-building Corso CLI docs auto-gen tooling...'
|
||||
$(GOC) go mod download
|
||||
$(GOC) go build -o ${MDGEN_BINARY} ${MDGEN_SRC}
|
||||
|
||||
clean:
|
||||
$(DOCSC) rm -rf docs/cli build node_modules
|
||||
$(GOC) rm -rf ${CORSO_BUILD_DIR}/*
|
||||
|
||||
publish: clean build
|
||||
docker run -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY \
|
||||
-e AWS_SESSION_TOKEN -e AWS_REGION \
|
||||
--rm -v ${PWD}:/usr/src/docs corso/docs:latest \
|
||||
make sync
|
||||
|
||||
sync:
|
||||
aws s3 sync /usr/src/docs/build/ s3://corsobackup.io/ --exclude ".git/*" --delete
|
||||
aws cloudfront create-invalidation --distribution-id E1W9NGI9YTVZ1A --paths "/*"
|
||||
71
docs/README.md
Normal file
@ -0,0 +1,71 @@
|
||||
# Corso documentation
|
||||
|
||||
Corso documentation uses [Docusaurus 2](https://docusaurus.io/), a modern static website generator.
|
||||
[Mermaid](https://mermaid-js.github.io/mermaid/) provides support for native diagrams in Markdown.
|
||||
|
||||
## Requirements
|
||||
|
||||
Developing documentation for Corso requires the following tools on your machine:
|
||||
|
||||
- `make`
|
||||
- Docker
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
make buildimage
|
||||
```
|
||||
|
||||
## Live documentation development
|
||||
|
||||
```bash
|
||||
make dev
|
||||
```
|
||||
|
||||
This command starts a local development server within the Docker container and will expose docs at [http://localhost:3000](http://localhost:3000).
|
||||
|
||||
## Generating Corso CLI docs
|
||||
|
||||
```bash
|
||||
make genclidocs
|
||||
```
|
||||
|
||||
Corso's CLI documents are auto generated. This command explicitly triggers generating these docs. This step will happen
|
||||
automatically for the other commands where this is relevant.
|
||||
|
||||
## Building static documentation
|
||||
|
||||
```bash
|
||||
make build
|
||||
```
|
||||
|
||||
This command generates static content into the `build` directory for integration with any static contents hosting service.
|
||||
|
||||
## Serving static documentation
|
||||
|
||||
```bash
|
||||
make serve
|
||||
```
|
||||
|
||||
This command will serve the static content generated with `make build` at [http://localhost:3000](http://localhost:3000).
|
||||
|
||||
## Style and linting
|
||||
|
||||
```bash
|
||||
# Lint all docs
|
||||
make check
|
||||
# Lint specific files and/or folders
|
||||
make check VALE_TARGET="README.md docs/concepts"
|
||||
```
|
||||
|
||||
This command will lint all Markdown files and check them for style issues using the Docker container
|
||||
|
||||
## Documentation platform development
|
||||
|
||||
```bash
|
||||
make shell
|
||||
```
|
||||
|
||||
Use this command to interactively (and temporarily!) change the contents or
|
||||
configuration of the live documentation container image (for example, when
|
||||
experimenting with new plugins).
|
||||
@ -1,19 +1,17 @@
|
||||
---
|
||||
slug: corso-announcement-free-backup-for-microsoft-365
|
||||
title: "A Home to Call Your Own: The Need for Your Own Backups in IT"
|
||||
description: "Announcing Corso, a free, open-source, and secure backup tool for Microsoft 365."
|
||||
authors: nica
|
||||
tags: [corso, microsoft 365]
|
||||
image: ./images/office_desk.jpg
|
||||
---
|
||||
|
||||

|
||||

|
||||
|
||||
Have you had it with Google sheets? Me too! Excel is my home. It’s where I write all my best formulae. And what
|
||||
Have you had it with Google sheets? So have I. Excel is my home. It’s where I write all my best formulae. And what
|
||||
about PowerPoint? The way it just finds stock photos for you? The automatic ‘alternative designs for this slide’
|
||||
button? It’s too good. I can’t give up Microsoft 365.
|
||||
|
||||
If you did some work today, there’s a good chance you opened a Microsoft tool. M365 is used by
|
||||
If you did some work today, there’s a very good chance you opened a Microsoft tool. M365 is used by
|
||||
[more than a million](https://www.statista.com/statistics/983321/worldwide-office-365-user-numbers-by-country/)
|
||||
companies worldwide, and nearly 880,000 companies in the U.S. use the software suite. But with that widespread usage
|
||||
comes risk, business-critical data is at risk of loss or corruption, if not securely backed up and protected.
|
||||
@ -24,9 +22,9 @@ comes risk, business-critical data is at risk of loss or corruption, if not secu
|
||||
|
||||
A couple of years back I took the time to get the AWS ‘baby cert,’ their first certification. The focus of the
|
||||
material surprised me, along with learning about the most popular AWS products and their benefits, you had to learn
|
||||
cold all the things that AWS won't do for you. AWS won’t alert you to poor performance on your applications. It
|
||||
cold all the things that AWS will not do for you. AWS won’t alert you to poor performance on your applications. It
|
||||
won’t automatically scale down your instances. And while AWS and the other public cloud providers completely meet their
|
||||
promised SLA, they don't promise to deliver the backups that you expect them to deliver.
|
||||
promised SLA, they do not promise to deliver the backups that you expect them to deliver.
|
||||
|
||||
“I accidentally deleted the customer DB,” isn’t a situation that public cloud companies are built to prevent or
|
||||
ameliorate. Fundamentally, on all public clouds, backups are a shared responsibility between administrator and service.
|
||||
@ -39,8 +37,8 @@ Let’s talk about the stats on data loss:
|
||||
|
||||
Data loss can result from accidental or intentional deletion, cyber-attacks and malware, a poorly executed migration,
|
||||
or the cancellation of a software license, among other reasons. For example,
|
||||
[2 out of 5 servers](https://www.veeam.com/blog/data-loss-2022.html) had at least one or more outages over the
|
||||
past 12 months. And cybercrime is on a continual rise; the average number of data breaches and cyberattacks were
|
||||
[40% – 2 out of 5 servers](https://www.veeam.com/blog/data-loss-2022.html) – had at least one or more outages over the
|
||||
past 12 months. And cybercrime is on a continual rise – the average number of data breaches and cyberattacks were
|
||||
[up 15.1% in 2021](https://www.forbes.com/sites/chuckbrooks/2022/06/03/alarming-cyber-statistics-for-mid-year-2022-that-you-need-to-know/?sh=642204357864),
|
||||
compared with the previous year. As of 2022, the average cost of a data breach in the U.S. was $9.44 million.
|
||||
|
||||
@ -55,8 +53,6 @@ efficiently backing up all business-critical data to object storage.
|
||||
|
||||
## Why Corso?
|
||||
|
||||
<!-- vale alex.Condescending = NO -->
|
||||
|
||||
Corso is purpose-built for protection of your M365 organization account (this tool doesn’t work with consumer accounts)
|
||||
with easy-to-use comprehensive backup and restore workflows that reduce backup time and administrative overhead,
|
||||
improve time-to-recovery, and replace unreliable scripts or workarounds. It enables high-throughput, high-tolerance
|
||||
@ -64,8 +60,6 @@ backups that feature end-to-end encryption, deduplication, and compression. Plus
|
||||
object storage system: AWS S3 (including Glacier Instant Access), Google Cloud Storage and Backblaze. (Azure Blob
|
||||
support is coming soon).
|
||||
|
||||
<!-- vale alex.Condescending = YES -->
|
||||
|
||||
Corso’s secure backup protects against accidental data loss, service provider downtime and malicious threats, including
|
||||
ransomware attacks. Plus, a robust user community provides a venue for admins to share and learn about data protection
|
||||
and find best practices for how to securely configure their M365 environments. As a member of the community, you’ll
|
||||
@ -74,24 +68,19 @@ have access to blogs, forums, and discussion, as well as updates on public and f
|
||||
|
||||
## Low-Cost and Highly Secure
|
||||
|
||||
Corso's source code is licensed under the Apache v2 open-source license. It’s open source, and it’s free, which makes
|
||||
Corso's source code is licensed under the Apache v2 open-source license. It’s open source, and it’s free – which makes
|
||||
it the perfect solution for cost-conscious teams. And that’s not where the cost savings end, Corso’s flexible retention
|
||||
policies and ability to compress and deduplicate data efficiently before sending it to storage, helps reduce storage
|
||||
costs, as well.
|
||||
|
||||
## Interested in Trying Corso?
|
||||
|
||||
<!-- vale Microsoft.Contractions = NO -->
|
||||
|
||||
Corso, currently in ~~~alpha~~~ beta, provides a CLI-based tool for backups of your M365 data.
|
||||
Corso, currently in alpha, provides a CLI-based tool for backups of your M365 data.
|
||||
[Follow the quickstart guide](../../docs/quickstart) to start protecting your business-critical M365 data in
|
||||
just a few minutes. Because Corso is currently in ~~~alpha, it should **NOT** be used in production~~~
|
||||
beta, there might still be breaking changes in its backup format.
|
||||
|
||||
<!-- vale Microsoft.Contractions = YES -->
|
||||
just a few minutes. Because Corso is currently in alpha, it should NOT be used in production.
|
||||
|
||||
Corso supports Microsoft 365 Exchange and OneDrive, with SharePoint and Teams support in active development. Coverage
|
||||
for more services, beyond M365, will expand based on the interests and needs of the community.
|
||||
|
||||
Your feedback is critical for our work on this tool! Please
|
||||
So, your feedback is critical for our work on this tool! Please
|
||||
[tell us what you think of Corso](https://discord.gg/63DTTSnuhT).
|
||||
11
docs/blog/authors.yml
Normal file
@ -0,0 +1,11 @@
|
||||
ntolia:
|
||||
name: Niraj Tolia
|
||||
title: Founder
|
||||
url: https://github.com/ntolia
|
||||
image_url: https://github.com/ntolia.png
|
||||
|
||||
nica:
|
||||
name: Nočnica Mellifera
|
||||
title: Head of Developer Advocacy
|
||||
url: https://github.com/serverless-mom
|
||||
image_url: https://github.com/serverless-mom.png
|
||||
@ -18,15 +18,6 @@ If you don't have Go available, you can find installation instructions [here](ht
|
||||
|
||||
This will generate a binary named `corso` in the directory where you run the build.
|
||||
|
||||
:::note
|
||||
Prebuilt binary artifacts of the latest commit are available on GitHub.
|
||||
You can access them by navigating to the "Summary" page of
|
||||
the [`Build/Release Corso` CI job](https://github.com/alcionai/corso/actions/workflows/ci.yml?query=branch%3Amain)
|
||||
that was run for the latest commit on the `main` branch.
|
||||
The downloads will be available in the "Artifacts" section towards the
|
||||
bottom of the page.
|
||||
:::
|
||||
|
||||
### Building via Docker
|
||||
|
||||
For convenience, the Corso build tooling is containerized. To take advantage, you need
|
||||
@ -13,7 +13,7 @@ available on the golangci-lint
|
||||
version that you install should match the version the GitHub workflow uses to
|
||||
avoid failures even after running locally. The current version in use can be
|
||||
[found](https://github.com/alcionai/corso/blob/main/.github/workflows/lint.yml#L55)
|
||||
in `.github/workflows/lint.yaml`.
|
||||
in `.github/worflows/lint.yaml`.
|
||||
|
||||
## Running the linter
|
||||
|
||||
@ -21,16 +21,6 @@ You can run the linter manually or with the `Makefile` in the repository. Runnin
|
||||
the `Makefile` will also ensure you have the proper version of golangci-lint
|
||||
installed.
|
||||
|
||||
### Running the actions linter
|
||||
|
||||
Installation:
|
||||
|
||||
```sh
|
||||
go install github.com/rhysd/actionlint/cmd/actionlint@latest
|
||||
```
|
||||
|
||||
[Instructions for running locally.](https://github.com/rhysd/actionlint/blob/main/docs/usage.md)
|
||||
|
||||
### Running with the `Makefile`
|
||||
|
||||
There’s a `Makefile` in the corso/src that will automatically check if the proper
|
||||
@ -199,9 +189,7 @@ cryptic messages how you can fix the problems the linters flag.
|
||||
Each subsection also includes the version of golangci-lint it applies to and the
|
||||
linter in question.
|
||||
|
||||
```sh
|
||||
gci Expected 's', Found 'a' at file.go
|
||||
```
|
||||
### `gci` `Expected 's', Found 'a' at file.go`
|
||||
|
||||
This applies to golangci-lint v1.45.2 for the `gci` linter and is due to an import
|
||||
ordering issue. It occurs because imports in the file aren't grouped according
|
||||
@ -30,7 +30,7 @@
|
||||
|
||||
- Set M365 Credentials environment variables
|
||||
|
||||
> You can find more information on how to get these values in our [M365 docs](../../setup/m365-access/).
|
||||
> You can find more information on how to get these values in our [M365 docs](../../setup/m365_access/).
|
||||
|
||||
```bash
|
||||
export AZURE_CLIENT_ID=<id>
|
||||
@ -21,13 +21,13 @@ pull the image.
|
||||
## Connect to Microsoft 365
|
||||
|
||||
Obtaining credentials from Microsoft 365 to allow Corso to run is a one-time operation. Follow the instructions
|
||||
[here](../setup/m365-access) to obtain the necessary credentials and then make them available to Corso.
|
||||
[here](../setup/m365_access) to obtain the necessary credentials and then make them available to Corso.
|
||||
|
||||
<Tabs groupId="os">
|
||||
<TabItem value="win" label="Powershell">
|
||||
|
||||
```powershell
|
||||
$Env:AZURE_CLIENT_ID = "<Application (client) ID for configured app>"
|
||||
$Env:AZURE_CLIENT_ID = "<Application (../client) ID for configured app>"
|
||||
$Env:AZURE_TENANT_ID = "<Directory (tenant) ID for configured app>"
|
||||
$Env:AZURE_CLIENT_SECRET = "<Client secret value>"
|
||||
```
|
||||
@ -36,7 +36,7 @@ Obtaining credentials from Microsoft 365 to allow Corso to run is a one-time ope
|
||||
<TabItem value="unix" label="Linux/macOS">
|
||||
|
||||
```bash
|
||||
export AZURE_CLIENT_ID=<Application (client) ID for configured app>
|
||||
export AZURE_CLIENT_ID=<Application (../client) ID for configured app>
|
||||
export AZURE_TENANT_ID=<Directory (tenant) ID for configured app>
|
||||
export AZURE_CLIENT_SECRET=<Client secret value>
|
||||
```
|
||||
@ -45,7 +45,7 @@ Obtaining credentials from Microsoft 365 to allow Corso to run is a one-time ope
|
||||
<TabItem value="docker" label="Docker">
|
||||
|
||||
```bash
|
||||
export AZURE_CLIENT_ID=<Application (client) ID for configured app>
|
||||
export AZURE_CLIENT_ID=<Application (../client) ID for configured app>
|
||||
export AZURE_TENANT_ID=<Directory (tenant) ID for configured app>
|
||||
export AZURE_CLIENT_SECRET=<Client secret value>
|
||||
```
|
||||
@ -125,7 +125,7 @@ you initialized the Corso repository, you might need to [connect to it again](..
|
||||
|
||||
```powershell
|
||||
# Backup your inbox
|
||||
.\corso backup create exchange --mailbox <your exchange email address>
|
||||
.\corso backup create exchange --user <your exchange email address>
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
@ -133,7 +133,7 @@ you initialized the Corso repository, you might need to [connect to it again](..
|
||||
|
||||
```bash
|
||||
# Backup your inbox
|
||||
./corso backup create exchange --mailbox <your exchange email address>
|
||||
./corso backup create exchange --user <your exchange email address>
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
@ -143,7 +143,7 @@ you initialized the Corso repository, you might need to [connect to it again](..
|
||||
`# Backup your inbox
|
||||
docker run --env-file $HOME/.corso/corso.env \\
|
||||
--volume $HOME/.corso:/app/corso ghcr.io/alcionai/corso:${Version()} \\
|
||||
backup create exchange --mailbox <your exchange email address>`
|
||||
backup create exchange --user <your exchange email address>`
|
||||
}</CodeBlock>
|
||||
|
||||
</TabItem>
|
||||
@ -156,7 +156,7 @@ Your first backup may take some time if your mailbox is large.
|
||||
There will be progress indicators as the backup and, on completion, you should see output similar to:
|
||||
|
||||
```text
|
||||
Started At ID Status Resource Owner
|
||||
Started At ID Status Selectors
|
||||
2022-10-20T18:28:53Z d8cd833a-fc63-4872-8981-de5c08e0661b Completed (0 errors) alice@contoso.com
|
||||
```
|
||||
|
||||
@ -195,14 +195,14 @@ docker run --env-file $HOME/.corso/corso.env \\
|
||||
</Tabs>
|
||||
|
||||
```text
|
||||
Started At ID Status Resource Owner
|
||||
Started At ID Status Selectors
|
||||
2022-10-20T18:28:53Z d8cd833a-fc63-4872-8981-de5c08e0661b Completed (0 errors) alice@contoso.com
|
||||
2022-10-20T18:40:45Z 391ceeb3-b44d-4365-9a8e-8a8e1315b565 Completed (0 errors) alice@contoso.com
|
||||
...
|
||||
```
|
||||
|
||||
Next, select one of the available backups and list all backed up emails. See
|
||||
[here](../cli/corso-backup-details-exchange) for more advanced filtering options.
|
||||
[here](../cli/corso_backup_details_exchange) for more advanced filtering options.
|
||||
|
||||
<Tabs groupId="os">
|
||||
<TabItem value="win" label="Powershell">
|
||||
@ -250,7 +250,7 @@ To restore the selected email, use the following command.
|
||||
|
||||
```powershell
|
||||
# Restore a selected email
|
||||
.\corso restore exchange --backup <id of your selected backup> --email <email message ID>
|
||||
.\corso restore exchange --backup <id of your selected backup> --email <email ID>
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
@ -258,7 +258,7 @@ To restore the selected email, use the following command.
|
||||
|
||||
```bash
|
||||
# Restore a selected email
|
||||
./corso restore exchange --backup <id of your selected backup> --email <email message ID>
|
||||
./corso restore exchange --backup <id of your selected backup> --email <email ID>
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
@ -268,7 +268,7 @@ To restore the selected email, use the following command.
|
||||
`# Restore a selected email
|
||||
docker run --env-file $HOME/.corso/corso.env \\
|
||||
--volume $HOME/.corso:/app/corso ghcr.io/alcionai/corso:${Version()} \\
|
||||
restore exchange --backup <id of your selected backup> --email <email message ID>`
|
||||
restore exchange --backup <id of your selected backup> --email <email ID>`
|
||||
}</CodeBlock>
|
||||
|
||||
</TabItem>
|
||||
@ -281,63 +281,9 @@ A confirmation of the recovered email will be shown and the email will appear in
|
||||
360bf6840396 phish@contoso.info Re: Request for Apple/Amazon gift cards 2022-10-18T02:27:47Z
|
||||
```
|
||||
|
||||
## Advanced Restores
|
||||
|
||||
You can control where your data gets restored, and what happens if restored items would overwrite existing
|
||||
ones. Let's restore a folder in OneDrive back to its original location. Since the folder already exists, we can
|
||||
tell corso to with duplicate names as copies.
|
||||
|
||||
<Tabs groupId="os">
|
||||
<TabItem value="win" label="Powershell">
|
||||
|
||||
```powershell
|
||||
# Restore a folder to its original location
|
||||
.\corso restore onedrive --backup <id of your selected backup> `
|
||||
--folder "/presentations/company_culture" `
|
||||
--destination "/" `
|
||||
--collisions copy
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="unix" label="Linux/macOS">
|
||||
|
||||
```bash
|
||||
# Restore a folder to its original location
|
||||
./corso restore onedrive --backup <id of your selected backup> \
|
||||
--folder "/presentations/company_culture" \
|
||||
--destination "/" \
|
||||
--collisions copy
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="docker" label="Docker">
|
||||
|
||||
<CodeBlock language="bash">{
|
||||
`# Restore a folder to its original location
|
||||
docker run --env-file $HOME/.corso/corso.env \\
|
||||
--volume $HOME/.corso:/app/corso ghcr.io/alcionai/corso:${Version()} \\
|
||||
restore exchange --backup <id of your selected backup> \\
|
||||
--email <email message ID> --folder '/presentations/company_culture' \\
|
||||
--destination '/' --collisions copy`
|
||||
}</CodeBlock>
|
||||
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
A confirmation of the recovered files will be shown and those files will appear back in their original folder.
|
||||
|
||||
```text
|
||||
ID ItemName ParentPath Size Owner Created Modified
|
||||
f43bff59de56 slides 1.ppt /presentations/company_culture 23 kB 2023-07-05T18:37:57Z 2023-07-05T18:37:58Z
|
||||
c0de2282e9c7 giftcards.xls /presentations/company_culture 0 B 2023-07-05T18:37:47Z 2023-07-05T18:37:47Z
|
||||
```
|
||||
|
||||
See [here](../setup/restore-options) for more restoration options.
|
||||
|
||||
## Read more
|
||||
|
||||
The above tutorial only scratches the surface for Corso's capabilities. We encourage you to dig deeper by:
|
||||
|
||||
* Learning about [Corso concepts and setup](../setup/concepts)
|
||||
* Explore Corso backup and restore options for M365 Applications in the [Command Line Reference](../cli/corso)
|
||||
* Leverage Corso's [Advanced Restoration Options](../setup/restore-options)
|
||||
* Explore Corso backup and restore options for Exchange and Onedrive in the [Command Line Reference](../cli/corso)
|
||||
@ -10,8 +10,7 @@ import {Version} from '@site/src/corsoEnv';
|
||||
<TabItem value="win" label="Windows (Powershell)">
|
||||
|
||||
<CodeBlock language="powershell">{
|
||||
`$ProgressPreference = 'SilentlyContinue'
|
||||
Invoke-WebRequest \`
|
||||
`Invoke-WebRequest \`
|
||||
-Uri https://github.com/alcionai/corso/releases/download/${Version()}/corso_${Version()}_Windows_x86_64.zip \`
|
||||
-UseBasicParsing -Outfile corso_${Version()}_Windows_x86_64.zip
|
||||
Expand-Archive .\\corso_${Version()}_Windows_x86_64.zip`
|
||||
@ -21,8 +21,7 @@ application to connect to your *M365 tenant* and transfer data during backup and
|
||||
## Corso concepts {#corso-concepts}
|
||||
|
||||
* **Repository** refers to the storage location where Corso securely and efficiently stores encrypted *backups* of your
|
||||
*M365 Service*'s data. See [Repositories](../repos) for more information.
|
||||
*M365 Services* data. See [Repositories](../repos) for more information.
|
||||
|
||||
* **Backup** is a copy of a resource of your *M365 Service*'s data to be used for restores in case of deletion, loss,
|
||||
or corruption of the original data. Corso performs backups incrementally, and each backup only captures data that has
|
||||
changed between backup iterations.
|
||||
* **Backup** is a copy of your *M365 Services* data to be used for restores in case of deletion, loss, or corruption of the
|
||||
original data. Corso performs backups incrementally, and each backup only captures data that has changed between backup iterations.
|
||||
@ -10,13 +10,6 @@ Two things are needed to configure Corso:
|
||||
* Environment variables containing configuration information
|
||||
* A directory for Corso to store its configuration file
|
||||
|
||||
Apart from Environment variables configuration information can also be provided from flags or configuration files.
|
||||
Corso uses the following priority order for configuration:
|
||||
|
||||
1. Flags values
|
||||
2. Environment variables
|
||||
3. Configuration File information
|
||||
|
||||
## Environment variables
|
||||
|
||||
Three distinct pieces of configuration are required by Corso:
|
||||
@ -41,15 +34,15 @@ alternate ways to pass AWS credentials.
|
||||
Ensure that all of the above environment variables are defined in your Powershell environment.
|
||||
|
||||
```powershell
|
||||
$Env:AWS_ACCESS_KEY_ID = '...'
|
||||
$Env:AWS_SECRET_ACCESS_KEY = '...'
|
||||
$Env:AWS_ACCESS_KEY_ID = "..."
|
||||
$Env:AWS_SECRET_ACCESS_KEY = "..."
|
||||
$Env:AWS_SESSION_TOKEN = ""
|
||||
|
||||
$Env:AZURE_CLIENT_ID = '...'
|
||||
$Env:AZURE_TENANT_ID = '...'
|
||||
$Env:AZURE_CLIENT_SECRET = '...'
|
||||
$Env:AZURE_CLIENT_ID = "..."
|
||||
$Env:AZURE_TENANT_ID = "..."
|
||||
$Env:AZURE_CLIENT_SECRET = "..."
|
||||
|
||||
$Env:CORSO_PASSPHRASE = 'CHANGE-ME-THIS-IS-INSECURE'
|
||||
$Env:CORSO_PASSPHRASE = "CHANGE-ME-THIS-IS-INSECURE"
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
@ -112,48 +105,12 @@ To create the environment variables file, you can run the following command:
|
||||
By default, Corso stores its configuration file (`.corso.toml`) in the user's home directory.
|
||||
The location of the configuration file can be specified using the `--config-file` option.
|
||||
|
||||
The config file can also be used to provide other configuration information like Azure and AWS credentials as mentioned below:
|
||||
|
||||
```bash
|
||||
# AWS configs
|
||||
aws_access_key_id = '...'
|
||||
aws_secret_access_key = '...'
|
||||
aws_session_token = '...'
|
||||
|
||||
# M365 config
|
||||
account_provider = '...'
|
||||
azure_tenantid = '...'
|
||||
azure_client_id = '...'
|
||||
azure_secret = '...'
|
||||
|
||||
# Corso passphrase
|
||||
passphrase = '...'
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="unix" label="Linux/macOS">
|
||||
|
||||
By default, Corso stores its configuration file (`.corso.toml`) in the user's home directory.
|
||||
The location of the configuration file can be specified using the `--config-file` option.
|
||||
|
||||
The config file can also be used to provide other configuration information like Azure and AWS credentials as mentioned below:
|
||||
|
||||
```bash
|
||||
# AWS configs
|
||||
aws_access_key_id = '...'
|
||||
aws_secret_access_key = '...'
|
||||
aws_session_token = '...'
|
||||
|
||||
# M365 config
|
||||
account_provider = '...'
|
||||
azure_tenantid = '...'
|
||||
azure_client_id = '...'
|
||||
azure_secret = '...'
|
||||
|
||||
# Corso passphrase
|
||||
passphrase = '...'
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="docker" label="Docker">
|
||||
|
||||
@ -167,60 +124,5 @@ directory within the container.
|
||||
<command> <command options>`
|
||||
}</CodeBlock>
|
||||
|
||||
The config file can also be used to provide other configuration information like Azure and AWS credentials as mentioned below:
|
||||
|
||||
```bash
|
||||
# AWS configs
|
||||
aws_access_key_id = '...'
|
||||
aws_secret_access_key = '...'
|
||||
aws_session_token = '...'
|
||||
|
||||
# M365 config
|
||||
account_provider = '...'
|
||||
azure_tenantid = '...'
|
||||
azure_client_id = '...'
|
||||
azure_secret = '...'
|
||||
|
||||
# Corso passphrase
|
||||
passphrase = '...'
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
## Log Files
|
||||
|
||||
Corso generates a unique log file named with its timestamp for every invocation.
|
||||
The default location of Corso's log file is shown below but the location can be overridden by using the `--log-file` flag.
|
||||
The log file will be appended to if multiple Corso invocations are pointed to the same file.
|
||||
|
||||
You can also use `stdout` or `stderr` as the `--log-file` location to redirect the logs to "stdout" and "stderr" respectively.
|
||||
This setting can cause logs to compete with progress bar displays in the terminal.
|
||||
We suggest using the `--hide-progress` option if you plan to log to stdout or stderr.
|
||||
|
||||
Log entries, by default, include user names and file names. The `--mask-sensitive-data` option can be
|
||||
used to replace this information with anonymized hashes.
|
||||
|
||||
<Tabs groupId="os">
|
||||
<TabItem value="win" label="Windows">
|
||||
|
||||
```powershell
|
||||
%LocalAppData%\corso\logs\<timestamp>.log
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="unix" label="Linux">
|
||||
|
||||
```bash
|
||||
$HOME/.cache/corso/logs/<timestamp>.log
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="macos" label="macOS">
|
||||
|
||||
```bash
|
||||
$HOME/Library/Logs/corso/logs/<timestamp>.log
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
@ -1,5 +1,5 @@
|
||||
---
|
||||
description: "Connect to a Microsoft 365 tenant"
|
||||
description: "Connect to a Microsft 365 tenant"
|
||||
---
|
||||
|
||||
# Microsoft 365 access
|
||||
@ -52,20 +52,10 @@ then click **Add permissions**.
|
||||
| API / Permissions Name | Type | Description
|
||||
|:--|:--|:--|
|
||||
| Calendars.ReadWrite | Application | Read and write calendars in all mailboxes |
|
||||
| ChannelMessage.Read.All | Application | Read all messages in Teams' channels |
|
||||
| ChannelSettings.Read.All | Application | Read all Teams' channel settings |
|
||||
| Chat.Read.All | Application | Read all Teams' chats and chat messages |
|
||||
| Contacts.ReadWrite | Application | Read and write contacts in all mailboxes |
|
||||
| Directory.Read.All | Application | Read all organization directory data |
|
||||
| Files.ReadWrite.All | Application | Read and write files in all site collections |
|
||||
| MailboxSettings.Read | Application | Read all user mailbox settings |
|
||||
| Mail.ReadWrite | Application | Read and write mail in all mailboxes |
|
||||
| Member.Read.Hidden | Application | Read hidden group memberships |
|
||||
| Sites.FullControl.All | Application | Have full control of all site collections |
|
||||
| TeamMember.Read.All | Application | Read all Teams' user memberships |
|
||||
| TeamSettings.Read.All | Application | Read all Teams' settings |
|
||||
| User.Read.All | Application | Read all users' full profiles |
|
||||
|
||||
<!-- vale Microsoft.Spacing = YES -->
|
||||
|
||||
### Grant admin consent
|
||||
@ -92,8 +82,8 @@ Copy the client and tenant IDs and export them into the following environment va
|
||||
<TabItem value="win" label="Powershell">
|
||||
|
||||
```powershell
|
||||
$Env:AZURE_CLIENT_ID = '<Application (client) ID for configured app>'
|
||||
$Env:AZURE_TENANT_ID = '<Directory (tenant) ID for configured app>'
|
||||
$Env:AZURE_CLIENT_ID = "<Application (client) ID for configured app>"
|
||||
$Env:AZURE_TENANT_ID = "<Directory (tenant) ID for configured app>"
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
@ -131,7 +121,7 @@ environment variable.
|
||||
<TabItem value="win" label="Powershell">
|
||||
|
||||
```powershell
|
||||
$Env:AZURE_CLIENT_SECRET = '<Client secret value>'
|
||||
$Env:AZURE_CLIENT_SECRET = "<Client secret value>"
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
@ -10,27 +10,18 @@ import TabItem from '@theme/TabItem';
|
||||
import TOCInline from '@theme/TOCInline';
|
||||
import {Version} from '@site/src/corsoEnv';
|
||||
|
||||
A Corso [repository](../concepts#corso-concepts) stores encrypted copies of a Microsoft 365 tenant's
|
||||
backup data. Each repository is configured to store data in an object storage bucket and, optionally,
|
||||
a user-specified prefix within the bucket. A repository is only meant to store a single tenant's data
|
||||
but a single object storage bucket can contain multiple repositories if unique `--prefix` options are
|
||||
specified when initializing a repository.
|
||||
|
||||
Within a repository, Corso uses
|
||||
AES256-GCM-HMAC-SHA256 to encrypt data at rest using keys that are derived from the repository passphrase.
|
||||
Data in flight to and from the repository is encrypted via TLS.
|
||||
|
||||
Repositories are supported on the following storage systems:
|
||||
A Corso [repository](../concepts#corso-concepts) stores encrypted copies of your backup data. Repositories are
|
||||
supported on the following object storage systems:
|
||||
|
||||
<TOCInline toc={toc} maxHeadingLevel={2}/><br/>
|
||||
|
||||
:::note
|
||||
Depending on community interest, Corso will add support for other storage backends in the future.
|
||||
Depending on community interest, Corso will add support for other object storage backends in the future.
|
||||
:::
|
||||
|
||||
## Amazon S3
|
||||
|
||||
### S3 Prerequisites
|
||||
### Prerequisites
|
||||
|
||||
Before setting up your Corso S3 repository, the following prerequisites must be met:
|
||||
|
||||
@ -73,18 +64,18 @@ The two most commonly-used options are:
|
||||
`AWS_SHARED_CREDENTIALS_FILE`, if not using the default file location. You can learn more about the AWS CLI
|
||||
environment variables [here](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-envvars.html).
|
||||
|
||||
### Initialize a S3 repository
|
||||
### Initialize repository
|
||||
|
||||
Before first use, you need to initialize a Corso repository with `corso repo init s3`. See the command details
|
||||
[here](../../cli/corso-repo-init-s3).
|
||||
[here](../../cli/corso_repo_init_s3).
|
||||
|
||||
<Tabs groupId="os">
|
||||
<TabItem value="win" label="Powershell">
|
||||
|
||||
```powershell
|
||||
# Initialize the Corso Repository
|
||||
$Env:CORSO_PASSPHRASE = 'CHANGE-ME-THIS-IS-INSECURE'
|
||||
.\corso repo init s3 --bucket corso-repo
|
||||
$Env:CORSO_PASSPHRASE = "CHANGE-ME-THIS-IS-INSECURE"
|
||||
.\corso repo init s3 --bucket corso-test
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
@ -93,7 +84,7 @@ Before first use, you need to initialize a Corso repository with `corso repo ini
|
||||
```bash
|
||||
# Initialize the Corso Repository
|
||||
export CORSO_PASSPHRASE="CHANGE-ME-THIS-IS-INSECURE"
|
||||
./corso repo init s3 --bucket corso-repo
|
||||
./corso repo init s3 --bucket corso-test
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
@ -104,23 +95,23 @@ Before first use, you need to initialize a Corso repository with `corso repo ini
|
||||
export CORSO_PASSPHRASE="CHANGE-ME-THIS-IS-INSECURE"
|
||||
docker run --env-file $HOME/.corso/corso.env \\
|
||||
--volume $HOME/.corso:/app/corso ghcr.io/alcionai/corso:${Version()} \\
|
||||
repo init s3 --bucket corso-repo`
|
||||
repo init s3 --bucket corso-test`
|
||||
}</CodeBlock>
|
||||
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
### Connect to a S3 repository
|
||||
### Connect to a repository
|
||||
|
||||
If a repository already exists, you can connect to it with `corso repo connect s3`. See the command details
|
||||
[here](../../cli/corso-repo-connect-s3).
|
||||
[here](../../cli/corso_repo_connect_s3).
|
||||
|
||||
<Tabs groupId="os">
|
||||
<TabItem value="win" label="Powershell">
|
||||
|
||||
```powershell
|
||||
# Connect to the Corso Repository
|
||||
.\corso repo connect s3 --bucket corso-repo
|
||||
.\corso repo connect s3 --bucket corso-test
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
@ -128,7 +119,7 @@ If a repository already exists, you can connect to it with `corso repo connect s
|
||||
|
||||
```bash
|
||||
# Connect to the Corso Repository
|
||||
./corso repo connect s3 --bucket corso-repo
|
||||
./corso repo connect s3 --bucket corso-test
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
@ -138,7 +129,7 @@ If a repository already exists, you can connect to it with `corso repo connect s
|
||||
`# Connect to the Corso Repository
|
||||
docker run --env-file $HOME/.corso/corso.env \\
|
||||
--volume $HOME/.corso:/app/corso ghcr.io/alcionai/corso:${Version()} \\
|
||||
repo connect s3 --bucket corso-repo`
|
||||
repo connect s3 --bucket corso-test`
|
||||
}</CodeBlock>
|
||||
|
||||
</TabItem>
|
||||
@ -158,89 +149,4 @@ need to use the following flag with the initial Corso `repo init` command:
|
||||
|
||||
Corso also supports the use of object storage systems with no TLS certificate or with self-signed
|
||||
TLS certificates with the `--disable-tls` or `--disable-tls-verification` flags.
|
||||
[These flags](../../cli/corso-repo-init-s3) should only be used for testing.
|
||||
|
||||
## Filesystem Storage
|
||||
|
||||
:::note
|
||||
Filesystem repositories are only recommended for testing and pre-production use. They aren't recommended for
|
||||
production.
|
||||
:::
|
||||
|
||||
### Initialize a filesystem repository
|
||||
|
||||
Before first use, you need to initialize a Corso repository with `corso repo init filesystem`. See the command details
|
||||
[here](../../cli/corso-repo-init-filesystem). Corso will create the directory structure if necessary, including any
|
||||
missing parent directories.
|
||||
|
||||
Filesystem repositories don't support the `--prefix` option but instead use the `--path` option. Repository directories
|
||||
are created with `0700` permission mode and files withing the repository are created with `0600`. Once created, these
|
||||
repositories can't be moved to object storage later.
|
||||
|
||||
<Tabs groupId="os">
|
||||
<TabItem value="win" label="Powershell">
|
||||
|
||||
```powershell
|
||||
# Initialize the Corso Repository
|
||||
$Env:CORSO_PASSPHRASE = 'CHANGE-ME-THIS-IS-INSECURE'
|
||||
.\corso repo init filesystem --path C:\Users\user\corso-repo
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="unix" label="Linux/macOS">
|
||||
|
||||
```bash
|
||||
# Initialize the Corso Repository
|
||||
export CORSO_PASSPHRASE="CHANGE-ME-THIS-IS-INSECURE"
|
||||
./corso repo init filesystem --path $HOME/corso-repo
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="docker" label="Docker">
|
||||
|
||||
<CodeBlock language="bash">{
|
||||
`# Initialize the Corso Repository
|
||||
export CORSO_PASSPHRASE="CHANGE-ME-THIS-IS-INSECURE"
|
||||
docker run --env-file $HOME/.corso/corso.env \\
|
||||
--volume $HOME/.corso:/app/corso ghcr.io/alcionai/corso:${Version()} \\
|
||||
--volume /path/on/host/corso-repo:/path/in/container/corso-repo \\
|
||||
repo init filesystem --path /path/in/container/corso-repo`
|
||||
}</CodeBlock>
|
||||
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
### Connect to a filesystem repository
|
||||
|
||||
If a repository already exists, you can connect to it with `corso repo connect filesystem`. See the command details
|
||||
[here](../../cli/corso-repo-connect-filesystem).
|
||||
|
||||
<Tabs groupId="os">
|
||||
<TabItem value="win" label="Powershell">
|
||||
|
||||
```powershell
|
||||
# Connect to the Corso Repository
|
||||
.\corso repo connect filesystem --path C:\Users\user\corso-repo
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="unix" label="Linux/macOS">
|
||||
|
||||
```bash
|
||||
# Connect to the Corso Repository
|
||||
./corso repo connect filesystem --path $HOME/corso-repo
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="docker" label="Docker">
|
||||
|
||||
<CodeBlock language="bash">{
|
||||
`# Connect to the Corso Repository
|
||||
docker run --env-file $HOME/.corso/corso.env \\
|
||||
--volume $HOME/.corso:/app/corso ghcr.io/alcionai/corso:${Version()} \\
|
||||
--volume /path/on/host/corso-repo:/path/in/container/corso-repo \\
|
||||
repo connect filesystem --path /path/in/container/corso-repo`
|
||||
}</CodeBlock>
|
||||
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
[These flags](../../cli/corso_repo_init_s3) should only be used for testing.
|
||||
6
docs/docs/support/bugs_and_features.md
Normal file
@ -0,0 +1,6 @@
|
||||
# Bugs and new features
|
||||
|
||||
You can learn more about the Corso roadmap and how to interpret it [here](https://github.com/alcionai/corso-roadmap).
|
||||
|
||||
If you run into a bug or have feature requests, please file a [GitHub issue](https://github.com/alcionai/corso/issues/)
|
||||
and attach the `bug` or `enhancement` label to the issue.
|
||||
14
docs/docs/support/known_issues.md
Normal file
@ -0,0 +1,14 @@
|
||||
# Known issues
|
||||
|
||||
Below is a list of known Corso issues and limitations:
|
||||
|
||||
* Only supports Exchange (email, calendars, contact) and OneDrive (files) M365 data. Additional
|
||||
data types and services will be added in subsequent releases.
|
||||
|
||||
* Restores are non-destructive to a dedicated restore folder in the original Exchange mailbox or OneDrive account.
|
||||
Advanced restore options such as in-place restore, or restore to a specific folder or to a different account aren't
|
||||
yet supported.
|
||||
|
||||
* Provides no guarantees about whether data moved, added, or deleted in M365
|
||||
while a backup is being created will be included in the running backup.
|
||||
Future backups run when the data isn't modified will include the data.
|
||||
@ -1,8 +1,8 @@
|
||||
// @ts-check
|
||||
// Note: type annotations allow type checking and IDEs autocompletion
|
||||
|
||||
const lightCodeTheme = require('prism-react-renderer').themes.github;
|
||||
const darkCodeTheme = require('prism-react-renderer').themes.dracula;
|
||||
const lightCodeTheme = require('prism-react-renderer/themes/github');
|
||||
const darkCodeTheme = require('prism-react-renderer/themes/dracula');
|
||||
|
||||
/** @type {import('@docusaurus/types').Config} */
|
||||
const config = {
|
||||
@ -47,7 +47,7 @@ const config = {
|
||||
sidebarPath: require.resolve('./sidebars.js'),
|
||||
remarkPlugins: [require('mdx-mermaid')],
|
||||
editUrl:
|
||||
'https://github.com/alcionai/corso/tree/main/website',
|
||||
'https://github.com/alcionai/corso/tree/main/docs',
|
||||
},
|
||||
blog: {
|
||||
showReadingTime: true,
|
||||
@ -59,7 +59,7 @@ const config = {
|
||||
filename: 'sitemap.xml',
|
||||
},
|
||||
gtag: {
|
||||
trackingID: 'GTM-KM3XWPV',
|
||||
trackingID: 'G-YXBFPQZ05N',
|
||||
},
|
||||
theme: {
|
||||
customCss: require.resolve('./src/css/custom.scss'),
|
||||
@ -79,23 +79,12 @@ const config = {
|
||||
srcDark: 'img/corso_horizontal_logo_white.svg',
|
||||
},
|
||||
items: [
|
||||
{
|
||||
type: 'doc',
|
||||
docId: 'quickstart',
|
||||
position: 'left',
|
||||
label: 'Quick Start',
|
||||
},
|
||||
{
|
||||
type: 'doc',
|
||||
docId: 'intro',
|
||||
position: 'left',
|
||||
label: 'Docs',
|
||||
},
|
||||
{
|
||||
href: 'https://discord.gg/63DTTSnuhT',
|
||||
label: 'Community',
|
||||
position: 'left',
|
||||
},
|
||||
{
|
||||
to: '/blog',
|
||||
label: 'Blog',
|
||||
@ -117,12 +106,30 @@ const config = {
|
||||
},
|
||||
links: [
|
||||
{
|
||||
title: 'Open Source',
|
||||
title: 'Resources',
|
||||
items: [
|
||||
{
|
||||
label: 'Docs',
|
||||
to: '/docs/intro',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
title: 'Community',
|
||||
items: [
|
||||
{
|
||||
label: 'Discord',
|
||||
href: 'https://discord.gg/63DTTSnuhT',
|
||||
},
|
||||
{
|
||||
label: 'Twitter',
|
||||
href: 'https://twitter.com/CorsoBackup',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
title: 'More',
|
||||
items: [
|
||||
{
|
||||
label: 'Blog',
|
||||
to: '/blog',
|
||||
@ -131,26 +138,6 @@ const config = {
|
||||
label: 'GitHub',
|
||||
href: 'https://github.com/alcionai/corso',
|
||||
},
|
||||
{
|
||||
label: 'Corso Discord',
|
||||
href: 'https://discord.gg/63DTTSnuhT',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
title: ' ',
|
||||
},
|
||||
{
|
||||
title: 'Alcion, Powered by Corso',
|
||||
items: [
|
||||
{
|
||||
label: 'Backup as a Service',
|
||||
href: 'https://www.alcion.ai',
|
||||
},
|
||||
{
|
||||
label: 'Alcion Discord',
|
||||
href: 'https://www.alcion.ai/discord',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
@ -158,8 +145,8 @@ const config = {
|
||||
},
|
||||
colorMode: {
|
||||
defaultMode: 'dark',
|
||||
disableSwitch: false,
|
||||
respectPrefersColorScheme: true,
|
||||
disableSwitch: true,
|
||||
respectPrefersColorScheme: false,
|
||||
},
|
||||
|
||||
zoom: {
|
||||
@ -176,18 +163,13 @@ const config = {
|
||||
},
|
||||
},
|
||||
|
||||
algolia: {
|
||||
appId: 'EPJZU1WKE7',
|
||||
apiKey: 'd432a94741013719fdd0d78275c7aa9c',
|
||||
indexName: 'corsobackup',
|
||||
contextualSearch: true,
|
||||
},
|
||||
|
||||
image: 'img/cloudbackup.png',
|
||||
|
||||
metadata : [
|
||||
{name: 'twitter:card', content: 'summary_large_image'},
|
||||
{name: 'twitter:site', content: '@corsobackup'},
|
||||
{name: 'twitter:title', content: 'Corso: Free, Secure, and Open-Source Backup for Microsoft 365'},
|
||||
{name: 'twitter:description', content: 'Corso is an open-source tool that protects Microsoft 365 data by securely and efficiently backing up all business-critical data to object storage.'},
|
||||
],
|
||||
|
||||
prism: {
|
||||
24886
docs/package-lock.json
generated
Normal file
56
docs/package.json
Normal file
@ -0,0 +1,56 @@
|
||||
{
|
||||
"name": "docs",
|
||||
"version": "0.1.0",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"docusaurus": "docusaurus",
|
||||
"start": "docusaurus start",
|
||||
"build": "docusaurus build",
|
||||
"swizzle": "docusaurus swizzle",
|
||||
"deploy": "docusaurus deploy",
|
||||
"clear": "docusaurus clear",
|
||||
"serve": "docusaurus serve",
|
||||
"write-translations": "docusaurus write-translations",
|
||||
"write-heading-ids": "docusaurus write-heading-ids"
|
||||
},
|
||||
"dependencies": {
|
||||
"@docusaurus/core": "2.2.0",
|
||||
"@docusaurus/plugin-google-gtag": "^2.2.0",
|
||||
"@docusaurus/preset-classic": "2.2.0",
|
||||
"@loadable/component": "^5.15.2",
|
||||
"@mdx-js/react": "^1.6.22",
|
||||
"animate.css": "^4.1.1",
|
||||
"clsx": "^1.2.1",
|
||||
"docusaurus-plugin-image-zoom": "^0.1.1",
|
||||
"docusaurus-plugin-sass": "^0.2.2",
|
||||
"feather-icons": "^4.29.0",
|
||||
"jarallax": "^2.0.4",
|
||||
"mdx-mermaid": "^1.3.2",
|
||||
"mermaid": "^9.2.2",
|
||||
"prism-react-renderer": "^1.3.5",
|
||||
"react": "^17.0.2",
|
||||
"react-dom": "^17.0.2",
|
||||
"sass": "^1.56.1",
|
||||
"tw-elements": "^1.0.0-alpha12",
|
||||
"wowjs": "^1.1.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@docusaurus/module-type-aliases": "2.2.0",
|
||||
"@iconify/react": "^4.0.0",
|
||||
"autoprefixer": "^10.4.13",
|
||||
"postcss": "^8.4.19",
|
||||
"tailwindcss": "^3.2.4"
|
||||
},
|
||||
"browserslist": {
|
||||
"production": [
|
||||
">0.5%",
|
||||
"not dead",
|
||||
"not op_mini all"
|
||||
],
|
||||
"development": [
|
||||
"last 1 chrome version",
|
||||
"last 1 firefox version",
|
||||
"last 1 safari version"
|
||||
]
|
||||
}
|
||||
}
|
||||
58
docs/sidebars.js
Normal file
@ -0,0 +1,58 @@
|
||||
/**
|
||||
* Creating a sidebar enables you to:
|
||||
- create an ordered group of docs
|
||||
- render a sidebar for each doc of that group
|
||||
- provide next/previous navigation
|
||||
|
||||
The sidebars can be generated from the filesystem, or explicitly defined here.
|
||||
|
||||
Create as many sidebars as you want.
|
||||
*/
|
||||
|
||||
// @ts-check
|
||||
|
||||
/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */
|
||||
const sidebars = {
|
||||
// By default, Docusaurus generates a sidebar from the docs folder structure
|
||||
docsSidebar: [
|
||||
'intro',
|
||||
'quickstart',
|
||||
{
|
||||
type: 'category',
|
||||
label: 'Corso setup',
|
||||
items: ['setup/concepts', 'setup/download', 'setup/m365_access', 'setup/configuration', 'setup/repos'],
|
||||
},
|
||||
{
|
||||
type: 'category',
|
||||
label: 'Command line reference',
|
||||
link: {
|
||||
slug: 'cli/corso',
|
||||
description: 'Explore the commonly used Corso CLI commands',
|
||||
type: 'generated-index',
|
||||
},
|
||||
items: [
|
||||
'cli/corso_repo_init_s3', 'cli/corso_repo_connect_s3',
|
||||
'cli/corso_backup_create_exchange', 'cli/corso_backup_list_exchange', 'cli/corso_backup_details_exchange',
|
||||
'cli/corso_backup_create_onedrive', 'cli/corso_backup_list_onedrive', 'cli/corso_backup_details_onedrive',
|
||||
'cli/corso_restore_exchange', 'cli/corso_restore_onedrive',
|
||||
'cli/corso_env'
|
||||
]
|
||||
},
|
||||
{
|
||||
type: 'category',
|
||||
label: 'Support',
|
||||
items: [
|
||||
'support/bugs_and_features', 'support/known_issues', 'support/faq'
|
||||
],
|
||||
},
|
||||
{
|
||||
type: 'category',
|
||||
label: 'Developer guide',
|
||||
items: [
|
||||
'developers/build', 'developers/testing', 'developers/linters'
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
module.exports = sidebars;
|
||||
@ -16,7 +16,8 @@ export default function Cookies() {
|
||||
This website uses cookies to provide you with a great user experience.
|
||||
By using it, you accept our{" "}
|
||||
<a
|
||||
href="/cookies"
|
||||
href="cookies.html"
|
||||
target="_blank"
|
||||
className="text-emerald-600 dark:text-emerald-500 font-semibold"
|
||||
>
|
||||
use of cookies
|
||||
@ -9,7 +9,7 @@ export default function Demo() {
|
||||
<div className="wow w-[95%] sm:w-[80%] animate__animated relative animate__fadeIn">
|
||||
<div className="flex flex-row items-center bg-gray-200 rounded-t-lg h-6">
|
||||
<div className="align-middle flex flex-col items-center justify-center">
|
||||
<img className="h-4 px-2" src="assets/images/powershell.svg" alt="Powershell logo"/>
|
||||
<img className="h-4 px-2" src="assets/images/powershell.svg" />
|
||||
</div>
|
||||
</div>
|
||||
<div
|
||||
@ -18,7 +18,6 @@ export default function Demo() {
|
||||
borderLeft: "2px solid #e5e7eb",
|
||||
borderRight: "2px solid #e5e7eb",
|
||||
borderBottom: "2px solid #e5e7eb",
|
||||
backgroundColor: "#121831",
|
||||
}}
|
||||
>
|
||||
<video className="w-full" poster="assets/images/corso_demo_thumbnail.png" muted loop autoPlay playsInline>
|
||||
@ -2,7 +2,7 @@ import React from "react";
|
||||
import "animate.css";
|
||||
import { Icon } from "@iconify/react";
|
||||
|
||||
export default function CTA() {
|
||||
export default function FourthSection() {
|
||||
return (
|
||||
<section className="relative !tracking-wide md:py-16 py-12 md:pt-0 pt-0">
|
||||
<div className="absolute bottom-0 left-0 !z-0 right-0 sm:h-2/3 h-4/5 bg-gradient-to-b from-indigo-500 to-indigo-600"></div>
|
||||
@ -33,7 +33,7 @@ export default function CTA() {
|
||||
<br /> Microsoft 365 Data!
|
||||
</h3>
|
||||
<h6 className="text-white/50 text-lg font-semibold">
|
||||
Corso (Free and Open Source) or <br/> Alcion (Managed Backup as a Service)
|
||||
Corso is Free and Open Source
|
||||
</h6>
|
||||
</div>
|
||||
</div>
|
||||
@ -41,24 +41,15 @@ export default function CTA() {
|
||||
<div className="mt-8">
|
||||
<div className="section-title text-md-start">
|
||||
<p className="text-white/50 max-w-xl mx-auto mb-2">
|
||||
Whether you want to self-host or use a managed service, we have you covered!
|
||||
Follow our quick-start guide to start protecting your
|
||||
business-critical Microsoft 365 data in just a few
|
||||
minutes.
|
||||
</p>
|
||||
<a
|
||||
href="https://www.alcion.ai/"
|
||||
href="docs/quickstart"
|
||||
className="!text-white !no-underline flex flex-row items-center !hover:text-white"
|
||||
>
|
||||
Try Alcion{" "}
|
||||
<Icon
|
||||
icon="uim:angle-right-b"
|
||||
className="align-middle"
|
||||
/>
|
||||
</a>
|
||||
<p></p>
|
||||
<a
|
||||
href="docs/quickstart/"
|
||||
className="!text-white !no-underline flex flex-row items-center !hover:text-white"
|
||||
>
|
||||
Corso Quickstart{" "}
|
||||
Get Started{" "}
|
||||
<Icon
|
||||
icon="uim:angle-right-b"
|
||||
className="align-middle"
|
||||
74
docs/src/components/parts/Hero.js
Normal file
@ -0,0 +1,74 @@
|
||||
import React from "react";
|
||||
import "animate.css";
|
||||
|
||||
export default function Hero() {
|
||||
return (
|
||||
<section className="relative !tracking-wide flex flex-col home-wrapper items-center overflow-hidden">
|
||||
<div
|
||||
className="bg-[#151C3D] absolute"
|
||||
style={{
|
||||
left: "-20rem",
|
||||
right: 0,
|
||||
zIndex: 1,
|
||||
top: "-30%",
|
||||
height: "62rem",
|
||||
width: "140rem",
|
||||
transform: "rotate(-12deg)",
|
||||
}}
|
||||
></div>
|
||||
<div
|
||||
style={{
|
||||
zIndex: "1 !important",
|
||||
}}
|
||||
className="!container relative !z-10"
|
||||
>
|
||||
<div className="grid !z-10 grid-cols-1 mt-28 text-center">
|
||||
<div className="wow !z-10 animate__animated animate__fadeIn">
|
||||
<h4 className="font-bold !text-white !z-10 !leading-normal text-4xl lg:text-5xl mb-5">
|
||||
Free, Secure, and Open-Source
|
||||
<br /> Backup for Microsoft 365
|
||||
</h4>
|
||||
<p className="text-slate-300 !z-10 text-xl max-w-xl mx-auto">
|
||||
The #1 open-source backup tool for Microsoft 365
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className="mt-12 !z-10 mb-6 space-x-4">
|
||||
<a
|
||||
href="../docs/quickstart"
|
||||
className="text-2xl !z-10 !no-underline hover:text-white py-2 px-6 font-bold btn bg-indigo-800 hover:bg-indigo-900 border-indigo-800 hover:border-indigo-900 text-white rounded-md"
|
||||
>
|
||||
Quickstart
|
||||
</a>
|
||||
<a
|
||||
href="../docs/setup/download"
|
||||
className="text-2xl !z-10 !no-underline hover:text-white py-2 px-6 font-bold btn bg-indigo-800 hover:bg-indigo-900 border-indigo-800 hover:border-indigo-900 text-white rounded-md"
|
||||
>
|
||||
Download
|
||||
</a>
|
||||
</div>
|
||||
|
||||
<div
|
||||
className="home-dashboard mt-8 !z-10 wow animate__ animate__fadeIn animated"
|
||||
style={{
|
||||
visibility: "visible",
|
||||
animationName: "fadeIn",
|
||||
}}
|
||||
>
|
||||
<img
|
||||
src="img/cloudbackup.svg"
|
||||
className="w-[70%] inline-block object-contain"
|
||||
alt="Cloud backup and storage"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="bg-indigo-600 w-8 h-16 !z-10 absolute left-8 lg:bottom-28 md:bottom-36 sm:bottom-40 bottom-16"></div>
|
||||
<div className="bg-indigo-600/20 w-8 h-16 !z-10 absolute left-20 lg:bottom-32 md:bottom-40 sm:bottom-44 bottom-20"></div>
|
||||
|
||||
<div className="bg-indigo-600/20 !z-10 w-8 h-16 absolute right-20 xl:bottom-[420px] lg:bottom-[315px] md:bottom-[285px] sm:bottom-80 bottom-32"></div>
|
||||
<div className="bg-indigo-600 w-8 h-16 !z-10 absolute right-8 xl:bottom-[440px] lg:bottom-[335px] md:bottom-[305px] sm:bottom-[340px] bottom-36"></div>
|
||||
</div>
|
||||
</section>
|
||||
);
|
||||
}
|
||||
@ -1,24 +1,19 @@
|
||||
import React, { useEffect, useRef } from "react";
|
||||
import React, { useEffect } from "react";
|
||||
import feather from "feather-icons";
|
||||
import { WOW } from "wowjs";
|
||||
import { jarallax } from "jarallax";
|
||||
import { Icon } from "@iconify/react";
|
||||
import "animate.css";
|
||||
import "tw-elements";
|
||||
|
||||
export default function KeyLoveFAQ() {
|
||||
const jarallaxRef = useRef(null);
|
||||
useEffect(() => {
|
||||
if (typeof window !== "undefined") {
|
||||
const WOW = require("wow.js");
|
||||
const father = require("feather-icons");
|
||||
const jarallax = require("jarallax");
|
||||
require("tw-elements");
|
||||
|
||||
new WOW({
|
||||
live: false,
|
||||
}).init();
|
||||
father.replace();
|
||||
jarallax.jarallax(jarallaxRef.current, {
|
||||
speed: 0.2,
|
||||
});
|
||||
}
|
||||
}, []);
|
||||
new WOW().init();
|
||||
feather.replace();
|
||||
jarallax(document.querySelectorAll(".jarallax"), {
|
||||
speed: 0.2,
|
||||
});
|
||||
});
|
||||
|
||||
return (
|
||||
<section className="relative md:py-24 !tracking-wide py-16 overflow-hidden">
|
||||
@ -213,9 +208,9 @@ export default function KeyLoveFAQ() {
|
||||
Community
|
||||
</h3>
|
||||
<p className="text-slate-400">
|
||||
The Corso community provides a venue for Microsoft 365 admins to share and
|
||||
The Corso community provides a venue for M365 admins to share and
|
||||
learn about the importance of data protection as well as best
|
||||
practices around Microsoft 365 secure configuration and compliance
|
||||
practices around M365 secure configuration and compliance
|
||||
management.
|
||||
</p>
|
||||
<ul className="list-none text-slate-400 mt-4">
|
||||
@ -279,7 +274,8 @@ export default function KeyLoveFAQ() {
|
||||
</h3>
|
||||
<p className="text-slate-400">
|
||||
Corso provides secure data backup that protects customers against
|
||||
accidental data loss and service provider downtime.
|
||||
accidental data loss, service provider downtime, and malicious
|
||||
threats including ransomware attacks.
|
||||
</p>
|
||||
<ul className="list-none text-slate-400 mt-4">
|
||||
<li className="mb-1 flex">
|
||||
@ -330,7 +326,7 @@ export default function KeyLoveFAQ() {
|
||||
Robust Backups
|
||||
</h3>
|
||||
<p className="text-slate-400">
|
||||
Corso, purpose-built for Microsoft 365 protection, provides easy-to-use
|
||||
Corso, purpose-built for M365 protection, provides easy-to-use
|
||||
comprehensive backup and restore workflows that reduces backup
|
||||
time, improve time-to-recovery, reduce admin overhead, and replace
|
||||
unreliable scripts or workarounds.
|
||||
@ -341,7 +337,7 @@ export default function KeyLoveFAQ() {
|
||||
className="text-indigo-600 text-xl mr-2"
|
||||
icon="material-symbols:check-circle-outline"
|
||||
/>{" "}
|
||||
Constantly updated Microsoft 365 Graph Data engine
|
||||
Constantly updated M365 Graph Data engine
|
||||
</li>
|
||||
<li className="mb-1 flex">
|
||||
<Icon
|
||||
@ -361,7 +357,7 @@ export default function KeyLoveFAQ() {
|
||||
|
||||
<div className="mt-4">
|
||||
<a
|
||||
href="docs/quickstart/"
|
||||
href="docs/quickstart"
|
||||
className="btn btn-link !no-underline link-underline link-underline-black text-indigo-600 hover:text-indigo-600 after:bg-indigo-600 duration-500 ease-in-out"
|
||||
>
|
||||
Use The Quick Start For Your First Backup{" "}
|
||||
@ -426,7 +422,7 @@ export default function KeyLoveFAQ() {
|
||||
|
||||
<div className="mt-4">
|
||||
<a
|
||||
href="docs/setup/repos/"
|
||||
href="docs/setup/repos"
|
||||
className="btn btn-link !no-underline link-underline link-underline-black text-indigo-600 hover:text-indigo-600 after:bg-indigo-600 duration-500 ease-in-out"
|
||||
>
|
||||
Read about our Object Storage support{" "}
|
||||
@ -452,7 +448,6 @@ export default function KeyLoveFAQ() {
|
||||
<div className="relative rounded-xl overflow-hidden shadow-md dark:shadow-gray-800">
|
||||
<div
|
||||
className="w-full jarallax py-72 bg-slate-400 custom-bg_ bg-no-repeat bg-top"
|
||||
ref={jarallaxRef}
|
||||
data-jarallax='{"speed": 0.1}'
|
||||
></div>
|
||||
</div>
|
||||
@ -461,7 +456,7 @@ export default function KeyLoveFAQ() {
|
||||
|
||||
<div className="md:col-span-6">
|
||||
<div className="accordion space-y-3" id="accordionExample">
|
||||
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
|
||||
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
|
||||
<h2
|
||||
className="accordion-header mb-0 !cursor-pointer font-semibold"
|
||||
id="headingOne"
|
||||
@ -474,7 +469,7 @@ export default function KeyLoveFAQ() {
|
||||
aria-expanded="false"
|
||||
aria-controls="collapseOne"
|
||||
>
|
||||
<span>How do I choose between Corso and Alcion, powered by Corso?</span>
|
||||
<span>What platforms does Corso run on?</span>
|
||||
</button>
|
||||
</h2>
|
||||
<div
|
||||
@ -485,7 +480,8 @@ export default function KeyLoveFAQ() {
|
||||
>
|
||||
<div className="accordion-body p-5">
|
||||
<p className="text-slate-400 !visible dark:text-gray-400">
|
||||
Corso is a good fit for basic backup while Alcion is a better fit if you need increased reliability, security, and support.
|
||||
Corso has both native binaries and container images for
|
||||
Windows, Linux, and macOS.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
@ -503,7 +499,9 @@ export default function KeyLoveFAQ() {
|
||||
aria-expanded="false"
|
||||
aria-controls="collapse2"
|
||||
>
|
||||
<span>What platforms does Corso run on?</span>
|
||||
<span>
|
||||
What Microsoft 365 services can I backup using Corso?
|
||||
</span>
|
||||
</button>
|
||||
</h2>
|
||||
<div
|
||||
@ -514,8 +512,9 @@ export default function KeyLoveFAQ() {
|
||||
>
|
||||
<div className="accordion-body p-5">
|
||||
<p className="text-slate-400 !visible dark:text-gray-400">
|
||||
Corso has both native binaries and container images for
|
||||
Windows, Linux, and macOS.
|
||||
Corso currently supports OneDrive and Exchange. Support
|
||||
for Teams and SharePoint is in active development and is
|
||||
therefore not recommended for production use.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
@ -533,9 +532,7 @@ export default function KeyLoveFAQ() {
|
||||
aria-expanded="false"
|
||||
aria-controls="collapse3"
|
||||
>
|
||||
<span>
|
||||
What Microsoft 365 services can I backup using Corso?
|
||||
</span>
|
||||
<span>What object storage does Corso support?</span>
|
||||
</button>
|
||||
</h2>
|
||||
<div
|
||||
@ -543,36 +540,6 @@ export default function KeyLoveFAQ() {
|
||||
className="accordion-collapse collapse"
|
||||
aria-labelledby="heading3"
|
||||
data-bs-parent="#accordionExample"
|
||||
>
|
||||
<div className="accordion-body p-5">
|
||||
<p className="text-slate-400 !visible dark:text-gray-400">
|
||||
Corso currently supports OneDrive, Exchange, SharePoint,
|
||||
and Teams.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
|
||||
<h2
|
||||
className="accordion-header mb-0 !cursor-pointer font-semibold"
|
||||
id="heading4"
|
||||
>
|
||||
<button
|
||||
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
|
||||
type="button"
|
||||
data-bs-toggle="collapse"
|
||||
data-bs-target="#collapse4"
|
||||
aria-expanded="false"
|
||||
aria-controls="collapse4"
|
||||
>
|
||||
<span>What object storage does Corso support?</span>
|
||||
</button>
|
||||
</h2>
|
||||
<div
|
||||
id="collapse4"
|
||||
className="accordion-collapse collapse"
|
||||
aria-labelledby="heading4"
|
||||
data-bs-parent="#accordionExample"
|
||||
>
|
||||
<div className="accordion-body p-5">
|
||||
<p className="text-slate-400 dark:text-gray-400 !visible">
|
||||
@ -587,23 +554,23 @@ export default function KeyLoveFAQ() {
|
||||
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
|
||||
<h2
|
||||
className="accordion-header mb-0 font-semibold"
|
||||
id="heading5"
|
||||
id="heading4"
|
||||
>
|
||||
<button
|
||||
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
|
||||
type="button"
|
||||
data-bs-toggle="collapse"
|
||||
data-bs-target="#collapse5"
|
||||
data-bs-target="#collapse4"
|
||||
aria-expanded="false"
|
||||
aria-controls="collapse5"
|
||||
aria-controls="collapse4"
|
||||
>
|
||||
<span>How can I get help for Corso?</span>
|
||||
</button>
|
||||
</h2>
|
||||
<div
|
||||
id="collapse5"
|
||||
id="collapse4"
|
||||
className="accordion-collapse collapse"
|
||||
aria-labelledby="heading5"
|
||||
aria-labelledby="heading4"
|
||||
data-bs-parent="#accordionExample"
|
||||
>
|
||||
<div className="accordion-body p-5">
|
||||
@ -633,23 +600,23 @@ export default function KeyLoveFAQ() {
|
||||
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
|
||||
<h2
|
||||
className="accordion-header mb-0 !cursor-pointer font-semibold"
|
||||
id="heading6"
|
||||
id="heading5"
|
||||
>
|
||||
<button
|
||||
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
|
||||
type="button"
|
||||
data-bs-toggle="collapse"
|
||||
data-bs-target="#collapse6"
|
||||
data-bs-target="#collapse5"
|
||||
aria-expanded="false"
|
||||
aria-controls="collapse6"
|
||||
aria-controls="collapse5"
|
||||
>
|
||||
<span>What is Corso's open-source license?</span>
|
||||
</button>
|
||||
</h2>
|
||||
<div
|
||||
id="collapse6"
|
||||
id="collapse5"
|
||||
className="accordion-collapse collapse"
|
||||
aria-labelledby="heading6"
|
||||
aria-labelledby="heading5"
|
||||
data-bs-parent="#accordionExample"
|
||||
>
|
||||
<div className="accordion-body p-5">
|
||||
@ -663,23 +630,23 @@ export default function KeyLoveFAQ() {
|
||||
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
|
||||
<h2
|
||||
className="accordion-header mb-0 !cursor-pointer font-semibold"
|
||||
id="heading7"
|
||||
id="heading6"
|
||||
>
|
||||
<button
|
||||
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
|
||||
type="button"
|
||||
data-bs-toggle="collapse"
|
||||
data-bs-target="#collapse7"
|
||||
data-bs-target="#collapse6"
|
||||
aria-expanded="false"
|
||||
aria-controls="collapse7"
|
||||
aria-controls="collapse6"
|
||||
>
|
||||
<span>How do I request a new feature?</span>
|
||||
</button>
|
||||
</h2>
|
||||
<div
|
||||
id="collapse7"
|
||||
id="collapse6"
|
||||
className="accordion-collapse collapse"
|
||||
aria-labelledby="heading7"
|
||||
aria-labelledby="heading6"
|
||||
data-bs-parent="#accordionExample"
|
||||
>
|
||||
<div className="accordion-body p-5">
|
||||
@ -3,20 +3,19 @@ import "animate.css";
|
||||
import loadable from "@loadable/component";
|
||||
import Hero from "./Hero";
|
||||
import Demo from "./Demo";
|
||||
import CTA from "./CTA";
|
||||
import Users from "./Users";
|
||||
import FourthSection from "./FourthSection";
|
||||
import Cookies from "./Cookies";
|
||||
import KeyLoveFAQ from "./KeyLoveFAQ";
|
||||
|
||||
const KeyLoveFAQComp = loadable(() => import("./KeyLoveFAQ"));
|
||||
const BackToTopComp = loadable(() => import("./BackToTop"));
|
||||
|
||||
export function MainComp() {
|
||||
return (
|
||||
<>
|
||||
<Hero />
|
||||
<Users />
|
||||
<KeyLoveFAQ />
|
||||
<CTA />
|
||||
<Demo />
|
||||
<KeyLoveFAQComp />
|
||||
<FourthSection />
|
||||
<BackToTopComp />
|
||||
<Cookies />
|
||||
</>
|
||||
@ -2,24 +2,15 @@
|
||||
CUSTOM TO THE NEW HOME PAGE
|
||||
*/
|
||||
|
||||
@import "tailwind.scss";
|
||||
@import "icons.scss";
|
||||
@import url('https://fonts.googleapis.com/css2?family=Nunito&display=swap');
|
||||
@import url('https://fonts.googleapis.com/css2?family=Nunito&display=swap');
|
||||
|
||||
:root {
|
||||
--ifm-font-family-base: 'Nunito', sans-serif;
|
||||
*{
|
||||
font-family: 'Nunito', sans-serif !important;
|
||||
}
|
||||
|
||||
html {
|
||||
html{
|
||||
scroll-behavior: smooth !important;
|
||||
}
|
||||
|
||||
.plugin-pages {
|
||||
.colorModeToggle_---node_modules-\@docusaurus-theme-classic-lib-theme-Navbar-Content-styles-module {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
|
||||
.accordion-button-custom::after {
|
||||
-ms-flex-shrink: 0;
|
||||
flex-shrink: 0;
|
||||
@ -37,14 +28,13 @@ html {
|
||||
background-image: url("../../static/assets/download_blue.svg");
|
||||
transform: rotate(-180deg);
|
||||
}
|
||||
|
||||
.accordion-button-custom:not(.collapsed) {
|
||||
color: #2563eb;
|
||||
background-color: #fff;
|
||||
box-shadow: inset 0 -1px 0 #e5e7eb;
|
||||
}
|
||||
|
||||
.link-underline {
|
||||
.link-underline {
|
||||
border-bottom-width: 0;
|
||||
background-image: linear-gradient(transparent, transparent), linear-gradient(#4f46E5, #4f46E5);
|
||||
background-size: 0 2px;
|
||||
@ -72,7 +62,7 @@ html {
|
||||
}
|
||||
}
|
||||
|
||||
.custom-bg_ {
|
||||
.custom-bg_{
|
||||
background: url("../../static/assets/images/cta.jpg")
|
||||
}
|
||||
|
||||
@ -108,9 +98,11 @@ html {
|
||||
|
||||
html[data-theme='dark'] {
|
||||
--ifm-background-color: #111629;
|
||||
--ifm-navbar-background-color: #111629;
|
||||
--ifm-link-color: #087EA4;
|
||||
--docsearch-primary-color: #5468ff;
|
||||
}
|
||||
|
||||
.navbar {
|
||||
background-color: var(--ifm-background-color);
|
||||
}
|
||||
|
||||
.footer {
|
||||
@ -162,22 +154,24 @@ html[data-theme='dark'] {
|
||||
background-color: var(--divider);
|
||||
border: 0;
|
||||
|
||||
>.cliFlagsCol {
|
||||
> .cliFlagsCol {
|
||||
width: 20%;
|
||||
}
|
||||
|
||||
>.cliShortCol {
|
||||
width: 5%;
|
||||
> .cliShortCol {
|
||||
width: 5%;
|
||||
}
|
||||
|
||||
>.cliDefaultCol {
|
||||
> .cliDefaultCol {
|
||||
width: 20%;
|
||||
}
|
||||
|
||||
>.cliHelpCol {
|
||||
width: 55%;
|
||||
> .cliHelpCol {
|
||||
width: 55%;
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
tr th {
|
||||
@ -197,7 +191,7 @@ html[data-theme='dark'] {
|
||||
display: inline-block;
|
||||
line-height: 1.2em;
|
||||
vertical-align: top;
|
||||
color: var(--ifm-color-primary-dark);
|
||||
color: #e74c3c;
|
||||
background-color: unset;
|
||||
}
|
||||
|
||||
@ -240,7 +234,6 @@ html[data-theme='dark'] {
|
||||
}
|
||||
|
||||
div[class*="codeBlockContainer"] {
|
||||
|
||||
&,
|
||||
& pre {
|
||||
margin-bottom: 0;
|
||||
@ -342,175 +335,3 @@ html[class*='docs-doc-id-cli'] .markdown table thead tr {
|
||||
width: 50%;
|
||||
}
|
||||
}
|
||||
|
||||
/* Tiny-Slider CSS */
|
||||
|
||||
.tns-outer {
|
||||
padding: 0 !important
|
||||
}
|
||||
|
||||
.tns-outer [hidden] {
|
||||
display: none !important
|
||||
}
|
||||
|
||||
.tns-outer [aria-controls],
|
||||
.tns-outer [data-action] {
|
||||
cursor: pointer
|
||||
}
|
||||
|
||||
.tns-slider {
|
||||
-webkit-transition: all 0s;
|
||||
-moz-transition: all 0s;
|
||||
transition: all 0s
|
||||
}
|
||||
|
||||
.tns-slider>.tns-item {
|
||||
-webkit-box-sizing: border-box;
|
||||
-moz-box-sizing: border-box;
|
||||
box-sizing: border-box
|
||||
}
|
||||
|
||||
.tns-horizontal.tns-subpixel {
|
||||
white-space: nowrap
|
||||
}
|
||||
|
||||
.tns-horizontal.tns-subpixel>.tns-item {
|
||||
display: inline-block;
|
||||
vertical-align: top;
|
||||
white-space: normal
|
||||
}
|
||||
|
||||
.tns-horizontal.tns-no-subpixel:after {
|
||||
content: '';
|
||||
display: table;
|
||||
clear: both
|
||||
}
|
||||
|
||||
.tns-horizontal.tns-no-subpixel>.tns-item {
|
||||
float: left
|
||||
}
|
||||
|
||||
.tns-horizontal.tns-carousel.tns-no-subpixel>.tns-item {
|
||||
margin-right: -100%
|
||||
}
|
||||
|
||||
.tns-no-calc {
|
||||
position: relative;
|
||||
left: 0
|
||||
}
|
||||
|
||||
.tns-gallery {
|
||||
position: relative;
|
||||
left: 0;
|
||||
min-height: 1px
|
||||
}
|
||||
|
||||
.tns-gallery>.tns-item {
|
||||
position: absolute;
|
||||
left: -100%;
|
||||
-webkit-transition: transform 0s, opacity 0s;
|
||||
-moz-transition: transform 0s, opacity 0s;
|
||||
transition: transform 0s, opacity 0s
|
||||
}
|
||||
|
||||
.tns-gallery>.tns-slide-active {
|
||||
position: relative;
|
||||
left: auto !important
|
||||
}
|
||||
|
||||
.tns-gallery>.tns-moving {
|
||||
-webkit-transition: all 0.25s;
|
||||
-moz-transition: all 0.25s;
|
||||
transition: all 0.25s
|
||||
}
|
||||
|
||||
.tns-autowidth {
|
||||
display: inline-block
|
||||
}
|
||||
|
||||
.tns-lazy-img {
|
||||
-webkit-transition: opacity 0.6s;
|
||||
-moz-transition: opacity 0.6s;
|
||||
transition: opacity 0.6s;
|
||||
opacity: 0.6
|
||||
}
|
||||
|
||||
.tns-lazy-img.tns-complete {
|
||||
opacity: 1
|
||||
}
|
||||
|
||||
.tns-ah {
|
||||
-webkit-transition: height 0s;
|
||||
-moz-transition: height 0s;
|
||||
transition: height 0s
|
||||
}
|
||||
|
||||
.tns-ovh {
|
||||
overflow: hidden
|
||||
}
|
||||
|
||||
.tns-visually-hidden {
|
||||
position: absolute;
|
||||
left: -10000em
|
||||
}
|
||||
|
||||
.tns-transparent {
|
||||
opacity: 0;
|
||||
visibility: hidden
|
||||
}
|
||||
|
||||
.tns-fadeIn {
|
||||
opacity: 1;
|
||||
filter: alpha(opacity=100);
|
||||
z-index: 0
|
||||
}
|
||||
|
||||
.tns-normal,
|
||||
.tns-fadeOut {
|
||||
opacity: 0;
|
||||
filter: alpha(opacity=0);
|
||||
z-index: -1
|
||||
}
|
||||
|
||||
.tns-vpfix {
|
||||
white-space: nowrap
|
||||
}
|
||||
|
||||
.tns-vpfix>div,
|
||||
.tns-vpfix>li {
|
||||
display: inline-block
|
||||
}
|
||||
|
||||
.tns-t-subp2 {
|
||||
margin: 0 auto;
|
||||
width: 310px;
|
||||
position: relative;
|
||||
height: 10px;
|
||||
overflow: hidden
|
||||
}
|
||||
|
||||
.tns-t-ct {
|
||||
width: 2333.3333333%;
|
||||
width: -webkit-calc(100% * 70 / 3);
|
||||
width: -moz-calc(100% * 70 / 3);
|
||||
width: calc(100% * 70 / 3);
|
||||
position: absolute;
|
||||
right: 0
|
||||
}
|
||||
|
||||
.tns-t-ct:after {
|
||||
content: '';
|
||||
display: table;
|
||||
clear: both
|
||||
}
|
||||
|
||||
.tns-t-ct>div {
|
||||
width: 1.4285714%;
|
||||
width: -webkit-calc(100% / 70);
|
||||
width: -moz-calc(100% / 70);
|
||||
width: calc(100% / 70);
|
||||
height: 10px;
|
||||
float: left
|
||||
}
|
||||
|
||||
/* Tiny-slider CSS end */
|
||||
14
docs/src/pages/index.js
Normal file
@ -0,0 +1,14 @@
|
||||
import React from "react";
|
||||
import Layout from "@theme/Layout";
|
||||
import { MainComp } from "@site/src/components/parts/MainComp";
|
||||
|
||||
export default function Home() {
|
||||
return (
|
||||
<Layout
|
||||
title="Free, Secure, and Open-Source Backup for Microsoft 365"
|
||||
description="Intro, docs, and blog for Corso, an open-source tool, that protects Microsoft 365 data by securely and efficiently backing up all business-critical data to object storage."
|
||||
>
|
||||
<MainComp />
|
||||
</Layout>
|
||||
);
|
||||
}
|
||||
|
Before Width: | Height: | Size: 228 B After Width: | Height: | Size: 228 B |
|
Before Width: | Height: | Size: 231 B After Width: | Height: | Size: 231 B |
|
Before Width: | Height: | Size: 335 KiB After Width: | Height: | Size: 335 KiB |
|
Before Width: | Height: | Size: 80 KiB After Width: | Height: | Size: 80 KiB |
|
Before Width: | Height: | Size: 419 KiB After Width: | Height: | Size: 419 KiB |
|
Before Width: | Height: | Size: 415 KiB After Width: | Height: | Size: 415 KiB |
|
Before Width: | Height: | Size: 2.6 KiB After Width: | Height: | Size: 2.6 KiB |
|
Before Width: | Height: | Size: 252 KiB After Width: | Height: | Size: 252 KiB |
|
Before Width: | Height: | Size: 145 KiB After Width: | Height: | Size: 145 KiB |
|
Before Width: | Height: | Size: 138 KiB After Width: | Height: | Size: 138 KiB |
|
Before Width: | Height: | Size: 5.3 KiB After Width: | Height: | Size: 5.3 KiB |
1
docs/static/img/corso_horizontal_logo.svg
vendored
Executable file
@ -0,0 +1 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?><svg id="Layer_1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 1920 523"><defs><style>.cls-1{fill:#205eab;}</style></defs><g><path class="cls-1" d="M134.51,129.94c28.63,0,54.6,7.95,75.81,22.79,11.67,7.95,14.31,23.33,6.36,36.58-7.42,12.19-25.98,12.73-37.64,5.83-12.73-7.42-28.63-12.19-44.53-12.19-41.35,0-77.93,30.22-77.93,76.34s36.58,75.81,77.93,75.81c15.91,0,31.81-4.77,44.53-12.19,11.66-6.89,30.22-6.36,37.64,5.83,7.95,13.25,5.3,28.63-6.36,36.58-21.21,14.84-47.18,22.8-75.81,22.8C63.47,388.12,2.5,337.76,2.5,259.29S63.47,129.94,134.51,129.94Z"/><path class="cls-1" d="M261.22,258.23c0-78.46,58.85-128.3,128.83-128.3s129.88,49.83,129.88,128.3-59.37,129.89-129.88,129.89-128.83-51.43-128.83-129.89Zm204.64,0c0-45.59-34.46-75.28-75.81-75.28s-74.75,29.69-74.75,75.28,33.93,76.87,74.75,76.87,75.81-30.22,75.81-76.87Z"/><path class="cls-1" d="M633.91,293.75v64.15c0,14.84-12.19,27.57-28.1,27.57-14.84,0-26.51-12.72-26.51-27.57V160.15c0-14.84,11.67-27.57,26.51-27.57,15.91,0,28.1,12.72,28.1,27.57v31.81c12.73-44,37.11-62.03,67.86-62.03,7.95,0,15.91,.53,23.33,2.12,13.79,3.18,22.8,16.97,19.62,31.28-4.77,23.86-28.63,18.03-44.53,18.03-46.65,0-66.27,46.65-66.27,112.39Z"/><path class="cls-1" d="M788.19,302.24c13.25-5.3,23.33,1.59,27.57,10.6,10.08,19.09,29.16,29.69,53.55,29.69s42.94-11.13,42.94-29.69c0-15.9-15.38-22.79-33.4-27.03l-33.4-7.95c-52.48-14.32-71.57-42.94-68.39-82.7,3.18-36.58,42.94-65.21,88.53-65.21,32.87,0,63.09,10.6,79.53,36.58,7.42,12.72,3.71,25.44-4.77,31.81-9.01,7.42-20.15,6.89-31.81-3.18-13.78-12.19-29.69-16.97-42.41-16.97-13.79,0-29.16,4.77-34.46,13.25-4.24,6.89-4.77,13.78-2.12,21.21,3.18,9.54,18.02,14.31,31.28,18.02l38.17,9.54c53.54,13.25,64.68,48.24,64.68,73.16,0,47.71-41.88,74.75-98.61,74.75-38.17,0-76.87-20.15-90.13-56.2-4.24-13.25,1.59-25.44,13.25-29.68Z"/><path class="cls-1" 
d="M1006.61,258.23c0-78.46,58.85-128.3,128.83-128.3s129.88,49.83,129.88,128.3-59.37,129.89-129.88,129.89-128.83-51.43-128.83-129.89Zm204.64,0c0-45.59-34.46-75.28-75.81-75.28s-74.75,29.69-74.75,75.28,33.93,76.87,74.75,76.87,75.81-30.22,75.81-76.87Z"/></g><path class="cls-1" d="M1658.37,520.7c-33.98,0-65.93-13.23-89.96-37.26l-221.94-221.93,221.94-221.93c24.03-24.03,55.98-37.27,89.96-37.27s65.93,13.23,89.96,37.27l131.98,131.97c49.6,49.61,49.6,130.31,0,179.92l-131.98,131.98c-24.03,24.03-55.98,37.26-89.96,37.26Zm-264.47-259.2l198.22,198.22c36.53,36.53,95.97,36.52,132.5,0l131.98-131.98c36.53-36.53,36.53-95.97,0-132.5l-131.98-131.97c-17.7-17.7-41.22-27.44-66.25-27.44s-48.55,9.75-66.25,27.44l-198.22,198.22Z"/><g><path class="cls-1" d="M1813.99,260.82l-6.63-41.44c-1.01-7.17-6.37-11.15-14.81-11.15l-49.44-.14c-7.95,0-13.8-3.97-14.81-11.15l-.17-1.18c-1.61-11.39-9.61-20.86-20.58-24.34l-66.42-24.32c-1.66-.53-3.19-1.38-4.51-2.5-.48-.41-.8-.96-.97-1.57l-12.42-42.67c-.13-.43-.36-.83-.7-1.12-3.65-3-9.25-1.95-11.5,2.32l-24.5,54.88c-.89,1.68-1.4,3.54-1.5,5.45l3.01,25.01c-.14,2.64-1.08,5.2-2.7,7.3l-48.86,48.59c-5.2,5.2-16.08,16.08-16.08,16.08l136.84,136.85,12.2-50.1c1.05-5.87,5.91-10.29,11.84-10.77l25.89-2.09c80.88-3.46,81.72-26.8,104.9-63.08,1.35-2.12,2.07-4.58,2.07-7.08,0-.6-.04-1.21-.13-1.81Zm-106.45-40.35c-5.96,1.5-22.58,.54-24.08-5.43-1.5-5.95,12.71-14.66,18.66-16.15,5.96-1.5,12,2.12,13.5,8.08,1.49,5.95-2.13,12-8.08,13.49Z"/><path class="cls-1" d="M1680.04,153.81l-13.04-48.97c-.14-.53-.47-1.01-.92-1.33-2.89-2.07-7.06-1.18-8.79,2.09l-16.37,33.9,39.12,14.32Z"/><path class="cls-1" d="M1655.16,404.17l-.85,3.47c-1.93,7.9-11.75,10.65-17.49,4.9l-123.3-123.3-11.74-11.74,13.35-13.35,11.74,11.74,128.28,128.28Z"/></g></svg>
|
||||
|
After Width: | Height: | Size: 3.5 KiB |
1
docs/static/img/corso_horizontal_logo_white.svg
vendored
Executable file
@ -0,0 +1 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?><svg id="Layer_1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 1920 523"><defs><style>.cls-1{fill:#fff;}</style></defs><g><path class="cls-1" d="M134.51,129.94c28.63,0,54.6,7.95,75.81,22.79,11.67,7.95,14.31,23.33,6.36,36.58-7.42,12.19-25.98,12.73-37.64,5.83-12.73-7.42-28.63-12.19-44.53-12.19-41.35,0-77.93,30.22-77.93,76.34s36.58,75.81,77.93,75.81c15.91,0,31.81-4.77,44.53-12.19,11.66-6.89,30.22-6.36,37.64,5.83,7.95,13.25,5.3,28.63-6.36,36.58-21.21,14.84-47.18,22.8-75.81,22.8C63.47,388.12,2.5,337.76,2.5,259.29S63.47,129.94,134.51,129.94Z"/><path class="cls-1" d="M261.22,258.23c0-78.46,58.85-128.3,128.83-128.3s129.88,49.83,129.88,128.3-59.37,129.89-129.88,129.89-128.83-51.43-128.83-129.89Zm204.64,0c0-45.59-34.46-75.28-75.81-75.28s-74.75,29.69-74.75,75.28,33.93,76.87,74.75,76.87,75.81-30.22,75.81-76.87Z"/><path class="cls-1" d="M633.91,293.75v64.15c0,14.84-12.19,27.57-28.1,27.57-14.84,0-26.51-12.72-26.51-27.57V160.15c0-14.84,11.67-27.57,26.51-27.57,15.91,0,28.1,12.72,28.1,27.57v31.81c12.73-44,37.11-62.03,67.86-62.03,7.95,0,15.91,.53,23.33,2.12,13.79,3.18,22.8,16.97,19.62,31.28-4.77,23.86-28.63,18.03-44.53,18.03-46.65,0-66.27,46.65-66.27,112.39Z"/><path class="cls-1" d="M788.19,302.24c13.25-5.3,23.33,1.59,27.57,10.6,10.08,19.09,29.16,29.69,53.55,29.69s42.94-11.13,42.94-29.69c0-15.9-15.38-22.79-33.4-27.03l-33.4-7.95c-52.48-14.32-71.57-42.94-68.39-82.7,3.18-36.58,42.94-65.21,88.53-65.21,32.87,0,63.09,10.6,79.53,36.58,7.42,12.72,3.71,25.44-4.77,31.81-9.01,7.42-20.15,6.89-31.81-3.18-13.78-12.19-29.69-16.97-42.41-16.97-13.79,0-29.16,4.77-34.46,13.25-4.24,6.89-4.77,13.78-2.12,21.21,3.18,9.54,18.02,14.31,31.28,18.02l38.17,9.54c53.54,13.25,64.68,48.24,64.68,73.16,0,47.71-41.88,74.75-98.61,74.75-38.17,0-76.87-20.15-90.13-56.2-4.24-13.25,1.59-25.44,13.25-29.68Z"/><path class="cls-1" 
d="M1006.61,258.23c0-78.46,58.85-128.3,128.83-128.3s129.88,49.83,129.88,128.3-59.37,129.89-129.88,129.89-128.83-51.43-128.83-129.89Zm204.64,0c0-45.59-34.46-75.28-75.81-75.28s-74.75,29.69-74.75,75.28,33.93,76.87,74.75,76.87,75.81-30.22,75.81-76.87Z"/></g><path class="cls-1" d="M1658.37,520.7c-33.98,0-65.93-13.23-89.96-37.26l-221.94-221.93,221.94-221.93c24.03-24.03,55.98-37.27,89.96-37.27s65.93,13.23,89.96,37.27l131.98,131.97c49.6,49.61,49.6,130.31,0,179.92l-131.98,131.98c-24.03,24.03-55.98,37.26-89.96,37.26Zm-264.47-259.2l198.22,198.22c36.53,36.53,95.97,36.52,132.5,0l131.98-131.98c36.53-36.53,36.53-95.97,0-132.5l-131.98-131.97c-17.7-17.7-41.22-27.44-66.25-27.44s-48.55,9.75-66.25,27.44l-198.22,198.22Z"/><g><path class="cls-1" d="M1813.99,260.82l-6.63-41.44c-1.01-7.17-6.37-11.15-14.81-11.15l-49.44-.14c-7.95,0-13.8-3.97-14.81-11.15l-.17-1.18c-1.61-11.39-9.61-20.86-20.58-24.34l-66.42-24.32c-1.66-.53-3.19-1.38-4.51-2.5-.48-.41-.8-.96-.97-1.57l-12.42-42.67c-.13-.43-.36-.83-.7-1.12-3.65-3-9.25-1.95-11.5,2.32l-24.5,54.88c-.89,1.68-1.4,3.54-1.5,5.45l3.01,25.01c-.14,2.64-1.08,5.2-2.7,7.3l-48.86,48.59c-5.2,5.2-16.08,16.08-16.08,16.08l136.84,136.85,12.2-50.1c1.05-5.87,5.91-10.29,11.84-10.77l25.89-2.09c80.88-3.46,81.72-26.8,104.9-63.08,1.35-2.12,2.07-4.58,2.07-7.08,0-.6-.04-1.21-.13-1.81Zm-106.45-40.35c-5.96,1.5-22.58,.54-24.08-5.43-1.5-5.95,12.71-14.66,18.66-16.15,5.96-1.5,12,2.12,13.5,8.08,1.49,5.95-2.13,12-8.08,13.49Z"/><path class="cls-1" d="M1680.04,153.81l-13.04-48.97c-.14-.53-.47-1.01-.92-1.33-2.89-2.07-7.06-1.18-8.79,2.09l-16.37,33.9,39.12,14.32Z"/><path class="cls-1" d="M1655.16,404.17l-.85,3.47c-1.93,7.9-11.75,10.65-17.49,4.9l-123.3-123.3-11.74-11.74,13.35-13.35,11.74,11.74,128.28,128.28Z"/></g></svg>
|
||||
|
After Width: | Height: | Size: 3.5 KiB |
|
Before Width: | Height: | Size: 1.7 KiB After Width: | Height: | Size: 1.7 KiB |
|
Before Width: | Height: | Size: 1.7 KiB After Width: | Height: | Size: 1.7 KiB |
|
Before Width: | Height: | Size: 5.3 KiB After Width: | Height: | Size: 5.3 KiB |
|
Before Width: | Height: | Size: 38 KiB After Width: | Height: | Size: 38 KiB |
|
Before Width: | Height: | Size: 5.0 KiB After Width: | Height: | Size: 5.0 KiB |
|
Before Width: | Height: | Size: 762 KiB After Width: | Height: | Size: 762 KiB |
|
Before Width: | Height: | Size: 800 KiB After Width: | Height: | Size: 800 KiB |
|
Before Width: | Height: | Size: 700 KiB After Width: | Height: | Size: 700 KiB |
|
Before Width: | Height: | Size: 740 KiB After Width: | Height: | Size: 740 KiB |
|
Before Width: | Height: | Size: 803 KiB After Width: | Height: | Size: 803 KiB |
|
Before Width: | Height: | Size: 1.0 MiB After Width: | Height: | Size: 1.0 MiB |