Compare commits
3 Commits: main ... return-not

| Author | SHA1 | Date |
|---|---|---|
|  | 9369bc36c8 |  |
|  | 5b7d1559ca |  |
|  | a915cd1dc1 |  |
@@ -1,5 +1,4 @@
name: Backup Restore Test
description: Run various backup/restore/export tests for a service.

inputs:
service:
1  .github/actions/go-setup-cache/action.yml  vendored
@@ -1,5 +1,4 @@
name: Setup and Cache Golang
description: Build golang binaries for later use in CI.

# clone of: https://github.com/magnetikonline/action-golang-cache/blob/main/action.yaml
#
1  .github/actions/publish-binary/action.yml  vendored
@@ -1,5 +1,4 @@
name: Publish Binary
description: Publish binary artifacts.

inputs:
version:
1  .github/actions/publish-website/action.yml  vendored
@@ -1,5 +1,4 @@
name: Publish Website
description: Publish website artifacts.

inputs:
aws-iam-role:
44  .github/actions/purge-m365-data/action.yml  vendored
@@ -1,5 +1,4 @@
name: Purge M365 User Data
description: Deletes M365 data generated during CI tests.

# Hard deletion of an m365 user's data. Our CI processes create a lot
# of data churn (creation and immediate deletion) of files, the likes
@ -31,19 +30,12 @@ inputs:
|
||||
description: Secret value of for AZURE_CLIENT_ID
|
||||
azure-client-secret:
|
||||
description: Secret value of for AZURE_CLIENT_SECRET
|
||||
azure-pnp-client-id:
|
||||
description: Secret value of AZURE_PNP_CLIENT_ID
|
||||
azure-pnp-client-cert:
|
||||
description: Base64 encoded private certificate for the azure-pnp-client-id (Secret value of AZURE_PNP_CLIENT_CERT)
|
||||
azure-tenant-id:
|
||||
description: Secret value of AZURE_TENANT_ID
|
||||
description: Secret value of for AZURE_TENANT_ID
|
||||
m365-admin-user:
|
||||
description: Secret value of for M365_TENANT_ADMIN_USER
|
||||
m365-admin-password:
|
||||
description: Secret value of for M365_TENANT_ADMIN_PASSWORD
|
||||
tenant-domain:
|
||||
description: The domain of the tenant (ex. 10rqc2.onmicrosft.com)
|
||||
required: true
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
@ -61,13 +53,7 @@ runs:
|
||||
AZURE_CLIENT_ID: ${{ inputs.azure-client-id }}
|
||||
AZURE_CLIENT_SECRET: ${{ inputs.azure-client-secret }}
|
||||
AZURE_TENANT_ID: ${{ inputs.azure-tenant-id }}
|
||||
run: |
|
||||
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
|
||||
{
|
||||
if (./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
|
||||
break
|
||||
}
|
||||
}
|
||||
run: ./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}
|
||||
|
||||
# TODO(ashmrtn): Re-enable when we figure out errors we're seeing with Get-Mailbox call.
|
||||
#- name: Reset retention for all mailboxes to 0
|
||||
@ -88,16 +74,10 @@ runs:
|
||||
shell: pwsh
|
||||
working-directory: ./src/cmd/purge/scripts
|
||||
env:
|
||||
AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
|
||||
AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
|
||||
TENANT_DOMAIN: ${{ inputs.tenant-domain }}
|
||||
M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
|
||||
M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
|
||||
run: |
|
||||
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
|
||||
{
|
||||
if (./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
|
||||
break
|
||||
}
|
||||
}
|
||||
./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}
|
||||
|
||||
################################################################################################################
|
||||
# Sharepoint
|
||||
@ -108,14 +88,6 @@ runs:
|
||||
shell: pwsh
|
||||
working-directory: ./src/cmd/purge/scripts
|
||||
env:
|
||||
AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
|
||||
AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
|
||||
TENANT_DOMAIN: ${{ inputs.tenant-domain }}
|
||||
run: |
|
||||
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
|
||||
{
|
||||
if (./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
|
||||
M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
|
||||
run: ./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}
|
||||
|
||||
1  .github/actions/teams-message/action.yml  vendored
@@ -1,5 +1,4 @@
name: Send a message to Teams
description: Send messages to communication apps.

inputs:
msg:
1  .github/actions/website-linting/action.yml  vendored
@@ -1,5 +1,4 @@
name: Lint Website
description: Lint website content.

inputs:
version:
2  .github/workflows/binary-publish.yml  vendored
@@ -40,5 +40,5 @@ jobs:
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] Publishing Binary"
msg: "[FAILED] Publishing Binary"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
14  .github/workflows/ci_test_cleanup.yml  vendored
@ -12,7 +12,7 @@ jobs:
|
||||
continue-on-error: true
|
||||
strategy:
|
||||
matrix:
|
||||
user: [CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, ""]
|
||||
user: [ CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, '' ]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@ -33,15 +33,12 @@ jobs:
|
||||
azure-tenant-id: ${{ secrets.TENANT_ID }}
|
||||
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
|
||||
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
|
||||
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
|
||||
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
|
||||
tenant-domain: ${{ vars.TENANT_DOMAIN }}
|
||||
|
||||
- name: Notify failure in teams
|
||||
if: failure()
|
||||
uses: ./.github/actions/teams-message
|
||||
with:
|
||||
msg: "[CORSO FAILED] ${{ vars[matrix.user] }} CI Cleanup"
|
||||
msg: "[FAILED] ${{ vars[matrix.user] }} CI Cleanup"
|
||||
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
|
||||
|
||||
Test-Site-Data-Cleanup:
|
||||
@ -50,7 +47,7 @@ jobs:
|
||||
continue-on-error: true
|
||||
strategy:
|
||||
matrix:
|
||||
site: [CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL]
|
||||
site: [ CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL ]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@ -73,13 +70,10 @@ jobs:
|
||||
azure-tenant-id: ${{ secrets.TENANT_ID }}
|
||||
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
|
||||
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
|
||||
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
|
||||
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
|
||||
tenant-domain: ${{ vars.TENANT_DOMAIN }}
|
||||
|
||||
- name: Notify failure in teams
|
||||
if: failure()
|
||||
uses: ./.github/actions/teams-message
|
||||
with:
|
||||
msg: "[CORSO FAILED] ${{ vars[matrix.site] }} CI Cleanup"
|
||||
msg: "[FAILED] ${{ vars[matrix.site] }} CI Cleanup"
|
||||
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
|
||||
|
||||
3  .github/workflows/load_test.yml  vendored
@ -155,6 +155,3 @@ jobs:
|
||||
azure-tenant-id: ${{ secrets.TENANT_ID }}
|
||||
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
|
||||
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
|
||||
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
|
||||
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
|
||||
tenant-domain: ${{ vars.TENANT_DOMAIN }}
|
||||
|
||||
16  .github/workflows/longevity_test.yml  vendored
@ -6,7 +6,7 @@ on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
user:
|
||||
description: "User to run longevity test on"
|
||||
description: 'User to run longevity test on'
|
||||
|
||||
permissions:
|
||||
# required to retrieve AWS credentials
|
||||
@ -23,7 +23,7 @@ jobs:
|
||||
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
|
||||
|
||||
Longevity-Tests:
|
||||
needs: [SetM365App]
|
||||
needs: [ SetM365App ]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
@ -37,7 +37,7 @@ jobs:
|
||||
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-longevity.log
|
||||
RESTORE_DEST_PFX: Corso_Test_Longevity_
|
||||
TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || vars.CORSO_M365_TEST_USER_ID }}
|
||||
PREFIX: "longevity"
|
||||
PREFIX: 'longevity'
|
||||
|
||||
# Options for retention.
|
||||
RETENTION_MODE: GOVERNANCE
|
||||
@ -46,7 +46,7 @@ jobs:
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src
|
||||
|
||||
|
||||
############################################################################
|
||||
# setup
|
||||
steps:
|
||||
@ -78,7 +78,7 @@ jobs:
|
||||
|
||||
- run: go build -o corso
|
||||
timeout-minutes: 10
|
||||
|
||||
|
||||
- run: mkdir ${CORSO_LOG_DIR}
|
||||
|
||||
# Use shorter-lived credentials obtained from assume-role since these
|
||||
@ -163,7 +163,7 @@ jobs:
|
||||
|
||||
data=$( echo $resultjson | jq -r '.[0] | .id' )
|
||||
echo result=$data >> $GITHUB_OUTPUT
|
||||
|
||||
|
||||
##########################################################################
|
||||
# Onedrive
|
||||
|
||||
@ -328,7 +328,7 @@ jobs:
|
||||
--hide-progress \
|
||||
--force \
|
||||
--json \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/maintenance_metadata.txt
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/maintenance_metadata.txt
|
||||
|
||||
- name: Maintenance test Weekly
|
||||
id: maintenance-test-weekly
|
||||
@ -392,5 +392,5 @@ jobs:
|
||||
if: failure()
|
||||
uses: ./.github/actions/teams-message
|
||||
with:
|
||||
msg: "[CORSO FAILED] Longevity Test"
|
||||
msg: "[FAILED] Longevity Test"
|
||||
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
|
||||
|
||||
8  .github/workflows/nightly_test.yml  vendored
@ -48,7 +48,7 @@ jobs:
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
|
||||
Test-Suite-Trusted:
|
||||
needs: [Checkout, SetM365App]
|
||||
needs: [ Checkout, SetM365App]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
@ -100,9 +100,9 @@ jobs:
|
||||
-timeout 2h \
|
||||
./... 2>&1 | tee ./testlog/gotest-nightly.log | gotestfmt -hide successful-tests
|
||||
|
||||
##########################################################################################################################################
|
||||
##########################################################################################################################################
|
||||
|
||||
# Logging & Notifications
|
||||
# Logging & Notifications
|
||||
|
||||
# Upload the original go test output as an artifact for later review.
|
||||
- name: Upload test log
|
||||
@ -118,5 +118,5 @@ jobs:
|
||||
if: failure()
|
||||
uses: ./.github/actions/teams-message
|
||||
with:
|
||||
msg: "[COROS FAILED] Nightly Checks"
|
||||
msg: "[FAILED] Nightly Checks"
|
||||
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
|
||||
|
||||
96  .github/workflows/sanity-test.yaml  vendored
@ -6,7 +6,7 @@ on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
user:
|
||||
description: "User to run sanity test on"
|
||||
description: 'User to run sanity test on'
|
||||
|
||||
permissions:
|
||||
# required to retrieve AWS credentials
|
||||
@ -23,7 +23,7 @@ jobs:
|
||||
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
|
||||
|
||||
Sanity-Tests:
|
||||
needs: [SetM365App]
|
||||
needs: [ SetM365App ]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
@ -43,11 +43,12 @@ jobs:
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src
|
||||
|
||||
##########################################################################################################################################
|
||||
|
||||
##########################################################################################################################################
|
||||
|
||||
# setup
|
||||
# setup
|
||||
steps:
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Golang with cache
|
||||
@ -63,9 +64,9 @@ jobs:
|
||||
|
||||
- run: mkdir ${CORSO_LOG_DIR}
|
||||
|
||||
##########################################################################################################################################
|
||||
##########################################################################################################################################
|
||||
|
||||
# Pre-Run cleanup
|
||||
# Pre-Run cleanup
|
||||
|
||||
# unlike CI tests, sanity tests are not expected to run concurrently.
|
||||
# however, the sanity yaml concurrency is set to a maximum of 1 run, preferring
|
||||
@ -90,9 +91,6 @@ jobs:
|
||||
azure-tenant-id: ${{ secrets.TENANT_ID }}
|
||||
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
|
||||
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
|
||||
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
|
||||
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
|
||||
tenant-domain: ${{ vars.TENANT_DOMAIN }}
|
||||
|
||||
- name: Purge CI-Produced Folders for Sites
|
||||
timeout-minutes: 30
|
||||
@ -101,20 +99,17 @@ jobs:
|
||||
with:
|
||||
site: ${{ vars.CORSO_M365_TEST_SITE_URL }}
|
||||
folder-prefix: ${{ env.RESTORE_DEST_PFX }}
|
||||
libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
|
||||
libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
|
||||
older-than: ${{ env.NOW }}
|
||||
azure-client-id: ${{ secrets[needs.SetM365App.outputs.client_id_env] }}
|
||||
azure-client-secret: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }}
|
||||
azure-tenant-id: ${{ secrets.TENANT_ID }}
|
||||
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
|
||||
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
|
||||
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
|
||||
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
|
||||
tenant-domain: ${{ vars.TENANT_DOMAIN }}
|
||||
|
||||
##########################################################################################################################################
|
||||
##########################################################################################################################################
|
||||
|
||||
# Repository commands
|
||||
# Repository commands
|
||||
|
||||
- name: Version Test
|
||||
timeout-minutes: 10
|
||||
@ -174,9 +169,9 @@ jobs:
|
||||
--mode complete \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-maintenance.log
|
||||
|
||||
##########################################################################################################################################
|
||||
##########################################################################################################################################
|
||||
|
||||
# Exchange
|
||||
# Exchange
|
||||
|
||||
# generate new entries to roll into the next load test
|
||||
# only runs if the test was successful
|
||||
@ -198,8 +193,8 @@ jobs:
|
||||
service: exchange
|
||||
kind: first-backup
|
||||
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
|
||||
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
|
||||
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
|
||||
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
|
||||
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
|
||||
log-dir: ${{ env.CORSO_LOG_DIR }}
|
||||
with-export: true
|
||||
|
||||
@ -211,8 +206,8 @@ jobs:
|
||||
service: exchange
|
||||
kind: incremental
|
||||
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
|
||||
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
|
||||
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
|
||||
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
|
||||
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
|
||||
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
|
||||
log-dir: ${{ env.CORSO_LOG_DIR }}
|
||||
with-export: true
|
||||
@ -225,8 +220,8 @@ jobs:
|
||||
service: exchange
|
||||
kind: non-delta
|
||||
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email" --disable-delta'
|
||||
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
|
||||
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
|
||||
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
|
||||
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
|
||||
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
|
||||
log-dir: ${{ env.CORSO_LOG_DIR }}
|
||||
with-export: true
|
||||
@ -239,15 +234,16 @@ jobs:
|
||||
service: exchange
|
||||
kind: non-delta-incremental
|
||||
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
|
||||
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
|
||||
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
|
||||
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
|
||||
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
|
||||
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
|
||||
log-dir: ${{ env.CORSO_LOG_DIR }}
|
||||
with-export: true
|
||||
|
||||
##########################################################################################################################################
|
||||
|
||||
# Onedrive
|
||||
##########################################################################################################################################
|
||||
|
||||
# Onedrive
|
||||
|
||||
# generate new entries for test
|
||||
- name: OneDrive - Create new data
|
||||
@ -274,8 +270,8 @@ jobs:
|
||||
service: onedrive
|
||||
kind: first-backup
|
||||
backup-args: '--user "${{ env.TEST_USER }}"'
|
||||
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
|
||||
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
|
||||
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
|
||||
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
|
||||
log-dir: ${{ env.CORSO_LOG_DIR }}
|
||||
with-export: true
|
||||
|
||||
@ -299,14 +295,14 @@ jobs:
|
||||
service: onedrive
|
||||
kind: incremental
|
||||
backup-args: '--user "${{ env.TEST_USER }}"'
|
||||
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
|
||||
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
|
||||
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
|
||||
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
|
||||
log-dir: ${{ env.CORSO_LOG_DIR }}
|
||||
with-export: true
|
||||
|
||||
##########################################################################################################################################
|
||||
##########################################################################################################################################
|
||||
|
||||
# Sharepoint Library
|
||||
# Sharepoint Library
|
||||
|
||||
# generate new entries for test
|
||||
- name: SharePoint - Create new data
|
||||
@ -334,8 +330,8 @@ jobs:
|
||||
service: sharepoint
|
||||
kind: first-backup
|
||||
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
|
||||
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
|
||||
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
|
||||
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
|
||||
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
|
||||
log-dir: ${{ env.CORSO_LOG_DIR }}
|
||||
with-export: true
|
||||
category: libraries
|
||||
@ -361,15 +357,15 @@ jobs:
|
||||
service: sharepoint
|
||||
kind: incremental
|
||||
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
|
||||
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
|
||||
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
|
||||
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
|
||||
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
|
||||
log-dir: ${{ env.CORSO_LOG_DIR }}
|
||||
with-export: true
|
||||
category: libraries
|
||||
|
||||
##########################################################################################################################################
|
||||
##########################################################################################################################################
|
||||
|
||||
# Sharepoint Lists
|
||||
# Sharepoint Lists
|
||||
|
||||
# generate new entries for test
|
||||
# The `awk | tr | sed` command chain is used to get a comma separated list of SharePoint list names.
|
||||
@ -422,7 +418,7 @@ jobs:
|
||||
working-directory: ./src/cmd/factory
|
||||
run: |
|
||||
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
|
||||
|
||||
|
||||
go run . sharepoint lists \
|
||||
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
|
||||
--user ${{ env.TEST_USER }} \
|
||||
@ -458,9 +454,9 @@ jobs:
|
||||
category: lists
|
||||
on-collision: copy
|
||||
|
||||
##########################################################################################################################################
|
||||
##########################################################################################################################################
|
||||
|
||||
# Groups and Teams
|
||||
# Groups and Teams
|
||||
|
||||
# generate new entries for test
|
||||
- name: Groups - Create new data
|
||||
@ -487,8 +483,8 @@ jobs:
|
||||
with:
|
||||
service: groups
|
||||
kind: first-backup
|
||||
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
|
||||
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
|
||||
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}"'
|
||||
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
|
||||
log-dir: ${{ env.CORSO_LOG_DIR }}
|
||||
with-export: true
|
||||
|
||||
@ -512,15 +508,15 @@ jobs:
|
||||
with:
|
||||
service: groups
|
||||
kind: incremental
|
||||
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
|
||||
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}"'
|
||||
restore-args: '--site "${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }}" --folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
|
||||
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
|
||||
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
|
||||
log-dir: ${{ env.CORSO_LOG_DIR }}
|
||||
with-export: true
|
||||
|
||||
##########################################################################################################################################
|
||||
##########################################################################################################################################
|
||||
|
||||
# Logging & Notifications
|
||||
# Logging & Notifications
|
||||
|
||||
# Upload the original go test output as an artifact for later review.
|
||||
- name: Upload test log
|
||||
@ -536,5 +532,5 @@ jobs:
|
||||
if: failure()
|
||||
uses: ./.github/actions/teams-message
|
||||
with:
|
||||
msg: "[CORSO FAILED] Sanity Tests"
|
||||
msg: "[FAILED] Sanity Tests"
|
||||
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
|
||||
|
||||
@ -11,7 +11,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
||||
- Emails attached within other emails are now correctly exported
|
||||
- Gracefully handle email and post attachments without name when exporting to eml
|
||||
- Use correct timezone for event start and end times in Exchange exports (helps fix issues in relative recurrence patterns)
|
||||
- Fixed an issue causing exports dealing with calendar data to have high memory usage
|
||||
|
||||
## [v0.19.0] (beta) - 2024-02-06
|
||||
|
||||
@ -30,6 +29,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
||||
- Contacts in-place restore failed if the restore destination was empty.
|
||||
- Link shares with external users are now backed up and restored as expected
|
||||
- Ensure persistent repo config is populated on repo init if repo init failed partway through during the previous init attempt.
|
||||
- User-not-found and mailbox-not-found errors no longer collide when checking backup capacity for mailboxes.
|
||||
|
||||
### Changed
|
||||
- When running `backup details` on an empty backup returns a more helpful error message.
|
||||
|
||||
@ -1,6 +1,3 @@
|
||||
> [!NOTE]
|
||||
> **The Corso project is no longer actively maintained and has been archived**.
|
||||
|
||||
<p align="center">
|
||||
<img src="https://github.com/alcionai/corso/blob/main/website/static/img/corso_logo.svg?raw=true" alt="Corso Logo" width="100" />
|
||||
</p>
|
||||
|
||||
@ -151,10 +151,7 @@ func runExchangeBackupCategoryTest(suite *BackupExchangeE2ESuite, category path.
|
||||
t.Log("backup results", result)
|
||||
|
||||
// As an offhand check: the result should contain the m365 user's email.
|
||||
assert.Contains(
|
||||
t,
|
||||
strings.ToLower(result),
|
||||
strings.ToLower(suite.m365.User.Provider.Name()))
|
||||
assert.Contains(t, result, suite.m365.User.Provider.Name())
|
||||
}
|
||||
|
||||
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_ServiceNotEnabled_email() {
|
||||
@ -187,10 +184,7 @@ func runExchangeBackupServiceNotEnabledTest(suite *BackupExchangeE2ESuite, categ
|
||||
t.Log("backup results", result)
|
||||
|
||||
// As an offhand check: the result should contain the m365 user's email.
|
||||
assert.Contains(
|
||||
t,
|
||||
strings.ToLower(result),
|
||||
strings.ToLower(suite.m365.User.Provider.Name()))
|
||||
assert.Contains(t, result, suite.m365.User.Provider.Name())
|
||||
}
|
||||
|
||||
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_userNotFound_email() {
|
||||
@ -289,10 +283,7 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_fromConfigFile() {
|
||||
t.Log("backup results", result)
|
||||
|
||||
// As an offhand check: the result should contain the m365 user's email.
|
||||
assert.Contains(
|
||||
t,
|
||||
strings.ToLower(result),
|
||||
strings.ToLower(suite.m365.User.Provider.Name()))
|
||||
assert.Contains(t, result, suite.m365.User.Provider.Name())
|
||||
}
|
||||
|
||||
// AWS flags
|
||||
|
||||
@ -114,8 +114,6 @@ func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_channelMessages() {
|
||||
}
|
||||
|
||||
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_conversations() {
|
||||
// skip
|
||||
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
|
||||
runGroupsBackupCategoryTest(suite, flags.DataConversations)
|
||||
}
|
||||
|
||||
@ -219,9 +217,6 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAzureClientIDFlag()
|
||||
}
|
||||
|
||||
func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_fromConfigFile() {
|
||||
// Skip
|
||||
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
|
||||
|
||||
t := suite.T()
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
@ -305,10 +300,7 @@ func (suite *PreparedBackupGroupsE2ESuite) SetupSuite() {
|
||||
ins = idname.NewCache(map[string]string{suite.m365.Group.ID: suite.m365.Group.ID})
|
||||
cats = []path.CategoryType{
|
||||
path.ChannelMessagesCategory,
|
||||
// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
|
||||
// odata.NextLink which causes an infinite loop during paging. Disabling conversations tests while
|
||||
// we go fix the group mailbox.
|
||||
// path.ConversationPostsCategory,
|
||||
path.ConversationPostsCategory,
|
||||
path.LibrariesCategory,
|
||||
}
|
||||
)
|
||||
@ -462,8 +454,6 @@ func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_channelMessages(
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_conversations() {
|
||||
// skip
|
||||
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
|
||||
runGroupsDetailsCmdTest(suite, path.ConversationPostsCategory)
|
||||
}
|
||||
|
||||
|
||||
@ -6,6 +6,12 @@ Param (
|
||||
[Parameter(Mandatory = $False, HelpMessage = "Site for which to delete folders in SharePoint")]
|
||||
[String]$Site,
|
||||
|
||||
[Parameter(Mandatory = $False, HelpMessage = "Exchange Admin email")]
|
||||
[String]$AdminUser = $ENV:M365_TENANT_ADMIN_USER,
|
||||
|
||||
[Parameter(Mandatory = $False, HelpMessage = "Exchange Admin password")]
|
||||
[String]$AdminPwd = $ENV:M365_TENANT_ADMIN_PASSWORD,
|
||||
|
||||
[Parameter(Mandatory = $False, HelpMessage = "Document library root. Can add multiple comma-separated values")]
|
||||
[String[]]$LibraryNameList = @(),
|
||||
|
||||
@ -16,16 +22,7 @@ Param (
|
||||
[String[]]$FolderPrefixPurgeList,
|
||||
|
||||
[Parameter(Mandatory = $False, HelpMessage = "Delete document libraries with this prefix")]
|
||||
[String[]]$LibraryPrefixDeleteList = @(),
|
||||
|
||||
[Parameter(Mandatory = $False, HelpMessage = "Tenant domain")]
|
||||
[String]$TenantDomain = $ENV:TENANT_DOMAIN,
|
||||
|
||||
[Parameter(Mandatory = $False, HelpMessage = "Azure ClientId")]
|
||||
[String]$ClientId = $ENV:AZURE_CLIENT_ID,
|
||||
|
||||
[Parameter(Mandatory = $False, HelpMessage = "Azure AppCert")]
|
||||
[String]$AppCert = $ENV:AZURE_APP_CERT
|
||||
[String[]]$LibraryPrefixDeleteList = @()
|
||||
)
|
||||
|
||||
Set-StrictMode -Version 2.0
|
||||
@ -40,7 +37,7 @@ function Get-TimestampFromFolderName {
|
||||
|
||||
$name = $folder.Name
|
||||
|
||||
#fallback on folder create time
|
||||
#fallback on folder create time
|
||||
[datetime]$timestamp = $folder.TimeCreated
|
||||
|
||||
try {
|
||||
@ -69,7 +66,7 @@ function Get-TimestampFromListName {
|
||||
|
||||
$name = $list.Title
|
||||
|
||||
#fallback on list create time
|
||||
#fallback on list create time
|
||||
[datetime]$timestamp = $list.LastItemUserModifiedDate
|
||||
|
||||
try {
|
||||
@ -109,9 +106,8 @@ function Purge-Library {
|
||||
Write-Host "`nPurging library: $LibraryName"
|
||||
|
||||
$foldersToPurge = @()
|
||||
$folders = Get-PnPFolderItem -FolderSiteRelativeUrl $LibraryName -ItemType Folder
|
||||
$folders = Get-PnPFolderItem -FolderSiteRelativeUrl $LibraryName -ItemType Folder
|
||||
|
||||
Write-Host "`nFolders: $folders"
|
||||
foreach ($f in $folders) {
|
||||
$folderName = $f.Name
|
||||
$createTime = Get-TimestampFromFolderName -Folder $f
|
||||
@ -163,7 +159,7 @@ function Delete-LibraryByPrefix {
|
||||
Write-Host "`nDeleting library: $LibraryNamePrefix"
|
||||
|
||||
$listsToDelete = @()
|
||||
$lists = Get-PnPList
|
||||
$lists = Get-PnPList
|
||||
|
||||
foreach ($l in $lists) {
|
||||
$listName = $l.Title
|
||||
@ -187,7 +183,7 @@ function Delete-LibraryByPrefix {
|
||||
Write-Host "Deleting list: "$l.Title
|
||||
try {
|
||||
$listInfo = Get-PnPList -Identity $l.Id | Select-Object -Property Hidden
|
||||
|
||||
|
||||
# Check if the 'hidden' property is true
|
||||
if ($listInfo.Hidden) {
|
||||
Write-Host "List: $($l.Title) is hidden. Skipping..."
|
||||
@ -213,8 +209,8 @@ if (-not (Get-Module -ListAvailable -Name PnP.PowerShell)) {
|
||||
}
|
||||
|
||||
|
||||
if ([string]::IsNullOrEmpty($ClientId) -or [string]::IsNullOrEmpty($AppCert)) {
|
||||
Write-Host "ClientId and AppCert required as arguments or environment variables."
|
||||
if ([string]::IsNullOrEmpty($AdminUser) -or [string]::IsNullOrEmpty($AdminPwd)) {
|
||||
Write-Host "Admin user name and password required as arguments or environment variables."
|
||||
Exit
|
||||
}
|
||||
|
||||
@ -255,8 +251,12 @@ else {
|
||||
Exit
|
||||
}
|
||||
|
||||
|
||||
$password = convertto-securestring -String $AdminPwd -AsPlainText -Force
|
||||
$cred = New-Object -TypeName System.Management.Automation.PSCredential -ArgumentList $AdminUser, $password
|
||||
|
||||
Write-Host "`nAuthenticating and connecting to $SiteUrl"
|
||||
Connect-PnPOnline -Url $siteUrl -ClientId $ClientId -CertificateBase64Encoded $AppCert -Tenant $TenantDomain
|
||||
Connect-PnPOnline -Url $siteUrl -Credential $cred
|
||||
Write-Host "Connected to $siteUrl`n"
|
||||
|
||||
# ensure that there are no unexpanded entries in the list of parameters
|
||||
|
||||
@ -5,7 +5,6 @@ import (
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/models"
|
||||
"golang.org/x/exp/slices"
|
||||
|
||||
"github.com/alcionai/corso/src/cmd/sanity_test/common"
|
||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||
@ -21,20 +20,19 @@ const (
|
||||
// this increases the chance that we'll run into a race collision with
|
||||
// the cleanup script. Sometimes that's okay (deleting old data that
|
||||
// isn't scrutinized in the test), other times it's not. We mark whether
|
||||
// that's okay to do or not by specifying the folders being
|
||||
// scrutinized for the test. Any errors within those folders should cause
|
||||
// a fatal exit. Errors outside of those folders get ignored.
|
||||
// that's okay to do or not by specifying the folder that's being
|
||||
// scrutinized for the test. Any errors within that folder should cause
|
||||
// a fatal exit. Errors outside of that folder get ignored.
|
||||
//
|
||||
// since we're using folder names, mustPopulateFolders will
|
||||
// since we're using folder names, requireNoErrorsWithinFolderName will
|
||||
// work best (ie: have the fewest collisions/side-effects) if the folder
|
||||
// names are very specific. Standard sanity tests should include timestamps,
|
||||
// name is very specific. Standard sanity tests should include timestamps,
|
||||
// which should help ensure that. Be warned if you try to use it with
|
||||
// a more generic name: unintended effects could occur.
|
||||
func populateSanitree(
|
||||
ctx context.Context,
|
||||
ac api.Client,
|
||||
driveID string,
|
||||
mustPopulateFolders []string,
|
||||
driveID, requireNoErrorsWithinFolderName string,
|
||||
) *common.Sanitree[models.DriveItemable, models.DriveItemable] {
|
||||
common.Infof(ctx, "building sanitree for drive: %s", driveID)
|
||||
|
||||
@ -58,8 +56,8 @@ func populateSanitree(
|
||||
ac,
|
||||
driveID,
|
||||
stree.Name+"/",
|
||||
mustPopulateFolders,
|
||||
slices.Contains(mustPopulateFolders, rootName),
|
||||
requireNoErrorsWithinFolderName,
|
||||
rootName == requireNoErrorsWithinFolderName,
|
||||
stree)
|
||||
|
||||
return stree
|
||||
@ -68,9 +66,7 @@ func populateSanitree(
|
||||
func recursivelyBuildTree(
|
||||
ctx context.Context,
|
||||
ac api.Client,
|
||||
driveID string,
|
||||
location string,
|
||||
mustPopulateFolders []string,
|
||||
driveID, location, requireNoErrorsWithinFolderName string,
|
||||
isChildOfFolderRequiringNoErrors bool,
|
||||
stree *common.Sanitree[models.DriveItemable, models.DriveItemable],
|
||||
) {
|
||||
@ -84,9 +80,9 @@ func recursivelyBuildTree(
|
||||
|
||||
common.Infof(
|
||||
ctx,
|
||||
"ignoring error getting children in directory %q because it is not within directory set %v\nerror: %s\n%+v",
|
||||
"ignoring error getting children in directory %q because it is not within directory %q\nerror: %s\n%+v",
|
||||
location,
|
||||
mustPopulateFolders,
|
||||
requireNoErrorsWithinFolderName,
|
||||
err.Error(),
|
||||
clues.ToCore(err))
|
||||
|
||||
@ -103,12 +99,11 @@ func recursivelyBuildTree(
|
||||
// currently we don't restore blank folders.
|
||||
// skip permission check for empty folders
|
||||
if ptr.Val(driveItem.GetFolder().GetChildCount()) == 0 {
|
||||
common.Infof(ctx, "skipped empty folder: %s%s", location, itemName)
|
||||
common.Infof(ctx, "skipped empty folder: %s/%s", location, itemName)
|
||||
continue
|
||||
}
|
||||
|
||||
cannotAllowErrors := isChildOfFolderRequiringNoErrors ||
|
||||
slices.Contains(mustPopulateFolders, itemName)
|
||||
cannotAllowErrors := isChildOfFolderRequiringNoErrors || itemName == requireNoErrorsWithinFolderName
|
||||
|
||||
branch := &common.Sanitree[models.DriveItemable, models.DriveItemable]{
|
||||
Parent: stree,
|
||||
@ -129,7 +124,7 @@ func recursivelyBuildTree(
|
||||
ac,
|
||||
driveID,
|
||||
location+branch.Name+"/",
|
||||
mustPopulateFolders,
|
||||
requireNoErrorsWithinFolderName,
|
||||
cannotAllowErrors,
|
||||
branch)
|
||||
}
|
||||
|
||||
@ -32,7 +32,7 @@ func CheckExport(
|
||||
ctx,
|
||||
ac,
|
||||
driveID,
|
||||
[]string{envs.SourceContainer})
|
||||
envs.RestoreContainer)
|
||||
|
||||
sourceTree, ok := root.Children[envs.SourceContainer]
|
||||
common.Assert(
|
||||
|
||||
@ -45,14 +45,7 @@ func CheckRestoration(
|
||||
"drive_id", driveID,
|
||||
"drive_name", driveName)
|
||||
|
||||
root := populateSanitree(
|
||||
ctx,
|
||||
ac,
|
||||
driveID,
|
||||
[]string{
|
||||
envs.SourceContainer,
|
||||
envs.RestoreContainer,
|
||||
})
|
||||
root := populateSanitree(ctx, ac, driveID, envs.RestoreContainer)
|
||||
|
||||
sourceTree, ok := root.Children[envs.SourceContainer]
|
||||
common.Assert(
|
||||
|
||||
@ -3,7 +3,7 @@ module github.com/alcionai/corso/src
|
||||
go 1.21
|
||||
|
||||
replace (
|
||||
github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4
|
||||
github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20240116215733-ec3d100029fe
|
||||
|
||||
// Alcion fork removes the validation of email addresses as we might get incomplete email addresses
|
||||
github.com/xhit/go-simple-mail/v2 v2.16.0 => github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a
|
||||
|
||||
@ -23,8 +23,8 @@ github.com/alcionai/clues v0.0.0-20240125221452-9fc7746dd20c h1:QtARFaqYKtGjmEej
|
||||
github.com/alcionai/clues v0.0.0-20240125221452-9fc7746dd20c/go.mod h1:1YJwJy3W6GGsC2UiDAEWABUjgvT8OZHjKs8OoaXeKbw=
|
||||
github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a h1:4nhM0NM1qtUT1s55rQ+D0Xw1Re5mIU9/crjEl6KdE+k=
|
||||
github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a/go.mod h1:b7P5ygho6SYE+VIqpxA6QkYfv4teeyG4MKqB3utRu98=
|
||||
github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4 h1:3YZ70H3mkUgwiHLiNvukrqh2awRgfl1RAkbV0IoUqqk=
|
||||
github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4/go.mod h1:QFRSOUQzZfKE3hKVBHP7hxOn5WyrEmdBtfN5wkib/eA=
|
||||
github.com/alcionai/kopia v0.12.2-0.20240116215733-ec3d100029fe h1:nLS5pxhm04Jz4+qeipNlxdyPGxqNWpBu8UGkRYpWoIw=
|
||||
github.com/alcionai/kopia v0.12.2-0.20240116215733-ec3d100029fe/go.mod h1:QFRSOUQzZfKE3hKVBHP7hxOn5WyrEmdBtfN5wkib/eA=
|
||||
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
||||
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
||||
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
|
||||
|
||||
@ -99,8 +99,6 @@ func ZipExportCollection(
|
||||
writer.CloseWithError(clues.Wrap(err, "writing zip entry").With("name", name).With("id", item.ID))
|
||||
return
|
||||
}
|
||||
|
||||
item.Body.Close()
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -1,13 +1,10 @@
|
||||
package jwt
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
jwt "github.com/golang-jwt/jwt/v5"
|
||||
|
||||
"github.com/alcionai/corso/src/pkg/logger"
|
||||
)
|
||||
|
||||
// IsJWTExpired checks if the JWT token is past expiry by analyzing the
|
||||
@ -40,51 +37,3 @@ func IsJWTExpired(
|
||||
|
||||
return expired, nil
|
||||
}
|
||||
|
||||
// GetJWTLifetime returns the issued at(iat) and expiration time(exp) claims
|
||||
// present in the JWT token. These are optional claims and may not be present
|
||||
// in the token. Absence is not reported as an error.
|
||||
//
|
||||
// An error is returned if the supplied token is malformed. Times are returned
|
||||
// in UTC to have parity with graph responses.
|
||||
func GetJWTLifetime(
|
||||
ctx context.Context,
|
||||
rawToken string,
|
||||
) (time.Time, time.Time, error) {
|
||||
var (
|
||||
issuedAt time.Time
|
||||
expiresAt time.Time
|
||||
)
|
||||
|
||||
p := jwt.NewParser()
|
||||
|
||||
token, _, err := p.ParseUnverified(rawToken, &jwt.RegisteredClaims{})
|
||||
if err != nil {
|
||||
logger.CtxErr(ctx, err).Debug("parsing jwt token")
|
||||
return time.Time{}, time.Time{}, clues.Wrap(err, "invalid jwt")
|
||||
}
|
||||
|
||||
exp, err := token.Claims.GetExpirationTime()
|
||||
if err != nil {
|
||||
logger.CtxErr(ctx, err).Debug("extracting exp claim")
|
||||
return time.Time{}, time.Time{}, clues.Wrap(err, "getting token expiry time")
|
||||
}
|
||||
|
||||
iat, err := token.Claims.GetIssuedAt()
|
||||
if err != nil {
|
||||
logger.CtxErr(ctx, err).Debug("extracting iat claim")
|
||||
return time.Time{}, time.Time{}, clues.Wrap(err, "getting token issued at time")
|
||||
}
|
||||
|
||||
// Absence of iat or exp claims is not reported as an error by jwt library as these
|
||||
// are optional as per spec.
|
||||
if iat != nil {
|
||||
issuedAt = iat.UTC()
|
||||
}
|
||||
|
||||
if exp != nil {
|
||||
expiresAt = exp.UTC()
|
||||
}
|
||||
|
||||
return issuedAt, expiresAt, nil
|
||||
}
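A hedged usage sketch, not part of this diff: one way a caller inside this package might consume GetJWTLifetime. The refresh threshold and the decision logic are illustrative assumptions; context, time, and clues are already imported in this file.

// shouldRefresh is a hypothetical helper showing how the iat/exp values
// returned above could be used; it is not repository code.
func shouldRefresh(ctx context.Context, rawToken string) (bool, error) {
	_, exp, err := GetJWTLifetime(ctx, rawToken)
	if err != nil {
		return false, clues.Wrap(err, "reading token lifetime")
	}
	// exp is the zero time when the token carries no exp claim;
	// treat that as "no refresh needed" rather than an error.
	if exp.IsZero() {
		return false, nil
	}
	// refresh shortly before expiry; the 5-minute margin is an assumption.
	return time.Until(exp) < 5*time.Minute, nil
}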
|
||||
|
||||
@ -113,134 +113,3 @@ func (suite *JWTUnitSuite) TestIsJWTExpired() {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *JWTUnitSuite) TestGetJWTLifetime() {
|
||||
// Set of time values to be used in the tests.
|
||||
// Truncate to seconds for comparisons since jwt tokens have second
|
||||
// level precision.
|
||||
idToTime := map[string]time.Time{
|
||||
"T0": time.Now().UTC().Add(-time.Hour).Truncate(time.Second),
|
||||
"T1": time.Now().UTC().Truncate(time.Second),
|
||||
"T2": time.Now().UTC().Add(time.Hour).Truncate(time.Second),
|
||||
}
|
||||
|
||||
table := []struct {
|
||||
name string
|
||||
getToken func() (string, error)
|
||||
expectFunc func(t *testing.T, iat time.Time, exp time.Time)
|
||||
expectErr assert.ErrorAssertionFunc
|
||||
}{
|
||||
{
|
||||
name: "alive token",
|
||||
getToken: func() (string, error) {
|
||||
return createJWTToken(
|
||||
jwt.RegisteredClaims{
|
||||
IssuedAt: jwt.NewNumericDate(idToTime["T0"]),
|
||||
ExpiresAt: jwt.NewNumericDate(idToTime["T1"]),
|
||||
})
|
||||
},
|
||||
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
|
||||
assert.Equal(t, idToTime["T0"], iat)
|
||||
assert.Equal(t, idToTime["T1"], exp)
|
||||
},
|
||||
expectErr: assert.NoError,
|
||||
},
|
||||
// Test with a token which is not generated using the go-jwt lib.
|
||||
// This is a long lived token which is valid for 100 years.
|
||||
{
|
||||
name: "alive raw token with iat and exp claims",
|
||||
getToken: func() (string, error) {
|
||||
return rawToken, nil
|
||||
},
|
||||
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
|
||||
assert.Less(t, iat, time.Now(), "iat should be in the past")
|
||||
assert.Greater(t, exp, time.Now(), "exp should be in the future")
|
||||
},
|
||||
expectErr: assert.NoError,
|
||||
},
|
||||
// Regardless of whether the token is expired or not, we should be able to
|
||||
// extract the iat and exp claims from it without error.
|
||||
{
|
||||
name: "expired token",
|
||||
getToken: func() (string, error) {
|
||||
return createJWTToken(
|
||||
jwt.RegisteredClaims{
|
||||
IssuedAt: jwt.NewNumericDate(idToTime["T1"]),
|
||||
ExpiresAt: jwt.NewNumericDate(idToTime["T0"]),
|
||||
})
|
||||
},
|
||||
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
|
||||
assert.Equal(t, idToTime["T1"], iat)
|
||||
assert.Equal(t, idToTime["T0"], exp)
|
||||
},
|
||||
expectErr: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "missing iat claim",
|
||||
getToken: func() (string, error) {
|
||||
return createJWTToken(
|
||||
jwt.RegisteredClaims{
|
||||
ExpiresAt: jwt.NewNumericDate(idToTime["T2"]),
|
||||
})
|
||||
},
|
||||
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
|
||||
assert.Equal(t, time.Time{}, iat)
|
||||
assert.Equal(t, idToTime["T2"], exp)
|
||||
},
|
||||
expectErr: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "missing exp claim",
|
||||
getToken: func() (string, error) {
|
||||
return createJWTToken(
|
||||
jwt.RegisteredClaims{
|
||||
IssuedAt: jwt.NewNumericDate(idToTime["T0"]),
|
||||
})
|
||||
},
|
||||
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
|
||||
assert.Equal(t, idToTime["T0"], iat)
|
||||
assert.Equal(t, time.Time{}, exp)
|
||||
},
|
||||
expectErr: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "both claims missing",
|
||||
getToken: func() (string, error) {
|
||||
return createJWTToken(jwt.RegisteredClaims{})
|
||||
},
|
||||
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
|
||||
assert.Equal(t, time.Time{}, iat)
|
||||
assert.Equal(t, time.Time{}, exp)
|
||||
},
|
||||
expectErr: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "malformed token",
|
||||
getToken: func() (string, error) {
|
||||
return "header.claims.signature", nil
|
||||
},
|
||||
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
|
||||
assert.Equal(t, time.Time{}, iat)
|
||||
assert.Equal(t, time.Time{}, exp)
|
||||
},
|
||||
expectErr: assert.Error,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
token, err := test.getToken()
|
||||
require.NoError(t, err)
|
||||
|
||||
iat, exp, err := GetJWTLifetime(ctx, token)
|
||||
test.expectErr(t, err)
|
||||
|
||||
test.expectFunc(t, iat, exp)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@ -484,14 +484,7 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
|
||||
desc := replacer.Replace(description)
|
||||
iCalEvent.AddProperty("X-ALT-DESC", desc, ics.WithFmtType("text/html"))
|
||||
} else {
|
||||
// Disable auto wrap, causes huge memory spikes
|
||||
// https://github.com/jaytaylor/html2text/issues/48
|
||||
prettyTablesOptions := html2text.NewPrettyTablesOptions()
|
||||
prettyTablesOptions.AutoWrapText = false
|
||||
|
||||
stripped, err := html2text.FromString(
|
||||
description,
|
||||
html2text.Options{PrettyTables: true, PrettyTablesOptions: prettyTablesOptions})
|
||||
stripped, err := html2text.FromString(description, html2text.Options{PrettyTables: true})
|
||||
if err != nil {
|
||||
return clues.Wrap(err, "converting html to text").
|
||||
With("description_length", len(description))
|
||||
|
||||
@ -23,12 +23,13 @@ func IsServiceEnabled(
|
||||
) (bool, error) {
|
||||
_, err := gmi.GetMailInbox(ctx, resource)
|
||||
if err != nil {
|
||||
if err := api.EvaluateMailboxError(err); err != nil {
|
||||
ignorable := api.IsMailboxErrorIgnorable(err)
|
||||
if !ignorable {
|
||||
logger.CtxErr(ctx, err).Error("getting user's mail folder")
|
||||
return false, clues.Stack(err)
|
||||
}
|
||||
|
||||
logger.Ctx(ctx).Info("resource owner does not have a mailbox enabled")
|
||||
logger.CtxErr(ctx, err).Info("resource owner does not have a mailbox enabled")
|
||||
|
||||
return false, nil
|
||||
}
|
||||
@ -54,13 +55,13 @@ func GetMailboxInfo(
|
||||
// First check whether the user is able to access their inbox.
|
||||
inbox, err := gmb.GetMailInbox(ctx, userID)
|
||||
if err != nil {
|
||||
if err := api.EvaluateMailboxError(clues.Stack(err)); err != nil {
|
||||
ignorable := api.IsMailboxErrorIgnorable(clues.Stack(err))
|
||||
if !ignorable {
|
||||
logger.CtxErr(ctx, err).Error("getting user's mail folder")
|
||||
|
||||
return mi, err
|
||||
return mi, clues.Stack(err)
|
||||
}
|
||||
|
||||
logger.Ctx(ctx).Info("resource owner does not have a mailbox enabled")
|
||||
logger.CtxErr(ctx, err).Info("resource owner does not have a mailbox enabled")
|
||||
|
||||
mi.ErrGetMailBoxSetting = append(
|
||||
mi.ErrGetMailBoxSetting,
|
||||
|
||||
@ -305,10 +305,6 @@ func RunIncrementalDriveishBackupTest(
|
||||
itemsRead int
|
||||
itemsWritten int
|
||||
nonMetaItemsWritten int
|
||||
|
||||
// TODO: Temporary mechanism to skip permissions
|
||||
// related tests. Remove once we figure out the issue.
|
||||
skipChecks bool
|
||||
}{
|
||||
{
|
||||
name: "clean incremental, no changes",
|
||||
@ -357,7 +353,6 @@ func RunIncrementalDriveishBackupTest(
|
||||
itemsRead: 1, // .data file for newitem
|
||||
itemsWritten: 3, // .meta for newitem, .dirmeta for parent (.data is not written as it is not updated)
|
||||
nonMetaItemsWritten: 0, // none because the file is considered cached instead of written.
|
||||
skipChecks: true,
|
||||
},
|
||||
{
|
||||
name: "remove permission from new file",
|
||||
@ -377,7 +372,6 @@ func RunIncrementalDriveishBackupTest(
|
||||
itemsRead: 1, // .data file for newitem
|
||||
itemsWritten: 3, // .meta for newitem, .dirmeta for parent (.data is not written as it is not updated)
|
||||
nonMetaItemsWritten: 0, // none because the file is considered cached instead of written.
|
||||
skipChecks: true,
|
||||
},
|
||||
{
|
||||
name: "add permission to container",
|
||||
@ -398,7 +392,6 @@ func RunIncrementalDriveishBackupTest(
|
||||
itemsRead: 0,
|
||||
itemsWritten: 2, // .dirmeta for collection
|
||||
nonMetaItemsWritten: 0, // no files updated as update on container
|
||||
skipChecks: true,
|
||||
},
|
||||
{
|
||||
name: "remove permission from container",
|
||||
@ -419,7 +412,6 @@ func RunIncrementalDriveishBackupTest(
|
||||
itemsRead: 0,
|
||||
itemsWritten: 2, // .dirmeta for collection
|
||||
nonMetaItemsWritten: 0, // no files updated
|
||||
skipChecks: true,
|
||||
},
|
||||
{
|
||||
name: "update contents of a file",
|
||||
@ -749,11 +741,9 @@ func RunIncrementalDriveishBackupTest(
|
||||
assertReadWrite = assert.LessOrEqual
|
||||
}
|
||||
|
||||
if !test.skipChecks {
|
||||
assertReadWrite(t, expectWrites, incBO.Results.ItemsWritten, "incremental items written")
|
||||
assertReadWrite(t, expectNonMetaWrites, incBO.Results.NonMetaItemsWritten, "incremental non-meta items written")
|
||||
assertReadWrite(t, expectReads, incBO.Results.ItemsRead, "incremental items read")
|
||||
}
|
||||
assertReadWrite(t, expectWrites, incBO.Results.ItemsWritten, "incremental items written")
|
||||
assertReadWrite(t, expectNonMetaWrites, incBO.Results.NonMetaItemsWritten, "incremental non-meta items written")
|
||||
assertReadWrite(t, expectReads, incBO.Results.ItemsRead, "incremental items read")
|
||||
|
||||
assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(incBO.Errors.Failure()))
|
||||
assert.Empty(t, incBO.Errors.Recovered(), "incremental recoverable/iteration errors")
|
||||
|
||||
@ -175,7 +175,7 @@ func runGroupsIncrementalBackupTests(
|
||||
suite,
|
||||
opts,
|
||||
m365.Group.ID,
|
||||
m365.SecondaryGroup.ID, // more reliable than user
|
||||
m365.User.ID,
|
||||
path.GroupsService,
|
||||
path.LibrariesCategory,
|
||||
ic,
|
||||
@ -201,12 +201,8 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsBasic() {
|
||||
|
||||
sel.Include(
|
||||
selTD.GroupsBackupLibraryFolderScope(sel),
|
||||
selTD.GroupsBackupChannelScope(sel))
|
||||
|
||||
// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
|
||||
// odata.NextLink which causes an infinite loop during paging. Disabling conversations tests while
|
||||
// we go fix the group mailbox.
|
||||
// selTD.GroupsBackupConversationScope(sel))
|
||||
selTD.GroupsBackupChannelScope(sel),
|
||||
selTD.GroupsBackupConversationScope(sel))
|
||||
|
||||
bo, bod := PrepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter)
|
||||
defer bod.Close(t, ctx)
|
||||
@ -330,12 +326,8 @@ func (suite *GroupsBackupNightlyIntgSuite) TestBackup_Run_groupsVersion9MergeBas
|
||||
sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID})
|
||||
sel.Include(
|
||||
selTD.GroupsBackupLibraryFolderScope(sel),
|
||||
selTD.GroupsBackupChannelScope(sel))
|
||||
|
||||
// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
|
||||
// odata.NextLink which causes an infinite loop during paging. Disabling conv backups while
|
||||
// we go fix the group mailbox.
|
||||
// selTD.GroupsBackupConversationScope(sel))
|
||||
selTD.GroupsBackupChannelScope(sel),
|
||||
selTD.GroupsBackupConversationScope(sel))
|
||||
|
||||
RunMergeBaseGroupsUpdate(suite, sel.Selector, false)
|
||||
}
|
||||
@ -344,12 +336,8 @@ func (suite *GroupsBackupNightlyIntgSuite) TestBackup_Run_groupsVersion9AssistBa
|
||||
sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID})
|
||||
sel.Include(
|
||||
selTD.GroupsBackupLibraryFolderScope(sel),
|
||||
selTD.GroupsBackupChannelScope(sel))
|
||||
|
||||
// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
|
||||
// odata.NextLink which causes an infinite loop during paging. Disabling conv backups while
|
||||
// we go fix the group mailbox.
|
||||
// selTD.GroupsBackupConversationScope(sel))
|
||||
selTD.GroupsBackupChannelScope(sel),
|
||||
selTD.GroupsBackupConversationScope(sel))
|
||||
|
||||
RunDriveAssistBaseGroupsUpdate(suite, sel.Selector, false)
|
||||
}
|
||||
|
||||
@ -16,8 +16,8 @@ import "errors"
|
||||
//
|
||||
// 2. Maintain coarseness.
|
||||
// We won't need a core.Err version of every lower-level error. Try, where possible,
|
||||
// to group concepts into broad categories. Ex: prefer "resource not found" over
|
||||
// "user not found" or "site not found".
|
||||
// to group concepts into broad categories. Ex: prefer "not found" over "resource not
|
||||
// found", and "resource not found" over "user not found".
|
||||
//
|
||||
// 3. Always Stack/Wrap core.Errs. Only once.
|
||||
// `return core.ErrFoo` should be avoided. Also, if you're handling a error returned
|
||||
|
||||
@@ -32,9 +32,6 @@ func (suite *ConversationsPagerIntgSuite) SetupSuite() {
}

func (suite *ConversationsPagerIntgSuite) TestEnumerateConversations_withThreadsAndPosts() {
// Skip
suite.T().Skip("CorsoCITeam group mailbox backup is broken")

var (
t = suite.T()
ac = suite.its.ac.Conversations()
@@ -127,6 +127,12 @@ var (
// error categorization
// ---------------------------------------------------------------------------

// ErrResourceNotFound is a special case handler to differentiate from the
// more generic core.ErrNotFound. Two rules for usage, to preserve sanity:
// 1. it should get stacked on top of a clues.NotFound error.
// 2. it should not be investigated above the api layer.
var ErrResourceNotFound = clues.New("resource not found")

func stackWithCoreErr(ctx context.Context, err error, traceDepth int) error {
if err == nil {
return nil
@@ -140,8 +146,11 @@ func stackWithCoreErr(ctx context.Context, err error, traceDepth int) error {
err = clues.Stack(core.ErrAuthTokenExpired)
case isErrApplicationThrottled(ode, err):
err = clues.Stack(core.ErrApplicationThrottled, err)
case isErrUserNotFound(ode, err):
err = clues.Stack(core.ErrNotFound, err)
case isErrResourceNotFound(ode, err):
// stack both resourceNotFound and notFound to ensure some graph api
// internals can distinguish between the two cases (where possible).
// layers above the api should still handle only the core notFound.
err = clues.Stack(ErrResourceNotFound, core.ErrNotFound, err)
case isErrResourceLocked(ode, err):
err = clues.Stack(core.ErrResourceNotAccessible, err)
case isErrInsufficientAuthorization(ode, err):
@@ -204,7 +213,7 @@ func isErrNotFound(ode oDataErr, err error) bool {
notFound)
}

func isErrUserNotFound(ode oDataErr, err error) bool {
func isErrResourceNotFound(ode oDataErr, err error) bool {
if ode.hasErrorCode(err, RequestResourceNotFound, invalidUser) {
return true
}
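The stacking order above leaves both sentinels matchable; a small self-contained sketch of that behavior (local stand-ins for the repo's sentinels, not the actual packages):

package example

import (
	"errors"
	"fmt"

	"github.com/alcionai/clues"
)

var (
	errResourceNotFound = clues.New("resource not found") // stand-in for ErrResourceNotFound
	errNotFound         = clues.New("not found")          // stand-in for core.ErrNotFound
)

func main() {
	base := clues.New("graph: request resource not found")

	// mirror stackWithCoreErr: attach both sentinels in a single stack.
	err := clues.Stack(errResourceNotFound, errNotFound, base)

	// inside the api layer the specific sentinel can still be distinguished;
	// layers above it should only ever test the generic one.
	fmt.Println(errors.Is(err, errResourceNotFound)) // true
	fmt.Println(errors.Is(err, errNotFound))         // true
}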
@@ -701,48 +710,10 @@ func (ode oDataErr) errMessageMatchesAllFilters(err error, fs ...filters.Filter)
// ---------------------------------------------------------------------------
// other helpers
// ---------------------------------------------------------------------------
const (
// JWTQueryParam is a query param embed in graph download URLs which holds
// JWT token.
JWTQueryParam = "tempauth"
// base64 encoded json header. Contains {"alg":"HS256","typ":"JWT"}
//
// Hardcoding this instead of generating it every time on the fly.
// The algorithm doesn't matter as we are not verifying the token.
jwtHeader = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9"
)

func sanitizeToken(rawToken string) string {
segments := strings.Split(rawToken, ".")

// Check if the token has the old format, in which it has 3 segments and
// conforms to jwt spec. Format is seg1.seg2.seg3.
if len(segments) == 3 {
return rawToken
}

// Check if it is a msft proprietary token in which it has 4 segments and
// doesn't meet jwt spec. Format is v1.seg1.seg2.seg3. Return a token which
// meets jwt spec.
//
// In this proprietary token, there is no jwt header segment. Also, the claims
// section is split into first and segments. The first segment contains the
// `exp` claim that we are interested in.
//
// The second segment contains the rest of the claims, but likely encrypted.
// We don't need it so discard it. The last segment contains the signature which
// we don't care about either, as we are not verifying the token. So append it as is.
//
// It's okay if the sanitized token still doesn't meet jwt spec. It'll fail decoding
// later and we have fallbacks for that.
if len(segments) == 4 && segments[0] == "v1" {
return jwtHeader + "." + segments[1] + "." + segments[3]
}

// If MSFT change the token format again on us, just return empty string and let caller
// handle it as an error.
return ""
}
// JWTQueryParam is a query param embed in graph download URLs which holds
// JWT token.
const JWTQueryParam = "tempauth"

// IsURLExpired inspects the jwt token embed in the item download url
// and returns true if it is expired.
@@ -753,20 +724,12 @@ func IsURLExpired(
expiredErr error,
err error,
) {
ctx = clues.Add(ctx, "checked_url", urlStr)

// Extract the raw JWT string from the download url.
rawJWT, err := common.GetQueryParamFromURL(urlStr, JWTQueryParam)
if err != nil {
return nil, clues.WrapWC(ctx, err, "jwt query param not found")
}

// Token may have a proprietary format. Try to sanitize it to jwt format.
rawJWT = sanitizeToken(rawJWT)
if len(rawJWT) == 0 {
return nil, clues.WrapWC(ctx, err, "sanitizing jwt")
}

expired, err := jwt.IsJWTExpired(rawJWT)
if err != nil {
return nil, clues.WrapWC(ctx, err, "checking jwt expiry")
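For orientation, the expiry check that IsURLExpired performs on the `tempauth` token boils down to decoding the JWT payload and comparing its `exp` claim against the clock. A stdlib-only sketch of that idea (hypothetical helper, not the repo's implementation; it assumes a standard three-segment token with a numeric `exp` claim):

package example

import (
	"encoding/base64"
	"encoding/json"
	"errors"
	"net/url"
	"strings"
	"time"
)

// isDownloadURLExpired reads the "tempauth" query param from a graph
// download URL, decodes the JWT payload segment, and reports whether the
// "exp" claim is in the past.
func isDownloadURLExpired(rawURL string, now time.Time) (bool, error) {
	u, err := url.Parse(rawURL)
	if err != nil {
		return false, err
	}

	token := u.Query().Get("tempauth")
	segments := strings.Split(token, ".")
	if len(segments) != 3 {
		return false, errors.New("token is not a standard three-segment jwt")
	}

	payload, err := base64.RawURLEncoding.DecodeString(segments[1])
	if err != nil {
		return false, err
	}

	var claims struct {
		Exp int64 `json:"exp"`
	}
	if err := json.Unmarshal(payload, &claims); err != nil {
		return false, err
	}

	return now.After(time.Unix(claims.Exp, 0)), nil
}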
@@ -549,7 +549,7 @@ func (suite *GraphErrorsUnitSuite) TestIsErrUserNotFound() {
for _, test := range table {
suite.Run(test.name, func() {
ode := parseODataErr(test.err)
test.expect(suite.T(), isErrUserNotFound(ode, test.err))
test.expect(suite.T(), isErrResourceNotFound(ode, test.err))
})
}
}
@@ -1111,7 +1111,6 @@ func (suite *GraphErrorsUnitSuite) TestToErrByRespCode() {
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()

err := toErrByRespCode(parseODataErr(test.err), test.err)

if test.expectNoStack {
@@ -1122,3 +1121,96 @@ func (suite *GraphErrorsUnitSuite) TestToErrByRespCode() {
})
}
}

func (suite *GraphErrorsUnitSuite) TestIsErrItemAlreadyExists() {
table := []struct {
name string
err error
expect assert.BoolAssertionFunc
}{
{
name: "nil",
err: nil,
expect: assert.False,
},
{
name: "non-matching",
err: assert.AnError,
expect: assert.False,
},
{
name: "non-matching oDataErr",
err: graphTD.ODataErrWithMsg("InvalidRequest", "item already exists"),
expect: assert.False,
},
{
name: "matching oDataErr code",
err: graphTD.ODataInner(string(nameAlreadyExists)),
expect: assert.True,
},
}
for _, test := range table {
suite.Run(test.name, func() {
ode := parseODataErr(test.err)
test.expect(suite.T(), isErrItemAlreadyExists(ode, test.err))
})
}
}

func (suite *GraphErrorsUnitSuite) TestStackWithCoreErr() {
table := []struct {
name string
err error
expect []error
}{
{
name: "bad jwt",
err: graphTD.ODataErr(string(invalidAuthenticationToken)),
expect: []error{core.ErrAuthTokenExpired},
},
{
name: "throttled",
err: graphTD.ODataErr(string(ApplicationThrottled)),
expect: []error{core.ErrApplicationThrottled},
},
{
name: "user not found",
err: graphTD.ODataErrWithMsg(string(ResourceNotFound), "User not found"),
expect: []error{ErrResourceNotFound, core.ErrNotFound},
},
{
name: "resource locked",
err: graphTD.ODataErr(string(NotAllowed)),
expect: []error{core.ErrResourceNotAccessible},
},
{
name: "insufficient auth",
err: graphTD.ODataErr(string(AuthorizationRequestDenied)),
expect: []error{core.ErrInsufficientAuthorization},
},
{
name: "already exists",
err: graphTD.ODataInner(string(nameAlreadyExists)),
expect: []error{core.ErrAlreadyExists},
},
{
name: "not found",
err: graphTD.ODataErr(string(ItemNotFound)),
expect: []error{core.ErrNotFound},
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()

ctx, flush := tester.NewContext(t)
defer flush()

result := stackWithCoreErr(ctx, test.err, 1)

for _, ex := range test.expect {
assert.ErrorIs(t, result, ex, clues.ToCore(result))
}
})
}
}
@@ -146,7 +146,7 @@ func (hw httpWrapper) Request(

resp, err := hw.client.Do(req)
if err == nil {
logResp(ictx, resp, req)
logResp(ictx, resp)
return resp, nil
}
@@ -5,12 +5,9 @@ import (
"net/http"
"net/http/httputil"
"os"
"strings"
"time"

"github.com/alcionai/clues"

"github.com/alcionai/corso/src/internal/common/jwt"
"github.com/alcionai/corso/src/internal/common/pii"
"github.com/alcionai/corso/src/pkg/logger"
)
@@ -31,7 +28,7 @@ func shouldLogRespBody(resp *http.Response) bool {
resp.StatusCode > 399
}

func logResp(ctx context.Context, resp *http.Response, req *http.Request) {
func logResp(ctx context.Context, resp *http.Response) {
var (
log = logger.Ctx(ctx)
respClass = resp.StatusCode / 100
@@ -48,25 +45,6 @@ func logResp(ctx context.Context, resp *http.Response, req *http.Request) {
return
}

// Log bearer token iat and exp claims if we hit 401s. This is purely for
// debugging purposes and will be removed in the future.
if resp.StatusCode == http.StatusUnauthorized {
errs := []any{"graph api error: " + resp.Status}

// As per MSFT docs, the token may have a special format and may not always
// validate as a JWT. Hence log token lifetime in a best effort manner only.
iat, exp, err := getTokenLifetime(ctx, req)
if err != nil {
errs = append(errs, " getting token lifetime: ", err)
}

log.With("response", getRespDump(ctx, resp, logBody)).
With("token issued at", iat, "token expires at", exp).
Error(errs...)

return
}

// Log api calls according to api debugging configurations.
switch respClass {
case 2:
@@ -113,32 +91,3 @@ func getReqCtx(req *http.Request) context.Context {
"url", logURL,
"request_content_len", req.ContentLength)
}

// GetTokenLifetime extracts the JWT token embedded in the request and returns
// the token's issue and expiration times. The token is expected to be in the
// "Authorization" header, with a "Bearer " prefix. If the token is not present
// or is malformed, an error is returned.
func getTokenLifetime(
ctx context.Context,
req *http.Request,
) (time.Time, time.Time, error) {
if req == nil {
return time.Time{}, time.Time{}, clues.New("nil request")
}

// Don't throw an error if auth header is absent. This is to prevent
// unnecessary noise in the logs for requests served by the http requestor
// client. These requests may be preauthenticated and may not carry auth headers.
rawToken := req.Header.Get("Authorization")
if len(rawToken) == 0 {
return time.Time{}, time.Time{}, nil
}

// Strip the "Bearer " prefix from the token. This prefix is guaranteed to be
// present as per msft docs. But even if it's not, the jwt lib will handle
// malformed tokens gracefully and return an error.
rawToken = strings.TrimPrefix(rawToken, "Bearer ")
iat, exp, err := jwt.GetJWTLifetime(ctx, rawToken)

return iat, exp, clues.Stack(err).OrNil()
}
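As the comments above describe, the token-lifetime helper treats a missing Authorization header as benign and strips an optional "Bearer " prefix before decoding. A minimal stand-alone sketch of just that extraction step (hypothetical helper name, not the repo's code):

package example

import (
	"net/http"
	"strings"
)

// extractBearerToken returns the raw token from the Authorization header.
// An absent header is not an error: it returns "" so the caller can skip
// token-lifetime logging for preauthenticated requests.
func extractBearerToken(req *http.Request) string {
	if req == nil {
		return ""
	}

	raw := req.Header.Get("Authorization")
	if raw == "" {
		return ""
	}

	return strings.TrimPrefix(raw, "Bearer ")
}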
@@ -130,7 +130,7 @@ func (mw *LoggingMiddleware) Intercept(
"resp_status_code", resp.StatusCode,
"resp_content_len", resp.ContentLength)

logResp(ctx, resp, req)
logResp(ctx, resp)

return resp, err
}
@@ -505,95 +505,3 @@ func (suite *MiddlewareUnitSuite) TestLimiterConsumption() {
})
}
}

const (
// Raw test token valid for 100 years.
rawToken = "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9." +
"eyJuYmYiOiIxNjkxODE5NTc5IiwiZXhwIjoiMzk0NTUyOTE3OSIsImVuZHBvaW50dXJsTGVuZ3RoIjoiMTYw" +
"IiwiaXNsb29wYmFjayI6IlRydWUiLCJ2ZXIiOiJoYXNoZWRwcm9vZnRva2VuIiwicm9sZXMiOiJhbGxmaWxl" +
"cy53cml0ZSBhbGxzaXRlcy5mdWxsY29udHJvbCBhbGxwcm9maWxlcy5yZWFkIiwidHQiOiIxIiwiYWxnIjoi" +
"SFMyNTYifQ" +
".signature"
)

// Tests getTokenLifetime
func (suite *MiddlewareUnitSuite) TestGetTokenLifetime() {
table := []struct {
name string
request *http.Request
expectErr assert.ErrorAssertionFunc
}{
{
name: "nil request",
request: nil,
expectErr: assert.Error,
},
// Test that we don't throw an error if auth header is absent.
// This is to prevent unnecessary noise in logs for requestor http client.
{
name: "no authorization header",
request: &http.Request{
Header: http.Header{},
},
expectErr: assert.NoError,
},
{
name: "well formed auth header with token",
request: &http.Request{
Header: http.Header{
"Authorization": []string{"Bearer " + rawToken},
},
},
expectErr: assert.NoError,
},
{
name: "Missing Bearer prefix but valid token",
request: &http.Request{
Header: http.Header{
"Authorization": []string{rawToken},
},
},
expectErr: assert.NoError,
},
{
name: "invalid token",
request: &http.Request{
Header: http.Header{
"Authorization": []string{"Bearer " + "invalid"},
},
},
expectErr: assert.Error,
},
{
name: "valid prefix but empty token",
request: &http.Request{
Header: http.Header{
"Authorization": []string{"Bearer "},
},
},
expectErr: assert.Error,
},
{
name: "Invalid prefix but valid token",
request: &http.Request{
Header: http.Header{
"Authorization": []string{"Bearer" + rawToken},
},
},
expectErr: assert.Error,
},
}

for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()

ctx, flush := tester.NewContext(t)
defer flush()

// iat, exp specific tests are in jwt package.
_, _, err := getTokenLifetime(ctx, test.request)
test.expectErr(t, err, clues.ToCore(err))
})
}
}
@@ -185,26 +185,19 @@ func appendIfErr(errs []error, err error) []error {
return append(errs, err)
}

// EvaluateMailboxError checks whether the provided error can be interpreted
// as "user does not have a mailbox", or whether it is some other error. If
// the former (no mailbox), returns nil, otherwise returns an error.
func EvaluateMailboxError(err error) error {
if err == nil {
return nil
}

// IsMailboxErrorIgnorable checks whether the provided error (which is assumed to
// have originated from a call to retrieve a user's mailbox) can be safely ignored
// or not. In particular, the igorable cases are when the mail folder is not found
// and when an authentication issue occurs.
func IsMailboxErrorIgnorable(err error) bool {
// must occur before MailFolderNotFound, due to overlapping cases.
if errors.Is(err, core.ErrResourceNotAccessible) {
return err
if errors.Is(err, core.ErrResourceNotAccessible) || errors.Is(err, graph.ErrResourceNotFound) {
return false
}

if errors.Is(err, core.ErrNotFound) ||
return err != nil && (errors.Is(err, core.ErrNotFound) ||
graph.IsErrExchangeMailFolderNotFound(err) ||
graph.IsErrAuthenticationError(err) {
return nil
}

return err
graph.IsErrAuthenticationError(err))
}

// IsAnyErrMailboxNotFound inspects the secondary errors inside MailboxInfo and
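A short sketch of how the bool-returning form above is meant to be consumed (hypothetical caller; the predicate parameter stands in for IsMailboxErrorIgnorable so no repo import paths are assumed):

package example

// mailboxInfoOrSkip fetches mailbox info and, when the error is one of the
// ignorable cases (mail folder not found, authentication issues), reports a
// skip instead of failing; all other errors still surface to the caller.
func mailboxInfoOrSkip(
	getInfo func() error,
	isIgnorable func(error) bool, // e.g. IsMailboxErrorIgnorable from this change
) (skipped bool, err error) {
	if err := getInfo(); err != nil {
		if isIgnorable(err) {
			return true, nil
		}

		return false, err
	}

	return false, nil
}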
@@ -70,54 +70,47 @@ func (suite *UsersUnitSuite) TestEvaluateMailboxError() {
table := []struct {
name string
err error
expect func(t *testing.T, err error)
expect assert.BoolAssertionFunc
}{
{
name: "nil",
err: nil,
expect: func(t *testing.T, err error) {
assert.NoError(t, err, clues.ToCore(err))
},
name: "nil",
err: nil,
expect: assert.False,
},
{
name: "mail inbox err - user not found",
err: core.ErrNotFound,
expect: func(t *testing.T, err error) {
assert.NoError(t, err, clues.ToCore(err))
},
name: "user not found - corso sentinel",
err: graph.ErrResourceNotFound,
expect: assert.False,
},
{
name: "mail inbox err - resourceLocked",
err: core.ErrResourceNotAccessible,
expect: func(t *testing.T, err error) {
assert.ErrorIs(t, err, core.ErrResourceNotAccessible, clues.ToCore(err))
},
name: "not found - corso sentinel",
err: core.ErrNotFound,
expect: assert.True,
},
{
name: "mail inbox err - user not found",
err: graphTD.ODataErr(string(graph.MailboxNotEnabledForRESTAPI)),
expect: func(t *testing.T, err error) {
assert.NoError(t, err, clues.ToCore(err))
},
name: "mailbox not enabled - graph api error",
err: graphTD.ODataErr(string(graph.MailboxNotEnabledForRESTAPI)),
expect: assert.True,
},
{
name: "mail inbox err - authenticationError",
err: graphTD.ODataErr(string(graph.AuthenticationError)),
expect: func(t *testing.T, err error) {
assert.NoError(t, err, clues.ToCore(err))
},
name: "resourceLocked",
err: core.ErrResourceNotAccessible,
expect: assert.False,
},
{
name: "mail inbox err - other error",
err: graphTD.ODataErrWithMsg("somecode", "somemessage"),
expect: func(t *testing.T, err error) {
assert.Error(t, err, clues.ToCore(err))
},
name: "authenticationError",
err: graphTD.ODataErr(string(graph.AuthenticationError)),
expect: assert.True,
},
{
name: "other error",
err: graphTD.ODataErrWithMsg("somecode", "somemessage"),
expect: assert.False,
},
}
for _, test := range table {
suite.Run(test.name, func() {
test.expect(suite.T(), EvaluateMailboxError(test.err))
test.expect(suite.T(), IsMailboxErrorIgnorable(test.err))
})
}
}
@@ -76,11 +76,20 @@ func (suite *siteIntegrationSuite) TestSites_GetByID() {
ctx, flush := tester.NewContext(t)
defer flush()

site, err := suite.cli.SiteByID(ctx, suite.m365.Site.ID)
require.NoError(t, err, clues.ToCore(err))
assert.NotEmpty(t, site.WebURL)
assert.NotEmpty(t, site.ID)
assert.NotEmpty(t, site.OwnerType)
sites, err := suite.cli.Sites(ctx, fault.New(true))
assert.NoError(t, err, clues.ToCore(err))
assert.NotEmpty(t, sites)

for _, s := range sites {
suite.Run("site_"+s.ID, func() {
t := suite.T()
site, err := suite.cli.SiteByID(ctx, s.ID)
require.NoError(t, err, clues.ToCore(err))
assert.NotEmpty(t, site.WebURL)
assert.NotEmpty(t, site.ID)
assert.NotEmpty(t, site.OwnerType)
})
}
}

// ---------------------------------------------------------------------------
@@ -21,8 +21,7 @@ application to connect to your *M365 tenant* and transfer data during backup and
## Corso concepts {#corso-concepts}

* **Repository** refers to the storage location where Corso securely and efficiently stores encrypted *backups* of your
*M365 Service*'s data. See [Repositories](../repos) for more information.
*M365 Services* data. See [Repositories](../repos) for more information.

* **Backup** is a copy of a resource of your *M365 Service*'s data to be used for restores in case of deletion, loss,
or corruption of the original data. Corso performs backups incrementally, and each backup only captures data that has
changed between backup iterations.
* **Backup** is a copy of your *M365 Services* data to be used for restores in case of deletion, loss, or corruption of the
original data. Corso performs backups incrementally, and each backup only captures data that has changed between backup iterations.
@@ -79,23 +79,12 @@ const config = {
srcDark: 'img/corso_horizontal_logo_white.svg',
},
items: [
{
type: 'doc',
docId: 'quickstart',
position: 'left',
label: 'Quick Start',
},
{
type: 'doc',
docId: 'intro',
position: 'left',
label: 'Docs',
},
{
href: 'https://discord.gg/63DTTSnuhT',
label: 'Community',
position: 'left',
},
{
to: '/blog',
label: 'Blog',
@@ -117,12 +106,30 @@ const config = {
},
links: [
{
title: 'Open Source',
title: 'Resources',
items: [
{
label: 'Docs',
to: '/docs/intro',
},
],
},
{
title: 'Community',
items: [
{
label: 'Discord',
href: 'https://discord.gg/63DTTSnuhT',
},
{
label: 'Twitter',
href: 'https://twitter.com/CorsoBackup',
},
],
},
{
title: 'More',
items: [
{
label: 'Blog',
to: '/blog',
@@ -131,26 +138,6 @@ const config = {
label: 'GitHub',
href: 'https://github.com/alcionai/corso',
},
{
label: 'Corso Discord',
href: 'https://discord.gg/63DTTSnuhT',
},
],
},
{
title: ' ',
},
{
title: 'Alcion, Powered by Corso',
items: [
{
label: 'Backup as a Service',
href: 'https://www.alcion.ai',
},
{
label: 'Alcion Discord',
href: 'https://www.alcion.ai/discord',
},
],
},
],
website/package-lock.json (generated, 1811 lines changed): file diff suppressed because it is too large.
@@ -15,7 +15,7 @@
},
"dependencies": {
"@docusaurus/core": "3.1.1",
"@docusaurus/plugin-google-gtag": "^3.5.1",
"@docusaurus/plugin-google-gtag": "^3.1.1",
"@docusaurus/preset-classic": "3.1.1",
"@loadable/component": "^5.16.3",
"@mdx-js/react": "^3.0.0",
@@ -26,17 +26,17 @@
"feather-icons": "^4.29.1",
"jarallax": "^2.2.0",
"mdx-mermaid": "^2.0.0",
"mermaid": "^10.9.0",
"mermaid": "^10.8.0",
"prism-react-renderer": "^2.1.0",
"react": "^18.2.0",
"react-dom": "^18.3.0",
"sass": "^1.79.1",
"react-dom": "^18.2.0",
"sass": "^1.70.0",
"tiny-slider": "^2.9.4",
"tw-elements": "1.0.0-alpha13",
"wow.js": "^1.2.2"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "3.5.1",
"@docusaurus/module-type-aliases": "3.1.1",
"@iconify/react": "^4.1.1",
"autoprefixer": "^10.4.17",
"postcss": "^8.4.33",
@@ -33,7 +33,7 @@ export default function CTA() {
<br /> Microsoft 365 Data!
</h3>
<h6 className="text-white/50 text-lg font-semibold">
Corso (Free and Open Source) or <br/> Alcion (Managed Backup as a Service)
Corso is Free and Open Source
</h6>
</div>
</div>
@@ -41,24 +41,15 @@ export default function CTA() {
<div className="mt-8">
<div className="section-title text-md-start">
<p className="text-white/50 max-w-xl mx-auto mb-2">
Whether you want to self-host or use a managed service, we have you covered!
Follow our quick-start guide to start protecting your
business-critical Microsoft 365 data in just a few
minutes.
</p>
<a
href="https://www.alcion.ai/"
className="!text-white !no-underline flex flex-row items-center !hover:text-white"
>
Try Alcion{" "}
<Icon
icon="uim:angle-right-b"
className="align-middle"
/>
</a>
<p></p>
<a
href="docs/quickstart/"
className="!text-white !no-underline flex flex-row items-center !hover:text-white"
>
Corso Quickstart{" "}
Get Started{" "}
<Icon
icon="uim:angle-right-b"
className="align-middle"
@@ -34,17 +34,10 @@ export default function Hero() {

<div className="mt-12 !z-10 mb-6 flex flex-col 2xs:flex-row items-center justify-center 2xs:space-y-0 space-y-4 2xs:space-x-4">
<a
href="https://github.com/alcionai/corso/releases" target="_blank"
href="../docs/quickstart/"
className="text-2xl !z-10 !no-underline hover:text-white py-2 px-6 font-bold btn bg-indigo-800 hover:bg-indigo-900 border-indigo-800 hover:border-indigo-900 text-white rounded-md"
>
Download
</a>

<a
href="https://www.alcion.ai/"
className="text-2xl !z-10 !no-underline hover:text-white py-2 px-6 font-bold btn bg-indigo-200 hover:bg-indigo-400 border-indigo-600 hover:border-indigo-800 text-blue rounded-md"
>
Try Alcion (Corso SaaS)
Quick Start
</a>
</div>
@@ -213,9 +213,9 @@ export default function KeyLoveFAQ() {
Community
</h3>
<p className="text-slate-400">
The Corso community provides a venue for Microsoft 365 admins to share and
The Corso community provides a venue for M365 admins to share and
learn about the importance of data protection as well as best
practices around Microsoft 365 secure configuration and compliance
practices around M365 secure configuration and compliance
management.
</p>
<ul className="list-none text-slate-400 mt-4">
@@ -279,7 +279,8 @@ export default function KeyLoveFAQ() {
</h3>
<p className="text-slate-400">
Corso provides secure data backup that protects customers against
accidental data loss and service provider downtime.
accidental data loss, service provider downtime, and malicious
threats including ransomware attacks.
</p>
<ul className="list-none text-slate-400 mt-4">
<li className="mb-1 flex">
@@ -330,7 +331,7 @@ export default function KeyLoveFAQ() {
Robust Backups
</h3>
<p className="text-slate-400">
Corso, purpose-built for Microsoft 365 protection, provides easy-to-use
Corso, purpose-built for M365 protection, provides easy-to-use
comprehensive backup and restore workflows that reduces backup
time, improve time-to-recovery, reduce admin overhead, and replace
unreliable scripts or workarounds.
@@ -341,7 +342,7 @@ export default function KeyLoveFAQ() {
className="text-indigo-600 text-xl mr-2"
icon="material-symbols:check-circle-outline"
/>{" "}
Constantly updated Microsoft 365 Graph Data engine
Constantly updated M365 Graph Data engine
</li>
<li className="mb-1 flex">
<Icon
@@ -461,7 +462,7 @@ export default function KeyLoveFAQ() {

<div className="md:col-span-6">
<div className="accordion space-y-3" id="accordionExample">
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
<h2
className="accordion-header mb-0 !cursor-pointer font-semibold"
id="headingOne"
@@ -474,7 +475,7 @@ export default function KeyLoveFAQ() {
aria-expanded="false"
aria-controls="collapseOne"
>
<span>How do I choose between Corso and Alcion, powered by Corso?</span>
<span>What platforms does Corso run on?</span>
</button>
</h2>
<div
@@ -485,7 +486,8 @@ export default function KeyLoveFAQ() {
>
<div className="accordion-body p-5">
<p className="text-slate-400 !visible dark:text-gray-400">
Corso is a good fit for basic backup while Alcion is a better fit if you need increased reliability, security, and support.
Corso has both native binaries and container images for
Windows, Linux, and macOS.
</p>
</div>
</div>
@@ -503,7 +505,9 @@ export default function KeyLoveFAQ() {
aria-expanded="false"
aria-controls="collapse2"
>
<span>What platforms does Corso run on?</span>
<span>
What Microsoft 365 services can I backup using Corso?
</span>
</button>
</h2>
<div
@@ -514,8 +518,8 @@ export default function KeyLoveFAQ() {
>
<div className="accordion-body p-5">
<p className="text-slate-400 !visible dark:text-gray-400">
Corso has both native binaries and container images for
Windows, Linux, and macOS.
Corso currently supports OneDrive, Exchange, SharePoint,
and Teams.
</p>
</div>
</div>
@@ -533,9 +537,7 @@ export default function KeyLoveFAQ() {
aria-expanded="false"
aria-controls="collapse3"
>
<span>
What Microsoft 365 services can I backup using Corso?
</span>
<span>What object storage does Corso support?</span>
</button>
</h2>
<div
@@ -543,36 +545,6 @@ export default function KeyLoveFAQ() {
className="accordion-collapse collapse"
aria-labelledby="heading3"
data-bs-parent="#accordionExample"
>
<div className="accordion-body p-5">
<p className="text-slate-400 !visible dark:text-gray-400">
Corso currently supports OneDrive, Exchange, SharePoint,
and Teams.
</p>
</div>
</div>
</div>
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
<h2
className="accordion-header mb-0 !cursor-pointer font-semibold"
id="heading4"
>
<button
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
type="button"
data-bs-toggle="collapse"
data-bs-target="#collapse4"
aria-expanded="false"
aria-controls="collapse4"
>
<span>What object storage does Corso support?</span>
</button>
</h2>
<div
id="collapse4"
className="accordion-collapse collapse"
aria-labelledby="heading4"
data-bs-parent="#accordionExample"
>
<div className="accordion-body p-5">
<p className="text-slate-400 dark:text-gray-400 !visible">
@@ -587,23 +559,23 @@ export default function KeyLoveFAQ() {
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
<h2
className="accordion-header mb-0 font-semibold"
id="heading5"
id="heading4"
>
<button
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
type="button"
data-bs-toggle="collapse"
data-bs-target="#collapse5"
data-bs-target="#collapse4"
aria-expanded="false"
aria-controls="collapse5"
aria-controls="collapse4"
>
<span>How can I get help for Corso?</span>
</button>
</h2>
<div
id="collapse5"
id="collapse4"
className="accordion-collapse collapse"
aria-labelledby="heading5"
aria-labelledby="heading4"
data-bs-parent="#accordionExample"
>
<div className="accordion-body p-5">
@@ -633,23 +605,23 @@ export default function KeyLoveFAQ() {
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
<h2
className="accordion-header mb-0 !cursor-pointer font-semibold"
id="heading6"
id="heading5"
>
<button
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
type="button"
data-bs-toggle="collapse"
data-bs-target="#collapse6"
data-bs-target="#collapse5"
aria-expanded="false"
aria-controls="collapse6"
aria-controls="collapse5"
>
<span>What is Corso's open-source license?</span>
</button>
</h2>
<div
id="collapse6"
id="collapse5"
className="accordion-collapse collapse"
aria-labelledby="heading6"
aria-labelledby="heading5"
data-bs-parent="#accordionExample"
>
<div className="accordion-body p-5">
@@ -663,23 +635,23 @@ export default function KeyLoveFAQ() {
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
<h2
className="accordion-header mb-0 !cursor-pointer font-semibold"
id="heading7"
id="heading6"
>
<button
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
type="button"
data-bs-toggle="collapse"
data-bs-target="#collapse7"
data-bs-target="#collapse6"
aria-expanded="false"
aria-controls="collapse7"
aria-controls="collapse6"
>
<span>How do I request a new feature?</span>
</button>
</h2>
<div
id="collapse7"
id="collapse6"
className="accordion-collapse collapse"
aria-labelledby="heading7"
aria-labelledby="heading6"
data-bs-parent="#accordionExample"
>
<div className="accordion-body p-5">
website/static/img/corso_horizontal_logo.svg (96 lines changed, Normal file → Executable file): file diff suppressed because one or more lines are too long. Before: 23 KiB; after: 3.5 KiB.
website/static/img/corso_horizontal_logo_white.svg (96 lines changed, Normal file → Executable file):
@@ -1,95 +1 @@
The Illustrator-exported, multi-line SVG source (XML declaration, style and clip-path definitions, and the white Corso logo path data) is replaced by a single-line, minified SVG carrying the same artwork. Before: 8.2 KiB; after: 3.5 KiB.